#!/usr/bin/env perl

# This program is part of Percona Toolkit: http://www.percona.com/software/
# See "COPYRIGHT, LICENSE, AND WARRANTY" at the end of this file for legal
# notices and disclaimers.

use strict;
use warnings FATAL => 'all';

# This tool is "fat-packed": most of its dependent modules are embedded
# in this file. Setting %INC to this file for each module makes Perl aware
# of this so it will not try to load the module from @INC. See the tool's
# documentation for a full list of dependencies.
BEGIN {
   $INC{$_} = __FILE__ for map { (my $pkg = "$_.pm") =~ s!::!/!g; $pkg } (qw(
      Percona::Toolkit
      MasterSlave
      OptionParser
      Lmo::Utils
      Lmo::Meta
      Lmo::Object
      Lmo::Types
      Lmo
      DSNParser
      Daemon
      Quoter
      TableParser
      Retry
      Transformers
      HTTP::Micro
      VersionCheck
      VersionParser
   ));
}
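
# NOTE (editor's annotation, not part of the upstream source): the BEGIN
# block above works because "require" consults %INC before searching @INC;
# once a module's key maps to this file, Perl considers it already loaded.
# A minimal sketch of the same trick with a hypothetical embedded module:
#
#   BEGIN { $INC{'My/Embedded.pm'} = __FILE__; }
#   { package My::Embedded; sub greet { return "hello" } }
#   require My::Embedded;            # satisfied from %INC, no disk lookup
#   print My::Embedded::greet(), "\n";
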
# ###########################################################################
# Percona::Toolkit package
# This package is a copy without comments from the original. The original
# with comments and its test file can be found in the Bazaar repository at,
#   lib/Percona/Toolkit.pm
#   t/lib/Percona/Toolkit.t
# See https://launchpad.net/percona-toolkit for more information.
# ###########################################################################
{
package Percona::Toolkit;

our $VERSION = '3.0.5';

use strict;
use warnings FATAL => 'all';
use English qw(-no_match_vars);
use constant PTDEBUG => $ENV{PTDEBUG} || 0;

use Carp qw(carp cluck);
use Data::Dumper qw();

require Exporter;
our @ISA       = qw(Exporter);
our @EXPORT_OK = qw(
   have_required_args
   Dumper
   _d
);

sub have_required_args {
   my ($args, @required_args) = @_;
   my $have_required_args = 1;
   foreach my $arg ( @required_args ) {
      if ( !defined $args->{$arg} ) {
         $have_required_args = 0;
         carp "Argument $arg is not defined";
      }
   }
   cluck unless $have_required_args; # print backtrace
   return $have_required_args;
}

sub Dumper {
   local $Data::Dumper::Indent    = 1;
   local $Data::Dumper::Sortkeys  = 1;
   local $Data::Dumper::Quotekeys = 0;
   Data::Dumper::Dumper(@_);
}

sub _d {
   my ($package, undef, $line) = caller 0;
   @_ = map { (my $temp = $_) =~ s/\n/\n# /g; $temp; }
        map { defined $_ ? $_ : 'undef' }
        @_;
   print STDERR "# $package:$line $PID ", join(' ', @_), "\n";
}

1;
}
# ###########################################################################
# End Percona::Toolkit package
# ###########################################################################

# ###########################################################################
|
|
# MasterSlave package
|
|
# This package is a copy without comments from the original. The original
|
|
# with comments and its test file can be found in the Bazaar repository at,
|
|
# lib/MasterSlave.pm
|
|
# t/lib/MasterSlave.t
|
|
# See https://launchpad.net/percona-toolkit for more information.
|
|
# ###########################################################################
|
|
{
|
|
package MasterSlave;
|
|
|
|
use strict;
|
|
use warnings FATAL => 'all';
|
|
use English qw(-no_match_vars);
|
|
use constant PTDEBUG => $ENV{PTDEBUG} || 0;
|
|
|
|
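# NOTE (editor's annotation, not part of the upstream source):
# check_recursion_method() validates the --recursion-method list.  The only
# multi-method combination it accepts is "processlist" together with "hosts";
# a single method may also be "none", "cluster", or "dsn=<DSN of a table
# listing the replicas>".  For example (illustrative only):
#
#   MasterSlave::check_recursion_method([qw(processlist hosts)]);  # ok
#   MasterSlave::check_recursion_method(['dsn=D=percona,t=dsns']); # ok
#   MasterSlave::check_recursion_method([qw(processlist none)]);   # dies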
sub check_recursion_method {
|
|
my ($methods) = @_;
|
|
if ( @$methods != 1 ) {
|
|
if ( grep({ !m/processlist|hosts/i } @$methods)
|
|
&& $methods->[0] !~ /^dsn=/i )
|
|
{
|
|
die "Invalid combination of recursion methods: "
|
|
. join(", ", map { defined($_) ? $_ : 'undef' } @$methods) . ". "
|
|
. "Only hosts and processlist may be combined.\n"
|
|
}
|
|
}
|
|
else {
|
|
my ($method) = @$methods;
|
|
die "Invalid recursion method: " . ( $method || 'undef' )
|
|
unless $method && $method =~ m/^(?:processlist$|hosts$|none$|cluster$|dsn=)/i;
|
|
}
|
|
}
|
|
|
|
sub new {
|
|
my ( $class, %args ) = @_;
|
|
my @required_args = qw(OptionParser DSNParser Quoter);
|
|
foreach my $arg ( @required_args ) {
|
|
die "I need a $arg argument" unless $args{$arg};
|
|
}
|
|
my $self = {
|
|
%args,
|
|
replication_thread => {},
|
|
};
|
|
return bless $self, $class;
|
|
}
|
|
|
|
sub get_slaves {
|
|
my ($self, %args) = @_;
|
|
my @required_args = qw(make_cxn);
|
|
foreach my $arg ( @required_args ) {
|
|
die "I need a $arg argument" unless $args{$arg};
|
|
}
|
|
my ($make_cxn) = @args{@required_args};
|
|
|
|
my $slaves = [];
|
|
my $dp = $self->{DSNParser};
|
|
my $methods = $self->_resolve_recursion_methods($args{dsn});
|
|
|
|
return $slaves unless @$methods;
|
|
|
|
if ( grep { m/processlist|hosts/i } @$methods ) {
|
|
my @required_args = qw(dbh dsn);
|
|
foreach my $arg ( @required_args ) {
|
|
die "I need a $arg argument" unless $args{$arg};
|
|
}
|
|
my ($dbh, $dsn) = @args{@required_args};
|
|
my $o = $self->{OptionParser};
|
|
|
|
$self->recurse_to_slaves(
|
|
{ dbh => $dbh,
|
|
dsn => $dsn,
|
|
slave_user => $o->got('slave-user') ? $o->get('slave-user') : '',
|
|
slave_password => $o->got('slave-password') ? $o->get('slave-password') : '',
|
|
callback => sub {
|
|
my ( $dsn, $dbh, $level, $parent ) = @_;
|
|
return unless $level;
|
|
PTDEBUG && _d('Found slave:', $dp->as_string($dsn));
|
|
my $slave_dsn = $dsn;
|
|
if ($o->got('slave-user')) {
|
|
$slave_dsn->{u} = $o->get('slave-user');
|
|
PTDEBUG && _d("Using slave user ".$o->get('slave-user')." on ".$slave_dsn->{h}.":".$slave_dsn->{P});
|
|
}
|
|
if ($o->got('slave-password')) {
|
|
$slave_dsn->{p} = $o->get('slave-password');
|
|
PTDEBUG && _d("Slave password set");
|
|
}
|
|
push @$slaves, $make_cxn->(dsn => $slave_dsn, dbh => $dbh);
|
|
return;
|
|
},
|
|
}
|
|
);
|
|
} elsif ( $methods->[0] =~ m/^dsn=/i ) {
|
|
(my $dsn_table_dsn = join ",", @$methods) =~ s/^dsn=//i;
|
|
$slaves = $self->get_cxn_from_dsn_table(
|
|
%args,
|
|
dsn_table_dsn => $dsn_table_dsn,
|
|
);
|
|
}
|
|
elsif ( $methods->[0] =~ m/none/i ) {
|
|
PTDEBUG && _d('Not getting to slaves');
|
|
}
|
|
else {
|
|
die "Unexpected recursion methods: @$methods";
|
|
}
|
|
|
|
return $slaves;
|
|
}
|
|
|
|
sub _resolve_recursion_methods {
|
|
my ($self, $dsn) = @_;
|
|
my $o = $self->{OptionParser};
|
|
if ( $o->got('recursion-method') ) {
|
|
return $o->get('recursion-method');
|
|
}
|
|
elsif ( $dsn && ($dsn->{P} || 3306) != 3306 ) {
|
|
PTDEBUG && _d('Port number is non-standard; using only hosts method');
|
|
return [qw(hosts)];
|
|
}
|
|
else {
|
|
return $o->get('recursion-method');
|
|
}
|
|
}
|
|
|
|
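# NOTE (editor's annotation, not part of the upstream source):
# recurse_to_slaves() connects to the given server, invokes the callback for
# every server found (level 0 is the starting server, for which the callback
# in get_slaves() returns early), then descends into each discovered replica
# until --recurse levels have been walked.  Loops and stale SHOW SLAVE HOSTS
# entries are avoided by tracking @@SERVER_ID values in
# $args->{server_ids_seen} and by comparing each replica's actual server ID
# with the one its master reported.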
sub recurse_to_slaves {
|
|
my ( $self, $args, $level ) = @_;
|
|
$level ||= 0;
|
|
my $dp = $self->{DSNParser};
|
|
my $recurse = $args->{recurse} || $self->{OptionParser}->get('recurse');
|
|
my $dsn = $args->{dsn};
|
|
my $slave_user = $args->{slave_user} || '';
|
|
my $slave_password = $args->{slave_password} || '';
|
|
|
|
my $methods = $self->_resolve_recursion_methods($dsn);
|
|
PTDEBUG && _d('Recursion methods:', @$methods);
|
|
if ( lc($methods->[0]) eq 'none' ) {
|
|
PTDEBUG && _d('Not recursing to slaves');
|
|
return;
|
|
}
|
|
|
|
my $slave_dsn = $dsn;
|
|
if ($slave_user) {
|
|
$slave_dsn->{u} = $slave_user;
|
|
PTDEBUG && _d("Using slave user $slave_user on ".$slave_dsn->{h}.":".$slave_dsn->{P});
|
|
}
|
|
if ($slave_password) {
|
|
$slave_dsn->{p} = $slave_password;
|
|
PTDEBUG && _d("Slave password set");
|
|
}
|
|
|
|
my $dbh;
|
|
eval {
|
|
$dbh = $args->{dbh} || $dp->get_dbh(
|
|
$dp->get_cxn_params($slave_dsn), { AutoCommit => 1 });
|
|
PTDEBUG && _d('Connected to', $dp->as_string($slave_dsn));
|
|
};
|
|
if ( $EVAL_ERROR ) {
|
|
print STDERR "Cannot connect to ", $dp->as_string($slave_dsn), "\n"
|
|
or die "Cannot print: $OS_ERROR";
|
|
return;
|
|
}
|
|
|
|
my $sql = 'SELECT @@SERVER_ID';
|
|
PTDEBUG && _d($sql);
|
|
my ($id) = $dbh->selectrow_array($sql);
|
|
PTDEBUG && _d('Working on server ID', $id);
|
|
my $master_thinks_i_am = $dsn->{server_id};
|
|
if ( !defined $id
|
|
|| ( defined $master_thinks_i_am && $master_thinks_i_am != $id )
|
|
|| $args->{server_ids_seen}->{$id}++
|
|
) {
|
|
PTDEBUG && _d('Server ID seen, or not what master said');
|
|
if ( $args->{skip_callback} ) {
|
|
$args->{skip_callback}->($dsn, $dbh, $level, $args->{parent});
|
|
}
|
|
return;
|
|
}
|
|
|
|
$args->{callback}->($dsn, $dbh, $level, $args->{parent});
|
|
|
|
if ( !defined $recurse || $level < $recurse ) {
|
|
|
|
my @slaves =
|
|
grep { !$_->{master_id} || $_->{master_id} == $id } # Only my slaves.
|
|
$self->find_slave_hosts($dp, $dbh, $dsn, $methods);
|
|
|
|
foreach my $slave ( @slaves ) {
|
|
PTDEBUG && _d('Recursing from',
|
|
$dp->as_string($dsn), 'to', $dp->as_string($slave));
|
|
$self->recurse_to_slaves(
   { %$args, dsn => $slave, dbh => undef, parent => $dsn,
     slave_user => $slave_user, slave_password => $slave_password },
   $level + 1 );
|
|
}
|
|
}
|
|
}
|
|
|
|
sub find_slave_hosts {
|
|
my ( $self, $dsn_parser, $dbh, $dsn, $methods ) = @_;
|
|
|
|
PTDEBUG && _d('Looking for slaves on', $dsn_parser->as_string($dsn),
|
|
'using methods', @$methods);
|
|
|
|
my @slaves;
|
|
METHOD:
|
|
foreach my $method ( @$methods ) {
|
|
my $find_slaves = "_find_slaves_by_$method";
|
|
PTDEBUG && _d('Finding slaves with', $find_slaves);
|
|
@slaves = $self->$find_slaves($dsn_parser, $dbh, $dsn);
|
|
last METHOD if @slaves;
|
|
}
|
|
|
|
PTDEBUG && _d('Found', scalar(@slaves), 'slaves');
|
|
return @slaves;
|
|
}
|
|
|
|
sub _find_slaves_by_processlist {
|
|
my ( $self, $dsn_parser, $dbh, $dsn ) = @_;
|
|
|
|
my @slaves = map {
|
|
my $slave = $dsn_parser->parse("h=$_", $dsn);
|
|
$slave->{source} = 'processlist';
|
|
$slave;
|
|
}
|
|
grep { $_ }
|
|
map {
|
|
my ( $host ) = $_->{host} =~ m/^([^:]+):/;
|
|
if ( $host eq 'localhost' ) {
|
|
$host = '127.0.0.1'; # Replication never uses sockets.
|
|
}
|
|
$host;
|
|
} $self->get_connected_slaves($dbh);
|
|
|
|
return @slaves;
|
|
}
|
|
|
|
sub _find_slaves_by_hosts {
|
|
my ( $self, $dsn_parser, $dbh, $dsn ) = @_;
|
|
|
|
my @slaves;
|
|
my $sql = 'SHOW SLAVE HOSTS';
|
|
PTDEBUG && _d($dbh, $sql);
|
|
@slaves = @{$dbh->selectall_arrayref($sql, { Slice => {} })};
|
|
|
|
if ( @slaves ) {
|
|
PTDEBUG && _d('Found some SHOW SLAVE HOSTS info');
|
|
@slaves = map {
|
|
my %hash;
|
|
@hash{ map { lc $_ } keys %$_ } = values %$_;
|
|
my $spec = "h=$hash{host},P=$hash{port}"
|
|
. ( $hash{user} ? ",u=$hash{user}" : '')
|
|
. ( $hash{password} ? ",p=$hash{password}" : '');
|
|
my $dsn = $dsn_parser->parse($spec, $dsn);
|
|
$dsn->{server_id} = $hash{server_id};
|
|
$dsn->{master_id} = $hash{master_id};
|
|
$dsn->{source} = 'hosts';
|
|
$dsn;
|
|
} @slaves;
|
|
}
|
|
|
|
return @slaves;
|
|
}
|
|
|
|
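# NOTE (editor's annotation, not part of the upstream source):
# get_connected_slaves() first verifies via SHOW GRANTS that the current
# user has PROCESS (or ALL PRIVILEGES ON *.*), then returns the rows of
# SHOW FULL PROCESSLIST whose Command is "Binlog Dump", i.e. the threads
# serving connected replicas, with their hash keys lowercased.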
sub get_connected_slaves {
|
|
my ( $self, $dbh ) = @_;
|
|
|
|
my $show = "SHOW GRANTS FOR ";
|
|
my $user = 'CURRENT_USER()';
|
|
my $sql = $show . $user;
|
|
PTDEBUG && _d($dbh, $sql);
|
|
|
|
my $proc;
|
|
eval {
|
|
$proc = grep {
|
|
m/ALL PRIVILEGES.*?\*\.\*|PROCESS/
|
|
} @{$dbh->selectcol_arrayref($sql)};
|
|
};
|
|
if ( $EVAL_ERROR ) {
|
|
|
|
if ( $EVAL_ERROR =~ m/no such grant defined for user/ ) {
|
|
PTDEBUG && _d('Retrying SHOW GRANTS without host; error:',
|
|
$EVAL_ERROR);
|
|
($user) = split('@', $user);
|
|
$sql = $show . $user;
|
|
PTDEBUG && _d($sql);
|
|
eval {
|
|
$proc = grep {
|
|
m/ALL PRIVILEGES.*?\*\.\*|PROCESS/
|
|
} @{$dbh->selectcol_arrayref($sql)};
|
|
};
|
|
}
|
|
|
|
die "Failed to $sql: $EVAL_ERROR" if $EVAL_ERROR;
|
|
}
|
|
if ( !$proc ) {
|
|
die "You do not have the PROCESS privilege";
|
|
}
|
|
|
|
$sql = 'SHOW FULL PROCESSLIST';
|
|
PTDEBUG && _d($dbh, $sql);
|
|
grep { $_->{command} =~ m/Binlog Dump/i }
|
|
map { # Lowercase the column names
|
|
my %hash;
|
|
@hash{ map { lc $_ } keys %$_ } = values %$_;
|
|
\%hash;
|
|
}
|
|
@{$dbh->selectall_arrayref($sql, { Slice => {} })};
|
|
}
|
|
|
|
sub is_master_of {
|
|
my ( $self, $master, $slave ) = @_;
|
|
my $master_status = $self->get_master_status($master)
|
|
or die "The server specified as a master is not a master";
|
|
my $slave_status = $self->get_slave_status($slave)
|
|
or die "The server specified as a slave is not a slave";
|
|
my @connected = $self->get_connected_slaves($master)
|
|
or die "The server specified as a master has no connected slaves";
|
|
my (undef, $port) = $master->selectrow_array("SHOW VARIABLES LIKE 'port'");
|
|
|
|
if ( $port != $slave_status->{master_port} ) {
|
|
die "The slave is connected to $slave_status->{master_port} "
|
|
. "but the master's port is $port";
|
|
}
|
|
|
|
if ( !grep { $slave_status->{master_user} eq $_->{user} } @connected ) {
|
|
die "I don't see any slave I/O thread connected with user "
|
|
. $slave_status->{master_user};
|
|
}
|
|
|
|
if ( ($slave_status->{slave_io_state} || '')
|
|
eq 'Waiting for master to send event' )
|
|
{
|
|
my ( $master_log_name, $master_log_num )
|
|
= $master_status->{file} =~ m/^(.*?)\.0*([1-9][0-9]*)$/;
|
|
my ( $slave_log_name, $slave_log_num )
|
|
= $slave_status->{master_log_file} =~ m/^(.*?)\.0*([1-9][0-9]*)$/;
|
|
if ( $master_log_name ne $slave_log_name
|
|
|| abs($master_log_num - $slave_log_num) > 1 )
|
|
{
|
|
die "The slave thinks it is reading from "
|
|
. "$slave_status->{master_log_file}, but the "
|
|
. "master is writing to $master_status->{file}";
|
|
}
|
|
}
|
|
return 1;
|
|
}
|
|
|
|
sub get_master_dsn {
|
|
my ( $self, $dbh, $dsn, $dsn_parser ) = @_;
|
|
my $master = $self->get_slave_status($dbh) or return undef;
|
|
my $spec = "h=$master->{master_host},P=$master->{master_port}";
|
|
return $dsn_parser->parse($spec, $dsn);
|
|
}
|
|
|
|
sub get_slave_status {
|
|
my ( $self, $dbh ) = @_;
|
|
|
|
if ( !$self->{not_a_slave}->{$dbh} ) {
|
|
my $sth = $self->{sths}->{$dbh}->{SLAVE_STATUS}
|
|
||= $dbh->prepare('SHOW SLAVE STATUS');
|
|
PTDEBUG && _d($dbh, 'SHOW SLAVE STATUS');
|
|
$sth->execute();
|
|
my ($sss_rows) = $sth->fetchall_arrayref({}); # Show Slave Status rows
|
|
|
|
my $ss;
|
|
if ( $sss_rows && @$sss_rows ) {
|
|
if (scalar @$sss_rows > 1) {
|
|
if (!$self->{channel}) {
|
|
die 'This server returned more than one row for SHOW SLAVE STATUS but "channel" was not specified on the command line';
|
|
}
|
|
for my $row (@$sss_rows) {
|
|
$row = { map { lc($_) => $row->{$_} } keys %$row }; # lowercase the keys
|
|
if ($row->{channel_name} eq $self->{channel}) {
|
|
$ss = $row;
|
|
last;
|
|
}
|
|
}
|
|
} else {
|
|
if ($sss_rows->[0]->{channel_name} && $sss_rows->[0]->{channel_name} ne $self->{channel}) {
|
|
die 'This server is using replication channels but "channel" was not specified on the command line';
|
|
} else {
|
|
$ss = $sss_rows->[0];
|
|
}
|
|
}
|
|
|
|
if ( $ss && %$ss ) {
|
|
$ss = { map { lc($_) => $ss->{$_} } keys %$ss }; # lowercase the keys
|
|
return $ss;
|
|
}
|
|
}
|
|
|
|
PTDEBUG && _d('This server returns nothing for SHOW SLAVE STATUS');
|
|
$self->{not_a_slave}->{$dbh}++;
|
|
}
|
|
}
|
|
|
|
sub get_master_status {
|
|
my ( $self, $dbh ) = @_;
|
|
|
|
if ( $self->{not_a_master}->{$dbh} ) {
|
|
PTDEBUG && _d('Server on dbh', $dbh, 'is not a master');
|
|
return;
|
|
}
|
|
|
|
my $sth = $self->{sths}->{$dbh}->{MASTER_STATUS}
|
|
||= $dbh->prepare('SHOW MASTER STATUS');
|
|
PTDEBUG && _d($dbh, 'SHOW MASTER STATUS');
|
|
$sth->execute();
|
|
my ($ms) = @{$sth->fetchall_arrayref({})};
|
|
PTDEBUG && _d(
|
|
$ms ? map { "$_=" . (defined $ms->{$_} ? $ms->{$_} : '') } keys %$ms
|
|
: '');
|
|
|
|
if ( !$ms || scalar keys %$ms < 2 ) {
|
|
PTDEBUG && _d('Server on dbh', $dbh, 'does not seem to be a master');
|
|
$self->{not_a_master}->{$dbh}++;
|
|
}
|
|
|
|
return { map { lc($_) => $ms->{$_} } keys %$ms }; # lowercase the keys
|
|
}
|
|
|
|
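# NOTE (editor's annotation, not part of the upstream source):
# wait_for_master() relies on MySQL's MASTER_POS_WAIT(), which blocks until
# the replica has executed up to the given binlog coordinates, e.g.
#
#   SELECT MASTER_POS_WAIT('mysql-bin.000012', 4711, 60);
#
# It returns the number of events waited for, -1 if the timeout elapsed, or
# NULL if the replica SQL thread is not running (hence the undef-result
# handling in catchup_to_master() below).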
sub wait_for_master {
|
|
my ( $self, %args ) = @_;
|
|
my @required_args = qw(master_status slave_dbh);
|
|
foreach my $arg ( @required_args ) {
|
|
die "I need a $arg argument" unless $args{$arg};
|
|
}
|
|
my ($master_status, $slave_dbh) = @args{@required_args};
|
|
my $timeout = $args{timeout} || 60;
|
|
|
|
my $result;
|
|
my $waited;
|
|
if ( $master_status ) {
|
|
my $slave_status;
|
|
eval {
|
|
$slave_status = $self->get_slave_status($slave_dbh);
|
|
};
|
|
if ($EVAL_ERROR) {
|
|
return {
|
|
result => undef,
|
|
waited => 0,
|
|
error =>'Wait for master: this is a multi-master slave but "channel" was not specified on the command line',
|
|
};
|
|
}
|
|
my $server_version = VersionParser->new($slave_dbh);
|
|
my $channel_sql = $server_version > '5.6' && $self->{channel} ? ", '$self->{channel}'" : '';
|
|
my $sql = "SELECT MASTER_POS_WAIT('$master_status->{file}', $master_status->{position}, $timeout $channel_sql)";
|
|
PTDEBUG && _d($slave_dbh, $sql);
|
|
my $start = time;
|
|
($result) = $slave_dbh->selectrow_array($sql);
|
|
|
|
$waited = time - $start;
|
|
|
|
PTDEBUG && _d('Result of waiting:', $result);
|
|
PTDEBUG && _d("Waited", $waited, "seconds");
|
|
}
|
|
else {
|
|
PTDEBUG && _d('Not waiting: this server is not a master');
|
|
}
|
|
|
|
return {
|
|
result => $result,
|
|
waited => $waited,
|
|
};
|
|
}
|
|
|
|
sub stop_slave {
|
|
my ( $self, $dbh ) = @_;
|
|
my $sth = $self->{sths}->{$dbh}->{STOP_SLAVE}
|
|
||= $dbh->prepare('STOP SLAVE');
|
|
PTDEBUG && _d($dbh, $sth->{Statement});
|
|
$sth->execute();
|
|
}
|
|
|
|
sub start_slave {
|
|
my ( $self, $dbh, $pos ) = @_;
|
|
if ( $pos ) {
|
|
my $sql = "START SLAVE UNTIL MASTER_LOG_FILE='$pos->{file}', "
|
|
. "MASTER_LOG_POS=$pos->{position}";
|
|
PTDEBUG && _d($dbh, $sql);
|
|
$dbh->do($sql);
|
|
}
|
|
else {
|
|
my $sth = $self->{sths}->{$dbh}->{START_SLAVE}
|
|
||= $dbh->prepare('START SLAVE');
|
|
PTDEBUG && _d($dbh, $sth->{Statement});
|
|
$sth->execute();
|
|
}
|
|
}
|
|
|
|
sub catchup_to_master {
|
|
my ( $self, $slave, $master, $timeout ) = @_;
|
|
$self->stop_slave($master);
|
|
$self->stop_slave($slave);
|
|
my $slave_status = $self->get_slave_status($slave);
|
|
my $slave_pos = $self->repl_posn($slave_status);
|
|
my $master_status = $self->get_master_status($master);
|
|
my $master_pos = $self->repl_posn($master_status);
|
|
PTDEBUG && _d('Master position:', $self->pos_to_string($master_pos),
|
|
'Slave position:', $self->pos_to_string($slave_pos));
|
|
|
|
my $result;
|
|
if ( $self->pos_cmp($slave_pos, $master_pos) < 0 ) {
|
|
PTDEBUG && _d('Waiting for slave to catch up to master');
|
|
$self->start_slave($slave, $master_pos);
|
|
|
|
$result = $self->wait_for_master(
   master_status => $master_status,
   slave_dbh     => $slave,
   timeout       => $timeout,
);
|
|
if ($result->{error}) {
|
|
die $result->{error};
|
|
}
|
|
if ( !defined $result->{result} ) {
|
|
$slave_status = $self->get_slave_status($slave);
|
|
if ( !$self->slave_is_running($slave_status) ) {
|
|
PTDEBUG && _d('Master position:',
|
|
$self->pos_to_string($master_pos),
|
|
'Slave position:', $self->pos_to_string($slave_pos));
|
|
$slave_pos = $self->repl_posn($slave_status);
|
|
if ( $self->pos_cmp($slave_pos, $master_pos) != 0 ) {
|
|
die "MASTER_POS_WAIT() returned NULL but slave has not "
|
|
. "caught up to master";
|
|
}
|
|
PTDEBUG && _d('Slave is caught up to master and stopped');
|
|
}
|
|
else {
|
|
die "Slave has not caught up to master and it is still running";
|
|
}
|
|
}
|
|
}
|
|
else {
|
|
PTDEBUG && _d("Slave is already caught up to master");
|
|
}
|
|
|
|
return $result;
|
|
}
|
|
|
|
sub catchup_to_same_pos {
|
|
my ( $self, $s1_dbh, $s2_dbh ) = @_;
|
|
$self->stop_slave($s1_dbh);
|
|
$self->stop_slave($s2_dbh);
|
|
my $s1_status = $self->get_slave_status($s1_dbh);
|
|
my $s2_status = $self->get_slave_status($s2_dbh);
|
|
my $s1_pos = $self->repl_posn($s1_status);
|
|
my $s2_pos = $self->repl_posn($s2_status);
|
|
if ( $self->pos_cmp($s1_pos, $s2_pos) < 0 ) {
|
|
$self->start_slave($s1_dbh, $s2_pos);
|
|
}
|
|
elsif ( $self->pos_cmp($s2_pos, $s1_pos) < 0 ) {
|
|
$self->start_slave($s2_dbh, $s1_pos);
|
|
}
|
|
|
|
$s1_status = $self->get_slave_status($s1_dbh);
|
|
$s2_status = $self->get_slave_status($s2_dbh);
|
|
$s1_pos = $self->repl_posn($s1_status);
|
|
$s2_pos = $self->repl_posn($s2_status);
|
|
|
|
if ( $self->slave_is_running($s1_status)
|
|
|| $self->slave_is_running($s2_status)
|
|
|| $self->pos_cmp($s1_pos, $s2_pos) != 0)
|
|
{
|
|
die "The servers aren't both stopped at the same position";
|
|
}
|
|
|
|
}
|
|
|
|
sub slave_is_running {
|
|
my ( $self, $slave_status ) = @_;
|
|
return ($slave_status->{slave_sql_running} || 'No') eq 'Yes';
|
|
}
|
|
|
|
sub has_slave_updates {
|
|
my ( $self, $dbh ) = @_;
|
|
my $sql = q{SHOW VARIABLES LIKE 'log_slave_updates'};
|
|
PTDEBUG && _d($dbh, $sql);
|
|
my ($name, $value) = $dbh->selectrow_array($sql);
|
|
return $value && $value =~ m/^(1|ON)$/;
|
|
}
|
|
|
|
sub repl_posn {
|
|
my ( $self, $status ) = @_;
|
|
if ( exists $status->{file} && exists $status->{position} ) {
|
|
return {
|
|
file => $status->{file},
|
|
position => $status->{position},
|
|
};
|
|
}
|
|
else {
|
|
return {
|
|
file => $status->{relay_master_log_file},
|
|
position => $status->{exec_master_log_pos},
|
|
};
|
|
}
|
|
}
|
|
|
|
sub get_slave_lag {
|
|
my ( $self, $dbh ) = @_;
|
|
my $stat = $self->get_slave_status($dbh);
|
|
return unless $stat; # server is not a slave
|
|
return $stat->{seconds_behind_master};
|
|
}
|
|
|
|
sub pos_cmp {
|
|
my ( $self, $a, $b ) = @_;
|
|
return $self->pos_to_string($a) cmp $self->pos_to_string($b);
|
|
}
|
|
|
|
sub short_host {
|
|
my ( $self, $dsn ) = @_;
|
|
my ($host, $port);
|
|
if ( $dsn->{master_host} ) {
|
|
$host = $dsn->{master_host};
|
|
$port = $dsn->{master_port};
|
|
}
|
|
else {
|
|
$host = $dsn->{h};
|
|
$port = $dsn->{P};
|
|
}
|
|
return ($host || '[default]') . ( ($port || 3306) == 3306 ? '' : ":$port" );
|
|
}
|
|
|
|
sub is_replication_thread {
|
|
my ( $self, $query, %args ) = @_;
|
|
return unless $query;
|
|
|
|
my $type = lc($args{type} || 'all');
|
|
die "Invalid type: $type"
|
|
unless $type =~ m/^binlog_dump|slave_io|slave_sql|all$/i;
|
|
|
|
my $match = 0;
|
|
if ( $type =~ m/binlog_dump|all/i ) {
|
|
$match = 1
|
|
if ($query->{Command} || $query->{command} || '') eq "Binlog Dump";
|
|
}
|
|
if ( !$match ) {
|
|
if ( ($query->{User} || $query->{user} || '') eq "system user" ) {
|
|
PTDEBUG && _d("Slave replication thread");
|
|
if ( $type ne 'all' ) {
|
|
my $state = $query->{State} || $query->{state} || '';
|
|
|
|
if ( $state =~ m/^init|end$/ ) {
|
|
PTDEBUG && _d("Special state:", $state);
|
|
$match = 1;
|
|
}
|
|
else {
|
|
my ($slave_sql) = $state =~ m/
|
|
^(Waiting\sfor\sthe\snext\sevent
|
|
|Reading\sevent\sfrom\sthe\srelay\slog
|
|
|Has\sread\sall\srelay\slog;\swaiting
|
|
|Making\stemp\sfile
|
|
|Waiting\sfor\sslave\smutex\son\sexit)/xi;
|
|
|
|
$match = $type eq 'slave_sql' && $slave_sql ? 1
|
|
: $type eq 'slave_io' && !$slave_sql ? 1
|
|
: 0;
|
|
}
|
|
}
|
|
else {
|
|
$match = 1;
|
|
}
|
|
}
|
|
else {
|
|
PTDEBUG && _d('Not system user');
|
|
}
|
|
|
|
if ( !defined $args{check_known_ids} || $args{check_known_ids} ) {
|
|
my $id = $query->{Id} || $query->{id};
|
|
if ( $match ) {
|
|
$self->{replication_thread}->{$id} = 1;
|
|
}
|
|
else {
|
|
if ( $self->{replication_thread}->{$id} ) {
|
|
PTDEBUG && _d("Thread ID is a known replication thread ID");
|
|
$match = 1;
|
|
}
|
|
}
|
|
}
|
|
}
|
|
|
|
PTDEBUG && _d('Matches', $type, 'replication thread:',
|
|
($match ? 'yes' : 'no'), '; match:', $match);
|
|
|
|
return $match;
|
|
}
|
|
|
|
|
|
sub get_replication_filters {
|
|
my ( $self, %args ) = @_;
|
|
my @required_args = qw(dbh);
|
|
foreach my $arg ( @required_args ) {
|
|
die "I need a $arg argument" unless $args{$arg};
|
|
}
|
|
my ($dbh) = @args{@required_args};
|
|
|
|
my %filters = ();
|
|
|
|
my $status = $self->get_master_status($dbh);
|
|
if ( $status ) {
|
|
map { $filters{$_} = $status->{$_} }
|
|
grep { defined $status->{$_} && $status->{$_} ne '' }
|
|
qw(
|
|
binlog_do_db
|
|
binlog_ignore_db
|
|
);
|
|
}
|
|
|
|
$status = $self->get_slave_status($dbh);
|
|
if ( $status ) {
|
|
map { $filters{$_} = $status->{$_} }
|
|
grep { defined $status->{$_} && $status->{$_} ne '' }
|
|
qw(
|
|
replicate_do_db
|
|
replicate_ignore_db
|
|
replicate_do_table
|
|
replicate_ignore_table
|
|
replicate_wild_do_table
|
|
replicate_wild_ignore_table
|
|
);
|
|
|
|
my $sql = "SHOW VARIABLES LIKE 'slave_skip_errors'";
|
|
PTDEBUG && _d($dbh, $sql);
|
|
my $row = $dbh->selectrow_arrayref($sql);
|
|
$filters{slave_skip_errors} = $row->[1] if $row->[1] && $row->[1] ne 'OFF';
|
|
}
|
|
|
|
return \%filters;
|
|
}
|
|
|
|
|
|
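# NOTE (editor's annotation, not part of the upstream source): positions are
# rendered as "<file>/<zero-padded position>" so that pos_cmp() above can use
# a plain string comparison, e.g.
#
#   pos_to_string({ file => 'mysql-bin.000003', position => 4512 })
#     eq 'mysql-bin.000003/00000000000000004512'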
sub pos_to_string {
|
|
my ( $self, $pos ) = @_;
|
|
my $fmt = '%s/%020d';
|
|
return sprintf($fmt, @{$pos}{qw(file position)});
|
|
}
|
|
|
|
sub reset_known_replication_threads {
|
|
my ( $self ) = @_;
|
|
$self->{replication_thread} = {};
|
|
return;
|
|
}
|
|
|
|
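# NOTE (editor's annotation, not part of the upstream source):
# get_cxn_from_dsn_table() supports --recursion-method dsn=<DSN>, where the
# DSN must name a table (via D and t, or a database-qualified t) holding one
# replica DSN per row.  The only columns this code requires are "id" and
# "dsn"; an illustrative table might be:
#
#   CREATE TABLE percona.dsns (
#     id  INT UNSIGNED NOT NULL AUTO_INCREMENT PRIMARY KEY,
#     dsn VARCHAR(255) NOT NULL
#   );
#   INSERT INTO percona.dsns (dsn) VALUES ('h=replica1,P=3306');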
sub get_cxn_from_dsn_table {
|
|
my ($self, %args) = @_;
|
|
my @required_args = qw(dsn_table_dsn make_cxn);
|
|
foreach my $arg ( @required_args ) {
|
|
die "I need a $arg argument" unless $args{$arg};
|
|
}
|
|
my ($dsn_table_dsn, $make_cxn) = @args{@required_args};
|
|
PTDEBUG && _d('DSN table DSN:', $dsn_table_dsn);
|
|
|
|
my $dp = $self->{DSNParser};
|
|
my $q = $self->{Quoter};
|
|
|
|
my $dsn = $dp->parse($dsn_table_dsn);
|
|
my $dsn_table;
|
|
if ( $dsn->{D} && $dsn->{t} ) {
|
|
$dsn_table = $q->quote($dsn->{D}, $dsn->{t});
|
|
}
|
|
elsif ( $dsn->{t} && $dsn->{t} =~ m/\./ ) {
|
|
$dsn_table = $q->quote($q->split_unquote($dsn->{t}));
|
|
}
|
|
else {
|
|
die "DSN table DSN does not specify a database (D) "
|
|
. "or a database-qualified table (t)";
|
|
}
|
|
|
|
my $dsn_tbl_cxn = $make_cxn->(dsn => $dsn);
|
|
my $dbh = $dsn_tbl_cxn->connect();
|
|
my $sql = "SELECT dsn FROM $dsn_table ORDER BY id";
|
|
PTDEBUG && _d($sql);
|
|
my $dsn_strings = $dbh->selectcol_arrayref($sql);
|
|
my @cxn;
|
|
if ( $dsn_strings ) {
|
|
foreach my $dsn_string ( @$dsn_strings ) {
|
|
PTDEBUG && _d('DSN from DSN table:', $dsn_string);
|
|
push @cxn, $make_cxn->(dsn_string => $dsn_string);
|
|
}
|
|
}
|
|
return \@cxn;
|
|
}
|
|
|
|
sub _d {
|
|
my ($package, undef, $line) = caller 0;
|
|
@_ = map { (my $temp = $_) =~ s/\n/\n# /g; $temp; }
|
|
map { defined $_ ? $_ : 'undef' }
|
|
@_;
|
|
print STDERR "# $package:$line $PID ", join(' ', @_), "\n";
|
|
}
|
|
|
|
1;
|
|
}
|
|
# ###########################################################################
|
|
# End MasterSlave package
|
|
# ###########################################################################
|
|
|
|
# ###########################################################################
|
|
# OptionParser package
|
|
# This package is a copy without comments from the original. The original
|
|
# with comments and its test file can be found in the Bazaar repository at,
|
|
# lib/OptionParser.pm
|
|
# t/lib/OptionParser.t
|
|
# See https://launchpad.net/percona-toolkit for more information.
|
|
# ###########################################################################
|
|
{
|
|
package OptionParser;
|
|
|
|
use strict;
|
|
use warnings FATAL => 'all';
|
|
use English qw(-no_match_vars);
|
|
use constant PTDEBUG => $ENV{PTDEBUG} || 0;
|
|
|
|
use List::Util qw(max);
|
|
use Getopt::Long;
|
|
use Data::Dumper;
|
|
|
|
my $POD_link_re = '[LC]<"?([^">]+)"?>';
|
|
|
|
sub new {
|
|
my ( $class, %args ) = @_;
|
|
my @required_args = qw();
|
|
foreach my $arg ( @required_args ) {
|
|
die "I need a $arg argument" unless $args{$arg};
|
|
}
|
|
|
|
my ($program_name) = $PROGRAM_NAME =~ m/([.A-Za-z-]+)$/;
|
|
$program_name ||= $PROGRAM_NAME;
|
|
my $home = $ENV{HOME} || $ENV{HOMEPATH} || $ENV{USERPROFILE} || '.';
|
|
|
|
my %attributes = (
|
|
'type' => 1,
|
|
'short form' => 1,
|
|
'group' => 1,
|
|
'default' => 1,
|
|
'cumulative' => 1,
|
|
'negatable' => 1,
|
|
'repeatable' => 1, # means it can be specified more than once
|
|
);
|
|
|
|
my $self = {
|
|
head1 => 'OPTIONS', # These args are used internally
|
|
skip_rules => 0, # to instantiate another Option-
|
|
item => '--(.*)', # Parser obj that parses the
|
|
attributes => \%attributes, # DSN OPTIONS section. Tools
|
|
parse_attributes => \&_parse_attribs, # don't tinker with these args.
|
|
|
|
%args,
|
|
|
|
strict => 1, # disabled by a special rule
|
|
program_name => $program_name,
|
|
opts => {},
|
|
got_opts => 0,
|
|
short_opts => {},
|
|
defaults => {},
|
|
groups => {},
|
|
allowed_groups => {},
|
|
errors => [],
|
|
rules => [], # desc of rules for --help
|
|
mutex => [], # rule: opts are mutually exclusive
|
|
atleast1 => [], # rule: at least one opt is required
|
|
disables => {}, # rule: opt disables other opts
|
|
defaults_to => {}, # rule: opt defaults to value of other opt
|
|
DSNParser => undef,
|
|
default_files => [
|
|
"/etc/percona-toolkit/percona-toolkit.conf",
|
|
"/etc/percona-toolkit/$program_name.conf",
|
|
"$home/.percona-toolkit.conf",
|
|
"$home/.$program_name.conf",
|
|
],
|
|
types => {
|
|
string => 's', # standard Getopt type
|
|
int => 'i', # standard Getopt type
|
|
float => 'f', # standard Getopt type
|
|
Hash => 'H', # hash, formed from a comma-separated list
|
|
hash => 'h', # hash as above, but only if a value is given
|
|
Array => 'A', # array, similar to Hash
|
|
array => 'a', # array, similar to hash
|
|
DSN => 'd', # DSN
|
|
size => 'z', # size with kMG suffix (powers of 2^10)
|
|
time => 'm', # time, with an optional suffix of s/h/m/d
|
|
},
|
|
};
|
|
|
|
return bless $self, $class;
|
|
}
|
|
|
|
sub get_specs {
|
|
my ( $self, $file ) = @_;
|
|
$file ||= $self->{file} || __FILE__;
|
|
my @specs = $self->_pod_to_specs($file);
|
|
$self->_parse_specs(@specs);
|
|
|
|
open my $fh, "<", $file or die "Cannot open $file: $OS_ERROR";
|
|
my $contents = do { local $/ = undef; <$fh> };
|
|
close $fh;
|
|
if ( $contents =~ m/^=head1 DSN OPTIONS/m ) {
|
|
PTDEBUG && _d('Parsing DSN OPTIONS');
|
|
my $dsn_attribs = {
|
|
dsn => 1,
|
|
copy => 1,
|
|
};
|
|
my $parse_dsn_attribs = sub {
|
|
my ( $self, $option, $attribs ) = @_;
|
|
map {
|
|
my $val = $attribs->{$_};
|
|
if ( $val ) {
|
|
$val = $val eq 'yes' ? 1
|
|
: $val eq 'no' ? 0
|
|
: $val;
|
|
$attribs->{$_} = $val;
|
|
}
|
|
} keys %$attribs;
|
|
return {
|
|
key => $option,
|
|
%$attribs,
|
|
};
|
|
};
|
|
my $dsn_o = new OptionParser(
|
|
description => 'DSN OPTIONS',
|
|
head1 => 'DSN OPTIONS',
|
|
dsn => 0, # XXX don't infinitely recurse!
|
|
item => '\* (.)', # key opts are a single character
|
|
skip_rules => 1, # no rules before opts
|
|
attributes => $dsn_attribs,
|
|
parse_attributes => $parse_dsn_attribs,
|
|
);
|
|
my @dsn_opts = map {
|
|
my $opts = {
|
|
key => $_->{spec}->{key},
|
|
dsn => $_->{spec}->{dsn},
|
|
copy => $_->{spec}->{copy},
|
|
desc => $_->{desc},
|
|
};
|
|
$opts;
|
|
} $dsn_o->_pod_to_specs($file);
|
|
$self->{DSNParser} = DSNParser->new(opts => \@dsn_opts);
|
|
}
|
|
|
|
if ( $contents =~ m/^=head1 VERSION\n\n^(.+)$/m ) {
|
|
$self->{version} = $1;
|
|
PTDEBUG && _d($self->{version});
|
|
}
|
|
|
|
return;
|
|
}
|
|
|
|
sub DSNParser {
|
|
my ( $self ) = @_;
|
|
return $self->{DSNParser};
|
|
};
|
|
|
|
sub get_defaults_files {
|
|
my ( $self ) = @_;
|
|
return @{$self->{default_files}};
|
|
}
|
|
|
|
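# NOTE (editor's annotation, not part of the upstream source):
# _pod_to_specs() builds the option specs from this file's own POD: it skips
# to "=head1 OPTIONS", collects the rule paragraphs before "=over", then for
# each "=item --option" reads an optional attribute paragraph such as
# "type: string; default: foo; short form: -f" followed by the first sentence
# of the description, which becomes the --help text.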
sub _pod_to_specs {
|
|
my ( $self, $file ) = @_;
|
|
$file ||= $self->{file} || __FILE__;
|
|
open my $fh, '<', $file or die "Cannot open $file: $OS_ERROR";
|
|
|
|
my @specs = ();
|
|
my @rules = ();
|
|
my $para;
|
|
|
|
local $INPUT_RECORD_SEPARATOR = '';
|
|
while ( $para = <$fh> ) {
|
|
next unless $para =~ m/^=head1 $self->{head1}/;
|
|
last;
|
|
}
|
|
|
|
while ( $para = <$fh> ) {
|
|
last if $para =~ m/^=over/;
|
|
next if $self->{skip_rules};
|
|
chomp $para;
|
|
$para =~ s/\s+/ /g;
|
|
$para =~ s/$POD_link_re/$1/go;
|
|
PTDEBUG && _d('Option rule:', $para);
|
|
push @rules, $para;
|
|
}
|
|
|
|
die "POD has no $self->{head1} section" unless $para;
|
|
|
|
do {
|
|
if ( my ($option) = $para =~ m/^=item $self->{item}/ ) {
|
|
chomp $para;
|
|
PTDEBUG && _d($para);
|
|
my %attribs;
|
|
|
|
$para = <$fh>; # read next paragraph, possibly attributes
|
|
|
|
if ( $para =~ m/: / ) { # attributes
|
|
$para =~ s/\s+\Z//g;
|
|
%attribs = map {
|
|
my ( $attrib, $val) = split(/: /, $_);
|
|
die "Unrecognized attribute for --$option: $attrib"
|
|
unless $self->{attributes}->{$attrib};
|
|
($attrib, $val);
|
|
} split(/; /, $para);
|
|
if ( $attribs{'short form'} ) {
|
|
$attribs{'short form'} =~ s/-//;
|
|
}
|
|
$para = <$fh>; # read next paragraph, probably short help desc
|
|
}
|
|
else {
|
|
PTDEBUG && _d('Option has no attributes');
|
|
}
|
|
|
|
$para =~ s/\s+\Z//g;
|
|
$para =~ s/\s+/ /g;
|
|
$para =~ s/$POD_link_re/$1/go;
|
|
|
|
$para =~ s/\.(?:\n.*| [A-Z].*|\Z)//s;
|
|
PTDEBUG && _d('Short help:', $para);
|
|
|
|
die "No description after option spec $option" if $para =~ m/^=item/;
|
|
|
|
if ( my ($base_option) = $option =~ m/^\[no\](.*)/ ) {
|
|
$option = $base_option;
|
|
$attribs{'negatable'} = 1;
|
|
}
|
|
|
|
push @specs, {
|
|
spec => $self->{parse_attributes}->($self, $option, \%attribs),
|
|
desc => $para
|
|
. (defined $attribs{default} ? " (default $attribs{default})" : ''),
|
|
group => ($attribs{'group'} ? $attribs{'group'} : 'default'),
|
|
attributes => \%attribs
|
|
};
|
|
}
|
|
while ( $para = <$fh> ) {
|
|
last unless $para;
|
|
if ( $para =~ m/^=head1/ ) {
|
|
$para = undef; # Can't 'last' out of a do {} block.
|
|
last;
|
|
}
|
|
last if $para =~ m/^=item /;
|
|
}
|
|
} while ( $para );
|
|
|
|
die "No valid specs in $self->{head1}" unless @specs;
|
|
|
|
close $fh;
|
|
return @specs, @rules;
|
|
}
|
|
|
|
sub _parse_specs {
|
|
my ( $self, @specs ) = @_;
|
|
my %disables; # special rule that requires deferred checking
|
|
|
|
foreach my $opt ( @specs ) {
|
|
if ( ref $opt ) { # It's an option spec, not a rule.
|
|
PTDEBUG && _d('Parsing opt spec:',
|
|
map { ($_, '=>', $opt->{$_}) } keys %$opt);
|
|
|
|
my ( $long, $short ) = $opt->{spec} =~ m/^([\w-]+)(?:\|([^!+=]*))?/;
|
|
if ( !$long ) {
|
|
die "Cannot parse long option from spec $opt->{spec}";
|
|
}
|
|
$opt->{long} = $long;
|
|
|
|
die "Duplicate long option --$long" if exists $self->{opts}->{$long};
|
|
$self->{opts}->{$long} = $opt;
|
|
|
|
if ( length $long == 1 ) {
|
|
PTDEBUG && _d('Long opt', $long, 'looks like short opt');
|
|
$self->{short_opts}->{$long} = $long;
|
|
}
|
|
|
|
if ( $short ) {
|
|
die "Duplicate short option -$short"
|
|
if exists $self->{short_opts}->{$short};
|
|
$self->{short_opts}->{$short} = $long;
|
|
$opt->{short} = $short;
|
|
}
|
|
else {
|
|
$opt->{short} = undef;
|
|
}
|
|
|
|
$opt->{is_negatable} = $opt->{spec} =~ m/!/ ? 1 : 0;
|
|
$opt->{is_cumulative} = $opt->{spec} =~ m/\+/ ? 1 : 0;
|
|
$opt->{is_repeatable} = $opt->{attributes}->{repeatable} ? 1 : 0;
|
|
$opt->{is_required} = $opt->{desc} =~ m/required/ ? 1 : 0;
|
|
|
|
$opt->{group} ||= 'default';
|
|
$self->{groups}->{ $opt->{group} }->{$long} = 1;
|
|
|
|
$opt->{value} = undef;
|
|
$opt->{got} = 0;
|
|
|
|
my ( $type ) = $opt->{spec} =~ m/=(.)/;
|
|
$opt->{type} = $type;
|
|
PTDEBUG && _d($long, 'type:', $type);
|
|
|
|
|
|
$opt->{spec} =~ s/=./=s/ if ( $type && $type =~ m/[HhAadzm]/ );
|
|
|
|
if ( (my ($def) = $opt->{desc} =~ m/default\b(?: ([^)]+))?/) ) {
|
|
$self->{defaults}->{$long} = defined $def ? $def : 1;
|
|
PTDEBUG && _d($long, 'default:', $def);
|
|
}
|
|
|
|
if ( $long eq 'config' ) {
|
|
$self->{defaults}->{$long} = join(',', $self->get_defaults_files());
|
|
}
|
|
|
|
if ( (my ($dis) = $opt->{desc} =~ m/(disables .*)/) ) {
|
|
$disables{$long} = $dis;
|
|
PTDEBUG && _d('Deferring check of disables rule for', $opt, $dis);
|
|
}
|
|
|
|
$self->{opts}->{$long} = $opt;
|
|
}
|
|
else { # It's an option rule, not a spec.
|
|
PTDEBUG && _d('Parsing rule:', $opt);
|
|
push @{$self->{rules}}, $opt;
|
|
my @participants = $self->_get_participants($opt);
|
|
my $rule_ok = 0;
|
|
|
|
if ( $opt =~ m/mutually exclusive|one and only one/ ) {
|
|
$rule_ok = 1;
|
|
push @{$self->{mutex}}, \@participants;
|
|
PTDEBUG && _d(@participants, 'are mutually exclusive');
|
|
}
|
|
if ( $opt =~ m/at least one|one and only one/ ) {
|
|
$rule_ok = 1;
|
|
push @{$self->{atleast1}}, \@participants;
|
|
PTDEBUG && _d(@participants, 'require at least one');
|
|
}
|
|
if ( $opt =~ m/default to/ ) {
|
|
$rule_ok = 1;
|
|
$self->{defaults_to}->{$participants[0]} = $participants[1];
|
|
PTDEBUG && _d($participants[0], 'defaults to', $participants[1]);
|
|
}
|
|
if ( $opt =~ m/restricted to option groups/ ) {
|
|
$rule_ok = 1;
|
|
my ($groups) = $opt =~ m/groups ([\w\s\,]+)/;
|
|
my @groups = split(',', $groups);
|
|
%{$self->{allowed_groups}->{$participants[0]}} = map {
|
|
s/\s+//;
|
|
$_ => 1;
|
|
} @groups;
|
|
}
|
|
if( $opt =~ m/accepts additional command-line arguments/ ) {
|
|
$rule_ok = 1;
|
|
$self->{strict} = 0;
|
|
PTDEBUG && _d("Strict mode disabled by rule");
|
|
}
|
|
|
|
die "Unrecognized option rule: $opt" unless $rule_ok;
|
|
}
|
|
}
|
|
|
|
foreach my $long ( keys %disables ) {
|
|
my @participants = $self->_get_participants($disables{$long});
|
|
$self->{disables}->{$long} = \@participants;
|
|
PTDEBUG && _d('Option', $long, 'disables', @participants);
|
|
}
|
|
|
|
return;
|
|
}
|
|
|
|
sub _get_participants {
|
|
my ( $self, $str ) = @_;
|
|
my @participants;
|
|
foreach my $long ( $str =~ m/--(?:\[no\])?([\w-]+)/g ) {
|
|
die "Option --$long does not exist while processing rule $str"
|
|
unless exists $self->{opts}->{$long};
|
|
push @participants, $long;
|
|
}
|
|
PTDEBUG && _d('Participants for', $str, ':', @participants);
|
|
return @participants;
|
|
}
|
|
|
|
sub opts {
|
|
my ( $self ) = @_;
|
|
my %opts = %{$self->{opts}};
|
|
return %opts;
|
|
}
|
|
|
|
sub short_opts {
|
|
my ( $self ) = @_;
|
|
my %short_opts = %{$self->{short_opts}};
|
|
return %short_opts;
|
|
}
|
|
|
|
sub set_defaults {
|
|
my ( $self, %defaults ) = @_;
|
|
$self->{defaults} = {};
|
|
foreach my $long ( keys %defaults ) {
|
|
die "Cannot set default for nonexistent option $long"
|
|
unless exists $self->{opts}->{$long};
|
|
$self->{defaults}->{$long} = $defaults{$long};
|
|
PTDEBUG && _d('Default val for', $long, ':', $defaults{$long});
|
|
}
|
|
return;
|
|
}
|
|
|
|
sub get_defaults {
|
|
my ( $self ) = @_;
|
|
return $self->{defaults};
|
|
}
|
|
|
|
sub get_groups {
|
|
my ( $self ) = @_;
|
|
return $self->{groups};
|
|
}
|
|
|
|
sub _set_option {
|
|
my ( $self, $opt, $val ) = @_;
|
|
my $long = exists $self->{opts}->{$opt} ? $opt
|
|
: exists $self->{short_opts}->{$opt} ? $self->{short_opts}->{$opt}
|
|
: die "Getopt::Long gave a nonexistent option: $opt";
|
|
$opt = $self->{opts}->{$long};
|
|
if ( $opt->{is_cumulative} ) {
|
|
$opt->{value}++;
|
|
}
|
|
elsif ( ($opt->{type} || '') eq 's' && $val =~ m/^--?(.+)/ ) {
|
|
my $next_opt = $1;
|
|
if ( exists $self->{opts}->{$next_opt}
|
|
|| exists $self->{short_opts}->{$next_opt} ) {
|
|
$self->save_error("--$long requires a string value");
|
|
return;
|
|
}
|
|
else {
|
|
if ($opt->{is_repeatable}) {
|
|
push @{$opt->{value}} , $val;
|
|
}
|
|
else {
|
|
$opt->{value} = $val;
|
|
}
|
|
}
|
|
}
|
|
else {
|
|
if ($opt->{is_repeatable}) {
|
|
push @{$opt->{value}} , $val;
|
|
}
|
|
else {
|
|
$opt->{value} = $val;
|
|
}
|
|
}
|
|
$opt->{got} = 1;
|
|
PTDEBUG && _d('Got option', $long, '=', $val);
|
|
}
|
|
|
|
sub get_opts {
|
|
my ( $self ) = @_;
|
|
|
|
foreach my $long ( keys %{$self->{opts}} ) {
|
|
$self->{opts}->{$long}->{got} = 0;
|
|
$self->{opts}->{$long}->{value}
|
|
= exists $self->{defaults}->{$long} ? $self->{defaults}->{$long}
|
|
: $self->{opts}->{$long}->{is_cumulative} ? 0
|
|
: undef;
|
|
}
|
|
$self->{got_opts} = 0;
|
|
|
|
$self->{errors} = [];
|
|
|
|
if ( @ARGV && $ARGV[0] =~/^--config=/ ) {
|
|
$ARGV[0] = substr($ARGV[0],9);
|
|
$ARGV[0] =~ s/^'(.*)'$/$1/;
|
|
$ARGV[0] =~ s/^"(.*)"$/$1/;
|
|
$self->_set_option('config', shift @ARGV);
|
|
}
|
|
if ( @ARGV && $ARGV[0] eq "--config" ) {
|
|
shift @ARGV;
|
|
$self->_set_option('config', shift @ARGV);
|
|
}
|
|
if ( $self->has('config') ) {
|
|
my @extra_args;
|
|
foreach my $filename ( split(',', $self->get('config')) ) {
|
|
eval {
|
|
push @extra_args, $self->_read_config_file($filename);
|
|
};
|
|
if ( $EVAL_ERROR ) {
|
|
if ( $self->got('config') ) {
|
|
die $EVAL_ERROR;
|
|
}
|
|
elsif ( PTDEBUG ) {
|
|
_d($EVAL_ERROR);
|
|
}
|
|
}
|
|
}
|
|
unshift @ARGV, @extra_args;
|
|
}
|
|
|
|
Getopt::Long::Configure('no_ignore_case', 'bundling');
|
|
GetOptions(
|
|
map { $_->{spec} => sub { $self->_set_option(@_); } }
|
|
grep { $_->{long} ne 'config' } # --config is handled specially above.
|
|
values %{$self->{opts}}
|
|
) or $self->save_error('Error parsing options');
|
|
|
|
if ( exists $self->{opts}->{version} && $self->{opts}->{version}->{got} ) {
|
|
if ( $self->{version} ) {
|
|
print $self->{version}, "\n";
|
|
exit 0;
|
|
}
|
|
else {
|
|
print "Error parsing version. See the VERSION section of the tool's documentation.\n";
|
|
exit 1;
|
|
}
|
|
}
|
|
|
|
if ( @ARGV && $self->{strict} ) {
|
|
$self->save_error("Unrecognized command-line options @ARGV");
|
|
}
|
|
|
|
foreach my $mutex ( @{$self->{mutex}} ) {
|
|
my @set = grep { $self->{opts}->{$_}->{got} } @$mutex;
|
|
if ( @set > 1 ) {
|
|
my $err = join(', ', map { "--$self->{opts}->{$_}->{long}" }
|
|
@{$mutex}[ 0 .. scalar(@$mutex) - 2] )
|
|
. ' and --'.$self->{opts}->{$mutex->[-1]}->{long}
|
|
. ' are mutually exclusive.';
|
|
$self->save_error($err);
|
|
}
|
|
}
|
|
|
|
foreach my $required ( @{$self->{atleast1}} ) {
|
|
my @set = grep { $self->{opts}->{$_}->{got} } @$required;
|
|
if ( @set == 0 ) {
|
|
my $err = join(', ', map { "--$self->{opts}->{$_}->{long}" }
|
|
@{$required}[ 0 .. scalar(@$required) - 2] )
|
|
.' or --'.$self->{opts}->{$required->[-1]}->{long};
|
|
$self->save_error("Specify at least one of $err");
|
|
}
|
|
}
|
|
|
|
$self->_check_opts( keys %{$self->{opts}} );
|
|
$self->{got_opts} = 1;
|
|
return;
|
|
}
|
|
|
|
sub _check_opts {
|
|
my ( $self, @long ) = @_;
|
|
my $long_last = scalar @long;
|
|
while ( @long ) {
|
|
foreach my $i ( 0..$#long ) {
|
|
my $long = $long[$i];
|
|
next unless $long;
|
|
my $opt = $self->{opts}->{$long};
|
|
if ( $opt->{got} ) {
|
|
if ( exists $self->{disables}->{$long} ) {
|
|
my @disable_opts = @{$self->{disables}->{$long}};
|
|
map { $self->{opts}->{$_}->{value} = undef; } @disable_opts;
|
|
PTDEBUG && _d('Unset options', @disable_opts,
|
|
'because', $long,'disables them');
|
|
}
|
|
|
|
if ( exists $self->{allowed_groups}->{$long} ) {
|
|
|
|
my @restricted_groups = grep {
|
|
!exists $self->{allowed_groups}->{$long}->{$_}
|
|
} keys %{$self->{groups}};
|
|
|
|
my @restricted_opts;
|
|
foreach my $restricted_group ( @restricted_groups ) {
|
|
RESTRICTED_OPT:
|
|
foreach my $restricted_opt (
|
|
keys %{$self->{groups}->{$restricted_group}} )
|
|
{
|
|
next RESTRICTED_OPT if $restricted_opt eq $long;
|
|
push @restricted_opts, $restricted_opt
|
|
if $self->{opts}->{$restricted_opt}->{got};
|
|
}
|
|
}
|
|
|
|
if ( @restricted_opts ) {
|
|
my $err;
|
|
if ( @restricted_opts == 1 ) {
|
|
$err = "--$restricted_opts[0]";
|
|
}
|
|
else {
|
|
$err = join(', ',
|
|
map { "--$self->{opts}->{$_}->{long}" }
|
|
grep { $_ }
|
|
@restricted_opts[0..scalar(@restricted_opts) - 2]
|
|
)
|
|
. ' or --'.$self->{opts}->{$restricted_opts[-1]}->{long};
|
|
}
|
|
$self->save_error("--$long is not allowed with $err");
|
|
}
|
|
}
|
|
|
|
}
|
|
elsif ( $opt->{is_required} ) {
|
|
$self->save_error("Required option --$long must be specified");
|
|
}
|
|
|
|
$self->_validate_type($opt);
|
|
if ( $opt->{parsed} ) {
|
|
delete $long[$i];
|
|
}
|
|
else {
|
|
PTDEBUG && _d('Temporarily failed to parse', $long);
|
|
}
|
|
}
|
|
|
|
die "Failed to parse options, possibly due to circular dependencies"
|
|
if @long == $long_last;
|
|
$long_last = @long;
|
|
}
|
|
|
|
return;
|
|
}
|
|
|
|
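# NOTE (editor's annotation, not part of the upstream source):
# _validate_type() post-processes option values by type.  "time" (m) values
# take an s/m/h/d suffix and are normalized to seconds -- "90s" => 90,
# "2m" => 120, "1h" => 3600 -- with the default suffix taken from a
# "(suffix X)" hint in the option's description.  DSN (d) values are parsed
# by DSNParser (optionally copying defaults from another option), size (z)
# values go through _parse_size(), and h/H/a/A values become hashes or arrays
# split on unescaped commas.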
sub _validate_type {
|
|
my ( $self, $opt ) = @_;
|
|
return unless $opt;
|
|
|
|
if ( !$opt->{type} ) {
|
|
$opt->{parsed} = 1;
|
|
return;
|
|
}
|
|
|
|
my $val = $opt->{value};
|
|
|
|
if ( $val && $opt->{type} eq 'm' ) { # type time
|
|
PTDEBUG && _d('Parsing option', $opt->{long}, 'as a time value');
|
|
my ( $prefix, $num, $suffix ) = $val =~ m/([+-]?)(\d+)([a-z])?$/;
|
|
if ( !$suffix ) {
|
|
my ( $s ) = $opt->{desc} =~ m/\(suffix (.)\)/;
|
|
$suffix = $s || 's';
|
|
PTDEBUG && _d('No suffix given; using', $suffix, 'for',
|
|
$opt->{long}, '(value:', $val, ')');
|
|
}
|
|
if ( $suffix =~ m/[smhd]/ ) {
|
|
$val = $suffix eq 's' ? $num # Seconds
|
|
: $suffix eq 'm' ? $num * 60 # Minutes
|
|
: $suffix eq 'h' ? $num * 3600 # Hours
|
|
: $num * 86400; # Days
|
|
$opt->{value} = ($prefix || '') . $val;
|
|
PTDEBUG && _d('Setting option', $opt->{long}, 'to', $val);
|
|
}
|
|
else {
|
|
$self->save_error("Invalid time suffix for --$opt->{long}");
|
|
}
|
|
}
|
|
elsif ( $val && $opt->{type} eq 'd' ) { # type DSN
|
|
PTDEBUG && _d('Parsing option', $opt->{long}, 'as a DSN');
|
|
my $prev = {};
|
|
my $from_key = $self->{defaults_to}->{ $opt->{long} };
|
|
if ( $from_key ) {
|
|
PTDEBUG && _d($opt->{long}, 'DSN copies from', $from_key, 'DSN');
|
|
if ( $self->{opts}->{$from_key}->{parsed} ) {
|
|
$prev = $self->{opts}->{$from_key}->{value};
|
|
}
|
|
else {
|
|
PTDEBUG && _d('Cannot parse', $opt->{long}, 'until',
|
|
$from_key, 'parsed');
|
|
return;
|
|
}
|
|
}
|
|
my $defaults = $self->{DSNParser}->parse_options($self);
|
|
if (!$opt->{attributes}->{repeatable}) {
|
|
$opt->{value} = $self->{DSNParser}->parse($val, $prev, $defaults);
|
|
} else {
|
|
my $values = [];
|
|
for my $dsn_string (@$val) {
|
|
push @$values, $self->{DSNParser}->parse($dsn_string, $prev, $defaults);
|
|
}
|
|
$opt->{value} = $values;
|
|
}
|
|
}
|
|
elsif ( $val && $opt->{type} eq 'z' ) { # type size
|
|
PTDEBUG && _d('Parsing option', $opt->{long}, 'as a size value');
|
|
$self->_parse_size($opt, $val);
|
|
}
|
|
elsif ( $opt->{type} eq 'H' || (defined $val && $opt->{type} eq 'h') ) {
|
|
$opt->{value} = { map { $_ => 1 } split(/(?<!\\),\s*/, ($val || '')) };
|
|
}
|
|
elsif ( $opt->{type} eq 'A' || (defined $val && $opt->{type} eq 'a') ) {
|
|
$opt->{value} = [ split(/(?<!\\),\s*/, ($val || '')) ];
|
|
}
|
|
else {
|
|
PTDEBUG && _d('Nothing to validate for option',
|
|
$opt->{long}, 'type', $opt->{type}, 'value', $val);
|
|
}
|
|
|
|
$opt->{parsed} = 1;
|
|
return;
|
|
}
|
|
|
|
sub get {
|
|
my ( $self, $opt ) = @_;
|
|
my $long = (length $opt == 1 ? $self->{short_opts}->{$opt} : $opt);
|
|
die "Option $opt does not exist"
|
|
unless $long && exists $self->{opts}->{$long};
|
|
return $self->{opts}->{$long}->{value};
|
|
}
|
|
|
|
sub got {
|
|
my ( $self, $opt ) = @_;
|
|
my $long = (length $opt == 1 ? $self->{short_opts}->{$opt} : $opt);
|
|
die "Option $opt does not exist"
|
|
unless $long && exists $self->{opts}->{$long};
|
|
return $self->{opts}->{$long}->{got};
|
|
}
|
|
|
|
sub has {
|
|
my ( $self, $opt ) = @_;
|
|
my $long = (length $opt == 1 ? $self->{short_opts}->{$opt} : $opt);
|
|
return defined $long ? exists $self->{opts}->{$long} : 0;
|
|
}
|
|
|
|
sub set {
|
|
my ( $self, $opt, $val ) = @_;
|
|
my $long = (length $opt == 1 ? $self->{short_opts}->{$opt} : $opt);
|
|
die "Option $opt does not exist"
|
|
unless $long && exists $self->{opts}->{$long};
|
|
$self->{opts}->{$long}->{value} = $val;
|
|
return;
|
|
}
|
|
|
|
sub save_error {
|
|
my ( $self, $error ) = @_;
|
|
push @{$self->{errors}}, $error;
|
|
return;
|
|
}
|
|
|
|
sub errors {
|
|
my ( $self ) = @_;
|
|
return $self->{errors};
|
|
}
|
|
|
|
sub usage {
|
|
my ( $self ) = @_;
|
|
warn "No usage string is set" unless $self->{usage}; # XXX
|
|
return "Usage: " . ($self->{usage} || '') . "\n";
|
|
}
|
|
|
|
sub descr {
|
|
my ( $self ) = @_;
|
|
warn "No description string is set" unless $self->{description}; # XXX
|
|
my $descr = ($self->{description} || $self->{program_name} || '')
|
|
. " For more details, please use the --help option, "
|
|
. "or try 'perldoc $PROGRAM_NAME' "
|
|
. "for complete documentation.";
|
|
$descr = join("\n", $descr =~ m/(.{0,80})(?:\s+|$)/g)
|
|
unless $ENV{DONT_BREAK_LINES};
|
|
$descr =~ s/ +$//mg;
|
|
return $descr;
|
|
}
|
|
|
|
sub usage_or_errors {
|
|
my ( $self, $file, $return ) = @_;
|
|
$file ||= $self->{file} || __FILE__;
|
|
|
|
if ( !$self->{description} || !$self->{usage} ) {
|
|
PTDEBUG && _d("Getting description and usage from SYNOPSIS in", $file);
|
|
my %synop = $self->_parse_synopsis($file);
|
|
$self->{description} ||= $synop{description};
|
|
$self->{usage} ||= $synop{usage};
|
|
PTDEBUG && _d("Description:", $self->{description},
|
|
"\nUsage:", $self->{usage});
|
|
}
|
|
|
|
if ( $self->{opts}->{help}->{got} ) {
|
|
print $self->print_usage() or die "Cannot print usage: $OS_ERROR";
|
|
exit 0 unless $return;
|
|
}
|
|
elsif ( scalar @{$self->{errors}} ) {
|
|
print $self->print_errors() or die "Cannot print errors: $OS_ERROR";
|
|
exit 1 unless $return;
|
|
}
|
|
|
|
return;
|
|
}
|
|
|
|
sub print_errors {
|
|
my ( $self ) = @_;
|
|
my $usage = $self->usage() . "\n";
|
|
if ( (my @errors = @{$self->{errors}}) ) {
|
|
$usage .= join("\n * ", 'Errors in command-line arguments:', @errors)
|
|
. "\n";
|
|
}
|
|
return $usage . "\n" . $self->descr();
|
|
}
|
|
|
|
sub print_usage {
|
|
my ( $self ) = @_;
|
|
die "Run get_opts() before print_usage()" unless $self->{got_opts};
|
|
my @opts = values %{$self->{opts}};
|
|
|
|
my $maxl = max(
|
|
map {
|
|
length($_->{long}) # option long name
|
|
+ ($_->{is_negatable} ? 4 : 0) # "[no]" if opt is negatable
|
|
+ ($_->{type} ? 2 : 0) # "=x" where x is the opt type
|
|
}
|
|
@opts);
|
|
|
|
my $maxs = max(0,
|
|
map {
|
|
length($_)
|
|
+ ($self->{opts}->{$_}->{is_negatable} ? 4 : 0)
|
|
+ ($self->{opts}->{$_}->{type} ? 2 : 0)
|
|
}
|
|
values %{$self->{short_opts}});
|
|
|
|
my $lcol = max($maxl, ($maxs + 3));
|
|
my $rcol = 80 - $lcol - 6;
|
|
my $rpad = ' ' x ( 80 - $rcol );
|
|
|
|
$maxs = max($lcol - 3, $maxs);
|
|
|
|
my $usage = $self->descr() . "\n" . $self->usage();
|
|
|
|
my @groups = reverse sort grep { $_ ne 'default'; } keys %{$self->{groups}};
|
|
push @groups, 'default';
|
|
|
|
foreach my $group ( reverse @groups ) {
|
|
$usage .= "\n".($group eq 'default' ? 'Options' : $group).":\n\n";
|
|
foreach my $opt (
|
|
sort { $a->{long} cmp $b->{long} }
|
|
grep { $_->{group} eq $group }
|
|
@opts )
|
|
{
|
|
my $long = $opt->{is_negatable} ? "[no]$opt->{long}" : $opt->{long};
|
|
my $short = $opt->{short};
|
|
my $desc = $opt->{desc};
|
|
|
|
$long .= $opt->{type} ? "=$opt->{type}" : "";
|
|
|
|
if ( $opt->{type} && $opt->{type} eq 'm' ) {
|
|
my ($s) = $desc =~ m/\(suffix (.)\)/;
|
|
$s ||= 's';
|
|
$desc =~ s/\s+\(suffix .\)//;
|
|
$desc .= ". Optional suffix s=seconds, m=minutes, h=hours, "
|
|
. "d=days; if no suffix, $s is used.";
|
|
}
|
|
$desc = join("\n$rpad", grep { $_ } $desc =~ m/(.{0,$rcol}(?!\W))(?:\s+|(?<=\W)|$)/g);
|
|
$desc =~ s/ +$//mg;
|
|
if ( $short ) {
|
|
$usage .= sprintf(" --%-${maxs}s -%s %s\n", $long, $short, $desc);
|
|
}
|
|
else {
|
|
$usage .= sprintf(" --%-${lcol}s %s\n", $long, $desc);
|
|
}
|
|
}
|
|
}
|
|
|
|
$usage .= "\nOption types: s=string, i=integer, f=float, h/H/a/A=comma-separated list, d=DSN, z=size, m=time\n";
|
|
|
|
if ( (my @rules = @{$self->{rules}}) ) {
|
|
$usage .= "\nRules:\n\n";
|
|
$usage .= join("\n", map { " $_" } @rules) . "\n";
|
|
}
|
|
if ( $self->{DSNParser} ) {
|
|
$usage .= "\n" . $self->{DSNParser}->usage();
|
|
}
|
|
$usage .= "\nOptions and values after processing arguments:\n\n";
|
|
foreach my $opt ( sort { $a->{long} cmp $b->{long} } @opts ) {
|
|
my $val = $opt->{value};
|
|
my $type = $opt->{type} || '';
|
|
my $bool = $opt->{spec} =~ m/^[\w-]+(?:\|[\w-])?!?$/;
|
|
$val = $bool ? ( $val ? 'TRUE' : 'FALSE' )
|
|
: !defined $val ? '(No value)'
|
|
: $type eq 'd' ? $self->{DSNParser}->as_string($val)
|
|
: $type =~ m/H|h/ ? join(',', sort keys %$val)
|
|
: $type =~ m/A|a/ ? join(',', @$val)
|
|
: $val;
|
|
$usage .= sprintf(" --%-${lcol}s %s\n", $opt->{long}, $val);
|
|
}
|
|
return $usage;
|
|
}
|
|
|
|
sub prompt_noecho {
|
|
shift @_ if ref $_[0] eq __PACKAGE__;
|
|
my ( $prompt ) = @_;
|
|
local $OUTPUT_AUTOFLUSH = 1;
|
|
print STDERR $prompt
|
|
or die "Cannot print: $OS_ERROR";
|
|
my $response;
|
|
eval {
|
|
require Term::ReadKey;
|
|
Term::ReadKey::ReadMode('noecho');
|
|
chomp($response = <STDIN>);
|
|
Term::ReadKey::ReadMode('normal');
|
|
print "\n"
|
|
or die "Cannot print: $OS_ERROR";
|
|
};
|
|
if ( $EVAL_ERROR ) {
|
|
die "Cannot read response; is Term::ReadKey installed? $EVAL_ERROR";
|
|
}
|
|
return $response;
|
|
}
|
|
|
|
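# NOTE (editor's annotation, not part of the upstream source):
# _read_config_file() reads one option per line, written without the leading
# "--" and optionally as "option=value"; blank lines and lines starting with
# "#" or ";" are ignored, and a line containing only "--" marks the end of
# options (everything after it is passed through as-is).  Illustrative
# config file (option names are examples only):
#
#   # /etc/percona-toolkit/percona-toolkit.conf
#   user=monitor
#   ask-pass
#   recurse=2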
sub _read_config_file {
|
|
my ( $self, $filename ) = @_;
|
|
open my $fh, "<", $filename or die "Cannot open $filename: $OS_ERROR\n";
|
|
my @args;
|
|
my $prefix = '--';
|
|
my $parse = 1;
|
|
|
|
LINE:
|
|
while ( my $line = <$fh> ) {
|
|
chomp $line;
|
|
next LINE if $line =~ m/^\s*(?:\#|\;|$)/;
|
|
$line =~ s/\s+#.*$//g;
|
|
$line =~ s/^\s+|\s+$//g;
|
|
if ( $line eq '--' ) {
|
|
$prefix = '';
|
|
$parse = 0;
|
|
next LINE;
|
|
}
|
|
|
|
if ( $parse
|
|
&& !$self->has('version-check')
|
|
&& $line =~ /version-check/
|
|
) {
|
|
next LINE;
|
|
}
|
|
|
|
if ( $parse
|
|
&& (my($opt, $arg) = $line =~ m/^\s*([^=\s]+?)(?:\s*=\s*(.*?)\s*)?$/)
|
|
) {
|
|
push @args, grep { defined $_ } ("$prefix$opt", $arg);
|
|
}
|
|
elsif ( $line =~ m/./ ) {
|
|
push @args, $line;
|
|
}
|
|
else {
|
|
die "Syntax error in file $filename at line $INPUT_LINE_NUMBER";
|
|
}
|
|
}
|
|
close $fh;
|
|
return @args;
|
|
}
|
|
|
|
sub read_para_after {
|
|
my ( $self, $file, $regex ) = @_;
|
|
open my $fh, "<", $file or die "Can't open $file: $OS_ERROR";
|
|
local $INPUT_RECORD_SEPARATOR = '';
|
|
my $para;
|
|
while ( $para = <$fh> ) {
|
|
next unless $para =~ m/^=pod$/m;
|
|
last;
|
|
}
|
|
while ( $para = <$fh> ) {
|
|
next unless $para =~ m/$regex/;
|
|
last;
|
|
}
|
|
$para = <$fh>;
|
|
chomp($para);
|
|
close $fh or die "Can't close $file: $OS_ERROR";
|
|
return $para;
|
|
}
|
|
|
|
sub clone {
|
|
my ( $self ) = @_;
|
|
|
|
my %clone = map {
|
|
my $hashref = $self->{$_};
|
|
my $val_copy = {};
|
|
foreach my $key ( keys %$hashref ) {
|
|
my $ref = ref $hashref->{$key};
|
|
$val_copy->{$key} = !$ref ? $hashref->{$key}
|
|
: $ref eq 'HASH' ? { %{$hashref->{$key}} }
|
|
: $ref eq 'ARRAY' ? [ @{$hashref->{$key}} ]
|
|
: $hashref->{$key};
|
|
}
|
|
$_ => $val_copy;
|
|
} qw(opts short_opts defaults);
|
|
|
|
foreach my $scalar ( qw(got_opts) ) {
|
|
$clone{$scalar} = $self->{$scalar};
|
|
}
|
|
|
|
return bless \%clone;
|
|
}
|
|
|
|
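# NOTE (editor's annotation, not part of the upstream source):
# _parse_size() accepts an optional +/- sign and a k/M/G suffix interpreted
# as powers of 1024, so "16M" becomes 16777216 and "+1G" becomes +1073741824;
# the literal value "null" is passed through unchanged.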
sub _parse_size {
|
|
my ( $self, $opt, $val ) = @_;
|
|
|
|
if ( lc($val || '') eq 'null' ) {
|
|
PTDEBUG && _d('NULL size for', $opt->{long});
|
|
$opt->{value} = 'null';
|
|
return;
|
|
}
|
|
|
|
my %factor_for = (k => 1_024, M => 1_048_576, G => 1_073_741_824);
|
|
my ($pre, $num, $factor) = $val =~ m/^([+-])?(\d+)([kMG])?$/;
|
|
if ( defined $num ) {
|
|
if ( $factor ) {
|
|
$num *= $factor_for{$factor};
|
|
PTDEBUG && _d('Setting option', $opt->{y},
|
|
'to num', $num, '* factor', $factor);
|
|
}
|
|
$opt->{value} = ($pre || '') . $num;
|
|
}
|
|
else {
|
|
$self->save_error("Invalid size for --$opt->{long}: $val");
|
|
}
|
|
return;
|
|
}
|
|
|
|
sub _parse_attribs {
|
|
my ( $self, $option, $attribs ) = @_;
|
|
my $types = $self->{types};
|
|
return $option
|
|
. ($attribs->{'short form'} ? '|' . $attribs->{'short form'} : '' )
|
|
. ($attribs->{'negatable'} ? '!' : '' )
|
|
. ($attribs->{'cumulative'} ? '+' : '' )
|
|
. ($attribs->{'type'} ? '=' . $types->{$attribs->{type}} : '' );
|
|
}
|
|
|
|
sub _parse_synopsis {
|
|
my ( $self, $file ) = @_;
|
|
$file ||= $self->{file} || __FILE__;
|
|
PTDEBUG && _d("Parsing SYNOPSIS in", $file);
|
|
|
|
local $INPUT_RECORD_SEPARATOR = ''; # read paragraphs
|
|
open my $fh, "<", $file or die "Cannot open $file: $OS_ERROR";
|
|
my $para;
|
|
1 while defined($para = <$fh>) && $para !~ m/^=head1 SYNOPSIS/;
|
|
die "$file does not contain a SYNOPSIS section" unless $para;
|
|
my @synop;
|
|
for ( 1..2 ) { # 1 for the usage, 2 for the description
|
|
my $para = <$fh>;
|
|
push @synop, $para;
|
|
}
|
|
close $fh;
|
|
PTDEBUG && _d("Raw SYNOPSIS text:", @synop);
|
|
my ($usage, $desc) = @synop;
|
|
die "The SYNOPSIS section in $file is not formatted properly"
|
|
unless $usage && $desc;
|
|
|
|
$usage =~ s/^\s*Usage:\s+(.+)/$1/;
|
|
chomp $usage;
|
|
|
|
$desc =~ s/\n/ /g;
|
|
$desc =~ s/\s{2,}/ /g;
|
|
$desc =~ s/\. ([A-Z][a-z])/. $1/g;
|
|
$desc =~ s/\s+$//;
|
|
|
|
return (
|
|
description => $desc,
|
|
usage => $usage,
|
|
);
|
|
};
|
|
|
|
sub set_vars {
|
|
my ($self, $file) = @_;
|
|
$file ||= $self->{file} || __FILE__;
|
|
|
|
my %user_vars;
|
|
my $user_vars = $self->has('set-vars') ? $self->get('set-vars') : undef;
|
|
if ( $user_vars ) {
|
|
foreach my $var_val ( @$user_vars ) {
|
|
my ($var, $val) = $var_val =~ m/([^\s=]+)=(\S+)/;
|
|
die "Invalid --set-vars value: $var_val\n" unless $var && defined $val;
|
|
$user_vars{$var} = {
|
|
val => $val,
|
|
default => 0,
|
|
};
|
|
}
|
|
}
|
|
|
|
my %default_vars;
|
|
my $default_vars = $self->read_para_after($file, qr/MAGIC_set_vars/);
|
|
if ( $default_vars ) {
|
|
%default_vars = map {
|
|
my $var_val = $_;
|
|
my ($var, $val) = $var_val =~ m/([^\s=]+)=(\S+)/;
|
|
die "Invalid --set-vars value: $var_val\n" unless $var && defined $val;
|
|
$var => {
|
|
val => $val,
|
|
default => 1,
|
|
};
|
|
} split("\n", $default_vars);
|
|
}
|
|
|
|
my %vars = (
|
|
%default_vars, # first the tool's defaults
|
|
%user_vars, # then the user's which overwrite the defaults
|
|
);
|
|
PTDEBUG && _d('--set-vars:', Dumper(\%vars));
|
|
return \%vars;
|
|
}
|
|
|
|
sub _d {
|
|
my ($package, undef, $line) = caller 0;
|
|
@_ = map { (my $temp = $_) =~ s/\n/\n# /g; $temp; }
|
|
map { defined $_ ? $_ : 'undef' }
|
|
@_;
|
|
print STDERR "# $package:$line $PID ", join(' ', @_), "\n";
|
|
}
|
|
|
|
if ( PTDEBUG ) {
|
|
print STDERR '# ', $^X, ' ', $], "\n";
|
|
if ( my $uname = `uname -a` ) {
|
|
$uname =~ s/\s+/ /g;
|
|
print STDERR "# $uname\n";
|
|
}
|
|
print STDERR '# Arguments: ',
|
|
join(' ', map { my $a = "_[$_]_"; $a =~ s/\n/\n# /g; $a; } @ARGV), "\n";
|
|
}
|
|
|
|
1;
|
|
}
|
|
# ###########################################################################
# End OptionParser package
# ###########################################################################
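# The lines above are only the tail of the OptionParser package; its
# constructor and command-line parsing routines appear earlier in this file.
# Hedged usage sketch (not part of the original tool), assuming $o is an
# OptionParser instance that has already parsed the command line:
#
#   my $copy = $o->clone();      # copy of opts, short_opts and defaults
#   my $vars = $o->set_vars();   # tool defaults from the MAGIC_set_vars
#                                # paragraph, overridden by any --set-vars
#                                # values the user supplied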
|
|
|
|
# ###########################################################################
|
|
# Lmo::Utils package
|
|
# This package is a copy without comments from the original. The original
|
|
# with comments and its test file can be found in the Bazaar repository at,
|
|
# lib/Lmo/Utils.pm
|
|
# t/lib/Lmo/Utils.t
|
|
# See https://launchpad.net/percona-toolkit for more information.
|
|
# ###########################################################################
|
|
{
|
|
package Lmo::Utils;
|
|
|
|
use strict;
|
|
use warnings qw( FATAL all );
|
|
require Exporter;
|
|
our (@ISA, @EXPORT, @EXPORT_OK);
|
|
|
|
BEGIN {
|
|
@ISA = qw(Exporter);
|
|
@EXPORT = @EXPORT_OK = qw(
|
|
_install_coderef
|
|
_unimport_coderefs
|
|
_glob_for
|
|
_stash_for
|
|
);
|
|
}
|
|
|
|
{
|
|
no strict 'refs';
|
|
sub _glob_for {
|
|
return \*{shift()}
|
|
}
|
|
|
|
sub _stash_for {
|
|
return \%{ shift() . "::" };
|
|
}
|
|
}
|
|
|
|
sub _install_coderef {
|
|
my ($to, $code) = @_;
|
|
|
|
return *{ _glob_for $to } = $code;
|
|
}
|
|
|
|
sub _unimport_coderefs {
|
|
my ($target, @names) = @_;
|
|
return unless @names;
|
|
my $stash = _stash_for($target);
|
|
foreach my $name (@names) {
|
|
if ($stash->{$name} and defined(&{$stash->{$name}})) {
|
|
delete $stash->{$name};
|
|
}
|
|
}
|
|
}
|
|
|
|
1;
|
|
}
|
|
# ###########################################################################
# End Lmo::Utils package
# ###########################################################################
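# Illustrative sketch (not part of the original tool): Lmo::Utils manipulates
# package symbol tables for the Lmo object system below. The package and sub
# names (My::Pkg, hello) are hypothetical.
#
#   Lmo::Utils::_install_coderef('My::Pkg::hello' => sub { return 'hi' });
#   print My::Pkg::hello(), "\n";                        # prints "hi"
#   Lmo::Utils::_unimport_coderefs('My::Pkg', 'hello');  # removes the sub again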
|
|
|
|
# ###########################################################################
|
|
# Lmo::Meta package
|
|
# This package is a copy without comments from the original. The original
|
|
# with comments and its test file can be found in the Bazaar repository at,
|
|
# lib/Lmo/Meta.pm
|
|
# t/lib/Lmo/Meta.t
|
|
# See https://launchpad.net/percona-toolkit for more information.
|
|
# ###########################################################################
|
|
{
|
|
package Lmo::Meta;
|
|
use strict;
|
|
use warnings qw( FATAL all );
|
|
|
|
my %metadata_for;
|
|
|
|
sub new {
|
|
my $class = shift;
|
|
return bless { @_ }, $class
|
|
}
|
|
|
|
sub metadata_for {
|
|
my $self = shift;
|
|
my ($class) = @_;
|
|
|
|
return $metadata_for{$class} ||= {};
|
|
}
|
|
|
|
sub class { shift->{class} }
|
|
|
|
sub attributes {
|
|
my $self = shift;
|
|
return keys %{$self->metadata_for($self->class)}
|
|
}
|
|
|
|
sub attributes_for_new {
|
|
my $self = shift;
|
|
my @attributes;
|
|
|
|
my $class_metadata = $self->metadata_for($self->class);
|
|
while ( my ($attr, $meta) = each %$class_metadata ) {
|
|
if ( exists $meta->{init_arg} ) {
|
|
push @attributes, $meta->{init_arg}
|
|
if defined $meta->{init_arg};
|
|
}
|
|
else {
|
|
push @attributes, $attr;
|
|
}
|
|
}
|
|
return @attributes;
|
|
}
|
|
|
|
1;
|
|
}
|
|
# ###########################################################################
# End Lmo::Meta package
# ###########################################################################
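# Illustrative sketch (not part of the original tool): Lmo::Meta stores
# per-class attribute metadata which Lmo->has() populates and Lmo::Object->new()
# consults. 'Some::Class' below is a hypothetical class name.
#
#   my $meta      = Lmo::Meta->new(class => 'Some::Class');
#   my @attrs     = $meta->attributes();          # attribute names known for the class
#   my @init_args = $meta->attributes_for_new();  # arguments its new() will accept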
|
|
|
|
# ###########################################################################
|
|
# Lmo::Object package
|
|
# This package is a copy without comments from the original. The original
|
|
# with comments and its test file can be found in the Bazaar repository at,
|
|
# lib/Lmo/Object.pm
|
|
# t/lib/Lmo/Object.t
|
|
# See https://launchpad.net/percona-toolkit for more information.
|
|
# ###########################################################################
|
|
{
|
|
package Lmo::Object;
|
|
|
|
use strict;
|
|
use warnings qw( FATAL all );
|
|
|
|
use Carp ();
|
|
use Scalar::Util qw(blessed);
|
|
|
|
use Lmo::Meta;
|
|
use Lmo::Utils qw(_glob_for);
|
|
|
|
sub new {
|
|
my $class = shift;
|
|
my $args = $class->BUILDARGS(@_);
|
|
|
|
my $class_metadata = Lmo::Meta->metadata_for($class);
|
|
|
|
my @args_to_delete;
|
|
while ( my ($attr, $meta) = each %$class_metadata ) {
|
|
next unless exists $meta->{init_arg};
|
|
my $init_arg = $meta->{init_arg};
|
|
|
|
if ( defined $init_arg ) {
|
|
$args->{$attr} = delete $args->{$init_arg};
|
|
}
|
|
else {
|
|
push @args_to_delete, $attr;
|
|
}
|
|
}
|
|
|
|
delete $args->{$_} for @args_to_delete;
|
|
|
|
for my $attribute ( keys %$args ) {
|
|
if ( my $coerce = $class_metadata->{$attribute}{coerce} ) {
|
|
$args->{$attribute} = $coerce->($args->{$attribute});
|
|
}
|
|
if ( my $isa_check = $class_metadata->{$attribute}{isa} ) {
|
|
my ($check_name, $check_sub) = @$isa_check;
|
|
$check_sub->($args->{$attribute});
|
|
}
|
|
}
|
|
|
|
while ( my ($attribute, $meta) = each %$class_metadata ) {
|
|
next unless $meta->{required};
|
|
Carp::confess("Attribute ($attribute) is required for $class")
|
|
if ! exists $args->{$attribute}
|
|
}
|
|
|
|
my $self = bless $args, $class;
|
|
|
|
my @build_subs;
|
|
my $linearized_isa = mro::get_linear_isa($class);
|
|
|
|
for my $isa_class ( @$linearized_isa ) {
|
|
unshift @build_subs, *{ _glob_for "${isa_class}::BUILD" }{CODE};
|
|
}
|
|
my @args = %$args;
|
|
for my $sub (grep { defined($_) && exists &$_ } @build_subs) {
|
|
$sub->( $self, @args);
|
|
}
|
|
return $self;
|
|
}
|
|
|
|
sub BUILDARGS {
|
|
shift; # No need for the classname
|
|
if ( @_ == 1 && ref($_[0]) ) {
|
|
Carp::confess("Single parameters to new() must be a HASH ref, not $_[0]")
|
|
unless ref($_[0]) eq ref({});
|
|
return {%{$_[0]}} # We want a new reference, always
|
|
}
|
|
else {
|
|
return { @_ };
|
|
}
|
|
}
|
|
|
|
sub meta {
|
|
my $class = shift;
|
|
$class = Scalar::Util::blessed($class) || $class;
|
|
return Lmo::Meta->new(class => $class);
|
|
}
|
|
|
|
1;
|
|
}
|
|
# ###########################################################################
# End Lmo::Object package
# ###########################################################################
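# Illustrative sketch (not part of the original tool): Lmo::Object is the base
# class for anything that uses Lmo (defined below). BUILDARGS() accepts either
# a key/value list or a single hash reference, so for a hypothetical class
# My::Class built with Lmo the two calls below are equivalent:
#
#   My::Class->new(foo => 1, bar => 2);
#   My::Class->new({ foo => 1, bar => 2 });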
|
|
|
|
# ###########################################################################
|
|
# Lmo::Types package
|
|
# This package is a copy without comments from the original. The original
|
|
# with comments and its test file can be found in the Bazaar repository at,
|
|
# lib/Lmo/Types.pm
|
|
# t/lib/Lmo/Types.t
|
|
# See https://launchpad.net/percona-toolkit for more information.
|
|
# ###########################################################################
|
|
{
|
|
package Lmo::Types;
|
|
|
|
use strict;
|
|
use warnings qw( FATAL all );
|
|
|
|
use Carp ();
|
|
use Scalar::Util qw(looks_like_number blessed);
|
|
|
|
|
|
our %TYPES = (
|
|
Bool => sub { !$_[0] || (defined $_[0] && looks_like_number($_[0]) && $_[0] == 1) },
|
|
Num => sub { defined $_[0] && looks_like_number($_[0]) },
|
|
Int => sub { defined $_[0] && looks_like_number($_[0]) && $_[0] == int($_[0]) },
|
|
Str => sub { defined $_[0] },
|
|
Object => sub { defined $_[0] && blessed($_[0]) },
|
|
FileHandle => sub { local $@; require IO::Handle; fileno($_[0]) && $_[0]->opened },
|
|
|
|
map {
|
|
my $type = /R/ ? $_ : uc $_;
|
|
$_ . "Ref" => sub { ref $_[0] eq $type }
|
|
} qw(Array Code Hash Regexp Glob Scalar)
|
|
);
|
|
|
|
sub check_type_constaints {
|
|
my ($attribute, $type_check, $check_name, $val) = @_;
|
|
( ref($type_check) eq 'CODE'
|
|
? $type_check->($val)
|
|
: (ref $val eq $type_check
|
|
|| ($val && $val eq $type_check)
|
|
|| (exists $TYPES{$type_check} && $TYPES{$type_check}->($val)))
|
|
)
|
|
|| Carp::confess(
|
|
qq<Attribute ($attribute) does not pass the type constraint because: >
|
|
. qq<Validation failed for '$check_name' with value >
|
|
. (defined $val ? Lmo::Dumper($val) : 'undef') )
|
|
}
|
|
|
|
sub _nested_constraints {
|
|
my ($attribute, $aggregate_type, $type) = @_;
|
|
|
|
my $inner_types;
|
|
if ( $type =~ /\A(ArrayRef|Maybe)\[(.*)\]\z/ ) {
|
|
$inner_types = _nested_constraints($attribute, $1, $2);  # recurse with the attribute name intact
|
|
}
|
|
else {
|
|
$inner_types = $TYPES{$type};
|
|
}
|
|
|
|
if ( $aggregate_type eq 'ArrayRef' ) {
|
|
return sub {
|
|
my ($val) = @_;
|
|
return unless ref($val) eq ref([]);
|
|
|
|
if ($inner_types) {
|
|
for my $value ( @{$val} ) {
|
|
return unless $inner_types->($value)
|
|
}
|
|
}
|
|
else {
|
|
for my $value ( @{$val} ) {
|
|
return unless $value && ($value eq $type
|
|
|| (Scalar::Util::blessed($value) && $value->isa($type)));
|
|
}
|
|
}
|
|
return 1;
|
|
};
|
|
}
|
|
elsif ( $aggregate_type eq 'Maybe' ) {
|
|
return sub {
|
|
my ($value) = @_;
|
|
return 1 if ! defined($value);
|
|
if ($inner_types) {
|
|
return unless $inner_types->($value)
|
|
}
|
|
else {
|
|
return unless $value eq $type
|
|
|| (Scalar::Util::blessed($value) && $value->isa($type));
|
|
}
|
|
return 1;
|
|
}
|
|
}
|
|
else {
|
|
Carp::confess("Nested aggregate types are only implemented for ArrayRefs and Maybe");
|
|
}
|
|
}
|
|
|
|
1;
|
|
}
|
|
# ###########################################################################
# End Lmo::Types package
# ###########################################################################
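# Illustrative sketch (not part of the original tool): %Lmo::Types::TYPES maps
# a type name to a checker sub, and _nested_constraints() builds checkers for
# aggregate types such as ArrayRef[Int]. The attribute name is only used in
# error messages.
#
#   my $check = Lmo::Types::_nested_constraints('some_attr', 'ArrayRef', 'Int');
#   $check->([1, 2, 3]);   # true:  every element passes the Int check
#   $check->([1, 'x']);    # false: 'x' is not an integer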
|
|
|
|
# ###########################################################################
|
|
# Lmo package
|
|
# This package is a copy without comments from the original. The original
|
|
# with comments and its test file can be found in the Bazaar repository at,
|
|
# lib/Lmo.pm
|
|
# t/lib/Lmo.t
|
|
# See https://launchpad.net/percona-toolkit for more information.
|
|
# ###########################################################################
|
|
{
|
|
BEGIN {
|
|
$INC{"Lmo.pm"} = __FILE__;
|
|
package Lmo;
|
|
our $VERSION = '0.30_Percona'; # Forked from 0.30 of Mo.
|
|
|
|
|
|
use strict;
|
|
use warnings qw( FATAL all );
|
|
|
|
use Carp ();
|
|
use Scalar::Util qw(looks_like_number blessed);
|
|
|
|
use Lmo::Meta;
|
|
use Lmo::Object;
|
|
use Lmo::Types;
|
|
|
|
use Lmo::Utils;
|
|
|
|
my %export_for;
|
|
sub import {
|
|
warnings->import(qw(FATAL all));
|
|
strict->import();
|
|
|
|
my $caller = scalar caller(); # Caller's package
|
|
my %exports = (
|
|
extends => \&extends,
|
|
has => \&has,
|
|
with => \&with,
|
|
override => \&override,
|
|
confess => \&Carp::confess,
|
|
);
|
|
|
|
$export_for{$caller} = \%exports;
|
|
|
|
for my $keyword ( keys %exports ) {
|
|
_install_coderef "${caller}::$keyword" => $exports{$keyword};
|
|
}
|
|
|
|
if ( !@{ *{ _glob_for "${caller}::ISA" }{ARRAY} || [] } ) {
|
|
@_ = "Lmo::Object";
|
|
goto *{ _glob_for "${caller}::extends" }{CODE};
|
|
}
|
|
}
|
|
|
|
sub extends {
|
|
my $caller = scalar caller();
|
|
for my $class ( @_ ) {
|
|
_load_module($class);
|
|
}
|
|
_set_package_isa($caller, @_);
|
|
_set_inherited_metadata($caller);
|
|
}
|
|
|
|
sub _load_module {
|
|
my ($class) = @_;
|
|
|
|
(my $file = $class) =~ s{::|'}{/}g;
|
|
$file .= '.pm';
|
|
{ local $@; eval { require "$file" } } # or warn $@;
|
|
return;
|
|
}
|
|
|
|
sub with {
|
|
my $package = scalar caller();
|
|
require Role::Tiny;
|
|
for my $role ( @_ ) {
|
|
_load_module($role);
|
|
_role_attribute_metadata($package, $role);
|
|
}
|
|
Role::Tiny->apply_roles_to_package($package, @_);
|
|
}
|
|
|
|
sub _role_attribute_metadata {
|
|
my ($package, $role) = @_;
|
|
|
|
my $package_meta = Lmo::Meta->metadata_for($package);
|
|
my $role_meta = Lmo::Meta->metadata_for($role);
|
|
|
|
%$package_meta = (%$role_meta, %$package_meta);
|
|
}
|
|
|
|
sub has {
|
|
my $names = shift;
|
|
my $caller = scalar caller();
|
|
|
|
my $class_metadata = Lmo::Meta->metadata_for($caller);
|
|
|
|
for my $attribute ( ref $names ? @$names : $names ) {
|
|
my %args = @_;
|
|
my $method = ($args{is} || '') eq 'ro'
|
|
? sub {
|
|
Carp::confess("Cannot assign a value to a read-only accessor at reader ${caller}::${attribute}")
|
|
if $#_;
|
|
return $_[0]{$attribute};
|
|
}
|
|
: sub {
|
|
return $#_
|
|
? $_[0]{$attribute} = $_[1]
|
|
: $_[0]{$attribute};
|
|
};
|
|
|
|
$class_metadata->{$attribute} = ();
|
|
|
|
if ( my $type_check = $args{isa} ) {
|
|
my $check_name = $type_check;
|
|
|
|
if ( my ($aggregate_type, $inner_type) = $type_check =~ /\A(ArrayRef|Maybe)\[(.*)\]\z/ ) {
|
|
$type_check = Lmo::Types::_nested_constraints($attribute, $aggregate_type, $inner_type);
|
|
}
|
|
|
|
my $check_sub = sub {
|
|
my ($new_val) = @_;
|
|
Lmo::Types::check_type_constaints($attribute, $type_check, $check_name, $new_val);
|
|
};
|
|
|
|
$class_metadata->{$attribute}{isa} = [$check_name, $check_sub];
|
|
my $orig_method = $method;
|
|
$method = sub {
|
|
$check_sub->($_[1]) if $#_;
|
|
goto &$orig_method;
|
|
};
|
|
}
|
|
|
|
if ( my $builder = $args{builder} ) {
|
|
my $original_method = $method;
|
|
$method = sub {
|
|
$#_
|
|
? goto &$original_method
|
|
: ! exists $_[0]{$attribute}
|
|
? $_[0]{$attribute} = $_[0]->$builder
|
|
: goto &$original_method
|
|
};
|
|
}
|
|
|
|
if ( my $code = $args{default} ) {
|
|
Carp::confess("${caller}::${attribute}'s default is $code, but should be a coderef")
|
|
unless ref($code) eq 'CODE';
|
|
my $original_method = $method;
|
|
$method = sub {
|
|
$#_
|
|
? goto &$original_method
|
|
: ! exists $_[0]{$attribute}
|
|
? $_[0]{$attribute} = $_[0]->$code
|
|
: goto &$original_method
|
|
};
|
|
}
|
|
|
|
if ( my $role = $args{does} ) {
|
|
my $original_method = $method;
|
|
$method = sub {
|
|
if ( $#_ ) {
|
|
Carp::confess(qq<Attribute ($attribute) doesn't consume a '$role' role">)
|
|
unless Scalar::Util::blessed($_[1]) && eval { $_[1]->does($role) }
|
|
}
|
|
goto &$original_method
|
|
};
|
|
}
|
|
|
|
if ( my $coercion = $args{coerce} ) {
|
|
$class_metadata->{$attribute}{coerce} = $coercion;
|
|
my $original_method = $method;
|
|
$method = sub {
|
|
if ( $#_ ) {
|
|
return $original_method->($_[0], $coercion->($_[1]))
|
|
}
|
|
goto &$original_method;
|
|
}
|
|
}
|
|
|
|
_install_coderef "${caller}::$attribute" => $method;
|
|
|
|
if ( $args{required} ) {
|
|
$class_metadata->{$attribute}{required} = 1;
|
|
}
|
|
|
|
if ($args{clearer}) {
|
|
_install_coderef "${caller}::$args{clearer}"
|
|
=> sub { delete shift->{$attribute} }
|
|
}
|
|
|
|
if ($args{predicate}) {
|
|
_install_coderef "${caller}::$args{predicate}"
|
|
=> sub { exists shift->{$attribute} }
|
|
}
|
|
|
|
if ($args{handles}) {
|
|
_has_handles($caller, $attribute, \%args);
|
|
}
|
|
|
|
if (exists $args{init_arg}) {
|
|
$class_metadata->{$attribute}{init_arg} = $args{init_arg};
|
|
}
|
|
}
|
|
}
|
|
|
|
sub _has_handles {
|
|
my ($caller, $attribute, $args) = @_;
|
|
my $handles = $args->{handles};
|
|
|
|
my $ref = ref $handles;
|
|
my $kv;
|
|
if ( $ref eq ref [] ) {
|
|
$kv = { map { $_,$_ } @{$handles} };
|
|
}
|
|
elsif ( $ref eq ref {} ) {
|
|
$kv = $handles;
|
|
}
|
|
elsif ( $ref eq ref qr// ) {
|
|
Carp::confess("Cannot delegate methods based on a Regexp without a type constraint (isa)")
|
|
unless $args->{isa};
|
|
my $target_class = $args->{isa};
|
|
$kv = {
|
|
map { $_, $_ }
|
|
grep { $_ =~ $handles }
|
|
grep { !exists $Lmo::Object::{$_} && $target_class->can($_) }
|
|
grep { !$export_for{$target_class}->{$_} }
|
|
keys %{ _stash_for $target_class }
|
|
};
|
|
}
|
|
else {
|
|
Carp::confess("handles for $ref not yet implemented");
|
|
}
|
|
|
|
while ( my ($method, $target) = each %{$kv} ) {
|
|
my $name = _glob_for "${caller}::$method";
|
|
Carp::confess("You cannot overwrite a locally defined method ($method) with a delegation")
|
|
if defined &$name;
|
|
|
|
my ($target, @curried_args) = ref($target) ? @$target : $target;
|
|
*$name = sub {
|
|
my $self = shift;
|
|
my $delegate_to = $self->$attribute();
|
|
my $error = "Cannot delegate $method to $target because the value of $attribute";
|
|
Carp::confess("$error is not defined") unless $delegate_to;
|
|
Carp::confess("$error is not an object (got '$delegate_to')")
|
|
unless Scalar::Util::blessed($delegate_to) || (!ref($delegate_to) && $delegate_to->can($target));
|
|
return $delegate_to->$target(@curried_args, @_);
|
|
}
|
|
}
|
|
}
|
|
|
|
sub _set_package_isa {
|
|
my ($package, @new_isa) = @_;
|
|
my $package_isa = \*{ _glob_for "${package}::ISA" };
|
|
@{*$package_isa} = @new_isa;
|
|
}
|
|
|
|
sub _set_inherited_metadata {
|
|
my $class = shift;
|
|
my $class_metadata = Lmo::Meta->metadata_for($class);
|
|
my $linearized_isa = mro::get_linear_isa($class);
|
|
my %new_metadata;
|
|
|
|
for my $isa_class (reverse @$linearized_isa) {
|
|
my $isa_metadata = Lmo::Meta->metadata_for($isa_class);
|
|
%new_metadata = (
|
|
%new_metadata,
|
|
%$isa_metadata,
|
|
);
|
|
}
|
|
%$class_metadata = %new_metadata;
|
|
}
|
|
|
|
sub unimport {
|
|
my $caller = scalar caller();
|
|
my $target = caller;
|
|
_unimport_coderefs($target, keys %{$export_for{$caller}});
|
|
}
|
|
|
|
sub Dumper {
|
|
require Data::Dumper;
|
|
local $Data::Dumper::Indent = 0;
|
|
local $Data::Dumper::Sortkeys = 0;
|
|
local $Data::Dumper::Quotekeys = 0;
|
|
local $Data::Dumper::Terse = 1;
|
|
|
|
Data::Dumper::Dumper(@_)
|
|
}
|
|
|
|
BEGIN {
|
|
if ($] >= 5.010) {
|
|
{ local $@; require mro; }
|
|
}
|
|
else {
|
|
local $@;
|
|
eval {
|
|
require MRO::Compat;
|
|
} or do {
|
|
*mro::get_linear_isa = *mro::get_linear_isa_dfs = sub {
|
|
no strict 'refs';
|
|
|
|
my $classname = shift;
|
|
|
|
my @lin = ($classname);
|
|
my %stored;
|
|
foreach my $parent (@{"$classname\::ISA"}) {
|
|
my $plin = mro::get_linear_isa_dfs($parent);
|
|
foreach (@$plin) {
|
|
next if exists $stored{$_};
|
|
push(@lin, $_);
|
|
$stored{$_} = 1;
|
|
}
|
|
}
|
|
return \@lin;
|
|
};
|
|
}
|
|
}
|
|
}
|
|
|
|
sub override {
|
|
my ($methods, $code) = @_;
|
|
my $caller = scalar caller;
|
|
|
|
for my $method ( ref($methods) ? @$methods : $methods ) {
|
|
my $full_method = "${caller}::${method}";
|
|
*{_glob_for $full_method} = $code;
|
|
}
|
|
}
|
|
|
|
}
|
|
1;
|
|
}
|
|
# ###########################################################################
# End Lmo package
# ###########################################################################
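# Illustrative sketch (not part of the original tool): the BEGIN block above
# sets $INC{"Lmo.pm"}, so "use Lmo" resolves to the package in this file.
# My::Widget is a hypothetical class showing has() declarations and the
# constructor behavior inherited from Lmo::Object:
#
#   {
#      package My::Widget;
#      use Lmo;              # imports has(), extends(), with(), override()
#      has 'name' => ( is => 'rw', isa => 'Str', required => 1 );
#      has 'size' => ( is => 'ro', isa => 'Int', default => sub { 10 } );
#   }
#
#   my $w = My::Widget->new(name => 'checksum');
#   print $w->size(), "\n";  # 10, filled in lazily from the default coderef
#   $w->name('heartbeat');   # rw accessor; an isa violation would confess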
|
|
|
|
# ###########################################################################
|
|
# DSNParser package
|
|
# This package is a copy without comments from the original. The original
|
|
# with comments and its test file can be found in the Bazaar repository at,
|
|
# lib/DSNParser.pm
|
|
# t/lib/DSNParser.t
|
|
# See https://launchpad.net/percona-toolkit for more information.
|
|
# ###########################################################################
|
|
{
|
|
package DSNParser;
|
|
|
|
use strict;
|
|
use warnings FATAL => 'all';
|
|
use English qw(-no_match_vars);
|
|
use constant PTDEBUG => $ENV{PTDEBUG} || 0;
|
|
|
|
use Data::Dumper;
|
|
$Data::Dumper::Indent = 0;
|
|
$Data::Dumper::Quotekeys = 0;
|
|
|
|
my $dsn_sep = qr/(?<!\\),/;
|
|
|
|
eval {
|
|
require DBI;
|
|
};
|
|
my $have_dbi = $EVAL_ERROR ? 0 : 1;
|
|
|
|
sub new {
|
|
my ( $class, %args ) = @_;
|
|
foreach my $arg ( qw(opts) ) {
|
|
die "I need a $arg argument" unless $args{$arg};
|
|
}
|
|
my $self = {
|
|
opts => {} # h, P, u, etc. Should come from DSN OPTIONS section in POD.
|
|
};
|
|
foreach my $opt ( @{$args{opts}} ) {
|
|
if ( !$opt->{key} || !$opt->{desc} ) {
|
|
die "Invalid DSN option: ", Dumper($opt);
|
|
}
|
|
PTDEBUG && _d('DSN option:',
|
|
join(', ',
|
|
map { "$_=" . (defined $opt->{$_} ? ($opt->{$_} || '') : 'undef') }
|
|
keys %$opt
|
|
)
|
|
);
|
|
$self->{opts}->{$opt->{key}} = {
|
|
dsn => $opt->{dsn},
|
|
desc => $opt->{desc},
|
|
copy => $opt->{copy} || 0,
|
|
};
|
|
}
|
|
return bless $self, $class;
|
|
}
|
|
|
|
sub prop {
|
|
my ( $self, $prop, $value ) = @_;
|
|
if ( @_ > 2 ) {
|
|
PTDEBUG && _d('Setting', $prop, 'property');
|
|
$self->{$prop} = $value;
|
|
}
|
|
return $self->{$prop};
|
|
}
|
|
|
|
sub parse {
|
|
my ( $self, $dsn, $prev, $defaults ) = @_;
|
|
if ( !$dsn ) {
|
|
PTDEBUG && _d('No DSN to parse');
|
|
return;
|
|
}
|
|
PTDEBUG && _d('Parsing', $dsn);
|
|
$prev ||= {};
|
|
$defaults ||= {};
|
|
my %given_props;
|
|
my %final_props;
|
|
my $opts = $self->{opts};
|
|
|
|
foreach my $dsn_part ( split($dsn_sep, $dsn) ) {
|
|
$dsn_part =~ s/\\,/,/g;
|
|
if ( my ($prop_key, $prop_val) = $dsn_part =~ m/^(.)=(.*)$/ ) {
|
|
$given_props{$prop_key} = $prop_val;
|
|
}
|
|
else {
|
|
PTDEBUG && _d('Interpreting', $dsn_part, 'as h=', $dsn_part);
|
|
$given_props{h} = $dsn_part;
|
|
}
|
|
}
|
|
|
|
foreach my $key ( keys %$opts ) {
|
|
PTDEBUG && _d('Finding value for', $key);
|
|
$final_props{$key} = $given_props{$key};
|
|
if ( !defined $final_props{$key}
|
|
&& defined $prev->{$key} && $opts->{$key}->{copy} )
|
|
{
|
|
$final_props{$key} = $prev->{$key};
|
|
PTDEBUG && _d('Copying value for', $key, 'from previous DSN');
|
|
}
|
|
if ( !defined $final_props{$key} ) {
|
|
$final_props{$key} = $defaults->{$key};
|
|
PTDEBUG && _d('Copying value for', $key, 'from defaults');
|
|
}
|
|
}
|
|
|
|
foreach my $key ( keys %given_props ) {
|
|
die "Unknown DSN option '$key' in '$dsn'. For more details, "
|
|
. "please use the --help option, or try 'perldoc $PROGRAM_NAME' "
|
|
. "for complete documentation."
|
|
unless exists $opts->{$key};
|
|
}
|
|
if ( (my $required = $self->prop('required')) ) {
|
|
foreach my $key ( keys %$required ) {
|
|
die "Missing required DSN option '$key' in '$dsn'. For more details, "
|
|
. "please use the --help option, or try 'perldoc $PROGRAM_NAME' "
|
|
. "for complete documentation."
|
|
unless $final_props{$key};
|
|
}
|
|
}
|
|
|
|
return \%final_props;
|
|
}
|
|
|
|
sub parse_options {
|
|
my ( $self, $o ) = @_;
|
|
die 'I need an OptionParser object' unless ref $o eq 'OptionParser';
|
|
my $dsn_string
|
|
= join(',',
|
|
map { "$_=".$o->get($_); }
|
|
grep { $o->has($_) && $o->get($_) }
|
|
keys %{$self->{opts}}
|
|
);
|
|
PTDEBUG && _d('DSN string made from options:', $dsn_string);
|
|
return $self->parse($dsn_string);
|
|
}
|
|
|
|
sub as_string {
|
|
my ( $self, $dsn, $props ) = @_;
|
|
return $dsn unless ref $dsn;
|
|
my @keys = $props ? @$props : sort keys %$dsn;
|
|
return join(',',
|
|
map { "$_=" . ($_ eq 'p' ? '...' : $dsn->{$_}) }
|
|
grep {
|
|
exists $self->{opts}->{$_}
|
|
&& exists $dsn->{$_}
|
|
&& defined $dsn->{$_}
|
|
} @keys);
|
|
}
|
|
|
|
sub usage {
|
|
my ( $self ) = @_;
|
|
my $usage
|
|
= "DSN syntax is key=value[,key=value...] Allowable DSN keys:\n\n"
|
|
. " KEY COPY MEANING\n"
|
|
. " === ==== =============================================\n";
|
|
my %opts = %{$self->{opts}};
|
|
foreach my $key ( sort keys %opts ) {
|
|
$usage .= " $key "
|
|
. ($opts{$key}->{copy} ? 'yes ' : 'no ')
|
|
. ($opts{$key}->{desc} || '[No description]')
|
|
. "\n";
|
|
}
|
|
$usage .= "\n If the DSN is a bareword, the word is treated as the 'h' key.\n";
|
|
return $usage;
|
|
}
|
|
|
|
sub get_cxn_params {
|
|
my ( $self, $info ) = @_;
|
|
my $dsn;
|
|
my %opts = %{$self->{opts}};
|
|
my $driver = $self->prop('dbidriver') || '';
|
|
if ( $driver eq 'Pg' ) {
|
|
$dsn = 'DBI:Pg:dbname=' . ( $info->{D} || '' ) . ';'
|
|
. join(';', map { "$opts{$_}->{dsn}=$info->{$_}" }
|
|
grep { defined $info->{$_} }
|
|
qw(h P));
|
|
}
|
|
else {
|
|
$dsn = 'DBI:mysql:' . ( $info->{D} || '' ) . ';'
|
|
. join(';', map { "$opts{$_}->{dsn}=$info->{$_}" }
|
|
grep { defined $info->{$_} }
|
|
qw(F h P S A))
|
|
. ';mysql_read_default_group=client'
|
|
. ($info->{L} ? ';mysql_local_infile=1' : '');
|
|
}
|
|
PTDEBUG && _d($dsn);
|
|
return ($dsn, $info->{u}, $info->{p});
|
|
}
|
|
|
|
sub fill_in_dsn {
|
|
my ( $self, $dbh, $dsn ) = @_;
|
|
my $vars = $dbh->selectall_hashref('SHOW VARIABLES', 'Variable_name');
|
|
my ($user, $db) = $dbh->selectrow_array('SELECT USER(), DATABASE()');
|
|
$user =~ s/@.*//;
|
|
$dsn->{h} ||= $vars->{hostname}->{Value};
|
|
$dsn->{S} ||= $vars->{'socket'}->{Value};
|
|
$dsn->{P} ||= $vars->{port}->{Value};
|
|
$dsn->{u} ||= $user;
|
|
$dsn->{D} ||= $db;
|
|
}
|
|
|
|
sub get_dbh {
|
|
my ( $self, $cxn_string, $user, $pass, $opts ) = @_;
|
|
$opts ||= {};
|
|
my $defaults = {
|
|
AutoCommit => 0,
|
|
RaiseError => 1,
|
|
PrintError => 0,
|
|
ShowErrorStatement => 1,
|
|
mysql_enable_utf8 => ($cxn_string =~ m/charset=utf8/i ? 1 : 0),
|
|
};
|
|
@{$defaults}{ keys %$opts } = values %$opts;
|
|
if (delete $defaults->{L}) { # L for LOAD DATA LOCAL INFILE, our own extension
|
|
$defaults->{mysql_local_infile} = 1;
|
|
}
|
|
|
|
if ( $opts->{mysql_use_result} ) {
|
|
$defaults->{mysql_use_result} = 1;
|
|
}
|
|
|
|
if ( !$have_dbi ) {
|
|
die "Cannot connect to MySQL because the Perl DBI module is not "
|
|
. "installed or not found. Run 'perl -MDBI' to see the directories "
|
|
. "that Perl searches for DBI. If DBI is not installed, try:\n"
|
|
. " Debian/Ubuntu apt-get install libdbi-perl\n"
|
|
. " RHEL/CentOS yum install perl-DBI\n"
|
|
. " OpenSolaris pkg install pkg:/SUNWpmdbi\n";
|
|
|
|
}
|
|
|
|
my $dbh;
|
|
my $tries = 2;
|
|
while ( !$dbh && $tries-- ) {
|
|
PTDEBUG && _d($cxn_string, ' ', $user, ' ', $pass,
|
|
join(', ', map { "$_=>$defaults->{$_}" } keys %$defaults ));
|
|
|
|
$dbh = eval { DBI->connect($cxn_string, $user, $pass, $defaults) };
|
|
|
|
if ( !$dbh && $EVAL_ERROR ) {
|
|
if ( $EVAL_ERROR =~ m/locate DBD\/mysql/i ) {
|
|
die "Cannot connect to MySQL because the Perl DBD::mysql module is "
|
|
. "not installed or not found. Run 'perl -MDBD::mysql' to see "
|
|
. "the directories that Perl searches for DBD::mysql. If "
|
|
. "DBD::mysql is not installed, try:\n"
|
|
. " Debian/Ubuntu apt-get install libdbd-mysql-perl\n"
|
|
. " RHEL/CentOS yum install perl-DBD-MySQL\n"
|
|
. " OpenSolaris pgk install pkg:/SUNWapu13dbd-mysql\n";
|
|
}
|
|
elsif ( $EVAL_ERROR =~ m/not a compiled character set|character set utf8/ ) {
|
|
PTDEBUG && _d('Going to try again without utf8 support');
|
|
delete $defaults->{mysql_enable_utf8};
|
|
}
|
|
if ( !$tries ) {
|
|
die $EVAL_ERROR;
|
|
}
|
|
}
|
|
}
|
|
|
|
if ( $cxn_string =~ m/mysql/i ) {
|
|
my $sql;
|
|
|
|
if ( my ($charset) = $cxn_string =~ m/charset=([\w]+)/ ) {
|
|
$sql = qq{/*!40101 SET NAMES "$charset"*/};
|
|
PTDEBUG && _d($dbh, $sql);
|
|
eval { $dbh->do($sql) };
|
|
if ( $EVAL_ERROR ) {
|
|
die "Error setting NAMES to $charset: $EVAL_ERROR";
|
|
}
|
|
PTDEBUG && _d('Enabling charset for STDOUT');
|
|
if ( $charset eq 'utf8' ) {
|
|
binmode(STDOUT, ':utf8')
|
|
or die "Can't binmode(STDOUT, ':utf8'): $OS_ERROR";
|
|
}
|
|
else {
|
|
binmode(STDOUT) or die "Can't binmode(STDOUT): $OS_ERROR";
|
|
}
|
|
}
|
|
|
|
if ( my $vars = $self->prop('set-vars') ) {
|
|
$self->set_vars($dbh, $vars);
|
|
}
|
|
|
|
$sql = 'SELECT @@SQL_MODE';
|
|
PTDEBUG && _d($dbh, $sql);
|
|
my ($sql_mode) = eval { $dbh->selectrow_array($sql) };
|
|
if ( $EVAL_ERROR ) {
|
|
die "Error getting the current SQL_MODE: $EVAL_ERROR";
|
|
}
|
|
|
|
$sql = 'SET @@SQL_QUOTE_SHOW_CREATE = 1'
|
|
. '/*!40101, @@SQL_MODE=\'NO_AUTO_VALUE_ON_ZERO'
|
|
. ($sql_mode ? ",$sql_mode" : '')
|
|
. '\'*/';
|
|
PTDEBUG && _d($dbh, $sql);
|
|
eval { $dbh->do($sql) };
|
|
if ( $EVAL_ERROR ) {
|
|
die "Error setting SQL_QUOTE_SHOW_CREATE, SQL_MODE"
|
|
. ($sql_mode ? " and $sql_mode" : '')
|
|
. ": $EVAL_ERROR";
|
|
}
|
|
}
|
|
|
|
PTDEBUG && _d('DBH info: ',
|
|
$dbh,
|
|
Dumper($dbh->selectrow_hashref(
|
|
'SELECT DATABASE(), CONNECTION_ID(), VERSION()/*!50038 , @@hostname*/')),
|
|
'Connection info:', $dbh->{mysql_hostinfo},
|
|
'Character set info:', Dumper($dbh->selectall_arrayref(
|
|
"SHOW VARIABLES LIKE 'character_set%'", { Slice => {}})),
|
|
'$DBD::mysql::VERSION:', $DBD::mysql::VERSION,
|
|
'$DBI::VERSION:', $DBI::VERSION,
|
|
);
|
|
|
|
return $dbh;
|
|
}
|
|
|
|
sub get_hostname {
|
|
my ( $self, $dbh ) = @_;
|
|
if ( my ($host) = ($dbh->{mysql_hostinfo} || '') =~ m/^(\w+) via/ ) {
|
|
return $host;
|
|
}
|
|
my ( $hostname, $one ) = $dbh->selectrow_array(
|
|
'SELECT /*!50038 @@hostname, */ 1');
|
|
return $hostname;
|
|
}
|
|
|
|
sub disconnect {
|
|
my ( $self, $dbh ) = @_;
|
|
PTDEBUG && $self->print_active_handles($dbh);
|
|
$dbh->disconnect;
|
|
}
|
|
|
|
sub print_active_handles {
|
|
my ( $self, $thing, $level ) = @_;
|
|
$level ||= 0;
|
|
printf("# Active %sh: %s %s %s\n", ($thing->{Type} || 'undef'), "\t" x $level,
|
|
$thing, (($thing->{Type} || '') eq 'st' ? $thing->{Statement} || '' : ''))
|
|
or die "Cannot print: $OS_ERROR";
|
|
foreach my $handle ( grep {defined} @{ $thing->{ChildHandles} } ) {
|
|
$self->print_active_handles( $handle, $level + 1 );
|
|
}
|
|
}
|
|
|
|
sub copy {
|
|
my ( $self, $dsn_1, $dsn_2, %args ) = @_;
|
|
die 'I need a dsn_1 argument' unless $dsn_1;
|
|
die 'I need a dsn_2 argument' unless $dsn_2;
|
|
my %new_dsn = map {
|
|
my $key = $_;
|
|
my $val;
|
|
if ( $args{overwrite} ) {
|
|
$val = defined $dsn_1->{$key} ? $dsn_1->{$key} : $dsn_2->{$key};
|
|
}
|
|
else {
|
|
$val = defined $dsn_2->{$key} ? $dsn_2->{$key} : $dsn_1->{$key};
|
|
}
|
|
$key => $val;
|
|
} keys %{$self->{opts}};
|
|
return \%new_dsn;
|
|
}
|
|
|
|
sub set_vars {
|
|
my ($self, $dbh, $vars) = @_;
|
|
|
|
return unless $vars;
|
|
|
|
foreach my $var ( sort keys %$vars ) {
|
|
my $val = $vars->{$var}->{val};
|
|
|
|
(my $quoted_var = $var) =~ s/_/\\_/g;  # escape every _ so LIKE matches it literally
|
|
my ($var_exists, $current_val);
|
|
eval {
|
|
($var_exists, $current_val) = $dbh->selectrow_array(
|
|
"SHOW VARIABLES LIKE '$quoted_var'");
|
|
};
|
|
my $e = $EVAL_ERROR;
|
|
if ( $e ) {
|
|
PTDEBUG && _d($e);
|
|
}
|
|
|
|
if ( $vars->{$var}->{default} && !$var_exists ) {
|
|
PTDEBUG && _d('Not setting default var', $var,
|
|
'because it does not exist');
|
|
next;
|
|
}
|
|
|
|
if ( $current_val && $current_val eq $val ) {
|
|
PTDEBUG && _d('Not setting var', $var, 'because its value',
|
|
'is already', $val);
|
|
next;
|
|
}
|
|
|
|
my $sql = "SET SESSION $var=$val";
|
|
PTDEBUG && _d($dbh, $sql);
|
|
eval { $dbh->do($sql) };
|
|
if ( my $set_error = $EVAL_ERROR ) {
|
|
chomp($set_error);
|
|
$set_error =~ s/ at \S+ line \d+//;
|
|
my $msg = "Error setting $var: $set_error";
|
|
if ( $current_val ) {
|
|
$msg .= " The current value for $var is $current_val. "
|
|
. "If the variable is read only (not dynamic), specify "
|
|
. "--set-vars $var=$current_val to avoid this warning, "
|
|
. "else manually set the variable and restart MySQL.";
|
|
}
|
|
warn $msg . "\n\n";
|
|
}
|
|
}
|
|
|
|
return;
|
|
}
|
|
|
|
sub _d {
|
|
my ($package, undef, $line) = caller 0;
|
|
@_ = map { (my $temp = $_) =~ s/\n/\n# /g; $temp; }
|
|
map { defined $_ ? $_ : 'undef' }
|
|
@_;
|
|
print STDERR "# $package:$line $PID ", join(' ', @_), "\n";
|
|
}
|
|
|
|
1;
|
|
}
|
|
# ###########################################################################
# End DSNParser package
# ###########################################################################
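# Illustrative sketch (not part of the original tool): DSNParser is normally
# constructed from the DSN OPTIONS section of the tool's POD; the small opts
# list below is a made-up subset just to show the API.
#
#   my $dp = DSNParser->new(opts => [
#      { key => 'h', desc => 'Host',     dsn => 'host', copy => 1 },
#      { key => 'P', desc => 'Port',     dsn => 'port', copy => 1 },
#      { key => 'u', desc => 'User',     dsn => undef,  copy => 1 },
#      { key => 'p', desc => 'Password', dsn => undef,  copy => 1 },
#   ]);
#   my $dsn = $dp->parse('h=127.0.0.1,P=3306,u=bob');
#   print $dp->as_string($dsn), "\n";   # P=3306,h=127.0.0.1,u=bob
#                                       # (keys sorted; a password prints as p=...)
#   my ($cxn, $user, $pass) = $dp->get_cxn_params($dsn);
#   my $dbh = $dp->get_dbh($cxn, $user, $pass);   # requires DBI and a live server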
|
|
|
|
# ###########################################################################
|
|
# Daemon package
|
|
# This package is a copy without comments from the original. The original
|
|
# with comments and its test file can be found in the Bazaar repository at,
|
|
# lib/Daemon.pm
|
|
# t/lib/Daemon.t
|
|
# See https://launchpad.net/percona-toolkit for more information.
|
|
# ###########################################################################
|
|
{
|
|
package Daemon;
|
|
|
|
use strict;
|
|
use warnings FATAL => 'all';
|
|
use English qw(-no_match_vars);
|
|
use constant PTDEBUG => $ENV{PTDEBUG} || 0;
|
|
|
|
use POSIX qw(setsid);
|
|
|
|
sub new {
|
|
my ( $class, %args ) = @_;
|
|
foreach my $arg ( qw(o) ) {
|
|
die "I need a $arg argument" unless $args{$arg};
|
|
}
|
|
my $o = $args{o};
|
|
my $self = {
|
|
o => $o,
|
|
log_file => $o->has('log') ? $o->get('log') : undef,
|
|
PID_file => $o->has('pid') ? $o->get('pid') : undef,
|
|
};
|
|
|
|
check_PID_file(undef, $self->{PID_file});
|
|
|
|
PTDEBUG && _d('Daemonized child will log to', $self->{log_file});
|
|
return bless $self, $class;
|
|
}
|
|
|
|
sub daemonize {
|
|
my ( $self ) = @_;
|
|
|
|
PTDEBUG && _d('About to fork and daemonize');
|
|
defined (my $pid = fork()) or die "Cannot fork: $OS_ERROR";
|
|
if ( $pid ) {
|
|
PTDEBUG && _d('Parent PID', $PID, 'exiting after forking child PID',$pid);
|
|
exit;
|
|
}
|
|
|
|
PTDEBUG && _d('Daemonizing child PID', $PID);
|
|
$self->{PID_owner} = $PID;
|
|
$self->{child} = 1;
|
|
|
|
POSIX::setsid() or die "Cannot start a new session: $OS_ERROR";
|
|
chdir '/' or die "Cannot chdir to /: $OS_ERROR";
|
|
|
|
$self->_make_PID_file();
|
|
|
|
$OUTPUT_AUTOFLUSH = 1;
|
|
|
|
PTDEBUG && _d('Redirecting STDIN to /dev/null');
|
|
close STDIN;
|
|
open STDIN, '/dev/null'
|
|
or die "Cannot reopen STDIN to /dev/null: $OS_ERROR";
|
|
|
|
if ( $self->{log_file} ) {
|
|
PTDEBUG && _d('Redirecting STDOUT and STDERR to', $self->{log_file});
|
|
close STDOUT;
|
|
open STDOUT, '>>', $self->{log_file}
|
|
or die "Cannot open log file $self->{log_file}: $OS_ERROR";
|
|
|
|
close STDERR;
|
|
open STDERR, ">&STDOUT"
|
|
or die "Cannot dupe STDERR to STDOUT: $OS_ERROR";
|
|
}
|
|
else {
|
|
if ( -t STDOUT ) {
|
|
PTDEBUG && _d('No log file and STDOUT is a terminal;',
|
|
'redirecting to /dev/null');
|
|
close STDOUT;
|
|
open STDOUT, '>', '/dev/null'
|
|
or die "Cannot reopen STDOUT to /dev/null: $OS_ERROR";
|
|
}
|
|
if ( -t STDERR ) {
|
|
PTDEBUG && _d('No log file and STDERR is a terminal;',
|
|
'redirecting to /dev/null');
|
|
close STDERR;
|
|
open STDERR, '>', '/dev/null'
|
|
or die "Cannot reopen STDERR to /dev/null: $OS_ERROR";
|
|
}
|
|
}
|
|
|
|
return;
|
|
}
|
|
|
|
sub check_PID_file {
|
|
my ( $self, $file ) = @_;
|
|
my $PID_file = $self ? $self->{PID_file} : $file;
|
|
PTDEBUG && _d('Checking PID file', $PID_file);
|
|
if ( $PID_file && -f $PID_file ) {
|
|
my $pid;
|
|
eval {
|
|
chomp($pid = (slurp_file($PID_file) || ''));
|
|
};
|
|
if ( $EVAL_ERROR ) {
|
|
die "The PID file $PID_file already exists but it cannot be read: "
|
|
. $EVAL_ERROR;
|
|
}
|
|
PTDEBUG && _d('PID file exists; it contains PID', $pid);
|
|
if ( $pid ) {
|
|
my $pid_is_alive = kill 0, $pid;
|
|
if ( $pid_is_alive ) {
|
|
die "The PID file $PID_file already exists "
|
|
. " and the PID that it contains, $pid, is running";
|
|
}
|
|
else {
|
|
warn "Overwriting PID file $PID_file because the PID that it "
|
|
. "contains, $pid, is not running";
|
|
}
|
|
}
|
|
else {
|
|
die "The PID file $PID_file already exists but it does not "
|
|
. "contain a PID";
|
|
}
|
|
}
|
|
else {
|
|
PTDEBUG && _d('No PID file');
|
|
}
|
|
return;
|
|
}
|
|
|
|
sub make_PID_file {
|
|
my ( $self ) = @_;
|
|
if ( exists $self->{child} ) {
|
|
die "Do not call Daemon::make_PID_file() for daemonized scripts";
|
|
}
|
|
$self->_make_PID_file();
|
|
$self->{PID_owner} = $PID;
|
|
return;
|
|
}
|
|
|
|
sub _make_PID_file {
|
|
my ( $self ) = @_;
|
|
|
|
my $PID_file = $self->{PID_file};
|
|
if ( !$PID_file ) {
|
|
PTDEBUG && _d('No PID file to create');
|
|
return;
|
|
}
|
|
|
|
$self->check_PID_file();
|
|
|
|
open my $PID_FH, '>', $PID_file
|
|
or die "Cannot open PID file $PID_file: $OS_ERROR";
|
|
print $PID_FH $PID
|
|
or die "Cannot print to PID file $PID_file: $OS_ERROR";
|
|
close $PID_FH
|
|
or die "Cannot close PID file $PID_file: $OS_ERROR";
|
|
|
|
PTDEBUG && _d('Created PID file:', $self->{PID_file});
|
|
return;
|
|
}
|
|
|
|
sub _remove_PID_file {
|
|
my ( $self ) = @_;
|
|
if ( $self->{PID_file} && -f $self->{PID_file} ) {
|
|
unlink $self->{PID_file}
|
|
or warn "Cannot remove PID file $self->{PID_file}: $OS_ERROR";
|
|
PTDEBUG && _d('Removed PID file');
|
|
}
|
|
else {
|
|
PTDEBUG && _d('No PID to remove');
|
|
}
|
|
return;
|
|
}
|
|
|
|
sub DESTROY {
|
|
my ( $self ) = @_;
|
|
|
|
$self->_remove_PID_file() if ($self->{PID_owner} || 0) == $PID;
|
|
|
|
return;
|
|
}
|
|
|
|
sub slurp_file {
|
|
my ($file) = @_;
|
|
return unless $file;
|
|
open my $fh, "<", $file or die "Cannot open $file: $OS_ERROR";
|
|
return do { local $/; <$fh> };
|
|
}
|
|
|
|
sub _d {
|
|
my ($package, undef, $line) = caller 0;
|
|
@_ = map { (my $temp = $_) =~ s/\n/\n# /g; $temp; }
|
|
map { defined $_ ? $_ : 'undef' }
|
|
@_;
|
|
print STDERR "# $package:$line $PID ", join(' ', @_), "\n";
|
|
}
|
|
|
|
1;
|
|
}
|
|
# ###########################################################################
# End Daemon package
# ###########################################################################
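# Illustrative sketch (not part of the original tool): Daemon expects the
# tool's OptionParser object ($o below) so it can read the --log and --pid
# options; the --daemonize option name is an assumption for this sketch.
#
#   my $daemon = Daemon->new(o => $o);
#   if ( $o->get('daemonize') ) {
#      $daemon->daemonize();       # fork, setsid, redirect output, write PID file
#   }
#   elsif ( $o->get('pid') ) {
#      $daemon->make_PID_file();   # write the PID file without forking
#   }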
|
|
|
|
# ###########################################################################
|
|
# Quoter package
|
|
# This package is a copy without comments from the original. The original
|
|
# with comments and its test file can be found in the Bazaar repository at,
|
|
# lib/Quoter.pm
|
|
# t/lib/Quoter.t
|
|
# See https://launchpad.net/percona-toolkit for more information.
|
|
# ###########################################################################
|
|
{
|
|
package Quoter;
|
|
|
|
use strict;
|
|
use warnings FATAL => 'all';
|
|
use English qw(-no_match_vars);
|
|
use constant PTDEBUG => $ENV{PTDEBUG} || 0;
|
|
|
|
use Data::Dumper;
|
|
$Data::Dumper::Indent = 1;
|
|
$Data::Dumper::Sortkeys = 1;
|
|
$Data::Dumper::Quotekeys = 0;
|
|
|
|
sub new {
|
|
my ( $class, %args ) = @_;
|
|
return bless {}, $class;
|
|
}
|
|
|
|
sub quote {
|
|
my ( $self, @vals ) = @_;
|
|
foreach my $val ( @vals ) {
|
|
$val =~ s/`/``/g;
|
|
}
|
|
return join('.', map { '`' . $_ . '`' } @vals);
|
|
}
|
|
|
|
sub quote_val {
|
|
my ( $self, $val, %args ) = @_;
|
|
|
|
return 'NULL' unless defined $val; # undef = NULL
|
|
return "''" if $val eq ''; # blank string = ''
|
|
return $val if $val =~ m/^0x[0-9a-fA-F]+$/ # quote hex data
|
|
&& !$args{is_char}; # unless is_char is true
|
|
|
|
$val =~ s/(['\\])/\\$1/g;
|
|
return "'$val'";
|
|
}
|
|
|
|
sub split_unquote {
|
|
my ( $self, $db_tbl, $default_db ) = @_;
|
|
my ( $db, $tbl ) = split(/[.]/, $db_tbl);
|
|
if ( !$tbl ) {
|
|
$tbl = $db;
|
|
$db = $default_db;
|
|
}
|
|
for ($db, $tbl) {
|
|
next unless $_;
|
|
s/\A`//;
|
|
s/`\z//;
|
|
s/``/`/g;
|
|
}
|
|
|
|
return ($db, $tbl);
|
|
}
|
|
|
|
sub literal_like {
|
|
my ( $self, $like ) = @_;
|
|
return unless $like;
|
|
$like =~ s/([%_])/\\$1/g;
|
|
return "'$like'";
|
|
}
|
|
|
|
sub join_quote {
|
|
my ( $self, $default_db, $db_tbl ) = @_;
|
|
return unless $db_tbl;
|
|
my ($db, $tbl) = split(/[.]/, $db_tbl);
|
|
if ( !$tbl ) {
|
|
$tbl = $db;
|
|
$db = $default_db;
|
|
}
|
|
$db = "`$db`" if $db && $db !~ m/^`/;
|
|
$tbl = "`$tbl`" if $tbl && $tbl !~ m/^`/;
|
|
return $db ? "$db.$tbl" : $tbl;
|
|
}
|
|
|
|
sub serialize_list {
|
|
my ( $self, @args ) = @_;
|
|
PTDEBUG && _d('Serializing', Dumper(\@args));
|
|
return unless @args;
|
|
|
|
my @parts;
|
|
foreach my $arg ( @args ) {
|
|
if ( defined $arg ) {
|
|
$arg =~ s/,/\\,/g; # escape commas
|
|
$arg =~ s/\\N/\\\\N/g; # escape literal \N
|
|
push @parts, $arg;
|
|
}
|
|
else {
|
|
push @parts, '\N';
|
|
}
|
|
}
|
|
|
|
my $string = join(',', @parts);
|
|
PTDEBUG && _d('Serialized: <', $string, '>');
|
|
return $string;
|
|
}
|
|
|
|
sub deserialize_list {
|
|
my ( $self, $string ) = @_;
|
|
PTDEBUG && _d('Deserializing <', $string, '>');
|
|
die "Cannot deserialize an undefined string" unless defined $string;
|
|
|
|
my @parts;
|
|
foreach my $arg ( split(/(?<!\\),/, $string) ) {
|
|
if ( $arg eq '\N' ) {
|
|
$arg = undef;
|
|
}
|
|
else {
|
|
$arg =~ s/\\,/,/g;
|
|
$arg =~ s/\\\\N/\\N/g;
|
|
}
|
|
push @parts, $arg;
|
|
}
|
|
|
|
if ( !@parts ) {
|
|
my $n_empty_strings = $string =~ tr/,//;
|
|
$n_empty_strings++;
|
|
PTDEBUG && _d($n_empty_strings, 'empty strings');
|
|
map { push @parts, '' } 1..$n_empty_strings;
|
|
}
|
|
elsif ( $string =~ m/(?<!\\),$/ ) {
|
|
PTDEBUG && _d('Last value is an empty string');
|
|
push @parts, '';
|
|
}
|
|
|
|
PTDEBUG && _d('Deserialized', Dumper(\@parts));
|
|
return @parts;
|
|
}
|
|
|
|
sub _d {
|
|
my ($package, undef, $line) = caller 0;
|
|
@_ = map { (my $temp = $_) =~ s/\n/\n# /g; $temp; }
|
|
map { defined $_ ? $_ : 'undef' }
|
|
@_;
|
|
print STDERR "# $package:$line $PID ", join(' ', @_), "\n";
|
|
}
|
|
|
|
1;
|
|
}
|
|
# ###########################################################################
# End Quoter package
# ###########################################################################
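# Illustrative sketch (not part of the original tool) of the Quoter API above:
#
#   my $q = Quoter->new();
#   $q->quote('db', 'tbl');                             # `db`.`tbl`
#   $q->quote_val("it's");                              # 'it\'s' (undef becomes NULL)
#   my ($db, $tbl) = $q->split_unquote('`db`.`tbl`');   # ('db', 'tbl')
#   my $list = $q->serialize_list('a', undef, 'b,c');   # a,\N,b\,c
#   my @back = $q->deserialize_list($list);             # ('a', undef, 'b,c')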
|
|
|
|
# ###########################################################################
|
|
# TableParser package
|
|
# This package is a copy without comments from the original. The original
|
|
# with comments and its test file can be found in the Bazaar repository at,
|
|
# lib/TableParser.pm
|
|
# t/lib/TableParser.t
|
|
# See https://launchpad.net/percona-toolkit for more information.
|
|
# ###########################################################################
|
|
{
|
|
package TableParser;
|
|
|
|
use strict;
|
|
use warnings FATAL => 'all';
|
|
use English qw(-no_match_vars);
|
|
use constant PTDEBUG => $ENV{PTDEBUG} || 0;
|
|
|
|
use Data::Dumper;
|
|
$Data::Dumper::Indent = 1;
|
|
$Data::Dumper::Sortkeys = 1;
|
|
$Data::Dumper::Quotekeys = 0;
|
|
|
|
local $EVAL_ERROR;
|
|
eval {
|
|
require Quoter;
|
|
};
|
|
|
|
sub new {
|
|
my ( $class, %args ) = @_;
|
|
my $self = { %args };
|
|
$self->{Quoter} ||= Quoter->new();
|
|
return bless $self, $class;
|
|
}
|
|
|
|
sub Quoter { shift->{Quoter} }
|
|
|
|
sub get_create_table {
|
|
my ( $self, $dbh, $db, $tbl ) = @_;
|
|
die "I need a dbh parameter" unless $dbh;
|
|
die "I need a db parameter" unless $db;
|
|
die "I need a tbl parameter" unless $tbl;
|
|
my $q = $self->{Quoter};
|
|
|
|
my $new_sql_mode
|
|
= q{/*!40101 SET @OLD_SQL_MODE := @@SQL_MODE, }
|
|
. q{@@SQL_MODE := '', }
|
|
. q{@OLD_QUOTE := @@SQL_QUOTE_SHOW_CREATE, }
|
|
. q{@@SQL_QUOTE_SHOW_CREATE := 1 */};
|
|
|
|
my $old_sql_mode
|
|
= q{/*!40101 SET @@SQL_MODE := @OLD_SQL_MODE, }
|
|
. q{@@SQL_QUOTE_SHOW_CREATE := @OLD_QUOTE */};
|
|
|
|
PTDEBUG && _d($new_sql_mode);
|
|
eval { $dbh->do($new_sql_mode); };
|
|
PTDEBUG && $EVAL_ERROR && _d($EVAL_ERROR);
|
|
|
|
my $use_sql = 'USE ' . $q->quote($db);
|
|
PTDEBUG && _d($dbh, $use_sql);
|
|
$dbh->do($use_sql);
|
|
|
|
my $show_sql = "SHOW CREATE TABLE " . $q->quote($db, $tbl);
|
|
PTDEBUG && _d($show_sql);
|
|
my $href;
|
|
eval { $href = $dbh->selectrow_hashref($show_sql); };
|
|
if ( my $e = $EVAL_ERROR ) {
|
|
PTDEBUG && _d($old_sql_mode);
|
|
$dbh->do($old_sql_mode);
|
|
|
|
die $e;
|
|
}
|
|
|
|
PTDEBUG && _d($old_sql_mode);
|
|
$dbh->do($old_sql_mode);
|
|
|
|
my ($key) = grep { m/create (?:table|view)/i } keys %$href;
|
|
if ( !$key ) {
|
|
die "Error: no 'Create Table' or 'Create View' in result set from "
|
|
. "$show_sql: " . Dumper($href);
|
|
}
|
|
|
|
return $href->{$key};
|
|
}
|
|
|
|
sub parse {
|
|
my ( $self, $ddl, $opts ) = @_;
|
|
return unless $ddl;
|
|
|
|
if ( $ddl =~ m/CREATE (?:TEMPORARY )?TABLE "/ ) {
|
|
$ddl = $self->ansi_to_legacy($ddl);
|
|
}
|
|
elsif ( $ddl !~ m/CREATE (?:TEMPORARY )?TABLE `/ ) {
|
|
die "TableParser doesn't handle CREATE TABLE without quoting.";
|
|
}
|
|
|
|
my ($name) = $ddl =~ m/CREATE (?:TEMPORARY )?TABLE\s+(`.+?`)/;
|
|
(undef, $name) = $self->{Quoter}->split_unquote($name) if $name;
|
|
|
|
$ddl =~ s/(`[^`\n]+`)/\L$1/gm;
|
|
|
|
my $engine = $self->get_engine($ddl);
|
|
|
|
my @defs = $ddl =~ m/^(\s+`.*?),?$/gm;
|
|
my @cols = map { $_ =~ m/`([^`]+)`/ } @defs;
|
|
PTDEBUG && _d('Table cols:', join(', ', map { "`$_`" } @cols));
|
|
|
|
my %def_for;
|
|
@def_for{@cols} = @defs;
|
|
|
|
my (@nums, @null, @non_generated);
|
|
my (%type_for, %is_nullable, %is_numeric, %is_autoinc, %is_generated);
|
|
foreach my $col ( @cols ) {
|
|
my $def = $def_for{$col};
|
|
|
|
$def =~ s/``//g;
|
|
|
|
my ( $type ) = $def =~ m/`[^`]+`\s([a-z]+)/;
|
|
die "Can't determine column type for $def" unless $type;
|
|
$type_for{$col} = $type;
|
|
if ( $type =~ m/(?:(?:tiny|big|medium|small)?int|float|double|decimal|year)/ ) {
|
|
push @nums, $col;
|
|
$is_numeric{$col} = 1;
|
|
}
|
|
if ( $def !~ m/NOT NULL/ ) {
|
|
push @null, $col;
|
|
$is_nullable{$col} = 1;
|
|
}
|
|
if ( remove_quoted_text($def) =~ m/\WGENERATED\W/i ) {
|
|
$is_generated{$col} = 1;
|
|
} else {
|
|
push @non_generated, $col;
|
|
}
|
|
$is_autoinc{$col} = $def =~ m/AUTO_INCREMENT/i ? 1 : 0;
|
|
}
|
|
|
|
my ($keys, $clustered_key) = $self->get_keys($ddl, $opts, \%is_nullable);
|
|
|
|
my ($charset) = $ddl =~ m/DEFAULT CHARSET=(\w+)/;
|
|
|
|
return {
|
|
name => $name,
|
|
cols => \@cols,
|
|
col_posn => { map { $cols[$_] => $_ } 0..$#cols },
|
|
is_col => { map { $_ => 1 } @non_generated },
|
|
null_cols => \@null,
|
|
is_nullable => \%is_nullable,
|
|
non_generated_cols => \@non_generated,
|
|
is_autoinc => \%is_autoinc,
|
|
is_generated => \%is_generated,
|
|
clustered_key => $clustered_key,
|
|
keys => $keys,
|
|
defs => \%def_for,
|
|
numeric_cols => \@nums,
|
|
is_numeric => \%is_numeric,
|
|
engine => $engine,
|
|
type_for => \%type_for,
|
|
charset => $charset,
|
|
};
|
|
}
|
|
|
|
sub remove_quoted_text {
   my ($string) = @_;
   $string =~ s/[^\\]`[^`]*[^\\]`//g;    # strip backtick-quoted identifiers
   $string =~ s/[^\\]"[^"]*[^\\]"//g;    # strip double-quoted strings
   $string =~ s/[^\\]'[^']*[^\\]'//g;    # strip single-quoted strings
   return $string;
}
|
|
|
|
sub sort_indexes {
|
|
my ( $self, $tbl ) = @_;
|
|
|
|
my @indexes
|
|
= sort {
|
|
(($a ne 'PRIMARY') <=> ($b ne 'PRIMARY'))
|
|
|| ( !$tbl->{keys}->{$a}->{is_unique} <=> !$tbl->{keys}->{$b}->{is_unique} )
|
|
|| ( $tbl->{keys}->{$a}->{is_nullable} <=> $tbl->{keys}->{$b}->{is_nullable} )
|
|
|| ( scalar(@{$tbl->{keys}->{$a}->{cols}}) <=> scalar(@{$tbl->{keys}->{$b}->{cols}}) )
|
|
}
|
|
grep {
|
|
$tbl->{keys}->{$_}->{type} eq 'BTREE'
|
|
}
|
|
sort keys %{$tbl->{keys}};
|
|
|
|
PTDEBUG && _d('Indexes sorted best-first:', join(', ', @indexes));
|
|
return @indexes;
|
|
}
|
|
|
|
sub find_best_index {
|
|
my ( $self, $tbl, $index ) = @_;
|
|
my $best;
|
|
if ( $index ) {
|
|
($best) = grep { uc $_ eq uc $index } keys %{$tbl->{keys}};
|
|
}
|
|
if ( !$best ) {
|
|
if ( $index ) {
|
|
die "Index '$index' does not exist in table";
|
|
}
|
|
else {
|
|
($best) = $self->sort_indexes($tbl);
|
|
}
|
|
}
|
|
PTDEBUG && _d('Best index found is', $best);
|
|
return $best;
|
|
}
|
|
|
|
sub find_possible_keys {
|
|
my ( $self, $dbh, $database, $table, $quoter, $where ) = @_;
|
|
return () unless $where;
|
|
my $sql = 'EXPLAIN SELECT * FROM ' . $quoter->quote($database, $table)
|
|
. ' WHERE ' . $where;
|
|
PTDEBUG && _d($sql);
|
|
my $expl = $dbh->selectrow_hashref($sql);
|
|
$expl = { map { lc($_) => $expl->{$_} } keys %$expl };
|
|
if ( $expl->{possible_keys} ) {
|
|
PTDEBUG && _d('possible_keys =', $expl->{possible_keys});
|
|
my @candidates = split(',', $expl->{possible_keys});
|
|
my %possible = map { $_ => 1 } @candidates;
|
|
if ( $expl->{key} ) {
|
|
PTDEBUG && _d('MySQL chose', $expl->{key});
|
|
unshift @candidates, grep { $possible{$_} } split(',', $expl->{key});
|
|
PTDEBUG && _d('Before deduping:', join(', ', @candidates));
|
|
my %seen;
|
|
@candidates = grep { !$seen{$_}++ } @candidates;
|
|
}
|
|
PTDEBUG && _d('Final list:', join(', ', @candidates));
|
|
return @candidates;
|
|
}
|
|
else {
|
|
PTDEBUG && _d('No keys in possible_keys');
|
|
return ();
|
|
}
|
|
}
|
|
|
|
sub check_table {
|
|
my ( $self, %args ) = @_;
|
|
my @required_args = qw(dbh db tbl);
|
|
foreach my $arg ( @required_args ) {
|
|
die "I need a $arg argument" unless $args{$arg};
|
|
}
|
|
my ($dbh, $db, $tbl) = @args{@required_args};
|
|
my $q = $self->{Quoter} || 'Quoter';
|
|
my $db_tbl = $q->quote($db, $tbl);
|
|
PTDEBUG && _d('Checking', $db_tbl);
|
|
|
|
$self->{check_table_error} = undef;
|
|
|
|
my $sql = "SHOW TABLES FROM " . $q->quote($db)
|
|
. ' LIKE ' . $q->literal_like($tbl);
|
|
PTDEBUG && _d($sql);
|
|
my $row;
|
|
eval {
|
|
$row = $dbh->selectrow_arrayref($sql);
|
|
};
|
|
if ( my $e = $EVAL_ERROR ) {
|
|
PTDEBUG && _d($e);
|
|
$self->{check_table_error} = $e;
|
|
return 0;
|
|
}
|
|
if ( !$row->[0] || $row->[0] ne $tbl ) {
|
|
PTDEBUG && _d('Table does not exist');
|
|
return 0;
|
|
}
|
|
|
|
PTDEBUG && _d('Table', $db, $tbl, 'exists');
|
|
return 1;
|
|
|
|
}
|
|
|
|
sub get_engine {
|
|
my ( $self, $ddl, $opts ) = @_;
|
|
my ( $engine ) = $ddl =~ m/\).*?(?:ENGINE|TYPE)=(\w+)/;
|
|
PTDEBUG && _d('Storage engine:', $engine);
|
|
return $engine || undef;
|
|
}
|
|
|
|
sub get_keys {
|
|
my ( $self, $ddl, $opts, $is_nullable ) = @_;
|
|
my $engine = $self->get_engine($ddl);
|
|
my $keys = {};
|
|
my $clustered_key = undef;
|
|
|
|
KEY:
|
|
foreach my $key ( $ddl =~ m/^ ((?:[A-Z]+ )?KEY .*)$/gm ) {
|
|
|
|
next KEY if $key =~ m/FOREIGN/;
|
|
|
|
my $key_ddl = $key;
|
|
PTDEBUG && _d('Parsed key:', $key_ddl);
|
|
|
|
if ( !$engine || $engine !~ m/MEMORY|HEAP/ ) {
|
|
$key =~ s/USING HASH/USING BTREE/;
|
|
}
|
|
|
|
my ( $type, $cols ) = $key =~ m/(?:USING (\w+))? \((.+)\)/;
|
|
my ( $special ) = $key =~ m/(FULLTEXT|SPATIAL)/;
|
|
$type = $type || $special || 'BTREE';
|
|
my ($name) = $key =~ m/(PRIMARY|`[^`]*`)/;
|
|
my $unique = $key =~ m/PRIMARY|UNIQUE/ ? 1 : 0;
|
|
my @cols;
|
|
my @col_prefixes;
|
|
foreach my $col_def ( $cols =~ m/`[^`]+`(?:\(\d+\))?/g ) {
|
|
my ($name, $prefix) = $col_def =~ m/`([^`]+)`(?:\((\d+)\))?/;
|
|
push @cols, $name;
|
|
push @col_prefixes, $prefix;
|
|
}
|
|
$name =~ s/`//g;
|
|
|
|
PTDEBUG && _d( $name, 'key cols:', join(', ', map { "`$_`" } @cols));
|
|
|
|
$keys->{$name} = {
|
|
name => $name,
|
|
type => $type,
|
|
colnames => $cols,
|
|
cols => \@cols,
|
|
col_prefixes => \@col_prefixes,
|
|
is_unique => $unique,
|
|
is_nullable => scalar(grep { $is_nullable->{$_} } @cols),
|
|
is_col => { map { $_ => 1 } @cols },
|
|
ddl => $key_ddl,
|
|
};
|
|
|
|
if ( ($engine || '') =~ m/InnoDB/i && !$clustered_key ) {
|
|
my $this_key = $keys->{$name};
|
|
if ( $this_key->{name} eq 'PRIMARY' ) {
|
|
$clustered_key = 'PRIMARY';
|
|
}
|
|
elsif ( $this_key->{is_unique} && !$this_key->{is_nullable} ) {
|
|
$clustered_key = $this_key->{name};
|
|
}
|
|
PTDEBUG && $clustered_key && _d('This key is the clustered key');
|
|
}
|
|
}
|
|
|
|
return $keys, $clustered_key;
|
|
}
|
|
|
|
sub get_fks {
|
|
my ( $self, $ddl, $opts ) = @_;
|
|
my $q = $self->{Quoter};
|
|
my $fks = {};
|
|
|
|
foreach my $fk (
|
|
$ddl =~ m/CONSTRAINT .* FOREIGN KEY .* REFERENCES [^\)]*\)/mg )
|
|
{
|
|
my ( $name ) = $fk =~ m/CONSTRAINT `(.*?)`/;
|
|
my ( $cols ) = $fk =~ m/FOREIGN KEY \(([^\)]+)\)/;
|
|
my ( $parent, $parent_cols ) = $fk =~ m/REFERENCES (\S+) \(([^\)]+)\)/;
|
|
|
|
my ($db, $tbl) = $q->split_unquote($parent, $opts->{database});
|
|
my %parent_tbl = (tbl => $tbl);
|
|
$parent_tbl{db} = $db if $db;
|
|
|
|
if ( $parent !~ m/\./ && $opts->{database} ) {
|
|
$parent = $q->quote($opts->{database}) . ".$parent";
|
|
}
|
|
|
|
$fks->{$name} = {
|
|
name => $name,
|
|
colnames => $cols,
|
|
cols => [ map { s/[ `]+//g; $_; } split(',', $cols) ],
|
|
parent_tbl => \%parent_tbl,
|
|
parent_tblname => $parent,
|
|
parent_cols => [ map { s/[ `]+//g; $_; } split(',', $parent_cols) ],
|
|
parent_colnames=> $parent_cols,
|
|
ddl => $fk,
|
|
};
|
|
}
|
|
|
|
return $fks;
|
|
}
|
|
|
|
sub remove_auto_increment {
|
|
my ( $self, $ddl ) = @_;
|
|
$ddl =~ s/(^\).*?) AUTO_INCREMENT=\d+\b/$1/m;
|
|
return $ddl;
|
|
}
|
|
|
|
sub get_table_status {
|
|
my ( $self, $dbh, $db, $like ) = @_;
|
|
my $q = $self->{Quoter};
|
|
my $sql = "SHOW TABLE STATUS FROM " . $q->quote($db);
|
|
my @params;
|
|
if ( $like ) {
|
|
$sql .= ' LIKE ?';
|
|
push @params, $like;
|
|
}
|
|
PTDEBUG && _d($sql, @params);
|
|
my $sth = $dbh->prepare($sql);
|
|
eval { $sth->execute(@params); };
|
|
if ($EVAL_ERROR) {
|
|
PTDEBUG && _d($EVAL_ERROR);
|
|
return;
|
|
}
|
|
my @tables = @{$sth->fetchall_arrayref({})};
|
|
@tables = map {
|
|
my %tbl; # Make a copy with lowercased keys
|
|
@tbl{ map { lc $_ } keys %$_ } = values %$_;
|
|
$tbl{engine} ||= $tbl{type} || $tbl{comment};
|
|
delete $tbl{type};
|
|
\%tbl;
|
|
} @tables;
|
|
return @tables;
|
|
}
|
|
|
|
my $ansi_quote_re = qr/" [^"]* (?: "" [^"]* )* (?<=.) "/ismx;
|
|
sub ansi_to_legacy {
|
|
my ($self, $ddl) = @_;
|
|
$ddl =~ s/($ansi_quote_re)/ansi_quote_replace($1)/ge;
|
|
return $ddl;
|
|
}
|
|
|
|
sub ansi_quote_replace {
|
|
my ($val) = @_;
|
|
$val =~ s/^"|"$//g;
|
|
$val =~ s/`/``/g;
|
|
$val =~ s/""/"/g;
|
|
return "`$val`";
|
|
}
|
|
|
|
sub _d {
|
|
my ($package, undef, $line) = caller 0;
|
|
@_ = map { (my $temp = $_) =~ s/\n/\n# /g; $temp; }
|
|
map { defined $_ ? $_ : 'undef' }
|
|
@_;
|
|
print STDERR "# $package:$line $PID ", join(' ', @_), "\n";
|
|
}
|
|
|
|
1;
|
|
}
|
|
# ###########################################################################
# End TableParser package
# ###########################################################################
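# Illustrative sketch (not part of the original tool): TableParser turns SHOW
# CREATE TABLE output into a structure describing columns and keys. It assumes
# $dbh is a connected DBI handle and that sakila.film exists on that server.
#
#   my $tp  = TableParser->new(Quoter => Quoter->new());
#   my $ddl = $tp->get_create_table($dbh, 'sakila', 'film');
#   my $tbl = $tp->parse($ddl);
#   print "engine: $tbl->{engine}\n";
#   print "cols:   ", join(', ', @{$tbl->{cols}}), "\n";
#   my $best_index = $tp->find_best_index($tbl);   # PRIMARY, if one exists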
|
|
|
|
# ###########################################################################
|
|
# Retry package
|
|
# This package is a copy without comments from the original. The original
|
|
# with comments and its test file can be found in the Bazaar repository at,
|
|
# lib/Retry.pm
|
|
# t/lib/Retry.t
|
|
# See https://launchpad.net/percona-toolkit for more information.
|
|
# ###########################################################################
|
|
{
|
|
package Retry;
|
|
|
|
use strict;
|
|
use warnings FATAL => 'all';
|
|
use English qw(-no_match_vars);
|
|
use constant PTDEBUG => $ENV{PTDEBUG} || 0;
|
|
|
|
use Time::HiRes qw(sleep);
|
|
|
|
sub new {
|
|
my ( $class, %args ) = @_;
|
|
my $self = {
|
|
%args,
|
|
};
|
|
return bless $self, $class;
|
|
}
|
|
|
|
sub retry {
|
|
my ( $self, %args ) = @_;
|
|
my @required_args = qw(try fail final_fail);
|
|
foreach my $arg ( @required_args ) {
|
|
die "I need a $arg argument" unless $args{$arg};
|
|
};
|
|
my ($try, $fail, $final_fail) = @args{@required_args};
|
|
my $wait = $args{wait} || sub { sleep 1; };
|
|
my $tries = $args{tries} || 3;
|
|
|
|
my $last_error;
|
|
my $tryno = 0;
|
|
TRY:
|
|
while ( ++$tryno <= $tries ) {
|
|
PTDEBUG && _d("Try", $tryno, "of", $tries);
|
|
my $result;
|
|
eval {
|
|
$result = $try->(tryno=>$tryno);
|
|
};
|
|
if ( $EVAL_ERROR ) {
|
|
PTDEBUG && _d("Try code failed:", $EVAL_ERROR);
|
|
$last_error = $EVAL_ERROR;
|
|
|
|
if ( $tryno < $tries ) { # more retries
|
|
my $retry = $fail->(tryno=>$tryno, error=>$last_error);
|
|
last TRY unless $retry;
|
|
PTDEBUG && _d("Calling wait code");
|
|
$wait->(tryno=>$tryno);
|
|
}
|
|
}
|
|
else {
|
|
PTDEBUG && _d("Try code succeeded");
|
|
return $result;
|
|
}
|
|
}
|
|
|
|
PTDEBUG && _d('Try code did not succeed');
|
|
return $final_fail->(error=>$last_error);
|
|
}
|
|
|
|
sub _d {
|
|
my ($package, undef, $line) = caller 0;
|
|
@_ = map { (my $temp = $_) =~ s/\n/\n# /g; $temp; }
|
|
map { defined $_ ? $_ : 'undef' }
|
|
@_;
|
|
print STDERR "# $package:$line $PID ", join(' ', @_), "\n";
|
|
}
|
|
|
|
1;
|
|
}
|
|
# ###########################################################################
# End Retry package
# ###########################################################################
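# Illustrative sketch (not part of the original tool) of the Retry API above;
# $dbh is assumed to be a connected DBI handle.
#
#   my $retry  = Retry->new();
#   my $result = $retry->retry(
#      tries => 3,
#      wait  => sub { sleep 0.25 },                        # Time::HiRes sleep
#      try   => sub { return $dbh->do('SELECT 1') },
#      fail  => sub { my (%args) = @_; return 1 },         # true = try again
#      final_fail => sub { my (%args) = @_; die $args{error} },
#   );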
|
|
|
|
# ###########################################################################
|
|
# Transformers package
|
|
# This package is a copy without comments from the original. The original
|
|
# with comments and its test file can be found in the Bazaar repository at,
|
|
# lib/Transformers.pm
|
|
# t/lib/Transformers.t
|
|
# See https://launchpad.net/percona-toolkit for more information.
|
|
# ###########################################################################
|
|
{
|
|
package Transformers;
|
|
|
|
use strict;
|
|
use warnings FATAL => 'all';
|
|
use English qw(-no_match_vars);
|
|
use constant PTDEBUG => $ENV{PTDEBUG} || 0;
|
|
|
|
use Time::Local qw(timegm timelocal);
|
|
use Digest::MD5 qw(md5_hex);
|
|
use B qw();
|
|
|
|
BEGIN {
|
|
require Exporter;
|
|
our @ISA = qw(Exporter);
|
|
our %EXPORT_TAGS = ();
|
|
our @EXPORT = ();
|
|
our @EXPORT_OK = qw(
|
|
micro_t
|
|
percentage_of
|
|
secs_to_time
|
|
time_to_secs
|
|
shorten
|
|
ts
|
|
parse_timestamp
|
|
unix_timestamp
|
|
any_unix_timestamp
|
|
make_checksum
|
|
crc32
|
|
encode_json
|
|
);
|
|
}
|
|
|
|
our $mysql_ts = qr/(\d\d)(\d\d)(\d\d) +(\d+):(\d+):(\d+)(\.\d+)?/;
|
|
our $proper_ts = qr/(\d\d\d\d)-(\d\d)-(\d\d)[T ](\d\d):(\d\d):(\d\d)(\.\d+)?/;
|
|
our $n_ts = qr/(\d{1,5})([shmd]?)/; # Limit \d{1,5} because \d{6} looks like a date
|
|
|
|
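# micro_t() formats a time value given in seconds as a human-readable
# string with a unit suffix: values under 1ms become "Nus", values under
# 1s become "Nms" (with p_ms decimal places), and values >= 1s become
# "Ns" (with p_s decimal places).  For example, micro_t(0.000123)
# returns "123us" and micro_t(1.5, p_s => 1) returns "1.5s".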
sub micro_t {
|
|
my ( $t, %args ) = @_;
|
|
my $p_ms = defined $args{p_ms} ? $args{p_ms} : 0; # precision for ms vals
|
|
my $p_s = defined $args{p_s} ? $args{p_s} : 0; # precision for s vals
|
|
my $f;
|
|
|
|
$t = 0 if $t < 0;
|
|
|
|
$t = sprintf('%.17f', $t) if $t =~ /e/;
|
|
|
|
$t =~ s/\.(\d{1,6})\d*/\.$1/;
|
|
|
|
if ($t > 0 && $t <= 0.000999) {
|
|
$f = ($t * 1000000) . 'us';
|
|
}
|
|
elsif ($t >= 0.001000 && $t <= 0.999999) {
|
|
$f = sprintf("%.${p_ms}f", $t * 1000);
|
|
$f = ($f * 1) . 'ms'; # * 1 to remove insignificant zeros
|
|
}
|
|
elsif ($t >= 1) {
|
|
$f = sprintf("%.${p_s}f", $t);
|
|
$f = ($f * 1) . 's'; # * 1 to remove insignificant zeros
|
|
}
|
|
else {
|
|
$f = 0; # $t should = 0 at this point
|
|
}
|
|
|
|
return $f;
|
|
}
|
|
|
|
sub percentage_of {
|
|
my ( $is, $of, %args ) = @_;
|
|
my $p = $args{p} || 0; # float precision
|
|
my $fmt = $p ? "%.${p}f" : "%d";
|
|
return sprintf $fmt, ($is * 100) / ($of ||= 1);
|
|
}
|
|
|
|
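# secs_to_time() converts a number of seconds into a clock-style string.
# The format is chosen from the value unless $fmt is given: 'd' gives
# "D+HH:MM:SS" for a day or more, 'h' gives "HH:MM:SS" for an hour or
# more, otherwise "MM:SS".  For example, secs_to_time(90061) returns
# "1+01:01:01".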
sub secs_to_time {
|
|
my ( $secs, $fmt ) = @_;
|
|
$secs ||= 0;
|
|
return '00:00' unless $secs;
|
|
|
|
$fmt ||= $secs >= 86_400 ? 'd'
|
|
: $secs >= 3_600 ? 'h'
|
|
: 'm';
|
|
|
|
return
|
|
$fmt eq 'd' ? sprintf(
|
|
"%d+%02d:%02d:%02d",
|
|
int($secs / 86_400),
|
|
int(($secs % 86_400) / 3_600),
|
|
int(($secs % 3_600) / 60),
|
|
$secs % 60)
|
|
: $fmt eq 'h' ? sprintf(
|
|
"%02d:%02d:%02d",
|
|
int(($secs % 86_400) / 3_600),
|
|
int(($secs % 3_600) / 60),
|
|
$secs % 60)
|
|
: sprintf(
|
|
"%02d:%02d",
|
|
int(($secs % 3_600) / 60),
|
|
$secs % 60);
|
|
}
|
|
|
|
sub time_to_secs {
|
|
my ( $val, $default_suffix ) = @_;
|
|
die "I need a val argument" unless defined $val;
|
|
my $t = 0;
|
|
my ( $prefix, $num, $suffix ) = $val =~ m/([+-]?)(\d+)([a-z])?$/;
|
|
$suffix = $suffix || $default_suffix || 's';
|
|
if ( $suffix =~ m/[smhd]/ ) {
|
|
$t = $suffix eq 's' ? $num * 1 # Seconds
|
|
: $suffix eq 'm' ? $num * 60 # Minutes
|
|
: $suffix eq 'h' ? $num * 3600 # Hours
|
|
: $num * 86400; # Days
|
|
|
|
$t *= -1 if $prefix && $prefix eq '-';
|
|
}
|
|
else {
|
|
die "Invalid suffix for $val: $suffix";
|
|
}
|
|
return $t;
|
|
}
|
|
|
|
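# shorten() scales a number down by repeatedly dividing it by the
# divisor (1024 by default) and appends the matching unit suffix
# (k, M, G, ...).  For example, shorten(3221225472) returns "3.00G"
# and shorten(1500, d => 1000) returns "1.50k".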
sub shorten {
|
|
my ( $num, %args ) = @_;
|
|
my $p = defined $args{p} ? $args{p} : 2; # float precision
|
|
my $d = defined $args{d} ? $args{d} : 1_024; # divisor
|
|
my $n = 0;
|
|
my @units = ('', qw(k M G T P E Z Y));
|
|
while ( $num >= $d && $n < @units - 1 ) {
|
|
$num /= $d;
|
|
++$n;
|
|
}
|
|
return sprintf(
|
|
$num =~ m/\./ || $n
|
|
? '%1$.'.$p.'f%2$s'
|
|
: '%1$d',
|
|
$num, $units[$n]);
|
|
}
|
|
|
|
sub ts {
|
|
my ( $time, $gmt ) = @_;
|
|
my ( $sec, $min, $hour, $mday, $mon, $year )
|
|
= $gmt ? gmtime($time) : localtime($time);
|
|
$mon += 1;
|
|
$year += 1900;
|
|
my $val = sprintf("%d-%02d-%02dT%02d:%02d:%02d",
|
|
$year, $mon, $mday, $hour, $min, $sec);
|
|
if ( my ($us) = $time =~ m/(\.\d+)$/ ) {
|
|
$us = sprintf("%.6f", $us);
|
|
$us =~ s/^0\././;
|
|
$val .= $us;
|
|
}
|
|
return $val;
|
|
}
|
|
|
|
sub parse_timestamp {
|
|
my ( $val ) = @_;
|
|
if ( my($y, $m, $d, $h, $i, $s, $f)
|
|
= $val =~ m/^$mysql_ts$/ )
|
|
{
|
|
return sprintf "%d-%02d-%02d %02d:%02d:"
|
|
. (defined $f ? '%09.6f' : '%02d'),
|
|
$y + 2000, $m, $d, $h, $i, (defined $f ? $s + $f : $s);
|
|
}
|
|
elsif ( $val =~ m/^$proper_ts$/ ) {
|
|
return $val;
|
|
}
|
|
return $val;
|
|
}
|
|
|
|
sub unix_timestamp {
|
|
my ( $val, $gmt ) = @_;
|
|
if ( my($y, $m, $d, $h, $i, $s, $us) = $val =~ m/^$proper_ts$/ ) {
|
|
$val = $gmt
|
|
? timegm($s, $i, $h, $d, $m - 1, $y)
|
|
: timelocal($s, $i, $h, $d, $m - 1, $y);
|
|
if ( defined $us ) {
|
|
$us = sprintf('%.6f', $us);
|
|
$us =~ s/^0\././;
|
|
$val .= $us;
|
|
}
|
|
}
|
|
return $val;
|
|
}
|
|
|
|
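# any_unix_timestamp() accepts several timestamp formats and returns a
# Unix timestamp: "N[shmd]" is treated as an offset back from now
# (e.g. "5m" means five minutes ago), a number of 9 or more digits is
# returned as-is, a MySQL slow-log "YYMMDD [HH:MM:SS]" or proper
# "YYYY-MM-DD[ HH:MM:SS]" value is converted, and anything else is
# handed to the optional callback (e.g. to evaluate a MySQL expression).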
sub any_unix_timestamp {
|
|
my ( $val, $callback ) = @_;
|
|
|
|
if ( my ($n, $suffix) = $val =~ m/^$n_ts$/ ) {
|
|
$n = $suffix eq 's' ? $n # Seconds
|
|
: $suffix eq 'm' ? $n * 60 # Minutes
|
|
: $suffix eq 'h' ? $n * 3600 # Hours
|
|
: $suffix eq 'd' ? $n * 86400 # Days
|
|
: $n; # default: Seconds
|
|
PTDEBUG && _d('ts is now - N[shmd]:', $n);
|
|
return time - $n;
|
|
}
|
|
elsif ( $val =~ m/^\d{9,}/ ) {
|
|
PTDEBUG && _d('ts is already a unix timestamp');
|
|
return $val;
|
|
}
|
|
elsif ( my ($ymd, $hms) = $val =~ m/^(\d{6})(?:\s+(\d+:\d+:\d+))?/ ) {
|
|
PTDEBUG && _d('ts is MySQL slow log timestamp');
|
|
$val .= ' 00:00:00' unless $hms;
|
|
return unix_timestamp(parse_timestamp($val));
|
|
}
|
|
elsif ( ($ymd, $hms) = $val =~ m/^(\d{4}-\d\d-\d\d)(?:[T ](\d+:\d+:\d+))?/) {
|
|
PTDEBUG && _d('ts is properly formatted timestamp');
|
|
$val .= ' 00:00:00' unless $hms;
|
|
return unix_timestamp($val);
|
|
}
|
|
else {
|
|
PTDEBUG && _d('ts is MySQL expression');
|
|
return $callback->($val) if $callback && ref $callback eq 'CODE';
|
|
}
|
|
|
|
PTDEBUG && _d('Unknown ts type:', $val);
|
|
return;
|
|
}
|
|
|
|
sub make_checksum {
|
|
my ( $val ) = @_;
|
|
my $checksum = uc substr(md5_hex($val), -16);
|
|
PTDEBUG && _d($checksum, 'checksum for', $val);
|
|
return $checksum;
|
|
}
|
|
|
|
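# crc32() computes a standard CRC-32 checksum (reflected polynomial
# 0xEDB88320) of the given string, one byte at a time, in pure Perl.
# It returns nothing for an empty (false) string.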
sub crc32 {
|
|
my ( $string ) = @_;
|
|
return unless $string;
|
|
my $poly = 0xEDB88320;
|
|
my $crc = 0xFFFFFFFF;
|
|
foreach my $char ( split(//, $string) ) {
|
|
my $comp = ($crc ^ ord($char)) & 0xFF;
|
|
for ( 1 .. 8 ) {
|
|
$comp = $comp & 1 ? $poly ^ ($comp >> 1) : $comp >> 1;
|
|
}
|
|
$crc = (($crc >> 8) & 0x00FFFFFF) ^ $comp;
|
|
}
|
|
return $crc ^ 0xFFFFFFFF;
|
|
}
|
|
|
|
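# encode_json() uses the JSON module when it is available; otherwise it
# falls back to the minimal pure-Perl serializer below (object_to_json
# and friends), which handles hashes, arrays, numbers, strings and undef.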
my $got_json = eval { require JSON };
|
|
sub encode_json {
|
|
return JSON::encode_json(@_) if $got_json;
|
|
my ( $data ) = @_;
|
|
return (object_to_json($data) || '');
|
|
}
|
|
|
|
|
|
sub object_to_json {
|
|
my ($obj) = @_;
|
|
my $type = ref($obj);
|
|
|
|
if($type eq 'HASH'){
|
|
return hash_to_json($obj);
|
|
}
|
|
elsif($type eq 'ARRAY'){
|
|
return array_to_json($obj);
|
|
}
|
|
else {
|
|
return value_to_json($obj);
|
|
}
|
|
}
|
|
|
|
sub hash_to_json {
|
|
my ($obj) = @_;
|
|
my @res;
|
|
for my $k ( sort { $a cmp $b } keys %$obj ) {
|
|
push @res, string_to_json( $k )
|
|
. ":"
|
|
. ( object_to_json( $obj->{$k} ) || value_to_json( $obj->{$k} ) );
|
|
}
|
|
return '{' . ( @res ? join( ",", @res ) : '' ) . '}';
|
|
}
|
|
|
|
sub array_to_json {
|
|
my ($obj) = @_;
|
|
my @res;
|
|
|
|
for my $v (@$obj) {
|
|
push @res, object_to_json($v) || value_to_json($v);
|
|
}
|
|
|
|
return '[' . ( @res ? join( ",", @res ) : '' ) . ']';
|
|
}
|
|
|
|
sub value_to_json {
|
|
my ($value) = @_;
|
|
|
|
return 'null' if(!defined $value);
|
|
|
|
my $b_obj = B::svref_2object(\$value); # for round trip problem
|
|
my $flags = $b_obj->FLAGS;
|
|
return $value # as is
|
|
if $flags & ( B::SVp_IOK | B::SVp_NOK ) and !( $flags & B::SVp_POK ); # SvTYPE is IV or NV?
|
|
|
|
my $type = ref($value);
|
|
|
|
if( !$type ) {
|
|
return string_to_json($value);
|
|
}
|
|
else {
|
|
return 'null';
|
|
}
|
|
|
|
}
|
|
|
|
my %esc = (
|
|
"\n" => '\n',
|
|
"\r" => '\r',
|
|
"\t" => '\t',
|
|
"\f" => '\f',
|
|
"\b" => '\b',
|
|
"\"" => '\"',
|
|
"\\" => '\\\\',
|
|
"\'" => '\\\'',
|
|
);
|
|
|
|
sub string_to_json {
|
|
my ($arg) = @_;
|
|
|
|
$arg =~ s/([\x22\x5c\n\r\t\f\b])/$esc{$1}/g;
|
|
$arg =~ s/\//\\\//g;
|
|
$arg =~ s/([\x00-\x08\x0b\x0e-\x1f])/'\\u00' . unpack('H2', $1)/eg;
|
|
|
|
utf8::upgrade($arg);
|
|
utf8::encode($arg);
|
|
|
|
return '"' . $arg . '"';
|
|
}
|
|
|
|
sub _d {
|
|
my ($package, undef, $line) = caller 0;
|
|
@_ = map { (my $temp = $_) =~ s/\n/\n# /g; $temp; }
|
|
map { defined $_ ? $_ : 'undef' }
|
|
@_;
|
|
print STDERR "# $package:$line $PID ", join(' ', @_), "\n";
|
|
}
|
|
|
|
1;
|
|
}
|
|
# ###########################################################################
# End Transformers package
# ###########################################################################

# ###########################################################################
# HTTP::Micro package
# This package is a copy without comments from the original. The original
# with comments and its test file can be found in the Bazaar repository at,
# lib/HTTP/Micro.pm
# t/lib/HTTP/Micro.t
# See https://launchpad.net/percona-toolkit for more information.
# ###########################################################################
{
|
|
package HTTP::Micro;
|
|
|
|
our $VERSION = '0.01';
|
|
|
|
use strict;
|
|
use warnings FATAL => 'all';
|
|
use English qw(-no_match_vars);
|
|
use Carp ();
|
|
|
|
my @attributes;
|
|
BEGIN {
|
|
@attributes = qw(agent timeout);
|
|
no strict 'refs';
|
|
for my $accessor ( @attributes ) {
|
|
*{$accessor} = sub {
|
|
@_ > 1 ? $_[0]->{$accessor} = $_[1] : $_[0]->{$accessor};
|
|
};
|
|
}
|
|
}
|
|
|
|
sub new {
|
|
my($class, %args) = @_;
|
|
(my $agent = $class) =~ s{::}{-}g;
|
|
my $self = {
|
|
agent => $agent . "/" . ($class->VERSION || 0),
|
|
timeout => 60,
|
|
};
|
|
for my $key ( @attributes ) {
|
|
$self->{$key} = $args{$key} if exists $args{$key}
|
|
}
|
|
return bless $self, $class;
|
|
}
|
|
|
|
my %DefaultPort = (
|
|
http => 80,
|
|
https => 443,
|
|
);
|
|
|
|
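# request() performs a single HTTP request.  GET requests are retried
# once if the socket is closed or the stream ends unexpectedly; any
# exception is converted into a synthetic 599 "Internal Exception"
# response so callers always get a response hashref back.
#
# Illustrative usage:
#   my $ua  = HTTP::Micro->new(timeout => 5);
#   my $res = $ua->request('GET', 'https://v.percona.com');
#   print $res->{content} if $res->{success};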
sub request {
|
|
my ($self, $method, $url, $args) = @_;
|
|
@_ == 3 || (@_ == 4 && ref $args eq 'HASH')
|
|
or Carp::croak(q/Usage: $http->request(METHOD, URL, [HASHREF])/);
|
|
$args ||= {}; # we keep some state in this during _request
|
|
|
|
my $response;
|
|
for ( 0 .. 1 ) {
|
|
$response = eval { $self->_request($method, $url, $args) };
|
|
last unless $@ && $method eq 'GET'
|
|
&& $@ =~ m{^(?:Socket closed|Unexpected end)};
|
|
}
|
|
|
|
if (my $e = "$@") {
|
|
$response = {
|
|
success => q{},
|
|
status => 599,
|
|
reason => 'Internal Exception',
|
|
content => $e,
|
|
headers => {
|
|
'content-type' => 'text/plain',
|
|
'content-length' => length $e,
|
|
}
|
|
};
|
|
}
|
|
return $response;
|
|
}
|
|
|
|
sub _request {
|
|
my ($self, $method, $url, $args) = @_;
|
|
|
|
my ($scheme, $host, $port, $path_query) = $self->_split_url($url);
|
|
|
|
my $request = {
|
|
method => $method,
|
|
scheme => $scheme,
|
|
host_port => ($port == $DefaultPort{$scheme} ? $host : "$host:$port"),
|
|
uri => $path_query,
|
|
headers => {},
|
|
};
|
|
|
|
my $handle = HTTP::Micro::Handle->new(timeout => $self->{timeout});
|
|
|
|
$handle->connect($scheme, $host, $port);
|
|
|
|
$self->_prepare_headers_and_cb($request, $args);
|
|
$handle->write_request_header(@{$request}{qw/method uri headers/});
|
|
$handle->write_content_body($request) if $request->{content};
|
|
|
|
my $response;
|
|
do { $response = $handle->read_response_header }
|
|
until (substr($response->{status},0,1) ne '1');
|
|
|
|
if (!($method eq 'HEAD' || $response->{status} =~ /^[23]04/)) {
|
|
$response->{content} = '';
|
|
$handle->read_content_body(sub { $_[1]->{content} .= $_[0] }, $response);
|
|
}
|
|
|
|
$handle->close;
|
|
$response->{success} = substr($response->{status},0,1) eq '2';
|
|
return $response;
|
|
}
|
|
|
|
sub _prepare_headers_and_cb {
|
|
my ($self, $request, $args) = @_;
|
|
|
|
for ($args->{headers}) {
|
|
next unless defined;
|
|
while (my ($k, $v) = each %$_) {
|
|
$request->{headers}{lc $k} = $v;
|
|
}
|
|
}
|
|
$request->{headers}{'host'} = $request->{host_port};
|
|
$request->{headers}{'connection'} = "close";
|
|
$request->{headers}{'user-agent'} ||= $self->{agent};
|
|
|
|
if (defined $args->{content}) {
|
|
$request->{headers}{'content-type'} ||= "application/octet-stream";
|
|
utf8::downgrade($args->{content}, 1)
|
|
or Carp::croak(q/Wide character in request message body/);
|
|
$request->{headers}{'content-length'} = length $args->{content};
|
|
$request->{content} = $args->{content};
|
|
}
|
|
return;
|
|
}
|
|
|
|
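# _split_url() breaks a URL into (scheme, host, port, path_query).  The
# port defaults to 80 for http and 443 for https, any userinfo is
# stripped from the authority, and the path defaults to "/".  For
# example, "https://user@example.com/x?y=1" yields
# ('https', 'example.com', 443, '/x?y=1').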
sub _split_url {
|
|
my $url = pop;
|
|
|
|
my ($scheme, $authority, $path_query) = $url =~ m<\A([^:/?#]+)://([^/?#]*)([^#]*)>
|
|
or Carp::croak(qq/Cannot parse URL: '$url'/);
|
|
|
|
$scheme = lc $scheme;
|
|
$path_query = "/$path_query" unless $path_query =~ m<\A/>;
|
|
|
|
my $host = (length($authority)) ? lc $authority : 'localhost';
|
|
$host =~ s/\A[^@]*@//; # userinfo
|
|
my $port = do {
|
|
$host =~ s/:([0-9]*)\z// && length $1
|
|
? $1
|
|
: $DefaultPort{$scheme}
|
|
};
|
|
|
|
return ($scheme, $host, $port, $path_query);
|
|
}
|
|
|
|
} # HTTP::Micro
|
|
|
|
{
|
|
package HTTP::Micro::Handle;
|
|
|
|
use strict;
|
|
use warnings FATAL => 'all';
|
|
use English qw(-no_match_vars);
|
|
|
|
use Carp qw(croak);
|
|
use Errno qw(EINTR EPIPE);
|
|
use IO::Socket qw(SOCK_STREAM);
|
|
|
|
sub BUFSIZE () { 32768 }
|
|
|
|
my $Printable = sub {
|
|
local $_ = shift;
|
|
s/\r/\\r/g;
|
|
s/\n/\\n/g;
|
|
s/\t/\\t/g;
|
|
s/([^\x20-\x7E])/sprintf('\\x%.2X', ord($1))/ge;
|
|
$_;
|
|
};
|
|
|
|
sub new {
|
|
my ($class, %args) = @_;
|
|
return bless {
|
|
rbuf => '',
|
|
timeout => 60,
|
|
max_line_size => 16384,
|
|
%args
|
|
}, $class;
|
|
}
|
|
|
|
my $ssl_verify_args = {
|
|
check_cn => "when_only",
|
|
wildcards_in_alt => "anywhere",
|
|
wildcards_in_cn => "anywhere"
|
|
};
|
|
|
|
sub connect {
|
|
@_ == 4 || croak(q/Usage: $handle->connect(scheme, host, port)/);
|
|
my ($self, $scheme, $host, $port) = @_;
|
|
|
|
if ( $scheme eq 'https' ) {
|
|
eval "require IO::Socket::SSL"
|
|
unless exists $INC{'IO/Socket/SSL.pm'};
|
|
croak(qq/IO::Socket::SSL must be installed for https support\n/)
|
|
unless $INC{'IO/Socket/SSL.pm'};
|
|
}
|
|
elsif ( $scheme ne 'http' ) {
|
|
croak(qq/Unsupported URL scheme '$scheme'\n/);
|
|
}
|
|
|
|
$self->{fh} = IO::Socket::INET->new(
|
|
PeerHost => $host,
|
|
PeerPort => $port,
|
|
Proto => 'tcp',
|
|
Type => SOCK_STREAM,
|
|
Timeout => $self->{timeout}
|
|
) or croak(qq/Could not connect to '$host:$port': $@/);
|
|
|
|
binmode($self->{fh})
|
|
or croak(qq/Could not binmode() socket: '$!'/);
|
|
|
|
if ( $scheme eq 'https') {
|
|
IO::Socket::SSL->start_SSL($self->{fh});
|
|
ref($self->{fh}) eq 'IO::Socket::SSL'
|
|
or die(qq/SSL connection failed for $host\n/);
|
|
if ( $self->{fh}->can("verify_hostname") ) {
|
|
$self->{fh}->verify_hostname( $host, $ssl_verify_args )
|
|
or die(qq/SSL certificate not valid for $host\n/);
|
|
}
|
|
else {
|
|
my $fh = $self->{fh};
|
|
_verify_hostname_of_cert($host, _peer_certificate($fh), $ssl_verify_args)
|
|
or die(qq/SSL certificate not valid for $host\n/);
|
|
}
|
|
}
|
|
|
|
$self->{host} = $host;
|
|
$self->{port} = $port;
|
|
|
|
return $self;
|
|
}
|
|
|
|
sub close {
|
|
@_ == 1 || croak(q/Usage: $handle->close()/);
|
|
my ($self) = @_;
|
|
CORE::close($self->{fh})
|
|
or croak(qq/Could not close socket: '$!'/);
|
|
}
|
|
|
|
sub write {
|
|
@_ == 2 || croak(q/Usage: $handle->write(buf)/);
|
|
my ($self, $buf) = @_;
|
|
|
|
my $len = length $buf;
|
|
my $off = 0;
|
|
|
|
local $SIG{PIPE} = 'IGNORE';
|
|
|
|
while () {
|
|
$self->can_write
|
|
or croak(q/Timed out while waiting for socket to become ready for writing/);
|
|
my $r = syswrite($self->{fh}, $buf, $len, $off);
|
|
if (defined $r) {
|
|
$len -= $r;
|
|
$off += $r;
|
|
last unless $len > 0;
|
|
}
|
|
elsif ($! == EPIPE) {
|
|
croak(qq/Socket closed by remote server: $!/);
|
|
}
|
|
elsif ($! != EINTR) {
|
|
croak(qq/Could not write to socket: '$!'/);
|
|
}
|
|
}
|
|
return $off;
|
|
}
|
|
|
|
sub read {
|
|
@_ == 2 || @_ == 3 || croak(q/Usage: $handle->read(len)/);
|
|
my ($self, $len) = @_;
|
|
|
|
my $buf = '';
|
|
my $got = length $self->{rbuf};
|
|
|
|
if ($got) {
|
|
my $take = ($got < $len) ? $got : $len;
|
|
$buf = substr($self->{rbuf}, 0, $take, '');
|
|
$len -= $take;
|
|
}
|
|
|
|
while ($len > 0) {
|
|
$self->can_read
|
|
or croak(q/Timed out while waiting for socket to become ready for reading/);
|
|
my $r = sysread($self->{fh}, $buf, $len, length $buf);
|
|
if (defined $r) {
|
|
last unless $r;
|
|
$len -= $r;
|
|
}
|
|
elsif ($! != EINTR) {
|
|
croak(qq/Could not read from socket: '$!'/);
|
|
}
|
|
}
|
|
if ($len) {
|
|
croak(q/Unexpected end of stream/);
|
|
}
|
|
return $buf;
|
|
}
|
|
|
|
sub readline {
|
|
@_ == 1 || croak(q/Usage: $handle->readline()/);
|
|
my ($self) = @_;
|
|
|
|
while () {
|
|
if ($self->{rbuf} =~ s/\A ([^\x0D\x0A]* \x0D?\x0A)//x) {
|
|
return $1;
|
|
}
|
|
$self->can_read
|
|
or croak(q/Timed out while waiting for socket to become ready for reading/);
|
|
my $r = sysread($self->{fh}, $self->{rbuf}, BUFSIZE, length $self->{rbuf});
|
|
if (defined $r) {
|
|
last unless $r;
|
|
}
|
|
elsif ($! != EINTR) {
|
|
croak(qq/Could not read from socket: '$!'/);
|
|
}
|
|
}
|
|
croak(q/Unexpected end of stream while looking for line/);
|
|
}
|
|
|
|
sub read_header_lines {
|
|
@_ == 1 || @_ == 2 || croak(q/Usage: $handle->read_header_lines([headers])/);
|
|
my ($self, $headers) = @_;
|
|
$headers ||= {};
|
|
my $lines = 0;
|
|
my $val;
|
|
|
|
while () {
|
|
my $line = $self->readline;
|
|
|
|
if ($line =~ /\A ([^\x00-\x1F\x7F:]+) : [\x09\x20]* ([^\x0D\x0A]*)/x) {
|
|
my ($field_name) = lc $1;
|
|
$val = \($headers->{$field_name} = $2);
|
|
}
|
|
elsif ($line =~ /\A [\x09\x20]+ ([^\x0D\x0A]*)/x) {
|
|
$val
|
|
or croak(q/Unexpected header continuation line/);
|
|
next unless length $1;
|
|
$$val .= ' ' if length $$val;
|
|
$$val .= $1;
|
|
}
|
|
elsif ($line =~ /\A \x0D?\x0A \z/x) {
|
|
last;
|
|
}
|
|
else {
|
|
croak(q/Malformed header line: / . $Printable->($line));
|
|
}
|
|
}
|
|
return $headers;
|
|
}
|
|
|
|
sub write_header_lines {
|
|
(@_ == 2 && ref $_[1] eq 'HASH') || croak(q/Usage: $handle->write_header_lines(headers)/);
|
|
my($self, $headers) = @_;
|
|
|
|
my $buf = '';
|
|
while (my ($k, $v) = each %$headers) {
|
|
my $field_name = lc $k;
|
|
$field_name =~ /\A [\x21\x23-\x27\x2A\x2B\x2D\x2E\x30-\x39\x41-\x5A\x5E-\x7A\x7C\x7E]+ \z/x
|
|
or croak(q/Invalid HTTP header field name: / . $Printable->($field_name));
|
|
$field_name =~ s/\b(\w)/\u$1/g;
|
|
$buf .= "$field_name: $v\x0D\x0A";
|
|
}
|
|
$buf .= "\x0D\x0A";
|
|
return $self->write($buf);
|
|
}
|
|
|
|
sub read_content_body {
|
|
@_ == 3 || @_ == 4 || croak(q/Usage: $handle->read_content_body(callback, response, [read_length])/);
|
|
my ($self, $cb, $response, $len) = @_;
|
|
$len ||= $response->{headers}{'content-length'};
|
|
|
|
croak("No content-length in the returned response, and this "
|
|
. "UA doesn't implement chunking") unless defined $len;
|
|
|
|
while ($len > 0) {
|
|
my $read = ($len > BUFSIZE) ? BUFSIZE : $len;
|
|
$cb->($self->read($read), $response);
|
|
$len -= $read;
|
|
}
|
|
|
|
return;
|
|
}
|
|
|
|
sub write_content_body {
|
|
@_ == 2 || croak(q/Usage: $handle->write_content_body(request)/);
|
|
my ($self, $request) = @_;
|
|
my ($len, $content_length) = (0, $request->{headers}{'content-length'});
|
|
|
|
$len += $self->write($request->{content});
|
|
|
|
$len == $content_length
|
|
or croak(qq/Content-Length mismatch (got: $len expected: $content_length)/);
|
|
|
|
return $len;
|
|
}
|
|
|
|
sub read_response_header {
|
|
@_ == 1 || croak(q/Usage: $handle->read_response_header()/);
|
|
my ($self) = @_;
|
|
|
|
my $line = $self->readline;
|
|
|
|
$line =~ /\A (HTTP\/(0*\d+\.0*\d+)) [\x09\x20]+ ([0-9]{3}) [\x09\x20]+ ([^\x0D\x0A]*) \x0D?\x0A/x
|
|
or croak(q/Malformed Status-Line: / . $Printable->($line));
|
|
|
|
my ($protocol, $version, $status, $reason) = ($1, $2, $3, $4);
|
|
|
|
return {
|
|
status => $status,
|
|
reason => $reason,
|
|
headers => $self->read_header_lines,
|
|
protocol => $protocol,
|
|
};
|
|
}
|
|
|
|
sub write_request_header {
|
|
@_ == 4 || croak(q/Usage: $handle->write_request_header(method, request_uri, headers)/);
|
|
my ($self, $method, $request_uri, $headers) = @_;
|
|
|
|
return $self->write("$method $request_uri HTTP/1.1\x0D\x0A")
|
|
+ $self->write_header_lines($headers);
|
|
}
|
|
|
|
sub _do_timeout {
|
|
my ($self, $type, $timeout) = @_;
|
|
$timeout = $self->{timeout}
|
|
unless defined $timeout && $timeout >= 0;
|
|
|
|
my $fd = fileno $self->{fh};
|
|
defined $fd && $fd >= 0
|
|
or croak(q/select(2): 'Bad file descriptor'/);
|
|
|
|
my $initial = time;
|
|
my $pending = $timeout;
|
|
my $nfound;
|
|
|
|
vec(my $fdset = '', $fd, 1) = 1;
|
|
|
|
while () {
|
|
$nfound = ($type eq 'read')
|
|
? select($fdset, undef, undef, $pending)
|
|
: select(undef, $fdset, undef, $pending) ;
|
|
if ($nfound == -1) {
|
|
$! == EINTR
|
|
or croak(qq/select(2): '$!'/);
|
|
redo if !$timeout || ($pending = $timeout - (time - $initial)) > 0;
|
|
$nfound = 0;
|
|
}
|
|
last;
|
|
}
|
|
$! = 0;
|
|
return $nfound;
|
|
}
|
|
|
|
sub can_read {
|
|
@_ == 1 || @_ == 2 || croak(q/Usage: $handle->can_read([timeout])/);
|
|
my $self = shift;
|
|
return $self->_do_timeout('read', @_)
|
|
}
|
|
|
|
sub can_write {
|
|
@_ == 1 || @_ == 2 || croak(q/Usage: $handle->can_write([timeout])/);
|
|
my $self = shift;
|
|
return $self->_do_timeout('write', @_)
|
|
}
|
|
} # HTTP::Micro::Handle
|
|
|
|
my $prog = <<'EOP';
|
|
BEGIN {
|
|
if ( defined &IO::Socket::SSL::CAN_IPV6 ) {
|
|
*CAN_IPV6 = \*IO::Socket::SSL::CAN_IPV6;
|
|
}
|
|
else {
|
|
constant->import( CAN_IPV6 => '' );
|
|
}
|
|
my %const = (
|
|
NID_CommonName => 13,
|
|
GEN_DNS => 2,
|
|
GEN_IPADD => 7,
|
|
);
|
|
while ( my ($name,$value) = each %const ) {
|
|
no strict 'refs';
|
|
*{$name} = UNIVERSAL::can( 'Net::SSLeay', $name ) || sub { $value };
|
|
}
|
|
}
|
|
{
|
|
use Carp qw(croak);
|
|
my %dispatcher = (
|
|
issuer => sub { Net::SSLeay::X509_NAME_oneline( Net::SSLeay::X509_get_issuer_name( shift )) },
|
|
subject => sub { Net::SSLeay::X509_NAME_oneline( Net::SSLeay::X509_get_subject_name( shift )) },
|
|
);
|
|
if ( $Net::SSLeay::VERSION >= 1.30 ) {
|
|
$dispatcher{commonName} = sub {
|
|
my $cn = Net::SSLeay::X509_NAME_get_text_by_NID(
|
|
Net::SSLeay::X509_get_subject_name( shift ), NID_CommonName);
|
|
$cn =~s{\0$}{}; # work around Bug in Net::SSLeay <1.33
|
|
$cn;
|
|
}
|
|
} else {
|
|
$dispatcher{commonName} = sub {
|
|
croak "you need at least Net::SSLeay version 1.30 for getting commonName"
|
|
}
|
|
}
|
|
|
|
if ( $Net::SSLeay::VERSION >= 1.33 ) {
|
|
$dispatcher{subjectAltNames} = sub { Net::SSLeay::X509_get_subjectAltNames( shift ) };
|
|
} else {
|
|
$dispatcher{subjectAltNames} = sub {
|
|
return;
|
|
};
|
|
}
|
|
|
|
$dispatcher{authority} = $dispatcher{issuer};
|
|
$dispatcher{owner} = $dispatcher{subject};
|
|
$dispatcher{cn} = $dispatcher{commonName};
|
|
|
|
sub _peer_certificate {
|
|
my ($self, $field) = @_;
|
|
my $ssl = $self->_get_ssl_object or return;
|
|
|
|
my $cert = ${*$self}{_SSL_certificate}
|
|
||= Net::SSLeay::get_peer_certificate($ssl)
|
|
or return $self->error("Could not retrieve peer certificate");
|
|
|
|
if ($field) {
|
|
my $sub = $dispatcher{$field} or croak
|
|
"invalid argument for peer_certificate, valid are: ".join( " ",keys %dispatcher ).
|
|
"\nMaybe you need to upgrade your Net::SSLeay";
|
|
return $sub->($cert);
|
|
} else {
|
|
return $cert
|
|
}
|
|
}
|
|
|
|
|
|
my %scheme = (
|
|
ldap => {
|
|
wildcards_in_cn => 0,
|
|
wildcards_in_alt => 'leftmost',
|
|
check_cn => 'always',
|
|
},
|
|
http => {
|
|
wildcards_in_cn => 'anywhere',
|
|
wildcards_in_alt => 'anywhere',
|
|
check_cn => 'when_only',
|
|
},
|
|
smtp => {
|
|
wildcards_in_cn => 0,
|
|
wildcards_in_alt => 0,
|
|
check_cn => 'always'
|
|
},
|
|
none => {}, # do not check
|
|
);
|
|
|
|
$scheme{www} = $scheme{http}; # alias
|
|
$scheme{xmpp} = $scheme{http}; # rfc 3920
|
|
$scheme{pop3} = $scheme{ldap}; # rfc 2595
|
|
$scheme{imap} = $scheme{ldap}; # rfc 2595
|
|
$scheme{acap} = $scheme{ldap}; # rfc 2595
|
|
$scheme{nntp} = $scheme{ldap}; # rfc 4642
|
|
$scheme{ftp} = $scheme{http}; # rfc 4217
|
|
|
|
|
|
sub _verify_hostname_of_cert {
|
|
my $identity = shift;
|
|
my $cert = shift;
|
|
my $scheme = shift || 'none';
|
|
if ( ! ref($scheme) ) {
|
|
$scheme = $scheme{$scheme} or croak "scheme $scheme not defined";
|
|
}
|
|
|
|
return 1 if ! %$scheme; # 'none'
|
|
|
|
my $commonName = $dispatcher{cn}->($cert);
|
|
my @altNames = $dispatcher{subjectAltNames}->($cert);
|
|
|
|
if ( my $sub = $scheme->{callback} ) {
|
|
return $sub->($identity,$commonName,@altNames);
|
|
}
|
|
|
|
|
|
my $ipn;
|
|
if ( CAN_IPV6 and $identity =~m{:} ) {
|
|
$ipn = IO::Socket::SSL::inet_pton(IO::Socket::SSL::AF_INET6,$identity)
|
|
or croak "'$identity' is not IPv6, but neither IPv4 nor hostname";
|
|
} elsif ( $identity =~m{^\d+\.\d+\.\d+\.\d+$} ) {
|
|
$ipn = IO::Socket::SSL::inet_aton( $identity ) or croak "'$identity' is not IPv4, but neither IPv6 nor hostname";
|
|
} else {
|
|
if ( $identity =~m{[^a-zA-Z0-9_.\-]} ) {
|
|
$identity =~m{\0} and croak("name '$identity' has \\0 byte");
|
|
$identity = IO::Socket::SSL::idn_to_ascii($identity) or
|
|
croak "Warning: Given name '$identity' could not be converted to IDNA!";
|
|
}
|
|
}
|
|
|
|
my $check_name = sub {
|
|
my ($name,$identity,$wtyp) = @_;
|
|
$wtyp ||= '';
|
|
my $pattern;
|
|
if ( $wtyp eq 'anywhere' and $name =~m{^([a-zA-Z0-9_\-]*)\*(.+)} ) {
|
|
$pattern = qr{^\Q$1\E[a-zA-Z0-9_\-]*\Q$2\E$}i;
|
|
} elsif ( $wtyp eq 'leftmost' and $name =~m{^\*(\..+)$} ) {
|
|
$pattern = qr{^[a-zA-Z0-9_\-]*\Q$1\E$}i;
|
|
} else {
|
|
$pattern = qr{^\Q$name\E$}i;
|
|
}
|
|
return $identity =~ $pattern;
|
|
};
|
|
|
|
my $alt_dnsNames = 0;
|
|
while (@altNames) {
|
|
my ($type, $name) = splice (@altNames, 0, 2);
|
|
if ( $ipn and $type == GEN_IPADD ) {
|
|
return 1 if $ipn eq $name;
|
|
|
|
} elsif ( ! $ipn and $type == GEN_DNS ) {
|
|
$name =~s/\s+$//; $name =~s/^\s+//;
|
|
$alt_dnsNames++;
|
|
$check_name->($name,$identity,$scheme->{wildcards_in_alt})
|
|
and return 1;
|
|
}
|
|
}
|
|
|
|
if ( ! $ipn and (
|
|
$scheme->{check_cn} eq 'always' or
|
|
$scheme->{check_cn} eq 'when_only' and !$alt_dnsNames)) {
|
|
$check_name->($commonName,$identity,$scheme->{wildcards_in_cn})
|
|
and return 1;
|
|
}
|
|
|
|
return 0; # no match
|
|
}
|
|
}
|
|
EOP
|
|
|
|
eval { require IO::Socket::SSL };
|
|
if ( $INC{"IO/Socket/SSL.pm"} ) {
|
|
eval $prog;
|
|
die $@ if $@;
|
|
}
|
|
|
|
1;
|
|
# ###########################################################################
# End HTTP::Micro package
# ###########################################################################

# ###########################################################################
# VersionCheck package
# This package is a copy without comments from the original. The original
# with comments and its test file can be found in the Bazaar repository at,
# lib/VersionCheck.pm
# t/lib/VersionCheck.t
# See https://launchpad.net/percona-toolkit for more information.
# ###########################################################################
{
|
|
package VersionCheck;
|
|
|
|
|
|
use strict;
|
|
use warnings FATAL => 'all';
|
|
use English qw(-no_match_vars);
|
|
|
|
use constant PTDEBUG => $ENV{PTDEBUG} || 0;
|
|
|
|
use Data::Dumper;
|
|
local $Data::Dumper::Indent = 1;
|
|
local $Data::Dumper::Sortkeys = 1;
|
|
local $Data::Dumper::Quotekeys = 0;
|
|
|
|
use Digest::MD5 qw(md5_hex);
|
|
use Sys::Hostname qw(hostname);
|
|
use File::Basename qw();
|
|
use File::Spec;
|
|
use FindBin qw();
|
|
|
|
eval {
|
|
require Percona::Toolkit;
|
|
require HTTP::Micro;
|
|
};
|
|
|
|
{
|
|
my $file = 'percona-version-check';
|
|
my $home = $ENV{HOME} || $ENV{HOMEPATH} || $ENV{USERPROFILE} || '.';
|
|
my @vc_dirs = (
|
|
'/etc/percona',
|
|
'/etc/percona-toolkit',
|
|
'/tmp',
|
|
"$home",
|
|
);
|
|
|
|
sub version_check_file {
|
|
foreach my $dir ( @vc_dirs ) {
|
|
if ( -d $dir && -w $dir ) {
|
|
PTDEBUG && _d('Version check file', $file, 'in', $dir);
|
|
return $dir . '/' . $file;
|
|
}
|
|
}
|
|
PTDEBUG && _d('Version check file', $file, 'in', $ENV{PWD});
|
|
return $file; # in the CWD
|
|
}
|
|
}
|
|
|
|
sub version_check_time_limit {
|
|
return 60 * 60 * 24; # one day
|
|
}
|
|
|
|
|
|
sub version_check {
|
|
my (%args) = @_;
|
|
|
|
my $instances = $args{instances} || [];
|
|
my $instances_to_check;
|
|
|
|
PTDEBUG && _d('FindBin::Bin:', $FindBin::Bin);
|
|
if ( !$args{force} ) {
|
|
if ( $FindBin::Bin
|
|
&& (-d "$FindBin::Bin/../.bzr" ||
|
|
-d "$FindBin::Bin/../../.bzr" ||
|
|
-d "$FindBin::Bin/../.git" ||
|
|
-d "$FindBin::Bin/../../.git"
|
|
)
|
|
) {
|
|
PTDEBUG && _d("$FindBin::Bin/../.bzr disables --version-check");
|
|
return;
|
|
}
|
|
}
|
|
|
|
eval {
|
|
foreach my $instance ( @$instances ) {
|
|
my ($name, $id) = get_instance_id($instance);
|
|
$instance->{name} = $name;
|
|
$instance->{id} = $id;
|
|
}
|
|
|
|
push @$instances, { name => 'system', id => 0 };
|
|
|
|
$instances_to_check = get_instances_to_check(
|
|
instances => $instances,
|
|
vc_file => $args{vc_file}, # testing
|
|
now => $args{now}, # testing
|
|
);
|
|
PTDEBUG && _d(scalar @$instances_to_check, 'instances to check');
|
|
return unless @$instances_to_check;
|
|
|
|
my $protocol = 'https';
|
|
eval { require IO::Socket::SSL; };
|
|
if ( $EVAL_ERROR ) {
|
|
PTDEBUG && _d($EVAL_ERROR);
|
|
PTDEBUG && _d("SSL not available, won't run version_check");
|
|
return;
|
|
}
|
|
PTDEBUG && _d('Using', $protocol);
|
|
|
|
my $advice = pingback(
|
|
instances => $instances_to_check,
|
|
protocol => $protocol,
|
|
url => $args{url} # testing
|
|
|| $ENV{PERCONA_VERSION_CHECK_URL} # testing
|
|
|| "$protocol://v.percona.com",
|
|
);
|
|
if ( $advice ) {
|
|
PTDEBUG && _d('Advice:', Dumper($advice));
|
|
if ( scalar @$advice > 1) {
|
|
print "\n# " . scalar @$advice . " software updates are "
|
|
. "available:\n";
|
|
}
|
|
else {
|
|
print "\n# A software update is available:\n";
|
|
}
|
|
print join("\n", map { "# * $_" } @$advice), "\n\n";
|
|
}
|
|
};
|
|
if ( $EVAL_ERROR ) {
|
|
PTDEBUG && _d('Version check failed:', $EVAL_ERROR);
|
|
}
|
|
|
|
if ( @$instances_to_check ) {
|
|
eval {
|
|
update_check_times(
|
|
instances => $instances_to_check,
|
|
vc_file => $args{vc_file}, # testing
|
|
now => $args{now}, # testing
|
|
);
|
|
};
|
|
if ( $EVAL_ERROR ) {
|
|
PTDEBUG && _d('Error updating version check file:', $EVAL_ERROR);
|
|
}
|
|
}
|
|
|
|
if ( $ENV{PTDEBUG_VERSION_CHECK} ) {
|
|
warn "Exiting because the PTDEBUG_VERSION_CHECK "
|
|
. "environment variable is defined.\n";
|
|
exit 255;
|
|
}
|
|
|
|
return;
|
|
}
|
|
|
|
sub get_instances_to_check {
|
|
my (%args) = @_;
|
|
|
|
my $instances = $args{instances};
|
|
my $now = $args{now} || int(time);
|
|
my $vc_file = $args{vc_file} || version_check_file();
|
|
|
|
if ( !-f $vc_file ) {
|
|
PTDEBUG && _d('Version check file', $vc_file, 'does not exist;',
|
|
'version checking all instances');
|
|
return $instances;
|
|
}
|
|
|
|
open my $fh, '<', $vc_file or die "Cannot open $vc_file: $OS_ERROR";
|
|
chomp(my $file_contents = do { local $/ = undef; <$fh> });
|
|
PTDEBUG && _d('Version check file', $vc_file, 'contents:', $file_contents);
|
|
close $fh;
|
|
my %last_check_time_for = $file_contents =~ /^([^,]+),(.+)$/mg;
|
|
|
|
my $check_time_limit = version_check_time_limit();
|
|
my @instances_to_check;
|
|
foreach my $instance ( @$instances ) {
|
|
my $last_check_time = $last_check_time_for{ $instance->{id} };
|
|
PTDEBUG && _d('Instance', $instance->{id}, 'last checked',
|
|
$last_check_time, 'now', $now, 'diff', $now - ($last_check_time || 0),
|
|
'hours until next check',
|
|
sprintf '%.2f',
|
|
($check_time_limit - ($now - ($last_check_time || 0))) / 3600);
|
|
if ( !defined $last_check_time
|
|
|| ($now - $last_check_time) >= $check_time_limit ) {
|
|
PTDEBUG && _d('Time to check', Dumper($instance));
|
|
push @instances_to_check, $instance;
|
|
}
|
|
}
|
|
|
|
return \@instances_to_check;
|
|
}
|
|
|
|
sub update_check_times {
|
|
my (%args) = @_;
|
|
|
|
my $instances = $args{instances};
|
|
my $now = $args{now} || int(time);
|
|
my $vc_file = $args{vc_file} || version_check_file();
|
|
PTDEBUG && _d('Updating last check time:', $now);
|
|
|
|
my %all_instances = map {
|
|
$_->{id} => { name => $_->{name}, ts => $now }
|
|
} @$instances;
|
|
|
|
if ( -f $vc_file ) {
|
|
open my $fh, '<', $vc_file or die "Cannot read $vc_file: $OS_ERROR";
|
|
my $contents = do { local $/ = undef; <$fh> };
|
|
close $fh;
|
|
|
|
foreach my $line ( split("\n", ($contents || '')) ) {
|
|
my ($id, $ts) = split(',', $line);
|
|
if ( !exists $all_instances{$id} ) {
|
|
$all_instances{$id} = { ts => $ts }; # original ts, not updated
|
|
}
|
|
}
|
|
}
|
|
|
|
open my $fh, '>', $vc_file or die "Cannot write to $vc_file: $OS_ERROR";
|
|
foreach my $id ( sort keys %all_instances ) {
|
|
PTDEBUG && _d('Updated:', $id, Dumper($all_instances{$id}));
|
|
print { $fh } $id . ',' . $all_instances{$id}->{ts} . "\n";
|
|
}
|
|
close $fh;
|
|
|
|
return;
|
|
}
|
|
|
|
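# get_instance_id() builds a stable identifier for a MySQL instance for
# the version check: it concatenates @@hostname and the port (falling
# back to the DSN host and port if the server cannot be queried) and
# returns both that name and its MD5 hex digest as the id.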
sub get_instance_id {
|
|
my ($instance) = @_;
|
|
|
|
my $dbh = $instance->{dbh};
|
|
my $dsn = $instance->{dsn};
|
|
|
|
my $sql = q{SELECT CONCAT(@@hostname, @@port)};
|
|
PTDEBUG && _d($sql);
|
|
my ($name) = eval { $dbh->selectrow_array($sql) };
|
|
if ( $EVAL_ERROR ) {
|
|
PTDEBUG && _d($EVAL_ERROR);
|
|
$sql = q{SELECT @@hostname};
|
|
PTDEBUG && _d($sql);
|
|
($name) = eval { $dbh->selectrow_array($sql) };
|
|
if ( $EVAL_ERROR ) {
|
|
PTDEBUG && _d($EVAL_ERROR);
|
|
$name = ($dsn->{h} || 'localhost') . ($dsn->{P} || 3306);
|
|
}
|
|
else {
|
|
$sql = q{SHOW VARIABLES LIKE 'port'};
|
|
PTDEBUG && _d($sql);
|
|
my (undef, $port) = eval { $dbh->selectrow_array($sql) };
|
|
PTDEBUG && _d('port:', $port);
|
|
$name .= $port || '';
|
|
}
|
|
}
|
|
my $id = md5_hex($name);
|
|
|
|
PTDEBUG && _d('MySQL instance:', $id, $name, Dumper($dsn));
|
|
|
|
return $name, $id;
|
|
}
|
|
|
|
|
|
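# pingback() implements the version-check exchange with the Percona
# server: it GETs the list of items (programs and variables) the server
# wants to know about, collects the local versions with get_versions(),
# POSTs them back, and returns any advice lines the server suggests
# (or nothing if there are none).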
sub pingback {
|
|
my (%args) = @_;
|
|
my @required_args = qw(url instances);
|
|
foreach my $arg ( @required_args ) {
|
|
die "I need a $arg arugment" unless $args{$arg};
|
|
}
|
|
my $url = $args{url};
|
|
my $instances = $args{instances};
|
|
|
|
my $ua = $args{ua} || HTTP::Micro->new( timeout => 3 );
|
|
|
|
my $response = $ua->request('GET', $url);
|
|
PTDEBUG && _d('Server response:', Dumper($response));
|
|
die "No response from GET $url"
|
|
if !$response;
|
|
die("GET on $url returned HTTP status $response->{status}; expected 200\n",
|
|
($response->{content} || '')) if $response->{status} != 200;
|
|
die("GET on $url did not return any programs to check")
|
|
if !$response->{content};
|
|
|
|
my $items = parse_server_response(
|
|
response => $response->{content}
|
|
);
|
|
die "Failed to parse server requested programs: $response->{content}"
|
|
if !scalar keys %$items;
|
|
|
|
my $versions = get_versions(
|
|
items => $items,
|
|
instances => $instances,
|
|
);
|
|
die "Failed to get any program versions; should have at least gotten Perl"
|
|
if !scalar keys %$versions;
|
|
|
|
my $client_content = encode_client_response(
|
|
items => $items,
|
|
versions => $versions,
|
|
general_id => md5_hex( hostname() ),
|
|
);
|
|
|
|
my $client_response = {
|
|
headers => { "X-Percona-Toolkit-Tool" => File::Basename::basename($0) },
|
|
content => $client_content,
|
|
};
|
|
PTDEBUG && _d('Client response:', Dumper($client_response));
|
|
|
|
$response = $ua->request('POST', $url, $client_response);
|
|
PTDEBUG && _d('Server suggestions:', Dumper($response));
|
|
die "No response from POST $url $client_response"
|
|
if !$response;
|
|
die "POST $url returned HTTP status $response->{status}; expected 200"
|
|
if $response->{status} != 200;
|
|
|
|
return unless $response->{content};
|
|
|
|
$items = parse_server_response(
|
|
response => $response->{content},
|
|
split_vars => 0,
|
|
);
|
|
die "Failed to parse server suggestions: $response->{content}"
|
|
if !scalar keys %$items;
|
|
my @suggestions = map { $_->{vars} }
|
|
sort { $a->{item} cmp $b->{item} }
|
|
values %$items;
|
|
|
|
return \@suggestions;
|
|
}
|
|
|
|
sub encode_client_response {
|
|
my (%args) = @_;
|
|
my @required_args = qw(items versions general_id);
|
|
foreach my $arg ( @required_args ) {
|
|
die "I need a $arg arugment" unless $args{$arg};
|
|
}
|
|
my ($items, $versions, $general_id) = @args{@required_args};
|
|
|
|
my @lines;
|
|
foreach my $item ( sort keys %$items ) {
|
|
next unless exists $versions->{$item};
|
|
if ( ref($versions->{$item}) eq 'HASH' ) {
|
|
my $mysql_versions = $versions->{$item};
|
|
for my $id ( sort keys %$mysql_versions ) {
|
|
push @lines, join(';', $id, $item, $mysql_versions->{$id});
|
|
}
|
|
}
|
|
else {
|
|
push @lines, join(';', $general_id, $item, $versions->{$item});
|
|
}
|
|
}
|
|
|
|
my $client_response = join("\n", @lines) . "\n";
|
|
return $client_response;
|
|
}
|
|
|
|
sub parse_server_response {
|
|
my (%args) = @_;
|
|
my @required_args = qw(response);
|
|
foreach my $arg ( @required_args ) {
|
|
die "I need a $arg arugment" unless $args{$arg};
|
|
}
|
|
my ($response) = @args{@required_args};
|
|
|
|
my %items = map {
|
|
my ($item, $type, $vars) = split(";", $_);
|
|
if ( !defined $args{split_vars} || $args{split_vars} ) {
|
|
$vars = [ split(",", ($vars || '')) ];
|
|
}
|
|
$item => {
|
|
item => $item,
|
|
type => $type,
|
|
vars => $vars,
|
|
};
|
|
} split("\n", $response);
|
|
|
|
PTDEBUG && _d('Items:', Dumper(\%items));
|
|
|
|
return \%items;
|
|
}
|
|
|
|
my %sub_for_type = (
|
|
os_version => \&get_os_version,
|
|
perl_version => \&get_perl_version,
|
|
perl_module_version => \&get_perl_module_version,
|
|
mysql_variable => \&get_mysql_variable,
|
|
);
|
|
|
|
sub valid_item {
|
|
my ($item) = @_;
|
|
return unless $item;
|
|
if ( !exists $sub_for_type{ $item->{type} } ) {
|
|
PTDEBUG && _d('Invalid type:', $item->{type});
|
|
return 0;
|
|
}
|
|
return 1;
|
|
}
|
|
|
|
sub get_versions {
|
|
my (%args) = @_;
|
|
my @required_args = qw(items);
|
|
foreach my $arg ( @required_args ) {
|
|
die "I need a $arg arugment" unless $args{$arg};
|
|
}
|
|
my ($items) = @args{@required_args};
|
|
|
|
my %versions;
|
|
foreach my $item ( values %$items ) {
|
|
next unless valid_item($item);
|
|
eval {
|
|
my $version = $sub_for_type{ $item->{type} }->(
|
|
item => $item,
|
|
instances => $args{instances},
|
|
);
|
|
if ( $version ) {
|
|
chomp $version unless ref($version);
|
|
$versions{$item->{item}} = $version;
|
|
}
|
|
};
|
|
if ( $EVAL_ERROR ) {
|
|
PTDEBUG && _d('Error getting version for', Dumper($item), $EVAL_ERROR);
|
|
}
|
|
}
|
|
|
|
return \%versions;
|
|
}
|
|
|
|
|
|
sub get_os_version {
|
|
if ( $OSNAME eq 'MSWin32' ) {
|
|
require Win32;
|
|
return Win32::GetOSDisplayName();
|
|
}
|
|
|
|
chomp(my $platform = `uname -s`);
|
|
PTDEBUG && _d('platform:', $platform);
|
|
return $OSNAME unless $platform;
|
|
|
|
chomp(my $lsb_release
|
|
= `which lsb_release 2>/dev/null | awk '{print \$1}'` || '');
|
|
PTDEBUG && _d('lsb_release:', $lsb_release);
|
|
|
|
my $release = "";
|
|
|
|
if ( $platform eq 'Linux' ) {
|
|
if ( -f "/etc/fedora-release" ) {
|
|
$release = `cat /etc/fedora-release`;
|
|
}
|
|
elsif ( -f "/etc/redhat-release" ) {
|
|
$release = `cat /etc/redhat-release`;
|
|
}
|
|
elsif ( -f "/etc/system-release" ) {
|
|
$release = `cat /etc/system-release`;
|
|
}
|
|
elsif ( $lsb_release ) {
|
|
$release = `$lsb_release -ds`;
|
|
}
|
|
elsif ( -f "/etc/lsb-release" ) {
|
|
$release = `grep DISTRIB_DESCRIPTION /etc/lsb-release`;
|
|
$release =~ s/^\w+="([^"]+)".+/$1/;
|
|
}
|
|
elsif ( -f "/etc/debian_version" ) {
|
|
chomp(my $rel = `cat /etc/debian_version`);
|
|
$release = "Debian $rel";
|
|
if ( -f "/etc/apt/sources.list" ) {
|
|
chomp(my $code_name = `awk '/^deb/ {print \$3}' /etc/apt/sources.list | awk -F/ '{print \$1}'| awk 'BEGIN {FS="|"} {print \$1}' | sort | uniq -c | sort -rn | head -n1 | awk '{print \$2}'`);
|
|
$release .= " ($code_name)" if $code_name;
|
|
}
|
|
}
|
|
elsif ( -f "/etc/os-release" ) { # openSUSE
|
|
chomp($release = `grep PRETTY_NAME /etc/os-release`);
|
|
$release =~ s/^PRETTY_NAME="(.+)"$/$1/;
|
|
}
|
|
elsif ( `ls /etc/*release 2>/dev/null` ) {
|
|
if ( `grep DISTRIB_DESCRIPTION /etc/*release 2>/dev/null` ) {
|
|
$release = `grep DISTRIB_DESCRIPTION /etc/*release | head -n1`;
|
|
}
|
|
else {
|
|
$release = `cat /etc/*release | head -n1`;
|
|
}
|
|
}
|
|
}
|
|
elsif ( $platform =~ m/(?:BSD|^Darwin)$/ ) {
|
|
my $rel = `uname -r`;
|
|
$release = "$platform $rel";
|
|
}
|
|
elsif ( $platform eq "SunOS" ) {
|
|
my $rel = `head -n1 /etc/release` || `uname -r`;
|
|
$release = "$platform $rel";
|
|
}
|
|
|
|
if ( !$release ) {
|
|
PTDEBUG && _d('Failed to get the release, using platform');
|
|
$release = $platform;
|
|
}
|
|
chomp($release);
|
|
|
|
$release =~ s/^"|"$//g;
|
|
|
|
PTDEBUG && _d('OS version =', $release);
|
|
return $release;
|
|
}
|
|
|
|
sub get_perl_version {
|
|
my (%args) = @_;
|
|
my $item = $args{item};
|
|
return unless $item;
|
|
|
|
my $version = sprintf '%vd', $PERL_VERSION;
|
|
PTDEBUG && _d('Perl version', $version);
|
|
return $version;
|
|
}
|
|
|
|
sub get_perl_module_version {
|
|
my (%args) = @_;
|
|
my $item = $args{item};
|
|
return unless $item;
|
|
|
|
my $var = '$' . $item->{item} . '::VERSION';
|
|
my $version = eval "use $item->{item}; $var;";
|
|
PTDEBUG && _d('Perl version for', $var, '=', $version);
|
|
return $version;
|
|
}
|
|
|
|
sub get_mysql_variable {
|
|
return get_from_mysql(
|
|
show => 'VARIABLES',
|
|
@_,
|
|
);
|
|
}
|
|
|
|
sub get_from_mysql {
|
|
my (%args) = @_;
|
|
my $show = $args{show};
|
|
my $item = $args{item};
|
|
my $instances = $args{instances};
|
|
return unless $show && $item;
|
|
|
|
if ( !$instances || !@$instances ) {
|
|
PTDEBUG && _d('Cannot check', $item,
|
|
'because there are no MySQL instances');
|
|
return;
|
|
}
|
|
|
|
if ($item->{item} eq 'MySQL' && $item->{type} eq 'mysql_variable') {
|
|
@{$item->{vars}} = grep { $_ eq 'version' || $_ eq 'version_comment' } @{$item->{vars}};
|
|
}
|
|
|
|
|
|
my @versions;
|
|
my %version_for;
|
|
foreach my $instance ( @$instances ) {
|
|
next unless $instance->{id}; # special system instance has id=0
|
|
my $dbh = $instance->{dbh};
|
|
local $dbh->{FetchHashKeyName} = 'NAME_lc';
|
|
my $sql = qq/SHOW $show/;
|
|
PTDEBUG && _d($sql);
|
|
my $rows = $dbh->selectall_hashref($sql, 'variable_name');
|
|
|
|
my @versions;
|
|
foreach my $var ( @{$item->{vars}} ) {
|
|
$var = lc($var);
|
|
my $version = $rows->{$var}->{value};
|
|
PTDEBUG && _d('MySQL version for', $item->{item}, '=', $version,
|
|
'on', $instance->{name});
|
|
push @versions, $version;
|
|
}
|
|
$version_for{ $instance->{id} } = join(' ', @versions);
|
|
}
|
|
|
|
return \%version_for;
|
|
}
|
|
|
|
sub _d {
|
|
my ($package, undef, $line) = caller 0;
|
|
@_ = map { (my $temp = $_) =~ s/\n/\n# /g; $temp; }
|
|
map { defined $_ ? $_ : 'undef' }
|
|
@_;
|
|
print STDERR "# $package:$line $PID ", join(' ', @_), "\n";
|
|
}
|
|
|
|
1;
|
|
}
|
|
# ###########################################################################
# End VersionCheck package
# ###########################################################################

# ###########################################################################
# VersionParser package
# This package is a copy without comments from the original. The original
# with comments and its test file can be found in the Bazaar repository at,
# lib/VersionParser.pm
# t/lib/VersionParser.t
# See https://launchpad.net/percona-toolkit for more information.
# ###########################################################################
{
|
|
package VersionParser;
|
|
|
|
use Lmo;
|
|
use Scalar::Util qw(blessed);
|
|
use English qw(-no_match_vars);
|
|
use constant PTDEBUG => $ENV{PTDEBUG} || 0;
|
|
|
|
use overload (
|
|
'""' => "version",
|
|
'<=>' => "cmp",
|
|
'cmp' => "cmp",
|
|
fallback => 1,
|
|
);
|
|
|
|
use Carp ();
|
|
|
|
has major => (
|
|
is => 'ro',
|
|
isa => 'Int',
|
|
required => 1,
|
|
);
|
|
|
|
has [qw( minor revision )] => (
|
|
is => 'ro',
|
|
isa => 'Num',
|
|
);
|
|
|
|
has flavor => (
|
|
is => 'ro',
|
|
isa => 'Str',
|
|
default => sub { 'Unknown' },
|
|
);
|
|
|
|
has innodb_version => (
|
|
is => 'ro',
|
|
isa => 'Str',
|
|
default => sub { 'NO' },
|
|
);
|
|
|
|
sub series {
|
|
my $self = shift;
|
|
return $self->_join_version($self->major, $self->minor);
|
|
}
|
|
|
|
sub version {
|
|
my $self = shift;
|
|
return $self->_join_version($self->major, $self->minor, $self->revision);
|
|
}
|
|
|
|
sub is_in {
|
|
my ($self, $target) = @_;
|
|
|
|
return $self eq $target;
|
|
}
|
|
|
|
sub _join_version {
|
|
my ($self, @parts) = @_;
|
|
|
|
return join ".", map { my $c = $_; $c =~ s/^0\./0/; $c } grep defined, @parts;
|
|
}
|
|
sub _split_version {
|
|
my ($self, $str) = @_;
|
|
my @version_parts = map { s/^0(?=\d)/0./; $_ } $str =~ m/(\d+)/g;
|
|
return @version_parts[0..2];
|
|
}
|
|
|
|
sub normalized_version {
|
|
my ( $self ) = @_;
|
|
my $result = sprintf('%d%02d%02d', map { $_ || 0 } $self->major,
|
|
$self->minor,
|
|
$self->revision);
|
|
PTDEBUG && _d($self->version, 'normalizes to', $result);
|
|
return $result;
|
|
}
|
|
|
|
sub comment {
|
|
my ( $self, $cmd ) = @_;
|
|
my $v = $self->normalized_version();
|
|
|
|
return "/*!$v $cmd */"
|
|
}
|
|
|
|
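# cmp() compares two version objects (or an object and a plain version
# string) part by part: major, then minor, then revision.  Comparison
# stops at the first part that differs or that is missing on either
# side, so "5.7" compares equal to "5.7.21".  This also backs the
# overloaded <=>, cmp and string operators declared above.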
my @methods = qw(major minor revision);
|
|
sub cmp {
|
|
my ($left, $right) = @_;
|
|
my $right_obj = (blessed($right) && $right->isa(ref($left)))
|
|
? $right
|
|
: ref($left)->new($right);
|
|
|
|
my $retval = 0;
|
|
for my $m ( @methods ) {
|
|
last unless defined($left->$m) && defined($right_obj->$m);
|
|
$retval = $left->$m <=> $right_obj->$m;
|
|
last if $retval;
|
|
}
|
|
return $retval;
|
|
}
|
|
|
|
sub BUILDARGS {
|
|
my $self = shift;
|
|
|
|
if ( @_ == 1 ) {
|
|
my %args;
|
|
if ( blessed($_[0]) && $_[0]->can("selectrow_hashref") ) {
|
|
PTDEBUG && _d("VersionParser got a dbh, trying to get the version");
|
|
my $dbh = $_[0];
|
|
local $dbh->{FetchHashKeyName} = 'NAME_lc';
|
|
my $query = eval {
|
|
$dbh->selectall_arrayref(q/SHOW VARIABLES LIKE 'version%'/, { Slice => {} })
|
|
};
|
|
if ( $query ) {
|
|
$query = { map { $_->{variable_name} => $_->{value} } @$query };
|
|
@args{@methods} = $self->_split_version($query->{version});
|
|
$args{flavor} = delete $query->{version_comment}
|
|
if $query->{version_comment};
|
|
}
|
|
elsif ( eval { ($query) = $dbh->selectrow_array(q/SELECT VERSION()/) } ) {
|
|
@args{@methods} = $self->_split_version($query);
|
|
}
|
|
else {
|
|
Carp::confess("Couldn't get the version from the dbh while "
|
|
. "creating a VersionParser object: $@");
|
|
}
|
|
$args{innodb_version} = eval { $self->_innodb_version($dbh) };
|
|
}
|
|
elsif ( !ref($_[0]) ) {
|
|
@args{@methods} = $self->_split_version($_[0]);
|
|
}
|
|
|
|
for my $method (@methods) {
|
|
delete $args{$method} unless defined $args{$method};
|
|
}
|
|
@_ = %args if %args;
|
|
}
|
|
|
|
return $self->SUPER::BUILDARGS(@_);
|
|
}
|
|
|
|
sub _innodb_version {
|
|
my ( $self, $dbh ) = @_;
|
|
return unless $dbh;
|
|
my $innodb_version = "NO";
|
|
|
|
my ($innodb) =
|
|
grep { $_->{engine} =~ m/InnoDB/i }
|
|
map {
|
|
my %hash;
|
|
@hash{ map { lc $_ } keys %$_ } = values %$_;
|
|
\%hash;
|
|
}
|
|
@{ $dbh->selectall_arrayref("SHOW ENGINES", {Slice=>{}}) };
|
|
if ( $innodb ) {
|
|
PTDEBUG && _d("InnoDB support:", $innodb->{support});
|
|
if ( $innodb->{support} =~ m/YES|DEFAULT/i ) {
|
|
my $vars = $dbh->selectrow_hashref(
|
|
"SHOW VARIABLES LIKE 'innodb_version'");
|
|
$innodb_version = !$vars ? "BUILTIN"
|
|
: ($vars->{Value} || $vars->{value});
|
|
}
|
|
else {
|
|
$innodb_version = $innodb->{support}; # probably DISABLED or NO
|
|
}
|
|
}
|
|
|
|
PTDEBUG && _d("InnoDB version:", $innodb_version);
|
|
return $innodb_version;
|
|
}
|
|
|
|
sub _d {
|
|
my ($package, undef, $line) = caller 0;
|
|
@_ = map { (my $temp = $_) =~ s/\n/\n# /g; $temp; }
|
|
map { defined $_ ? $_ : 'undef' }
|
|
@_;
|
|
print STDERR "# $package:$line $PID ", join(' ', @_), "\n";
|
|
}
|
|
|
|
no Lmo;
|
|
1;
|
|
}
|
|
# ###########################################################################
# End VersionParser package
# ###########################################################################

# ###########################################################################
# This is a combination of modules and programs in one -- a runnable module.
# http://www.perl.com/pub/a/2006/07/13/lightning-articles.html?page=last
# Or, look it up in the Camel book on pages 642 and 643 in the 3rd edition.
#
# Check at the end of this package for the call to main() which actually runs
# the program.
# ###########################################################################
package pt_heartbeat;
|
|
|
|
use strict;
|
|
use warnings FATAL => 'all';
|
|
use English qw(-no_match_vars);
|
|
use List::Util qw(min max sum);
|
|
use Time::HiRes qw(gettimeofday time sleep usleep);
|
|
use IO::File;
|
|
|
|
use Percona::Toolkit;
|
|
use constant PTDEBUG => $ENV{PTDEBUG} || 0;
|
|
|
|
Transformers->import(qw(ts unix_timestamp));
|
|
|
|
my @dbhs; # Holds slave DBHs if --recurse
|
|
my @sths; # Holds [$host, $sth] if --recurse
|
|
|
|
sub main {
|
|
local @ARGV = @_; # set global ARGV for this package
|
|
|
|
# Reset all global vars between test runs else weird things happen.
|
|
@dbhs = ();
|
|
@sths = ();
|
|
|
|
# ########################################################################
|
|
# Get configuration information.
|
|
# ########################################################################
|
|
my $o = new OptionParser();
|
|
$o->get_specs();
|
|
$o->get_opts();
|
|
|
|
my $dp = $o->DSNParser;
|
|
$dp->prop('dbidriver', $o->get('dbi-driver'));
|
|
$dp->prop('set-vars', $o->set_vars());
|
|
|
|
if ( !$o->get('help') ) {
|
|
my @frames = $o->get('frames') =~ m/(\d+[smhd])/g;
|
|
if ( @frames ) {
|
|
my @times;
|
|
foreach my $frame ( @frames ) {
|
|
my ($num, $suf) = $frame =~ m/(\d+)([smhd])$/;
|
|
if ( !$num ) {
|
|
$o->save_error("Invalid --frames argument");
|
|
}
|
|
else {
|
|
push @times,
|
|
$suf eq 's' ? $num # Seconds
|
|
: $suf eq 'm' ? $num * 60 # Minutes
|
|
: $suf eq 'h' ? $num * 3600 # Hours
|
|
: $num * 86400; # Days
|
|
}
|
|
}
|
|
$o->set('frames', \@times);
|
|
}
|
|
else {
|
|
$o->save_error("Invalid --frames argument");
|
|
}
|
|
|
|
if ( $o->get('create-table')
|
|
&& !($o->get('database') && $o->get('table'))) {
|
|
$o->save_error('--create-table requires both --database and --table');
|
|
}
|
|
|
|
if ( $o->get('interval') < 0.01 ) {
|
|
$o->save_error("--interval must be >= 0.01");
|
|
}
|
|
|
|
if ( !$o->get('stop') && !$o->get('database') ) {
|
|
$o->save_error('--database must be specified');
|
|
}
|
|
}
|
|
|
|
eval {
|
|
MasterSlave::check_recursion_method($o->get('recursion-method'));
|
|
};
|
|
if ( $EVAL_ERROR ) {
|
|
$o->save_error("Invalid --recursion-method: $EVAL_ERROR")
|
|
}
|
|
|
|
$o->usage_or_errors();
|
|
|
|
# ########################################################################
|
|
# Make common modules and var for frequently used options.
|
|
# ########################################################################
|
|
my $q = new Quoter();
|
|
my $tp = new TableParser(Quoter => $q);
|
|
|
|
my $interval = $o->get('interval');
|
|
my $skew = $o->get('update') ? 0 : $o->get('skew');
|
|
my $sentinel = $o->get('sentinel');
|
|
my $frames = $o->get('frames');
|
|
my $db = $o->get('database');
|
|
my $tbl = $o->get('table');
|
|
|
|
# ########################################################################
|
|
# Create --sentinel file if --stop was given, and possibly exit.
|
|
# ########################################################################
|
|
if ( $o->get('stop') ) {
|
|
PTDEBUG && _d('Creating sentinel file', $sentinel);
|
|
my $file = IO::File->new($sentinel, ">>")
|
|
or die "Cannot open $sentinel: $OS_ERROR\n";
|
|
print $file "Remove this file to permit pt-heartbeat to run\n"
|
|
or die "Cannot write to $sentinel: $OS_ERROR\n";
|
|
close $file
|
|
or die "Cannot close $sentinel: $OS_ERROR\n";
|
|
print STDOUT "Successfully created file $sentinel\n";
|
|
# Exit only if no other action (update, monitor, check) is given.
|
|
if ( !$o->get('update') && !$o->get('check') && !$o->get('monitor') ) {
|
|
PTDEBUG && _d("Nothing more to do, quitting");
|
|
return 0;
|
|
}
|
|
else {
|
|
# Wait for all other running instances to quit, assuming they have the
|
|
# same --interval as this invocation. Then remove the file and
|
|
# continue.
|
|
PTDEBUG && _d("Waiting for other instances to quit");
|
|
sleep $interval ;
|
|
PTDEBUG && _d("Unlinking", $sentinel);
|
|
unlink $sentinel
|
|
or die "Cannot unlink $sentinel: $OS_ERROR";
|
|
}
|
|
}
|
|
|
|
# ########################################################################
|
|
# Connect to MySQL.
|
|
# ########################################################################
|
|
if ( $o->get('ask-pass') ) {
|
|
$o->set('password', OptionParser::prompt_noecho("Enter password: "));
|
|
}
|
|
my $dsn_defaults = $dp->parse_options($o);
|
|
my $dsn = @ARGV ? $dp->parse(shift @ARGV, $dsn_defaults)
|
|
: $dsn_defaults;
|
|
my $dbh = $dp->get_dbh($dp->get_cxn_params($dsn), {AutoCommit=>1});
|
|
|
|
$dbh->{InactiveDestroy} = 1; # Don't disconnect on fork
|
|
$dbh->{FetchHashKeyName} = 'NAME_lc';
|
|
$dbh->do("USE `$db`");
|
|
|
|
# ########################################################################
# If the --check-read-only option was given and we are in --update mode,
# wait until the server is writable, or run-time is over, or the sentinel
# file exists.  We also do this check after the daemon is up and running,
# but it is necessary to check before attempting to create the table and
# inserting rows.
# https://bugs.launchpad.net/percona-toolkit/+bug/1328686
# ########################################################################
|
|
if ( $o->get('check-read-only') && $o->get('update') ) {
|
|
PTDEBUG && _d('Checking if server is read_only');
|
|
if ( server_is_readonly($dbh) && PTDEBUG ) {
|
|
_d('Server is read-only, waiting')
|
|
}
|
|
my $start_time = time;
|
|
my $run_time = $o->get('run-time');
|
|
my $interval = $o->get('interval') || 5;
|
|
while (server_is_readonly($dbh)) {
|
|
sleep($interval);
|
|
if (
|
|
($run_time && $run_time < time - $start_time)
|
|
|| -f $sentinel
|
|
) {
|
|
return 0;
|
|
}
|
|
}
|
|
}
|
|
|
|
# ########################################################################
|
|
# Create the heartbeat table if --create-table was given.
|
|
# ########################################################################
|
|
my $utc = $o->get('utc');
|
|
my $now_func = $utc ? 'UTC_TIMESTAMP()' : 'NOW()';
|
|
my $db_tbl = $q->quote($db, $tbl);
|
|
my $server_id = $dbh->selectrow_array('SELECT @@server_id');
|
|
if ( $o->get('create-table') ) {
|
|
my $sql = $o->read_para_after(__FILE__, qr/MAGIC_create_heartbeat/);
|
|
$sql =~ s/heartbeat/IF NOT EXISTS $db_tbl/;
|
|
PTDEBUG && _d($sql);
|
|
if ($o->get('create-table-engine')) {
|
|
my $engine = $o->get('create-table-engine');
|
|
$sql =~ s/;$/ ENGINE=$engine;/;
|
|
PTDEBUG && _d("Using engine $engine");
|
|
PTDEBUG && _d($sql);
|
|
}
|
|
eval { $dbh->do($sql); };
|
|
if ( $EVAL_ERROR ) {
|
|
die "Error creating heartbeat table:". $EVAL_ERROR;
|
|
}
|
|
|
|
# Now we insert first row.
|
|
# Some caveats:
|
|
|
|
# 1)
|
|
# This may fail if the table already existed and already had this row.
|
|
# We eval to ignore this possibility.
|
|
# NOTE: This can break replication though! See:
|
|
# https://bugs.launchpad.net/percona-toolkit/+bug/1004567
|
|
# So --replace should be used in most cases.
|
|
my $sql_insert_row = ($o->get('replace') ? "REPLACE" : "INSERT")
|
|
. qq/ INTO $db_tbl (ts, server_id) VALUES ($now_func, $server_id)/;
|
|
|
|
# 2)
|
|
# RBR (Row Based Replication) converts REPLACE to INSERT if row isn't
|
|
# present in master. This breaks replication when the row is present in the slave.
|
|
# Other workarounds also fail.
|
|
# INSERT IGNORE (ignore is not replicated if no error in master)
|
|
# DELETE then INSERT (DELETE is ignored, INSERT breaks replication)
|
|
# INSERT ON DUPLICATE UPDATE (converts to simple INSERT)
|
|
# TRUNCATE gets through and replicates! So we use that to wipe the slave(s).
|
|
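# With --replace, the net effect on the master is therefore, e.g.
# (illustrative, assuming --database test --table heartbeat and a
# server_id of 12345):
#   TRUNCATE TABLE `test`.`heartbeat`;
#   REPLACE INTO `test`.`heartbeat` (ts, server_id) VALUES (NOW(), 12345);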
if ($o->get('replace')) {
|
|
my $sql_truncate = "TRUNCATE TABLE $db_tbl";
|
|
PTDEBUG && _d($sql_truncate);
|
|
eval { $dbh->do($sql_truncate) };
|
|
}
|
|
PTDEBUG && _d($sql_insert_row);
|
|
eval { $dbh->do($sql_insert_row); };
|
|
}
|
|
|
|
# ########################################################################
|
|
# Get and check heartbeat table structure.
|
|
# ########################################################################
|
|
my $tbl_def = $dbh->selectrow_arrayref("SHOW CREATE TABLE $db_tbl");
|
|
my $tbl_struct = $tp->parse($tbl_def->[1]);
|
|
|
|
die "Heartbeat table $db_tbl does not have a ts column"
|
|
unless $tbl_struct->{is_col}->{ts};
|
|
|
|
my $hires_ts = $tbl_struct->{type_for}->{ts} =~ m/char/i ? 1 : 0;
|
|
PTDEBUG && _d("Hi-res ts:", ($hires_ts ? 'yes' : 'no'));
|
|
|
|
my $id = $tbl_struct->{is_col}->{id}; # legacy table struct
|
|
die "Heartbeat table $db_tbl does not have a server_id or id column"
|
|
unless $tbl_struct->{is_col}->{server_id} || $id;
|
|
|
|
# If there's an id column, then we're running in legacy mode. If there's
|
|
# a server_id column, then we're running in the new mode which supports
|
|
# multiple --update instances.
|
|
if ( $tbl_struct->{is_col}->{id} && $tbl_struct->{is_col}->{server_id} ) {
|
|
die "Heartbeat table $db_tbl cannot have both an id column and "
|
|
. "a server_id column";
|
|
}
|
|
|
|
# pk_col and pk_val are used to identify the heartbeat row to update or
|
|
# monitor.
|
|
my ($pk_col, $pk_val);
|
|
if ( $id ) {
|
|
# Legacy mode: update heartbeat row WHERE id=1 and monitor heartbeat
|
|
# row WHERE id=1.
|
|
$pk_col = 'id';
|
|
$pk_val = '1';
|
|
}
|
|
elsif ( $tbl_struct->{is_col}->{server_id} ) {
|
|
# Multi-update mode: update heartbeat row WHERE server_id=@@server_id
|
|
# and monitor heartbeat row WHERE server_id=master_server_id.
|
|
if ( $o->get('update') ) {
|
|
$pk_col = 'server_id';
|
|
$pk_val = $server_id;
|
|
}
|
|
else { # monitor or check
|
|
my $master_server_id = $o->get('master-server-id');
|
|
if ( !$master_server_id ) {
|
|
eval {
|
|
my $ms = new MasterSlave(
|
|
OptionParser => $o,
|
|
DSNParser => $dp,
|
|
Quoter => $q,
|
|
);
|
|
my $master_dsn = $ms->get_master_dsn($dbh, $dsn, $dp)
|
|
or die "This server is not a slave";
|
|
my $master_dbh = $dp->get_dbh($dp->get_cxn_params($master_dsn),
|
|
{ AutoCommit => 1 });
|
|
($master_server_id)
|
|
= $master_dbh->selectrow_array('SELECT @@server_id');
|
|
$master_dbh->disconnect;
|
|
};
|
|
if ( $EVAL_ERROR ) {
|
|
PTDEBUG && _d("Error determining master id:", $EVAL_ERROR);
|
|
}
|
|
}
|
|
if ( !$master_server_id ) {
|
|
die "The --master-server-id option must be specified because "
|
|
. "the heartbeat table $db_tbl uses the server_id column "
|
|
. "for --update or --check but the server's master could "
|
|
. "not be automatically determined.\n"
|
|
. "Please read the DESCRIPTION section of the pt-heartbeat POD.\n";
|
|
}
|
|
$pk_col = 'server_id';
|
|
$pk_val = $master_server_id;
|
|
}
|
|
}
|
|
else {
|
|
die "Heartbeat table $db_tbl does not have a server_id or id column";
|
|
}
|
|
PTDEBUG && _d('Heartbeat row primary key:', $pk_col, '=', $pk_val);
|
|
|
|
# Check that heartbeat table has at least 1 row unless --replace because
|
|
# --replace will create the row if it doesn't exist.
|
|
if ( !$o->get('replace') ) {
|
|
my $sql = "SELECT 1 FROM $db_tbl WHERE $pk_col='$pk_val' LIMIT 1";
|
|
PTDEBUG && _d($sql);
|
|
my $row = $dbh->selectall_arrayref($sql);
|
|
if ( scalar @$row == 0 ) {
|
|
PTDEBUG && _d('No heartbeat row in table');
|
|
if ( $o->get('insert-heartbeat-row') ) {
|
|
my $sql = "INSERT INTO $db_tbl ($pk_col, ts) "
|
|
. "VALUES ('$pk_val', $now_func)";
|
|
PTDEBUG && _d($sql);
|
|
$dbh->do($sql);
|
|
}
|
|
else {
|
|
if ( $id ) {
|
|
die "The heartbeat table is empty.\n"
|
|
. "At least one row must be inserted into the heartbeat "
|
|
. "table.\nPlease read the DESCRIPTION section of the "
|
|
. "pt-heartbeat POD.\n";
|
|
}
|
|
else {
|
|
die "No row found in heartbeat table for server_id $pk_val.\n"
|
|
. "At least one row must be inserted into the heartbeat "
|
|
. "table for server_id $pk_val.\nPlease read the "
|
|
. "DESCRIPTION section of the pt-heartbeat POD.\n";
|
|
}
|
|
}
|
|
}
|
|
}
|
|
|
|
# ########################################################################
|
|
# Make sth for updating or checking the heartbeat table.
|
|
# ########################################################################
|
|
my ($heartbeat_sql, $heartbeat_sth);
|
|
my ($get_delay, $update_heartbeat);
|
|
|
|
if ( $o->get('update') ) {
|
|
my @master_status_cols = grep { $tbl_struct->{is_col}->{$_} }
|
|
qw(file position);
|
|
PTDEBUG && _d("Master status columns:", join(', ', @master_status_cols));
|
|
|
|
my @slave_status_cols = grep { $tbl_struct->{is_col}->{$_} }
|
|
qw(relay_master_log_file exec_master_log_pos);
|
|
PTDEBUG && _d("Slave status columns:", join(', ', @slave_status_cols));
|
|
|
|
# Just a shortcut so I don't have to check both arrays when creating
|
|
# SQL statement below.
|
|
my @extra_cols = (@master_status_cols, @slave_status_cols);
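# For example, in --replace mode with all the optional columns present in
# the heartbeat table, the statement built below looks roughly like:
#   REPLACE INTO `db`.`heartbeat`
#      (ts, server_id, file, position, relay_master_log_file, exec_master_log_pos)
#   VALUES (?, '<server_id>', ?, ?, ?, ?)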
|
|
|
|
if ( $o->get('replace') ) {
|
|
$heartbeat_sql
|
|
= "REPLACE INTO $db_tbl (ts, $pk_col"
|
|
. (@extra_cols ? ", " . join(', ', @extra_cols) : '')
|
|
. ") VALUES (?, '$pk_val'"
|
|
. (@extra_cols ? ", " . join(', ', map { '?' } @extra_cols) : '')
|
|
. ")";
|
|
}
|
|
else {
|
|
$heartbeat_sql
|
|
= "UPDATE $db_tbl SET ts=?"
|
|
. (@extra_cols ? ", " . join(', ', map { "$_=?" } @extra_cols) : "")
|
|
. " WHERE $pk_col='$pk_val'";
|
|
}
|
|
PTDEBUG && _d("UPDATE SQL:", $heartbeat_sql);
|
|
|
|
$heartbeat_sth = $dbh->prepare($heartbeat_sql);
|
|
|
|
my $ro_check = !!$o->get('check-read-only');
|
|
$update_heartbeat = sub {
|
|
my ($sth) = @_;
|
|
my @vals;
|
|
|
|
return if $ro_check && server_is_readonly($dbh);
|
|
|
|
my $sql;
|
|
if ( @master_status_cols ) {
|
|
$sql = "SHOW MASTER STATUS";
|
|
PTDEBUG && _d($dbh, $sql);
|
|
my $row = $dbh->selectrow_hashref($sql);
|
|
if ( !$row ) {
|
|
PTDEBUG && _d("No row from", $sql);
|
|
push @vals, map { undef } @master_status_cols;
|
|
}
|
|
else {
|
|
push @vals, map { $row->{$_} } @master_status_cols;
|
|
}
|
|
}
|
|
|
|
if ( @slave_status_cols ) {
|
|
$sql = "SHOW SLAVE STATUS";
|
|
PTDEBUG && _d($dbh, $sql);
|
|
my $row = $dbh->selectrow_hashref($sql);
|
|
if ( !$row ) {
|
|
PTDEBUG && _d("No row from", $sql);
|
|
push @vals, map { undef } @slave_status_cols;
|
|
}
|
|
else {
|
|
push @vals, map { $row->{$_} } @slave_status_cols;
|
|
}
|
|
}
|
|
|
|
my $retry = Retry->new();
|
|
$retry->retry(
|
|
tries => 3,
|
|
wait => sub { sleep 0.25; return; },
|
|
try => sub {
|
|
$sth->execute(ts(time, $utc), @vals);
|
|
PTDEBUG && _d($sth->{Statement});
|
|
$sth->finish();
|
|
},
|
|
fail => sub {
|
|
my (%args) = @_;
|
|
my $error = $args{error};
|
|
if ( $error =~ m/Deadlock found/ ) {
|
|
return 1; # try again
|
|
}
|
|
else {
|
|
return 0;
|
|
}
|
|
},
|
|
final_fail => sub {
|
|
my (%args) = @_;
|
|
die $args{error};
|
|
}
|
|
);
|
|
|
|
return;
|
|
};
|
|
}
|
|
else { # --monitor or --check
|
|
my $dbi_driver = lc $o->get('dbi-driver');
|
|
|
|
# UNIX_TIMESTAMP(UTC_TIMESTAMP()) instead of UNIX_TIMESTAMP() alone,
|
|
# so we make sure that we aren't being fooled by a timezone.
|
|
# UNIX_TIMESTAMP(ts) replaces unix_timestamp($ts) -- MySQL is the
|
|
# authority here, so let it calculate everything.
|
|
$heartbeat_sql
|
|
= "SELECT " . ($utc ? 'UNIX_TIMESTAMP(ts)' : 'ts')
|
|
. ($dbi_driver eq 'mysql' ? '/*!50038, @@hostname AS host*/' : '')
|
|
. ($id ? "" : ", server_id")
|
|
. " FROM $db_tbl "
|
|
. "WHERE $pk_col='$pk_val' "
|
|
. "LIMIT 1";
|
|
PTDEBUG && _d("SELECT SQL:", $heartbeat_sql);
|
|
|
|
$heartbeat_sth = $dbh->prepare($heartbeat_sql);
|
|
|
|
$get_delay = sub {
|
|
my ($sth) = @_;
|
|
$sth->execute();
|
|
PTDEBUG && _d($sth->{Statement});
|
|
my ($ts, $hostname, $server_id) = $sth->fetchrow_array();
|
|
my $now = time;
|
|
PTDEBUG && _d("Heartbeat from server", $server_id, "\n",
|
|
" now:", ts($now, $utc), "\n",
|
|
" ts:", $ts, "\n",
|
|
"skew:", $skew);
|
|
my $delay = $now - unix_timestamp($ts, $utc) - $skew;
|
|
PTDEBUG && _d('Delay', sprintf('%.6f', $delay), 'on', $hostname);
|
|
|
|
# Because we adjust for skew, if the ts are less than skew seconds
|
|
# apart (i.e. replication is very fast) then delay will be negative.
|
|
# So it's effectively 0 seconds of lag.
|
|
$delay = 0.00 if $delay < 0;
|
|
|
|
$sth->finish();
|
|
return ($delay, $hostname, $pk_val);
|
|
};
|
|
|
|
# https://bugs.launchpad.net/percona-toolkit/+bug/1163372
|
|
# "pt-heartbeat --utc --check always returns 0"
|
|
if ( $utc ) {
|
|
my $sql = "SET time_zone='+0:00'";
|
|
PTDEBUG && _d($sql);
|
|
$dbh->do($sql);
|
|
}
|
|
}
|
|
|
|
# Do a little check just to make sure the table is there, so there's one last
|
|
# chance to catch errors before daemonizing.
|
|
if ( $o->get('update') ) {
|
|
$update_heartbeat->($heartbeat_sth);
|
|
}
|
|
else {
|
|
$get_delay->($heartbeat_sth);
|
|
}
|
|
$heartbeat_sth->finish();
|
|
|
|
# ########################################################################
|
|
# Daemonize only after (potentially) asking for passwords for --ask-pass.
|
|
# ########################################################################
|
|
my $daemon;
|
|
if ( $o->get('daemonize') ) {
|
|
$daemon = new Daemon(o=>$o);
|
|
$daemon->daemonize();
|
|
PTDEBUG && _d('I am a daemon now');
|
|
}
|
|
elsif ( $o->get('pid') ) {
|
|
# We're not daemonizing; the Daemon object just handles the PID file.
|
|
$daemon = new Daemon(o=>$o);
|
|
$daemon->make_PID_file();
|
|
}
|
|
|
|
# ########################################################################
|
|
# --check and exit if --check was given.
|
|
# ########################################################################
|
|
if ( $o->get('check') ) {
|
|
PTDEBUG && _d('--check and exit');
|
|
check_delay(
|
|
dsn => $dsn,
|
|
dbh => $dbh,
|
|
sth => $heartbeat_sth,
|
|
sql => $heartbeat_sql,
|
|
get_delay => $get_delay,
|
|
interval => $interval,
|
|
skew => $skew,
|
|
hires_ts => $hires_ts,
|
|
OptionParser => $o,
|
|
DSNParser => $dp,
|
|
);
|
|
disconnect($dbh, $heartbeat_sth);
|
|
return 0;
|
|
}
|
|
|
|
# ########################################################################
|
|
# Do the version-check
|
|
# ########################################################################
|
|
if ( $o->get('version-check') && (!$o->has('quiet') || !$o->get('quiet')) ) {
|
|
VersionCheck::version_check(
|
|
force => $o->got('version-check'),
|
|
instances => [ { dbh => $dbh, dsn => $dsn } ],
|
|
);
|
|
}
|
|
|
|
# ########################################################################
|
|
# Setup moving averages for --frames.
|
|
# ########################################################################
|
|
my @samples;
|
|
my $limit = max(@$frames);
|
|
|
|
# 2.00s [ 0.05s, 0.01s, 0.00s ]
|
|
my $format = ($hires_ts ? '%1$.2f' : '%1$4d') . "s [ ";
|
|
my $findex = 2;
|
|
foreach (@$frames) {
|
|
$format .= $findex > 2 ? ', ' : '';
|
|
$format .= '%'.$findex.'$5.2fs';
|
|
$findex++;
|
|
}
|
|
$format .= " ]";
|
|
$format .= ($o->get('print-master-server-id') ? ' %'.$findex.'$d' : '')
|
|
. "\n";
|
|
|
|
# ########################################################################
|
|
# Monitor or update the heartbeat table.
|
|
# ########################################################################
|
|
my $end = $o->get('run-time') ? int(time + $o->get('run-time')) : 0;
|
|
PTDEBUG && _d($end ? ('Will exit at', ts($end)) : 'Running forever');
|
|
|
|
my $get_next_interval = make_interval_iter($interval, $skew);
|
|
|
|
while ( # Stop if...
|
|
(!$end || int(time) < $end) # runtime exceeded, or
|
|
&& !-f $sentinel # sentinel file created
|
|
) {
|
|
eval {
|
|
my $next_interval = $get_next_interval->();
|
|
# save current time in variable to avoid race condition
|
|
# https://bugs.launchpad.net/percona-toolkit/+bug/1406390
|
|
my $time = time;
|
|
if ( $time >= $next_interval ) {
|
|
do { $next_interval = $get_next_interval->() }
|
|
until $next_interval > $time;
|
|
PTDEBUG && _d("Missed last interval; next interval:",
|
|
ts($next_interval));
|
|
}
|
|
sleep $next_interval - $time;
|
|
PTDEBUG && _d('Woke up at', ts(time));
|
|
|
|
# Connect or reconnect if necessary.
|
|
if ( !$dbh->ping() ) {
|
|
$dbh = $dp->get_dbh($dp->get_cxn_params($dsn), { AutoCommit => 1 });
|
|
$dbh->{InactiveDestroy} = 1; # Don't disconnect on fork
|
|
$dbh->{FetchHashKeyName} = 'NAME_lc';
|
|
$dbh->do("USE `$db`");
|
|
|
|
$heartbeat_sth = undef;
|
|
}
|
|
|
|
if ( $o->get('monitor') ) {
|
|
$heartbeat_sth ||= $dbh->prepare($heartbeat_sql);
|
|
my ($delay) = $get_delay->($heartbeat_sth);
|
|
|
|
unshift @samples, $delay;
|
|
pop @samples if @samples > $limit;
|
|
|
|
# Calculate and print results
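# Each value below is the moving average for one --frames timeframe: the sum
# of the most recent samples divided by the full frame length, so a frame
# that is not yet full is averaged as if the missing samples were zero.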
|
|
my @vals = map {
|
|
my $bound = min($_, scalar(@samples));
|
|
sum(@samples[0 .. $bound-1]) / $_;
|
|
} @$frames;
|
|
|
|
my $output = sprintf $format, $delay, @vals, $pk_val;
|
|
if ( my $file = $o->get('file') ) {
|
|
open my $fh, '>', $file
|
|
or die "Can't open $file: $OS_ERROR";
|
|
print $fh $output
|
|
or die "Can't print to $file: $OS_ERROR";
|
|
close $fh
|
|
or die "Can't close $file: $OS_ERROR";
|
|
}
|
|
else {
|
|
print $output;
|
|
}
|
|
}
|
|
else { # --update mode
|
|
$heartbeat_sth ||= $dbh->prepare($heartbeat_sql);
|
|
$update_heartbeat->($heartbeat_sth);
|
|
}
|
|
};
|
|
if ( $EVAL_ERROR ) {
|
|
my ( $err ) = $EVAL_ERROR =~ m/^(?:DBI|DBD).*failed: (.*?)\s*at \S+ line .*/;
|
|
if ( $err ) {
|
|
warn "$err\n";
|
|
}
|
|
else {
|
|
die $EVAL_ERROR;
|
|
}
|
|
}
|
|
}
|
|
|
|
disconnect($dbh, $heartbeat_sth);
|
|
return 0;
|
|
}
|
|
|
|
# ############################################################################
|
|
# Subroutines.
|
|
# ############################################################################
|
|
|
|
sub server_is_readonly {
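# Returns true if @@global.read_only is enabled and the current user cannot
# bypass it; users with ALL or SUPER privileges can still write to a
# read-only server, so for them this returns false.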
|
|
my ($dbh) = @_;
|
|
my ( $is_read_only ) = $dbh->selectrow_array(q{SELECT @@global.read_only});
|
|
if ( $is_read_only ) {
|
|
my ( $privs ) = eval { $dbh->selectrow_array(q{SHOW GRANTS}) };
|
|
if ( $privs && $privs =~ /\b(?:ALL|SUPER)\b/ ) {
|
|
$is_read_only = undef;
|
|
}
|
|
}
|
|
return $is_read_only;
|
|
}
|
|
|
|
# Check the delay on a single server. Optionally recurse to all its slaves.
|
|
sub check_delay {
|
|
my ( %args ) = @_;
|
|
my @required_args = qw(dsn dbh sth sql get_delay interval skew OptionParser DSNParser);
|
|
foreach my $arg ( @required_args ) {
|
|
die "I need a $arg argument" unless $args{$arg};
|
|
}
|
|
my ($dsn, $dbh, $sth, $sql, $get_delay, $interval, $skew, $o, $dp)
|
|
= @args{@required_args};
|
|
PTDEBUG && _d('Checking slave delay');
|
|
|
|
# Collect a list of connections to the slaves.
|
|
if ( $o->get('recurse') ) {
|
|
PTDEBUG && _d('Recursing to slaves');
|
|
my $ms = new MasterSlave(
|
|
OptionParser => $o,
|
|
DSNParser => $dp,
|
|
Quoter => "Quoter",
|
|
);
|
|
$ms->recurse_to_slaves(
|
|
{ dbh => $dbh,
|
|
dsn => $dsn,
|
|
callback => sub {
|
|
my ( $dsn, $dbh, $level ) = @_;
|
|
push @dbhs, $dbh;
|
|
PTDEBUG && _d("Found slave", $dp->as_string($dsn));
|
|
push @sths, [ $dsn, $dbh->prepare($sql) ];
|
|
},
|
|
},
|
|
);
|
|
}
|
|
else {
|
|
push @sths, [ $dsn, $sth ];
|
|
}
|
|
|
|
my $format_delay = ($args{hires_ts} ? '%1$.2f' : '%1$d')
|
|
. ($o->get('print-master-server-id') ? ' %2$d' : "")
|
|
. "\n";
|
|
my $format_host = '%1$-20s '.($args{hires_ts} ? '%2$.2f' : '%2$d')
|
|
. ($o->get('print-master-server-id') ? ' %3$d' : "")
|
|
. "\n";
|
|
|
|
# Before hi-res ts, we could check all slaves at one interval, assuming
|
|
# the checks were fast, i.e. able to be done within one interval. But
|
|
# now we have intervals as fast as 0.01s, which is too short to check all
|
|
# slaves. So for each slave we sleep until the next interval.
|
|
my $get_next_interval = make_interval_iter($interval, $skew);
|
|
|
|
SLAVE:
|
|
foreach my $thing ( @sths ) {
|
|
my ( $dsn, $sth ) = @$thing;
|
|
PTDEBUG && _d('Checking slave', $dp->as_string($dsn));
|
|
|
|
my $next_interval = $get_next_interval->();
|
|
# save current time in variable to avoid race condition
|
|
# https://bugs.launchpad.net/percona-toolkit/+bug/1406390
|
|
my $time = time;
|
|
if ( $time >= $next_interval ) {
|
|
do { $next_interval = $get_next_interval->() }
|
|
until $next_interval > $time;
|
|
PTDEBUG && _d("Missed last interval; next interval:",
|
|
ts($next_interval));
|
|
}
|
|
sleep $next_interval - $time;
|
|
PTDEBUG && _d('Woke up at', ts(time));
|
|
my ($delay, $hostname, $master_server_id) = $get_delay->($sth);
|
|
|
|
if ( $o->get('recurse') ) {
|
|
# Must print not only the delay, but the server's hostname if
|
|
# available. Prefer the hostname from the DSN, then the hostname
|
|
# from @@hostname, then fall back to the DSN's socket (S) or defaults file (F).
|
|
my $host = $dsn->{h} || $hostname || $dsn->{S} || $dsn->{F} || '';
|
|
if ( $dsn->{P} && $dsn->{P} ne '3306' ) {
|
|
$host .= ":$dsn->{P}";
|
|
}
|
|
printf($format_host, $host, $delay, $master_server_id);
|
|
}
|
|
else {
|
|
# Just print the delay.
|
|
printf($format_delay, $delay, $master_server_id);
|
|
}
|
|
}
|
|
|
|
return;
|
|
}
|
|
|
|
# The interval iterator works by first returning the next whole second.
|
|
# So if the current time (since epoch) is 5.123, then the next whole second
|
|
# is 6.0, plus an optional skew. The next interval is 6.0 + the interval.
|
|
# If the interval is 0.5s, then the next interval is 6.5, plus an optional
|
|
# skew. Therefore, we always start on a whole second and return when the
|
|
# next interval is or should be. The caller can then sleep(next_interval - time)
|
|
# to wake up at that interval. If the caller misses the next interval,
|
|
# they just call the iterator until the next interval is later than the
|
|
# current time.
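# For example, with interval=0.5 and skew=0.5, if the current time is 5.123
# then start_s is 6 and the iterator returns 6.5, 7.0, 7.5, and so on.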
|
|
sub make_interval_iter {
|
|
my ( $interval, $skew ) = @_;
|
|
die "I need an interval argument" unless defined $interval;
|
|
my ($s) = gettimeofday();
|
|
my $start_s = $s + 1;
|
|
my $i = 0;
|
|
my $get_next_interval = sub {
|
|
return $start_s + ($interval * $i++) + $skew;
|
|
};
|
|
return $get_next_interval;
|
|
}
|
|
|
|
sub disconnect {
|
|
my ( $dbh, $sth ) = @_;
|
|
PTDEBUG && _d('Disconnecting');
|
|
$sth->finish() if $sth;
|
|
foreach my $handle ( @sths ) {
|
|
my $sth = $handle->[1];
|
|
$sth->finish() if $sth;
|
|
}
|
|
foreach my $handle ( $dbh, @dbhs ) {
|
|
$handle->disconnect() if $handle;
|
|
}
|
|
return;
|
|
}
|
|
|
|
sub _d {
|
|
my ($package, undef, $line) = caller 0;
|
|
@_ = map { (my $temp = $_) =~ s/\n/\n# /g; $temp; }
|
|
map { defined $_ ? $_ : 'undef' }
|
|
@_;
|
|
print STDERR "# $package:$line $PID ", join(' ', @_), "\n";
|
|
}
|
|
|
|
# ############################################################################
|
|
# Run the program.
|
|
# ############################################################################
|
|
if ( !caller ) { exit main(@ARGV); }
|
|
|
|
1; # Because this is a module as well as a script.
|
|
|
|
# ############################################################################
|
|
# Documentation.
|
|
# ############################################################################
|
|
|
|
=pod
|
|
|
|
=head1 NAME
|
|
|
|
pt-heartbeat - Monitor MySQL replication delay.
|
|
|
|
=head1 SYNOPSIS
|
|
|
|
Usage: pt-heartbeat [OPTIONS] [DSN] --update|--monitor|--check|--stop
|
|
|
|
pt-heartbeat measures replication lag on a MySQL or PostgreSQL server. You can
|
|
use it to update a master or monitor a replica. If possible, MySQL connection
|
|
options are read from your .my.cnf file.
|
|
|
|
Start daemonized process to update test.heartbeat table on master:
|
|
|
|
pt-heartbeat -D test --update -h master-server --daemonize
|
|
|
|
Monitor replication lag on slave:
|
|
|
|
pt-heartbeat -D test --monitor -h slave-server
|
|
|
|
pt-heartbeat -D test --monitor -h slave-server --dbi-driver Pg
|
|
|
|
Check slave lag once and exit (using optional DSN to specify slave host):
|
|
|
|
pt-heartbeat -D test --check h=slave-server
|
|
|
|
=head1 RISKS
|
|
|
|
Percona Toolkit is mature, proven in the real world, and well tested,
|
|
but all database tools can pose a risk to the system and the database
|
|
server. Before using this tool, please:
|
|
|
|
=over
|
|
|
|
=item * Read the tool's documentation
|
|
|
|
=item * Review the tool's known L<"BUGS">
|
|
|
|
=item * Test the tool on a non-production server
|
|
|
|
=item * Backup your production server and verify the backups
|
|
|
|
=back
|
|
|
|
=head1 DESCRIPTION
|
|
|
|
pt-heartbeat is a two-part MySQL and PostgreSQL replication delay monitoring
|
|
system that measures delay by looking at actual replicated data. This
|
|
avoids reliance on the replication mechanism itself, which is unreliable (for
|
|
example, C<SHOW SLAVE STATUS> on MySQL).
|
|
|
|
The first part is an L<"--update"> instance of pt-heartbeat that connects to
|
|
a master and updates a timestamp ("heartbeat record") every L<"--interval">
|
|
seconds. Since the heartbeat table may contain records from multiple
|
|
masters (see L<"MULTI-SLAVE HIERARCHY">), the server's ID (@@server_id) is
|
|
used to identify records.
|
|
|
|
The second part is a L<"--monitor"> or L<"--check"> instance of pt-heartbeat
|
|
that connects to a slave, examines the replicated heartbeat record from its
|
|
immediate master or the specified L<"--master-server-id">, and computes the
|
|
difference from the current system time. If replication between the slave and
|
|
the master is delayed or broken, the computed difference will be greater than
|
|
zero and potentially increase if L<"--monitor"> is specified.
|
|
|
|
You must either manually create the heartbeat table on the master or use
|
|
L<"--create-table">. See L<"--create-table"> for the proper heartbeat
|
|
table structure. The C<MEMORY> storage engine is suggested, but of course
|
|
not required, for MySQL.
|
|
|
|
The heartbeat table must contain a heartbeat row. By default, a heartbeat
|
|
row is inserted if it doesn't exist. This feature can be disabled with the
|
|
L<"--[no]insert-heartbeat-row"> option in case the database user does not
|
|
have INSERT privileges.
|
|
|
|
pt-heartbeat depends only on the heartbeat record being replicated to the slave,
|
|
so it works regardless of the replication mechanism (built-in replication, a
|
|
system such as Continuent Tungsten, etc). It works at any depth in the
|
|
replication hierarchy; for example, it will reliably report how far a slave lags
|
|
its master's master's master. And if replication is stopped, it will continue
|
|
to work and report (accurately!) that the slave is falling further and further
|
|
behind the master.
|
|
|
|
pt-heartbeat has a maximum resolution of 0.01 second. The clocks on the
|
|
master and slave servers must be closely synchronized via NTP. By default,
|
|
L<"--update"> checks happen on the edge of the second (e.g. 00:01) and
|
|
L<"--monitor"> checks happen halfway between seconds (e.g. 00:01.5).
|
|
As long as the servers' clocks are closely synchronized and replication
|
|
events are propagating in less than half a second, pt-heartbeat will report
|
|
zero seconds of delay.
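
For example, with the default L<"--interval"> of 1 second and the default
L<"--skew"> of 0.5 seconds, one cycle looks roughly like this:

  00:01.0  the --update instance writes a new heartbeat timestamp on the master
  00:01.5  the --monitor instance on the slave reads the replicated timestamp;
           anything beyond the half-second skew is reported as lag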
|
|
|
|
pt-heartbeat will try to reconnect if the connection has an error, but will
|
|
not retry if it can't get a connection when it first starts.
|
|
|
|
The L<"--dbi-driver"> option lets you use pt-heartbeat to monitor PostgreSQL
|
|
as well. It is reported to work well with Slony-1 replication.
|
|
|
|
=head1 MULTI-SLAVE HIERARCHY
|
|
|
|
If the replication hierarchy has multiple slaves which are masters of
|
|
other slaves, like "master -> slave1 -> slave2", L<"--update"> instances
|
|
can be run on the slaves as well as the master. The default heartbeat
|
|
table (see L<"--create-table">) is keyed on the C<server_id> column, so
|
|
each server will update the row where C<server_id=@@server_id>.
|
|
|
|
For L<"--monitor"> and L<"--check">, if L<"--master-server-id"> is not
|
|
specified, the tool tries to discover and use the slave's immediate master.
|
|
If this fails, or if you want to monitor lag from another master, then you can
|
|
specify the L<"--master-server-id"> to use.
|
|
|
|
For example, if the replication hierarchy is "master -> slave1 -> slave2"
|
|
with corresponding server IDs 1, 2 and 3, you can:
|
|
|
|
pt-heartbeat --daemonize -D test --update -h master
|
|
pt-heartbeat --daemonize -D test --update -h slave1
|
|
|
|
Then check (or monitor) the replication delay from master to slave2:
|
|
|
|
pt-heartbeat -D test --master-server-id 1 --check slave2
|
|
|
|
Or check the replication delay from slave1 to slave2:
|
|
|
|
pt-heartbeat -D test --master-server-id 2 --check slave2
|
|
|
|
Stopping the L<"--update"> instance one slave1 will not affect the instance
|
|
on the master.
|
|
|
|
=head1 MASTER AND SLAVE STATUS
|
|
|
|
The default heartbeat table (see L<"--create-table">) has columns for saving
|
|
information from C<SHOW MASTER STATUS> and C<SHOW SLAVE STATUS>. These
|
|
columns are optional. If any are present, their corresponding information
|
|
will be saved.
|
|
|
|
=head1 Percona XtraDB Cluster
|
|
|
|
Although pt-heartbeat should work with all supported versions of Percona XtraDB
|
|
Cluster (PXC), we recommend using 5.5.28-23.7 and newer.
|
|
|
|
If you are setting up heartbeat instances between cluster nodes, keep in mind
|
|
that, since the speed of the cluster is determined by its slowest node,
|
|
pt-heartbeat will not report how fast the cluster itself is, but only how
|
|
fast events are replicating from one node to another.
|
|
|
|
You must specify L<"--master-server-id"> for L<"--monitor"> and L<"--check">
|
|
instances.
|
|
|
|
=head1 OPTIONS
|
|
|
|
Specify at least one of L<"--stop">, L<"--update">, L<"--monitor">, or L<"--check">.
|
|
|
|
L<"--update">, L<"--monitor">, and L<"--check"> are mutually exclusive.
|
|
|
|
L<"--daemonize"> and L<"--check"> are mutually exclusive.
|
|
|
|
This tool accepts additional command-line arguments. Refer to the
|
|
L<"SYNOPSIS"> and usage information for details.
|
|
|
|
=over
|
|
|
|
=item --ask-pass
|
|
|
|
Prompt for a password when connecting to MySQL.
|
|
|
|
=item --charset
|
|
|
|
short form: -A; type: string
|
|
|
|
Default character set. If the value is utf8, sets Perl's binmode on STDOUT to
|
|
utf8, passes the mysql_enable_utf8 option to DBD::mysql, and runs SET NAMES UTF8
|
|
after connecting to MySQL. Any other value sets binmode on STDOUT without the
|
|
utf8 layer, and runs SET NAMES after connecting to MySQL.
|
|
|
|
=item --check
|
|
|
|
Check slave delay once and exit. If you also specify L<"--recurse">, the
|
|
tool will try to discover slaves of the given slave and check and print
|
|
their lag, too. The hostname or IP and port for each slave is printed
|
|
before its delay. L<"--recurse"> only works with MySQL.
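
For example, to check the given slave and any slaves one level below it:

  pt-heartbeat -D test --check --recurse 1 h=slave-server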
|
|
|
|
=item --check-read-only
|
|
|
|
Check if the server has read_only enabled; if it does, the tool skips doing
|
|
any inserts.
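
This option is typically combined with L<"--update"> on a server that might be
switched to read-only, for example during a failover:

  pt-heartbeat -D test --update --check-read-only h=master-server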
|
|
|
|
=item --config
|
|
|
|
type: Array
|
|
|
|
Read this comma-separated list of config files; if specified, this must be the
|
|
first option on the command line.
|
|
|
|
=item --create-table
|
|
|
|
Create the heartbeat L<"--table"> if it does not exist.
|
|
|
|
This option causes the table specified by L<"--database"> and L<"--table"> to
|
|
be created with the following MAGIC_create_heartbeat table definition:
|
|
|
|
CREATE TABLE heartbeat (
|
|
ts varchar(26) NOT NULL,
|
|
server_id int unsigned NOT NULL PRIMARY KEY,
|
|
file varchar(255) DEFAULT NULL, -- SHOW MASTER STATUS
|
|
position bigint unsigned DEFAULT NULL, -- SHOW MASTER STATUS
|
|
relay_master_log_file varchar(255) DEFAULT NULL, -- SHOW SLAVE STATUS
|
|
exec_master_log_pos bigint unsigned DEFAULT NULL -- SHOW SLAVE STATUS
|
|
);
|
|
|
|
The heartbeat table requires at least one row. If you manually create the
|
|
heartbeat table, then you must insert a row by doing:
|
|
|
|
INSERT INTO heartbeat (ts, server_id) VALUES (NOW(), N);
|
|
|
|
or if using L<"--utc">:
|
|
|
|
INSERT INTO heartbeat (ts, server_id) VALUES (UTC_TIMESTAMP(), N);
|
|
|
|
where C<N> is the server's ID; do not use @@server_id because it will replicate
|
|
and slaves will insert their own server ID instead of the master's server ID.
|
|
|
|
This is done automatically by L<"--create-table">.
|
|
|
|
A legacy version of the heartbeat table is still supported:
|
|
|
|
CREATE TABLE heartbeat (
|
|
id int NOT NULL PRIMARY KEY,
|
|
ts datetime NOT NULL
|
|
);
|
|
|
|
Legacy tables do not support L<"--update"> instances on each slave
|
|
of a multi-slave hierarchy like "master -> slave1 -> slave2".
|
|
To manually insert the one required row into a legacy table:
|
|
|
|
INSERT INTO heartbeat (id, ts) VALUES (1, NOW());
|
|
|
|
or if using L<"--utc">:
|
|
|
|
INSERT INTO heartbeat (id, ts) VALUES (1, UTC_TIMESTAMP());
|
|
|
|
The tool automatically detects if the heartbeat table is legacy.
|
|
|
|
See also L<"MULTI-SLAVE HIERARCHY">.
|
|
|
|
=item --create-table-engine
|
|
|
|
type: string
|
|
|
|
Sets the engine to be used for the heartbeat table.
|
|
The default storage engine is InnoDB as of MySQL 5.5.5.
|
|
|
|
=item --daemonize
|
|
|
|
Fork to the background and detach from the shell. POSIX operating systems only.
|
|
|
|
=item --database
|
|
|
|
short form: -D; type: string
|
|
|
|
The database to use for the connection.
|
|
|
|
=item --dbi-driver
|
|
|
|
default: mysql; type: string
|
|
|
|
Specify a driver for the connection; C<mysql> and C<Pg> are supported.
|
|
|
|
=item --defaults-file
|
|
|
|
short form: -F; type: string
|
|
|
|
Only read mysql options from the given file. You must give an absolute
|
|
pathname.
|
|
|
|
=item --file
|
|
|
|
type: string
|
|
|
|
Print latest L<"--monitor"> output to this file.
|
|
|
|
When L<"--monitor"> is given, prints output to the specified file instead of to
|
|
STDOUT. The file is opened, truncated, and closed every interval, so it will
|
|
only contain the most recent statistics. Useful when L<"--daemonize"> is given.
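
For example, to write the latest reading to a file while running in the
background (the file path here is just an example):

  pt-heartbeat -D test --monitor --daemonize --file /tmp/pt-heartbeat-lag.txt h=slave-server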
|
|
|
|
=item --frames
|
|
|
|
type: string; default: 1m,5m,15m
|
|
|
|
Timeframes for averages.
|
|
|
|
Specifies the timeframes over which to calculate moving averages when
|
|
L<"--monitor"> is given. Specify as a comma-separated list of numbers with
|
|
suffixes. The suffix can be s for seconds, m for minutes, h for hours, or d for
|
|
days. The size of the largest frame determines the maximum memory usage, as up
|
|
to the specified number of per-second samples are kept in memory to calculate
|
|
the averages. You can specify as many timeframes as you like.
|
|
|
|
=item --help
|
|
|
|
Show help and exit.
|
|
|
|
=item --host
|
|
|
|
short form: -h; type: string
|
|
|
|
Connect to host.
|
|
|
|
=item --[no]insert-heartbeat-row
|
|
|
|
default: yes
|
|
|
|
Insert a heartbeat row in the L<"--table"> if one doesn't exist.
|
|
|
|
The heartbeat L<"--table"> requires a heartbeat row, else there's nothing
|
|
to L<"--update">, L<"--monitor">, or L<"--check">! By default, the tool will
|
|
insert a heartbeat row if one is not already present. You can disable this
|
|
feature by specifying C<--no-insert-heartbeat-row> in case the database user
|
|
does not have INSERT privileges.
|
|
|
|
=item --interval
|
|
|
|
type: float; default: 1.0
|
|
|
|
How often to update or check the heartbeat L<"--table">. Updates and checks
|
|
begin on the first whole second then repeat every L<"--interval"> seconds
|
|
for L<"--update"> and every L<"--interval"> plus L<"--skew"> seconds for
|
|
L<"--monitor">.
|
|
|
|
For example, if at 00:00.4 an L<"--update"> instance is started at 0.5 second
|
|
intervals, the first update happens at 00:01.0, the next at 00:01.5, etc.
|
|
If at 00:10.7 a L<"--monitor"> instance is started at 0.05 second intervals
|
|
with the default 0.5 second L<"--skew">, then the first check happens at
|
|
00:11.5 (00:11.0 + 0.5) which will be L<"--skew"> seconds after the last update
|
|
which, because the instances are checking at synchronized intervals, happened
|
|
at 00:11.0.
|
|
|
|
The tool waits for and begins on the first whole second just to make the
|
|
interval calculations simpler. Therefore, the tool could wait up to 1 second
|
|
before updating or checking.
|
|
|
|
The minimum (fastest) interval is 0.01, and the maximum precision is two
|
|
decimal places, so 0.015 will be rounded to 0.02.
|
|
|
|
If a legacy heartbeat table (see L<"--create-table">) is used, then the
|
|
maximum precision is 1s because the C<ts> column is type C<datetime>.
|
|
|
|
=item --log
|
|
|
|
type: string
|
|
|
|
Print all output to this file when daemonized.
|
|
|
|
=item --master-server-id
|
|
|
|
type: string
|
|
|
|
Calculate delay from this master server ID for L<"--monitor"> or L<"--check">.
|
|
If not given, pt-heartbeat attempts to connect to the server's master and
|
|
determine its server id.
|
|
|
|
=item --monitor
|
|
|
|
Monitor slave delay continuously.
|
|
|
|
Specifies that pt-heartbeat should check the slave's delay every second and
|
|
report to STDOUT (or if L<"--file"> is given, to the file instead). The output
|
|
is the current delay followed by moving averages over the timeframe given in
|
|
L<"--frames">. For example,
|
|
|
|
5s [ 0.25s, 0.05s, 0.02s ]
|
|
|
|
=item --password
|
|
|
|
short form: -p; type: string
|
|
|
|
Password to use when connecting.
|
|
If password contains commas they must be escaped with a backslash: "exam\,ple"
|
|
|
|
=item --pid
|
|
|
|
type: string
|
|
|
|
Create the given PID file. The tool won't start if the PID file already
|
|
exists and the PID it contains is different than the current PID. However,
|
|
if the PID file exists and the PID it contains is no longer running, the
|
|
tool will overwrite the PID file with the current PID. The PID file is
|
|
removed automatically when the tool exits.
|
|
|
|
=item --port
|
|
|
|
short form: -P; type: int
|
|
|
|
Port number to use for connection.
|
|
|
|
=item --print-master-server-id
|
|
|
|
Print the auto-detected or given L<"--master-server-id">. If L<"--check">
|
|
or L<"--monitor"> is specified, specifying this option will print the
|
|
auto-detected or given L<"--master-server-id"> at the end of each line.
|
|
|
|
=item --recurse
|
|
|
|
type: int
|
|
|
|
Check slaves recursively to this depth in L<"--check"> mode.
|
|
|
|
Try to discover slave servers recursively, to the specified depth. After
|
|
discovering servers, run the check on each one of them and print the hostname
|
|
(if possible), followed by the slave delay.
|
|
|
|
This currently works only with MySQL. See L<"--recursion-method">.
|
|
|
|
=item --recursion-method
|
|
|
|
type: array; default: processlist,hosts
|
|
|
|
Preferred recursion method used to find slaves.
|
|
|
|
Possible methods are:
|
|
|
|
METHOD USES
|
|
=========== ==================
|
|
processlist SHOW PROCESSLIST
|
|
hosts SHOW SLAVE HOSTS
|
|
none Do not find slaves
|
|
|
|
The processlist method is preferred because SHOW SLAVE HOSTS is not reliable.
|
|
However, the hosts method is required if the server uses a non-standard
|
|
port (not 3306). Usually pt-heartbeat does the right thing and finds
|
|
the slaves, but you may give a preferred method and it will be used first.
|
|
If it doesn't find any slaves, the other methods will be tried.
|
|
|
|
=item --replace
|
|
|
|
Use C<REPLACE> instead of C<UPDATE> for --update.
|
|
|
|
When running in L<"--update"> mode, use C<REPLACE> instead of C<UPDATE> to set
|
|
the heartbeat table's timestamp. The C<REPLACE> statement is a MySQL extension
|
|
to SQL. This option is useful when you don't know whether the table contains
|
|
any rows or not. It must be used in conjunction with --update.
|
|
|
|
=item --run-time
|
|
|
|
type: time
|
|
|
|
Time to run before exiting.
|
|
|
|
=item --sentinel
|
|
|
|
type: string; default: /tmp/pt-heartbeat-sentinel
|
|
|
|
Exit if this file exists.
|
|
|
|
=item --slave-user
|
|
|
|
type: string
|
|
|
|
Sets the user to be used to connect to the slaves.
|
|
This parameter allows you to have a different user with fewer privileges on the
|
|
slaves, but that user must exist on all slaves.
|
|
|
|
=item --slave-password
|
|
|
|
type: string
|
|
|
|
Sets the password to be used to connect to the slaves.
|
|
It can be used with --slave-user, and the password for that user must be the same
|
|
on all slaves.
|
|
|
|
=item --set-vars
|
|
|
|
type: Array
|
|
|
|
Set the MySQL variables in this comma-separated list of C<variable=value> pairs.
|
|
|
|
By default, the tool sets:
|
|
|
|
=for comment ignore-pt-internal-value
|
|
MAGIC_set_vars
|
|
|
|
wait_timeout=10000
|
|
|
|
Variables specified on the command line override these defaults. For
|
|
example, specifying C<--set-vars wait_timeout=500> overrides the default value of C<10000>.
|
|
|
|
The tool prints a warning and continues if a variable cannot be set.
|
|
|
|
=item --skew
|
|
|
|
type: float; default: 0.5
|
|
|
|
How long to delay checks.
|
|
|
|
The default is to delay checks one half second. Since the update happens as
|
|
soon as possible after the beginning of the second on the master, this allows
|
|
one half second of replication delay before reporting that the slave lags the
|
|
master by one second. If your clocks are not completely accurate or there is
|
|
some other reason you'd like to delay the checks more or less, you can tweak this
|
|
value. Try setting the C<PTDEBUG> environment variable to see the effect this
|
|
has.
|
|
|
|
=item --socket
|
|
|
|
short form: -S; type: string
|
|
|
|
Socket file to use for connection.
|
|
|
|
=item --stop
|
|
|
|
Stop running instances by creating the sentinel file.
|
|
|
|
This should have the effect of stopping all running
|
|
instances which are watching the same sentinel file. If none of
|
|
L<"--update">, L<"--monitor"> or L<"--check"> is specified, C<pt-heartbeat>
|
|
will exit after creating the file. If one of these is specified,
|
|
C<pt-heartbeat> will wait the interval given by L<"--interval">, then remove
|
|
the file and continue working.
|
|
|
|
You might find this handy to stop cron jobs gracefully if necessary, or to
|
|
replace one running instance with another. For example, if you want to stop
|
|
and restart C<pt-heartbeat> every hour (just to make sure that it is restarted
|
|
every hour, in case of a server crash or some other problem), you could use a
|
|
C<crontab> line like this:
|
|
|
|
0 * * * * pt-heartbeat --update -D test --stop \
|
|
--sentinel /tmp/pt-heartbeat-hourly
|
|
|
|
The non-default L<"--sentinel"> will make sure the hourly C<cron> job stops
|
|
only instances previously started with the same options (that is, from the
|
|
same C<cron> job).
|
|
|
|
See also L<"--sentinel">.
|
|
|
|
=item --table
|
|
|
|
type: string; default: heartbeat
|
|
|
|
The table to use for the heartbeat.
|
|
|
|
Don't specify database.table; use L<"--database"> to specify the database.
|
|
|
|
See L<"--create-table">.
|
|
|
|
=item --update
|
|
|
|
Update a master's heartbeat.
|
|
|
|
=item --user
|
|
|
|
short form: -u; type: string
|
|
|
|
User for login if not current user.
|
|
|
|
=item --utc
|
|
|
|
Ignore system time zones and use only UTC. By default pt-heartbeat does
|
|
not check or adjust for different system or MySQL time zones which can
|
|
cause the tool to compute the lag incorrectly. Specifying this option is
|
|
a good idea because it ensures that the tool works correctly regardless of
|
|
time zones.
|
|
|
|
If used, this option must be used for all pt-heartbeat instances:
|
|
L<"--update">, L<"--monitor">, L<"--check">, etc. You should probably
|
|
set the option in a L<"--config"> file. Mixing this option with pt-heartbeat
|
|
instances not using this option will cause false-positive lag readings
|
|
due to different time zones (unless all your systems are set to use UTC,
|
|
in which case this option isn't required).
|
|
|
|
=item --version
|
|
|
|
Show version and exit.
|
|
|
|
=item --[no]version-check
|
|
|
|
default: yes
|
|
|
|
Check for the latest version of Percona Toolkit, MySQL, and other programs.
|
|
|
|
This is a standard "check for updates automatically" feature, with two
|
|
additional features. First, the tool checks the version of other programs
|
|
on the local system in addition to its own version. For example, it checks
|
|
the version of every MySQL server it connects to, Perl, and the Perl module
|
|
DBD::mysql. Second, it checks for and warns about versions with known
|
|
problems. For example, MySQL 5.5.25 had a critical bug and was re-released
|
|
as 5.5.25a.
|
|
|
|
Any updates or known problems are printed to STDOUT before the tool's normal
|
|
output. This feature should never interfere with the normal operation of the
|
|
tool.
|
|
|
|
For more information, visit L<https://www.percona.com/version-check>.
|
|
|
|
=back
|
|
|
|
=head1 DSN OPTIONS
|
|
|
|
These DSN options are used to create a DSN. Each option is given like
|
|
C<option=value>. The options are case-sensitive, so P and p are not the
|
|
same option. There cannot be whitespace before or after the C<=> and
|
|
if the value contains whitespace it must be quoted. DSN options are
|
|
comma-separated. See the L<percona-toolkit> manpage for full details.
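
For example, to check a slave that listens on a non-default port (the host and
user here are placeholders):

  pt-heartbeat -D test --check h=slave-server,P=3307,u=monitor_user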
|
|
|
|
=over
|
|
|
|
=item * A
|
|
|
|
dsn: charset; copy: yes
|
|
|
|
Default character set.
|
|
|
|
=item * D
|
|
|
|
dsn: database; copy: yes
|
|
|
|
Default database.
|
|
|
|
=item * F
|
|
|
|
dsn: mysql_read_default_file; copy: yes
|
|
|
|
Only read default options from the given file
|
|
|
|
=item * h
|
|
|
|
dsn: host; copy: yes
|
|
|
|
Connect to host.
|
|
|
|
=item * p
|
|
|
|
dsn: password; copy: yes
|
|
|
|
Password to use when connecting.
|
|
If password contains commas they must be escaped with a backslash: "exam\,ple"
|
|
|
|
=item * P
|
|
|
|
dsn: port; copy: yes
|
|
|
|
Port number to use for connection.
|
|
|
|
=item * S
|
|
|
|
dsn: mysql_socket; copy: yes
|
|
|
|
Socket file to use for connection.
|
|
|
|
=item * u
|
|
|
|
dsn: user; copy: yes
|
|
|
|
User for login if not current user.
|
|
|
|
=back
|
|
|
|
=head1 ENVIRONMENT
|
|
|
|
The environment variable C<PTDEBUG> enables verbose debugging output to STDERR.
|
|
To enable debugging and capture all output to a file, run the tool like:
|
|
|
|
PTDEBUG=1 pt-heartbeat ... > FILE 2>&1
|
|
|
|
Be careful: debugging output is voluminous and can generate several megabytes
|
|
of output.
|
|
|
|
=head1 SYSTEM REQUIREMENTS
|
|
|
|
You need Perl, DBI, DBD::mysql, and some core packages that ought to be
|
|
installed in any reasonably new version of Perl.
|
|
|
|
=head1 BUGS
|
|
|
|
For a list of known bugs, see L<http://www.percona.com/bugs/pt-heartbeat>.
|
|
|
|
Please report bugs at L<https://bugs.launchpad.net/percona-toolkit>.
|
|
Include the following information in your bug report:
|
|
|
|
=over
|
|
|
|
=item * Complete command-line used to run the tool
|
|
|
|
=item * Tool L<"--version">
|
|
|
|
=item * MySQL version of all servers involved
|
|
|
|
=item * Output from the tool including STDERR
|
|
|
|
=item * Input files (log/dump/config files, etc.)
|
|
|
|
=back
|
|
|
|
If possible, include debugging output by running the tool with C<PTDEBUG>;
|
|
see L<"ENVIRONMENT">.
|
|
|
|
=head1 DOWNLOADING
|
|
|
|
Visit L<http://www.percona.com/software/percona-toolkit/> to download the
|
|
latest release of Percona Toolkit. Or, get the latest release from the
|
|
command line:
|
|
|
|
wget percona.com/get/percona-toolkit.tar.gz
|
|
|
|
wget percona.com/get/percona-toolkit.rpm
|
|
|
|
wget percona.com/get/percona-toolkit.deb
|
|
|
|
You can also get individual tools from the latest release:
|
|
|
|
wget percona.com/get/TOOL
|
|
|
|
Replace C<TOOL> with the name of any tool.
|
|
|
|
=head1 AUTHORS
|
|
|
|
Proven Scaling LLC, SixApart Ltd, Baron Schwartz, and Daniel Nichter
|
|
|
|
=head1 ABOUT PERCONA TOOLKIT
|
|
|
|
This tool is part of Percona Toolkit, a collection of advanced command-line
|
|
tools for MySQL developed by Percona. Percona Toolkit was forked from two
|
|
projects in June, 2011: Maatkit and Aspersa. Those projects were created by
|
|
Baron Schwartz and primarily developed by him and Daniel Nichter. Visit
|
|
L<http://www.percona.com/software/> to learn about other free, open-source
|
|
software from Percona.
|
|
|
|
=head1 COPYRIGHT, LICENSE, AND WARRANTY
|
|
|
|
This program is copyright 2007-2017 Percona LLC and/or its affiliates,
|
|
2006 Proven Scaling LLC and Six Apart Ltd.
|
|
|
|
Feedback and improvements are welcome.
|
|
|
|
THIS PROGRAM IS PROVIDED "AS IS" AND WITHOUT ANY EXPRESS OR IMPLIED
|
|
WARRANTIES, INCLUDING, WITHOUT LIMITATION, THE IMPLIED WARRANTIES OF
|
|
MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
|
|
|
|
This program is free software; you can redistribute it and/or modify it under
|
|
the terms of the GNU General Public License as published by the Free Software
|
|
Foundation, version 2; OR the Perl Artistic License. On UNIX and similar
|
|
systems, you can issue `man perlgpl' or `man perlartistic' to read these
|
|
licenses.
|
|
|
|
You should have received a copy of the GNU General Public License along with
|
|
this program; if not, write to the Free Software Foundation, Inc., 59 Temple
|
|
Place, Suite 330, Boston, MA 02111-1307 USA.
|
|
|
|
=head1 VERSION
|
|
|
|
pt-heartbeat 3.0.5
|
|
|
|
=cut
|