Rewriting pt-online-schema-change 2.1.1. Work in progress (this code doesn't work yet).

Daniel Nichter
2012-03-20 13:34:16 -06:00
parent f4c978c957
commit a17bdd65d3
5 changed files with 2720 additions and 2037 deletions

File diff suppressed because it is too large

View File

@@ -62,39 +62,24 @@ sub new {
    }
    my ($cxn, $tbl, $chunk_size, $o, $q) = @args{@required_args};
+   # Die unless table can be nibbled, else return row estimate, nibble index,
+   # and if table can be nibbled in one chunk.
+   my $nibble_params = can_nibble(%args);
    my $where = $o->get('where');
-   my ($row_est, $mysql_index) = get_row_estimate(%args, where => $where);
-   my $one_nibble = !defined $args{one_nibble} || $args{one_nibble}
-                  ? $row_est <= $chunk_size * $o->get('chunk-size-limit')
-                  : 0;
-   PTDEBUG && _d('One nibble:', $one_nibble ? 'yes' : 'no');
-   if ( $args{resume}
-        && !defined $args{resume}->{lower_boundary}
-        && !defined $args{resume}->{upper_boundary} ) {
-      PTDEBUG && _d('Resuming from one nibble table');
-      $one_nibble = 1;
-   }
-   # Get an index to nibble by. We'll order rows by the index's columns.
-   my $index = _find_best_index(%args, mysql_index => $mysql_index);
-   if ( !$index && !$one_nibble ) {
-      die "There is no good index and the table is oversized.";
-   }
    my $tbl_struct = $tbl->{tbl_struct};
    my $ignore_col = $o->get('ignore-columns') || {};
    my $all_cols = $o->get('columns') || $tbl_struct->{cols};
    my @cols = grep { !$ignore_col->{$_} } @$all_cols;
    my $self;
-   if ( $one_nibble ) {
+   if ( $nibble_params->{one_nibble} ) {
       # If the chunk size is >= number of rows in table, then we don't
       # need to chunk; we can just select all rows, in order, at once.
       my $nibble_sql
          = ($args{dml} ? "$args{dml} " : "SELECT ")
          . ($args{select} ? $args{select}
                           : join(', ', map { $q->quote($_) } @cols))
-         . " FROM " . $q->quote(@{$tbl}{qw(db tbl)})
+         . " FROM $tbl->{name}"
          . ($where ? " WHERE $where" : '')
          . " /*checksum table*/";
       PTDEBUG && _d('One nibble statement:', $nibble_sql);
@@ -103,7 +88,7 @@ sub new {
= "EXPLAIN SELECT " = "EXPLAIN SELECT "
. ($args{select} ? $args{select} . ($args{select} ? $args{select}
: join(', ', map { $q->quote($_) } @cols)) : join(', ', map { $q->quote($_) } @cols))
. " FROM " . $q->quote(@{$tbl}{qw(db tbl)}) . " FROM $tbl->{name}"
. ($where ? " WHERE $where" : '') . ($where ? " WHERE $where" : '')
. " /*explain checksum table*/"; . " /*explain checksum table*/";
PTDEBUG && _d('Explain one nibble statement:', $explain_nibble_sql); PTDEBUG && _d('Explain one nibble statement:', $explain_nibble_sql);
@@ -117,6 +102,7 @@ sub new {
       };
    }
    else {
+      my $index = $nibble_params->{index}; # brevity
       my $index_cols = $tbl->{tbl_struct}->{keys}->{$index}->{cols};
       # Figure out how to nibble the table with the index.
@@ -132,7 +118,7 @@ sub new {
       # Make SQL statements, prepared on first call to next(). FROM and
       # ORDER BY are the same for all statements. FORCE INDEX and ORDER BY
       # are needed to ensure deterministic nibbling.
-      my $from = $q->quote(@{$tbl}{qw(db tbl)}) . " FORCE INDEX(`$index`)";
+      my $from = "$tbl->{name} FORCE INDEX(`$index`)";
       my $order_by = join(', ', map {$q->quote($_)} @{$index_cols});
       # The real first row in the table. Usually we start nibbling from
@@ -246,7 +232,7 @@ sub new {
       };
    }
-   $self->{row_est} = $row_est;
+   $self->{row_est} = $nibble_params->{row_est};
    $self->{nibbleno} = 0;
    $self->{have_rows} = 0;
    $self->{rowno} = 0;
@@ -418,6 +404,52 @@ sub row_estimate {
    return $self->{row_est};
 }
 
+sub can_nibble {
+   my (%args) = @_;
+   my @required_args = qw(Cxn tbl chunk_size OptionParser TableParser);
+   foreach my $arg ( @required_args ) {
+      die "I need a $arg argument" unless $args{$arg};
+   }
+   my ($cxn, $tbl, $chunk_size, $o) = @args{@required_args};
+
+   # About how many rows are there?
+   my ($row_est, $mysql_index) = get_row_estimate(
+      Cxn   => $cxn,
+      tbl   => $tbl,
+      where => $o->get('where'),
+   );
+
+   # Can all those rows be nibbled in one chunk? If one_nibble is defined,
+   # then do as it says; else, look at the chunk size limit.
+   my $one_nibble = !defined $args{one_nibble} || $args{one_nibble}
+                  ? $row_est <= $chunk_size * $o->get('chunk-size-limit')
+                  : 0;
+   PTDEBUG && _d('One nibble:', $one_nibble ? 'yes' : 'no');
+
+   # Special case: we're resuming and there are no boundaries, so the table
+   # being resumed was originally nibbled in one chunk, so do the same again.
+   if ( $args{resume}
+        && !defined $args{resume}->{lower_boundary}
+        && !defined $args{resume}->{upper_boundary} ) {
+      PTDEBUG && _d('Resuming from one nibble table');
+      $one_nibble = 1;
+   }
+
+   # Get an index to nibble by. We'll order rows by the index's columns.
+   my $index = _find_best_index(%args, mysql_index => $mysql_index);
+   if ( !$index && !$one_nibble ) {
+      die "There is no good index and the table is oversized.";
+   }
+
+   # If this point is reached, the table can be nibbled; else we would have
+   # died earlier. Return some values about nibbling the table.
+   return {
+      row_est    => $row_est,     # nibble about this many rows
+      index      => $index,       # using this index
+      one_nibble => $one_nibble,  # if the table fits in one nibble/chunk
+   };
+}
+
 sub _find_best_index {
    my (%args) = @_;
    my @required_args = qw(Cxn tbl TableParser);
@@ -494,10 +526,10 @@ sub _find_best_index {
 sub _get_index_cardinality {
    my (%args) = @_;
-   my @required_args = qw(Cxn tbl index Quoter);
-   my ($cxn, $tbl, $index, $q) = @args{@required_args};
-   my $sql = "SHOW INDEXES FROM " . $q->quote(@{$tbl}{qw(db tbl)})
+   my @required_args = qw(Cxn tbl index);
+   my ($cxn, $tbl, $index) = @args{@required_args};
+   my $sql = "SHOW INDEXES FROM $tbl->{name} "
            . "WHERE Key_name = '$index'";
    PTDEBUG && _d($sql);
    my $cardinality = 1;
@@ -511,22 +543,25 @@ sub _get_index_cardinality {
 sub get_row_estimate {
    my (%args) = @_;
-   my @required_args = qw(Cxn tbl OptionParser TableParser Quoter);
-   my ($cxn, $tbl, $o, $tp, $q) = @args{@required_args};
-   if ( $args{where} ) {
-      PTDEBUG && _d('WHERE clause, using explain plan for row estimate');
-      my $table = $q->quote(@{$tbl}{qw(db tbl)});
-      my $sql = "EXPLAIN SELECT * FROM $table WHERE $args{where}";
+   my @required_args = qw(Cxn tbl);
+   foreach my $arg ( @required_args ) {
+      die "I need a $arg argument" unless $args{$arg};
+   }
+   my ($cxn, $tbl) = @args{@required_args};
+   if ( !$args{where} && exists $tbl->{tbl_status} ) {
+      PTDEBUG && _d('Using table status for row estimate');
+      return $tbl->{tbl_status}->{rows} || 0;
+   }
+   else {
+      PTDEBUG && _d('Use EXPLAIN for row estimate');
+      my $sql = "EXPLAIN SELECT * FROM $tbl->{name} "
+              . "WHERE " . ($args{where} || '1=1');
       PTDEBUG && _d($sql);
       my $expl = $cxn->dbh()->selectrow_hashref($sql);
       PTDEBUG && _d(Dumper($expl));
       return ($expl->{rows} || 0), $expl->{key};
    }
-   else {
-      PTDEBUG && _d('No WHERE clause, using table status for row estimate');
-      return $tbl->{tbl_status}->{rows} || 0;
-   }
 }
 
 sub _prepare_sths {
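
For orientation, here is a minimal, hypothetical sketch of how a caller might use the refactored constructor shown above (the nibble iterator module in Percona Toolkit's lib/). It is not part of this commit: the module and argument names come from the hunks above, while the example table, chunk size, and the surrounding objects ($cxn, $o, $q, $tp, $tbl_struct) are assumed.

# Hypothetical caller, for illustration only -- not from this commit.
my $tbl = {
   db         => 'sakila',                      # assumed example table
   tbl        => 'film',
   name       => $q->quote('sakila', 'film'),   # `sakila`.`film`, as SchemaIterator now provides
   tbl_struct => $tbl_struct,                   # parsed elsewhere by TableParser
};
my $nibble_iter = NibbleIterator->new(
   Cxn          => $cxn,
   tbl          => $tbl,
   chunk_size   => 1000,                        # assumed value
   OptionParser => $o,
   Quoter       => $q,
   TableParser  => $tp,
);
# new() calls can_nibble() internally; if the row estimate fits within
# chunk_size * chunk-size-limit, the whole table becomes a single nibble.
while ( my $row = $nibble_iter->next() ) {      # sketch: next() yields rows one at a time
   # process one row of the current nibble
}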

View File

@@ -1,142 +0,0 @@
# This program is copyright 2011 Percona Inc.
# Feedback and improvements are welcome.
#
# THIS PROGRAM IS PROVIDED "AS IS" AND WITHOUT ANY EXPRESS OR IMPLIED
# WARRANTIES, INCLUDING, WITHOUT LIMITATION, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation, version 2; OR the Perl Artistic License. On UNIX and similar
# systems, you can issue `man perlgpl' or `man perlartistic' to read these
# licenses.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 59 Temple
# Place, Suite 330, Boston, MA 02111-1307 USA.
# ###########################################################################
# OSCCaptureSync package
# ###########################################################################
{
# Package: OSCCaptureSync
# OSCCaptureSync implements the capture and sync phases of an online schema
# change.
package OSCCaptureSync;
use strict;
use warnings FATAL => 'all';
use English qw(-no_match_vars);
use constant PTDEBUG => $ENV{PTDEBUG} || 0;
# Sub: new
#
# Parameters:
# %args - Arguments
#
# Returns:
# OSCCaptureSync object
sub new {
my ( $class, %args ) = @_;
my @required_args = qw(Quoter);
foreach my $arg ( @required_args ) {
die "I need a $arg argument" unless $args{$arg};
}
my $self = {
Quoter => $args{Quoter},
};
return bless $self, $class;
}
sub capture {
my ( $self, %args ) = @_;
my @required_args = qw(msg dbh db tbl tmp_tbl columns chunk_column);
foreach my $arg ( @required_args ) {
die "I need a $arg argument" unless $args{$arg};
}
my ($msg, $dbh) = @args{@required_args};
my @triggers = $self->_make_triggers(%args);
foreach my $sql ( @triggers ) {
$msg->($sql);
$dbh->do($sql) unless $args{print};
}
return;
}
sub _make_triggers {
my ( $self, %args ) = @_;
my @required_args = qw(db tbl tmp_tbl chunk_column columns);
foreach my $arg ( @required_args ) {
die "I need a $arg argument" unless $args{$arg};
}
my ($db, $tbl, $tmp_tbl, $chunk_column) = @args{@required_args};
my $q = $self->{Quoter};
$chunk_column = $q->quote($chunk_column);
my $old_table = $q->quote($db, $tbl);
my $new_table = $q->quote($db, $tmp_tbl);
my $new_values = join(', ', map { "NEW.".$q->quote($_) } @{$args{columns}});
my $columns = join(', ', map { $q->quote($_) } @{$args{columns}});
my $delete_trigger = "CREATE TRIGGER mk_osc_del AFTER DELETE ON $old_table "
. "FOR EACH ROW "
. "DELETE IGNORE FROM $new_table "
. "WHERE $new_table.$chunk_column = OLD.$chunk_column";
my $insert_trigger = "CREATE TRIGGER mk_osc_ins AFTER INSERT ON $old_table "
. "FOR EACH ROW "
. "REPLACE INTO $new_table ($columns) "
. "VALUES($new_values)";
my $update_trigger = "CREATE TRIGGER mk_osc_upd AFTER UPDATE ON $old_table "
. "FOR EACH ROW "
. "REPLACE INTO $new_table ($columns) "
. "VALUES ($new_values)";
return $delete_trigger, $update_trigger, $insert_trigger;
}
sub sync {
my ( $self, %args ) = @_;
my @required_args = qw();
foreach my $arg ( @required_args ) {
die "I need a $arg argument" unless $args{$arg};
}
return;
}
sub cleanup {
my ( $self, %args ) = @_;
my @required_args = qw(dbh db msg);
foreach my $arg ( @required_args ) {
die "I need a $arg argument" unless $args{$arg};
}
my ($dbh, $db, $msg) = @args{@required_args};
my $q = $self->{Quoter};
foreach my $trigger ( qw(del ins upd) ) {
my $sql = "DROP TRIGGER IF EXISTS " . $q->quote($db, "mk_osc_$trigger");
$msg->($sql);
$dbh->do($sql) unless $args{print};
}
return;
}
sub _d {
my ($package, undef, $line) = caller 0;
@_ = map { (my $temp = $_) =~ s/\n/\n# /g; $temp; }
map { defined $_ ? $_ : 'undef' }
@_;
print STDERR "# $package:$line $PID ", join(' ', @_), "\n";
}
1;
}
# ###########################################################################
# End OSCCaptureSync package
# ###########################################################################
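
For reference, the trigger statements that the removed _make_triggers() built look like this when given the osc.t fixture used by the (also removed) test file below: db osc, table t, temporary table __new_t, columns id and c, chunk column id. They are reconstructed from the code above for illustration; they are not text from the commit.

CREATE TRIGGER mk_osc_del AFTER DELETE ON `osc`.`t` FOR EACH ROW DELETE IGNORE FROM `osc`.`__new_t` WHERE `osc`.`__new_t`.`id` = OLD.`id`
CREATE TRIGGER mk_osc_ins AFTER INSERT ON `osc`.`t` FOR EACH ROW REPLACE INTO `osc`.`__new_t` (`id`, `c`) VALUES(NEW.`id`, NEW.`c`)
CREATE TRIGGER mk_osc_upd AFTER UPDATE ON `osc`.`t` FOR EACH ROW REPLACE INTO `osc`.`__new_t` (`id`, `c`) VALUES (NEW.`id`, NEW.`c`)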

View File

@@ -241,6 +241,7 @@ sub next {
 sub _iterate_files {
    my ( $self ) = @_;
+   my $q = $self->{Quoter};
    if ( !$self->{fh} ) {
       my ($fh, $file) = $self->{file_itr}->();
@@ -302,6 +303,7 @@ sub _iterate_files {
       return {
          db   => $self->{db},
          tbl  => $tbl,
+         name => $q->quote($self->{db}, $tbl),
          ddl  => $ddl,
       };
    }
@@ -385,6 +387,7 @@ sub _iterate_dbh {
       return {
          db         => $self->{db},
          tbl        => $tbl,
+         name       => $q->quote($self->{db}, $tbl),
          ddl        => $ddl,
          tbl_status => $tbl_status,
       };
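
The name key added in these two hunks is the backtick-quoted, fully-qualified table name, which the NibbleIterator changes above interpolate directly into their SQL instead of calling Quoter at each call site. A one-line illustration with assumed values:

$q->quote('osc', 't');   # returns `osc`.`t`, stored as $tbl->{name} and interpolated as "... FROM $tbl->{name}"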

View File

@@ -1,131 +0,0 @@
#!/usr/bin/perl
BEGIN {
die "The PERCONA_TOOLKIT_BRANCH environment variable is not set.\n"
unless $ENV{PERCONA_TOOLKIT_BRANCH} && -d $ENV{PERCONA_TOOLKIT_BRANCH};
unshift @INC, "$ENV{PERCONA_TOOLKIT_BRANCH}/lib";
};
use strict;
use warnings FATAL => 'all';
use English qw(-no_match_vars);
use Test::More;
use DSNParser;
use Sandbox;
use PerconaTest;
use Quoter;
use OSCCaptureSync;
use Data::Dumper;
$Data::Dumper::Indent = 1;
$Data::Dumper::Sortkeys = 1;
$Data::Dumper::Quotekeys = 0;
my $dp = new DSNParser(opts=>$dsn_opts);
my $sb = new Sandbox(basedir => '/tmp', DSNParser => $dp);
my $dbh = $sb->get_dbh_for('master');
if ( !$dbh ) {
plan skip_all => 'Cannot connect to MySQL';
}
else {
plan tests => 10;
}
my $q = new Quoter();
my $osc = new OSCCaptureSync(Quoter => $q);
my $msg = sub { print "$_[0]\n"; };
my $output;
sub test_table {
my (%args) = @_;
my ($tbl, $col, $expect) = @args{qw(tbl col expect)};
$sb->load_file("master", "t/lib/samples/osc/$tbl");
PerconaTest::wait_for_table($dbh, "osc.t", "id=5");
$dbh->do("USE osc");
ok(
no_diff(
sub {
$osc->capture(
dbh => $dbh,
db => 'osc',
tbl => 't',
tmp_tbl => '__new_t',
columns => ['id', $col],
chunk_column => 'id',
msg => $msg,
)
},
"t/lib/samples/osc/$expect",
stderr => 1,
),
"$tbl: SQL statments to create triggers"
);
$dbh->do("insert into t values (6, 'f')");
$dbh->do("update t set `$col`='z' where id=1");
$dbh->do("delete from t where id=3");
my $rows = $dbh->selectall_arrayref("select id, `$col` from __new_t order by id");
is_deeply(
$rows,
[
[1, 'z'], # update t set c="z" where id=1
[6, 'f'], # insert into t values (6, "f")
],
"$tbl: Triggers work"
) or print Dumper($rows);
output(sub {
$osc->cleanup(
dbh => $dbh,
db => 'osc',
msg => $msg,
);
});
$rows = $dbh->selectall_arrayref("show triggers from `osc` like 't'");
is_deeply(
$rows,
[],
"$tbl: Cleanup removes the triggers"
);
}
test_table(
tbl => "tbl001.sql",
col => "c",
expect => "capsync001.txt",
);
test_table(
tbl => "tbl002.sql",
col => "default",
expect => "capsync002.txt",
);
test_table(
tbl => "tbl003.sql",
col => "space col",
expect => "capsync003.txt",
);
# #############################################################################
# Done.
# #############################################################################
{
local *STDERR;
open STDERR, '>', \$output;
$osc->_d('Complete test coverage');
}
like(
$output,
qr/Complete test coverage/,
'_d() works'
);
$sb->wipe_clean($dbh);
exit;
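
To make the removed test's expectation concrete, here is how the three DML statements it runs map onto the triggers created by capture() (reconstructed for illustration, not text from the commit):

insert into t values (6, 'f');     -- insert trigger REPLACEs (6, 'f') into __new_t
update t set `c`='z' where id=1;   -- update trigger REPLACEs (1, 'z') into __new_t
delete from t where id=3;          -- delete trigger deletes id=3 from __new_t; that row was never copied there, so it is a no-op

__new_t therefore ends up with rows (1, 'z') and (6, 'f'), which is exactly what the is_deeply() check above asserts.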