Update replication_filters.t and resume.t.

This commit is contained in:
Daniel Nichter
2011-10-15 15:03:25 -06:00
parent 4ad88ddd93
commit 23e9b91d78
7 changed files with 492 additions and 124 deletions

View File

@@ -5540,22 +5540,6 @@ sub main {
return $exit_status;
}
# #####################################################################
# Check that the replication table exists, or possibly create it.
# #####################################################################
eval {
check_repl_table(
dbh => $master_dbh,
repl_table => $repl_table,
OptionParser => $o,
TableParser => $tp,
Quoter => $q,
);
};
if ( $EVAL_ERROR ) {
die ts($EVAL_ERROR);
}
# #####################################################################
# Check for replication filters.
# #####################################################################
@@ -5588,6 +5572,22 @@ sub main {
}
}
# #####################################################################
# Check that the replication table exists, or possibly create it.
# #####################################################################
eval {
check_repl_table(
dbh => $master_dbh,
repl_table => $repl_table,
OptionParser => $o,
TableParser => $tp,
Quoter => $q,
);
};
if ( $EVAL_ERROR ) {
die ts($EVAL_ERROR);
}
# #####################################################################
# Make a ReplicaLagWaiter to help wait for slaves after each chunk.
# #####################################################################
@@ -5729,8 +5729,8 @@ sub main {
MKDEBUG && _d('Resuming from', $last_chunk->{chunk},
'at', $last_chunk->{ts});
if ( !$o->get('quiet') ) {
print ts("Resuming from $tbl->{db}.$tbl->{tbl} at chunk "
. "$last_chunk->{chunk}, timestamp $last_chunk->{ts}\n");
print "Resuming from $tbl->{db}.$tbl->{tbl} chunk "
. "$last_chunk->{chunk}, timestamp $last_chunk->{ts}\n";
}
}
@@ -5883,7 +5883,7 @@ sub main {
# and master_crc.
$update_sth->execute(
# UPDATE repl_table SET
sprintf('%.3f', $tbl->{nibble_time}), # chunk_time
sprintf('%.6f', $tbl->{nibble_time}), # chunk_time
$crc, # master_crc
$cnt, # master_cnt
# WHERE
@@ -6225,8 +6225,8 @@ sub exec_nibble {
$tbl->{tbl}, # tbl
$chunk, # chunk (number)
$chunk_index, # chunk_index
$lb_quoted, # lower_boundary
$ub_quoted, # upper_boundary
$lb_quoted || undef, # lower_boundary
$ub_quoted || undef, # upper_boundary
# this_cnt, this_crc WHERE
@{$boundary->{lower}}, # upper boundary values
@{$boundary->{upper}}, # lower boundary values
@@ -6627,6 +6627,7 @@ sub last_chunk {
. "WHERE db='$last_chunk->{db}' AND tbl='$last_chunk->{tbl}'";
MKDEBUG && _d($sql);
my ($max_chunk) = $dbh->selectrow_array($sql);
MKDEBUG && _d('Max chunk:', $max_chunk);
if ( ($last_chunk->{chunk} || 0) ne ($max_chunk || 0) ) {
warn ts("Not resuming from max chunk ("
. ($last_chunk->{chunk} || 0)
@@ -6640,17 +6641,34 @@ sub last_chunk {
sub next_lower_boundary {
my (%args) = @_;
my @required_args = qw(dbh tbl last_chunk NibbleIterator Quoter);
my @required_args = qw(Cxn tbl last_chunk NibbleIterator Quoter);
foreach my $arg ( @required_args ) {
die "I need a $arg argument" unless $args{$arg};
}
my ($dbh, $tbl, $last_chunk, $nibble_iter, $q) = @args{@required_args};
my ($cxn, $tbl, $last_chunk, $nibble_iter, $q) = @args{@required_args};
if ( $nibble_iter->nibble_index() ne ($last_chunk->{chunk_index} || '') ) {
# If the last chunk (which should be the max chunk) is 1 and there
# was no chunk index, then the table was checksummed in a single chunk.
if ( $last_chunk->{chunk} == 1
&& !$last_chunk->{chunk_index}
&& !$nibble_iter->nibble_index() ) {
return;
}
my $chunk_index = $nibble_iter->nibble_index() || '';
if ( ($last_chunk->{chunk_index} || '')
ne ($nibble_iter->nibble_index() || '') ) {
warn ts("Cannot resume from table $tbl->{db}.$tbl->{tbl} chunk "
. "$last_chunk->{chunk} because the chunk index are different: "
. "$last_chunk->{chunk_index} was used originally but "
. $nibble_iter->nibble_index() . " is used now.\n");
. "$last_chunk->{chunk} because the chunk indexes are different: "
. ($last_chunk->{chunk_index} ? $last_chunk->{chunk_index}
: "no index")
. " was used originally but "
. ($nibble_iter->nibble_index() ? $nibble_iter->nibble_index()
: "no index")
. " is used now. If the table has not changed significantly, "
. "this may be caused by running the tool with different command "
. "line options. This table will be skipped and checksumming "
. "will resume with the next table.\n");
$tbl->{checksum_results}->{errors}++;
return;
}
@@ -6664,9 +6682,9 @@ sub next_lower_boundary {
. ($sql->{where} ? " AND ($sql->{where})" : '')
. " ORDER BY $sql->{order_by}"
. " LIMIT 1"
. " /*resume next lower boundary*/";
. " /*resume next chunk boundary*/";
MKDEBUG && _d($next_lb_sql);
my $sth = $dbh->prepare($next_lb_sql);
my $sth = $cxn->dbh()->prepare($next_lb_sql);
my @ub = split ',', $last_chunk->{upper_boundary};
MKDEBUG && _d($sth->{Statement}, 'params:', @ub);
@@ -7269,14 +7287,15 @@ L<"--create-replicate-table"> (MAGIC_create_replicate):
chunk int NOT NULL,
chunk_time float NULL,
chunk_index varchar(200) NULL,
lower_boundary text NOT NULL,
upper_boundary text NOT NULL,
lower_boundary text NULL,
upper_boundary text NULL,
this_crc char(40) NOT NULL,
this_cnt int NOT NULL,
master_crc char(40) NULL,
master_cnt int NULL,
ts timestamp NOT NULL,
PRIMARY KEY (db, tbl, chunk)
PRIMARY KEY (db, tbl, chunk),
INDEX (ts)
) ENGINE=InnoDB;
Be sure to choose an appropriate storage engine for the checksum table. If you

View File

@@ -593,6 +593,24 @@ sub count_checksum_results {
return $total;
}
sub normalize_checksum_results {
   # Normalize pt-table-checksum report output so tests can compare it
   # across runs: header lines (starting with an uppercase letter) are
   # kept verbatim, and result rows (starting with a digit or a space)
   # keep only fields 2-6 and 8 (ERRORS, DIFFS, ROWS, CHUNKS, SKIPPED,
   # TABLE), dropping the run-dependent timestamp (field 1) and TIME
   # (field 7) columns.  Returns the normalized text.
   #
   # This used to round-trip the output through a fixed /tmp file with
   # `printf $fh $output` (which treated the data as a printf FORMAT
   # string -- any '%' in the output would corrupt it) and a shelled-out
   # cat|awk pipeline.  It is now done in pure Perl, preserving the awk
   # program's semantics.
   my ($output) = @_;
   my @normalized;
   foreach my $line ( split /\n/, $output ) {
      if ( $line =~ m/^[0-9 ]/ ) {
         # awk-style whitespace split; awk fields $2..$6 and $8 are
         # Perl indexes 1..5 and 7.  Missing fields become empty
         # strings, just as awk prints them.
         my @fields = split ' ', $line;
         push @normalized,
            join(' ', map { defined $_ ? $_ : '' } @fields[1..5], $fields[7]);
      }
      elsif ( $line =~ m/^[A-Z]/ ) {
         push @normalized, $line;  # header line, e.g. "ERRORS DIFFS ..."
      }
      # Lines matching neither pattern are dropped, as in the awk filter.
   }
   return @normalized ? join("\n", @normalized) . "\n" : '';
}
sub get_master_binlog_pos {
   # Return the master's current binary log position (the "Position"
   # column of SHOW MASTER STATUS, lowercased by the FetchHashKeyName
   # setting of the test dbh).  Tests use this to detect whether a tool
   # run wrote anything to the binlog.
   my ($dbh) = @_;
   my $master_status = $dbh->selectrow_hashref("SHOW MASTER STATUS");
   return $master_status->{position};
}
1;
}
# ###########################################################################

View File

@@ -38,7 +38,6 @@ else {
# so we need to specify --lock-wait-timeout=3 else the tool will die.
my $master_dsn = 'h=127.1,P=12345,u=msandbox,p=msandbox';
my @args = ($master_dsn, qw(--lock-wait-timeout 3));
my $row;
my $output;
my $exit_status;

View File

@@ -14,67 +14,86 @@ use Test::More;
use PerconaTest;
use Sandbox;
require "$trunk/bin/pt-table-checksum";
my $vp = new VersionParser();
my $dp = new DSNParser(opts=>$dsn_opts);
my $sb = new Sandbox(basedir => '/tmp', DSNParser => $dp);
my $dbh = $sb->get_dbh_for('master');
my $master_dbh = $sb->get_dbh_for('master');
my $slave1_dbh = $sb->get_dbh_for('slave1');
my $slave2_dbh = $sb->get_dbh_for('slave2');
if ( !$dbh ) {
if ( !$master_dbh ) {
plan skip_all => 'Cannot connect to sandbox master';
}
elsif ( !$slave1_dbh ) {
plan skip_all => 'Cannot connect to sandbox slave1';
}
elsif ( !$slave2_dbh ) {
plan skip_all => 'Cannot connect to sandbox slave2';
}
else {
plan tests => 3;
plan tests => 4;
}
# The sandbox servers run with lock_wait_timeout=3 and it's not dynamic
# so we need to specify --lock-wait-timeout=3 else the tool will die.
my $master_dsn = 'h=127.1,P=12345,u=msandbox,p=msandbox';
my @args = ($master_dsn, qw(--lock-wait-timeout 3));
my $output;
my $cnf='/tmp/12345/my.sandbox.cnf';
my @args = ('-F', $cnf, 'h=127.1', qw(--explain --replicate test.checksums));
my $row;
$sb->create_dbs($dbh, [qw(test)]);
# Add a replication filter to the slaves.
for my $port ( qw(12346 12347) ) {
diag(`/tmp/$port/stop >/dev/null`);
diag(`cp /tmp/$port/my.sandbox.cnf /tmp/$port/orig.cnf`);
diag(`echo "replicate-ignore-db=foo" >> /tmp/$port/my.sandbox.cnf`);
diag(`/tmp/$port/start >/dev/null`);
}
# Add a replication filter to the slave.
diag(`/tmp/12346/stop >/dev/null`);
diag(`cp /tmp/12346/my.sandbox.cnf /tmp/12346/orig.cnf`);
diag(`echo "replicate-ignore-db=foo" >> /tmp/12346/my.sandbox.cnf`);
diag(`/tmp/12346/start >/dev/null`);
my $pos = PerconaTest::get_master_binlog_pos($master_dbh);
$output = output(
sub { pt_table_checksum::main(@args, '--create-replicate-table') },
sub { pt_table_checksum::main(@args, qw(-t sakila.country)) },
stderr => 1,
);
unlike(
$output,
qr/mysql\s+user/,
is(
PerconaTest::get_master_binlog_pos($master_dbh),
$pos,
"Did not checksum with replication filter"
);
like(
$output,
qr/replication filters are set/,
"Warns about replication fitlers"
qr/h=127.0.0.1,P=12346/,
"Warns about replication fitler on slave1"
);
# #############################################################################
# Issue 1060: mk-table-checksum tries to check replicate sanity options
# when no --replicate
# #############################################################################
$output = output(
sub { pt_table_checksum::main('h=127.1,P=12346,u=msandbox,p=msandbox',
qw(-d sakila -t film --schema --no-check-replication-filters)) },
);
like(
$output,
qr/sakila.+?film/,
"--schema with replication filters (issue 1060)"
qr/h=127.0.0.1,P=12347/,
"Warns about replication fitler on slave2"
);
# Disable the check.
$output = output(
sub { pt_table_checksum::main(@args, qw(-t sakila.country),
qw(--no-check-replication-filters)) },
stderr => 1,
);
like(
$output,
qr/sakila\.country$/,
"--no-check-replication-filters"
);
# #############################################################################
# Done.
# #############################################################################
# Remove the replication filter from the slave.
diag(`/tmp/12346/stop >/dev/null`);
diag(`mv /tmp/12346/orig.cnf /tmp/12346/my.sandbox.cnf`);
diag(`/tmp/12346/start >/dev/null`);
$sb->wipe_clean($dbh);
for my $port ( qw(12346 12347) ) {
diag(`/tmp/$port/stop >/dev/null`);
diag(`mv /tmp/$port/orig.cnf /tmp/$port/my.sandbox.cnf`);
diag(`/tmp/$port/start >/dev/null`);
}
$sb->wipe_clean($master_dbh);
exit;

View File

@@ -18,89 +18,329 @@ require "$trunk/bin/pt-table-checksum";
my $dp = new DSNParser(opts=>$dsn_opts);
my $sb = new Sandbox(basedir => '/tmp', DSNParser => $dp);
my $master_dbh = $sb->get_dbh_for('master');
my $slave_dbh = $sb->get_dbh_for('slave1');
my $slave1_dbh = $sb->get_dbh_for('slave1');
if ( !$master_dbh ) {
plan skip_all => 'Cannot connect to sandbox master';
}
elsif ( !$slave_dbh ) {
elsif ( !$slave1_dbh ) {
plan skip_all => 'Cannot connect to sandbox slave';
}
else {
plan tests => 4;
plan tests => 12;
}
# The sandbox servers run with lock_wait_timeout=3 and it's not dynamic
# so we need to specify --lock-wait-timeout=3 else the tool will die.
my $master_dsn = 'h=127.1,P=12345,u=msandbox,p=msandbox';
my @args = ($master_dsn, qw(--lock-wait-timeout 3));
my $row;
my $output;
my $cnf='/tmp/12345/my.sandbox.cnf';
my $cmd = "$trunk/bin/pt-table-checksum -F $cnf 127.0.0.1";
$sb->create_dbs($master_dbh, [qw(test)]);
$sb->load_file('master', 't/pt-table-checksum/samples/checksum_tbl.sql');
$sb->load_file('master', 't/pt-table-checksum/samples/resume.sql');
$sb->load_file('master', 't/pt-table-checksum/samples/resume2.sql');
sub load_data_infile {
   # Reload the percona.checksums repl table on the master from a saved
   # results fixture under samples/checksum_results/, then (optionally)
   # wait until a row matching $where has replicated to slave1.
   # Relies on the file-scoped $master_dbh, $slave1_dbh and $trunk
   # globals set up earlier in this test script.
   my ($file, $where) = @_;
   my $fixture = "$trunk/t/pt-table-checksum/samples/checksum_results/$file";
   $master_dbh->do('truncate table percona.checksums');
   $master_dbh->do("LOAD DATA LOCAL INFILE '$fixture' INTO TABLE percona.checksums");
   PerconaTest::wait_for_table($slave1_dbh, 'percona.checksums', $where)
      if $where;
   return;
}
# #############################################################################
# Issue 36: Add --resume option to mk-table-checksum
# #############################################################################
# Create an empty replicate table.
pt_table_checksum::main(@args, qw(-d foo --quiet));
PerconaTest::wait_for_table($slave1_dbh, 'percona.checksums');
$master_dbh->do('truncate table percona.checksums');
# Test --resume.
my $all_sakila_tables = [
[qw( sakila actor )],
[qw( sakila address )],
[qw( sakila category )],
[qw( sakila city )],
[qw( sakila country )],
[qw( sakila customer )],
[qw( sakila film )],
[qw( sakila film_actor )],
[qw( sakila film_category)],
[qw( sakila film_text )],
[qw( sakila inventory )],
[qw( sakila language )],
[qw( sakila payment )],
[qw( sakila rental )],
[qw( sakila staff )],
[qw( sakila store )],
];
# Child processes checksum each db.tbl on each host and print the results
# when done. So the output is nondeterministic. sort helps fix this.
# ############################################################################
# "Resume" from empty repl table.
# ############################################################################
$output = `$cmd h=127.1,P=12346 -d test -t resume --chunk-size 3 --chunk-size-limit 0 --resume $trunk/t/pt-table-checksum/samples/resume-chunked-partial.txt | sort | diff $trunk/t/pt-table-checksum/samples/resume-chunked-complete.txt -`;
is(
$output,
'',
'--resume --chunk-size'
$output = output(
sub { pt_table_checksum::main(@args, qw(-d sakila --resume)) },
);
$output = `$cmd h=127.1,P=12346 -d test -t resume --resume $trunk/t/pt-table-checksum/samples/resume-partial.txt | sort | diff $trunk/t/pt-table-checksum/samples/resume-complete.txt -`;
is(
$output,
'',
'--resume'
$row = $master_dbh->selectall_arrayref('select db, tbl from percona.checksums order by db, tbl');
is_deeply(
$row,
$all_sakila_tables,
"Resume from empty repl table"
);
$output = `$cmd h=127.1,P=12346 -d test,test2 -t resume,resume2 --chunk-size 3 --chunk-size-limit 0 --resume $trunk/t/pt-table-checksum/samples/resume2-chunked-partial.txt | sort | diff $trunk/t/pt-table-checksum/samples/resume2-chunked-complete.txt -`;
is(
$output,
'',
'--resume --chunk-size 2 dbs'
# ############################################################################
# Resume when all tables already done.
# ############################################################################
# Timestamps shouldn't change because no rows should be updated.
$row = $master_dbh->selectall_arrayref('select ts from percona.checksums order by db, tbl');
$output = output(
sub { pt_table_checksum::main(@args, qw(-d sakila --resume)) },
);
# Test --resume-replicate.
# First re-checksum and replicate using chunks so we can more easily break,
# resume and test it.
`$cmd -d test --replicate test.checksum --empty-replicate-table --chunk-size 3 --chunk-size-limit 0`;
# Make sure the results propagate.
sleep 1;
# Now break the results as if that run didn't finish.
`/tmp/12345/use -e "DELETE FROM test.checksum WHERE tbl='resume' AND chunk=2"`;
# And now test --resume with --replicate.
$output = `$cmd -d test --resume-replicate --replicate test.checksum --chunk-size 3 --chunk-size-limit 0`;
# The TIME value can fluctuate between 1 and 0. Make it 0.
$output =~ s/6abf4a82(\s+)\d+/6abf4a82${1}0/;
is(
$output,
"DATABASE TABLE CHUNK HOST ENGINE COUNT CHECKSUM TIME WAIT STAT LAG
# already checksummed: test resume 0 127.0.0.1
# already checksummed: test resume 1 127.0.0.1
test resume 2 127.0.0.1 InnoDB 3 6abf4a82 0 NULL NULL NULL
# already checksummed: test resume 3 127.0.0.1
"",
"Resume with nothing to do"
);
is_deeply(
$master_dbh->selectall_arrayref('select ts from percona.checksums order by db, tbl'),
$row,
"Timestamps didn't change"
);
# ############################################################################
# Resume from a single chunk table. So, resume should really start with
# next table.
# ############################################################################
load_data_infile("sakila-done-singles", "ts='2011-10-15 13:00:16'");
$master_dbh->do("delete from percona.checksums where ts > '2011-10-15 13:00:04'");
$row = $master_dbh->selectall_arrayref('select db, tbl from percona.checksums order by db, tbl');
is_deeply(
$row,
[
[qw( sakila actor )],
[qw( sakila address )],
[qw( sakila category )],
[qw( sakila city )],
],
"Checksum results for 1/4 of sakila singles"
);
$output = output(
sub { pt_table_checksum::main(@args, qw(-d sakila --resume)) },
trf => sub { return PerconaTest::normalize_checksum_results(@_) },
);
$row = $master_dbh->selectall_arrayref('select db, tbl from percona.checksums order by db, tbl');
is_deeply(
$row,
$all_sakila_tables,
"Resume finished sakila"
);
# XXX This may not be a stable test if your machine isn't fast enough
# to do these remaining tables as single chunks.
is(
$output,
"ERRORS DIFFS ROWS CHUNKS SKIPPED TABLE
0 0 109 1 0 sakila.country
0 0 599 1 0 sakila.customer
0 0 1000 1 0 sakila.film
0 0 5462 1 0 sakila.film_actor
0 0 1000 1 0 sakila.film_category
0 0 1000 1 0 sakila.film_text
0 0 4581 1 0 sakila.inventory
0 0 6 1 0 sakila.language
0 0 16049 1 0 sakila.payment
0 0 16044 1 0 sakila.rental
0 0 2 1 0 sakila.staff
0 0 2 1 0 sakila.store
",
'--resume-replicate'
"Resumed from next table"
);
# ############################################################################
# Resume from the middle of a table that was being chunked.
# ############################################################################
load_data_infile("sakila-done-1k-chunks", "ts='2011-10-15 13:00:57'");
$master_dbh->do("delete from percona.checksums where ts > '2011-10-15 13:00:28'");
my $first_half = [
[qw(sakila actor 1 200 )],
[qw(sakila address 1 603 )],
[qw(sakila category 1 16 )],
[qw(sakila city 1 600 )],
[qw(sakila country 1 109 )],
[qw(sakila customer 1 599 )],
[qw(sakila film 1 1000 )],
[qw(sakila film_actor 1 1000 )],
[qw(sakila film_actor 2 1000 )],
[qw(sakila film_actor 3 1000 )],
[qw(sakila film_actor 4 1000 )],
[qw(sakila film_actor 5 1000 )],
[qw(sakila film_actor 6 462 )],
[qw(sakila film_category 1 1000 )],
[qw(sakila film_text 1 1000 )],
[qw(sakila inventory 1 1000 )],
[qw(sakila inventory 2 1000 )],
[qw(sakila inventory 3 1000 )],
[qw(sakila inventory 4 1000 )],
[qw(sakila inventory 5 581 )],
[qw(sakila language 1 6 )],
[qw(sakila payment 1 1000 )],
[qw(sakila payment 2 1000 )],
[qw(sakila payment 3 1000 )],
[qw(sakila payment 4 1000 )],
[qw(sakila payment 5 1000 )],
[qw(sakila payment 6 1000 )],
[qw(sakila payment 7 1000 )],
];
$row = $master_dbh->selectall_arrayref('select db, tbl, chunk, master_cnt from percona.checksums order by db, tbl');
is_deeply(
$row,
$first_half,
"Checksum results through sakila.payment chunk 7"
);
$output = output(
sub { pt_table_checksum::main(@args, qw(-d sakila --resume),
qw(--chunk-time 0)) },
trf => sub { return PerconaTest::normalize_checksum_results(@_) },
);
$row = $master_dbh->selectall_arrayref('select db, tbl, chunk, master_cnt from percona.checksums order by db, tbl');
is_deeply(
$row,
[
@$first_half,
[qw(sakila payment 8 1000 )],
[qw(sakila payment 9 1000 )],
[qw(sakila payment 10 1000 )],
[qw(sakila payment 11 1000 )],
[qw(sakila payment 12 1000 )],
[qw(sakila payment 13 1000 )],
[qw(sakila payment 14 1000 )],
[qw(sakila payment 15 1000 )],
[qw(sakila payment 16 1000 )],
[qw(sakila payment 17 49 )],
[qw(sakila rental 1 1000 )],
[qw(sakila rental 2 1000 )],
[qw(sakila rental 3 1000 )],
[qw(sakila rental 4 1000 )],
[qw(sakila rental 5 1000 )],
[qw(sakila rental 6 1000 )],
[qw(sakila rental 7 1000 )],
[qw(sakila rental 8 1000 )],
[qw(sakila rental 9 1000 )],
[qw(sakila rental 10 1000 )],
[qw(sakila rental 11 1000 )],
[qw(sakila rental 12 1000 )],
[qw(sakila rental 13 1000 )],
[qw(sakila rental 14 1000 )],
[qw(sakila rental 15 1000 )],
[qw(sakila rental 16 1000 )],
[qw(sakila rental 17 44 )],
[qw(sakila staff 1 2 )],
[qw(sakila store 1 2 )],
],
"Resume finished sakila"
);
is(
$output,
"Resuming from sakila.payment chunk 7, timestamp 2011-10-15 13:00:28
ERRORS DIFFS ROWS CHUNKS SKIPPED TABLE
0 0 9049 10 0 sakila.payment
0 0 16044 17 0 sakila.rental
0 0 2 1 0 sakila.staff
0 0 2 1 0 sakila.store
",
"Resumed from sakila.payment chunk 7"
);
# ############################################################################
# Resume from the end of a finished table that was being chunked.
# ############################################################################
load_data_infile("sakila-done-1k-chunks", "ts='2011-10-15 13:00:57'");
$master_dbh->do("delete from percona.checksums where ts > '2011-10-15 13:00:38'");
$row = $master_dbh->selectall_arrayref('select db, tbl, chunk, master_cnt from percona.checksums order by db, tbl');
is_deeply(
$row,
[
@$first_half,
[qw(sakila payment 8 1000 )],
[qw(sakila payment 9 1000 )],
[qw(sakila payment 10 1000 )],
[qw(sakila payment 11 1000 )],
[qw(sakila payment 12 1000 )],
[qw(sakila payment 13 1000 )],
[qw(sakila payment 14 1000 )],
[qw(sakila payment 15 1000 )],
[qw(sakila payment 16 1000 )],
[qw(sakila payment 17 49 )],
],
"Checksum results through sakila.payment"
);
$output = output(
sub { pt_table_checksum::main(@args, qw(-d sakila --resume),
qw(--chunk-time 0)) },
trf => sub { return PerconaTest::normalize_checksum_results(@_) },
);
$row = $master_dbh->selectall_arrayref('select db, tbl, chunk, master_cnt from percona.checksums order by db, tbl');
is_deeply(
$row,
[
@$first_half,
[qw(sakila payment 8 1000 )],
[qw(sakila payment 9 1000 )],
[qw(sakila payment 10 1000 )],
[qw(sakila payment 11 1000 )],
[qw(sakila payment 12 1000 )],
[qw(sakila payment 13 1000 )],
[qw(sakila payment 14 1000 )],
[qw(sakila payment 15 1000 )],
[qw(sakila payment 16 1000 )],
[qw(sakila payment 17 49 )],
[qw(sakila rental 1 1000 )],
[qw(sakila rental 2 1000 )],
[qw(sakila rental 3 1000 )],
[qw(sakila rental 4 1000 )],
[qw(sakila rental 5 1000 )],
[qw(sakila rental 6 1000 )],
[qw(sakila rental 7 1000 )],
[qw(sakila rental 8 1000 )],
[qw(sakila rental 9 1000 )],
[qw(sakila rental 10 1000 )],
[qw(sakila rental 11 1000 )],
[qw(sakila rental 12 1000 )],
[qw(sakila rental 13 1000 )],
[qw(sakila rental 14 1000 )],
[qw(sakila rental 15 1000 )],
[qw(sakila rental 16 1000 )],
[qw(sakila rental 17 44 )],
[qw(sakila staff 1 2 )],
[qw(sakila store 1 2 )],
],
"Resume finished sakila"
);
is(
$output,
"ERRORS DIFFS ROWS CHUNKS SKIPPED TABLE
0 0 16044 17 0 sakila.rental
0 0 2 1 0 sakila.staff
0 0 2 1 0 sakila.store
",
"Resumed from end of sakila.payment"
);
# #############################################################################
# Done.
# #############################################################################
$sb->wipe_clean($master_dbh);
$sb->wipe_clean($slave_dbh);
exit;

View File

@@ -0,0 +1,57 @@
sakila actor 1 0.002713 \N \N \N 160cf2ec 200 160cf2ec 200 2011-10-15 13:00:01
sakila address 1 0.004167 \N \N \N 3fc29462 603 3fc29462 603 2011-10-15 13:00:02
sakila category 1 0.001135 \N \N \N a2d7d74f 16 a2d7d74f 16 2011-10-15 13:00:03
sakila city 1 0.003327 \N \N \N 303bee90 600 303bee90 600 2011-10-15 13:00:04
sakila country 1 0.001697 \N \N \N 4a4fd7e4 109 4a4fd7e4 109 2011-10-15 13:00:05
sakila customer 1 0.004117 \N \N \N dbbb314e 599 dbbb314e 599 2011-10-15 13:00:06
sakila film 1 0.01261 \N \N \N ec4010f9 1000 ec4010f9 1000 2011-10-15 13:00:07
sakila film_actor 1 0.006481 PRIMARY 1,1,1 39,39,293 440e9c7e 1000 440e9c7e 1000 2011-10-15 13:00:08
sakila film_actor 2 0.006782 PRIMARY 39,39,320 76,76,234 ee7c0050 1000 ee7c0050 1000 2011-10-15 13:00:09
sakila film_actor 3 0.008531 PRIMARY 76,76,251 110,110,513 ae8105fe 1000 ae8105fe 1000 2011-10-15 13:00:10
sakila film_actor 4 0.005418 PRIMARY 110,110,525 146,146,278 95cd606d 1000 95cd606d 1000 2011-10-15 13:00:11
sakila film_actor 5 0.006011 PRIMARY 146,146,296 183,183,862 6e0ab29c 1000 6e0ab29c 1000 2011-10-15 13:00:12
sakila film_actor 6 0.005747 PRIMARY 183,183,914 200,200,993 916417a4 462 916417a4 462 2011-10-15 13:00:13
sakila film_category 1 0.005813 \N \N \N afa46d51 1000 afa46d51 1000 2011-10-15 13:00:14
sakila film_text 1 0.00244 \N \N \N 186d7573 1000 186d7573 1000 2011-10-15 13:00:15
sakila inventory 1 0.00611 PRIMARY 1 1000 823e0cc1 1000 823e0cc1 1000 2011-10-15 13:00:16
sakila inventory 2 0.007018 PRIMARY 1001 2000 dc2e044d 1000 dc2e044d 1000 2011-10-15 13:00:17
sakila inventory 3 0.008906 PRIMARY 2001 3000 b4d210dc 1000 b4d210dc 1000 2011-10-15 13:00:18
sakila inventory 4 0.00614 PRIMARY 3001 4000 2ac7ec19 1000 2ac7ec19 1000 2011-10-15 13:00:19
sakila inventory 5 0.005256 PRIMARY 4001 4581 297c82ce 581 297c82ce 581 2011-10-15 13:00:20
sakila language 1 0.001054 \N \N \N 7e7df3f 6 7e7df3f 6 2011-10-15 13:00:21
sakila payment 1 0.006943 PRIMARY 1 1000 8eddd82a 1000 8eddd82a 1000 2011-10-15 13:00:22
sakila payment 2 0.013493 PRIMARY 1001 2000 cf5f0276 1000 cf5f0276 1000 2011-10-15 13:00:23
sakila payment 3 0.010318 PRIMARY 2001 3000 21d1e508 1000 21d1e508 1000 2011-10-15 13:00:24
sakila payment 4 0.008192 PRIMARY 3001 4000 f2acae9a 1000 f2acae9a 1000 2011-10-15 13:00:25
sakila payment 5 0.009202 PRIMARY 4001 5000 ac381a52 1000 ac381a52 1000 2011-10-15 13:00:26
sakila payment 6 0.007947 PRIMARY 5001 6000 122d5885 1000 122d5885 1000 2011-10-15 13:00:27
sakila payment 7 0.00989 PRIMARY 6001 7000 40f9d159 1000 40f9d159 1000 2011-10-15 13:00:28
sakila payment 8 0.007068 PRIMARY 7001 8000 dbc8c61 1000 dbc8c61 1000 2011-10-15 13:00:29
sakila payment 9 0.00904 PRIMARY 8001 9000 47561e98 1000 47561e98 1000 2011-10-15 13:00:30
sakila payment 10 0.008536 PRIMARY 9001 10000 c1d25dc0 1000 c1d25dc0 1000 2011-10-15 13:00:31
sakila payment 11 0.007003 PRIMARY 10001 11000 38dbcf1d 1000 38dbcf1d 1000 2011-10-15 13:00:32
sakila payment 12 0.009802 PRIMARY 11001 12000 7b949f9e 1000 7b949f9e 1000 2011-10-15 13:00:33
sakila payment 13 0.006866 PRIMARY 12001 13000 73253df7 1000 73253df7 1000 2011-10-15 13:00:34
sakila payment 14 0.007389 PRIMARY 13001 14000 ad58fd5c 1000 ad58fd5c 1000 2011-10-15 13:00:35
sakila payment 15 0.009497 PRIMARY 14001 15000 37ceaf5e 1000 37ceaf5e 1000 2011-10-15 13:00:36
sakila payment 16 0.007292 PRIMARY 15001 16000 88fa3a22 1000 88fa3a22 1000 2011-10-15 13:00:37
sakila payment 17 0.001262 PRIMARY 16001 16049 d5ec0985 49 d5ec0985 49 2011-10-15 13:00:38
sakila rental 1 0.006666 PRIMARY 1 1001 880ffc22 1000 880ffc22 1000 2011-10-15 13:00:39
sakila rental 2 0.007498 PRIMARY 1002 2001 b32e3664 1000 b32e3664 1000 2011-10-15 13:00:40
sakila rental 3 0.01027 PRIMARY 2002 3002 1acd2a86 1000 1acd2a86 1000 2011-10-15 13:00:41
sakila rental 4 0.009184 PRIMARY 3003 4002 b935564f 1000 b935564f 1000 2011-10-15 13:00:42
sakila rental 5 0.006689 PRIMARY 4003 5002 22b356e5 1000 22b356e5 1000 2011-10-15 13:00:43
sakila rental 6 0.007026 PRIMARY 5003 6002 74f132f6 1000 74f132f6 1000 2011-10-15 13:00:44
sakila rental 7 0.008435 PRIMARY 6003 7003 4f623d47 1000 4f623d47 1000 2011-10-15 13:00:45
sakila rental 8 0.00669 PRIMARY 7004 8003 1f520556 1000 1f520556 1000 2011-10-15 13:00:46
sakila rental 9 0.008853 PRIMARY 8004 9003 cc9ce99f 1000 cc9ce99f 1000 2011-10-15 13:00:47
sakila rental 10 0.006933 PRIMARY 9004 10004 982e6f6 1000 982e6f6 1000 2011-10-15 13:00:48
sakila rental 11 0.006462 PRIMARY 10005 11004 d2ad38b8 1000 d2ad38b8 1000 2011-10-15 13:00:49
sakila rental 12 0.00984 PRIMARY 11005 12004 3b07b7a1 1000 3b07b7a1 1000 2011-10-15 13:00:50
sakila rental 13 0.011029 PRIMARY 12005 13004 f5de6bd 1000 f5de6bd 1000 2011-10-15 13:00:51
sakila rental 14 0.008716 PRIMARY 13005 14004 ea862225 1000 ea862225 1000 2011-10-15 13:00:52
sakila rental 15 0.006528 PRIMARY 14005 15004 dc7ca09f 1000 dc7ca09f 1000 2011-10-15 13:00:53
sakila rental 16 0.0096 PRIMARY 15005 16005 113818d1 1000 113818d1 1000 2011-10-15 13:00:54
sakila rental 17 0.001746 PRIMARY 16006 16049 dc02888c 44 dc02888c 44 2011-10-15 13:00:55
sakila staff 1 0.001279 \N \N \N 233668ae 2 233668ae 2 2011-10-15 13:00:56
sakila store 1 0.000905 \N \N \N 6ce7245a 2 6ce7245a 2 2011-10-15 13:00:57

View File

@@ -0,0 +1,16 @@
sakila actor 1 0.002313 \N \N \N 160cf2ec 200 160cf2ec 200 2011-10-15 13:00:01
sakila address 1 0.006412 \N \N \N 3fc29462 603 3fc29462 603 2011-10-15 13:00:02
sakila category 1 0.001285 \N \N \N a2d7d74f 16 a2d7d74f 16 2011-10-15 13:00:03
sakila city 1 0.00336 \N \N \N 303bee90 600 303bee90 600 2011-10-15 13:00:04
sakila country 1 0.001663 \N \N \N 4a4fd7e4 109 4a4fd7e4 109 2011-10-15 13:00:05
sakila customer 1 0.004198 \N \N \N dbbb314e 599 dbbb314e 599 2011-10-15 13:00:06
sakila film 1 0.012757 \N \N \N ec4010f9 1000 ec4010f9 1000 2011-10-15 13:00:07
sakila film_actor 1 0.023028 \N \N \N 6e505c85 5462 6e505c85 5462 2011-10-15 13:00:08
sakila film_category 1 0.004835 \N \N \N afa46d51 1000 afa46d51 1000 2011-10-15 13:00:09
sakila film_text 1 0.002406 \N \N \N 186d7573 1000 186d7573 1000 2011-10-15 13:00:10
sakila inventory 1 0.021011 \N \N \N e9797687 4581 e9797687 4581 2011-10-15 13:00:11
sakila language 1 0.001155 \N \N \N 7e7df3f 6 7e7df3f 6 2011-10-15 13:00:12
sakila payment 1 0.091125 \N \N \N 10c1c1a8 16049 10c1c1a8 16049 2011-10-15 13:00:13
sakila rental 1 0.079093 \N \N \N 46829ea7 16044 46829ea7 16044 2011-10-15 13:00:14
sakila staff 1 0.001413 \N \N \N 233668ae 2 233668ae 2 2011-10-15 13:00:15
sakila store 1 0.00111 \N \N \N 6ce7245a 2 6ce7245a 2 2011-10-15 13:00:16