diff --git a/t/pt-archiver/basics.t b/t/pt-archiver/basics.t
index 08762efb..364c325e 100644
--- a/t/pt-archiver/basics.t
+++ b/t/pt-archiver/basics.t
@@ -14,34 +14,34 @@ use Time::HiRes qw(time);
 
 use PerconaTest;
 use Sandbox;
+use Data::Dumper;
 require "$trunk/bin/pt-archiver";
 
 my $dp = new DSNParser(opts=>$dsn_opts);
 my $sb = new Sandbox(basedir => '/tmp', DSNParser => $dp);
-my $dbh = $sb->get_dbh_for('master');
+my $master_dbh = $sb->get_dbh_for('master');
+my $slave1_dbh = $sb->get_dbh_for('slave1');
 
-if ( !$dbh ) {
+if ( !$master_dbh ) {
    plan skip_all => 'Cannot connect to sandbox master';
 }
+elsif ( !$slave1_dbh ) {
+   plan skip_all => 'Cannot connect to sandbox slave1';
+}
+else {
+   plan tests => 29;
+}
 
 my $output;
 my $rows;
-my $cnf = "/tmp/12345/my.sandbox.cnf";
-my $cmd = "$trunk/bin/pt-archiver";
-
-# Make sure load works.
-$sb->create_dbs($dbh, ['test']);
-$sb->load_file('master', 't/pt-archiver/samples/tables1-4.sql');
-$rows = $dbh->selectrow_arrayref('select count(*) from test.table_1')->[0];
-if ( ($rows || 0) != 4 ) {
-   plan skip_all => 'Failed to load tables1-4.sql';
-}
-else {
-   plan tests => 26;
-}
-
+my $cnf = "/tmp/12345/my.sandbox.cnf";
+my $cmd = "$trunk/bin/pt-archiver";
 my @args = qw(--dry-run --where 1=1);
 
+$sb->create_dbs($master_dbh, ['test']);
+$sb->load_file('master', 't/pt-archiver/samples/tables1-4.sql');
+$sb->wait_for_slaves();
+
 # ###########################################################################
 # These are dry-run tests of various options to test that the correct
 # SQL statements are generated.
@@ -185,9 +185,58 @@ cmp_ok(
    "--sleep between SELECT --bulk-delete (bug 979092)"
 );
 
+# #############################################################################
+# Bug 903387: pt-archiver doesn't honor b=1 flag to create SQL_LOG_BIN statement
+# #############################################################################
+
+$sb->load_file('master', "t/pt-archiver/samples/bulk_regular_insert.sql");
+$sb->wait_for_slaves();
+
+my $original_rows = $slave1_dbh->selectall_arrayref("SELECT * FROM bri.t ORDER BY id");
+is_deeply(
+   $original_rows,
+   [
+      [1, 'aa', '11:11:11'],
+      [2, 'bb', '11:11:12'],
+      [3, 'cc', '11:11:13'],
+      [4, 'dd', '11:11:14'],
+      [5, 'ee', '11:11:15'],
+      [6, 'ff', '11:11:16'],
+      [7, 'gg', '11:11:17'],
+      [8, 'hh', '11:11:18'],
+      [9, 'ii', '11:11:19'],
+      [10,'jj', '11:11:10'],
+   ],
+   "Bug 903387: slave has rows"
+);
+
+$output = output(
+   sub { pt_archiver::main(
+      '--source', "D=bri,t=t,F=$cnf,b=1",
+      '--dest', "D=bri,t=t_arch",
+      qw(--where 1=1 --replace --commit-each --bulk-insert --bulk-delete),
+      qw(--limit 10)) },
+);
+
+$rows = $master_dbh->selectall_arrayref("SELECT * FROM bri.t ORDER BY id");
+is_deeply(
+   $rows,
+   [
+      [10,'jj', '11:11:10'],
+   ],
+   "Bug 903387: rows deleted on master"
+) or diag(Dumper($rows));
+
+$rows = $slave1_dbh->selectall_arrayref("SELECT * FROM bri.t ORDER BY id");
+is_deeply(
+   $rows,
+   $original_rows,
+   "Bug 903387: slave still has rows"
+) or diag(Dumper($rows));
+
 # #############################################################################
 # Done.
 # #############################################################################
-$sb->wipe_clean($dbh);
+$sb->wipe_clean($master_dbh);
 ok($sb->ok(), "Sandbox servers") or BAIL_OUT(__FILE__ . " broke the sandbox");
 exit;