PT-2340 - Support MySQL 8.4

- Removed runtime.txt after discussion with Anastasia Alexandrova
- Added "use VersionParser" into tests in t/lib when needed
- Removed word master from tests for pt-archiver, pt-config-diff, pt-deadlock-logger, pt-duplicate-key-checker, pt-find, pt-fk-error-logger, pt-heartbeat, pt-index-usage, pt-ioprofile, pt-kill, pt-mysql-summary
- Removed word slave from tests for pt-archiver, pt-config-diff, pt-deadlock-logger, pt-duplicate-key-checker, pt-find, pt-fk-error-logger, pt-heartbeat, pt-index-usage, pt-ioprofile, pt-kill, pt-mysql-summary
- Updated modules for pt-archiver, pt-config-diff, pt-deadlock-logger, pt-duplicate-key-checker, pt-find, pt-fk-error-logger, pt-heartbeat, pt-index-usage, pt-ioprofile, pt-kill, pt-mysql-summary
- Changed the mysql_ssl patch so that mysql_ssl now uses the short option s
- Added a check for existing zombies in t/pt-kill/execute_command.t
- Added bin/pt-galera-log-explainer to .gitignore
This commit is contained in:
Sveta Smirnova
2024-07-26 13:35:29 +03:00
parent 8cbb5a0c8f
commit 5c999ca3e0
114 changed files with 1898 additions and 1455 deletions

View File

@@ -17,58 +17,58 @@ require "$trunk/bin/pt-archiver";
my $dp = new DSNParser(opts=>$dsn_opts);
my $sb = new Sandbox(basedir => '/tmp', DSNParser => $dp);
my $master_dbh = $sb->get_dbh_for('master');
my $slave_dbh = $sb->get_dbh_for('slave1');
my $source_dbh = $sb->get_dbh_for('source');
my $replica_dbh = $sb->get_dbh_for('replica1');
if ( !$master_dbh ) {
plan skip_all => 'Cannot connect to sandbox master';
if ( !$source_dbh ) {
plan skip_all => 'Cannot connect to sandbox source';
}
elsif ( !$slave_dbh ) {
plan skip_all => 'Cannot connect to sandbox slave';
elsif ( !$replica_dbh ) {
plan skip_all => 'Cannot connect to sandbox replica';
} elsif ($sandbox_version lt '5.7') {
plan skip_all => 'Only on MySQL 5.7+';
} else {
plan tests => 5;
}
my ($master1_dbh, $master1_dsn) = $sb->start_sandbox(
server => 'chan_master1',
type => 'master',
my ($source1_dbh, $source1_dsn) = $sb->start_sandbox(
server => 'chan_source1',
type => 'source',
);
my ($master2_dbh, $master2_dsn) = $sb->start_sandbox(
server => 'chan_master2',
type => 'master',
my ($source2_dbh, $source2_dsn) = $sb->start_sandbox(
server => 'chan_source2',
type => 'source',
);
my ($slave1_dbh, $slave1_dsn) = $sb->start_sandbox(
server => 'chan_slave1',
type => 'master',
my ($replica1_dbh, $replica1_dsn) = $sb->start_sandbox(
server => 'chan_replica1',
type => 'source',
);
my $slave1_port = $sb->port_for('chan_slave1');
my $replica1_port = $sb->port_for('chan_replica1');
$sb->load_file('chan_master1', "sandbox/gtid_on.sql", undef, no_wait => 1);
$sb->load_file('chan_master2', "sandbox/gtid_on.sql", undef, no_wait => 1);
$sb->load_file('chan_slave1', "sandbox/slave_channels.sql", undef, no_wait => 1);
$sb->load_file('chan_source1', "sandbox/gtid_on.sql", undef, no_wait => 1);
$sb->load_file('chan_source2', "sandbox/gtid_on.sql", undef, no_wait => 1);
$sb->load_file('chan_replica1', "sandbox/replica_channels.sql", undef, no_wait => 1);
my $master1_port = $sb->port_for('chan_master1');
my $source1_port = $sb->port_for('chan_source1');
my $num_rows = 40000;
# Load some rows into masters 1 & 2.
$sb->load_file('chan_master1', "t/pt-archiver/samples/channels.sql", undef, no_wait => 1);
# Load some rows into sources 1 & 2.
$sb->load_file('chan_source1', "t/pt-archiver/samples/channels.sql", undef, no_wait => 1);
diag("Loading $num_rows into the test.t1 table on first master. This might take some time.");
diag(`util/mysql_random_data_load --host=127.0.0.1 --port=$master1_port --user=msandbox --password=msandbox test t1 $num_rows`);
diag("Loading $num_rows into the test.t1 table on first source. This might take some time.");
diag(`util/mysql_random_data_load --host=127.0.0.1 --port=$source1_port --user=msandbox --password=msandbox test t1 $num_rows`);
diag("$num_rows rows loaded. Starting tests.");
$master_dbh->do("FLUSH TABLES");
$source_dbh->do("FLUSH TABLES");
my $rows = $master1_dbh->selectrow_arrayref('SELECT COUNT(*) FROM test.t1 ');
my $rows = $source1_dbh->selectrow_arrayref('SELECT COUNT(*) FROM test.t1 ');
is(
@$rows[0],
$num_rows,
"All rows were loaded into master 1",
"All rows were loaded into source 1",
);
my @args = ('--source', $master1_dsn.',D=test,t=t1', '--purge', '--where', sprintf('id >= %d', $num_rows / 2), '--check-slave-lag', $slave1_dsn);
my @args = ('--source', $source1_dsn.',D=test,t=t1', '--purge', '--where', sprintf('id >= %d', $num_rows / 2), "--check-${replica_name}-lag", $replica1_dsn);
my ($exit_status, $output);
@@ -104,12 +104,12 @@ is(
'Ok if channel name was specified',
);
$sb->stop_sandbox(qw(chan_master1 chan_master2 chan_slave1));
$sb->stop_sandbox(qw(chan_source1 chan_source2 chan_replica1));
# #############################################################################
# Done.
# #############################################################################
$sb->wipe_clean($master_dbh);
$sb->wipe_clean($source_dbh);
ok($sb->ok(), "Sandbox servers") or BAIL_OUT(__FILE__ . " broke the sandbox");
exit;

View File

@@ -19,14 +19,14 @@ require "$trunk/bin/pt-archiver";
my $dp = new DSNParser(opts=>$dsn_opts);
my $sb = new Sandbox(basedir => '/tmp', DSNParser => $dp);
my $master_dbh = $sb->get_dbh_for('master');
my $slave1_dbh = $sb->get_dbh_for('slave1');
my $source_dbh = $sb->get_dbh_for('source');
my $replica1_dbh = $sb->get_dbh_for('replica1');
if ( !$master_dbh ) {
plan skip_all => 'Cannot connect to sandbox master';
if ( !$source_dbh ) {
plan skip_all => 'Cannot connect to sandbox source';
}
elsif ( !$slave1_dbh ) {
plan skip_all => 'Cannot connect to sandbox slave1';
elsif ( !$replica1_dbh ) {
plan skip_all => 'Cannot connect to sandbox replica1';
}
my $output;
@@ -35,9 +35,9 @@ my $cnf = "/tmp/12345/my.sandbox.cnf";
my $cmd = "$trunk/bin/pt-archiver";
my @args = qw(--dry-run --where 1=1);
$sb->create_dbs($master_dbh, ['test']);
$sb->load_file('master', 't/pt-archiver/samples/tables1-4.sql');
$sb->wait_for_slaves();
$sb->create_dbs($source_dbh, ['test']);
$sb->load_file('source', 't/pt-archiver/samples/tables1-4.sql');
$sb->wait_for_replicas();
# ###########################################################################
# These are dry-run tests of various options to test that the correct
@@ -97,7 +97,7 @@ like($output, qr/SELECT/, 'I can disable the check OK');
shift @args; # remove --dry-run
# Test --why-quit and --statistics output
$sb->load_file('master', 't/pt-archiver/samples/tables1-4.sql');
$sb->load_file('source', 't/pt-archiver/samples/tables1-4.sql');
$output = output(sub {pt_archiver::main(@args, '--source', "D=test,t=table_1,F=$cnf", qw(--purge --why-quit --statistics)) });
like($output, qr/Started at \d/, 'Start timestamp');
like($output, qr/Source:/, 'source');
@@ -105,18 +105,18 @@ like($output, qr/SELECT 4\nINSERT 0\nDELETE 4\n/, 'row counts');
like($output, qr/Exiting because there are no more rows/, 'Exit reason');
# Test basic functionality with OPTIMIZE
$sb->load_file('master', 't/pt-archiver/samples/tables1-4.sql');
$sb->load_file('source', 't/pt-archiver/samples/tables1-4.sql');
$output = output(sub {pt_archiver::main(@args, qw(--optimize ds --source), "D=test,t=table_1,F=$cnf", qw(--purge)) });
is($output, '', 'OPTIMIZE did not fail');
# Test an empty table
$sb->load_file('master', 't/pt-archiver/samples/tables1-4.sql');
$sb->load_file('source', 't/pt-archiver/samples/tables1-4.sql');
$output = `/tmp/12345/use -N -e "delete from test.table_1"`;
$output = output(sub {pt_archiver::main(@args, '--source', "D=test,t=table_1,F=$cnf", qw(--purge)) });
is($output, "", 'Empty table OK');
# Test the output
$sb->load_file('master', 't/pt-archiver/samples/tables1-4.sql');
$sb->load_file('source', 't/pt-archiver/samples/tables1-4.sql');
$output = `$trunk/bin/pt-archiver --where 1=1 --source D=test,t=table_1,F=$cnf --purge --progress 2 2>&1 | awk '{print \$3}'`;
is($output, <<EOF
COUNT
@@ -128,12 +128,12 @@ EOF
,'Progress output looks okay');
# Statistics
$sb->load_file('master', 't/pt-archiver/samples/tables1-4.sql');
$sb->load_file('source', 't/pt-archiver/samples/tables1-4.sql');
$output = output(sub {pt_archiver::main(@args, qw(--statistics --source), "D=test,t=table_1,F=$cnf", qw(--dest t=table_2)) });
like($output, qr/commit *10/, 'Stats print OK');
# Test --no-delete.
$sb->load_file('master', 't/pt-archiver/samples/tables1-4.sql');
$sb->load_file('source', 't/pt-archiver/samples/tables1-4.sql');
$output = output(sub {pt_archiver::main(@args, qw(--no-delete --source), "D=test,t=table_1,F=$cnf", qw(--dry-run --file /tmp/pt-archiver-test-no-delete-1)) });
like($output, qr/> /, '--no-delete implies strict ascending');
unlike($output, qr/>=/, '--no-delete implies strict ascending');
@@ -146,7 +146,7 @@ is($output + 0, 4, 'All 4 rows are still there');
# --sleep
# #############################################################################
# This table, gt_n.t1, is nothing special; it just has 19 rows and a PK.
$sb->load_file('master', 't/pt-archiver/samples/gt_n.sql');
$sb->load_file('source', 't/pt-archiver/samples/gt_n.sql');
# https://bugs.launchpad.net/percona-toolkit/+bug/979092
# This shouldn't take more than 3 seconds because it only takes 2 SELECT
@@ -166,7 +166,7 @@ ok(
) or diag($output, "t=", $t);
# Try again with --bulk-delete. The tool should work the same.
$sb->load_file('master', 't/pt-archiver/samples/gt_n.sql');
$sb->load_file('source', 't/pt-archiver/samples/gt_n.sql');
$t0 = time;
$output = output(
sub { pt_archiver::main(@args, '--source', "D=gt_n,t=t1,F=$cnf",
@@ -184,11 +184,11 @@ ok(
# Bug 903387: pt-archiver doesn't honor b=1 flag to create SQL_LOG_BIN statement
# #############################################################################
SKIP: {
$sb->load_file('master', "t/pt-archiver/samples/bulk_regular_insert.sql");
$sb->wait_for_slaves();
$sb->load_file('source', "t/pt-archiver/samples/bulk_regular_insert.sql");
$sb->wait_for_replicas();
my $original_rows = $slave1_dbh->selectall_arrayref("SELECT * FROM bri.t ORDER BY id");
my $original_no_id = $slave1_dbh->selectall_arrayref("SELECT c,t FROM bri.t ORDER BY id");
my $original_rows = $replica1_dbh->selectall_arrayref("SELECT * FROM bri.t ORDER BY id");
my $original_no_id = $replica1_dbh->selectall_arrayref("SELECT c,t FROM bri.t ORDER BY id");
is_deeply(
$original_no_id,
[
@@ -203,7 +203,7 @@ SKIP: {
['ii', '11:11:19'],
['jj', '11:11:10'],
],
"Bug 903387: slave has rows"
"Bug 903387: replica has rows"
);
$output = output(
@@ -214,26 +214,26 @@ SKIP: {
qw(--limit 10)) },
);
$rows = $master_dbh->selectall_arrayref("SELECT c,t FROM bri.t ORDER BY id");
$rows = $source_dbh->selectall_arrayref("SELECT c,t FROM bri.t ORDER BY id");
is_deeply(
$rows,
[
['jj', '11:11:10'],
],
"Bug 903387: rows deleted on master"
"Bug 903387: rows deleted on source"
) or diag(Dumper($rows));
$rows = $slave1_dbh->selectall_arrayref("SELECT * FROM bri.t ORDER BY id");
$rows = $replica1_dbh->selectall_arrayref("SELECT * FROM bri.t ORDER BY id");
is_deeply(
$rows,
$original_rows,
"Bug 903387: slave still has rows"
"Bug 903387: replica still has rows"
) or diag(Dumper($rows));
}
# #############################################################################
# Done.
# #############################################################################
$sb->wipe_clean($master_dbh);
$sb->wipe_clean($source_dbh);
ok($sb->ok(), "Sandbox servers") or BAIL_OUT(__FILE__ . " broke the sandbox");
done_testing;

View File

@@ -18,29 +18,29 @@ require "$trunk/bin/pt-archiver";
my $dp = new DSNParser(opts=>$dsn_opts);
my $sb = new Sandbox(basedir => '/tmp', DSNParser => $dp);
my $master_dbh = $sb->get_dbh_for('master');
my $source_dbh = $sb->get_dbh_for('source');
if ( !$master_dbh ) {
plan skip_all => 'Cannot connect to sandbox master';
if ( !$source_dbh ) {
plan skip_all => 'Cannot connect to sandbox source';
}
my $output;
my $cnf = "/tmp/12345/my.sandbox.cnf";
my $cmd = "$trunk/bin/pt-archiver";
$sb->create_dbs($master_dbh, ['test']);
$sb->load_file('master', 't/pt-archiver/samples/tables1-4.sql');
$sb->create_dbs($source_dbh, ['test']);
$sb->load_file('source', 't/pt-archiver/samples/tables1-4.sql');
# ###########################################################################
# pt-archiver deletes data despite --dry-run
# https://bugs.launchpad.net/percona-toolkit/+bug/1199589
# ###########################################################################
my $rows_before = $master_dbh->selectall_arrayref("SELECT * FROM test.table_1 ORDER BY a");
my $rows_before = $source_dbh->selectall_arrayref("SELECT * FROM test.table_1 ORDER BY a");
$output = `$cmd --optimize --dry-run --purge --where 1=1 --source D=test,t=table_1,F=$cnf 2>&1`;
my $rows_after = $master_dbh->selectall_arrayref("SELECT * FROM test.table_1 ORDER BY a");
my $rows_after = $source_dbh->selectall_arrayref("SELECT * FROM test.table_1 ORDER BY a");
is_deeply(
$rows_after,
@@ -51,6 +51,6 @@ is_deeply(
# #############################################################################
# Done.
# #############################################################################
$sb->wipe_clean($master_dbh);
$sb->wipe_clean($source_dbh);
ok($sb->ok(), "Sandbox servers") or BAIL_OUT(__FILE__ . " broke the sandbox");
done_testing;

View File

@@ -17,10 +17,10 @@ require "$trunk/bin/pt-archiver";
my $dp = new DSNParser(opts=>$dsn_opts);
my $sb = new Sandbox(basedir => '/tmp', DSNParser => $dp);
my $dbh = $sb->get_dbh_for('master');
my $dbh = $sb->get_dbh_for('source');
if ( !$dbh ) {
plan skip_all => 'Cannot connect to sandbox master';
plan skip_all => 'Cannot connect to sandbox source';
}
else {
plan tests => 14;
@@ -34,7 +34,7 @@ my $cmd = "$trunk/bin/pt-archiver";
$sb->create_dbs($dbh, ['test']);
# Test --bulk-delete deletes in chunks
$sb->load_file('master', 't/pt-archiver/samples/table5.sql');
$sb->load_file('source', 't/pt-archiver/samples/table5.sql');
$output = `perl -I $trunk/t/pt-archiver/samples $cmd --plugin Plugin7 --no-ascend --limit 50 --bulk-delete --purge --where 1=1 --source D=test,t=table_5,F=$cnf --statistics 2>&1`;
like($output, qr/SELECT 105/, 'Fetched 105 rows');
like($output, qr/DELETE 105/, 'Deleted 105 rows');
@@ -54,7 +54,7 @@ like($output, qr/\(1=1\)/, 'WHERE clause is jailed');
unlike($output, qr/[^(]1=1/, 'WHERE clause is jailed');
# Test --bulk-delete works ok with a destination table
$sb->load_file('master', 't/pt-archiver/samples/table5.sql');
$sb->load_file('source', 't/pt-archiver/samples/table5.sql');
$output = output(
sub { pt_archiver::main(qw(--no-ascend --limit 50 --bulk-delete --where 1=1), "--source", "D=test,t=table_5,F=$cnf", qw(--statistics --dest t=table_5_dest)) },
);

View File

@@ -19,10 +19,10 @@ require "$trunk/bin/pt-archiver";
my $dp = new DSNParser(opts=>$dsn_opts);
my $sb = new Sandbox(basedir => '/tmp', DSNParser => $dp);
my $dbh = $sb->get_dbh_for('master');
my $dbh = $sb->get_dbh_for('source');
if ( !$dbh ) {
plan skip_all => 'Cannot connect to sandbox master';
plan skip_all => 'Cannot connect to sandbox source';
}
my $output;
@@ -34,7 +34,7 @@ $sb->wipe_clean($dbh);
$sb->create_dbs($dbh, ['test']);
# Test --bulk-insert
$sb->load_file('master', 't/pt-archiver/samples/table5.sql');
$sb->load_file('source', 't/pt-archiver/samples/table5.sql');
$dbh->do('INSERT INTO `test`.`table_5_copy` SELECT * FROM `test`.`table_5`');
$output = output(
@@ -60,7 +60,7 @@ like($output, qr/copy\s+$chks/, 'copy checksum');
# ############################################################################
# Issue 1260: mk-archiver --bulk-insert data loss
# ############################################################################
$sb->load_file('master', 't/pt-archiver/samples/bulk_regular_insert.sql');
$sb->load_file('source', 't/pt-archiver/samples/bulk_regular_insert.sql');
my $orig_rows = $dbh->selectall_arrayref('select id from bri.t order by id');
my $lt_8 = [ grep { $_->[0] < 8 } @$orig_rows ];
my $ge_8 = [ grep { $_->[0] >= 8 } @$orig_rows ];
@@ -97,9 +97,9 @@ if( Test::Builder->VERSION < 2 ) {
}
# >"
for my $char ( "\N{KATAKANA LETTER NI}", "\N{U+DF}" ) {
my $utf8_dbh = $sb->get_dbh_for('master', { mysql_enable_utf8 => 1, AutoCommit => 1 });
my $utf8_dbh = $sb->get_dbh_for('source', { mysql_enable_utf8 => 1, AutoCommit => 1 });
$sb->load_file('master', 't/pt-archiver/samples/bug_1127450.sql');
$sb->load_file('source', 't/pt-archiver/samples/bug_1127450.sql');
my $sql = qq{INSERT INTO `bug_1127450`.`original` VALUES (1, ?)};
$utf8_dbh->prepare($sql)->execute($char);
@@ -144,7 +144,7 @@ for my $char ( "\N{KATAKANA LETTER NI}", "\N{U+DF}" ) {
# PT-2123: pt-archiver gives error "Wide character in print at
# /usr/bin/pt-archiver line 6815" when using --bulk-insert
# #############################################################################
$sb->load_file('master', 't/pt-archiver/samples/pt-2123.sql');
$sb->load_file('source', 't/pt-archiver/samples/pt-2123.sql');
$dbh->do('set names "utf8mb4"');
my $original_rows = $dbh->selectall_arrayref('select col2 from pt_2123.t1 where col1=5');

View File

@@ -17,10 +17,10 @@ require "$trunk/bin/pt-archiver";
my $dp = new DSNParser(opts=>$dsn_opts);
my $sb = new Sandbox(basedir => '/tmp', DSNParser => $dp);
my $dbh = $sb->get_dbh_for('master');
my $dbh = $sb->get_dbh_for('source');
if ( !$dbh ) {
plan skip_all => 'Cannot connect to sandbox master';
plan skip_all => 'Cannot connect to sandbox source';
}
my $output;
@@ -32,7 +32,7 @@ my $cmd = "perl -I $trunk/t/pt-archiver/samples $trunk/bin/pt-archiver";
# First run without the plugin to get a reference for how the tables should
# be after a normal bulk insert run.
# #############################################################################
$sb->load_file('master', "t/pt-archiver/samples/bulk_regular_insert.sql");
$sb->load_file('source', "t/pt-archiver/samples/bulk_regular_insert.sql");
$dbh->do('use bri');
output(
@@ -67,7 +67,7 @@ is_deeply(
# #############################################################################
# Do it again with the plugin. The tables should be identical.
# #############################################################################
$sb->load_file('master', "t/pt-archiver/samples/bulk_regular_insert.sql");
$sb->load_file('source', "t/pt-archiver/samples/bulk_regular_insert.sql");
$dbh->do('use bri');
`$cmd --source F=$cnf,D=bri,t=t,L=1 --dest t=t_arch,m=bulk_regular_insert --where "1=1" --bulk-insert --limit 3`;

View File

@@ -17,20 +17,20 @@ require "$trunk/bin/pt-archiver";
my $dp = new DSNParser(opts=>$dsn_opts);
my $sb = new Sandbox(basedir => '/tmp', DSNParser => $dp);
my $dbh = $sb->get_dbh_for('master');
my $dbh2 = $sb->get_dbh_for('slave1');
my $dbh = $sb->get_dbh_for('source');
my $dbh2 = $sb->get_dbh_for('replica1');
if ( !$dbh ) {
plan skip_all => 'Cannot connect to sandbox master';
plan skip_all => 'Cannot connect to sandbox source';
}
elsif ( !$dbh2 ) {
plan skip_all => 'Cannot connect to sandbox slave';
plan skip_all => 'Cannot connect to sandbox replica';
}
elsif ( $sb->is_cluster_mode ) {
plan skip_all => 'Not for PXC',
}
elsif ( $sandbox_version ge '5.6' ) {
plan skip_all => 'Slave trick does not work on MySQL 5.6+';
plan skip_all => 'Replica trick does not work on MySQL 5.6+';
}
my $output;
@@ -39,10 +39,10 @@ my $cnf = "/tmp/12345/my.sandbox.cnf";
my $cmd = "$trunk/bin/pt-archiver";
# #############################################################################
# Issue 758: Make mk-archiver wait for a slave
# Issue 758: Make mk-archiver wait for a replica
# #############################################################################
$sb->load_file('master', 't/pt-archiver/samples/issue_758.sql');
$sb->load_file('source', 't/pt-archiver/samples/issue_758.sql');
is_deeply(
$dbh->selectall_arrayref('select * from issue_758.t'),
@@ -50,21 +50,20 @@ is_deeply(
'Table not purged yet (issue 758)'
);
# Once this goes through repl, the slave will sleep causing
# seconds behind master to increase > 0.
# Once this goes through repl, the replica will sleep causing
# seconds behind source to increase > 0.
system('/tmp/12345/use -e "insert into issue_758.t select sleep(3)"');
# Slave seems to be lagging now so the first row should get purged
# Replica seems to be lagging now so the first row should get purged
# immediately, then the script should wait about 2 seconds until
# slave lag is gone.
#system("$cmd --source F=$cnf,D=issue_758,t=t --purge --where 'i>0' --check-slave-lag h=127.1,P=12346,u=msandbox,p=msandbox >/dev/null 2>&1 &");
system("$cmd --source F=$cnf,D=issue_758,t=t --purge --where 'i>0' --check-slave-lag h=127.1,P=12346,u=msandbox,p=msandbox &");
# replica lag is gone.
system("$cmd --source F=$cnf,D=issue_758,t=t --purge --where 'i>0' --check-${replica_name}-lag h=127.1,P=12346,u=msandbox,p=msandbox &");
sleep 1;
is_deeply(
$dbh2->selectall_arrayref('select * from issue_758.t'),
[[1],[2]],
'No changes on slave yet (issue 758)'
'No changes on replica yet (issue 758)'
);
is_deeply(
@@ -73,7 +72,7 @@ is_deeply(
'First row purged (issue 758)'
);
# The script is waiting for slave lag so no more rows should be purged yet.
# The script is waiting for replica lag so no more rows should be purged yet.
sleep 1;
is_deeply(
$dbh->selectall_arrayref('select * from issue_758.t'),
@@ -81,19 +80,19 @@ is_deeply(
'Still only first row purged (issue 758)'
);
# After this sleep the slave should have executed the INSERT SELECT,
# After this sleep the replica should have executed the INSERT SELECT,
# which returns 0, and the 2 purge/delete statements from above.
sleep 3;
is_deeply(
$dbh->selectall_arrayref('select * from issue_758.t'),
[[0]],
'Final table state on master (issue 758)'
'Final table state on source (issue 758)'
);
is_deeply(
$dbh2->selectall_arrayref('select * from issue_758.t'),
[[0]],
'Final table state on slave (issue 758)'
'Final table state on replica (issue 758)'
);
# #############################################################################

View File

@@ -17,11 +17,11 @@ require "$trunk/bin/pt-archiver";
my $dp = new DSNParser(opts=>$dsn_opts);
my $sb = new Sandbox(basedir => '/tmp', DSNParser => $dp);
my $dbh = $sb->get_dbh_for('master');
my $slave_dbh = $sb->get_dbh_for('slave1');
my $dbh = $sb->get_dbh_for('source');
my $replica_dbh = $sb->get_dbh_for('replica1');
if ( !$dbh ) {
plan skip_all => 'Cannot connect to sandbox master';
plan skip_all => 'Cannot connect to sandbox source';
}
else {
plan tests => 4;
@@ -35,7 +35,7 @@ my $cmd = "perl -I $trunk/t/pt-archiver/samples $trunk/bin/pt-archiver";
# ###########################################################################
# Bulk delete with limit that results in 2 chunks.
# ###########################################################################
$sb->load_file('master', "t/pt-archiver/samples/compact_col_vals.sql");
$sb->load_file('source', "t/pt-archiver/samples/compact_col_vals.sql");
$dbh->do('use cai');
is_deeply(

View File

@@ -17,11 +17,11 @@ require "$trunk/bin/pt-archiver";
my $dp = new DSNParser(opts=>$dsn_opts);
my $sb = new Sandbox(basedir => '/tmp', DSNParser => $dp);
my $dbh = $sb->get_dbh_for('master');
my $slave_dbh = $sb->get_dbh_for('slave1');
my $dbh = $sb->get_dbh_for('source');
my $replica_dbh = $sb->get_dbh_for('replica1');
if ( !$dbh ) {
plan skip_all => 'Cannot connect to sandbox master';
plan skip_all => 'Cannot connect to sandbox source';
}
else {
plan tests => 19;
@@ -35,7 +35,7 @@ my $cmd = "perl -I $trunk/t/pt-archiver/samples $trunk/bin/pt-archiver";
# ###########################################################################
# Bulk delete with limit that results in 2 chunks.
# ###########################################################################
$sb->load_file('master', "t/pt-archiver/samples/delete_more.sql");
$sb->load_file('source', "t/pt-archiver/samples/delete_more.sql");
$dbh->do('use dm');
#1
@@ -97,11 +97,11 @@ is_deeply(
);
SKIP: {
skip 'Cannot connect to slave sandbox', 6 unless $slave_dbh;
$slave_dbh->do('use dm');
skip 'Cannot connect to replica sandbox', 6 unless $replica_dbh;
$replica_dbh->do('use dm');
#5
is_deeply(
$slave_dbh->selectall_arrayref('select * from `main_table-123` order by id'),
$replica_dbh->selectall_arrayref('select * from `main_table-123` order by id'),
[
[1, '2010-02-16', 'a'],
[2, '2010-02-15', 'b'],
@@ -109,11 +109,11 @@ SKIP: {
[4, '2010-02-16', 'd'],
[5, '2010-02-14', 'e'],
],
'Slave main_table-123 not changed'
'Replica main_table-123 not changed'
);
#6
is_deeply(
$slave_dbh->selectall_arrayref('select * from `other_table-123` order by id'),
$replica_dbh->selectall_arrayref('select * from `other_table-123` order by id'),
[
[1, 'a'],
[2, 'b'],
@@ -124,15 +124,15 @@ SKIP: {
[5, 'e'],
[6, 'ot1'],
],
'Slave other_table-123 not changed'
'Replica other_table-123 not changed'
);
# Run it again without DSN b so changes should be made on slave.
$sb->load_file('master', "t/pt-archiver/samples/delete_more.sql");
# Run it again without DSN b so changes should be made on replica.
$sb->load_file('source', "t/pt-archiver/samples/delete_more.sql");
#7
is_deeply(
$slave_dbh->selectall_arrayref('select * from `main_table-123` order by id'),
$replica_dbh->selectall_arrayref('select * from `main_table-123` order by id'),
[
[1, '2010-02-16', 'a'],
[2, '2010-02-15', 'b'],
@@ -140,11 +140,11 @@ SKIP: {
[4, '2010-02-16', 'd'],
[5, '2010-02-14', 'e'],
],
'Reset slave main_table-123'
'Reset replica main_table-123'
);
#8
is_deeply(
$slave_dbh->selectall_arrayref('select * from `other_table-123` order by id'),
$replica_dbh->selectall_arrayref('select * from `other_table-123` order by id'),
[
[1, 'a'],
[2, 'b'],
@@ -155,14 +155,14 @@ SKIP: {
[5, 'e'],
[6, 'ot1'],
],
'Reset slave other_table-123'
'Reset replica other_table-123'
);
`$cmd --purge --primary-key-only --source F=$cnf,D=dm,t=main_table-123,i=pub_date,m=delete_more --where "pub_date < '2010-02-16'" --bulk-delete --limit 2`;
sleep 1;
#9
is_deeply(
$slave_dbh->selectall_arrayref('select * from `main_table-123` order by id'),
$replica_dbh->selectall_arrayref('select * from `main_table-123` order by id'),
[
[1, '2010-02-16', 'a'],
# [2, '2010-02-15', 'b'],
@@ -170,26 +170,26 @@ SKIP: {
[4, '2010-02-16', 'd'],
[5, '2010-02-14', 'e'],
],
'Slave main_table-123 changed'
'Replica main_table-123 changed'
);
#10
is_deeply(
$slave_dbh->selectall_arrayref('select * from `other_table-123` order by id'),
$replica_dbh->selectall_arrayref('select * from `other_table-123` order by id'),
[
[1, 'a'],
[4, 'd'],
[5, 'e'],
[6, 'ot1'],
],
'Slave other_table-123 changed'
'Replica other_table-123 changed'
);
}
# ###########################################################################
# Bulk delete in single chunk.
# ###########################################################################
$sb->load_file('master', "t/pt-archiver/samples/delete_more.sql");
$sb->load_file('source', "t/pt-archiver/samples/delete_more.sql");
$dbh->do('use dm');
#11
is_deeply(
@@ -247,7 +247,7 @@ is_deeply(
# ###########################################################################
# Single delete.
# ###########################################################################
$sb->load_file('master', "t/pt-archiver/samples/delete_more.sql");
$sb->load_file('source', "t/pt-archiver/samples/delete_more.sql");
$dbh->do('use dm');
#15
is_deeply(

View File

@@ -18,10 +18,10 @@ require "$trunk/bin/pt-archiver";
my $dp = new DSNParser(opts=>$dsn_opts);
my $sb = new Sandbox(basedir => '/tmp', DSNParser => $dp);
my $dbh = $sb->get_dbh_for('master');
my $dbh = $sb->get_dbh_for('source');
if ( !$dbh ) {
plan skip_all => 'Cannot connect to sandbox master';
plan skip_all => 'Cannot connect to sandbox source';
}
my $output;
@@ -31,7 +31,7 @@ my $cmd = "$trunk/bin/pt-archiver";
# Make sure load works.
$sb->create_dbs($dbh, ['test']);
$sb->load_file('master', 't/pt-archiver/samples/tables1-4.sql');
$sb->load_file('source', 't/pt-archiver/samples/tables1-4.sql');
# Archive to another table.
$output = output(
@@ -44,7 +44,7 @@ $output = `/tmp/12345/use -N -e "select count(*) from test.table_2"`;
is($output + 0, 4, 'Found rows in new table OK when archiving to another table');
# Archive only some columns to another table.
$sb->load_file('master', 't/pt-archiver/samples/tables1-4.sql');
$sb->load_file('source', 't/pt-archiver/samples/tables1-4.sql');
$output = output(
sub { pt_archiver::main("-c", "b,c", qw(--where 1=1), "--source", "D=test,t=table_1,F=$cnf", qw(--dest t=table_2)) },
);
@@ -55,7 +55,7 @@ ok(scalar @$rows == 0, 'Purged all rows ok');
# and after the archive operation and I am convinced that the original
# expected output was incorrect.
my ($sql, $expect_rows);
if ( $sb->is_cluster_node('master') ) {
if ( $sb->is_cluster_node('source') ) {
# PXC nodes have auto-inc offsets, so rather than see what they are
# and account for them, we just don't select the auto-inc col, a.
# This test is really about b, c, and d anyway.
@@ -84,7 +84,7 @@ is_deeply(
'Found rows in new table OK when archiving only some columns to another table') or diag(Dumper($rows));
# Archive to another table with autocommit
$sb->load_file('master', 't/pt-archiver/samples/tables1-4.sql');
$sb->load_file('source', 't/pt-archiver/samples/tables1-4.sql');
$output = output(
sub { pt_archiver::main(qw(--where 1=1 --txn-size 0), "--source", "D=test,t=table_1,F=$cnf", qw(--dest t=table_2)) },
);
@@ -95,7 +95,7 @@ $output = `/tmp/12345/use -N -e "select count(*) from test.table_2"`;
is($output + 0, 4, 'Found rows in new table OK when archiving to another table with autocommit');
# Archive to another table with commit every 2 rows
$sb->load_file('master', 't/pt-archiver/samples/tables1-4.sql');
$sb->load_file('source', 't/pt-archiver/samples/tables1-4.sql');
$output = output(
sub { pt_archiver::main(qw(--where 1=1 --txn-size 2), "--source", "D=test,t=table_1,F=$cnf", qw(--dest t=table_2)) },
);
@@ -106,7 +106,7 @@ $output = `/tmp/12345/use -N -e "select count(*) from test.table_2"`;
is($output + 0, 4, 'Found rows in new table OK when archiving to another table with commit every 2 rows');
# Test that table with many rows can be archived to table with few
$sb->load_file('master', 't/pt-archiver/samples/tables1-4.sql');
$sb->load_file('source', 't/pt-archiver/samples/tables1-4.sql');
$output = output(
sub { pt_archiver::main(qw(--where 1=1 --dest t=table_4 --no-check-columns), "--source", "D=test,t=table_1,F=$cnf") },
);

View File

@@ -17,10 +17,10 @@ require "$trunk/bin/pt-archiver";
my $dp = new DSNParser(opts=>$dsn_opts);
my $sb = new Sandbox(basedir => '/tmp', DSNParser => $dp);
my $dbh = $sb->get_dbh_for('master');
my $dbh = $sb->get_dbh_for('source');
if ( !$dbh ) {
plan skip_all => 'Cannot connect to sandbox master';
plan skip_all => 'Cannot connect to sandbox source';
}
my $output;
@@ -29,7 +29,7 @@ my $cnf = "/tmp/12345/my.sandbox.cnf";
my $cmd = "$trunk/bin/pt-archiver";
$sb->create_dbs($dbh, ['test']);
$sb->load_file('master', 't/pt-archiver/samples/table1.sql');
$sb->load_file('source', 't/pt-archiver/samples/table1.sql');
# Archive to a file.
`rm -f archive.test.table_1`;
@@ -51,7 +51,7 @@ EOF
`rm -f archive.test.table_1`;
# Archive to a file, but specify only some columns.
$sb->load_file('master', 't/pt-archiver/samples/table1.sql');
$sb->load_file('source', 't/pt-archiver/samples/table1.sql');
`rm -f archive.test.table_1`;
$output = output(
sub { pt_archiver::main("-c", "b,c", qw(--where 1=1 --header), "--source", "D=test,t=table_1,F=$cnf", "--file", 'archive.%D.%t') },
@@ -74,7 +74,7 @@ EOF
sub test_charset {
my ($charset) = @_;
$sb->load_file('master', 't/pt-archiver/samples/table1.sql');
$sb->load_file('source', 't/pt-archiver/samples/table1.sql');
local $@;
my ($out, $exit_val) = full_output( sub {
pt_archiver::main("-c", "b,c", qw(--where 1=1 --header),
@@ -98,7 +98,7 @@ for my $charset (qw(latin1 utf8 UTF8 )) {
my $warning;
local $SIG{__WARN__} = sub { $warning .= shift };
my ($out) = full_output( sub {
$sb->load_file('master', 't/pt-archiver/samples/table1.sql');
$sb->load_file('source', 't/pt-archiver/samples/table1.sql');
pt_archiver::main("-c", "b,c", qw(--where 1=1 --header),
"--source", "D=test,t=table_1,F=$cnf",
'--file', '/tmp/%Y-%m-%d-%D_%H:%i:%s.%t',
@@ -116,7 +116,7 @@ like(
local $SIG{__WARN__} = undef;
$sb->load_file('master', 't/pt-archiver/samples/table2.sql');
$sb->load_file('source', 't/pt-archiver/samples/table2.sql');
`rm -f archive.test.table_2`;
$output = output(
sub { pt_archiver::main(qw(--where 1=1 --output-format=csv), "--source", "D=test,t=table_2,F=$cnf", "--file", 'archive.%D.%t') },

View File

@@ -17,10 +17,10 @@ require "$trunk/bin/pt-archiver";
my $dp = new DSNParser(opts=>$dsn_opts);
my $sb = new Sandbox(basedir => '/tmp', DSNParser => $dp);
my $dbh = $sb->get_dbh_for('master');
my $dbh = $sb->get_dbh_for('source');
if ( !$dbh ) {
plan skip_all => 'Cannot connect to sandbox master';
plan skip_all => 'Cannot connect to sandbox source';
}
else {
plan tests => 3;
@@ -34,7 +34,7 @@ my $cmd = "$trunk/bin/pt-archiver";
# ###########################################################################
# Test the custom plugin gt_n.
# ###########################################################################
$sb->load_file('master', 't/pt-archiver/samples/gt_n.sql');
$sb->load_file('source', 't/pt-archiver/samples/gt_n.sql');
my $sql = 'select status, count(*) from gt_n.t1 group by status';
is_deeply(
$dbh->selectall_arrayref($sql),

View File

@@ -17,10 +17,10 @@ require "$trunk/bin/pt-archiver";
my $dp = new DSNParser(opts=>$dsn_opts);
my $sb = new Sandbox(basedir => '/tmp', DSNParser => $dp);
my $dbh = $sb->get_dbh_for('master');
my $dbh = $sb->get_dbh_for('source');
if ( !$dbh ) {
plan skip_all => 'Cannot connect to sandbox master';
plan skip_all => 'Cannot connect to sandbox source';
}
else {
plan tests => 18;
@@ -35,7 +35,7 @@ $sb->wipe_clean($dbh);
$sb->create_dbs($dbh, ['test']);
# Test ascending index; it should ascend the primary key
$sb->load_file('master', 't/pt-archiver/samples/tables1-4.sql');
$sb->load_file('source', 't/pt-archiver/samples/tables1-4.sql');
$output = `$cmd --dry-run --where 1=1 --source D=test,t=table_3,F=$cnf --purge 2>&1`;
like($output, qr/FORCE INDEX\(`PRIMARY`\)/, 'Uses PRIMARY index');
$output = `$cmd --where 1=1 --source D=test,t=table_3,F=$cnf --purge 2>&1`;
@@ -44,22 +44,22 @@ $output = `/tmp/12345/use -N -e "select count(*) from test.table_3"`;
is($output + 0, 0, 'Ascended key OK');
# Test specifying a wrong index.
$sb->load_file('master', 't/pt-archiver/samples/tables1-4.sql');
$sb->load_file('source', 't/pt-archiver/samples/tables1-4.sql');
$output = `$cmd --where 1=1 --source i=foo,D=test,t=table_3,F=$cnf --purge 2>&1`;
like($output, qr/Index 'foo' does not exist in table/, 'Got bad-index error OK');
# Test specifying a NULLable index.
$sb->load_file('master', 't/pt-archiver/samples/tables1-4.sql');
$sb->load_file('source', 't/pt-archiver/samples/tables1-4.sql');
$output = `$cmd --where 1=1 --source i=b,D=test,t=table_1,F=$cnf --purge 2>&1`;
is($output, "", 'Got no error with a NULLable index');
# Test table without a primary key
$sb->load_file('master', 't/pt-archiver/samples/tables1-4.sql');
$sb->load_file('source', 't/pt-archiver/samples/tables1-4.sql');
$output = `$cmd --where 1=1 --source D=test,t=table_4,F=$cnf --purge 2>&1`;
like($output, qr/Cannot find an ascendable index/, 'Got need-PK-error OK');
# Test ascending index explicitly
$sb->load_file('master', 't/pt-archiver/samples/tables1-4.sql');
$sb->load_file('source', 't/pt-archiver/samples/tables1-4.sql');
$output = `$cmd --where 1=1 --source D=test,t=table_3,F=$cnf,i=PRIMARY --purge 2>&1`;
is($output, '', 'No output for ascending index explicitly');
$output = `/tmp/12345/use -N -e "select count(*) from test.table_3"`;
@@ -67,7 +67,7 @@ is($output + 0, 0, 'Ascended explicit key OK');
# Test that mk-archiver gets column ordinals and such right when building the
# ascending-index queries.
$sb->load_file('master', 't/pt-archiver/samples/table11.sql');
$sb->load_file('source', 't/pt-archiver/samples/table11.sql');
$output = `$cmd --limit 2 --where 1=1 --source D=test,t=table_11,F=$cnf --purge 2>&1`;
is($output, '', 'No output while dealing with out-of-order PK');
$output = `/tmp/12345/use -N -e "select count(*) from test.table_11"`;
@@ -75,14 +75,14 @@ is($output + 0, 0, 'Ascended out-of-order PK OK');
#####################
# Test that ascending index check WHERE clause can't be hijacked
$sb->load_file('master', 't/pt-archiver/samples/table6.sql');
$sb->load_file('source', 't/pt-archiver/samples/table6.sql');
$output = `$cmd --source D=test,t=table_6,F=$cnf --purge --limit 2 --where 'c=1'`;
is($output, '', 'No errors purging table_6');
$output = `/tmp/12345/use -N -e "select count(*) from test.table_6"`;
is($output + 0, 1, 'Did not purge last row');
# Test that ascending index check doesn't leave any holes
$sb->load_file('master', 't/pt-archiver/samples/table5.sql');
$sb->load_file('source', 't/pt-archiver/samples/table5.sql');
$output = `$cmd --source D=test,t=table_5,F=$cnf --purge --limit 50 --where 'a<current_date - interval 1 day' 2>&1`;
is($output, '', 'No errors in larger table');
$output = `/tmp/12345/use -N -e "select count(*) from test.table_5"`;
@@ -91,7 +91,7 @@ is($output + 0, 0, 'Purged completely on multi-column ascending index');
# Make sure ascending index check can be disabled
$output = `$cmd --where 1=1 --dry-run --no-ascend --source D=test,t=table_5,F=$cnf --purge --limit 50 2>&1`;
like ( $output, qr/(^SELECT .*$)\n\1/m, '--no-ascend makes fetch-first and fetch-next identical' );
$sb->load_file('master', 't/pt-archiver/samples/table5.sql');
$sb->load_file('source', 't/pt-archiver/samples/table5.sql');
$output = `$cmd --where 1=1 --no-ascend --source D=test,t=table_5,F=$cnf --purge --limit 1 2>&1`;
is($output, '', "No output when --no-ascend");

View File

@@ -17,13 +17,13 @@ require "$trunk/bin/pt-archiver";
my $dp = new DSNParser(opts=>$dsn_opts);
my $sb = new Sandbox(basedir => '/tmp', DSNParser => $dp);
my $dbh = $sb->get_dbh_for('master');
my $dbh = $sb->get_dbh_for('source');
# This issue/bug seems not to have been reproduced or followed up on.
plan skip_all => "issue 1152";
if ( !$dbh ) {
plan skip_all => 'Cannot connect to sandbox master';
plan skip_all => 'Cannot connect to sandbox source';
}
else {
plan tests => 2;
@@ -33,7 +33,7 @@ my $output;
my $rows;
my $cnf = "/tmp/12345/my.sandbox.cnf";
$sb->load_file('master', 't/pt-archiver/samples/issue_1152.sql');
$sb->load_file('source', 't/pt-archiver/samples/issue_1152.sql');
# #############################################################################
# Issue 1152: mk-archiver columns option resulting in null archived table data

View File

@@ -17,10 +17,10 @@ require "$trunk/bin/pt-archiver";
my $dp = new DSNParser(opts=>$dsn_opts);
my $sb = new Sandbox(basedir => '/tmp', DSNParser => $dp);
my $dbh = $sb->get_dbh_for('master');
my $dbh = $sb->get_dbh_for('source');
if ( !$dbh ) {
plan skip_all => 'Cannot connect to sandbox master';
plan skip_all => 'Cannot connect to sandbox source';
}
else {
plan tests => 4;
@@ -35,8 +35,8 @@ $sb->create_dbs($dbh, ['test']);
# #############################################################################
# Issue 1166: Don't LIMIT 1 for unique indexes
# #############################################################################
$sb->load_file('master', 't/pt-archiver/samples/issue_131.sql');
$sb->load_file('master', 't/pt-archiver/samples/issue_1166.sql');
$sb->load_file('source', 't/pt-archiver/samples/issue_131.sql');
$sb->load_file('source', 't/pt-archiver/samples/issue_1166.sql');
$output = output(
sub { pt_archiver::main(qw(--where 1=1 --dry-run --source),
@@ -65,7 +65,7 @@ like(
# This issue is related:
# Issue 1170: Allow bulk delete without LIMIT
# #############################################################################
$sb->load_file('master', 't/pt-archiver/samples/issue_131.sql');
$sb->load_file('source', 't/pt-archiver/samples/issue_131.sql');
$output = output(
sub { pt_archiver::main(qw(--where 1=1 --dry-run --source),

View File

@@ -18,10 +18,10 @@ require "$trunk/bin/pt-archiver";
my $dp = new DSNParser(opts=>$dsn_opts);
my $sb = new Sandbox(basedir => '/tmp', DSNParser => $dp);
my $dbh = $sb->get_dbh_for('master');
my $dbh = $sb->get_dbh_for('source');
if ( !$dbh ) {
plan skip_all => 'Cannot connect to sandbox master';
plan skip_all => 'Cannot connect to sandbox source';
}
elsif ($sandbox_version gt '5.7') {
plan tests => 4;
@@ -36,7 +36,7 @@ my $archived_rows;
# #############################################################################
# Issue 1152: mk-archiver columns option resulting in null archived table data
# #############################################################################
$sb->load_file('master', 't/pt-archiver/samples/issue_1225.sql');
$sb->load_file('source', 't/pt-archiver/samples/issue_1225.sql');
$dbh->do('set names "utf8"');
my $original_rows = $dbh->selectall_arrayref('select c from issue_1225.t limit 2');
@@ -77,7 +77,7 @@ ok(
);
}
$sb->load_file('master', 't/pt-archiver/samples/issue_1225.sql');
$sb->load_file('source', 't/pt-archiver/samples/issue_1225.sql');
$output = output(
sub { pt_archiver::main(

View File

@@ -20,10 +20,10 @@ require "$trunk/bin/pt-archiver";
my $dp = new DSNParser(opts=>$dsn_opts);
my $sb = new Sandbox(basedir => '/tmp', DSNParser => $dp);
my $dbh = $sb->get_dbh_for('master');
my $dbh = $sb->get_dbh_for('source');
if ( !$dbh ) {
plan skip_all => 'Cannot connect to sandbox master';
plan skip_all => 'Cannot connect to sandbox source';
}
elsif ( $DBD::mysql::VERSION lt '4' ) {
plan skip_all => "DBD::mysql version $DBD::mysql::VERSION has utf8 bugs. "
@@ -39,7 +39,7 @@ my $file = "/tmp/pt-archiver-file.txt";
# Issue 1229: mk-archiver not creating UTF8 compatible file handles for
# archive to file
# #############################################################################
$sb->load_file('master', 't/pt-archiver/samples/issue_1225.sql');
$sb->load_file('source', 't/pt-archiver/samples/issue_1225.sql');
$dbh->do('set names "utf8"');
my $original_rows = $dbh->selectall_arrayref('select c from issue_1225.t where i in (1, 2)');

View File

@@ -17,10 +17,10 @@ require "$trunk/bin/pt-archiver";
my $dp = new DSNParser(opts=>$dsn_opts);
my $sb = new Sandbox(basedir => '/tmp', DSNParser => $dp);
my $dbh = $sb->get_dbh_for('master');
my $dbh = $sb->get_dbh_for('source');
if ( !$dbh ) {
plan skip_all => 'Cannot connect to sandbox master';
plan skip_all => 'Cannot connect to sandbox source';
}
else {
plan tests => 2;
@@ -36,7 +36,7 @@ $sb->create_dbs($dbh, ['test']);
# Issue 131: mk-archiver fails to insert records if destination table columns
# in different order than source table
# #############################################################################
$sb->load_file('master', 't/pt-archiver/samples/issue_131.sql');
$sb->load_file('source', 't/pt-archiver/samples/issue_131.sql');
$output = output(
sub { pt_archiver::main(qw(--where 1=1), "--source", "F=$cnf,D=test,t=issue_131_src", qw(--statistics --dest t=issue_131_dst)) },
);

View File

@@ -17,10 +17,10 @@ require "$trunk/bin/pt-archiver";
my $dp = new DSNParser(opts=>$dsn_opts);
my $sb = new Sandbox(basedir => '/tmp', DSNParser => $dp);
my $dbh = $sb->get_dbh_for('master');
my $dbh = $sb->get_dbh_for('source');
if ( !$dbh ) {
plan skip_all => 'Cannot connect to sandbox master';
plan skip_all => 'Cannot connect to sandbox source';
}
else {
plan tests => 3;
@@ -31,7 +31,7 @@ my $output;
# #############################################################################
# Issue 1152: mk-archiver columns option resulting in null archived table data
# #############################################################################
$sb->load_file('master', 't/pt-archiver/samples/issue_1593265.sql');
$sb->load_file('source', 't/pt-archiver/samples/issue_1593265.sql');
$dbh->do('set names "utf8"');

View File

@@ -17,10 +17,10 @@ require "$trunk/bin/pt-archiver";
my $dp = new DSNParser(opts=>$dsn_opts);
my $sb = new Sandbox(basedir => '/tmp', DSNParser => $dp);
my $dbh = $sb->get_dbh_for('master');
my $dbh = $sb->get_dbh_for('source');
if ( !$dbh ) {
plan skip_all => 'Cannot connect to sandbox master';
plan skip_all => 'Cannot connect to sandbox source';
}
else {
plan tests => 2;
@@ -35,7 +35,7 @@ $sb->create_dbs($dbh, ['test']);
# #############################################################################
# Issue 524: mk-archiver --no-delete --dry-run prints out DELETE statement
# #############################################################################
$sb->load_file('master', 't/pt-archiver/samples/issue_131.sql');
$sb->load_file('source', 't/pt-archiver/samples/issue_131.sql');
$output = output(
sub { pt_archiver::main(qw(--where 1=1 --dry-run --no-delete), "--source", "F=$cnf,D=test,t=issue_131_src", qw(--dest t=issue_131_dst)) },
);

View File

@@ -17,10 +17,10 @@ require "$trunk/bin/pt-archiver";
my $dp = new DSNParser(opts=>$dsn_opts);
my $sb = new Sandbox(basedir => '/tmp', DSNParser => $dp);
my $dbh = $sb->get_dbh_for('master');
my $dbh = $sb->get_dbh_for('source');
if ( !$dbh ) {
plan skip_all => 'Cannot connect to sandbox master';
plan skip_all => 'Cannot connect to sandbox source';
}
else {
plan tests => 3;

View File

@@ -17,10 +17,10 @@ require "$trunk/bin/pt-archiver";
my $dp = new DSNParser(opts=>$dsn_opts);
my $sb = new Sandbox(basedir => '/tmp', DSNParser => $dp);
my $dbh = $sb->get_dbh_for('master');
my $dbh = $sb->get_dbh_for('source');
if ( !$dbh ) {
plan skip_all => 'Cannot connect to sandbox master';
plan skip_all => 'Cannot connect to sandbox source';
}
else {
plan tests => 18;
@@ -35,7 +35,7 @@ my $cmd = "perl -I $trunk/t/pt-archiver/samples $trunk/bin/pt-archiver";
$sb->create_dbs($dbh, ['test']);
# Check plugin that does nothing
$sb->load_file('master', 't/pt-archiver/samples/tables1-4.sql');
$sb->load_file('source', 't/pt-archiver/samples/tables1-4.sql');
$output = `$cmd --where 1=1 --source m=Plugin1,D=test,t=table_1,F=$cnf --dest t=table_2 2>&1`;
is($output, '', 'Loading a blank plugin worked OK');
$output = `/tmp/12345/use -N -e "select count(*) from test.table_1"`;
@@ -43,7 +43,7 @@ is($output + 0, 4, 'Purged no rows ok b/c of blank plugin');
# Test that ascending index check doesn't leave any holes on a unique index when
# there is a plugin that always says rows are archivable
$sb->load_file('master', 't/pt-archiver/samples/table5.sql');
$sb->load_file('source', 't/pt-archiver/samples/table5.sql');
$output = `$cmd --source m=Plugin2,D=test,t=table_5,F=$cnf --purge --limit 50 --where 'a<current_date - interval 1 day' 2>&1`;
is($output, '', 'No errors with strictly ascending index');
$output = `/tmp/12345/use -N -e "select count(*) from test.table_5"`;
@@ -51,7 +51,7 @@ is($output + 0, 0, 'Purged completely with strictly ascending index');
# Check plugin that adds rows to another table (same thing as --dest, but on
# same db handle)
$sb->load_file('master', 't/pt-archiver/samples/tables1-4.sql');
$sb->load_file('source', 't/pt-archiver/samples/tables1-4.sql');
$output = `$cmd --where 1=1 --source m=Plugin3,D=test,t=table_1,F=$cnf --purge 2>&1`;
is($output, '', 'Running with plugin did not die');
$output = `/tmp/12345/use -N -e "select count(*) from test.table_1"`;
@@ -60,7 +60,7 @@ $output = `/tmp/12345/use -N -e "select count(*) from test.table_2"`;
is($output + 0, 4, 'Plugin archived all rows to table_2 OK');
# Check plugin that does ON DUPLICATE KEY UPDATE on insert
$sb->load_file('master', 't/pt-archiver/samples/tables7-9.sql');
$sb->load_file('source', 't/pt-archiver/samples/tables7-9.sql');
$output = `$cmd --where 1=1 --source D=test,t=table_7,F=$cnf --dest m=Plugin4,t=table_8 2>&1`;
is($output, '', 'Loading plugin worked OK');
$output = `/tmp/12345/use -N -e "select count(*) from test.table_7"`;
@@ -73,7 +73,7 @@ $output = `/tmp/12345/use -N -e "select a, b, c from test.table_9"`;
like($output, qr/1\s+3\s+6/, 'ODKU added rows up');
# Check plugin that sets up and archives a temp table
$sb->load_file('master', 't/pt-archiver/samples/table10.sql');
$sb->load_file('source', 't/pt-archiver/samples/table10.sql');
$output = `$cmd --where 1=1 --source m=Plugin5,D=test,t=tmp_table,F=$cnf --dest t=table_10 2>&1`;
is($output, '', 'Loading plugin worked OK');
$output = `/tmp/12345/use -N -e "select count(*) from test.table_10"`;
@@ -81,8 +81,8 @@ is($output + 0, 2, 'Plugin archived all rows to table_10 OK');
# Check plugin that sets up and archives to one or the other table depending
# on even/odd
$sb->load_file('master', 't/pt-archiver/samples/table10.sql');
$sb->load_file('master', 't/pt-archiver/samples/table13.sql');
$sb->load_file('source', 't/pt-archiver/samples/table10.sql');
$sb->load_file('source', 't/pt-archiver/samples/table13.sql');
$output = `$cmd --where 1=1 --source D=test,t=table_13,F=$cnf --dest m=Plugin6,t=table_10 2>&1`;
is($output, '', 'Loading plugin worked OK');
$output = `/tmp/12345/use -N -e "select count(*) from test.table_even"`;

View File

@@ -17,10 +17,10 @@ require "$trunk/bin/pt-archiver";
my $dp = new DSNParser(opts=>$dsn_opts);
my $sb = new Sandbox(basedir => '/tmp', DSNParser => $dp);
my $dbh = $sb->get_dbh_for('master');
my $dbh = $sb->get_dbh_for('source');
if ( !$dbh ) {
plan skip_all => 'Cannot connect to sandbox master';
plan skip_all => 'Cannot connect to sandbox source';
}
else {
plan tests => 3;
@@ -31,7 +31,7 @@ my $output;
# #############################################################################
# Issue 1152: mk-archiver columns option resulting in null archived table data
# #############################################################################
$sb->load_file('master', 't/pt-archiver/samples/pt-143.sql');
$sb->load_file('source', 't/pt-archiver/samples/pt-143.sql');
my $original_rows = $dbh->selectall_arrayref('select * from test.stats_r');
my $exit_status;

View File

@@ -19,31 +19,31 @@ require "$trunk/bin/pt-archiver";
my $dp = new DSNParser(opts=>$dsn_opts);
my $sb = new Sandbox(basedir => '/tmp', DSNParser => $dp);
my $master_dbh = $sb->get_dbh_for('master');
my $slave1_dbh = $sb->get_dbh_for('slave1');
my $source_dbh = $sb->get_dbh_for('source');
my $replica1_dbh = $sb->get_dbh_for('replica1');
if ( !$master_dbh ) {
plan skip_all => 'Cannot connect to sandbox master';
if ( !$source_dbh ) {
plan skip_all => 'Cannot connect to sandbox source';
}
elsif ( !$slave1_dbh ) {
plan skip_all => 'Cannot connect to sandbox slave1';
elsif ( !$replica1_dbh ) {
plan skip_all => 'Cannot connect to sandbox replica1';
}
my $cnf = "/tmp/12345/my.sandbox.cnf";
my $cmd = "$trunk/bin/pt-archiver";
my @args = qw(--where 1=1);
$sb->create_dbs($master_dbh, ['test']);
$sb->load_file('master', 't/pt-archiver/samples/table1.sql');
$sb->wait_for_slaves();
$sb->create_dbs($source_dbh, ['test']);
$sb->load_file('source', 't/pt-archiver/samples/table1.sql');
$sb->wait_for_replicas();
my $old_innodb_lock_wait_timeout = `/tmp/12345/use -ss -e 'select \@\@global.innodb_lock_wait_timeout'`;
chomp $old_innodb_lock_wait_timeout;
$master_dbh->do('set global innodb_lock_wait_timeout=1');
$source_dbh->do('set global innodb_lock_wait_timeout=1');
$master_dbh->do('begin');
$master_dbh->do('select * from test.table_1 for update;');
$source_dbh->do('begin');
$source_dbh->do('select * from test.table_1 for update;');
my ($output, $exit_val) = full_output(sub {pt_archiver::main(@args, '--source', "D=test,t=table_1,F=$cnf", qw(--purge)) });
@@ -62,9 +62,9 @@ unlike(
# #############################################################################
# Done.
# #############################################################################
$master_dbh->do("set global innodb_lock_wait_timeout=$old_innodb_lock_wait_timeout");
$source_dbh->do("set global innodb_lock_wait_timeout=$old_innodb_lock_wait_timeout");
$sb->wipe_clean($master_dbh);
$sb->wipe_clean($source_dbh);
ok($sb->ok(), "Sandbox servers") or BAIL_OUT(__FILE__ . " broke the sandbox");
done_testing;

View File

@@ -19,10 +19,10 @@ require "$trunk/bin/pt-archiver";
my $dp = new DSNParser(opts=>$dsn_opts);
my $sb = new Sandbox(basedir => '/tmp', DSNParser => $dp);
my $dbh = $sb->get_dbh_for('master');
my $dbh = $sb->get_dbh_for('source');
if ( !$dbh ) {
plan skip_all => 'Cannot connect to sandbox master';
plan skip_all => 'Cannot connect to sandbox source';
}
my $output;
@@ -33,7 +33,7 @@ $sb->wipe_clean($dbh);
$sb->create_dbs($dbh, ['test']);
# Test --bulk-insert
$sb->load_file('master', 't/pt-archiver/samples/pt-2083.sql');
$sb->load_file('source', 't/pt-archiver/samples/pt-2083.sql');
$output = output(
sub { pt_archiver::main(qw(--commit-each --where 1=1 --statistics --charset latin1),
@@ -56,7 +56,7 @@ like(
) or diag($copied[0]);
# Test --file
$sb->load_file('master', 't/pt-archiver/samples/pt-2083.sql');
$sb->load_file('source', 't/pt-archiver/samples/pt-2083.sql');
$output = output(
sub { pt_archiver::main(qw(--where 1=1 --statistics --charset latin1),

View File

@@ -17,10 +17,10 @@ require "$trunk/bin/pt-archiver";
my $dp = new DSNParser(opts=>$dsn_opts);
my $sb = new Sandbox(basedir => '/tmp', DSNParser => $dp);
my $dbh = $sb->get_dbh_for('master');
my $dbh = $sb->get_dbh_for('source');
if ( !$dbh ) {
plan skip_all => 'Cannot connect to sandbox master';
plan skip_all => 'Cannot connect to sandbox source';
}
else {
plan tests => 23;
@@ -31,7 +31,7 @@ my $output;
# #############################################################################
# PT-2114: Incorrect casting of BIT columns by pt-archiver
# #############################################################################
$sb->load_file('master', 't/pt-archiver/samples/pt-2114.sql');
$sb->load_file('source', 't/pt-archiver/samples/pt-2114.sql');
my $zero_rows = $dbh->selectall_arrayref('select id, hex(val) from pt_2114.t1 where val = 0');
my $exit_status;
@@ -69,7 +69,7 @@ is (
# #############################################################################
# Reloading dump to perform archiving
# #############################################################################
$sb->load_file('master', 't/pt-archiver/samples/pt-2114.sql');
$sb->load_file('source', 't/pt-archiver/samples/pt-2114.sql');
my $one_rows = $dbh->selectall_arrayref('select id, hex(val) from pt_2114.t1 where val = 1');
@@ -115,7 +115,7 @@ is_deeply(
# #############################################################################
# Reloading dump to perform archiving
# #############################################################################
$sb->load_file('master', 't/pt-archiver/samples/pt-2114.sql');
$sb->load_file('source', 't/pt-archiver/samples/pt-2114.sql');
$output = output(
sub { $exit_status = pt_archiver::main(
@@ -159,7 +159,7 @@ is_deeply(
# #############################################################################
# Reloading dump to perform archiving
# #############################################################################
$sb->load_file('master', 't/pt-archiver/samples/pt-2114.sql');
$sb->load_file('source', 't/pt-archiver/samples/pt-2114.sql');
$output = output(
sub { $exit_status = pt_archiver::main(
@@ -194,7 +194,7 @@ is (
# #############################################################################
# Reloading dump to perform archiving
# #############################################################################
$sb->load_file('master', 't/pt-archiver/samples/pt-2114.sql');
$sb->load_file('source', 't/pt-archiver/samples/pt-2114.sql');
# Archiving into a file
$output = output(
@@ -244,7 +244,7 @@ is (
# Longer BIT values
# Loading dump to perform archiving
# #############################################################################
$sb->load_file('master', 't/pt-archiver/samples/pt-2114-2.sql');
$sb->load_file('source', 't/pt-archiver/samples/pt-2114-2.sql');
my $not_archived_rows = $dbh->selectall_arrayref("select id, hex(val) from pt_2114.t1 where val = b'1111000010'");
$output = output(

View File

@@ -17,10 +17,10 @@ require "$trunk/bin/pt-archiver";
my $dp = new DSNParser(opts=>$dsn_opts);
my $sb = new Sandbox(basedir => '/tmp', DSNParser => $dp);
my $dbh = $sb->get_dbh_for('master');
my $dbh = $sb->get_dbh_for('source');
if ( !$dbh ) {
plan skip_all => 'Cannot connect to sandbox master';
plan skip_all => 'Cannot connect to sandbox source';
}
my $output;
@@ -30,7 +30,7 @@ my $cnf = "/tmp/12345/my.sandbox.cnf";
my $cmd = "$trunk/bin/pt-archiver";
$sb->create_dbs($dbh, ['test']);
$sb->load_file('master', 't/pt-archiver/samples/table1.sql');
$sb->load_file('source', 't/pt-archiver/samples/table1.sql');
# Archive to a file.
`rm -f archive.test.table_1`;

View File

@@ -19,10 +19,10 @@ require "$trunk/bin/pt-archiver";
my $dp = new DSNParser(opts=>$dsn_opts);
my $sb = new Sandbox(basedir => '/tmp', DSNParser => $dp);
my $dbh = $sb->get_dbh_for('master');
my $dbh = $sb->get_dbh_for('source');
if ( !$dbh ) {
plan skip_all => 'Cannot connect to sandbox master';
plan skip_all => 'Cannot connect to sandbox source';
}
my $output;
@@ -33,7 +33,7 @@ $sb->wipe_clean($dbh);
$sb->create_dbs($dbh, ['test']);
# Test --bulk-insert
$sb->load_file('master', 't/pt-archiver/samples/pt-2279.sql');
$sb->load_file('source', 't/pt-archiver/samples/pt-2279.sql');
$output = output(
sub { pt_archiver::main(qw(--limit 50 --bulk-insert),
@@ -49,7 +49,7 @@ unlike(
) or diag($output);
# Test --file
$sb->load_file('master', 't/pt-archiver/samples/pt-2279.sql');
$sb->load_file('source', 't/pt-archiver/samples/pt-2279.sql');
$output = output(
sub { pt_archiver::main(qw(--limit 50),

View File

@@ -17,10 +17,10 @@ require "$trunk/bin/pt-archiver";
my $dp = new DSNParser(opts=>$dsn_opts);
my $sb = new Sandbox(basedir => '/tmp', DSNParser => $dp);
my $dbh = $sb->get_dbh_for('master');
my $dbh = $sb->get_dbh_for('source');
if ( !$dbh ) {
plan skip_all => 'Cannot connect to sandbox master';
plan skip_all => 'Cannot connect to sandbox source';
}
else {
plan tests => 8;
@@ -32,7 +32,7 @@ my $cnf = "/tmp/12345/my.sandbox.cnf";
my $cmd = "$trunk/bin/pt-archiver";
$sb->create_dbs($dbh, ['test']);
$sb->load_file('master', 't/pt-archiver/samples/table1.sql');
$sb->load_file('source', 't/pt-archiver/samples/table1.sql');
# Test basic functionality with defaults
$output = output(
@@ -45,7 +45,7 @@ $output = `/tmp/12345/use -N -e "select count(*) from test.table_1"`;
is($output + 0, 0, 'Purged ok');
# Test basic functionality with --commit-each
$sb->load_file('master', 't/pt-archiver/samples/table1.sql');
$sb->load_file('source', 't/pt-archiver/samples/table1.sql');
$output = output(
sub { pt_archiver::main(qw(--where 1=1), "--source", "D=test,t=table_1,F=$cnf", qw(--commit-each --limit 1 --purge)) },
);
@@ -56,7 +56,7 @@ $output = `/tmp/12345/use -N -e "select count(*) from test.table_1"`;
is($output + 0, 0, 'Purged ok with --commit-each');
# Archive only part of the table
$sb->load_file('master', 't/pt-archiver/samples/table1.sql');
$sb->load_file('source', 't/pt-archiver/samples/table1.sql');
$output = output(
sub { pt_archiver::main(qw(--where 1=1), "--source", "D=test,t=table_1,F=$cnf", qw(--where a<4 --purge)) },
);
@@ -67,7 +67,7 @@ $output = `/tmp/12345/use -N -e "select count(*) from test.table_1"`;
is($output + 0, 1, 'Purged some rows ok');
# Fail if --primary-key-only was specified and there is no PK in the table
$sb->load_file('master', 't/pt-archiver/samples/pt_157.sql');
$sb->load_file('source', 't/pt-archiver/samples/pt_157.sql');
$output = output(
sub { pt_archiver::main(qw(--where 1=1), "--source", "D=test,t=t1,F=$cnf", qw(--purge --primary-key-only)) },
stderr => 1,

View File

@@ -17,10 +17,10 @@ require "$trunk/bin/pt-archiver";
my $dp = new DSNParser(opts=>$dsn_opts);
my $sb = new Sandbox(basedir => '/tmp', DSNParser => $dp);
my $dbh = $sb->get_dbh_for('master');
my $dbh = $sb->get_dbh_for('source');
if ( !$dbh ) {
plan skip_all => 'Cannot connect to sandbox master';
plan skip_all => 'Cannot connect to sandbox source';
}
else {
plan tests => 8;
@@ -35,7 +35,7 @@ my $cmd = "perl -I $trunk/t/pt-archiver/samples $trunk/bin/pt-archiver";
# ###########################################################################
# Test the custom plugin res_fk.
# ###########################################################################
$sb->load_file('master', 't/pt-archiver/samples/res_fk.sql');
$sb->load_file('source', 't/pt-archiver/samples/res_fk.sql');
$sql = 'select * from test.comp';
is_deeply(
$dbh->selectall_arrayref($sql),

View File

@@ -17,10 +17,10 @@ require "$trunk/bin/pt-archiver";
my $dp = new DSNParser(opts=>$dsn_opts);
my $sb = new Sandbox(basedir => '/tmp', DSNParser => $dp);
my $dbh = $sb->get_dbh_for('master');
my $dbh = $sb->get_dbh_for('source');
if ( !$dbh ) {
plan skip_all => 'Cannot connect to sandbox master';
plan skip_all => 'Cannot connect to sandbox source';
}
else {
plan tests => 5;
@@ -34,7 +34,7 @@ my $cmd = "$trunk/bin/pt-archiver";
$sb->create_dbs($dbh, ['test']);
# Safe auto-increment behavior.
$sb->load_file('master', 't/pt-archiver/samples/table12.sql');
$sb->load_file('source', 't/pt-archiver/samples/table12.sql');
$output = output(
sub { pt_archiver::main(qw(--purge --where 1=1), "--source", "D=test,t=table_12,F=$cnf") },
);
@@ -43,7 +43,7 @@ $output = `/tmp/12345/use -N -e "select min(a),count(*) from test.table_12"`;
like($output, qr/^3\t1$/, 'Did not touch the max auto_increment');
# Safe auto-increment behavior, disabled.
$sb->load_file('master', 't/pt-archiver/samples/table12.sql');
$sb->load_file('source', 't/pt-archiver/samples/table12.sql');
$output = output(
sub { pt_archiver::main(qw(--no-safe-auto-increment --purge --where 1=1), "--source", "D=test,t=table_12,F=$cnf") },
);

View File

@@ -17,14 +17,14 @@ require "$trunk/bin/pt-archiver";
my $dp = new DSNParser(opts=>$dsn_opts);
my $sb = new Sandbox(basedir => '/tmp', DSNParser => $dp);
my $dbh = $sb->get_dbh_for('master');
my $dbh2 = $sb->get_dbh_for('slave1');
my $dbh = $sb->get_dbh_for('source');
my $dbh2 = $sb->get_dbh_for('replica1');
if ( !$dbh ) {
plan skip_all => 'Cannot connect to sandbox master';
plan skip_all => 'Cannot connect to sandbox source';
}
elsif ( !$dbh2 ) {
plan skip_all => 'Cannot connect to sandbox slave';
plan skip_all => 'Cannot connect to sandbox replica';
}
my $output;
@@ -47,7 +47,7 @@ ok(
);
# Test with a sentinel file
$sb->load_file('master', 't/pt-archiver/samples/table1.sql');
$sb->load_file('source', 't/pt-archiver/samples/table1.sql');
diag(`touch $sentinel`);
$output = output(
@@ -131,20 +131,20 @@ diag(`rm -f $pid_file`);
# #############################################################################
# This test will achive rows from dbh:test.table_1 to dbh2:test.table_2.
$sb->load_file('master', 't/pt-archiver/samples/tables1-4.sql');
$sb->load_file('source', 't/pt-archiver/samples/tables1-4.sql');
# Change passwords so defaults files won't work.
$sb->do_as_root(
'master',
'source',
q/CREATE USER 'bob'@'%' IDENTIFIED BY 'foo'/,
q/GRANT ALL ON *.* TO 'bob'@'%'/,
);
$dbh2->do('TRUNCATE TABLE test.table_2');
$sb->wait_for_slaves;
$sb->wait_for_replicas;
$output = output(
sub { pt_archiver::main(
'--source', 'h=127.1,P=12345,D=test,t=table_1,u=bob,p=foo',
'--source', 'h=127.1,P=12345,D=test,t=table_1,u=bob,p=foo,s=1',
'--dest', 'P=12346,t=table_2',
qw(--where 1=1))
},
@@ -156,9 +156,9 @@ is(
scalar @$r,
4,
'--dest inherited from --source'
);
) or diag($output);
$sb->do_as_root('master', q/DROP USER 'bob'@'%'/);
$sb->do_as_root('source', q/DROP USER 'bob'@'%'/);
# #############################################################################
# Done.