Remove MySQLDump; move get_create_table() and get_table_status() to TableParser and stop caching results in each function.

Daniel Nichter
2011-09-15 09:39:32 -06:00
parent d5eb621333
commit 756142c1d1
22 changed files with 177 additions and 727 deletions
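
For orientation before the per-file hunks, here is a minimal sketch (not part of the commit) of how a typical call site changes. It is illustrative only: the DSN, credentials, and the 'test'/'issue_94' names are assumptions borrowed from the sandbox and tests below, and the "before" calls refer to the MySQLDump module this commit deletes.

use DBI;
use Quoter;
use TableParser;

# Assumed sandbox connection (port 12345, msandbox credentials, as in the tests).
my $dbh = DBI->connect('DBI:mysql:;host=127.0.0.1;port=12345', 'msandbox', 'msandbox');

my $q  = new Quoter();
my $tp = new TableParser(Quoter => $q);

# Before (MySQLDump, deleted by this commit):
#   my $du  = new MySQLDump();
#   my $ddl = $du->get_create_table($dbh, $q, 'test', 'issue_94');   # arrayref: ['table', $sql]
#   my ($status) = $du->get_table_status($dbh, $q, 'test', 'issue_94');

# After (TableParser owns both methods and its own Quoter):
my $ddl        = $tp->get_create_table($dbh, 'test', 'issue_94');    # plain SHOW CREATE TABLE text
my $tbl_struct = $tp->parse($ddl);
my ($status)   = $tp->get_table_status($dbh, 'test', 'issue_94');    # last arg is a LIKE pattern

Note the behavioral differences the hunks below rely on: get_create_table() now returns the SHOW CREATE TABLE text as a plain string rather than a ['table', $ddl] arrayref, the separate Quoter argument is gone, and neither method caches its results any more.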

View File

@@ -42,7 +42,7 @@ $Data::Dumper::Quotekeys = 0;
sub new {
my ( $class, %args ) = @_;
my @required_args = qw(method base-dir plugins get_id
QueryParser MySQLDump TableParser TableSyncer Quoter);
QueryParser TableParser TableSyncer Quoter);
foreach my $arg ( @required_args ) {
die "I need a $arg argument" unless $args{$arg};
}
@@ -767,7 +767,6 @@ sub add_indexes {
my $qp = $self->{QueryParser};
my $tp = $self->{TableParser};
my $q = $self->{Quoter};
my $du = $self->{MySQLDump};
my @src_tbls = $qp->get_tables($query);
my @keys;
@@ -777,8 +776,11 @@ sub add_indexes {
my $tbl_struct;
eval {
$tbl_struct = $tp->parse(
$du->get_create_table($dsts->[0]->{dbh}, $q, $db, $tbl)
);
$tp->get_create_table(
dbh => $dsts->[0]->{dbh},
db => $db,
tbl => $tbl,
));
};
if ( $EVAL_ERROR ) {
MKDEBUG && _d('Error parsing', $db, '.', $tbl, ':', $EVAL_ERROR);

View File

@@ -1,322 +0,0 @@
# This program is copyright 2007-2011 Baron Schwartz, 2011 Percona Inc.
# Feedback and improvements are welcome.
#
# THIS PROGRAM IS PROVIDED "AS IS" AND WITHOUT ANY EXPRESS OR IMPLIED
# WARRANTIES, INCLUDING, WITHOUT LIMITATION, THE IMPLIED WARRANTIES OF
# MERCHANTIBILITY AND FITNESS FOR A PARTICULAR PURPOSE.
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation, version 2; OR the Perl Artistic License. On UNIX and similar
# systems, you can issue `man perlgpl' or `man perlartistic' to read these
# licenses.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 59 Temple
# Place, Suite 330, Boston, MA 02111-1307 USA.
# ###########################################################################
# MySQLDump package
# ###########################################################################
{
# Package: MySQLDump
# MySQLDump gets CREATE TABLE defs from MySQL.
package MySQLDump;
use strict;
use warnings FATAL => 'all';
use English qw(-no_match_vars);
use constant MKDEBUG => $ENV{MKDEBUG} || 0;
( our $before = <<'EOF') =~ s/^ //gm;
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8 */;
/*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */;
/*!40103 SET TIME_ZONE='+00:00' */;
/*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */;
/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */;
/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;
/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */;
EOF
( our $after = <<'EOF') =~ s/^ //gm;
/*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */;
/*!40101 SET SQL_MODE=@OLD_SQL_MODE */;
/*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */;
/*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
/*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */;
EOF
sub new {
my ( $class, %args ) = @_;
my $self = {
cache => 0, # Afaik no script uses this cache any longer because
# it has caused difficult-to-find bugs in the past.
};
return bless $self, $class;
}
sub dump {
my ( $self, $dbh, $quoter, $db, $tbl, $what ) = @_;
if ( $what eq 'table' ) {
my $ddl = $self->get_create_table($dbh, $quoter, $db, $tbl);
return unless $ddl;
if ( $ddl->[0] eq 'table' ) {
return $before
. 'DROP TABLE IF EXISTS ' . $quoter->quote($tbl) . ";\n"
. $ddl->[1] . ";\n";
}
else {
return 'DROP TABLE IF EXISTS ' . $quoter->quote($tbl) . ";\n"
. '/*!50001 DROP VIEW IF EXISTS '
. $quoter->quote($tbl) . "*/;\n/*!50001 "
. $self->get_tmp_table($dbh, $quoter, $db, $tbl) . "*/;\n";
}
}
elsif ( $what eq 'triggers' ) {
my $trgs = $self->get_triggers($dbh, $quoter, $db, $tbl);
if ( $trgs && @$trgs ) {
my $result = $before . "\nDELIMITER ;;\n";
foreach my $trg ( @$trgs ) {
if ( $trg->{sql_mode} ) {
$result .= qq{/*!50003 SET SESSION SQL_MODE='$trg->{sql_mode}' */;;\n};
}
$result .= "/*!50003 CREATE */ ";
if ( $trg->{definer} ) {
my ( $user, $host )
= map { s/'/''/g; "'$_'"; }
split('@', $trg->{definer}, 2);
$result .= "/*!50017 DEFINER=$user\@$host */ ";
}
$result .= sprintf("/*!50003 TRIGGER %s %s %s ON %s\nFOR EACH ROW %s */;;\n\n",
$quoter->quote($trg->{trigger}),
@{$trg}{qw(timing event)},
$quoter->quote($trg->{table}),
$trg->{statement});
}
$result .= "DELIMITER ;\n\n/*!50003 SET SESSION SQL_MODE=\@OLD_SQL_MODE */;\n\n";
return $result;
}
else {
return undef;
}
}
elsif ( $what eq 'view' ) {
my $ddl = $self->get_create_table($dbh, $quoter, $db, $tbl);
return '/*!50001 DROP TABLE IF EXISTS ' . $quoter->quote($tbl) . "*/;\n"
. '/*!50001 DROP VIEW IF EXISTS ' . $quoter->quote($tbl) . "*/;\n"
. '/*!50001 ' . $ddl->[1] . "*/;\n";
}
else {
die "You didn't say what to dump.";
}
}
# USEs the given database.
sub _use_db {
my ( $self, $dbh, $quoter, $new ) = @_;
if ( !$new ) {
MKDEBUG && _d('No new DB to use');
return;
}
my $sql = 'USE ' . $quoter->quote($new);
MKDEBUG && _d($dbh, $sql);
$dbh->do($sql);
return;
}
sub get_create_table {
my ( $self, $dbh, $quoter, $db, $tbl ) = @_;
if ( !$self->{cache} || !$self->{tables}->{$db}->{$tbl} ) {
my $sql = '/*!40101 SET @OLD_SQL_MODE := @@SQL_MODE, '
. q{@@SQL_MODE := REPLACE(REPLACE(@@SQL_MODE, 'ANSI_QUOTES', ''), ',,', ','), }
. '@OLD_QUOTE := @@SQL_QUOTE_SHOW_CREATE, '
. '@@SQL_QUOTE_SHOW_CREATE := 1 */';
MKDEBUG && _d($sql);
eval { $dbh->do($sql); };
MKDEBUG && $EVAL_ERROR && _d($EVAL_ERROR);
$self->_use_db($dbh, $quoter, $db);
$sql = "SHOW CREATE TABLE " . $quoter->quote($db, $tbl);
MKDEBUG && _d($sql);
my $href;
eval { $href = $dbh->selectrow_hashref($sql); };
if ( $EVAL_ERROR ) {
warn "Failed to $sql. The table may be damaged.\nError: $EVAL_ERROR";
return;
}
$sql = '/*!40101 SET @@SQL_MODE := @OLD_SQL_MODE, '
. '@@SQL_QUOTE_SHOW_CREATE := @OLD_QUOTE */';
MKDEBUG && _d($sql);
$dbh->do($sql);
my ($key) = grep { m/create table/i } keys %$href;
if ( $key ) {
MKDEBUG && _d('This table is a base table');
$self->{tables}->{$db}->{$tbl} = [ 'table', $href->{$key} ];
}
else {
MKDEBUG && _d('This table is a view');
($key) = grep { m/create view/i } keys %$href;
$self->{tables}->{$db}->{$tbl} = [ 'view', $href->{$key} ];
}
}
return $self->{tables}->{$db}->{$tbl};
}
sub get_columns {
my ( $self, $dbh, $quoter, $db, $tbl ) = @_;
MKDEBUG && _d('Get columns for', $db, $tbl);
if ( !$self->{cache} || !$self->{columns}->{$db}->{$tbl} ) {
$self->_use_db($dbh, $quoter, $db);
my $sql = "SHOW COLUMNS FROM " . $quoter->quote($db, $tbl);
MKDEBUG && _d($sql);
my $cols = $dbh->selectall_arrayref($sql, { Slice => {} });
$self->{columns}->{$db}->{$tbl} = [
map {
my %row;
@row{ map { lc $_ } keys %$_ } = values %$_;
\%row;
} @$cols
];
}
return $self->{columns}->{$db}->{$tbl};
}
sub get_tmp_table {
my ( $self, $dbh, $quoter, $db, $tbl ) = @_;
my $result = 'CREATE TABLE ' . $quoter->quote($tbl) . " (\n";
$result .= join(",\n",
map { ' ' . $quoter->quote($_->{field}) . ' ' . $_->{type} }
@{$self->get_columns($dbh, $quoter, $db, $tbl)});
$result .= "\n)";
MKDEBUG && _d($result);
return $result;
}
sub get_triggers {
my ( $self, $dbh, $quoter, $db, $tbl ) = @_;
if ( !$self->{cache} || !$self->{triggers}->{$db} ) {
$self->{triggers}->{$db} = {};
my $sql = '/*!40101 SET @OLD_SQL_MODE := @@SQL_MODE, '
. q{@@SQL_MODE := REPLACE(REPLACE(@@SQL_MODE, 'ANSI_QUOTES', ''), ',,', ','), }
. '@OLD_QUOTE := @@SQL_QUOTE_SHOW_CREATE, '
. '@@SQL_QUOTE_SHOW_CREATE := 1 */';
MKDEBUG && _d($sql);
eval { $dbh->do($sql); };
MKDEBUG && $EVAL_ERROR && _d($EVAL_ERROR);
$sql = "SHOW TRIGGERS FROM " . $quoter->quote($db);
MKDEBUG && _d($sql);
my $sth = $dbh->prepare($sql);
$sth->execute();
if ( $sth->rows ) {
my $trgs = $sth->fetchall_arrayref({});
foreach my $trg (@$trgs) {
# Lowercase the hash keys because the NAME_lc property might be set
# on the $dbh, so the lettercase is unpredictable. This makes them
# predictable.
my %trg;
@trg{ map { lc $_ } keys %$trg } = values %$trg;
push @{ $self->{triggers}->{$db}->{ $trg{table} } }, \%trg;
}
}
$sql = '/*!40101 SET @@SQL_MODE := @OLD_SQL_MODE, '
. '@@SQL_QUOTE_SHOW_CREATE := @OLD_QUOTE */';
MKDEBUG && _d($sql);
$dbh->do($sql);
}
if ( $tbl ) {
return $self->{triggers}->{$db}->{$tbl};
}
return values %{$self->{triggers}->{$db}};
}
sub get_databases {
my ( $self, $dbh, $quoter, $like ) = @_;
if ( !$self->{cache} || !$self->{databases} || $like ) {
my $sql = 'SHOW DATABASES';
my @params;
if ( $like ) {
$sql .= ' LIKE ?';
push @params, $like;
}
my $sth = $dbh->prepare($sql);
MKDEBUG && _d($sql, @params);
$sth->execute( @params );
my @dbs = map { $_->[0] } @{$sth->fetchall_arrayref()};
$self->{databases} = \@dbs unless $like;
return @dbs;
}
return @{$self->{databases}};
}
sub get_table_status {
my ( $self, $dbh, $quoter, $db, $like ) = @_;
if ( !$self->{cache} || !$self->{table_status}->{$db} || $like ) {
my $sql = "SHOW TABLE STATUS FROM " . $quoter->quote($db);
my @params;
if ( $like ) {
$sql .= ' LIKE ?';
push @params, $like;
}
MKDEBUG && _d($sql, @params);
my $sth = $dbh->prepare($sql);
$sth->execute(@params);
my @tables = @{$sth->fetchall_arrayref({})};
@tables = map {
my %tbl; # Make a copy with lowercased keys
@tbl{ map { lc $_ } keys %$_ } = values %$_;
$tbl{engine} ||= $tbl{type} || $tbl{comment};
delete $tbl{type};
\%tbl;
} @tables;
$self->{table_status}->{$db} = \@tables unless $like;
return @tables;
}
return @{$self->{table_status}->{$db}};
}
sub get_table_list {
my ( $self, $dbh, $quoter, $db, $like ) = @_;
if ( !$self->{cache} || !$self->{table_list}->{$db} || $like ) {
my $sql = "SHOW /*!50002 FULL*/ TABLES FROM " . $quoter->quote($db);
my @params;
if ( $like ) {
$sql .= ' LIKE ?';
push @params, $like;
}
MKDEBUG && _d($sql, @params);
my $sth = $dbh->prepare($sql);
$sth->execute(@params);
my @tables = @{$sth->fetchall_arrayref()};
@tables = map {
my %tbl = (
name => $_->[0],
engine => ($_->[1] || '') eq 'VIEW' ? 'VIEW' : '',
);
\%tbl;
} @tables;
$self->{table_list}->{$db} = \@tables unless $like;
return @tables;
}
return @{$self->{table_list}->{$db}};
}
sub _d {
my ($package, undef, $line) = caller 0;
@_ = map { (my $temp = $_) =~ s/\n/\n# /g; $temp; }
map { defined $_ ? $_ : 'undef' }
@_;
print STDERR "# $package:$line $PID ", join(' ', @_), "\n";
}
1;
}
# ###########################################################################
# End MySQLDump package
# ###########################################################################

View File

@@ -58,7 +58,6 @@ my $tbl_name = qr{
#
# Optional Arguments:
# Schema - <Schema> object to initialize while iterating.
# MySQLDump - <MySQLDump> object to get CREATE TABLE when iterating dbh.
# TableParser - <TableParser> object to parse CREATE TABLE for tbl_struct.
# keep_ddl - Keep CREATE TABLE (default false)
#
@@ -172,7 +171,7 @@ sub _make_filters {
# Only filtered schema objects are returned. If iterating dump files
# (i.e. the obj was created with a file_itr arg), then the returned
# schema object will always have a ddl (see below). But if iterating
# a dbh, then you must create the obj with a MySQLDump obj to get a ddl.
# a dbh, then you must create the obj with a TableParser obj to get a ddl.
# If this object was created with a TableParser, then the ddl, if present,
# is parsed, too.
#
@@ -344,8 +343,8 @@ sub _iterate_dbh {
if ( !$engine || $self->engine_is_allowed($engine) ) {
my $ddl;
if ( my $du = $self->{MySQLDump} ) {
$ddl = $du->get_create_table($dbh, $q, $self->{db}, $tbl)->[1];
if ( my $du = $self->{TableParser} ) {
$ddl = $du->get_create_table($dbh, $self->{db}, $tbl);
}
return {

View File

@@ -63,10 +63,10 @@ $Data::Dumper::Quotekeys = 0;
#
# Required Arguments:
# Quoter - <Quoter> object
# MySQLDump - <MySQLDump> object
# TableParser - <TableParser> object
sub new {
my ( $class, %args ) = @_;
foreach my $arg ( qw(Quoter MySQLDump) ) {
foreach my $arg ( qw(Quoter TableParser) ) {
die "I need a $arg argument" unless $args{$arg};
}
@@ -800,7 +800,7 @@ sub size_to_rows {
}
my ($dbh, $db, $tbl, $chunk_size) = @args{@required_args};
my $q = $self->{Quoter};
my $du = $self->{MySQLDump};
my $tp = $self->{TableParser};
my ($n_rows, $avg_row_length);
@@ -819,7 +819,7 @@ sub size_to_rows {
}
if ( $suffix || $args{avg_row_length} ) {
my ($status) = $du->get_table_status($dbh, $q, $db, $tbl);
my ($status) = $tp->get_table_status($dbh, $db, $tbl);
$avg_row_length = $status->{avg_row_length};
if ( !defined $n_rows ) {
$n_rows = $avg_row_length ? ceil($chunk_size / $avg_row_length) : undef;

View File

@@ -23,9 +23,6 @@
#
# Several subs in this module require either a $ddl or $tbl param.
#
# $ddl is the return value from MySQLDump::get_create_table() (which returns
# the output of SHOW CREATE TABLE).
#
# $tbl is the return value from the sub below, parse().
#
# And some subs have an optional $opts param which is a hashref of options.
@@ -54,6 +51,55 @@ sub new {
return bless $self, $class;
}
sub get_create_table {
my ( $self, $dbh, $db, $tbl ) = @_;
die "I need a dbh parameter" unless $dbh;
die "I need a db parameter" unless $db;
die "I need a tbl parameter" unless $tbl;
my $q = $self->{Quoter};
my $sql = '/*!40101 SET @OLD_SQL_MODE := @@SQL_MODE, '
. q{@@SQL_MODE := REPLACE(REPLACE(@@SQL_MODE, 'ANSI_QUOTES', ''), ',,', ','), }
. '@OLD_QUOTE := @@SQL_QUOTE_SHOW_CREATE, '
. '@@SQL_QUOTE_SHOW_CREATE := 1 */';
MKDEBUG && _d($sql);
eval { $dbh->do($sql); };
MKDEBUG && $EVAL_ERROR && _d($EVAL_ERROR);
# Must USE the tbl's db because some bug with SHOW CREATE TABLE on a
# view when the current db isn't the view's db causes MySQL to crash.
$sql = 'USE ' . $q->quote($db);
MKDEBUG && _d($dbh, $sql);
$dbh->do($sql);
$sql = "SHOW CREATE TABLE " . $q->quote($db, $tbl);
MKDEBUG && _d($sql);
my $href;
eval { $href = $dbh->selectrow_hashref($sql); };
if ( $EVAL_ERROR ) {
MKDEBUG && _d($EVAL_ERROR);
return;
}
$sql = '/*!40101 SET @@SQL_MODE := @OLD_SQL_MODE, '
. '@@SQL_QUOTE_SHOW_CREATE := @OLD_QUOTE */';
MKDEBUG && _d($sql);
$dbh->do($sql);
my ($key) = grep { m/create table/i } keys %$href;
if ( $key ) {
MKDEBUG && _d('This table is a base table');
$href->{$key} =~ s/\b[ ]{2,}/ /g;
$href->{$key} .= "\n";
}
else {
MKDEBUG && _d('This table is a view');
($key) = grep { m/create view/i } keys %$href;
}
return $href->{$key};
}
# Sub: parse
# Parse SHOW CREATE TABLE.
#
@@ -62,16 +108,6 @@ sub new {
sub parse {
my ( $self, $ddl, $opts ) = @_;
return unless $ddl;
if ( ref $ddl eq 'ARRAY' ) {
if ( lc $ddl->[0] eq 'table' ) {
$ddl = $ddl->[1];
}
else {
return {
engine => 'VIEW',
};
}
}
if ( $ddl !~ m/CREATE (?:TEMPORARY )?TABLE `/ ) {
die "Cannot parse table definition; is ANSI quoting "
@@ -310,7 +346,7 @@ sub get_engine {
return $engine || undef;
}
# $ddl is a SHOW CREATE TABLE returned from MySQLDumper::get_create_table().
# $ddl is a SHOW CREATE TABLE returned from get_create_table().
# The general format of a key is
# [FOREIGN|UNIQUE|PRIMARY|FULLTEXT|SPATIAL] KEY `name` [USING BTREE|HASH] (`cols`).
# Returns a hashref of keys and their properties and the clustered key (if
@@ -452,52 +488,27 @@ sub remove_auto_increment {
return $ddl;
}
sub remove_secondary_indexes {
my ( $self, $ddl ) = @_;
my $sec_indexes_ddl;
my $tbl_struct = $self->parse($ddl);
if ( ($tbl_struct->{engine} || '') =~ m/InnoDB/i ) {
my $clustered_key = $tbl_struct->{clustered_key};
$clustered_key ||= '';
my @sec_indexes = map {
# Remove key from CREATE TABLE ddl.
my $key_def = $_->{ddl};
# Escape ( ) in the key def so Perl treats them literally.
$key_def =~ s/([\(\)])/\\$1/g;
$ddl =~ s/\s+$key_def//i;
my $key_ddl = "ADD $_->{ddl}";
# Last key in table won't have trailing comma, but since
# we're iterating through a hash the last key may not be
# the last in the list we're creating.
# http://code.google.com/p/maatkit/issues/detail?id=833
$key_ddl .= ',' unless $key_ddl =~ m/,$/;
$key_ddl;
sub get_table_status {
my ( $self, $dbh, $db, $like ) = @_;
my $q = $self->{Quoter};
my $sql = "SHOW TABLE STATUS FROM " . $q->quote($db);
my @params;
if ( $like ) {
$sql .= ' LIKE ?';
push @params, $like;
}
grep { $_->{name} ne $clustered_key }
values %{$tbl_struct->{keys}};
MKDEBUG && _d('Secondary indexes:', Dumper(\@sec_indexes));
if ( @sec_indexes ) {
$sec_indexes_ddl = join(' ', @sec_indexes);
$sec_indexes_ddl =~ s/,$//;
}
# Remove trailing comma on last key. Cases like:
# PK,
# KEY,
# ) ENGINE=...
# will leave a trailing comma on PK.
$ddl =~ s/,(\n\) )/$1/s;
}
else {
MKDEBUG && _d('Not removing secondary indexes from',
$tbl_struct->{engine}, 'table');
}
return $ddl, $sec_indexes_ddl, $tbl_struct;
MKDEBUG && _d($sql, @params);
my $sth = $dbh->prepare($sql);
$sth->execute(@params);
my @tables = @{$sth->fetchall_arrayref({})};
@tables = map {
my %tbl; # Make a copy with lowercased keys
@tbl{ map { lc $_ } keys %$_ } = values %$_;
$tbl{engine} ||= $tbl{type} || $tbl{comment};
delete $tbl{type};
\%tbl;
} @tables;
return @tables;
}
sub _d {

View File

@@ -12,7 +12,6 @@ use English qw(-no_match_vars);
use Test::More;
use Quoter;
use MySQLDump;
use TableParser;
use DSNParser;
use QueryParser;
@@ -54,7 +53,6 @@ Transformers->import(qw(make_checksum));
my $vp = new VersionParser();
my $q = new Quoter();
my $qp = new QueryParser();
my $du = new MySQLDump(cache => 0);
my $tp = new TableParser(Quoter => $q);
my $tc = new TableChecksum(Quoter => $q, VersionParser => $vp);
my $of = new Outfile();
@@ -72,7 +70,6 @@ my %modules = (
TableParser => $tp,
TableSyncer => $ts,
QueryParser => $qp,
MySQLDump => $du,
Outfile => $of,
);

View File

@@ -1,92 +0,0 @@
#!/usr/bin/perl
BEGIN {
die "The PERCONA_TOOLKIT_BRANCH environment variable is not set.\n"
unless $ENV{PERCONA_TOOLKIT_BRANCH} && -d $ENV{PERCONA_TOOLKIT_BRANCH};
unshift @INC, "$ENV{PERCONA_TOOLKIT_BRANCH}/lib";
};
use strict;
use warnings FATAL => 'all';
use English qw(-no_match_vars);
use Test::More;
use MySQLDump;
use Quoter;
use DSNParser;
use Sandbox;
use PerconaTest;
my $dp = new DSNParser(opts=>$dsn_opts);
my $sb = new Sandbox(basedir => '/tmp', DSNParser => $dp);
my $dbh = $sb->get_dbh_for('master');
if ( !$dbh ) {
plan skip_all => "Cannot connect to sandbox master";
}
else {
plan tests => 12;
}
$sb->create_dbs($dbh, ['test']);
my $du = new MySQLDump();
my $q = new Quoter();
my $dump;
# TODO: get_create_table() seems to return an arrayref sometimes!
SKIP: {
skip 'Sandbox master does not have the sakila database', 10
unless @{$dbh->selectcol_arrayref('SHOW DATABASES LIKE "sakila"')};
$dump = $du->dump($dbh, $q, 'sakila', 'film', 'table');
like($dump, qr/language_id/, 'Dump sakila.film');
$dump = $du->dump($dbh, $q, 'mysql', 'film', 'triggers');
ok(!defined $dump, 'no triggers in mysql');
$dump = $du->dump($dbh, $q, 'sakila', 'film', 'triggers');
like($dump, qr/AFTER INSERT/, 'dump triggers');
$dump = $du->dump($dbh, $q, 'sakila', 'customer_list', 'table');
like($dump, qr/CREATE TABLE/, 'Temp table def for view/table');
like($dump, qr/DROP TABLE/, 'Drop temp table def for view/table');
like($dump, qr/DROP VIEW/, 'Drop view def for view/table');
unlike($dump, qr/ALGORITHM/, 'No view def');
$dump = $du->dump($dbh, $q, 'sakila', 'customer_list', 'view');
like($dump, qr/DROP TABLE/, 'Drop temp table def for view');
like($dump, qr/DROP VIEW/, 'Drop view def for view');
like($dump, qr/ALGORITHM/, 'View def');
};
# #############################################################################
# Issue 170: mk-parallel-dump dies when table-status Data_length is NULL
# #############################################################################
# The underlying problem for issue 170 is that MySQLDump doesn't eval some
# of its queries so when MySQLFind uses it and hits a broken table it dies.
diag(`cp $trunk/t/lib/samples/broken_tbl.frm /tmp/12345/data/test/broken_tbl.frm`);
my $output = '';
eval {
local *STDERR;
open STDERR, '>', \$output;
$dump = $du->dump($dbh, $q, 'test', 'broken_tbl', 'table');
};
is(
$EVAL_ERROR,
'',
'No error dumping broken table'
);
like(
$output,
qr/table may be damaged.+selectrow_hashref failed/s,
'Warns about possibly damaged table'
);
$sb->wipe_clean($dbh);
exit;

View File

@@ -17,7 +17,6 @@ use Quoter;
use DSNParser;
use Sandbox;
use OptionParser;
use MySQLDump;
use TableParser;
use TableNibbler;
use RowChecksum;
@@ -44,7 +43,6 @@ else {
my $q = new Quoter();
my $tp = new TableParser(Quoter=>$q);
my $du = new MySQLDump();
my $nb = new TableNibbler(TableParser=>$tp, Quoter=>$q);
my $o = new OptionParser(description => 'NibbleIterator');
my $rc = new RowChecksum(OptionParser => $o, Quoter=>$q);
@@ -54,7 +52,6 @@ $o->get_specs("$trunk/bin/pt-table-checksum");
my %common_modules = (
Quoter => $q,
TableParser => $tp,
MySQLDump => $du,
TableNibbler => $nb,
OptionParser => $o,
);
@@ -301,7 +298,7 @@ SKIP: {
db => 'sakila',
tbl => 'country',
tbl_struct => $tp->parse(
$du->get_create_table($dbh, $q, 'sakila', 'country')),
$tp->get_create_table($dbh, 'sakila', 'country')),
};
my $chunk_checksum = $rc->make_chunk_checksum(
dbh => $dbh,

View File

@@ -14,7 +14,6 @@ use Test::More;
use Transformers;
use QueryReview;
use QueryRewriter;
use MySQLDump;
use TableParser;
use Quoter;
use SlowLogParser;
@@ -41,10 +40,9 @@ my $qr = new QueryRewriter();
my $lp = new SlowLogParser;
my $q = new Quoter();
my $tp = new TableParser(Quoter => $q);
my $du = new MySQLDump();
my $opt_parser = new OptionParser( description => 'hi' );
my $tbl_struct = $tp->parse(
$du->get_create_table($dbh, $q, 'test', 'query_review'));
$tp->get_create_table($dbh, 'test', 'query_review'));
my $qv = new QueryReview(
dbh => $dbh,
@@ -159,7 +157,7 @@ my $create_table = $opt_parser->read_para_after(
$create_table =~ s/query_review_history/test.query_review_history/;
$dbh->do($create_table);
my $hist_struct = $tp->parse(
$du->get_create_table($dbh, $q, 'test', 'query_review_history'));
$tp->get_create_table($dbh, 'test', 'query_review_history'));
$qv->set_history_options(
table => 'test.query_review_history',
@@ -256,7 +254,7 @@ my $min_tbl = "CREATE TABLE query_review_history (
$dbh->do($min_tbl);
$hist_struct = $tp->parse(
$du->get_create_table($dbh, $q, 'test', 'query_review_history'));
$tp->get_create_table($dbh, 'test', 'query_review_history'));
$qv->set_history_options(
table => 'test.query_review_history',
dbh => $dbh,

View File

@@ -14,7 +14,6 @@ use Test::More;
use RowChecksum;
use TableParser;
use Quoter;
use MySQLDump;
use DSNParser;
use OptionParser;
use Sandbox;
@@ -35,7 +34,6 @@ $sb->create_dbs($dbh, ['test']);
my $q = new Quoter();
my $tp = new TableParser(Quoter => $q);
my $du = new MySQLDump();
my $o = new OptionParser(description => 'NibbleIterator');
$o->get_specs("$trunk/bin/pt-table-checksum");
@@ -407,7 +405,7 @@ $sb->load_file('master', 't/lib/samples/issue_94.sql');
$tbl = {
db => 'test',
tbl => 'issue_94',
tbl_struct => $tp->parse($du->get_create_table($dbh, $q, 'test', 'issue_94')),
tbl_struct => $tp->parse($tp->get_create_table($dbh, 'test', 'issue_94')),
};
@ARGV = qw(--ignore-columns c);
$o->get_opts();

View File

@@ -73,13 +73,11 @@ use RowDiff;
use Sandbox;
use DSNParser;
use TableParser;
use MySQLDump;
use Quoter;
my $d = new RowDiff(dbh => 1);
my $s = new MockSync();
my $q = new Quoter();
my $du = new MySQLDump();
my $tp = new TableParser(Quoter => $q);
my $dp = new DSNParser(opts=>$dsn_opts);
@@ -102,7 +100,7 @@ $sb->create_dbs($master_dbh, [qw(test)]);
$sb->load_file('master', 't/lib/samples/issue_11.sql');
my $tbl = $tp->parse(
$du->get_create_table($master_dbh, $q, 'test', 'issue_11'));
$tp->get_create_table($master_dbh, 'test', 'issue_11'));
my $left_sth = $master_dbh->prepare('SELECT * FROM test.issue_11');
my $right_sth = $slave_dbh->prepare('SELECT * FROM test.issue_11');

View File

@@ -17,14 +17,12 @@ use MockSth;
use Sandbox;
use DSNParser;
use TableParser;
use MySQLDump;
use Quoter;
use PerconaTest;
my ($d, $s);
my $q = new Quoter();
my $du = new MySQLDump();
my $tp = new TableParser(Quoter => $q);
my $dp = new DSNParser(opts=>$dsn_opts);
@@ -463,7 +461,7 @@ SKIP: {
);
my $tbl = $tp->parse(
$du->get_create_table($master_dbh, $q, 'test', 'issue_11'));
$tp->get_create_table($master_dbh, 'test', 'issue_11'));
my $left_sth = $master_dbh->prepare('SELECT * FROM test.issue_11');
my $right_sth = $slave_dbh->prepare('SELECT * FROM test.issue_11');

View File

@@ -17,7 +17,6 @@ use Quoter;
use DSNParser;
use Sandbox;
use OptionParser;
use MySQLDump;
use TableParser;
use PerconaTest;
@@ -33,7 +32,7 @@ my $dp = new DSNParser(opts=>$dsn_opts);
my $sb = new Sandbox(basedir => '/tmp', DSNParser => $dp);
my $dbh = $sb->get_dbh_for('master');
my ($du, $tp);
my $tp;
my $fi = new FileIterator();
my $o = new OptionParser(description => 'SchemaIterator');
$o->get_specs("$trunk/bin/pt-table-checksum");
@@ -68,7 +67,6 @@ sub test_so {
keep_ddl => defined $args{keep_ddl} ? $args{keep_ddl} : 1,
OptionParser => $o,
Quoter => $q,
MySQLDump => $du,
TableParser => $tp,
);
}
@@ -86,7 +84,7 @@ sub test_so {
else {
if ( $result_file || $args{ddl} ) {
$res .= "$obj->{db}.$obj->{tbl}\n";
$res .= "$obj->{ddl}\n\n" if $args{ddl} || $du;
$res .= "$obj->{ddl}\n\n" if $args{ddl} || $tp;
}
else {
$res .= "$obj->{db}.$obj->{tbl} ";
@@ -102,7 +100,6 @@ sub test_so {
$res,
$args{result},
cmd_output => 1,
update_sample => $args{update_sample},
),
$args{test_name},
);
@@ -312,7 +309,7 @@ SKIP: {
# ########################################################################
# Getting CREATE TABLE (ddl).
# ########################################################################
$du = new MySQLDump();
$tp = new TableParser(Quoter => $q);
test_so(
filters => [qw(-t mysql.user)],
result => $sandbox_version eq '5.1' ? "$out/mysql-user-ddl.txt"
@@ -320,8 +317,8 @@ SKIP: {
test_name => "Get CREATE TABLE with dbh",
);
# Kill the MySQLDump obj in case the next tests don't want to use it.
$du = undef;
# Kill the TableParser obj in case the next tests don't want to use it.
$tp = undef;
$sb->wipe_clean($dbh);
};

View File

@@ -15,7 +15,6 @@ use TableChecksum;
use VersionParser;
use TableParser;
use Quoter;
use MySQLDump;
use DSNParser;
use Sandbox;
use PerconaTest;
@@ -36,7 +35,6 @@ $sb->create_dbs($dbh, ['test']);
my $q = new Quoter();
my $tp = new TableParser(Quoter => $q);
my $vp = new VersionParser();
my $du = new MySQLDump();
my $c = new TableChecksum(Quoter=>$q, VersionParser=>$vp);
my $t;
@@ -645,7 +643,7 @@ is_deeply(
# Issue 94: Enhance mk-table-checksum, add a --ignorecols option
# #############################################################################
$sb->load_file('master', 't/lib/samples/issue_94.sql');
$t= $tp->parse( $du->get_create_table($dbh, $q, 'test', 'issue_94') );
$t= $tp->parse( $tp->get_create_table($dbh, 'test', 'issue_94') );
my $query = $c->make_checksum_query(
db => 'test',
tbl => 'issue_47',

View File

@@ -13,7 +13,6 @@ use Test::More;
use TableParser;
use TableChunker;
use MySQLDump;
use Quoter;
use DSNParser;
use Sandbox;
@@ -33,12 +32,11 @@ else {
$sb->create_dbs($dbh, ['test']);
my $q = new Quoter();
my $p = new TableParser(Quoter => $q);
my $du = new MySQLDump();
my $c = new TableChunker(Quoter => $q, MySQLDump => $du);
my $tp = new TableParser(Quoter => $q);
my $c = new TableChunker(Quoter => $q, TableParser => $tp);
my $t;
$t = $p->parse( load_file('t/lib/samples/sakila.film.sql') );
$t = $tp->parse( load_file('t/lib/samples/sakila.film.sql') );
is_deeply(
[ $c->find_chunk_columns(tbl_struct=>$t) ],
[ 0,
@@ -74,7 +72,7 @@ is_deeply(
# 'Found preferred chunkable columns on sakila.film',
#);
$t = $p->parse( load_file('t/lib/samples/pk_not_first.sql') );
$t = $tp->parse( load_file('t/lib/samples/pk_not_first.sql') );
is_deeply(
[ $c->find_chunk_columns(tbl_struct=>$t) ],
[ 0,
@@ -217,7 +215,7 @@ SKIP: {
'Nullable column adds IS NULL chunk',
);
$t = $p->parse( load_file('t/lib/samples/daycol.sql') );
$t = $tp->parse( load_file('t/lib/samples/daycol.sql') );
@chunks = $c->calculate_chunks(
tbl_struct => $t,
@@ -242,7 +240,7 @@ SKIP: {
'Date column chunks OK',
);
$t = $p->parse( load_file('t/lib/samples/date.sql') );
$t = $tp->parse( load_file('t/lib/samples/date.sql') );
@chunks = $c->calculate_chunks(
tbl_struct => $t,
chunk_col => 'a',
@@ -285,7 +283,7 @@ SKIP: {
'Date column where min date is 0000-00-00',
);
$t = $p->parse( load_file('t/lib/samples/datetime.sql') );
$t = $tp->parse( load_file('t/lib/samples/datetime.sql') );
@chunks = $c->calculate_chunks(
tbl_struct => $t,
chunk_col => 'a',
@@ -328,7 +326,7 @@ SKIP: {
'Datetime where min is 0000-00-00 00:00:00',
);
$t = $p->parse( load_file('t/lib/samples/timecol.sql') );
$t = $tp->parse( load_file('t/lib/samples/timecol.sql') );
@chunks = $c->calculate_chunks(
tbl_struct => $t,
chunk_col => 'a',
@@ -350,7 +348,7 @@ SKIP: {
'Time column chunks OK',
);
$t = $p->parse( load_file('t/lib/samples/doublecol.sql') );
$t = $tp->parse( load_file('t/lib/samples/doublecol.sql') );
@chunks = $c->calculate_chunks(
tbl_struct => $t,
chunk_col => 'a',
@@ -411,7 +409,7 @@ SKIP: {
'Throws OK when too many chunks',
);
$t = $p->parse( load_file('t/lib/samples/floatcol.sql') );
$t = $tp->parse( load_file('t/lib/samples/floatcol.sql') );
@chunks = $c->calculate_chunks(
tbl_struct => $t,
chunk_col => 'a',
@@ -433,7 +431,7 @@ SKIP: {
'Float column chunks OK',
);
$t = $p->parse( load_file('t/lib/samples/decimalcol.sql') );
$t = $tp->parse( load_file('t/lib/samples/decimalcol.sql') );
@chunks = $c->calculate_chunks(
tbl_struct => $t,
chunk_col => 'a',
@@ -532,7 +530,7 @@ SKIP: {
# #########################################################################
# Issue 1084: Don't try to chunk small tables
# #########################################################################
$t = $p->parse( $du->get_create_table($dbh, $q, 'sakila', 'country') );
$t = $tp->parse( $tp->get_create_table($dbh, 'sakila', 'country') );
@chunks = $c->calculate_chunks(
tbl_struct => $t,
chunk_col => 'country_id',
@@ -555,7 +553,7 @@ SKIP: {
# Issue 47: TableChunker::range_num broken for very large bigint
# #############################################################################
$sb->load_file('master', 't/lib/samples/issue_47.sql');
$t = $p->parse( $du->get_create_table($dbh, $q, 'test', 'issue_47') );
$t = $tp->parse( $tp->get_create_table($dbh, 'test', 'issue_47') );
my %params = $c->get_range_statistics(
dbh => $dbh,
db => 'test',
@@ -596,7 +594,7 @@ is(
);
$sb->load_file('master', 't/lib/samples/issue_8.sql');
$t = $p->parse( $du->get_create_table($dbh, $q, 'test', 'issue_8') );
$t = $tp->parse( $tp->get_create_table($dbh, 'test', 'issue_8') );
my @candidates = $c->find_chunk_columns(tbl_struct=>$t);
is_deeply(
\@candidates,
@@ -637,7 +635,7 @@ $sb->load_file('master', 't/lib/samples/issue_941.sql');
sub test_zero_row {
my ( $tbl, $range, $chunks, $zero_chunk ) = @_;
$zero_chunk = 1 unless defined $zero_chunk;
$t = $p->parse( $du->get_create_table($dbh, $q, 'issue_941', $tbl) );
$t = $tp->parse( $tp->get_create_table($dbh, 'issue_941', $tbl) );
%params = $c->get_range_statistics(
dbh => $dbh,
db => 'issue_941',
@@ -763,7 +761,7 @@ test_zero_row(
# Issue 602: mk-table-checksum issue with invalid dates
# #############################################################################
$sb->load_file('master', 't/pt-table-checksum/samples/issue_602.sql');
$t = $p->parse( $du->get_create_table($dbh, $q, 'issue_602', 't') );
$t = $tp->parse( $tp->get_create_table($dbh, 'issue_602', 't') );
%params = $c->get_range_statistics(
dbh => $dbh,
db => 'issue_602',
@@ -802,7 +800,7 @@ throws_ok(
);
# Like the test above but t2 has nothing but invalid rows.
$t = $p->parse( $du->get_create_table($dbh, $q, 'issue_602', 't2') );
$t = $tp->parse( $tp->get_create_table($dbh, 'issue_602', 't2') );
throws_ok(
sub {
$c->get_range_statistics(
@@ -923,7 +921,7 @@ foreach my $t ( @valid_t ) {
# #############################################################################
# Test get_first_chunkable_column().
# #############################################################################
$t = $p->parse( load_file('t/lib/samples/sakila.film.sql') );
$t = $tp->parse( load_file('t/lib/samples/sakila.film.sql') );
is_deeply(
[ $c->get_first_chunkable_column(tbl_struct=>$t) ],
@@ -970,7 +968,7 @@ is_deeply(
);
$sb->load_file('master', "t/lib/samples/t1.sql", 'test');
$t = $p->parse( load_file('t/lib/samples/t1.sql') );
$t = $tp->parse( load_file('t/lib/samples/t1.sql') );
is_deeply(
[ $c->get_first_chunkable_column(tbl_struct=>$t) ],
@@ -980,7 +978,7 @@ is_deeply(
# char chunking ###############################################################
$sb->load_file('master', "t/lib/samples/char-chunking/ascii.sql", 'test');
$t = $p->parse( $du->get_create_table($dbh, $q, 'test', 'ascii') );
$t = $tp->parse( $tp->get_create_table($dbh, 'test', 'ascii') );
is_deeply(
[ $c->find_chunk_columns(tbl_struct=>$t) ],
@@ -1055,7 +1053,7 @@ sub chunk_it {
$dbh->do("alter table test.t1 add unique index (a)");
my (undef,$output) = $dbh->selectrow_array("show create table test.t1");
$t = $p->parse($output);
$t = $tp->parse($output);
is_deeply(
[ $c->get_first_chunkable_column(tbl_struct=>$t) ],
[qw(a a)],
@@ -1120,7 +1118,7 @@ SKIP: {
my @chunks;
$t = $p->parse( $du->get_create_table($dbh, $q, 'sakila', 'city') );
$t = $tp->parse( $tp->get_create_table($dbh, 'sakila', 'city') );
@chunks = $c->calculate_chunks(
tbl_struct => $t,
chunk_col => 'city',
@@ -1173,7 +1171,7 @@ SKIP: {
}
$sb->load_file('master', "t/lib/samples/char-chunking/world-city.sql", 'test');
$t = $p->parse( $du->get_create_table($dbh, $q, 'test', 'world_city') );
$t = $tp->parse( $tp->get_create_table($dbh, 'test', 'world_city') );
%params = $c->get_range_statistics(
dbh => $dbh,
db => 'test',
@@ -1211,7 +1209,7 @@ SKIP: {
unless @{$dbh->selectcol_arrayref('SHOW DATABASES LIKE "sakila"')};
my @chunks;
$t = $p->parse( load_file('t/lib/samples/sakila.film.sql') );
$t = $tp->parse( load_file('t/lib/samples/sakila.film.sql') );
@chunks = $c->calculate_chunks(
tbl_struct => $t,
@@ -1241,7 +1239,7 @@ SKIP: {
# Bug 821673: pt-table-checksum doesn't included --where in min max queries
# ############################################################################
$sb->load_file('master', "t/pt-table-checksum/samples/where01.sql");
$t = $p->parse( $du->get_create_table($dbh, $q, 'test', 'checksum_test') );
$t = $tp->parse( $tp->get_create_table($dbh, 'test', 'checksum_test') );
%params = $c->get_range_statistics(
dbh => $dbh,
db => 'test',
@@ -1263,7 +1261,7 @@ is(
# char chunking
$sb->load_file('master', "t/pt-table-checksum/samples/where02.sql");
$t = $p->parse( $du->get_create_table($dbh, $q, 'test', 'checksum_test') );
$t = $tp->parse( $tp->get_create_table($dbh, 'test', 'checksum_test') );
%params = $c->get_range_statistics(
dbh => $dbh,
db => 'test',

View File

@@ -9,7 +9,7 @@ BEGIN {
use strict;
use warnings FATAL => 'all';
use English qw(-no_match_vars);
use Test::More tests => 54;
use Test::More tests => 38;
use TableParser;
use Quoter;
@@ -20,11 +20,34 @@ use PerconaTest;
my $dp = new DSNParser(opts=>$dsn_opts);
my $sb = new Sandbox(basedir => '/tmp', DSNParser => $dp);
my $dbh = $sb->get_dbh_for('master');
my $q = new Quoter();
my $tp = new TableParser(Quoter=>$q);
my $tbl;
my $sample = "t/lib/samples/tables/";
SKIP: {
skip "Cannot connect to sandbox master", 2 unless $dbh;
skip 'Sandbox master does not have the sakila database', 2
unless @{$dbh->selectcol_arrayref('SHOW DATABASES LIKE "sakila"')};
is(
$tp->get_create_table($dbh, 'sakila', 'FOO'),
undef,
"get_create_table(nonexistent table)"
);
ok(
no_diff(
$tp->get_create_table($dbh, 'sakila', 'actor'),
"$sample/sakila.actor",
cmd_output => 1,
),
"get_create_table(sakila.actor)"
);
};
eval {
$tp->parse( load_file('t/lib/samples/noquotes.sql') );
};
@@ -575,161 +598,7 @@ is_deeply(
);
# #############################################################################
# Test remove_secondary_indexes().
# #############################################################################
sub test_rsi {
my ( $file, $des, $new_ddl, $indexes ) = @_;
my $ddl = load_file($file);
my ($got_new_ddl, $got_indexes) = $tp->remove_secondary_indexes($ddl);
is(
$got_indexes,
$indexes,
"$des - secondary indexes $file"
);
is(
$got_new_ddl,
$new_ddl,
"$des - new ddl $file"
);
return;
}
test_rsi(
't/lib/samples/t1.sql',
'MyISAM table, no indexes',
"CREATE TABLE `t1` (
`a` int(11) default NULL
) ENGINE=MyISAM DEFAULT CHARSET=latin1
",
undef
);
test_rsi(
't/lib/samples/one_key.sql',
'MyISAM table, one pk',
"CREATE TABLE `t2` (
`a` int(11) NOT NULL,
`b` char(50) default NULL,
PRIMARY KEY (`a`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
",
undef
);
test_rsi(
't/lib/samples/date.sql',
'one pk',
"CREATE TABLE `checksum_test_5` (
`a` date NOT NULL,
`b` int(11) default NULL,
PRIMARY KEY (`a`)
) ENGINE=InnoDB DEFAULT CHARSET=latin1
",
undef
);
test_rsi(
't/lib/samples/auto-increment-actor.sql',
'pk, key (no trailing comma)',
"CREATE TABLE `actor` (
`actor_id` smallint(5) unsigned NOT NULL auto_increment,
`first_name` varchar(45) NOT NULL,
`last_name` varchar(45) NOT NULL,
`last_update` timestamp NOT NULL default CURRENT_TIMESTAMP on update CURRENT_TIMESTAMP,
PRIMARY KEY (`actor_id`)
) ENGINE=InnoDB AUTO_INCREMENT=201 DEFAULT CHARSET=utf8;
",
'ADD KEY `idx_actor_last_name` (`last_name`)'
);
test_rsi(
't/lib/samples/one_fk.sql',
'key, fk, no clustered key',
"CREATE TABLE `t1` (
`a` int(11) NOT NULL,
`b` char(50) default NULL,
CONSTRAINT `t1_ibfk_1` FOREIGN KEY (`a`) REFERENCES `t2` (`a`)
) ENGINE=InnoDB DEFAULT CHARSET=latin1
",
'ADD KEY `a` (`a`)',
);
test_rsi(
't/lib/samples/sakila.film.sql',
'pk, keys and fks',
"CREATE TABLE `film` (
`film_id` smallint(5) unsigned NOT NULL auto_increment,
`title` varchar(255) NOT NULL,
`description` text,
`release_year` year(4) default NULL,
`language_id` tinyint(3) unsigned NOT NULL,
`original_language_id` tinyint(3) unsigned default NULL,
`rental_duration` tinyint(3) unsigned NOT NULL default '3',
`rental_rate` decimal(4,2) NOT NULL default '4.99',
`length` smallint(5) unsigned default NULL,
`replacement_cost` decimal(5,2) NOT NULL default '19.99',
`rating` enum('G','PG','PG-13','R','NC-17') default 'G',
`special_features` set('Trailers','Commentaries','Deleted Scenes','Behind the Scenes') default NULL,
`last_update` timestamp NOT NULL default CURRENT_TIMESTAMP on update CURRENT_TIMESTAMP,
PRIMARY KEY (`film_id`),
CONSTRAINT `fk_film_language` FOREIGN KEY (`language_id`) REFERENCES `language` (`language_id`) ON UPDATE CASCADE,
CONSTRAINT `fk_film_language_original` FOREIGN KEY (`original_language_id`) REFERENCES `language` (`language_id`) ON UPDATE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8
",
'ADD KEY `idx_fk_original_language_id` (`original_language_id`), ADD KEY `idx_fk_language_id` (`language_id`), ADD KEY `idx_title` (`title`)'
);
test_rsi(
't/lib/samples/issue_729.sql',
'issue 729',
"CREATE TABLE `posts` (
`id` int(10) unsigned NOT NULL AUTO_INCREMENT,
`template_id` smallint(5) unsigned NOT NULL DEFAULT '0',
`other_id` bigint(20) unsigned NOT NULL DEFAULT '0',
`date` int(10) unsigned NOT NULL DEFAULT '0',
`private` tinyint(3) unsigned NOT NULL DEFAULT '0',
PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=15417 DEFAULT CHARSET=latin1;
",
'ADD KEY `other_id` (`other_id`)',
);
test_rsi(
't/lib/samples/00_geodb_coordinates.sql',
'issue 833',
"CREATE TABLE `geodb_coordinates` (
`loc_id` int(11) NOT NULL default '0',
`lon` double default NULL,
`lat` double default NULL,
`sin_lon` double default NULL,
`sin_lat` double default NULL,
`cos_lon` double default NULL,
`cos_lat` double default NULL,
`coord_type` int(11) NOT NULL default '0',
`coord_subtype` int(11) default NULL,
`valid_since` date default NULL,
`date_type_since` int(11) default NULL,
`valid_until` date NOT NULL default '0000-00-00',
`date_type_until` int(11) NOT NULL default '0'
) ENGINE=InnoDB DEFAULT CHARSET=latin1",
'ADD KEY `coord_lon_idx` (`lon`), ADD KEY `coord_loc_id_idx` (`loc_id`), ADD KEY `coord_stype_idx` (`coord_subtype`), ADD KEY `coord_until_idx` (`valid_until`), ADD KEY `coord_lat_idx` (`lat`), ADD KEY `coord_slon_idx` (`sin_lon`), ADD KEY `coord_clon_idx` (`cos_lon`), ADD KEY `coord_slat_idx` (`sin_lat`), ADD KEY `coord_clat_idx` (`cos_lat`), ADD KEY `coord_type_idx` (`coord_type`), ADD KEY `coord_since_idx` (`valid_since`)',
);
# Column and index names are case-insensitive so remove_secondary_indexes()
# returns "ADD KEY `foo_bar` (`i`,`j`)" for "KEY `Foo_Bar` (`i`,`J`)".
test_rsi(
't/lib/samples/issue_956.sql',
'issue 956',
"CREATE TABLE `t` (
`i` int(11) default NULL,
`J` int(11) default NULL
) ENGINE=InnoDB
",
'ADD KEY `foo_bar` (`i`,`j`)',
);
# #############################################################################
# Sandbox tests
# Sandbox test
# #############################################################################
SKIP: {
skip 'Cannot connect to sandbox master', 8 unless $dbh;

View File

@@ -35,7 +35,6 @@ use ChangeHandler;
use TableChecksum;
use TableChunker;
use TableParser;
use MySQLDump;
use VersionParser;
use TableSyncer;
use MasterSlave;
@@ -47,11 +46,10 @@ diag(`$mysql < $trunk/t/lib/samples/before-TableSyncChunk.sql`);
my $q = new Quoter();
my $tp = new TableParser(Quoter => $q);
my $du = new MySQLDump();
my $vp = new VersionParser();
my $ms = new MasterSlave();
my $rr = new Retry();
my $chunker = new TableChunker( Quoter => $q, MySQLDump => $du );
my $chunker = new TableChunker( Quoter => $q, TableParser => $tp );
my $checksum = new TableChecksum( Quoter => $q, VersionParser => $vp );
my $syncer = new TableSyncer(
MasterSlave => $ms,
@@ -93,7 +91,7 @@ my $t = new TableSyncChunk(
);
isa_ok($t, 'TableSyncChunk');
$ddl = $du->get_create_table($dbh, $q, 'test', 'test1');
$ddl = $tp->get_create_table($dbh, 'test', 'test1');
$tbl_struct = $tp->parse($ddl);
%args = (
src => $src,
@@ -296,7 +294,7 @@ is($t->pending_changes(), 0, 'No pending changes');
# be chunked. But to keep the spirit of this test the same, we drop
# the index which makes the table unchunkable again.
$dbh->do('alter table test.test6 drop index a');
$ddl = $du->get_create_table($dbh, $q, 'test', 'test6');
$ddl = $tp->get_create_table($dbh, 'test', 'test6');
$tbl_struct = $tp->parse($ddl);
is_deeply(
[ $t->can_sync(tbl_struct=>$tbl_struct) ],
@@ -304,7 +302,7 @@ is_deeply(
'Cannot sync table1 (no good single column index)'
);
$ddl = $du->get_create_table($dbh, $q, 'test', 'test5');
$ddl = $tp->get_create_table($dbh, 'test', 'test5');
$tbl_struct = $tp->parse($ddl);
is_deeply(
[ $t->can_sync(tbl_struct=>$tbl_struct) ],
@@ -314,7 +312,7 @@ is_deeply(
# create table test3(a int not null primary key, b int not null, unique(b));
$ddl = $du->get_create_table($dbh, $q, 'test', 'test3');
$ddl = $tp->get_create_table($dbh, 'test', 'test3');
$tbl_struct = $tp->parse($ddl);
is_deeply(
[ $t->can_sync(tbl_struct=>$tbl_struct) ],

View File

@@ -20,7 +20,6 @@ use TableChecksum;
use TableChunker;
use TableNibbler;
use TableParser;
use MySQLDump;
use VersionParser;
use MasterSlave;
use Retry;
@@ -43,7 +42,6 @@ my $mysql = $sb->_use_for('master');
my $q = new Quoter();
my $ms = new MasterSlave();
my $tp = new TableParser(Quoter=>$q);
my $du = new MySQLDump();
my $vp = new VersionParser();
my $rr = new Retry();
@@ -56,7 +54,7 @@ my $checksum = new TableChecksum(
VersionParser => $vp,
);
my $chunker = new TableChunker(
MySQLDump => $du,
TableParser => $tp,
Quoter => $q
);
my $t = new TableSyncNibble(
@@ -89,7 +87,7 @@ my $syncer = new TableSyncer(
$sb->create_dbs($dbh, ['test']);
diag(`$mysql < $trunk/t/lib/samples/before-TableSyncNibble.sql`);
my $ddl = $du->get_create_table($dbh, $q, 'test', 'test1');
my $ddl = $tp->get_create_table($dbh, 'test', 'test1');
my $tbl_struct = $tp->parse($ddl);
my $src = {
db => 'test',
@@ -362,7 +360,7 @@ like(
# Issue 96: mk-table-sync: Nibbler infinite loop
# #########################################################################
$sb->load_file('master', 't/lib/samples/issue_96.sql');
$tbl_struct = $tp->parse($du->get_create_table($dbh, $q, 'issue_96', 't'));
$tbl_struct = $tp->parse($tp->get_create_table($dbh, 'issue_96', 't'));
$t->prepare_to_sync(
ChangeHandler => $ch,
cols => $tbl_struct->{cols},
@@ -420,7 +418,7 @@ is(
# If small_table is true, the index check should be skipped.
diag(`/tmp/12345/use -e 'create table issue_96.t3 (i int, unique index (i))'`);
diag(`/tmp/12345/use -e 'insert into issue_96.t3 values (1)'`);
$tbl_struct = $tp->parse($du->get_create_table($dbh, $q, 'issue_96', 't3'));
$tbl_struct = $tp->parse($tp->get_create_table($dbh, 'issue_96', 't3'));
$t->prepare_to_sync(
ChangeHandler => $ch,
cols => $tbl_struct->{cols},
@@ -462,7 +460,7 @@ SKIP: {
my $where = '`player_id` >= 201 AND `player_id` < 301';
$sb->load_file('master', 't/pt-table-sync/samples/issue_560.sql');
$tbl_struct = $tp->parse($du->get_create_table($dbh, $q, 'issue_560', 'buddy_list'));
$tbl_struct = $tp->parse($tp->get_create_table($dbh, 'issue_560', 'buddy_list'));
(undef, %plugin_args) = $t->can_sync(tbl_struct => $tbl_struct);
$t->prepare_to_sync(
ChangeHandler => $ch,
@@ -573,7 +571,7 @@ is(
# Issue 804: mk-table-sync: can't nibble because index name isn't lower case?
# #############################################################################
$sb->load_file('master', 't/lib/samples/issue_804.sql');
$tbl_struct = $tp->parse($du->get_create_table($dbh, $q, 'issue_804', 't'));
$tbl_struct = $tp->parse($tp->get_create_table($dbh, 'issue_804', 't'));
($can_sync, %plugin_args) = $t->can_sync(tbl_struct => $tbl_struct);
is(
$can_sync,

View File

@@ -30,7 +30,6 @@ use TableNibbler;
use ChangeHandler;
use RowDiff;
# And other modules:
use MySQLDump;
use TableParser;
use DSNParser;
use Sandbox;
@@ -59,7 +58,6 @@ $sb->load_file('master', 't/lib/samples/before-TableSyncChunk.sql');
my $q = new Quoter();
my $tp = new TableParser(Quoter=>$q);
my $du = new MySQLDump( cache => 0 );
# ###########################################################################
# Make a TableSyncer object.
@@ -103,8 +101,8 @@ my $syncer = new TableSyncer(
);
isa_ok($syncer, 'TableSyncer');
my $chunker = new TableChunker( Quoter => $q, MySQLDump => $du );
my $nibbler = new TableNibbler( TableParser => $tp, Quoter => $q );
my $chunker = new TableChunker( Quoter => $q, TableParser => $tp );
my $nibbler = new TableNibbler( Quoter => $q, TableParser => $tp );
# Global vars used/set by the subs below and accessed throughout the tests.
my $src;
@@ -182,7 +180,7 @@ sub sync_table {
make_plugins();
}
$tbl_struct = $tp->parse(
$du->get_create_table($src_dbh, $q, $src_db, $src_tbl));
$tp->get_create_table($src_dbh, $src_db, $src_tbl));
$src = {
dbh => $src_dbh,
dsn => {h=>'127.1',P=>'12345',},
@@ -221,7 +219,7 @@ sub sync_table {
# Test get_best_plugin() (formerly best_algorithm()).
# ###########################################################################
make_plugins();
$tbl_struct = $tp->parse($du->get_create_table($src_dbh, $q, 'test', 'test5'));
$tbl_struct = $tp->parse($tp->get_create_table($src_dbh, 'test', 'test5'));
is_deeply(
[
$syncer->get_best_plugin(
@@ -233,7 +231,7 @@ is_deeply(
'Best plugin GroupBy'
);
$tbl_struct = $tp->parse($du->get_create_table($src_dbh, $q,'test','test3'));
$tbl_struct = $tp->parse($tp->get_create_table($src_dbh, 'test', 'test3'));
my ($plugin, %plugin_args) = $syncer->get_best_plugin(
plugins => $plugins,
tbl_struct => $tbl_struct,
@@ -247,7 +245,7 @@ is_deeply(
# With the introduction of char chunking (issue 568), test6 can be chunked
# with Chunk or Nibble. Chunk will be prefered.
$tbl_struct = $tp->parse($du->get_create_table($src_dbh, $q,'test','test6'));
$tbl_struct = $tp->parse($tp->get_create_table($src_dbh, 'test', 'test6'));
($plugin, %plugin_args) = $syncer->get_best_plugin(
plugins => $plugins,
tbl_struct => $tbl_struct,
@@ -842,7 +840,7 @@ SKIP: {
$sb->load_file('slave2', 't/pt-table-sync/samples/bidirectional/remote-1.sql');
make_plugins();
set_bidi_callbacks();
$tbl_struct = $tp->parse($du->get_create_table($src_dbh, $q, 'bidi','t'));
$tbl_struct = $tp->parse($tp->get_create_table($src_dbh, 'bidi', 't'));
$src->{db} = 'bidi';
$src->{tbl} = 't';

View File

@@ -40,3 +40,4 @@ CREATE TABLE `user` (
PRIMARY KEY (`Host`,`User`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8 COLLATE=utf8_bin COMMENT='Users and global privileges'

View File

@@ -42,3 +42,4 @@ CREATE TABLE `user` (
PRIMARY KEY (`Host`,`User`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8 COLLATE=utf8_bin COMMENT='Users and global privileges'

View File

@@ -0,0 +1,8 @@
CREATE TABLE `actor` (
`actor_id` smallint(5) unsigned NOT NULL AUTO_INCREMENT,
`first_name` varchar(45) NOT NULL,
`last_name` varchar(45) NOT NULL,
`last_update` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (`actor_id`),
KEY `idx_actor_last_name` (`last_name`)
) ENGINE=InnoDB AUTO_INCREMENT=201 DEFAULT CHARSET=utf8