Merge simplify-pqd r549.

Daniel Nichter
2013-01-30 13:33:47 -07:00
15 changed files with 1036 additions and 223 deletions

View File

@@ -7977,7 +7977,7 @@ sub _d {
{
package JSONReportFormatter;
use Mo;
use JSON;
use JSON ();
use List::Util qw(sum);
@@ -7985,8 +7985,22 @@ use Transformers qw(make_checksum parse_timestamp);
use constant PTDEBUG => $ENV{PTDEBUG} || 0;
our $pretty_json = undef;
our $sorted_json = undef;
extends qw(QueryReportFormatter);
has _json => (
is => 'ro',
init_arg => undef,
builder => '_build_json',
handles => { encode_json => 'encode' },
);
sub _build_json {
return JSON->new->utf8->pretty($pretty_json)->canonical($sorted_json);
}
override [qw(rusage date hostname files header profile prepared)] => sub {
return;
};
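For context, a minimal sketch of what the delegated encoder above does, assuming JSON.pm's standard chained constructor (the hash is a made-up example):
use JSON ();
# pretty(1) indents the output; canonical(1) sorts hash keys, which is what
# makes the JSON output stable enough to compare in tests.
my $codec = JSON->new->utf8->pretty(1)->canonical(1);
print $codec->encode({ b => 2, a => 1 });   # emits key "a", then "b"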
@@ -8062,7 +8076,7 @@ override query_report => sub {
};
}
return encode_json(\@queries) . "\n";
return $self->encode_json(\@queries) . "\n";
};
1;
@@ -9132,7 +9146,7 @@ sub new {
sub set_history_options {
my ( $self, %args ) = @_;
foreach my $arg ( qw(table dbh tbl_struct col_pat) ) {
foreach my $arg ( qw(table tbl_struct col_pat) ) {
die "I need a $arg argument" unless $args{$arg};
}
@@ -9166,7 +9180,7 @@ sub set_history_options {
} @cols) . ')';
PTDEBUG && _d($sql);
$self->{history_sth} = $args{dbh}->prepare($sql);
$self->{history_sth} = $self->{dbh}->prepare($sql);
$self->{history_metrics} = \@metrics;
return;
@@ -13525,6 +13539,8 @@ my $aux_dbh; # For --aux-dsn (--since/--until "MySQL expression")
my $resume_file;
my $offset;
(my $tool = __PACKAGE__) =~ tr/_/-/;
sub main {
# Reset global vars, else tests will fail.
local @ARGV = @_;
@@ -13555,11 +13571,6 @@ sub main {
}
if ( !$o->get('help') ) {
if ( $review_dsn
&& (!defined $review_dsn->{D} || !defined $review_dsn->{t}) ) {
$o->save_error('The --review DSN requires a D (database) and t'
. ' (table) part specifying the query review table');
}
if ( $o->get('outliers')
&& grep { $_ !~ m/^\w+:[0-9.]+(?::[0-9.]+)?$/ } @{$o->get('outliers')}
) {
@@ -13573,6 +13584,20 @@ sub main {
}
}
if ( my $review_dsn = $o->get('review') ) {
$o->save_error('--review does not accept a t option. Perhaps you meant '
. 'to use --review-table or --history-table?')
if defined $review_dsn->{t};
}
for my $tables ('review-table', 'history-table') {
my $got = $o->get($tables);
if ( grep !defined, Quoter->split_unquote($got) ) {
$o->save_error("--$tables should be passed a "
. "fully-qualified table name, got $got");
}
}
if ( my $patterns = $o->get('embedded-attributes') ) {
$o->save_error("--embedded-attributes should be passed two "
. "comma-separated patterns, got " . scalar(@$patterns) )
@@ -13642,11 +13667,10 @@ sub main {
}
# ########################################################################
# Set up for --review and --review-history.
# Set up for --review.
# ########################################################################
my $qv; # QueryReview
my $qv_dbh; # For QueryReview
my $qv_dbh2; # For QueryReview and --review-history
if ( $review_dsn ) {
my $tp = new TableParser(Quoter => $q);
$qv_dbh = get_cxn(
@@ -13657,28 +13681,33 @@ sub main {
opts => { AutoCommit => 1 },
);
$qv_dbh->{InactiveDestroy} = 1; # Don't die on fork().
my @db_tbl = @{$review_dsn}{qw(D t)};
my @db_tbl = Quoter->split_unquote($o->get('review-table'));
my @hdb_tbl = Quoter->split_unquote($o->get('history-table'));
my $db_tbl = $q->quote(@db_tbl);
my $hdb_tbl = $q->quote(@hdb_tbl);
# Create the review table if desired
if ( $o->get('create-review-table') ) {
my $sql = $o->read_para_after(
my $create_review_sql = $o->read_para_after(
__FILE__, qr/MAGIC_create_review/);
$sql =~ s/query_review/IF NOT EXISTS $db_tbl/;
PTDEBUG && _d($sql);
$qv_dbh->do($sql);
}
$create_review_sql =~ s/query_review/IF NOT EXISTS $db_tbl/;
# Check for the existence of the table.
if ( !$tp->check_table(
my $create_history_sql = $o->read_para_after(
__FILE__, qr/MAGIC_create_review_history/);
$create_history_sql =~ s/query_review_history/IF NOT EXISTS $hdb_tbl/;
for my $create (
[ $db_tbl, $create_review_sql ],
[ $hdb_tbl, $create_history_sql ],
) {
my ($tbl_name, $sql) = @$create;
create_review_tables(
dbh => $qv_dbh,
db => $db_tbl[0],
tbl => $db_tbl[1]) )
{
die "The query review table $db_tbl does not exist. "
. "Specify --create-review-table to create it, "
. "and ensure that the MySQL user has privileges to create "
. "and update the table.\n";
full_table => $tbl_name,
create_table_sql => $sql,
create_table => $o->get('create-review-tables'),
TableParser => $tp,
);
}
# Set up the new QueryReview object.
@@ -13690,40 +13719,6 @@ sub main {
quoter => $q,
);
# Set up the review-history table
if ( my $review_history_dsn = $o->get('review-history') ) {
$qv_dbh2 = get_cxn(
for => '--review-history',
dsn => $review_history_dsn,
OptionParser => $o,
DSNParser => $dp,
opts => { AutoCommit => 1 },
);
$qv_dbh2->{InactiveDestroy} = 1; # Don't die on fork().
my @hdb_tbl = @{$o->get('review-history')}{qw(D t)};
my $hdb_tbl = $q->quote(@hdb_tbl);
# Create the review-history table if desired
if ( $o->get('create-review-history-table') ) {
my $sql = $o->read_para_after(
__FILE__, qr/MAGIC_create_review_history/);
$sql =~ s/query_review_history/IF NOT EXISTS $hdb_tbl/;
PTDEBUG && _d($sql);
$qv_dbh2->do($sql);
}
# Check for the existence of the table.
if ( !$tp->check_table(
dbh => $qv_dbh2,
db => $hdb_tbl[0],
tbl => $hdb_tbl[1]) )
{
die "The query review history table $hdb_tbl does not exist. "
. "Specify --create-review-history-table to create it, "
. "and ensure that the MySQL user has privileges to create "
. "and update the table.\n";
}
# Inspect for MAGIC_history_cols. Add them to the --select list
# only if an explicit --select list was given. Otherwise, leave
# --select undef which will cause EventAggregator to aggregate every
@@ -13731,14 +13726,14 @@ sub main {
# If no --select list was given and we make one by adding the history
# columns to it, then EventAggregator will only aggregate the
# history columns and nothing else--we don't want this.
my $tbl = $tp->parse($tp->get_create_table($qv_dbh2, @hdb_tbl));
my $tbl = $tp->parse($tp->get_create_table($qv_dbh, @hdb_tbl));
my $pat = $o->read_para_after(__FILE__, qr/MAGIC_history_cols/);
$pat =~ s/\s+//g;
$pat = qr/^(.*?)_($pat)$/;
# Get original --select values.
my %select = map { $_ => 1 } @{$o->get('select')};
foreach my $col ( @{$tbl->{cols}} ) {
my ( $attr, $metric ) = $col =~ m/$pat/;
my ( $attr, $metric ) = $col =~ $pat;
next unless $attr && $metric;
$attr = ucfirst $attr if $attr =~ m/_/; # TableParser lowercases
# Add history table values to original select values.
@@ -13748,20 +13743,18 @@ sub main {
if ( $o->got('select') ) {
# Re-set --select with its original values plus the history
# table values.
$o->set('select', [keys %select]);
PTDEBUG && _d("--select after parsing --review-history table:",
$o->set('select', [sort keys %select]);
PTDEBUG && _d("--select after parsing the history table:",
@{$o->get('select')});
}
# And tell the QueryReview that it has more work to do.
$qv->set_history_options(
table => $hdb_tbl,
dbh => $qv_dbh2,
tbl_struct => $tbl,
col_pat => $pat,
);
}
}
# ########################################################################
# Create all the pipeline processes that do all the work: get input,
@@ -13841,9 +13834,8 @@ sub main {
push @read_files, $filename || "STDIN";
# Read the file offset for --resume.
if ( $o->get('resume') && $filename ) {
$resume_file = $filename . '.resume';
if ( -f $resume_file ) {
if ( ($resume_file = $o->get('resume')) && $filename ) {
if ( -s $resume_file ) {
open my $resume_fh, '<', $resume_file
or die "Error opening $resume_file: $OS_ERROR";
chomp(my $resume_offset = <$resume_fh>);
@@ -14152,7 +14144,7 @@ sub main {
);
$aux_dbh->{InactiveDestroy} = 1; # Don't die on fork().
}
$aux_dbh ||= $qv_dbh || $qv_dbh2 || $ps_dbh || $ep_dbh;
$aux_dbh ||= $qv_dbh || $ps_dbh || $ep_dbh;
PTDEBUG && _d('aux dbh:', $aux_dbh);
my $time_callback = sub {
@@ -14768,7 +14760,7 @@ sub main {
PTDEBUG && _d('Disconnected dbh', $_);
}
grep { $_ }
($qv_dbh, $qv_dbh2, $ps_dbh, $ep_dbh, $aux_dbh);
($qv_dbh, $ps_dbh, $ep_dbh, $aux_dbh);
return 0;
} # End main()
@@ -14777,6 +14769,77 @@ sub main {
# Subroutines.
# ############################################################################
sub create_review_tables {
my ( %args ) = @_;
my @required_args = qw(dbh full_table TableParser);
foreach my $arg ( @required_args ) {
die "I need a $arg argument" unless $args{$arg};
}
my $create_table_sql = $args{create_table_sql};
my ($dbh, $full_table, $tp) = @args{@required_args};
PTDEBUG && _d('Checking --review table', $full_table);
# If the review database doesn't exist, auto-create it, maybe.
my ($db, $tbl) = Quoter->split_unquote($full_table);
my $show_db_sql = qq{SHOW DATABASES LIKE '$db'};
PTDEBUG && _d($show_db_sql);
my @db_exists = $dbh->selectrow_array($show_db_sql);
if ( !@db_exists && !$args{create_table} ) {
die "--review database $db does not exist and "
. "--no-create-review-tables was specified. You need "
. "to create the database.\n";
}
else {
# Even if the db already exists, do this in case it does not exist
# on a slave.
my $create_db_sql
= "CREATE DATABASE IF NOT EXISTS "
. Quoter->quote($db)
. " /* $tool */";
PTDEBUG && _d($create_db_sql);
eval {
$dbh->do($create_db_sql);
};
if ( $EVAL_ERROR && !@db_exists ) {
warn $EVAL_ERROR;
die "--review database $db does not exist and it cannot be "
. "created automatically. You need to create the database.\n";
}
}
# USE the correct db
my $sql = "USE " . Quoter->quote($db);
PTDEBUG && _d($sql);
$dbh->do($sql);
# Check if the table exists; if not, create it, maybe.
my $tbl_exists = $tp->check_table(
dbh => $dbh,
db => $db,
tbl => $tbl,
);
PTDEBUG && _d('Table exists: ', $tbl_exists ? 'yes' : 'no');
if ( !$tbl_exists && !$args{create_table} ) {
die "Table $full_table does not exist and "
. "--no-create-review-tables was specified. "
. "You need to create the table.\n";
}
else {
PTDEBUG && _d($dbh, $create_table_sql);
eval {
$dbh->do($create_table_sql);
};
if ( $EVAL_ERROR && !$args{create_table} ) {
warn $EVAL_ERROR;
die "--review history table $full_table does not exist and it cannot be "
. "created automatically. You need to create the table.\n"
}
}
}
# TODO: This sub is poorly named since it does more than print reports:
# it aggregates, reports, does QueryReview stuff, etc.
sub print_reports {
@@ -15086,7 +15149,6 @@ sub update_query_review_tables {
first_seen => $stats->{ts}->{min},
last_seen => $stats->{ts}->{max}
);
if ( $o->get('review-history') ) {
my %history;
foreach my $attrib ( @$attribs ) {
$history{$attrib} = $ea->metrics(
@@ -15097,7 +15159,6 @@ sub update_query_review_tables {
$qv->set_review_history(
$item, $sample->{arg} || '', %history);
}
}
return;
}
@@ -15540,9 +15601,9 @@ example,
You can see how useful this meta-data is -- as you analyze your queries, you get
your comments integrated right into the report.
If you add the L<"--review-history"> option, it will also store information into
a separate database table, so you can keep historical trending information on
classes of queries.
The tool will also store information into a separate database table specified
by the L<"--history-table"> option, so you can keep historical trending information
on classes of queries.
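For example, a review run that stores samples and history in explicitly named tables might be invoked like this (a sketch in the test suite's calling style; the DSN, table names, and log file are placeholders):
pt_query_digest::main(
'--review', 'h=127.1,P=12345,u=msandbox,p=msandbox',
'--review-table', 'test.query_review',
'--history-table', 'test.query_review_history',
'--create-review-tables',
'slow.log');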
=back
@@ -15646,9 +15707,6 @@ Collapse multiple identical UNION queries into a single one.
=head1 OPTIONS
DSN values in L<"--review-history"> default to values in L<"--review"> if COPY
is yes.
This tool accepts additional command-line arguments. Refer to the
L<"SYNOPSIS"> and usage information for details.
@@ -15737,19 +15795,13 @@ Continue parsing even if there is an error. The tool will not continue
forever: it stops once any process causes 100 errors, in which case there
is probably a bug in the tool or the input is invalid.
=item --create-review-history-table
=item --create-review-tables
Create the L<"--review-history"> table if it does not exist.
Create the L<"--review"> tables if they do not exist.
This option causes the table specified by L<"--review-history"> to be created
with the default structure shown in the documentation for that option.
=item --create-review-table
Create the L<"--review"> table if it does not exist.
This option causes the table specified by L<"--review"> to be created with the
default structure shown in the documentation for that option.
This option causes the tables specified by L<"--review-table"> and
L<"--history-table"> to be created with the default structures shown
in the documentation for L<"--review">.
=item --daemonize
@@ -15967,6 +16019,12 @@ L<"ATTRIBUTES">).
Show help and exit.
=item --history-table
type: string; default: percona_schema.query_history
Where to save the historical data produced by L<"--review">.
=item --host
short form: -h; type: string
@@ -16233,22 +16291,26 @@ See L<"OUTPUT"> for more information.
=item --resume
Resume parsing from the last file offset. When specified, the tool
writes the last file offset to C<FILE.resume> where C<FILE> is the original
file name given on the command line. When run again with the exact same
file name, the tool reads the last file offset from C<FILE.resume>,
seeks to that position in the file, and resumes parsing events.
type: string
If specified, the tool writes the last file offset, if there is one,
to the given filename. When run again with the same value for this option,
the tool reads the last file offset from the file, seeks to that position
in the log, and resumes parsing events from that point onward.
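A minimal sketch of the resume protocol (the offset handling mirrors the tool's own code above; file names are placeholders):
use strict;
use warnings;
use Fcntl qw(:seek);
my ($log, $resume_file) = ('slow.log', 'slow.log.offset');
open my $log_fh, '<', $log or die "Error opening $log: $!";
if ( -s $resume_file ) {   # a previous run saved an offset
open my $fh, '<', $resume_file or die "Error opening $resume_file: $!";
chomp(my $offset = <$fh>);
seek $log_fh, $offset, SEEK_SET;   # skip events already parsed
}
# ... parse events from $log_fh ...
open my $out, '>', $resume_file or die "Error opening $resume_file: $!";
print { $out } tell($log_fh), "\n";   # save the stopping point for the next run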
=item --review
type: DSN
Store a sample of each class of query in this DSN.
Store a sample of each class of query in this DSN, plus historical values
for review trend analysis.
The argument specifies a table to store all unique query fingerprints in. The
table must have at least the following columns. You can add more columns for
your own special purposes, but they won't be used by pt-query-digest. The
following CREATE TABLE definition is also used for L<"--create-review-tables">.
=for comment ignore-pt-internal-value
MAGIC_create_review:
CREATE TABLE query_review (
@@ -16283,23 +16345,12 @@ After parsing and aggregating events, your table should contain a row for each
fingerprint. This option depends on C<--group-by fingerprint> (which is the
default). It will not work otherwise.
=item --review-history
type: DSN
Additionally, pt-query-digest will save information into a history table,
so you can see how classes of queries have changed over time. You can
change the destination table with the L<"--history-table"> option.
The table in which to store historical values for review trend analysis.
Each time you review queries with L<"--review">, pt-query-digest will save
information into this table so you can see how classes of queries have changed
over time.
This DSN inherits unspecified values from L<"--review">. It should mention a
table in which to store statistics about each class of queries. pt-query-digest
verifies the existence of the table, and your privileges to insert, delete and
update on that table.
pt-query-digest then inspects the columns in the table. The table must have at
least the following columns:
The table must have at least the following columns:
CREATE TABLE query_review_history (
checksum BIGINT UNSIGNED NOT NULL,
@@ -16308,7 +16359,10 @@ least the following columns:
Any columns not mentioned above are inspected to see if they follow a certain
naming convention. The column is special if the name ends with an underscore
followed by any of these MAGIC_history_cols values:
followed by any of these values:
=for comment ignore-pt-internal-value
MAGIC_history_cols
pct|avt|cnt|sum|min|max|pct_95|stddev|median|rank
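For instance, given a history column named C<Query_time_pct_95>, the matching logic (sketched from main() above) splits it into an attribute and a metric:
my $pat = qr/^(.*?)_(pct|avt|cnt|sum|min|max|pct_95|stddev|median|rank)$/;
my ($attr, $metric) = 'Query_time_pct_95' =~ $pat;
# $attr is "Query_time", $metric is "pct_95"; the attribute is added to
# the --select list, and the pct_95 metric is later stored in this column.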
@@ -16324,8 +16378,11 @@ columns and making them part of the primary key along with the checksum. But
you could also just add a ts_min column and make it a DATE type, so you'd get
one row per class of queries per day.
The default table structure follows. The following MAGIC_create_review_history
table definition is used for L<"--create-review-history-table">:
The default table structure follows. The following table definition is used
for L<"--create-review-tables">:
=for comment ignore-pt-internal-value
MAGIC_create_review_history
CREATE TABLE query_review_history (
checksum BIGINT UNSIGNED NOT NULL,
@@ -16431,6 +16488,12 @@ table definition is used for L<"--create-review-history-table">:
Note that we store the count (cnt) for the ts attribute only; it would be
redundant to store this for other attributes.
=item --review-table
type: string; default: percona_schema.query_review
Where to save the samples produced by L<"--review">.
=item --run-time
type: time
@@ -16533,7 +16596,7 @@ Previously, pt-query-digest only aggregated these attributes:
Query_time,Lock_time,Rows_sent,Rows_examined,user,db:Schema,ts
Attributes specified in the L<"--review-history"> table will always be selected
Attributes in the table specified by L<"--history-table"> will always be selected
even if you do not specify L<"--select">.
See also L<"--ignore-attributes"> and L<"ATTRIBUTES">.
@@ -16595,9 +16658,9 @@ several types:
CURRENT_DATE - INTERVAL 7 DAY
If you give a MySQL time expression, then you must also specify a DSN
so that pt-query-digest can connect to MySQL to evaluate the expression. If you
specify L<"--explain">, L<"--processlist">, L<"--review">
or L<"--review-history">, then one of these DSNs will be used automatically.
so that pt-query-digest can connect to MySQL to evaluate the expression.
If you specify L<"--explain">, L<"--processlist">, or L<"--review">, then
one of these DSNs will be used automatically.
Otherwise, you must specify an L<"--aux-dsn"> or pt-query-digest will die
saying that the value is invalid.
@@ -16917,7 +16980,8 @@ Default character set.
dsn: database; copy: yes
Database that contains the query review table.
Default database for the review option. Only useful if there are replication
filters set up.
=item * F
@@ -16951,7 +17015,7 @@ Socket file to use for connection.
=item * t
Table to use as the query review table.
Not used.
=item * u

View File

@@ -1,7 +1,7 @@
{
package JSONReportFormatter;
use Mo;
use JSON;
use JSON ();
use List::Util qw(sum);
@@ -9,8 +9,22 @@ use Transformers qw(make_checksum parse_timestamp);
use constant PTDEBUG => $ENV{PTDEBUG} || 0;
our $pretty_json = undef;
our $sorted_json = undef;
extends qw(QueryReportFormatter);
has _json => (
is => 'ro',
init_arg => undef,
builder => '_build_json',
handles => { encode_json => 'encode' },
);
sub _build_json {
return JSON->new->utf8->pretty($pretty_json)->canonical($sorted_json);
}
override [qw(rusage date hostname files header profile prepared)] => sub {
return;
};
@@ -89,7 +103,7 @@ override query_report => sub {
};
}
return encode_json(\@queries) . "\n";
return $self->encode_json(\@queries) . "\n";
};
1;

View File

@@ -111,7 +111,7 @@ sub new {
# table.
sub set_history_options {
my ( $self, %args ) = @_;
foreach my $arg ( qw(table dbh tbl_struct col_pat) ) {
foreach my $arg ( qw(table tbl_struct col_pat) ) {
die "I need a $arg argument" unless $args{$arg};
}
@@ -157,7 +157,7 @@ sub set_history_options {
} @cols) . ')';
PTDEBUG && _d($sql);
$self->{history_sth} = $args{dbh}->prepare($sql);
$self->{history_sth} = $self->{dbh}->prepare($sql);
$self->{history_metrics} = \@metrics;
return;

View File

@@ -161,7 +161,6 @@ my $hist_struct = $tp->parse(
$qv->set_history_options(
table => 'test.query_review_history',
dbh => $dbh,
quoter => $q,
tbl_struct => $hist_struct,
col_pat => qr/^(.*?)_($pat)$/,
@@ -257,7 +256,6 @@ $hist_struct = $tp->parse(
$tp->get_create_table($dbh, 'test', 'query_review_history'));
$qv->set_history_options(
table => 'test.query_review_history',
dbh => $dbh,
quoter => $q,
tbl_struct => $hist_struct,
col_pat => qr/^(.*?)_($pat)$/,

View File

@@ -34,11 +34,11 @@ else {
my $pid_file = "/tmp/pt-query-digest-test-issue_360.t.$PID";
# Need a clean query review table.
$sb->create_dbs($dbh, [qw(test)]);
$sb->create_dbs($dbh, [qw(test percona_schema)]);
# Run pt-query-digest in the background for 2s,
# saving queries to test.query_review.
diag(`$trunk/bin/pt-query-digest --processlist h=127.1,P=12345,u=msandbox,p=msandbox --interval 0.01 --create-review-table --review h=127.1,P=12345,u=msandbox,p=msandbox,D=test,t=query_review --daemonize --pid $pid_file --log /dev/null --run-time 2`);
diag(`$trunk/bin/pt-query-digest --processlist h=127.1,P=12345,u=msandbox,p=msandbox --interval 0.01 --create-review-table --review h=127.1,P=12345,u=msandbox,p=msandbox --review-table test.query_review --daemonize --pid $pid_file --log /dev/null --run-time 2`);
# Wait until it's running.
PerconaTest::wait_for_files($pid_file);

View File

@@ -13,14 +13,33 @@ use Test::More;
use PerconaTest;
my $cmd = "$trunk/bin/pt-query-digest";
my $help = qx{$cmd --help};
my $output;
# #############################################################################
# Test cmd line op sanity.
# #############################################################################
my $output = `$trunk/bin/pt-query-digest --review h=127.1,P=12345,u=msandbox,p=msandbox`;
like($output, qr/--review DSN requires a D/, 'Dies if no D part in --review DSN');
for my $opt (qw(review-table history-table)) {
$output = `$cmd --review h=127.1,P=12345,u=msandbox,p=msandbox --$opt test`;
like($output, qr/--$opt should be passed a/, "Dies if no database part in --$opt");
}
$output = `$trunk/bin/pt-query-digest --review h=127.1,P=12345,u=msandbox,p=msandbox,D=test`;
like($output, qr/--review DSN requires a D/, 'Dies if no t part in --review DSN');
$output = `$cmd --review h=127.1,P=12345,u=msandbox,p=msandbox,D=test,t=test`;
like($output, qr/--review does not accept a t option/, 'Dies if t part in --review DSN');
like(
$help,
qr/review-table\s+\Qpercona_schema.query_review\E/,
"--review-table has a sane default"
);
like(
$help,
qr/history-table\s+\Qpercona_schema.query_history\E/,
"--history-table has a sane default"
);
# #############################################################################
# https://bugs.launchpad.net/percona-toolkit/+bug/885382
@@ -34,25 +53,25 @@ my @options = qw(
--group-by file
);
$output = `$trunk/bin/pt-query-digest @options --embedded-attributes '-- .*' $sample.slow010.txt`;
$output = `$cmd @options --embedded-attributes '-- .*' $sample.slow010.txt`;
like $output,
qr/\Q--embedded-attributes should be passed two comma-separated patterns, got 1/,
'Bug 885382: --embedded-attributes cardinality';
$output = `$trunk/bin/pt-query-digest @options --embedded-attributes '-- .*,(?{1234})' $sample.slow010.txt`;
$output = `$cmd @options --embedded-attributes '-- .*,(?{1234})' $sample.slow010.txt`;
like $output,
qr/\Q--embedded-attributes Eval-group /,
"Bug 885382: --embedded-attributes rejects invalid patterns early";
$output = `$trunk/bin/pt-query-digest @options --embedded-attributes '-- .*,(?*asdasd' $sample.slow010.txt`;
$output = `$cmd @options --embedded-attributes '-- .*,(?*asdasd' $sample.slow010.txt`;
like $output,
qr/\Q--embedded-attributes Sequence (?*...) not recognized/,
"Bug 885382: --embedded-attributes rejects invalid patterns early";
$output = `$trunk/bin/pt-query-digest @options --embedded-attributes '-- .*,[:alpha:]' $sample.slow010.txt`;
$output = `$cmd @options --embedded-attributes '-- .*,[:alpha:]' $sample.slow010.txt`;
like $output,
qr/\Q--embedded-attributes POSIX syntax [: :] belongs inside character/,
@@ -61,7 +80,7 @@ like $output,
# We removed --statistics, but they should still print out if we use PTDEBUG.
$output = qx{PTDEBUG=1 $trunk/bin/pt-query-digest --no-report ${sample}slow002.txt 2>&1};
$output = qx{PTDEBUG=1 $cmd --no-report ${sample}slow002.txt 2>&1};
my $stats = slurp_file("t/pt-query-digest/samples/stats-slow002.txt");
like(
@@ -81,14 +100,12 @@ like(
# https://bugs.launchpad.net/percona-toolkit/+bug/831525
# #############################################################################
$output = `$trunk/bin/pt-query-digest --help`;
like(
$output,
$help,
qr/\Q--report-format=A\E\s*
\QPrint these sections of the query analysis\E\s*
\Qreport (default rusage,date,hostname,files,\E\s*
\Qheader,profile,query_report,prepared)\E/x,
\Qreport (default rusage\E,\s*date,\s*hostname,\s*files,\s*
header,\s*profile,\s*query_report,\s*prepared\)/x,
"Bug 831525: pt-query-digest help output mangled"
);

View File

@@ -14,6 +14,10 @@ use Test::More;
use PerconaTest;
require "$trunk/bin/pt-query-digest";
no warnings 'once';
local $JSONReportFormatter::sorted_json = 1;
local $JSONReportFormatter::pretty_json = 1;
my @args = qw(--output json);
my $sample = "$trunk/t/lib/samples";
my $results = "t/pt-query-digest/samples";

t/pt-query-digest/resume.t (new file, 163 lines)
View File

@@ -0,0 +1,163 @@
#!/usr/bin/env perl
BEGIN {
die "The PERCONA_TOOLKIT_BRANCH environment variable is not set.\n"
unless $ENV{PERCONA_TOOLKIT_BRANCH} && -d $ENV{PERCONA_TOOLKIT_BRANCH};
unshift @INC, "$ENV{PERCONA_TOOLKIT_BRANCH}/lib";
};
use strict;
use warnings FATAL => 'all';
use English qw(-no_match_vars);
use Test::More;
use IO::File;
use Fcntl qw(:seek);
use File::Temp qw(tempfile);
use PerconaTest;
use Sandbox;
require "$trunk/bin/pt-query-digest";
my $dp = new DSNParser(opts=>$dsn_opts);
my $sb = new Sandbox(basedir => '/tmp', DSNParser => $dp);
my $dbh = $sb->get_dbh_for('master');
if ( !$dbh ) {
plan skip_all => 'Cannot connect to sandbox master';
}
my $samples = "$trunk/t/lib/samples/slowlogs";
my $output;
$sb->create_dbs($dbh, ['test']);
my $resume_file = (tempfile())[1];
my ($fh, $filename) = tempfile(UNLINK => 1);
$fh->autoflush(1);
sub resume_offset_ok {
my ($resume_file, $file, $msg) = @_;
chomp(my $offset = slurp_file($resume_file));
open my $tmp_fh, q{<}, $file or die $OS_ERROR;
seek $tmp_fh, 0, SEEK_END;
is(tell($tmp_fh), $offset, $msg);
}
sub run_pqd {
my @extra_args = @_;
my $run = output(sub { pt_query_digest::main(qw(--limit 10), @extra_args, $filename) }, stderr => 1);
$run =~ s/\d+ms user time.+//;
$run =~ s/Current date: .+//;
return $run;
}
print { $fh } slurp_file("$samples/slow006.txt");
my @runs;
push @runs, run_pqd() for 1, 2;
is($runs[0], $runs[1], "Sanity check: Behaves the same between runs without --resume");
my @resume_runs;
push @resume_runs, run_pqd('--resume', $resume_file) for 1, 2;
(my $without_resume_line = $resume_runs[0]) =~ s/\n\n. Saved resume file offset.+//;
is(
$runs[0],
$without_resume_line,
"First time with --resume just like the first time without"
);
like(
$resume_runs[0],
qr/\QSaved resume file offset\E/,
"SAves offset with --resume"
);
like(
$resume_runs[1],
qr/\QNo events processed.\E/,
"..and there are no events on the second run"
);
resume_offset_ok($resume_file, $filename, "The resume file has the correct offset");
print { $fh } slurp_file("$samples/slow002.txt");
push @resume_runs, run_pqd('--resume', $resume_file) for 1, 2;
unlike(
$resume_runs[2],
qr/\QNo events processed.\E/,
"New run detects new events"
);
like(
$resume_runs[3],
qr/\QNo events processed.\E/,
"And running again after that finds nothing new"
);
resume_offset_ok($resume_file, $filename, "The resume file has the updated offset");
unlink($resume_file);
close $fh;
# #############################################################################
# Now test the interaction with --run-time-mode interval
# #############################################################################
($fh, $filename) = tempfile(UNLINK => 1);
$fh->autoflush(1);
print { $fh } slurp_file("$trunk/t/lib/samples/slowlogs/slow033.txt");
my @run_args = (qw(--run-time-mode interval --run-time 1d --iterations 0),
qw(--report-format query_report));
my @resume_args = (@run_args, '--resume', $resume_file);
my @run_time;
push @run_time, run_pqd(@resume_args) for 1,2;
resume_offset_ok($resume_file, $filename, "The resume file has the correct offset when using --run-time-mode interval");
print { $fh } slurp_file("$samples/slow002.txt");
push @run_time, run_pqd(@resume_args) for 1,2;
resume_offset_ok($resume_file, $filename, "...and it updates correctly");
like(
$_,
qr/\QNo events processed.\E/,
"Runs 2 & 4 find no new data"
) for @run_time[1, 3];
# This shows up in the first report, but shouldn't show up in the
# third run, after we add new events to the file.
my $re = qr/\QSELECT * FROM foo\E/;
unlike(
$run_time[2],
$re,
"Events from the first run are correctly ignored"
);
my $no_resume = run_pqd(@run_args);
like(
$no_resume,
$re,
"...but do show up if run without resume"
);
# #############################################################################
# Done.
# #############################################################################
$sb->wipe_clean($dbh);
ok($sb->ok(), "Sandbox servers") or BAIL_OUT(__FILE__ . " broke the sandbox");
done_testing;
exit;

View File

@@ -22,9 +22,6 @@ my $dbh = $sb->get_dbh_for('master');
if ( !$dbh ) {
plan skip_all => 'Cannot connect to sandbox master';
}
else {
plan tests => 18;
}
sub normalize_numbers {
use Scalar::Util qw(looks_like_number);
@@ -43,21 +40,21 @@ $sb->load_file('master', 't/pt-query-digest/samples/query_review.sql');
# Test --create-review-tables
$output = 'foo'; # clear previous test results
$cmd = "${run_with}slow006.txt --create-review-table --review "
. "h=127.1,P=12345,u=msandbox,p=msandbox,D=test,t=query_review --create-review-history-table "
. "--review-history t=query_review_history";
$cmd = "${run_with}slow006.txt --create-review-tables --review "
. "h=127.1,P=12345,u=msandbox,p=msandbox --review-table test.query_review "
. "--history-table test.query_review_history";
$output = `$cmd >/dev/null 2>&1`;
my ($table) = $dbh->selectrow_array(
"show tables from test like 'query_review'");
is($table, 'query_review', '--create-review');
is($table, 'query_review', '--create-review-tables');
($table) = $dbh->selectrow_array(
"show tables from test like 'query_review_history'");
is($table, 'query_review_history', '--create-review-history-table');
is($table, 'query_review_history', '--create-review-tables');
$output = 'foo'; # clear previous test results
$cmd = "${run_with}slow006.txt --review h=127.1,u=msandbox,p=msandbox,P=12345,D=test,t=query_review "
. "--review-history t=query_review_history";
$cmd = "${run_with}slow006.txt --review h=127.1,u=msandbox,p=msandbox,P=12345 --review-table test.query_review "
. "--history-table test.query_review_history";
$output = `$cmd`;
my $res = $dbh->selectall_arrayref( 'SELECT * FROM test.query_review',
{ Slice => {} } );
@@ -181,17 +178,21 @@ is_deeply(
# have been reviewed, the report should include both of them with
# their respective query review info added to the report.
ok(
no_diff($run_with.'slow006.txt --review h=127.1,P=12345,u=msandbox,p=msandbox,D=test,t=query_review', "t/pt-query-digest/samples/slow006_AR_1.txt"),
no_diff($run_with.'slow006.txt --review h=127.1,P=12345,u=msandbox,p=msandbox --review-table test.query_review --create-review-tables', "t/pt-query-digest/samples/slow006_AR_1.txt"),
'Analyze-review pass 1 reports not-reviewed queries'
);
($table) = $dbh->selectrow_array(
"show tables from percona_schema like 'query_history'");
is($table, 'query_history', '--create-review-tables creates both percona_schema and query_history');
# Mark a query as reviewed and run --report again and that query should
# not be reported.
$dbh->do('UPDATE test.query_review
SET reviewed_by="daniel", reviewed_on="2008-12-24 12:00:00", comments="foo_tbl is ok, so are cranberries"
WHERE checksum=11676753765851784517');
ok(
no_diff($run_with.'slow006.txt --review h=127.1,P=12345,u=msandbox,p=msandbox,D=test,t=query_review', "t/pt-query-digest/samples/slow006_AR_2.txt"),
no_diff($run_with.'slow006.txt --review h=127.1,P=12345,u=msandbox,p=msandbox --review-table test.query_review', "t/pt-query-digest/samples/slow006_AR_2.txt"),
'Analyze-review pass 2 does not report the reviewed query'
);
@@ -199,7 +200,7 @@ ok(
# to re-appear in the report with the reviewed_by, reviewed_on and comments
# info included.
ok(
no_diff($run_with.'slow006.txt --review h=127.1,P=12345,u=msandbox,p=msandbox,D=test,t=query_review --report-all', "t/pt-query-digest/samples/slow006_AR_4.txt"),
no_diff($run_with.'slow006.txt --review h=127.1,P=12345,u=msandbox,p=msandbox --review-table test.query_review --report-all', "t/pt-query-digest/samples/slow006_AR_4.txt"),
'Analyze-review pass 4 with --report-all reports reviewed query'
);
@@ -208,7 +209,7 @@ $dbh->do('ALTER TABLE test.query_review ADD COLUMN foo INT');
$dbh->do('UPDATE test.query_review
SET foo=42 WHERE checksum=15334040482108055940');
ok(
no_diff($run_with.'slow006.txt --review h=127.1,P=12345,u=msandbox,p=msandbox,D=test,t=query_review', "t/pt-query-digest/samples/slow006_AR_5.txt"),
no_diff($run_with.'slow006.txt --review h=127.1,P=12345,u=msandbox,p=msandbox --review-table test.query_review', "t/pt-query-digest/samples/slow006_AR_5.txt"),
'Analyze-review pass 5 reports new review info column'
);
@@ -217,7 +218,7 @@ ok(
$dbh->do("update test.query_review set first_seen='0000-00-00 00:00:00', "
. " last_seen='0000-00-00 00:00:00'");
$output = 'foo'; # clear previous test results
$cmd = "${run_with}slow022.txt --review h=127.1,P=12345,u=msandbox,p=msandbox,D=test,t=query_review";
$cmd = "${run_with}slow022.txt --review h=127.1,P=12345,u=msandbox,p=msandbox --review-table test.query_review";
$output = `$cmd`;
unlike($output, qr/last_seen/, 'no last_seen when 0000 timestamp');
unlike($output, qr/first_seen/, 'no first_seen when 0000 timestamp');
@@ -231,7 +232,7 @@ unlike($output, qr/0000-00-00 00:00:00/, 'no 0000-00-00 00:00:00 timestamp');
# Make sure a missing Time property does not cause a crash. Don't test data
# in table, because it varies based on when you run the test.
$output = 'foo'; # clear previous test results
$cmd = "${run_with}slow021.txt --review h=127.1,P=12345,u=msandbox,p=msandbox,D=test,t=query_review";
$cmd = "${run_with}slow021.txt --review h=127.1,P=12345,u=msandbox,p=msandbox --review-table test.query_review";
$output = `$cmd`;
unlike($output, qr/Use of uninitialized value/, 'didnt crash due to undef ts');
@@ -239,7 +240,7 @@ unlike($output, qr/Use of uninitialized value/, 'didnt crash due to undef ts');
# crash. Don't test data in table, because it varies based on when you run
# the test.
$output = 'foo'; # clear previous test results
$cmd = "${run_with}slow022.txt --review h=127.1,P=12345,u=msandbox,p=msandbox,D=test,t=query_review";
$cmd = "${run_with}slow022.txt --review h=127.1,P=12345,u=msandbox,p=msandbox --review-table test.query_review";
$output = `$cmd`;
# Don't test data in table, because it varies based on when you run the test.
unlike($output, qr/Use of uninitialized value/, 'no crash due to totally missing ts');
@@ -248,7 +249,7 @@ unlike($output, qr/Use of uninitialized value/, 'no crash due to totally missing
# --review --no-report
# #############################################################################
$sb->load_file('master', 't/pt-query-digest/samples/query_review.sql');
$output = `${run_with}slow006.txt --review h=127.1,P=12345,u=msandbox,p=msandbox,D=test,t=query_review --no-report --create-review-table`;
$output = `${run_with}slow006.txt --review h=127.1,P=12345,u=msandbox,p=msandbox --review-table test.query_review --no-report --create-review-table`;
$res = $dbh->selectall_arrayref('SELECT * FROM test.query_review');
is(
$res->[0]->[1],
@@ -268,7 +269,7 @@ is(
$dbh->do('truncate table test.query_review');
$dbh->do('truncate table test.query_review_history');
`${run_with}slow002.txt --review h=127.1,u=msandbox,p=msandbox,P=12345,D=test,t=query_review --review-history t=query_review_history --no-report --filter '\$event->{arg} =~ m/foo\.bar/' > /dev/null`;
`${run_with}slow002.txt --review h=127.1,u=msandbox,p=msandbox,P=12345 --review-table test.query_review --history-table test.query_review_history --no-report --filter '\$event->{arg} =~ m/foo\.bar/' > /dev/null`;
$res = $dbh->selectall_arrayref( 'SELECT * FROM test.query_review_history',
{ Slice => {} } );
@@ -396,8 +397,9 @@ $dbh->do($min_tbl);
$output = output(
sub { pt_query_digest::main(
'--review', 'h=127.1,u=msandbox,p=msandbox,P=12345,D=test,t=query_review',
'--review-history', 't=query_review_history',
'--review', 'h=127.1,u=msandbox,p=msandbox,P=12345',
'--review-table', 'test.query_review',
'--history-table', 'test.query_review_history',
qw(--no-report --no-continue-on-error),
"$trunk/t/lib/samples/slow002.txt")
},
@@ -415,4 +417,5 @@ unlike(
# #############################################################################
$sb->wipe_clean($dbh);
ok($sb->ok(), "Sandbox servers") or BAIL_OUT(__FILE__ . " broke the sandbox");
done_testing;
exit;

View File

@@ -1,14 +1,12 @@
# Profile
# Rank Query ID Response time Calls R/Call Apdx V/M EXPLAIN Item
# ==== ================== ============= ===== ====== ==== ===== ======= ========
# 1 0xD4B6A5CD2F2F485C 0.2148 100.0% 1 0.2148 1.00 0.00 TF>aa SELECT t
# Rank Query ID Response time Calls R/Call V/M Item
# ==== ================== ============= ===== ====== ===== ========
# 1 0xD4B6A5CD2F2F485C 0.2148 100.0% 1 0.2148 0.00 SELECT t
# Query 1: 0 QPS, 0x concurrency, ID 0xD4B6A5CD2F2F485C at byte 0 ________
# This item is included in the report because it matches --limit.
# Scores: Apdex = 1.00 [1.0]*, V/M = 0.00
# EXPLAIN sparkline: TF>aa
# Query_time sparkline: | ^ |
# Scores: V/M = 0.00
# Time range: all events occurred at 2010-12-14 16:12:28
# Attribute pct total min max avg 95% stddev median
# ============ === ======= ======= ======= ======= ======= ======= =======

View File

@@ -1,2 +1,203 @@
[{"class":{"checksum":"66825DDC008FFA89","ts_min":"2007-12-18 11:48:27","ts_max":"2007-12-18 11:48:27","fingerprint":"update d?tuningdetail_?_? n inner join d?gonzo a using(gonzo) set n.column? = a.column?, n.word? = a.word?","sample":"update db2.tuningdetail_21_265507 n\n inner join db1.gonzo a using(gonzo) \n set n.column1 = a.column1, n.word3 = a.word3","cnt":1},"attributes":{"bytes":{"pct":"0.12","avg":"129.000000","min":"129.000000","max":"129.000000","median":"129.000000","cnt":"1.000000","stddev":0,"pct_95":"129.000000","sum":"129.000000"},"db":{"pct":0.142857142857143,"avg":0,"min":"db1","max":"db1","median":0,"cnt":1,"stddev":0,"pct_95":0,"sum":null},"Full_join":{"pct":0.125,"avg":0,"min":0,"max":0,"median":0,"cnt":1,"stddev":0,"pct_95":0,"sum":0},"Merge_passes":{"pct":"0.12","avg":0,"min":"0","max":"0","median":0,"cnt":"1.000000","stddev":0,"pct_95":0,"sum":0},"Filesort":{"pct":0.125,"avg":0,"min":0,"max":0,"median":0,"cnt":1,"stddev":0,"pct_95":0,"sum":0},"user":{"pct":0.125,"avg":0,"min":"[SQL_SLAVE]","max":"[SQL_SLAVE]","median":0,"cnt":1,"stddev":0,"pct_95":0,"sum":null},"Rows_sent":{"pct":"0.12","avg":0,"min":"0","max":"0","median":0,"cnt":"1.000000","stddev":0,"pct_95":0,"sum":0},"Lock_time":{"pct":"0.12","avg":"0.000091","min":"0.000091","max":"0.000091","median":"0.000091","cnt":"1.000000","stddev":0,"pct_95":"0.000091","sum":"0.000091"},"Full_scan":{"pct":0.125,"avg":1,"min":1,"max":1,"median":0,"cnt":1,"stddev":0,"pct_95":0,"sum":1},"Filesort_on_disk":{"pct":0.125,"avg":0,"min":0,"max":0,"median":0,"cnt":1,"stddev":0,"pct_95":0,"sum":0},"host":{"pct":0.125,"avg":0,"min":"","max":"","median":0,"cnt":1,"stddev":0,"pct_95":0,"sum":null},"pos_in_log":{"pct":"0.12","avg":"338.000000","min":"338.000000","max":"338.000000","median":"338.000000","cnt":"1.000000","stddev":0,"pct_95":"338.000000","sum":"338.000000"},"Tmp_table":{"pct":0.125,"avg":0,"min":0,"max":0,"median":0,"cnt":1,"stddev":0,"pct_95":0,"sum":0},"QC_Hit":{"pct":0.125,"avg":0,"min":0,"max":0,"median":0,"cnt":1,"stddev":0,"pct_95":0,"sum":0},"Tmp_table_on_disk":{"pct":0.125,"avg":0,"min":0,"max":0,"median":0,"cnt":1,"stddev":0,"pct_95":0,"sum":0},"Rows_examined":{"pct":"0.12","avg":"62951.000000","min":"62951.000000","max":"62951.000000","median":"62951.000000","cnt":"1.000000","stddev":0,"pct_95":"62951.000000","sum":"62951.000000"},"Query_time":{"pct":"0.12","avg":"0.726052","min":"0.726052","max":"0.726052","median":"0.726052","cnt":"1.000000","stddev":0,"pct_95":"0.726052","sum":"0.726052"}}}]
[
{
"attributes" : {
"Filesort" : {
"avg" : 0,
"cnt" : 1,
"max" : 0,
"median" : 0,
"min" : 0,
"pct" : 0.125,
"pct_95" : 0,
"stddev" : 0,
"sum" : 0
},
"Filesort_on_disk" : {
"avg" : 0,
"cnt" : 1,
"max" : 0,
"median" : 0,
"min" : 0,
"pct" : 0.125,
"pct_95" : 0,
"stddev" : 0,
"sum" : 0
},
"Full_join" : {
"avg" : 0,
"cnt" : 1,
"max" : 0,
"median" : 0,
"min" : 0,
"pct" : 0.125,
"pct_95" : 0,
"stddev" : 0,
"sum" : 0
},
"Full_scan" : {
"avg" : 1,
"cnt" : 1,
"max" : 1,
"median" : 0,
"min" : 1,
"pct" : 0.125,
"pct_95" : 0,
"stddev" : 0,
"sum" : 1
},
"Lock_time" : {
"avg" : "0.000091",
"cnt" : "1.000000",
"max" : "0.000091",
"median" : "0.000091",
"min" : "0.000091",
"pct" : "0.12",
"pct_95" : "0.000091",
"stddev" : 0,
"sum" : "0.000091"
},
"Merge_passes" : {
"avg" : 0,
"cnt" : "1.000000",
"max" : "0",
"median" : 0,
"min" : "0",
"pct" : "0.12",
"pct_95" : 0,
"stddev" : 0,
"sum" : 0
},
"QC_Hit" : {
"avg" : 0,
"cnt" : 1,
"max" : 0,
"median" : 0,
"min" : 0,
"pct" : 0.125,
"pct_95" : 0,
"stddev" : 0,
"sum" : 0
},
"Query_time" : {
"avg" : "0.726052",
"cnt" : "1.000000",
"max" : "0.726052",
"median" : "0.726052",
"min" : "0.726052",
"pct" : "0.12",
"pct_95" : "0.726052",
"stddev" : 0,
"sum" : "0.726052"
},
"Rows_examined" : {
"avg" : "62951.000000",
"cnt" : "1.000000",
"max" : "62951.000000",
"median" : "62951.000000",
"min" : "62951.000000",
"pct" : "0.12",
"pct_95" : "62951.000000",
"stddev" : 0,
"sum" : "62951.000000"
},
"Rows_sent" : {
"avg" : 0,
"cnt" : "1.000000",
"max" : "0",
"median" : 0,
"min" : "0",
"pct" : "0.12",
"pct_95" : 0,
"stddev" : 0,
"sum" : 0
},
"Tmp_table" : {
"avg" : 0,
"cnt" : 1,
"max" : 0,
"median" : 0,
"min" : 0,
"pct" : 0.125,
"pct_95" : 0,
"stddev" : 0,
"sum" : 0
},
"Tmp_table_on_disk" : {
"avg" : 0,
"cnt" : 1,
"max" : 0,
"median" : 0,
"min" : 0,
"pct" : 0.125,
"pct_95" : 0,
"stddev" : 0,
"sum" : 0
},
"bytes" : {
"avg" : "129.000000",
"cnt" : "1.000000",
"max" : "129.000000",
"median" : "129.000000",
"min" : "129.000000",
"pct" : "0.12",
"pct_95" : "129.000000",
"stddev" : 0,
"sum" : "129.000000"
},
"db" : {
"avg" : 0,
"cnt" : 1,
"max" : "db1",
"median" : 0,
"min" : "db1",
"pct" : 0.142857142857143,
"pct_95" : 0,
"stddev" : 0,
"sum" : null
},
"host" : {
"avg" : 0,
"cnt" : 1,
"max" : "",
"median" : 0,
"min" : "",
"pct" : 0.125,
"pct_95" : 0,
"stddev" : 0,
"sum" : null
},
"pos_in_log" : {
"avg" : "338.000000",
"cnt" : "1.000000",
"max" : "338.000000",
"median" : "338.000000",
"min" : "338.000000",
"pct" : "0.12",
"pct_95" : "338.000000",
"stddev" : 0,
"sum" : "338.000000"
},
"user" : {
"avg" : 0,
"cnt" : 1,
"max" : "[SQL_SLAVE]",
"median" : 0,
"min" : "[SQL_SLAVE]",
"pct" : 0.125,
"pct_95" : 0,
"stddev" : 0,
"sum" : null
}
},
"class" : {
"checksum" : "66825DDC008FFA89",
"cnt" : 1,
"fingerprint" : "update d?tuningdetail_?_? n inner join d?gonzo a using(gonzo) set n.column? = a.column?, n.word? = a.word?",
"sample" : "update db2.tuningdetail_21_265507 n\n inner join db1.gonzo a using(gonzo) \n set n.column1 = a.column1, n.word3 = a.word3",
"ts_max" : "2007-12-18 11:48:27",
"ts_min" : "2007-12-18 11:48:27"
}
}
]

View File

@@ -1,2 +1,359 @@
[{"class":{"checksum":"AA8E9FA785927259","ts_min":"2009-12-08 09:23:49.637394","ts_max":"2009-12-08 09:23:49.637394","fingerprint":"prepare select i from d.t where i=?","sample":"PREPARE SELECT i FROM d.t WHERE i=?","cnt":1},"attributes":{"bytes":{"pct":"0.33","avg":"35.000000","min":"35.000000","max":"35.000000","median":"35.000000","cnt":"1.000000","stddev":0,"pct_95":"35.000000","sum":"35.000000"},"No_good_index_used":{"pct":0.333333333333333,"avg":0,"min":0,"max":0,"median":0,"cnt":1,"stddev":0,"pct_95":0,"sum":0},"No_index_used":{"pct":0.333333333333333,"avg":0,"min":0,"max":0,"median":0,"cnt":1,"stddev":0,"pct_95":0,"sum":0},"host":{"pct":0.333333333333333,"avg":0,"min":"127.0.0.1","max":"127.0.0.1","median":0,"cnt":1,"stddev":0,"pct_95":0,"sum":null},"pos_in_log":{"pct":"0.33","avg":0,"min":0,"max":0,"median":0,"cnt":"1.000000","stddev":0,"pct_95":0,"sum":0},"Rows_affected":{"pct":"0.33","avg":0,"min":0,"max":0,"median":0,"cnt":"1.000000","stddev":0,"pct_95":0,"sum":0},"Statement_id":{"pct":0.5,"avg":0,"min":2,"max":2,"median":0,"cnt":1,"stddev":0,"pct_95":0,"sum":null},"Error_no":{"pct":0.333333333333333,"avg":0,"min":"none","max":"none","median":0,"cnt":1,"stddev":0,"pct_95":0,"sum":null},"Query_time":{"pct":"0.33","avg":"0.000286","min":"0.000286","max":"0.000286","median":"0.000286","cnt":"1.000000","stddev":0,"pct_95":"0.000286","sum":"0.000286"},"Warning_count":{"pct":"0.33","avg":0,"min":0,"max":0,"median":0,"cnt":"1.000000","stddev":0,"pct_95":0,"sum":0}}},{"class":{"checksum":"3F79759E7FA2F117","ts_min":"2009-12-08 09:23:49.637892","ts_max":"2009-12-08 09:23:49.637892","fingerprint":"execute select i from d.t where i=?","sample":"EXECUTE SELECT i FROM d.t WHERE i=\"3\"","cnt":1},"attributes":{"bytes":{"pct":"0.33","avg":"37.000000","min":"37.000000","max":"37.000000","median":"37.000000","cnt":"1.000000","stddev":0,"pct_95":"37.000000","sum":"37.000000"},"No_good_index_used":{"pct":0.333333333333333,"avg":0,"min":0,"max":0,"median":0,"cnt":1,"stddev":0,"pct_95":0,"sum":0},"No_index_used":{"pct":0.333333333333333,"avg":1,"min":1,"max":1,"median":0,"cnt":1,"stddev":0,"pct_95":0,"sum":1},"host":{"pct":0.333333333333333,"avg":0,"min":"127.0.0.1","max":"127.0.0.1","median":0,"cnt":1,"stddev":0,"pct_95":0,"sum":null},"pos_in_log":{"pct":"0.33","avg":"1106.000000","min":"1106.000000","max":"1106.000000","median":"1106.000000","cnt":"1.000000","stddev":0,"pct_95":"1106.000000","sum":"1106.000000"},"Rows_affected":{"pct":"0.33","avg":0,"min":0,"max":0,"median":0,"cnt":"1.000000","stddev":0,"pct_95":0,"sum":0},"Statement_id":{"pct":0.5,"avg":0,"min":"2","max":"2","median":0,"cnt":1,"stddev":0,"pct_95":0,"sum":null},"Error_no":{"pct":0.333333333333333,"avg":0,"min":"none","max":"none","median":0,"cnt":1,"stddev":0,"pct_95":0,"sum":null},"Query_time":{"pct":"0.33","avg":"0.000281","min":"0.000281","max":"0.000281","median":"0.000281","cnt":"1.000000","stddev":0,"pct_95":"0.000281","sum":"0.000281"},"Warning_count":{"pct":"0.33","avg":0,"min":0,"max":0,"median":0,"cnt":"1.000000","stddev":0,"pct_95":0,"sum":0}}},{"class":{"checksum":"AA353644DE4C4CB4","ts_min":"2009-12-08 09:23:49.638381","ts_max":"2009-12-08 09:23:49.638381","fingerprint":"administrator command: Quit","sample":"administrator command: 
Quit","cnt":1},"attributes":{"bytes":{"pct":"0.33","avg":"27.000000","min":"27.000000","max":"27.000000","median":"27.000000","cnt":"1.000000","stddev":0,"pct_95":"27.000000","sum":"27.000000"},"No_good_index_used":{"pct":0.333333333333333,"avg":0,"min":0,"max":0,"median":0,"cnt":1,"stddev":0,"pct_95":0,"sum":0},"No_index_used":{"pct":0.333333333333333,"avg":0,"min":0,"max":0,"median":0,"cnt":1,"stddev":0,"pct_95":0,"sum":0},"host":{"pct":0.333333333333333,"avg":0,"min":"127.0.0.1","max":"127.0.0.1","median":0,"cnt":1,"stddev":0,"pct_95":0,"sum":null},"pos_in_log":{"pct":"0.33","avg":"1850.000000","min":"1850.000000","max":"1850.000000","median":"1850.000000","cnt":"1.000000","stddev":0,"pct_95":"1850.000000","sum":"1850.000000"},"Rows_affected":{"pct":"0.33","avg":0,"min":0,"max":0,"median":0,"cnt":"1.000000","stddev":0,"pct_95":0,"sum":0},"Error_no":{"pct":0.333333333333333,"avg":0,"min":"none","max":"none","median":0,"cnt":1,"stddev":0,"pct_95":0,"sum":null},"Query_time":{"pct":"0.33","avg":0,"min":"0.000000","max":"0.000000","median":"0.000000","cnt":"1.000000","stddev":0,"pct_95":"0.000000","sum":0},"Warning_count":{"pct":"0.33","avg":0,"min":0,"max":0,"median":0,"cnt":"1.000000","stddev":0,"pct_95":0,"sum":0}}}]
[
{
"attributes" : {
"Error_no" : {
"avg" : 0,
"cnt" : 1,
"max" : "none",
"median" : 0,
"min" : "none",
"pct" : 0.333333333333333,
"pct_95" : 0,
"stddev" : 0,
"sum" : null
},
"No_good_index_used" : {
"avg" : 0,
"cnt" : 1,
"max" : 0,
"median" : 0,
"min" : 0,
"pct" : 0.333333333333333,
"pct_95" : 0,
"stddev" : 0,
"sum" : 0
},
"No_index_used" : {
"avg" : 0,
"cnt" : 1,
"max" : 0,
"median" : 0,
"min" : 0,
"pct" : 0.333333333333333,
"pct_95" : 0,
"stddev" : 0,
"sum" : 0
},
"Query_time" : {
"avg" : "0.000286",
"cnt" : "1.000000",
"max" : "0.000286",
"median" : "0.000286",
"min" : "0.000286",
"pct" : "0.33",
"pct_95" : "0.000286",
"stddev" : 0,
"sum" : "0.000286"
},
"Rows_affected" : {
"avg" : 0,
"cnt" : "1.000000",
"max" : 0,
"median" : 0,
"min" : 0,
"pct" : "0.33",
"pct_95" : 0,
"stddev" : 0,
"sum" : 0
},
"Statement_id" : {
"avg" : 0,
"cnt" : 1,
"max" : 2,
"median" : 0,
"min" : 2,
"pct" : 0.5,
"pct_95" : 0,
"stddev" : 0,
"sum" : null
},
"Warning_count" : {
"avg" : 0,
"cnt" : "1.000000",
"max" : 0,
"median" : 0,
"min" : 0,
"pct" : "0.33",
"pct_95" : 0,
"stddev" : 0,
"sum" : 0
},
"bytes" : {
"avg" : "35.000000",
"cnt" : "1.000000",
"max" : "35.000000",
"median" : "35.000000",
"min" : "35.000000",
"pct" : "0.33",
"pct_95" : "35.000000",
"stddev" : 0,
"sum" : "35.000000"
},
"host" : {
"avg" : 0,
"cnt" : 1,
"max" : "127.0.0.1",
"median" : 0,
"min" : "127.0.0.1",
"pct" : 0.333333333333333,
"pct_95" : 0,
"stddev" : 0,
"sum" : null
},
"pos_in_log" : {
"avg" : 0,
"cnt" : "1.000000",
"max" : 0,
"median" : 0,
"min" : 0,
"pct" : "0.33",
"pct_95" : 0,
"stddev" : 0,
"sum" : 0
}
},
"class" : {
"checksum" : "AA8E9FA785927259",
"cnt" : 1,
"fingerprint" : "prepare select i from d.t where i=?",
"sample" : "PREPARE SELECT i FROM d.t WHERE i=?",
"ts_max" : "2009-12-08 09:23:49.637394",
"ts_min" : "2009-12-08 09:23:49.637394"
}
},
{
"attributes" : {
"Error_no" : {
"avg" : 0,
"cnt" : 1,
"max" : "none",
"median" : 0,
"min" : "none",
"pct" : 0.333333333333333,
"pct_95" : 0,
"stddev" : 0,
"sum" : null
},
"No_good_index_used" : {
"avg" : 0,
"cnt" : 1,
"max" : 0,
"median" : 0,
"min" : 0,
"pct" : 0.333333333333333,
"pct_95" : 0,
"stddev" : 0,
"sum" : 0
},
"No_index_used" : {
"avg" : 1,
"cnt" : 1,
"max" : 1,
"median" : 0,
"min" : 1,
"pct" : 0.333333333333333,
"pct_95" : 0,
"stddev" : 0,
"sum" : 1
},
"Query_time" : {
"avg" : "0.000281",
"cnt" : "1.000000",
"max" : "0.000281",
"median" : "0.000281",
"min" : "0.000281",
"pct" : "0.33",
"pct_95" : "0.000281",
"stddev" : 0,
"sum" : "0.000281"
},
"Rows_affected" : {
"avg" : 0,
"cnt" : "1.000000",
"max" : 0,
"median" : 0,
"min" : 0,
"pct" : "0.33",
"pct_95" : 0,
"stddev" : 0,
"sum" : 0
},
"Statement_id" : {
"avg" : 0,
"cnt" : 1,
"max" : "2",
"median" : 0,
"min" : "2",
"pct" : 0.5,
"pct_95" : 0,
"stddev" : 0,
"sum" : null
},
"Warning_count" : {
"avg" : 0,
"cnt" : "1.000000",
"max" : 0,
"median" : 0,
"min" : 0,
"pct" : "0.33",
"pct_95" : 0,
"stddev" : 0,
"sum" : 0
},
"bytes" : {
"avg" : "37.000000",
"cnt" : "1.000000",
"max" : "37.000000",
"median" : "37.000000",
"min" : "37.000000",
"pct" : "0.33",
"pct_95" : "37.000000",
"stddev" : 0,
"sum" : "37.000000"
},
"host" : {
"avg" : 0,
"cnt" : 1,
"max" : "127.0.0.1",
"median" : 0,
"min" : "127.0.0.1",
"pct" : 0.333333333333333,
"pct_95" : 0,
"stddev" : 0,
"sum" : null
},
"pos_in_log" : {
"avg" : "1106.000000",
"cnt" : "1.000000",
"max" : "1106.000000",
"median" : "1106.000000",
"min" : "1106.000000",
"pct" : "0.33",
"pct_95" : "1106.000000",
"stddev" : 0,
"sum" : "1106.000000"
}
},
"class" : {
"checksum" : "3F79759E7FA2F117",
"cnt" : 1,
"fingerprint" : "execute select i from d.t where i=?",
"sample" : "EXECUTE SELECT i FROM d.t WHERE i=\"3\"",
"ts_max" : "2009-12-08 09:23:49.637892",
"ts_min" : "2009-12-08 09:23:49.637892"
}
},
{
"attributes" : {
"Error_no" : {
"avg" : 0,
"cnt" : 1,
"max" : "none",
"median" : 0,
"min" : "none",
"pct" : 0.333333333333333,
"pct_95" : 0,
"stddev" : 0,
"sum" : null
},
"No_good_index_used" : {
"avg" : 0,
"cnt" : 1,
"max" : 0,
"median" : 0,
"min" : 0,
"pct" : 0.333333333333333,
"pct_95" : 0,
"stddev" : 0,
"sum" : 0
},
"No_index_used" : {
"avg" : 0,
"cnt" : 1,
"max" : 0,
"median" : 0,
"min" : 0,
"pct" : 0.333333333333333,
"pct_95" : 0,
"stddev" : 0,
"sum" : 0
},
"Query_time" : {
"avg" : 0,
"cnt" : "1.000000",
"max" : "0.000000",
"median" : "0.000000",
"min" : "0.000000",
"pct" : "0.33",
"pct_95" : "0.000000",
"stddev" : 0,
"sum" : 0
},
"Rows_affected" : {
"avg" : 0,
"cnt" : "1.000000",
"max" : 0,
"median" : 0,
"min" : 0,
"pct" : "0.33",
"pct_95" : 0,
"stddev" : 0,
"sum" : 0
},
"Warning_count" : {
"avg" : 0,
"cnt" : "1.000000",
"max" : 0,
"median" : 0,
"min" : 0,
"pct" : "0.33",
"pct_95" : 0,
"stddev" : 0,
"sum" : 0
},
"bytes" : {
"avg" : "27.000000",
"cnt" : "1.000000",
"max" : "27.000000",
"median" : "27.000000",
"min" : "27.000000",
"pct" : "0.33",
"pct_95" : "27.000000",
"stddev" : 0,
"sum" : "27.000000"
},
"host" : {
"avg" : 0,
"cnt" : 1,
"max" : "127.0.0.1",
"median" : 0,
"min" : "127.0.0.1",
"pct" : 0.333333333333333,
"pct_95" : 0,
"stddev" : 0,
"sum" : null
},
"pos_in_log" : {
"avg" : "1850.000000",
"cnt" : "1.000000",
"max" : "1850.000000",
"median" : "1850.000000",
"min" : "1850.000000",
"pct" : "0.33",
"pct_95" : "1850.000000",
"stddev" : 0,
"sum" : "1850.000000"
}
},
"class" : {
"checksum" : "AA353644DE4C4CB4",
"cnt" : 1,
"fingerprint" : "administrator command: Quit",
"sample" : "administrator command: Quit",
"ts_max" : "2009-12-08 09:23:49.638381",
"ts_min" : "2009-12-08 09:23:49.638381"
}
}
]

View File

@@ -1,9 +1,7 @@
# Query 1: 0 QPS, 0x concurrency, ID 0x8E306CDB7A800841 at byte 0 ________
# This item is included in the report because it matches --limit.
# Scores: Apdex = 1.00 [1.0]*, V/M = 0.00
# EXPLAIN sparkline: s
# Query_time sparkline: | ^ |
# Scores: V/M = 0.00
# Time range: all events occurred at 2007-12-18 11:48:27
# Attribute pct total min max avg 95% stddev median
# ============ === ======= ======= ======= ======= ======= ======= =======

View File

@@ -1,9 +1,7 @@
# Query 1: 0 QPS, 0x concurrency, ID 0x8E306CDB7A800841 at byte 0 ________
# This item is included in the report because it matches --limit.
# Scores: Apdex = 1.00 [1.0]*, V/M = 0.00
# EXPLAIN sparkline: s
# Query_time sparkline: | ^ |
# Scores: V/M = 0.00
# Time range: all events occurred at 2007-12-18 11:48:27
# Attribute pct total min max avg 95% stddev median
# ============ === ======= ======= ======= ======= ======= ======= =======

View File

@@ -1,9 +1,7 @@
# Query 1: 0 QPS, 0x concurrency, ID 0x8E306CDB7A800841 at byte 0 ________
# This item is included in the report because it matches --limit.
# Scores: Apdex = 1.00 [1.0]*, V/M = 0.00
# EXPLAIN sparkline: I
# Query_time sparkline: | ^ |
# Scores: V/M = 0.00
# Time range: all events occurred at 2007-12-18 11:48:27
# Attribute pct total min max avg 95% stddev median
# ============ === ======= ======= ======= ======= ======= ======= =======