Mirror of https://github.com/percona/percona-toolkit.git (synced 2025-09-23 04:25:43 +00:00)
Merge release-2.2.8.
@@ -1,236 +0,0 @@
#!/usr/bin/env perl

BEGIN {
   die "The PERCONA_TOOLKIT_BRANCH environment variable is not set.\n"
      unless $ENV{PERCONA_TOOLKIT_BRANCH} && -d $ENV{PERCONA_TOOLKIT_BRANCH};
   unshift @INC, "$ENV{PERCONA_TOOLKIT_BRANCH}/lib";
};

use strict;
use warnings FATAL => 'all';
use English qw(-no_match_vars);
use Test::More;
use JSON;
use File::Temp qw(tempdir);

use Percona::Test;
use Percona::Test::Mock::UserAgent;
use Percona::WebAPI::Client;
use Percona::WebAPI::Resource::Agent;
use Percona::WebAPI::Resource::Config;
use Percona::WebAPI::Resource::Service;
use Percona::WebAPI::Resource::Task;

Percona::Toolkit->import(qw(Dumper have_required_args));
Percona::WebAPI::Representation->import(qw(as_json as_hashref));

# #############################################################################
# Create a client with a mock user-agent.
# #############################################################################

my $json = JSON->new;
$json->allow_blessed([]);
$json->convert_blessed([]);

my $ua = Percona::Test::Mock::UserAgent->new(
   encode => sub { my $c = shift; return $json->encode($c || {}) },
);

my $client = eval {
   Percona::WebAPI::Client->new(
      api_key => '123',
      ua => $ua,
   );
};

is(
   $EVAL_ERROR,
   '',
   'Create client'
) or die;

# #############################################################################
# First thing a client should do is get the entry links.
# #############################################################################

my $return_links = {  # what the server returns
   agents => '/agents',
};

$ua->{responses}->{get} = [
   {
      content => {
         links => $return_links,
      }
   },
];

my $links = $client->get(link => $client->entry_link);

is_deeply(
   $links,
   $return_links,
   "Get entry links"
) or diag(Dumper($links));

is_deeply(
   $ua->{requests},
   [
      'GET https://cloud-api.percona.com',
   ],
   "1 request, 1 GET"
) or diag(Dumper($ua->{requests}));


# #############################################################################
# Second, a new client will POST an Agent for itself. The entry links
# should have an "agents" link. The server response is empty but the
# URI for the new Agent resource is given by the Location header.
# #############################################################################

my $agent = Percona::WebAPI::Resource::Agent->new(
   id => '123',
   hostname => 'host',
);

$ua->{responses}->{post} = [
   {
      headers => { 'Location' => 'agents/5' },
      content => '',
   },
];

my $uri = $client->post(resources => $agent, link => $links->{agents});

is(
   $uri,
   "agents/5",
   "POST Agent, got Location URI"
);

# #############################################################################
# After successfully creating the new Agent, the client should fetch
# the new Agent resource which will have links to the next step: the
# agent's config.
# #############################################################################

$return_links = {
   self => 'agents/5',
   config => 'agents/5/config',
};

my $content = {
   %{ as_hashref($agent) },
   links => $return_links,
};

$ua->{responses}->{get} = [
   {
      headers => { 'X-Percona-Resource-Type' => 'Agent' },
      content => $content,
   },
];

# Re-using $agent, i.e. updating it with the actual, newly created
# Agent resource as returned by the server with links.
$agent = $client->get(link => $uri);

# Need to use with_links=>1 here because as_hashref() removes
# links by default because it's usually used to encode and send
# resources, and clients never send links; but here we're using
# it for testing.
is_deeply(
   as_hashref($agent, with_links => 1),
   $content,
   "GET Agent with links"
) or diag(Dumper(as_hashref($agent, with_links => 1)));

# #############################################################################
# Now the agent can get its Config.
# #############################################################################

$return_links = {
   self => 'agents/5/config',
   services => 'agents/5/services',
};

my $return_config = Percona::WebAPI::Resource::Config->new(
   ts => '100',
   name => 'Default',
   options => {},
   links => $return_links,
);

$ua->{responses}->{get} = [
   {
      headers => { 'X-Percona-Resource-Type' => 'Config' },
      content => as_hashref($return_config, with_links => 1),
   },
];

my $config = $client->get(link => $agent->links->{config});

is_deeply(
   as_hashref($config, with_links => 1),
   as_hashref($return_config, with_links => 1),
   "GET Config"
) or diag(Dumper(as_hashref($config, with_links => 1)));

# #############################################################################
# Once an agent is configured, i.e. successfully gets a Config resource,
# its Config should have a services link which returns a list of Service
# resources, each with their own links.
# #############################################################################

$return_links = {
   'send_data' => '/query-monitor',
};

my $run0 = Percona::WebAPI::Resource::Task->new(
   name => 'run-pqd',
   number => '0',
   program => 'pt-query-digest',
   options => '--output json',
   output => 'spool',
);

my $svc0 = Percona::WebAPI::Resource::Service->new(
   ts => '123',
   name => 'query-monitor',
   run_schedule => '1 * * * *',
   spool_schedule => '2 * * * *',
   tasks => [ $run0 ],
   links => $return_links,
);

$ua->{responses}->{get} = [
   {
      headers => { 'X-Percona-Resource-Type' => 'Service' },
      content => [ as_hashref($svc0, with_links => 1) ],
   },
];

my $services = $client->get(link => $config->links->{services});

is(
   scalar @$services,
   1,
   "Got 1 service"
);

is_deeply(
   as_hashref($services->[0], with_links => 1),
   as_hashref($svc0, with_links => 1),
   "GET Services"
) or diag(Dumper(as_hashref($services, with_links => 1)));

is(
   $services->[0]->links->{send_data},
   "/query-monitor",
   "send_data link for Service"
);

# #############################################################################
# Done.
# #############################################################################
done_testing;
@@ -1,51 +0,0 @@
#!/usr/bin/perl

BEGIN {
   die "The PERCONA_TOOLKIT_BRANCH environment variable is not set.\n"
      unless $ENV{PERCONA_TOOLKIT_BRANCH} && -d $ENV{PERCONA_TOOLKIT_BRANCH};
   unshift @INC, "$ENV{PERCONA_TOOLKIT_BRANCH}/lib";
};

use strict;
use warnings FATAL => 'all';
use English qw(-no_match_vars);
use Test::More;

use PerconaTest;
use Percona::Toolkit;
use Percona::WebAPI::Resource::Agent;
use Percona::WebAPI::Resource::Config;
use Percona::WebAPI::Representation;

my $agent = Percona::WebAPI::Resource::Agent->new(
   id => '123',
   hostname => 'pt',
   versions => {
      Perl => '5.10.1',
   },
);

is(
   Percona::WebAPI::Representation::as_json($agent),
   q/{"versions":{"Perl":"5.10.1"},"id":"123","hostname":"pt"}/,
   "as_json"
);

my $config = Percona::WebAPI::Resource::Config->new(
   ts => '100',
   name => 'Default',
   options => {
      'check-interval' => 60,
   },
);

is(
   Percona::WebAPI::Representation::as_config($config),
   "check-interval=60\n",
   "as_config"
);

# #############################################################################
# Done.
# #############################################################################
done_testing;
@@ -1,101 +0,0 @@
#!/usr/bin/env perl

BEGIN {
   die "The PERCONA_TOOLKIT_BRANCH environment variable is not set.\n"
      unless $ENV{PERCONA_TOOLKIT_BRANCH} && -d $ENV{PERCONA_TOOLKIT_BRANCH};
   unshift @INC, "$ENV{PERCONA_TOOLKIT_BRANCH}/lib";
};

use strict;
use warnings FATAL => 'all';
use English qw(-no_match_vars);
use Test::More;

use File::Temp qw(tempdir);

use Percona::Test;
use Sandbox;
use Percona::Test::Mock::UserAgent;
require "$trunk/bin/pt-agent";

my $dp = new DSNParser(opts=>$dsn_opts);
my $sb = new Sandbox(basedir => '/tmp', DSNParser => $dp);
my $dbh = $sb->get_dbh_for('master');
my $dsn = $sb->dsn_for('master');
my $o = new OptionParser();
$o->get_specs("$trunk/bin/pt-agent");
$o->get_opts();
my $cxn = Cxn->new(
   dsn_string => $dsn,
   OptionParser => $o,
   DSNParser => $dp,
);

Percona::Toolkit->import(qw(Dumper));
Percona::WebAPI::Representation->import(qw(as_hashref));

# Running the agent is going to cause it to schedule the services,
# i.e. write a real crontab. The test box/user shouldn't have a
# crontab, so we'll warn and clobber it if there is one.
my $crontab = `crontab -l 2>/dev/null`;
if ( $crontab ) {
   warn "Removing crontab: $crontab\n";
   `crontab -r`;
}

my $tmp_lib = "/tmp/pt-agent";
my $tmp_log = "/tmp/pt-agent.log";
my $tmp_pid = "/tmp/pt-agent.pid";

diag(`rm -rf $tmp_lib`) if -d $tmp_lib;
unlink $tmp_log if -f $tmp_log;
unlink $tmp_pid if -f $tmp_pid;

my $config_file = pt_agent::get_config_file();
unlink $config_file if -f $config_file;

my $output;

{
   no strict;
   no warnings;
   local *pt_agent::start_agent = sub {
      print "start_agent\n";
      return {
         agent => 0,
         client => 0,
         daemon => 0,
      };
   };
   local *pt_agent::run_agent = sub {
      print "run_agent\n";
   };

   $output = output(
      sub {
         pt_agent::main(
            qw(--api-key 123)
         );
      },
      stderr => 1,
   );
}

like(
   $output,
   qr/start_agent\nrun_agent\n/,
   "Starts and runs without a config file"
);

# #############################################################################
# Done.
# #############################################################################

`crontab -r 2>/dev/null`;

if ( -f $config_file ) {
   unlink $config_file
      or warn "Error removing $config_file: $OS_ERROR";
}

done_testing;
@@ -1,423 +0,0 @@
#!/usr/bin/env perl

BEGIN {
   die "The PERCONA_TOOLKIT_BRANCH environment variable is not set.\n"
      unless $ENV{PERCONA_TOOLKIT_BRANCH} && -d $ENV{PERCONA_TOOLKIT_BRANCH};
   unshift @INC, "$ENV{PERCONA_TOOLKIT_BRANCH}/lib";
};

use strict;
use warnings FATAL => 'all';
use English qw(-no_match_vars);
use Test::More;

use JSON;
use File::Temp qw(tempdir);

use Percona::Test;
use Percona::Test::Mock::UserAgent;
use Percona::Test::Mock::AgentLogger;
require "$trunk/bin/pt-agent";

Percona::Toolkit->import(qw(Dumper));
Percona::WebAPI::Representation->import(qw(as_hashref));

my @log;
my $logger = Percona::Test::Mock::AgentLogger->new(log => \@log);
pt_agent::_logger($logger);

# Fake --lib and --spool dirs.
my $tmpdir = tempdir("/tmp/pt-agent.$PID.XXXXXX", CLEANUP => 1);
output( sub {
   pt_agent::init_lib_dir(lib_dir => $tmpdir);
});

# #############################################################################
# Create mock client and Agent
# #############################################################################

# These aren't the real tests yet: to run_agent, first we need
# a client and Agent, so create mock ones.

my $output;
my $json = JSON->new->canonical([1])->pretty;
$json->allow_blessed([]);
$json->convert_blessed([]);

my $ua = Percona::Test::Mock::UserAgent->new(
   encode => sub { my $c = shift; return $json->encode($c || {}) },
);

my $client = eval {
   Percona::WebAPI::Client->new(
      api_key => '123',
      ua => $ua,
   );
};

is(
   $EVAL_ERROR,
   '',
   'Create mock client'
) or die;

my $agent = Percona::WebAPI::Resource::Agent->new(
   uuid => '123',
   hostname => 'host',
   username => 'user',
   links => {
      self => '/agents/123',
      config => '/agents/123/config',
   },
);

my @cmds;
my $exec_cmd = sub {
   my $cmd = shift;
   push @cmds, $cmd;
   return 0;
};

# #############################################################################
# Test get_services()
# #############################################################################

# query-history

my $run0 = Percona::WebAPI::Resource::Task->new(
   name => 'query-history',
   number => '0',
   program => 'pt-query-digest --output json',
   output => 'spool',
);

my $qh = Percona::WebAPI::Resource::Service->new(
   ts => '100',
   name => 'query-history',
   run_schedule => '1 * * * *',
   spool_schedule => '2 * * * *',
   tasks => [ $run0 ],
   links => {
      self => '/query-history',
      data => '/query-history/data',
   },
);

my $run1 = Percona::WebAPI::Resource::Task->new(
   name => 'start-query-history',
   number => '0',
   program => 'echo "start-qh"',
   output => 'spool',
);

my $start_qh = Percona::WebAPI::Resource::Service->new(
   ts => '100',
   name => 'start-query-history',
   meta => 1,
   tasks => [ $run1 ],
   links => {
      self => '/query-history',
      data => '/query-history/data',
   },
);

$ua->{responses}->{get} = [
   {
      headers => { 'X-Percona-Resource-Type' => 'Service' },
      content => [
         as_hashref($qh, with_links => 1),
         as_hashref($start_qh, with_links => 1),
      ],
   },
];

my $services = {};
my $success = 0;

$output = output(
   sub {
      ($services, $success) = pt_agent::get_services(
         # Required args
         link => '/agents/123/services',
         agent => $agent,
         client => $client,
         lib_dir => $tmpdir,
         services => $services,
         # Optional args, for testing
         json => $json,
         bin_dir => "$trunk/bin/",
         exec_cmd => $exec_cmd,
      );
   },
   stderr => 1,
);

is(
   $success,
   1,
   "Success"
);

is(
   ref $services,
   'HASH',
   "Return services as hashref"
) or diag(Dumper($services));

is(
   scalar keys %$services,
   2,
   'Only 2 services'
) or diag(Dumper($services));

ok(
   exists $services->{'query-history'},
   "services hashref keyed on service name"
) or diag(Dumper($services));

isa_ok(
   ref $services->{'query-history'},
   'Percona::WebAPI::Resource::Service',
   'services->{query-history}'
);

my $crontab = -f "$tmpdir/crontab" ? slurp_file("$tmpdir/crontab") : '';
is(
   $crontab,
   "1 * * * * $trunk/bin/pt-agent --run-service query-history
2 * * * * $trunk/bin/pt-agent --send-data query-history
",
   "crontab file"
) or diag($output, `ls -l $tmpdir/*`, Dumper(\@log));

is_deeply(
   \@cmds,
   [
      "$trunk/bin/pt-agent --run-service start-query-history >> $tmpdir/logs/start-stop.log 2>&1",
      "crontab $tmpdir/crontab > $tmpdir/crontab.err 2>&1",
   ],
   "Ran start-service and crontab"
) or diag(Dumper(\@cmds), Dumper(\@log));

ok(
   -f "$tmpdir/services/query-history",
   "Wrote --lib/services/query-history"
);

# #############################################################################
# A more realistic transaction
# #############################################################################

# services/query-history should exist from the previous tests. For these
# tests, get_services() should update the file, so we empty it and check
# that it's re-created, i.e. updated.
diag(`echo -n > $tmpdir/services/query-history`);
is(
   -s "$tmpdir/services/query-history",
   0,
   "Start: empty --lib/services/query-history"
);

# start-query-history

my $task1 = Percona::WebAPI::Resource::Task->new(
   name => 'disable-slow-query-log',
   number => '0',
   query => "SET GLOBAL slow_query_log=0",
);

my $task2 = Percona::WebAPI::Resource::Task->new(
   name => 'set-slow-query-log-file',
   number => '1',
   query => "SET GLOBAL slow_query_log_file='/tmp/slow.log'",
);

my $task3 = Percona::WebAPI::Resource::Task->new(
   name => 'set-long-query-time',
   number => '2',
   query => "SET GLOBAL long_query_time=0.01",
);

my $task4 = Percona::WebAPI::Resource::Task->new(
   name => 'enable-slow-query-log',
   number => '3',
   query => "SET GLOBAL slow_query_log=1",
);

$start_qh = Percona::WebAPI::Resource::Service->new(
   ts => '100',
   name => 'start-query-history',
   tasks => [ $task1, $task2, $task3, $task4 ],
   meta => 1,
   links => {
      self => '/query-history',
      data => '/query-history/data',
   },
);

# stop-query-history

my $task5 = Percona::WebAPI::Resource::Task->new(
   name => 'disable-slow-query-log',
   number => '0',
   query => "SET GLOBAL slow_query_log=0",
);

my $stop_qh = Percona::WebAPI::Resource::Service->new(
   ts => '100',
   name => 'stop-query-history',
   tasks => [ $task5 ],
   meta => 1,
   links => {
      self => '/query-history',
      data => '/query-history/data',
   },
);

# We'll use query-history from the previous tests.

$ua->{responses}->{get} = [
   {
      headers => { 'X-Percona-Resource-Type' => 'Service' },
      content => [
         as_hashref($start_qh, with_links => 1),
         as_hashref($stop_qh, with_links => 1),
         as_hashref($qh, with_links => 1),  # from previous tests
      ],
   },
];

@log = ();
@cmds = ();
$services = {};
$success = 0;

$output = output(
   sub {
      ($services, $success) = pt_agent::get_services(
         # Required args
         link => '/agents/123/services',
         agent => $agent,
         client => $client,
         lib_dir => $tmpdir,
         services => $services,
         # Optional args, for testing
         json => $json,
         bin_dir => "$trunk/bin/",
         exec_cmd => $exec_cmd,
      );
   },
   stderr => 1,
);

is_deeply(
   \@cmds,
   [
      "$trunk/bin/pt-agent --run-service start-query-history >> $tmpdir/logs/start-stop.log 2>&1",
      "crontab $tmpdir/crontab > $tmpdir/crontab.err 2>&1",
   ],
   "Start: ran start-query-history"
) or diag(Dumper(\@cmds), $output);

ok(
   -f "$tmpdir/services/start-query-history",
   "Start: added --lib/services/start-query-history"
) or diag($output);

ok(
   -f "$tmpdir/services/stop-query-history",
   "Start: added --lib/services/stop-query-history"
) or diag($output);

my $contents = slurp_file("$tmpdir/services/query-history");
like(
   $contents,
   qr/query-history/,
   "Start: updated --lib/services/query-history"
) or diag($output);

$crontab = slurp_file("$tmpdir/crontab");
is(
   $crontab,
   "1 * * * * $trunk/bin/pt-agent --run-service query-history
2 * * * * $trunk/bin/pt-agent --send-data query-history
",
   "Start: only scheduled query-history"
) or diag($output);

# #############################################################################
# Update and restart a service
# #############################################################################

# pt-agent should remove a service's --lib/meta/ files when restarting,
# so create one and check that it's removed.
diag(`touch $tmpdir/meta/query-history.foo`);
ok(
   -f "$tmpdir/meta/query-history.foo",
   "Restart: meta file exists"
);

$qh = Percona::WebAPI::Resource::Service->new(
   ts => '200',  # was 100
   name => 'query-history',
   run_schedule => '1 * * * *',
   spool_schedule => '2 * * * *',
   tasks => [ $run0 ],
   links => {
      self => '/query-history',
      data => '/query-history/data',
   },
);

$ua->{responses}->{get} = [
   {
      headers => { 'X-Percona-Resource-Type' => 'Service' },
      content => [
         as_hashref($start_qh, with_links => 1),  # has not changed
         as_hashref($stop_qh, with_links => 1),   # has not changed
         as_hashref($qh, with_links => 1),
      ],
   },
];

@log = ();
@cmds = ();
$success = 0;

$output = output(
   sub {
      ($services, $success) = pt_agent::get_services(
         # Required args
         link => '/agents/123/services',
         agent => $agent,
         client => $client,
         lib_dir => $tmpdir,
         services => $services,  # retval from previous call
         # Optional args, for testing
         json => $json,
         bin_dir => "$trunk/bin/",
         exec_cmd => $exec_cmd,
      );
   },
   stderr => 1,
);

is_deeply(
   \@cmds,
   [
      "$trunk/bin/pt-agent --run-service stop-query-history >> $tmpdir/logs/start-stop.log 2>&1",
      "$trunk/bin/pt-agent --run-service start-query-history >> $tmpdir/logs/start-stop.log 2>&1",
      "crontab $tmpdir/crontab > $tmpdir/crontab.err 2>&1",
   ],
   "Restart: ran stop-query-history then start-query-history"
) or diag(Dumper(\@cmds), $output);

ok(
   !-f "$tmpdir/meta/query-history.foo",
   "Restart: meta file removed"
) or diag($output);

# #############################################################################
# Done.
# #############################################################################
done_testing;
@@ -1,333 +0,0 @@
#!/usr/bin/env perl

BEGIN {
   die "The PERCONA_TOOLKIT_BRANCH environment variable is not set.\n"
      unless $ENV{PERCONA_TOOLKIT_BRANCH} && -d $ENV{PERCONA_TOOLKIT_BRANCH};
   unshift @INC, "$ENV{PERCONA_TOOLKIT_BRANCH}/lib";
};

use strict;
use warnings FATAL => 'all';
use English qw(-no_match_vars);
use Test::More;
use JSON;
use File::Temp qw(tempdir);

use Percona::Test;
use Percona::Test::Mock::UserAgent;
use Percona::Test::Mock::AgentLogger;
require "$trunk/bin/pt-agent";

Percona::Toolkit->import(qw(Dumper));
Percona::WebAPI::Representation->import(qw(as_hashref));

my $tmpdir = tempdir("/tmp/pt-agent.$PID.XXXXXX", CLEANUP => 1);

my $json = JSON->new->canonical([1])->pretty;
$json->allow_blessed([]);
$json->convert_blessed([]);

my @log;
my $logger = Percona::Test::Mock::AgentLogger->new(log => \@log);
pt_agent::_logger($logger);

my $ua = Percona::Test::Mock::UserAgent->new(
   encode => sub { my $c = shift; return $json->encode($c || {}) },
);

my $client = eval {
   Percona::WebAPI::Client->new(
      api_key => '123',
      ua => $ua,
   );
};

is(
   $EVAL_ERROR,
   '',
   'Create Client with mock user agent'
) or die;

my @ok;
my $oktorun = sub {
   return shift @ok;
};

my @wait;
my $interval = sub {
   my $t = shift;
   push @wait, $t;
};

# #############################################################################
# Init a new agent, i.e. create it.
# #############################################################################

my $post_agent = Percona::WebAPI::Resource::Agent->new(
   uuid => '123',
   hostname => 'host1',
   username => 'name1',
   versions => {
   },
   links => {
      self => '/agents/123',
      config => '/agents/123/config',
   },
);

my $return_agent = Percona::WebAPI::Resource::Agent->new(
   uuid => '123',
   hostname => 'host2',
   username => 'name2',
   versions => {
   },
   links => {
      self => '/agents/123',
      config => '/agents/123/config',
   },
);

$ua->{responses}->{post} = [
   {
      headers => { 'Location' => '/agents/123' },
   },
];

$ua->{responses}->{get} = [
   {
      headers => { 'X-Percona-Resource-Type' => 'Agent' },
      content => as_hashref($return_agent, with_links => 1),
   },
];

my $got_agent;
my $output = output(
   sub {
      ($got_agent) = pt_agent::init_agent(
         agent => $post_agent,
         action => 'post',
         link => "/agents",
         client => $client,
         interval => $interval,
         tries => 4,
      );
   },
   stderr => 1,
);

is(
   $got_agent->hostname,
   'host2',
   'Got and returned Agent'
) or diag($output, Dumper(as_hashref($got_agent, with_links => 1)));

is(
   scalar @wait,
   0,
   "Client did not wait (new Agent)"
) or diag($output);

# #############################################################################
# Repeat this test but this time fake an error, so the tool isn't able
# to create the Agent first time, so it should wait (call interval),
# and try again.
# #############################################################################

$return_agent->{id} = '456';
$return_agent->{links} = {
   self => '/agents/456',
   config => '/agents/456/config',
};

$ua->{responses}->{post} = [
   {  # 1, the fake error
      code => 500,
   },
   # 2, code should call interval
   {  # 3, code should try again, then receive this
      code => 200,
      headers => { 'Location' => '/agents/456' },
   },
];
# 4, code will GET the new Agent
$ua->{responses}->{get} = [
   {
      headers => { 'X-Percona-Resource-Type' => 'Agent' },
      content => as_hashref($return_agent, with_links => 1),
   },
];

@ok = qw(1 1 0);
@wait = ();
@log = ();
$ua->{requests} = [];

$output = output(
   sub {
      ($got_agent) = pt_agent::init_agent(
         agent => $post_agent,
         action => 'post',
         link => "/agents",
         client => $client,
         interval => $interval,
         tries => 5,
         oktorun => $oktorun,
      );
   },
   stderr => 1,
);

is(
   ($got_agent ? $got_agent->hostname : ''),
   'host2',
   'Got and returned Agent after error'
) or diag($output, Dumper($got_agent));

is(
   scalar @wait,
   1,
   "Client waited after error"
);

is_deeply(
   $ua->{requests},
   [
      'POST /agents',     # first attempt, 500 error
      'POST /agents',     # second attempt, 200 OK
      'GET /agents/456',  # GET new Agent
   ],
   "POST POST GET new Agent after error"
) or diag(Dumper($ua->{requests}));

like(
   $log[1],
   qr{WARNING Failed to POST /agents},
   "POST /agents failure logged after error"
) or diag(Dumper($ua->{requests}), Dumper(\@log));

# #############################################################################
# Init an existing agent, i.e. update it.
# #############################################################################

my $put_agent = Percona::WebAPI::Resource::Agent->new(
   uuid => '123',
   hostname => 'host3',
   username => 'name3',
   versions => {
   },
   links => {
      self => '/agents/123',
      config => '/agents/123/config',
   },
);

$ua->{responses}->{put} = [
   {
      code => 200,
      headers => {
         Location => '/agents/123',
      },
   },
];
$ua->{responses}->{get} = [
   {
      code => 200,
      headers => { 'X-Percona-Resource-Type' => 'Agent' },
      content => as_hashref($return_agent, with_links => 1),
   }
];

@wait = ();
$ua->{requests} = [];

$output = output(
   sub {
      ($got_agent) = pt_agent::init_agent(
         agent => $put_agent,
         action => 'put',
         link => "/agents/123",
         client => $client,
         interval => $interval,
         tries => 4,
      );
   },
   stderr => 1,
);

is(
   $got_agent->hostname,
   'host2',
   'PUT Agent'
) or diag($output, Dumper(as_hashref($got_agent, with_links => 1)));

is(
   scalar @wait,
   0,
   "Client did not wait (saved Agent)"
);

is_deeply(
   $ua->{requests},
   [
      'PUT /agents/123',
      'GET /agents/123',
   ],
   "PUT then GET Agent"
) or diag(Dumper($ua->{requests}));

# #############################################################################
# Status 403 (too many agents) should abort further attempts.
# #############################################################################

$ua->{responses}->{post} = [
   {  # 1, the fake error
      code => 403,
   },
];

@ok = qw(1 1 0);
@wait = ();
@log = ();
$ua->{requests} = [];

$output = output(
   sub {
      ($got_agent) = pt_agent::init_agent(
         agent => $post_agent,
         action => 'post',
         link => "/agents",
         client => $client,
         interval => $interval,
         tries => 3,
         oktorun => $oktorun,
      );
   },
   stderr => 1,
);

is(
   scalar @wait,
   2,
   "Too many agents (403): waits"
);

is_deeply(
   $ua->{requests},
   [
      'POST /agents',
      'POST /agents',
   ],
   "Too many agents (403): tries"
) or diag(Dumper($ua->{requests}));

my $n = grep { $_ =~ m/too many agents/ } @log;
is(
   $n,
   1,
   "Too many agents (403): does not repeat warning"
) or diag(Dumper(\@log));

# #############################################################################
# Done.
# #############################################################################
done_testing;
@@ -1,151 +0,0 @@
#!/usr/bin/env perl

BEGIN {
   die "The PERCONA_TOOLKIT_BRANCH environment variable is not set.\n"
      unless $ENV{PERCONA_TOOLKIT_BRANCH} && -d $ENV{PERCONA_TOOLKIT_BRANCH};
   unshift @INC, "$ENV{PERCONA_TOOLKIT_BRANCH}/lib";
};

use strict;
use warnings FATAL => 'all';
use English qw(-no_match_vars);
use Test::More;
use JSON;
use File::Temp qw(tempfile);

use Percona::Test;
require "$trunk/bin/pt-agent";

Percona::Toolkit->import(qw(have_required_args Dumper));

my $sample = "t/pt-agent/samples";

sub test_make_new_crontab {
   my (%args) = @_;
   have_required_args(\%args, qw(
      file
      services
   )) or die;
   my $file = $args{file};
   my $services = $args{services};

   my $crontab_list = slurp_file("$trunk/$sample/$file.in");

   my $new_crontab = pt_agent::make_new_crontab(
      services => $services,
      crontab_list => $crontab_list,
      bin_dir => '',
   );

   ok(
      no_diff(
         $new_crontab,
         "$sample/$file.out",
         cmd_output => 1,
      ),
      $args{name} || $file,
   ) or diag($new_crontab);
}

my $run0 = Percona::WebAPI::Resource::Task->new(
   name => 'query-history',
   number => '0',
   program => 'pt-query-digest',
   options => '--output json',
   output => 'spool',
);

my $svc0 = Percona::WebAPI::Resource::Service->new(
   ts => '100',
   name => 'query-history',
   run_schedule => '* 8 * * 1,2,3,4,5',
   spool_schedule => '* 9 * * 1,2,3,4,5',
   tasks => [ $run0 ],
);

# Empty crontab, add the service.
test_make_new_crontab(
   file => "crontab001",
   services => [ $svc0 ],
);

# Crontab has another line, add the service to it.
test_make_new_crontab(
   file => "crontab002",
   services => [ $svc0 ],
);

# Crontab has another line and an old service, remove the old service
# and add the current service.
test_make_new_crontab(
   file => "crontab003",
   services => [ $svc0 ],
);

# Crontab has old service, remove it and add only new service.
test_make_new_crontab(
   file => "crontab004",
   services => [ $svc0 ],
);

# #############################################################################
# Use real crontab.
# #############################################################################

# The previous tests pass in a crontab file to make testing easier.
# Now test that make_new_crontab() will run `crontab -l' if not given
# input. To test this, we add a fake line to our crontab. If
# make_new_crontab() really runs `crontab -l', then this fake line
# will be in the new crontab it returns.

my $crontab = `crontab -l 2>/dev/null`;
SKIP: {
   skip 'Crontab is not empty', 3 if $crontab;

   # On most systems[1], crontab lines must end with a newline,
   # else an error like this happens:
   #   "/tmp/new_crontab_file":1: premature EOF
   #   errors in crontab file, can't install.
   # [1] Ubuntu 10 and Mac OS X work without the newline.
   my ($fh, $file) = tempfile();
   print {$fh} "* 0 * * * date > /dev/null\n";
   close $fh or warn "Cannot close $file: $OS_ERROR";
   my $output = `crontab $file 2>&1`;

   $crontab = `crontab -l 2>&1`;

   is(
      $crontab,
      "* 0 * * * date > /dev/null\n",
      "Set other crontab line"
   ) or diag($output);

   unlink $file or warn "Cannot remove $file: $OS_ERROR";

   my $new_crontab = pt_agent::make_new_crontab(
      services => [ $svc0 ],
      bin_dir => '',
   );

   is(
      $new_crontab,
      "* 0 * * * date > /dev/null
* 8 * * 1,2,3,4,5 pt-agent --run-service query-history
* 9 * * 1,2,3,4,5 pt-agent --send-data query-history
",
      "Runs crontab -l by default"
   );

   system("crontab -r 2>/dev/null");
   $crontab = `crontab -l 2>/dev/null`;
   is(
      $crontab,
      "",
      "Removed crontab"
   );
};

# #############################################################################
# Done.
# #############################################################################
done_testing;
@@ -1,73 +0,0 @@
#!/usr/bin/env perl

BEGIN {
   die "The PERCONA_TOOLKIT_BRANCH environment variable is not set.\n"
      unless $ENV{PERCONA_TOOLKIT_BRANCH} && -d $ENV{PERCONA_TOOLKIT_BRANCH};
   unshift @INC, "$ENV{PERCONA_TOOLKIT_BRANCH}/lib";
};

use strict;
use warnings FATAL => 'all';
use English qw(-no_match_vars);
use Test::More;
use JSON;
use File::Temp qw(tempfile);

use Percona::Test;
use Percona::Test::Mock::AgentLogger;
require "$trunk/bin/pt-agent";

Percona::Toolkit->import(qw(have_required_args Dumper));

my @log;
my $logger = Percona::Test::Mock::AgentLogger->new(log => \@log);
pt_agent::_logger($logger);

my @output_files = ();
my $store = {};

sub test_replace {
   my (%args) = @_;
   have_required_args(\%args, qw(
      cmd
      expect
   )) or die;
   my $cmd = $args{cmd};
   my $expect = $args{expect};

   my $new_cmd = pt_agent::replace_special_vars(
      cmd => $cmd,
      output_files => \@output_files,
      service => 'service-name',
      lib_dir => '/var/lib/pt-agent',
      meta_dir => '/var/lib/pt-agent/meta',
      stage_dir => '/var/spool/.tmp',
      spool_dir => '/var/spool',
      bin_dir => $trunk,
      ts => '123',
      store => $store,
   );

   is(
      $new_cmd,
      $expect,
      $cmd,
   );
};

@output_files = qw(zero one two);
test_replace(
   cmd => "pt-query-digest __RUN_0_OUTPUT__",
   expect => "pt-query-digest zero",
);

$store->{slow_query_log_file} = 'slow.log';
test_replace(
   cmd => "echo '__STORE_slow_query_log_file__' > /var/spool/pt-agent/.tmp/1371269644.rotate-slow-query-log-all-5.1.slow_query_log_file",
   expect => "echo 'slow.log' > /var/spool/pt-agent/.tmp/1371269644.rotate-slow-query-log-all-5.1.slow_query_log_file",
);

# #############################################################################
# Done.
# #############################################################################
done_testing;
@@ -1,527 +0,0 @@
#!/usr/bin/env perl

BEGIN {
   die "The PERCONA_TOOLKIT_BRANCH environment variable is not set.\n"
      unless $ENV{PERCONA_TOOLKIT_BRANCH} && -d $ENV{PERCONA_TOOLKIT_BRANCH};
   unshift @INC, "$ENV{PERCONA_TOOLKIT_BRANCH}/lib";
};

use strict;
use warnings FATAL => 'all';
use English qw(-no_match_vars);
use Test::More;

plan skip_all => "Need to make start-service testable";

use JSON;
use File::Temp qw(tempdir);

use Percona::Test;
use Sandbox;
use Percona::Test::Mock::UserAgent;
use Percona::Test::Mock::AgentLogger;
require "$trunk/bin/pt-agent";

my $dp = new DSNParser(opts=>$dsn_opts);
my $sb = new Sandbox(basedir => '/tmp', DSNParser => $dp);
my $dbh = $sb->get_dbh_for('master');
my $dsn = $sb->dsn_for('master');
my $o = new OptionParser();
$o->get_specs("$trunk/bin/pt-agent");
$o->get_opts();
my $cxn = Cxn->new(
   dsn_string => $dsn,
   OptionParser => $o,
   DSNParser => $dp,
);

Percona::Toolkit->import(qw(Dumper));
Percona::WebAPI::Representation->import(qw(as_hashref));

# Running the agent is going to cause it to schedule the services,
# i.e. write a real crontab. The test box/user shouldn't have a
# crontab, so we'll warn and clobber it if there is one.
my $crontab = `crontab -l 2>/dev/null`;
if ( $crontab ) {
   warn "Removing crontab: $crontab\n";
   `crontab -r`;
}

# Fake --lib and --spool dirs.
my $tmpdir = tempdir("/tmp/pt-agent.$PID.XXXXXX"); #, CLEANUP => 1);
mkdir "$tmpdir/spool" or die "Error making $tmpdir/spool: $OS_ERROR";

my @log;
my $logger = Percona::Test::Mock::AgentLogger->new(log => \@log);
pt_agent::_logger($logger);

# #############################################################################
# Create mock client and Agent
# #############################################################################

# These aren't the real tests yet: to run_agent, first we need
# a client and Agent, so create mock ones.

my $output;
my $json = JSON->new->canonical([1])->pretty;
$json->allow_blessed([]);
$json->convert_blessed([]);

my $ua = Percona::Test::Mock::UserAgent->new(
   encode => sub { my $c = shift; return $json->encode($c || {}) },
);

my $client = eval {
   Percona::WebAPI::Client->new(
      api_key => '123',
      ua => $ua,
   );
};

is(
   $EVAL_ERROR,
   '',
   'Create mock client'
) or die;

my $agent = Percona::WebAPI::Resource::Agent->new(
   uuid => '123',
   hostname => 'host',
   username => 'user',
   links => {
      self => '/agents/123',
      config => '/agents/123/config',
   },
);

my $daemon = Daemon->new(
   daemonize => 0,
);

my @wait;
my $interval = sub {
   my $t = shift;
   push @wait, $t;
   print "interval=" . (defined $t ? $t : 'undef') . "\n";
};

# #############################################################################
# Test run_agent
# #############################################################################

my $config = Percona::WebAPI::Resource::Config->new(
   ts => 1363720060,
   name => 'Default',
   options => {
      'lib' => $tmpdir,            # required
      'spool' => "$tmpdir/spool",  # required
      'check-interval' => "11",
   },
   links => {
      self => '/agents/123/config',
      services => '/agents/123/services',
   },
);

my $run0 = Percona::WebAPI::Resource::Task->new(
   name => 'query-history',
   number => '0',
   program => 'pt-query-digest',
   options => '--output json',
   output => 'spool',
);

my $svc0 = Percona::WebAPI::Resource::Service->new(
   ts => 100,
   name => 'query-history',
   run_schedule => '1 * * * *',
   spool_schedule => '2 * * * *',
   tasks => [ $run0 ],
   links => {
      self => '/query-history',
      data => '/query-history/data',
   },
);

my $run1 = Percona::WebAPI::Resource::Task->new(
   name => 'start-query-history',
   number => '0',
   program => 'echo "start-qh"',
);

my $start_qh = Percona::WebAPI::Resource::Service->new(
   ts => '100',
   name => 'start-query-history',
   meta => 1,
   tasks => [ $run1 ],
   links => {
      self => '/query-history',
      data => '/query-history/data',
   },
);

$ua->{responses}->{get} = [
   {
      headers => { 'X-Percona-Resource-Type' => 'Config' },
      content => as_hashref($config, with_links => 1),
   },
   {
      headers => { 'X-Percona-Resource-Type' => 'Service' },
      content => [
         as_hashref($start_qh, with_links => 1),
         as_hashref($svc0, with_links => 1),
      ],
   },
];

my $safeguards = Safeguards->new(
   disk_bytes_free => 1024,
   disk_pct_free => 1,
);

# The only thing pt-agent must have is the API key in the config file,
# everything else relies on defaults until the first Config is gotten
# from Percona.
my $config_file = pt_agent::get_config_file();
unlink $config_file if -f $config_file;

like(
   $config_file,
   qr/$ENV{LOGNAME}\/\.pt-agent.conf$/,
   "Default config file is ~/.pt-agent.conf"
);

pt_agent::write_config(
   config => $config
);

diag(`echo 'api-key=123' >> $config_file`);

is(
   `cat $config_file`,
   "check-interval=11\nlib=$tmpdir\nspool=$tmpdir/spool\napi-key=123\n",
   "Write Config to config file"
);

pt_agent::save_agent(
   agent => $agent,
   lib_dir => $tmpdir,
);

my @ok_code = ();  # callbacks
my @oktorun = (
   1,  # 1st main loop check
   0,  # 2nd main loop check
);
my $oktorun = sub {
   my $ok = shift @oktorun;
   print "oktorun=" . (defined $ok ? $ok : 'undef') . "\n";
   my $code = shift @ok_code;
   $code->() if $code;
   return $ok
};

@wait = ();

$output = output(
   sub {
      pt_agent::run_agent(
         # Required args
         agent => $agent,
         client => $client,
         daemon => $daemon,
         interval => $interval,
         lib_dir => $tmpdir,
         safeguards => $safeguards,
         Cxn => $cxn,
         # Optional args, for testing
         oktorun => $oktorun,
         json => $json,
         bin_dir => "$trunk/bin",
      );
   },
   stderr => 1,
);

is(
   scalar @wait,
   1,
   "Called interval once"
);

is(
   $wait[0],
   11,
   "... used Config->options->check-interval"
);

ok(
   -f "$tmpdir/services/query-history",
   "Created services/query-history"
) or diag($output);

chomp(my $n_files = `ls -1 $tmpdir/services| wc -l | awk '{print \$1}'`);
is(
   $n_files,
   2,
   "... created services/query-history and services/start-query-history"
);

ok(
   no_diff(
      "cat $tmpdir/services/query-history",
      "t/pt-agent/samples/service001",
   ),
   "query-history service file"
);

$crontab = `crontab -l 2>/dev/null`;
like(
   $crontab,
   qr/pt-agent --run-service query-history$/m,
   "Scheduled --run-service with crontab"
) or diag(Dumper(\@log));

like(
   $crontab,
   qr/pt-agent --send-data query-history$/m,
   "Scheduled --send-data with crontab"
) or diag(Dumper(\@log));
exit;
# #############################################################################
# Run run_agent again, like the agent had been stopped and restarted.
# #############################################################################

$ua->{responses}->{get} = [
   # First check, fail
   {
      code => 500,
   },
   # interval
   # 2nd check, init with latest Config and Services
   {
      headers => { 'X-Percona-Resource-Type' => 'Config' },
      content => as_hashref($config, with_links => 1),
   },
   {
      headers => { 'X-Percona-Resource-Type' => 'Service' },
      content => [ as_hashref($svc0, with_links => 1) ],
   },
   # interval
   # 3rd check, same Config and Services so nothing to do
   {
      headers => { 'X-Percona-Resource-Type' => 'Config' },
      content => as_hashref($config, with_links => 1),
   },
   {
      headers => { 'X-Percona-Resource-Type' => 'Service' },
      content => [ as_hashref($svc0, with_links => 1) ],
   },
   # interval, oktorun=0
];

@oktorun = (
   1,  # 1st main loop check
   # First check, error 500
   1,  # 2nd main loop check
   # Init with latest Config and Services
   1,  # 3rd main loop check
   # Same Config and services
   0,  # 4th main loop check
);

# Before the 3rd check, remove the config file (~/.pt-agent.conf) and
# query-history service file. When the tool re-GETs these, they'll be
# the same so it won't recreate them. A bug here will cause these files to
# exist again after running.
$ok_code[2] = sub {
   unlink "$config_file";
   unlink "$tmpdir/services/query-history";
   Percona::Test::wait_until(sub { ! -f "$config_file" });
   Percona::Test::wait_until(sub { ! -f "$tmpdir/services/query-history" });
};

@wait = ();

$output = output(
   sub {
      pt_agent::run_agent(
         # Required args
         agent => $agent,
         client => $client,
         daemon => $daemon,
         interval => $interval,
         lib_dir => $tmpdir,
         Cxn => $cxn,
         # Optional args, for testing
         oktorun => $oktorun,
         json => $json,
      );
   },
   stderr => 1,
);

is_deeply(
   \@wait,
   [ 60, 11, 11 ],
   "Got Config after error"
) or diag(Dumper(\@wait));

ok(
   ! -f "$config_file",
   "No Config diff, no config file change"
);

ok(
   ! -f "$tmpdir/services/query-history",
   "No Service diff, no service file changes"
);

my $new_crontab = `crontab -l 2>/dev/null`;
is(
   $new_crontab,
   $crontab,
   "Crontab is the same"
);

# #############################################################################
# Test a run_once_on_start service
# #############################################################################

diag(`rm -f $tmpdir/* >/dev/null 2>&1`);
diag(`rm -rf $tmpdir/services/*`);
diag(`rm -rf $tmpdir/spool/*`);

# When pt-agent manually runs --run-service test-run-at-start, it's going
# to need an API key because it doesn't call its own run_service(), it runs
# another instance of itself with system(). So put the fake API key in
# the default config file.
unlink $config_file if -f $config_file;
diag(`echo "api-key=123" > $config_file`);

$config = Percona::WebAPI::Resource::Config->new(
   ts => 1363720060,
   name => 'Test run_once_on_start',
   options => {
      'check-interval' => "15",
      'lib' => $tmpdir,
      'spool' => "$tmpdir/spool",
      'pid' => "$tmpdir/pid",
      'log' => "$tmpdir/log"
   },
   links => {
      self => '/agents/123/config',
      services => '/agents/123/services',
   },
);

$run0 = Percona::WebAPI::Resource::Task->new(
   name => 'run-at-start',
   number => '0',
   program => 'date',
   output => 'spool',
);

$svc0 = Percona::WebAPI::Resource::Service->new(
   ts => 100,
   name => 'test-run-at-start',
   run_schedule => '0 0 1 1 *',
   run_once => 1,  # here's the magic
   tasks => [ $run0 ],
   links => {
      self => '/query-history',
      data => '/query-history/data',
   },
);

$ua->{responses}->{get} = [
   {
      headers => { 'X-Percona-Resource-Type' => 'Config' },
      content => as_hashref($config, with_links => 1),
   },
   {
      headers => { 'X-Percona-Resource-Type' => 'Service' },
      content => [ as_hashref($svc0, with_links => 1) ],
   },
   {
      headers => { 'X-Percona-Resource-Type' => 'Config' },
      content => as_hashref($config, with_links => 1),
   },
   {
      headers => { 'X-Percona-Resource-Type' => 'Service' },
      content => [ as_hashref($svc0, with_links => 1) ],
   },
];

@wait = ();
@ok_code = ();  # callbacks
@oktorun = (
   1,  # 1st main loop check
   # Run once
   1,  # 2nd main loop check
   # Don't run it again
   0,  # 3rd main loop check
);

$output = output(
   sub {
      pt_agent::run_agent(
         # Required args
         agent => $agent,
         client => $client,
         daemon => $daemon,
         interval => $interval,
         lib_dir => $tmpdir,
         Cxn => $cxn,
         # Optional args, for testing
         oktorun => $oktorun,
         json => $json,
         bin_dir => "$trunk/bin/",
      );
   },
   stderr => 1,
);

Percona::Test::wait_for_files("$tmpdir/spool/test-run-at-start/test-run-at-start");

like(
   $output,
   qr/Starting test-run-at-start service/,
   "Ran service on start"
);

my @runs = $output =~ m/Starting test-run-at-start service/g;

is(
   scalar @runs,
   1,
   "... only ran it once"
);

chomp($output = `cat $tmpdir/spool/test-run-at-start/test-run-at-start 2>/dev/null`);
ok(
   $output,
   "... service ran at start"
) or diag($output);

chomp($output = `crontab -l`);
unlike(
   $output,
   qr/--run-service test-run-at-start/,
   "... service was not scheduled"
);

# #############################################################################
# Done.
# #############################################################################

# This shouldn't cause an error, but if it does, let it show up
# in the results as an error.
`crontab -r`;

if ( -f $config_file ) {
   unlink $config_file
      or warn "Error removing $config_file: $OS_ERROR";
}

done_testing;
@@ -1,503 +0,0 @@
|
||||
#!/usr/bin/env perl
|
||||
|
||||
BEGIN {
|
||||
die "The PERCONA_TOOLKIT_BRANCH environment variable is not set.\n"
|
||||
unless $ENV{PERCONA_TOOLKIT_BRANCH} && -d $ENV{PERCONA_TOOLKIT_BRANCH};
|
||||
unshift @INC, "$ENV{PERCONA_TOOLKIT_BRANCH}/lib";
|
||||
};
|
||||
|
||||
use strict;
|
||||
use warnings FATAL => 'all';
|
||||
use English qw(-no_match_vars);
|
||||
use Test::More;
|
||||
use JSON;
|
||||
use File::Temp qw(tempdir);
|
||||
|
||||
$ENV{PTTEST_PRETTY_JSON} = 1;
|
||||
|
||||
use Percona::Test;
|
||||
use Sandbox;
|
||||
use Percona::Test::Mock::UserAgent;
|
||||
use Percona::Test::Mock::AgentLogger;
|
||||
require "$trunk/bin/pt-agent";
|
||||
|
||||
my $dp = new DSNParser(opts=>$dsn_opts);
|
||||
my $sb = new Sandbox(basedir => '/tmp', DSNParser => $dp);
|
||||
my $dbh = $sb->get_dbh_for('master');
|
||||
my $dsn = $sb->dsn_for('master');
|
||||
my $o = new OptionParser();
|
||||
$o->get_specs("$trunk/bin/pt-agent");
|
||||
$o->get_opts();
|
||||
|
||||
Percona::Toolkit->import(qw(Dumper have_required_args));
|
||||
Percona::WebAPI::Representation->import(qw(as_hashref));
|
||||
|
||||
my @log;
|
||||
my $logger = Percona::Test::Mock::AgentLogger->new(log => \@log);
|
||||
pt_agent::_logger($logger);
|
||||
|
||||
my $sample = "t/pt-agent/samples";
|
||||
|
||||
# Create fake spool and lib dirs. Service-related subs in pt-agent
|
||||
# automatically add "/services" to the lib dir, but the spool dir is
|
||||
# used as-is.
|
||||
my $tmpdir = tempdir("/tmp/pt-agent.$PID.XXXXXX", CLEANUP => 1);
|
||||
output(
|
||||
sub { pt_agent::init_lib_dir(lib_dir => $tmpdir) }
|
||||
);
|
||||
my $spool_dir = "$tmpdir/spool";
|
||||
|
||||
sub write_svc_files {
|
||||
my (%args) = @_;
|
||||
have_required_args(\%args, qw(
|
||||
services
|
||||
)) or die;
|
||||
my $services = $args{services};
|
||||
|
||||
my $output = output(
|
||||
sub {
|
||||
pt_agent::write_services(
|
||||
sorted_services => { added => $services },
|
||||
lib_dir => $tmpdir,
|
||||
);
|
||||
},
|
||||
stderr => 1,
|
||||
die => 1,
|
||||
);
|
||||
}
|
||||
|
||||
# #############################################################################
|
||||
# Create mock client and Agent
|
||||
# #############################################################################
|
||||
|
||||
my $json = JSON->new->canonical([1])->pretty;
|
||||
$json->allow_blessed([]);
|
||||
$json->convert_blessed([]);
|
||||
|
||||
my $ua = Percona::Test::Mock::UserAgent->new(
|
||||
encode => sub { my $c = shift; return $json->encode($c || {}) },
|
||||
);
|
||||
|
||||
# Create client, get entry links
|
||||
my $links = {
|
||||
agents => '/agents',
|
||||
config => '/agents/1/config',
|
||||
services => '/agents/1/services',
|
||||
'query-history' => '/query-history',
|
||||
};
|
||||
|
||||
$ua->{responses}->{get} = [
|
||||
{
|
||||
content => $links,
|
||||
},
|
||||
];
|
||||
|
||||
my $client = eval {
|
||||
Percona::WebAPI::Client->new(
|
||||
api_key => '123',
|
||||
ua => $ua,
|
||||
);
|
||||
};
|
||||
is(
|
||||
$EVAL_ERROR,
|
||||
'',
|
||||
'Create mock client'
|
||||
) or die;
|
||||
|
||||
my $agent = Percona::WebAPI::Resource::Agent->new(
|
||||
uuid => '123',
|
||||
hostname => 'prod1',
|
||||
links => $links,
|
||||
);
|
||||
|
||||
is_deeply(
|
||||
as_hashref($agent),
|
||||
{
|
||||
uuid => '123',
|
||||
hostname => 'prod1',
|
||||
},
|
||||
'Create mock Agent'
|
||||
) or die;
|
||||
|
||||
# #############################################################################
|
||||
# Simple single task service using a program.
|
||||
# #############################################################################
|
||||
|
||||
my $run0 = Percona::WebAPI::Resource::Task->new(
|
||||
name => 'query-history',
|
||||
number => '0',
|
||||
program => "__BIN_DIR__/pt-query-digest --output json $trunk/t/lib/samples/slowlogs/slow008.txt",
|
||||
output => 'spool',
|
||||
);
|
||||
|
||||
my $svc0 = Percona::WebAPI::Resource::Service->new(
|
||||
ts => 100,
|
||||
name => 'query-history',
|
||||
run_schedule => '1 * * * *',
|
||||
spool_schedule => '2 * * * *',
|
||||
tasks => [ $run0 ],
|
||||
);
|
||||
|
||||
write_svc_files(
|
||||
services => [ $svc0 ],
|
||||
);
|
||||
|
||||
$ua->{responses}->{get} = [
|
||||
{
|
||||
headers => { 'X-Percona-Resource-Type' => 'Agent' },
|
||||
content => as_hashref($agent, with_links => 1),
|
||||
},
|
||||
];
|
||||
|
||||
my $exit_status;
|
||||
my $output = output(
|
||||
sub {
|
||||
$exit_status = pt_agent::run_service(
|
||||
api_key => '123',
|
||||
service => 'query-history',
|
||||
lib_dir => $tmpdir,
|
||||
spool_dir => $spool_dir,
|
||||
Cxn => '',
|
||||
# for testing:
|
||||
client => $client,
|
||||
agent => $agent,
|
||||
entry_links => $links,
|
||||
prefix => '1',
|
||||
json => $json,
|
||||
bin_dir => "$trunk/bin",
|
||||
);
|
||||
},
|
||||
);
|
||||
|
||||
ok(
|
||||
no_diff(
|
||||
"cat $tmpdir/spool/query-history/1.query-history.data",
|
||||
"$sample/query-history/data001.json",
|
||||
post_pipe => 'grep -v \'"name" :\'',
|
||||
),
|
||||
"1 run: spool data (query-history/data001.json)"
|
||||
) or diag(
|
||||
`ls -l $tmpdir/spool/query-history/`,
|
||||
`cat $tmpdir/logs/query-history.run`,
|
||||
Dumper(\@log)
|
||||
);
|
||||
|
||||
chomp(my $n_files = `ls -1 $spool_dir/query-history/*.data | wc -l | awk '{print \$1}'`);
|
||||
is(
|
||||
$n_files,
|
||||
1,
|
||||
"1 run: only wrote spool data"
|
||||
) or diag(`ls -l $spool_dir`);
|
||||
|
||||
is(
|
||||
$exit_status,
|
||||
0,
|
||||
"1 run: exit 0"
|
||||
);
|
||||
|
||||
ok(
|
||||
-f "$tmpdir/spool/query-history/1.query-history.meta",
|
||||
"1 run: .meta file exists"
|
||||
);
|
||||
|
||||
# #############################################################################
|
||||
# Service with two tasks, both using a program.
|
||||
# #############################################################################
|
||||
|
||||
diag(`rm -rf $tmpdir/spool/* $tmpdir/services/*`);
|
||||
@log = ();
|
||||
|
||||
# The result is the same as the previous single-run test, but instead of
|
||||
# having pqd read the slowlog directly, we have the first run cat the
|
||||
# log to a tmp file which pt-agent should auto-create. Then pqd in run1
|
||||
# references this tmp file.
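# (Sketch of the expected chaining, inferred from the checks below: run 0's
# output goes to an auto-created temp file under "$spool_dir/.tmp/", and the
# __RUN_0_OUTPUT__ placeholder in run 1 is replaced with that path, so run 1
# effectively runs "pt-query-digest --output json $spool_dir/.tmp/<run-0-file>".
# The exact temp file name is internal to pt-agent and not assumed here.)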
|
||||
|
||||
$run0 = Percona::WebAPI::Resource::Task->new(
|
||||
name => 'cat-slow-log',
|
||||
number => '0',
|
||||
program => "cat $trunk/t/lib/samples/slowlogs/slow008.txt",
|
||||
output => 'tmp',
|
||||
);
|
||||
|
||||
my $run1 = Percona::WebAPI::Resource::Task->new(
|
||||
name => 'query-history',
|
||||
number => '1',
|
||||
program => "__BIN_DIR__/pt-query-digest --output json __RUN_0_OUTPUT__",
|
||||
output => 'spool',
|
||||
);
|
||||
|
||||
$svc0 = Percona::WebAPI::Resource::Service->new(
|
||||
ts => 100,
|
||||
name => 'query-history',
|
||||
run_schedule => '3 * * * *',
|
||||
spool_schedule => '4 * * * *',
|
||||
tasks => [ $run0, $run1 ],
|
||||
);
|
||||
|
||||
write_svc_files(
|
||||
services => [ $svc0 ],
|
||||
);
|
||||
|
||||
$ua->{responses}->{get} = [
|
||||
{
|
||||
headers => { 'X-Percona-Resource-Type' => 'Agent' },
|
||||
content => as_hashref($agent, with_links => 1),
|
||||
},
|
||||
];
|
||||
|
||||
$output = output(
|
||||
sub {
|
||||
$exit_status = pt_agent::run_service(
|
||||
api_key => '123',
|
||||
service => 'query-history',
|
||||
spool_dir => $spool_dir,
|
||||
lib_dir => $tmpdir,
|
||||
Cxn => '',
|
||||
# for testing:
|
||||
client => $client,
|
||||
agent => $agent,
|
||||
entry_links => $links,
|
||||
prefix => '2',
|
||||
json => $json,
|
||||
bin_dir => "$trunk/bin",
|
||||
);
|
||||
},
|
||||
);
|
||||
|
||||
ok(
|
||||
no_diff(
|
||||
"cat $tmpdir/spool/query-history/2.query-history.data",
|
||||
"$sample/query-history/data001.json",
|
||||
post_pipe => 'grep -v \'"name" :\'',
|
||||
),
|
||||
"2 runs: spool data (query-history/data001.json)"
|
||||
) or diag(
|
||||
`ls -l $tmpdir/spool/query-history/`,
|
||||
`cat $tmpdir/logs/query-history.run`,
|
||||
Dumper(\@log)
|
||||
);
|
||||
|
||||
chomp($n_files = `ls -1 $spool_dir/query-history/*.data | wc -l | awk '{print \$1}'`);
|
||||
is(
|
||||
$n_files,
|
||||
1,
|
||||
"2 runs: only wrote spool data"
|
||||
) or diag(`ls -l $spool_dir`);
|
||||
|
||||
is(
|
||||
$exit_status,
|
||||
0,
|
||||
"2 runs: exit 0"
|
||||
);
|
||||
|
||||
my @tmp_files = glob "$tmpdir/spool/.tmp/*";
|
||||
is_deeply(
|
||||
\@tmp_files,
|
||||
[],
|
||||
"2 runs: temp file removed"
|
||||
);
|
||||
|
||||
# #############################################################################
|
||||
# More realistic: 3 services, multiple tasks, using programs and queries.
|
||||
# #############################################################################
|
||||
|
||||
SKIP: {
|
||||
skip 'Cannot connect to sandbox master', 5 unless $dbh;
|
||||
skip 'No HOME environment variable', 5 unless $ENV{HOME};
|
||||
|
||||
diag(`rm -rf $tmpdir/spool/* $tmpdir/services/*`);
|
||||
@log = ();
|
||||
|
||||
my (undef, $old_genlog) = $dbh->selectrow_array("SHOW VARIABLES LIKE 'general_log_file'");
|
||||
|
||||
my $new_genlog = "$tmpdir/genlog";
|
||||
|
||||
# First service: set up
|
||||
my $task00 = Percona::WebAPI::Resource::Task->new(
|
||||
name => 'disable-gen-log',
|
||||
number => '0',
|
||||
query => "SET GLOBAL general_log=OFF",
|
||||
);
|
||||
my $task01 = Percona::WebAPI::Resource::Task->new(
|
||||
name => 'set-gen-log-file',
|
||||
number => '1',
|
||||
query => "SET GLOBAL general_log_file='$new_genlog'",
|
||||
);
|
||||
my $task02 = Percona::WebAPI::Resource::Task->new(
|
||||
name => 'enable-gen-log',
|
||||
number => '2',
|
||||
query => "SET GLOBAL general_log=ON",
|
||||
);
|
||||
my $svc0 = Percona::WebAPI::Resource::Service->new(
|
||||
ts => 100,
|
||||
name => 'enable-gen-log',
|
||||
run_schedule => '1 * * * *',
|
||||
spool_schedule => '2 * * * *',
|
||||
tasks => [ $task00, $task01, $task02 ],
|
||||
);
|
||||
|
||||
# Second service: the actual service
|
||||
my $task10 = Percona::WebAPI::Resource::Task->new(
|
||||
name => 'query-history',
|
||||
number => '1',
|
||||
program => "$trunk/bin/pt-query-digest --output json --type genlog $new_genlog",
|
||||
output => 'spool',
|
||||
);
|
||||
my $svc1 = Percona::WebAPI::Resource::Service->new(
|
||||
ts => 100,
|
||||
name => 'query-history',
|
||||
run_schedule => '3 * * * *',
|
||||
spool_schedule => '4 * * * *',
|
||||
tasks => [ $task10 ],
|
||||
);
|
||||
|
||||
# Third service: tear down
|
||||
my $task20 = Percona::WebAPI::Resource::Task->new(
|
||||
name => 'disable-gen-log',
|
||||
number => '0',
|
||||
query => "SET GLOBAL general_log=OFF",
|
||||
);
|
||||
my $task21 = Percona::WebAPI::Resource::Task->new(
|
||||
name => 'set-gen-log-file',
|
||||
number => '1',
|
||||
query => "SET GLOBAL general_log_file='$old_genlog'",
|
||||
);
|
||||
my $task22 = Percona::WebAPI::Resource::Task->new(
|
||||
name => 'enable-gen-log',
|
||||
number => '2',
|
||||
query => "SET GLOBAL general_log=ON",
|
||||
);
|
||||
my $svc2 = Percona::WebAPI::Resource::Service->new(
|
||||
ts => 100,
|
||||
name => 'disable-gen-log',
|
||||
run_schedule => '5 * * * *',
|
||||
spool_schedule => '6 * * * *',
|
||||
tasks => [ $task20, $task21, $task22 ],
|
||||
);
|
||||
|
||||
write_svc_files(
|
||||
services => [ $svc0, $svc1, $svc2 ],
|
||||
);
|
||||
|
||||
$ua->{responses}->{get} = [
|
||||
{
|
||||
headers => { 'X-Percona-Resource-Type' => 'Agent' },
|
||||
content => as_hashref($agent, with_links => 1),
|
||||
},
|
||||
{
|
||||
headers => { 'X-Percona-Resource-Type' => 'Agent' },
|
||||
content => as_hashref($agent, with_links => 1),
|
||||
},
|
||||
{
|
||||
headers => { 'X-Percona-Resource-Type' => 'Agent' },
|
||||
content => as_hashref($agent, with_links => 1),
|
||||
},
|
||||
];
|
||||
|
||||
my $cxn = Cxn->new(
|
||||
dsn_string => $dsn,
|
||||
OptionParser => $o,
|
||||
DSNParser => $dp,
|
||||
);
|
||||
|
||||
# Run the first service.
|
||||
$output = output(
|
||||
sub {
|
||||
$exit_status = pt_agent::run_service(
|
||||
api_key => '123',
|
||||
service => 'enable-gen-log',
|
||||
spool_dir => $spool_dir,
|
||||
lib_dir => $tmpdir,
|
||||
Cxn => $cxn,
|
||||
# for testing:
|
||||
client => $client,
|
||||
agent => $agent,
|
||||
entry_links => $links,
|
||||
prefix => '3',
|
||||
json => $json,
|
||||
bin_dir => "$trunk/bin",
|
||||
);
|
||||
},
|
||||
);
|
||||
|
||||
my (undef, $genlog) = $dbh->selectrow_array(
|
||||
"SHOW VARIABLES LIKE 'general_log_file'");
|
||||
is(
|
||||
$genlog,
|
||||
$new_genlog,
|
||||
"Task set MySQL var"
|
||||
) or diag($output);
|
||||
|
||||
# Pretend some time passes...
|
||||
|
||||
# The next service doesn't need MySQL, so it shouldn't connect to it.
|
||||
# To check this, the genlog before running and after running should
|
||||
# be identical.
|
||||
`cp $new_genlog $tmpdir/genlog-before`;
|
||||
|
||||
# Run the second service.
|
||||
$output = output(
|
||||
sub {
|
||||
$exit_status = pt_agent::run_service(
|
||||
api_key => '123',
|
||||
service => 'query-history',
|
||||
spool_dir => $spool_dir,
|
||||
lib_dir => $tmpdir,
|
||||
Cxn => $cxn,
|
||||
# for testing:
|
||||
client => $client,
|
||||
agent => $agent,
|
||||
entry_links => $links,
|
||||
prefix => '4',
|
||||
json => $json,
|
||||
bin_dir => "$trunk/bin",
|
||||
);
|
||||
},
|
||||
);
|
||||
|
||||
`cp $new_genlog $tmpdir/genlog-after`;
|
||||
my $diff = `diff $tmpdir/genlog-before $tmpdir/genlog-after`;
|
||||
is(
|
||||
$diff,
|
||||
'',
|
||||
"Tasks didn't need MySQL, didn't connect to MySQL"
|
||||
) or diag($output);
|
||||
|
||||
# Pretend more time passes...
|
||||
|
||||
# Run the third service.
|
||||
$output = output(
|
||||
sub {
|
||||
$exit_status = pt_agent::run_service(
|
||||
api_key => '123',
|
||||
service => 'disable-gen-log',
|
||||
spool_dir => $spool_dir,
|
||||
lib_dir => $tmpdir,
|
||||
Cxn => $cxn,
|
||||
# for testing:
|
||||
client => $client,
|
||||
agent => $agent,
|
||||
entry_links => $links,
|
||||
prefix => '5',
|
||||
json => $json,
|
||||
bin_dir => "$trunk/bin",
|
||||
);
|
||||
},
|
||||
);
|
||||
|
||||
(undef, $genlog) = $dbh->selectrow_array(
|
||||
"SHOW VARIABLES LIKE 'general_log_file'");
|
||||
is(
|
||||
$genlog,
|
||||
$old_genlog,
|
||||
"Task restored MySQL var"
|
||||
) or diag($output);
|
||||
|
||||
$dbh->do("SET GLOBAL general_log=ON");
|
||||
$dbh->do("SET GLOBAL general_log_file='$old_genlog'");
|
||||
}
|
||||
|
||||
# #############################################################################
|
||||
# Done.
|
||||
# #############################################################################
|
||||
done_testing;
|
@@ -1,2 +0,0 @@
|
||||
* 8 * * 1,2,3,4,5 pt-agent --run-service query-history
|
||||
* 9 * * 1,2,3,4,5 pt-agent --send-data query-history
|
@@ -1 +0,0 @@
|
||||
17 3 * * 1 cmd
|
@@ -1,3 +0,0 @@
|
||||
17 3 * * 1 cmd
|
||||
* 8 * * 1,2,3,4,5 pt-agent --run-service query-history
|
||||
* 9 * * 1,2,3,4,5 pt-agent --send-data query-history
|
@@ -1,3 +0,0 @@
|
||||
17 3 * * 1 cmd
|
||||
* * * * 1 pt-agent --run-service old-service
|
||||
|
@@ -1,3 +0,0 @@
|
||||
17 3 * * 1 cmd
|
||||
* 8 * * 1,2,3,4,5 pt-agent --run-service query-history
|
||||
* 9 * * 1,2,3,4,5 pt-agent --send-data query-history
|
@@ -1,2 +0,0 @@
|
||||
1 * * * * pt-agent --run-service foo
|
||||
2 * * * * pt-agent --send-data foo
|
@@ -1,2 +0,0 @@
|
||||
* 8 * * 1,2,3,4,5 pt-agent --run-service query-history
|
||||
* 9 * * 1,2,3,4,5 pt-agent --send-data query-history
|
@@ -1,152 +0,0 @@
|
||||
|
||||
{
|
||||
"classes" : [
|
||||
{
|
||||
"attribute" : "fingerprint",
|
||||
"checksum" : "C72BF45D68E35A6E",
|
||||
"distillate" : "SELECT tbl",
|
||||
"example" : {
|
||||
"Query_time" : "0.018799",
|
||||
"query" : "SELECT MIN(id),MAX(id) FROM tbl",
|
||||
"ts" : null
|
||||
},
|
||||
"fingerprint" : "select min(id),max(id) from tbl",
|
||||
"histograms" : {
|
||||
"Query_time" : [
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
1,
|
||||
0,
|
||||
0,
|
||||
0
|
||||
]
|
||||
},
|
||||
"metrics" : {
|
||||
"Lock_time" : {
|
||||
"avg" : "0.009453",
|
||||
"max" : "0.009453",
|
||||
"median" : "0.009453",
|
||||
"min" : "0.009453",
|
||||
"pct" : "0.333333",
|
||||
"pct_95" : "0.009453",
|
||||
"stddev" : "0.000000",
|
||||
"sum" : "0.009453"
|
||||
},
|
||||
"Query_length" : {
|
||||
"avg" : "31",
|
||||
"max" : "31",
|
||||
"median" : "31",
|
||||
"min" : "31",
|
||||
"pct" : "0",
|
||||
"pct_95" : "31",
|
||||
"stddev" : "0",
|
||||
"sum" : "31"
|
||||
},
|
||||
"Query_time" : {
|
||||
"avg" : "0.018799",
|
||||
"max" : "0.018799",
|
||||
"median" : "0.018799",
|
||||
"min" : "0.018799",
|
||||
"pct" : "0.333333",
|
||||
"pct_95" : "0.018799",
|
||||
"stddev" : "0.000000",
|
||||
"sum" : "0.018799"
|
||||
},
|
||||
"Rows_examined" : {
|
||||
"avg" : "0",
|
||||
"max" : "0",
|
||||
"median" : "0",
|
||||
"min" : "0",
|
||||
"pct" : "0",
|
||||
"pct_95" : "0",
|
||||
"stddev" : "0",
|
||||
"sum" : "0"
|
||||
},
|
||||
"Rows_sent" : {
|
||||
"avg" : "0",
|
||||
"max" : "0",
|
||||
"median" : "0",
|
||||
"min" : "0",
|
||||
"pct" : "0",
|
||||
"pct_95" : "0",
|
||||
"stddev" : "0",
|
||||
"sum" : "0"
|
||||
},
|
||||
"db" : {
|
||||
"value" : "db2"
|
||||
},
|
||||
"host" : {
|
||||
"value" : ""
|
||||
},
|
||||
"user" : {
|
||||
"value" : "meow"
|
||||
}
|
||||
},
|
||||
"query_count" : 1,
|
||||
"tables" : [
|
||||
{
|
||||
"create" : "SHOW CREATE TABLE `db2`.`tbl`\\G",
|
||||
"status" : "SHOW TABLE STATUS FROM `db2` LIKE 'tbl'\\G"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"global" : {
|
||||
"files" : [
|
||||
{
|
||||
"size" : 656
|
||||
}
|
||||
],
|
||||
"metrics" : {
|
||||
"Lock_time" : {
|
||||
"avg" : "0.003151",
|
||||
"max" : "0.009453",
|
||||
"median" : "0.000000",
|
||||
"min" : "0.000000",
|
||||
"pct_95" : "0.009171",
|
||||
"stddev" : "0.004323",
|
||||
"sum" : "0.009453"
|
||||
},
|
||||
"Query_length" : {
|
||||
"avg" : "24",
|
||||
"max" : "31",
|
||||
"median" : "26",
|
||||
"min" : "14",
|
||||
"pct_95" : "30",
|
||||
"stddev" : "6",
|
||||
"sum" : "72"
|
||||
},
|
||||
"Query_time" : {
|
||||
"avg" : "0.006567",
|
||||
"max" : "0.018799",
|
||||
"median" : "0.000882",
|
||||
"min" : "0.000002",
|
||||
"pct_95" : "0.018157",
|
||||
"stddev" : "0.008359",
|
||||
"sum" : "0.019700"
|
||||
},
|
||||
"Rows_examined" : {
|
||||
"avg" : "0",
|
||||
"max" : "0",
|
||||
"median" : "0",
|
||||
"min" : "0",
|
||||
"pct_95" : "0",
|
||||
"stddev" : "0",
|
||||
"sum" : "0"
|
||||
},
|
||||
"Rows_sent" : {
|
||||
"avg" : "0",
|
||||
"max" : "0",
|
||||
"median" : "0",
|
||||
"min" : "0",
|
||||
"pct_95" : "0",
|
||||
"stddev" : "0",
|
||||
"sum" : "0"
|
||||
}
|
||||
},
|
||||
"query_count" : 3,
|
||||
"unique_query_count" : 3
|
||||
}
|
||||
}
|
@@ -1,166 +0,0 @@
|
||||
--Ym91bmRhcnk
|
||||
Content-Disposition: form-data; name="agent"
|
||||
|
||||
{
|
||||
"hostname" : "prod1",
|
||||
"uuid" : "123"
|
||||
}
|
||||
--Ym91bmRhcnk
|
||||
Content-Disposition: form-data; name="meta"
|
||||
|
||||
|
||||
--Ym91bmRhcnk
|
||||
Content-Disposition: form-data; name="data"
|
||||
|
||||
{
|
||||
"classes" : [
|
||||
{
|
||||
"attribute" : "fingerprint",
|
||||
"checksum" : "C72BF45D68E35A6E",
|
||||
"distillate" : "SELECT tbl",
|
||||
"example" : {
|
||||
"Query_time" : "0.018799",
|
||||
"query" : "SELECT MIN(id),MAX(id) FROM tbl",
|
||||
"ts" : null
|
||||
},
|
||||
"fingerprint" : "select min(id),max(id) from tbl",
|
||||
"histograms" : {
|
||||
"Query_time" : [
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
1,
|
||||
0,
|
||||
0,
|
||||
0
|
||||
]
|
||||
},
|
||||
"metrics" : {
|
||||
"Lock_time" : {
|
||||
"avg" : "0.009453",
|
||||
"max" : "0.009453",
|
||||
"median" : "0.009453",
|
||||
"min" : "0.009453",
|
||||
"pct" : "0.333333",
|
||||
"pct_95" : "0.009453",
|
||||
"stddev" : "0.000000",
|
||||
"sum" : "0.009453"
|
||||
},
|
||||
"Query_length" : {
|
||||
"avg" : "31",
|
||||
"max" : "31",
|
||||
"median" : "31",
|
||||
"min" : "31",
|
||||
"pct" : "0",
|
||||
"pct_95" : "31",
|
||||
"stddev" : "0",
|
||||
"sum" : "31"
|
||||
},
|
||||
"Query_time" : {
|
||||
"avg" : "0.018799",
|
||||
"max" : "0.018799",
|
||||
"median" : "0.018799",
|
||||
"min" : "0.018799",
|
||||
"pct" : "0.333333",
|
||||
"pct_95" : "0.018799",
|
||||
"stddev" : "0.000000",
|
||||
"sum" : "0.018799"
|
||||
},
|
||||
"Rows_examined" : {
|
||||
"avg" : "0",
|
||||
"max" : "0",
|
||||
"median" : "0",
|
||||
"min" : "0",
|
||||
"pct" : "0",
|
||||
"pct_95" : "0",
|
||||
"stddev" : "0",
|
||||
"sum" : "0"
|
||||
},
|
||||
"Rows_sent" : {
|
||||
"avg" : "0",
|
||||
"max" : "0",
|
||||
"median" : "0",
|
||||
"min" : "0",
|
||||
"pct" : "0",
|
||||
"pct_95" : "0",
|
||||
"stddev" : "0",
|
||||
"sum" : "0"
|
||||
},
|
||||
"db" : {
|
||||
"value" : "db2"
|
||||
},
|
||||
"host" : {
|
||||
"value" : ""
|
||||
},
|
||||
"user" : {
|
||||
"value" : "meow"
|
||||
}
|
||||
},
|
||||
"query_count" : 1,
|
||||
"tables" : [
|
||||
{
|
||||
"create" : "SHOW CREATE TABLE `db2`.`tbl`\\G",
|
||||
"status" : "SHOW TABLE STATUS FROM `db2` LIKE 'tbl'\\G"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"global" : {
|
||||
"files" : [
|
||||
{
|
||||
"size" : 656
|
||||
}
|
||||
],
|
||||
"metrics" : {
|
||||
"Lock_time" : {
|
||||
"avg" : "0.003151",
|
||||
"max" : "0.009453",
|
||||
"median" : "0.000000",
|
||||
"min" : "0.000000",
|
||||
"pct_95" : "0.009171",
|
||||
"stddev" : "0.004323",
|
||||
"sum" : "0.009453"
|
||||
},
|
||||
"Query_length" : {
|
||||
"avg" : "24",
|
||||
"max" : "31",
|
||||
"median" : "26",
|
||||
"min" : "14",
|
||||
"pct_95" : "30",
|
||||
"stddev" : "6",
|
||||
"sum" : "72"
|
||||
},
|
||||
"Query_time" : {
|
||||
"avg" : "0.006567",
|
||||
"max" : "0.018799",
|
||||
"median" : "0.000882",
|
||||
"min" : "0.000002",
|
||||
"pct_95" : "0.018157",
|
||||
"stddev" : "0.008359",
|
||||
"sum" : "0.019700"
|
||||
},
|
||||
"Rows_examined" : {
|
||||
"avg" : "0",
|
||||
"max" : "0",
|
||||
"median" : "0",
|
||||
"min" : "0",
|
||||
"pct_95" : "0",
|
||||
"stddev" : "0",
|
||||
"sum" : "0"
|
||||
},
|
||||
"Rows_sent" : {
|
||||
"avg" : "0",
|
||||
"max" : "0",
|
||||
"median" : "0",
|
||||
"min" : "0",
|
||||
"pct_95" : "0",
|
||||
"stddev" : "0",
|
||||
"sum" : "0"
|
||||
}
|
||||
},
|
||||
"query_count" : 3,
|
||||
"unique_query_count" : 3
|
||||
}
|
||||
}
|
||||
--Ym91bmRhcnk
|
@@ -1,19 +0,0 @@
|
||||
{
|
||||
"links" : {
|
||||
"data" : "/query-history/data",
|
||||
"self" : "/query-history"
|
||||
},
|
||||
"name" : "query-history",
|
||||
"run_schedule" : "1 * * * *",
|
||||
"spool_schedule" : "2 * * * *",
|
||||
"tasks" : [
|
||||
{
|
||||
"name" : "query-history",
|
||||
"number" : "0",
|
||||
"options" : "--output json",
|
||||
"output" : "spool",
|
||||
"program" : "pt-query-digest"
|
||||
}
|
||||
],
|
||||
"ts" : 100
|
||||
}
|
@@ -1,19 +0,0 @@
|
||||
{
|
||||
"links" : {
|
||||
"data" : "/query-history/data",
|
||||
"self" : "/query-history"
|
||||
},
|
||||
"name" : "query-history",
|
||||
"run_schedule" : "1 * * * *",
|
||||
"spool_schedule" : "2 * * * *",
|
||||
"tasks" : [
|
||||
{
|
||||
"name" : "query-history",
|
||||
"number" : "0",
|
||||
"options" : "--report-format profile slow008.txt",
|
||||
"output" : "spool",
|
||||
"program" : "pt-query-digest"
|
||||
}
|
||||
],
|
||||
"ts" : 100
|
||||
}
|
@@ -1,200 +0,0 @@
|
||||
#!/usr/bin/env perl
|
||||
|
||||
BEGIN {
|
||||
die "The PERCONA_TOOLKIT_BRANCH environment variable is not set.\n"
|
||||
unless $ENV{PERCONA_TOOLKIT_BRANCH} && -d $ENV{PERCONA_TOOLKIT_BRANCH};
|
||||
unshift @INC, "$ENV{PERCONA_TOOLKIT_BRANCH}/lib";
|
||||
};
|
||||
|
||||
use strict;
|
||||
use warnings FATAL => 'all';
|
||||
use English qw(-no_match_vars);
|
||||
use Test::More;
|
||||
use JSON;
|
||||
use File::Temp qw(tempfile tempdir);
|
||||
|
||||
use Percona::Test;
|
||||
use Percona::Test::Mock::AgentLogger;
|
||||
require "$trunk/bin/pt-agent";
|
||||
|
||||
my $crontab = `crontab -l 2>/dev/null`;
|
||||
if ( $crontab ) {
|
||||
plan skip_all => 'Crontab is not empty';
|
||||
}
|
||||
|
||||
Percona::Toolkit->import(qw(have_required_args Dumper));
|
||||
|
||||
my $sample = "t/pt-agent/samples";
|
||||
my $tmpdir = tempdir("/tmp/pt-agent.$PID.XXXXXX", CLEANUP => 1);
|
||||
|
||||
my @log;
|
||||
my $logger = Percona::Test::Mock::AgentLogger->new(log => \@log);
|
||||
pt_agent::_logger($logger);
|
||||
|
||||
# #############################################################################
|
||||
# Schedule a good crontab.
|
||||
# #############################################################################
|
||||
|
||||
my $run0 = Percona::WebAPI::Resource::Task->new(
|
||||
name => 'query-history',
|
||||
number => '0',
|
||||
program => 'pt-query-digest',
|
||||
options => '--output json',
|
||||
output => 'spool',
|
||||
);
|
||||
|
||||
my $svc0 = Percona::WebAPI::Resource::Service->new(
|
||||
ts => 100,
|
||||
name => 'query-history',
|
||||
run_schedule => '* 8 * * 1,2,3,4,5',
|
||||
spool_schedule => '* 9 * * 1,2,3,4,5',
|
||||
tasks => [ $run0 ],
|
||||
);
|
||||
|
||||
# First add a fake line so we can know that the real, existing
|
||||
# crontab is used and not clobbered.
|
||||
my ($fh, $file) = tempfile();
|
||||
print {$fh} "* 0 * * * date > /dev/null\n";
|
||||
close $fh or warn "Cannot close $file: $OS_ERROR";
|
||||
my $output = `crontab $file 2>&1`;
|
||||
|
||||
$crontab = `crontab -l 2>&1`;
|
||||
|
||||
is(
|
||||
$crontab,
|
||||
"* 0 * * * date > /dev/null\n",
|
||||
"Set other crontab line"
|
||||
) or diag($output);
|
||||
|
||||
unlink $file or warn "Cannot remove $file: $OS_ERROR";
|
||||
|
||||
eval {
|
||||
$output = output(
|
||||
sub {
|
||||
pt_agent::schedule_services(
|
||||
services => [ $svc0 ],
|
||||
lib_dir => $tmpdir,
|
||||
)
|
||||
},
|
||||
stderr => 1,
|
||||
);
|
||||
};
|
||||
|
||||
is(
|
||||
$EVAL_ERROR,
|
||||
"",
|
||||
"No error"
|
||||
) or diag($output);
|
||||
|
||||
$crontab = `crontab -l 2>/dev/null`;
|
||||
|
||||
# pt-agent uses $FindBin::Bin/pt-agent for the path to pt-agent,
|
||||
# which in testing will be $trunk/t/pt-agent/ because that's where
|
||||
# this file is located. However, $FindBin::Bin resolves symlinks
# whereas $trunk does not, so to make things simple we just
|
||||
# cut out the full path.
|
||||
if ( $crontab ) {
|
||||
$crontab =~ s! /.+?/pt-agent --! pt-agent --!g;
|
||||
}
|
||||
is(
|
||||
$crontab,
|
||||
"* 0 * * * date > /dev/null
|
||||
* 8 * * 1,2,3,4,5 pt-agent --run-service query-history
|
||||
* 9 * * 1,2,3,4,5 pt-agent --send-data query-history
|
||||
",
|
||||
"schedule_services()"
|
||||
);
|
||||
|
||||
ok(
|
||||
-f "$tmpdir/crontab",
|
||||
"Wrote crontab to --lib/crontab"
|
||||
) or diag(`ls -l $tmpdir`);
|
||||
|
||||
ok(
|
||||
-f "$tmpdir/crontab.err",
|
||||
"Write --lib/crontab.err",
|
||||
) or diag(`ls -l $tmpdir`);
|
||||
|
||||
my $err = -f "$tmpdir/crontab.err" ? `cat $tmpdir/crontab.err` : '';
|
||||
is(
|
||||
$err,
|
||||
"",
|
||||
"No crontab error"
|
||||
);
|
||||
|
||||
system("crontab -r 2>/dev/null");
|
||||
$crontab = `crontab -l 2>/dev/null`;
|
||||
is(
|
||||
$crontab,
|
||||
"",
|
||||
"Removed crontab"
|
||||
);
|
||||
|
||||
# #############################################################################
|
||||
# Handle bad crontab lines.
|
||||
# #############################################################################
|
||||
|
||||
$svc0 = Percona::WebAPI::Resource::Service->new(
|
||||
ts => 100,
|
||||
name => 'query-history',
|
||||
run_schedule => '* * * * Foo', # "foo":0: bad day-of-week
|
||||
spool_schedule => '* 8 * * Mon',
|
||||
tasks => [ $run0 ],
|
||||
);
|
||||
|
||||
eval {
|
||||
$output = output(
|
||||
sub {
|
||||
pt_agent::schedule_services(
|
||||
services => [ $svc0 ],
|
||||
lib_dir => $tmpdir,
|
||||
),
|
||||
},
|
||||
stderr => 1,
|
||||
die => 1,
|
||||
);
|
||||
};
|
||||
|
||||
like(
|
||||
$EVAL_ERROR,
|
||||
qr/Error setting new crontab/,
|
||||
"Throws errors"
|
||||
) or diag($output);
|
||||
|
||||
$crontab = `crontab -l 2>/dev/null`;
|
||||
is(
|
||||
$crontab,
|
||||
"",
|
||||
"Bad schedule_services()"
|
||||
);
|
||||
|
||||
ok(
|
||||
-f "$tmpdir/crontab",
|
||||
"Wrote crontab to --lib/crontab"
|
||||
) or diag(`ls -l $tmpdir`);
|
||||
|
||||
ok(
|
||||
-f "$tmpdir/crontab.err",
|
||||
"Write --lib/crontab.err",
|
||||
) or diag(`ls -l $tmpdir`);
|
||||
|
||||
$err = -f "$tmpdir/crontab.err" ? `cat $tmpdir/crontab.err` : '';
|
||||
like(
|
||||
$err,
|
||||
qr/bad/,
|
||||
"Crontab error"
|
||||
);
|
||||
|
||||
system("crontab -r 2>/dev/null");
|
||||
$crontab = `crontab -l 2>/dev/null`;
|
||||
is(
|
||||
$crontab,
|
||||
"",
|
||||
"Removed crontab"
|
||||
);
|
||||
|
||||
|
||||
# #############################################################################
|
||||
# Done.
|
||||
# #############################################################################
|
||||
done_testing;
|
@@ -1,234 +0,0 @@
|
||||
#!/usr/bin/env perl
|
||||
|
||||
BEGIN {
|
||||
die "The PERCONA_TOOLKIT_BRANCH environment variable is not set.\n"
|
||||
unless $ENV{PERCONA_TOOLKIT_BRANCH} && -d $ENV{PERCONA_TOOLKIT_BRANCH};
|
||||
unshift @INC, "$ENV{PERCONA_TOOLKIT_BRANCH}/lib";
|
||||
};
|
||||
|
||||
use strict;
|
||||
use warnings FATAL => 'all';
|
||||
use English qw(-no_match_vars);
|
||||
use Test::More;
|
||||
use JSON;
|
||||
use File::Temp qw(tempdir);
|
||||
|
||||
use Percona::Test;
|
||||
use Percona::Test::Mock::UserAgent;
|
||||
use Percona::Test::Mock::AgentLogger;
|
||||
require "$trunk/bin/pt-agent";
|
||||
|
||||
Percona::Toolkit->import(qw(Dumper have_required_args));
|
||||
Percona::WebAPI::Representation->import(qw(as_hashref));
|
||||
|
||||
my @log;
|
||||
my $logger = Percona::Test::Mock::AgentLogger->new(log => \@log);
|
||||
pt_agent::_logger($logger);
|
||||
|
||||
my $sample = "t/pt-agent/samples";
|
||||
|
||||
# #############################################################################
|
||||
# Create mock client and Agent
|
||||
# #############################################################################
|
||||
|
||||
# These aren't the real tests yet: to run_agent(), first we need
|
||||
# a client and Agent, so create mock ones.
|
||||
|
||||
my $json = JSON->new->canonical([1])->pretty;
|
||||
$json->allow_blessed([]);
|
||||
$json->convert_blessed([]);
|
||||
|
||||
my $ua = Percona::Test::Mock::UserAgent->new(
|
||||
encode => sub { my $c = shift; return $json->encode($c || {}) },
|
||||
);
|
||||
|
||||
# Create client, get entry links
|
||||
my $links = {
|
||||
agents => '/agents',
|
||||
config => '/agents/1/config',
|
||||
services => '/agents/1/services',
|
||||
'query-history' => '/query-history',
|
||||
};
|
||||
|
||||
$ua->{responses}->{get} = [
|
||||
{
|
||||
content => $links,
|
||||
},
|
||||
];
|
||||
|
||||
my $client = eval {
|
||||
Percona::WebAPI::Client->new(
|
||||
api_key => '123',
|
||||
ua => $ua,
|
||||
);
|
||||
};
|
||||
is(
|
||||
$EVAL_ERROR,
|
||||
'',
|
||||
'Create mock client'
|
||||
) or die;
|
||||
|
||||
my $agent = Percona::WebAPI::Resource::Agent->new(
|
||||
uuid => '123',
|
||||
hostname => 'prod1',
|
||||
links => $links,
|
||||
);
|
||||
|
||||
is_deeply(
|
||||
as_hashref($agent),
|
||||
{
|
||||
uuid => '123',
|
||||
hostname => 'prod1',
|
||||
},
|
||||
'Create mock Agent'
|
||||
) or die;
|
||||
|
||||
# #############################################################################
|
||||
# Test send_data
|
||||
# #############################################################################
|
||||
|
||||
my $tmpdir = tempdir("/tmp/pt-agent.$PID.XXXXXX", CLEANUP => 1);
|
||||
pt_agent::init_lib_dir(
|
||||
lib_dir => $tmpdir,
|
||||
quiet => 1,
|
||||
);
|
||||
pt_agent::init_spool_dir(
|
||||
spool_dir => $tmpdir,
|
||||
service => 'query-history',
|
||||
quiet => 1,
|
||||
);
|
||||
|
||||
`cp $trunk/$sample/query-history/data001.json $tmpdir/query-history/1.data001.data`;
|
||||
`cp $trunk/$sample/service001 $tmpdir/services/query-history`;
|
||||
|
||||
$ua->{responses}->{get} = [
|
||||
{
|
||||
headers => { 'X-Percona-Resource-Type' => 'Agent' },
|
||||
content => as_hashref($agent, with_links => 1),
|
||||
},
|
||||
];
|
||||
|
||||
$ua->{responses}->{post} = [
|
||||
{
|
||||
content => $links,
|
||||
},
|
||||
];
|
||||
|
||||
my $output = output(
|
||||
sub {
|
||||
pt_agent::send_data(
|
||||
api_key => '123',
|
||||
service => 'query-history',
|
||||
lib_dir => $tmpdir,
|
||||
spool_dir => $tmpdir,
|
||||
# optional, for testing:
|
||||
client => $client,
|
||||
entry_links => $links,
|
||||
agent => $agent,
|
||||
log_file => "$tmpdir/log",
|
||||
json => $json,
|
||||
delay => 0,
|
||||
),
|
||||
},
|
||||
);
|
||||
|
||||
is(
|
||||
scalar @{$client->ua->{content}->{post}},
|
||||
1,
|
||||
"Only sent 1 resource"
|
||||
) or diag(
|
||||
$output,
|
||||
Dumper($client->ua->{content}->{post}),
|
||||
`cat $tmpdir/logs/query-history.send`
|
||||
);
|
||||
|
||||
is_deeply(
|
||||
$ua->{requests},
|
||||
[
|
||||
'GET /agents/123',
|
||||
'POST /query-history/data',
|
||||
],
|
||||
"POST to Service.links.data"
|
||||
);
|
||||
|
||||
ok(
|
||||
no_diff(
|
||||
$client->ua->{content}->{post}->[0] || '',
|
||||
"$sample/query-history/data001.send",
|
||||
cmd_output => 1,
|
||||
),
|
||||
"Sent data file as multi-part resource (query-history/data001)"
|
||||
) or diag(Dumper($client->ua->{content}->{post}));
|
||||
|
||||
ok(
|
||||
!-f "$tmpdir/query-history/1.data001.data",
|
||||
"Removed data file after sending successfully"
|
||||
);
|
||||
|
||||
is(
|
||||
$ua->{request_objs}->[-1]->header('content-type'),
|
||||
'multipart/form-data; boundary=Ym91bmRhcnk',
|
||||
'Content-Type=multipart/form-data; boundary=Ym91bmRhcnk'
|
||||
) or diag(Dumper($ua));
|
||||
|
||||
# #############################################################################
|
||||
# Error 400 on send
|
||||
# #############################################################################
|
||||
|
||||
@log = ();
|
||||
$client->ua->{content}->{post} = [];
|
||||
$ua->{requests} = [];
|
||||
|
||||
`cp $trunk/$sample/query-history/data001.json $tmpdir/query-history/1.data001.data`;
|
||||
|
||||
$ua->{responses}->{get} = [
|
||||
{
|
||||
headers => { 'X-Percona-Resource-Type' => 'Agent' },
|
||||
content => as_hashref($agent, with_links => 1),
|
||||
},
|
||||
];
|
||||
|
||||
$ua->{responses}->{post} = [
|
||||
{
|
||||
code => 400,
|
||||
content => '',
|
||||
},
|
||||
];
|
||||
|
||||
$output = output(
|
||||
sub {
|
||||
pt_agent::send_data(
|
||||
api_key => '123',
|
||||
service => 'query-history',
|
||||
lib_dir => $tmpdir,
|
||||
spool_dir => $tmpdir,
|
||||
# optional, for testing:
|
||||
client => $client,
|
||||
entry_links => $links,
|
||||
agent => $agent,
|
||||
log_file => "$tmpdir/log",
|
||||
json => $json,
|
||||
delay => 0,
|
||||
),
|
||||
},
|
||||
);
|
||||
|
||||
is(
|
||||
scalar @{$client->ua->{content}->{post}},
|
||||
1,
|
||||
"400: sent resource"
|
||||
) or diag(
|
||||
$output,
|
||||
Dumper($client->ua->{content}->{post}),
|
||||
`cat $tmpdir/logs/query-history.send`
|
||||
);
|
||||
|
||||
ok(
|
||||
-f "$tmpdir/query-history/1.data001.data",
|
||||
"400: file not removed"
|
||||
) or diag($output);
|
||||
|
||||
# #############################################################################
|
||||
# Done.
|
||||
# #############################################################################
|
||||
done_testing;
|
@@ -1,108 +0,0 @@
|
||||
#!/usr/bin/env perl
|
||||
|
||||
BEGIN {
|
||||
die "The PERCONA_TOOLKIT_BRANCH environment variable is not set.\n"
|
||||
unless $ENV{PERCONA_TOOLKIT_BRANCH} && -d $ENV{PERCONA_TOOLKIT_BRANCH};
|
||||
unshift @INC, "$ENV{PERCONA_TOOLKIT_BRANCH}/lib";
|
||||
};
|
||||
|
||||
use strict;
|
||||
use warnings FATAL => 'all';
|
||||
use English qw(-no_match_vars);
|
||||
use Test::More;
|
||||
use JSON;
|
||||
use File::Temp qw(tempdir);
|
||||
|
||||
use Percona::Test;
|
||||
use Percona::Test::Mock::UserAgent;
|
||||
use Percona::Test::Mock::AgentLogger;
|
||||
require "$trunk/bin/pt-agent";
|
||||
|
||||
Percona::Toolkit->import(qw(Dumper have_required_args));
|
||||
Percona::WebAPI::Representation->import(qw(as_hashref));
|
||||
|
||||
my $json = JSON->new->canonical([1])->pretty;
|
||||
my $sample = "t/pt-agent/samples";
|
||||
my $tmpdir = tempdir("/tmp/pt-agent.$PID.XXXXXX", CLEANUP => 1);
|
||||
|
||||
mkdir "$tmpdir/services" or die "Error mkdir $tmpdir/services: $OS_ERROR";
|
||||
|
||||
my @log;
|
||||
my $logger = Percona::Test::Mock::AgentLogger->new(log => \@log);
|
||||
pt_agent::_logger($logger);
|
||||
|
||||
sub test_write_services {
|
||||
my (%args) = @_;
|
||||
have_required_args(\%args, qw(
|
||||
services
|
||||
file
|
||||
)) or die;
|
||||
my $services = $args{services};
|
||||
my $file = $args{file};
|
||||
|
||||
die "$trunk/$sample/$file does not exist"
|
||||
unless -f "$trunk/$sample/$file";
|
||||
|
||||
my $output = output(
|
||||
sub {
|
||||
pt_agent::write_services(
|
||||
sorted_services => $services,
|
||||
lib_dir => $tmpdir,
|
||||
json => $json,
|
||||
);
|
||||
},
|
||||
stderr => 1,
|
||||
);
|
||||
|
||||
foreach my $service ( @{$services->{added}} ) {
|
||||
my $name = $service->name;
|
||||
ok(
|
||||
no_diff(
|
||||
"cat $tmpdir/services/$name 2>/dev/null",
|
||||
"$sample/$file",
|
||||
),
|
||||
"$file $name"
|
||||
) or diag($output, `cat $tmpdir/services/$name`);
|
||||
}
|
||||
|
||||
diag(`rm -rf $tmpdir/*`);
|
||||
}
|
||||
|
||||
my $run0 = Percona::WebAPI::Resource::Task->new(
|
||||
name => 'query-history',
|
||||
number => '0',
|
||||
program => "pt-query-digest",
|
||||
options => "--report-format profile slow008.txt",
|
||||
output => 'spool',
|
||||
);
|
||||
|
||||
my $svc0 = Percona::WebAPI::Resource::Service->new(
|
||||
ts => 100,
|
||||
name => 'query-history',
|
||||
run_schedule => '1 * * * *',
|
||||
spool_schedule => '2 * * * *',
|
||||
tasks => [ $run0 ],
|
||||
links => {
|
||||
self => '/query-history',
|
||||
data => '/query-history/data',
|
||||
},
|
||||
);
|
||||
|
||||
# Key thing here is that the links are written because
|
||||
# --send-data <service> requires them.
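# (For reference, the expected sample t/pt-agent/samples/write_services001
# contains a links section like
#   "links" : { "data" : "/query-history/data", "self" : "/query-history" }
# which is what --send-data later uses as Service.links.data when POSTing.)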
|
||||
|
||||
my $sorted_services = {
|
||||
added => [ $svc0 ],
|
||||
updated => [],
|
||||
removed => [],
|
||||
};
|
||||
|
||||
test_write_services(
|
||||
services => $sorted_services,
|
||||
file => "write_services001",
|
||||
);
|
||||
|
||||
# #############################################################################
|
||||
# Done.
|
||||
# #############################################################################
|
||||
done_testing;
|
@@ -56,6 +56,7 @@ my @called = $output =~ m/^PLUGIN \S+$/gm;
|
||||
is_deeply(
|
||||
\@called,
|
||||
[
|
||||
'PLUGIN get_slave_lag',
|
||||
'PLUGIN init',
|
||||
'PLUGIN before_create_new_table',
|
||||
'PLUGIN after_create_new_table',
|
||||
|
@@ -96,4 +96,11 @@ sub before_exit {
|
||||
print "PLUGIN before_exit\n";
|
||||
}
|
||||
|
||||
sub get_slave_lag {
|
||||
my ($self, %args) = @_;
|
||||
print "PLUGIN get_slave_lag\n";
|
||||
|
||||
return sub { return 0; };
|
||||
}
|
||||
|
||||
1;
|
||||
|
98
t/pt-table-checksum/plugin.t
Normal file
@@ -0,0 +1,98 @@
|
||||
#!/usr/bin/env perl
|
||||
|
||||
BEGIN {
|
||||
die "The PERCONA_TOOLKIT_BRANCH environment variable is not set.\n"
|
||||
unless $ENV{PERCONA_TOOLKIT_BRANCH} && -d $ENV{PERCONA_TOOLKIT_BRANCH};
|
||||
unshift @INC, "$ENV{PERCONA_TOOLKIT_BRANCH}/lib";
|
||||
};
|
||||
|
||||
use strict;
|
||||
use warnings FATAL => 'all';
|
||||
use English qw(-no_match_vars);
|
||||
use Test::More;
|
||||
|
||||
use PerconaTest;
|
||||
use Sandbox;
|
||||
require "$trunk/bin/pt-table-checksum";
|
||||
|
||||
use Data::Dumper;
|
||||
$Data::Dumper::Indent = 1;
|
||||
$Data::Dumper::Sortkeys = 1;
|
||||
$Data::Dumper::Quotekeys = 0;
|
||||
|
||||
my $dp = new DSNParser(opts=>$dsn_opts);
|
||||
my $sb = new Sandbox(basedir => '/tmp', DSNParser => $dp);
|
||||
my $master_dbh = $sb->get_dbh_for('master');
|
||||
|
||||
if ( !$master_dbh ) {
|
||||
plan skip_all => 'Cannot connect to sandbox master';
|
||||
}
|
||||
|
||||
my $output;
|
||||
my $master_dsn = $sb->dsn_for('master');
|
||||
my $sample = "t/pt-table-checksum/samples";
|
||||
my $plugin = "$trunk/$sample/plugins";
|
||||
my $exit;
|
||||
my $rows;
|
||||
|
||||
$master_dbh->prepare("drop database if exists percona")->execute();
|
||||
$master_dbh->prepare("create database percona")->execute();
|
||||
$master_dbh->prepare("create table if not exists percona.t ( a int primary key);")->execute();
|
||||
$master_dbh->prepare("insert into percona.t values (0),(1),(2),(3),(4),(5),(6),(7),(8),(9)")->execute();
|
||||
$master_dbh->prepare("analyze table percona.t;")->execute();
|
||||
|
||||
# #############################################################################
|
||||
# all_hooks.pm
|
||||
# #############################################################################
|
||||
|
||||
($output) = full_output(
|
||||
sub { pt_table_checksum::main(
|
||||
"$master_dsn",
|
||||
'--databases', 'percona',
|
||||
'--plugin', "$plugin/all_hooks.pm",
|
||||
)},
|
||||
stderr => 1,
|
||||
);
|
||||
|
||||
my @called = $output =~ m/^PLUGIN \S+$/gm;
|
||||
|
||||
is_deeply(
|
||||
\@called,
|
||||
[
|
||||
'PLUGIN get_slave_lag',
|
||||
'PLUGIN init',
|
||||
'PLUGIN before_checksum_table',
|
||||
'PLUGIN after_checksum_table',
|
||||
],
|
||||
"Called all plugins on basic run"
|
||||
) or diag(Dumper($output));
|
||||
|
||||
|
||||
($output) = full_output(
|
||||
sub { pt_table_checksum::main(
|
||||
"$master_dsn",
|
||||
'--replicate-check', '--replicate-check-only',
|
||||
'--databases', 'percona',
|
||||
'--plugin', "$plugin/all_hooks.pm",
|
||||
)},
|
||||
stderr => 1,
|
||||
);
|
||||
|
||||
@called = $output =~ m/^PLUGIN \S+$/gm;
|
||||
|
||||
is_deeply(
|
||||
\@called,
|
||||
[
|
||||
'PLUGIN before_replicate_check',
|
||||
'PLUGIN after_replicate_check',
|
||||
],
|
||||
"Called all plugins on replicate-check run"
|
||||
) or diag(Dumper($output));
|
||||
|
||||
|
||||
# #############################################################################
|
||||
# Done.
|
||||
# #############################################################################
|
||||
$sb->wipe_clean($master_dbh);
|
||||
ok($sb->ok(), "Sandbox servers") or BAIL_OUT(__FILE__ . " broke the sandbox");
|
||||
done_testing;
|
45
t/pt-table-checksum/samples/plugins/all_hooks.pm
Normal file
@@ -0,0 +1,45 @@
|
||||
package pt_table_checksum_plugin;
|
||||
|
||||
use strict;
|
||||
use warnings FATAL => 'all';
|
||||
use English qw(-no_match_vars);
|
||||
use constant PTDEBUG => $ENV{PTDEBUG} || 0;
|
||||
|
||||
sub new {
|
||||
my ($class, %args) = @_;
|
||||
my $self = { %args };
|
||||
return bless $self, $class;
|
||||
}
|
||||
|
||||
sub init {
|
||||
my ($self, %args) = @_;
|
||||
print "PLUGIN init\n";
|
||||
}
|
||||
|
||||
sub before_replicate_check {
|
||||
my ($self, %args) = @_;
|
||||
print "PLUGIN before_replicate_check\n";
|
||||
}
|
||||
|
||||
sub after_replicate_check {
|
||||
my ($self, %args) = @_;
|
||||
print "PLUGIN after_replicate_check\n";
|
||||
}
|
||||
|
||||
sub get_slave_lag {
|
||||
my ($self, %args) = @_;
|
||||
print "PLUGIN get_slave_lag\n";
|
||||
return sub { return 0; };
|
||||
}
|
||||
|
||||
sub before_checksum_table {
|
||||
my ($self, %args) = @_;
|
||||
print "PLUGIN before_checksum_table\n";
|
||||
}
|
||||
|
||||
sub after_checksum_table {
|
||||
my ($self, %args) = @_;
|
||||
print "PLUGIN after_checksum_table\n";
|
||||
}
|
||||
|
||||
1;
|