Mirror of https://github.com/percona/percona-toolkit.git
Fix/update/test sending data. Rename query-monitor to query-history in tests.
791  bin/pt-agent  (file diff suppressed because it is too large)
@@ -7627,8 +7627,8 @@ use constant PTDEBUG => $ENV{PTDEBUG} || 0;

 my $have_json = eval { require JSON };

-our $pretty_json = 0;
-our $sorted_json = 0;
+our $pretty_json = $ENV{PTTEST_PRETTY_JSON} ? 1 : 0;
+our $sorted_json = $ENV{PTTEST_PRETTY_JSON} ? 1 : 0;

 extends qw(QueryReportFormatter);

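Aside on the hunk above: pt-agent's JSON formatter flags now honor the PTTEST_PRETTY_JSON environment variable, so the tests below can ask for pretty-printed, key-sorted JSON and compare spool files against static samples deterministically. A minimal sketch of how such flags typically drive the JSON module (illustrative only, not the tool's actual encoder):

    #!/usr/bin/env perl
    use strict;
    use warnings;
    use JSON;

    # Illustrative flags, mirroring the two variables changed above.
    my $pretty_json = $ENV{PTTEST_PRETTY_JSON} ? 1 : 0;
    my $sorted_json = $ENV{PTTEST_PRETTY_JSON} ? 1 : 0;

    sub encode_report {
       my ($data) = @_;
       my $json = JSON->new;
       $json->pretty(1)    if $pretty_json;   # indented, human-diffable output
       $json->canonical(1) if $sorted_json;   # sort hash keys for stable diffs
       return $json->encode($data);
    }

    print encode_report({ b => 2, a => 1 });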
@@ -23,7 +23,7 @@ package Percona::WebAPI::Resource::Agent;
 use Lmo;

 has 'uuid' => (
-   is       => 'r0',
+   is       => 'ro',
    isa      => 'Str',
    required => 0,
 );
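The hunk above fixes a typo in the Agent resource's attribute spec: 'r0' (with a zero) is not a valid accessor mode, while 'ro' declares a read-only attribute. A hedged sketch of what 'ro' gives you, written against Moose for illustration (Lmo, Percona's lightweight Moose work-alike, follows the same convention):

    package My::Agent;          # hypothetical package, for illustration only
    use Moose;                  # Moose stands in for Lmo here

    has 'uuid' => (
       is       => 'ro',        # read-only accessor: value is fixed at construction
       isa      => 'Str',
       required => 0,
    );

    package main;
    use strict;
    use warnings;

    my $agent = My::Agent->new(uuid => '123');
    print $agent->uuid, "\n";   # prints 123
    # $agent->uuid('456');      # would die: cannot assign to a read-only accessor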
@@ -55,7 +55,7 @@ my $run0 = Percona::WebAPI::Resource::Task->new(
 );

 my $svc0 = Percona::WebAPI::Resource::Service->new(
-   name           => 'query-monitor',
+   name           => 'query-history',
    run_schedule   => '* 8 * * 1,2,3,4,5',
    spool_schedule => '* 9 * * 1,2,3,4,5',
    tasks          => [ $run0 ],
@@ -127,8 +127,8 @@ SKIP: {
 is(
    $new_crontab,
    "* 0 * * * date > /dev/null
-* 8 * * 1,2,3,4,5 pt-agent --run-service query-monitor
-* 9 * * 1,2,3,4,5 pt-agent --send-data query-monitor
+* 8 * * 1,2,3,4,5 pt-agent --run-service query-history
+* 9 * * 1,2,3,4,5 pt-agent --send-data query-history
 ",
    "Runs crontab -l by default"
 );
@@ -106,12 +106,13 @@ my $run0 = Percona::WebAPI::Resource::Task->new(
 );

 my $svc0 = Percona::WebAPI::Resource::Service->new(
-   name           => 'query-monitor',
+   name           => 'query-history',
    run_schedule   => '1 * * * *',
    spool_schedule => '2 * * * *',
    tasks          => [ $run0 ],
    links          => {
-      send_data => '/query-monitor',
+      self => '/query-history',
+      data => '/query-history/data',
    },
 );

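The Service resource in this test now carries two links instead of a single send_data link: self points at the service itself and data is where --send-data POSTs spooled results. A rough sketch of picking the right link from such a structure (hypothetical helper; the real lookup happens inside pt-agent's send_data):

    use strict;
    use warnings;

    # Hypothetical service structure shaped like the Service resource above.
    my $service = {
       name  => 'query-history',
       links => {
          self => '/query-history',
          data => '/query-history/data',
       },
    };

    sub data_link {
       my ($svc) = @_;
       # Prefer the explicit data link; fall back to self if a server omits it.
       return $svc->{links}->{data} || $svc->{links}->{self};
    }

    printf "POST %s\n", data_link($service);   # POST /query-history/data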
@@ -189,35 +190,35 @@ is(
 );

 ok(
-   -f "$tmpdir/services/query-monitor",
-   "Created services/query-monitor"
+   -f "$tmpdir/services/query-history",
+   "Created services/query-history"
 ) or diag($output);

 chomp(my $n_files = `ls -1 $tmpdir/services| wc -l | awk '{print \$1}'`);
 is(
    $n_files,
    1,
-   "... only created services/query-monitor"
+   "... only created services/query-history"
 );

 ok(
    no_diff(
-      "cat $tmpdir/services/query-monitor",
+      "cat $tmpdir/services/query-history",
       "t/pt-agent/samples/service001",
    ),
-   "query-monitor service file"
+   "query-history service file"
 );

 $crontab = `crontab -l 2>/dev/null`;
 like(
    $crontab,
-   qr/pt-agent --run-service query-monitor$/m,
+   qr/pt-agent --run-service query-history$/m,
    "Scheduled --run-service with crontab"
 );

 like(
    $crontab,
-   qr/pt-agent --send-data query-monitor$/m,
+   qr/pt-agent --send-data query-history$/m,
    "Scheduled --send-data with crontab"
 );

@@ -257,14 +258,14 @@ $ua->{responses}->{get} = [
 @oktorun = (1, 1, 1, 0);

 # Between the 2nd and 3rd checks, remove the config file (~/.pt-agent.conf)
-# and query-monitor service file. When the tool re-GETs these, they'll be
+# and query-history service file. When the tool re-GETs these, they'll be
 # the same so it won't recreate them. A bug here will cause these files to
 # exist again after running.
 $ok_code[2] = sub {
    unlink "$config_file";
-   unlink "$tmpdir/services/query-monitor";
+   unlink "$tmpdir/services/query-history";
    Percona::Test::wait_until(sub { ! -f "$config_file" });
-   Percona::Test::wait_until(sub { ! -f "$tmpdir/services/query-monitor" });
+   Percona::Test::wait_until(sub { ! -f "$tmpdir/services/query-history" });
 };

 @wait = ();
@@ -296,7 +297,7 @@ ok(
 );

 ok(
-   ! -f "$tmpdir/services/query-monitor",
+   ! -f "$tmpdir/services/query-history",
    "No Service diff, no service file changes"
 );

@@ -13,6 +13,8 @@ use Test::More;
 use JSON;
 use File::Temp qw(tempdir);

+$ENV{PTTEST_PRETTY_JSON} = 1;
+
 use Percona::Test;
 use Percona::Test::Mock::UserAgent;
 require "$trunk/bin/pt-agent";
@@ -57,12 +59,12 @@ my $run0 = Percona::WebAPI::Resource::Task->new(
    name    => 'query-history',
    number  => '0',
    program => "$trunk/bin/pt-query-digest",
-   options => "--report-format profile $trunk/t/lib/samples/slowlogs/slow008.txt",
+   options => "--output json $trunk/t/lib/samples/slowlogs/slow008.txt",
    output  => 'spool',
 );

 my $svc0 = Percona::WebAPI::Resource::Service->new(
-   name           => 'query-monitor',
+   name           => 'query-history',
    run_schedule   => '1 * * * *',
    spool_schedule => '2 * * * *',
    tasks          => [ $run0 ],
@@ -76,7 +78,7 @@ my $exit_status;
 my $output = output(
    sub {
       $exit_status = pt_agent::run_service(
-         service   => 'query-monitor',
+         service   => 'query-history',
          spool_dir => $spool_dir,
          lib_dir   => $tmpdir,
       );
@@ -86,17 +88,17 @@ my $output = output(

 ok(
    no_diff(
-      "cat $tmpdir/spool/query-monitor",
-      "$sample/spool001.txt",
+      "cat $tmpdir/spool/query-history",
+      "$sample/query-history/data001.json",
    ),
-   "1 run: spool data (spool001.txt)"
+   "1 run: spool data (query-history/data001.json)"
 );

 chomp(my $n_files = `ls -1 $spool_dir | wc -l | awk '{print \$1}'`);
 is(
    $n_files,
    1,
-   "1 run: only wrote spool data (spool001.txt)"
+   "1 run: only wrote spool data (query-history/data001.json)"
 ) or diag(`ls -l $spool_dir`);

 is(
@@ -128,12 +130,12 @@ my $run1 = Percona::WebAPI::Resource::Task->new(
    name    => 'query-history',
    number  => '1',
    program => "$trunk/bin/pt-query-digest",
-   options => "--report-format profile __RUN_0_OUTPUT__",
+   options => "--output json __RUN_0_OUTPUT__",
    output  => 'spool',
 );

 $svc0 = Percona::WebAPI::Resource::Service->new(
-   name           => 'query-monitor',
+   name           => 'query-history',
    run_schedule   => '3 * * * *',
    spool_schedule => '4 * * * *',
    tasks          => [ $run0, $run1 ],
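In the two Task definitions above, the pt-query-digest options switch from --report-format profile to --output json, so each run spools machine-readable JSON (the new data001.json sample further down) instead of a human-oriented profile report. A sketch of how a Task's program and options might be combined into the command a run executes (illustrative only; the log path and spool target are placeholders, and pt-agent's run_service builds the real command internally):

    use strict;
    use warnings;

    # Hypothetical task hash mirroring the Task resource fields above.
    my $task = {
       name    => 'query-history',
       number  => '0',
       program => 'pt-query-digest',
       options => '--output json /path/to/slow.log',   # placeholder log path
       output  => 'spool',
    };

    # Compose the command line; "spool/query-history" stands in for the spool dir.
    my $cmd = "$task->{program} $task->{options}";
    $cmd .= ' > spool/query-history' if $task->{output} eq 'spool';

    print "$cmd\n";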
@@ -146,7 +148,7 @@ write_svc_files(
 $output = output(
    sub {
       $exit_status = pt_agent::run_service(
-         service   => 'query-monitor',
+         service   => 'query-history',
          spool_dir => $spool_dir,
          lib_dir   => $tmpdir,
       );
@@ -156,8 +158,8 @@ $output = output(

 ok(
    no_diff(
-      "cat $tmpdir/spool/query-monitor",
-      "$sample/spool001.txt",
+      "cat $tmpdir/spool/query-history",
+      "$sample/query-history/data001.json",
    ),
    "2 runs: spool data"
 );
@@ -1,2 +1,2 @@
-* 8 * * 1,2,3,4,5 pt-agent --run-service query-monitor
-* 9 * * 1,2,3,4,5 pt-agent --send-data query-monitor
+* 8 * * 1,2,3,4,5 pt-agent --run-service query-history
+* 9 * * 1,2,3,4,5 pt-agent --send-data query-history
@@ -1,3 +1,3 @@
 17 3 * * 1 cmd
-* 8 * * 1,2,3,4,5 pt-agent --run-service query-monitor
-* 9 * * 1,2,3,4,5 pt-agent --send-data query-monitor
+* 8 * * 1,2,3,4,5 pt-agent --run-service query-history
+* 9 * * 1,2,3,4,5 pt-agent --send-data query-history
@@ -1,3 +1,3 @@
 17 3 * * 1 cmd
-* 8 * * 1,2,3,4,5 pt-agent --run-service query-monitor
-* 9 * * 1,2,3,4,5 pt-agent --send-data query-monitor
+* 8 * * 1,2,3,4,5 pt-agent --run-service query-history
+* 9 * * 1,2,3,4,5 pt-agent --send-data query-history
@@ -1,2 +1,2 @@
-* 8 * * 1,2,3,4,5 pt-agent --run-service query-monitor
-* 9 * * 1,2,3,4,5 pt-agent --send-data query-monitor
+* 8 * * 1,2,3,4,5 pt-agent --run-service query-history
+* 9 * * 1,2,3,4,5 pt-agent --send-data query-history
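The four crontab samples above all follow the same pattern: a service's run_schedule becomes a "pt-agent --run-service NAME" line and its spool_schedule becomes a "pt-agent --send-data NAME" line, appended after whatever crontab the user already has. A small sketch of that mapping (hypothetical helper; pt-agent's schedule_services does the real work):

    use strict;
    use warnings;

    # Hypothetical service definition with the schedules used in the samples above.
    my $service = {
       name           => 'query-history',
       run_schedule   => '* 8 * * 1,2,3,4,5',
       spool_schedule => '* 9 * * 1,2,3,4,5',
    };

    sub crontab_lines {
       my ($svc) = @_;
       return (
          "$svc->{run_schedule} pt-agent --run-service $svc->{name}",
          "$svc->{spool_schedule} pt-agent --send-data $svc->{name}",
       );
    }

    print "$_\n" for crontab_lines($service);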
73  t/pt-agent/samples/query-history/data001.json  (new file)
@@ -0,0 +1,73 @@
+
+[
+   {
+      "attributes" : {
+         "Lock_time" : {
+            "avg" : "0.009453",
+            "cnt" : "1.000000",
+            "max" : "0.009453",
+            "median" : "0.009453",
+            "min" : "0.009453",
+            "pct" : "0.33",
+            "pct_95" : "0.009453",
+            "stddev" : 0,
+            "sum" : "0.009453"
+         },
+         "Query_time" : {
+            "avg" : "0.018799",
+            "cnt" : "1.000000",
+            "max" : "0.018799",
+            "median" : "0.018799",
+            "min" : "0.018799",
+            "pct" : "0.33",
+            "pct_95" : "0.018799",
+            "stddev" : 0,
+            "sum" : "0.018799"
+         },
+         "Rows_examined" : {
+            "avg" : 0,
+            "cnt" : "1.000000",
+            "max" : "0",
+            "median" : 0,
+            "min" : "0",
+            "pct" : "0.33",
+            "pct_95" : 0,
+            "stddev" : 0,
+            "sum" : 0
+         },
+         "Rows_sent" : {
+            "avg" : 0,
+            "cnt" : "1.000000",
+            "max" : "0",
+            "median" : 0,
+            "min" : "0",
+            "pct" : "0.33",
+            "pct_95" : 0,
+            "stddev" : 0,
+            "sum" : 0
+         },
+         "bytes" : {
+            "value" : 31
+         },
+         "db" : {
+            "value" : "db2"
+         },
+         "host" : {
+            "value" : ""
+         },
+         "pos_in_log" : {
+            "value" : 435
+         },
+         "user" : {
+            "value" : "meow"
+         }
+      },
+      "class" : {
+         "checksum" : "C72BF45D68E35A6E",
+         "cnt" : 1,
+         "fingerprint" : "select min(id),max(id) from tbl",
+         "sample" : "SELECT MIN(id),MAX(id) FROM tbl"
+      }
+   }
+]
+
76  t/pt-agent/samples/query-history/data001.send  (new file)
@@ -0,0 +1,76 @@
+{
+   "hostname" : "prod1",
+   "uuid" : "123"
+}
+--Ym91bmRhcnk=
+[
+   {
+      "attributes" : {
+         "Lock_time" : {
+            "avg" : "0.009453",
+            "cnt" : "1.000000",
+            "max" : "0.009453",
+            "median" : "0.009453",
+            "min" : "0.009453",
+            "pct" : "0.33",
+            "pct_95" : "0.009453",
+            "stddev" : 0,
+            "sum" : "0.009453"
+         },
+         "Query_time" : {
+            "avg" : "0.018799",
+            "cnt" : "1.000000",
+            "max" : "0.018799",
+            "median" : "0.018799",
+            "min" : "0.018799",
+            "pct" : "0.33",
+            "pct_95" : "0.018799",
+            "stddev" : 0,
+            "sum" : "0.018799"
+         },
+         "Rows_examined" : {
+            "avg" : 0,
+            "cnt" : "1.000000",
+            "max" : "0",
+            "median" : 0,
+            "min" : "0",
+            "pct" : "0.33",
+            "pct_95" : 0,
+            "stddev" : 0,
+            "sum" : 0
+         },
+         "Rows_sent" : {
+            "avg" : 0,
+            "cnt" : "1.000000",
+            "max" : "0",
+            "median" : 0,
+            "min" : "0",
+            "pct" : "0.33",
+            "pct_95" : 0,
+            "stddev" : 0,
+            "sum" : 0
+         },
+         "bytes" : {
+            "value" : 31
+         },
+         "db" : {
+            "value" : "db2"
+         },
+         "host" : {
+            "value" : ""
+         },
+         "pos_in_log" : {
+            "value" : 435
+         },
+         "user" : {
+            "value" : "meow"
+         }
+      },
+      "class" : {
+         "checksum" : "C72BF45D68E35A6E",
+         "cnt" : 1,
+         "fingerprint" : "select min(id),max(id) from tbl",
+         "sample" : "SELECT MIN(id),MAX(id) FROM tbl"
+      }
+   }
+]
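data001.send above is the body the mock user agent expects pt-agent to POST: a small JSON header identifying the agent (hostname and uuid), a boundary marker (Ym91bmRhcnk= is "boundary" base64-encoded), and then the spooled JSON data. A rough sketch of assembling such a body (illustrative only; the field names and boundary value are taken from the sample, the helper itself is hypothetical):

    use strict;
    use warnings;
    use JSON;
    use MIME::Base64 qw(encode_base64);

    my $boundary = encode_base64('boundary', '');   # "Ym91bmRhcnk="

    sub build_send_body {
       my (%args) = @_;
       my $meta = JSON->new->pretty->canonical->encode({
          hostname => $args{hostname},
          uuid     => $args{uuid},
       });
       # Meta block, boundary marker, then the already-encoded data payload.
       return $meta . "--$boundary\n" . $args{data};
    }

    print build_send_body(
       hostname => 'prod1',
       uuid     => '123',
       data     => "[]\n",   # stand-in for the spooled JSON array
    );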
@@ -1,13 +0,0 @@
-{
-   "hostname" : "prod1",
-   "id" : "123"
-}
-
---Ym91bmRhcnk=
-[
-   {
-      query_id: 1,
-      arg: "select * from t where id = 1",
-      Query_time: 0.123456,
-   }
-]
@@ -1,8 +1,9 @@
 {
    "links" : {
-      "send_data" : "/query-monitor"
+      "data" : "/query-history/data",
+      "self" : "/query-history"
    },
-   "name" : "query-monitor",
+   "name" : "query-history",
    "run_schedule" : "1 * * * *",
    "spool_schedule" : "2 * * * *",
    "tasks" : [
@@ -1,6 +0,0 @@
-
-# Profile
-# Rank Query ID           Response time Calls R/Call V/M   Item
-# ==== ================== ============= ===== ====== ===== ==========
-#    1 0xC72BF45D68E35A6E  0.0188 95.4%     1 0.0188  0.00 SELECT tbl
-# MISC 0xMISC              0.0009  4.6%     2 0.0005   0.0 <2 ITEMS>
@@ -1,8 +1,9 @@
 {
    "links" : {
-      "send_data" : "/query-monitor"
+      "data" : "/query-history/data",
+      "self" : "/query-history"
    },
-   "name" : "query-monitor",
+   "name" : "query-history",
    "run_schedule" : "1 * * * *",
    "spool_schedule" : "2 * * * *",
    "tasks" : [
@@ -39,7 +39,7 @@ my $run0 = Percona::WebAPI::Resource::Task->new(
 );

 my $svc0 = Percona::WebAPI::Resource::Service->new(
-   name           => 'query-monitor',
+   name           => 'query-history',
    run_schedule   => '* 8 * * 1,2,3,4,5',
    spool_schedule => '* 9 * * 1,2,3,4,5',
    tasks          => [ $run0 ],
@@ -84,8 +84,8 @@ $crontab = `crontab -l 2>/dev/null`;
 is(
    $crontab,
    "* 0 * * * date > /dev/null
-* 8 * * 1,2,3,4,5 pt-agent --run-service query-monitor
-* 9 * * 1,2,3,4,5 pt-agent --send-data query-monitor
+* 8 * * 1,2,3,4,5 pt-agent --run-service query-history
+* 9 * * 1,2,3,4,5 pt-agent --send-data query-history
 ",
    "schedule_services()"
 );
@@ -120,7 +120,7 @@ is(
 # #############################################################################

 $svc0 = Percona::WebAPI::Resource::Service->new(
-   name           => 'query-monitor',
+   name           => 'query-history',
    run_schedule   => '* * * * Foo',  # "foo":0: bad day-of-week
    spool_schedule => '* 8 * * Mon',
    tasks          => [ $run0 ],
@@ -42,7 +42,7 @@ my $links = {
    agents          => '/agents',
    config          => '/agents/1/config',
    services        => '/agents/1/services',
-   'query-monitor' => '/query-monitor',
+   'query-history' => '/query-history',
 };

 $ua->{responses}->{get} = [
@@ -64,14 +64,14 @@ is(
 ) or die;

 my $agent = Percona::WebAPI::Resource::Agent->new(
-   id       => '123',
+   uuid     => '123',
    hostname => 'prod1',
 );

 is_deeply(
    as_hashref($agent),
    {
-      id       => '123',
+      uuid     => '123',
       hostname => 'prod1',
    },
    'Create mock Agent'
@@ -82,13 +82,13 @@ is_deeply(
 # #############################################################################

 my $tmpdir = tempdir("/tmp/pt-agent.$PID.XXXXXX", CLEANUP => 1);
-mkdir "$tmpdir/query-monitor"
-   or die "Cannot mkdir $tmpdir/query-monitor: $OS_ERROR";
+mkdir "$tmpdir/query-history"
+   or die "Cannot mkdir $tmpdir/query-history: $OS_ERROR";
 mkdir "$tmpdir/services"
    or die "Cannot mkdir $tmpdir/services: $OS_ERROR";

-`cp $trunk/$sample/query-monitor/data001 $tmpdir/query-monitor/`;
-`cp $trunk/$sample/service001 $tmpdir/services/query-monitor`;
+`cp $trunk/$sample/query-history/data001.json $tmpdir/query-history/`;
+`cp $trunk/$sample/service001 $tmpdir/services/query-history`;

 $ua->{responses}->{post} = [
    {
@@ -101,7 +101,7 @@ my $output = output(
    pt_agent::send_data(
       client    => $client,
       agent     => $agent,
-      service   => 'query-monitor',
+      service   => 'query-history',
       lib_dir   => $tmpdir,
       spool_dir => $tmpdir,
       json      => $json,  # optional, for testing
@@ -119,22 +119,22 @@ is(
 is_deeply(
    $ua->{requests},
    [
-      'POST /query-monitor',
+      'POST /query-history/data',
    ],
-   "POST to Service.links.send_data"
+   "POST to Service.links.data"
 );

 ok(
    no_diff(
       $client->ua->{content}->{post}->[0] || '',
-      "$sample/query-monitor/data001.send",
+      "$sample/query-history/data001.send",
       cmd_output => 1,
    ),
-   "Sent data file as multi-part resource (query-monitor/data001)"
+   "Sent data file as multi-part resource (query-history/data001)"
 ) or diag(Dumper($client->ua->{content}->{post}));

 ok(
-   !-f "$tmpdir/query-monitor/data001",
+   !-f "$tmpdir/query-history/data001.json",
    "Removed data file after sending successfully"
 );

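The assertions above pin down the --send-data contract after this change: the request goes to the service's data link (POST /query-history/data), the body must match data001.send, and the spooled file is removed only after the POST succeeds. A condensed sketch of that loop (hypothetical code; pt-agent's send_data also handles the agent header, retries, and logging):

    use strict;
    use warnings;
    use File::Spec;

    # Hypothetical client with a post($url, $body) method returning true on success.
    sub send_spooled_data {
       my (%args) = @_;
       my ($client, $service, $spool_dir) = @args{qw(client service spool_dir)};

       my $dir = File::Spec->catdir($spool_dir, $service->{name});
       opendir my $dh, $dir or die "Cannot open $dir: $!";
       for my $file ( grep { -f File::Spec->catfile($dir, $_) } readdir $dh ) {
          my $path = File::Spec->catfile($dir, $file);
          open my $fh, '<', $path or die "Cannot read $path: $!";
          my $data = do { local $/; <$fh> };
          close $fh;

          # POST to the service's data link; only delete the file if that worked.
          if ( $client->post($service->{links}->{data}, $data) ) {
             unlink $path or warn "Cannot remove $path: $!";
          }
       }
       closedir $dh;
       return;
    }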
@@ -72,11 +72,14 @@ my $run0 = Percona::WebAPI::Resource::Task->new(
 );

 my $svc0 = Percona::WebAPI::Resource::Service->new(
-   name           => 'query-monitor',
+   name           => 'query-history',
    run_schedule   => '1 * * * *',
    spool_schedule => '2 * * * *',
    tasks          => [ $run0 ],
-   links          => { send_data => '/query-monitor' },
+   links          => {
+      self => '/query-history',
+      data => '/query-history/data',
+   },
 );

 # Key thing here is that the links are written because