/parse_exit mod2file/;
use Test2::Util::UUID qw/gen_uuid/;
use Test2::Harness::Util::JSON qw/encode_json decode_json/;
use Plack::Runner;
use DBIx::QuickDB;
use App::Yath::Server::Plack;
use App::Ya
ncode_json({
schema_config => $self->{+SCHEMA_CONFIG},
launcher_options => \@options,
}),
);
}
sub _do_server_post_exec {
my $class = shift;
my ($json) = @
_;
$0 = "yath-web-server";
my $data = decode_json($json);
my $r = Plack::Runner->new;
$r->parse_options(@{$data->{launcher_options}});
my $app = App::Yath::Server::Plack->new(
ct ipc_warn ipc_loop pid_is_running inflate set_procname/;
use Test2::Harness::Util::JSON qw/decode_json encode_json/;
use Test2::Harness::Collector::Preloaded;
use Test2::Harness::Util::HashBase qw
STDERR->autoflush(1);
my $ok = eval {
my ($json) = @ARGV;
my $self = $class->new(decode_json($json));
my $got = $self->start();
$EXIT = $go
{pkg}\::EXIT // 255)", # Run it.
encode_json(\%params), # json data for job
]);
return $pid;
}
sub ipc {
est2::Harness::IPC::Util qw/check_pipe ipc_warn pid_is_running/;
use Test2::Harness::Util::JSON qw/decode_json/;
use Test2::Harness::Instance::Message;
use Test2::Harness::Instance::Request;
use Test
;
my $json = $self->{+READ_PIPE}->read_message;
next if !$json && $! == EINTR;
last unless $json;
my $msg;
unless (eval { $msg = decode_json($json); 1 }) {
ipc_warn(error => $@, input_json => $json, input => $msg);
next;
}
$count++;
if (my $class = $msg->{class}) {
require(mod2file($class));
onents(
"InflateColumn::DateTime",
"InflateColumn::Serializer",
"InflateColumn::Serializer::JSON",
);
__PACKAGE__->table("sweeps");
__PACKAGE__->add_columns(
"sweep_id",
{ data_type => "bigi
h::Schema::Util qw/schema_config_from_settings format_duration/;
use Test2::Harness::Util::JSON qw/decode_json/;
use App::Yath::Schema::RunProcessor;
use parent 'App::Yath::Command';
use Test2::Harn
ath database" }
# Help-system grouping for this command: listed under both
# the "database" and "log parsing" categories.
sub group {
    my @categories = ("database", 'log parsing');
    return [@categories];
}
# Argument synopsis shown in the command's usage/help output.
sub cli_args {
    return "[--] event_log.jsonl[.gz|.bz2]";
}
# One-line human-readable summary used by the help system.
sub description {
    return "Publish a log file directly to a yath database";
}
sub run {
id log file" unless -f $file;
die "'$file' does not look like a log file" unless $file =~ m/\.jsonl(\.(gz|bz2))?$/;
my $lines = 0;
my $fh;
if ($file =~ m/\.bz2$/) {
$fh = IO::
e;
use strict;
use warnings;
our $VERSION = '2.000006'; # TRIAL
use Test2::Harness::Util::File::JSON;
use Scalar::Util qw/weaken/;
use Time::HiRes qw/time/;
use List::Util qw/first min sum0 max/;
us
urn $self->init_state unless -e $self->{+STATE_FILE};
my $file = Test2::Harness::Util::File::JSON->new(name => $self->{+STATE_FILE});
my ($ok, $err);
for (1 .. 5) {
my $state;
umask($self->{+STATE_UMASK});
my $ok = eval {
my $file = Test2::Harness::Util::File::JSON->new(name => $self->{+STATE_FILE});
$file->rewrite($state_copy);
1;
};
my
lp information
=item -p key=val
=item -p=key=val
=item -pkey=value
=item -p '{"json":"hash"}'
=item -p='{"json":"hash"}'
=item -p:{ KEY1 VAL KEY2 :{ VAL1 VAL2 ... }: ... }:
=item -p :{ KEY1 VA
l
=item --plugins=key=val
=item --plugin '{"json":"hash"}'
=item --plugin='{"json":"hash"}'
=item --plugins '{"json":"hash"}'
=item --plugins='{"json":"hash"}'
=item --plugin :{ KEY1 VAL KEY2 :{
--scan-options key=val
=item --scan-options=key=val
=item --scan-options '{"json":"hash"}'
=item --scan-options='{"json":"hash"}'
=item --scan-options(?^:^--(no-)?(?^:scan-(.+))$)
=item --scan-op
lp information
=item -p key=val
=item -p=key=val
=item -pkey=value
=item -p '{"json":"hash"}'
=item -p='{"json":"hash"}'
=item -p:{ KEY1 VAL KEY2 :{ VAL1 VAL2 ... }: ... }:
=item -p :{ KEY1 VA
l
=item --plugins=key=val
=item --plugin '{"json":"hash"}'
=item --plugin='{"json":"hash"}'
=item --plugins '{"json":"hash"}'
=item --plugins='{"json":"hash"}'
=item --plugin :{ KEY1 VAL KEY2 :{
--scan-options key=val
=item --scan-options=key=val
=item --scan-options '{"json":"hash"}'
=item --scan-options='{"json":"hash"}'
=item --scan-options(?^:^--(no-)?(?^:scan-(.+))$)
=item --scan-op
arent 'App::Yath::Command';
use Test2::Harness::Util::HashBase;
use Test2::Harness::Util::JSON qw/decode_json/;
use LWP;
use LWP::UserAgent;
use Getopt::Yath;
include_options(
'App::Yath::Optio
log file" unless -f $log;
die "'$log' does not look like a log file" unless $log =~ m/\.jsonl(\.(gz|bz2))?$/;
my $api_key = $settings->webclient->api_key or die "No API key was specifie
,
json => 1,
log_file => [$log],
],
);
if ($res->is_success) {
my $json = $res->decoded_content;
my $data = decode_json($json);
pr
04';
use Test2::Harness::Util qw/clean_path/;
use Test2::Harness::Util::File::JSONL;
use Test2::Harness::Util::File::JSON;
use Cwd qw/getcwd/;
use parent 'App::Yath::Command';
use Test2::Harness::U
a duration json file, if no path is provided 'duration.json' will be used. The .json extension is added automatically if omitted.",
long_examples => ['', '=/path/to/durations.json'],
autofill => sub { clean_path('durations.json') },
normalize => sub {
my $val = shift;
$val .= '.json' unless $val =~ m/\.json$/;
return clean_path($val);
ict;
use warnings;
our $VERSION = '2.000004';
use Term::Table;
use Test2::Harness::Util::JSON qw/decode_json/;
use App::Yath::Schema::Util qw/schema_config_from_settings/;
use parent 'App::Yath::Co
,
);
my $data = [];
while (my $run = $runs->next) {
push @$data => $run->TO_JSON;
}
return undef unless @$data;
return $data;
}
sub get_from_http {
my $self =
get recent runs from '$url'\n$res->{status}: $res->{reason}\n$res->{content}\n"
unless $res->{success};
return decode_json($res->{content});
}
1;
__END__
=head1 POD IS AUTO-GENERATED
onents(
"InflateColumn::DateTime",
"InflateColumn::Serializer",
"InflateColumn::Serializer::JSON",
);
__PACKAGE__->table("versions");
__PACKAGE__->add_columns(
"version",
{ data_type => "dec
onents(
"InflateColumn::DateTime",
"InflateColumn::Serializer",
"InflateColumn::Serializer::JSON",
);
__PACKAGE__->table("resource_types");
__PACKAGE__->add_columns(
"resource_type_id",
{ da
onents(
"InflateColumn::DateTime",
"InflateColumn::Serializer",
"InflateColumn::Serializer::JSON",
);
__PACKAGE__->table("hosts");
__PACKAGE__->add_columns(
"host_id",
{ data_type => "bigint
onents(
"InflateColumn::DateTime",
"InflateColumn::Serializer",
"InflateColumn::Serializer::JSON",
);
__PACKAGE__->table("projects");
__PACKAGE__->add_columns(
"project_id",
{ data_type => "
onents(
"InflateColumn::DateTime",
"InflateColumn::Serializer",
"InflateColumn::Serializer::JSON",
);
__PACKAGE__->table("log_files");
__PACKAGE__->add_columns(
"log_file_id",
{ data_type =>
onents(
"InflateColumn::DateTime",
"InflateColumn::Serializer",
"InflateColumn::Serializer::JSON",
);
__PACKAGE__->table("coverage_manager");
__PACKAGE__->add_columns(
"coverage_manager_id",
onents(
"InflateColumn::DateTime",
"InflateColumn::Serializer",
"InflateColumn::Serializer::JSON",
);
__PACKAGE__->table("email");
__PACKAGE__->add_columns(
"email_id",
{ data_type => "bigin
onents(
"InflateColumn::DateTime",
"InflateColumn::Serializer",
"InflateColumn::Serializer::JSON",
);
__PACKAGE__->table("resources");
__PACKAGE__->add_columns(
"event_uuid",
{ data_type =>
},
"resource_ord",
{ data_type => "integer", is_nullable => 0 },
"data",
{ data_type => "json", is_nullable => 0 },
);
__PACKAGE__->set_primary_key("resource_id");
__PACKAGE__->add_unique_con
onents(
"InflateColumn::DateTime",
"InflateColumn::Serializer",
"InflateColumn::Serializer::JSON",
);
__PACKAGE__->table("jobs");
__PACKAGE__->add_columns(
"job_uuid",
{ data_type => "binary