::Deep::JSON;
use strict;
use warnings;
use 5.008_001;
use Test::Deep ();
use Test::Deep::Cmp;
use JSON::MaybeXS;
use Exporter::Lite;
our $VERSION = '0.05';
our @EXPORT = qw(json);
sub json ($) {
lf->{val} = $expected;
}
sub descend {
my ($self, $got) = @_;
my $parsed = eval { decode_json($got) };
if ($@) {
$self->{error} = $@;
return 0;
}
return Test::Deep
ostics(@_);
}
1;
__END__
=head1 NAME
Test::Deep::JSON - Compare JSON with Test::Deep
=head1 SYNOPSIS
use Test::Deep;
use Test::Deep::JSON;
cmp_deeply {
foo => 'bar',
payload =
on::json;
our $DATE = '2018-03-25'; # DATE
our $VERSION = '0.001'; # VERSION
use strict;
use warnings;
use Regexp::Common 'pattern';
use Regexp::Pattern::JSON;
my $re = \%Regexp::Pattern::JSON::RE
name => ['json', $patname],
create => $re->{$patname}{pat},
);
}
1;
# ABSTRACT: Regexp patterns to match JSON
__END__
=pod
=encoding UTF-8
=head1 NAME
Regexp::Common::json - Regexp patterns to match JSON
=head1 VERSION
This document describes version 0.001 of Regexp::Common::json (from Perl distribution Regexp-Common-json), released on 2018-03-25.
=head1 SYNOPSIS
use Regexp::Common qw/json/;
package DBIx::JSON;
use warnings;
use strict;
=head1 NAME
DBIx::JSON - JSON serialization plugin for DBIx
=head1 DESCRIPTION
This module is a DBIx extension to fetch data in JSON format from
dat
Ix::JSON->new( $dsn, "mysql", $dbusername, $dbpasswd )
->do_select("select * from table;")->get_json;
or
my $dsn = "dbname=$dbname;host=$host;port=$port";
my $obj = DBIx::JSON->n
n1", 1);
$obj->err && die $obj->errstr;
print $obj->get_json;
=head1 EXPORT
None.
=cut
use DBI 1.15 ();
use Carp ();
use JSON::Syck;
sub new {
my $class = shift;
my $self = {};
package Mojolicious::Plugin::Vparam::JSON;
use Mojo::Base -strict;
use Mojolicious::Plugin::Vparam::Common qw(decode_json);
use Mojo::JSON;
sub parse_json($) {
my $str = shift;
return undef
e_json $str;
}
# Validate a parsed JSON value for the 'json' vtype.
# Returns 0 (no error) when the value is defined, or the error
# string 'Wrong format' when parsing produced undef.
sub check_json($) {
    my ($value) = @_;
    return defined $value ? 0 : 'Wrong format';
}
# Plugin hook: register the "json" value type with the application.
# The 'pre' callback decodes the raw string; 'valid' then checks the
# decoded result.
sub register {
    my ($class, $plugin, $app, $config) = @_;

    $app->vtype(
        'json',
        pre   => sub { parse_json $_[1] },
        valid => sub { check_json $_[1] },
    );

    return;
}
1;
vice::Geocodio::JSON;
$WebService::Geocodio::JSON::VERSION = '0.05';
# ABSTRACT: A JSON de/serializer class
use Moo::Role;
use strictures 2;
use JSON;
use Carp qw(confess);
has 'json' => (
is =
efault => sub { JSON->new() },
);
# Serialize a Perl structure to a JSON string.
# Delegates to the object's configured JSON engine (the 'json' attribute).
sub encode {
    my ($self, $payload) = @_;
    return $self->json->encode($payload);
}
sub decode {
my ($self, $data) = @_;
return $self->json->decode($data)
WebService::Geocodio::JSON - A JSON de/serializer class
=head1 VERSION
version 0.05
=head1 ATTRIBUTES
=head2 json
A JSON serializer/deserializer instance. Default is L<JSON>.
=head1 METHODS
=h
e APISchema::JSON;
use strict;
use warnings;
use Exporter 'import';
our @EXPORT = qw(encode_json_canonical);
use JSON::XS;
my $json = JSON::XS->new->utf8->canonical(1);
# Encode a Perl value as canonical (sorted-key) UTF-8 JSON, so that
# structurally equal values always serialize to byte-identical strings.
# $json is the file-level JSON::XS encoder configured with ->canonical(1).
sub encode_json_canonical {
    my ($value) = @_;
    # Explicit return rather than relying on the last-expression value.
    return $json->encode($value);
}
1;
package Plack::Middleware::Signposting::JSON;
our $VERSION = '0.05';
use Catmandu::Sane;
use Catmandu;
use Catmandu::Fix;
use JSON qw(decode_json);
use Plack::Request;
use Plack::Util::Accessor qw(f
($res->[1], 'Content-Type') || '';
# only json responses
return unless $content_type =~ m{^application/json|application\/vnd\.api\+json}i;
# ignore streaming response for now
s ref $res->[2] eq 'ARRAY';
my $body = join('', @{$res->[2]});
my $data = decode_json($body);
if (ref $data && ref $data eq 'ARRAY') {
$data = $data->[0];
package JSON::RPC2::AnyEvent::Client;
use 5.008005;
use strict;
use warnings;
use utf8;
use AnyEvent::Handle;
use AnyEvent::HTTP;
use JSON::RPC2::Client;
use Scalar::Util qw(weaken);
our $VERSION =
ce named listed destroy );
sub new {
my $class = shift;
my $self = bless {
client => JSON::RPC2::Client->new(),
call => 'call',
@_,
remappable => {},
cb => {},
{url} ) {
$self->{request_fn} = \&JSON::RPC2::AnyEvent::Client::__request_http;
} else {
$self->__connect_tcp;
$self->{request_fn} = \&JSON::RPC2::AnyEvent::Client::__request_tcp;
:Runner::Command::stats::Logger::JSON::Summary::JSONOutput;
use Moose::Role;
use namespace::autoclean;
use JSON;
with 'HPC::Runner::Command::stats::Logger::JSON::JSONOutput';
sub iter_jobs_summary
n_id}->{jobs}->{$jobname} = $summary;
$self->task_data( {} );
}
push( @{ $self->json_data }, $submission_obj );
}
sub gen_job_tasks_summary {
my $self = shift;
my $jobnam
ner::Command::stats::Logger::JSON::Long::TableOutput;
use Moose::Role;
use namespace::autoclean;
with 'HPC::Runner::Command::stats::Logger::JSON::TableOutput';
use JSON;
use Text::ASCIITable;
sub
ckage HPC::Runner::Command::stats::Logger::JSON::Utils;
use Moose::Role;
use namespace::autoclean;
use JSON;
use Try::Tiny;
use File::Slurp;
# Read every per-task JSON file for one job of one submission and merge
# them into a single hashref keyed by task id.
#
# Args: $submission_id, $jobname (callers in this file invoke it as
# $self->read_json_files($submission_id, $jobname)).
# Returns: hashref of merged task data.
#
# NOTE(review): the original dump discarded $submission_id with a bare
# shift and referenced $jobname without declaring it (a strict-mode
# error), and split read_file($file) across two lines — both look like
# extraction damage; restored from the visible call sites.
sub read_json_files {
    my $self          = shift;
    my $submission_id = shift;    # accepted but unused here; data_dir is presumed already scoped — TODO confirm
    my $jobname       = shift;

    my @json_files =
      glob( File::Spec->catdir( $self->data_dir, $jobname, '*json' ) );

    my $running = {};
    foreach my $file (@json_files) {
        my $running_json = read_file($file);
        my $trun         = decode_json($running_json);

        # Later files overwrite earlier ones on key collision.
        foreach my $key ( keys %{$trun} ) {
            $running->{$key} = $trun->{$key};
        }
    }
    return $running;
}
1;
package HPC::Runner::Command::stats::Logger::JSON::TableOutput;
use Moose::Role;
use namespace::autoclean;
use Text::ASCIITable;
## TODO This one is mostly the same
sub build_table {
my $self =
ge HPC::Runner::Command::stats::Logger::JSON::Summary::TableOutput;
use Moose::Role;
use namespace::autoclean;
with 'HPC::Runner::Command::stats::Logger::JSON::TableOutput';
sub iter_jobs_summary {
PC::Runner::Command::submit_jobs::Logger::JSON;
use Moose::Role;
use namespace::autoclean;
with 'HPC::Runner::Command::execute_job::Logger::Lock';
use JSON;
use File::Spec;
use Data::UUID;
use File
lurp;
use DateTime;
use Capture::Tiny ':all';
=head3 create_json_submission
Create the data for the json submission
=cut
sub create_json_submission {
my $self = shift;
make_path($self->da
_meta;
# my $json_text = encode_json $hpc_meta;
# write_file(File::Spec->catdir($self->data_dir, 'submission.json'), $json_text);
return $hpc_meta;
}
=head3 update_json_submission
Take
package HPC::Runner::Command::stats::Logger::JSON::JSONOutput;
use Moose::Role;
use namespace::autoclean;
use JSON;
has 'json_data' => (
is => 'rw',
isa => 'ArrayRef',
default =
> sub { return [] }
);
# After every submission has been iterated, dump the accumulated
# json_data arrayref to STDOUT as one JSON document, newline-terminated.
after 'iter_submissions' => sub {
    my $self = shift;
    print encode_json( $self->json_data ), "\n";
};
1;
PC::Runner::Command::execute_job::Logger::JSON;
use Moose::Role;
use namespace::autoclean;
with 'HPC::Runner::Command::execute_job::Logger::Lock';
use JSON;
use File::Spec;
use DateTime;
use Try::T
;
use File::Path qw(make_path remove_tree);
use File::Slurp;
use Cwd;
use Time::HiRes;
has 'task_json' => (
is => 'rw',
isa => 'Str',
default => '',
required => 0,
);
has
my $self = shift;
my $job_meta = {};
if ( $self->metastr ) {
$job_meta = decode_json( $self->metastr );
}
if ( !$job_meta || !exists $job_meta->{jobname} ) {
##TO ac
package HPC::Runner::Command::Logger::JSON;
use MooseX::App::Role;
use MooseX::Types::Path::Tiny qw/File Path Paths AbsPath AbsFile/;
use File::Spec;
use Data::UUID;
use DateTime;
use File::Path qw(m
ake_path remove_tree);
use HPC::Runner::Command::Logger::JSON::Archive;
with 'BioSAILs::Utils::Files::CacheDir';
option 'data_dir' => (
is => 'rw',
isa => AbsPath,
l
package HPC::Runner::Command::Logger::JSON::Archive;
use Moose;
use MooseX::NonMoose;
use File::Spec;
use File::Slurp;
use Try::Tiny;
use Path::Tiny;
use Data::Dumper;
use Capture::Tiny ':all';
use
e HPC::Runner::Command::stats::Logger::JSON::Long;
use Moose::Role;
use namespace::autoclean;
with 'HPC::Runner::Command::stats::Logger::JSON::Utils';
use JSON;
use File::Glob;
use File::Slurp;
su
ift;
my $submission_id = shift;
my $jobname = shift;
my $running = $self->read_json_files($submission_id, $jobname);
my $total_tasks = [];
foreach ( sort { $a <=> $b } keys
HPC::Runner::Command::stats::Logger::JSON::Summary;
use Moose::Role;
use namespace::autoclean;
with 'HPC::Runner::Command::stats::Logger::JSON::Utils';
use JSON;
use Try::Tiny;
use File::Slurp;
##
shift;
my $submission_id = shift;
my $jobname = shift;
my $tasks = $self->read_json_files( $submission_id, $jobname );
my $running =
$self->count_running_tasks( $submissio