Convert::Pheno::Utils::Mapping;
use Convert::Pheno::CSV;
use Convert::Pheno::RDF qw(do_bff2jsonld do_pxf2jsonld);
use Convert::Pheno::OMOP;
use Convert::Pheno::PXF;
use Convert::Pheno::Bff2Pxf;
use Co
CDISC;
use Convert::Pheno::REDCap;
use Exporter 'import';
our @EXPORT =
qw($VERSION io_yaml_or_json omop2bff_stream_processing share_dir); # Symbols imported by default
#our @EXPORT_OK = qw(foo
#####
#############
# Entry point for BFF -> PXF conversion.
# Simply delegates to <array_dispatcher>, which also takes care of
# inputs that arrive as JSON arrays (one record per element).
sub bff2pxf {
    my ($self) = @_;
    return $self->array_dispatcher;
}
#############
#############
# BFF2CSV #
#########
e warnings;
use autodie;
use feature qw(say);
use Moo;
use File::Spec::Functions qw(catfile);
use JSON::Validator;
use Term::ANSIColor qw(:constants);
use Convert::Pheno::IO::FileIO;
# Declare attrib
ter an object is created
# Moo BUILD hook: runs automatically right after object construction.
# Reads the schema file (YAML or JSON, dispatched on extension by
# io_yaml_or_json) and stores the decoded structure on the object.
sub BUILD {
    my ($self) = @_;
    my $schema = io_yaml_or_json(
        { filepath => $self->{schema_file}, mode => 'read' } );
    $self->{schema} = $schema;    # setter
}
##################
##############
# SCHEMA VALIDATION #
#########################
#########################
sub json_validate {
my $self = shift;
my $data = $self->{data};
my $schema = $self->{schem
are usually split in files:
# - phenopacket.json ( usually - 1 individual per file)
# - cohort.json (info on multiple individuals)
# - family.json (info related to one or multiple individuals).
the root level (/).
#
# However, top-elements might be combined into a single file (e.g., pxf.json),
# as a result, certain files may contain objects for top-level elements:
# - /phenopacket
e = shift;
print Dumper $value;
return JSON::XS::true
if $value && $value ne 'false'; # Non-empty string and not 'false'
return JSON::XS::false; # Empty, 'false', or
e warnings;
use autodie;
use feature qw(say);
use Moo;
use File::Spec::Functions qw(catfile);
use JSON::Validator;
use Term::ANSIColor qw(:constants);
use Convert::Pheno::IO::FileIO;
# Declare attrib
er an object is created
# Moo BUILD hook: runs automatically after the object is created.
# Loads the schema from {schema_file} (YAML or JSON, decided by file
# extension inside io_yaml_or_json) and stashes the decoded structure
# in {schema} for later use (presumably by json_validate — confirm).
sub BUILD {
my $self = shift;
# mode => 'read' makes io_yaml_or_json slurp and decode the file
$self->{schema} =
io_yaml_or_json( { filepath => $self->{schema_file}, mode => 'read' } )
; # setter
}
##################
##############
# SCHEMA VALIDATION #
#########################
#########################
sub json_validate {
my $self = shift;
my $data = $self->{data};
my $schema = $self->{sche
ngs;
use autodie;
#use Carp qw(confess);
use feature qw(say);
use utf8;
use Data::Dumper;
use JSON::XS;
use Time::HiRes qw(gettimeofday);
use POSIX qw(strftime);
use Scalar::Util qw(looks_
(shift);
return
( $val eq 'true' || $val eq 'yes' ) ? JSON::XS::true
: ( $val eq 'false' || $val eq 'no' ) ? JSON::XS::false
: undef;
n = 'JSON::PP'; # use JSON::PP::Boolean objects
use JSON::XS;
use Sort::Naturally qw(nsort);
use Data::Leaf::Walker;
use Exporter 'import';
our @EXPORT = qw(read_json read_yaml io_yaml_or_json writ
#
#########################
#########################
# Read a UTF-8 encoded JSON file and return the decoded Perl data
# structure.
#
# Parameter : file path (string or Path::Tiny object)
# Returns   : decoded Perl data structure (hashref/arrayref)
# Dies      : on unreadable file (via Path::Tiny) or malformed JSON.
#             The JSON error now carries the offending file path so the
#             caller can tell which of several input files failed.
sub read_json {
    my $file = shift;
    my $str  = path($file)->slurp_utf8;
    my $data = eval { decode_json($str) };    # Decode to Perl data structure
    die "Could not decode JSON from <$file>: $@" if $@;
    return $data;
}
sub read_yaml {
_or_json {
my $arg = shift;
my $file = $arg->{filepath};
my $mode = $arg->{mode};
my $data = $mode eq 'write' ? $arg->{data} : undef;
# Checking only for qw(.yaml .yml .json)
? JSON::XS::false : JSON::XS::true;
$is_boolean++;
}
# Any other string is excluded = 0 (i.e., included)
else {
$phenotypicFeature->{excluded} = JSON::
atureType' to 'type'
excluded => (exists $_->{excluded} ? delete $_->{excluded} : JSON::PP::false),
# _notes => $_->{notes}
ing;
use strict;
use warnings;
use autodie;
use feature qw(say);
use utf8;
use Data::Dumper;
use JSON::XS;
use Time::HiRes qw(gettimeofday);
use POSIX qw(strftime);
use DateTime::Format::ISO860
(shift);
return
( $val eq 'true' || $val eq 'yes' ) ? JSON::XS::true
: ( $val eq 'false' || $val eq 'no' ) ? JSON::XS::false
: undef;
exists $_->{excluded}
? delete $_->{excluded}
: JSON::PP::false
),
# _notes => $_->{notes}
}
} @{
{
my $obs;
next
if ( $feature->{excluded} && $feature->{excluded} == JSON::PP::true );
$obs->{observation_id} = ++$OBSERVATION_ID_COUNT;
# e.g., $feature
n = 'JSON::PP'; # use JSON::PP::Boolean objects
use JSON::XS;
use Sort::Naturally qw(nsort);
use Data::Leaf::Walker;
use Exporter 'import';
our @EXPORT = qw(read_json read_yaml io_yaml_or_json writ
#
#########################
#########################
# Slurp a UTF-8 JSON file and decode it into a Perl data structure.
# Dies (via Path::Tiny / the JSON decoder) on I/O or parse failure.
sub read_json {
    my $filepath = shift;
    return decode_json( path($filepath)->slurp_utf8 );
}
sub read_yaml {
; # revert floatings getting stringified by YAML::XS
return $data;
}
sub io_yaml_or_json {
my $arg = shift;
my $file = $arg->{filepath};
my $mode = $arg->{mode};
my $dat
ile
my $data_mapping_file =
io_yaml_or_json( { filepath => $arg->{mapping_file}, mode => 'read' } );
# Validate mapping file against JSON schema
my $jv = Convert::Pheno::Utils::Sche
>{self_validate_schema},
schema_file => $arg->{schema_file}
}
);
$jv->json_validate;
# Remap for quick lookup
remap_assignTermIdFromHeader($data_mapping_file);
self, $data );
# Return JSON string
# - canonical has some overhead but needed for t/)
# - $fh is already utf-8, no need to encode again here
return JSON::XS->new->canonical->encode
##########
use Convert::Pheno;
# Define method
my $method = 'pxf2bff';
# Define data
my $my_pxf_json_data = {
"phenopacket" => {
"id" => "P0007500",
"subject" => {
}
}
} ;
# Create object
my $convert = Convert::Pheno->new(
{
data => $my_pxf_json_data,
method => $method
}
);
# Run method and store result in hashref
my $hashref =