Group
Extension

Matches 6

Datahub-Factory ( N/NE/NETSENSEI/Datahub-Factory-1.77.tar.gz, NETSENSEI, 2019; MetaCPAN )
Datahub-Factory/lib/Datahub/Factory/Exporter/JSON.pm ( view source; MetaCPAN )
package Datahub::Factory::Exporter::JSON;

use Datahub::Factory::Sane;

our $VERSION = '1.77';

use Moo;
use Catmandu;
use namespace::clean;

with 'Datahub::Factory::Exporter';

sub _build_out {
    # Builder for the lazy 'out' attribute provided by the
    # Datahub::Factory::Exporter role.
    #
    # Returns a Catmandu JSON exporter that serializes records to STDOUT.
    # NOTE(review): the original lines here were garbled in extraction
    # ("m" / "JSON');"); reconstructed from the surviving "JSON')" fragment
    # and the parallel Importer, which calls Catmandu->importer('JSON', ...).
    my $exporter = Catmandu->exporter('JSON');
    return $exporter;
}

sub add {
    # Forward a single record to the underlying Catmandu exporter
    # held in the 'out' attribute (built lazily by _build_out).
    my $self = shift;
    my $item = shift;
    return $self->out->add($item);
}

1;

__END__

=encoding utf-8

=head1 NAME

Datahub::Factory::Exporter::JSON - Export items to JSON
=head1 SYNOPSIS

    use Datahub::Factory;

    my $exporter = Datahub::Factory->exporter('JSON')->new();

    $exporter->add({'id' => 1});

=head1 DESCRIPTION

Convert records to JSON and send them to STDOUT. The records are
converted as-
Datahub-Factory ( N/NE/NETSENSEI/Datahub-Factory-1.77.tar.gz, NETSENSEI, 2019; MetaCPAN )
Datahub-Factory/lib/Datahub/Factory/Importer/JSON.pm ( view source; MetaCPAN )
package Datahub::Factory::Importer::JSON;

use Datahub::Factory::Sane;

our $VERSION = '1.77';

use Moo;
use Catmandu;
use namespace::clean;

with 'Datahub::Factory::Importer';

has file_name => (is =
 Catmandu->importer('JSON', file => $self->file_name);

    return $importer;
}

1;

__END__

=encoding utf-8

=head1 NAME

Datahub::Factory::Importer::JSON - Import data from JSON flat file data dump
=head1 SYNOPSIS

    use Datahub::Factory;
    use Data::Dumper qw(Dumper);

    my $json = Datahub::Factory->importer('JSON')->new(
        file_name => '/tmp/export.json',
    );

    $json->importer->each(sub {
        my $item = shift
Datahub-Factory ( N/NE/NETSENSEI/Datahub-Factory-1.77.tar.gz, NETSENSEI, 2019; MetaCPAN )
Datahub-Factory/lib/Datahub/Factory/Env.pm ( view source; MetaCPAN )
       if grep {-r File::Spec->catfile($path, $_)}
            grep /^datahubfactory.+(?:yaml|yml|json|pl)$/, readdir $dh;
    }
    Datahub::Factory->default_load_path;
}

has load_paths => (
    is 
$_;
            map {File::Spec->catfile($dir, "datahubfactory*.$_")}
                qw(yaml yml json pl)
        } reverse @config_dirs;

        my $config = Config::Onion->new(prefix_key => '_pref
Datahub-Factory ( N/NE/NETSENSEI/Datahub-Factory-1.77.tar.gz, NETSENSEI, 2019; MetaCPAN )
Datahub-Factory/lib/Datahub/Factory/Introduction.pod ( view source; MetaCPAN )
dhconveyor transport -g general.ini -i importer.ini -f fixer.ini -e exporter.ini

    # Pushing a JSON file to a search index (Solr)
    $ dhconveyor index -p solr.ini

    # Pretty output
    $ dhcon
Datahub-Factory ( N/NE/NETSENSEI/Datahub-Factory-1.77.tar.gz, NETSENSEI, 2019; MetaCPAN )
Datahub-Factory/lib/Datahub/Factory/Indexer/Solr.pm ( view source; MetaCPAN )
1.77';

use Moo;
use Catmandu;
use HTTP::Request::Common;
use HTTP::Request::StreamingUpload;
use JSON;
use LWP::UserAgent;
use URI::URL;
use XML::LibXML;
use namespace::clean;

with 'Datahub::Factory
sub index {
	my $self = shift;

    my ($request_handler, $response, $request); 

    # Index the JSON data

    $request_handler = url $self->{request_handler};
    $request_handler->equery('commit=t
ation/json',
            'Content-Length' => -s $self->{file_name},
        ),
    );

    $response = $self->out->request($request);

    if ($response->is_success) {
        return decode_json($resp
Datahub-Factory ( N/NE/NETSENSEI/Datahub-Factory-1.77.tar.gz, NETSENSEI, 2019; MetaCPAN )
Datahub-Factory/lib/Datahub/Factory/Command/index.pm ( view source; MetaCPAN )
r_Solr]
    request_handler = http://path_to_solr_data_import_handler
    file_name = /tmp/upload.json

All plugins have their own configuration options in sections called C<[plugin_type_name]> where 

Powered by Groonga
Maintained by Kenichi Ishigaki <ishigaki@cpan.org>. If you find anything, submit it on GitHub.