From 3ed371ebd94f56bd7825365062ce4e49c53d0e8a Mon Sep 17 00:00:00 2001
From: David Cook
Date: Thu, 2 Mar 2017 18:58:52 +1100
Subject: [PATCH] Bug 10662 - Build OAI-PMH Harvesting Client
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

This patch adds an OAI-PMH harvesting client to Koha. The client runs
as a daemon in the background. Users interact with it via the Koha web
user interface, which communicates with the daemon over a Unix socket
using a simple JSON-based protocol.

The harvester ingests MARCXML. You can harvest other metadata formats,
but you must use an XSLT to transform them into MARCXML if you want
them to be imported into Koha.

You can supply your own download and import modules via the
oai-pmh-harvester.yaml configuration file, but the default modules
supplied in this patch should be sufficient for most purposes. If they
are not, raise a Bugzilla issue.

There is a cleanup_database.pl addition, because high-volume harvesting
will fill the oai_harvester_import_queue table quickly. This table is
not required for adding/updating records; it exists mostly for
monitoring and audit purposes.

Signed-off-by: Andreas Hedström Mace
---
 C4/Installer/PerlDependencies.pm | 10 +
 Koha/Daemon.pm | 152 +++++
 Koha/OAI/Harvester.pm | 652 +++++++++++++++++++++
 Koha/OAI/Harvester/Client.pm | 177 ++++++
 Koha/OAI/Harvester/Downloader.pm | 308 ++++++++++
 Koha/OAI/Harvester/Import/MARCXML.pm | 140 +++++
 Koha/OAI/Harvester/Import/Record.pm | 301 ++++++++++
 Koha/OAI/Harvester/Listener.pm | 187 ++++++
 Koha/OAI/Harvester/Request.pm | 184 ++++++
 Koha/OAI/Harvester/Requests.pm | 62 ++
 Koha/OAI/Harvester/Worker.pm | 156 +++++
 Koha/OAI/Harvester/Worker/Download/Stream.pm | 190 ++++++
 Koha/OAI/Harvester/Worker/Import.pm | 133 +++++
 Koha/Schema/Result/OaiHarvesterBiblio.pm | 120 ++++
 Koha/Schema/Result/OaiHarvesterHistory.pm | 163 ++++++
 Koha/Schema/Result/OaiHarvesterImportQueue.pm | 106 ++++
 Koha/Schema/Result/OaiHarvesterRequest.pm | 209 +++++++
 Makefile.PL | 14 +
 debian/scripts/koha-create | 7 +
 debian/scripts/koha-create-dirs | 3 +
 debian/templates/koha-conf-site.xml.in | 1 +
 debian/templates/oai-pmh-harvester-site.yaml.in | 22 +
 etc/koha-conf.xml | 1 +
 etc/oai-pmh-harvester.yaml | 22 +
 installer/data/mysql/atomicupdate/bug_10662.sql | 73 +++
 installer/data/mysql/kohastructure.sql | 74 +++
 .../intranet-tmpl/prog/en/includes/tools-menu.inc | 35 +-
 .../modules/tools/oai-pmh-harvester/dashboard.tt | 369 ++++++++++++
 .../en/modules/tools/oai-pmh-harvester/record.tt | 23 +
 .../en/modules/tools/oai-pmh-harvester/request.tt | 241 ++++++++
 .../prog/en/modules/tools/tools-home.tt | 5 +
 .../intranet-tmpl/prog/en/xslt/StripOAIPMH.xsl | 28 +
 misc/cronjobs/cleanup_database.pl | 15 +-
 misc/harvesterd.pl | 207 +++++++
 rewrite-config.PL | 3 +
 skel/var/lib/koha/oai-pmh-harvester/README | 0
 skel/var/run/koha/oai-pmh-harvester/README | 0
 skel/var/spool/koha/oai-pmh-harvester/README | 0
 svc/oai-pmh-harvester/history | 132 +++++
 tools/oai-pmh-harvester/dashboard.pl | 134 +++++
 tools/oai-pmh-harvester/record.pl | 53 ++
 tools/oai-pmh-harvester/request.pl | 142 +++++
 42 files changed, 4837 insertions(+), 17 deletions(-)
 create mode 100644 Koha/Daemon.pm
 create mode 100644 Koha/OAI/Harvester.pm
 create mode 100644 Koha/OAI/Harvester/Client.pm
 create mode 100644 Koha/OAI/Harvester/Downloader.pm
 create mode 100755 Koha/OAI/Harvester/Import/MARCXML.pm
 create mode 100755 Koha/OAI/Harvester/Import/Record.pm
 create mode 100644 Koha/OAI/Harvester/Listener.pm
 create mode 
100644 Koha/OAI/Harvester/Request.pm create mode 100644 Koha/OAI/Harvester/Requests.pm create mode 100644 Koha/OAI/Harvester/Worker.pm create mode 100644 Koha/OAI/Harvester/Worker/Download/Stream.pm create mode 100644 Koha/OAI/Harvester/Worker/Import.pm create mode 100755 Koha/Schema/Result/OaiHarvesterBiblio.pm create mode 100755 Koha/Schema/Result/OaiHarvesterHistory.pm create mode 100755 Koha/Schema/Result/OaiHarvesterImportQueue.pm create mode 100644 Koha/Schema/Result/OaiHarvesterRequest.pm create mode 100644 debian/templates/oai-pmh-harvester-site.yaml.in create mode 100644 etc/oai-pmh-harvester.yaml create mode 100644 installer/data/mysql/atomicupdate/bug_10662.sql create mode 100644 koha-tmpl/intranet-tmpl/prog/en/modules/tools/oai-pmh-harvester/dashboard.tt create mode 100644 koha-tmpl/intranet-tmpl/prog/en/modules/tools/oai-pmh-harvester/record.tt create mode 100644 koha-tmpl/intranet-tmpl/prog/en/modules/tools/oai-pmh-harvester/request.tt create mode 100755 koha-tmpl/intranet-tmpl/prog/en/xslt/StripOAIPMH.xsl create mode 100755 misc/harvesterd.pl create mode 100644 skel/var/lib/koha/oai-pmh-harvester/README create mode 100644 skel/var/run/koha/oai-pmh-harvester/README create mode 100644 skel/var/spool/koha/oai-pmh-harvester/README create mode 100755 svc/oai-pmh-harvester/history create mode 100755 tools/oai-pmh-harvester/dashboard.pl create mode 100755 tools/oai-pmh-harvester/record.pl create mode 100755 tools/oai-pmh-harvester/request.pl diff --git a/C4/Installer/PerlDependencies.pm b/C4/Installer/PerlDependencies.pm index 2525a6b334..68a31bc725 100644 --- a/C4/Installer/PerlDependencies.pm +++ b/C4/Installer/PerlDependencies.pm @@ -893,6 +893,16 @@ our $PERL_DEPS = { required => '1', min_ver => '0.37', }, + 'POE' => { + 'usage' => 'OAI-PMH harvester', + 'required' => 1, + 'min_ver' => '1.354', + }, + 'POE::Component::JobQueue' => { + 'usage' => 'OAI-PMH harvester', + 'required' => 1, + 'min_ver' => '0.570', + }, }; 1; diff --git a/Koha/Daemon.pm b/Koha/Daemon.pm new file mode 100644 index 0000000000..cdae369225 --- /dev/null +++ b/Koha/Daemon.pm @@ -0,0 +1,152 @@ +package Koha::Daemon; + +# Copyright 2017 Prosentient Systems +# +# This file is part of Koha. +# +# Koha is free software; you can redistribute it and/or modify it +# under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 3 of the License, or +# (at your option) any later version. +# +# Koha is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Koha; if not, see . +# + +use Modern::Perl; +use POSIX; #For daemonizing +use Fcntl qw(:flock); #For pidfile + +sub new { + my ($class, $args) = @_; + $args = {} unless defined $args; + return bless ($args, $class); +} + +####################################################################### +#NOTE: On Debian, you can use the daemon binary to make a process into a daemon, +# but the following can be used if you don't want to use that program. + +sub daemonize { + my ($self) = @_; + + my $pid = fork; + + die "Couldn't fork: $!" 
unless defined($pid); + if ($pid){ + exit; #Parent exit + } + + #Become a session leader (ie detach program from controlling terminal) + POSIX::setsid() or die "Can't start a new session: $!"; + + #Change to known system directory + chdir('/'); + + #Reset the file creation mask so only the daemon owner can read/write files it creates + umask(066); + + #Close inherited file handles, so that you can truly run in the background. + open STDIN, '<', '/dev/null'; + open STDOUT, '>', '/dev/null'; + open STDERR, '>&STDOUT'; +} + +sub log_to_file { + my ($self,$logfile) = @_; + + #Open a filehandle to append to a log file + my $opened = open(my $fh, '>>', $logfile); + if ($opened){ + $fh->autoflush(1); #Make filehandle hot (ie don't buffer) + *STDOUT = *$fh; #Re-assign STDOUT to LOG | --stdout + *STDERR = *STDOUT; #Re-assign STDERR to STDOUT | --stderr + } + else { + die "Unable to open a filehandle for $logfile: $!\n"; # --output + } +} + +sub make_pidfilehandle { + my ($self,$pidfile) = @_; + if ( ! -f $pidfile ){ + open(my $fh, '>', $pidfile) or die "Unable to write to $pidfile: $!\n"; + close($fh); + } + open(my $pidfilehandle, '+<', $pidfile) or die "Unable to open a filehandle for $pidfile: $!\n"; + return $pidfilehandle; +} + +sub get_pidfile { + my ($self,$pidfile) = @_; + #NOTE: We need to save the filehandle in the object, so any locks persist for the life of the object + my $pidfilehandle = $self->{pidfilehandle} ||= $self->make_pidfilehandle($pidfile); + return $pidfilehandle; +} + +sub lock_pidfile { + my ($self,$pidfilehandle) = @_; + my $locked; + if (flock($pidfilehandle, LOCK_EX|LOCK_NB)){ + $locked = 1; + + } + return $locked; +} + +sub write_pidfile { + my ($self,$pidfilehandle) = @_; + if ($pidfilehandle){ + truncate($pidfilehandle, 0); + print $pidfilehandle $$."\n" or die $!; + #Flush the filehandle so you're not suffering from buffering + $pidfilehandle->flush(); + return 1; + } +} + +sub run { + my ($self) = @_; + my $pidfile = $self->{pidfile}; + my $logfile = $self->{logfile}; + + if ($pidfile){ + my $pidfilehandle = $self->get_pidfile($pidfile); + if ($pidfilehandle){ + my $locked = $self->lock_pidfile($pidfilehandle); + if ( ! $locked ) { + die "$0 is unable to lock pidfile...\n"; + } + } + } + + if (my $configure = $self->{configure}){ + $configure->($self); + } + + if ($self->{daemonize}){ + $self->daemonize(); + } + + if ($pidfile){ + my $pidfilehandle = $self->get_pidfile($pidfile); + if ($pidfilehandle){ + $self->write_pidfile($pidfilehandle); + } + } + + if ($logfile){ + $self->log_to_file($logfile); + } + + if (my $loop = $self->{loop}){ + $loop->($self); + } +} + +1; diff --git a/Koha/OAI/Harvester.pm b/Koha/OAI/Harvester.pm new file mode 100644 index 0000000000..86619faa29 --- /dev/null +++ b/Koha/OAI/Harvester.pm @@ -0,0 +1,652 @@ +package Koha::OAI::Harvester; + +# Copyright 2017 Prosentient Systems +# +# This file is part of Koha. +# +# Koha is free software; you can redistribute it and/or modify it +# under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 3 of the License, or +# (at your option) any later version. +# +# Koha is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Koha; if not, see . 
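For reference, the Koha::Daemon wrapper above is driven entirely by the hashref passed to its constructor; a minimal usage sketch (the file paths and callbacks here are illustrative, not part of the patch) looks like this:

    use Koha::Daemon;

    my $daemon = Koha::Daemon->new({
        pidfile   => '/var/run/koha/oai-pmh-harvester/harvester.pid', # assumed path
        logfile   => '/var/log/koha/oai-pmh-harvester.log',           # assumed path
        daemonize => 1,            # fork, setsid, and detach from the terminal
        configure => sub {         # runs once, after the pidfile lock is taken
            my ($self) = @_;
        },
        loop      => sub {         # long-running work, e.g. POE::Kernel->run()
            my ($self) = @_;
        },
    });
    $daemon->run();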
+# + +use Modern::Perl; +use POE qw(Component::JobQueue); +use JSON; +use Sereal::Encoder; +use Sereal::Decoder; +use IO::Handle; +use File::Copy; +use File::Path qw(make_path remove_tree); +use DateTime; +use DateTime::Format::Strptime; + +use C4::Context; +use Koha::Database; + +=head1 API + +=head2 Class Methods + +=cut + +my $day_granularity = DateTime::Format::Strptime->new( + pattern => '%F', +); +my $seconds_granularity = DateTime::Format::Strptime->new( + pattern => '%FT%TZ', +); + +sub new { + my ($class, $args) = @_; + $args = {} unless defined $args; + return bless ($args, $class); +} + +sub spawn { + my ($class, $args) = @_; + my $self = $class->new($args); + my $downloader = $self->{Downloader}; + my $importer = $self->{Importer}; + my $download_worker_limit = ( $self->{DownloaderWorkers} && int($self->{DownloaderWorkers}) ) ? $self->{DownloaderWorkers} : 1; + my $import_worker_limit = ( $self->{ImporterWorkers} && int($self->{ImporterWorkers}) ) ? $self->{ImporterWorkers} : 1; + my $import_queue_poll = ( $self->{ImportQueuePoll} && int($self->{ImportQueuePoll}) ) ? $self->{ImportQueuePoll} : 5; + + #NOTE: This job queue should always be created before the + #harvester so that you can start download jobs immediately + #upon spawning the harvester. + POE::Component::JobQueue->spawn( + Alias => 'oai-downloader', + WorkerLimit => $download_worker_limit, + Worker => sub { + my ($postback, $task) = @_; + if ($downloader){ + if ($task->{status} eq "active"){ + $downloader->run({ + postback => $postback, + task => $task, + }); + } + } + }, + Passive => {}, + ); + + POE::Session->create( + object_states => [ + $self => { + _start => "on_start", + get_task => "get_task", + list_tasks => "list_tasks", + create_task => "create_task", + start_task => "start_task", + stop_task => "stop_task", + delete_task => "delete_task", + repeat_task => "repeat_task", + register => "register", + deregister => "deregister", + restore_state => "restore_state", + save_state => "save_state", + is_task_finished => "is_task_finished", + does_task_repeat => "does_task_repeat", + download_postback => "download_postback", + reset_imports_status => "reset_imports_status", + }, + ], + ); + + POE::Component::JobQueue->spawn( + Alias => 'oai-importer', + WorkerLimit => $import_worker_limit, + Worker => sub { + my $meta_postback = shift; + + #NOTE: We need to only retrieve queue items for active tasks. Otherwise, + #the importer will just spin its wheels on inactive tasks and do nothing. + my $active_tasks = $poe_kernel->call("harvester","list_tasks","active"); + my @active_uuids = map { $_->{uuid} } @$active_tasks; + + my $schema = Koha::Database->new()->schema(); + my $rs = $schema->resultset('OaiHarvesterImportQueue')->search({ + uuid => \@active_uuids, + status => "new" + },{ + order_by => { -asc => 'id' }, + rows => 1, + }); + my $result = $rs->first; + if ($result){ + $result->status("wip"); + $result->update; + my $task = { + id => $result->id, + uuid => $result->uuid, + result => $result->result, + }; + + my $postback = $meta_postback->($task); + $importer->run({ + postback => $postback, + task => $task, + }); + } + }, + Active => { + PollInterval => $import_queue_poll, + AckAlias => undef, + AckState => undef, + }, + ); + + return; +} + +sub on_start { + my ($self, $kernel, $heap) = @_[OBJECT, KERNEL,HEAP]; + $kernel->alias_set('harvester'); + $heap->{scoreboard} = {}; + + #Reset any 'wip' imports to 'new' so they can be re-tried. 
+ $kernel->call("harvester","reset_imports_status"); + + #Restore state from state file + $kernel->call("harvester","restore_state"); +} + +#NOTE: This isn't really implemented at the moment, as it's not really necessary. +sub download_postback { + my ($kernel, $request_packet, $response_packet) = @_[KERNEL, ARG0, ARG1]; + my $message = $response_packet->[0]; +} + +=head3 deregister + + Remove the worker session from the harvester's in-memory scoreboard, + unset the downloading flag if downloading is completed. + +=cut + +sub deregister { + my ($self, $kernel, $heap, $session, $sender, $type) = @_[OBJECT, KERNEL,HEAP,SESSION,SENDER,ARG0]; + + my $scoreboard = $heap->{scoreboard}; + + my $logger = $self->{logger}; + $logger->debug("Start deregistering $sender as $type task"); + + my $task_uuid = delete $scoreboard->{session}->{$sender}; + #NOTE: If you don't check each step of the hashref, autovivication can lead to surprises. + if ($task_uuid){ + if ($scoreboard->{task}->{$task_uuid}){ + if ($scoreboard->{task}->{$task_uuid}->{$type}){ + delete $scoreboard->{task}->{$task_uuid}->{$type}->{$sender}; + } + } + } + + my $task = $heap->{tasks}->{$task_uuid}; + if ($task && $task->{status} && ($task->{status} eq "active") ){ + #NOTE: Each task only has 1 download session, so we can now set/unset flags for the task. + #NOTE: We should unset the downloading flag, if we're not going to repeat the task. + if ($type eq "download"){ + my $task_repeats = $kernel->call("harvester","does_task_repeat",$task_uuid); + if ($task_repeats){ + my $interval = $task->{interval}; + + $task->{effective_from} = delete $task->{effective_until}; + $task->{download_timer} = $kernel->delay_set("repeat_task", $interval, $task_uuid); + } + else { + $task->{downloading} = 0; + $kernel->call("harvester","save_state"); + $kernel->call("harvester","is_task_finished",$task_uuid); + } + } + elsif ($type eq 'import'){ + $kernel->call("harvester","is_task_finished",$task_uuid); + } + } + $logger->debug("End deregistering $sender as $type task"); +} + + +=head3 is_task_finished + + This event handler checks if the task has finished downloading and importing record. + If it is finished downloading and importing, the task is deleted from the harvester. + + This only applies to non-repeating tasks. + +=cut + +sub is_task_finished { + my ($self, $kernel, $heap, $session, $uuid) = @_[OBJECT, KERNEL,HEAP,SESSION,ARG0]; + my $task = $kernel->call("harvester","get_task",$uuid); + if ($task && (! $task->{downloading}) ){ + my $count = $self->get_import_count_for_task($uuid); + if ( ! $count ) { + #Clear this task out of the harvester as it's finished. 
+ $kernel->call("harvester","delete_task",$uuid); + return 1; + } + } + return 0; +} + +sub register { + my ($self, $kernel, $heap, $session, $sender, $type, $task_uuid) = @_[OBJECT, KERNEL,HEAP,SESSION,SENDER,ARG0,ARG1]; + my $logger = $self->{logger}; + + my $scoreboard = $heap->{scoreboard}; + + + if ($type && $task_uuid){ + $logger->debug("Registering $sender as $type for $task_uuid"); + + my $task = $heap->{tasks}->{$task_uuid}; + if ($task){ + + if ($type){ + #Register the task uuid with the session id as a key for later recall + $scoreboard->{session}->{$sender} = $task_uuid; + + #Register the session id as a certain type of session for a task + $scoreboard->{task}->{$task_uuid}->{$type}->{$sender} = 1; + + if ($type eq "download"){ + $task->{downloading} = 1; + + my $task_repeats = $kernel->call("harvester","does_task_repeat",$task_uuid); + if ($task_repeats){ + + #NOTE: Set an effective until, so we know we're not getting records any newer than + #this moment. + my $dt = DateTime->now(); + if ($dt){ + #NOTE: Ideally, I'd like to make sure that we can use 'seconds' granularity, but + #it's valid for 'from' to be null, so it's impossible to know from the data whether + #or not the repository will support the seconds granularity. + #NOTE: Ideally, it would be good to use either 'day' granularity or 'seconds' granularity, + #but at the moment the interval is expressed as seconds only. + $dt->set_formatter($seconds_granularity); + $task->{effective_until} = "$dt"; + } + } + + $kernel->call("harvester","save_state"); + } + } + } + } +} + +sub does_task_repeat { + my ($self, $kernel, $heap, $session, $uuid) = @_[OBJECT, KERNEL,HEAP,SESSION,ARG0]; + my $task = $kernel->call("harvester","get_task",$uuid); + if ($task){ + my $interval = $task->{interval}; + my $parameters = $task->{parameters}; + if ($parameters){ + my $oai_pmh = $parameters->{oai_pmh}; + if ($oai_pmh){ + if ( $interval && ($oai_pmh->{verb} eq "ListRecords") && (! $oai_pmh->{until}) ){ + return 1; + } + } + } + } + return 0; +} + + + +sub reset_imports_status { + my ($self, $kernel, $heap, $session) = @_[OBJECT, KERNEL,HEAP,SESSION]; + + my $schema = Koha::Database->new()->schema(); + my $rs = $schema->resultset('OaiHarvesterImportQueue')->search({ + status => "wip", + }); + $rs->update({ + status => "new", + }); +} + +sub restore_state { + my ($self, $kernel, $heap, $session) = @_[OBJECT, KERNEL,HEAP,SESSION]; + + my $state_file = $self->{state_file}; + if ($state_file){ + my $state_backup = "$state_file~"; + + #NOTE: If there is a state backup, it means we crashed while saving the state. Otherwise, + #let's try the regular state file if it exists. + my $file_to_restore = ( -f $state_backup ) ? $state_backup : ( ( -f $state_file ) ? 
$state_file : undef ); + if ( $file_to_restore ){ + my $opened = open( my $fh, '<', $file_to_restore ) or die "Couldn't open state: $!"; + if ($opened){ + local $/; + my $in = <$fh>; + my $decoder = Sereal::Decoder->new; + my $state = $decoder->decode($in); + if ($state){ + if ($state->{tasks}){ + #Restore tasks from our saved state + $heap->{tasks} = $state->{tasks}; + foreach my $uuid ( keys %{$heap->{tasks}} ){ + my $task = $heap->{tasks}->{$uuid}; + + #If tasks were still downloading, restart the task + if ( ($task->{status} && $task->{status} eq "active") && $task->{downloading} ){ + $task->{status} = "new"; + $kernel->call("harvester","start_task",$task->{uuid}); + } + } + } + } + } + } + } +} + +sub save_state { + my ($self, $kernel, $heap, $session) = @_[OBJECT, KERNEL,HEAP,SESSION]; + my $state_file = $self->{state_file}; + my $state_backup = "$state_file~"; + + #Make a backup of existing state record + my $moved = move($state_file,$state_backup); + + my $opened = open(my $fh, ">", $state_file) or die "Couldn't save state: $!"; + if ($opened){ + $fh->autoflush(1); + my $tasks = $heap->{tasks}; + my $harvester_state = { + tasks => $tasks, + }; + my $encoder = Sereal::Encoder->new; + my $out = $encoder->encode($harvester_state); + local $\; + my $printed = print $fh $out; + if ($printed){ + close $fh; + unlink($state_backup); + return 1; + } + } + return 0; +} + +=head3 get_task + + This event handler returns a task from a harvester using the task's + uuid as an argument. + +=cut + +sub get_task { + my ($self, $kernel, $heap, $session, $uuid, $sender) = @_[OBJECT, KERNEL,HEAP,SESSION,ARG0, SENDER]; + + if ( ! $uuid && $sender ){ + my $scoreboard = $heap->{scoreboard}; + my $uuid_by_session = $scoreboard->{session}->{$sender}; + if ($uuid_by_session){ + $uuid = $uuid_by_session; + } + } + + my $tasks = $heap->{tasks}; + if ($tasks && $uuid){ + my $task = $tasks->{$uuid}; + if ($task){ + return $task; + } + } + return 0; +} + +=head3 get_import_count_for_task + +=cut + +sub get_import_count_for_task { + my ($self,$uuid) = @_; + my $count = undef; + if ($uuid){ + my $schema = Koha::Database->new()->schema(); + my $items = $schema->resultset('OaiHarvesterImportQueue')->search({ + uuid => $uuid, + }); + $count = $items->count; + } + return $count; +} + +=head3 list_tasks + + This event handler returns a list of tasks that have been submitted + to the harvester. It returns data like uuid, status, parameters, + number of pending imports, etc. + +=cut + +sub list_tasks { + my ($self, $kernel, $heap, $session, $status) = @_[OBJECT, KERNEL,HEAP,SESSION, ARG0]; + my $schema = Koha::Database->new()->schema(); + my @tasks = (); + foreach my $uuid (sort keys %{$heap->{tasks}}){ + my $task = $heap->{tasks}->{$uuid}; + my $items = $schema->resultset('OaiHarvesterImportQueue')->search({ + uuid => $uuid, + }); + my $count = $items->count // 0; + $task->{pending_imports} = $count; + if ( ( ! $status ) || ( $status && $status eq $task->{status} ) ){ + push(@tasks, $task); + } + + } + return \@tasks; +} + +=head3 create_task + + This event handler creates a spool directory for the task's imports. + It also adds it to the harvester's memory and then saves memory to + a persistent datastore. + + Newly created tasks have a status of "new". + +=cut + +sub create_task { + my ($self, $kernel, $heap, $session, $incoming_task) = @_[OBJECT, KERNEL,HEAP,SESSION,ARG0]; + my $logger = $self->{logger}; + if ($incoming_task){ + my $uuid = $incoming_task->{uuid}; + if ( ! 
$heap->{tasks}->{$uuid} ){ + + #Step One: assign a spool directory to this task + my $spooldir = $self->{spooldir} // "/tmp"; + my $task_spooldir = "$spooldir/$uuid"; + if ( ! -d $task_spooldir ){ + my $made_spool_directory = make_path($task_spooldir); + if ( ! $made_spool_directory ){ + if ($logger){ + $logger->warn("Unable to make task-specific spool directory at '$task_spooldir'"); + } + return 0; + } + } + $incoming_task->{spooldir} = $task_spooldir; + + #Step Two: assign new status + $incoming_task->{status} = "new"; + + #Step Three: add task to harvester's memory + $heap->{tasks}->{ $uuid } = $incoming_task; + + #Step Four: save state + $kernel->call($session,"save_state"); + return 1; + } + } + return 0; +} + +=head3 start_task + + This event handler marks a task as active in the harvester's memory, + save the memory to a persistent datastore, then enqueues the task, + so that it can be directed to the next available download worker. + + Newly started tasks have a status of "active". + +=cut + +sub start_task { + my ($self, $session,$kernel,$heap,$uuid) = @_[OBJECT, SESSION,KERNEL,HEAP,ARG0]; + my $task = $heap->{tasks}->{$uuid}; + if ($task){ + if ( $task->{status} ne "active" ){ + + #Clear any pre-existing error states + delete $task->{error} if $task->{error}; + + #Step One: mark task as active + $task->{status} = "active"; + + #Step Two: save state + $kernel->call("harvester","save_state"); + + #Step Three: enqueue task + $kernel->post("oai-downloader",'enqueue','download_postback', $task); + + return 1; + } + } + return 0; +} + +=head3 repeat_task + + + +=cut + +sub repeat_task { + my ($self, $session,$kernel,$heap,$uuid) = @_[OBJECT, SESSION,KERNEL,HEAP,ARG0]; + my $task = $heap->{tasks}->{$uuid}; + if ($task){ + my $interval = $task->{interval}; + if ($task->{downloading} && $interval){ + $kernel->post("oai-downloader",'enqueue','download_postback', $task); + } + } +} + +=head3 stop_task + + This event handler prevents new workers from spawning, kills + existing workers, and stops pending imports from being imported. + + Newly stopped tasks have a status of "stopped". + +=cut + +sub stop_task { + my ($self, $kernel, $heap, $session, $sender, $task_uuid) = @_[OBJECT, KERNEL,HEAP,SESSION,SENDER,ARG0]; + + my $task = $heap->{tasks}->{$task_uuid}; + + if ($task && $task->{status} && $task->{status} ne "stopped" ){ + + #Step One: deactivate task, so no new workers can be started + $task->{status} = "stopped"; + #NOTE: You could also clear timers for new downloads, but that's probably unnecessary because of this step. + + #Step Two: kill workers + my $scoreboard = $heap->{scoreboard}; + my $session_types = $scoreboard->{task}->{$task_uuid}; + if ($session_types){ + foreach my $type ( keys %$session_types ){ + my $sessions = $session_types->{$type}; + if ($sessions){ + foreach my $session (keys %$sessions){ + if ($session){ + $kernel->signal($session, "cancel"); + } + } + } + } + #Remove the task uuid from the task key of the scoreboard + delete $scoreboard->{task}->{$task_uuid}; + #NOTE: The task uuid will still exist under the session key, + #but the sessions will deregister themselves and clean that up for you. 
+ } + + #Step Three: stop pending imports for this task + my $schema = Koha::Database->new()->schema(); + my $items = $schema->resultset('OaiHarvesterImportQueue')->search({ + uuid => $task_uuid, + }); + my $rows_updated = $items->update({ + status => "stopped", + }); + + #Step Four: save state + $kernel->call("harvester","save_state"); + return 1; + } + return 0; +} + +=head3 delete_task + + Deleted tasks are stopped, pending imports are deleted from the + database and file system, and then the task is removed from the harvester. + +=cut + +sub delete_task { + my ($self, $kernel, $heap, $session, $task_uuid) = @_[OBJECT, KERNEL,HEAP,SESSION,ARG0]; + + my $task = $heap->{tasks}->{$task_uuid}; + if ($task){ + #Step One: stop task + $kernel->call($session,"stop_task",$task_uuid); + + #Step Two: delete pending imports in database + my $schema = Koha::Database->new()->schema(); + my $items = $schema->resultset('OaiHarvesterImportQueue')->search({ + uuid => $task_uuid, + }); + if ($items){ + my $rows_deleted = $items->delete; + #NOTE: shows 0E0 instead of 0 + } + + #Step Three: remove task specific spool directory and files within it + my $spooldir = $task->{spooldir}; + if ($spooldir){ + my $files_deleted = remove_tree($spooldir, { safe => 1 }); + } + + delete $heap->{tasks}->{$task_uuid}; + + #Step Four: save state + $kernel->call("harvester","save_state"); + return 1; + } + return 0; +} + +1; diff --git a/Koha/OAI/Harvester/Client.pm b/Koha/OAI/Harvester/Client.pm new file mode 100644 index 0000000000..d5c5351714 --- /dev/null +++ b/Koha/OAI/Harvester/Client.pm @@ -0,0 +1,177 @@ +package Koha::OAI::Harvester::Client; + +# Copyright 2017 Prosentient Systems +# +# This file is part of Koha. +# +# Koha is free software; you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation; either version 3 of the License, or (at your option) any later +# version. +# +# Koha is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with Koha; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +use Modern::Perl; +use URI; +use IO::Socket::UNIX; +use IO::Select; +use JSON; + +sub new { + my ($class, $args) = @_; + $args = {} unless defined $args; + return bless ($args, $class); +} + +sub connect { + my ($self) = @_; + my $socket_uri = $self->{socket_uri}; + if ($socket_uri){ + my $uri = URI->new($socket_uri); + if ($uri && $uri->scheme eq 'unix'){ + my $socket_path = $uri->path; + my $socket = IO::Socket::UNIX->new( + Type => IO::Socket::UNIX::SOCK_STREAM(), + Peer => $socket_path, + ); + if ($socket){ + my $select = new IO::Select(); + $select->add($socket); + + $self->{_select} = $select; + $self->{_socket} = $socket; + my $message = $self->_read(); + if ($message){ + if ($message eq 'HELLO'){ + $self->{_connected} = 1; + return 1; + } + } + } + else { + warn "Failed to create socket." 
+ } + } + } + return 0; +} + +sub create { + my ($self,$task) = @_; + my $message = { + command => "create", + body => { + task => $task, + } + }; + my ($status) = $self->_exchange($message); + return $status; +} + +sub start { + my ($self,$uuid) = @_; + my $message = { + command => "start", + body => { + task => { + uuid => $uuid, + }, + } + }; + my ($status) = $self->_exchange($message); + return $status; +} + +sub stop { + my ($self,$uuid) = @_; + my $message = { + command => "stop", + body => { + task => { + uuid => $uuid, + }, + } + }; + my ($status) = $self->_exchange($message); + return $status; +} + +sub delete { + my ($self,$uuid) = @_; + my $message = { + command => "delete", + body => { + task => { + uuid => $uuid, + }, + } + }; + my ($status) = $self->_exchange($message); + return $status; +} + +sub list { + my ($self) = @_; + my $message = { + command => "list", + }; + my ($status,$tasks) = $self->_exchange($message); + return $tasks; +} + +sub _exchange { + my ($self,$message) = @_; + my $status = 0; + my $data; + if ($message){ + my $output = to_json($message); + if ($output){ + $self->_write($output); + my $json_response = $self->_read(); + if ($json_response){ + my $response = from_json($json_response); + $data = $response->{data} if $response->{data}; + $status = 1 if $response->{msg} && $response->{msg} eq "success"; + } + } + } + return ($status,$data); +} + +sub _write { + my ($self, $output) = @_; + if ($output){ + if (my $select = $self->{_select}){ + if (my @filehandles = $select->can_write(5)){ + foreach my $filehandle (@filehandles){ + #Localize output record separator as null + local $\ = "\x00"; + print $filehandle $output; + } + } + } + } +} + +sub _read { + my ($self) = @_; + if (my $select = $self->{_select}){ + if (my @filehandles = $select->can_read(5)){ + foreach my $filehandle (@filehandles){ + #Localize input record separator as null + local $/ = "\x00"; + my $message = <$filehandle>; + chomp($message) if $message; + return $message; + } + } + } +} + +1; \ No newline at end of file diff --git a/Koha/OAI/Harvester/Downloader.pm b/Koha/OAI/Harvester/Downloader.pm new file mode 100644 index 0000000000..4ca8be6bf1 --- /dev/null +++ b/Koha/OAI/Harvester/Downloader.pm @@ -0,0 +1,308 @@ +package Koha::OAI::Harvester::Downloader; + +# Copyright 2017 Prosentient Systems +# +# This file is part of Koha. +# +# Koha is free software; you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation; either version 3 of the License, or (at your option) any later +# version. +# +# Koha is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with Koha; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +use Modern::Perl; +use URI; +use XML::LibXML::Reader; +use IO::Handle; +use JSON; + +=head1 API + +=head2 Class Methods + +=cut + +sub new { + my ($class, $args) = @_; + $args = {} unless defined $args; + return bless ($args, $class); +} + +=head2 BuildURL + + Takes a baseURL and a mix of required and optional OAI-PMH arguments, + and makes them into a suitable URL for an OAI-PMH request. 
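 For example, the call below (endpoint and parameter values are placeholders) builds a ListRecords URL; the return value is a URI object, ready to be handed to the downloader's harvest routine:

    my $downloader = Koha::OAI::Harvester::Downloader->new();
    my $url = $downloader->BuildURL({
        baseURL        => 'http://oai.example.org/oai',  # placeholder endpoint
        verb           => 'ListRecords',
        metadataPrefix => 'marcxml',
        from           => '2017-01-01',                  # optional
        set            => 'koha',                        # optional
    });
    # e.g. http://oai.example.org/oai?verb=ListRecords&metadataPrefix=marcxml&from=2017-01-01&set=koha
    # (query parameter order may vary)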
+ +=cut + +sub BuildURL { + my ($self, $args) = @_; + my $baseURL = $args->{baseURL}; + my $url = URI->new($baseURL); + if ($url && $url->isa("URI")){ + my $verb = $args->{verb}; + if ($verb){ + my %parameters = ( + verb => $verb, + ); + if ($verb eq "ListRecords"){ + my $resumptionToken = $args->{resumptionToken}; + my $metadataPrefix = $args->{metadataPrefix}; + if ($resumptionToken){ + $parameters{resumptionToken} = $resumptionToken; + } + elsif ($metadataPrefix){ + $parameters{metadataPrefix} = $metadataPrefix; + #Only add optional parameters if they're provided + foreach my $param ( qw( from until set ) ){ + $parameters{$param} = $args->{$param} if $args->{$param}; + } + } + else { + warn "BuildURL() requires an argument of either resumptionToken or metadataPrefix"; + return; + } + } + elsif ($verb eq "GetRecord"){ + my $metadataPrefix = $args->{metadataPrefix}; + my $identifier = $args->{identifier}; + if ($metadataPrefix && $identifier){ + $parameters{metadataPrefix} = $metadataPrefix; + $parameters{identifier} = $identifier; + } + else { + warn "BuildURL() requires an argument of metadataPrefix and an argument of identifier"; + return; + } + } + $url->query_form(%parameters); + return $url; + } + else { + warn "BuildURL() requires a verb of GetRecord or ListRecords"; + return; + } + } + else { + warn "BuildURL() requires a base URL of type URI."; + return; + } +} + +=head2 OpenXMLStream + + Fork a child process to send the HTTP request, which sends chunks + of XML via a pipe to the parent process. + + The parent process creates and returns a XML::LibXML::Reader object, + which reads the XML stream coming through the pipe. + + Normally, using a DOM reader, you must wait to read the entire XML document + into memory. However, using a stream reader, chunks are read into memory, + processed, then discarded. It's faster and more efficient. + +=cut + +sub GetXMLStream { + my ($self, $args) = @_; + my $url = $args->{url}; + my $user_agent = $args->{user_agent}; + if ($url && $user_agent){ + pipe( CHILD, PARENT ) or die "Cannot created connected pipes: $!"; + CHILD->autoflush(1); + PARENT->autoflush(1); + if ( my $pid = fork ){ + #Parent process + close PARENT; + return \*CHILD; + } + else { + #Child process + close CHILD; + my $response = $self->_request({ + url => $url, + user_agent => $user_agent, + file_handle => \*PARENT, + }); + if ($response && $response->is_success){ + #HTTP request has successfully finished, so we close the file handle and exit the process + close PARENT; + CORE::exit(); #some modules like mod_perl redefine exit + } + else { + warn "[child $$] OAI-PMH unsuccessful. 
Response status: ".$response->status_line."\n" if $response; + CORE::exit(); + } + } + } + else { + warn "GetXMLStream() requires a 'url' argument and a 'user_agent' argument"; + return; + } +} + +sub _request { + my ($self, $args) = @_; + my $url = $args->{url}; + my $user_agent = $args->{user_agent}; + my $fh = $args->{file_handle}; + + if ($url && $user_agent && $fh){ + my $request = HTTP::Request->new( GET => $url ); + my $response = $user_agent->request( $request, sub { + my ($chunk_of_data, $ref_to_response, $ref_to_protocol) = @_; + print $fh $chunk_of_data; + }); + return $response; + } + else { + warn "_request() requires a 'url' argument, 'user_agent' argument, and 'file_handle' argument."; + return; + } +} + +sub ParseXMLStream { + my ($self, $args) = @_; + + my $each_callback = $args->{each_callback}; + my $fh = $args->{file_handle}; + if ($fh){ + my $reader = XML::LibXML::Reader->new( FD => $fh, no_blanks => 1 ); + my $pattern = XML::LibXML::Pattern->new('oai:OAI-PMH|/oai:OAI-PMH/*', { 'oai' => "http://www.openarchives.org/OAI/2.0/" }); + + my $repository; + + warn "Start parsing..."; + while (my $rv = $reader->nextPatternMatch($pattern)){ + #$rv == 1; successful + #$rv == 0; end of document reached + #$rv == -1; error + if ($rv == -1){ + die "Parser error!"; + } + #NOTE: We do this so we only get the opening tag of the element. + next unless $reader->nodeType == XML_READER_TYPE_ELEMENT; + + my $localname = $reader->localName; + if ( $localname eq "request" ){ + my $node = $reader->copyCurrentNode(1); + $repository = $node->textContent; + } + elsif ( $localname eq "error" ){ + #See https://www.openarchives.org/OAI/openarchivesprotocol.html#ErrorConditions + #We should probably die under all circumstances except "noRecordsMatch" + my $node = $reader->copyCurrentNode(1); + if ($node){ + my $code = $node->getAttribute("code"); + if ($code){ + if ($code ne "noRecordsMatch"){ + warn "Error code: $code"; + die; + } + } + } + } + elsif ( ($localname eq "ListRecords") || ($localname eq "GetRecord") ){ + my $each_pattern = XML::LibXML::Pattern->new('//oai:record|oai:resumptionToken', { 'oai' => "http://www.openarchives.org/OAI/2.0/" }); + while (my $each_rv = $reader->nextPatternMatch($each_pattern)){ + if ($rv == "-1"){ + #NOTE: -1 denotes an error state + warn "Error getting pattern match"; + } + next unless $reader->nodeType == XML_READER_TYPE_ELEMENT; + if ($reader->localName eq "record"){ + my $node = $reader->copyCurrentNode(1); + #NOTE: Without the UTF-8 flag, UTF-8 data will be corrupted. 
+ my $document = XML::LibXML::Document->new('1.0', 'UTF-8'); + $document->setDocumentElement($node); + + #Per record callback + if ($each_callback){ + $each_callback->({ + repository => $repository, + document => $document, + }); + } + } + elsif ($reader->localName eq "resumptionToken"){ + my $resumptionToken = $reader->readInnerXml; + return ($resumptionToken,$repository); + + } + } + } + } #/OAI-PMH document match + } + else { + warn "ParseXMLStream() requires a 'file_handle' argument."; + } +} + +sub harvest { + my ($self,$args) = @_; + my $url = $args->{url}; + my $ua = $args->{user_agent}; + my $callback = $args->{callback}; + my $complete_callback = $args->{complete_callback}; + + if ($url && $ua){ + + #NOTE: http://search.cpan.org/~shlomif/XML-LibXML-2.0128/lib/XML/LibXML/Parser.pod#ERROR_REPORTING + while($url){ + warn "URL = $url"; + warn "Creating child process to download and feed parent process parser."; + my $stream = $self->GetXMLStream({ + url => $url, + user_agent => $ua, + }); + + warn "Creating parent process parser."; + my ($resumptionToken) = $self->ParseXMLStream({ + file_handle => $stream, + each_callback => $callback, + }); + warn "Finished parsing current XML document."; + + if ($resumptionToken){ + #If there's a resumptionToken at the end of the stream, + #we build a new URL and repeat this process again. + $url->query_form({ + verb => "ListRecords", + resumptionToken => $resumptionToken, + }); + } + else { + warn "Finished harvest."; + last; + } + + warn "Reap child process downloader."; + #Reap the dead child requester process before performing another request, + #so we don't fill up the process table with zombie children. + while ((my $child = waitpid(-1, 0)) > 0) { + warn "Parent $$ reaped child process $child" . ($? ? " with exit code $?" : '') . ".\n"; + } + } + + if ($complete_callback){ + warn "Run complete callback."; + + #Clear query string + $url->query_form({}); + + #Run complete callback using the actual URL from the request. + $complete_callback->({ + repository => $url, + }); + } + } +} + +1; diff --git a/Koha/OAI/Harvester/Import/MARCXML.pm b/Koha/OAI/Harvester/Import/MARCXML.pm new file mode 100755 index 0000000000..6accb78db2 --- /dev/null +++ b/Koha/OAI/Harvester/Import/MARCXML.pm @@ -0,0 +1,140 @@ +package Koha::OAI::Harvester::Import::MARCXML; + +# Copyright 2016 Prosentient Systems +# +# This file is part of Koha. +# +# Koha is free software; you can redistribute it and/or modify it +# under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 3 of the License, or +# (at your option) any later version. +# +# Koha is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Koha; if not, see . +# + +use Modern::Perl; +use MARC::Record; + +use C4::Context; +use C4::Biblio; + +use constant MAX_MATCHES => 99999; #NOTE: This is an arbitrary value. We want to get all matches. + +sub new { + my ($class, $args) = @_; + $args = {} unless defined $args; + if ( (! $args->{dom}) && (! $args->{marc_record}) ){ + die "You must provide either a dom or marc_record argument to this constructor"; + } + if ( $args->{dom} && ( ! 
$args->{marc_record} ) ){ + my $dom = $args->{dom}; + my $xml = $dom->toString(2); + my $marcflavour = C4::Context->preference('marcflavour') || 'MARC21'; + my $marc_record = eval {MARC::Record::new_from_xml( $xml, "utf8", $marcflavour)}; + if ($@){ + die "Unable to create MARC::Record object"; + } + if ($marc_record){ + $args->{marc_record} = $marc_record; + } + } + return bless ($args, $class); +} + +sub import_record { + my ($self,$args) = @_; + my $framework = $args->{framework}; + my $record_type = $args->{record_type}; + my $matcher = $args->{matcher}; + my $koha_id = $args->{koha_id}; + + my $action = "error"; + + #Try to find a matching Koha MARCXML record via Zebra + if (! $koha_id && $matcher){ + my $matched_id = $self->_try_matcher({ + matcher => $matcher, + }); + if ($matched_id){ + $koha_id = $matched_id; + } + } + + if ($koha_id){ + #Update + ($action) = $self->_mod_koha_record({ + record_type => $record_type, + framework => $framework, + koha_id => $koha_id, + }); + } + else { + #Add + ($action,$koha_id) = $self->_add_koha_record({ + record_type => $record_type, + framework => $framework, + }); + } + + return ($action,$koha_id); +} + +sub _try_matcher { + my ($self, $args) = @_; + my $marc_record = $self->{marc_record}; + my $matcher = $args->{matcher}; + my $matched_id; + my @matches = $matcher->get_matches($marc_record, MAX_MATCHES); + if (@matches){ + my $bestrecordmatch = shift @matches; + if ($bestrecordmatch && $bestrecordmatch->{record_id}){ + $matched_id = $bestrecordmatch->{record_id}; + } + } + return $matched_id; +} + +sub _add_koha_record { + my ($self, $args) = @_; + my $marc_record = $self->{marc_record}; + my $record_type = $args->{record_type} // "biblio"; + my $framework = $args->{framework}; + my $koha_id; + my $action = "error"; + if ($record_type eq "biblio"){ + #NOTE: Strip item fields to prevent any accidentally getting through. + C4::Biblio::_strip_item_fields($marc_record,$framework); + my ($biblionumber,$biblioitemnumber) = C4::Biblio::AddBiblio($marc_record,$framework); + if ($biblionumber){ + $action = "added"; + $koha_id = $biblionumber; + } + } + return ($action,$koha_id); +} + +sub _mod_koha_record { + my ($self, $args) = @_; + my $marc_record = $self->{marc_record}; + my $record_type = $args->{record_type} // "biblio"; + my $framework = $args->{framework}; + my $koha_id = $args->{koha_id}; + my $action = "error"; + if ($record_type eq "biblio"){ + #NOTE: Strip item fields to prevent any accidentally getting through. + C4::Biblio::_strip_item_fields($marc_record,$framework); + my $updated = C4::Biblio::ModBiblio($marc_record, $koha_id, $framework); + if ($updated){ + $action = "updated"; + } + } + return ($action); +} + +1; diff --git a/Koha/OAI/Harvester/Import/Record.pm b/Koha/OAI/Harvester/Import/Record.pm new file mode 100755 index 0000000000..9a67eb8329 --- /dev/null +++ b/Koha/OAI/Harvester/Import/Record.pm @@ -0,0 +1,301 @@ +package Koha::OAI::Harvester::Import::Record; + +# Copyright 2016 Prosentient Systems +# +# This file is part of Koha. +# +# Koha is free software; you can redistribute it and/or modify it +# under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 3 of the License, or +# (at your option) any later version. +# +# Koha is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. 
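A rough sketch of how the MARCXML import class above is driven (the record and matcher here are placeholders, and C4::Matcher->fetch() is used as an assumption about how a record matcher would be loaded):

    use Koha::OAI::Harvester::Import::MARCXML;
    use C4::Matcher;

    my $importer = Koha::OAI::Harvester::Import::MARCXML->new({
        marc_record => $marc_record,                 # a MARC::Record object already in hand
    });
    my $matcher = C4::Matcher->fetch($matcher_id);   # optional; enables match-then-update
    my ($action, $biblionumber) = $importer->import_record({
        framework   => '',                           # default MARC framework
        record_type => 'biblio',
        matcher     => $matcher,
    });
    # $action is "added", "updated", or "error"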
+# +# You should have received a copy of the GNU General Public License +# along with Koha; if not, see . +# + +use Modern::Perl; +use XML::LibXML; +use XML::LibXSLT; +use URI; +use File::Basename; + +use C4::Context; +use C4::Biblio; + +use Koha::Database; +use Koha::OAI::Harvester::Import::MARCXML; + +=head1 API + +=head2 Class Methods + +=cut + +my $schema = Koha::Database->new()->schema(); + +sub new { + my ($class, $args) = @_; + $args = {} unless defined $args; + + die "You must provide a 'doc' argument to the constructor" unless $args->{doc}; + die "You must provide a 'repository' argument to the constructor" unless $args->{repository}; + + if (my $doc = $args->{doc}){ + + #Get the root element + my $root = $doc->documentElement; + + #Register namespaces for searching purposes + my $xpc = XML::LibXML::XPathContext->new(); + $xpc->registerNs('oai','http://www.openarchives.org/OAI/2.0/'); + + my $xpath_identifier = XML::LibXML::XPathExpression->new("oai:header/oai:identifier"); + my $identifier = $xpc->findnodes($xpath_identifier,$root)->shift; + $args->{header_identifier} = $identifier->textContent; + + my $xpath_datestamp = XML::LibXML::XPathExpression->new("oai:header/oai:datestamp"); + my $datestamp = $xpc->findnodes($xpath_datestamp,$root)->shift; + $args->{header_datestamp} = $datestamp->textContent; + + my $xpath_status = XML::LibXML::XPathExpression->new(q{oai:header/@status}); + my $status_node = $xpc->findnodes($xpath_status,$root)->shift; + $args->{header_status} = $status_node ? $status_node->textContent : ""; + } + + return bless ($args, $class); +} + +sub is_deleted_upstream { + my ($self, $args) = @_; + if ($self->{header_status}){ + if ($self->{header_status} eq "deleted"){ + return 1; + } + } + return 0; +} + +sub set_filter { + my ($self, $filter_definition) = @_; + + #Source a default XSLT to use for filtering + my $htdocs = C4::Context->config('intrahtdocs'); + my $theme = C4::Context->preference("template"); + $self->{filter} = "$htdocs/$theme/en/xslt/StripOAIPMH.xsl"; + $self->{filter_type} = "xslt"; + + if ($filter_definition && $filter_definition ne "default"){ + my ($filter_type, $filter) = $self->_parse_filter($filter_definition); + if ($filter_type eq "xslt"){ + if ( -f $filter ){ + $self->{filter} = $filter; + $self->{filter_type} = "xslt"; + } + } + } +} + +sub _parse_filter { + my ($self,$filter_definition) = @_; + my ($type,$filter); + my $filter_uri = URI->new($filter_definition); + if ($filter_uri){ + my $scheme = $filter_uri->scheme; + if ( ($scheme && $scheme eq "file") || ! 
$scheme ){ + my $path = $filter_uri->path; + #Filters may theoretically be .xsl or .pm files + my($filename, $dirs, $suffix) = fileparse($path,(".xsl",".pm")); + if ($suffix){ + if ( $suffix eq ".xsl"){ + $type = "xslt"; + $filter = $path; + } + } + } + } + return ($type,$filter); +} + +sub filter { + my ($self) = @_; + my $filtered = 0; + my $doc = $self->{doc}; + my $filter = $self->{filter}; + my $filter_type = $self->{filter_type}; + if ($doc){ + if ($filter && -f $filter){ + if ($filter_type){ + if ( $filter_type eq 'xslt' ){ + my $xslt = XML::LibXSLT->new(); + my $style_doc = XML::LibXML->load_xml(location => $filter); + my $stylesheet = $xslt->parse_stylesheet($style_doc); + if ($stylesheet){ + my $results = $stylesheet->transform($doc); + if ($results){ + my $root = $results->documentElement; + if ($root){ + my $namespace = $root->namespaceURI; + if ($namespace eq "http://www.loc.gov/MARC21/slim"){ + #NOTE: Both MARC21 and UNIMARC should be covered by this namespace + my $marcxml = eval { Koha::OAI::Harvester::Import::MARCXML->new({ dom => $results, }) }; + if ($@){ + warn "Error Koha::OAI::Harvester::Import::MARCXML: $@"; + return; + } else { + return $marcxml; + } + } + } + } + } + } + } + } + } + return; +} + +sub _find_koha_link { + my ($self, $args) = @_; + my $record_type = $args->{record_type} // "biblio"; + my $link_id; + if ($record_type eq "biblio"){ + my $link = $schema->resultset('OaiHarvesterBiblio')->find( + { + oai_repository => $self->{repository}, + oai_identifier => $self->{header_identifier}, + }, + { key => "oai_record",} + ); + if ($link && $link->biblionumber){ + $link_id = $link->biblionumber->id; + } + } + return $link_id; +} + +=head3 import_record + + my ($action,$record_id) = $oai_record->import_record({ + filter => $filter, + framework => $framework, + record_type => $record_type, + matcher => $matcher, + }); + + $action eq "added" || "updated" || "deleted" || "not_found" || "error" + +=cut + +sub import_record { + my ($self, $args) = @_; + my $filter = $args->{filter} || 'default'; + my $framework = $args->{framework} || ''; + my $record_type = $args->{record_type} || 'biblio'; + my $matcher = $args->{matcher}; + + my $action = "error"; + + #Find linkage between OAI-PMH repository-identifier and Koha record id + my $linked_id = $self->_find_koha_link({ + record_type => $record_type, + }); + + if ($self->is_deleted_upstream){ + #NOTE: If a record is deleted upstream, it will not contain a metadata element + if ($linked_id){ + $action = $self->delete_koha_record({ + record_id => $linked_id, + record_type => $record_type, + }); + } + else { + $action = "not_found"; + #NOTE: If there's no OAI-PMH repository-identifier pair in the database, + #then there's no perfect way to find a linked record to delete. + } + } + else { + $self->set_filter($filter); + + + my $import_record = $self->filter(); + + if ($import_record){ + ($action,$linked_id) = $import_record->import_record({ + framework => $framework, + record_type => $record_type, + matcher => $matcher, + koha_id => $linked_id, + }); + + if ($linked_id){ + #Link Koha record ID to OAI-PMH details for this record type, + #if linkage doesn't already exist. 
+ $self->link_koha_record({ + record_type => $record_type, + koha_id => $linked_id, + }); + } + } + } + + #Log record details to database + my $importer = $schema->resultset('OaiHarvesterHistory')->create({ + header_identifier => $self->{header_identifier}, + header_datestamp => $self->{header_datestamp}, + header_status => $self->{header_status}, + record => $self->{doc}->toString(1), + repository => $self->{repository}, + status => $action, + filter => $filter, + framework => $framework, + record_type => $record_type, + matcher_code => $matcher ? $matcher->code : undef, + }); + + return ($action,$linked_id); +} + +sub link_koha_record { + my ($self, $args) = @_; + my $record_type = $args->{record_type} // "biblio"; + my $koha_id = $args->{koha_id}; + if ($koha_id){ + if ($record_type eq "biblio"){ + my $import_oai_biblio = $schema->resultset('OaiHarvesterBiblio')->find_or_create({ + oai_repository => $self->{repository}, + oai_identifier => $self->{header_identifier}, + biblionumber => $koha_id, + }); + if ( ! $import_oai_biblio->in_storage ){ + $import_oai_biblio->insert; + } + } + } +} + +sub delete_koha_record { + my ($self, $args) = @_; + my $record_type = $args->{record_type} // "biblio"; + my $record_id = $args->{record_id}; + + my $action = "error"; + + if ($record_type eq "biblio"){ + my $error = C4::Biblio::DelBiblio($record_id); + if (!$error){ + $action = "deleted"; + #NOTE: If there's no error, a cascading database delete should + #automatically remove the link between the Koha biblionumber and OAI-PMH record too + } + } + return $action; +} + +1; diff --git a/Koha/OAI/Harvester/Listener.pm b/Koha/OAI/Harvester/Listener.pm new file mode 100644 index 0000000000..03ec391a8d --- /dev/null +++ b/Koha/OAI/Harvester/Listener.pm @@ -0,0 +1,187 @@ +package Koha::OAI::Harvester::Listener; + +# Copyright 2017 Prosentient Systems +# +# This file is part of Koha. +# +# Koha is free software; you can redistribute it and/or modify it +# under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 3 of the License, or +# (at your option) any later version. +# +# Koha is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Koha; if not, see . 
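The listener below implements the server side of the null-delimited JSON protocol; for reference, Koha::OAI::Harvester::Client (above) drives it roughly like this (the socket path is an assumption; in practice it comes from configuration):

    use Koha::OAI::Harvester::Client;

    my $client = Koha::OAI::Harvester::Client->new({
        socket_uri => 'unix:/var/run/koha/oai-pmh-harvester/listener.sock',  # assumed path
    });
    if ($client->connect){
        # On the wire this sends {"command":"list"} terminated by a null byte
        # and expects a reply like {"msg":"success","data":[...]}
        my $tasks      = $client->list();
        my $is_started = $client->start($uuid);   # $uuid of a previously created task
    }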
+# + +use Modern::Perl; +use POE qw(Wheel::SocketFactory Wheel::ReadWrite); +use IO::Socket qw(AF_UNIX); +use JSON; +use URI; + +sub new { + my ($class, $args) = @_; + $args = {} unless defined $args; + return bless ($args, $class); +} + +sub spawn { + my ($class, $args) = @_; + my $self = $class->new($args); + my $socket = $args->{socket}; + POE::Session->create( + args => [ + $socket, + ], + object_states => [ + $self => { + _start => "on_start", + "on_server_success" => "on_server_success", + "on_server_error" => "on_server_error", + "on_client_error" => "on_client_error", + "on_client_input" => "on_client_input", + }, + ], + ); +} + +sub on_start { + my ($kernel,$heap,$socket_uri) = @_[KERNEL,HEAP,ARG0]; + + my $uri = URI->new($socket_uri); + if ($uri && $uri->scheme eq 'unix'){ + my $socket_path = $uri->path; + unlink $socket_path if -S $socket_path; + $heap->{server} = POE::Wheel::SocketFactory->new( + SocketDomain => AF_UNIX, + BindAddress => $socket_path, + SuccessEvent => "on_server_success", + FailureEvent => "on_server_error", + ); + + #Make the socket writeable to other users like Apache + chmod 0666, $socket_path; + } +} + +sub on_server_success { + my ($self, $client_socket, $server_wheel_id, $heap, $session) = @_[OBJECT, ARG0, ARG3, HEAP,SESSION]; + my $logger = $self->{logger}; + my $null_filter = POE::Filter::Line->new( + Literal => chr(0), + ); + my $client_wheel = POE::Wheel::ReadWrite->new( + Handle => $client_socket, + InputEvent => "on_client_input", + ErrorEvent => "on_client_error", + InputFilter => $null_filter, + OutputFilter => $null_filter, + ); + $heap->{client}->{ $client_wheel->ID() } = $client_wheel; + $logger->info("Connection ".$client_wheel->ID()." started."); + #TODO: Add basic authentication here? + $client_wheel->put("HELLO"); +} + +sub on_server_error { + my ($self, $operation, $errnum, $errstr, $heap, $session) = @_[OBJECT, ARG0, ARG1, ARG2,HEAP, SESSION]; + my $logger = $self->{logger}; + $logger->error("Server $operation error $errnum: $errstr"); + delete $heap->{server}; +} + +sub on_client_error { + my ($self, $wheel_id,$heap,$session) = @_[OBJECT, ARG3,HEAP,SESSION]; + my $logger = $self->{logger}; + $logger->info("Connection $wheel_id failed or ended."); + delete $heap->{client}->{$wheel_id}; +} + +sub on_client_input { + my ($self, $input, $wheel_id, $session, $kernel, $heap) = @_[OBJECT, ARG0, ARG1, SESSION, KERNEL, HEAP]; + my $logger = $self->{logger}; + $logger->debug("Server input: $input"); + my $server_response = { msg => "fail"}; + eval { + my $json_input = from_json($input); + my $command = $json_input->{command}; + my $body = $json_input->{body}; + if ($command){ + if ($command eq "create"){ + my $task = $body->{task}; + if ($task){ + my $is_created = $kernel->call("harvester","create_task",$task); + if ($is_created){ + $server_response->{msg} = "success"; + } + } + } + elsif ($command eq "start"){ + my $task = $body->{task}; + if ($task){ + my $uuid = $task->{uuid}; + #Fetch from memory now... 
+ my $is_started = $kernel->call("harvester","start_task", $uuid); + if ($is_started){ + $server_response->{msg} = "success"; + } + } + } + elsif ($command eq "stop"){ + my $task = $body->{task}; + if ($task){ + if ($task->{uuid}){ + my $is_stopped = $kernel->call("harvester","stop_task",$task->{uuid}); + if ($is_stopped){ + $server_response->{msg} = "success"; + } + } + } + } + elsif ($command eq "delete"){ + my $task = $body->{task}; + if ($task){ + if ($task->{uuid}){ + my $is_deleted = $kernel->call("harvester","delete_task",$task->{uuid}); + if ($is_deleted){ + $server_response->{msg} = "success"; + } + } + } + } + elsif ($command eq "list"){ + my $tasks = $kernel->call("harvester","list_tasks"); + if ($tasks){ + $server_response->{msg} = "success"; + $server_response->{data} = $tasks; + } + } + } + }; + if ($@){ + #NOTE: An error most likely means that something other than a valid JSON string was received + $logger->error($@); + } + + if ($server_response){ + eval { + my $client = $heap->{client}->{$wheel_id}; + my $json_message = to_json($server_response, { pretty => 1 }); + if ($json_message){ + $logger->debug("Server output: $json_message"); + $client->put($json_message); + } + }; + if ($@){ + #NOTE: An error means our response couldn't be serialised as JSON + $logger->error($@); + } + } +} + +1; diff --git a/Koha/OAI/Harvester/Request.pm b/Koha/OAI/Harvester/Request.pm new file mode 100644 index 0000000000..38249476b6 --- /dev/null +++ b/Koha/OAI/Harvester/Request.pm @@ -0,0 +1,184 @@ +package Koha::OAI::Harvester::Request; + +# Copyright Prosentient Systems 2017 +# +# This file is part of Koha. +# +# Koha is free software; you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation; either version 3 of the License, or (at your option) any later +# version. +# +# Koha is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with Koha; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +use Modern::Perl; + +use Carp; + +use base qw(Koha::Object); + +#For validation +use URI; +use HTTP::OAI; + +=head1 NAME + +Koha::OAI::Harvester::Request - + +=head1 API + +=head2 Class Methods + +=cut + + + +=head3 _type + +=cut + +sub _type { + return 'OaiHarvesterRequest'; +} + +sub validate { + my ($self) = @_; + my $errors = {}; + + #Step one: validate URL + my $uri = URI->new($self->http_url); + if ( $uri && $uri->scheme && ($uri->scheme eq "http" || $uri->scheme eq "https") ){ + + #Step two: validate access and authorization to URL + my $harvester = $self->_harvester(); + my $identify = $harvester->Identify; + if ($identify->is_success){ + + #Step three: validate OAI-PMH parameters + + #Test Set + my $set = $self->oai_set; + if ($set){ + my $set_response = $harvester->ListSets(); + my @server_sets = $set_response->set; + if ( ! grep {$_->setSpec eq $set} @server_sets ){ + $errors->{oai_set}->{unavailable} = 1; + } + } + + #Test Metadata Prefix + my $metadataPrefix = $self->oai_metadataPrefix; + if ($metadataPrefix){ + my $metadata_response = $harvester->ListMetadataFormats(); + my @server_formats = $metadata_response->metadataFormat; + if ( ! 
grep { $_->metadataPrefix eq $metadataPrefix } @server_formats ){ + $errors->{oai_metadataPrefix}->{unavailable} = 1; + } + } + else { + $errors->{oai_metadataPrefix}->{missing} = 1; + } + + #Test Granularity and Timestamps + my $server_granularity = $identify->granularity; + my $from = $self->oai_from; + my $until = $self->oai_until; + if ($from || $until){ + my ($from_granularity,$until_granularity); + if ($from){ + $from_granularity = _determine_granularity($from); + if ($from_granularity eq "YYYY-MM-DDThh:mm:ssZ"){ + $errors->{oai_from}->{unavailable} = 1 if $server_granularity ne $from_granularity; + } elsif ($from_granularity eq "failed"){ + $errors->{oai_from}->{malformed} = 1; + } + } + if ($until){ + $until_granularity = _determine_granularity($until); + if ($until_granularity eq "YYYY-MM-DDThh:mm:ssZ"){ + $errors->{oai_until}->{unavailable} = 1 if $server_granularity ne $until_granularity; + } elsif ($until_granularity eq "failed"){ + $errors->{oai_until}->{malformed} = 1; + } + } + if ($from && $until){ + if ($from_granularity ne $until_granularity){ + $errors->{oai}->{granularity_mismatch} = 1; + } + } + } + + #Test if identifier is provided when using GetRecord + my $verb = $self->oai_verb; + if ($verb && $verb eq "GetRecord"){ + my $identifier = $self->oai_identifier; + if (! $identifier){ + $errors->{oai_identifier}->{missing} = 1; + } + } + } + elsif ($identify->is_error){ + foreach my $error ($identify->errors){ + if ($error->code =~ /^404$/){ + $errors->{http}->{404} = 1; + } elsif ($error->code =~ /^401$/){ + $errors->{http}->{401} = 1; + } else { + $errors->{http}->{generic} = 1; + } + } + } + else { + $errors->{http}->{generic} = 1; + } + } else { + $errors->{http_url}->{malformed} = 1; + } + return $errors; +} + +sub _harvester { + my ( $self ) = @_; + my $harvester; + if ($self->http_url){ + $harvester = new HTTP::OAI::Harvester( baseURL => $self->http_url ); + my $uri = URI->new($self->http_url); + if ($uri->scheme && ($uri->scheme eq 'http' || $uri->scheme eq 'https') ){ + my $host = $uri->host; + my $port = $uri->port; + $harvester->credentials($host.":".$port, $self->http_realm, $self->http_username, $self->http_password); + } + } + return $harvester; +} + +sub _determine_granularity { + my ($timestamp) = @_; + my $granularity; + if ($timestamp =~ /^(\d{4}-\d{2}-\d{2})(T\d{2}:\d{2}:\d{2}Z)?$/){ + if ($1 && $2){ + $granularity = "YYYY-MM-DDThh:mm:ssZ"; + } elsif ($1 && !$2){ + $granularity = "YYYY-MM-DD"; + } else { + $granularity = "failed"; + } + } else { + $granularity = "failed"; + } + return $granularity; +} + +=head1 AUTHOR + +David Cook + +=cut + +1; diff --git a/Koha/OAI/Harvester/Requests.pm b/Koha/OAI/Harvester/Requests.pm new file mode 100644 index 0000000000..f760ec9c67 --- /dev/null +++ b/Koha/OAI/Harvester/Requests.pm @@ -0,0 +1,62 @@ +package Koha::OAI::Harvester::Requests; + +# Copyright Prosentient Systems 2017 +# +# This file is part of Koha. +# +# Koha is free software; you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation; either version 3 of the License, or (at your option) any later +# version. +# +# Koha is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. 
+# +# You should have received a copy of the GNU General Public License along +# with Koha; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +use Modern::Perl; + +use Carp; + +use Koha::Database; + +use Koha::OAI::Harvester::Request; + +use base qw(Koha::Objects); + +=head1 NAME + +Koha::OAI::Harvester::Requests - + +=head1 API + +=head2 Class Methods + +=cut + +=head3 _type + +=cut + +sub _type { + return 'OaiHarvesterRequest'; +} + +=head3 object_class + +=cut + +sub object_class { + return 'Koha::OAI::Harvester::Request'; +} + +=head1 AUTHOR + +David Cook + +=cut + +1; diff --git a/Koha/OAI/Harvester/Worker.pm b/Koha/OAI/Harvester/Worker.pm new file mode 100644 index 0000000000..5374a293ad --- /dev/null +++ b/Koha/OAI/Harvester/Worker.pm @@ -0,0 +1,156 @@ +package Koha::OAI::Harvester::Worker; + +# Copyright Prosentient Systems 2017 +# +# This file is part of Koha. +# +# Koha is free software; you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation; either version 3 of the License, or (at your option) any later +# version. +# +# Koha is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with Koha; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +use Modern::Perl; +use POE; +use DateTime; +use JSON; + +sub new { + my ($class, $args) = @_; + $args = {} unless defined $args; + $args->{type} = "worker" unless $args->{type}; + return bless ($args, $class); +} + +sub run { + my ($self,$args) = @_; + my $postback = $args->{postback}; + my $task = $args->{task}; + + POE::Session->create( + object_states => [ + $self => { + _start => "on_start", + got_child_stderr => "on_child_stderr", + got_child_close => "on_child_close", + got_child_signal => "on_child_signal", + got_child_stdout => "on_child_stdout", + stop_worker => "stop_worker", + _stop => "on_stop", + }, + ], + args => [ + $postback, + $task, + ], + ); +} + +sub stop_worker { + my ($self,$heap) = @_[OBJECT,HEAP]; + if (my $child_processes = $heap->{children_by_pid}){ + foreach my $child_pid (keys %$child_processes){ + my $child = $child_processes->{$child_pid}; + $child->kill(); + } + } +} + + +sub on_stop { + my ($self,$kernel) = @_[OBJECT,KERNEL]; + + #Deregister the worker session from the harvester's roster of workers + $kernel->call("harvester","deregister",$self->{type}); +} + +# Wheel event, including the wheel's ID. +sub on_child_stdout { + my ($self, $stdout_line, $wheel_id) = @_[OBJECT, ARG0, ARG1]; + my $type = $self->{type}; + my $child = $_[HEAP]{children_by_wid}{$wheel_id}; + my $logger = $self->{logger}; + if ($logger){ + $logger->debug("[$type][pid ".$child->PID."][STDOUT] $stdout_line"); + } + + my $postback = $_[HEAP]{postback}; + if ($postback){ + eval { + my $message = from_json($stdout_line); + if ($message){ + $postback->($message); + } + }; + } +} + +# Wheel event, including the wheel's ID. 
+sub on_child_stderr { + my ($self,$stderr_line, $wheel_id) = @_[OBJECT, ARG0, ARG1]; + my $type = $self->{type}; + my $child = $_[HEAP]{children_by_wid}{$wheel_id}; + my $logger = $self->{logger}; + if ($logger){ + $logger->debug("[$type][pid ".$child->PID."][STDERR] $stderr_line"); + } +} + +# Wheel event, including the wheel's ID. +sub on_child_close { + my ($self,$heap,$wheel_id) = @_[OBJECT,HEAP,ARG0]; + my $type = $self->{type}; + my $logger = $self->{logger}; + + my $child = delete $heap->{children_by_wid}->{$wheel_id}; + + # May have been reaped by on_child_signal(). + unless (defined $child) { + if ($logger){ + $logger->debug("[$type][wid $wheel_id] closed all pipes"); + } + return; + } + if ($logger){ + $logger->debug("[$type][pid ".$child->PID."] closed all pipes"); + } + delete $heap->{children_by_pid}->{$child->PID}; +} + +sub on_child_signal { + my ($self,$kernel,$pid,$status) = @_[OBJECT,KERNEL,ARG1,ARG2]; + my $type = $self->{type}; + my $logger = $self->{logger}; + if ($logger){ + $logger->debug("[$type][pid $pid] exited with status $status"); + } + + my $child = delete $_[HEAP]{children_by_pid}{$_[ARG1]}; + + # May have been reaped by on_child_close(). + return unless defined $child; + + delete $_[HEAP]{children_by_wid}{$child->ID}; + + #If the child doesn't complete successfully, we lodge an error + #and stop the task. + if ($status != 0){ + my $task = $kernel->call("harvester","get_task"); + if ($task){ + $task->{error} = 1; + my $uuid = $task->{uuid}; + if ($uuid){ + $kernel->call("harvester","stop_task",$uuid); + } + } + } +} + +1; diff --git a/Koha/OAI/Harvester/Worker/Download/Stream.pm b/Koha/OAI/Harvester/Worker/Download/Stream.pm new file mode 100644 index 0000000000..c87580af95 --- /dev/null +++ b/Koha/OAI/Harvester/Worker/Download/Stream.pm @@ -0,0 +1,190 @@ +package Koha::OAI::Harvester::Worker::Download::Stream; + +# Copyright Prosentient Systems 2017 +# +# This file is part of Koha. +# +# Koha is free software; you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation; either version 3 of the License, or (at your option) any later +# version. +# +# Koha is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with Koha; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
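+
+#NOTE: In outline, this download worker forks a child process (via POE::Wheel::Run) that
+#      streams an OAI-PMH response with Koha::OAI::Harvester::Downloader, spools each
+#      downloaded record document to a file under the task's spool directory, and queues
+#      the spooled filenames into oai_harvester_import_queue as JSON in batches
+#      (100 by default) for the import worker to pick up.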
+ +use Modern::Perl; +use LWP::UserAgent; +use UUID; +use POE; +use JSON; +use File::Path qw/make_path/; + +use C4::Context; +use Koha::OAI::Harvester::Downloader; +use parent 'Koha::OAI::Harvester::Worker'; + +sub new { + my ($class, $args) = @_; + $args = {} unless defined $args; + $args->{type} = "download" unless $args->{type}; + return bless ($args, $class); +} + +sub on_start { + my ($self, $kernel, $heap, $postback,$task,$session) = @_[OBJECT, KERNEL, HEAP, ARG0,ARG1,SESSION]; + #Save postback into heap so other event handlers can use it + $heap->{postback} = $postback; + + my $task_uuid = $task->{uuid}; + + $kernel->sig("cancel" => "stop_worker"); + $kernel->call("harvester","register",$self->{type},$task->{uuid}); + + my $child = POE::Wheel::Run->new( + ProgramArgs => [$task], + Program => sub { + my ($args) = @_; + $self->do_work($args); + }, + StdoutEvent => "got_child_stdout", + StderrEvent => "got_child_stderr", + CloseEvent => "got_child_close", + NoSetPgrp => 1, #Keep child processes in same group as parent. This is especially useful when using Ctrl+C to kill the whole group. + ); + + $_[KERNEL]->sig_child($child->PID, "got_child_signal"); + + # Wheel events include the wheel's ID. + $_[HEAP]{children_by_wid}{$child->ID} = $child; + + # Signal events include the process ID. + $_[HEAP]{children_by_pid}{$child->PID} = $child; + + my $logger = $self->{logger}; + if ($logger){ + $logger->debug("Child pid ".$child->PID." started as wheel ".$child->ID); + } +} + +sub do_work { + my ($self, $task) = @_; + my $batch = ( $self->{batch} && int($self->{batch}) ) ? $self->{batch} : 100; + + #NOTE: Directory to spool files for processing + my $spooldir = $task->{spooldir}; + + my $task_uuid = $task->{uuid}; + my $task_parameters = $task->{parameters}; + my $interval = $task->{interval}; + + my $oai_pmh_parameters = $task_parameters->{oai_pmh}; + my $import_parameters = $task_parameters->{import}; + + #NOTE: Overwrite the 'from' and 'until' parameters for repeatable tasks + if ( $interval && ! $oai_pmh_parameters->{until} ){ + if ($oai_pmh_parameters->{verb} eq "ListRecords"){ + #NOTE: 'effective_from' premiers on the first repetition (ie second request) + $oai_pmh_parameters->{from} = $task->{effective_from} if $task->{effective_from}; + #NOTE: 'effective_until' appears on the first request + $oai_pmh_parameters->{until} = $task->{effective_until} if $task->{effective_until}; + } + } + + my $oai_downloader = Koha::OAI::Harvester::Downloader->new(); + my $url = $oai_downloader->BuildURL($oai_pmh_parameters); + + my $ua = LWP::UserAgent->new(); + #NOTE: setup HTTP Basic Authentication if parameters are supplied + if($url && $url->host && $url->port){ + my $http_basic_auth = $task_parameters->{http_basic_auth}; + if ($http_basic_auth){ + my $username = $http_basic_auth->{username}; + my $password = $http_basic_auth->{password}; + my $realm = $http_basic_auth->{realm}; + $ua->credentials($url->host.":".$url->port, $realm, $username, $password); + } + } + + #NOTE: Prepare database statement handle + my $dbh = C4::Context->dbh; + my $sql = "insert into oai_harvester_import_queue (uuid,result) VALUES (?,?)"; + my $sth = $dbh->prepare($sql); + + if($url && $ua){ + #NOTE: You could define the callbacks as object methods instead... that might be nicer... + #although I suppose it might be a much of a muchness. 
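+    #NOTE: Each 'callback' invocation below receives { repository => ..., document => ... } for a
+    #      downloaded XML document, which is spooled to disk; 'complete_callback' runs once at the
+    #      end and flushes any filenames still sitting in @filename_cache to the import queue.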
+ eval { + my @filename_cache = (); + + $oai_downloader->harvest({ + user_agent => $ua, + url => $url, + callback => sub { + my ($args) = @_; + + my $repository = $args->{repository}; + my $document = $args->{document}; + + #If the spooldir has disappeared, re-create it. + if ( ! -d $spooldir ){ + my $made_spool_directory = make_path($spooldir); + } + my ($uuid,$uuid_string); + UUID::generate($uuid); + UUID::unparse($uuid, $uuid_string); + my $file_uuid = $uuid_string; + my $filename = "$spooldir/$file_uuid"; + my $state = $document->toFile($filename, 2); + if ($state){ + push(@filename_cache,$filename); + } + + if(scalar @filename_cache == $batch){ + my $result = { + repository => $repository, + filenames => \@filename_cache, + filter => $import_parameters->{filter}, + matcher_code => $import_parameters->{matcher_code}, + frameworkcode => $import_parameters->{frameworkcode}, + record_type => $import_parameters->{record_type}, + }; + eval { + my $json_result = to_json($result, { pretty => 1 }); + $sth->execute($task_uuid,$json_result); + }; + @filename_cache = (); + } + }, + complete_callback => sub { + my ($args) = @_; + my $repository = $args->{repository}; + if (@filename_cache){ + my $result = { + repository => "$repository", + filenames => \@filename_cache, + filter => $import_parameters->{filter}, + matcher_code => $import_parameters->{matcher_code}, + frameworkcode => $import_parameters->{frameworkcode}, + record_type => $import_parameters->{record_type}, + }; + eval { + my $json_result = to_json($result, { pretty => 1 }); + $sth->execute($task_uuid,$json_result); + }; + } + + }, + }); + }; + if ($@){ + die "Error during OAI-PMH download"; + } + } +} + +1; diff --git a/Koha/OAI/Harvester/Worker/Import.pm b/Koha/OAI/Harvester/Worker/Import.pm new file mode 100644 index 0000000000..ec867368e1 --- /dev/null +++ b/Koha/OAI/Harvester/Worker/Import.pm @@ -0,0 +1,133 @@ +package Koha::OAI::Harvester::Worker::Import; + +# Copyright Prosentient Systems 2017 +# +# This file is part of Koha. +# +# Koha is free software; you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation; either version 3 of the License, or (at your option) any later +# version. +# +# Koha is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with Koha; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
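+
+#NOTE: In outline, this import worker forks a child process (via POE::Wheel::Run) that decodes
+#      the JSON payload queued by the download worker (repository, filenames, filter,
+#      matcher_code, frameworkcode, record_type), imports each spooled file through
+#      Koha::OAI::Harvester::Import::Record, unlinks the file, and finally deletes the
+#      row from oai_harvester_import_queue.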
+ +use Modern::Perl; +use POE qw(Wheel::Run); +use JSON; +use XML::LibXML; + +use C4::Context; +use C4::Matcher; +use Koha::OAI::Harvester::Import::Record; + +use parent 'Koha::OAI::Harvester::Worker'; + +sub new { + my ($class, $args) = @_; + $args = {} unless defined $args; + #NOTE: This type is used for logging and more importantly for registering with the harvester + $args->{type} = "import" unless $args->{type}; + return bless ($args, $class); +} + +sub on_start { + my ($self, $kernel, $heap, $postback,$task) = @_[OBJECT, KERNEL, HEAP, ARG0,ARG1]; + + $kernel->call("harvester","register",$self->{type},$task->{uuid}); + + $kernel->sig(cancel => "stop_worker"); + + my $child = POE::Wheel::Run->new( + ProgramArgs => [ $task ], + Program => sub { + my ($task,$args) = @_; + + my $debug = $args->{debug} // 0; + + if ($task){ + my $json_result = $task->{result}; + my $id = $task->{id}; + my $task_uuid = $task->{uuid}; + eval { + my $result = from_json($json_result); + if ($result){ + my $repository = $result->{repository}; + my $filenames = $result->{filenames}; + my $filter = $result->{filter}; + my $matcher_code = $result->{matcher_code}; + my $frameworkcode = $result->{frameworkcode}; + my $record_type = $result->{record_type}; + + my $matcher; + if ($matcher_code){ + my $matcher_id = C4::Matcher::GetMatcherId($matcher_code); + $matcher = C4::Matcher->fetch($matcher_id); + } + + foreach my $filename (@$filenames){ + if ($filename){ + if (-f $filename){ + my $dom = XML::LibXML->load_xml(location => $filename, { no_blanks => 1 }); + if ($dom){ + my $oai_record = Koha::OAI::Harvester::Import::Record->new({ + doc => $dom, + repository => $repository, + }); + if ($oai_record){ + my ($action,$linked_id) = $oai_record->import_record({ + filter => $filter, + framework => $frameworkcode, + record_type => $record_type, + matcher => $matcher, + }); + $debug && print STDOUT qq({ "import_result": { "task_uuid": "$task_uuid", "action": "$action", "filename": "$filename", "koha_id": "$linked_id" } }\n); + } + } + my $unlinked = unlink $filename; + } + } + } + } + }; + if ($@){ + warn $@; + } + #NOTE: Even if the file doesn't exist, we still need to process the queue item. + + #NOTE: Don't do this via a postback in the parent process, as it's much faster to let the child process handle it. + + #NOTE: It's vital that files are unlinked before deleting from the database, + #or you could get orphan files if the importer is interrupted. + my $dbh = C4::Context->dbh; + my $sql = "delete from oai_harvester_import_queue where id = ?"; + my $sth = $dbh->prepare($sql); + $sth->execute($id); + } + }, + StdoutEvent => "got_child_stdout", + StderrEvent => "got_child_stderr", + CloseEvent => "got_child_close", + NoSetPgrp => 1, #Keep child processes in same group as parent. This is especially useful when using Ctrl+C to kill the whole group. + ); + + $_[KERNEL]->sig_child($child->PID, "got_child_signal"); + + # Wheel events include the wheel's ID. + $_[HEAP]{children_by_wid}{$child->ID} = $child; + + # Signal events include the process ID. + $_[HEAP]{children_by_pid}{$child->PID} = $child; + + my $logger = $self->{logger}; + if ($logger){ + $logger->debug("Child pid ".$child->PID." 
started as wheel ".$child->ID); + } +} + +1; diff --git a/Koha/Schema/Result/OaiHarvesterBiblio.pm b/Koha/Schema/Result/OaiHarvesterBiblio.pm new file mode 100755 index 0000000000..85bb64a20b --- /dev/null +++ b/Koha/Schema/Result/OaiHarvesterBiblio.pm @@ -0,0 +1,120 @@ +use utf8; +package Koha::Schema::Result::OaiHarvesterBiblio; + +# Created by DBIx::Class::Schema::Loader +# DO NOT MODIFY THE FIRST PART OF THIS FILE + +=head1 NAME + +Koha::Schema::Result::OaiHarvesterBiblio + +=cut + +use strict; +use warnings; + +use base 'DBIx::Class::Core'; + +=head1 TABLE: C + +=cut + +__PACKAGE__->table("oai_harvester_biblios"); + +=head1 ACCESSORS + +=head2 import_oai_biblio_id + + data_type: 'integer' + extra: {unsigned => 1} + is_auto_increment: 1 + is_nullable: 0 + +=head2 oai_repository + + data_type: 'varchar' + is_nullable: 0 + size: 255 + +=head2 oai_identifier + + data_type: 'varchar' + is_nullable: 1 + size: 255 + +=head2 biblionumber + + data_type: 'integer' + is_foreign_key: 1 + is_nullable: 0 + +=cut + +__PACKAGE__->add_columns( + "import_oai_biblio_id", + { + data_type => "integer", + extra => { unsigned => 1 }, + is_auto_increment => 1, + is_nullable => 0, + }, + "oai_repository", + { data_type => "varchar", is_nullable => 0, size => 255 }, + "oai_identifier", + { data_type => "varchar", is_nullable => 1, size => 255 }, + "biblionumber", + { data_type => "integer", is_foreign_key => 1, is_nullable => 0 }, +); + +=head1 PRIMARY KEY + +=over 4 + +=item * L + +=back + +=cut + +__PACKAGE__->set_primary_key("import_oai_biblio_id"); + +=head1 UNIQUE CONSTRAINTS + +=head2 C + +=over 4 + +=item * L + +=item * L + +=back + +=cut + +__PACKAGE__->add_unique_constraint("oai_record", ["oai_identifier", "oai_repository"]); + +=head1 RELATIONS + +=head2 biblionumber + +Type: belongs_to + +Related object: L + +=cut + +__PACKAGE__->belongs_to( + "biblionumber", + "Koha::Schema::Result::Biblio", + { biblionumber => "biblionumber" }, + { is_deferrable => 1, on_delete => "CASCADE", on_update => "NO ACTION" }, +); + + +# Created by DBIx::Class::Schema::Loader v0.07046 @ 2017-03-29 12:23:43 +# DO NOT MODIFY THIS OR ANYTHING ABOVE! 
md5sum:2URn8tPABMKC+JuIMfGeYw + + +# You can replace this text with custom code or comments, and it will be preserved on regeneration +1; diff --git a/Koha/Schema/Result/OaiHarvesterHistory.pm b/Koha/Schema/Result/OaiHarvesterHistory.pm new file mode 100755 index 0000000000..3e3d1d57c9 --- /dev/null +++ b/Koha/Schema/Result/OaiHarvesterHistory.pm @@ -0,0 +1,163 @@ +use utf8; +package Koha::Schema::Result::OaiHarvesterHistory; + +# Created by DBIx::Class::Schema::Loader +# DO NOT MODIFY THE FIRST PART OF THIS FILE + +=head1 NAME + +Koha::Schema::Result::OaiHarvesterHistory + +=cut + +use strict; +use warnings; + +use base 'DBIx::Class::Core'; + +=head1 TABLE: C + +=cut + +__PACKAGE__->table("oai_harvester_history"); + +=head1 ACCESSORS + +=head2 import_oai_id + + data_type: 'integer' + extra: {unsigned => 1} + is_auto_increment: 1 + is_nullable: 0 + +=head2 repository + + data_type: 'varchar' + is_nullable: 1 + size: 255 + +=head2 header_identifier + + data_type: 'varchar' + is_nullable: 1 + size: 255 + +=head2 header_datestamp + + data_type: 'datetime' + datetime_undef_if_invalid: 1 + is_nullable: 0 + +=head2 header_status + + data_type: 'varchar' + is_nullable: 1 + size: 45 + +=head2 record + + data_type: 'longtext' + is_nullable: 0 + +=head2 upload_timestamp + + data_type: 'timestamp' + datetime_undef_if_invalid: 1 + default_value: current_timestamp + is_nullable: 0 + +=head2 status + + data_type: 'varchar' + is_nullable: 0 + size: 45 + +=head2 filter + + data_type: 'text' + is_nullable: 0 + +=head2 framework + + data_type: 'varchar' + is_nullable: 0 + size: 4 + +=head2 record_type + + data_type: 'enum' + extra: {list => ["biblio","auth","holdings"]} + is_nullable: 0 + +=head2 matcher_code + + data_type: 'varchar' + is_nullable: 1 + size: 10 + +=cut + +__PACKAGE__->add_columns( + "import_oai_id", + { + data_type => "integer", + extra => { unsigned => 1 }, + is_auto_increment => 1, + is_nullable => 0, + }, + "repository", + { data_type => "varchar", is_nullable => 1, size => 255 }, + "header_identifier", + { data_type => "varchar", is_nullable => 1, size => 255 }, + "header_datestamp", + { + data_type => "datetime", + datetime_undef_if_invalid => 1, + is_nullable => 0, + }, + "header_status", + { data_type => "varchar", is_nullable => 1, size => 45 }, + "record", + { data_type => "longtext", is_nullable => 0 }, + "upload_timestamp", + { + data_type => "timestamp", + datetime_undef_if_invalid => 1, + default_value => \"current_timestamp", + is_nullable => 0, + }, + "status", + { data_type => "varchar", is_nullable => 0, size => 45 }, + "filter", + { data_type => "text", is_nullable => 0 }, + "framework", + { data_type => "varchar", is_nullable => 0, size => 4 }, + "record_type", + { + data_type => "enum", + extra => { list => ["biblio", "auth", "holdings"] }, + is_nullable => 0, + }, + "matcher_code", + { data_type => "varchar", is_nullable => 1, size => 10 }, +); + +=head1 PRIMARY KEY + +=over 4 + +=item * L + +=back + +=cut + +__PACKAGE__->set_primary_key("import_oai_id"); + + +# Created by DBIx::Class::Schema::Loader v0.07046 @ 2017-03-29 12:23:43 +# DO NOT MODIFY THIS OR ANYTHING ABOVE! 
md5sum:Rp/ZEZsKVlLo2vaM3M37ow + + +# You can replace this text with custom code or comments, and it will be preserved on regeneration +1; diff --git a/Koha/Schema/Result/OaiHarvesterImportQueue.pm b/Koha/Schema/Result/OaiHarvesterImportQueue.pm new file mode 100755 index 0000000000..54188e3fcd --- /dev/null +++ b/Koha/Schema/Result/OaiHarvesterImportQueue.pm @@ -0,0 +1,106 @@ +use utf8; +package Koha::Schema::Result::OaiHarvesterImportQueue; + +# Created by DBIx::Class::Schema::Loader +# DO NOT MODIFY THE FIRST PART OF THIS FILE + +=head1 NAME + +Koha::Schema::Result::OaiHarvesterImportQueue + +=cut + +use strict; +use warnings; + +use base 'DBIx::Class::Core'; + +=head1 TABLE: C + +=cut + +__PACKAGE__->table("oai_harvester_import_queue"); + +=head1 ACCESSORS + +=head2 id + + data_type: 'integer' + extra: {unsigned => 1} + is_auto_increment: 1 + is_nullable: 0 + +=head2 uuid + + data_type: 'varchar' + is_nullable: 0 + size: 45 + +=head2 status + + data_type: 'varchar' + default_value: 'new' + is_nullable: 0 + size: 45 + +=head2 result + + data_type: 'text' + is_nullable: 0 + +=head2 result_timestamp + + data_type: 'timestamp' + datetime_undef_if_invalid: 1 + default_value: current_timestamp + is_nullable: 0 + +=cut + +__PACKAGE__->add_columns( + "id", + { + data_type => "integer", + extra => { unsigned => 1 }, + is_auto_increment => 1, + is_nullable => 0, + }, + "uuid", + { data_type => "varchar", is_nullable => 0, size => 45 }, + "status", + { + data_type => "varchar", + default_value => "new", + is_nullable => 0, + size => 45, + }, + "result", + { data_type => "text", is_nullable => 0 }, + "result_timestamp", + { + data_type => "timestamp", + datetime_undef_if_invalid => 1, + default_value => \"current_timestamp", + is_nullable => 0, + }, +); + +=head1 PRIMARY KEY + +=over 4 + +=item * L + +=back + +=cut + +__PACKAGE__->set_primary_key("id"); + + +# Created by DBIx::Class::Schema::Loader v0.07046 @ 2017-03-29 12:23:43 +# DO NOT MODIFY THIS OR ANYTHING ABOVE! 
md5sum:zBD+hMawbvu7sonuLRHnCA + + +# You can replace this text with custom code or comments, and it will be preserved on regeneration +1; diff --git a/Koha/Schema/Result/OaiHarvesterRequest.pm b/Koha/Schema/Result/OaiHarvesterRequest.pm new file mode 100644 index 0000000000..e3222811c6 --- /dev/null +++ b/Koha/Schema/Result/OaiHarvesterRequest.pm @@ -0,0 +1,209 @@ +use utf8; +package Koha::Schema::Result::OaiHarvesterRequest; + +# Created by DBIx::Class::Schema::Loader +# DO NOT MODIFY THE FIRST PART OF THIS FILE + +=head1 NAME + +Koha::Schema::Result::OaiHarvesterRequest + +=cut + +use strict; +use warnings; + +use base 'DBIx::Class::Core'; + +=head1 TABLE: C + +=cut + +__PACKAGE__->table("oai_harvester_requests"); + +=head1 ACCESSORS + +=head2 id + + data_type: 'integer' + extra: {unsigned => 1} + is_auto_increment: 1 + is_nullable: 0 + +=head2 uuid + + data_type: 'varchar' + is_nullable: 0 + size: 45 + +=head2 oai_verb + + data_type: 'varchar' + is_nullable: 0 + size: 45 + +=head2 oai_metadataPrefix + + accessor: 'oai_metadata_prefix' + data_type: 'varchar' + is_nullable: 0 + size: 255 + +=head2 oai_identifier + + data_type: 'varchar' + is_nullable: 1 + size: 255 + +=head2 oai_from + + data_type: 'varchar' + is_nullable: 1 + size: 45 + +=head2 oai_until + + data_type: 'varchar' + is_nullable: 1 + size: 45 + +=head2 oai_set + + data_type: 'varchar' + is_nullable: 1 + size: 255 + +=head2 http_url + + data_type: 'varchar' + is_nullable: 1 + size: 255 + +=head2 http_username + + data_type: 'varchar' + is_nullable: 1 + size: 255 + +=head2 http_password + + data_type: 'varchar' + is_nullable: 1 + size: 255 + +=head2 http_realm + + data_type: 'varchar' + is_nullable: 1 + size: 255 + +=head2 import_filter + + data_type: 'varchar' + is_nullable: 0 + size: 255 + +=head2 import_framework_code + + data_type: 'varchar' + is_nullable: 0 + size: 4 + +=head2 import_record_type + + data_type: 'enum' + extra: {list => ["biblio","auth","holdings"]} + is_nullable: 0 + +=head2 import_matcher_code + + data_type: 'varchar' + is_nullable: 1 + size: 10 + +=head2 interval + + data_type: 'integer' + extra: {unsigned => 1} + is_nullable: 0 + +=head2 name + + data_type: 'varchar' + is_nullable: 0 + size: 45 + +=cut + +__PACKAGE__->add_columns( + "id", + { + data_type => "integer", + extra => { unsigned => 1 }, + is_auto_increment => 1, + is_nullable => 0, + }, + "uuid", + { data_type => "varchar", is_nullable => 0, size => 45 }, + "oai_verb", + { data_type => "varchar", is_nullable => 0, size => 45 }, + "oai_metadataPrefix", + { + accessor => "oai_metadata_prefix", + data_type => "varchar", + is_nullable => 0, + size => 255, + }, + "oai_identifier", + { data_type => "varchar", is_nullable => 1, size => 255 }, + "oai_from", + { data_type => "varchar", is_nullable => 1, size => 45 }, + "oai_until", + { data_type => "varchar", is_nullable => 1, size => 45 }, + "oai_set", + { data_type => "varchar", is_nullable => 1, size => 255 }, + "http_url", + { data_type => "varchar", is_nullable => 1, size => 255 }, + "http_username", + { data_type => "varchar", is_nullable => 1, size => 255 }, + "http_password", + { data_type => "varchar", is_nullable => 1, size => 255 }, + "http_realm", + { data_type => "varchar", is_nullable => 1, size => 255 }, + "import_filter", + { data_type => "varchar", is_nullable => 0, size => 255 }, + "import_framework_code", + { data_type => "varchar", is_nullable => 0, size => 4 }, + "import_record_type", + { + data_type => "enum", + extra => { list => ["biblio", "auth", "holdings"] }, + is_nullable 
=> 0, + }, + "import_matcher_code", + { data_type => "varchar", is_nullable => 1, size => 10 }, + "interval", + { data_type => "integer", extra => { unsigned => 1 }, is_nullable => 0 }, + "name", + { data_type => "varchar", is_nullable => 0, size => 45 }, +); + +=head1 PRIMARY KEY + +=over 4 + +=item * L + +=back + +=cut + +__PACKAGE__->set_primary_key("id"); + + +# Created by DBIx::Class::Schema::Loader v0.07046 @ 2017-04-07 11:26:24 +# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:Vm/b4yurmr8WF7z+Fo6KEw + + +# You can replace this text with custom code or comments, and it will be preserved on regeneration +1; diff --git a/Makefile.PL b/Makefile.PL index 924782f188..f00ad531b1 100644 --- a/Makefile.PL +++ b/Makefile.PL @@ -360,6 +360,9 @@ my $target_map = { './skel/var/lib/koha/zebradb/biblios/tmp' => { target => 'ZEBRA_DATA_DIR', trimdir => 6 }, './skel/var/lock/koha/zebradb/rebuild' => { target => 'ZEBRA_LOCK_DIR', trimdir => 6 }, './skel/var/lib/koha/plugins' => { target => 'PLUGINS_DIR', trimdir => 6 }, + './skel/var/lib/koha/oai-pmh-harvester' => { target => 'OAI_LIB_DIR', trimdir => 6 }, + './skel/var/run/koha/oai-pmh-harvester' => { target => 'OAI_RUN_DIR', trimdir => 6 }, + './skel/var/spool/koha/oai-pmh-harvester' => { target => 'OAI_SPOOL_DIR', trimdir => 6 }, './sms' => 'INTRANET_CGI_DIR', './suggestion' => 'INTRANET_CGI_DIR', './svc' => 'INTRANET_CGI_DIR', @@ -594,6 +597,7 @@ my $pl_files = { 'blib/KOHA_CONF_DIR/koha-conf.xml', 'blib/KOHA_CONF_DIR/koha-httpd.conf', 'blib/KOHA_CONF_DIR/log4perl.conf', + 'blib/KOHA_CONF_DIR/oai-pmh-harvester.yaml', 'blib/ZEBRA_CONF_DIR/etc/default.idx', 'blib/MISC_DIR/koha-install-log' ], @@ -1355,6 +1359,9 @@ sub get_target_directories { $dirmap{'PLUGINS_DIR'} = File::Spec->catdir(@basedir, $package, 'var', 'lib', 'koha', 'plugins'); $dirmap{'ZEBRA_DATA_DIR'} = File::Spec->catdir(@basedir, $package, 'var', 'lib', 'zebradb'); $dirmap{'ZEBRA_RUN_DIR'} = File::Spec->catdir(@basedir, $package, 'var', 'run', 'zebradb'); + $dirmap{'OAI_RUN_DIR'} = File::Spec->catdir(@basedir, $package, 'var', 'run', 'oai-pmh-harvester'); + $dirmap{'OAI_LIB_DIR'} = File::Spec->catdir(@basedir, $package, 'var', 'lib', 'oai-pmh-harvester'); + $dirmap{'OAI_SPOOL_DIR'} = File::Spec->catdir(@basedir, $package, 'var', 'spool', 'oai-pmh-harvester'); } elsif ($mode eq 'dev') { my $curdir = File::Spec->rel2abs(File::Spec->curdir()); $dirmap{'API_CGI_DIR'} = File::Spec->catdir($curdir, 'api'); @@ -1390,6 +1397,10 @@ sub get_target_directories { $dirmap{'PLUGINS_DIR'} = File::Spec->catdir(@basedir, $package, 'var', 'lib', 'plugins'); $dirmap{'ZEBRA_DATA_DIR'} = File::Spec->catdir(@basedir, $package, 'var', 'lib', 'zebradb'); $dirmap{'ZEBRA_RUN_DIR'} = File::Spec->catdir(@basedir, $package, 'var', 'run', 'zebradb'); + $dirmap{'OAI_RUN_DIR'} = File::Spec->catdir(@basedir, $package, 'var', 'run', 'oai-pmh-harvester'); + $dirmap{'OAI_LIB_DIR'} = File::Spec->catdir(@basedir, $package, 'var', 'lib', 'oai-pmh-harvester'); + $dirmap{'OAI_SPOOL_DIR'} = File::Spec->catdir(@basedir, $package, 'var', 'spool', 'oai-pmh-harvester'); + } else { # mode is standard, i.e., 'fhs' $dirmap{'API_CGI_DIR'} = File::Spec->catdir(@basedir, $package, 'api'); @@ -1414,6 +1425,9 @@ sub get_target_directories { $dirmap{'PLUGINS_DIR'} = File::Spec->catdir(File::Spec->rootdir(), 'var', 'lib', $package, 'plugins'); $dirmap{'ZEBRA_DATA_DIR'} = File::Spec->catdir(File::Spec->rootdir(), 'var', 'lib', $package, 'zebradb'); $dirmap{'ZEBRA_RUN_DIR'} = File::Spec->catdir(File::Spec->rootdir(), 'var', 'run', $package, 
'zebradb'); + $dirmap{'OAI_RUN_DIR'} = File::Spec->catdir(File::Spec->rootdir(), 'var', 'run', $package, 'oai-pmh-harvester'); + $dirmap{'OAI_LIB_DIR'} = File::Spec->catdir(File::Spec->rootdir(), 'var', 'lib', $package, 'oai-pmh-harvester'); + $dirmap{'OAI_SPOOL_DIR'} = File::Spec->catdir(File::Spec->rootdir(), 'var', 'spool', $package, 'oai-pmh-harvester'); } _get_env_overrides(\%dirmap); diff --git a/debian/scripts/koha-create b/debian/scripts/koha-create index 874c7d1c79..c5f0e20bfd 100755 --- a/debian/scripts/koha-create +++ b/debian/scripts/koha-create @@ -113,6 +113,9 @@ generate_config_file() { -e "s/__PLUGINS_DIR__/\/var\/lib\/koha\/$name\/plugins/g" \ -e "s/__MEMCACHED_NAMESPACE__/$MEMCACHED_NAMESPACE/g" \ -e "s/__MEMCACHED_SERVERS__/$MEMCACHED_SERVERS/g" \ + -e "s/__OAI_RUN_DIR__/\/var\/run\/koha\/$name\/oai-pmh-harvester/g" \ + -e "s/__OAI_LIB_DIR__/\/var\/lib\/koha\/$name\/oai-pmh-harvester/g" \ + -e "s/__OAI_SPOOL_DIR__/\/var\/spool\/koha\/$name\/oai-pmh-harvester/g" \ "/etc/koha/$1" > "$2" } @@ -652,6 +655,10 @@ eof generate_config_file zebra.passwd.in \ "/etc/koha/sites/$name/zebra.passwd" + # Generate and install OAI-PMH harvester config file + generate_config_file oai-pmh-harvester.yaml.in \ + "/etc/koha/sites/$name/oai-pmh-harvester.yaml" + # Create a GPG-encrypted file for requesting a DB to be set up. if [ "$op" = request ] then diff --git a/debian/scripts/koha-create-dirs b/debian/scripts/koha-create-dirs index 822d808db4..bbea3395a0 100755 --- a/debian/scripts/koha-create-dirs +++ b/debian/scripts/koha-create-dirs @@ -56,11 +56,14 @@ do userdir "$name" "/var/lib/koha/$name/plugins" userdir "$name" "/var/lib/koha/$name/uploads" userdir "$name" "/var/lib/koha/$name/tmp" + userdir "$name" "/var/lib/koha/$name/oai-pmh-harvester" userdir "$name" "/var/lock/koha/$name" userdir "$name" "/var/lock/koha/$name/authorities" userdir "$name" "/var/lock/koha/$name/biblios" userdir "$name" "/var/run/koha/$name" userdir "$name" "/var/run/koha/$name/authorities" userdir "$name" "/var/run/koha/$name/biblios" + userdir "$name" "/var/run/koha/$name/oai-pmh-harvester" + userdir "$name" "/var/spool/koha/$name/oai-pmh-harvester" done diff --git a/debian/templates/koha-conf-site.xml.in b/debian/templates/koha-conf-site.xml.in index e6982bb27c..1d547f1d57 100644 --- a/debian/templates/koha-conf-site.xml.in +++ b/debian/templates/koha-conf-site.xml.in @@ -298,6 +298,7 @@ __END_SRU_PUBLICSERVER__ 1 /etc/koha/searchengine/queryparser.yaml __KOHA_CONF_DIR__/log4perl.conf + /etc/koha/sites/__KOHASITE__/oai-pmh-harvester.yaml + Set + From + Until + Interval + + + + + + [% IF ( saved_requests ) %] + [% FOREACH saved_request IN saved_requests %] + + [% saved_request.name %] + [% saved_request.http_url %] + + [% saved_request.oai_set %] + [% saved_request.oai_from %] + [% saved_request.oai_until %] + [% saved_request.interval %] + + + + + + [% END %] + [% END %] + + + +
+
+ +
+ + + + + + + + + + + + + + +
IdRepositoryIdentifierDatestampUpstream statusImport statusImport timestampImported recordDownloaded record
+ +
+ + + +
+ [% INCLUDE 'tools-menu.inc' %] +
+ +[% INCLUDE 'intranet-bottom.inc' %] diff --git a/koha-tmpl/intranet-tmpl/prog/en/modules/tools/oai-pmh-harvester/record.tt b/koha-tmpl/intranet-tmpl/prog/en/modules/tools/oai-pmh-harvester/record.tt new file mode 100644 index 0000000000..4b2d217356 --- /dev/null +++ b/koha-tmpl/intranet-tmpl/prog/en/modules/tools/oai-pmh-harvester/record.tt @@ -0,0 +1,23 @@ +[% INCLUDE 'doc-head-open.inc' %] +Koha › Tools › OAI-PMH harvester › Downloaded record +[% INCLUDE 'doc-head-close.inc' %] + + +[% INCLUDE 'header.inc' %] +[% INCLUDE 'cat-search.inc' %] + +
+
+
+
+

Downloaded record

+ [% IF ( record ) %] +
[% record | xml %]
+ [% END %] +
+
+
+ [% INCLUDE 'tools-menu.inc' %] +
+
+[% INCLUDE 'intranet-bottom.inc' %] diff --git a/koha-tmpl/intranet-tmpl/prog/en/modules/tools/oai-pmh-harvester/request.tt b/koha-tmpl/intranet-tmpl/prog/en/modules/tools/oai-pmh-harvester/request.tt new file mode 100644 index 0000000000..ca2d98dfbc --- /dev/null +++ b/koha-tmpl/intranet-tmpl/prog/en/modules/tools/oai-pmh-harvester/request.tt @@ -0,0 +1,241 @@ +[% INCLUDE 'doc-head-open.inc' %] +Koha › Tools › OAI-PMH harvester › Request +[% INCLUDE 'doc-head-close.inc' %] +[% INCLUDE 'calendar.inc' %] + +[% INCLUDE 'timepicker.inc' %] + + + + +[% INCLUDE 'header.inc' %] +[% INCLUDE 'cat-search.inc' %] + +
+
+
+
+ [% IF ( op == "edit" ) %] +

Edit OAI-PMH request

+ [% ELSE %] +

New OAI-PMH request

+ [% END %] + [% IF ( test_parameters ) %] + [% IF ( errors.size ) %] +
Tests failed!
+ [% ELSE %] +
Tests succeeded!
+ [% END %] + [% END %] +
+ [% IF ( op == "new" ) %] + + [% ELSIF ( op == "edit" ) %] + + [% ELSE %] + + [% END %] + [% IF ( id ) %] + + [% END %] +
+
    +
  1. + + + This is just a short name to help in managing requests. +
  2. +
+
+
+ HTTP parameters: +
    +
  1. + + + [% IF (errors.http_url.malformed) %][This must be a properly formatted HTTP or HTTPS URL.][% END %] + [% IF (errors.http.404) %][Cannot find address specified by this URL.][% END %] + [% IF (errors.http.401) %][Permission denied to access this URL.][% END %] + [% IF (errors.http.generic) %][Unable to access this URL.][% END %] +
  2. +
+ The following parameters are not required by all OAI-PMH repositories, so they may not be needed for this task. +
    +
  1. + + +
  2. +
  3. + + +
  4. +
  5. + + +
  6. +
+
+
+ OAI-PMH parameters: +
    +
  1. + + +
  2. +
  3. + + + [% IF (errors.oai_metadataPrefix.unavailable) %][This metadataPrefix is unavailable from this OAI-PMH provider.][% END %] + [% IF (errors.oai_metadataPrefix.missing) %][metadataPrefix is a required field for an OAI-PMH request.][% END %] +
  4. +
  5. + + + [% IF (errors.oai_identifier.missing) %][Identifier is a required field when using GetRecord for an OAI-PMH request.][% END %] +
  6. +
  7. + + + [% IF (errors.oai_set.unavailable) %][This set is unavailable from this OAI-PMH provider.][% END %] +
  8. + [% IF (errors.oai.granularity_mismatch) %][You must specify the same granularity for both From and Until.][% END %] +
  9. + + + This value will be treated as UTC time. Note that some repositories only support YYYY-MM-DD datestamps. + [% IF (errors.oai_from.malformed) %][This must be in YYYY-MM-DD or YYYY-MM-DDThh:mm:ssZ format.][% END %] + [% IF (errors.oai_from.unavailable) %][This granularity is unsupported by this OAI-PMH provider.][% END %] +
  10. +
  11. + + + This value will be treated as UTC time. Note that some repositories only support YYYY-MM-DD datestamps. + [% IF (errors.oai_until.malformed) %][This must be in YYYY-MM-DD or YYYY-MM-DDThh:mm:ssZ format.][% END %] + [% IF (errors.oai_until.unavailable) %][This granularity is unsupported by this OAI-PMH provider.][% END %] +
  12. +
+
+
+ Import parameters: +
    +
  1. + + [% IF ( oai_pmh_request.import_filter == "default" ) %] + + [% ELSE %] + + [% END %] + If no filter is entered, the default filter will be used. +
  2. +
  3. + + +
  4. +
  5. + + +
  6. +
  7. + + + See record matching rules to add or edit rules. +
  8. +
+
+
+ Download parameters: +
    +
  1. + + + The download request will be repeated in intervals of this many seconds. Enter "0" if you want the task to only happen once. +
  2. +
+
+
+ + + Cancel +
+
+
+
+
+ [% INCLUDE 'tools-menu.inc' %] +
+
+[% INCLUDE 'intranet-bottom.inc' %] diff --git a/koha-tmpl/intranet-tmpl/prog/en/modules/tools/tools-home.tt b/koha-tmpl/intranet-tmpl/prog/en/modules/tools/tools-home.tt index a035ee146b..3229ae5218 100644 --- a/koha-tmpl/intranet-tmpl/prog/en/modules/tools/tools-home.tt +++ b/koha-tmpl/intranet-tmpl/prog/en/modules/tools/tools-home.tt @@ -195,6 +195,11 @@
Utility to upload scanned cover images for display in OPAC
[% END %] + [% IF ( CAN_user_tools_manage_staged_marc ) %] +
OAI-PMH harvester
+
Harvest (i.e. download and import) records from remote sources using the OAI-PMH protocol
+ [% END %] +
diff --git a/koha-tmpl/intranet-tmpl/prog/en/xslt/StripOAIPMH.xsl b/koha-tmpl/intranet-tmpl/prog/en/xslt/StripOAIPMH.xsl new file mode 100755 index 0000000000..3bb2c175f2 --- /dev/null +++ b/koha-tmpl/intranet-tmpl/prog/en/xslt/StripOAIPMH.xsl @@ -0,0 +1,28 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/misc/cronjobs/cleanup_database.pl b/misc/cronjobs/cleanup_database.pl index 3e6c504532..ec0e60fd94 100755 --- a/misc/cronjobs/cleanup_database.pl +++ b/misc/cronjobs/cleanup_database.pl @@ -26,6 +26,7 @@ use constant DEFAULT_LOGS_PURGEDAYS => 180; use constant DEFAULT_SEARCHHISTORY_PURGEDAYS => 30; use constant DEFAULT_SHARE_INVITATION_EXPIRY_DAYS => 14; use constant DEFAULT_DEBARMENTS_PURGEDAYS => 30; +use constant DEFAULT_IMPORTOAI_PURGEDAYS => 30; BEGIN { # find Koha's Perl modules @@ -44,7 +45,7 @@ use Koha::UploadedFiles; sub usage { print STDERR < \$help, @@ -132,6 +136,7 @@ GetOptions( 'temp-uploads-days:i' => \$temp_uploads_days, 'uploads-missing:i' => \$uploads_missing, 'oauth-tokens' => \$oauth_tokens, + 'importoai:i' => \$importoai_days, ) || usage(1); # Use default values @@ -143,6 +148,7 @@ $mail = DEFAULT_MAIL_PURGEDAYS if defined($mail) $pSearchhistory = DEFAULT_SEARCHHISTORY_PURGEDAYS if defined($pSearchhistory) && $pSearchhistory == 0; $pListShareInvites = DEFAULT_SHARE_INVITATION_EXPIRY_DAYS if defined($pListShareInvites) && $pListShareInvites == 0; $pDebarments = DEFAULT_DEBARMENTS_PURGEDAYS if defined($pDebarments) && $pDebarments == 0; +$importoai_days = DEFAULT_IMPORTOAI_PURGEDAYS if defined($importoai_days) && $importoai_days == 0; if ($help) { usage(0); @@ -166,6 +172,7 @@ unless ( $sessions || $temp_uploads || defined $uploads_missing || $oauth_tokens + || $importoai_days ) { print "You did not specify any cleanup work for the script to do.\n\n"; usage(1); @@ -344,6 +351,12 @@ if ($oauth_tokens) { say "Removed $count expired OAuth2 tokens" if $verbose; } +if ($importoai_days){ + my $sql = "DELETE FROM import_oai WHERE date(upload_timestamp) < (date_sub(curdate(), INTERVAL ? DAY))"; + my $sth = $dbh->prepare($sql); + $sth->execute($importoai_days) or die $dbh->errstr; +} + exit(0); sub RemoveOldSessions { diff --git a/misc/harvesterd.pl b/misc/harvesterd.pl new file mode 100755 index 0000000000..84e3984d06 --- /dev/null +++ b/misc/harvesterd.pl @@ -0,0 +1,207 @@ +#!/usr/bin/perl + +# Copyright 2017 Prosentient Systems +# +# This file is part of Koha. +# +# Koha is free software; you can redistribute it and/or modify it +# under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 3 of the License, or +# (at your option) any later version. +# +# Koha is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Koha; if not, see . 
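+
+#NOTE: Configuration is read first from the YAML file named by the oai_pmh_harvester_config
+#      entry in koha-conf.xml; any command-line options given below override the values
+#      taken from that file.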
+ +use Modern::Perl; +use Getopt::Long; +use Pod::Usage; +use Module::Load; +use Log::Log4perl qw(:easy); +use POE; +use YAML; + +use C4::Context; +use Koha::Daemon; +use Koha::OAI::Harvester; +use Koha::OAI::Harvester::Listener; + +binmode(STDOUT,':encoding(UTF-8)'); +$|++; + +my $help = 0; +my $daemonize = 0; + +my ($socket_addr,$pidfile,$statefile,$spooldir); +my ($download_module,$import_module); +my ($batch,$download_workers,$import_workers,$import_poll); +my ($logfile,$log_level); +my $log_levels = { + FATAL => $FATAL, + ERROR => $ERROR, + WARN => $WARN, + INFO => $INFO, + DEBUG => $DEBUG, + TRACE => $TRACE, +}; + +my $context = C4::Context->new(); +my $config_filename = $context->{config}->{oai_pmh_harvester_config}; +if ($config_filename){ + my $config = YAML::LoadFile($config_filename); + if ($config){ + $socket_addr = $config->{socket}; + $pidfile = $config->{pidfile}; + $statefile = $config->{statefile}; + $spooldir = $config->{spooldir}; + $logfile = $config->{logfile}; + $log_level = $config->{loglevel}; + $download_module = $config->{download_module}; + $import_module = $config->{import_module}; + $batch = $config->{download_batch}; + $download_workers = $config->{download_workers}; + $import_workers = $config->{import_workers}; + $import_poll = $config->{import_poll}; + } +} + +GetOptions( + "help|?" => \$help, + "daemon" => \$daemonize, + "socket-uri=s" => \$socket_addr, + "pid-file=s" => \$pidfile, + "state-file=s" => \$statefile, + "spool-dir=s" => \$spooldir, + "log-file=s" => \$logfile, + "log-level=s" => \$log_level, + "download-module=s" => \$download_module, + "import-module=s" => \$import_module, +) or pod2usage(2); +pod2usage(1) if $help; + +my $level = ( $log_level && $log_levels->{$log_level} ) ? $log_levels->{$log_level} : $log_levels->{WARN}; +Log::Log4perl->easy_init( + { + level => $level, + file => "STDOUT", + layout => '[%d{yyyy-MM-dd HH:mm:ss}][%p] %m%n', + } +); +my $logger = Log::Log4perl->get_logger(); + +unless($download_module){ + $download_module = "Koha::OAI::Harvester::Worker::Download::Stream"; +} +unless($import_module){ + $import_module = "Koha::OAI::Harvester::Worker::Import"; +} + +foreach my $module ( $download_module, $import_module ){ + load $module; +} +my $downloader = $download_module->new({ + logger => $logger, + batch => $batch, +}); +my $importer = $import_module->new({ + logger => $logger, +}); + +my $daemon = Koha::Daemon->new({ + pidfile => $pidfile, + logfile => $logfile, + daemonize => $daemonize, +}); +$daemon->run(); + +my $harvester = Koha::OAI::Harvester->spawn({ + Downloader => $downloader, + DownloaderWorkers => $download_workers, + Importer => $importer, + ImporterWorkers => $import_workers, + ImportQueuePoll => $import_poll, + logger => $logger, + state_file => $statefile, + spooldir => $spooldir, +}); + +my $listener = Koha::OAI::Harvester::Listener->spawn({ + logger => $logger, + socket => $socket_addr, +}); + +$logger->info("OAI-PMH harvester started."); + +POE::Kernel->run(); + +exit; + +=head1 NAME + +harvesterd.pl - a daemon that asynchronously sends OAI-PMH requests and imports OAI-PMH records + +=head1 SYNOPSIS + +KOHA_CONF=/path/to/koha-conf.xml ./harvesterd.pl + +=head1 OPTIONS + +=over 8 + +=item B<--help> + +Print a brief help message and exits. + +=item B<--daemon> + +Run program as a daemon (ie fork process, setsid, chdir to root, reset umask, +and close STDIN, STDOUT, and STDERR). + +=item B<--log-file> + +Specify a file to which to log STDOUT and STDERR. 
+ +=item B<--pid-file> + +Specify a file to store the process id (this prevents multiple copies of the program +from running at the same time). + +=item B<--socket-uri> + +Specify a URI to use for the UNIX socket used to communicate with the daemon. +(e.g. unix:/path/to/socket.sock) + +=item B<--state-file> + +Specify a filename to use for storing the harvester's in-memory state. + +In the event that the harvester crashes, it can resume from where it stopped. + +=item B<--spool-dir> + +Specify a directory to store downloaded OAI-PMH records prior to import. + +=item B<--log-level> + +Specify a log level for logging. The logger uses Log4Perl, which provides +FATAL, ERROR, WARN, INFO, DEBUG, and TRACE in order of descending priority. + +Defaults to WARN level. + +=item B<--download-module> + +Specify a Perl module to use for downloading records. This is a specialty module, +which has particular requirements, so only advanced users should use this option. + +=item B<--import-module> + +Specify a Perl module to use for importing records. This is a specialty module, +which has particular requirements, so only advanced users should use this option. + +=back + +=cut diff --git a/rewrite-config.PL b/rewrite-config.PL index 8b886847f3..219811bd2b 100644 --- a/rewrite-config.PL +++ b/rewrite-config.PL @@ -152,6 +152,9 @@ $prefix = $ENV{'INSTALL_BASE'} || "/usr"; "__MEMCACHED_NAMESPACE__" => "", "__FONT_DIR__" => "/usr/share/fonts/truetype/ttf-dejavu", "__TEMPLATE_CACHE_DIR__" => "/tmp/koha" + "__OAI_RUN_DIR__" => "", + "__OAI_LIB_DIR__" => "", + "__OAI_SPOOL_DIR__" => "", ); # Override configuration from the environment diff --git a/skel/var/lib/koha/oai-pmh-harvester/README b/skel/var/lib/koha/oai-pmh-harvester/README new file mode 100644 index 0000000000..e69de29bb2 diff --git a/skel/var/run/koha/oai-pmh-harvester/README b/skel/var/run/koha/oai-pmh-harvester/README new file mode 100644 index 0000000000..e69de29bb2 diff --git a/skel/var/spool/koha/oai-pmh-harvester/README b/skel/var/spool/koha/oai-pmh-harvester/README new file mode 100644 index 0000000000..e69de29bb2 diff --git a/svc/oai-pmh-harvester/history b/svc/oai-pmh-harvester/history new file mode 100755 index 0000000000..851e8a56bf --- /dev/null +++ b/svc/oai-pmh-harvester/history @@ -0,0 +1,132 @@ +#!/usr/bin/perl + +# Copyright 2017 Prosentient Systems +# +# This file is part of Koha. +# +# Koha is free software; you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation; either version 3 of the License, or (at your option) any later +# version. +# +# Koha is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with Koha; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
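+
+#NOTE: This service backs the history table on the OAI-PMH harvester dashboard. It expects a
+#      DataTables-style server-side processing request (draw, start, length, search, order,
+#      columns) POSTed as JSON and replies with JSON containing draw, recordsTotal,
+#      recordsFiltered, and the matching rows from oai_harvester_history.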
+ +use Modern::Perl; +use C4::Auth qw(check_cookie_auth haspermission get_session); +use JSON; +use Koha::Database; + +my $input = new CGI; + +my ( $auth_status, $sessionID ) = + check_cookie_auth( $input->cookie('CGISESSID')); + +if ( $auth_status ne "ok" ) { + exit 0; +} + +my $data = { + data => [], + recordsTotal => 0, + recordsFiltered => 0, + draw => undef, +}; + +my $length = 10; +my $start = 0; +my @order_by = (); +my @search = (); + +if ($input->request_method eq "POST"){ + my $postdata = $input->param('POSTDATA'); + my $request = from_json($postdata); + $data->{draw} = int( $request->{draw} ) if $request->{draw}; + $length = $request->{length} if $request->{length}; + $start = $request->{start} if $request->{start}; + if (my $search = $request->{search}){ + my $value = $search->{value}; + if ($value){ + foreach my $column (@{$request->{columns}}){ + if ($column->{data} && $column->{searchable}){ + my $search_element = { + $column->{data} => { 'like', "%".$value."%" }, + }; + push(@search,$search_element); + } + } + } + } + if (my $order = $request->{order}){ + foreach my $element (@$order){ + my $dir = $element->{dir}; + my $column_index = $element->{column}; + my $column = $request->{columns}->[$column_index]; + my $orderable = $column->{orderable}; + if ($orderable){ + my $column_name = $column->{data}; + my $direction; + if ($dir){ + if ($dir eq "asc" || $dir eq "desc"){ + $direction = "-$dir"; + } + } + if ($column_name && $direction){ + my $single_order = { + $direction => $column_name, + }; + push(@order_by,$single_order); + } + } + } + } +} + +my $page = ( $start / $length ) + 1; +my $schema = Koha::Database->new()->schema(); +if ($schema){ + my $rs = $schema->resultset("OaiHarvesterHistory"); + my $results = $rs->search( + \@search, + { + result_class => 'DBIx::Class::ResultClass::HashRefInflator', + page => $page, + rows => $length, + order_by => \@order_by, + }, + ); + my $count = $rs->count; + my $filtered_count = $results->pager->total_entries; + my @rows = (); + while (my $row = $results->next){ + $row->{imported_record} = ''; + if ($row->{record_type} eq "biblio"){ + my $harvested_biblio = $schema->resultset("OaiHarvesterBiblio")->find( + { + oai_repository => $row->{repository}, + oai_identifier => $row->{header_identifier}, + }, + { key => "oai_record" }, + ); + $row->{imported_record} = $harvested_biblio->biblionumber->id if $harvested_biblio; + } + push(@rows,$row); + } + if ($count){ + $data->{recordsTotal} = $count; + $data->{recordsFiltered} = $filtered_count; + $data->{data} = \@rows if @rows; + } +} + +binmode STDOUT, ":encoding(UTF-8)"; +print $input->header( + -type => 'application/json', + -charset => 'UTF-8' +); +print to_json($data, { pretty => 1, }); diff --git a/tools/oai-pmh-harvester/dashboard.pl b/tools/oai-pmh-harvester/dashboard.pl new file mode 100755 index 0000000000..16dd15f3f5 --- /dev/null +++ b/tools/oai-pmh-harvester/dashboard.pl @@ -0,0 +1,134 @@ +#!/usr/bin/perl + +# Copyright 2017 Prosentient Systems +# +# This file is part of Koha. +# +# Koha is free software; you can redistribute it and/or modify it +# under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 3 of the License, or +# (at your option) any later version. +# +# Koha is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. 
+# +# You should have received a copy of the GNU General Public License +# along with Koha; if not, see . + +use Modern::Perl; +use YAML; + +use C4::Auth; +use C4::Context; +use C4::Output; +use Koha::OAI::Harvester::Client; +use Koha::OAI::Harvester::Requests; +use Koha::BiblioFrameworks; +use Koha::Database; + +my $context = C4::Context->new(); +my $config_filename = $context->{config}->{oai_pmh_harvester_config}; +my $client_config = {}; +if ($config_filename){ + my $config = YAML::LoadFile($config_filename); + if ($config && $config->{socket}){ + $client_config->{socket_uri} = $config->{socket}; + } +} + +my $input = new CGI; + +my ($template, $loggedinuser, $cookie) = + get_template_and_user({template_name => "tools/oai-pmh-harvester/dashboard.tt", + query => $input, + type => "intranet", + authnotrequired => 0, + flagsrequired => {tools => 'manage_staged_marc'}, + }); + +my $op = $input->param('op') // 'list'; +my $id = $input->param('id'); +my $uuid = $input->param('uuid'); + +my $client = Koha::OAI::Harvester::Client->new($client_config); +my $is_connected = $client->connect; + +if ( ($op eq "send") && $id ){ + if ($is_connected){ + my $request = Koha::OAI::Harvester::Requests->find($id); + if ($request){ + my $task = { + name => $request->name, + uuid => $request->uuid, + interval => $request->interval, + parameters => { + oai_pmh => { + baseURL => $request->http_url, + verb => $request->oai_verb, + metadataPrefix => $request->oai_metadataPrefix, + identifier => $request->oai_identifier, + set => $request->oai_set, + from => $request->oai_from, + until => $request->oai_until, + }, + import => { + filter => $request->import_filter, + frameworkcode => $request->import_framework_code, + matcher_code => $request->import_matcher_code, + record_type => $request->import_record_type, + }, + }, + }; + if ($request->http_username && $request->http_password && $request->http_realm){ + $task->{parameters}->{http_basic_auth} = { + username => $request->http_username, + password => $request->http_password, + realm => $request->http_realm, + }; + } + my $is_created = $client->create($task); + $template->{VARS}->{ result }->{ send } = $is_created; + } + } +} +elsif ( ($op eq "start") && ($uuid) ){ + if ($is_connected){ + my $is_started = $client->start($uuid); + $template->{VARS}->{ result }->{ start } = $is_started; + } +} +elsif ( ($op eq "stop") && ($uuid) ){ + if ($is_connected){ + my $is_stopped = $client->stop($uuid); + $template->{VARS}->{ result }->{ stop } = $is_stopped; + } +} +elsif ( ($op eq "delete") && ($uuid) ){ + if ($is_connected){ + my $is_deleted = $client->delete($uuid); + $template->{VARS}->{ result }->{ delete } = $is_deleted; + } +} + +my $requests = Koha::OAI::Harvester::Requests->as_list; +$template->{VARS}->{ saved_requests } = $requests; + +my $frameworks = Koha::BiblioFrameworks->as_list(); +$template->{VARS}->{ frameworks } = $frameworks; + +my $schema = Koha::Database->new()->schema(); +my $matcher_rs = $schema->resultset("MarcMatcher"); +my @matchers = $matcher_rs->all; +$template->{VARS}->{ matchers } = \@matchers; + +if ($is_connected){ + my $submitted_requests = $client->list; + $template->{VARS}->{ submitted_requests } = $submitted_requests; +} +else { + $template->{VARS}->{ harvester }->{ offline } = 1; +} + +output_html_with_http_headers($input, $cookie, $template->output); \ No newline at end of file diff --git a/tools/oai-pmh-harvester/record.pl b/tools/oai-pmh-harvester/record.pl new file mode 100755 index 0000000000..fd2375cb09 --- /dev/null +++ 
@@ -0,0 +1,53 @@
+#!/usr/bin/perl
+
+# Copyright 2017 Prosentient Systems
+#
+# This file is part of Koha.
+#
+# Koha is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 3 of the License, or
+# (at your option) any later version.
+#
+# Koha is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Koha; if not, see <http://www.gnu.org/licenses>.
+
+use Modern::Perl;
+use CGI qw( -utf8 );
+use C4::Auth;
+use C4::Output;
+use Koha::Database;
+
+my $input = CGI->new;
+
+my ($template, $loggedinuser, $cookie) =
+    get_template_and_user({template_name => "tools/oai-pmh-harvester/record.tt",
+                           query => $input,
+                           type => "intranet",
+                           authnotrequired => 0,
+                           flagsrequired => {tools => 'manage_staged_marc'},
+                           });
+
+my $import_oai_id = $input->param('import_oai_id');
+if ($import_oai_id){
+    my $schema = Koha::Database->new()->schema();
+    if ($schema){
+        my $rs = $schema->resultset("OaiHarvesterHistory");
+        if ($rs){
+            my $row = $rs->find($import_oai_id);
+            if ($row){
+                my $record = $row->record;
+                if ($record){
+                    $template->{VARS}->{ record } = $record;
+                }
+            }
+        }
+    }
+}
+
+output_html_with_http_headers($input, $cookie, $template->output);
diff --git a/tools/oai-pmh-harvester/request.pl b/tools/oai-pmh-harvester/request.pl
new file mode 100755
index 0000000000..4d80b1aa36
--- /dev/null
+++ b/tools/oai-pmh-harvester/request.pl
@@ -0,0 +1,142 @@
+#!/usr/bin/perl
+
+# Copyright 2017 Prosentient Systems
+#
+# This file is part of Koha.
+#
+# Koha is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 3 of the License, or
+# (at your option) any later version.
+#
+# Koha is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Koha; if not, see <http://www.gnu.org/licenses>.
+
+use Modern::Perl;
+use CGI qw( -utf8 );
+use UUID;
+use C4::Auth;
+use C4::Output;
+use Koha::OAI::Harvester::Requests;
+use Koha::BiblioFrameworks;
+use Koha::Database;
+
+my $input = CGI->new;
+
+my ($template, $loggedinuser, $cookie) =
+    get_template_and_user({template_name => "tools/oai-pmh-harvester/request.tt",
+                           query => $input,
+                           type => "intranet",
+                           authnotrequired => 0,
+                           flagsrequired => {tools => 'manage_staged_marc'},
+                           });
+
+my $op = $input->param('op') // '';
+my $id = $input->param('id');
+
+my @frameworks = Koha::BiblioFrameworks->as_list();
+$template->{VARS}->{ frameworks } = \@frameworks;
+
+my $schema = Koha::Database->new()->schema();
+my $rs = $schema->resultset("MarcMatcher");
+my @matchers = $rs->all;
+$template->{VARS}->{ matchers } = \@matchers;
+
+my $http_url = $input->param('http_url');
+my $http_username = $input->param('http_username');
+my $http_password = $input->param('http_password');
+my $http_realm = $input->param('http_realm');
+
+my $oai_verb = $input->param('oai_verb');
+my $oai_metadataPrefix = $input->param('oai_metadataPrefix');
+my $oai_identifier = $input->param('oai_identifier');
+my $oai_from = $input->param('oai_from');
+my $oai_until = $input->param('oai_until');
+my $oai_set = $input->param('oai_set');
+
+my $import_filter = $input->param('import_filter') // 'default';
+my $import_framework_code = $input->param('import_framework_code');
+my $import_record_type = $input->param('import_record_type');
+my $import_matcher_code = $input->param('import_matcher_code');
+
+my $interval = $input->param("interval") ? int ( $input->param("interval") ) : 0;
+my $name = $input->param("name");
+
+my $save = $input->param('save');
+my $test_parameters = $input->param('test_parameters');
+
+my $request = $id ? Koha::OAI::Harvester::Requests->find($id) : Koha::OAI::Harvester::Request->new();
+if ($request){
+    if ($op eq "create" || $op eq "update"){
+        $request->set({
+            name => $name,
+            http_url => $http_url,
+            http_username => $http_username,
+            http_password => $http_password,
+            http_realm => $http_realm,
+            oai_verb => $oai_verb,
+            oai_metadataPrefix => $oai_metadataPrefix,
+            oai_identifier => $oai_identifier,
+            oai_from => $oai_from,
+            oai_until => $oai_until,
+            oai_set => $oai_set,
+            import_filter => $import_filter,
+            import_framework_code => $import_framework_code,
+            import_record_type => $import_record_type,
+            import_matcher_code => $import_matcher_code,
+            interval => $interval,
+        });
+    }
+}
+
+if ($test_parameters){
+    my $errors = $request->validate();
+    $template->{VARS}->{ errors } = $errors;
+    $template->{VARS}->{ test_parameters } = 1;
+}
+
+if ($op eq "new"){
+    #Empty form with some defaults
+    $request->import_filter("default") unless $request->import_filter;
+    $request->interval(0) unless $request->interval;
+}
+elsif ($op eq "create"){
+    if ($save){
+        my ($uuid,$uuid_string);
+        UUID::generate($uuid);
+        UUID::unparse($uuid, $uuid_string);
+        $request->uuid($uuid_string);
+        $request->store;
+        print $input->redirect('/cgi-bin/koha/tools/oai-pmh-harvester/dashboard.pl#saved_results');
+        exit;
+    }
+}
+elsif ($op eq "edit"){
+    $template->{VARS}->{ id } = $id;
+}
+elsif ($op eq "update"){
+    $template->{VARS}->{ id } = $id;
+    if ($save){
+        $request->store;
+        print $input->redirect('/cgi-bin/koha/tools/oai-pmh-harvester/dashboard.pl#saved_results');
+        exit;
+    }
+}
+elsif ($op eq "delete"){
+    if ($request){
+        $request->delete;
+        print $input->redirect('/cgi-bin/koha/tools/oai-pmh-harvester/dashboard.pl#saved_results'); exit;
+    }
+}
+else {
+    print $input->redirect('/cgi-bin/koha/tools/oai-pmh-harvester/dashboard.pl#saved_results'); exit;
+}
+$template->{VARS}->{ op } = $op;
+$template->{VARS}->{ oai_pmh_request } = $request;
+
+output_html_with_http_headers($input, $cookie, $template->output);
-- 
2.11.0
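
Example (not part of the patch): a minimal sketch of how the new svc/oai-pmh-harvester/history
service could be exercised with a DataTables-style JSON body. It assumes a valid CGISESSID cookie
from a logged-in staff session with the manage_staged_marc permission; the host name and cookie
value below are placeholders, and the column names are limited to fields the service itself reads
(header_identifier, repository, record_type).

    #!/usr/bin/perl
    use Modern::Perl;
    use JSON;
    use LWP::UserAgent;

    # Placeholders: substitute a real staff-client host and session cookie.
    my $base   = 'http://staff.koha.example.org';
    my $cookie = 'CGISESSID=replace-with-a-real-session-id';

    # The service reads draw/start/length plus the DataTables search, order,
    # and columns structures from the raw POST body (CGI's POSTDATA).
    my $body = {
        draw    => 1,
        start   => 0,
        length  => 10,
        search  => { value => '' },
        order   => [ { column => 0, dir => 'desc' } ],
        columns => [
            { data => 'header_identifier', searchable => 1, orderable => 1 },
            { data => 'repository',        searchable => 1, orderable => 0 },
        ],
    };

    my $ua       = LWP::UserAgent->new;
    my $response = $ua->post(
        "$base/cgi-bin/koha/svc/oai-pmh-harvester/history",
        'Content-Type' => 'application/json',
        'Cookie'       => $cookie,
        Content        => to_json($body),
    );
    print $response->decoded_content, "\n";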