From c648ec7a966fa3defee91f4ec033e9dccb239064 Mon Sep 17 00:00:00 2001
From: David Cook
Date: Tue, 8 Sep 2015 12:52:51 +1000
Subject: [PATCH] Bug 10662 - Build OAI-PMH Harvesting Client

_QUICK AND TENTATIVE TEST PLAN_

In the following steps, replace "/home/user" with the path to your Koha dev install.

_CONFIGURATION_

1) Add an Icarus configuration block to your koha-conf.xml with the following
   values (an illustrative sketch of the full XML block follows this test plan):
   - socket:       unix:/home/user/koha-dev/var/run/icarus.sock
   - pidfile:      /home/user/koha-dev/var/run/icarus.pid
   - log:          /home/user/koha-dev/var/log/icarus.log
   - task plugins: Koha::Icarus::Task::Enqueue::OAIPMH::Biblio
                   Koha::Icarus::Task::Dequeue::OAIPMH::Biblio
                   Koha::Icarus::Task::Test
   - max tasks:    30

2) Apply Bugzilla patches 15555, 15541, 15745, then 10662.

3) Upgrade your Koha dev install so that it uses the new Zebra configuration
   files; try something like the following:

   perl Makefile.PL --prev-install-log "$INSTALL_LOG"
   make
   make test
   make upgrade

4a) Run the database update:

    perl installer/data/mysql/updatedatabase.pl

4b) Set PERL5LIB to include /home/user/koha (i.e. the path to your C4 and Koha
    module directories):

    export PERL5LIB=/home/user/koha

5) Activate Icarus using the following command:

   perl /home/user/koha/misc/bin/icarusd.pl -f /home/user/koha-dev/etc/koha-conf.xml

   NOTE: It will send output to your terminal window. To write to the log file
   instead, daemonize it by adding the "-d" or "--daemon" option.

6) In Koha, create a record matching rule:
   Code = OAI
   Match threshold = 100
   Record type = Bibliographic

   Match point 1:
   Search index = control-number
   Score = 100
   Tag = 001

   Match point 2:
   Search index = id-other,st-urx
   Score = 100
   Tag = 024
   Subfields = a
   Normalization rule = raw

_USER DOWNLOAD TASK_

7a) Go to Koha administration > Saved tasks (http://KOHA/cgi-bin/koha/admin/saved_tasks.pl)
7b) Click "New saved task"
7c) Leave it on "Koha::Icarus::Task::Enqueue::OAIPMH::Biblio" and click "Next"
7d) Choose a "Start time" in the past using the calendar pop-up
7e) Choose a "Repeat interval" of at least 30 seconds (for initial troubleshooting purposes)
7f) Enter the URL of an OAI-PMH repository that you want to harvest (also include a username, password, and realm if necessary)
7g) Fill out the rest of the OAI-PMH request parameters as you like
7h) Fill out "Queue" with something like file:///home/user/koha/icarus_test
7i) Click "Save"

8a) Check that the Icarus dashboard shows a "Status" of "Online" (see the connectivity check sketched after this test plan for a command-line alternative)
8b) Click "Send to Icarus" next to your new saved task entry
8c) A task should now appear under "Active Icarus tasks"; click "Start"
8d) Go back to your terminal to check the Icarus server output (or 'tail -f' the log if you daemonized)
8e) You should notice activity; you can also check /home/user/koha/icarus_test to see whether records are being downloaded and stored there

_USER IMPORT TASK_

9a) Go back to Koha administration > Saved tasks (http://KOHA/cgi-bin/koha/admin/saved_tasks.pl)
9b) Click "New saved task"
9c) Choose "Koha::Icarus::Task::Dequeue::OAIPMH::Biblio" and click "Next"
9d) Set "Start time" and "Repeat interval" as before; these settings are common to all tasks
9e) "Queue" should be the same as before, so use your path of file:///home/user/koha/icarus_test
9f) Provide a username and password for the API authentication; you should be able to use the default URL
9g) You should be able to trust the "Import target parameters" URL
9h) Write "OAI" for "Record matching rule code", if you added that rule earlier in the configuration steps
9i) Leave the "Action..." defaults as they are
9j) Change "Filter" to "file:///home/user/koha/koha-tmpl/intranet-tmpl/prog/en/xslt/OAI2MARC21slim.xsl"
9k) Follow the same steps as above for sending the task to Icarus and telling Icarus to start it
9l) Watch Icarus's server output, look at http://KOHA/cgi-bin/koha/tools/manage-marc-import.pl, or look directly at your database's "import_oai" and "biblio" tables to see how records are imported into Koha
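_KOHA-CONF.XML SKETCH (step 1)_

The element names below are assumptions, not taken verbatim from this patch's test plan:
"socket" and "task_plugin" are inferred from what admin/saved_tasks.pl reads via
C4::Context, and "pidfile", "log", and "max_tasks" from the option names mentioned in
docs/Icarus/README. Treat this as an illustration of the shape of the block only, and
check the patch's own documentation for the authoritative names and placement.

   <!-- Illustrative sketch only: element names are inferred, not confirmed by this patch -->
   <icarus>
     <socket>unix:/home/user/koha-dev/var/run/icarus.sock</socket>
     <pidfile>/home/user/koha-dev/var/run/icarus.pid</pidfile>
     <log>/home/user/koha-dev/var/log/icarus.log</log>
     <task_plugin>Koha::Icarus::Task::Enqueue::OAIPMH::Biblio</task_plugin>
     <task_plugin>Koha::Icarus::Task::Dequeue::OAIPMH::Biblio</task_plugin>
     <task_plugin>Koha::Icarus::Task::Test</task_plugin>
     <max_tasks>30</max_tasks>
   </icarus>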
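_ICARUS CONNECTIVITY CHECK (step 8a)_

A minimal command-line sketch for checking that icarusd is reachable, using the
Koha::Icarus client module added by this patch. Run it with PERL5LIB set as in
step 4b; the socket path is an example and must match your koha-conf.xml.

   #!/usr/bin/perl
   # Sketch only: connects to the Icarus control socket and lists active tasks.
   use Modern::Perl;
   use Koha::Icarus;

   # Example socket path; use the value configured in your koha-conf.xml.
   my $icarus = Koha::Icarus->new({
       socket_uri => 'unix:/home/user/koha-dev/var/run/icarus.sock',
   });

   if ( $icarus->connect() ) {
       my $tasks = $icarus->list_tasks() || [];
       say "Icarus is online with " . scalar(@$tasks) . " active task(s).";
       for my $entry (@$tasks) {
           say "  task " . $entry->{task_id} . ": " . $entry->{task}{type}
             . " (" . $entry->{task}{status} . ")";
       }
   } else {
       say "Could not connect to Icarus: $!";
   }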
9j) Change "Filter" to "file:///home/user/koha/koha-tmpl/intranet-tmpl/prog/en/xslt/OAI2MARC21slim.xsl" 9k) Follow the same steps as above for sending the task to Icarus and telling Icarus to start it 9l) You can either watch Icarus's server output, or look at http://KOHA/cgi-bin/koha/tools/manage-marc-import.pl, or look directly at your database's "import_oai" and "biblio" tables to see how records are imported to Koha --- Koha/Icarus.pm | 177 +++++++++++ Koha/Icarus/Base.pm | 22 ++ Koha/Icarus/Listener.pm | 327 ++++++++++++++++++++ Koha/Icarus/Task.pm | 302 ++++++++++++++++++ Koha/Icarus/Task/Base.pm | 24 ++ Koha/Icarus/Task/Dequeue/OAIPMH/Biblio.pm | 111 +++++++ Koha/Icarus/Task/Enqueue/OAIPMH/Biblio.pm | 311 +++++++++++++++++++ Koha/SavedTask.pm | 86 ++++++ Koha/SavedTasks.pm | 62 ++++ Koha/Schema/Result/SavedTask.pm | 98 ++++++ admin/saved_tasks.pl | 338 +++++++++++++++++++++ docs/Icarus/README | 72 +++++ .../bug_10662-Build_import_oai_table.sql | 21 ++ installer/data/mysql/kohastructure.sql | 31 ++ .../intranet-tmpl/prog/en/includes/admin-menu.inc | 1 + .../tasks/KohaIcarusTaskDequeueOAIPMHBiblio.inc | 143 +++++++++ .../tasks/KohaIcarusTaskEnqueueOAIPMHBiblio.inc | 87 ++++++ .../prog/en/modules/admin/admin-home.tt | 2 + .../prog/en/modules/admin/saved_tasks.tt | 333 ++++++++++++++++++++ .../intranet-tmpl/prog/en/xslt/OAI2MARC21slim.xsl | 57 ++++ misc/bin/icarusd.pl | 156 ++++++++++ svc/import_oai | 197 ++++++++++++ 22 files changed, 2958 insertions(+) create mode 100755 Koha/Icarus.pm create mode 100755 Koha/Icarus/Base.pm create mode 100755 Koha/Icarus/Listener.pm create mode 100755 Koha/Icarus/Task.pm create mode 100755 Koha/Icarus/Task/Base.pm create mode 100755 Koha/Icarus/Task/Dequeue/OAIPMH/Biblio.pm create mode 100755 Koha/Icarus/Task/Enqueue/OAIPMH/Biblio.pm create mode 100755 Koha/SavedTask.pm create mode 100755 Koha/SavedTasks.pm create mode 100755 Koha/Schema/Result/SavedTask.pm create mode 100755 admin/saved_tasks.pl create mode 100755 docs/Icarus/README create mode 100644 installer/data/mysql/atomicupdate/bug_10662-Build_import_oai_table.sql create mode 100644 koha-tmpl/intranet-tmpl/prog/en/includes/tasks/KohaIcarusTaskDequeueOAIPMHBiblio.inc create mode 100644 koha-tmpl/intranet-tmpl/prog/en/includes/tasks/KohaIcarusTaskEnqueueOAIPMHBiblio.inc create mode 100644 koha-tmpl/intranet-tmpl/prog/en/modules/admin/saved_tasks.tt create mode 100755 koha-tmpl/intranet-tmpl/prog/en/xslt/OAI2MARC21slim.xsl create mode 100755 misc/bin/icarusd.pl create mode 100755 svc/import_oai diff --git a/Koha/Icarus.pm b/Koha/Icarus.pm new file mode 100755 index 0000000..b57e691 --- /dev/null +++ b/Koha/Icarus.pm @@ -0,0 +1,177 @@ +package Koha::Icarus; + +# Copyright 2016 Prosentient Systems +# +# This file is part of Koha. +# +# Koha is free software; you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation; either version 3 of the License, or (at your option) any later +# version. +# +# Koha is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with Koha; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
+ +use Modern::Perl; +use IO::Socket::UNIX; +use IO::Select; +use URI; +use JSON; + +sub new { + my ($class, $args) = @_; + $args = {} unless defined $args; + return bless ($args, $class); +} + +sub connected { + my ($self) = @_; + if ($self->{_connected}){ + return 1; + } +} + +sub connect { + my ($self) = @_; + my $socket_uri = $self->{socket_uri}; + if ($socket_uri){ + my $uri = URI->new($socket_uri); + if ($uri && $uri->scheme eq 'unix'){ + my $socket_path = $uri->path; + my $socket = IO::Socket::UNIX->new( + Type => IO::Socket::UNIX::SOCK_STREAM(), + Peer => $socket_path, + ); + if ($socket){ + my $socketio = new IO::Select(); + $socketio->add($socket); + #FIXME: Should probably fix these return values... + $self->{_socketio} = $socketio; + $self->{_socket} = $socket; + my $message = $self->_read(); + if ($message eq 'HELLO'){ + $self->{_connected} = 1; + return 1; + } + } + } + } + return 0; +} + +sub add_task { + my ($self, $args) = @_; + my $task = $args->{task}; + if ($task && %$task){ + my $response = $self->command("add task", undef, $task); + if ($response){ + return $response; + } + } +} + +sub start_task { + my ($self, $args) = @_; + my $task_id = $args->{task_id}; + if ($task_id){ + my $response = $self->command("start task", $task_id); + if ($response){ + return $response; + } + } +} + +sub remove_task { + my ($self, $args) = @_; + my $task_id = $args->{task_id}; + if ($task_id){ + my $response = $self->command("remove task", $task_id); + if ($response){ + return $response; + } + } +} + +sub list_tasks { + my ($self) = @_; + my $response = $self->command("list tasks"); + if ($response){ + if (my $tasks = $response->{tasks}){ + return $tasks; + } + } +} + +sub shutdown { + my ($self) = @_; + my $response = $self->command("shutdown"); + if ($response){ + return $response; + } +} + + + + + +sub command { + my ($self, $command, $task_id, $task) = @_; + my $serialized = $self->_serialize({ "command" => $command, "task_id" => $task_id, "task" => $task }); + if ($serialized){ + $self->_write({ serialized => $serialized }); + my $json = $self->_read(); + if ($json){ + my $response = from_json($json); + if ($response){ + return $response; + } + } + } +} + +sub _serialize { + my ($self, $output) = @_; + my $serialized = to_json($output); + return $serialized; +} + +sub _write { + my ($self, $args) = @_; + my $socket = $self->{_socket}; + my $output = $args->{serialized}; + if ($output){ + if (my $socketio = $self->{_socketio}){ + if (my @filehandles = $socketio->can_write(5)){ + foreach my $filehandle (@filehandles){ + #Localize output record separator as null + local $\ = "\x00"; + print $socket $output; + } + } + } + } +} + +sub _read { + my ($self) = @_; + if (my $socketio = $self->{_socketio}){ + if (my @filehandles = $socketio->can_read(5)){ + foreach my $filehandle (@filehandles){ + #Localize input record separator as null + local $/ = "\x00"; + my $message = <$filehandle>; + chomp($message) if $message; + return $message; + } + } + } +} + + + +1; \ No newline at end of file diff --git a/Koha/Icarus/Base.pm b/Koha/Icarus/Base.pm new file mode 100755 index 0000000..f5034ee --- /dev/null +++ b/Koha/Icarus/Base.pm @@ -0,0 +1,22 @@ +package Koha::Icarus::Base; + +use Modern::Perl; +use DateTime; + +sub new { + my ($class, $args) = @_; + $args = {} unless defined $args; + return bless ($args, $class); +} + +sub log { + my ($self,$message) = @_; + my $id = $self->{_id}; + my $component = $self->{_component} // "component"; + if ( ($self->{Verbosity}) && ($self->{Verbosity} > 0) ){ + 
my $now = DateTime->now(time_zone => "local"); + say "[$now] [$component $id] $message"; + } +} + +1; \ No newline at end of file diff --git a/Koha/Icarus/Listener.pm b/Koha/Icarus/Listener.pm new file mode 100755 index 0000000..6a3637e --- /dev/null +++ b/Koha/Icarus/Listener.pm @@ -0,0 +1,327 @@ +package Koha::Icarus::Listener; + +use Modern::Perl; +use parent 'Koha::Icarus::Base'; + +use POE qw(Wheel::ReadWrite Wheel::SocketFactory Wheel::Run); +use IO::Socket qw(AF_UNIX); +use URI; +use Koha::Icarus::Task; +use JSON; #For "on_client_input" + +my $null_filter = POE::Filter::Line->new( + Literal => chr(0), +); + +sub new { + my ($class, $args) = @_; + $args = {} unless defined $args; + $args->{_component} = "server"; + $args->{_id} = "undefined"; + return bless ($args, $class); +} + +#NOTE: "spawn" inspired by http://poe.perl.org/?POE_Cookbook/Object_Methods +sub spawn { + my ($class, $args) = @_; + my $self = $class->new($args); + POE::Session->create( + object_states => [ + $self => { + _start => "on_server_start", + shutdown => "shutdown", + set_verbosity => "set_verbosity", + _child => "on_task_event", + got_list_tasks => "on_list_tasks", + graceful_shutdown => "graceful_shutdown", + got_client_accept => "on_client_accept", + got_client_error => "on_client_error", + got_server_error => "on_server_error", + got_add_task => "on_add_task", + got_client_input => "on_client_input", + }, + ], + ); +} + +#Methods for POE::Session + +sub on_server_start { + my ($self, $kernel,$heap,$session) = @_[OBJECT, KERNEL,HEAP,SESSION]; + my $server_id = $session->ID; + $self->{_id} = $server_id; #Set internal id for logging purposes + + my $bind_address_uri = $self->{Socket}; + my $max_tasks = $self->{MaxTasks}; + + $kernel->sig(INT => "graceful_shutdown"); + $kernel->sig(TERM => "graceful_shutdown"); + + $heap->{max_tasks} = $max_tasks // 25; #Default maximum of 25 unless otherwise specified + + $self->log("Maximum number of tasks allowed: $heap->{max_tasks}"); + $self->log("Starting server..."); + + my %server_params = ( + SuccessEvent => "got_client_accept", + FailureEvent => "got_server_error", + ); + + #TODO: At this time, only "unix" sockets are supported. In future, perhaps TCP/IP sockets could also be supported. + my $uri = URI->new($bind_address_uri); + my $scheme = $uri->scheme; + + if ($scheme eq 'unix'){ + my $bind_address = $uri->path; + $server_params{SocketDomain} = AF_UNIX; + $server_params{BindAddress} = $bind_address; + #When starting a unix socket server, you need to remove any existing references to that socket file. + if ($bind_address && (-e $bind_address) ){ + unlink $bind_address; + } + } + + $heap->{server} = POE::Wheel::SocketFactory->new(%server_params); + + if ($scheme eq 'unix'){ + #FIXME/DEBUGGING: This is a way to force a permission denied error... 
+ #chmod 0755, $uri->path; + #Make the socket writeable to other users like Apache + chmod 0666, $uri->path; + } + +} + +sub shutdown { + my ($self,$heap,$session,$kernel) = @_[OBJECT, HEAP,SESSION,KERNEL]; + + if ($heap->{server}){ + $self->log("Shutting down server..."); + #Delete the server, so that you can't get any new connections + delete $heap->{server} if $heap->{server}; + } + + if ($heap->{client}){ + $self->log("Shutting down any remaining clients..."); + #Delete the clients, so that you bring down the existing connections + delete $heap->{client}; #http://www.perlmonks.org/?node_id=176971 + } +} + +sub on_task_event { + my ($self, $kernel, $heap,$session) = @_[OBJECT,KERNEL, HEAP,SESSION]; + my ($action,$child_session,$task) = @_[ARG0,ARG1,ARG2]; + + my $child_id = $child_session->ID; + $self->log("$action child $child_id"); + + if ($action eq 'create'){ + #NOTE: The $task variable is returned by the child POE session's _start event + my $task_id = $child_session->ID; + $heap->{tasks}->{$task_id}->{task} = $task; + + } elsif ($action eq 'lose'){ + my $task_id = $child_session->ID; + delete $heap->{tasks}->{$task_id}; + } +} + +#TODO: Put this in a parent class? +sub set_verbosity { + my ($self,$session,$kernel,$new_verbosity) = @_[OBJECT,SESSION,KERNEL,ARG0]; + if (defined $new_verbosity){ + $self->{Verbosity} = $new_verbosity; + } +} + +sub on_list_tasks { + my ($self, $kernel, $heap,$session) = @_[OBJECT, KERNEL, HEAP,SESSION]; + + #DEBUG: You can access the POE::Kernel's sessions with "$POE::Kernel::poe_kernel->[POE::Kernel::KR_SESSIONS]". + #While it's black magic you shouldn't touch, it can be helpful when debugging. + + my @tasks = (); + foreach my $task_id (keys %{$heap->{tasks}} ){ + push(@tasks,{ task_id => $task_id, task => $heap->{tasks}->{$task_id}->{task} }); + } + return \@tasks; +} + +sub graceful_shutdown { + my ($self, $heap,$session,$kernel,$signal) = @_[OBJECT, HEAP,SESSION,KERNEL,ARG0]; + + #Tell the kernel that you're handling the signal sent to this session + $kernel->sig_handled(); + $kernel->sig($signal); + + my $tasks = $kernel->call($session,"got_list_tasks"); + + + if ( $heap->{tasks} && %{$heap->{tasks}} ){ + $self->log("Waiting for tasks to finish..."); + foreach my $task_id (keys %{$heap->{tasks}}){ + $self->log("Task $task_id still exists..."); + $kernel->post($task_id,"got_task_stop"); + } + } else { + $self->log("All tasks have finished"); + $kernel->yield("shutdown"); + return; + } + + $self->log("Attempting graceful shutdown in 1 second..."); + #NOTE: Basically, we just try another graceful shutdown on the next tick. + $kernel->delay("graceful_shutdown" => 1); +} + +#Accept client connection to listener +sub on_client_accept { + my ($self, $client_socket, $server_wheel_id, $heap, $session) = @_[OBJECT, ARG0, ARG3, HEAP,SESSION]; + + my $client_wheel = POE::Wheel::ReadWrite->new( + Handle => $client_socket, + InputEvent => "got_client_input", + ErrorEvent => "got_client_error", + InputFilter => $null_filter, + OutputFilter => $null_filter, + ); + + $client_wheel->put("HELLO"); + $heap->{client}->{ $client_wheel->ID() } = $client_wheel; + $self->log("Connection ".$client_wheel->ID()." 
started.$server_wheel_id"); +} + +#Handle server error - shutdown server +sub on_server_error { + my ($self, $operation, $errnum, $errstr, $heap, $session) = @_[OBJECT, ARG0, ARG1, ARG2,HEAP, SESSION]; + $self->log("Server $operation error $errnum: $errstr\n"); + delete $heap->{server}; +} + +#Handle client error - including disconnect +sub on_client_error { + my ($self, $wheel_id,$heap,$session) = @_[OBJECT, ARG3,HEAP,SESSION]; + + $self->log("Connection $wheel_id failed or ended."); + delete $heap->{client}->{$wheel_id}; + +} + +sub on_add_task { + my ($self, $message, $kernel, $heap, $session) = @_[OBJECT, ARG0, KERNEL, HEAP,SESSION]; + + #Fetch a list of all tasks + my @task_keys = keys %{$heap->{tasks}}; + + #If the number in the list is less than the max, add a new task + #else die. + if (scalar @task_keys < $heap->{max_tasks}){ + my $server_id = $session->ID; + my $task_session = Koha::Icarus::Task->spawn({ message => $message, server_id => $server_id, Verbosity => 1, }); + return $task_session->ID; + } else { + #This die should be caught by the event caller... + die "Maximum number of tasks already reached.\n"; + } +} + +sub on_client_input { + my ($self, $input, $wheel_id, $session, $kernel, $heap) = @_[OBJECT, ARG0, ARG1, SESSION, KERNEL, HEAP]; + + #Store server id more explicitly + my $server_id = $session->ID; + + #Server listener has received input from client + my $client = $heap->{client}->{$wheel_id}; + + #FIXME: you probably don't want to log this as it can have auth info... + #$self->log("Input = $input"); + + #Parse input from client + my $message = from_json($input); + + if ( ref $message eq 'HASH' ){ + #Read "command" from client + if (my $command = $message->{command}){ + $self->log("Message received with command \"$command\"."); + if ($command eq 'add task'){ + my $output = {}; + + #Create a task session + eval { + #NOTE: The server automatically keeps track of its child tasks + my $task_id = $kernel->call($server_id,"got_add_task",$message); + + $output->{action} = "added"; + $output->{task_id} = $task_id; + }; + if ($@){ + #FIXME: You might be able to remove this log... + $self->log("$@"); + chomp($@); + $output->{action} = "error"; + $output->{error_message} = $@; + } + my $server_output = to_json($output); + $client->put($server_output); + return; + + } elsif ( ($command eq 'remove task') || ($command eq 'start task' ) ){ + + my $task_id = $message->{task_id}; + + my $output = { + task_id => $task_id, + }; + + if ($command eq 'remove task'){ + $kernel->call($task_id,"got_task_stop"); + $output->{action} = "removed"; + } elsif ($command eq 'start task'){ + my $response = $kernel->call($task_id, "on_task_init"); + $output->{action} = $response; + } + + if ($!){ + $output->{action} = "error"; + $output->{error_message} = $!; + } + + #FIXME: What do we actually want to send back to the client? + my $server_output = to_json($output); + $client->put($server_output); + return; + + } elsif ($command eq 'list tasks'){ + + #Get tasks from listener (ie self) + my $tasks = $kernel->call($server_id, "got_list_tasks"); + + #Prepare output for client + my $server_output = to_json({tasks => $tasks}, {pretty => 1}); + + #Send output to client + $client->put($server_output); + return; + + } elsif ($command eq 'shutdown'){ + $kernel->post($server_id, "graceful_shutdown"); + my $server_output = to_json({action => 'shutting down'}); + $client->put($server_output); + return; + } else { + $self->log("The message contained an invalid command!"); + $client->put("Sorry! 
That is an invalid command!"); + return; + } + } else { + $self->log("The message was missing a command!"); + } + } else { + $self->log("The message was malformed!"); + } + $client->put("Sorry! That is an invalid message!"); + return; +} + +1; \ No newline at end of file diff --git a/Koha/Icarus/Task.pm b/Koha/Icarus/Task.pm new file mode 100755 index 0000000..1c4232c --- /dev/null +++ b/Koha/Icarus/Task.pm @@ -0,0 +1,302 @@ +package Koha::Icarus::Task; + +use Modern::Perl; +use parent 'Koha::Icarus::Base'; + +use POE qw(Wheel::Run); +use DateTime; +use DateTime::Format::Strptime; +use JSON; +use Module::Load::Conditional qw/can_load/; + +my $datetime_pattern = DateTime::Format::Strptime->new( + pattern => '%F %T', + time_zone => 'local', +); +my $epoch_pattern = DateTime::Format::Strptime->new( + pattern => '%s', +); + +sub new { + my ($class, $args) = @_; + $args = {} unless defined $args; + $args->{_component} = "task"; + $args->{_id} = "undefined"; + return bless ($args, $class); +} + +#NOTE: "spawn" inspired by http://poe.perl.org/?POE_Cookbook/Object_Methods +sub spawn { + my ($class, $args) = @_; + my $self = $class->new($args); + my $task_session = POE::Session->create( + object_states => [ + $self => { + _start => "on_task_create", + "got_child_stdout" => "on_child_stdout", + "got_child_stderr" => "on_child_stderr", + "got_child_close" => "on_child_close", + "got_child_signal" => "on_child_signal", + "got_terminal_signal" => "on_terminal_signal", + "child_process_success" => "child_process_success", + "got_task_stop" => "on_task_stop", + "on_task_init" => "on_task_init", + "on_task_start" => "on_task_start", + }, + ], + ); + return $task_session; +} + +sub on_task_create { + my ($self, $session, $kernel, $heap) = @_[OBJECT, SESSION, KERNEL, HEAP]; + + #Trap terminal signals so that the task can stop gracefully. + $kernel->sig(INT => "got_terminal_signal"); + $kernel->sig(TERM => "got_terminal_signal"); + + my $task_id = $session->ID; + if ($task_id){ + #Tell the kernel that this task is waiting for an external action (ie keepalive counter) + $kernel->refcount_increment($task_id,"waiting task"); + $self->{_id} = $task_id; #Set internal id for logging purposes + } + + my $server_id = $self->{server_id}; + if ($server_id){ + $heap->{server_id} = $server_id; + } + + my $task = undef; + my $message = $self->{message}; + if ($message){ + $task = $message->{task}; + if ($task){ + $task->{status} = 'new'; + $heap->{task} = $task; + } + } + return $task; #This return value is used by the parent POE session's _child handler +} + +#This sub is just to start it now, or set it to start in the future... if the time is now or in the past, it starts now... if it's in the future, it starts in the future... +sub on_task_init { + my ($self, $session, $kernel, $heap) = @_[OBJECT, SESSION, KERNEL, HEAP]; + my $response = 'pending'; + my $task = $heap->{task}; + my $status = $task->{status}; + if ($status){ + if ($status eq 'started'){ + $response = 'already started'; + } elsif ($status eq 'pending'){ + $response = 'already pending'; + } else { + $task->{status} = 'pending'; + + my $start = $task->{start}; + if ( my $dt = $datetime_pattern->parse_datetime($start) ){ + $start = $dt->epoch; + } elsif ( $epoch_pattern->parse_datetime($start) ){ + #No change required + } else { + #If we don't match the datetime_pattern or epoch_pattern, then we start right now. 
+ $start = time(); #time() returns a UNIX epoch time value + } + + $self->log("Start task at $start"); + #NOTE: $start must be in UNIX epoch time (ie number of seconds that have elapsed since 00:00:00 UTC Thursday 1 January 1970) + $kernel->alarm("on_task_start",$start); + } + } + return $response; +} + +sub on_task_start { + my ($self, $session, $kernel, $heap) = @_[OBJECT, SESSION, KERNEL, HEAP]; + my $task = $heap->{task}; + $task->{status} = 'started'; + + if (my $repeat_interval = $task->{repeat_interval}){ + #NOTE: Reset the start time with a human readable timestamp + my $dt = DateTime->now( time_zone => 'local', ); + $dt->add( seconds => $repeat_interval ); + $task->{start} = $dt->strftime("%F %T"); + } + #FIXME: You need to impose child process limits here! How many child processes are allowed to be running at any given time? Well, you can only have one child process per task... + #so it's really more of a limit on the number of tasks... you probably need to have an internal task queue... that's easy enough though. + my $child = POE::Wheel::Run->new( + ProgramArgs => [ $task, ], + Program => sub { + my ($task) = @_; + + #Perform some last minute POE calls before running the task module plugin + my $session = $poe_kernel->get_active_session(); + if ($session){ + my $heap = $session->get_heap(); + $poe_kernel->call($heap->{server_id},"set_verbosity",0); #This turns off the server logging in this forked process, so the following call() doesn't mess up our logs + $poe_kernel->call($heap->{server_id},"shutdown"); #Shutdown the socket listener on the child process, so there's zero chance of writing to or reading from the socket in the child process + } + + #NOTE: I don't know if this really needs to be run, but it shouldn't hurt. + $poe_kernel->stop(); + + #Try to load the task type module. + my $task_type = $task->{type}; + if ( can_load ( modules => { $task_type => undef, }, ) ){ + #Create the object + my $task_object = $task_type->new({task => $task}); + if ($task_object){ + #Synchronous action: run the task module + $task_object->run; + } + } else { + die "Couldn't load module $task_type: $Module::Load::Conditional::ERROR" + } + }, + StdoutEvent => "got_child_stdout", + StderrEvent => "got_child_stderr", + CloseEvent => "got_child_close", + NoSetPgrp => 1, #Keep child processes in same group as parent. This is especially useful when using Ctrl+C to kill the whole group. + ); + + $kernel->sig_child($child->PID, "got_child_signal"); + # Wheel events include the wheel's ID. + $_[HEAP]{children_by_wid}{$child->ID} = $child; + # Signal events include the process ID. + $_[HEAP]{children_by_pid}{$child->PID} = $child; + + $self->log("child pid ".$child->PID." 
started as wheel ".$child->ID); +} + +sub on_task_stop { + my ($self, $session, $kernel, $heap) = @_[OBJECT, SESSION, KERNEL, HEAP]; + my $task = $heap->{task}; + $task->{status} = 'stopping'; + my $task_id = $session->ID; + my $server_id = $heap->{server_id}; + + if ($heap->{stopping}){ + $self->log("Task is already in the process of stopping..."); + + } else { + $self->log("Trying to stop task."); + + #Mark this task as stopping + $heap->{stopping} = 1; + + #Stop the task from spawning new jobs + $kernel->alarm("on_task_start"); + + my $children_by_pid = $heap->{children_by_pid}; + if ($children_by_pid && %$children_by_pid){ + + $self->log("Child processes in progres..."); + my $child_processes = $heap->{children_by_pid}; + foreach my $child_pid (keys %$child_processes){ + my $child = $child_processes->{$child_pid}; + $self->log("Telling child pid $child_pid to stop"); + $child->put("quit"); + #TODO: Perhaps it would be worthwhile having a kill switch too? + # my $rv = $child->kill("TERM"); + } + } + $self->log("Removing task keepalive."); + $kernel->refcount_decrement($task_id,"waiting task"); + } +} + +sub on_terminal_signal { + my ($self, $signal,$session,$kernel) = @_[OBJECT, ARG0,SESSION,KERNEL]; + $self->log("Trapped SIGNAL: $signal."); + #Gracefully stop the task + $kernel->call($session, "got_task_stop"); +} + +sub child_process_success { + my ($self, $heap,$session,$kernel) = @_[OBJECT, HEAP,SESSION,KERNEL]; + my $task = $heap->{task}; + if (my $repeat_interval = $task->{repeat_interval}){ + if ($heap->{stopping}){ + $self->log("Will skip repeating the task, as task is stopping."); + } else { + $self->log("Will repeat the task"); + $task->{status} = "restarting"; + $kernel->yield("on_task_init"); + } + } else { + $self->log("I'm going to stop this task"); + $kernel->yield("got_task_stop"); + } +} + +############################################################# +# # +# Methods for communicating with child processes # +# # +############################################################# +# Originally inspired by the POE::Wheel::Run perldoc example + +# Wheel event, including the wheel's ID +sub on_child_stdout { + my ($self, $stdout_line, $wheel_id, $session) = @_[OBJECT, ARG0, ARG1, SESSION]; + my $child = $_[HEAP]{children_by_wid}{$wheel_id}; + #NOTE: Log everything child process sends to STDOUT + $self->log("[pid ".$child->PID."] STDOUT: $stdout_line"); + + #If the child outputs a line to STDOUT which starts with UPDATE_PARAMS=, we capture the data, + #and update the task params. + if ($stdout_line =~ /^UPDATE_PARAMS=(.*)$/){ + my $json_string = $1; + my $json = from_json($json_string); + my $task = $_[HEAP]->{task}; + my $params = $task->{params}; + foreach my $key (%$json){ + if (defined $params->{$key}){ + #FIXME: Don't just overwrite? Only update differences? + $params->{$key} = $json->{$key}; + } + } + $_[HEAP]->{task} = $task; + } +} + +# Wheel event, including the wheel's ID. +sub on_child_stderr { + my ($self, $stderr_line, $wheel_id, $session) = @_[OBJECT, ARG0, ARG1,SESSION]; + my $child = $_[HEAP]{children_by_wid}{$wheel_id}; + #NOTE: Log everything child process sends to STDERR + $self->log("[pid ".$child->PID."] STDERR: $stderr_line"); +} + +# Wheel event, including the wheel's ID. +sub on_child_close { + my ($self, $wheel_id,$session,$kernel) = @_[OBJECT, ARG0,SESSION,KERNEL]; + + my $child = delete $_[HEAP]{children_by_wid}{$wheel_id}; + + # May have been reaped by on_child_signal(). 
+ unless (defined $child) { + $self->log("[wid $wheel_id] closed all pipes."); + return; + } + $self->log("[pid ".$child->PID."] closed all pipes."); + delete $_[HEAP]{children_by_pid}{$child->PID}; +} + +sub on_child_signal { + my ($self, $heap,$kernel,$pid,$exit_code,$session) = @_[OBJECT, HEAP,KERNEL,ARG1,ARG2,SESSION]; + + #If the child's exit code is 0, handle this successful exit status + if ($exit_code == 0){ + $kernel->yield("child_process_success"); + } + $self->log("pid $pid exited with status $exit_code."); + my $child = delete $_[HEAP]{children_by_pid}{$pid}; + + # May have been reaped by on_child_close(). + return unless defined $child; + + delete $_[HEAP]{children_by_wid}{$child->ID}; +} + +1; diff --git a/Koha/Icarus/Task/Base.pm b/Koha/Icarus/Task/Base.pm new file mode 100755 index 0000000..8de774c --- /dev/null +++ b/Koha/Icarus/Task/Base.pm @@ -0,0 +1,24 @@ +package Koha::Icarus::Task::Base; + +use Modern::Perl; +use IO::Select; + +sub new { + my ($class, $args) = @_; + $args = {} unless defined $args; + return bless ($args, $class); +} + +sub listen_for_instruction { + my ($self) = @_; + my $select = $self->{_select} ||= IO::Select->new(\*STDIN); + if (my @ready_FHs = $select->can_read(0) ){ + foreach my $FH (@ready_FHs){ + my $line = $FH->getline(); + chomp($line); + return $line; + } + } +} + +1; diff --git a/Koha/Icarus/Task/Dequeue/OAIPMH/Biblio.pm b/Koha/Icarus/Task/Dequeue/OAIPMH/Biblio.pm new file mode 100755 index 0000000..f51f27a --- /dev/null +++ b/Koha/Icarus/Task/Dequeue/OAIPMH/Biblio.pm @@ -0,0 +1,111 @@ +package Koha::Icarus::Task::Dequeue::OAIPMH::Biblio; + +use Modern::Perl; +use parent 'Koha::Icarus::Task::Base'; +use URI; +use LWP::UserAgent; +use HTTP::Status qw(:constants); + +my $ua = LWP::UserAgent->new; + +#FIXME: If we store the cookie jar on disk, we can prevent unnecessary HTTP requests... +$ua->cookie_jar({}); + +sub new { + my ($class, $args) = @_; + $args = {} unless defined $args; + return bless ($args, $class); +} + +sub run { + my ( $self ) = @_; + + my $task = $self->{task}; + + #DEBUGGING/FIXME: Remove these lines + use Data::Dumper; + warn Dumper($task); + + my $params = $task->{params}; + + my $auth_uri = $params->{auth_uri}; + my $target_uri = $params->{target_uri}; + + my $queue = $params->{queue}; + my $queue_uri = URI->new($queue); + + if ($queue_uri->scheme eq 'file'){ + + my $path = $queue_uri->path; + opendir(my $dh, $path); + my @files = sort readdir($dh); + foreach my $file (@files){ + #NOTE: This is plugin specific as the plugins define when they stop to listen for instructions... + my $instruction = $self->listen_for_instruction(); + if ($instruction eq 'quit'){ + warn "I was asked to quit!"; + return; + } + + next if $file =~ /^\.+$/; + my $filepath = "$path/$file"; + if ( -d $filepath ){ + warn "Directory: $file"; + } elsif ( -e $filepath ){ + warn "File: $file"; + + #Slurp mode + local $/; + #TODO: Check flock on $filepath first + open( my $fh, '<', $filepath ); + my $data = <$fh>; + + #TODO: Improve this section... + #Send to Koha API... (we could speed this up using Asynchronous HTTP requests with AnyEvent::HTTP...) 
+ my $resp = $ua->post( $target_uri, + {'nomatch_action' => $params->{nomatch_action}, + 'overlay_action' => $params->{overlay_action}, + 'match' => $params->{match}, + 'import_mode' => $params->{import_mode}, + 'framework' => $params->{framework}, + 'item_action' => $params->{item_action}, + 'filter' => $params->{filter}, + 'xml' => $data}); + + my $status = $resp->code; + #FIXME: DEBUGGING + warn $status; + warn $resp->code; + warn $resp->decoded_content; + + if ($status == HTTP_UNAUTHORIZED || $status == HTTP_FORBIDDEN) { + my $user = $params->{auth_username}; + my $password = $params->{auth_password}; + $resp = $ua->post( $auth_uri, { userid => $user, password => $password } ); + #FIXME: DEBUGGING + warn $resp->code; + warn $resp->decoded_content; + + $resp = $ua->post( $target_uri, + {'nomatch_action' => $params->{nomatch_action}, + 'overlay_action' => $params->{overlay_action}, + 'match' => $params->{match}, + 'import_mode' => $params->{import_mode}, + 'framework' => $params->{framework}, + 'item_action' => $params->{item_action}, + 'filter' => $params->{filter}, + 'xml' => $data}) + if $resp->is_success; + #FIXME: DEBUGGING + warn $resp->code; + warn $resp->decoded_content; + } + if ($resp->code == 200){ + unlink $filepath; + } + } + } + } +} + +1; diff --git a/Koha/Icarus/Task/Enqueue/OAIPMH/Biblio.pm b/Koha/Icarus/Task/Enqueue/OAIPMH/Biblio.pm new file mode 100755 index 0000000..1759226 --- /dev/null +++ b/Koha/Icarus/Task/Enqueue/OAIPMH/Biblio.pm @@ -0,0 +1,311 @@ +package Koha::Icarus::Task::Enqueue::OAIPMH::Biblio; + +use Modern::Perl; +use parent 'Koha::Icarus::Task::Base'; + +use DateTime; +use DateTime::Format::Strptime; +use HTTP::OAI; +use File::Path qw(make_path); +use Digest::MD5; +use JSON; +use URI; + +my $strp = DateTime::Format::Strptime->new( + pattern => '%Y%m%dT%H%M%S.%NZ', +); + +my $oai_second_granularity = DateTime::Format::Strptime->new( + pattern => '%Y-%m-%dT%H:%M:%SZ', +); + +my $oai_day_granularity = DateTime::Format::Strptime->new( + pattern => '%Y-%m-%d', +); + +sub validate_parameter_names { + +} +sub validate_repeat_interval { + my ($self,$repeat_interval) = @_; + if ($repeat_interval && $repeat_interval =~ /^\d*$/){ + return undef; + } + $self->{invalid_data}++; + return { not_numeric => 1, }; +} + +sub validate_url { + my ($self,$url) = @_; + my $response = {}; + if (my $url_obj = URI->new($url)){ + if ($url_obj->scheme ne "http"){ + $response->{not_http} = 1; + $self->{invalid_data}++; + } + if ( ! $url_obj->path){ + $response->{no_path} = 1; + $self->{invalid_data}++; + } + } else { + $response->{not_a_url} = 1; + $self->{invalid_data}++; + } + + return $response; +} + +sub validate { + my ($self, $args) = @_; + #Reset the invalid data counter... 
+ $self->{invalid_data} = 0; + my $errors = { }; + my $task = $self->{task}; + my $tests = $args->{tests}; + if ($task){ + if ($tests && $tests eq 'all'){ + #warn "PARAMS = ".$task->{params}; + } + } + my $params = $task->{params}; + + #validate_start_time + $errors->{"repeat_interval"} = $self->validate_repeat_interval($task->{repeat_interval}); + + $errors->{"url"} = $self->validate_url($params->{url}); + + #NOTE: You don't need to validate these 3 HTTP Basic Auth parameters + #validate_username + #validate_password + #validate_realm + + #OAI-PMH parameters + #validate_verb + #validate_sets + #validate_marcxml + #validate_from + #validate_until + + #Download parameters + #validate_queue + + return $errors; +} + +sub new { + my ($class, $args) = @_; + $args = {} unless defined $args; + $args->{invalid_data} = 0; + return bless ($args, $class); +} + +sub validate_queue { + my ( $self ) = @_; + my $task = $self->{task}; + if (my $queue = $task->{params}->{queue}){ + + my $queue_uri = URI->new($queue); + #TODO: In theory, you could even use a DBI DSN like DBI:mysql:database=koha;host=koha.db;port=3306. + #Then you could provide the table, username, and password in the params as well... + + #NOTE: If the queue directory doesn't exist on the filesystem, we try to make it and change to it. + if ($queue_uri->scheme eq 'file'){ + my $filepath = $queue_uri->file; + if ( ! -d $filepath ){ + make_path($filepath,{ mode => 0755 }); + } + if ( -d $filepath ){ + chdir $filepath or die "$!"; + } + } + + } +} + +sub run { + my ( $self ) = @_; + $self->validate_queue; + + my $task = $self->{task}; + + #DEBUGGING/FIXME: Remove these lines + use Data::Dumper; + warn Dumper($task); + + my $params = $task->{params}; + + my $now = DateTime->now(); #This is in UTC time, which is required by the OAI-PMH protocol. + if ( $oai_second_granularity->parse_datetime($params->{from}) ){ + $now->set_formatter($oai_second_granularity); + } else { + $now->set_formatter($oai_day_granularity); + } + + $params->{until} = "$now" if $task->{repeat_interval}; + + $self->{digester} = Digest::MD5->new(); + $self->create_harvester; + my $sets = $self->prepare_sets; + + #Send a OAI-PMH request for each set + foreach my $set (@{$sets}){ + my $response = $self->send_request({set => $set}); + $self->handle_response({ response => $response, set => $set,}); + } + + #FIXME: Do you want to update the task only when the task is finished, or + #also after each resumption? + #Update the task params in Icarus after the task is finished... + #TODO: This really does make it seem like you should be handling the repeat_interval within the child process rather than the parent... + if ($task->{repeat_interval}){ + $params->{from} = "$now"; + $params->{until} = ""; + my $json_update = to_json($params); + say STDOUT "UPDATE_PARAMS=$json_update"; + } + +} + +#FIXME: I wonder if it would be faster to send your own HTTP requests and not use HTTP::OAI... +sub send_request { + my ( $self, $args ) = @_; + + #NOTE: This is plugin specific as the plugins define when they stop to listen for instructions... + #NOTE: Before sending a new request, check if Icarus has already asked us to quit. 
+ my $instruction = $self->listen_for_instruction(); + if ($instruction eq 'quit'){ + warn "I was asked to quit!"; + return; + } + + my $set = $args->{set}; + my $resumptionToken = $args->{resumptionToken}; + + my $response; + my $task_params = $self->{task}->{params}; + + my $harvester = $self->{harvester}; + my $verb = $task_params->{verb}; + if ($verb eq 'GetRecord'){ + $response = $harvester->GetRecord( + metadataPrefix => $task_params->{metadataPrefix}, + identifier => $task_params->{identifier}, + ); + } elsif ($verb eq 'ListRecords'){ + $response = $harvester->ListRecords( + metadataPrefix => $task_params->{metadataPrefix}, + from => $task_params->{from}, + until => $task_params->{until}, + set => $set, + resumptionToken => $resumptionToken, + ); + } + return $response; +} + +sub create_harvester { + my ( $self ) = @_; + my $task_params = $self->{task}->{params}; + + #FIXME: DEBUGGING + #use HTTP::OAI::Debug qw(+); + + #Create HTTP::OAI::Harvester object + my $harvester = new HTTP::OAI::Harvester( baseURL => $task_params->{url} ); + if ($harvester){ + $harvester->timeout(5); #NOTE: the default timeout is 180 + #Set HTTP Basic Authentication Credentials + my $uri = URI->new($task_params->{url}); + my $host = $uri->host; + my $port = $uri->port; + $harvester->credentials($host.":".$port, $task_params->{realm}, $task_params->{username}, $task_params->{password}); + } + $self->{harvester} = $harvester; +} + +sub prepare_sets { + my ( $self ) = @_; + my $task_params = $self->{task}->{params}; + my @sets = split(/\|/, $task_params->{sets}); + #If no sets are defined, create a null element to force the foreach loop to run once + if (!@sets){ + push(@sets,undef) + } + return \@sets; +} + +sub handle_response { + my ( $self, $args ) = @_; + my $params = $self->{task}->{params}; + my $response = $args->{response}; + my $set = $args->{set}; + if ($response){ + #NOTE: We have options at this point + #Option 1: Use $response->toDOM() to handle the XML response as a single document + #Option 2: Use $response->next() to handle each record individually. You would need to create a new document using $rec->header->dom() and $rec->metadata->dom() anyway. + + #NOTE: I wonder which option would be the fastest. For now, we're going with Option 1: + my $dom = $response->toDOM; + my $root = $dom->documentElement; + + #FIXME: Provide these as arguments so you're not re-creating them for each response + my $xpc = XML::LibXML::XPathContext->new(); + $xpc->registerNs('oai','http://www.openarchives.org/OAI/2.0/'); + my $xpath = XML::LibXML::XPathExpression->new("(oai:GetRecord|oai:ListRecords)/oai:record"); + + + my @records = $xpc->findnodes($xpath,$root); + my $now_pretty = DateTime->now(); + + $now_pretty->set_formatter($strp); + warn "Downloaded ".scalar @records." records at $now_pretty"; + foreach my $record (@records) { + + #FIXME: This is where you could put a filter to prevent certain records from being saved... + + #Create a new XML document from the XML fragment + my $document = XML::LibXML::Document->new( "1.0", "UTF-8" ); + $document->setDocumentElement($record); + my $record_string = $document->toString; + + #NOTE: We have options at this point. 
+ #Option 1: Write documents to disk, and have a separate importer upload the documents + #Option 2: Use AnyEvent::HTTP or POE::Component::Client::HTTP to send to a HTTP API asynchronously + #Option 3: Write records to a database, and have a separate importer upload the documents + #Option 4: Shared memory, although that seems fragile if nothing else + #Option 5: Write the records to a socket/pipe + + #NOTE: I wonder which option would be the fastest. For now, we're going to go with Option 1: + $self->{digester}->add($record_string); + my $digest = $self->{digester}->hexdigest; + #FIXME: If a record appears more than once during the download signified by $now, you'll + #overwrite the former with the latter. While this acts as a sort of heavy-handed de-duplication, + #you need to take into account the importer daemon... + + require Time::HiRes; + my $epoch = Time::HiRes::time(); + my $now = DateTime->from_epoch(epoch => $epoch); + $now->set_formatter($strp); + + my $filename = "$now-$digest"; + #NOTE: Here is where we write the XML out to disk + my $state = $document->toFile($filename); + } + + + #NOTE: Check if object has method due to bug in HTTP::OAI which causes fatal error on $response->resumptionToken if no real response is fetched from the OAI-PMH server + if ($response->can("resumptionToken")){ + my $resumption_token = $response->resumptionToken->resumptionToken if $response->resumptionToken && $response->resumptionToken->resumptionToken; + if ($resumption_token){ + warn "Resumption Token = $resumption_token"; + my $resumed_response = $self->send_request({set => $set, resumptionToken => $resumption_token}); + $self->handle_response({ response => $resumed_response, set => $set,}); + } + } + + #In theory $response->resume(resumptionToken => resumptionToken) should kick off another response... + warn $response->message if $response->is_error; + } +} + +1; diff --git a/Koha/SavedTask.pm b/Koha/SavedTask.pm new file mode 100755 index 0000000..89cbc51 --- /dev/null +++ b/Koha/SavedTask.pm @@ -0,0 +1,86 @@ +package Koha::SavedTask; + +# Copyright Prosentient Systems 2016 +# +# This file is part of Koha. +# +# Koha is free software; you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation; either version 3 of the License, or (at your option) any later +# version. +# +# Koha is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with Koha; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
+ +use Modern::Perl; + +use Carp; + +use JSON; + +use base qw(Koha::Object); + + + +=head1 NAME + +Koha::SavedTask - + +=head1 API + +=head2 Class Methods + +=cut + + + +=head3 _type + +=cut + +sub _type { + return 'SavedTask'; +} + +sub params_as_perl { + my ($self) = @_; + my $perl = from_json($self->params); + return $perl; +} + +sub serialize { + my ($self,$args) = @_; + my $for = $args->{for}; + my $type = $args->{type}; + if ($for eq 'icarus'){ + my $json_params = $self->params; + my $perl_params = from_json($json_params); + + my $icarus_task = { + type => $self->task_type, + start => $self->start_time, + repeat_interval => $self->repeat_interval, + params => $perl_params, + }; + if ($type eq 'perl'){ + return $icarus_task; + } elsif ($type eq 'json'){ + my $json = to_json($icarus_task); + return $json; + } + } + return undef; +} + +=head1 AUTHOR + +David Cook + +=cut + +1; diff --git a/Koha/SavedTasks.pm b/Koha/SavedTasks.pm new file mode 100755 index 0000000..a1ae9c5 --- /dev/null +++ b/Koha/SavedTasks.pm @@ -0,0 +1,62 @@ +package Koha::SavedTasks; + +# Copyright Prosentient Systems 2016 +# +# This file is part of Koha. +# +# Koha is free software; you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation; either version 3 of the License, or (at your option) any later +# version. +# +# Koha is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with Koha; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
+ +use Modern::Perl; + +use Carp; + +use Koha::Database; + +use Koha::SavedTask; + +use base qw(Koha::Objects); + +=head1 NAME + +Koha::SavedTasks - + +=head1 API + +=head2 Class Methods + +=cut + +=head3 _type + +=cut + +sub _type { + return 'SavedTask'; +} + +=head3 object_class + +=cut + +sub object_class { + return 'Koha::SavedTask'; +} + +=head1 AUTHOR + +David Cook + +=cut + +1; diff --git a/Koha/Schema/Result/SavedTask.pm b/Koha/Schema/Result/SavedTask.pm new file mode 100755 index 0000000..948804f --- /dev/null +++ b/Koha/Schema/Result/SavedTask.pm @@ -0,0 +1,98 @@ +use utf8; +package Koha::Schema::Result::SavedTask; + +# Created by DBIx::Class::Schema::Loader +# DO NOT MODIFY THE FIRST PART OF THIS FILE + +=head1 NAME + +Koha::Schema::Result::SavedTask + +=cut + +use strict; +use warnings; + +use base 'DBIx::Class::Core'; + +=head1 TABLE: C + +=cut + +__PACKAGE__->table("saved_tasks"); + +=head1 ACCESSORS + +=head2 task_id + + data_type: 'integer' + extra: {unsigned => 1} + is_auto_increment: 1 + is_nullable: 0 + +=head2 start_time + + data_type: 'datetime' + datetime_undef_if_invalid: 1 + is_nullable: 0 + +=head2 repeat_interval + + data_type: 'integer' + extra: {unsigned => 1} + is_nullable: 0 + +=head2 task_type + + data_type: 'varchar' + is_nullable: 0 + size: 255 + +=head2 params + + data_type: 'text' + is_nullable: 0 + +=cut + +__PACKAGE__->add_columns( + "task_id", + { + data_type => "integer", + extra => { unsigned => 1 }, + is_auto_increment => 1, + is_nullable => 0, + }, + "start_time", + { + data_type => "datetime", + datetime_undef_if_invalid => 1, + is_nullable => 0, + }, + "repeat_interval", + { data_type => "integer", extra => { unsigned => 1 }, is_nullable => 0 }, + "task_type", + { data_type => "varchar", is_nullable => 0, size => 255 }, + "params", + { data_type => "text", is_nullable => 0 }, +); + +=head1 PRIMARY KEY + +=over 4 + +=item * L + +=back + +=cut + +__PACKAGE__->set_primary_key("task_id"); + + +# Created by DBIx::Class::Schema::Loader v0.07042 @ 2016-01-27 13:35:22 +# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:gnoi7I9fiXM3IfDysMTm+A + + +# You can replace this text with custom code or comments, and it will be preserved on regeneration +1; diff --git a/admin/saved_tasks.pl b/admin/saved_tasks.pl new file mode 100755 index 0000000..527c65e --- /dev/null +++ b/admin/saved_tasks.pl @@ -0,0 +1,338 @@ +#!/usr/bin/perl + +# Copyright Prosentient Systems 2016 +# +# This file is part of Koha. +# +# Koha is free software; you can redistribute it and/or modify it +# under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 3 of the License, or +# (at your option) any later version. +# +# Koha is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Koha; if not, see . 
+ +=head1 NAME + +saved_tasks.pl + +=head1 DESCRIPTION + +Admin page to manage saved tasks + +=cut + +use Modern::Perl; +use CGI qw ( -utf8 ); +use C4::Auth; +use C4::Output; +use C4::Context; + +use Koha::SavedTasks; +use Koha::Icarus; +use Module::Load::Conditional qw/can_load check_install/; +use JSON; + +my $input = new CGI; +my ($template, $loggedinuser, $cookie, $flags) = get_template_and_user( { + template_name => 'admin/saved_tasks.tt', + query => $input, + type => 'intranet', + authnotrequired => 0, + flagsrequired => { 'parameters' => 'parameters_remaining_permissions' }, +} ); + +my $filename = "saved_tasks.pl"; +$template->param( + filename => $filename, +); + +my $context = C4::Context->new(); + + +my $task_server = $input->param("task_server") // "icarus"; + + +my $socket_uri = $context->{"icarus"}->{"socket"}; + +my @available_plugins = (); +my $task_plugins = $context->{"icarus"}->{"task_plugin"}; +if ($task_plugins && ref $task_plugins eq 'ARRAY'){ + #FIXME: This should probably be a module method... validation that a plugin is installed... + foreach my $task_plugin (@$task_plugins){ + #Check that plugin module is installed + if ( check_install( module => $task_plugin ) ){ + push(@available_plugins,$task_plugin); + } + } +} + +$template->param( + available_plugins => \@available_plugins, +); + +#Server action and task id +my $server_action = $input->param("server_action"); +my $server_task_id = $input->param('server_task_id'); + +#Saved task op +my $op = $input->param('op'); +my $step = $input->param('step'); + +#Saved task id +my $saved_task_id = $input->param('saved_task_id'); + + +#Create Koha-Icarus interface object +my $icarus = Koha::Icarus->new({ socket_uri => $socket_uri }); +my $daemon_status = ""; + +#Connect to Icarus +if ( $icarus->connect() ){ + $daemon_status = "online"; + if ($server_action){ + if ($server_action eq 'shutdown'){ + my $response = $icarus->shutdown; + if ( $response && (my $action = $response->{action}) ){ + $daemon_status = $action; + } + } elsif ($server_action eq 'start' && $server_task_id){ + my $response = $icarus->start_task({ task_id => $server_task_id }); + $template->param( + task_response => $response, + ); + } elsif ($server_action eq 'remove' && $server_task_id){ + my $response = $icarus->remove_task({ task_id => $server_task_id }); + $template->param( + task_response => $response, + ); + } + } +} else { + $daemon_status = $!; +} +$template->param( + daemon_status => $daemon_status, +); + + + +my $params = $input->param("params"); + +#NOTE: Parse the parameters manually, so that you can "name[]" style of parameter, which we use in the special plugin templates... 
+my $saved_params = {}; +#Fetch the names of all the parameters passed to your script +my @parameter_names = $input->param; +#Iterate through these parameter names and look for "params[]" +foreach my $parameter_name (@parameter_names){ + if ($parameter_name =~ /^params\[(.*)\]$/){ + #Capture the hash key + my $key = $1; + #Fetch the actual individual value + my $parameter_value = $input->param($parameter_name); + if ($parameter_value){ + $saved_params->{$key} = $parameter_value; + } + } +} +if (%$saved_params){ + my $json = to_json($saved_params, { pretty => 1, }); + if ($json){ + $params = $json; + } +} + +my $start_time = $input->param("start_time"); +my $repeat_interval = $input->param("repeat_interval"); +my $task_type = $input->param("task_type"); +if ($task_type){ + my $task_template = $task_type; + #Create the template name by stripping the colons out of the task type text + $task_template =~ s/://g; + $template->param( + task_template => "tasks/$task_template.inc", + ); +} + + +if ($op){ + if ($op eq 'new'){ + + } elsif ($op eq 'create'){ + + #Validate the $task here + if ($step){ + if ($step eq "one"){ + + $op = "new"; + $template->param( + step => "two", + task_type => $task_type, + ); + } elsif ($step eq "two"){ + my $new_task = Koha::SavedTask->new({ + start_time => $start_time, + repeat_interval => $repeat_interval, + task_type => $task_type, + params => $params, + }); + + #Serialize the data as an Icarus task + my $icarus_task = $new_task->serialize({ for => "icarus", type => "perl", }); + + my $valid = 1; + #Load the plugin module, and create an object instance in order to validate user-entered data + if ( can_load( modules => { $task_type => undef, }, ) ){ + my $plugin = $task_type->new({ task => $icarus_task, }); + if ($plugin->can("validate")){ + my $errors = $plugin->validate({ + "tests" => "all", + }); + if (%$errors){ + $template->param( + errors => $errors, + ); + } + if ($plugin->{invalid_data} > 0){ + $valid = 0; + } + } + } + + if ($valid){ + $new_task->store(); + $op = "list"; + } else { + $op = "new"; + #Create a Perl data structure from the JSON + my $editable_params = from_json($params); + $template->param( + step => "two", + task_type => $task_type, + saved_task => $new_task, + params => $editable_params, + ); + } + } + } + + } elsif ($op eq 'edit'){ + my $task = Koha::SavedTasks->find($saved_task_id); + if ($task){ + #Check if the task's saved task type is actually available... + #FIXME: This should be a Koha::Icarus method... + my $task_type_is_valid = grep { $task->task_type eq $_ } @available_plugins; + $template->param( + task_type_is_valid => $task_type_is_valid, + saved_task => $task, + ); + } + } elsif ($op eq 'update'){ + if ($step){ + my $task = Koha::SavedTasks->find($saved_task_id); + if ($task){ + if ($step eq "one"){ + #We've completed step one, which is choosing the task type, + #so now we're going to populate the form for editing the rest of the values + $op = "edit"; + #This is the JSON string that we've saved in the database + my $current_params_string = $task->params; + my $editable_params = from_json($current_params_string); + + $template->param( + step => "two", + task_type => $task_type, + saved_task => $task, + params => $editable_params, + + ); + } elsif ($step eq "two"){ + #We've completed step two, so we're storing the data now... + $task->set({ + start_time => $start_time, + repeat_interval => $repeat_interval, + task_type => $task_type, + params => $params, + }); + $task->store; + #FIXME: Validate the $task here... 
+ if (my $valid = 1){ + $op = "list"; + } else { + $op = "edit"; + $template->param( + step => "two", + task_type => $task_type, + saved_task => $task, + ); + } + } + } + } + } elsif ($op eq 'send'){ + if ($icarus->connected){ + if ($saved_task_id){ + #Look up task + my $task = Koha::SavedTasks->find($saved_task_id); + if ($task){ + #Create a task for Icarus, and send it to Icarus + my $icarus_task = $task->serialize({ for => "icarus", type => "perl", }); + if ($icarus_task){ + $icarus->add_task({ task => $icarus_task, }); + $op = "list"; + } + } + } + } + } elsif ($op eq 'delete'){ + my $saved_response = "delete_failure"; + if ($saved_task_id){ + #Look up task + my $task = Koha::SavedTasks->find($saved_task_id); + if ($task){ + if (my $something = $task->delete){ + $saved_response = "delete_success"; + } + } + } + $template->param( + saved_response => $saved_response, + ); + $op = "list"; + } else { + #Don't recognize $op, so fallback to list + $op = "list"; + } +} else { + #No $op, so fallback to list + $op = "list"; +} + +if ($op eq 'list'){ + #Get active tasks from Icarus + if ($icarus->connected){ + my $tasks = $icarus->list_tasks(); + if ($tasks && @$tasks){ + $template->param( + tasks => $tasks, + ); + } + } + + #Get saved tasks from Koha + my @saved_tasks = Koha::SavedTasks->as_list(); + $template->param( + saved_tasks => \@saved_tasks, + ); +} + +$template->param( + op => $op, +); + +output_html_with_http_headers $input, $cookie, $template->output; diff --git a/docs/Icarus/README b/docs/Icarus/README new file mode 100755 index 0000000..81493a7 --- /dev/null +++ b/docs/Icarus/README @@ -0,0 +1,72 @@ +TODO: + - Feature "svc/import_oai" + - ***Process deletions + - Check if there is a status and if the status is "deleted"... + - Create an empty MARCXML record, add the OAI-PMH identifier in as a 024$a with a 024$2 of "uri" ( set 942$n=1 for OpacSuppression, set LDR05=d for deleted status...) + - You'll want to use the C4::Matcher to get the matches... and then try deleting them. If there's a problem, you'll need to note it in the import_oai table somehow... + - NOTE: Deletion support REQUIRES Bug 15541 && Bug 15555 && Bug 15745 + - Add a "file_name" which makes it useful... maybe something like saved_task_id:1 + - That way, you could have an interface for viewing all records harvested from a certain saved_task... + - You can't really provide a good way of undoing a whole harvest, since harvests are done incrementally every few seconds... + - But for testing purposes, you could make it a bit easier... + - Do something with the status field? + + - Validation: + "Koha::Icarus::Task::Dequeue::OAIPMH::Biblio": + - Validate HTTP URLs and filepaths... + - Add PLUGIN->validate("parameter_names") + - Add PLUGIN->validate("parameter_values") + - For the downloader, this would validate HTTP && OAI-PMH parameters... + + - Install/Configuration: + - You should make Makefile.PL prompt them for koha-conf.xml configuration options (max_tasks, log?, pidfile, socket)... + - The task_plugin options could be provided by default... + + + - Cleanup: + - Remove any unnecessary logging + - Clean up all the code... + + + + + + + + + + +POSSIBLE IMPROVEMENTS: + - Add default OAI record matching rule + - I thought about adding an atomic update 'bug_10662-Add_oai_record_matching_rule.sql', but adding matching rules seems complex + - Should the field include other fields like 022, 020, 245 rather than just 001 and 024a? 
+ - Add entry to cleanup_database.pl cronjob + - You could remove all import_oai rows older than a certain age? + - Make the "Task type" prettier (and translatable) on saved_tasks.pl. + - Provide more options for the Icarus dashboard + - Add the ability to "edit" and "pause" active Icarus tasks + - Make "Koha::Icarus::Task::Dequeue::OAIPMH::Biblio" use asynchronous HTTP requests to speed up the import + - Add help pages for the web GUI + - Add documentation to all code... + - Add unit tests + + + +DESIGN CHANGES?: + - Web UI: + - Add `name` to saved_tasks? + - Move "Saved tasks" from Administration to Tools? + - Look at existing bugs for schedulers: + - https://bugs.koha-community.org/bugzilla3/show_bug.cgi?id=14712 + - https://bugs.koha-community.org/bugzilla3/show_bug.cgi?id=1993 + - Handle datestamp granularity better for OAI-PMH download tasks? + - Change `import_oai` database table? + - Add record_type column? + - The only way you could know the record_type is if you passed it via the task data... + - In the past, I used to store record_type and original_system_field... + - What sort of statuses does import_oai use? Add/update/error/ignore? + - Do I need to store metadata_prefix? + - Misc: + - Instead of using file:///kohawebs/dev/dcook/koha-dev/var/spool/oaipmh, why not use something like file:///tmp/koha-dev/oaipmh? I suppose because you might be able to access someone else's files? + +PROBLEMS: diff --git a/installer/data/mysql/atomicupdate/bug_10662-Build_import_oai_table.sql b/installer/data/mysql/atomicupdate/bug_10662-Build_import_oai_table.sql new file mode 100644 index 0000000..0c924e4 --- /dev/null +++ b/installer/data/mysql/atomicupdate/bug_10662-Build_import_oai_table.sql @@ -0,0 +1,21 @@ +DROP TABLE IF EXISTS import_oai; +CREATE TABLE import_oai ( + import_oai_id int(10) unsigned NOT NULL AUTO_INCREMENT, + header_identifier varchar(45) CHARACTER SET utf8 NOT NULL, + header_datestamp datetime NOT NULL, + header_status varchar(45) CHARACTER SET utf8 DEFAULT NULL, + metadata longtext CHARACTER SET utf8 NOT NULL, + last_modified timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, + status varchar(45) CHARACTER SET utf8 NOT NULL, + PRIMARY KEY (import_oai_id) +) ENGINE=InnoDB AUTO_INCREMENT=297 DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; + +DROP TABLE IF EXISTS saved_tasks; +CREATE TABLE saved_tasks ( + task_id int(10) unsigned NOT NULL AUTO_INCREMENT, + start_time datetime NOT NULL, + repeat_interval int(10) unsigned NOT NULL, + task_type varchar(255) CHARACTER SET utf8 NOT NULL, + params text CHARACTER SET utf8 NOT NULL, + PRIMARY KEY (task_id) USING BTREE +) ENGINE=InnoDB AUTO_INCREMENT=13 DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; diff --git a/installer/data/mysql/kohastructure.sql b/installer/data/mysql/kohastructure.sql index e6e3142..4d64abc 100644 --- a/installer/data/mysql/kohastructure.sql +++ b/installer/data/mysql/kohastructure.sql @@ -3723,6 +3723,37 @@ CREATE TABLE IF NOT EXISTS edifact_ean ( CONSTRAINT efk_branchcode FOREIGN KEY ( branchcode ) REFERENCES branches ( branchcode ) ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; +-- +-- Table structure for table 'import_oai' +-- + +DROP TABLE IF EXISTS import_oai; +CREATE TABLE import_oai ( + import_oai_id int(10) unsigned NOT NULL AUTO_INCREMENT, + header_identifier varchar(45) CHARACTER SET utf8 NOT NULL, + header_datestamp datetime NOT NULL, + header_status varchar(45) CHARACTER SET utf8 DEFAULT NULL, + metadata longtext CHARACTER SET utf8 NOT NULL, + last_modified timestamp
NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, + status varchar(45) CHARACTER SET utf8 NOT NULL, + PRIMARY KEY (import_oai_id) +) ENGINE=InnoDB AUTO_INCREMENT=297 DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; + +-- +-- Table structure for table 'saved_tasks' +-- + +DROP TABLE IF EXISTS saved_tasks; +CREATE TABLE saved_tasks ( + task_id int(10) unsigned NOT NULL AUTO_INCREMENT, + start_time datetime NOT NULL, + repeat_interval int(10) unsigned NOT NULL, + task_type varchar(255) CHARACTER SET utf8 NOT NULL, + params text CHARACTER SET utf8 NOT NULL, + PRIMARY KEY (task_id) USING BTREE +) ENGINE=InnoDB AUTO_INCREMENT=13 DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; + + /*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */; /*!40101 SET SQL_MODE=@OLD_SQL_MODE */; /*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */; diff --git a/koha-tmpl/intranet-tmpl/prog/en/includes/admin-menu.inc b/koha-tmpl/intranet-tmpl/prog/en/includes/admin-menu.inc index 8a21712..88e39e7 100644 --- a/koha-tmpl/intranet-tmpl/prog/en/includes/admin-menu.inc +++ b/koha-tmpl/intranet-tmpl/prog/en/includes/admin-menu.inc @@ -78,6 +78,7 @@ [% IF Koha.Preference('SMSSendDriver') == 'Email' %]
  • SMS cellular providers
  • [% END %] +
  • Saved tasks
  • diff --git a/koha-tmpl/intranet-tmpl/prog/en/includes/tasks/KohaIcarusTaskDequeueOAIPMHBiblio.inc b/koha-tmpl/intranet-tmpl/prog/en/includes/tasks/KohaIcarusTaskDequeueOAIPMHBiblio.inc new file mode 100644 index 0000000..415e1c1 --- /dev/null +++ b/koha-tmpl/intranet-tmpl/prog/en/includes/tasks/KohaIcarusTaskDequeueOAIPMHBiblio.inc @@ -0,0 +1,143 @@ +[%# Use CGI plugin to create a default target URI %] +[%# TODO: Test if this works with Plack... %] +[% USE CGI %] +[% server = CGI.virtual_host %] +[% IF ( server_port = CGI.virtual_port ) %] + [% IF ( server_port != '80' ) && ( server_port != '443' ) %] + [% server = server _ ':' _ server_port %] + [% END %] +[% END %] +[% default_auth_uri = 'http://' _ server _ '/cgi-bin/koha/svc/authentication' %] +[% default_target_uri = 'http://' _ server _ '/cgi-bin/koha/svc/import_oai' %] +
    + Import source parameters: +
      +
1. + + [% IF ( params.queue ) %] + + [% ELSE %] + + [% END %] + This is a file:// URI pointing to a spool directory on your system, like file:///var/spool/koha/libraryname/oaipmh +
    2. +
    +
    +
    + API authentication parameters: +
      +
1. + + [% IF ( params.auth_uri ) %] + + [% ELSE %] + + [% END %] + [% IF (errors.auth_uri.no_path) %][The URL must have a path after "http://" like "koha-community.org/cgi-bin/koha/svc/authentication".][% END %] + [% IF (errors.auth_uri.not_http) %][The URL must begin with a scheme of "http://" like "http://koha-community.org/cgi-bin/koha/svc/authentication".][% END %] + [% IF (errors.auth_uri.not_a_url) %][The value of this field must be a URL like "http://koha-community.org/cgi-bin/koha/svc/authentication".][% END %] + This is a Koha authentication URL. The default value provided should work for most installations.
    2. +
    3. + + + This user must have permission to edit the catalogue. +
    4. +
    5. + + +
    6. +
    +
    +
    + Import target parameters: +
      +
1. + + [% IF ( params.target_uri ) %] + + [% ELSE %] + + [% END %] + [% IF (errors.target_uri.no_path) %][The URL must have a path after "http://" like "koha-community.org/cgi-bin/koha/svc/import_oai".][% END %] + [% IF (errors.target_uri.not_http) %][The URL must begin with a scheme of "http://" like "http://koha-community.org/cgi-bin/koha/svc/import_oai".][% END %] + [% IF (errors.target_uri.not_a_url) %][The value of this field must be a URL like "http://koha-community.org/cgi-bin/koha/svc/import_oai".][% END %] +
    2. + +
    3. + + + This code must exist in "Record matching rules" in Administration for record matching to work. (Example code: OAI) +
    4. +
    5. + [%# TODO: Ideally, I'd like to use 'tools-overlay-action.inc' but the logic doesn't work here. Perhaps it would be better as a TT plugin. %] + + +
    6. +
    7. + [%# TODO: Ideally, I'd like to use 'tools-nomatch-action.inc' but the logic doesn't work here. Perhaps it would be better as a TT plugin. %] + + +
    8. +
    9. + + [%# TODO: Will you ever have a different mode than ignore? %] + + +
    10. +
    11. + + [%# TODO: Will you ever have a different mode than direct? %] + + +
    12. +
    13. + +
    14. +
15. + + [% IF ( params.filter ) %] + + [% ELSE %] + + [% END %] + This is a file:// URI pointing to an XSLT filter on your system, like file:///etc/koha/sites/libraryname/OAI2MARC21slim.xsl or file:///usr/share/koha/intranet/htdocs/intranet-tmpl/prog/en/xslt/OAI2MARC21slim.xsl +
    16. + +
    17. + +
    18. +
    +
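For context, the import task configured by the form above authenticates against svc/authentication and then POSTs each harvested record to svc/import_oai (both included later in this patch). The sketch below only illustrates that round trip; it is not the actual Koha::Icarus::Task::Dequeue::OAIPMH::Biblio code, the URLs and credentials are placeholders, and the parameter names simply mirror the form fields above and what svc/import_oai reads.

    use Modern::Perl;
    use LWP::UserAgent;
    use HTTP::Cookies;

    # Placeholder values for illustration only
    my $auth_uri       = 'http://koha-community.org/cgi-bin/koha/svc/authentication';
    my $target_uri     = 'http://koha-community.org/cgi-bin/koha/svc/import_oai';
    my $oai_record_xml = do { local $/; <STDIN> };    # one harvested OAI-PMH <record> element

    my $ua = LWP::UserAgent->new( cookie_jar => HTTP::Cookies->new() );

    # Log in as a user with edit_catalogue permission to obtain a CGISESSID cookie
    my $auth = $ua->post( $auth_uri, { userid => 'koha_user', password => 'koha_pass' } );
    die "Authentication failed: " . $auth->status_line unless $auth->is_success;

    # POST the record plus the import settings collected by the form above
    my $response = $ua->post( $target_uri, {
        xml            => $oai_record_xml,
        match          => 'OAI',          # record matching rule code
        overlay_action => 'replace',
        nomatch_action => 'create_new',
        item_action    => 'ignore',
        import_mode    => 'direct',
        filter         => 'file:///usr/share/koha/intranet/htdocs/intranet-tmpl/prog/en/xslt/OAI2MARC21slim.xsl',
    } );

    # svc/import_oai replies with XML containing status, match_status, biblionumber, and url
    print $response->decoded_content, "\n";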
    diff --git a/koha-tmpl/intranet-tmpl/prog/en/includes/tasks/KohaIcarusTaskEnqueueOAIPMHBiblio.inc b/koha-tmpl/intranet-tmpl/prog/en/includes/tasks/KohaIcarusTaskEnqueueOAIPMHBiblio.inc new file mode 100644 index 0000000..a66af5c --- /dev/null +++ b/koha-tmpl/intranet-tmpl/prog/en/includes/tasks/KohaIcarusTaskEnqueueOAIPMHBiblio.inc @@ -0,0 +1,87 @@ +[%# USE CGI %] +[%# server_name = CGI.server_name; server_port = CGI.server_port; server = server_name _ ":" _ server_port; %] + +
    + HTTP parameters: +
      +
1. + + [% IF ( params.url ) %] + + [% ELSE %] + + [% END %] + [% IF (errors.url.no_path) %][The URL must have a path after "http://" like "koha-community.org/cgi-bin/koha/oai.pl".][% END %] + [% IF (errors.url.not_http) %][The URL must begin with a scheme of "http://" like "http://koha-community.org/cgi-bin/koha/oai.pl".][% END %] + [% IF (errors.url.not_a_url) %][The value of this field must be a URL like "http://koha-community.org/cgi-bin/koha/oai.pl".][% END %] + +
    2. +
+ The following parameters are not required by all OAI-PMH repositories, so they may be left blank for this task. +
      +
    1. + + +
    2. +
    3. + + +
    4. +
    5. + + +
    6. +
    +
    +
    + OAI-PMH parameters: +
      +
    1. + + +
    2. +
    3. + + + This identifier will only be used with the GetRecord verb. +
    4. +
5. + + You may specify several sets by separating them with a pipe (e.g. set1|set2) +
    6. +
    7. + + +
    8. +
    9. + + This value will be treated as UTC time. Note that some repositories only support YYYY-MM-DD datestamps. +
    10. +
    11. + + This value will be treated as UTC time. Note that some repositories only support YYYY-MM-DD datestamps. +
    12. +
    +
    +
    + Download parameters: +
      +
1. + + [% IF ( params.queue ) %] + + [% ELSE %] + + [% END %] + This is a file:// URI pointing to a spool directory on your system, like file:///var/spool/koha/libraryname/oaipmh +
    2. +
    +
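The form above collects standard OAI-PMH request parameters; the download task then issues requests along the lines of the sketch below and spools each harvested record into the queue. This is not the Koha::Icarus::Task::Enqueue::OAIPMH::Biblio implementation itself; the base URL, set, and metadataPrefix values are placeholders, and a full harvest would also follow resumptionToken.

    use Modern::Perl;
    use URI;
    use LWP::UserAgent;

    # Placeholder repository and request parameters for illustration only
    my $uri = URI->new('http://koha-community.org/cgi-bin/koha/oai.pl');
    $uri->query_form(
        verb           => 'ListRecords',
        metadataPrefix => 'marcxml',
        set            => 'set1',
        'from'         => '2016-01-01',    # some repositories only support YYYY-MM-DD datestamps
        'until'        => '2016-12-31',
    );

    my $ua       = LWP::UserAgent->new();
    my $response = $ua->get($uri);
    die "OAI-PMH request failed: " . $response->status_line unless $response->is_success;

    # The body is an OAI-PMH <ListRecords> response; individual <record> elements
    # would then be written to the file:// queue configured above.
    print $response->decoded_content, "\n";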
    diff --git a/koha-tmpl/intranet-tmpl/prog/en/modules/admin/admin-home.tt b/koha-tmpl/intranet-tmpl/prog/en/modules/admin/admin-home.tt index 32bca5a..0e1760f 100644 --- a/koha-tmpl/intranet-tmpl/prog/en/modules/admin/admin-home.tt +++ b/koha-tmpl/intranet-tmpl/prog/en/modules/admin/admin-home.tt @@ -120,6 +120,8 @@
    SMS cellular providers
    Define a list of cellular providers for sending SMS messages via email.
    [% END %] +
    Saved tasks
    +
Define tasks which may be run in the background.
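The page linked above stores each task as a row in the new saved_tasks table, with its task-specific settings serialized as JSON in the params column (see admin/saved_tasks.pl earlier in this patch). Roughly, and skipping the validation the controller performs, creating one programmatically would look like the sketch below; apart from url and queue, which mirror the task form fields, the values are placeholders.

    use Modern::Perl;
    use JSON;
    use Koha::SavedTask;

    # Placeholder settings for a download (enqueue) task
    my $params = to_json({
        url   => 'http://koha-community.org/cgi-bin/koha/oai.pl',
        queue => 'file:///var/spool/koha/libraryname/oaipmh',
    });

    my $task = Koha::SavedTask->new({
        start_time      => '2016-01-01 00:00:00',
        repeat_interval => 30,
        task_type       => 'Koha::Icarus::Task::Enqueue::OAIPMH::Biblio',
        params          => $params,
    });
    $task->store();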
    diff --git a/koha-tmpl/intranet-tmpl/prog/en/modules/admin/saved_tasks.tt b/koha-tmpl/intranet-tmpl/prog/en/modules/admin/saved_tasks.tt new file mode 100644 index 0000000..378d846 --- /dev/null +++ b/koha-tmpl/intranet-tmpl/prog/en/modules/admin/saved_tasks.tt @@ -0,0 +1,333 @@ +[% INCLUDE 'doc-head-open.inc' %] +Koha › Administration › Saved tasks +[% INCLUDE 'doc-head-close.inc' %] +[% INCLUDE 'calendar.inc' %] + +[% INCLUDE 'timepicker.inc' %] +[% IF ( op == "list" ) %] + + [% INCLUDE 'datatables.inc' %] + +[% ELSIF ( op == "edit" ) || ( op == "new" ) %] + + +[% END %] + + + +[% INCLUDE 'header.inc' %] +[% INCLUDE 'cat-search.inc' %] + + +
    + +
    +
    +
    + [% IF ( op ) %] + [% IF ( op == "new" ) || ( op == "edit" ) %] + [%# If step is undefined, force it to be step one %] + [% IF ( ! step ); step = "one"; END; %] + + + + [%# HEADING %] + [% IF ( op == "new" ) %] +

    New saved task

    + [% ELSIF ( op == "edit" ) %] +

    Modify saved task

    + [% END %] + [%# /HEADING %] + + [%# TODO: Get this working properly...
    Validation failed.
    #] + + [%# FORM %] +
    + [% IF ( op == "new" ) %] + + [% ELSIF ( op == "edit" ) %] + + + [% END %] + +
    +
      + [% IF ( op == "edit") && ( step == "one" ) && (! task_type_is_valid ) %] +
    1. + + + Sorry! This task type is invalid. Please choose a new one from the following list. +
    2. + [% END %] +
    3. + + [% IF ( step == "one" ) %] + [% IF ( available_plugins ) %] + + [% END %] + + [% ELSIF ( step == "two" ) %] + + + [% END %] +
    4. +
    +
    + + [% IF ( step == "one" ) %] +
    + + Cancel +
    + [% ELSIF ( step == "two" ) %] +
    + Task: +
      +
1. + + + This value will be treated as local server time; a start time in the past will cause the task to start immediately. +
    2. +
    3. + + + seconds + [% IF (errors.repeat_interval.not_numeric) %][The repeat interval must be a purely numeric value.][% END %] +
    4. +
    +
    + [%# Try to include the template, but if it fails, fallback to a regular text view %] + [% TRY %] + [% INCLUDE $task_template %] + [% CATCH %] +
    + Plugin parameters: +
      +
    1. + + +
    2. +
    +
    + [% END %] +
    + + Cancel +
    + [% END %] +
    + [%# /FORM %] + [% END #/edit or new %] + + + [% IF ( op == "list" ) %] + +

    Saved tasks

    + [% IF ( saved_response ) %] + [% IF ( saved_response == 'delete_success' ) %] +
    Deletion successful.
    + [% ELSIF ( saved_response == 'delete_failure' ) %] +
    Deletion failed.
    + [% END %] + [% END %] + + + + + + + + + + + + + + [% FOREACH saved_task IN saved_tasks %] + + + + + + + + + + [% END %] + +
    Start timeRepeat intervalTask typeParams
[% IF saved_task.start_time != "0000-00-00 00:00:00"; saved_task.start_time; END; %][% saved_task.repeat_interval %][% saved_task.task_type %] +
      + [% FOREACH pair IN saved_task.params_as_perl.pairs %] +
    • [% pair.key %] => [% pair.value %]
    • + [% END %] +
    +
    EditSend to IcarusDelete
    +
    +

    Icarus dashboard

    + + + + + + + + [%# TODO: Also provide controls for starting/restarting Icarus? %] + + +
    Status
+ + [% IF ( daemon_status == 'Permission denied' ) #Apache doesn't have permission to write to socket + || ( daemon_status == 'Connection refused' ) #Socket exists, but server is down + || ( daemon_status == 'No such file or directory' ) #Socket doesn't exist at all + %] + Unable to contact Icarus + [% ELSIF ( daemon_status == 'online' ) %] + Online + [% ELSIF ( daemon_status == 'shutting down' ) %] + Shutting down + [% ELSE %] + [% daemon_status %] + [% END %] + Shut down Icarus
    +
    +
    +
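The daemon_status strings tested in the dashboard above ('Permission denied', 'Connection refused', 'No such file or directory') are simply the errno strings reported when a connection to Icarus's UNIX socket fails. A standalone illustration of that mapping, assuming the socket path configured under <icarus> in koha-conf.xml; this is not the code the controller actually uses to probe the daemon.

    use Modern::Perl;
    use IO::Socket::UNIX;

    # Placeholder path; in practice this comes from the <socket> entry in koha-conf.xml
    my $socket_path = '/home/user/koha-dev/var/run/icarus.sock';

    my $socket = IO::Socket::UNIX->new( Type => SOCK_STREAM, Peer => $socket_path );
    if ($socket) {
        print "online\n";    # Icarus accepted the connection
        $socket->close;
    } else {
        # $! holds "No such file or directory" (socket missing),
        # "Connection refused" (socket present but the daemon is down),
        # or "Permission denied" (the web server user cannot access the socket)
        print "$!\n";
    }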

    Active Icarus tasks

    + [% IF ( task_response ) %] + [% IF ( task_response.action == 'error' ) %] + [% IF ( task_response.error_message ) %] + [% IF ( task_response.error_message == 'No such process' ) %] +
    Task [% task_response.task_id %] does not exist.
    + [% END %] + [% END %] + [% ELSIF ( task_response.action == 'pending' ) %] +
    Initialising task [% task_response.task_id %].
    + [% ELSIF ( task_response.action == 'already pending' ) %] +
    Already initialised task [% task_response.task_id %].
    + [% ELSIF ( task_response.action == 'already started' ) %] +
    Already started task [% task_response.task_id %].
    + [% ELSIF ( task_response.action == 'removed' ) %] +
    Removing task [% task_response.task_id %].
    + [% END %] + [% END %] + [% IF ( tasks ) %] + + + + + + + + + + + + + + + [% FOREACH task IN tasks %] + + + + + + + + + + + [% END %] + +
    Task idStatusNext start time (local server time)Repeat intervalTask typeParams
    [% task.task_id %] + [% SWITCH task.task.status %] + [% CASE 'new' %] + New + [% CASE 'pending' %] + Pending + [% CASE 'started' %] + Started + [% CASE 'stopping' %] + Stopping + [% CASE %] + [% task.task.status %] + [% END %] + [% task.task.start %][% task.task.repeat_interval %][% task.task.type %] +
      + [% FOREACH pair IN task.task.params.pairs %] +
    • [% pair.key %] => [% pair.value %]
    • + [% END %] +
    +
    StartRemove
    + [% END %] +
    + [% END #/list %] + [% END #/op %] +
    +
    +
    + [% INCLUDE 'admin-menu.inc' %] +
    +
    +[% INCLUDE 'intranet-bottom.inc' %] diff --git a/koha-tmpl/intranet-tmpl/prog/en/xslt/OAI2MARC21slim.xsl b/koha-tmpl/intranet-tmpl/prog/en/xslt/OAI2MARC21slim.xsl new file mode 100755 index 0000000..f76778a --- /dev/null +++ b/koha-tmpl/intranet-tmpl/prog/en/xslt/OAI2MARC21slim.xsl @@ -0,0 +1,57 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 7 + + 024 + + + a + + + + + 2 + uri + + + + + + + diff --git a/misc/bin/icarusd.pl b/misc/bin/icarusd.pl new file mode 100755 index 0000000..23c9989 --- /dev/null +++ b/misc/bin/icarusd.pl @@ -0,0 +1,156 @@ +#!/usr/bin/perl + +####################################################################### + +use Modern::Perl; +use POSIX; #For daemonizing +use Fcntl qw(:flock); #For pidfile +use Getopt::Long; + +#Make the STDOUT filehandle hot, so that you can use shell re-direction. Otherwise, you'll suffer from buffering. +STDOUT->autoflush(1); +#Note that STDERR, by default, is already hot. + +####################################################################### +#FIXME: Debugging signals +#BEGIN { +# package POE::Kernel; +# use constant TRACE_SIGNALS => 1; +#} + +use POE; +use JSON; #For Listener messages +use XML::LibXML; #For configuration files + +use Koha::Icarus::Listener; + +####################################################################### + +my ($filename,$daemon,$log); +GetOptions ( + "f|file|filename=s" => \$filename, #/kohawebs/dev/dcook/koha-dev/etc/koha-conf.xml + "l|log=s" => \$log, + "d|daemon" => \$daemon, +) or die("Error in command line arguments\n"); + +#Declare the variable with file scope so the flock stays for the duration of the process's life +my $pid_filehandle; + +#Read configuration file +my $config = read_config_file($filename); + +my $SOCK_PATH = $config->{socket}; +my $pid_file = $config->{pidfile}; +my $max_tasks = $config->{max_tasks}; + +#Overwrite configuration file with command line options +if ($log){ + $config->{log} = $log; +} + +#Go into daemon mode, if user has included flag +if ($daemon){ + daemonize(); +} + +if ($pid_file){ + #NOTE: The filehandle needs to have file scope, so that the flock is preserved. + $pid_filehandle = make_pid_file($pid_file); +} + +#FIXME: Do we want to log to file only in daemon mode? $config->{log} should be populated by either the config file or the l|log GetOpt... +if ($daemon && $config->{log}){ + log_to_file($config->{log}); +} + + +#FIXME: 1) In daemon mode, SIGUSR1 or SIGHUP for reloading/restarting? 
+####################################################################### + +#Creates Icarus Listener +Koha::Icarus::Listener->spawn({ + Socket => $SOCK_PATH, + MaxTasks => $max_tasks, + Verbosity => 1, +}); + +POE::Kernel->run(); + +exit; + +sub read_config_file { + my $filename = shift; + my $config = {}; + if ( -e $filename ){ + eval { + my $doc = XML::LibXML->load_xml(location => $filename); + if ($doc){ + my $root = $doc->documentElement; + my $icarus = $root->find('icarus')->shift; + if ($icarus){ + #Get all child nodes for the 'icarus' element + my @childnodes = $icarus->childNodes(); + foreach my $node (@childnodes){ + #Only consider nodes that are elements + if ($node->nodeType == XML_ELEMENT_NODE){ + my $config_key = $node->nodeName; + my $first_child = $node->firstChild; + #Only consider nodes that have a text node as their first child + if ($first_child && $first_child->nodeType == XML_TEXT_NODE){ + $config->{$config_key} = $first_child->nodeValue; + } + } + } + } + } + }; + } + return $config; +} + +####################################################################### +#NOTE: On Debian, you can use the daemon binary to make a process into a daemon, +# the following subs are for systems that don't have the daemon binary. + +sub daemonize { + my $pid = fork; + die "Couldn't fork: $!" unless defined($pid); + if ($pid){ + exit; #Parent exit + } + POSIX::setsid() or die "Can't start a new session: $!"; +} + +sub log_to_file { + my $logfile = shift; + #Open a filehandle to append to a log file + open(LOG, '>>', $logfile) or die "Unable to open a filehandle for $logfile: $!\n"; # --output + LOG->autoflush(1); #Make filehandle hot (ie don't buffer) + *STDOUT = *LOG; #Re-assign STDOUT to LOG | --stdout + *STDERR = *STDOUT; #Re-assign STDERR to STDOUT | --stderr +} + +sub make_pid_file { + my $pidfile = shift; + if ( ! -e $pidfile ){ + open(my $fh, '>', $pidfile) or die "Unable to write to $pidfile: $!\n"; + $fh->close; + } + + open(my $pidfilehandle, '+<', $pidfile) or die "Unable to open a filehandle for $pidfile: $!\n"; + if (flock($pidfilehandle, LOCK_EX|LOCK_NB)){ + #Write pid to pidfile + print "Acquiring lock on $pidfile\n"; + #Now that we've acquired a lock, let's truncate the file + truncate($pidfilehandle, 0); + print $pidfilehandle $$."\n" or die $!; + #Flush the filehandle so you're not suffering from buffering + $pidfilehandle->flush(); + return $pidfilehandle; + } else { + my $number = <$pidfilehandle>; + chomp($number); + warn "$0 is already running with pid $number. Exiting.\n"; + exit(1); + } +} diff --git a/svc/import_oai b/svc/import_oai new file mode 100755 index 0000000..b1d73d9 --- /dev/null +++ b/svc/import_oai @@ -0,0 +1,197 @@ +#!/usr/bin/perl + +# Copyright 2012 CatalystIT Ltd +# Copyright 2016 Prosentient Systems +# +# This file is part of Koha. +# +# Koha is free software; you can redistribute it and/or modify it +# under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 3 of the License, or +# (at your option) any later version. +# +# Koha is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Koha; if not, see . 
+# + +use Modern::Perl; +use XML::LibXML; +use URI; +use File::Basename; + +use CGI qw ( -utf8 ); +use C4::Auth qw/check_api_auth/; +use C4::Context; +use C4::ImportBatch; +use C4::Matcher; +use XML::Simple; + +my $query = new CGI; +binmode STDOUT, ':encoding(UTF-8)'; + +my ($status, $cookie, $sessionID) = check_api_auth($query, { editcatalogue => 'edit_catalogue'} ); +unless ($status eq "ok") { + print $query->header(-type => 'text/xml', -status => '403 Forbidden'); + print XMLout({ auth_status => $status }, NoAttr => 1, RootName => 'response', XMLDecl => 1); + exit 0; +} + +my $xml; +if ($query->request_method eq "POST") { + $xml = $query->param('xml'); +} +if ($xml) { + #TODO: You could probably use $query->Vars here instead... + my %params = map { $_ => $query->param($_) } $query->param; + my $result = import_oai($xml, \%params ); + print $query->header(-type => 'text/xml'); + print XMLout($result, NoAttr => 1, RootName => 'response', XMLDecl => 1); +} else { + print $query->header(-type => 'text/xml', -status => '400 Bad Request'); +} + +exit 0; + +sub import_oai { + my ($inxml, $params) = @_; + + my $result = {}; + + my $filter = delete $params->{filter} || ''; + my $import_mode = delete $params->{import_mode} || ''; + my $framework = delete $params->{framework} || ''; + + if (my $matcher_code = delete $params->{match}) { + $params->{matcher_id} = C4::Matcher::GetMatcherId($matcher_code); + } + + my $batch_id = GetWebserviceBatchId($params); + unless ($batch_id) { + $result->{'status'} = "failed"; + $result->{'error'} = "Batch create error"; + return $result; + } + + #Log it in the import_oai table here... + + #Parse the XML string into a XML::LibXML object + my $doc = XML::LibXML->load_xml(string => $inxml); + + #Get the root element + my $root = $doc->documentElement; + + #Register namespaces for searching purposes + my $xpc = XML::LibXML::XPathContext->new(); + $xpc->registerNs('oai','http://www.openarchives.org/OAI/2.0/'); + + my $xpath_identifier = XML::LibXML::XPathExpression->new("oai:header/oai:identifier"); + my $identifier = $xpc->findnodes($xpath_identifier,$root)->shift; + my $identifier_string = $identifier->textContent; + + my $xpath_datestamp = XML::LibXML::XPathExpression->new("oai:header/oai:datestamp"); + my $datestamp = $xpc->findnodes($xpath_datestamp,$root)->shift; + my $datestamp_string = $datestamp->textContent; + + my $status_string = ""; + + #OAI-PMH Header = identifier, datestamp, status, setSpec? + #OAI-PMH Metadata + + my $log_dbh = C4::Context->dbh; + my $log_sql = "INSERT INTO import_oai (header_identifier, header_datestamp, header_status, metadata) VALUES (?, ?, ?, ?)"; + my $log_sth = $log_dbh->prepare($log_sql); + $log_sth->execute($identifier_string,$datestamp_string,$status_string,$inxml); + + + + #Filter the OAI-PMH record into a MARCXML record + my $metadata_xml; + + #Source a default XSLT + my $htdocs = C4::Context->config('intrahtdocs'); + my $theme = C4::Context->preference("template"); + #FIXME: This doesn't work for UNIMARC! + my $xslfilename = "$htdocs/$theme/en/xslt/OAI2MARC21slim.xsl"; + + #FIXME: There's a better way to do these filters... 
+ if ($filter){ + my $filter_uri = URI->new($filter); + if ($filter_uri){ + my $scheme = $filter_uri->scheme; + if ($scheme && $scheme eq "file"){ + my $path = $filter_uri->path; + #Filters may theoretically be .xsl or .pm files + my($filename, $dirs, $suffix) = fileparse($path,(".xsl",".pm")); + if ($suffix && $suffix eq ".xsl"){ + #If this new path exists, change the filter XSLT to it + if ( -f $path ){ + $xslfilename = $path; + } + } + } + } + } + + if ( -f $xslfilename ){ + #FIXME: Ideally, it would be good to use Koha::XSLT_Handler here... (especially for persistent environments...) + my $xslt = XML::LibXSLT->new(); + my $style_doc = XML::LibXML->load_xml(location => $xslfilename); + my $stylesheet = $xslt->parse_stylesheet($style_doc); + if ($stylesheet){ + my $results = $stylesheet->transform($doc); + $metadata_xml = $stylesheet->output_as_bytes($results); + } + } else { + $result->{'status'} = "failed"; + $result->{'error'} = "Metadata filter unavailable"; + return $result; + } + + + + + + + + + + #Import the MARCXML record into Koha + my $marcflavour = C4::Context->preference('marcflavour') || 'MARC21'; + my $marc_record = eval {MARC::Record::new_from_xml( $metadata_xml, "utf8", $marcflavour)}; + if ($@) { + $result->{'status'} = "failed"; + $result->{'error'} = $@; + return $result; + } + + my $import_record_id = AddBiblioToBatch($batch_id, 0, $marc_record, "utf8", int(rand(99999))); + my @import_items_ids = AddItemsToImportBiblio($batch_id, $import_record_id, $marc_record, 'UPDATE COUNTS'); + + my $matcher = C4::Matcher->new($params->{record_type} || 'biblio'); + $matcher = C4::Matcher->fetch($params->{matcher_id}); + my $number_of_matches = BatchFindDuplicates($batch_id, $matcher); + + # XXX we are ignoring the result of this; + BatchCommitRecords($batch_id, $framework) if lc($import_mode) eq 'direct'; + + my $dbh = C4::Context->dbh(); + my $sth = $dbh->prepare("SELECT matched_biblionumber FROM import_biblios WHERE import_record_id =?"); + $sth->execute($import_record_id); + my $biblionumber=$sth->fetchrow_arrayref->[0] || ''; + $sth = $dbh->prepare("SELECT overlay_status FROM import_records WHERE import_record_id =?"); + $sth->execute($import_record_id); + my $match_status = $sth->fetchrow_arrayref->[0] || 'no_match'; + my $url = 'http://'. C4::Context->preference('staffClientBaseURL') .'/cgi-bin/koha/catalogue/detail.pl?biblionumber='. $biblionumber; + + $result->{'status'} = "ok"; + $result->{'import_batch_id'} = $batch_id; + $result->{'match_status'} = $match_status; + $result->{'biblionumber'} = $biblionumber; + $result->{'url'} = $url; + return $result; +} -- 2.1.4