From c1bc38e09da6e2df0d483ad3bc6adfe16102c4a4 Mon Sep 17 00:00:00 2001 From: David Cook Date: Mon, 16 May 2016 16:06:02 +1000 Subject: [PATCH] Bug 10662 - Icarus job server and Koha UI for it NOTE: You cannot use koha-gitify to test changes to koha-conf.xml NOTE: Check koha_perl_deps.pl; you may need to install POE.pm --- C4/Installer/PerlDependencies.pm | 5 + Koha/Icarus.pm | 177 ++++++++++ Koha/Icarus/Base.pm | 32 ++ Koha/Icarus/Listener.pm | 330 ++++++++++++++++++ Koha/Icarus/Task.pm | 324 ++++++++++++++++++ Koha/Icarus/Task/Base.pm | 24 ++ Koha/Icarus/Task/Download/OAIPMH/Biblio.pm | 318 +++++++++++++++++ Koha/Icarus/Task/Upload/OAIPMH/Biblio.pm | 118 +++++++ Koha/SavedTask.pm | 86 +++++ Koha/SavedTasks.pm | 62 ++++ Koha/Schema/Result/SavedTask.pm | 98 ++++++ Makefile.PL | 27 +- admin/saved_tasks.pl | 379 +++++++++++++++++++++ docs/Icarus/README | 33 ++ etc/koha-conf.xml | 10 + .../data/mysql/atomicupdate/bug_10662-Icarus.sql | 9 + .../intranet-tmpl/prog/en/includes/admin-menu.inc | 1 + .../tasks/KohaIcarusTaskDownloadOAIPMHBiblio.inc | 87 +++++ .../tasks/KohaIcarusTaskUploadOAIPMHBiblio.inc | 132 +++++++ .../prog/en/modules/admin/admin-home.tt | 2 + .../prog/en/modules/admin/saved_tasks.tt | 345 +++++++++++++++++++ misc/bin/icarusd.pl | 213 ++++++++++++ rewrite-config.PL | 3 + skel/var/run/koha/icarus/README | 1 + skel/var/spool/koha/icarus/README | 1 + 25 files changed, 2816 insertions(+), 1 deletion(-) create mode 100755 Koha/Icarus.pm create mode 100755 Koha/Icarus/Base.pm create mode 100755 Koha/Icarus/Listener.pm create mode 100755 Koha/Icarus/Task.pm create mode 100755 Koha/Icarus/Task/Base.pm create mode 100755 Koha/Icarus/Task/Download/OAIPMH/Biblio.pm create mode 100755 Koha/Icarus/Task/Upload/OAIPMH/Biblio.pm create mode 100755 Koha/SavedTask.pm create mode 100755 Koha/SavedTasks.pm create mode 100755 Koha/Schema/Result/SavedTask.pm create mode 100755 admin/saved_tasks.pl create mode 100755 docs/Icarus/README create mode 100644 installer/data/mysql/atomicupdate/bug_10662-Icarus.sql create mode 100755 koha-tmpl/intranet-tmpl/prog/en/includes/tasks/KohaIcarusTaskDownloadOAIPMHBiblio.inc create mode 100755 koha-tmpl/intranet-tmpl/prog/en/includes/tasks/KohaIcarusTaskUploadOAIPMHBiblio.inc create mode 100755 koha-tmpl/intranet-tmpl/prog/en/modules/admin/saved_tasks.tt create mode 100755 misc/bin/icarusd.pl create mode 100644 skel/var/run/koha/icarus/README create mode 100644 skel/var/spool/koha/icarus/README diff --git a/C4/Installer/PerlDependencies.pm b/C4/Installer/PerlDependencies.pm index 89df812..574f839 100644 --- a/C4/Installer/PerlDependencies.pm +++ b/C4/Installer/PerlDependencies.pm @@ -807,6 +807,11 @@ our $PERL_DEPS = { required => 1, min_ver => '1.00', }, + 'POE' => { + 'usage' => 'Icarus job server', + 'required' => '1', + 'min_ver' => '1.35', + }, }; 1; diff --git a/Koha/Icarus.pm b/Koha/Icarus.pm new file mode 100755 index 0000000..b57e691 --- /dev/null +++ b/Koha/Icarus.pm @@ -0,0 +1,177 @@ +package Koha::Icarus; + +# Copyright 2016 Prosentient Systems +# +# This file is part of Koha. +# +# Koha is free software; you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation; either version 3 of the License, or (at your option) any later +# version. +# +# Koha is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. 
+# +# You should have received a copy of the GNU General Public License along +# with Koha; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +use Modern::Perl; +use IO::Socket::UNIX; +use IO::Select; +use URI; +use JSON; + +sub new { + my ($class, $args) = @_; + $args = {} unless defined $args; + return bless ($args, $class); +} + +sub connected { + my ($self) = @_; + if ($self->{_connected}){ + return 1; + } +} + +sub connect { + my ($self) = @_; + my $socket_uri = $self->{socket_uri}; + if ($socket_uri){ + my $uri = URI->new($socket_uri); + if ($uri && $uri->scheme eq 'unix'){ + my $socket_path = $uri->path; + my $socket = IO::Socket::UNIX->new( + Type => IO::Socket::UNIX::SOCK_STREAM(), + Peer => $socket_path, + ); + if ($socket){ + my $socketio = new IO::Select(); + $socketio->add($socket); + #FIXME: Should probably fix these return values... + $self->{_socketio} = $socketio; + $self->{_socket} = $socket; + my $message = $self->_read(); + if ($message eq 'HELLO'){ + $self->{_connected} = 1; + return 1; + } + } + } + } + return 0; +} + +sub add_task { + my ($self, $args) = @_; + my $task = $args->{task}; + if ($task && %$task){ + my $response = $self->command("add task", undef, $task); + if ($response){ + return $response; + } + } +} + +sub start_task { + my ($self, $args) = @_; + my $task_id = $args->{task_id}; + if ($task_id){ + my $response = $self->command("start task", $task_id); + if ($response){ + return $response; + } + } +} + +sub remove_task { + my ($self, $args) = @_; + my $task_id = $args->{task_id}; + if ($task_id){ + my $response = $self->command("remove task", $task_id); + if ($response){ + return $response; + } + } +} + +sub list_tasks { + my ($self) = @_; + my $response = $self->command("list tasks"); + if ($response){ + if (my $tasks = $response->{tasks}){ + return $tasks; + } + } +} + +sub shutdown { + my ($self) = @_; + my $response = $self->command("shutdown"); + if ($response){ + return $response; + } +} + + + + + +sub command { + my ($self, $command, $task_id, $task) = @_; + my $serialized = $self->_serialize({ "command" => $command, "task_id" => $task_id, "task" => $task }); + if ($serialized){ + $self->_write({ serialized => $serialized }); + my $json = $self->_read(); + if ($json){ + my $response = from_json($json); + if ($response){ + return $response; + } + } + } +} + +sub _serialize { + my ($self, $output) = @_; + my $serialized = to_json($output); + return $serialized; +} + +sub _write { + my ($self, $args) = @_; + my $socket = $self->{_socket}; + my $output = $args->{serialized}; + if ($output){ + if (my $socketio = $self->{_socketio}){ + if (my @filehandles = $socketio->can_write(5)){ + foreach my $filehandle (@filehandles){ + #Localize output record separator as null + local $\ = "\x00"; + print $socket $output; + } + } + } + } +} + +sub _read { + my ($self) = @_; + if (my $socketio = $self->{_socketio}){ + if (my @filehandles = $socketio->can_read(5)){ + foreach my $filehandle (@filehandles){ + #Localize input record separator as null + local $/ = "\x00"; + my $message = <$filehandle>; + chomp($message) if $message; + return $message; + } + } + } +} + + + +1; \ No newline at end of file diff --git a/Koha/Icarus/Base.pm b/Koha/Icarus/Base.pm new file mode 100755 index 0000000..8e6423f --- /dev/null +++ b/Koha/Icarus/Base.pm @@ -0,0 +1,32 @@ +package Koha::Icarus::Base; + +use Modern::Perl; +use DateTime; + +use constant DEBUG => 9; +use constant SILENT => 0; + +sub new { + my ($class, 
$args) = @_; + $args = {} unless defined $args; + return bless ($args, $class); +} + +sub debug { + my ($self,$message) = @_; + if ($self->{Verbosity} == DEBUG){ + $self->log($message); + } +} + +sub log { + my ($self,$message) = @_; + my $id = $self->{_id}; + my $component = $self->{_component} // "component"; + if ( ($self->{Verbosity}) && ($self->{Verbosity} > SILENT) ){ + my $now = DateTime->now(time_zone => "local"); + say "[$now] [$component $id] $message"; + } +} + +1; \ No newline at end of file diff --git a/Koha/Icarus/Listener.pm b/Koha/Icarus/Listener.pm new file mode 100755 index 0000000..e413aef --- /dev/null +++ b/Koha/Icarus/Listener.pm @@ -0,0 +1,330 @@ +package Koha::Icarus::Listener; + +use Modern::Perl; +use parent 'Koha::Icarus::Base'; + +use POE qw(Wheel::ReadWrite Wheel::SocketFactory Wheel::Run); +use IO::Socket qw(AF_UNIX); +use URI; +use Koha::Icarus::Task; +use JSON; #For "on_client_input" + +my $null_filter = POE::Filter::Line->new( + Literal => chr(0), +); + +sub new { + my ($class, $args) = @_; + $args = {} unless defined $args; + $args->{_component} = "server"; + $args->{_id} = "undefined"; + return bless ($args, $class); +} + +#NOTE: "spawn" inspired by http://poe.perl.org/?POE_Cookbook/Object_Methods +sub spawn { + my ($class, $args) = @_; + my $self = $class->new($args); + POE::Session->create( + object_states => [ + $self => { + _start => "on_server_start", + shutdown => "shutdown", + set_verbosity => "set_verbosity", + _child => "on_task_event", + got_list_tasks => "on_list_tasks", + graceful_shutdown => "graceful_shutdown", + got_client_accept => "on_client_accept", + got_client_error => "on_client_error", + got_server_error => "on_server_error", + got_add_task => "on_add_task", + got_client_input => "on_client_input", + }, + ], + ); +} + +#Methods for POE::Session + +sub on_server_start { + my ($self, $kernel,$heap,$session) = @_[OBJECT, KERNEL,HEAP,SESSION]; + my $server_id = $session->ID; + $self->{_id} = $server_id; #Set internal id for logging purposes + + + my $bind_address_uri = $self->{Socket}; + my $max_tasks = $self->{MaxTasks}; + + $kernel->sig(INT => "graceful_shutdown"); + $kernel->sig(TERM => "graceful_shutdown"); + + $heap->{max_tasks} = $max_tasks // 25; #Default maximum of 25 unless otherwise specified + + $self->log("Maximum number of tasks allowed: $heap->{max_tasks}"); + $self->log("Starting server..."); + + my %server_params = ( + SuccessEvent => "got_client_accept", + FailureEvent => "got_server_error", + ); + + #TODO: At this time, only "unix" sockets are supported. In future, perhaps TCP/IP sockets could also be supported. + my $uri = URI->new($bind_address_uri); + my $scheme = $uri->scheme; + + if ($scheme eq 'unix'){ + my $bind_address = $uri->path; + $server_params{SocketDomain} = AF_UNIX; + $server_params{BindAddress} = $bind_address; + #When starting a unix socket server, you need to remove any existing references to that socket file. + if ($bind_address && (-e $bind_address) ){ + $self->debug("Unlinking $bind_address"); + unlink $bind_address or warn "Could not unlink $bind_address: $!"; + } + } + + $heap->{server} = POE::Wheel::SocketFactory->new(%server_params); + + if ($scheme eq 'unix'){ + #FIXME/DEBUGGING: This is a way to force a permission denied error... 
+ #chmod 0755, $uri->path; + #Make the socket writeable to other users like Apache + chmod 0666, $uri->path; + } + +} + +sub shutdown { + my ($self,$heap,$session,$kernel) = @_[OBJECT, HEAP,SESSION,KERNEL]; + + if ($heap->{server}){ + $self->log("Shutting down server..."); + #Delete the server, so that you can't get any new connections + delete $heap->{server} if $heap->{server}; + } + + if ($heap->{client}){ + $self->log("Shutting down any remaining clients..."); + #Delete the clients, so that you bring down the existing connections + delete $heap->{client}; #http://www.perlmonks.org/?node_id=176971 + } +} + +sub on_task_event { + my ($self, $kernel, $heap,$session) = @_[OBJECT,KERNEL, HEAP,SESSION]; + my ($action,$child_session,$task) = @_[ARG0,ARG1,ARG2]; + + my $child_id = $child_session->ID; + + $self->debug("$action child $child_id"); + + + if ($action eq 'create'){ + #NOTE: The $task variable is returned by the child POE session's _start event + my $task_id = $child_session->ID; + $heap->{tasks}->{$task_id}->{task} = $task; + + } elsif ($action eq 'lose'){ + my $task_id = $child_session->ID; + delete $heap->{tasks}->{$task_id}; + } +} + +#TODO: Put this in a parent class? +sub set_verbosity { + my ($self,$session,$kernel,$new_verbosity) = @_[OBJECT,SESSION,KERNEL,ARG0]; + if (defined $new_verbosity){ + $self->{Verbosity} = $new_verbosity; + } +} + +sub on_list_tasks { + my ($self, $kernel, $heap,$session) = @_[OBJECT, KERNEL, HEAP,SESSION]; + + #DEBUG: You can access the POE::Kernel's sessions with "$POE::Kernel::poe_kernel->[POE::Kernel::KR_SESSIONS]". + #While it's black magic you shouldn't touch, it can be helpful when debugging. + + my @tasks = (); + foreach my $task_id (keys %{$heap->{tasks}} ){ + push(@tasks,{ task_id => $task_id, task => $heap->{tasks}->{$task_id}->{task} }); + } + return \@tasks; +} + +sub graceful_shutdown { + my ($self, $heap,$session,$kernel,$signal) = @_[OBJECT, HEAP,SESSION,KERNEL,ARG0]; + + #Tell the kernel that you're handling the signal sent to this session + $kernel->sig_handled(); + $kernel->sig($signal); + + my $tasks = $kernel->call($session,"got_list_tasks"); + + + if ( $heap->{tasks} && %{$heap->{tasks}} ){ + $self->log("Waiting for tasks to finish..."); + foreach my $task_id (keys %{$heap->{tasks}}){ + $self->log("Task $task_id still exists..."); + $kernel->post($task_id,"got_task_stop"); + } + } else { + $self->log("All tasks have finished"); + $kernel->yield("shutdown"); + return; + } + + $self->log("Attempting graceful shutdown in 1 second..."); + #NOTE: Basically, we just try another graceful shutdown on the next tick. + $kernel->delay("graceful_shutdown" => 1); +} + +#Accept client connection to listener +sub on_client_accept { + my ($self, $client_socket, $server_wheel_id, $heap, $session) = @_[OBJECT, ARG0, ARG3, HEAP,SESSION]; + + my $client_wheel = POE::Wheel::ReadWrite->new( + Handle => $client_socket, + InputEvent => "got_client_input", + ErrorEvent => "got_client_error", + InputFilter => $null_filter, + OutputFilter => $null_filter, + ); + + $client_wheel->put("HELLO"); + $heap->{client}->{ $client_wheel->ID() } = $client_wheel; + + $self->debug("Connection ".$client_wheel->ID()." 
started."); + +} + +#Handle server error - shutdown server +sub on_server_error { + my ($self, $operation, $errnum, $errstr, $heap, $session) = @_[OBJECT, ARG0, ARG1, ARG2,HEAP, SESSION]; + $self->log("Server $operation error $errnum: $errstr"); + delete $heap->{server}; +} + +#Handle client error - including disconnect +sub on_client_error { + my ($self, $wheel_id,$heap,$session) = @_[OBJECT, ARG3,HEAP,SESSION]; + + $self->debug("Connection $wheel_id failed or ended."); + + delete $heap->{client}->{$wheel_id}; + +} + +sub on_add_task { + my ($self, $message, $kernel, $heap, $session) = @_[OBJECT, ARG0, KERNEL, HEAP,SESSION]; + + #Fetch a list of all tasks + my @task_keys = keys %{$heap->{tasks}}; + + #If the number in the list is less than the max, add a new task + #else die. + if (scalar @task_keys < $heap->{max_tasks}){ + my $server_id = $session->ID; + my $task_session = Koha::Icarus::Task->spawn({ message => $message, server_id => $server_id, Verbosity => $self->{Verbosity}, SpoolDir => $self->{SpoolDir}, }); + return $task_session->ID; + } else { + #This die should be caught by the event caller... + die "Maximum number of tasks already reached.\n"; + } +} + +sub on_client_input { + my ($self, $input, $wheel_id, $session, $kernel, $heap) = @_[OBJECT, ARG0, ARG1, SESSION, KERNEL, HEAP]; + + #Store server id more explicitly + my $server_id = $session->ID; + + #Server listener has received input from client + my $client = $heap->{client}->{$wheel_id}; + + #Parse input from client + my $message = from_json($input); + + if ( ref $message eq 'HASH' ){ + #Read "command" from client + if (my $command = $message->{command}){ + $self->log("Message received with command \"$command\"."); + if ($command eq 'add task'){ + my $output = {}; + + #Create a task session + eval { + #NOTE: The server automatically keeps track of its child tasks + my $task_id = $kernel->call($server_id,"got_add_task",$message); + + $output->{action} = "added"; + $output->{task_id} = $task_id; + }; + if ($@){ + $self->debug("$@"); + chomp($@); + $output->{action} = "error"; + $output->{error_message} = $@; + } + my $server_output = to_json($output); + $client->put($server_output); + return; + + } elsif ( ($command eq 'remove task') || ($command eq 'start task' ) ){ + + my $task_id = $message->{task_id}; + + my $output = { + task_id => $task_id, + }; + + if ($command eq 'remove task'){ + $kernel->call($task_id,"got_task_stop"); + $output->{action} = "removed"; + } elsif ($command eq 'start task'){ + my $response = $kernel->call($task_id, "on_task_init"); + $output->{action} = $response; + } + + if ($!){ + $output->{action} = "error"; + $output->{error_message} = $!; + } + + #FIXME: What do we actually want to send back to the client? + my $server_output = to_json($output); + $client->put($server_output); + return; + + } elsif ($command eq 'list tasks'){ + + #Get tasks from listener (ie self) + my $tasks = $kernel->call($server_id, "got_list_tasks"); + + #Prepare output for client + my $server_output = to_json({tasks => $tasks}, {pretty => 1}); + + #Send output to client + $client->put($server_output); + return; + + } elsif ($command eq 'shutdown'){ + $kernel->post($server_id, "graceful_shutdown"); + my $server_output = to_json({action => 'shutting down'}); + $client->put($server_output); + return; + } else { + $self->log("The message contained an invalid command!"); + $client->put("Sorry! 
That is an invalid command!"); + return; + } + } else { + $self->log("The message was missing a command!"); + } + } else { + $self->log("The message was malformed!"); + } + $client->put("Sorry! That is an invalid message!"); + return; +} + +1; diff --git a/Koha/Icarus/Task.pm b/Koha/Icarus/Task.pm new file mode 100755 index 0000000..d4092c0 --- /dev/null +++ b/Koha/Icarus/Task.pm @@ -0,0 +1,324 @@ +package Koha::Icarus::Task; + +use Modern::Perl; +use parent 'Koha::Icarus::Base'; + +use POE qw(Wheel::Run); +use DateTime; +use DateTime::Format::Strptime; +use JSON; +use Module::Load::Conditional qw/can_load/; + +my $datetime_pattern = DateTime::Format::Strptime->new( + pattern => '%F %T', + time_zone => 'local', +); +my $epoch_pattern = DateTime::Format::Strptime->new( + pattern => '%s', +); + +sub new { + my ($class, $args) = @_; + $args = {} unless defined $args; + $args->{_component} = "task"; + $args->{_id} = "undefined"; + return bless ($args, $class); +} + +#NOTE: "spawn" inspired by http://poe.perl.org/?POE_Cookbook/Object_Methods +sub spawn { + my ($class, $args) = @_; + my $self = $class->new($args); + my $task_session = POE::Session->create( + object_states => [ + $self => { + _start => "on_task_create", + "got_child_stdout" => "on_child_stdout", + "got_child_stderr" => "on_child_stderr", + "got_child_close" => "on_child_close", + "got_child_signal" => "on_child_signal", + "got_terminal_signal" => "on_terminal_signal", + "child_process_success" => "child_process_success", + "child_process_failure" => "child_process_failure", + "got_task_stop" => "on_task_stop", + "on_task_init" => "on_task_init", + "on_task_start" => "on_task_start", + }, + ], + ); + return $task_session; +} + +sub on_task_create { + my ($self, $session, $kernel, $heap) = @_[OBJECT, SESSION, KERNEL, HEAP]; + + #Trap terminal signals so that the task can stop gracefully. + $kernel->sig(INT => "got_terminal_signal"); + $kernel->sig(TERM => "got_terminal_signal"); + + my $task_id = $session->ID; + if ($task_id){ + #Tell the kernel that this task is waiting for an external action (ie keepalive counter) + $kernel->refcount_increment($task_id,"waiting task"); + $self->{_id} = $task_id; #Set internal id for logging purposes + } + + my $server_id = $self->{server_id}; + if ($server_id){ + $heap->{server_id} = $server_id; + } + + my $task = undef; + my $message = $self->{message}; + if ($message){ + $task = $message->{task}; + if ($task){ + $task->{status} = 'new'; + $heap->{task} = $task; + } + } + return $task; #This return value is used by the parent POE session's _child handler +} + +#This sub is just to start it now, or set it to start in the future... if the time is now or in the past, it starts now... if it's in the future, it starts in the future... +sub on_task_init { + my ($self, $session, $kernel, $heap) = @_[OBJECT, SESSION, KERNEL, HEAP]; + my $response = 'pending'; + my $task = $heap->{task}; + my $status = $task->{status}; + if ($status){ + if ($status eq 'started'){ + $response = 'already started'; + } elsif ($status eq 'pending'){ + $response = 'already pending'; + } else { + $task->{status} = 'pending'; + + my $start = $task->{start}; + my $start_message = $start; + + + my $dt; + if ( $dt = $datetime_pattern->parse_datetime($start) ){ + #e.g. 2016-04-06 00:00:00 + } elsif ( $dt = $epoch_pattern->parse_datetime($start) ){ + #e.g. 1459837498 or apparently 0000-00-00 00:00:00 + } else { + #If we don't match the datetime_pattern or epoch_pattern, then we start right now. 
+ $dt = DateTime->now( time_zone => 'local', ); + } + if ($dt){ + $start = $dt->epoch; + $start_message = $dt; + } + + + $self->log("Start task at $start_message"); + #NOTE: $start must be in UNIX epoch time (ie number of seconds that have elapsed since 00:00:00 UTC Thursday 1 January 1970) + $kernel->alarm("on_task_start",$start); + } + } + return $response; +} + +sub on_task_start { + my ($self, $session, $kernel, $heap) = @_[OBJECT, SESSION, KERNEL, HEAP]; + my $task = $heap->{task}; + $task->{status} = 'started'; + + if (my $repeat_interval = $task->{repeat_interval}){ + #NOTE: Reset the start time with a human readable timestamp + my $dt = DateTime->now( time_zone => 'local', ); + $dt->add( seconds => $repeat_interval ); + $task->{start} = $dt->strftime("%F %T"); + } + #FIXME: You need to impose child process limits here! How many child processes are allowed to be running at any given time? Well, you can only have one child process per task... + #so it's really more of a limit on the number of tasks... you probably need to have an internal task queue... that's easy enough though. + my $child = POE::Wheel::Run->new( + ProgramArgs => [ $task, ], + Program => sub { + my ($task) = @_; + + #Perform some last minute POE calls before running the task module plugin + my $session = $poe_kernel->get_active_session(); + if ($session){ + my $heap = $session->get_heap(); + $poe_kernel->call($heap->{server_id},"set_verbosity",0); #This turns off the server logging in this forked process, so the following call() doesn't mess up our logs + $poe_kernel->call($heap->{server_id},"shutdown"); #Shutdown the socket listener on the child process, so there's zero chance of writing to or reading from the socket in the child process + } + + #NOTE: I don't know if this really needs to be run, but it shouldn't hurt. + $poe_kernel->stop(); + + #Try to load the task type module. + my $task_type = $task->{type}; + if ( can_load ( modules => { $task_type => undef, }, ) ){ + #Create the object + my $task_object = $task_type->new({task => $task, Verbosity => $self->{Verbosity}, SpoolDir => $self->{SpoolDir}, }); + if ($task_object){ + #Synchronous action: run the task module + $task_object->run; + } + } else { + die "Couldn't load module $task_type: $Module::Load::Conditional::ERROR" + } + }, + StdoutEvent => "got_child_stdout", + StderrEvent => "got_child_stderr", + CloseEvent => "got_child_close", + NoSetPgrp => 1, #Keep child processes in same group as parent. This is especially useful when using Ctrl+C to kill the whole group. + ); + + $kernel->sig_child($child->PID, "got_child_signal"); + # Wheel events include the wheel's ID. + $_[HEAP]{children_by_wid}{$child->ID} = $child; + # Signal events include the process ID. + $_[HEAP]{children_by_pid}{$child->PID} = $child; + + $self->debug("child pid ".$child->PID." 
started as wheel ".$child->ID); +} + +sub on_task_stop { + my ($self, $session, $kernel, $heap) = @_[OBJECT, SESSION, KERNEL, HEAP]; + my $task = $heap->{task}; + $task->{status} = 'stopping'; + my $task_id = $session->ID; + my $server_id = $heap->{server_id}; + + if ($heap->{stopping}){ + $self->debug("Task is already in the process of stopping..."); + + } else { + + $self->log("Trying to stop task."); + + + #Mark this task as stopping + $heap->{stopping} = 1; + + #Stop the task from spawning new jobs + $kernel->alarm("on_task_start"); + + my $children_by_pid = $heap->{children_by_pid}; + if ($children_by_pid && %$children_by_pid){ + + $self->debug("Child processes in progres..."); + my $child_processes = $heap->{children_by_pid}; + foreach my $child_pid (keys %$child_processes){ + my $child = $child_processes->{$child_pid}; + $self->debug("Telling child pid $child_pid to stop"); + $child->put("quit"); + #TODO: Perhaps it would be worthwhile having a kill switch too? + # my $rv = $child->kill("TERM"); + } + } + + $self->log("Removing task keepalive."); + + $kernel->refcount_decrement($task_id,"waiting task"); + } +} + +sub on_terminal_signal { + my ($self, $signal,$session,$kernel) = @_[OBJECT, ARG0,SESSION,KERNEL]; + $self->debug("Trapped SIGNAL: $signal."); + #Gracefully stop the task + $kernel->call($session, "got_task_stop"); +} + +sub child_process_failure { + my ($self, $heap,$session,$kernel) = @_[OBJECT, HEAP,SESSION,KERNEL]; + my $task = $heap->{task}; + $task->{status} = "failed"; +} + +sub child_process_success { + my ($self, $heap,$session,$kernel) = @_[OBJECT, HEAP,SESSION,KERNEL]; + my $task = $heap->{task}; + if (my $repeat_interval = $task->{repeat_interval}){ + if ($heap->{stopping}){ + $self->log("Will skip repeating the task, as task is stopping."); + } else { + $self->log("Will repeat the task"); + $task->{status} = "restarting"; + $kernel->yield("on_task_init"); + } + } else { + $self->debug("I'm going to stop this task"); + $kernel->yield("got_task_stop"); + } +} + +############################################################# +# # +# Methods for communicating with child processes # +# # +############################################################# +# Originally inspired by the POE::Wheel::Run perldoc example + +# Wheel event, including the wheel's ID +sub on_child_stdout { + my ($self, $stdout_line, $wheel_id, $session) = @_[OBJECT, ARG0, ARG1, SESSION]; + my $child = $_[HEAP]{children_by_wid}{$wheel_id}; + #NOTE: Log everything child process sends to STDOUT + $self->log("[pid ".$child->PID."] STDOUT: $stdout_line"); + + #If the child outputs a line to STDOUT which starts with UPDATE_PARAMS=, we capture the data, + #and update the task params. + if ($stdout_line =~ /^UPDATE_PARAMS=(.*)$/){ + my $json_string = $1; + my $json = from_json($json_string); + my $task = $_[HEAP]->{task}; + my $params = $task->{params}; + foreach my $key (%$json){ + if (defined $params->{$key}){ + #FIXME: Don't just overwrite? Only update differences? + $params->{$key} = $json->{$key}; + } + } + $_[HEAP]->{task} = $task; + } +} + +# Wheel event, including the wheel's ID. +sub on_child_stderr { + my ($self, $stderr_line, $wheel_id, $session) = @_[OBJECT, ARG0, ARG1,SESSION]; + my $child = $_[HEAP]{children_by_wid}{$wheel_id}; + #NOTE: Log everything child process sends to STDERR + $self->log("[pid ".$child->PID."] STDERR: $stderr_line"); +} + +# Wheel event, including the wheel's ID. 
+sub on_child_close { + my ($self, $wheel_id,$session,$kernel) = @_[OBJECT, ARG0,SESSION,KERNEL]; + + my $child = delete $_[HEAP]{children_by_wid}{$wheel_id}; + + # May have been reaped by on_child_signal(). + unless (defined $child) { + $self->debug("[wid $wheel_id] closed all pipes."); + return; + } + $self->debug("[pid ".$child->PID."] closed all pipes."); + delete $_[HEAP]{children_by_pid}{$child->PID}; +} + +sub on_child_signal { + my ($self, $heap,$kernel,$pid,$exit_code,$session) = @_[OBJECT, HEAP,KERNEL,ARG1,ARG2,SESSION]; + + #If the child's exit code is 0, handle this successful exit status + if ($exit_code == 0){ + $kernel->yield("child_process_success"); + } else { + $kernel->yield("child_process_failure"); + } + $self->debug("pid $pid exited with status $exit_code."); + my $child = delete $_[HEAP]{children_by_pid}{$pid}; + + # May have been reaped by on_child_close(). + return unless defined $child; + + delete $_[HEAP]{children_by_wid}{$child->ID}; +} + +1; diff --git a/Koha/Icarus/Task/Base.pm b/Koha/Icarus/Task/Base.pm new file mode 100755 index 0000000..8de774c --- /dev/null +++ b/Koha/Icarus/Task/Base.pm @@ -0,0 +1,24 @@ +package Koha::Icarus::Task::Base; + +use Modern::Perl; +use IO::Select; + +sub new { + my ($class, $args) = @_; + $args = {} unless defined $args; + return bless ($args, $class); +} + +sub listen_for_instruction { + my ($self) = @_; + my $select = $self->{_select} ||= IO::Select->new(\*STDIN); + if (my @ready_FHs = $select->can_read(0) ){ + foreach my $FH (@ready_FHs){ + my $line = $FH->getline(); + chomp($line); + return $line; + } + } +} + +1; diff --git a/Koha/Icarus/Task/Download/OAIPMH/Biblio.pm b/Koha/Icarus/Task/Download/OAIPMH/Biblio.pm new file mode 100755 index 0000000..13b8948 --- /dev/null +++ b/Koha/Icarus/Task/Download/OAIPMH/Biblio.pm @@ -0,0 +1,318 @@ +package Koha::Icarus::Task::Download::OAIPMH::Biblio; + +use Modern::Perl; +use parent 'Koha::Icarus::Task::Base'; + +use DateTime; +use DateTime::Format::Strptime; +use HTTP::OAI; +use File::Path qw(make_path); +use Digest::MD5; +use JSON; +use URI; + +my $strp = DateTime::Format::Strptime->new( + pattern => '%Y%m%dT%H%M%S.%NZ', +); + +my $oai_second_granularity = DateTime::Format::Strptime->new( + pattern => '%Y-%m-%dT%H:%M:%SZ', +); + +my $oai_day_granularity = DateTime::Format::Strptime->new( + pattern => '%Y-%m-%d', +); + +sub validate_parameter_names { + +} +sub validate_repeat_interval { + my ($self,$repeat_interval) = @_; + if (defined $repeat_interval && $repeat_interval =~ /^\d+$/){ + return undef; + } + $self->{invalid_data}++; + return { not_numeric => 1, }; +} + +sub validate_url { + my ($self,$url) = @_; + my $response = {}; + if (my $url_obj = URI->new($url)){ + if ($url_obj->scheme ne "http"){ + $response->{not_http} = 1; + $self->{invalid_data}++; + } + if ( ! $url_obj->path){ + $response->{no_path} = 1; + $self->{invalid_data}++; + } + } else { + $response->{not_a_url} = 1; + $self->{invalid_data}++; + } + + return $response; +} + +sub validate { + my ($self, $args) = @_; + #Reset the invalid data counter... 
+ $self->{invalid_data} = 0; + my $errors = { }; + my $task = $self->{task}; + my $tests = $args->{tests}; + if ($task){ + if ($tests && $tests eq 'all'){ + #warn "PARAMS = ".$task->{params}; + } + } + my $params = $task->{params}; + + #validate_start_time + $errors->{"repeat_interval"} = $self->validate_repeat_interval($task->{repeat_interval}); + + $errors->{"url"} = $self->validate_url($params->{url}); + + #NOTE: You don't need to validate these 3 HTTP Basic Auth parameters + #validate_username + #validate_password + #validate_realm + + #OAI-PMH parameters + #validate_verb + #validate_sets + #validate_marcxml + #validate_from + #validate_until + + #Download parameters + #validate_queue + + return $errors; +} + +sub new { + my ($class, $args) = @_; + $args = {} unless defined $args; + $args->{invalid_data} = 0; + return bless ($args, $class); +} + +sub validate_queue { + my ( $self ) = @_; + my $task = $self->{task}; + my $spooldir = $self->{SpoolDir}; + if ( -d $spooldir ){ + chdir $spooldir or die "$!"; + + my $named_queue = $task->{params}->{queue}; + my $queue = "queue/$named_queue"; + if ( ! -d $queue ){ + make_path($queue,{ mode => 0755 }); + } + if ( -d $queue ){ + chdir $queue or die "$!"; + } + } else { + die "$!"; + } +} + +sub run { + my ( $self ) = @_; + $self->validate_queue; + + my $task = $self->{task}; + + #DEBUGGING/FIXME: Remove these lines + if ($self->{Verbosity} && $self->{Verbosity} == 9){ + use Data::Dumper; + warn Dumper($task); + } + + my $params = $task->{params}; + + my $now = DateTime->now(); #This is in UTC time, which is required by the OAI-PMH protocol. + if ( $oai_second_granularity->parse_datetime($params->{from}) ){ + $now->set_formatter($oai_second_granularity); + } else { + $now->set_formatter($oai_day_granularity); + } + + $params->{until} = "$now" if $task->{repeat_interval}; + + $self->{digester} = Digest::MD5->new(); + $self->create_harvester; + my $sets = $self->prepare_sets; + + #Send a OAI-PMH request for each set + foreach my $set (@{$sets}){ + my $response = $self->send_request({set => $set}); + $self->handle_response({ response => $response, set => $set,}); + } + + #FIXME: Do you want to update the task only when the task is finished, or + #also after each resumption? + #Update the task params in Icarus after the task is finished... + #TODO: This really does make it seem like you should be handling the repeat_interval within the child process rather than the parent... + if ($task->{repeat_interval}){ + $params->{from} = "$now"; + $params->{until} = ""; + my $json_update = to_json($params); + say STDOUT "UPDATE_PARAMS=$json_update"; + } + +} + +#FIXME: I wonder if it would be faster to send your own HTTP requests and not use HTTP::OAI... +sub send_request { + my ( $self, $args ) = @_; + + #NOTE: This is plugin specific as the plugins define when they stop to listen for instructions... + #NOTE: Before sending a new request, check if Icarus has already asked us to quit. 
+ my $instruction = $self->listen_for_instruction(); + if ($instruction eq 'quit'){ + warn "I was asked to quit!"; + return; + } + + my $set = $args->{set}; + my $resumptionToken = $args->{resumptionToken}; + + my $response; + my $task_params = $self->{task}->{params}; + + my $harvester = $self->{harvester}; + my $verb = $task_params->{verb}; + if ($verb eq 'GetRecord'){ + $response = $harvester->GetRecord( + metadataPrefix => $task_params->{metadataPrefix}, + identifier => $task_params->{identifier}, + ); + } elsif ($verb eq 'ListRecords'){ + $response = $harvester->ListRecords( + metadataPrefix => $task_params->{metadataPrefix}, + from => $task_params->{from}, + until => $task_params->{until}, + set => $set, + resumptionToken => $resumptionToken, + ); + } + return $response; +} + +sub create_harvester { + my ( $self ) = @_; + my $task_params = $self->{task}->{params}; + + #FIXME: DEBUGGING + #use HTTP::OAI::Debug qw(+); + + #Create HTTP::OAI::Harvester object + my $harvester = new HTTP::OAI::Harvester( baseURL => $task_params->{url} ); + if ($harvester){ + $harvester->timeout(5); #NOTE: the default timeout is 180 + #Set HTTP Basic Authentication Credentials + my $uri = URI->new($task_params->{url}); + my $host = $uri->host; + my $port = $uri->port; + $harvester->credentials($host.":".$port, $task_params->{realm}, $task_params->{username}, $task_params->{password}); + } + $self->{harvester} = $harvester; +} + +sub prepare_sets { + my ( $self ) = @_; + my $task_params = $self->{task}->{params}; + my @sets = (); + if ($task_params->{sets}){ + @sets = split(/\|/, $task_params->{sets}); + } + #If no sets are defined, create a null element to force the foreach loop to run once + if (!@sets){ + push(@sets,undef) + } + return \@sets; +} + +sub handle_response { + my ( $self, $args ) = @_; + my $params = $self->{task}->{params}; + my $response = $args->{response}; + my $set = $args->{set}; + if ($response){ + #NOTE: We have options at this point + #Option 1: Use $response->toDOM() to handle the XML response as a single document + #Option 2: Use $response->next() to handle each record individually. You would need to create a new document using $rec->header->dom() and $rec->metadata->dom() anyway. + + #NOTE: I wonder which option would be the fastest. For now, we're going with Option 1: + my $dom = $response->toDOM; + my $root = $dom->documentElement; + + #FIXME: Provide these as arguments so you're not re-creating them for each response + my $xpc = XML::LibXML::XPathContext->new(); + $xpc->registerNs('oai','http://www.openarchives.org/OAI/2.0/'); + my $xpath = XML::LibXML::XPathExpression->new("(oai:GetRecord|oai:ListRecords)/oai:record"); + + + my @records = $xpc->findnodes($xpath,$root); + my $now_pretty = DateTime->now(); + + $now_pretty->set_formatter($strp); + print "Downloaded ".scalar @records." records at $now_pretty\n"; + foreach my $record (@records) { + + #FIXME: This is where you could put a filter to prevent certain records from being saved... + + #Create a new XML document from the XML fragment + my $document = XML::LibXML::Document->new( "1.0", "UTF-8" ); + $document->setDocumentElement($record); + my $record_string = $document->toString; + + #NOTE: We have options at this point. 
+ #Option 1: Write documents to disk, and have a separate importer upload the documents + #Option 2: Use AnyEvent::HTTP or POE::Component::Client::HTTP to send to a HTTP API asynchronously + #Option 3: Write records to a database, and have a separate importer upload the documents + #Option 4: Shared memory, although that seems fragile if nothing else + #Option 5: Write the records to a socket/pipe + + #NOTE: I wonder which option would be the fastest. For now, we're going to go with Option 1: + $self->{digester}->add($record_string); + my $digest = $self->{digester}->hexdigest; + #FIXME: If a record appears more than once during the download signified by $now, you'll + #overwrite the former with the latter. While this acts as a sort of heavy-handed de-duplication, + #you need to take into account the importer daemon... + + require Time::HiRes; + my $epoch = Time::HiRes::time(); + my $now = DateTime->from_epoch(epoch => $epoch); + $now->set_formatter($strp); + + my $filename = "$now-$digest"; + #NOTE: Here is where we write the XML out to disk + eval { + my $state = $document->toFile($filename); + }; + if ($@){ + die("Error while writing to disk: $@"); + } + } + + + #NOTE: Check if object has method due to bug in HTTP::OAI which causes fatal error on $response->resumptionToken if no real response is fetched from the OAI-PMH server + if ($response->can("resumptionToken")){ + my $resumption_token = $response->resumptionToken->resumptionToken if $response->resumptionToken && $response->resumptionToken->resumptionToken; + if ($resumption_token){ + #warn "Resumption Token = $resumption_token"; + my $resumed_response = $self->send_request({set => $set, resumptionToken => $resumption_token}); + $self->handle_response({ response => $resumed_response, set => $set,}); + } + } + + #In theory $response->resume(resumptionToken => resumptionToken) should kick off another response... + warn $response->message if $response->is_error; + } +} + +1; diff --git a/Koha/Icarus/Task/Upload/OAIPMH/Biblio.pm b/Koha/Icarus/Task/Upload/OAIPMH/Biblio.pm new file mode 100755 index 0000000..763d220 --- /dev/null +++ b/Koha/Icarus/Task/Upload/OAIPMH/Biblio.pm @@ -0,0 +1,118 @@ +package Koha::Icarus::Task::Upload::OAIPMH::Biblio; + +use Modern::Perl; +use parent 'Koha::Icarus::Task::Base'; +#use URI; +use LWP::UserAgent; +use HTTP::Status qw(:constants); + +my $ua = LWP::UserAgent->new; + +#FIXME: If we store the cookie jar on disk, we can prevent unnecessary HTTP requests... +#We would need to make sure that it's stored on a private per-instance basis though... +$ua->cookie_jar({}); + + +sub new { + my ($class, $args) = @_; + $args = {} unless defined $args; + return bless ($args, $class); +} + +sub run { + my ( $self ) = @_; + + my $task = $self->{task}; + my $params = $task->{params}; + + my $spooldir = $self->{SpoolDir}; + my $named_queue = $task->{params}->{queue}; + my $queue = "$spooldir/queue/$named_queue"; + + if ($self->{Verbosity} && $self->{Verbosity} == 9){ + use Data::Dumper; + warn Dumper($task); + } + + if ( -d $queue){ + my $is_opened = opendir(my $dh, $queue); + if ($is_opened){ + my @files = sort readdir($dh); + foreach my $file (@files){ + #NOTE: This is plugin specific as the plugins define when they stop to listen for instructions... 
+ my $instruction = $self->listen_for_instruction(); + if ($instruction eq 'quit'){ + warn "I was asked to quit!"; + return; + } + + next if $file =~ /^\.+$/; + my $filepath = "$queue/$file"; + if ( -d $filepath ){ + #Do nothing for directories + } elsif ( -e $filepath ){ + print "File: $file\n"; + + #Slurp mode + local $/; + #TODO: Check flock on $filepath first + open( my $fh, '<', $filepath ); + my $data = <$fh>; + + #TODO: Improve this section... + #Send to Koha API... (we could speed this up using Asynchronous HTTP requests with AnyEvent::HTTP...) + my $resp = post_to_api($data,$params); + + my $status = $resp->code; + + if ($status == HTTP_UNAUTHORIZED || $status == HTTP_FORBIDDEN) { + $resp = remote_authenticate($params); + $resp = post_to_api($data,$params) if $resp->is_success; + } + + if ($resp->code == HTTP_OK){ + print "Success.\n"; + print $resp->decoded_content; + print "\n"; + unlink $filepath; + } + } + } + } else { + die "Couldn't open queue"; + } + } else { + die "Queue doesn't exist"; + } +} + +sub post_to_api { + my ($data, $params) = @_; + print "Posting to API...\n"; + my $resp = $ua->post( $params->{target_uri}, + {'nomatch_action' => $params->{nomatch_action}, + 'overlay_action' => $params->{overlay_action}, + 'match' => $params->{match}, + 'framework' => $params->{framework}, + 'filter' => $params->{filter}, + 'record_type' => "biblio", + 'xml' => $data} + ); + return $resp; +} + +sub remote_authenticate { + my ($params) = @_; + print "Authenticating...\n"; + + my $auth_uri = $params->{auth_uri}; + my $user = $params->{auth_username}; + my $password = $params->{auth_password}; + my $resp = $ua->post( $auth_uri, { userid => $user, password => $password } ); + if ($resp->code == HTTP_OK){ + print "Authenticated.\n"; + } + return $resp +} + +1; diff --git a/Koha/SavedTask.pm b/Koha/SavedTask.pm new file mode 100755 index 0000000..89cbc51 --- /dev/null +++ b/Koha/SavedTask.pm @@ -0,0 +1,86 @@ +package Koha::SavedTask; + +# Copyright Prosentient Systems 2016 +# +# This file is part of Koha. +# +# Koha is free software; you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation; either version 3 of the License, or (at your option) any later +# version. +# +# Koha is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with Koha; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
+ +use Modern::Perl; + +use Carp; + +use JSON; + +use base qw(Koha::Object); + + + +=head1 NAME + +Koha::SavedTask - + +=head1 API + +=head2 Class Methods + +=cut + + + +=head3 _type + +=cut + +sub _type { + return 'SavedTask'; +} + +sub params_as_perl { + my ($self) = @_; + my $perl = from_json($self->params); + return $perl; +} + +sub serialize { + my ($self,$args) = @_; + my $for = $args->{for}; + my $type = $args->{type}; + if ($for eq 'icarus'){ + my $json_params = $self->params; + my $perl_params = from_json($json_params); + + my $icarus_task = { + type => $self->task_type, + start => $self->start_time, + repeat_interval => $self->repeat_interval, + params => $perl_params, + }; + if ($type eq 'perl'){ + return $icarus_task; + } elsif ($type eq 'json'){ + my $json = to_json($icarus_task); + return $json; + } + } + return undef; +} + +=head1 AUTHOR + +David Cook + +=cut + +1; diff --git a/Koha/SavedTasks.pm b/Koha/SavedTasks.pm new file mode 100755 index 0000000..a1ae9c5 --- /dev/null +++ b/Koha/SavedTasks.pm @@ -0,0 +1,62 @@ +package Koha::SavedTasks; + +# Copyright Prosentient Systems 2016 +# +# This file is part of Koha. +# +# Koha is free software; you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation; either version 3 of the License, or (at your option) any later +# version. +# +# Koha is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with Koha; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
+ +use Modern::Perl; + +use Carp; + +use Koha::Database; + +use Koha::SavedTask; + +use base qw(Koha::Objects); + +=head1 NAME + +Koha::SavedTasks - + +=head1 API + +=head2 Class Methods + +=cut + +=head3 _type + +=cut + +sub _type { + return 'SavedTask'; +} + +=head3 object_class + +=cut + +sub object_class { + return 'Koha::SavedTask'; +} + +=head1 AUTHOR + +David Cook + +=cut + +1; diff --git a/Koha/Schema/Result/SavedTask.pm b/Koha/Schema/Result/SavedTask.pm new file mode 100755 index 0000000..948804f --- /dev/null +++ b/Koha/Schema/Result/SavedTask.pm @@ -0,0 +1,98 @@ +use utf8; +package Koha::Schema::Result::SavedTask; + +# Created by DBIx::Class::Schema::Loader +# DO NOT MODIFY THE FIRST PART OF THIS FILE + +=head1 NAME + +Koha::Schema::Result::SavedTask + +=cut + +use strict; +use warnings; + +use base 'DBIx::Class::Core'; + +=head1 TABLE: C + +=cut + +__PACKAGE__->table("saved_tasks"); + +=head1 ACCESSORS + +=head2 task_id + + data_type: 'integer' + extra: {unsigned => 1} + is_auto_increment: 1 + is_nullable: 0 + +=head2 start_time + + data_type: 'datetime' + datetime_undef_if_invalid: 1 + is_nullable: 0 + +=head2 repeat_interval + + data_type: 'integer' + extra: {unsigned => 1} + is_nullable: 0 + +=head2 task_type + + data_type: 'varchar' + is_nullable: 0 + size: 255 + +=head2 params + + data_type: 'text' + is_nullable: 0 + +=cut + +__PACKAGE__->add_columns( + "task_id", + { + data_type => "integer", + extra => { unsigned => 1 }, + is_auto_increment => 1, + is_nullable => 0, + }, + "start_time", + { + data_type => "datetime", + datetime_undef_if_invalid => 1, + is_nullable => 0, + }, + "repeat_interval", + { data_type => "integer", extra => { unsigned => 1 }, is_nullable => 0 }, + "task_type", + { data_type => "varchar", is_nullable => 0, size => 255 }, + "params", + { data_type => "text", is_nullable => 0 }, +); + +=head1 PRIMARY KEY + +=over 4 + +=item * L + +=back + +=cut + +__PACKAGE__->set_primary_key("task_id"); + + +# Created by DBIx::Class::Schema::Loader v0.07042 @ 2016-01-27 13:35:22 +# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:gnoi7I9fiXM3IfDysMTm+A + + +# You can replace this text with custom code or comments, and it will be preserved on regeneration +1; diff --git a/Makefile.PL b/Makefile.PL index f0a9368..afd119e 100644 --- a/Makefile.PL +++ b/Makefile.PL @@ -198,6 +198,14 @@ Directory for Zebra's data files. Directory for Zebra's UNIX-domain sockets. +=item ICARUS_RUN_DIR + +Directory for Icarus's UNIX-domain socket and pid file. + +=item ICARUS_SPOOL_DIR + +Directory for Icarus's temporary data like queues + =item MISC_DIR Directory for for miscellaenous scripts, among other @@ -317,6 +325,8 @@ my $target_map = { './skel/var/log/koha' => { target => 'LOG_DIR', trimdir => -1 }, './skel/var/spool/koha' => { target => 'BACKUP_DIR', trimdir => -1 }, './skel/var/run/koha/zebradb' => { target => 'ZEBRA_RUN_DIR', trimdir => -1 }, + './skel/var/run/koha/icarus' => { target => 'ICARUS_RUN_DIR', trimdir => 6 }, + './skel/var/spool/koha/icarus' => { target => 'ICARUS_SPOOL_DIR', trimdir => 6 }, './skel/var/lock/koha/zebradb/authorities' => { target => 'ZEBRA_LOCK_DIR', trimdir => 6 }, './skel/var/lib/koha/zebradb/authorities/key' => { target => 'ZEBRA_DATA_DIR', trimdir => 6 }, './skel/var/lib/koha/zebradb/authorities/register' => { target => 'ZEBRA_DATA_DIR', trimdir => 6 }, @@ -414,6 +424,10 @@ System user account that will own Koha's files. System group that will own Koha's files. +=item ICARUS_MAX_TASKS + +Maximum number of tasks allowed by Icarus. 
+ =back =cut @@ -448,7 +462,8 @@ my %config_defaults = ( 'USE_MEMCACHED' => 'no', 'MEMCACHED_SERVERS' => '127.0.0.1:11211', 'MEMCACHED_NAMESPACE' => 'KOHA', - 'FONT_DIR' => '/usr/share/fonts/truetype/ttf-dejavu' + 'FONT_DIR' => '/usr/share/fonts/truetype/ttf-dejavu', + 'ICARUS_MAX_TASKS' => '30', ); # set some default configuration options based on OS @@ -1092,6 +1107,10 @@ Memcached namespace?); Path to DejaVu fonts?); $config{'FONT_DIR'} = _get_value('FONT_DIR', $msg, $defaults->{'FONT_DIR'}, $valid_values, $install_log_values); + $msg = q( +Maximum number of tasks allowed by Icarus?); + $config{'ICARUS_MAX_TASKS'} = _get_value('ICARUS_MAX_TASKS', $msg, $defaults->{'ICARUS_MAX_TASKS'}, $valid_values, $install_log_values); + $msg = q( Would you like to run the database-dependent test suite?); @@ -1241,6 +1260,8 @@ sub get_target_directories { $dirmap{'PLUGINS_DIR'} = File::Spec->catdir(@basedir, $package, 'var', 'lib', 'koha', 'plugins'); $dirmap{'ZEBRA_DATA_DIR'} = File::Spec->catdir(@basedir, $package, 'var', 'lib', 'zebradb'); $dirmap{'ZEBRA_RUN_DIR'} = File::Spec->catdir(@basedir, $package, 'var', 'run', 'zebradb'); + $dirmap{'ICARUS_RUN_DIR'} = File::Spec->catdir(@basedir, $package, 'var', 'run', 'icarus'); + $dirmap{'ICARUS_SPOOL_DIR'} = File::Spec->catdir(@basedir, $package, 'var', 'spool', 'icarus'); } elsif ($mode eq 'dev') { my $curdir = File::Spec->rel2abs(File::Spec->curdir()); $dirmap{'API_CGI_DIR'} = File::Spec->catdir($curdir, 'api'); @@ -1276,6 +1297,8 @@ sub get_target_directories { $dirmap{'PLUGINS_DIR'} = File::Spec->catdir(@basedir, $package, 'var', 'lib', 'plugins'); $dirmap{'ZEBRA_DATA_DIR'} = File::Spec->catdir(@basedir, $package, 'var', 'lib', 'zebradb'); $dirmap{'ZEBRA_RUN_DIR'} = File::Spec->catdir(@basedir, $package, 'var', 'run', 'zebradb'); + $dirmap{'ICARUS_RUN_DIR'} = File::Spec->catdir(@basedir, $package, 'var', 'run', 'icarus'); + $dirmap{'ICARUS_SPOOL_DIR'} = File::Spec->catdir(@basedir, $package, 'var', 'spool', 'icarus'); } else { # mode is standard, i.e., 'fhs' $dirmap{'API_CGI_DIR'} = File::Spec->catdir(@basedir, $package, 'api'); @@ -1300,6 +1323,8 @@ sub get_target_directories { $dirmap{'PLUGINS_DIR'} = File::Spec->catdir(File::Spec->rootdir(), 'var', 'lib', $package, 'plugins'); $dirmap{'ZEBRA_DATA_DIR'} = File::Spec->catdir(File::Spec->rootdir(), 'var', 'lib', $package, 'zebradb'); $dirmap{'ZEBRA_RUN_DIR'} = File::Spec->catdir(File::Spec->rootdir(), 'var', 'run', $package, 'zebradb'); + $dirmap{'ICARUS_RUN_DIR'} = File::Spec->catdir(File::Spec->rootdir(), 'var', 'run', $package, 'icarus'); + $dirmap{'ICARUS_SPOOL_DIR'} = File::Spec->catdir(File::Spec->rootdir(), 'var', 'spool', $package, 'icarus'); } _get_env_overrides(\%dirmap); diff --git a/admin/saved_tasks.pl b/admin/saved_tasks.pl new file mode 100755 index 0000000..512bf27 --- /dev/null +++ b/admin/saved_tasks.pl @@ -0,0 +1,379 @@ +#!/usr/bin/perl + +# Copyright Prosentient Systems 2016 +# +# This file is part of Koha. +# +# Koha is free software; you can redistribute it and/or modify it +# under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 3 of the License, or +# (at your option) any later version. +# +# Koha is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. 
+# +# You should have received a copy of the GNU General Public License +# along with Koha; if not, see . + +=head1 NAME + +saved_tasks.pl + +=head1 DESCRIPTION + +Admin page to manage saved tasks + +=cut + +use Modern::Perl; +use CGI qw ( -utf8 ); +use C4::Auth; +use C4::Output; +use C4::Context; + +use Koha::SavedTasks; +use Koha::Icarus; +use Module::Load::Conditional qw/can_load check_install/; +use JSON; + +my $try_to_connect = 1; +my $input = new CGI; +my ($template, $loggedinuser, $cookie, $flags) = get_template_and_user( { + template_name => 'admin/saved_tasks.tt', + query => $input, + type => 'intranet', + authnotrequired => 0, + flagsrequired => { 'parameters' => 'parameters_remaining_permissions' }, +} ); + +my $filename = "saved_tasks.pl"; +$template->param( + filename => $filename, +); + +my $context = C4::Context->new(); + + +my $task_server = $input->param("task_server") // "icarus"; + + +my $socket_uri = $context->{"icarus"}->{"socket"}; + +my @available_plugins = (); +my $task_plugins = $context->{"icarus"}->{"task_plugin"}; +if ($task_plugins && ref $task_plugins eq 'ARRAY'){ + #FIXME: This should probably be a module method... validation that a plugin is installed... + foreach my $task_plugin (@$task_plugins){ + #Check that plugin module is installed + if ( check_install( module => $task_plugin ) ){ + push(@available_plugins,$task_plugin); + } + } +} + +$template->param( + available_plugins => \@available_plugins, +); + +#Server action and task id +my $server_action = $input->param("server_action"); +my $server_task_id = $input->param('server_task_id'); + +#Saved task op +my $op = $input->param('op'); +my $step = $input->param('step'); + +#Saved task id +my $saved_task_id = $input->param('saved_task_id'); + + +#Create Koha-Icarus interface object +my $icarus = Koha::Icarus->new({ socket_uri => $socket_uri }); +my $daemon_status = ""; + + + +#NOTE: If you're having problems starting the server from the web ui, +#remember that Apache must be able to write to icarus.log, icarus.pid, icarus.sock and the directories containing them. +if ($server_action && $server_action eq "start_server" ){ + my $icarusd = $context->{icarus}->{bin}; + my $KOHA_CONF = $ENV{'KOHA_CONF'}; + if ( -f $icarusd && $KOHA_CONF){ + my $start_daemon = "$icarusd -f $KOHA_CONF --daemon -v 9"; + #NOTE: If the daemon didn't close STDOUT and STDERR itself, we'd need to redirect them to /dev/null them here to prevent Apache from looping/erroring + if (system("$start_daemon") == 0){ + #This means the parent daemon process succeeded. However, it's still possible that the child daemon process has failed. 
+ } else { + $try_to_connect = 0; + $daemon_status = "Start failed"; + } + } +} + +if ($try_to_connect){ + #Connect to Icarus + if ( $icarus->connect() ){ + $daemon_status = "online"; + if ($server_action){ + if ($server_action eq 'shutdown_server'){ + my $response = $icarus->shutdown; + if ( $response && (my $action = $response->{action}) ){ + $daemon_status = $action; + } + } elsif ($server_action eq 'start' && $server_task_id){ + my $response = $icarus->start_task({ task_id => $server_task_id }); + $template->param( + task_response => $response, + ); + } elsif ($server_action eq 'remove' && $server_task_id){ + my $response = $icarus->remove_task({ task_id => $server_task_id }); + $template->param( + task_response => $response, + ); + } + } + } else { + warn "Daemon status: $!"; + $daemon_status = $!; + } +} + +$template->param( + daemon_status => $daemon_status, +); + + + +my $params = $input->param("params"); + +#NOTE: Parse the parameters manually, so that you can "name[]" style of parameter, which we use in the special plugin templates... +my $saved_params = {}; +#Fetch the names of all the parameters passed to your script +my @parameter_names = $input->param; +#Iterate through these parameter names and look for "params[]" +foreach my $parameter_name (@parameter_names){ + if ($parameter_name =~ /^params\[(.*)\]$/){ + #Capture the hash key + my $key = $1; + #Fetch the actual individual value + my $parameter_value = $input->param($parameter_name); + if ($parameter_value){ + $saved_params->{$key} = $parameter_value; + } + } +} +if (%$saved_params){ + my $json = to_json($saved_params, { pretty => 1, }); + if ($json){ + $params = $json; + } +} + +my $start_time = $input->param("start_time"); +my $repeat_interval = $input->param("repeat_interval"); +my $task_type = $input->param("task_type"); +if ($task_type){ + my $task_template = $task_type; + #Create the template name by stripping the colons out of the task type text + $task_template =~ s/://g; + $template->param( + task_template => "tasks/$task_template.inc", + ); +} + + +if ($op){ + #FIXME: This is cheating... 
you should create a TemplateToolkit plugin that does this and use it instead + if ($op eq 'new' || $op eq 'update'){ + require Koha::Database; + require Koha::BiblioFrameworks; + my @frameworks = Koha::BiblioFrameworks->as_list(); + $template->{VARS}->{ frameworks } = \@frameworks; + } + + if ($op eq 'new'){ + + } elsif ($op eq 'create'){ + + #Validate the $task here + if ($step){ + if ($step eq "one"){ + + $op = "new"; + $template->param( + step => "two", + task_type => $task_type, + ); + } elsif ($step eq "two"){ + my $new_task = Koha::SavedTask->new({ + start_time => $start_time, + repeat_interval => $repeat_interval, + task_type => $task_type, + params => $params, + }); + + #Serialize the data as an Icarus task + my $icarus_task = $new_task->serialize({ for => "icarus", type => "perl", }); + + my $valid = 1; + #Load the plugin module, and create an object instance in order to validate user-entered data + if ( can_load( modules => { $task_type => undef, }, ) ){ + my $plugin = $task_type->new({ task => $icarus_task, }); + if ($plugin->can("validate")){ + my $errors = $plugin->validate({ + "tests" => "all", + }); + if (%$errors){ + $template->param( + errors => $errors, + ); + } + if ($plugin->{invalid_data} > 0){ + $valid = 0; + } + } + } + + if ($valid){ + $new_task->store(); + $op = "list"; + } else { + $op = "new"; + #Create a Perl data structure from the JSON + my $editable_params = from_json($params); + $template->param( + step => "two", + task_type => $task_type, + saved_task => $new_task, + params => $editable_params, + ); + } + } + } + + } elsif ($op eq 'edit'){ + my $task = Koha::SavedTasks->find($saved_task_id); + if ($task){ + #Check if the task's saved task type is actually available... + #FIXME: This should be a Koha::Icarus method... + my $task_type_is_valid = grep { $task->task_type eq $_ } @available_plugins; + $template->param( + task_type_is_valid => $task_type_is_valid, + saved_task => $task, + ); + } + } elsif ($op eq 'update'){ + if ($step){ + my $task = Koha::SavedTasks->find($saved_task_id); + if ($task){ + if ($step eq "one"){ + #We've completed step one, which is choosing the task type, + #so now we're going to populate the form for editing the rest of the values + $op = "edit"; + #This is the JSON string that we've saved in the database + my $current_params_string = $task->params; + my $editable_params = from_json($current_params_string); + + $template->param( + step => "two", + task_type => $task_type, + saved_task => $task, + params => $editable_params, + + ); + } elsif ($step eq "two"){ + #We've completed step two, so we're storing the data now... + $task->set({ + start_time => $start_time, + repeat_interval => $repeat_interval, + task_type => $task_type, + params => $params, + }); + $task->store; + #FIXME: Validate the $task here... 
+ if (my $valid = 1){ + $op = "list"; + } else { + $op = "edit"; + $template->param( + step => "two", + task_type => $task_type, + saved_task => $task, + ); + } + } + } + } + } elsif ($op eq 'send'){ + my $sent_response; + if ($icarus->connected){ + if ($saved_task_id){ + #Look up task + my $task = Koha::SavedTasks->find($saved_task_id); + if ($task){ + #Create a task for Icarus, and send it to Icarus + my $icarus_task = $task->serialize({ for => "icarus", type => "perl", }); + if ($icarus_task){ + $icarus->add_task({ task => $icarus_task, }); + $op = "list"; + } + } + } + } else { + $sent_response = "icarus_offline"; + $template->param( + sent_response => $sent_response, + ); + $op = "list"; + } + } elsif ($op eq 'delete'){ + my $saved_response = "delete_failure"; + if ($saved_task_id){ + #Look up task + my $task = Koha::SavedTasks->find($saved_task_id); + if ($task){ + if (my $something = $task->delete){ + $saved_response = "delete_success"; + } + } + } + $template->param( + saved_response => $saved_response, + ); + $op = "list"; + } else { + #Don't recognize $op, so fallback to list + $op = "list"; + } +} else { + #No $op, so fallback to list + $op = "list"; +} + +if ($op eq 'list'){ + #Get active tasks from Icarus + if ($icarus->connected){ + my $tasks = $icarus->list_tasks(); + if ($tasks && @$tasks){ + #Sort tasks that come from Icarus, since it returns an unsorted list of hashrefs + my @sorted_tasks = sort { $a->{task_id} <=> $b->{task_id} } @$tasks; + $template->param( + tasks => \@sorted_tasks, + ); + } + } + + #Get saved tasks from Koha + my @saved_tasks = Koha::SavedTasks->as_list(); + $template->param( + saved_tasks => \@saved_tasks, + ); +} + +$template->param( + op => $op, +); + +output_html_with_http_headers $input, $cookie, $template->output; diff --git a/docs/Icarus/README b/docs/Icarus/README new file mode 100755 index 0000000..5e06c41 --- /dev/null +++ b/docs/Icarus/README @@ -0,0 +1,33 @@ +TODO: + +_ICARUS_ +- Improve error handling for Koha::Icarus::Task::Upload::* + - When does it get a status of "Failed"? +- Data validation: + "Koha::Icarus::Task::Upload::OAIPMH::Biblio": + - Validate HTTP URLs and filepaths... + - MAKE IT SO YOU HAVE TO USE A RECORD MATCHING RULE! To at the very least strip the OAI wrapper... + - Add PLUGIN->validate("parameter_names") + - Add PLUGIN->validate("parameter_values") + - For the downloader, this would validate HTTP && OAI-PMH parameters... + +####### + +ICARUS: +- admin/saved_tasks.pl + - Add a clone button to ease task creation +- Make the "Task type" prettier (and translateable) on saved_tasks.pl. +- Provide more options for the Icarus dashboard? (already have start/shutdown...) +- Add the ability to "edit" and "pause" active Icarus tasks + - A pause function would make debugging much easier. +- Add help pages for WEB GUI +- Make "Koha::Icarus::Task::Upload::OAIPMH::Biblio" use asynchronous HTTP requests to speed up the import +- Instead of using file:///home/koha/koha-dev/var/spool/oaipmh, use something like file:///tmp/koha-instance/koha-dev/oaipmh + - How is the user going to specify file:///tmp/koha-instance/koha-dev/oaipmh? Or do you put this in koha-conf.xml and then make a user-defined relative path? +- WEB UI: + - Add `name` to saved_tasks? +- Move "Saved tasks" from Administration to Tools? 
+ - Look at existing bugs for schedulers: + - https://bugs.koha-community.org/bugzilla3/show_bug.cgi?id=14712 + - https://bugs.koha-community.org/bugzilla3/show_bug.cgi?id=1993 +- Handle datestamp granularity better for OAI-PMH download tasks? diff --git a/etc/koha-conf.xml b/etc/koha-conf.xml index c361846..9f6f207 100644 --- a/etc/koha-conf.xml +++ b/etc/koha-conf.xml @@ -137,4 +137,14 @@ __PAZPAR2_TOGGLE_XML_POST__ + + __SCRIPT_DIR__/icarusd.pl + unix:__ICARUS_RUN_DIR__/icarus.sock + __ICARUS_RUN_DIR__/icarus.pid + __ICARUS_SPOOL_DIR__ + __LOG_DIR__/icarus.log + Koha::Icarus::Task::Download::OAIPMH::Biblio + Koha::Icarus::Task::Upload::OAIPMH::Biblio + __ICARUS_MAX_TASKS__ + diff --git a/installer/data/mysql/atomicupdate/bug_10662-Icarus.sql b/installer/data/mysql/atomicupdate/bug_10662-Icarus.sql new file mode 100644 index 0000000..f27aa84 --- /dev/null +++ b/installer/data/mysql/atomicupdate/bug_10662-Icarus.sql @@ -0,0 +1,9 @@ +DROP TABLE IF EXISTS saved_tasks; +CREATE TABLE saved_tasks ( + task_id int(10) unsigned NOT NULL AUTO_INCREMENT, + start_time datetime NOT NULL, + repeat_interval int(10) unsigned NOT NULL, + task_type varchar(255) CHARACTER SET utf8 NOT NULL, + params text CHARACTER SET utf8 NOT NULL, + PRIMARY KEY (task_id) USING BTREE +) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; diff --git a/koha-tmpl/intranet-tmpl/prog/en/includes/admin-menu.inc b/koha-tmpl/intranet-tmpl/prog/en/includes/admin-menu.inc index e736272..32d0b0e 100644 --- a/koha-tmpl/intranet-tmpl/prog/en/includes/admin-menu.inc +++ b/koha-tmpl/intranet-tmpl/prog/en/includes/admin-menu.inc @@ -78,6 +78,7 @@ [% IF Koha.Preference('SMSSendDriver') == 'Email' %]
  • SMS cellular providers
  • [% END %] +
  • Saved tasks
  • diff --git a/koha-tmpl/intranet-tmpl/prog/en/includes/tasks/KohaIcarusTaskDownloadOAIPMHBiblio.inc b/koha-tmpl/intranet-tmpl/prog/en/includes/tasks/KohaIcarusTaskDownloadOAIPMHBiblio.inc new file mode 100755 index 0000000..24c75f5 --- /dev/null +++ b/koha-tmpl/intranet-tmpl/prog/en/includes/tasks/KohaIcarusTaskDownloadOAIPMHBiblio.inc @@ -0,0 +1,87 @@ +[%# USE CGI %] +[%# server_name = CGI.server_name; server_port = CGI.server_port; server = server_name _ ":" _ server_port; %] + +
    + HTTP parameters: +
      +
1. + + [% IF ( params.url ) %] + + [% ELSE %] + + [% END %] + [% IF (errors.url.no_path) %][The URL must have a path after "http://" like "koha-community.org/cgi-bin/koha/oai.pl".][% END %] + [% IF (errors.url.not_http) %][The URL must begin with a scheme of "http://" like "http://koha-community.org/cgi-bin/koha/oai.pl".][% END %] + [% IF (errors.url.not_a_url) %][The value of this field must be a URL like "http://koha-community.org/cgi-bin/koha/oai.pl".][% END %] +
    2. +
    + The following parameters are not required by all OAI-PMH repositories, so they may be optional for this task. +
      +
    1. + + +
    2. +
    3. + + +
    4. +
    5. + + +
    6. +
    +
    +
    + OAI-PMH parameters: +
      +
    1. + + +
    2. +
    3. + + + This identifier will only be used with the GetRecord verb. +
    4. +
    5. + + You may specify several sets by separating the sets with a pipe (e.g. set1|set2 ) +
    6. +
    7. + + +
    8. +
    9. + + This value will be treated as UTC time. Note that some repositories only support YYYY-MM-DD datestamps. +
    10. +
    11. + + This value will be treated as UTC time. Note that some repositories only support YYYY-MM-DD datestamps. +
    12. +
    +
    +
    + Download parameters: +
      +
    1. + + [% IF ( params.queue ) %] + + [% ELSE %] + + [% END %] + Choose a unique name for the queue where your downloaded records will be stored prior to upload. (e.g. biblio-sync, unioncatalogue) +
    2. +
    +
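The form above gathers three groups of settings for the download task: the base HTTP URL of the OAI-PMH repository, the OAI-PMH request parameters (verb, metadata prefix, identifier, set(s), from/until datestamps), and the name of the spool queue that holds harvested records until they are uploaded. As a rough sketch only — the real task module is Koha/Icarus/Task/Download/OAIPMH/Biblio.pm, and the hash keys below are assumptions taken from the form labels, not from that module — these fields combine into a single harvest request along these lines:

#!/usr/bin/perl
# Rough sketch only (not part of this patch): how the HTTP and OAI-PMH fields
# above might combine into one harvest request. The hash keys are illustrative
# assumptions; the real download task lives in
# Koha/Icarus/Task/Download/OAIPMH/Biblio.pm.
use Modern::Perl;
use URI;
use LWP::UserAgent;

my $params = {
    url             => 'http://koha-community.org/cgi-bin/koha/oai.pl',
    verb            => 'ListRecords',
    metadata_prefix => 'marcxml',
    sets            => 'set1|set2',              # pipe-separated, as described above
    from            => '2016-01-01T00:00:00Z',   # treated as UTC time
};

my $uri  = URI->new( $params->{url} );
my @sets = split /\|/, $params->{sets};
$uri->query_form(
    verb           => $params->{verb},
    metadataPrefix => $params->{metadata_prefix},
    ( @sets           ? ( set  => $sets[0] )        : () ),   # OAI-PMH allows one set per request
    ( $params->{from} ? ( from => $params->{from} ) : () ),
);

my $response = LWP::UserAgent->new( timeout => 30 )->get($uri);
print $response->is_success ? $response->decoded_content : $response->status_line;

Because OAI-PMH only accepts one set per ListRecords request, a task configured with several pipe-separated sets would need to issue one request per set.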
    diff --git a/koha-tmpl/intranet-tmpl/prog/en/includes/tasks/KohaIcarusTaskUploadOAIPMHBiblio.inc b/koha-tmpl/intranet-tmpl/prog/en/includes/tasks/KohaIcarusTaskUploadOAIPMHBiblio.inc new file mode 100755 index 0000000..f6fae68 --- /dev/null +++ b/koha-tmpl/intranet-tmpl/prog/en/includes/tasks/KohaIcarusTaskUploadOAIPMHBiblio.inc @@ -0,0 +1,132 @@ +[% USE CGI %] +[% server = CGI.http('HTTP_ORIGIN') %] +[%# TODO: Test if this works with Plack and HTTPS... %] +[% default_auth_uri = server _ '/cgi-bin/koha/svc/authentication' %] +[% default_target_uri = server _ '/cgi-bin/koha/svc/import_oai' %] +
    + Import source parameters: +
      +
    1. + + [% IF ( params.queue ) %] + + [% ELSE %] + + [% END %] + This is the name of a unique queue you already defined for downloaded records (e.g. biblio-sync, unioncatalogue) +
    2. +
    +
    +
    + API authentication parameters: +
      +
1. + + [% IF ( params.auth_uri ) %] + + [% ELSE %] + + [% END %] + [% IF (errors.auth_uri.no_path) %][The URL must have a path after "http://" like "koha-community.org/cgi-bin/koha/svc/authentication".][% END %] + [% IF (errors.auth_uri.not_http) %][The URL must begin with a scheme of "http://" like "http://koha-community.org/cgi-bin/koha/svc/authentication".][% END %] + [% IF (errors.auth_uri.not_a_url) %][The value of this field must be a URL like "http://koha-community.org/cgi-bin/koha/svc/authentication".][% END %] + This is a Koha authentication URL. The default value +
    2. +
    3. + + + This user must have permission to edit the catalogue. +
    4. +
    5. + + +
    6. +
    +
    +
    + Import target parameters: +
      +
1. + + [% IF ( params.target_uri ) %] + + [% ELSE %] + + [% END %] + [% IF (errors.target_uri.no_path) %][The URL must have a path after "http://" like "koha-community.org/cgi-bin/koha/svc/import_oai".][% END %] + [% IF (errors.target_uri.not_http) %][The URL must begin with a scheme of "http://" like "http://koha-community.org/cgi-bin/koha/svc/import_oai".][% END %] + [% IF (errors.target_uri.not_a_url) %][The value of this field must be a URL like "http://koha-community.org/cgi-bin/koha/svc/import_oai".][% END %] +
    2. + +
    3. + + + This code must exist in "Record matching rules" in Administration for record matching to work. (Example code: OAI) +
    4. +
    5. + [%# TODO: Ideally, I'd like to use 'tools-overlay-action.inc' but the logic doesn't work here. Perhaps it would be better as a TT plugin. %] + + +
    6. +
    7. + [%# TODO: Ideally, I'd like to use 'tools-nomatch-action.inc' but the logic doesn't work here. Perhaps it would be better as a TT plugin. %] + + +
    8. +
    9. + + + This framework is only used for new records. Koha will use the original record framework when replacing records. +
    10. +
    11. + + [% IF ( params.filter ) %] + + [% ELSE %] + + [% END %] + Using the keyword "default", Koha will use the default filter for converting OAI-PMH records to MARCXML records. Alternatively, you can provide a file URL like file:///etc/koha/sites/libraryname/OAI2MARC21slim.xsl or file:///usr/share/koha/intranet/htdocs/intranet-tmpl/prog/en/xslt/OAI2MARC21slim.xsl to use a different filter. +
    12. +
    +
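The upload task mirrors the download form: it reads records from the named spool queue, logs into Koha through the /svc/authentication URL with a user allowed to edit the catalogue, runs each record through the OAI-to-MARCXML filter, and posts the result to the /svc/import_oai target with the chosen matching rule, overlay/no-match actions and framework. A minimal sketch of that client side follows; the form-field names for the import call are hypothetical, since the real names are defined by the svc script and Koha/Icarus/Task/Upload/OAIPMH/Biblio.pm elsewhere in this patch.

#!/usr/bin/perl
# Rough sketch only (not part of this patch): authenticate against Koha's
# /svc/authentication and post one filtered record to the import target.
# The 'xml' field name for svc/import_oai is an assumption for illustration.
use Modern::Perl;
use LWP::UserAgent;
use HTTP::Cookies;

my $ua = LWP::UserAgent->new( cookie_jar => HTTP::Cookies->new() );

# Step 1: log in; the session cookie is kept in the cookie jar for step 2.
my $auth = $ua->post(
    'http://koha-community.org/cgi-bin/koha/svc/authentication',
    { userid => 'icarus_user', password => 'secret' },
);
die 'Authentication failed: ' . $auth->status_line unless $auth->is_success;

# Step 2: post a record that has already been run through the OAI-to-MARCXML
# filter. The remaining form fields above (matching rule code, overlay and
# no-match actions, framework) would be passed the same way.
my $marcxml = '<record>...</record>';
my $import  = $ua->post(
    'http://koha-community.org/cgi-bin/koha/svc/import_oai',
    { xml => $marcxml },
);
print $import->is_success ? "Imported\n" : 'Import failed: ' . $import->status_line . "\n";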
    diff --git a/koha-tmpl/intranet-tmpl/prog/en/modules/admin/admin-home.tt b/koha-tmpl/intranet-tmpl/prog/en/modules/admin/admin-home.tt index bd951bc..2be69ea 100644 --- a/koha-tmpl/intranet-tmpl/prog/en/modules/admin/admin-home.tt +++ b/koha-tmpl/intranet-tmpl/prog/en/modules/admin/admin-home.tt @@ -120,6 +120,8 @@
    SMS cellular providers
    Define a list of cellular providers for sending SMS messages via email.
    [% END %] +
    Saved tasks
    +
Define tasks which may be run in the background.
    diff --git a/koha-tmpl/intranet-tmpl/prog/en/modules/admin/saved_tasks.tt b/koha-tmpl/intranet-tmpl/prog/en/modules/admin/saved_tasks.tt new file mode 100755 index 0000000..3dc2855 --- /dev/null +++ b/koha-tmpl/intranet-tmpl/prog/en/modules/admin/saved_tasks.tt @@ -0,0 +1,345 @@ +[% INCLUDE 'doc-head-open.inc' %] +Koha › Administration › Saved tasks +[% INCLUDE 'doc-head-close.inc' %] +[% INCLUDE 'calendar.inc' %] + +[% INCLUDE 'timepicker.inc' %] +[% IF ( op == "list" ) %] + + [% INCLUDE 'datatables.inc' %] + +[% ELSIF ( op == "edit" ) || ( op == "new" ) %] + + +[% END %] + + + +[% INCLUDE 'header.inc' %] +[% INCLUDE 'cat-search.inc' %] + + +
    + +
    +
    +
    + [% IF ( op ) %] + [% IF ( op == "new" ) || ( op == "edit" ) %] + [%# If step is undefined, force it to be step one %] + [% IF ( ! step ); step = "one"; END; %] + + + + [%# HEADING %] + [% IF ( op == "new" ) %] +

    New saved task

    + [% ELSIF ( op == "edit" ) %] +

    Modify saved task

    + [% END %] + [%# /HEADING %] + + [%# TODO: Get this working properly...
    Validation failed.
    #] + + [%# FORM %] +
    + [% IF ( op == "new" ) %] + + [% ELSIF ( op == "edit" ) %] + + + [% END %] + +
    +
      + [% IF ( op == "edit") && ( step == "one" ) && (! task_type_is_valid ) %] +
    1. + + + Sorry! This task type is invalid. Please choose a new one from the following list. +
    2. + [% END %] +
    3. + + [% IF ( step == "one" ) %] + [% IF ( available_plugins ) %] + + [% END %] + + [% ELSIF ( step == "two" ) %] + + + [% END %] +
    4. +
    +
    + + [% IF ( step == "one" ) %] +
    + + Cancel +
    + [% ELSIF ( step == "two" ) %] +
    + Task: +
      +
    1. + + + This value will be treated as local server time, and times in the past will start immediately. +
    2. +
    3. + + + seconds + [% IF (errors.repeat_interval.not_numeric) %][The repeat interval must be a purely numeric value.][% END %] +
    4. +
    +
    + [%# Try to include the template, but if it fails, fallback to a regular text view %] + [% TRY %] + [% INCLUDE $task_template %] + [% CATCH %] +
    + Plugin parameters: +
      +
    1. + + +
    2. +
    +
    + [% END %] +
    + + Cancel +
    + [% END %] +
    + [%# /FORM %] + [% END #/edit or new %] + + + [% IF ( op == "list" ) %] + +

    Saved tasks

    + [% IF ( saved_response ) %] + [% IF ( saved_response == 'delete_success' ) %] +
    Deletion successful.
    + [% ELSIF ( saved_response == 'delete_failure' ) %] +
    Deletion failed.
    + [% END %] + [% END %] + [% IF ( sent_response ) %] + [% IF ( sent_response == 'icarus_offline' ) %] +
    Send failed. Icarus is currently offline.
    + [% END %] + [% END %] + + + + + + + + + + + + + + [% FOREACH saved_task IN saved_tasks %] + + + + + + + + + + [% END %] + +
    Start timeRepeat intervalTask typeParams
    [% IF ( saved_task.start_time ) != "0000-00-00 00:00:00"; saved_task.start_time; END; %][% saved_task.repeat_interval %][% saved_task.task_type %] +
      + [% FOREACH pair IN saved_task.params_as_perl.pairs %] +
    • [% pair.key %] => [% IF ( pair.key.match('.*password.*') ); '########'; ELSE; pair.value; END; %]
    • + [% END %] +
    +
    EditSend to IcarusDelete
    +
    +

    Icarus dashboard

    + + + + + + + + + [%# TODO: Also provide controls for starting/restarting Icarus? %] + + + +
    Status
    + + [% IF ( daemon_status == 'No such file or directory' ) #Socket doesn't exist at all %] + Unable to contact + [% ELSIF ( daemon_status == 'Permission denied' ) #Apache doesn't have permission to write to socket %] + Permission denied + [% ELSIF ( daemon_status == 'Connection refused' ) #Socket exists, but server is down %] + Connection refused + [% ELSIF ( daemon_status == 'Start failed' ) %] + Start failed + [% ELSIF ( daemon_status == 'online' ) %] + Online + [% ELSIF ( daemon_status == 'shutting down' ) %] + Shutting down + [% ELSE %] + [% daemon_status %] + [% END %] + Start IcarusShutdown Icarus
    +
    +
    +

    Active Icarus tasks

    + [% IF ( task_response ) %] + [% IF ( task_response.action == 'error' ) %] + [% IF ( task_response.error_message ) %] + [% IF ( task_response.error_message == 'No such process' ) %] +
    Task [% task_response.task_id %] does not exist.
    + [% END %] + [% END %] + [% ELSIF ( task_response.action == 'pending' ) %] +
    Initialising task [% task_response.task_id %].
    + [% ELSIF ( task_response.action == 'already pending' ) %] +
    Already initialised task [% task_response.task_id %].
    + [% ELSIF ( task_response.action == 'already started' ) %] +
    Already started task [% task_response.task_id %].
    + [% ELSIF ( task_response.action == 'removed' ) %] +
    Removing task [% task_response.task_id %].
    + [% END %] + [% END %] + [% IF ( tasks ) %] + + + + + + + + + + + + + + + [% FOREACH task IN tasks %] + + + + + + + + + + + [% END %] + +
    Task idStatusNext start time (local server time)Repeat intervalTask typeParams
    [% task.task_id %] + [% SWITCH task.task.status %] + [% CASE 'new' %] + New + [% CASE 'pending' %] + Pending + [% CASE 'started' %] + Started + [% CASE 'stopping' %] + Stopping + [% CASE 'failed' %] + Failed + [% CASE %] + [% task.task.status %] + [% END %] + [% task.task.start %][% task.task.repeat_interval %][% task.task.type %] +
      + [% FOREACH pair IN task.task.params.pairs %] +
    • [% pair.key %] => [% IF ( pair.key.match('.*password.*') ); '########'; ELSE; pair.value; END; %]
    • + [% END %] +
    +
    StartRemove
    + [% END %] +
    + [% END #/list %] + [% END #/op %] +
    +
    +
    + [% INCLUDE 'admin-menu.inc' %] +
    +
    +[% INCLUDE 'intranet-bottom.inc' %] diff --git a/misc/bin/icarusd.pl b/misc/bin/icarusd.pl new file mode 100755 index 0000000..fa265bd --- /dev/null +++ b/misc/bin/icarusd.pl @@ -0,0 +1,213 @@ +#!/usr/bin/perl + +####################################################################### + +use Modern::Perl; +use POSIX; #For daemonizing +use Fcntl qw(:flock); #For pidfile +use Getopt::Long; +use Pod::Usage; + +#Make the STDOUT filehandle hot, so that you can use shell re-direction. Otherwise, you'll suffer from buffering. +STDOUT->autoflush(1); +#Note that STDERR, by default, is already hot. + +####################################################################### +#FIXME: Debugging signals +=pod +BEGIN { + package POE::Kernel; + use constant TRACE_SIGNALS => 1; + use constant ASSERT_USAGE => 1; + use constant ASSERT_DATA => 1; +} +=cut + + +use POE; +use JSON; #For Listener messages +use XML::LibXML; #For configuration files + +use Koha::Icarus::Listener; + +####################################################################### + +my ($filename,$daemon,$log,$help); +my $verbosity = 1; +GetOptions ( + "f|file|filename=s" => \$filename, #/kohawebs/dev/dcook/koha-dev/etc/koha-conf.xml + "l|log=s" => \$log, + "d|daemon" => \$daemon, + "v=i" => \$verbosity, + "h|?" => \$help, +) or pod2usage(2); +pod2usage(1) if $help; + + +if ( ! $filename || ! -f $filename ){ + warn "Failed to start.\n"; + if ( ! $filename ){ + warn("You must provide a valid configuration file using the -f switch.\n"); + pod2usage(1); + } + if ( ! -f $filename ){ + die(qq{"$filename" is not a file.\n}); + } +} + +#Declare the variable with file scope so the flock stays for the duration of the process's life +my $pid_filehandle; + +#Read configuration file +my $config = read_config_file($filename); + +my $SOCK_PATH = $config->{socket} || ''; +my $spooldir = $config->{spooldir} || ''; +my $pid_file = $config->{pidfile} || ''; +my $max_tasks = $config->{max_tasks}; + +#Overwrite configuration file with command line options +if ($log){ + $config->{log} = $log; +} + +#Test file permissions... +my @warnings = (); +foreach my $file_to_check ($pid_file, $config->{log}){ + local (*TMP); + if ($file_to_check){ + utime(undef, undef, $file_to_check) || open(TMP, ">>$file_to_check") || push(@warnings,"couldn't touch $file_to_check: $!"); + } +} +if (@warnings){ + foreach my $warning (@warnings){ + warn $warning; + } + exit 1; +} + +#Go into daemon mode, if user has included flag +if ($daemon){ + daemonize(); +} + +if ($pid_file){ + #NOTE: The filehandle needs to have file scope, so that the flock is preserved. + $pid_filehandle = make_pid_file($pid_file); +} + +#FIXME: Do we want to log to file only in daemon mode? $config->{log} should be populated by either the config file or the l|log GetOpt... +if ($daemon && $config->{log}){ + log_to_file($config->{log}); +} + + +#FIXME: 1) In daemon mode, SIGUSR1 or SIGHUP for reloading/restarting? 
+
####################################################################### + +#Creates Icarus Listener +Koha::Icarus::Listener->spawn({ + Socket => $SOCK_PATH, + MaxTasks => $max_tasks, + Verbosity => $verbosity, + SpoolDir => $spooldir, +}); + +POE::Kernel->run(); + +exit; + +sub read_config_file { + my $filename = shift; + my $config = {}; + if ( -e $filename ){ + eval { + my $doc = XML::LibXML->load_xml(location => $filename); + if ($doc){ + my $root = $doc->documentElement; + my $icarus = $root->find('icarus')->shift; + if ($icarus){ + #Get all child nodes for the 'icarus' element + my @childnodes = $icarus->childNodes(); + foreach my $node (@childnodes){ + #Only consider nodes that are elements + if ($node->nodeType == XML_ELEMENT_NODE){ + my $config_key = $node->nodeName; + my $first_child = $node->firstChild; + #Only consider nodes that have a text node as their first child + if ($first_child && $first_child->nodeType == XML_TEXT_NODE){ + $config->{$config_key} = $first_child->nodeValue; + } + } + } + } + } + }; + } + return $config; +} + +####################################################################### +#NOTE: On Debian, you can use the daemon binary to make a process into a daemon; +# the following subs are for systems that don't have the daemon binary. + +sub daemonize { + my $pid = fork; + die "Couldn't fork: $!" unless defined($pid); + if ($pid){ + exit; #Parent exit + } + POSIX::setsid() or die "Can't start a new session: $!"; + + #Change to known system directory + chdir('/'); + + #Close inherited file handles, so that you can truly run in the background. + open STDIN, '<', '/dev/null'; + open STDOUT, '>', '/dev/null'; + open STDERR, '>&STDOUT'; + + #FIXME: You should probably reset the file creation mask here as well... +} + +sub log_to_file { + my $logfile = shift; + #Open a filehandle to append to a log file + open(LOG, '>>', $logfile) or die "Unable to open a filehandle for $logfile: $!\n"; # --output + LOG->autoflush(1); #Make filehandle hot (i.e. don't buffer) + *STDOUT = *LOG; #Re-assign STDOUT to LOG | --stdout + *STDERR = *STDOUT; #Re-assign STDERR to STDOUT | --stderr +} + +sub make_pid_file { + my $pidfile = shift; + if ( ! -e $pidfile ){ + open(my $fh, '>', $pidfile) or die "Unable to write to $pidfile: $!\n"; + $fh->close; + } + + open(my $pidfilehandle, '+<', $pidfile) or die "Unable to open a filehandle for $pidfile: $!\n"; + if (flock($pidfilehandle, LOCK_EX|LOCK_NB)){ + #Write pid to pidfile + warn "Acquiring lock on $pidfile\n"; + #Now that we've acquired a lock, let's truncate the file + truncate($pidfilehandle, 0); + print $pidfilehandle $$."\n" or die $!; + #Flush the filehandle so you're not suffering from buffering + $pidfilehandle->flush(); + return $pidfilehandle; + } else { + my $number = <$pidfilehandle>; + chomp($number); + warn "$0 is already running with pid $number.
Exiting.\n"; + exit(1); + } +} + +__END__ + +=head1 SYNOPSIS + +icarusd.pl -f koha-conf.xml [--log icarus.log] [--daemon] [ -v 0-9 ] [-h] + +=cut diff --git a/rewrite-config.PL b/rewrite-config.PL index a66b119..c5d25d4 100644 --- a/rewrite-config.PL +++ b/rewrite-config.PL @@ -149,6 +149,9 @@ $prefix = $ENV{'INSTALL_BASE'} || "/usr"; "__MEMCACHED_SERVERS__" => "", "__MEMCACHED_NAMESPACE__" => "", "__FONT_DIR__" => "/usr/share/fonts/truetype/ttf-dejavu", + "__ICARUS_RUN_DIR__" => "$prefix/var/run/icarus", + "__ICARUS_SPOOL_DIR__" => "$prefix/var/spool/icarus", + "__ICARUS_MAX_TASKS__" => "30", ); # Override configuration from the environment diff --git a/skel/var/run/koha/icarus/README b/skel/var/run/koha/icarus/README new file mode 100644 index 0000000..ecb05dd --- /dev/null +++ b/skel/var/run/koha/icarus/README @@ -0,0 +1 @@ +icarus dir diff --git a/skel/var/spool/koha/icarus/README b/skel/var/spool/koha/icarus/README new file mode 100644 index 0000000..0ebff51 --- /dev/null +++ b/skel/var/spool/koha/icarus/README @@ -0,0 +1 @@ +icarus spool -- 2.1.4
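Beyond the staff-client dashboard, the Koha::Icarus client class added by this patch can also be driven from a one-off script to check on the daemon, which is handy when debugging icarusd.pl. A minimal sketch, assuming the daemon was started with the socket value from the <icarus> stanza in koha-conf.xml (the path below is only an example for a default install prefix):

#!/usr/bin/perl
# Rough sketch only (not part of this patch): connect to a running Icarus
# daemon over its UNIX socket and print the active tasks. The socket path is
# an example; use the socket value from your own koha-conf.xml.
use Modern::Perl;
use Koha::Icarus;

my $icarus = Koha::Icarus->new({ socket_uri => 'unix:/usr/var/run/icarus/icarus.sock' });
if ( $icarus->connect() ){
    my $tasks = $icarus->list_tasks() || [];
    foreach my $task (@$tasks){
        say "Task $task->{task_id}: $task->{task}{status} ($task->{task}{type})";
    }
} else {
    say "Icarus appears to be offline: $!";
}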