From 6d4e003db71841b00ea58df687cbcebbd9822f95 Mon Sep 17 00:00:00 2001
From: David Cook
Date: Wed, 25 Oct 2017 11:56:09 +1100
Subject: [PATCH] Fix problems reported by Koha QA tools

https://bugs.koha-community.org/show_bug.cgi?id=10662

This patch addresses the problems reported by the Koha QA tools:

- Koha/Daemon.pm used a bareword filehandle (LOG) and never closed the
  filehandle used to create the PID file
- several OAI-PMH harvester modules were missing POD headings
- statements of the form "my $var = EXPR if COND;" declared a lexical
  variable inside a statement modifier, which is undefined behaviour in
  Perl; each is now a declaration followed by a conditional assignment
- "return undef;" is now a bare "return;" so the sub returns an empty
  list in list context
- whitespace-only cleanups (tab indentation replaced with four-space
  indentation)
- the oai_harvester_requests table definition now specifies its collation
---
 Koha/Daemon.pm                                     |   7 +-
 Koha/OAI/Harvester.pm                              |  64 +++++++------
 Koha/OAI/Harvester/Downloader.pm                   |   6 ++
 Koha/OAI/Harvester/Import/RDFXML.pm                |   7 +-
 Koha/OAI/Harvester/Import/Record.pm                |  61 +++++++-----
 Koha/OAI/Harvester/Request.pm                      | 102 ++++++++++-----------
 installer/data/mysql/kohastructure.sql             |   2 +-
 .../intranet-tmpl/prog/en/includes/tools-menu.inc  |  34 +++----
 .../modules/tools/oai-pmh-harvester/dashboard.tt   |  26 +++---
 9 files changed, 172 insertions(+), 137 deletions(-)

diff --git a/Koha/Daemon.pm b/Koha/Daemon.pm
index 62199c46a4..cdae369225 100644
--- a/Koha/Daemon.pm
+++ b/Koha/Daemon.pm
@@ -61,10 +61,10 @@ sub log_to_file {
     my ($self,$logfile) = @_;
 
     #Open a filehandle to append to a log file
-    my $opened = open(LOG, '>>', $logfile);
+    my $opened = open(my $fh, '>>', $logfile);
     if ($opened){
-        LOG->autoflush(1); #Make filehandle hot (ie don't buffer)
-        *STDOUT = *LOG; #Re-assign STDOUT to LOG | --stdout
+        $fh->autoflush(1); #Make filehandle hot (ie don't buffer)
+        *STDOUT = *$fh; #Re-assign STDOUT to LOG | --stdout
         *STDERR = *STDOUT; #Re-assign STDERR to STDOUT | --stderr
     }
     else {
@@ -76,6 +76,7 @@ sub make_pidfilehandle {
     my ($self,$pidfile) = @_;
     if ( ! -f $pidfile ){
         open(my $fh, '>', $pidfile) or die "Unable to write to $pidfile: $!\n";
+        close($fh);
     }
     open(my $pidfilehandle, '+<', $pidfile) or die "Unable to open a filehandle for $pidfile: $!\n";
     return $pidfilehandle;
diff --git a/Koha/OAI/Harvester.pm b/Koha/OAI/Harvester.pm
index 7b52261e61..86619faa29 100644
--- a/Koha/OAI/Harvester.pm
+++ b/Koha/OAI/Harvester.pm
@@ -32,6 +32,12 @@ use DateTime::Format::Strptime;
 use C4::Context;
 use Koha::Database;
 
+=head1 API
+
+=head2 Class Methods
+
+=cut
+
 my $day_granularity = DateTime::Format::Strptime->new(
     pattern => '%F',
 );
@@ -288,9 +294,13 @@ sub does_task_repeat {
     if ($task){
         my $interval = $task->{interval};
         my $parameters = $task->{parameters};
-        my $oai_pmh = $parameters->{oai_pmh} if $parameters->{oai_pmh};
-        if ( $interval && ($oai_pmh->{verb} eq "ListRecords") && (! $oai_pmh->{until}) ){
-            return 1;
+        if ($parameters){
+            my $oai_pmh = $parameters->{oai_pmh};
+            if ($oai_pmh){
+                if ( $interval && ($oai_pmh->{verb} eq "ListRecords") && (! $oai_pmh->{until}) ){
+                    return 1;
+                }
+            }
         }
     }
     return 0;
@@ -314,29 +324,31 @@ sub restore_state {
     my ($self, $kernel, $heap, $session) = @_[OBJECT, KERNEL,HEAP,SESSION];
 
     my $state_file = $self->{state_file};
-    my $state_backup = "$state_file~";
-
-    #NOTE: If there is a state backup, it means we crashed while saving the state. Otherwise,
-    #let's try the regular state file if it exists.
-    my $file_to_restore = ( -f $state_backup ) ? $state_backup : ( ( -f $state_file ) ? $state_file : undef );
-    if ( $file_to_restore ){
-        my $opened = open( my $fh, '<', $file_to_restore ) or die "Couldn't open state: $!";
-        if ($opened){
-            local $/;
-            my $in = <$fh>;
-            my $decoder = Sereal::Decoder->new;
-            my $state = $decoder->decode($in);
-            if ($state){
-                if ($state->{tasks}){
-                    #Restore tasks from our saved state
-                    $heap->{tasks} = $state->{tasks};
-                    foreach my $uuid ( keys %{$heap->{tasks}} ){
-                        my $task = $heap->{tasks}->{$uuid};
-
-                        #If tasks were still downloading, restart the task
-                        if ( ($task->{status} && $task->{status} eq "active") && $task->{downloading} ){
-                            $task->{status} = "new";
-                            $kernel->call("harvester","start_task",$task->{uuid});
+    if ($state_file){
+        my $state_backup = "$state_file~";
+
+        #NOTE: If there is a state backup, it means we crashed while saving the state. Otherwise,
+        #let's try the regular state file if it exists.
+        my $file_to_restore = ( -f $state_backup ) ? $state_backup : ( ( -f $state_file ) ? $state_file : undef );
+        if ( $file_to_restore ){
+            my $opened = open( my $fh, '<', $file_to_restore ) or die "Couldn't open state: $!";
+            if ($opened){
+                local $/;
+                my $in = <$fh>;
+                my $decoder = Sereal::Decoder->new;
+                my $state = $decoder->decode($in);
+                if ($state){
+                    if ($state->{tasks}){
+                        #Restore tasks from our saved state
+                        $heap->{tasks} = $state->{tasks};
+                        foreach my $uuid ( keys %{$heap->{tasks}} ){
+                            my $task = $heap->{tasks}->{$uuid};
+
+                            #If tasks were still downloading, restart the task
+                            if ( ($task->{status} && $task->{status} eq "active") && $task->{downloading} ){
+                                $task->{status} = "new";
+                                $kernel->call("harvester","start_task",$task->{uuid});
+                            }
                         }
                     }
                 }
diff --git a/Koha/OAI/Harvester/Downloader.pm b/Koha/OAI/Harvester/Downloader.pm
index 896a8163c7..4ca8be6bf1 100644
--- a/Koha/OAI/Harvester/Downloader.pm
+++ b/Koha/OAI/Harvester/Downloader.pm
@@ -23,6 +23,12 @@ use XML::LibXML::Reader;
 use IO::Handle;
 use JSON;
 
+=head1 API
+
+=head2 Class Methods
+
+=cut
+
 sub new {
     my ($class, $args) = @_;
     $args = {} unless defined $args;
diff --git a/Koha/OAI/Harvester/Import/RDFXML.pm b/Koha/OAI/Harvester/Import/RDFXML.pm
index 5fa1f07566..712fa2af9f 100755
--- a/Koha/OAI/Harvester/Import/RDFXML.pm
+++ b/Koha/OAI/Harvester/Import/RDFXML.pm
@@ -29,7 +29,7 @@ use Koha::RDF;
 sub new {
     my ($class, $args) = @_;
     $args = {} unless defined $args;
-    return bless ($args, $class);
+    return bless ($args, $class);
 }
 
 sub _parse_rdf_into_model {
@@ -106,8 +106,11 @@ sub import_record {
     my $action = "error";
 
     #Create a model for our Koha triplestore backend
+    my $real_model = undef;
     my $context = C4::Context->new();
-    my $real_model = $context->triplestore('update') if $context && $context->triplestore('update');
+    if ( $context && $context->triplestore('update') ){
+        $real_model = $context->triplestore('update');
+    }
     my $memory_model = $self->_parse_rdf_into_model();
     my $iterator = $memory_model->as_stream;
     if ($iterator){
diff --git a/Koha/OAI/Harvester/Import/Record.pm b/Koha/OAI/Harvester/Import/Record.pm
index 2841333784..6f2b5452bd 100755
--- a/Koha/OAI/Harvester/Import/Record.pm
+++ b/Koha/OAI/Harvester/Import/Record.pm
@@ -33,6 +33,12 @@ use Koha::OAI::Harvester::Import::MARCXML;
 use Koha::OAI::Harvester::Import::RDFXML;
 use Koha::RDF;
 
+=head1 API
+
+=head2 Class Methods
+
+=cut
+
 my $schema = Koha::Database->new()->schema();
 
 sub new {
@@ -133,27 +139,27 @@ sub filter {
             my $stylesheet = $xslt->parse_stylesheet($style_doc);
             if ($stylesheet){
                 my $results = $stylesheet->transform($doc);
-                if ($results){
-                    my $root = $results->documentElement;
-                    if ($root){
-                        my $namespace = $root->namespaceURI;
-                        if ($namespace eq "http://www.loc.gov/MARC21/slim"){
-                            #NOTE: Both MARC21 and UNIMARC should be covered by this namespace
-                            my $marcxml = eval { Koha::OAI::Harvester::Import::MARCXML->new({ dom => $results, }) };
+                if ($results){
+                    my $root = $results->documentElement;
+                    if ($root){
+                        my $namespace = $root->namespaceURI;
+                        if ($namespace eq "http://www.loc.gov/MARC21/slim"){
+                            #NOTE: Both MARC21 and UNIMARC should be covered by this namespace
+                            my $marcxml = eval { Koha::OAI::Harvester::Import::MARCXML->new({ dom => $results, }) };
                             if ($@){
                                 warn "Error Koha::OAI::Harvester::Import::MARCXML: $@";
-                                return undef;
+                                return;
                             } else {
                                 return $marcxml;
                             }
-                        } elsif ($namespace eq "http://www.w3.org/1999/02/22-rdf-syntax-ns#"){
-                            my $rdfxml = Koha::OAI::Harvester::Import::RDFXML->new({ dom => $results, });
-                            if ($rdfxml){
-                                return $rdfxml;
-                            }
-                        }
-                    }
-                }
+                        } elsif ($namespace eq "http://www.w3.org/1999/02/22-rdf-syntax-ns#"){
+                            my $rdfxml = Koha::OAI::Harvester::Import::RDFXML->new({ dom => $results, });
+                            if ($rdfxml){
+                                return $rdfxml;
+                            }
+                        }
+                    }
+                }
             }
         }
     }
@@ -217,7 +223,7 @@ sub _find_rdf_link {
     return ($subject,$graph);
 }
 
-=head3
+=head3 import_record
 
     my ($action,$record_id) = $oai_record->import_record({
         filter => $filter,
@@ -247,9 +253,12 @@ sub import_record {
     });
 
     #NOTE: RDF
-    my $context = C4::Context->new();
     #Find linkage between OAI-PMH repository-identifier and RDF records in the triplestore
-    my ($linked_subject, $linked_graph) = $self->_find_rdf_link if $context && $context->triplestore('query');
+    my ($linked_subject, $linked_graph);
+    my $context = C4::Context->new();
+    if ( $context && $context->triplestore('query') ){
+        ($linked_subject, $linked_graph) = $self->_find_rdf_link;
+    }
 
     if ($self->is_deleted_upstream){
         #FIXME: If a record is deleted upstream, it will not contain a metadata element, so we don't know what metadata
@@ -259,7 +268,10 @@
         #https://www.openarchives.org/OAI/openarchivesprotocol.html#DeletedRecords
 
         if ($linked_graph){
-            my $model = $context->triplestore('update') if $context && $context->triplestore('update');
+            my $model;
+            if ( $context && $context->triplestore('update') ){
+                $model = $context->triplestore('update');
+            }
             my $rdf = Koha::RDF->new();
             if ($rdf){
                 $model->begin_bulk_ops;
@@ -321,8 +333,11 @@
         #NOTE: Link Koha RDF to Imported RDF
         if ( ($record_type && $linked_id) && ($linked_subject) ){
             my $rdf = Koha::RDF->new();
+            my $triplestore;
             my $context = C4::Context->new();
-            my $triplestore = $context->triplestore('update') if $context && $context->triplestore('update');
+            if ( $context && $context->triplestore('update') ){
+                $triplestore = $context->triplestore('update');
+            }
             if ( $triplestore && $rdf ){
                 my $koha_uri = $rdf->mint_uri($record_type, $linked_id);
                 my $koha_subject = RDF::Trine::Node::Resource->new($koha_uri);
@@ -377,11 +392,11 @@ sub link_koha_record {
 sub delete_koha_record {
     my ($self, $args) = @_;
     my $record_type = $args->{record_type} // "biblio";
-    my $record_id = $args->{record_id};
+    my $record_id = $args->{record_id};
 
     my $action = "error";
 
-    if ($record_type eq "biblio"){
+    if ($record_type eq "biblio"){
         my $error = C4::Biblio::DelBiblio($record_id);
         if (!$error){
             $action = "deleted";
diff --git a/Koha/OAI/Harvester/Request.pm b/Koha/OAI/Harvester/Request.pm
index 535b7e52bd..35e930a8eb 100644
--- a/Koha/OAI/Harvester/Request.pm
+++ b/Koha/OAI/Harvester/Request.pm
@@ -63,56 +63,56 @@
     #Step three: validate OAI-PMH parameters
 
     #Test Set
-    my $set = $self->oai_set;
+    my $set = $self->oai_set;
     if ($set){
         my $set_response = $harvester->ListSets();
-        my @server_sets = $set_response->set;
-        if ( ! grep {$_->setSpec eq $set} @server_sets ){
+        my @server_sets = $set_response->set;
+        if ( ! grep {$_->setSpec eq $set} @server_sets ){
             $errors->{oai_set}->{unavailable} = 1;
         }
     }
 
     #Test Metadata Prefix
-    my $metadataPrefix = $self->oai_metadataPrefix;
-    if ($metadataPrefix){
-        my $metadata_response = $harvester->ListMetadataFormats();
-        my @server_formats = $metadata_response->metadataFormat;
-        if ( ! grep { $_->metadataPrefix eq $metadataPrefix } @server_formats ){
+    my $metadataPrefix = $self->oai_metadataPrefix;
+    if ($metadataPrefix){
+        my $metadata_response = $harvester->ListMetadataFormats();
+        my @server_formats = $metadata_response->metadataFormat;
+        if ( ! grep { $_->metadataPrefix eq $metadataPrefix } @server_formats ){
             $errors->{oai_metadataPrefix}->{unavailable} = 1;
         }
-    }
+    }
     else {
         $errors->{oai_metadataPrefix}->{missing} = 1;
     }
 
     #Test Granularity and Timestamps
-    my $server_granularity = $identify->granularity;
-    my $from = $self->oai_from;
-    my $until = $self->oai_until;
-    if ($from || $until){
-        my ($from_granularity,$until_granularity);
-        if ($from){
-            $from_granularity = _determine_granularity($from);
-            if ($from_granularity eq "YYYY-MM-DDThh:mm:ssZ"){
-                $errors->{oai_from}->{unavailable} = 1 if $server_granularity ne $from_granularity;
-            } elsif ($from_granularity eq "failed"){
-                $errors->{oai_from}->{malformed} = 1;
-            }
-        }
-        if ($until){
-            $until_granularity = _determine_granularity($until);
-            if ($until_granularity eq "YYYY-MM-DDThh:mm:ssZ"){
-                $errors->{oai_until}->{unavailable} = 1 if $server_granularity ne $until_granularity;
-            } elsif ($until_granularity eq "failed"){
-                $errors->{oai_until}->{malformed} = 1;
-            }
-        }
-        if ($from && $until){
-            if ($from_granularity ne $until_granularity){
-                $errors->{oai}->{granularity_mismatch} = 1;
-            }
-        }
-    }
+    my $server_granularity = $identify->granularity;
+    my $from = $self->oai_from;
+    my $until = $self->oai_until;
+    if ($from || $until){
+        my ($from_granularity,$until_granularity);
+        if ($from){
+            $from_granularity = _determine_granularity($from);
+            if ($from_granularity eq "YYYY-MM-DDThh:mm:ssZ"){
+                $errors->{oai_from}->{unavailable} = 1 if $server_granularity ne $from_granularity;
+            } elsif ($from_granularity eq "failed"){
+                $errors->{oai_from}->{malformed} = 1;
+            }
+        }
+        if ($until){
+            $until_granularity = _determine_granularity($until);
+            if ($until_granularity eq "YYYY-MM-DDThh:mm:ssZ"){
+                $errors->{oai_until}->{unavailable} = 1 if $server_granularity ne $until_granularity;
+            } elsif ($until_granularity eq "failed"){
+                $errors->{oai_until}->{malformed} = 1;
+            }
+        }
+        if ($from && $until){
+            if ($from_granularity ne $until_granularity){
+                $errors->{oai}->{granularity_mismatch} = 1;
+            }
+        }
+    }
 
     #Test if identifier is provided when using GetRecord
     my $verb = $self->oai_verb;
@@ -167,7 +167,7 @@ sub _harvester {
     my $harvester;
     if ($self->http_url){
         $harvester = new HTTP::OAI::Harvester( baseURL => $self->http_url );
-        my $uri = URI->new($self->http_url);
+        my $uri = URI->new($self->http_url);
         if ($uri->scheme && ($uri->scheme eq 'http' || $uri->scheme eq 'https') ){
             my $host = $uri->host;
             my $port = $uri->port;
@@ -178,20 +178,20 @@
 }
 
 sub _determine_granularity {
-    my ($timestamp) = @_;
-    my $granularity;
-    if ($timestamp =~ /^(\d{4}-\d{2}-\d{2})(T\d{2}:\d{2}:\d{2}Z)?$/){
-        if ($1 && $2){
-            $granularity = "YYYY-MM-DDThh:mm:ssZ";
-        } elsif ($1 && !$2){
-            $granularity = "YYYY-MM-DD";
-        } else {
-            $granularity = "failed";
-        }
-    } else {
-        $granularity = "failed";
-    }
-    return $granularity;
+    my ($timestamp) = @_;
+    my $granularity;
+    if ($timestamp =~ /^(\d{4}-\d{2}-\d{2})(T\d{2}:\d{2}:\d{2}Z)?$/){
+        if ($1 && $2){
+            $granularity = "YYYY-MM-DDThh:mm:ssZ";
+        } elsif ($1 && !$2){
+            $granularity = "YYYY-MM-DD";
+        } else {
+            $granularity = "failed";
+        }
+    } else {
+        $granularity = "failed";
+    }
+    return $granularity;
 }
 
 =head1 AUTHOR
diff --git a/installer/data/mysql/kohastructure.sql b/installer/data/mysql/kohastructure.sql
index 0a6e135ee4..5754ea15b2 100644
--- a/installer/data/mysql/kohastructure.sql
+++ b/installer/data/mysql/kohastructure.sql
@@ -4235,7 +4235,7 @@ CREATE TABLE IF NOT EXISTS `oai_harvester_requests` (
   `name` varchar(45) NOT NULL,
   `import_rdf_type` varchar(255) DEFAULT NULL,
   PRIMARY KEY (`id`)
-) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8;
+) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
 
 /*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */;
 /*!40101 SET SQL_MODE=@OLD_SQL_MODE */;
diff --git a/koha-tmpl/intranet-tmpl/prog/en/includes/tools-menu.inc b/koha-tmpl/intranet-tmpl/prog/en/includes/tools-menu.inc
index 10ea37ed17..8058fe1a4f 100644
--- a/koha-tmpl/intranet-tmpl/prog/en/includes/tools-menu.inc
+++ b/koha-tmpl/intranet-tmpl/prog/en/includes/tools-menu.inc
@@ -6,22 +6,22 @@
[The body of this hunk, covering the menu entries under the "Patrons and circulation", "Catalog", and "Additional tools" headings of tools-menu.inc, and the diff for koha-tmpl/intranet-tmpl/prog/en/modules/tools/oai-pmh-harvester/dashboard.tt, are truncated here.]
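
A note for reviewers on the recurring "my ... if ..." change above: perlsyn documents that declaring a lexical variable in a statement with a statement modifier is undefined behaviour, and in practice the variable can keep its value from a previous call, acting like an accidental static variable. The standalone Perl sketch below illustrates the failure mode and the declare-then-assign pattern this patch uses; it is not part of the patch, and the sub names are invented for the demonstration:

    #!/usr/bin/perl
    use strict;
    use warnings;

    # Undefined behaviour: "my $var = EXPR if COND;" declares $var inside a
    # statement modifier. When COND is false, $var is not reset, so it may
    # leak its value from an earlier invocation of the sub.
    sub buggy {
        my ($cond) = @_;
        my $value = "set" if $cond;
        return $value;
    }

    # The pattern used throughout this patch: declare first, assign conditionally.
    sub fixed {
        my ($cond) = @_;
        my $value;
        if ($cond){
            $value = "set";
        }
        return $value;
    }

    print 'buggy(1): ', buggy(1) // 'undef', "\n"; # prints "set"
    print 'buggy(0): ', buggy(0) // 'undef', "\n"; # may print the stale "set"
    print 'fixed(0): ', fixed(0) // 'undef', "\n"; # always prints "undef"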