package Koha::OAI::Client::Repository;

# Copyright Prosentient Systems 2015
#
# This file is part of Koha.
#
# Koha is free software; you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation; either version 3 of the License, or (at your option) any later
# version.
#
# Koha is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with Koha; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

use Modern::Perl;
use Carp;
use POSIX qw(strftime);
use HTTP::OAI;
use URI;
use DateTime;
use DateTime::Format::Strptime;
use XML::LibXML; #Explicitly load XML::LibXML::XPathContext, used in find_original_system_number
use base qw(Koha::Object);

use Koha::OAI::Client::Records;

=head1 NAME

Koha::OAI::Client::Repository - Koha object class for an OAI-PMH repository harvested by Koha's OAI-PMH client

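=head1 SYNOPSIS

A minimal usage sketch. The accessor and method names come from this module;
the construction and storage calls assume the usual Koha::Object conventions,
and the endpoint URL is hypothetical:

    use Koha::OAI::Client::Repository;

    my $repository = Koha::OAI::Client::Repository->new({
        base_url        => 'http://oai.example.org/oai.pl', #hypothetical endpoint
        metadata_prefix => 'marcxml',
    })->store();

    my $errors = $repository->validate();
    $errors    = $repository->test_settings();
    $repository->harvest({ verbose => 1 });
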
=head1 API

=head2 Class Methods

=cut

=head3 test_settings

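Sends Identify, ListSets, and ListMetadataFormats requests to the configured repository and returns a hashref of error flags (for example C<second_granularity_not_supported>, C<set_does_not_exist>, C<metadata_prefix_does_not_exist>, C<url_not_found>, C<failed_authentication>, C<generic_identify_error>, or C<no_harvester>). An empty hashref means the current settings passed every test.
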
=cut

sub test_settings {
    my ( $self ) = @_;
    my $errors = {};
    my $harvester = $self->harvester();
    if ($harvester){
        my $identify = $harvester->Identify;
        if ($identify->is_success){

            #Test Granularity
            my $granularity_setting = $self->datetime_granularity;
            if ($granularity_setting eq "YYYY-MM-DDThh:mm:ssZ"){
                my $actual_granularity = $identify->granularity;
                if ($granularity_setting ne $actual_granularity){
                    $errors->{second_granularity_not_supported} = 1;
                }
            }

            #Test Set
            my $set_setting = $self->opt_set;
            if ($set_setting){
                my $sets = $harvester->ListSets();
                my $matched_set;
                while ( my $set = $sets->next ){
                    if ($set_setting eq $set->setSpec){
                        $matched_set = 1;
                        last;
                    }
                }
                if ( ! $matched_set ){
                    $errors->{set_does_not_exist} = 1;
                }
            }

            #Test Metadata prefix
            my $metadata_prefix_setting = $self->metadata_prefix;
            if ($metadata_prefix_setting){
                my $metadata_formats = $harvester->ListMetadataFormats();
                my $matched_format;
                while ( my $metadata_format = $metadata_formats->next ){
                    if ($metadata_prefix_setting eq $metadata_format->metadataPrefix){
                        $matched_format = 1;
                        last;
                    }
                }
                if ( ! $matched_format ){
                    $errors->{metadata_prefix_does_not_exist} = 1;
                }
            }
        } else {
            if ($identify->is_error()){
                foreach my $error ($identify->errors){
                    if ($error->code =~ /^404$/){
                        $errors->{url_not_found} = 1;
                    } elsif ($error->code =~ /^401$/){
                        $errors->{failed_authentication} = 1;
                    } else {
                        $errors->{generic_identify_error} = 1;
                    }
                }
            } else {
                #This should never happen, but you never know...
                $errors->{generic_identify_error} = 1;
            }
        }
    } else {
        $errors->{no_harvester} = 1;
    }
    return $errors;
}

=head3 validate

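Validates this repository's stored settings without contacting the remote server. Returns a hashref of errors: C<base_url> must be defined and parse as an http or https URI, and C<metadata_prefix> must be defined.
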
=cut

sub validate {
    my ( $self ) = @_;
    my $errors = {};
    if ($self->base_url){
        my $uri = URI->new($self->base_url);
        if ($uri){
            my $reference = ref $uri;
            if ( ! grep { $reference eq $_ } ("URI::http","URI::https")){
                $errors->{base_url}->{invalid} = 1;
            }
        }
    } elsif (! defined $self->base_url){
        #base_url is undefined
        $errors->{base_url}->{required} = 1;
    }
    if ( ! defined $self->metadata_prefix ){
        $errors->{metadata_prefix}->{required} = 1;
    }
    return $errors;
}

=head3 find_original_system_number

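Extracts the originating system's record identifier from a harvested MARCXML record, using the field (and optional subfield) configured in C<original_system_field> (e.g. C<001> or C<999$c>). Returns the identifier, or undef if the metadata is missing, is not MARCXML, or does not contain the configured field.
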
=cut

sub find_original_system_number {
    my ( $self, $args ) = @_;

    my $original_system_number;
    my $metadata = $args->{metadata};
    if ($metadata){
        my $original_system_field = $self->original_system_field; #e.g. 001 or 999$c
        if ($original_system_field){
            #Select the root element (e.g. <metadata>)
            my $root = $metadata->documentElement();
            #Select the first child of the root (i.e. the original metadata record) (e.g. <record>)
            my $original_metadata = $root->firstChild;

            #Check to see if there is a MARC namespace
            my $marcxml_prefix = $original_metadata->lookupNamespacePrefix("http://www.loc.gov/MARC21/slim");
            if (defined $marcxml_prefix){
                my ($tag,$code) = split(/\$/,$original_system_field); #e.g. split 999$c into 999 and c || split 001 into 001
                my $xpc = XML::LibXML::XPathContext->new;
                $xpc->registerNs('marc', 'http://www.loc.gov/MARC21/slim');
                my $fields = $xpc->find(qq(//marc:record/node()[\@tag="$tag"]),$original_metadata);
                if ($fields){
                    #Use the first field found
                    my $field = $fields->get_node(1);
                    if ($field){
                        my $node_name = $field->nodeName;
                        if ($node_name eq 'controlfield'){
                            #If it's a controlfield, we can use the text without drilling down
                            $original_system_number = $field->textContent;
                        } elsif ($node_name eq 'datafield'){
                            #If it's a datafield, we need to drill down into the subfields
                            if ($code){
                                my $subfields = $field->find(qq(node()[\@code="$code"]));
                                if ($subfields){
                                    #Use the first subfield found
                                    my $subfield = $subfields->get_node(1);
                                    if ($subfield){
                                        $original_system_number = $subfield->textContent;
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
    }
    return $original_system_number;
}

=head3 queue_record

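Adds a harvested OAI-PMH record to the Koha::OAI::Client::Records queue with an action of C<to_import>. Deleted records are only queued (with action C<to_delete>) if they were previously harvested and not already marked as deleted. If a stylesheet is supplied, it is used to strip the OAI-PMH wrapper from the metadata before queuing.
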
=cut

sub queue_record {
    my ( $self, $args ) = @_;
    my $record = $args->{record};
    my $stylesheet = $args->{stylesheet};

    my $action = "to_import";
    if ( $record->is_deleted() ){
        my $already_deleted_rows = Koha::OAI::Client::Records->search({ identifier => $record->identifier, action => "deleted" });
        my $identified_rows = Koha::OAI::Client::Records->search({ identifier => $record->identifier });
        if ($identified_rows->count() > 0 && $already_deleted_rows->count() == 0){
            #Only queue records "to_delete" if the record has been previously harvested
            $action = "to_delete";
        } else {
            return;
        }
    }

    my $metadata = $record->metadata ? $record->metadata->dom : '';
    my $original_system_number;
    if ($metadata){
        if ($stylesheet){
            #Use a stylesheet to strip the OAI-PMH metadata XML wrapper
            eval {
                my $result = $stylesheet->transform($metadata);
                if ($result){
                    $metadata = $result;
                }
            };
            if ($@){
                warn "Problem transforming harvested metadata with XSLT: $@";
            }
        }

        $original_system_number = $self->find_original_system_number({metadata => $metadata});
    }

    my $entry = Koha::OAI::Client::Records->_resultset->find_or_new({
            repository_id => $self->repository_id, #Internal ID for the OAI repository
            action => $action,
            identifier => $record->identifier, #External ID for the OAI record
            datestamp => $record->datestamp, #External datestamp for the OAI record
            metadata_prefix => $self->metadata_prefix, #External metadataPrefix for the OAI record
            metadata => $metadata ? $metadata->toString(1) : undef,
            record_type => $self->record_type,
            original_system_number => $original_system_number,
        },
        { key => "harvest_identifier_datestamp" }
    );
    if( !$entry->in_storage ) {
        $entry->insert;
    }
}

#Convert a MySQL-style datetime string into the datetime granularity configured for this repository
sub _format_datetime {
    my ( $self, $args ) = @_;
    my $datetime_string = $args->{datetime_string};
    my $formatted_string;
    if ($datetime_string && $datetime_string ne "0000-00-00 00:00:00"){
        my $strp = DateTime::Format::Strptime->new(
            pattern => '%F %T',
        );
        my $dt = $strp->parse_datetime($datetime_string);
        if ($dt){
            if ($self->datetime_granularity && $self->datetime_granularity eq 'YYYY-MM-DDThh:mm:ssZ'){
                $formatted_string = $dt->strftime("%FT%TZ");
            } else {
                $formatted_string = $dt->strftime("%F");
            }
        }
    }
    return $formatted_string;
}

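=head3 harvester

Builds and returns an HTTP::OAI::Harvester for this repository's C<base_url>, registering the configured HTTP Basic credentials (C<basic_realm>, C<basic_username>, C<basic_password>) against the URL's host and port. Returns undef if no C<base_url> is set.

=cut
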
sub harvester {
    my ( $self ) = @_;
    my $harvester;
    if ($self->base_url){
        $harvester = HTTP::OAI::Harvester->new( baseURL => $self->base_url );
        #Use basic http authentication credentials if they're set
        # if ($self->basic_realm && $self->basic_username && $self->basic_password){
            #Decompose the URI in order to register the basic authentication credentials
            my $uri = URI->new($self->base_url);
            my $host = $uri->host;
            my $port = $uri->port;
            $harvester->credentials($host.":".$port, $self->basic_realm, $self->basic_username, $self->basic_password);
        # }
    }
    return $harvester;
}

=head3 query_repository

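Issues a ListRecords request to the repository using the configured C<metadata_prefix>, C<opt_from>, C<opt_until>, and C<opt_set>, after converting the stored datetimes to the repository's granularity. Returns the HTTP::OAI::ListRecords response, or undef if no harvester could be created.
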
=cut

sub query_repository {
    my ( $self ) = @_;
    my $response;
    my $h = $self->harvester();
    if ($h){
        my $opt_from = $self->opt_from;
        my $opt_until = $self->opt_until;
        foreach my $datetime ($opt_from,$opt_until){
            #Format the datetime strings from the database into the format and granularity
            #expected by the target OAI-PMH server
            $datetime = $self->_format_datetime({datetime_string => $datetime,});
        }

        #Issue the request to the OAI-PMH server
        $response = $h->ListRecords(
            metadataPrefix => $self->metadata_prefix,
            from => $opt_from,
            until => $opt_until,
            set => $self->opt_set,
        );
    }
    return $response;
}

=head3 harvest

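Performs a full harvesting run: queries the repository, queues each returned record via C<queue_record>, and on success updates C<opt_from> to the time the harvest started so the next run only requests newer records. Accepts C<verbose> and C<stylesheet> arguments.
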
=cut

sub harvest {
    my ( $self, $args ) = @_;

    my $verbose = $args->{verbose} // '';
    my $stylesheet = $args->{stylesheet} // '';
    $verbose && print "Harvesting records from ".$self->base_url."\n";

    #Record the exact time before sending the first OAI-PMH request, and set "opt_from" to this after the harvest is finished
    my $new_from_date = strftime "%Y-%m-%d %H:%M:%S", localtime;
    #Send a request to the OAI-PMH repository
    my $oai_response = $self->query_repository();
    if ($oai_response){
        if( $oai_response->is_error ){
            warn "responseDate => " . $oai_response->responseDate . "\n";
            warn "requestURL => " . $oai_response->requestURL . "\n";
            warn "Error harvesting: " . $oai_response->message . "\n";
        } else {
            while( my $rec = $oai_response->next ) {
                $verbose && print $rec->identifier."\n";
                #Queue each record, so a different script can process them in Koha
                $self->queue_record({record => $rec, stylesheet => $stylesheet, });
            }
            #Update opt_from after a successful harvest
            $self->opt_from($new_from_date);
            $verbose && print "Updating `opt_from` to $new_from_date \n";
            $self->store();
        }
    }
}

#FIXME
=head3 reharvest

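Deletes this repository's previously queued records via C<delete_harvest> and, if that succeeds, runs a fresh C<harvest>. If the delete step dies, the error is warned and no harvest is performed.
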
=cut

sub reharvest {
    my ( $self, $args ) = @_;
    my $verbose = $args->{verbose} // '';
    eval {
        $self->delete_harvest({ verbose => $verbose, });
    };
    if ($@){
        warn $@;
    } else {
        $self->harvest({ verbose => $verbose, });
    }
}

#FIXME
=head3 delete_harvest

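Deletes all queued Koha::OAI::Client::Records rows for this repository. Dies with an arrayref of the problem records if any of them could not be deleted.
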
=cut

sub delete_harvest {
    my ( $self ) = @_;

    my $count_of_records = 0;
    my $count_of_deleted_records = 0;
    my $problem_records = [];

    if ($self->repository_id){
        my $records = Koha::OAI::Client::Records->search({ "repository_id" => $self->repository_id });
        while (my $record = $records->next){
            #Increment overall count
            $count_of_records++;
            eval {
                my $is_deleted = $record->delete();
                if ($is_deleted){
                    $count_of_deleted_records++;
                }
            };
            if ($@){
                push(@$problem_records,$record);
            }
        }
    }

    if ($count_of_deleted_records < $count_of_records){
        #Not all of this repository's records could be deleted. Manual intervention may be required to remove items
        die $problem_records;
    }
}

=head3 type

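Returns C<OaiHarvestRepository>, the result set name that the Koha::Object framework uses to map this class to its database table.
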
=cut

sub type {
    return 'OaiHarvestRepository';
}

=head1 AUTHOR

David Cook <dcook@prosentient.com.au>

=cut

1;