package C4::OAI::Harvester;

# This file is part of Koha.
#
# Copyright 2013 Prosentient Systems
#
# Koha is free software; you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation; either version 3 of the License, or (at your option) any later
# version.
#
# Koha is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with Koha; if not, see <http://www.gnu.org/licenses>.

=head1 NAME

C4::OAI::Harvester - OAI-PMH harvester/client which implements the 6 OAI-PMH verbs, record retrieval/harvesting, and import into Koha

=head1 SYNOPSIS

  use C4::OAI::Harvester;
  my $oai_repo = C4::OAI::Harvester->new($repository_data);

  my $identify_repository = $oai_repo->Identify;

  my @sets = $oai_repo->ListSets;

  my @formats = $oai_repo->ListMetadataFormats;

  my @headers = $oai_repo->ListIdentifiers;

  my @records = $oai_repo->ListRecords;

  my @records = $oai_repo->GetRecord($oai_unique_identifier);

  my $import_mode = ''; #i.e. not "automatic"
  $oai_repo->set_import_mode($import_mode);

  $oai_repo->ImportRecordsIntoKoha(@records);

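The C<$repository_data> hashref is not spelled out above. A minimal sketch,
assuming it simply supplies the fields exposed by this class's accessors (in
Koha these values would normally come from the saved OAI-PMH repository
configuration; the values shown are hypothetical):

  my $repository_data = {
      baseURL        => 'http://example.org/oai', # hypothetical endpoint
      metadataPrefix => 'marcxml',
      repository_id  => 1,                        # local DB key for this repository
      XSLT_path      => '',                       # optional XSLT (local path or URL)
      matcher_id     => undef,                    # optional C4::Matcher id
      import_mode    => 'manual',                 # anything other than "automatic"
  };

Note that new() immediately issues an Identify request against the baseURL, so
the repository needs to be reachable when the object is constructed.
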
=head1 DESCRIPTION

C4::OAI::Harvester contains functions for querying and harvesting OAI-PMH repositories.

More information on OAI-PMH can be found L<here|http://www.openarchives.org/OAI/openarchivesprotocol.html>

=head1 FUNCTIONS

=head1 AUTHOR

David Cook <dcook AT prosentient DOT com DOT au>

=cut

use Modern::Perl;
use Data::Dumper;
use C4::Context;

use C4::ImportBatch qw/AddImportBatch SetImportBatchItemAction AddBiblioToBatch SetImportBatchStatus BatchCommitRecords BatchFindDuplicates/;
use C4::Matcher;

use HTTP::OAI;
use MARC::Record; #MARC::Record objects are built from the transformed metadata before staging

use base qw(Class::Accessor);

sub new {
    my ($proto, $fields) = @_;
    my ($class) = ref $proto || $proto;

    $fields = {} unless defined $fields;

    if ($fields->{'baseURL'}){
        my $h = HTTP::OAI::Harvester->new(
            baseURL => $fields->{'baseURL'},
        );
        #If resume is set to 0, automatic token resumption is turned off. This is useful for testing/debugging.
        if ($h && exists $fields->{'resume'}){
            if ($fields->{'resume'} == 0){
                $h->resume(0);
            }
        }
        my $response = $h->repository($h->Identify);
        if( $response->is_error ) {
            print "Error requesting Identify:\n",
                $response->code . " " . $response->message, "\n";
            exit;
        }
        $fields->{rh} = $h; #Store HTTP::OAI::Harvester object as "repository handle"
    }
    bless {%$fields}, $class;
}

__PACKAGE__->follow_best_practice; #Use get_ and set_ prefixes for accessors
__PACKAGE__->mk_accessors(qw(baseURL from until set metadataPrefix rh repository_id XSLT_path matcher_id import_mode));

=head2 OAI-PMH Verbs

Koha-specific implementations of the 6 OAI-PMH Verbs.

The key verbs are "ListRecords" and "GetRecord". These do the actual
harvesting of records from an OAI-PMH repository. The others are useful for
getting information about a repository and what it has available.

1) ListRecords

2) GetRecord

3) ListIdentifiers

4) ListMetadataFormats

5) ListSets

6) Identify

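A hedged sketch of a typical harvest-and-import run built from these verbs,
assuming the harvester was constructed as in the SYNOPSIS (ImportRecordsIntoKoha
is documented further down):

  my @formats = $oai_repo->ListMetadataFormats;
  die "Repository does not offer marcxml\n" unless grep { $_ eq 'marcxml' } @formats;

  my @records = $oai_repo->ListRecords;
  my ($batch_id, $num_with_matches) = $oai_repo->ImportRecordsIntoKoha(@records);
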
=cut

sub ListRecords {
    my $self = shift;
    my $response = $self->{rh}->ListRecords(
        metadataPrefix => $self->{metadataPrefix},
        from => $self->{opt_from},
        until => $self->{opt_until},
        set => $self->{opt_set},
    );
    if( $response->is_error ) {
        print "Error requesting ListRecords:\n",
            $response->code . " " . $response->message, "\n";
        exit;
    }
    #print Dumper($response->toDOM); #FIXME: dac2013: Fails with this warning "bad ns attribute!"
    my @records = _parse_records_into_useful_format($response);
    return @records;
}

sub GetRecord {
    my ( $self, $identifier ) = @_;
    my $response = $self->{rh}->GetRecord(
        metadataPrefix => $self->{metadataPrefix},
        identifier => $identifier,
    );
    if( $response->is_error ) {
        print "Error requesting GetRecord:\n",
            $response->code . " " . $response->message, "\n";
        exit;
    }
    my @records = _parse_records_into_useful_format($response);
    return @records;
}

sub ListIdentifiers {
    my $self = shift;
    my @headers;
    my $response = $self->{rh}->ListIdentifiers(
        metadataPrefix => $self->{metadataPrefix},
        from => $self->{opt_from},
        until => $self->{opt_until},
        set => $self->{opt_set},
    );
    if( $response->is_error ) {
        print "Error requesting ListIdentifiers:\n",
            $response->code . " " . $response->message, "\n";
        exit;
    }
    #print Dumper($response->toDOM); #FIXME: dac2013: Fails with this warning "bad ns attribute!"
    while (my $h = $response->next){
        my $header;
        #print Dumper($h->dom->toString); #DEBUG: dac2013: XML representation
        $header->{identifier} = $h->identifier;
        $header->{datestamp} = $h->datestamp;

        $header->{status} = $h->status;
        $header->{is_deleted} = $h->is_deleted;

        my @sets = $h->setSpec;
        $header->{sets} = \@sets;

        push (@headers,$header);
    }
    return @headers;
}

sub ListMetadataFormats {
    my ( $self, $identifier ) = @_;
    my @formats;
    my $response = $self->{rh}->ListMetadataFormats(
        identifier => $identifier,
    );
    if( $response->is_error ) {
        print "Error requesting ListMetadataFormats:\n",
            $response->code . " " . $response->message, "\n";
        exit;
    }
    for($response->metadataFormat) {
        push(@formats,$_->metadataPrefix);
    }
    return @formats;
}

sub ListSets {
    my $self = shift;
    my @sets;
    my $response = $self->{rh}->ListSets();
    if( $response->is_error ) {
        print "Error requesting ListSets:\n",
            $response->code . " " . $response->message, "\n";
        exit;
    }
    while (my $s = $response->next){
        my $set;
        $set->{setSpec} = $s->setSpec;
        $set->{setName} = $s->setName;

        #FIXME: dac2013: Not really sure what to do with the descriptions as they're XML and not necessarily that easy to parse for GUI views...
        #my @temp_setDescriptions = $s->setDescription;
        #my @setDescriptions;
        #foreach my $temp_setDescription (@temp_setDescriptions){
        #    push (@setDescriptions,$temp_setDescription->dom->toString); #dac2013: I think I need to do better than just return the setDescription XML...That's not very useful...
        #}
        #$set->{setDescription} = \@setDescriptions;
        push (@sets,$set);
    }
    return @sets;
}

sub Identify {
    my $self = shift;
    my $response = $self->{rh}->Identify();
    if( $response->is_error ) {
        print "Error requesting Identify:\n",
            $response->code . " " . $response->message, "\n";
        exit;
    }
    my $identify_data;
    #DEBUG: View what's in the Identify object
    #print Dumper($response->headers);

    $identify_data->{repositoryName} = $response->repositoryName;
    $identify_data->{baseURL} = $response->baseURL;
    $identify_data->{protocolVersion} = $response->protocolVersion; #Tim Brody says this will always return 2.0 and that ->version should be used to find the actual version...
    #$identify_data->{version} = $response->version;
    $identify_data->{earliestDatestamp} = $response->earliestDatestamp;
    $identify_data->{deletedRecord} = $response->deletedRecord; #not in the perldoc, but it's in the code and the OAI-PMH spec
    $identify_data->{granularity} = $response->granularity;

    #These methods should be used with an array context so they return all the elements and not just the first one
    my @adminEmails = $response->adminEmail;
    $identify_data->{adminEmail} = \@adminEmails;
    my @compressions = $response->compression;
    $identify_data->{compression} = \@compressions;

    #FIXME: dac2013: Descriptions are encapsulated in XML containers, I believe. Not sure what to do with these at present...
    #my @descriptions = $response->description;
    #$identify_data->{description} = \@descriptions;
    #$response->next

    return $identify_data;
}

=head2 _parse_records_into_useful_format

A C4::OAI::Harvester internal subroutine that parses HTTP::OAI::Record
objects into C4::OAI::Harvester::Record objects which have methods
for transforming incoming XML into MARCXML, saving historical records
in the database, and comparing incoming records against historical
records.

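Each returned object exposes the harvested data through Class::Accessor-style
getters. A short, purely illustrative sketch of walking the results (the method
names come from the accessors declared in the Record class below):

  foreach my $record (@records) {
      next if $record->get_deleted; # deleted records carry no metadata
      printf "%s (%s)\n", $record->get_identifier, $record->get_datestamp;
      my $xml = $record->get_metadata->toString; # XML::LibXML DOM of the metadata element
  }
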
=cut

sub _parse_records_into_useful_format {
    my ( $response ) = @_;
    my @records;

    while( my $rec = $response->next ) {
        my $record;

        $record->{header} = $rec->header ? $rec->header->dom : undef;
        $record->{identifier} = $rec->identifier;
        $record->{datestamp} = $rec->datestamp;
        $record->{metadata} = $rec->metadata ? $rec->metadata->dom : undef; #N.B. there won't be any metadata for deleted records
        $record->{about} = $rec->about ? $rec->about->dom : undef;
        $record->{deleted} = $rec->is_deleted;
        $record->{status} = $rec->status;

        #FIXME: Will these always exist for every record? Add error checking. Maybe include an undef or blank if they're not set...
        my $record_object = C4::OAI::Harvester::Record->new($record);

        #FIXME: Add error checking
        #if( $rec->is_error ) {
        #    die $response->message;
        #}
        push(@records,$record_object);
    }
    return @records;
}

=head2 ImportRecordsIntoKoha

  $oai_repo->ImportRecordsIntoKoha(@records);

@records is an array of records that have been retrieved using the
GetRecord or ListRecords verbs.

The import mode specifies "automatic" or "manual" importing. The
default mode is "manual"; "automatic" is triggered by setting the
harvester's import_mode (via the repository configuration or
set_import_mode) to the string "automatic". Any other value, or
undef, means manual.

This subroutine handles importing our retrieved records into Koha.

1. Foreach record:
    a) Check its identifier status in the database
    b) If its status is "create", "replace", or "ambiguous",
       we transform it into MARCXML.

       Note: "ambiguous" simply indicates that a "replace" couldn't
       occur automatically due to multiple biblionumbers being
       linked to the unique identifier.
    c) If the status is "ignore", we skip to the next record.
2. Add an import batch
3. Foreach record:
    a) Add the record to the import batch
    b) Add a historical record to the oai_harvest table
4. Check the import batch for duplicates/matches
   (i.e. existing records).

5. If automatic importing is turned on, batch import the records
   (otherwise, leave in staging area for manual import)

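A sketch of the two modes and what they return (the return values are those of
the code below; the surrounding variables are illustrative):

  # Manual mode: records are only staged, to be imported later via
  # "Manage staged MARC records".
  my ($batch_id, $num_with_matches) = $oai_repo->ImportRecordsIntoKoha(@records);

  # Automatic mode: records are committed to the catalogue immediately.
  $oai_repo->set_import_mode('automatic');
  my ($batch, $matches, $added, $updated, $items_added, $items_errored, $ignored) =
      $oai_repo->ImportRecordsIntoKoha(@records);
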
=cut

sub ImportRecordsIntoKoha {
    my ( $self, @records ) = @_;
    my @records_to_import;
    my ( %ignored, %created, %errored, %replaced, %deleted );

    foreach my $record (@records){

        #FIXME: Do the delete handling in the function...only use "next" with an "ignore"...
        #FIXME: stop returning @biblionumbers with GetIdentifierStatus...it's unnecessary
        #FIXME: Add a way of forcing add/replace despite existing Identifier+datestamp(+metadataPrefix)?
        my $identifier_status = $record->AddIdentifierStatusAndLinkedBiblionumbers($self->get_repository_id);
        if ($identifier_status){
            if ($identifier_status eq "ignore"){
                #This record's identifier and datestamp already exist in the database, so we don't import this record, or
                #this is a record with a deleted status that hasn't been imported into Koha before.
                next;
            }
        }
        else {
            #We weren't able to get the status of this record, so let's ignore it and skip to the next one
            next;
        }
        my $transform_result_flag = $record->TransformMetadata($self->get_metadataPrefix, $self->get_XSLT_path);
        push(@records_to_import,$record);
    }

    if (scalar @records_to_import > 0){
        #DEBUGGING ONLY - DUMPER
        #print Dumper(@records_to_import);
        my $overlay_action = 'replace'; #If an incoming record matches an existing one, we replace the existing one with the incoming record
        my $file_name = $self->get_baseURL;
        my $comments = "OAI-PMH Harvest";
        my $matcher_id = undef;
        my $rec_num = 0;
        my $num_with_matches = 0;

        #If a matcher_id is linked to this repository, we'll use that for matching records when applying updates. Otherwise, don't use a matcher.
        #FIXME: It might be an idea to make matcher_id a required field for OAI-PMH repositories...
        if ($self->get_matcher_id){
            $matcher_id = $self->get_matcher_id;
        }

        #FIXME: Have other import options be configurable?
        my $batch_id = AddImportBatch( {
            overlay_action => $overlay_action,
            import_status => 'staging',
            batch_type => 'batch', #FIXME: batch|z3950|webservice are the three current options. Despite this being a web service, I think that type of batch has special properties that shouldn't be used here?
            file_name => $file_name, #This is the baseURL for our remote OAI-PMH repository
            comments => $comments, #This contains a mention that this is an OAI-PMH harvest
            matcher_id => $matcher_id, #This is defined in our DB config for the repository
        } );

        #N.B. In theory, it would be good if items could be processed.
        #However, duplicate items would appear for updated records if
        #we turn this on, since there is no way of matching items at the
        #moment.
        SetImportBatchItemAction($batch_id, 'ignore');

        foreach my $record_to_import (@records_to_import){
            #Check for transformed_metadata. We do not want to try to
            #add a record to a batch without transformed metadata
            #(i.e. if the record is deleted).
            my $import_record_id = undef;
            if ($record_to_import->get_transformed_metadata){
                my $encoding = "utf8";
                my $format = "MARC21";
                my $marc_record = MARC::Record->new_from_xml( $record_to_import->get_transformed_metadata->toString, $encoding, $format );
                $rec_num++;
                $import_record_id = AddBiblioToBatch($batch_id, $rec_num, $marc_record, $encoding, int(rand(99999)), 0);
                _update_batch_record_counts($batch_id);
            }
            my $save_result_flag = $record_to_import->AddHistoricalRecord($self->get_metadataPrefix, $self->get_repository_id, $import_record_id);
            #if ($save_result_flag != 1){
                #If there was an error saving to the database, we ignore this record
                #FIXME: Add debug and print errors to log
                #next;
            #}
        }

        my $matcher = C4::Matcher->fetch($matcher_id);
        if ($matcher){
            $num_with_matches = BatchFindDuplicates($batch_id, $matcher);
        }

        SetImportBatchStatus($batch_id, 'staged');

        #When active, this automatically imports records into the catalogue without having to use the "Manage staged MARC records" interface,
        #although the import can still be reverted using that interface.
        if ($self->get_import_mode && $self->get_import_mode eq "automatic"){
            my $framework = '';
            my ($num_added, $num_updated, $num_items_added, $num_items_errored, $num_ignored) =
                BatchCommitRecords($batch_id, $framework);
            return ($batch_id, $num_with_matches, $num_added, $num_updated, $num_items_added, $num_items_errored, $num_ignored);
        }
        return ($batch_id, $num_with_matches);
    }

}

#Copied from ImportBatch.pm for convenience...for now
sub _update_batch_record_counts {
    my ($batch_id) = @_;

    my $dbh = C4::Context->dbh;
    my $sth = $dbh->prepare_cached("UPDATE import_batches SET
        -- num_biblios = (
        num_records = (
            SELECT COUNT(*)
            FROM import_records
            WHERE import_batch_id = import_batches.import_batch_id
            AND record_type = 'biblio'),
        num_items = (
            SELECT COUNT(*)
            FROM import_records
            JOIN import_items USING (import_record_id)
            WHERE import_batch_id = import_batches.import_batch_id
            AND record_type = 'biblio')
        WHERE import_batch_id = ?");
    $sth->bind_param(1, $batch_id);
    $sth->execute();
    $sth->finish();
}

package C4::OAI::Harvester::Record;

=head1 NAME

C4::OAI::Harvester::Record - an internal class for handling records retrieved via the OAI-PMH protocol

=head1 DESCRIPTION

C4::OAI::Harvester::Record - Class to handle the management of records retrieved via OAI-PMH

More information on OAI-PMH can be found L<here|http://www.openarchives.org/OAI/openarchivesprotocol.html>

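Record objects are normally created by
C4::OAI::Harvester::_parse_records_into_useful_format and driven by
ImportRecordsIntoKoha. A rough sketch of that lifecycle (the arguments are the
ones the harvester passes; the variables are illustrative):

  my $status = $record->AddIdentifierStatusAndLinkedBiblionumbers($repository_id);
  if ($status && $status ne 'ignore') {
      $record->TransformMetadata($metadataPrefix, $XSLT_path);
      $record->AddHistoricalRecord($metadataPrefix, $repository_id, $import_record_id);
  }
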
=head1 AUTHOR

David Cook <dcook AT prosentient DOT com DOT au>

=cut

use Modern::Perl;
use C4::Context;
use C4::Templates;
use C4::XSLT qw/GetURI/;

use XML::LibXML;
use XML::LibXSLT;

use base qw(Class::Accessor);

sub new {
    my ($proto, $fields) = @_;
    my ($class) = ref $proto || $proto;

    $fields = {} unless defined $fields;

    $fields->{transformed_metadata} = undef;
    $fields->{linked_biblionumbers} = '';

    bless {%$fields}, $class;
}

__PACKAGE__->follow_best_practice; #Use get_ and set_ prefixes for accessors
__PACKAGE__->mk_accessors(qw(identifier datestamp metadata status deleted header about transformed_metadata linked_biblionumbers));

=head2 TransformMetadata

A C4::OAI::Harvester::Record method for transforming incoming metadata
into a format that Koha can use (e.g. MARC21 MARCXML).

  my $transform_result_flag = $record->TransformMetadata(
      $self->get_metadataPrefix,
      $self->get_XSLT_path);

$self->get_metadataPrefix is the metadataPrefix for the record,
which comes from the database config for this OAI-PMH repository. It
is a required parameter.

$self->get_XSLT_path is the filepath to the XSLT to be used in
the metadata transformation. This can be a local filepath or a remote
URL (like in XSLT.pm). Technically, this is an optional parameter if
the metadataPrefix is "marcxml", "dc", or "oai_dc". However, it is
recommended to always specify an XSLT path. You will almost certainly
have problems down the road if you do not use one, especially
for marcxml (as the biblionumber for any existing imported records
will not be added, which means you'll have great difficulty managing
updates via OAI-PMH).

The biblionumber used in the XSLT is not passed in as a parameter; it
is taken from the record's linked_biblionumbers property (set by
AddIdentifierStatusAndLinkedBiblionumbers) when exactly one
biblionumber is linked to this record's unique OAI-PMH identifier and
the record's status is "replace". It is added during the transform so
that Koha can match the incoming and existing records for
merging/replacing.

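A short sketch of calling the transform and picking up the result (the XSLT
path is hypothetical):

  my $flag = $record->TransformMetadata('marcxml', '/path/to/OAIMARC2MARC21slim.xsl');
  if ($flag == 1) {
      my $marcxml = $record->get_transformed_metadata->toString;
  }
  # $flag is -1 if the transform could not be performed (e.g. no metadata,
  # no usable XSLT, or an unrecognised metadataPrefix with no default transform).
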
=cut

sub TransformMetadata {
    my ( $self, $metadataPrefix, $XSLT_path ) = @_;
    my $source = $self->{metadata}; #LibXML object retrieved via HTTP::OAI::Harvester

    my $biblionumber;
    my $no_XSLT_path;
    if (!$XSLT_path){
        $no_XSLT_path = 1;
    }

    #If we have a single linked_biblionumber and our status is replace,
    #then we initialize $biblionumber with that biblionumber, so that
    #we can create a 999$c or 999$d for matching during the import.
    my $linked_biblionumbers = $self->get_linked_biblionumbers;
    if (ref $linked_biblionumbers eq 'ARRAY' && $self->get_status eq 'replace'){
        if (scalar @$linked_biblionumbers == 1){
            $biblionumber = $linked_biblionumbers->[0];
        }
    }

    if ($metadataPrefix && $source){
        my $xslt = XML::LibXSLT->new();
        my $style_doc;

        #$parser based on equivalent from XSLT.pm
        my $parser = XML::LibXML->new();
        # don't die when you find &, >, etc
        $parser->recover_silently(0);

        if ($XSLT_path){
            #This conditional for handling secure remote XSLTs copied from XSLT.pm
            if ( $XSLT_path =~ /^https?:\/\// ) {
                my $xsltstring = GetURI($XSLT_path);
                if ($xsltstring){
                    $style_doc = $parser->parse_string($xsltstring);
                }
                else{
                    #If a string isn't retrieved using GetURI, we switch to our default transforms
                    $no_XSLT_path = 1;
                }
            } else {
                if ( -e $XSLT_path){
                    $style_doc = $parser->parse_file($XSLT_path);
                }
                else{
                    #If the file doesn't actually exist, we switch to our default transforms
                    $no_XSLT_path = 1;
                }
            }
        }

        if ($no_XSLT_path){
            if ($metadataPrefix eq 'marcxml'){
                $self->set_transformed_metadata($source);
                return 1;
            }
            elsif ($metadataPrefix eq 'dc' || $metadataPrefix eq 'oai_dc'){
                my $xsl = C4::Context->config('intrahtdocs') .
                    '/' . C4::Context->preference("template") .
                    '/' . C4::Templates::_current_language() .
                    '/xslt/DC2MARC21slim.xsl';
                    #'/xslt/' .
                    #C4::Context->preference('marcflavour') .
                    #"slim2intranetDetail.xsl";
                $style_doc = $parser->parse_file($xsl);
            }
            else{
                #FIXME: Use more robust error handling than this
                return -1;
            }
        }

        my $stylesheet = $xslt->parse_stylesheet($style_doc);
        #FIXME: Add error handling here...

        my %xslt_params;
        $xslt_params{'identifier'} = $self->{identifier};
        $xslt_params{'biblionumber'} = $biblionumber;

        #Pass OAI-PMH identifier and matched biblionumber (if there is one) to the XSLT for unique identification/provenance
        my $results = $stylesheet->transform($source, XML::LibXSLT::xpath_to_string(%xslt_params));

        if ($results){
            $self->set_transformed_metadata($results);
            return 1;
        }
        else{
            return -1;
        }

        #FIXME: Add some better error checking here...throw an exception if a record fails?

    }
    else{
        return -1;
    }
}

639 |
=head2 AddHistoricalRecord |
640 |
|
641 |
A C4::OAI::Harvester::Record method that saves an entry into the |
642 |
database for historical and importing purposes. |
643 |
|
644 |
my $save_result_flag = $record_to_import->AddHistoricalRecord($self->get_metadataPrefix,$self->get_repository_id, $import_record_id); |
645 |
|
646 |
$self->get_metadataPrefix is the record's metadataPrefix (i.e. metadata |
647 |
format), as defined in the database config, since records do not |
648 |
self-identify their metadataPrefix. |
649 |
|
650 |
$self->get_repository_id is the local DB primary key for the record's |
651 |
repository. |
652 |
|
653 |
$import_record_id is the key in the import_records and import_biblios |
654 |
tables which show that the record has been staged and if it has been |
655 |
imported. This is ESSENTIAL for performing matching between existing |
656 |
and incoming records. |
657 |
|
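The return value is worth checking: per the code below it is 1 on success, -1
when the required arguments are missing, and an error string if the INSERT
fails. A sketch, using the variables shown above:

  my $save_result_flag = $record_to_import->AddHistoricalRecord($metadataPrefix, $repository_id, $import_record_id);
  warn "oai_harvest insert failed: $save_result_flag" unless $save_result_flag && $save_result_flag eq '1';
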
=cut

sub AddHistoricalRecord {
    my ( $self, $metadataPrefix, $repository_id, $import_record_id ) = @_;
    #FIXME: Error/variable checking should be better in this sub...

    #Only check for these two, since deleted records won't have an $import_record_id
    if ($metadataPrefix && $repository_id){
        my $identifier = $self->get_identifier;
        my $datestamp = $self->get_datestamp;
        my $metadata = $self->get_metadata ? $self->get_metadata->toString : undef;
        my $header = $self->get_header ? $self->get_header->toString : undef;
        my $status = $self->get_status;
        my $string_of_linked_biblionumbers = undef;

        my $linked_biblionumbers = $self->get_linked_biblionumbers;
        if (ref $linked_biblionumbers eq 'ARRAY'){
            $string_of_linked_biblionumbers = join(" | ",@$linked_biblionumbers);
        }

        my $dbh = C4::Context->dbh;
        my $sql = "INSERT INTO oai_harvest (identifier, datestamp, metadataPrefix, import_record_id, metadata, header, repository_id, status, linked_biblionumbers) VALUES (?,?,?,?,?,?,?,?,?)";
        my $sth = $dbh->prepare($sql);
        $sth->execute($identifier, $datestamp, $metadataPrefix, $import_record_id, $metadata, $header, $repository_id, $status, $string_of_linked_biblionumbers);
        if ($sth->err){
            return "ERROR! return code: " . $sth->err . " error msg: " . $sth->errstr . "\n";
        }
        else{
            return 1;
        }
    }
    else{
        return -1;
    }
}

=head2 AddIdentifierStatusAndLinkedBiblionumbers

A C4::OAI::Harvester::Record method that compares the status of
the record with any matching identifiers in the database. It sets
and returns a status saying "ignore", "deleted", "create",
"replace", or "ambiguous". It also checks if the incoming record is
linked with any existing biblionumbers. If it is, it sets the
linked_biblionumbers property to any linkages it finds.

"Ignore" means that this identifier already exists with the same
datestamp, or the incoming record has a deleted status and isn't
linked to any existing bib records in Koha.

"Deleted" means that the incoming record has a status of "deleted"
and contains no metadata, but shares an identifier with an imported
record in the Koha database. We record this status as evidence
that there has been a deletion in the remote OAI-PMH repository.

"Create" means that the identifier hasn't been logged before, or
it has been logged and its record staged, but it has not been
imported, so we are free to create a new HistoricalRecord and
stage a new bib record.

"Replace" means that this identifier already exists, but the
datestamp is different, so we assume that this is an updated
version of the record. We check to see if there is a biblionumber
linked to this identifier (using import_record_id). If there is
a biblionumber, we store it so that we can use it for matching
during import.

However, if there is more than one biblionumber, we're in an
"Ambiguous" situation where we cannot reliably determine which
bib record we should be replacing. In this case, we store all
the bib numbers linked to this identifier, and treat it as a
new record.

Ideally, we signal staff to perform manual merging
or deleting to remedy this one-identifier-to-many-biblionumbers
scenario.

In theory, the "ambiguous" situation should never occur. However,
it's better to be safe than sorry. We don't want to accidentally
overwrite the wrong record during a "replace" update.

737 |
|
738 |
=cut |
739 |
|
740 |
sub AddIdentifierStatusAndLinkedBiblionumbers { |
741 |
my ( $self, $repository_id ) = @_; |
742 |
my $dbh = C4::Context->dbh; |
743 |
my $identifier_status; |
744 |
my @linked_biblionumbers; |
745 |
|
746 |
my $check_for_id_sql = " |
747 |
SELECT count(*) as count |
748 |
FROM oai_harvest |
749 |
WHERE identifier = ? and repository_id = ?"; |
750 |
my $check_for_id_sth = $dbh->prepare($check_for_id_sql); |
751 |
$check_for_id_sth->execute($self->get_identifier, $repository_id); |
752 |
my $check_for_id_row = $check_for_id_sth->fetchrow_hashref; |
753 |
if ($check_for_id_row->{count} == 0){ |
754 |
#OAI-PMH Unique Identifier doesn't exist in database == CREATE |
755 |
|
756 |
$identifier_status = "create"; |
757 |
} |
758 |
else{ |
759 |
#OAI-PMH Unique Identifier does exist in database == IGNORE || REPLACE |
760 |
#FIXME: System preference to govern whether you match on identifier+datestamp || identifier+datestamp+metadataPrefix? |
761 |
my $check_for_id_and_datestamp_sql = "SELECT count(*) as count FROM oai_harvest WHERE identifier = ? and datestamp = ? and repository_id = ?"; |
762 |
my $check_for_id_and_datestamp_sth = $dbh->prepare($check_for_id_and_datestamp_sql); |
763 |
$check_for_id_and_datestamp_sth->execute($self->get_identifier, $self->get_datestamp, $repository_id); |
764 |
my $check_for_id_and_datestamp_row = $check_for_id_and_datestamp_sth->fetchrow_hashref; |
765 |
|
766 |
if ($check_for_id_and_datestamp_row->{count} > 0){ |
767 |
#OAI-PMH Unique Identifier and Datestamp combo exist in database == IGNORE |
768 |
#FIXME: What happens if the metadataPrefix is different? Perhaps do a REPLACE in this case? It would need to be tied to a system preference... |
769 |
|
770 |
$identifier_status = "ignore"; |
771 |
} |
772 |
else{ |
773 |
#OAI-PMH Unique Identifier and Datestamp combo don't exist in database == REPLACE |
774 |
#i.e. The identifier exists in the database, but this is an updated datestamp |
775 |
|
776 |
|
777 |
my $check_for_biblionumber_sql = "SELECT * |
778 |
FROM oai_harvest |
779 |
JOIN import_biblios using (import_record_id) |
780 |
WHERE identifier = ? and repository_id = ?"; |
781 |
my $check_for_biblionumber_sth = $dbh->prepare($check_for_biblionumber_sql); |
782 |
$check_for_biblionumber_sth->execute($self->get_identifier, $repository_id); |
783 |
while ( my $check_for_biblionumber_row = $check_for_biblionumber_sth->fetchrow_hashref ) { |
784 |
if ($check_for_biblionumber_row->{matched_biblionumber}){ |
785 |
if (!grep(/^$check_for_biblionumber_row->{matched_biblionumber}$/, @linked_biblionumbers)){ |
786 |
push(@linked_biblionumbers,$check_for_biblionumber_row->{matched_biblionumber}); |
787 |
} |
788 |
} |
789 |
} |
790 |
if (scalar @linked_biblionumbers == 1){ |
791 |
#If there is only one matching bib number for this record, we'll add it to our incoming |
792 |
#record for import matching |
793 |
|
794 |
$identifier_status = "replace"; |
795 |
|
796 |
} |
797 |
elsif (scalar @linked_biblionumbers == 0){ |
798 |
#In this case, we've staged this OAI-PMH record before, but haven't imported it. |
799 |
#Treat this as a CREATE |
800 |
|
801 |
$identifier_status = "create"; |
802 |
} |
803 |
elsif (scalar @linked_biblionumbers > 1){ |
804 |
#EXCEPTIONAL CASE - This means that we have imported this record as two different bibs before |
805 |
|
806 |
#As per wizzyrea's suggestion, email the administrator/librarian to tell them about this exceptional case |
807 |
#We can give them the bib numbers, the OAI-PMH identifier, and other information so they can manually |
808 |
#merge these bibs together... |
809 |
|
810 |
$identifier_status = "ambiguous"; |
811 |
} |
812 |
} |
813 |
} |
814 |
|
815 |
if ($self->get_deleted){ |
816 |
#If the incoming record has a deleted status, we'll want to |
817 |
#ignore it during import, since deleted records do not |
818 |
#have any metadata attached. |
819 |
$identifier_status = "ignore"; |
820 |
} |
821 |
|
822 |
if (scalar @linked_biblionumbers > 0){ |
823 |
$self->set_linked_biblionumbers(\@linked_biblionumbers); |
824 |
|
825 |
#If the incoming record has a deleted status AND it is linked to |
826 |
#existing bib records that we've imported, we will want to keep |
827 |
#evidence of this deleted status, so that we can alert staff to |
828 |
#delete these records. |
829 |
|
830 |
#We will commit an AddHistoricalRecord entry, but we won't import |
831 |
#this record into Koha (since there won't be any metadata to |
832 |
#import anyway) |
833 |
|
834 |
#FIXME: Email the librarian/administrator mentioning that the record associated with XXX bibs have been deleted in the OAI-PMH repo? |
835 |
#FIXME: Suggest that they change the LEADER position 05 to "d" for deleted? How does Koha handle records with a LDR05 of "d"? |
836 |
|
837 |
if ($self->get_deleted){ |
838 |
$identifier_status = "deleted"; |
839 |
} |
840 |
} |
841 |
|
842 |
if ($identifier_status){ |
843 |
$self->set_status($identifier_status); |
844 |
} |
845 |
|
846 |
return ($identifier_status); |
847 |
} |
848 |
|
849 |
1; |
850 |
__END__ |