From f1251f3cb270346262abdf4790bf95edebf88789 Mon Sep 17 00:00:00 2001
From: David Gustafsson
Date: Mon, 9 Apr 2018 19:28:41 +0200
Subject: [PATCH] Bug 20551: Add option for including deleted records in export_records.pl

Add option "--include_deleted" to include deleted biblios in
export_records.pl as marc records with record status (in leader) set to
"d", and "--deleted_only" to export only deleted biblios.

How to test:

1) Run tests in t/db_dependent/Exporter/Record.t
2) All tests should pass
3) Delete a biblio record in Koha
4) Run the export script as: `export_records.pl --date= --include_deleted`
5) Open the generated koha.mrc and verify that it contains the recently
   deleted record, and that the record leader has record status "d".
6) Perform the same check with:
   `export_records.pl --date= --include_deleted --format=xml`
7) Run: `export_records.pl --date= --deleted_only --format=xml` and
   verify that only the deleted biblio was included in the export.

Sponsored-by: Gothenburg University Library
Signed-off-by: Frank Hansen
Signed-off-by: Andrew Fuerste Henry
---
 Koha/Exporter/Record.pm          | 130 ++++++++++++++++++++++++-------
 misc/export_records.pl           |  88 ++++++++++++++-------
 t/db_dependent/Exporter/Record.t |  45 ++++++++---
 3 files changed, 196 insertions(+), 67 deletions(-)

diff --git a/Koha/Exporter/Record.pm b/Koha/Exporter/Record.pm
index 4d818e1614..86127e2e3d 100644
--- a/Koha/Exporter/Record.pm
+++ b/Koha/Exporter/Record.pm
@@ -6,13 +6,18 @@ use MARC::File::USMARC;
 
 use C4::AuthoritiesMarc;
 use C4::Biblio qw( GetMarcFromKohaField );
+use C4::Charset;
 use C4::Record;
 use Koha::Biblios;
 use Koha::CsvProfiles;
+use Koha::Database;
 use Koha::Logger;
 use Koha::RecordProcessor;
 use List::Util qw( all any );
 
+use MARC::Record;
+use MARC::File::XML;
+
 sub _get_record_for_export {
     my ($params) = @_;
     my $record_type = $params->{record_type};
@@ -105,6 +110,37 @@ sub _get_record_for_export {
     return $record;
 }
 
+sub _get_deleted_biblio_for_export {
+    my ($params) = @_;
+    my $biblionumber = $params->{biblionumber};
+
+    # Creating schema is expensive, allow caller to
+    # pass it so don't have to recreate for each call
+    my $resultset = $params->{resultset} || Koha::Database->new()->schema()->resultset('DeletedbiblioMetadata');
+    my $marc_flavour = C4::Context->preference('marcflavour');
+    my $biblio_metadata = $resultset->find(
+        {
+            'biblionumber' => $biblionumber,
+            'format'       => 'marcxml',
+            'marcflavour'  => $marc_flavour
+        }
+    );
+    my $marc_xml = $biblio_metadata->metadata;
+    $marc_xml = StripNonXmlChars($marc_xml);
+
+    my $record = eval { MARC::Record::new_from_xml( $marc_xml, 'UTF-8', $marc_flavour ) };
+    if ( !$record ) {
+        Koha::Logger->get->warn("Failed to load MARCXML for deleted biblio with biblionumber \"$biblionumber\": $@");
+        return;
+    }
+
+    # Set deleted flag (record status, position 05)
+    my $leader = $record->leader;
+    substr $leader, 5, 1, 'd';
+    $record->leader($leader);
+    return $record;
+}
+
 sub _get_authority_for_export {
     my ($params) = @_;
     my $authid = $params->{authid} || return;
@@ -124,7 +160,10 @@ sub _get_biblio_for_export {
 
     my $biblio = Koha::Biblios->find($biblionumber);
     my $record = eval { $biblio->metadata->record };
-    return if $@ or not defined $record;
+    if ( !$record ) {
+        Koha::Logger->get->warn("Failed to load MARCXML for biblio with biblionumber \"$biblionumber\": $@");
+        return;
+    }
 
     if ($embed_see_from_headings) {
         my $record_processor = Koha::RecordProcessor->new( { filters => 'EmbedSeeFromHeadings' } );
@@ -159,7 +198,8 @@ sub export {
     my ($params) = @_;
 
     my $record_type = $params->{record_type};
-    my $record_ids = $params->{record_ids} || [];
+    my $record_ids         = $params->{record_ids} || [];
+    my $deleted_record_ids = $params->{deleted_record_ids} || [];
     my $format = $params->{format};
     my $itemnumbers = $params->{itemnumbers} || [];    # Does not make sense with record_type eq auths
     my $export_items = $params->{export_items};
@@ -171,7 +211,7 @@ sub export {
         Koha::Logger->get->warn("No record_type given.");
         return;
     }
-    return unless @$record_ids;
+    return unless ( @{$record_ids} || @{$deleted_record_ids} && $format ne 'csv' );
 
     my $fh;
     if ($output_filepath) {
@@ -182,40 +222,72 @@ sub export {
         binmode STDOUT, ':encoding(UTF-8)' unless $format eq 'csv';
     }
 
-    if ( $format eq 'iso2709' ) {
-        for my $record_id (@$record_ids) {
-            my $record = _get_record_for_export( { %$params, record_id => $record_id } );
-            next unless $record;
-            my $errorcount_on_decode = eval { scalar( MARC::File::USMARC->decode( $record->as_usmarc )->warnings() ) };
-            if ( $errorcount_on_decode or $@ ) {
-                my $msg = "Record $record_id could not be exported. " . ( $@ // '' );
-                chomp $msg;
-                Koha::Logger->get->info($msg);
-                next;
-            }
-            print $record->as_usmarc();
+    if ( $format eq 'xml' || $format eq 'iso2709' ) {
+        my @records;
+        @records = map {
+            my $record = _get_record_for_export( { %{$params}, record_id => $_ } );
+            $record ? $record : ();
+        } @{$record_ids};
+
+        my @deleted_records;
+        if ( @{$deleted_record_ids} ) {
+            my $resultset = Koha::Database->new()->schema()->resultset('DeletedbiblioMetadata');
+            @deleted_records = map {
+                my $record = _get_deleted_biblio_for_export(
+                    {
+                        biblionumber => $_,
+                        resultset    => $resultset,
+                    }
+                );
+                $record ? $record : ();
+            } @{$deleted_record_ids};
         }
-    } elsif ( $format eq 'xml' ) {
-        my $marcflavour = C4::Context->preference("marcflavour");
-        MARC::File::XML->default_record_format(
-            ( $marcflavour eq 'UNIMARC' && $record_type eq 'auths' ) ? 'UNIMARCAUTH' : $marcflavour );
-
-        print MARC::File::XML::header();
-        print "\n";
-        for my $record_id (@$record_ids) {
-            my $record = _get_record_for_export( { %$params, record_id => $record_id } );
-            next unless $record;
-            print MARC::File::XML::record($record);
+        if ( $format eq 'iso2709' ) {
+            my $encoding_validator = sub {
+                my ($record_type) = @_;
+                return sub {
+                    my ($record) = @_;
+                    my $errorcount_on_decode =
+                        eval { scalar( MARC::File::USMARC->decode( $record->as_usmarc )->warnings() ) };
+                    if ( $errorcount_on_decode || $@ ) {
+                        my ( $id_tag, $id_subfield ) = GetMarcFromKohaField( 'biblio.biblionumber', '' );
+                        my $record_id = $record->subfield( $id_tag, $id_subfield );
+                        my $msg = "$record_type $record_id could not be USMARC decoded/encoded. " . ( $@ // '' );
+                        chomp $msg;
+                        Koha::Logger->get->warn($msg);
+                        return 0;
+                    }
+                    return 1;
+                }
+            };
+            my $validator = $encoding_validator->('Record');
+            for my $record ( grep { $validator->($_) } @records ) {
+                print $record->as_usmarc();
+            }
+            if (@deleted_records) {
+                $validator = $encoding_validator->('Deleted record');
+                for my $deleted_record ( grep { $validator->($_) } @deleted_records ) {
+                    print $deleted_record->as_usmarc();
+                }
+            }
+        } elsif ( $format eq 'xml' ) {
+            my $marcflavour = C4::Context->preference("marcflavour");
+            MARC::File::XML->default_record_format(
+                ( $marcflavour eq 'UNIMARC' && $record_type eq 'auths' ) ? 'UNIMARCAUTH' : $marcflavour );
+            print MARC::File::XML::header();
+            print "\n";
+            for my $record ( @records, @deleted_records ) {
+                print MARC::File::XML::record($record);
+                print "\n";
+            }
+            print MARC::File::XML::footer();
             print "\n";
         }
-        print MARC::File::XML::footer();
-        print "\n";
     } elsif ( $format eq 'csv' ) {
         die 'There is no valid csv profile defined for this export'
            unless Koha::CsvProfiles->find($csv_profile_id);
         print marc2csv( $record_ids, $csv_profile_id, $itemnumbers );
     }
-
     close $fh if $output_filepath;
 }
 
diff --git a/misc/export_records.pl b/misc/export_records.pl
index 683cb1971b..eaf6dc3964 100755
--- a/misc/export_records.pl
+++ b/misc/export_records.pl
@@ -38,6 +38,8 @@ use Koha::Reports;
 my (
     $output_format,
     $timestamp,
+    $include_deleted,
+    $deleted_only,
    $dont_export_items,
     $csv_profile_id,
     $deleted_barcodes,
@@ -68,6 +70,8 @@ my (
 GetOptions(
     'format=s'          => \$output_format,
     'date=s'            => \$timestamp,
+    'include_deleted'   => \$include_deleted,
+    'deleted_only'      => \$deleted_only,
     'dont_export_items' => \$dont_export_items,
     'csv_profile_id=s'  => \$csv_profile_id,
     'deleted_barcodes'  => \$deleted_barcodes,
@@ -103,6 +107,18 @@ $record_type ||= 'bibs';
 # Retrocompatibility for the format parameter
 $output_format = 'iso2709' if $output_format eq 'marc';
 
+if ( $include_deleted || $deleted_only ) {
+    if ( $record_type ne 'bibs' ) {
+        pod2usage(q|Option "--include_deleted" or "--deleted_only" can only be used with "--record-type=bibs"|);
+    }
+    if ( !$timestamp ) {
+        pod2usage(q|Option "--include_deleted" or "--deleted_only" requires that "--date" is also set|);
+    }
+    if ( $output_format eq 'csv' ) {
+        pod2usage(q|Option "--include_deleted" or "--deleted_only" cannot be used with "--format=csv"|);
+    }
+}
+
 if ( $output_format eq 'csv' and $record_type eq 'auths' ) {
     pod2usage(q|CSV output is only available for biblio records|);
 }
@@ -164,6 +180,7 @@ my $dbh = C4::Context->dbh;
 open STDOUT, '>', $filename if $filename;
 
 my @record_ids;
+my @deleted_record_ids;
 
 $timestamp =
     ($timestamp) ? output_pref( { dt => dt_from_string($timestamp), dateformat => 'iso', dateonly => 0, } ) : '';
@@ -187,32 +204,45 @@ if ( $record_type eq 'bibs' ) {
             }
         }
     } elsif ($timestamp) {
-        if ( !$dont_export_items ) {
-            push @record_ids, $_->{biblionumber} for @{
-                $dbh->selectall_arrayref(
-                    q| (
-                    SELECT biblio_metadata.biblionumber
-                    FROM biblio_metadata
-                    LEFT JOIN items USING(biblionumber)
-                    WHERE biblio_metadata.timestamp >= ?
-                    OR items.timestamp >= ?
-                    ) UNION (
-                    SELECT biblio_metadata.biblionumber
-                    FROM biblio_metadata
-                    LEFT JOIN deleteditems USING(biblionumber)
-                    WHERE biblio_metadata.timestamp >= ?
-                    OR deleteditems.timestamp >= ?
-                    ) |, { Slice => {} }, ($timestamp) x 4
-                );
-            };
-        } else {
-            push @record_ids, $_->{biblionumber} for @{
+        unless ($deleted_only) {
+            if ( !$dont_export_items ) {
+                push @record_ids, $_->{biblionumber} for @{
+                    $dbh->selectall_arrayref(
+                        q| (
+                        SELECT biblio_metadata.biblionumber
+                        FROM biblio_metadata
+                        LEFT JOIN items USING(biblionumber)
+                        WHERE biblio_metadata.timestamp >= ?
+                        OR items.timestamp >= ?
+                        ) UNION (
+                        SELECT biblio_metadata.biblionumber
+                        FROM biblio_metadata
+                        LEFT JOIN deleteditems USING(biblionumber)
+                        WHERE biblio_metadata.timestamp >= ?
+                        OR deleteditems.timestamp >= ?
+                        ) |, { Slice => {} }, ($timestamp) x 4
+                    );
+                };
+            } else {
+                push @record_ids, $_->{biblionumber} for @{
+                    $dbh->selectall_arrayref(
+                        q| (
+                        SELECT biblio_metadata.biblionumber
+                        FROM biblio_metadata
+                        WHERE biblio_metadata.timestamp >= ?
+                        ) |, { Slice => {} }, $timestamp
+                    );
+                };
+            }
+        }
+        if ( $include_deleted || $deleted_only ) {
+            push @deleted_record_ids, $_->{biblionumber} for @{
                 $dbh->selectall_arrayref(
-                    q| (
-                    SELECT biblio_metadata.biblionumber
-                    FROM biblio_metadata
-                    WHERE biblio_metadata.timestamp >= ?
-                    ) |, { Slice => {} }, $timestamp
+                    q|
+                    SELECT `biblionumber`
+                    FROM `deletedbiblio`
+                    WHERE `timestamp` >= ?
+                    |, { Slice => {} }, $timestamp
                 );
             };
         }
@@ -332,6 +362,7 @@ if ($deleted_barcodes) {
             record_type       => $record_type,
             record_ids        => \@record_ids,
             record_conditions => @marc_conditions ? \@marc_conditions : undef,
+            deleted_record_ids => \@deleted_record_ids,
             format            => $output_format,
             csv_profile_id    => $csv_profile_id,
             export_items      => ( not $dont_export_items ),
@@ -348,7 +379,7 @@ export records - This script exports record (biblios or authorities)
 
 =head1 SYNOPSIS
 
-export_records.pl [-h|--help] [--format=format] [--date=datetime] [--record-type=TYPE] [--dont_export_items] [--deleted_barcodes] [--clean] [--id_list_file=PATH] --filename=outputfile
+export_records.pl [-h|--help] [--format=format] [--date=datetime] [--include_deleted] [--deleted_only] [--record-type=TYPE] [--dont_export_items] [--deleted_barcodes] [--clean] [--id_list_file=PATH] --filename=outputfile
 
 =head1 OPTIONS
 
@@ -369,6 +400,11 @@ Print a brief help message.
                        mm/dd/yyyy[ hh:mm:ss] for us) records exported are the ones that
                        have been modified since DATETIME.
 
+=item B<--include_deleted>
+
+ --include_deleted      If enabled, when using --date option, deleted records will be included in export as marc records
+                        with leader record status set to "d" (deleted).
+
 =item B<--record-type>
 
  --record-type=TYPE     TYPE is 'bibs' or 'auths'.
diff --git a/t/db_dependent/Exporter/Record.t b/t/db_dependent/Exporter/Record.t
index 0dbfb84016..bcab88e605 100755
--- a/t/db_dependent/Exporter/Record.t
+++ b/t/db_dependent/Exporter/Record.t
@@ -77,6 +77,15 @@ if ( $marcflavour eq 'UNIMARC' ) {
     $homebranch_subfield_code = 'a';
 }
 
+my $deleted_biblio = MARC::Record->new();
+$deleted_biblio->leader('00136nam a22000617a 4500');
+$deleted_biblio->append_fields(
+    MARC::Field->new( '100', ' ', ' ', a => 'Chopra, Deepak' ),
+    MARC::Field->new( '245', ' ', ' ', a => 'The seven spiritual laws of success' ),
+);
+my ($deleted_biblionumber) = AddBiblio( $deleted_biblio, '' );
+DelBiblio($deleted_biblionumber);
+
 my $bad_biblio = Koha::Biblio->new()->store();
 Koha::Biblio::Metadata->new(
     { biblionumber => $bad_biblio->id, format => 'marcxml', metadata => 'something wrong', schema => $marcflavour } )
@@ -161,15 +170,16 @@ EOF
 };
 
 subtest 'export xml' => sub {
-    plan tests => 3;
+    plan tests => 4;
     my $generated_xml_file = '/tmp/test_export.xml';
     warning_like {
         Koha::Exporter::Record::export(
             {
-                record_type     => 'bibs',
-                record_ids      => [ $biblionumber_1, $bad_biblionumber, $biblionumber_2 ],
-                format          => 'xml',
-                output_filepath => $generated_xml_file,
+                record_type        => 'bibs',
+                record_ids         => [ $biblionumber_1, $bad_biblionumber, $biblionumber_2 ],
+                deleted_record_ids => [$deleted_biblionumber],
+                format             => 'xml',
+                output_filepath    => $generated_xml_file,
             }
         );
     }
@@ -187,25 +197,31 @@ subtest 'export xml' => sub {
     while ( my $record = $records->next ) {
         push @records, $record;
     }
-    is( scalar(@records), 2, 'Export XML: 2 records should have been exported' );
+    is( scalar(@records), 3, 'Export XML: 3 records should have been exported' );
 
     my $second_record = $records[1];
     my $title = $second_record->subfield( $title_field_tag, 'a' );
     $title = Encode::encode( 'UTF-8', $title );
     is( $title, $biblio_2_title, 'Export XML: The title is correctly encoded' );
+
+    my $deleted_record = $records[2];
+
+    # Leader has the expected value (and record status "d")
+    is( $deleted_record->leader, '00136dam a22000617a 4500', 'Deleted record has the correct leader value' );
 };
 
 subtest 'export iso2709' => sub {
-    plan tests => 3;
+    plan tests => 4;
     my $generated_mrc_file = '/tmp/test_export.mrc';
     # Get all item infos
     warning_like {
         Koha::Exporter::Record::export(
             {
-                record_type     => 'bibs',
-                record_ids      => [ $biblionumber_1, $bad_biblionumber, $biblionumber_2 ],
-                format          => 'iso2709',
-                output_filepath => $generated_mrc_file,
+                record_type        => 'bibs',
+                record_ids         => [ $biblionumber_1, $bad_biblionumber, $biblionumber_2 ],
+                deleted_record_ids => [$deleted_biblionumber],
+                format             => 'iso2709',
+                output_filepath    => $generated_mrc_file,
             }
@@ -216,11 +232,16 @@ subtest 'export iso2709' => sub {
     while ( my $record = $records->next ) {
         push @records, $record;
     }
-    is( scalar(@records), 2, 'Export ISO2709: 2 records should have been exported' );
+    is( scalar(@records), 3, 'Export ISO2709: 3 records should have been exported' );
     my $second_record = $records[1];
     my $title = $second_record->subfield( $title_field_tag, 'a' );
     $title = Encode::encode( 'UTF-8', $title );
     is( $title, $biblio_2_title, 'Export ISO2709: The title is correctly encoded' );
+
+    my $deleted_record = $records[2];
+
+    # Leader has the expected value (and record status "d")
+    is( $deleted_record->leader, '00136dam a22000617a 4500', 'Deleted record has the correct leader value' );
 };
 
 subtest 'export without record_type' => sub {
-- 
2.39.5
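
Usage sketch (not part of the patch): a minimal example of calling the exporter directly with the new deleted_record_ids parameter, modelled on the t/db_dependent/Exporter/Record.t changes above. The biblionumbers and output path are illustrative placeholders, and a bootstrapped Koha environment (database access, marcflavour system preference) is assumed.

    use Modern::Perl;
    use Koha::Exporter::Record;

    # Biblionumbers 1 and 2 are assumed to be existing biblios; 3 is assumed to
    # have been deleted, so its MARCXML is read from deletedbiblio_metadata and
    # written out with leader position 05 set to "d" by the exporter.
    Koha::Exporter::Record::export(
        {
            record_type        => 'bibs',
            record_ids         => [ 1, 2 ],
            deleted_record_ids => [3],
            format             => 'xml',
            output_filepath    => '/tmp/export_with_deleted.xml',
        }
    );

The same call with format => 'iso2709' writes binary MARC instead; CSV output is not supported for deleted records, which the script enforces with the pod2usage checks added to misc/export_records.pl.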