From 69c7b1308083954d1f4e0102d2de410c7163c395 Mon Sep 17 00:00:00 2001
From: Matt Blenkinsop
Date: Wed, 8 May 2024 14:29:36 +0000
Subject: [PATCH] Bug 34788: (QA follow-up)

This patch fixes a few things:

1) The blue dialog box now clears when navigating away from the page
2) The background job now uses skip_record_index to avoid queuing indexing
   jobs for every new biblio and instead queues one job at the end (a sketch
   of this pattern follows the diff)
3) Large files that get chunked now successfully create linked biblios if
   requested
4) Title matching rules have been expanded to check the package ID so that
   we can have duplicate titles in different packages (a usage sketch
   follows the diff)
5) A link to the package is now included on the job report page

Signed-off-by: Nick Clemens
---
 Koha/BackgroundJob/ImportKBARTFile.pm       | 30 ++++++++---
 Koha/ERM/EHoldings/Title.pm                 |  4 +-
 Koha/REST/V1/ERM/EHoldings/Titles/Local.pm  | 14 ++----
 .../import_from_kbart_file.inc              |  1 +
 .../ERM/EHoldingsLocalTitlesKBARTImport.vue | 13 +++--
 .../Koha/BackgroundJob/ImportKBARTFile.t    | 50 +++++++++++--------
 6 files changed, 68 insertions(+), 44 deletions(-)

diff --git a/Koha/BackgroundJob/ImportKBARTFile.pm b/Koha/BackgroundJob/ImportKBARTFile.pm
index 398476e3f54..de9d9606bae 100644
--- a/Koha/BackgroundJob/ImportKBARTFile.pm
+++ b/Koha/BackgroundJob/ImportKBARTFile.pm
@@ -24,6 +24,7 @@ use POSIX qw( floor );

 use C4::Context;
 use Koha::ERM::EHoldings::Titles;
+use Koha::SearchEngine::Indexer;

 use base 'Koha::BackgroundJob';

@@ -94,10 +95,12 @@ sub process {
     $self->size( scalar( @{$rows} ) )->store;
     $total_rows = scalar( @{$rows} );

+    my @biblio_ids;
+
     foreach my $row ( @{$rows} ) {
         next if !$row;
         my $new_title   = create_title_hash_from_line_data( $row, $column_headers, $invalid_columns );
-        my $title_match = check_for_matching_title($new_title);
+        my $title_match = check_for_matching_title( $new_title, $package_id );

         if ($title_match) {
             $duplicate_titles++;
@@ -122,6 +125,7 @@ sub process {
         } else {
             my $imported_title = Koha::ERM::EHoldings::Title->new($formatted_title)
                 ->store( { create_linked_biblio => $create_linked_biblio } );
+            push( @biblio_ids, $imported_title->biblio_id ) if $create_linked_biblio;
             create_linked_resource(
                 {
                     title => $imported_title,
@@ -146,10 +150,16 @@ sub process {
         $self->step;
     }

+    if ( scalar(@biblio_ids) > 0 ) {
+        my $indexer = Koha::SearchEngine::Indexer->new( { index => $Koha::SearchEngine::BIBLIOS_INDEX } );
+        $indexer->index_records( \@biblio_ids, "specialUpdate", "biblioserver" );
+    }
+
     $report->{duplicates_found} = $duplicate_titles;
     $report->{titles_imported}  = $titles_imported;
     $report->{total_rows}       = $total_rows;
     $report->{failed_imports}   = $failed_imports;
+    $report->{package_id}       = $package_id;

     my $data = $self->decoded_data;
     $data->{messages} = \@messages;
@@ -266,7 +276,7 @@ sub create_title_hash_from_line_data {
     }

     # Remove any additional columns
-    foreach my $invalid_column ( @$invalid_columns ) {
+    foreach my $invalid_column (@$invalid_columns) {
         delete $new_title{$invalid_column};
     }

@@ -280,7 +290,7 @@ Checks whether this title already exists to avoid duplicates
 =cut

 sub check_for_matching_title {
-    my ($title) = @_;
+    my ( $title, $package_id ) = @_;

     my $match_parameters = {};
     $match_parameters->{print_identifier} = $title->{print_identifier} if $title->{print_identifier};
@@ -290,14 +300,20 @@ sub check_for_matching_title {
     $match_parameters->{external_id} = $title->{title_id} if $title->{title_id};

     # We should also check the date_first_issue_online for serial publications
-    $match_parameters->{date_first_issue_online} = $title->{date_first_issue_online} if $title->{date_first_issue_online};
+    $match_parameters->{date_first_issue_online} = $title->{date_first_issue_online}
+        if $title->{date_first_issue_online};

     # If no match parameters are provided in the file we should add the new title
     return 0 if !%$match_parameters;

-    my $title_match = Koha::ERM::EHoldings::Titles->search($match_parameters)->count;
+    my $matching_title_found;
+    my @title_matches = Koha::ERM::EHoldings::Titles->search($match_parameters)->as_list;
+    foreach my $title_match (@title_matches) {
+        my $resource = Koha::ERM::EHoldings::Resources->find( { title_id => $title_match->title_id } );
+        $matching_title_found = 1 if $resource->package_id == $package_id;
+    }

-    return $title_match;
+    return $matching_title_found;
 }

 =head3 create_linked_resource

@@ -437,7 +453,7 @@ sub is_file_too_large {

 =head3 rescue_EBSCO_files

-EBSCO have an incorrect spelling for "preceding_publication_title_id" in all of their KBART files ("preceeding" instead of "preceding").
+EBSCO have an incorrect spelling for "preceding_publication_title_id" in all of their KBART files (preceding is spelled with a double 'e').
 This means all of their KBART files fail to import using the current methodology.
 There is no simple way of finding out who the vendor is before importing
 so all KBART files from any vendor are going to have to be checked for this spelling and corrected.

diff --git a/Koha/ERM/EHoldings/Title.pm b/Koha/ERM/EHoldings/Title.pm
index e11c06abf5f..ea62ecb3602 100644
--- a/Koha/ERM/EHoldings/Title.pm
+++ b/Koha/ERM/EHoldings/Title.pm
@@ -55,7 +55,7 @@ sub store {
         my $record      = $biblio->metadata->record();
         my $title_field = $record->field($title_tag);
         $title_field->update( $title_subfield => $self->publication_title );
-        C4::Biblio::ModBiblio( $record, $self->biblio_id, '' );
+        C4::Biblio::ModBiblio( $record, $self->biblio_id, '', { skip_record_index => 1 } );
     } else {

         # If it's not linked, we create a simple biblio and save the biblio id to the 'title'
@@ -64,7 +64,7 @@ sub store {
                 'biblio.title' => $self->publication_title,
             }
         );
-        my ($biblio_id) = C4::Biblio::AddBiblio( $marc_record, '' );
+        my ($biblio_id) = C4::Biblio::AddBiblio( $marc_record, '', { skip_record_index => 1 } );
         $self->biblio_id($biblio_id);
     }
 }

diff --git a/Koha/REST/V1/ERM/EHoldings/Titles/Local.pm b/Koha/REST/V1/ERM/EHoldings/Titles/Local.pm
index 5e363bc05bf..f2cf4c33700 100644
--- a/Koha/REST/V1/ERM/EHoldings/Titles/Local.pm
+++ b/Koha/REST/V1/ERM/EHoldings/Titles/Local.pm
@@ -24,9 +24,9 @@ use Koha::BackgroundJob::CreateEHoldingsFromBiblios;
 use Koha::BackgroundJob::ImportKBARTFile;

 use Scalar::Util qw( blessed );
-use Try::Tiny qw( catch try );
+use Try::Tiny    qw( catch try );
 use MIME::Base64 qw( decode_base64 encode_base64 );
-use POSIX qw( floor );
+use POSIX        qw( floor );
 use Text::CSV_XS;

 =head1 API
@@ -294,7 +294,7 @@ sub import_from_kbart_file {
     # Check that the column headers in the file match the standardised KBART phase II columns
     # If not, return a warning
-    my $warnings = {};
+    my $warnings      = {};
     my @valid_headers = Koha::BackgroundJob::ImportKBARTFile::get_valid_headers();

     foreach my $header (@$column_headers) {
         if ( !grep { $_ eq $header } @valid_headers ) {
@@ -324,13 +324,7 @@ sub import_from_kbart_file {
     my @chunked_files;
     push @chunked_files, [ splice @$rows, 0, $max_number_of_rows ] while @$rows;
     foreach my $chunk (@chunked_files) {
-        my $params = {
-            column_headers => $column_headers,
-            rows           => $chunk,
-            package_id     => $package_id,
-            file_name      => $file->{filename}
-        };
-
+        $params->{rows} = $chunk;
         my $chunked_job_id = Koha::BackgroundJob::ImportKBARTFile->new->enqueue($params);
         push @job_ids, $chunked_job_id;
     }

diff --git a/koha-tmpl/intranet-tmpl/prog/en/includes/background_jobs/import_from_kbart_file.inc b/koha-tmpl/intranet-tmpl/prog/en/includes/background_jobs/import_from_kbart_file.inc
index 28d0dfb5f32..04032e0f432 100644
--- a/koha-tmpl/intranet-tmpl/prog/en/includes/background_jobs/import_from_kbart_file.inc
+++ b/koha-tmpl/intranet-tmpl/prog/en/includes/background_jobs/import_from_kbart_file.inc
@@ -28,6 +28,7 @@
                 <td>[% report.failed_imports | html %]</td>
+                <td><a href="/cgi-bin/koha/erm/eholdings/local/packages/[% report.package_id | uri %]">Click here to see the package</a></td>
             </tr>
         </table>
     [% ELSIF job.status == 'started' %]

     [% END %]

diff --git a/koha-tmpl/intranet-tmpl/prog/js/vue/components/ERM/EHoldingsLocalTitlesKBARTImport.vue b/koha-tmpl/intranet-tmpl/prog/js/vue/components/ERM/EHoldingsLocalTitlesKBARTImport.vue
index 6a75b2118bf..7cdfc0aef93 100644
--- a/koha-tmpl/intranet-tmpl/prog/js/vue/components/ERM/EHoldingsLocalTitlesKBARTImport.vue
+++ b/koha-tmpl/intranet-tmpl/prog/js/vue/components/ERM/EHoldingsLocalTitlesKBARTImport.vue
@@ -80,10 +80,15 @@
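For reference, below is a minimal sketch of the batched-indexing pattern from point 2. It is not part of the patch: the helper name and the $records arrayref of MARC::Record objects are hypothetical, while the AddBiblio and Koha::SearchEngine::Indexer calls are the same ones used in the diff above.

    use Modern::Perl;
    use C4::Biblio;
    use Koha::SearchEngine;
    use Koha::SearchEngine::Indexer;

    # Hypothetical helper: add a batch of biblios, then queue one indexing job
    sub add_biblios_with_single_index_job {
        my ($records) = @_;    # arrayref of MARC::Record objects

        my @biblio_ids;
        for my $marc_record (@$records) {

            # skip_record_index stops AddBiblio queueing an indexing job per biblio
            my ($biblio_id) = C4::Biblio::AddBiblio( $marc_record, '', { skip_record_index => 1 } );
            push @biblio_ids, $biblio_id;
        }

        # One indexing job for the whole batch instead of one per record
        if (@biblio_ids) {
            my $indexer = Koha::SearchEngine::Indexer->new( { index => $Koha::SearchEngine::BIBLIOS_INDEX } );
            $indexer->index_records( \@biblio_ids, "specialUpdate", "biblioserver" );
        }

        return \@biblio_ids;
    }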
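And a usage sketch for the expanded matching rule from point 4: check_for_matching_title() now takes the package id as a second argument, so an identical title in a different package is no longer reported as a duplicate. The sample field values and package id are made up for illustration.

    use Modern::Perl;
    use Koha::BackgroundJob::ImportKBARTFile;

    # Hypothetical row hash, as built by create_title_hash_from_line_data()
    my $new_title = {
        publication_title => 'Journal of Examples',
        online_identifier => '2049-3630',
        title_id          => 'JEX001',
    };
    my $package_id = 42;    # hypothetical target package

    # Truthy only when this title already exists in package 42; the same
    # title in a different package can now be imported without a clash.
    my $title_match = Koha::BackgroundJob::ImportKBARTFile::check_for_matching_title( $new_title, $package_id );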