From bd975df547020eb78d7ceaa0479cba6bb1fbaa9a Mon Sep 17 00:00:00 2001 From: Ere Maijala Date: Thu, 1 Feb 2018 13:26:23 +0200 Subject: [PATCH] Bug 19365 - link_bibs_to_authorities.pl doesn't work with Elasticsearch Fix several issues with the Elasticsearch code and optimize it to be usable. --- C4/Biblio.pm | 13 +++- C4/Heading.pm | 43 +++++++++++-- C4/Heading/MARC21.pm | 16 +++++ C4/Items.pm | 6 +- C4/Matcher.pm | 12 +++- Koha/SearchEngine/Elasticsearch/Indexer.pm | 30 ++++++--- Koha/SearchEngine/Elasticsearch/QueryBuilder.pm | 67 +++++++++++--------- Koha/SearchEngine/Elasticsearch/Search.pm | 83 ++++++++++++++----------- Koha/SearchEngine/Zebra/Search.pm | 4 +- misc/link_bibs_to_authorities.pl | 15 +++-- 10 files changed, 194 insertions(+), 95 deletions(-) diff --git a/C4/Biblio.pm b/C4/Biblio.pm index b9c4655..4085465 100644 --- a/C4/Biblio.pm +++ b/C4/Biblio.pm @@ -557,7 +557,10 @@ sub LinkBibHeadingsToAuthorities { '', '', "a" => "" . $field->subfield('a') ); map { $authfield->add_subfields( $_->[0] => $_->[1] ) - if ( $_->[0] =~ /[A-z]/ && $_->[0] ne "a" ) + if ( $_->[0] =~ /[A-z]/ && $_->[0] ne "a" + && C4::Heading::valid_bib_heading_subfield( + $authority_type->auth_tag_to_report, $_->[0], $frameworkcode ) + ); } $field->subfields(); $marcrecordauth->insert_fields_ordered($authfield); @@ -2752,7 +2755,10 @@ sub ModZebra { # TODO abstract to a standard API that'll work for whatever require Koha::SearchEngine::Elasticsearch::Indexer; - my $indexer = Koha::SearchEngine::Elasticsearch::Indexer->new( + # Use state to speed up repeated calls in batch processes and reuse the indexer. + # This also avoids creating a massive amount of ES connectors that would + # eventually run out of file descriptors. + state $indexer = Koha::SearchEngine::Elasticsearch::Indexer->new( { index => $server eq 'biblioserver' ? $Koha::SearchEngine::BIBLIOS_INDEX @@ -2774,6 +2780,7 @@ sub ModZebra { else { croak "ModZebra called with unknown operation: $op"; } + return; } my $dbh = C4::Context->dbh; @@ -2999,7 +3006,7 @@ sub _koha_modify_biblio { $sth->execute( $frameworkcode, $biblio->{'author'}, $biblio->{'title'}, $biblio->{'unititle'}, $biblio->{'notes'}, - $biblio->{'serial'}, $biblio->{'seriestitle'}, $biblio->{'copyrightdate'}, $biblio->{'abstract'}, $biblio->{'biblionumber'} + $biblio->{'serial'}, $biblio->{'seriestitle'}, int($biblio->{'copyrightdate'}), $biblio->{'abstract'}, $biblio->{'biblionumber'} ) if $biblio->{'biblionumber'}; if ( $dbh->errstr || !$biblio->{'biblionumber'} ) { diff --git a/C4/Heading.pm b/C4/Heading.pm index e323b5b..0e130a0 100644 --- a/C4/Heading.pm +++ b/C4/Heading.pm @@ -17,6 +17,8 @@ package C4::Heading; # You should have received a copy of the GNU General Public License # along with Koha; if not, see . +use Modern::Perl; + use strict; use warnings; use MARC::Record; @@ -50,7 +52,7 @@ headings found in bibliographic and authority records. my $heading = C4::Heading->new_from_bib_field($field, $frameworkcode, [, $marc_flavour]); -Given a C object containing a heading from a +Given a C object containing a heading from a bib record, create a C object. The optional second parameter is the MARC flavour (i.e., MARC21 @@ -141,7 +143,7 @@ sub search_form { my $authorities = $heading->authorities([$skipmetadata]); -Return a list of authority records for this +Return a list of authority records for this heading. If passed a true value for $skipmetadata, SearchAuthorities will return only authids. 
@@ -170,6 +172,23 @@ sub preferred_authorities { return $results; } +=head2 valid_bib_heading_subfield + + if (C4::Heading::valid_bib_heading_subfield('100', 'e', '')) ... + +=cut + +sub valid_bib_heading_subfield { + my $tag = shift; + my $subfield = shift; + my $frameworkcode = shift; + my $marcflavour = @_ ? shift : C4::Context->preference('marcflavour'); + + my $marc_handler = _marc_format_handler($marcflavour); + + return $marc_handler->valid_bib_heading_subfield( $tag, $subfield, $frameworkcode ); +} + =head1 INTERNAL METHODS =head2 _search @@ -200,12 +219,24 @@ sub _search { # push @operator, 'is'; # push @value, $self->{'thesaurus'}; # } - require C4::AuthoritiesMarc; - return C4::AuthoritiesMarc::SearchAuthorities( + + require Koha::SearchEngine::QueryBuilder; + require Koha::SearchEngine::Search; + + # Use state variables to avoid recreating the objects every time. + # With Elasticsearch this also avoids creating a massive amount of + # ES connectors that would eventually run out of file descriptors. + state $builder = Koha::SearchEngine::QueryBuilder->new( + { index => $Koha::SearchEngine::AUTHORITIES_INDEX } ); + state $searcher = Koha::SearchEngine::Search->new( + {index => $Koha::SearchEngine::AUTHORITIES_INDEX} ); + + my $search_query = $builder->build_authorities_query_compat( \@marclist, \@and_or, \@excluding, \@operator, - \@value, 0, 20, $self->{'auth_type'}, - 'AuthidAsc', $skipmetadata + \@value, $self->{'auth_type'}, + 'AuthidAsc' ); + return $searcher->search_auth_compat( $search_query, 0, 20, $skipmetadata ); } =head1 INTERNAL FUNCTIONS diff --git a/C4/Heading/MARC21.pm b/C4/Heading/MARC21.pm index 9a50cf6..15a1c90 100644 --- a/C4/Heading/MARC21.pm +++ b/C4/Heading/MARC21.pm @@ -161,6 +161,22 @@ sub valid_bib_heading_tag { } +=head2 valid_bib_heading_subfield + +=cut + +sub valid_bib_heading_subfield { + my $self = shift; + my $tag = shift; + my $subfield = shift; + my $frameworkcode = shift; + + if ( exists $bib_heading_fields->{$tag} ) { + return 1 if ($bib_heading_fields->{$tag}->{subfields} =~ /$subfield/); + } + return 0; +} + =head2 parse_heading =cut diff --git a/C4/Items.pm b/C4/Items.pm index a7154be..73c2a6c 100644 --- a/C4/Items.pm +++ b/C4/Items.pm @@ -18,6 +18,7 @@ package C4::Items; # You should have received a copy of the GNU General Public License # along with Koha; if not, see . +use Modern::Perl; use strict; #use warnings; FIXME - Bug 2505 @@ -2261,7 +2262,10 @@ sub GetAnalyticsCount { ### ZOOM search here my $query; $query= "hi=".$itemnumber; - my $searcher = Koha::SearchEngine::Search->new({index => $Koha::SearchEngine::BIBLIOS_INDEX}); + # Use state variables to avoid recreating the objects every time. + # With Elasticsearch this also avoids creating a massive amount of + # ES connectors that would eventually run out of file descriptors. + state $searcher = Koha::SearchEngine::Search->new({index => $Koha::SearchEngine::BIBLIOS_INDEX}); my ($err,$res,$result) = $searcher->simple_search_compat($query,0,10); return ($result); } diff --git a/C4/Matcher.pm b/C4/Matcher.pm index 7209511..dd38f89 100644 --- a/C4/Matcher.pm +++ b/C4/Matcher.pm @@ -664,7 +664,10 @@ sub get_matches { #NOTE: double-quote the values so you don't get a "Embedded truncation not supported" error when a term has a ? in it. } - my $searcher = Koha::SearchEngine::Search->new({index => $Koha::SearchEngine::BIBLIOS_INDEX}); + # Use state variables to avoid recreating the objects every time. 
+ # With Elasticsearch this also avoids creating a massive amount of + # ES connectors that would eventually run out of file descriptors. + state $searcher = Koha::SearchEngine::Search->new({index => $Koha::SearchEngine::BIBLIOS_INDEX}); ( $error, $searchresults, $total_hits ) = $searcher->simple_search_compat( $query, 0, $max_matches, undef, skip_normalize => 1 ); @@ -703,8 +706,11 @@ sub get_matches { push @operator, 'exact'; push @value, $key; } - my $builder = Koha::SearchEngine::QueryBuilder->new({index => $Koha::SearchEngine::AUTHORITIES_INDEX}); - my $searcher = Koha::SearchEngine::Search->new({index => $Koha::SearchEngine::AUTHORITIES_INDEX}); + # Use state variables to avoid recreating the objects every time. + # With Elasticsearch this also avoids creating a massive amount of + # ES connectors that would eventually run out of file descriptors. + state $builder = Koha::SearchEngine::QueryBuilder->new({index => $Koha::SearchEngine::AUTHORITIES_INDEX}); + state $searcher = Koha::SearchEngine::Search->new({index => $Koha::SearchEngine::AUTHORITIES_INDEX}); my $search_query = $builder->build_authorities_query_compat( \@marclist, \@and_or, \@excluding, \@operator, \@value, undef, 'AuthidAsc' diff --git a/Koha/SearchEngine/Elasticsearch/Indexer.pm b/Koha/SearchEngine/Elasticsearch/Indexer.pm index e5babee..8f4d4a4 100644 --- a/Koha/SearchEngine/Elasticsearch/Indexer.pm +++ b/Koha/SearchEngine/Elasticsearch/Indexer.pm @@ -58,7 +58,7 @@ If that's a problem, clone them first. =cut sub update_index { - my ($self, $biblionums, $records) = @_; + my ($self, $biblionums, $records, $commit) = @_; # TODO should have a separate path for dealing with a large number # of records at once where we use the bulk update functions in ES. @@ -80,7 +80,12 @@ sub update_index { #print Data::Dumper::Dumper( $from->to_array ); $self->store->bag->add_many($from); - $self->store->bag->commit; + if ( !defined $commit || $commit ) { + $self->store->bag->commit; + } else { + # TODO: nicer way to do this + $self->store->bag->_bulk->flush; + } return 1; } @@ -98,8 +103,8 @@ it to be updated by a regular index cron job in the future. =cut sub update_index_background { - my $self = shift; - $self->update_index(@_); + my ($self, $biblionums, $records) = @_; + $self->update_index($biblionums, $records, 0); } =head2 $indexer->delete_index($biblionums) @@ -181,7 +186,8 @@ sub _sanitise_records { # tears in rain... if ( $rec ) { $rec->delete_fields($rec->field('999')); - $rec->append_fields(MARC::Field->new('999','','','c' => $bibnum, 'd' => $bibnum)); + # Make sure biblionumber is a string. Elasticsearch would consider int and string different IDs. + $rec->append_fields(MARC::Field->new('999','','','c' => "" . $bibnum, 'd' => "" . 
$bibnum)); } } } @@ -189,11 +195,17 @@ sub _sanitise_records { sub _convert_marc_to_json { my $self = shift; my $records = shift; + + # Use state to speed up repeated calls in batch processes + state %fixers; + if ( !defined $fixers{$self->index} ) { + $fixers{$self->index} = Catmandu::Fix->new( fixes => $self->get_fixer_rules() ); + } + my $importer = - Catmandu::Importer::MARC->new( records => $records, id => '999c' ); - my $fixer = Catmandu::Fix->new( fixes => $self->get_fixer_rules() ); - $importer = $fixer->fix($importer); - return $importer; + Catmandu::Importer::MARC->new( records => $records, id => '999c' ); + + return $fixers{$self->index}->fix($importer); } 1; diff --git a/Koha/SearchEngine/Elasticsearch/QueryBuilder.pm b/Koha/SearchEngine/Elasticsearch/QueryBuilder.pm index 95cc86a..8d458e4 100644 --- a/Koha/SearchEngine/Elasticsearch/QueryBuilder.pm +++ b/Koha/SearchEngine/Elasticsearch/QueryBuilder.pm @@ -102,10 +102,8 @@ sub build_query { if $d && ( $d ne 'asc' && $d ne 'desc' ); $d = 'asc' unless $d; - # TODO account for fields that don't have a 'phrase' type - $f = $self->_sort_field($f); - push @{ $res->{sort} }, { "$f.phrase" => { order => $d } }; + push @{ $res->{sort} }, { $f => { order => $d } }; } } @@ -171,7 +169,7 @@ sub build_browse_query { } } }, - sort => [ { "$sort.phrase" => { order => "asc" } } ], + sort => [ { $sort => { order => "asc" } } ], }; } @@ -294,7 +292,7 @@ sub build_authorities_query { foreach my $s ( @{ $search->{searches} } ) { my ( $wh, $op, $val ) = @{$s}{qw(where operator value)}; $wh = '_all' if $wh eq ''; - if ( $op eq 'is' || $op eq '=' ) { + if ( $op eq 'is' || $op eq '=' || $op eq 'exact' ) { # look for something that matches completely # note, '=' is about numerical vals. May need special handling. @@ -302,12 +300,7 @@ sub build_authorities_query { # matches. Also, we lowercase our search because the ES # index lowercases its values, and term searches don't get the # search analyzer applied to them. - push @filter_parts, { term => { "$wh.phrase" => lc $val } }; - } - elsif ( $op eq 'exact' ) { - - # left and right truncation, otherwise an exact phrase - push @query_parts, { match_phrase => { $wh => $val } }; + push @query_parts, { match_phrase => { "$wh.phrase" => lc $val } }; } elsif ( $op eq 'start' ) { @@ -316,22 +309,25 @@ sub build_authorities_query { } else { # regular wordlist stuff - push @query_parts, { match => { $wh => $val } }; + push @query_parts, { match => { $wh => { query => $val, operator => 'AND' } } }; } } + # Add authtype if specified + if (defined $search->{authtypecode} && $search->{authtypecode}) { + push @query_parts, { match => { authtype => $search->{authtypecode} } }; + } + # Merge the query and filter parts appropriately # 'should' behaves like 'or', if we want 'and', use 'must' - my $query_part = { bool => { should => \@query_parts } }; - my $filter_part = { bool => { should => \@filter_parts } }; + my $query_part = { bool => { must => \@query_parts } }; + my $filter_part = { bool => { must => \@filter_parts } }; - # We need to add '.phrase' to all the sort headings otherwise it'll sort - # based on the tokenised form. my %s; if ( exists $search->{sort} ) { foreach my $k ( keys %{ $search->{sort} } ) { my $f = $self->_sort_field($k); - $s{"$f.phrase"} = $search->{sort}{$k}; + $s{$f} = $search->{sort}{$k}; } $search->{sort} = \%s; } @@ -383,9 +379,9 @@ Also ignored. =item operator -What form of search to do. 
Options are: is (phrase, no trunction, whole field -must match), = (number exact match), exact (phrase, but with left and right -truncation). If left blank, then word list, right truncted, anywhere is used. +What form of search to do. Options are: is (phrase, no truncation, whole field +must match), = (number exact match), exact (phrase, no truncation, whole field +must match). If left blank, then word list, right truncated, anywhere is used. =item value @@ -417,7 +413,8 @@ our $koha_to_index_name = { 'match-heading' => 'Match-heading', 'see-from' => 'Match-heading-see-from', thesaurus => 'Subject-heading-thesaurus', - all => '' + any => '', + all => '' }; sub build_authorities_query_compat { @@ -435,18 +432,22 @@ sub build_authorities_query_compat { unless exists $koha_to_index_name->{$m}; } for ( my $i = 0 ; $i < @$value ; $i++ ) { - push @searches, - { - where => $koha_to_index_name->{$marclist->[$i]}, - operator => $operator->[$i], - value => $value->[$i], - }; + if (defined $value->[$i]) { + my $escaped_value = $value->[$i]; + $escaped_value =~ s/(["()\\])/\\$1/g; + push @searches, + { + where => $koha_to_index_name->{$marclist->[$i]}, + operator => $operator->[$i], + value => $escaped_value, + }; + } } my %sort; my $sort_field = ( $orderby =~ /^Heading/ ) ? 'Heading' - : ( $orderby =~ /^Auth/ ) ? 'Local-Number' + : ( $orderby =~ /^Auth/ ) ? 'Local-number' : undef; if ($sort_field) { my $sort_order = ( $orderby =~ /Asc$/ ) ? 'asc' : 'desc'; @@ -778,8 +779,18 @@ the end. Maybe it'll be something else in the future, who knows? sub _sort_field { my ($self, $f) = @_; + + my $mappings = $self->get_elasticsearch_mappings(); + my $textField = defined $mappings->{data}{properties}{$f}{type} && $mappings->{data}{properties}{$f}{type} eq 'text'; if ($self->sort_fields()->{$f}) { $f .= '__sort'; + # We need to add '.phrase' to text fields, otherwise it'll sort + # based on the tokenised form. + $f .= '.phrase' if $textField; + } else { + # We need to add '.raw' to text fields without a sort field, + # otherwise it'll sort based on the tokenised form. + $f .= '.raw' if $textField; } return $f; } diff --git a/Koha/SearchEngine/Elasticsearch/Search.pm b/Koha/SearchEngine/Elasticsearch/Search.pm index 4d584f7..ff88185 100644 --- a/Koha/SearchEngine/Elasticsearch/Search.pm +++ b/Koha/SearchEngine/Elasticsearch/Search.pm @@ -176,7 +176,7 @@ sub search_compat { =head2 search_auth_compat my ( $results, $total ) = - $searcher->search_auth_compat( $query, $page, $count, %options ); + $searcher->search_auth_compat( $query, $offset, $count, $skipmetadata, %options ); This has a similar calling convention to L, however it returns its results in a form the same as L. @@ -184,19 +184,24 @@ results in a form the same as L. =cut sub search_auth_compat { - my $self = shift; + my ($self, $query, $offset, $count, $skipmetadata, %options) = @_; + + if ( !defined $offset or $offset <= 0 ) { + $offset = 1; + } + # Uh, authority search uses 1-based offset.. + $options{offset} = $offset - 1; - # TODO handle paging my $database = Koha::Database->new(); my $schema = $database->schema(); - my $res = $self->search(@_); + my $res = $self->search($query, undef, $count, %options); + my $bib_searcher = Koha::SearchEngine::Elasticsearch::Search->new({index => 'biblios'}); my @records; $res->each( sub { my %result; - my $record = $_[0]; - my $marc_json = $record->{record}; + my $record = $_[0]; # I wonder if these should be real values defined in the mapping # rather than hard-coded conversions. 
@@ -205,40 +210,42 @@ sub search_auth_compat { my $authid = $record->{ 'Local-number' }[0][0]; $result{authid} = $authid; - # TODO put all this info into the record at index time so we - # don't have to go and sort it all out now. - my $authtypecode = $record->{authtype}; - my $rs = $schema->resultset('AuthType') - ->search( { authtypecode => $authtypecode } ); - - # FIXME there's an assumption here that we will get a result. - # the original code also makes an assumption that some provided - # authtypecode may sometimes be used instead of the one stored - # with the record. It's not documented why this is the case, so - # it's not reproduced here yet. - my $authtype = $rs->single; - my $auth_tag_to_report = $authtype->auth_tag_to_report; - my $marc = $self->json2marc($marc_json); - my $mainentry = $marc->field($auth_tag_to_report); - my $reported_tag; - if ($mainentry) { - foreach ( $mainentry->subfields() ) { - $reported_tag .= '$' . $_->[0] . $_->[1]; + if (!defined $skipmetadata || !$skipmetadata) { + # TODO put all this info into the record at index time so we + # don't have to go and sort it all out now. + my $authtypecode = $record->{authtype}; + my $rs = $schema->resultset('AuthType') + ->search( { authtypecode => $authtypecode } ); + + # FIXME there's an assumption here that we will get a result. + # the original code also makes an assumption that some provided + # authtypecode may sometimes be used instead of the one stored + # with the record. It's not documented why this is the case, so + # it's not reproduced here yet. + my $authtype = $rs->single; + my $auth_tag_to_report = $authtype->auth_tag_to_report; + my $marc = $self->json2marc($record->{record}); + my $mainentry = $marc->field($auth_tag_to_report); + my $reported_tag; + if ($mainentry) { + foreach ( $mainentry->subfields() ) { + $reported_tag .= '$' . $_->[0] . 
$_->[1]; + } } + # Turn the resultset into a hash + my %authtype_cols; + foreach my $col ($authtype->result_source->columns) { + $authtype_cols{$col} = $authtype->get_column($col); + } + $result{authtype} = $authtype->authtypetext; + $result{reported_tag} = $reported_tag; + + # Reimplementing BuildSummary is out of scope because it'll be hard + $result{summary} = + C4::AuthoritiesMarc::BuildSummary( $marc, $result{authid}, + $authtypecode ); + $result{used} = $self->count_auth_use($bib_searcher, $authid); } - # Turn the resultset into a hash - my %authtype_cols; - foreach my $col ($authtype->result_source->columns) { - $authtype_cols{$col} = $authtype->get_column($col); - } - $result{authtype} = $authtype->authtypetext; - $result{reported_tag} = $reported_tag; - - # Reimplementing BuildSummary is out of scope because it'll be hard - $result{summary} = - C4::AuthoritiesMarc::BuildSummary( $marc, $result{authid}, - $authtypecode ); - $result{used} = $self->count_auth_use($bib_searcher, $authid); push @records, \%result; } ); diff --git a/Koha/SearchEngine/Zebra/Search.pm b/Koha/SearchEngine/Zebra/Search.pm index 2fb45da..9dfafaa 100644 --- a/Koha/SearchEngine/Zebra/Search.pm +++ b/Koha/SearchEngine/Zebra/Search.pm @@ -100,12 +100,12 @@ This passes the search query on to C4::AuthoritiesMarc::SearchAuthorities =cut sub search_auth_compat { - my ( $self, $q, $startfrom, $resperpage ) = @_; + my ( $self, $q, $startfrom, $resperpage, $skipmetadata ) = @_; my @params = ( @{$q}{ 'marclist', 'and_or', 'excluding', 'operator', 'value' }, $startfrom - 1, - $resperpage, @{$q}{ 'authtypecode', 'orderby' } + $resperpage, @{$q}{ 'authtypecode', 'orderby' }, $skipmetadata ); C4::AuthoritiesMarc::SearchAuthorities(@params); } diff --git a/misc/link_bibs_to_authorities.pl b/misc/link_bibs_to_authorities.pl index b139a76..2711d16 100755 --- a/misc/link_bibs_to_authorities.pl +++ b/misc/link_bibs_to_authorities.pl @@ -195,9 +195,10 @@ sub process_bib { return; } + my $frameworkcode = GetFrameworkCode($biblionumber); + my ( $headings_changed, $results ) = - LinkBibHeadingsToAuthorities( $linker, $bib, - GetFrameworkCode($biblionumber) ); + LinkBibHeadingsToAuthorities( $linker, $bib, $frameworkcode ); foreach my $key ( keys %{ $results->{'unlinked'} } ) { $unlinked_headings{$key} += $results->{'unlinked'}->{$key}; } @@ -211,11 +212,15 @@ sub process_bib { if ($headings_changed) { if ($verbose) { my $title = substr( $bib->title, 0, 20 ); - print -"Bib $biblionumber ($title): $headings_changed headings changed\n"; + printf( + "Bib %12d (%-20s): %3d headings changed\n", + $biblionumber, + $title, + $headings_changed + ); } if ( not $test_only ) { - ModBiblio( $bib, $biblionumber, GetFrameworkCode($biblionumber) ); + ModBiblio( $bib, $biblionumber, $frameworkcode ); $num_bibs_modified++; } } -- 2.7.4
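A recurring pattern in these changes is caching the search-engine objects with Perl's state feature so that batch jobs such as misc/link_bibs_to_authorities.pl reuse a single Elasticsearch connector instead of opening a new one per record and eventually running out of file descriptors. Below is a minimal standalone sketch of that pattern; the lookup_count() helper and the driver loop are illustrative only, while Koha::SearchEngine::Search, simple_search_compat() and the "hi" (host item number) index come from the hunks above.

    use Modern::Perl;
    use feature 'state';

    use Koha::SearchEngine;
    use Koha::SearchEngine::Search;

    # Count the bibs that reference a given item, reusing one searcher across calls.
    sub lookup_count {
        my ($itemnumber) = @_;

        # 'state' initialises the searcher once per process; a plain 'my' here
        # would create a fresh Elasticsearch connector on every call.
        state $searcher = Koha::SearchEngine::Search->new(
            { index => $Koha::SearchEngine::BIBLIOS_INDEX } );

        my ( $error, $results, $total_hits ) =
            $searcher->simple_search_compat( "hi=" . $itemnumber, 0, 10 );
        return $error ? 0 : $total_hits;
    }

    # A batch loop like this hits the same connector on every iteration.
    for my $itemnumber ( 1 .. 1000 ) {
        printf "item %d is used in %d analytic records\n",
            $itemnumber, lookup_count($itemnumber);
    }

The same reasoning applies to the indexer cached in C4::Biblio::ModZebra and to the Catmandu::Fix objects cached per index in _convert_marc_to_json: the expensive construction happens once, and each subsequent record in the batch pays only for the search or index request itself.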