Bugzilla – Attachment 75109 Details for Bug 19893 - Alternative optimized indexing for Elasticsearch
[patch] Bug 19893 - Alternative optimized indexing for Elasticsearch

Description: Bug 19893 - Alternative optimized indexing for Elasticsearch
Filename:    Bug-19893---Alternative-optimized-indexing-for-Ela.patch
MIME Type:   text/plain
Creator:     David Gustafsson
Created:     2018-05-07 12:00:10 UTC
Size:        20.92 KB
Flags:       patch, obsolete
From 76db2766386d32379cb68ce43411f5a67b4a88f5 Mon Sep 17 00:00:00 2001
From: David Gustafsson <david.gustafsson@ub.gu.se>
Date: Tue, 12 Dec 2017 18:26:13 +0100
Subject: [PATCH] Bug 19893 - Alternative optimized indexing for Elasticsearch

Add alternative optimized indexing for Elasticsearch

How to test:
1) Time a full elasticsearch re-index by running the rebuild_elastic_search.pl
   with the -d flag: `koha-shell <instance_name> -c "time rebuild_elastic_search.pl -d"`.
2) Enable the ExperimentalElasticsearchIndexing system preference
   (found under Global System preferences -> Administration -> Search Engine).
3) Time a full re-index again, it should be about twice as fast (for a
   couple of thousand biblios, with fewer biblios results may be more
   unpredictable).

Sponsored-by: Gothenburg University Library
---
 Koha/SearchEngine/Elasticsearch.pm | 192 +++++++++++++++++++++
 Koha/SearchEngine/Elasticsearch/Indexer.pm | 133 +++++++++++---
 Koha/SearchEngine/Elasticsearch/Search.pm | 46 +++--
 etc/searchengine/elasticsearch/field_config.yaml | 5 +
 ...experimental_indexing_elasticsearch_syspref.sql | 1 +
 .../prog/en/modules/admin/preferences/admin.pref | 6 +
 misc/search_tools/rebuild_elastic_search.pl | 6 +-
 7 files changed, 346 insertions(+), 43 deletions(-)
 create mode 100644 installer/data/mysql/atomicupdate/bug_19893_experimental_indexing_elasticsearch_syspref.sql

diff --git a/Koha/SearchEngine/Elasticsearch.pm b/Koha/SearchEngine/Elasticsearch.pm
index 0b33c06b3b..a545f5e3c6 100644
--- a/Koha/SearchEngine/Elasticsearch.pm
+++ b/Koha/SearchEngine/Elasticsearch.pm
@@ -34,6 +34,10 @@ use Search::Elasticsearch;
 use Try::Tiny;
 use YAML::Syck;
 
+use List::Util qw( sum0 );
+use Search::Elasticsearch;
+use MARC::File::XML;
+
 __PACKAGE__->mk_ro_accessors(qw( index ));
 __PACKAGE__->mk_accessors(qw( sort_fields ));
 
@@ -67,6 +71,19 @@ sub new {
     return $self;
 }
 
+sub get_elasticsearch {
+    my $self = shift @_;
+    unless (defined $self->{elasticsearch}) {
+        my $conf = $self->get_elasticsearch_params();
+        $self->{elasticsearch} = Search::Elasticsearch->new(
+            client => "5_0::Direct",
+            nodes => $conf->{nodes},
+            cxn_pool => 'Sniff'
+        );
+    }
+    return $self->{elasticsearch};
+}
+
 =head2 get_elasticsearch_params
 
     my $params = $self->get_elasticsearch_params();
@@ -281,6 +298,181 @@ sub sort_fields {
     return $self->_sort_fields_accessor();
 }
 
+sub marc_records_to_documents {
+    my ($self, $records) = @_;
+    my $rules = $self->get_marc_mapping_rules();
+    my $control_fields_rules = $rules->{control_fields};
+    my $data_fields_rules = $rules->{data_fields};
+    my $marcflavour = lc C4::Context->preference('marcflavour');
+
+    my @record_documents;
+
+    sub _process_mappings {
+        my ($mappings, $data, $record_document) = @_;
+        foreach my $mapping (@{$mappings}) {
+            my ($target, $options) = @{$mapping};
+            my $_data = $data;
+            $record_document->{$target} //= [];
+            if ($options->{substr}) {
+                my ($offset, $length) = @{$options->{substr}};
+                $_data = substr $data, $offset, $length;
+            }
+            if ($options->{property}) {
+                $_data = {
+                    $options->{property} => $_data
+                }
+            }
+            push @{$record_document->{$target}}, $_data;
+        }
+    }
+    foreach my $record (@{$records}) {
+        my $record_document = {};
+        my $mappings = $rules->{leader};
+        if ($mappings) {
+            _process_mappings($mappings, $record->leader(), $record_document);
+        }
+        foreach my $field ($record->fields()) {
+            if($field->is_control_field()) {
+                my $mappings = $control_fields_rules->{$field->tag()};
+                if ($mappings) {
+                    _process_mappings($mappings, $field->data(), $record_document);
+                }
+            }
+            else {
+                my $subfields_mappings = $data_fields_rules->{$field->tag()};
+                if ($subfields_mappings) {
+                    my $wildcard_mappings = $subfields_mappings->{'*'};
+                    foreach my $subfield ($field->subfields()) {
+                        my ($code, $data) = @{$subfield};
+                        my $mappings = $subfields_mappings->{$code} // [];
+                        if ($wildcard_mappings) {
+                            $mappings = [@{$mappings}, @{$wildcard_mappings}];
+                        }
+                        if (@{$mappings}) {
+                            _process_mappings($mappings, $data, $record_document);
+                        }
+                    }
+                }
+            }
+        }
+        foreach my $field (keys %{$rules->{defaults}}) {
+            unless (defined $record_document->{$field}) {
+                $record_document->{$field} = $rules->{defaults}->{$field};
+            }
+        }
+        foreach my $field (@{$rules->{sum}}) {
+            if (defined $record_document->{$field}) {
+                # TODO: validate numeric? filter?
+                # TODO: Or should only accept fields without nested values?
+                # TODO: Quick and dirty, improve if needed
+                $record_document->{$field} = sum0(grep { ref($_) eq 'SCALAR' && m/\d+([.,]\d+)?/} @{$record_document->{$field}});
+            }
+        }
+        # TODO: Perhaps should check if $records_document non empty, but really should never be the case
+        $record->encoding('UTF-8');
+        $record_document->{'marc_xml'} = $record->as_xml_record($marcflavour);
+        my $id = $record->subfield('999', 'c');
+        push @record_documents, [$id, $record_document];
+    }
+    return \@record_documents;
+}
+
+# Provides the rules for marc to Elasticsearch JSON document conversion.
+sub get_marc_mapping_rules {
+    my ($self) = @_;
+
+    my $marcflavour = lc C4::Context->preference('marcflavour');
+    my @rules;
+
+    sub _field_mappings {
+        my ($facet, $suggestible, $sort, $target_name, $target_type, $range) = @_;
+        my %mapping_defaults = ();
+        my @mappings;
+
+        my $substr_args = undef;
+        if ($range) {
+            my ($offset, $end) = map(int, split /-/, $range, 2);
+            $substr_args = [$offset];
+            push @{$substr_args}, (defined $end ? $end - $offset : 1);
+        }
+        my $default_options = {};
+        if ($substr_args) {
+            $default_options->{substr} = $substr_args;
+        }
+
+        my $mapping = [$target_name, $default_options];
+        push @mappings, $mapping;
+
+        my @suffixes = ();
+        push @suffixes, 'facet' if $facet;
+        push @suffixes, 'suggestion' if $suggestible; # Check condition, also if undef?
+        push @suffixes, 'sort' if $sort;
+        foreach my $suffix (@suffixes) {
+            my $mapping = ["${target_name}__$suffix"];
+            # Hack, fix later in less hideous manner
+            if ($suffix eq 'suggestion') {
+                push @{$mapping}, {%{$default_options}, property => 'input'};
+            }
+            else {
+                push @{$mapping}, $default_options;
+            }
+            push @mappings, $mapping;
+        }
+        return @mappings;
+    };
+    my $field_spec_regexp = qr/^([0-9]{3})([0-9a-z]+)?(?:_\/(\d+(?:-\d+)?))?$/;
+    my $leader_regexp = qr/^leader(?:_\/(\d+(?:-\d+)?))?$/;
+    my $rules = {
+        'leader' => [],
+        'control_fields' => {},
+        'data_fields' => {},
+        'sum' => [],
+        'defaults' => {}
+    };
+
+    $self->_foreach_mapping(sub {
+        my ( $name, $type, $facet, $suggestible, $sort, $marc_type, $marc_field ) = @_;
+        return if $marc_type ne $marcflavour;
+
+        if ($type eq 'sum') {
+            push @{$rules->{sum}}, $name;
+        }
+        elsif($type eq 'boolean') {
+            # boolean gets special handling, if value doesn't exist for a field,
+            # it is set to false
+            $rules->{defaults}->{$name} = 0;
+        }
+
+        if ($marc_field =~ $field_spec_regexp) {
+            my $field_tag = $1;
+            my $subfields = defined $2 ? $2 : '*';
+            my $range = defined $3 ? $3 : undef;
+            if ($field_tag < 10) {
+                $rules->{control_fields}->{$field_tag} //= [];
+                my @mappings = _field_mappings($facet, $suggestible, $sort, $name, $type, $range);
+                push @{$rules->{control_fields}->{$field_tag}}, @mappings;
+            }
+            else {
+                $rules->{data_fields}->{$field_tag} //= {};
+                foreach my $subfield (split //, $subfields) {
+                    $rules->{data_fields}->{$field_tag}->{$subfield} //= [];
+                    my @mappings = _field_mappings($facet, $suggestible, $sort, $name, $type, $range);
+                    push @{$rules->{data_fields}->{$field_tag}->{$subfield}}, @mappings;
+                }
+            }
+        }
+        elsif ($marc_field =~ $leader_regexp) {
+            my $range = defined $1 ? $1 : undef;
+            my @mappings = _field_mappings($facet, $suggestible, $sort, $name, $type, $range);
+            push @{$rules->{leader}}, @mappings;
+        }
+        else {
+            die("Invalid marc field: $marc_field");
+        }
+    });
+    return $rules;
+}
+
 # Provides the rules for data conversion.
 sub get_fixer_rules {
     my ($self) = @_;
diff --git a/Koha/SearchEngine/Elasticsearch/Indexer.pm b/Koha/SearchEngine/Elasticsearch/Indexer.pm
index e5babee535..d7d740f033 100644
--- a/Koha/SearchEngine/Elasticsearch/Indexer.pm
+++ b/Koha/SearchEngine/Elasticsearch/Indexer.pm
@@ -66,24 +66,82 @@ sub update_index {
         $self->_sanitise_records($biblionums, $records);
     }
 
-    my $from = $self->_convert_marc_to_json($records);
-    if ( !$self->store ) {
-        my $params = $self->get_elasticsearch_params();
-        $self->store(
-            Catmandu::Store::ElasticSearch->new(
-                %$params,
-                index_settings => $self->get_elasticsearch_settings(),
-                index_mappings => $self->get_elasticsearch_mappings(),
-            )
-        );
+    if (C4::Context->preference('ExperimentalElasticsearchIndexing')) {
+        $self->ensure_mappings_updated();
+        $self->bulk_index($records);
+        return 1;
+    }
+    else {
+        my $from = $self->_convert_marc_to_json($records);
+        if ( !$self->store ) {
+            my $params = $self->get_elasticsearch_params();
+            $self->store(
+                Catmandu::Store::ElasticSearch->new(
+                    %$params,
+                    index_settings => $self->get_elasticsearch_settings(),
+                    index_mappings => $self->get_elasticsearch_mappings(),
+                )
+            );
+        }
+
+        #print Data::Dumper::Dumper( $from->to_array );
+        $self->store->bag->add_many($from);
+        $self->store->bag->commit;
+        return 1;
     }
+}
 
-    #print Data::Dumper::Dumper( $from->to_array );
-    $self->store->bag->add_many($from);
-    $self->store->bag->commit;
+sub bulk_index {
+    my ($self, $records) = @_;
+    my $conf = $self->get_elasticsearch_params();
+    my $elasticsearch = $self->get_elasticsearch();
+    my $documents = $self->marc_records_to_documents($records);
+    my @body;
+
+    foreach my $document_info (@{$documents}) {
+        my ($id, $document) = @{$document_info};
+        push @body, {
+            index => {
+                _id => $id
+            }
+        };
+        push @body, $document;
+    }
+    my $response = $elasticsearch->bulk(
+        index => $conf->{index_name},
+        type => 'data', # is just hard coded in Indexer.pm?
+        body => \@body
+    );
+    # TODO: handle response
     return 1;
 }
 
+sub ensure_mappings_updated {
+    my ($self) = @_;
+    unless ($self->{_mappings_updated}) {
+        $self->update_mappings();
+    }
+}
+
+sub update_mappings {
+    my ($self) = @_;
+    my $conf = $self->get_elasticsearch_params();
+    my $elasticsearch = $self->get_elasticsearch();
+    my $mappings = $self->get_elasticsearch_mappings();
+
+    foreach my $type (keys %{$mappings}) {
+        my $response = $elasticsearch->indices->put_mapping(
+            index => $conf->{index_name},
+            type => $type,
+            body => {
+                $type => $mappings->{$type}
+            }
+        );
+        # TODO: process response, produce errors etc
+    }
+    $self->{_mappings_updated} = 1;
+}
+
 =head2 $indexer->update_index_background($biblionums, $records)
 
 This has exactly the same API as C<update_index_background> however it'll
@@ -148,21 +206,42 @@ after this will recreate it again.
 
 sub drop_index {
     my ($self) = @_;
-
-    if (!$self->store) {
-        # If this index doesn't exist, this will create it. Then it'll be
-        # deleted. That's not the end of the world however.
-        my $params = $self->get_elasticsearch_params();
-        $self->store(
-            Catmandu::Store::ElasticSearch->new(
-                %$params,
-                index_settings => $self->get_elasticsearch_settings(),
-                index_mappings => $self->get_elasticsearch_mappings(),
-            )
-        );
+    if (C4::Context->preference('ExperimentalElasticsearchIndexing')) {
+        my $conf = $self->get_elasticsearch_params();
+        my $elasticsearch = $self->get_elasticsearch();
+        my $response = $elasticsearch->indices->delete(index => $conf->{index_name});
+        # TODO: Handle response? Convert errors to exceptions/die
+    }
+    else {
+        if (!$self->store) {
+            # If this index doesn't exist, this will create it. Then it'll be
+            # deleted. That's not the end of the world however.
+            my $params = $self->get_elasticsearch_params();
+            $self->store(
+                Catmandu::Store::ElasticSearch->new(
+                    %$params,
+                    index_settings => $self->get_elasticsearch_settings(),
+                    index_mappings => $self->get_elasticsearch_mappings(),
+                )
+            );
+        }
+        $self->store->drop();
+        $self->store(undef);
     }
-    $self->store->drop();
-    $self->store(undef);
+}
+
+sub create_index {
+    my ($self) = @_;
+    my $conf = $self->get_elasticsearch_params();
+    my $settings = $self->get_elasticsearch_settings();
+    my $elasticsearch = $self->get_elasticsearch();
+    my $response = $elasticsearch->indices->create(
+        index => $conf->{index_name},
+        body => {
+            settings => $settings
+        }
+    );
+    # TODO: Handle response? Convert errors to exceptions/die
 }
 
 sub _sanitise_records {
diff --git a/Koha/SearchEngine/Elasticsearch/Search.pm b/Koha/SearchEngine/Elasticsearch/Search.pm
index 063dca306b..cf400b72e1 100644
--- a/Koha/SearchEngine/Elasticsearch/Search.pm
+++ b/Koha/SearchEngine/Elasticsearch/Search.pm
@@ -48,7 +48,7 @@ use Koha::SearchEngine::QueryBuilder;
 use Koha::SearchEngine::Search;
 use MARC::Record;
 use Catmandu::Store::ElasticSearch;
-
+use MARC::File::XML;
 use Data::Dumper; #TODO remove
 use Carp qw(cluck);
 
@@ -156,15 +156,13 @@ sub search_compat {
     my $results = $self->search($query, undef, $results_per_page, %options);
 
     # Convert each result into a MARC::Record
-    my (@records, $index);
-    $index = $offset; # opac-search expects results to be put in the
-        # right place in the array, according to $offset
+    my @records;
+    # opac-search expects results to be put in the
+    # right place in the array, according to $offset
+    my $index = $offset;
     $results->each(sub {
-        # The results come in an array for some reason
-        my $marc_json = $_[0]->{record};
-        my $marc = $self->json2marc($marc_json);
-        $records[$index++] = $marc;
-    });
+        $records[$index++] = $self->decode_record_from_result(@_);
+    });
     # consumers of this expect a name-spaced result, we provide the default
     # configuration.
     my %result;
@@ -195,14 +193,14 @@ sub search_auth_compat {
     $res->each(
         sub {
            my %result;
-            my $record = $_[0];
-            my $marc_json = $record->{record};
 
            # I wonder if these should be real values defined in the mapping
            # rather than hard-coded conversions.
+            my $record = $_[0];
            # Handle legacy nested arrays indexed with splitting enabled.
            my $authid = $record->{ 'Local-number' }[0];
            $authid = @$authid[0] if (ref $authid eq 'ARRAY');
+
            $result{authid} = $authid;
 
            # TODO put all this info into the record at index time so we
@@ -218,7 +216,7 @@ sub search_auth_compat {
            # it's not reproduced here yet.
            my $authtype = $rs->single;
            my $auth_tag_to_report = $authtype ? $authtype->auth_tag_to_report : "";
-            my $marc = $self->json2marc($marc_json);
+            my $marc = $self->decode_record_from_result(@_);
            my $mainentry = $marc->field($auth_tag_to_report);
            my $reported_tag;
            if ($mainentry) {
@@ -337,9 +335,7 @@ sub simple_search_compat {
     my $results = $self->search($query, undef, $max_results, %options);
     my @records;
     $results->each(sub {
-        # The results come in an array for some reason
-        my $marc_json = $_[0]->{record};
-        my $marc = $self->json2marc($marc_json);
+        my $marc = $self->decode_record_from_result(@_);
         push @records, $marc;
     });
     return (undef, \@records, $results->total);
@@ -360,6 +356,26 @@ sub extract_biblionumber {
     return Koha::SearchEngine::Search::extract_biblionumber( $searchresultrecord );
 }
 
+=head2 decode_record_from_result
+    my $marc_record = $self->decode_record_from_result(@result);
+
+Extracts marc data from Elasticsearch result and decodes to MARC::Record object
+
+=cut
+
+sub decode_record_from_result {
+    # Result is passed in as array, will get flattened
+    # and first element will be $result
+    my ( $self, $result ) = @_;
+    if (C4::Context->preference('ExperimentalElasticsearchIndexing')) {
+        return MARC::Record->new_from_xml($result->{marc_xml}, 'UTF-8', uc C4::Context->preference('marcflavour'));
+    }
+    else {
+        return $self->json2marc($result->{record});
+    }
+}
+
+
 =head2 json2marc
 
     my $marc = $self->json2marc($marc_json);
diff --git a/etc/searchengine/elasticsearch/field_config.yaml b/etc/searchengine/elasticsearch/field_config.yaml
index 535a43c00b..437d6eb394 100644
--- a/etc/searchengine/elasticsearch/field_config.yaml
+++ b/etc/searchengine/elasticsearch/field_config.yaml
@@ -8,6 +8,11 @@ general:
   record:
     store: true
     type: text
+  marc_xml:
+    store: true
+    type: text
+    analyzer: keyword
+    index: false
 # Search fields
 search:
   boolean:
diff --git a/installer/data/mysql/atomicupdate/bug_19893_experimental_indexing_elasticsearch_syspref.sql b/installer/data/mysql/atomicupdate/bug_19893_experimental_indexing_elasticsearch_syspref.sql
new file mode 100644
index 0000000000..cff7f2b0fe
--- /dev/null
+++ b/installer/data/mysql/atomicupdate/bug_19893_experimental_indexing_elasticsearch_syspref.sql
@@ -0,0 +1 @@
+INSERT IGNORE INTO systempreferences (variable, value, explanation, options, type) VALUES ('ExperimentalElasticsearchIndexing', '0', 'Enable optimized experimental Elasticsearch indexing', NULL, 'YesNo');
diff --git a/koha-tmpl/intranet-tmpl/prog/en/modules/admin/preferences/admin.pref b/koha-tmpl/intranet-tmpl/prog/en/modules/admin/preferences/admin.pref
index 2e8dce78da..09f67b0e41 100644
--- a/koha-tmpl/intranet-tmpl/prog/en/modules/admin/preferences/admin.pref
+++ b/koha-tmpl/intranet-tmpl/prog/en/modules/admin/preferences/admin.pref
@@ -425,3 +425,9 @@ Administration:
               choices:
                   Zebra: Zebra
                   Elasticsearch: Elasticsearch
+        -
+            - pref: ExperimentalElasticsearchIndexing
+              choices:
+                  yes: Enable
+                  no: "Don't enable"
+            - "experimental faster indexing, only relevant if using Elasticsearch."
diff --git a/misc/search_tools/rebuild_elastic_search.pl b/misc/search_tools/rebuild_elastic_search.pl
index 6faa32c63e..a62c432a64 100755
--- a/misc/search_tools/rebuild_elastic_search.pl
+++ b/misc/search_tools/rebuild_elastic_search.pl
@@ -161,10 +161,14 @@ sub do_reindex {
 
     my $indexer = Koha::SearchEngine::Elasticsearch::Indexer->new( { index => $index_name } );
     if ($delete) {
-
        # We know it's safe to not recreate the indexer because update_index
        # hasn't been called yet.
        $indexer->drop_index();
+        if (C4::Context->preference('ExperimentalElasticsearchIndexing')) {
+            # Catmandu will create the index for us in update_index, so without it we
+            # need to create it ourselves
+            $indexer->create_index();
+        }
     }
 
     my $count = 0;
--
2.11.0
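Reviewer's note on the mapping syntax consumed by get_marc_mapping_rules(): a search field's marc_field spec is a three-digit tag, optionally followed by subfield codes, optionally followed by an _/offset or _/offset-end range (the leader uses the same range syntax via leader_/...). The snippet below only illustrates that parsing, reusing the patch's own regexp; the example specs are made up.

#!/usr/bin/perl
# Illustration only (not part of the patch): how marc_field specs break down
# under the regexp used by get_marc_mapping_rules(). Example specs are made up.
use Modern::Perl;

my $field_spec_regexp = qr/^([0-9]{3})([0-9a-z]+)?(?:_\/(\d+(?:-\d+)?))?$/;

for my $spec ( '245a', '008_/7-10', '100' ) {
    if ( $spec =~ $field_spec_regexp ) {
        my $tag       = $1;
        my $subfields = defined $2 ? $2 : '*';            # no subfield codes means all subfields
        my $range     = defined $3 ? $3 : 'whole field';  # optional offset(-end) substring range
        say "$spec -> tag $tag, subfields $subfields, range $range";
    }
}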
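For reviewers who want to see what the new document builder produces without running a full re-index, here is a minimal usage sketch (not part of the patch). It assumes a configured Koha instance with Elasticsearch search field mappings loaded and MARC21 as the marcflavour; the 'biblios' index name matches what rebuild_elastic_search.pl passes to the indexer, and the sample record content is invented.

#!/usr/bin/perl
# Hypothetical usage sketch for marc_records_to_documents() (not part of the
# patch). Requires a configured Koha instance; the record below is made up.
use Modern::Perl;
use MARC::Record;
use MARC::Field;
use Koha::SearchEngine::Elasticsearch::Indexer;

# Build a throwaway record; 999$c is the biblionumber the indexer uses as _id.
my $record = MARC::Record->new();
$record->leader('00203nam a2200097   4500');
$record->append_fields(
    MARC::Field->new( '245', '0', '0', a => 'An example title' ),
    MARC::Field->new( '999', ' ', ' ', c => '42', d => '42' ),
);

my $indexer = Koha::SearchEngine::Elasticsearch::Indexer->new( { index => 'biblios' } );

# Returns an arrayref of [ id, document ] pairs; each document also carries the
# whole record serialized as marc_xml, which decode_record_from_result() reads back.
my $documents = $indexer->marc_records_to_documents( [$record] );
my ( $id, $document ) = @{ $documents->[0] };
say "id: $id";
say "marc_xml length: " . length $document->{marc_xml};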
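bulk_index() bypasses Catmandu and posts straight to Elasticsearch's bulk endpoint: the request body alternates an action entry ({ index => { _id => ... } }) with the document itself. The standalone sketch below only illustrates that request shape; the node address, index name and document content are assumptions, not values taken from the patch.

#!/usr/bin/perl
# Standalone sketch of the bulk request shape used by bulk_index() (not part of
# the patch). Node address, index name and document content are assumptions.
use Modern::Perl;
use Search::Elasticsearch;

my $es = Search::Elasticsearch->new(
    client => '5_0::Direct',
    nodes  => ['localhost:9200'],
);

# The body alternates { index => { _id => ... } } action lines with the
# documents produced by marc_records_to_documents().
my @body = (
    { index => { _id => 42 } },
    { title => ['An example title'], marc_xml => '<record>...</record>' },
);

my $response = $es->bulk(
    index => 'koha_kohadev_biblios',
    type  => 'data',
    body  => \@body,
);
say $response->{errors} ? 'bulk request reported errors' : 'bulk request ok';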
Attachments on bug 19893:
70199 | 70200 | 70201 | 70202 | 70230 | 70325 | 73153 | 75109 | 75271 | 75272 | 75353 | 75428 | 75455 | 75588 | 75658 | 75660 | 76612 | 76613 | 76614 | 76629 | 78091 | 78092 | 78093 | 78094 | 78524 | 78561 | 78587 | 78588 | 78589 | 78590 | 78591 | 78592 | 78593 | 78594 | 78690 | 78691 | 78692 | 78693 | 78694 | 78695 | 78696 | 80969 | 80970 | 80971 | 80983 | 81014 | 81015 | 81284 | 81822 | 81901 | 81903 | 81955 | 81957 | 81958 | 81959 | 81960 | 81961 | 81962 | 81963 | 81964 | 81965 | 81966 | 82143 | 82144 | 82145 | 82146 | 82147 | 82148 | 82149 | 82150 | 82151 | 82152 | 82256 | 82257 | 82258 | 82259 | 82260 | 82261 | 82262 | 82263 | 82264 | 82265 | 82266 | 82267