From 283e003a8ab5f5368dd592a3789f8a51a7a4a287 Mon Sep 17 00:00:00 2001
From: Tomas Cohen Arazi
Date: Fri, 4 Oct 2024 17:41:29 -0300
Subject: [PATCH] Bug 38101: Make ES indexer split big fields into chunks

This patch makes the `_process_mappings()` method split index values
into chunks when they are bigger than the allowed size of 32766 bytes.

To test:
1. Have KTD running with ES:
   $ ktd --proxy --es7 up -d
2. Perform a search
3. Pick the first result for editing
4. Find a cool Wiki page with lots of paragraphs
5. Copy all of the paragraphs and put them in a 500$a field on the
   record.
6. Repeat 2
=> FAIL: The record is not found
7. Reindex manually:
   $ ktd --shell
  k$ perl misc/search_tools/rebuild_elasticsearch.pl --biblios --where "biblionumber=3" -v -v
=> FAIL: You get something like:
```
[22229] Committing final records...
One or more ElasticSearch errors occurred when indexing documents at /kohadevbox/koha/Koha/SearchEngine/Elasticsearch/Indexer.pm line 148.
[22229] There were errors during indexing
Record #3 Document contains at least one immense term in field="note.raw" (whose UTF8 encoding is longer than the max length 32766), all of which were skipped. Please correct the analyzer to not produce such terms. The prefix of the first immense term is: '[10, 109, 117, 115, 116, 97, 102, 97, 32, 102, 117, 101, 32, 101, 108, 32, 115, 101, 103, 117, 110, 100, 111, 32, 104, 105, 106, 111, 32, 100]...', original message: bytes can be at most 32766 in length; got 32771 (illegal_argument_exception) : max_bytes_length_exceeded_exception (bytes can be at most 32766 in length; got 32771)
[22229] Total 1 records indexed
```
8. Apply this patch
9. Repeat 7
=> SUCCESS: No error!
10. Repeat 2
=> SUCCESS: The record is indexed and can be found!
11. Sign off :-D

Signed-off-by: Nick Clemens
---
 Koha/SearchEngine/Elasticsearch.pm | 26 +++++++++++++++++++++++++-
 1 file changed, 25 insertions(+), 1 deletion(-)

diff --git a/Koha/SearchEngine/Elasticsearch.pm b/Koha/SearchEngine/Elasticsearch.pm
index dba0fbd934b..5faa3d13cc3 100644
--- a/Koha/SearchEngine/Elasticsearch.pm
+++ b/Koha/SearchEngine/Elasticsearch.pm
@@ -534,8 +534,32 @@ sub _process_mappings {
 
         $values = [ grep(!/^$/, @{$values}) ];
 
+        # 4 bytes is the max size of a UTF-8 char.
+        # 32766 bytes is the max size of the data ES can add to an index
+        # 32766 / 4 =~ 8191
+        my $MAX_SIZE = 8191;
+
+        my @chunks;
+
+        foreach my $value ( @{$values} ) {
+            while ( length($value) > $MAX_SIZE ) {
+                $value =~ s/^\s*//;
+                # Match up to MAX_SIZE characters, stopping at the last full word before MAX_SIZE
+                if ( $value =~ /\G(.{1,$MAX_SIZE})(?:\s|$)/g ) {
+                    push @chunks, $1;
+                    $value = substr( $value, length($1) );
+                } else {
+
+                    # Catch-all for very long words
+                    push @chunks, substr( $value, 0, $MAX_SIZE );
+                    $value = substr( $value, $MAX_SIZE );
+                }
+            }
+            push @chunks, $value if length($value);
+        }
+
         $record_document->{$target} //= [];
-        push @{$record_document->{$target}}, @{$values};
+        push @{ $record_document->{$target} }, @chunks;
     }
 }
--
2.47.1
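
Not part of the patch: a minimal standalone Perl sketch of the chunking idea
above, assuming a $MAX_SIZE of 10 characters (the patch uses 8191) and made-up
sample strings, so both the word-boundary branch and the long-word catch-all
are easy to observe:

```perl
#!/usr/bin/env perl
# Standalone illustration only -- not part of the patch above.
# Uses an assumed $MAX_SIZE of 10 characters and made-up input values.
use strict;
use warnings;
use feature 'say';

my $MAX_SIZE = 10;

my @values = (
    'the quick brown fox jumps over the lazy dog',
    'supercalifragilistic',    # no whitespace, exercises the catch-all branch
);

my @chunks;
foreach my $value (@values) {
    while ( length($value) > $MAX_SIZE ) {
        $value =~ s/^\s*//;    # drop leading whitespace before cutting

        # Take up to $MAX_SIZE characters, preferring to stop at a word boundary
        if ( $value =~ /\G(.{1,$MAX_SIZE})(?:\s|$)/g ) {
            push @chunks, $1;
            $value = substr( $value, length($1) );
        } else {
            # No whitespace to break on: split the long word mid-word
            push @chunks, substr( $value, 0, $MAX_SIZE );
            $value = substr( $value, $MAX_SIZE );
        }
    }
    push @chunks, $value if length($value);    # keep the final remainder
}

say for @chunks;    # every chunk is at most $MAX_SIZE characters long
```

Running it prints each value as a series of chunks no longer than 10
characters; the whitespace-free word is split mid-word by the catch-all
branch, mirroring what the patch does with 8191-character chunks.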