From 971f05e4bd60c8de9145819158875f17a662da3f Mon Sep 17 00:00:00 2001
From: David Gustafsson
Date: Thu, 2 Nov 2017 15:37:12 +0100
Subject: [PATCH] Bug 19565: Fix regular expression in _truncate_terms so
 field names are not split up

The regular expression used for tokenizing the search string does not
allow field names containing "." or "-". Fix the regular expression so
that such field names are kept intact.

Test plan:
1. Make sure Elasticsearch is used as the default search engine.
2. Select "Search the catalog" on the staff client front page.
3. Search for Control-number.raw="<control number>", where
   <control number> is an existing control number. (The quotes around
   the value are needed to prevent Koha from appending "*" to the
   control number.)
4. The search should not yield any results.
5. Apply the patch.
6. Search again using the same condition.
7. The biblio with the control number used in the search should appear
   as a match.
8. Search for Control-number="<control number>".
9. The biblio with the control number used in the search should appear
   as a match.
---
 Koha/SearchEngine/Elasticsearch/QueryBuilder.pm | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/Koha/SearchEngine/Elasticsearch/QueryBuilder.pm b/Koha/SearchEngine/Elasticsearch/QueryBuilder.pm
index cdd0fc6bd1..7f9d764283 100644
--- a/Koha/SearchEngine/Elasticsearch/QueryBuilder.pm
+++ b/Koha/SearchEngine/Elasticsearch/QueryBuilder.pm
@@ -798,7 +798,7 @@ sub _truncate_terms {
 
     # '"donald duck" title:"the mouse" and peter" get split into
     # ['', '"donald duck"', '', ' ', '', 'title:"the mouse"', '', ' ', 'and', ' ', 'pete']
-    my @tokens = split /((?:\w+:)?"[^"]+"|\s+)/, $query;
+    my @tokens = split /((?:[\w\-.]+:)?"[^"]+"|\s+)/, $query;
 
     # Filter out empty tokens
     my @words = grep { $_ !~ /^\s*$/ } @tokens;
--
2.11.0
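
Note (illustration only, not part of the patch): below is a minimal
standalone Perl sketch showing how the old and new split patterns from the
hunk above tokenize a query whose field name contains "-" and ".". The query
string and the printed output are illustrative assumptions; only the two
regular expressions are taken from the diff.

    #!/usr/bin/perl
    use strict;
    use warnings;

    # Illustrative query with a field name containing "-" and ".".
    my $query = 'Control-number.raw:"12345" and peter';

    # Old pattern: \w+ cannot match "-" or ".", so the field name is cut off.
    my @old_tokens = grep { $_ !~ /^\s*$/ }
        split /((?:\w+:)?"[^"]+"|\s+)/, $query;

    # New pattern: the field prefix may also contain "-" and ".".
    my @new_tokens = grep { $_ !~ /^\s*$/ }
        split /((?:[\w\-.]+:)?"[^"]+"|\s+)/, $query;

    # With the old pattern "Control-number." becomes a bare word, which the
    # surrounding truncation logic would append "*" to, breaking the fielded
    # search.
    print "old: ", join(' | ', @old_tokens), "\n";
    # old: Control-number. | raw:"12345" | and | peter

    # With the new pattern the whole fielded phrase survives as one token and
    # is left untouched.
    print "new: ", join(' | ', @new_tokens), "\n";
    # new: Control-number.raw:"12345" | and | peter

Running the sketch with a Perl 5 interpreter should print the two lines shown
in the trailing comments.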