Bugzilla – Attachment 34773 Details for Bug 13264: Full stack encoding tests (UTF-8)
[patch] Refactor search utf8 tests and add some more

Description: Refactor search utf8 tests and add some more
Filename:    0008-Bug-13264-Refactor-search-utf8-tests-and-add-some-mo.patch
MIME Type:   text/plain
Creator:     Zeno Tajoli
Created:     2014-12-28 22:29:42 UTC
Size:        29.86 KB
Flags:       patch, obsolete
From f5353b1e88b9c802df6b06d01bf3d0408bcde8ad Mon Sep 17 00:00:00 2001
From: Jonathan Druart <jonathan.druart@biblibre.com>
Date: Thu, 4 Dec 2014 13:12:00 +0100
Subject: [PATCH 08/11] Bug 13264: Refactor search utf8 tests and add some
 more

t/db_dependent/www/search_utf8.t and
t/db_dependent/www/intranet_search_utf8.t were quite similar, I merged
them into a single file (t/db_dependent/www/search_utf8.t).
On the way, I added some tests for them.

Note that you will need the last patch on branch bug_11944 to see the
tests pass.

Signed-off-by: Jonathan Druart <jonathan.druart@biblibre.com>
---
 t/db_dependent/www/auth_values_input_www.t |  26 ++-
 t/db_dependent/www/intranet_search_utf8.t  | 272 -------------------------
 t/db_dependent/www/opac_utf8.t             | 277 -------------------------
 t/db_dependent/www/search_utf8.t           | 302 ++++++++++++++++++++++++++++
 4 files changed, 321 insertions(+), 556 deletions(-)
 delete mode 100644 t/db_dependent/www/intranet_search_utf8.t
 delete mode 100644 t/db_dependent/www/opac_utf8.t
 create mode 100644 t/db_dependent/www/search_utf8.t

diff --git a/t/db_dependent/www/auth_values_input_www.t b/t/db_dependent/www/auth_values_input_www.t
index 4f72cc1..b9aeea7 100644
--- a/t/db_dependent/www/auth_values_input_www.t
+++ b/t/db_dependent/www/auth_values_input_www.t
@@ -18,14 +18,14 @@
 use Modern::Perl;
 
 use utf8;
-use Test::More;
+use Test::More tests => 15;
 use Test::WWW::Mechanize;
-use Data::Dumper;
 use XML::Simple;
 use JSON;
 use File::Basename;
 use File::Spec;
 use POSIX;
+use URI::Escape;
 use Encode;
 
 my $testdir = File::Spec->rel2abs( dirname(__FILE__) );
@@ -40,7 +40,6 @@ if ($@) {
     plan skip_all => "Tests skip. You must have a working Context\n";
 }
 
-
 my $user = $ENV{KOHA_USER} || $xml->{config}->{user};
 my $password = $ENV{KOHA_PASS} || $xml->{config}->{pass};
 my $intranet = $ENV{KOHA_INTRANET_URL};
@@ -57,6 +56,7 @@ my $jsonresponse;
 
 # -------------------------------------------------- LOAD RECORD
 
+my $category = '学協会μμ';
 $agent->get_ok( "$intranet/cgi-bin/koha/mainpage.pl", 'connect to intranet' );
 $agent->form_name('loginform');
 $agent->field( 'password', $password );
@@ -71,15 +71,29 @@ $agent->form_name('Aform');
 $agent->field('authorised_value', 'εÏιμεq');
 $agent->field('lib_opac', 'autdesc2');
 $agent->field('lib', 'desc1');
-$agent->field('category', '学協会μμ');
+$agent->field('category', $category);
 $agent->field('branches', '');
 $agent->click_ok( '', "Create new auth category and value" );
 
+my $expected_base = q|authorised_values.pl\?searchfield=| . uri_escape_utf8( $category );
+$agent->base_like(qr|$expected_base|, "check base");
+my $add_form_link_exists = 0;
+my $delete_form_link_exists = 0;
+for my $link ( $agent->links() ) {
+    if ( $link->url =~ m|authorised_values.pl\?op=add_form&category=$category| ) {
+        $add_form_link_exists = 1;
+    }elsif( $link->url =~ m|authorised_values.pl\?op=delete_confirm&searchfield=$category| ) {
+        $delete_form_link_exists = 1;
+    }
+}
+is( $add_form_link_exists, 1, );
+is( $delete_form_link_exists, 1, );
+
 $agent->get_ok( "$intranet/cgi-bin/koha/admin/authorised_values.pl", 'Return to Authorized values page' );
 $agent->get_ok( "$intranet/cgi-bin/koha/admin/authorised_values.pl?searchfield=学協会μμ&offset=0", 'Search the values inserted' );
 my $text = $agent->text() ;
 #Tests on UTF-8
-ok ( ( length(Encode::encode_utf8($text)) != length($text) ) , 'UTF-8 are multi-byte. Good') ;
+ok ( ( length(Encode::encode('UTF-8', $text)) != length($text) ) , 'UTF-8 are multi-byte. Good') ;
 ok ($text =~ m/学協会μμ/, 'UTF-8 (Asia) chars are correctly present. Good');
 ok ($text =~ m/εÏιμεq/, 'UTF-8 (Greek) chars are correctly present. Good');
 my @links = $agent->links;
@@ -98,6 +112,4 @@ if ($id_to_del) {
     ok($id_to_del ne undef, "error, link to delete nor working");
 }
 
-done_testing();
-
 1;
diff --git a/t/db_dependent/www/intranet_search_utf8.t b/t/db_dependent/www/intranet_search_utf8.t
deleted file mode 100644
index 1fdfa66..0000000
--- a/t/db_dependent/www/intranet_search_utf8.t
+++ /dev/null
@@ -1,272 +0,0 @@
-#!/usr/bin/perl
-
-# This file is part of Koha.
-#
-# Koha is free software; you can redistribute it and/or modify it
-# under the terms of the GNU General Public License as published by
-# the Free Software Foundation; either version 3 of the License, or
-# (at your option) any later version.
-#
-# Koha is distributed in the hope that it will be useful, but
-# WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Koha; if not, see <http://www.gnu.org/licenses>.
-
-use Modern::Perl;
-
-use utf8;
-use Test::More;
-use Test::WWW::Mechanize;
-use Data::Dumper;
-use XML::Simple;
-use JSON;
-use File::Basename;
-use File::Path;
-use File::Spec;
-use File::Temp qw/ tempdir /;
-use POSIX;
-use Encode;
-
-use C4::Context;
-
-my $testdir = File::Spec->rel2abs( dirname(__FILE__) );
-# global variables that will be used when forking
-our $zebra_pid;
-our $indexer_pid;
-our $datadir = tempdir();;
-
-my $koha_conf = $ENV{KOHA_CONF};
-my $xml = XMLin($koha_conf);
-
-my $marcflavour = C4::Context->preference('marcflavour') || 'MARC21';
-
-# For the purpose of this test, we can reasonably take MARC21 and NORMARC to be the same
-my $file =
-    $marcflavour eq 'UNIMARC'
-    ? "$testdir/data/unimarcutf8record.mrc"
-    : "$testdir/data/marc21utf8record.mrc";
-
-my $user = $ENV{KOHA_USER} || $xml->{config}->{user};
-my $password = $ENV{KOHA_PASS} || $xml->{config}->{pass};
-my $intranet = $ENV{KOHA_INTRANET_URL};
-my $opac = $ENV{KOHA_OPAC_URL};
-
-# launch the zebra process
-launch_zebra( $datadir, $koha_conf );
-if ( not defined $zebra_pid ) {
-    plan skip_all => "Tests skip. Error starting Zebra Server to do those tests\n";
-}
-# launch the zebra process
-launch_indexer( );
-if ( not defined $indexer_pid ) {
-    plan skip_all => "Tests skip. Error starting the indexer daemon to do those tests\n";
-}
-# test KOHA_INTRANET_URL is set
-if ( not defined $intranet ) {
-    plan skip_all => "Tests skip. You must set env. variable KOHA_INTRANET_URL to do tests\n";
-}
-# test KOHA_OPAC_URL is set
-if ( not defined $opac ) {
-    plan skip_all => "Tests skip. You must set env. variable KOHA_OPAC_URL to do tests\n";
-}
-
-$intranet =~ s#/$##;
-$opac =~ s#/$##;
-
-my $agent = Test::WWW::Mechanize->new( autocheck => 1 );
-my $jsonresponse;
-
-# -------------------------------------------------- LOAD RECORD
-
-$agent->get_ok( "$intranet/cgi-bin/koha/mainpage.pl", 'connect to intranet' );
-$agent->form_name('loginform');
-$agent->field( 'password', $password );
-$agent->field( 'userid', $user );
-$agent->field( 'branch', '' );
-$agent->click_ok( '', 'login to staff client' );
-
-$agent->get_ok( "$intranet/cgi-bin/koha/mainpage.pl", 'load main page' );
-
-$agent->follow_link_ok( { url_regex => qr/tools-home/i }, 'open tools module' );
-$agent->follow_link_ok( { text => 'Stage MARC records for import' },
-    'go to stage MARC' );
-
-$agent->post(
-    "$intranet/cgi-bin/koha/tools/upload-file.pl",
-    [ 'fileToUpload' => [$file], ],
-    'Content_Type' => 'form-data',
-);
-ok( $agent->success, 'uploaded file' );
-
-$jsonresponse = decode_json $agent->content();
-is( $jsonresponse->{'status'}, 'done', 'upload succeeded' );
-my $fileid = $jsonresponse->{'fileid'};
-
-$agent->get_ok( "$intranet/cgi-bin/koha/tools/stage-marc-import.pl",
-    'reopen stage MARC page' );
-$agent->submit_form_ok(
-    {
-        form_number => 5,
-        fields => {
-            'uploadedfileid' => $fileid,
-            'nomatch_action' => 'create_new',
-            'overlay_action' => 'replace',
-            'item_action' => 'always_add',
-            'matcher' => '',
-            'comments' => '',
-            'encoding' => 'utf8',
-            'parse_items' => '1',
-            'runinbackground' => '1',
-        }
-    },
-    'stage MARC'
-);
-
-$jsonresponse = decode_json $agent->content();
-my $jobID = $jsonresponse->{'jobID'};
-ok( $jobID, 'have job ID' );
-
-my $completed = 0;
-
-# if we haven't completed the batch in two minutes, it's not happening
-for my $counter ( 1 .. 24 ) {
-    $agent->get(
-        "$intranet/cgi-bin/koha/tools/background-job-progress.pl?jobID=$jobID",
-        "get job progress"
-    );
-    $jsonresponse = decode_json $agent->content();
-    if ( $jsonresponse->{'job_status'} eq 'completed' ) {
-        $completed = 1;
-        last;
-    }
-    warn(
-        (
-            $jsonresponse->{'job_size'}
-            ? floor(
-                100 * $jsonresponse->{'progress'} / $jsonresponse->{'job_size'}
-            )
-            : '100'
-        )
-        . "% completed"
-    );
-    sleep 5;
-}
-is( $jsonresponse->{'job_status'}, 'completed', 'job was completed' );
-
-$agent->get_ok(
-    "$intranet/cgi-bin/koha/tools/stage-marc-import.pl",
-    'reopen stage MARC page at end of upload'
-);
-$agent->submit_form_ok(
-    {
-        form_number => 5,
-        fields => {
-            'uploadedfileid' => $fileid,
-            'nomatch_action' => 'create_new',
-            'overlay_action' => 'replace',
-            'item_action' => 'always_add',
-            'matcher' => '1',
-            'comments' => '',
-            'encoding' => 'utf8',
-            'parse_items' => '1',
-            'runinbackground' => '1',
-            'completedJobID' => $jobID,
-        }
-    },
-    'stage MARC'
-);
-
-$agent->follow_link_ok( { text => 'Manage staged records' }, 'view batch' );
-
-
-$agent->form_number(5);
-$agent->field( 'framework', '' );
-$agent->click_ok( 'mainformsubmit', "imported records into catalog" );
-my $webpage = $agent->{content};
-
-$webpage =~ /(.*<title>.*?)(\d{1,})(.*<\/title>)/sx;
-my $id_batch = $2;
-my $id_bib_number = GetBiblionumberFromImport($id_batch);
-
-# wait enough time for the indexer
-sleep 10;
-
-# --------------------------------- TEST INTRANET SEARCH
-
-$agent->get_ok( "$intranet/cgi-bin/koha/catalogue/search.pl" , "got search on intranet");
-$agent->form_number(1);
-$agent->field('idx', 'kw');
-$agent->field('q', 'deuteros');
-$agent->click();
-
-my $text = $agent->text() ;
-
-#Tests on UTF-8
-
-ok ( ( length(Encode::encode_utf8($text)) != length($text) ) , 'UTF-8 are multi-byte. Goog') ;
-ok ($text =~ m/学協会. μμ/, 'UTF-8 chars are correctly present. Good');
-
-
-
-#-------------------------------------------------- REVERT
-
-$agent->get_ok( "$intranet/cgi-bin/koha/tools/manage-marc-import.pl", 'view and clean batch' );
-$agent->form_name('clean_batch_'.$id_batch);
-$agent->click();
-$agent->get_ok( "$intranet/cgi-bin/koha/catalogue/detail.pl?biblionumber=$id_bib_number", 'biblio on intranet' );
-$agent->get_ok( "$intranet/cgi-bin/koha/cataloguing/addbiblio.pl?op=delete&biblionumber=$id_bib_number", 'biblio deleted' );
-
-# clean
-cleanup();
-
-done_testing();
-
-# function that launches the zebra daemon
-sub launch_zebra {
-
-    my ( $datadir, $koha_conf ) = @_;
-
-    $zebra_pid = fork();
-    if ( $zebra_pid == 0 ) {
-        exec("zebrasrv -f $koha_conf -v none,request -l $datadir/zebra.log");
-        exit;
-    }
-    sleep( 1 );
-}
-
-sub launch_indexer {
-
-    my $rootdir = dirname(__FILE__) . '/../../../';
-    my $rebuild_zebra = "$rootdir/misc/migration_tools/rebuild_zebra.pl";
-
-    $indexer_pid = fork();
-
-    if ( $indexer_pid == 0 ) {
-        exec("$rebuild_zebra -daemon -sleep 5");
-        exit;
-    }
-    sleep( 1 );
-}
-
-sub cleanup {
-
-    kill 9, $zebra_pid if defined $zebra_pid;
-    kill 9, $indexer_pid if defined $indexer_pid;
-    # Clean up the Zebra files since the child process was just shot
-    rmtree $datadir;
-
-}
-
-sub GetBiblionumberFromImport{
-    my ( $batch_id) = @_;
-    use C4::ImportBatch;
-    my $data = C4::ImportBatch::GetImportRecordsRange($batch_id, '', '', undef,
-        { order_by => 'import_record_id', order_by_direction => 'DESC' });
-    my $biblionumber = $data->[0]->{'matched_biblionumber'};
-
-    return $biblionumber;
-}
-1;
diff --git a/t/db_dependent/www/opac_utf8.t b/t/db_dependent/www/opac_utf8.t
deleted file mode 100644
index 143a168..0000000
--- a/t/db_dependent/www/opac_utf8.t
+++ /dev/null
@@ -1,277 +0,0 @@
-#!/usr/bin/perl
-
-# This file is part of Koha.
-#
-# Koha is free software; you can redistribute it and/or modify it
-# under the terms of the GNU General Public License as published by
-# the Free Software Foundation; either version 3 of the License, or
-# (at your option) any later version.
-#
-# Koha is distributed in the hope that it will be useful, but
-# WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Koha; if not, see <http://www.gnu.org/licenses>.
-
-use Modern::Perl;
-
-use utf8;
-use Test::More;
-use Test::WWW::Mechanize;
-use Data::Dumper;
-use XML::Simple;
-use JSON;
-use File::Basename;
-use File::Path;
-use File::Spec;
-use File::Temp qw/ tempdir /;
-use POSIX;
-use Encode;
-
-use C4::Context;
-
-my $testdir = File::Spec->rel2abs( dirname(__FILE__) );
-# global variables that will be used when forking
-our $zebra_pid;
-our $indexer_pid;
-our $datadir = tempdir();;
-
-my $koha_conf = $ENV{KOHA_CONF};
-my $xml = XMLin($koha_conf);
-
-my $marcflavour = C4::Context->preference('marcflavour') || 'MARC21';
-
-# For the purpose of this test, we can reasonably take MARC21 and NORMARC to be the same
-my $file =
-    $marcflavour eq 'UNIMARC'
-    ? "$testdir/data/unimarcutf8record.mrc"
-    : "$testdir/data/marc21utf8record.mrc";
-
-my $user = $ENV{KOHA_USER} || $xml->{config}->{user};
-my $password = $ENV{KOHA_PASS} || $xml->{config}->{pass};
-my $intranet = $ENV{KOHA_INTRANET_URL};
-my $opac = $ENV{KOHA_OPAC_URL};
-
-# launch the zebra process
-launch_zebra( $datadir, $koha_conf );
-if ( not defined $zebra_pid ) {
-    plan skip_all => "Tests skip. Error starting Zebra Server to do those tests\n";
-}
-# launch the zebra process
-launch_indexer( );
-if ( not defined $indexer_pid ) {
-    plan skip_all => "Tests skip. Error starting the indexer daemon to do those tests\n";
-}
-# test KOHA_INTRANET_URL is set
-if ( not defined $intranet ) {
-    plan skip_all => "Tests skip. You must set env. variable KOHA_INTRANET_URL to do tests\n";
-}
-# test KOHA_OPAC_URL is set
-if ( not defined $opac ) {
-    plan skip_all => "Tests skip. You must set env. variable KOHA_OPAC_URL to do tests\n";
-}
-
-$intranet =~ s#/$##;
-$opac =~ s#/$##;
-
-my $agent = Test::WWW::Mechanize->new( autocheck => 1 );
-my $jsonresponse;
-
-# -------------------------------------------------- LOAD RECORD
-
-$agent->get_ok( "$intranet/cgi-bin/koha/mainpage.pl", 'connect to intranet' );
-$agent->form_name('loginform');
-$agent->field( 'password', $password );
-$agent->field( 'userid', $user );
-$agent->field( 'branch', '' );
-$agent->click_ok( '', 'login to staff client' );
-
-$agent->get_ok( "$intranet/cgi-bin/koha/mainpage.pl", 'load main page' );
-
-$agent->follow_link_ok( { url_regex => qr/tools-home/i }, 'open tools module' );
-$agent->follow_link_ok( { text => 'Stage MARC records for import' },
-    'go to stage MARC' );
-
-$agent->post(
-    "$intranet/cgi-bin/koha/tools/upload-file.pl",
-    [ 'fileToUpload' => [$file], ],
-    'Content_Type' => 'form-data',
-);
-ok( $agent->success, 'uploaded file' );
-
-$jsonresponse = decode_json $agent->content();
-is( $jsonresponse->{'status'}, 'done', 'upload succeeded' );
-my $fileid = $jsonresponse->{'fileid'};
-
-$agent->get_ok( "$intranet/cgi-bin/koha/tools/stage-marc-import.pl",
-    'reopen stage MARC page' );
-$agent->submit_form_ok(
-    {
-        form_number => 5,
-        fields => {
-            'uploadedfileid' => $fileid,
-            'nomatch_action' => 'create_new',
-            'overlay_action' => 'replace',
-            'item_action' => 'always_add',
-            'matcher' => '',
-            'comments' => '',
-            'encoding' => 'utf8',
-            'parse_items' => '1',
-            'runinbackground' => '1',
-        }
-    },
-    'stage MARC'
-);
-
-$jsonresponse = decode_json $agent->content();
-my $jobID = $jsonresponse->{'jobID'};
-ok( $jobID, 'have job ID' );
-
-my $completed = 0;
-
-# if we haven't completed the batch in two minutes, it's not happening
-for my $counter ( 1 .. 24 ) {
-    $agent->get(
-        "$intranet/cgi-bin/koha/tools/background-job-progress.pl?jobID=$jobID",
-        "get job progress"
-    );
-    $jsonresponse = decode_json $agent->content();
-    if ( $jsonresponse->{'job_status'} eq 'completed' ) {
-        $completed = 1;
-        last;
-    }
-    warn(
-        (
-            $jsonresponse->{'job_size'}
-            ? floor(
-                100 * $jsonresponse->{'progress'} / $jsonresponse->{'job_size'}
-            )
-            : '100'
-        )
-        . "% completed"
-    );
-    sleep 5;
-}
-is( $jsonresponse->{'job_status'}, 'completed', 'job was completed' );
-
-$agent->get_ok(
-    "$intranet/cgi-bin/koha/tools/stage-marc-import.pl",
-    'reopen stage MARC page at end of upload'
-);
-$agent->submit_form_ok(
-    {
-        form_number => 5,
-        fields => {
-            'uploadedfileid' => $fileid,
-            'nomatch_action' => 'create_new',
-            'overlay_action' => 'replace',
-            'item_action' => 'always_add',
-            'matcher' => '1',
-            'comments' => '',
-            'encoding' => 'utf8',
-            'parse_items' => '1',
-            'runinbackground' => '1',
-            'completedJobID' => $jobID,
-        }
-    },
-    'stage MARC'
-);
-
-$agent->follow_link_ok( { text => 'Manage staged records' }, 'view batch' );
-
-
-$agent->form_number(5);
-$agent->field( 'framework', '' );
-$agent->click_ok( 'mainformsubmit', "imported records into catalog" );
-my $webpage = $agent->{content};
-
-$webpage =~ /(.*<title>.*?)(\d{1,})(.*<\/title>)/sx;
-my $id_batch = $2;
-my $id_bib_number = GetBiblionumberFromImport($id_batch);
-
-# wait enough time for the indexer
-sleep 10;
-
-
-
-# -------------------------------------------------- TEST ON OPAC
-
-
-$agent->get_ok( "$opac" , "got opac");
-$agent->form_name('searchform');
-$agent->field( 'q', 'deuteros' );
-$agent->field( 'idx', '' );
-$agent->click( );
-
-# ok( $agent->success, 'uploaded file' );
-
-my $text = $agent->text() ;
-
-#Tests on UTF-8
-
-ok ( ( length(Encode::encode_utf8($text)) != length($text) ) , 'UTF-8 are multi-byte. Goog') ;
-ok ($text =~ m/学協会. μμ/, 'UTF-8 chars are correctly present. Good');
-
-
-
-#-------------------------------------------------- REVERT
-
-$agent->get_ok( "$intranet/cgi-bin/koha/tools/manage-marc-import.pl", 'view and clean batch' );
-$agent->form_name('clean_batch_'.$id_batch);
-$agent->click();
-$agent->get_ok( "$intranet/cgi-bin/koha/catalogue/detail.pl?biblionumber=$id_bib_number", 'biblio on intranet' );
-$agent->get_ok( "$intranet/cgi-bin/koha/cataloguing/addbiblio.pl?op=delete&biblionumber=$id_bib_number", 'biblio deleted' );
-
-# clean
-cleanup();
-
-done_testing();
-
-# function that launches the zebra daemon
-sub launch_zebra {
-
-    my ( $datadir, $koha_conf ) = @_;
-
-    $zebra_pid = fork();
-    if ( $zebra_pid == 0 ) {
-        exec("zebrasrv -f $koha_conf -v none,request -l $datadir/zebra.log");
-        exit;
-    }
-    sleep( 1 );
-}
-
-sub launch_indexer {
-
-    my $rootdir = dirname(__FILE__) . '/../../../';
-    my $rebuild_zebra = "$rootdir/misc/migration_tools/rebuild_zebra.pl";
-
-    $indexer_pid = fork();
-
-    if ( $indexer_pid == 0 ) {
-        exec("$rebuild_zebra -daemon -sleep 5");
-        exit;
-    }
-    sleep( 1 );
-}
-
-sub cleanup {
-
-    kill 9, $zebra_pid if defined $zebra_pid;
-    kill 9, $indexer_pid if defined $indexer_pid;
-    # Clean up the Zebra files since the child process was just shot
-    rmtree $datadir;
-
-}
-
-sub GetBiblionumberFromImport{
-    my ( $batch_id) = @_;
-    use C4::ImportBatch;
-    my $data = C4::ImportBatch::GetImportRecordsRange($batch_id, '', '', undef,
-        { order_by => 'import_record_id', order_by_direction => 'DESC' });
-    my $biblionumber = $data->[0]->{'matched_biblionumber'};
-
-    return $biblionumber;
-}
-1;
diff --git a/t/db_dependent/www/search_utf8.t b/t/db_dependent/www/search_utf8.t
new file mode 100644
index 0000000..01f5f2a
--- /dev/null
+++ b/t/db_dependent/www/search_utf8.t
@@ -0,0 +1,302 @@
+#!/usr/bin/perl
+
+# This file is part of Koha.
+#
+# Koha is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 3 of the License, or
+# (at your option) any later version.
+#
+# Koha is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Koha; if not, see <http://www.gnu.org/licenses>.
+
+use Modern::Perl;
+
+use utf8;
+use Test::More tests => 32;
+use Test::WWW::Mechanize;
+use Data::Dumper;
+use XML::Simple;
+use JSON;
+use File::Basename;
+use File::Path;
+use File::Spec;
+use File::Temp qw/ tempdir /;
+use POSIX;
+use Encode;
+use URI::Escape;
+
+use C4::Context;
+
+my $testdir = File::Spec->rel2abs( dirname(__FILE__) );
+# global variables that will be used when forking
+our $zebra_pid;
+our $indexer_pid;
+our $datadir = tempdir();;
+
+my $koha_conf = $ENV{KOHA_CONF};
+my $xml = XMLin($koha_conf);
+
+my $marcflavour = C4::Context->preference('marcflavour') || 'MARC21';
+
+# For the purpose of this test, we can reasonably take MARC21 and NORMARC to be the same
+my $file =
+    $marcflavour eq 'UNIMARC'
+    ? "$testdir/data/unimarcutf8record.mrc"
+    : "$testdir/data/marc21utf8record.mrc";
+
+my $user = $ENV{KOHA_USER} || $xml->{config}->{user};
+my $password = $ENV{KOHA_PASS} || $xml->{config}->{pass};
+my $intranet = $ENV{KOHA_INTRANET_URL};
+my $opac = $ENV{KOHA_OPAC_URL};
+
+# launch the zebra process
+launch_zebra( $datadir, $koha_conf );
+if ( not defined $zebra_pid ) {
+    plan skip_all => "Tests skip. Error starting Zebra Server to do those tests\n";
+}
+# launch the zebra process
+launch_indexer( );
+if ( not defined $indexer_pid ) {
+    plan skip_all => "Tests skip. Error starting the indexer daemon to do those tests\n";
+}
+# test KOHA_INTRANET_URL is set
+if ( not defined $intranet ) {
+    plan skip_all => "Tests skip. You must set env. variable KOHA_INTRANET_URL to do tests\n";
+}
+# test KOHA_OPAC_URL is set
+if ( not defined $opac ) {
+    plan skip_all => "Tests skip. You must set env. variable KOHA_OPAC_URL to do tests\n";
+}
+
+$intranet =~ s#/$##;
+$opac =~ s#/$##;
+
+my $agent = Test::WWW::Mechanize->new( autocheck => 1 );
+my $jsonresponse;
+
+# -------------------------------------------------- LOAD RECORD
+
+$agent->get_ok( "$intranet/cgi-bin/koha/mainpage.pl", 'connect to intranet' );
+$agent->form_name('loginform');
+$agent->field( 'password', $password );
+$agent->field( 'userid', $user );
+$agent->field( 'branch', '' );
+$agent->click_ok( '', 'login to staff client' );
+
+$agent->get_ok( "$intranet/cgi-bin/koha/mainpage.pl", 'load main page' );
+
+$agent->follow_link_ok( { url_regex => qr/tools-home/i }, 'open tools module' );
+$agent->follow_link_ok( { text => 'Stage MARC records for import' },
+    'go to stage MARC' );
+
+$agent->post(
+    "$intranet/cgi-bin/koha/tools/upload-file.pl",
+    [ 'fileToUpload' => [$file], ],
+    'Content_Type' => 'form-data',
+);
+ok( $agent->success, 'uploaded file' );
+
+$jsonresponse = decode_json $agent->content();
+is( $jsonresponse->{'status'}, 'done', 'upload succeeded' );
+my $fileid = $jsonresponse->{'fileid'};
+
+$agent->get_ok( "$intranet/cgi-bin/koha/tools/stage-marc-import.pl",
+    'reopen stage MARC page' );
+$agent->submit_form_ok(
+    {
+        form_number => 5,
+        fields => {
+            'uploadedfileid' => $fileid,
+            'nomatch_action' => 'create_new',
+            'overlay_action' => 'replace',
+            'item_action' => 'always_add',
+            'matcher' => '',
+            'comments' => '',
+            'encoding' => 'utf8',
+            'parse_items' => '1',
+            'runinbackground' => '1',
+        }
+    },
+    'stage MARC'
+);
+
+$jsonresponse = decode_json $agent->content();
+my $jobID = $jsonresponse->{'jobID'};
+ok( $jobID, 'have job ID' );
+
+my $completed = 0;
+
+# if we haven't completed the batch in two minutes, it's not happening
+for my $counter ( 1 .. 24 ) {
+    $agent->get(
+        "$intranet/cgi-bin/koha/tools/background-job-progress.pl?jobID=$jobID",
+        "get job progress"
+    );
+    $jsonresponse = decode_json $agent->content();
+    if ( $jsonresponse->{'job_status'} eq 'completed' ) {
+        $completed = 1;
+        last;
+    }
+    warn(
+        (
+            $jsonresponse->{'job_size'}
+            ? floor(
+                100 * $jsonresponse->{'progress'} / $jsonresponse->{'job_size'}
+            )
+            : '100'
+        )
+        . "% completed"
+    );
+    sleep 5;
+}
+is( $jsonresponse->{'job_status'}, 'completed', 'job was completed' );
+
+$agent->get_ok(
+    "$intranet/cgi-bin/koha/tools/stage-marc-import.pl",
+    'reopen stage MARC page at end of upload'
+);
+$agent->submit_form_ok(
+    {
+        form_number => 5,
+        fields => {
+            'uploadedfileid' => $fileid,
+            'nomatch_action' => 'create_new',
+            'overlay_action' => 'replace',
+            'item_action' => 'always_add',
+            'matcher' => '1',
+            'comments' => '',
+            'encoding' => 'utf8',
+            'parse_items' => '1',
+            'runinbackground' => '1',
+            'completedJobID' => $jobID,
+        }
+    },
+    'stage MARC'
+);
+
+$agent->follow_link_ok( { text => 'Manage staged records' }, 'view batch' );
+
+
+$agent->form_number(5);
+$agent->field( 'framework', '' );
+$agent->click_ok( 'mainformsubmit', "imported records into catalog" );
+my $webpage = $agent->{content};
+
+$webpage =~ /(.*<title>.*?)(\d{1,})(.*<\/title>)/sx;
+my $id_batch = $2;
+my $id_bib_number = GetBiblionumberFromImport($id_batch);
+
+# wait enough time for the indexer
+sleep 10;
+
+# --------------------------------- TEST INTRANET SEARCH
+
+my $publisher = 'Αθήνα';
+$agent->get_ok( "$intranet/cgi-bin/koha/catalogue/search.pl" , "got search on intranet");
+$agent->form_number(1);
+$agent->field('idx', 'kw');
+$agent->field('q', 'deuteros');
+$agent->click();
+my $intra_text = $agent->text() ;
+like( $intra_text, qr|Publisher: $publisher|, );
+
+$agent->get_ok( "$intranet/cgi-bin/koha/catalogue/search.pl" , "got search on intranet");
+$agent->form_number(1);
+$agent->field('idx', 'kw');
+$agent->field('q', Encode::encode('UTF-8', $publisher));
+$agent->click();
+$intra_text = $agent->text();
+
+like( $intra_text, qr|Publisher: $publisher|, );
+my $expected_base = q|search.pl\?idx=kw&q=| . uri_escape_utf8( Encode::encode('UTF-8', $publisher ) );
+$agent->base_like(qr|$expected_base|, );
+
+ok ( ( length(Encode::encode('UTF-8', $intra_text)) != length($intra_text) ) , 'UTF-8 are multi-byte. Goog') ;
+ok ($intra_text =~ m/学協会. μμ/, 'UTF-8 chars are correctly present. Good');
+# -------------------------------------------------- TEST ON OPAC
+
+$agent->get_ok( "$opac" , "got opac");
+$agent->form_name('searchform');
+$agent->field( 'q', 'deuteros' );
+$agent->field( 'idx', '' );
+$agent->click( );
+my $opac_text = $agent->text() ;
+like( $opac_text, qr|Publisher: $publisher|, );
+
+$agent->get_ok( "$opac" , "got opac");
+$agent->form_name('searchform');
+$agent->field('q', $publisher);
+$agent->field( 'idx', '' );
+$agent->click();
+$opac_text = $agent->text();
+
+like( $opac_text, qr|Publisher: $publisher|, );
+$expected_base = q|opac-search.pl\?q=| . uri_escape_utf8( $publisher );
+$agent->base_like(qr|$expected_base|, );
+
+ok ( ( length(Encode::encode('UTF-8', $opac_text)) != length($opac_text) ) , 'UTF-8 are multi-byte. Goog') ;
+ok ($opac_text =~ m/学協会. μμ/, 'UTF-8 chars are correctly present. Good');
+
+#-------------------------------------------------- REVERT
+
+$agent->get_ok( "$intranet/cgi-bin/koha/tools/manage-marc-import.pl", 'view and clean batch' );
+$agent->form_name('clean_batch_'.$id_batch);
+$agent->click();
+$agent->get_ok( "$intranet/cgi-bin/koha/catalogue/detail.pl?biblionumber=$id_bib_number", 'biblio on intranet' );
+$agent->get_ok( "$intranet/cgi-bin/koha/cataloguing/addbiblio.pl?op=delete&biblionumber=$id_bib_number", 'biblio deleted' );
+
+# clean
+cleanup();
+
+# function that launches the zebra daemon
+sub launch_zebra {
+
+    my ( $datadir, $koha_conf ) = @_;
+
+    $zebra_pid = fork();
+    if ( $zebra_pid == 0 ) {
+        exec("zebrasrv -f $koha_conf -v none,request -l $datadir/zebra.log");
+        exit;
+    }
+    sleep( 1 );
+}
+
+sub launch_indexer {
+
+    my $rootdir = dirname(__FILE__) . '/../../../';
+    my $rebuild_zebra = "$rootdir/misc/migration_tools/rebuild_zebra.pl";
+
+    $indexer_pid = fork();
+
+    if ( $indexer_pid == 0 ) {
+        exec("$rebuild_zebra -daemon -sleep 5");
+        exit;
+    }
+    sleep( 1 );
+}
+
+sub cleanup {
+
+    kill 9, $zebra_pid if defined $zebra_pid;
+    kill 9, $indexer_pid if defined $indexer_pid;
+    # Clean up the Zebra files since the child process was just shot
+    rmtree $datadir;
+
+}
+
+sub GetBiblionumberFromImport{
+    my ( $batch_id) = @_;
+    use C4::ImportBatch;
+    my $data = C4::ImportBatch::GetImportRecordsRange($batch_id, '', '', undef,
+        { order_by => 'import_record_id', order_by_direction => 'DESC' });
+    my $biblionumber = $data->[0]->{'matched_biblionumber'};
+
+    return $biblionumber;
+}
+1;
-- 
1.7.10.4
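
The merged test reads its configuration from environment variables: KOHA_CONF (with KOHA_USER/KOHA_PASS optionally overriding the credentials in koha-conf.xml), plus KOHA_INTRANET_URL and KOHA_OPAC_URL, without which the skip_all guards above fire. A minimal runner sketch, assuming a local Koha development install (the paths and URLs below are placeholders, not part of the patch):

    #!/usr/bin/perl
    # Hypothetical runner for the merged test file; the variable names come
    # from the test above, the values are placeholders for a local install.
    use Modern::Perl;
    use App::Prove;

    $ENV{KOHA_CONF}         //= '/etc/koha/sites/kohadev/koha-conf.xml';
    $ENV{KOHA_INTRANET_URL} //= 'http://localhost:8081';
    $ENV{KOHA_OPAC_URL}     //= 'http://localhost:8080';

    # Run only the single merged file introduced by this patch.
    my $prove = App::Prove->new;
    $prove->process_args( '-v', 't/db_dependent/www/search_utf8.t' );
    exit( $prove->run ? 0 : 1 );

Zebra and the indexer daemon are forked by the test itself (launch_zebra/launch_indexer), so nothing beyond a reachable staff client and OPAC needs to be prepared in advance.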