Lines 33-47 (context: and process them in batches every second.)

 use Modern::Perl;
 use JSON qw( decode_json );
-use Try::Tiny qw( catch try );
+use Try::Tiny;
 use Pod::Usage;
 use Getopt::Long;

 use C4::Context;
+use Koha::Logger;
 use Koha::BackgroundJobs;
 use Koha::SearchEngine;
 use Koha::SearchEngine::Indexer;

+use constant BATCH_SIZE => 10; # FIXME Must be in the koha config and/or options
+
 my ( $help );
 GetOptions(
     'h|help' => \$help,
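The import changes swap the explicit Try::Tiny import list for the default one (the rewritten loop below also uses finally, which qw( catch try ) would not bring in), pull in Koha::Logger for the new logging calls, and add the BATCH_SIZE constant. Its FIXME says the value belongs in the Koha config and/or an option; a minimal sketch of the option half, assuming a hypothetical --batch_size switch that is not part of this patch:

    # Hypothetical sketch only: a command-line override for BATCH_SIZE.
    use Modern::Perl;
    use Getopt::Long;

    my $batch_size = 10;    # same default as use constant BATCH_SIZE => 10
    my ($help);
    GetOptions(
        'h|help'       => \$help,
        'batch_size=i' => \$batch_size,    # hypothetical option name
    ) or die "Error in command line arguments\n";

    # The worker loop would then compare against $batch_size instead of BATCH_SIZE.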
Lines 61-136 (context: try {)

 if ( $conn ) {
     # FIXME cf note in Koha::BackgroundJob about $namespace
     my $namespace = C4::Context->config('memcached_namespace');
-    $conn->subscribe({ destination => sprintf("/queue/%s-%s", $namespace, 'elastic_index'), ack => 'client' });
+    $conn->subscribe(
+        {
+            destination      => sprintf( "/queue/%s-%s", $namespace, 'elastic_index' ),
+            ack              => 'client',
+            'prefetch-count' => 1,
+        }
+    );
 }
 my $indexer = Koha::SearchEngine::Indexer->new({ index => $Koha::SearchEngine::BIBLIOS_INDEX });
 my @jobs = ();
-my @records = ();
-my $frame;
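The subscription gains 'prefetch-count' => 1. Combined with ack => 'client', this standard STOMP header (honoured by RabbitMQ, which Koha uses as its message broker) asks the broker to deliver at most one unacknowledged message at a time, so a worker that dies mid-frame leaves the rest of the queue intact for redelivery. A standalone Net::Stomp sketch of the same subscription, assuming a local broker with default credentials and an example queue name:

    use Modern::Perl;
    use Net::Stomp;

    # Assumed connection details; the worker itself gets its connection
    # from Koha::BackgroundJob->connect instead.
    my $stomp = Net::Stomp->new( { hostname => 'localhost', port => 61613 } );
    $stomp->connect( { login => 'guest', passcode => 'guest' } );
    $stomp->subscribe(
        {
            destination      => '/queue/koha_dev-elastic_index',    # example name
            ack              => 'client',
            'prefetch-count' => 1,    # at most one in-flight, unacked message
        }
    );
    my $frame = $stomp->receive_frame;
    $stomp->ack( { frame => $frame } );    # broker may now deliver the next frame
    $stomp->disconnect;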
Further down the same hunk, the main loop:

 while (1) {
-    local $SIG{ALRM} = sub { commit_records() if scalar @jobs }; # NB: \n required
-    alarm 1;
+
     if ( $conn ) {
-        $frame = $conn->receive_frame;
+        my $frame = $conn->receive_frame;
         if ( !defined $frame ) {
             # maybe log connection problems
             next; # will reconnect automatically
         }

-        my $body = $frame->body;
-        my $args = decode_json($body); # TODO Should this be from_json? Check utf8 flag.<F9>
-
-        # FIXME This means we need to have create the DB entry before
-        # It could work in a first step, but then we will want to handle job that will be created from the message received
-        my $job = Koha::BackgroundJobs->find($args->{job_id});
-        next unless defined $job;
-
-        push @records, @{ $args->{record_ids} };
-        push @jobs, $job;
-
+        my $job;
+        try {
+            my $body = $frame->body;
+            my $args = decode_json($body); # TODO Should this be from_json? Check utf8 flag.
+
+            # FIXME ack early if it's what we decide on 32573
+            $conn->ack( { frame => $frame } );
+
+            # FIXME This means we need to have create the DB entry before
+            # It could work in a first step, but then we will want to handle job that will be created from the message received
+            my $job = Koha::BackgroundJobs->find($args->{job_id});
+            next unless defined $job;
+
+            push @jobs, $job;
+            if ( @jobs >= BATCH_SIZE || !$conn->can_read ) {
+                commit(@jobs);
+                @jobs = ();
+            }
+        } catch {
+            Koha::Logger->get->warn(sprintf "Job and/or frame not processed - %s", $_);
+        } finally {
+            $job->status('failed')->store if $job && @_;
+        };
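Two structural changes here. The one-second SIGALRM timer that drove commit_records() is gone, and each frame is now acked as soon as its body decodes (per the FIXME referencing bug 32573). Jobs accumulate in @jobs and the batch is committed once it reaches BATCH_SIZE or once $conn->can_read reports nothing left to read, which replaces the timer as the trigger for flushing a partial batch; a malformed frame no longer kills the daemon, since it is caught and logged via Koha::Logger. Note that the inner my $job = Koha::BackgroundJobs->find(...) declares a new lexical, so the outer $job consulted by the finally block appears to stay undef. The size-or-idle flush pattern in isolation, as a minimal runnable sketch with no Koha or STOMP dependencies:

    use Modern::Perl;
    use constant BATCH_SIZE => 10;

    my @batch;

    sub commit_batch {
        return unless @batch;
        printf "committing %d item(s)\n", scalar @batch;    # stand-in for commit(@jobs)
        @batch = ();
    }

    # 23 items: two full batches of ten, then a partial batch of three
    for my $item ( 1 .. 23 ) {
        push @batch, $item;
        my $more_waiting = $item < 23;    # stand-in for $conn->can_read
        commit_batch() if @batch >= BATCH_SIZE || !$more_waiting;
    }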
The fallback branch and the end of the loop:

     } else {
-        my $jobs = Koha::BackgroundJobs->search({ status => 'new', queue => 'elastic_index' });
-        while ( my $job = $jobs->next ) {
-            my $args = $job->json->decode($job->data);
-            push @records, @{ $args->{record_ids} };
-            push @jobs, $job;
-        }
+        @jobs = Koha::BackgroundJobs->search(
+            { status => 'new', queue => 'elastic_index' } )->as_list;
+        commit(@jobs);
+        @jobs = ();
         sleep 10;
     }
 }
 $conn->disconnect;
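The no-broker fallback keeps its 10-second poll but now pulls the pending jobs in one go with ->as_list (a plain list rather than an iterator) and routes them through the same commit() as the broker path, instead of duplicating the decode-and-push logic inline. The shape of that fallback, sketched without the Koha objects:

    use Modern::Perl;

    sub fetch_pending_jobs { return () }    # stand-in for the status => 'new' search
    sub commit_jobs        { }              # stand-in for commit(@jobs)

    # Daemon-style loop: poll the database when no broker connection exists
    while (1) {
        my @jobs = fetch_pending_jobs();
        commit_jobs(@jobs) if @jobs;
        sleep 10;    # poll interval
    }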
And the helper subs at the bottom of the script:

-sub commit_records {
-    eval {
-        $indexer->update_index(\@records);
-    };
-    if ( $@ ){
-        warn $@;
-    }
-    foreach my $job (@jobs){
-        $job->status('finished')->store;
-    }
-    @jobs = ();
-    @records = ();
-    $conn->ack( { frame => $frame } ) if $frame; # FIXME depending on success?
-    # Acknowledging the current frame also acknowledges all previously batched frames
-}
-
-sub process_job {
-    my ( $job, $args ) = @_;
-
-    my $pid;
-    if ( $pid = fork ) {
-        wait;
-        return;
-    }
-
-    die "fork failed!" unless defined $pid;
-
-    $job->process( $args );
-
-    exit;
-}
+sub commit {
+    my ( @jobs ) = @_;
+
+    my @records;
+    for my $job ( @jobs ) {
+        my $args = $job->json->decode($job->data);
+        push @records, @{ $args->{record_ids} };
+    }
+
+    try {
+        $indexer->update_index(\@records);
+    } catch {
+        Koha::Logger->get->warn(sprintf "Update of elastic index failed with: %s", $_);
+    };
+
+    Koha::BackgroundJobs->search( { id => [ map { $_->id } @jobs ] } )
+        ->update( { status => 'finished' }, { no_triggers => 1 } );
+}
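commit() replaces both removed subs: it aggregates every batched job's record_ids into a single update_index() call, logs indexing failures via Koha::Logger, and marks the whole batch finished with one UPDATE (the no_triggers option bypasses the per-object store() logic) where commit_records() called store() once per job. The forked process_job() helper was unused in this worker and is dropped. A small sketch of the aggregation step, using plain JSON strings in place of Koha::BackgroundJob rows:

    use Modern::Perl;
    use JSON qw( decode_json );

    # Two fake job payloads shaped like the worker's messages
    my @job_data = (
        '{"record_ids":[1,2,3]}',
        '{"record_ids":[4,5]}',
    );

    my @records;
    for my $data (@job_data) {
        my $args = decode_json($data);
        push @records, @{ $args->{record_ids} };
    }

    say "@records";    # 1 2 3 4 5 -> one update_index() call for the whole batch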