@@ -21,9 +21,12 @@ use Mojo::Base 'Mojolicious::Controller';
 
 use Koha::ERM::EHoldings::Titles;
 use Koha::BackgroundJob::CreateEHoldingsFromBiblios;
+use Koha::BackgroundJob::ImportKBARTFile;
 
 use Scalar::Util qw( blessed );
 use Try::Tiny qw( catch try );
+use MIME::Base64 qw( decode_base64 encode_base64 );
+use POSIX qw( floor );
 
 =head1 API
 
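The new imports serve the endpoint added in the next hunk: MIME::Base64 decodes the KBART file content, which arrives base64-encoded in the JSON request body, and re-encodes it when the file is split into chunks; POSIX's floor is presumably used by the chunk-size helper in Koha::BackgroundJob::ImportKBARTFile. A minimal sketch of a client call, assuming a JSON body with the filename and file_content keys used below; the endpoint path and file name here are placeholders, not taken from this patch:

    use MIME::Base64 qw( encode_base64 );
    use Mojo::UserAgent;

    # Read a local KBART export as raw bytes (hypothetical file name).
    open my $fh, '<:raw', 'titles.tsv' or die "titles.tsv: $!";
    my $raw = do { local $/; <$fh> };
    close $fh;

    my $ua = Mojo::UserAgent->new;
    my $tx = $ua->post(
        'http://localhost:8081/api/v1/erm/eholdings/local/titles/import_kbart' => json => {
            filename     => 'titles.tsv',
            file_content => encode_base64($raw),
        }
    );
    print $tx->result->body;    # {"job_ids":[...]} on success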
@@ -277,4 +280,75 @@ sub import_from_list {
     };
 }
 
+
+=head3 import_from_kbart_file
+
+=cut
+
+sub import_from_kbart_file {
+    my $c = shift or return;
+
+    my $file = $c->req->json;
+
+    return try {
+        my @job_ids;
+        my @invalid_columns;
+        my $max_allowed_packet = C4::Context->dbh->selectrow_array(q{SELECT @@max_allowed_packet});
+        my $file_content       = defined( $file->{file_content} ) ? decode_base64( $file->{file_content} ) : "";
+        $file_content =~ s/\n/\r/g;
+        my @lines          = split /\r/, $file_content;
+        my @column_headers = split /\t/, $lines[0];
+        shift @lines;    # Remove headers row
+        my @remove_null_lines = grep $_ ne '', @lines;
+
+        # Check that the column headers in the file match the standardised KBART phase II columns
+        # If not, return a warning before the job is queued
+        my @valid_headers = Koha::BackgroundJob::ImportKBARTFile::get_valid_headers();
+        foreach my $header (@column_headers) {
+            if ( !grep { $_ eq $header } @valid_headers ) {
+                push @invalid_columns, $header;
+            }
+        }
+        return $c->render(
+            status  => 201,
+            openapi => { invalid_columns => \@invalid_columns, valid_columns => \@valid_headers }
+        ) if scalar(@invalid_columns) > 0;
+
+        my $file_size = length($file_content);
+
+        # If the file is too large, we can break the file into smaller chunks and enqueue one job per chunk
+        if ( $file_size > $max_allowed_packet ) {
+
+            my $max_number_of_lines = Koha::BackgroundJob::ImportKBARTFile::calculate_chunked_file_size(
+                $file_size, $max_allowed_packet,
+                scalar(@remove_null_lines)
+            );
+            my @chunked_files;
+            push @chunked_files, [ splice @remove_null_lines, 0, $max_number_of_lines ] while @remove_null_lines;
+
+            foreach my $chunk (@chunked_files) {
+                unshift( @{$chunk}, join( "\t", @column_headers ) );
+                my $chunked_file = {
+                    filename     => $file->{filename},
+                    file_content => encode_base64( join( "\r", @{$chunk} ) )
+                };
+                my $params = { file => $chunked_file };
+                my $job_id = Koha::BackgroundJob::ImportKBARTFile->new->enqueue($params);
+                push @job_ids, $job_id;
+            }
+        } else {
+            my $params = { file => $file };
+            my $job_id = Koha::BackgroundJob::ImportKBARTFile->new->enqueue($params);
+            push @job_ids, $job_id;
+        }
+
+        return $c->render(
+            status  => 201,
+            openapi => { job_ids => \@job_ids }
+        );
+    } catch {
+        $c->unhandled_exception($_);
+    };
+}
+
 1;
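The chunking branch relies on Koha::BackgroundJob::ImportKBARTFile::calculate_chunked_file_size to decide how many lines fit in one enqueued job without exceeding MySQL's max_allowed_packet. That helper is not part of this diff; the following is a hypothetical sketch of the arithmetic it presumably performs, derived only from its call signature above (average line size, then lines per packet, rounded down, which would also explain the POSIX floor import):

    use POSIX qw( floor );

    # Illustration only; the real helper lives in Koha::BackgroundJob::ImportKBARTFile.
    sub calculate_chunked_file_size {
        my ( $file_size, $max_allowed_packet, $number_of_lines ) = @_;

        my $average_line_size = $file_size / $number_of_lines;           # bytes per line
        my $lines_possible    = $max_allowed_packet / $average_line_size;
        return floor($lines_possible);                                   # whole lines per chunk
    }

Each chunk is then re-prefixed with the header row and re-encoded before enqueueing, so every background job receives a self-contained KBART file.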