@@ -21,9 +21,12 @@ use Mojo::Base 'Mojolicious::Controller';
 
 use Koha::ERM::EHoldings::Titles;
 use Koha::BackgroundJob::CreateEHoldingsFromBiblios;
+use Koha::BackgroundJob::ImportKBARTFile;
 
 use Scalar::Util qw( blessed );
 use Try::Tiny qw( catch try );
+use MIME::Base64 qw( decode_base64 encode_base64 );
+use POSIX qw( floor );
 
 =head1 API
 
@@ -275,3 +278,36 @@ sub import_from_list {
     };
 }
 
+
+=head3 import_from_kbart_file
+
+=cut
+
+sub import_from_kbart_file {
+    my $c = shift or return;
+
+    my $file = $c->req->json;
+
+    return try {
+        my @job_ids;
+        my @invalid_columns;
+        my $max_allowed_packet = C4::Context->dbh->selectrow_array(q{SELECT @@max_allowed_packet});
+        my $file_content = defined( $file->{file_content} ) ? decode_base64( $file->{file_content} ) : "";
+        $file_content =~ s/\n/\r/g;
+        my @lines          = split /\r/, $file_content;
+        my @column_headers = split /\t/, $lines[0];
+        shift @lines;    # Remove headers row
+        my @remove_null_lines = grep $_ ne '', @lines;
+
+        # Check that the column headers in the file match the standardised KBART phase II columns
+        # If not, return a warning before the job is queued
+        my @valid_headers = Koha::BackgroundJob::ImportKBARTFile::get_valid_headers();
+        foreach my $header (@column_headers) {
+            if ( !grep { $_ eq $header } @valid_headers ) {
+                push @invalid_columns, $header;
+            }
+        }
+        return $c->render(
+            status  => 201,
+            openapi => { invalid_columns => \@invalid_columns, valid_columns => \@valid_headers }
+        ) if scalar(@invalid_columns) > 0;
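
get_valid_headers() is called above but defined in Koha::BackgroundJob::ImportKBARTFile, which is not part of these hunks. A minimal sketch of what it plausibly returns, assuming the standardised KBART Phase II (NISO RP-9-2014) field list; the authoritative list is whatever the job module actually ships:

    # Hypothetical sketch only -- the real list lives in
    # Koha::BackgroundJob::ImportKBARTFile and may differ.
    sub get_valid_headers {
        return (
            'publication_title',      'print_identifier',
            'online_identifier',      'date_first_issue_online',
            'num_first_vol_online',   'num_first_issue_online',
            'date_last_issue_online', 'num_last_vol_online',
            'num_last_issue_online',  'title_url',
            'first_author',           'title_id',
            'embargo_info',           'coverage_depth',
            'notes',                  'publisher_name',
            'publication_type',       'date_monograph_published_print',
            'date_monograph_published_online', 'monograph_volume',
            'monograph_edition',               'first_editor',
            'parent_publication_title_id',     'preceding_publication_title_id',
            'access_type',
        );
    }
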
@@ -277,0 +314,28 @@
+
+        my $file_size = length($file_content);
+
+        # If the file is too large, we can break the file into smaller chunks and enqueue one job per chunk
+        if ( $file_size > $max_allowed_packet ) {
+
+            my $max_number_of_lines = Koha::BackgroundJob::ImportKBARTFile::calculate_chunked_file_size(
+                $file_size, $max_allowed_packet,
+                scalar(@remove_null_lines)
+            );
+            my @chunked_files;
+            push @chunked_files, [ splice @remove_null_lines, 0, $max_number_of_lines ] while @remove_null_lines;
+
+            foreach my $chunk (@chunked_files) {
+                unshift( @{$chunk}, join( "\t", @column_headers ) );
+                my $chunked_file = {
+                    filename     => $file->{filename},
+                    file_content => encode_base64( join( "\r", @{$chunk} ) )
+                };
+                my $params = { file => $chunked_file };
+                my $job_id = Koha::BackgroundJob::ImportKBARTFile->new->enqueue($params);
+                push @job_ids, $job_id;
+            }
+        } else {
+            my $params = { file => $file };
+            my $job_id = Koha::BackgroundJob::ImportKBARTFile->new->enqueue($params);
+            push @job_ids, $job_id;
+        }
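
calculate_chunked_file_size() is likewise defined in the job module rather than here. The splice-while loop above only needs a per-chunk line budget; a minimal sketch of one way to derive it, assuming a roughly even line length (names and the safety margin are illustrative, not the module's actual code):

    use POSIX qw( floor );

    # Hypothetical sketch: pick a per-chunk line count so that each
    # chunk's payload stays under the server's max_allowed_packet.
    sub calculate_chunked_file_size {
        my ( $file_size, $max_allowed_packet, $number_of_lines ) = @_;

        my $average_line_size = $file_size / $number_of_lines;

        # Leave headroom for base64 inflation (~33%) and job metadata.
        return floor( ( $max_allowed_packet / $average_line_size ) * 0.8 );
    }
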
@@ -278,1 +342,11 @@
+
+        return $c->render(
+            status  => 201,
+            openapi => { job_ids => \@job_ids }
+        );
+    } catch {
+        $c->unhandled_exception($_);
+    };
+}
+
 1;
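
For manual testing, something along these lines should exercise the new endpoint. The URL is a placeholder: the real path and authentication are defined in the OpenAPI spec, which is not part of these hunks. The payload keys (filename, plus file_content as base64) match what the controller reads from $c->req->json:

    use Mojo::UserAgent;
    use MIME::Base64 qw( encode_base64 );

    # A two-line KBART TSV: header row plus one title.
    my $kbart = join "\n",
        "publication_title\tprint_identifier\tonline_identifier",
        "Some Journal\t1234-5678\t2345-6789";

    my $ua = Mojo::UserAgent->new;

    # Placeholder URL -- substitute the real API path and credentials.
    my $tx = $ua->post(
        'http://koha.example.org/api/v1/erm/eholdings/local/titles/import_kbart' => json => {
            filename     => 'titles.tsv',
            file_content => encode_base64($kbart),
        }
    );
    print $tx->result->body;

Per the controller above, a 201 response carries either job_ids, or, when the header row is not valid KBART, invalid_columns alongside valid_columns.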