@@ -21,9 +21,12 @@ use Mojo::Base 'Mojolicious::Controller';
 
 use Koha::ERM::EHoldings::Titles;
 use Koha::BackgroundJob::CreateEHoldingsFromBiblios;
+use Koha::BackgroundJob::ImportKBARTFile;
 
 use Scalar::Util qw( blessed );
 use Try::Tiny qw( catch try );
+use MIME::Base64 qw( decode_base64 encode_base64 );
+use POSIX qw( floor );
 
 =head1 API
 
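The MIME::Base64 imports mirror the wire format of the new route:
import_from_kbart_file (below) expects a JSON object whose file_content
member is the Base64-encoded KBART tab-separated file. A minimal
client-side sketch of preparing that payload (the file path is
hypothetical, and the endpoint URL is whatever the matching OpenAPI spec
maps to this controller):

    #!/usr/bin/perl
    use Modern::Perl;
    use MIME::Base64 qw( encode_base64 );
    use JSON         qw( encode_json );

    # Read a KBART TSV export from disk and wrap it in the JSON body the
    # controller expects: { filename, file_content }.
    my $path = 'standard_import.tsv';    # hypothetical local file
    open my $fh, '<:encoding(UTF-8)', $path or die "Cannot open $path: $!";
    my $content = do { local $/; <$fh> };
    close $fh;

    say encode_json(
        {
            filename     => $path,
            file_content => encode_base64($content),
        }
    );    # POST this body to the import route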
@@ -260,4 +263,75 @@ sub import_from_list {
     };
 }
 
+
+=head3 import_from_kbart_file
+
+=cut
+
+sub import_from_kbart_file {
+    my $c = shift or return;
+
+    my $file = $c->req->json;
+
+    return try {
+        my @job_ids;
+        my @invalid_columns;
+        my $max_allowed_packet = C4::Context->dbh->selectrow_array(q{SELECT @@max_allowed_packet});
+        my $file_content = defined( $file->{file_content} ) ? decode_base64( $file->{file_content} ) : "";
+        $file_content =~ s/\n/\r/g;
+        my @lines = split /\r/, $file_content;
+        my @column_headers = split /\t/, $lines[0];
+        shift @lines;    # Remove headers row
+        my @remove_null_lines = grep $_ ne '', @lines;
+
+        # Check that the column headers in the file match the standardised KBART phase II columns
+        # If not, return a warning before the job is queued
+        my @valid_headers = Koha::BackgroundJob::ImportKBARTFile::get_valid_headers();
+        foreach my $header (@column_headers) {
+            if ( !grep { $_ eq $header } @valid_headers ) {
+                push @invalid_columns, $header;
+            }
+        }
+        return $c->render(
+            status  => 201,
+            openapi => { invalid_columns => \@invalid_columns, valid_columns => \@valid_headers }
+        ) if scalar(@invalid_columns) > 0;
+
+        my $file_size = length($file_content);
+
+        # If the file is too large, we can break the file into smaller chunks and enqueue one job per chunk
+        if ( $file_size > $max_allowed_packet ) {
+
+            my $max_number_of_lines = Koha::BackgroundJob::ImportKBARTFile::calculate_chunked_file_size(
+                $file_size, $max_allowed_packet,
+                scalar(@remove_null_lines)
+            );
+            my @chunked_files;
+            push @chunked_files, [ splice @remove_null_lines, 0, $max_number_of_lines ] while @remove_null_lines;
+
+            foreach my $chunk (@chunked_files) {
+                unshift( @{$chunk}, join( "\t", @column_headers ) );
+                my $chunked_file = {
+                    filename     => $file->{filename},
+                    file_content => encode_base64( join( "\r", @{$chunk} ) )
+                };
+                my $params = { file => $chunked_file };
+                my $job_id = Koha::BackgroundJob::ImportKBARTFile->new->enqueue($params);
+                push @job_ids, $job_id;
+            }
+        } else {
+            my $params = { file => $file };
+            my $job_id = Koha::BackgroundJob::ImportKBARTFile->new->enqueue($params);
+            push @job_ids, $job_id;
+        }
+
+        return $c->render(
+            status  => 201,
+            openapi => { job_ids => \@job_ids }
+        );
+    } catch {
+        $c->unhandled_exception($_);
+    };
+}
+
 1;
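The chunking branch above delegates the per-chunk line count to
Koha::BackgroundJob::ImportKBARTFile::calculate_chunked_file_size, which is
not part of this hunk. A minimal sketch of such a helper, assuming it is
where the newly imported POSIX::floor is used and that it works from the
average line length:

    use POSIX qw( floor );

    # Sketch only: the real helper lives in Koha::BackgroundJob::ImportKBARTFile
    # and may differ in detail.
    sub calculate_chunked_file_size {
        my ( $file_size, $max_allowed_packet, $number_of_lines ) = @_;

        # Average bytes per line, then how many such lines fit in one packet.
        my $average_line_size = $file_size / $number_of_lines;
        my $lines_possible    = $max_allowed_packet / $average_line_size;

        # Round down so no chunk's payload exceeds max_allowed_packet.
        return floor($lines_possible);
    }

Rounding down keeps each chunk's enqueued payload under the MySQL
max_allowed_packet limit, so the INSERT that enqueues each job succeeds;
the caller then receives one job id per chunk in the 201 response.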