@@ -21,9 +21,12 @@ use Mojo::Base 'Mojolicious::Controller';
 
 use Koha::ERM::EHoldings::Titles;
 use Koha::BackgroundJob::CreateEHoldingsFromBiblios;
+use Koha::BackgroundJob::ImportKBARTFile;
 
 use Scalar::Util qw( blessed );
 use Try::Tiny qw( catch try );
+use MIME::Base64 qw( decode_base64 encode_base64 );
+use POSIX qw( floor );
 
 =head1 API
 
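The new imports line up with the endpoint added below: MIME::Base64 decodes the uploaded file content out of the JSON body (and re-encodes it when chunking), while POSIX's floor feeds the chunk-size arithmetic. As a minimal sketch of the encode/decode round trip, with an invented two-line KBART payload (the content here is illustrative only, not from the patch):

    use MIME::Base64 qw( decode_base64 encode_base64 );

    # Encode a KBART TSV the way a client would place it in file_content ...
    my $raw     = "publication_title\tprint_identifier\nSome Journal\t1234-5678\n";
    my $encoded = encode_base64($raw);

    # ... then decode and normalise line endings exactly as
    # import_from_kbart_file does before splitting into rows.
    my $decoded = decode_base64($encoded);
    $decoded =~ s/\n/\r/g;
    my @rows = grep { $_ ne '' } split /\r/, $decoded;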
@@ -259,4 +262,75 @@ sub import_from_list {
     };
 }
 
+
+=head3 import_from_kbart_file
+
+=cut
+
+sub import_from_kbart_file {
+    my $c = shift or return;
+
+    my $file = $c->req->json;
+
+    return try {
+        my @job_ids;
+        my @invalid_columns;
+        my $max_allowed_packet = C4::Context->dbh->selectrow_array(q{SELECT @@max_allowed_packet});
+        my $file_content = defined( $file->{file_content} ) ? decode_base64( $file->{file_content} ) : "";
+        $file_content =~ s/\n/\r/g;
+        my @lines          = split /\r/, $file_content;
+        my @column_headers = split /\t/, $lines[0];
+        shift @lines;    # Remove headers row
+        my @remove_null_lines = grep $_ ne '', @lines;
+
+        # Check that the column headers in the file match the standardised KBART phase II columns
+        # If not, return a warning before the job is queued
+        my @valid_headers = Koha::BackgroundJob::ImportKBARTFile::get_valid_headers();
+        foreach my $header (@column_headers) {
+            if ( !grep { $_ eq $header } @valid_headers ) {
+                push @invalid_columns, $header;
+            }
+        }
+        return $c->render(
+            status  => 201,
+            openapi => { invalid_columns => \@invalid_columns, valid_columns => \@valid_headers }
+        ) if scalar(@invalid_columns) > 0;
+
+        my $file_size = length($file_content);
+
+        # If the file is too large, we can break the file into smaller chunks and enqueue one job per chunk
+        if ( $file_size > $max_allowed_packet ) {
+
+            my $max_number_of_lines = Koha::BackgroundJob::ImportKBARTFile::calculate_chunked_file_size(
+                $file_size, $max_allowed_packet,
+                scalar(@remove_null_lines)
+            );
+            my @chunked_files;
+            push @chunked_files, [ splice @remove_null_lines, 0, $max_number_of_lines ] while @remove_null_lines;
+
+            foreach my $chunk (@chunked_files) {
+                unshift( @{$chunk}, join( "\t", @column_headers ) );
+                my $chunked_file = {
+                    filename     => $file->{filename},
+                    file_content => encode_base64( join( "\r", @{$chunk} ) )
+                };
+                my $params = { file => $chunked_file };
+                my $job_id = Koha::BackgroundJob::ImportKBARTFile->new->enqueue($params);
+                push @job_ids, $job_id;
+            }
+        } else {
+            my $params = { file => $file };
+            my $job_id = Koha::BackgroundJob::ImportKBARTFile->new->enqueue($params);
+            push @job_ids, $job_id;
+        }
+
+        return $c->render(
+            status  => 201,
+            openapi => { job_ids => \@job_ids }
+        );
+    } catch {
+        $c->unhandled_exception($_);
+    };
+}
+
 1;
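For files larger than max_allowed_packet, the controller leans on Koha::BackgroundJob::ImportKBARTFile::calculate_chunked_file_size to decide how many rows fit per background job. That helper sits outside this hunk; the sketch below is a plausible reading of it inferred from the call site and the POSIX floor import, not the module's actual code:

    use POSIX qw( floor );

    # Assumed shape of the helper: average bytes per row, then how many
    # whole rows fit inside one max_allowed_packet-sized chunk.
    sub calculate_chunked_file_size {
        my ( $file_size, $max_allowed_packet, $number_of_rows ) = @_;

        my $average_line_size = $file_size / $number_of_rows;

        # floor() rounds down so a chunk of this many rows stays under
        # the packet limit.
        return floor( $max_allowed_packet / $average_line_size );
    }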
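On the client side, the endpoint expects a JSON body carrying filename and a base64-encoded file_content, and answers 201 with either job_ids or, when header validation fails, invalid_columns plus valid_columns. A hedged Mojo::UserAgent sketch follows; the route itself is defined in the OpenAPI spec rather than in this patch, so the URL is deliberately left as an environment-supplied placeholder:

    use Mojo::File qw( path );
    use Mojo::UserAgent;
    use MIME::Base64 qw( encode_base64 );

    # Placeholder: substitute the real route from the ERM OpenAPI spec.
    my $url = $ENV{KBART_IMPORT_URL} // die 'set KBART_IMPORT_URL first';

    my $tx = Mojo::UserAgent->new->post(
        $url => json => {
            filename     => 'titles.tsv',
            file_content => encode_base64( path('titles.tsv')->slurp ),
        }
    );

    # 201 either way; the body carries job_ids on success, or
    # invalid_columns/valid_columns when the headers fail validation.
    print $tx->res->code, ' ', $tx->res->body, "\n";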