
(-)a/Koha/BackgroundJob/ImportKBARTFile.pm (-45 / +193 lines)
Lines 16-25 package Koha::BackgroundJob::ImportKBARTFile;
 # along with Koha; if not, see <http://www.gnu.org/licenses>.
 
 use Modern::Perl;
-use JSON qw( decode_json encode_json );
-use Try::Tiny qw( catch try );
+use JSON         qw( decode_json encode_json );
+use Try::Tiny    qw( catch try );
 use MIME::Base64 qw( decode_base64 );
-use POSIX qw( floor );
+use POSIX        qw( floor );
 
 use C4::Context;
 
Lines 65-101 sub process {
     my $titles_imported  = 0;
     my $duplicate_titles = 0;
     my $failed_imports   = 0;
-    my $total_lines;
-    my $file_name = $args->{file}->{filename};
+    my $total_rows;
+    my $file_name = $args->{filename};
     my $report    = {
         duplicates_found => undef,
         titles_imported  => undef,
         file_name        => $file_name,
-        total_lines      => undef,
+        total_rows       => undef,
         failed_imports   => undef
     };
 
     try {
-        my $file = $args->{file};
-        my $package_id = $args->{package_id};
-        my ( $column_headers, $lines ) = format_file($file);
+        my $column_headers = $args->{column_headers};
+        my $rows           = $args->{rows};
+        my $package_id     = $args->{package_id};
 
-        if ( scalar( @{$lines} ) == 0 ) {
+        if ( scalar( @{$rows} ) == 0 ) {
             push @messages, {
                 code          => 'job_failed',
                 type          => 'error',
-                error_message => 'No valid lines were found in this file. Please check the file formatting.',
+                error_message => 'No valid rows were found in this file. Please check the file formatting.',
             };
             $self->status('failed')->store;
         }
 
-        $self->size( scalar( @{$lines} ) )->store;
-        $total_lines = scalar( @{$lines} );
+        $self->size( scalar( @{$rows} ) )->store;
+        $total_rows = scalar( @{$rows} );
 
-        foreach my $line ( @{$lines} ) {
-            next if !$line;
-            my $new_title   = create_title_hash_from_line_data( $line, $column_headers );
-            my $title_match = Koha::ERM::EHoldings::Titles->search( { external_id => $new_title->{title_id} } )->count;
+        foreach my $row ( @{$rows} ) {
+            next if !$row;
+            my $new_title   = create_title_hash_from_line_data( $row, $column_headers );
+            my $title_match = check_for_matching_title($new_title);
 
             if ($title_match) {
                 $duplicate_titles++;
Lines 119-127 sub process {
                         $failed_imports++;
                     } else {
                         my $imported_title = Koha::ERM::EHoldings::Title->new($formatted_title)->store;
-                        my $title_id = $imported_title->title_id;
-                        Koha::ERM::EHoldings::Resource->new( { title_id => $title_id, package_id => $package_id } )
-                            ->store;
+                        create_linked_resource(
+                            {
+                                title      => $imported_title,
+                                package_id => $package_id
+                            }
+                        );
 
                         # No need to add a message for a successful import,
                         # files could have 1000s of titles which will lead to lots of messages in background_job->data
Lines 132-138 sub process {
                     push @messages, {
                         code          => 'title_failed',
                         type          => 'error',
-                        error_message => $_->{msg},
+                        error_message => $_->{msg} || "Please check your file",
                         title         => $new_title->{publication_title}
                     }
                 };
Lines 142-148 sub process {
 
         $report->{duplicates_found} = $duplicate_titles;
         $report->{titles_imported}  = $titles_imported;
-        $report->{total_lines}      = $total_lines;
+        $report->{total_rows}       = $total_rows;
         $report->{failed_imports}   = $failed_imports;
 
         my $data = $self->decoded_data;
Lines 167-173 Enqueue the new job
 sub enqueue {
     my ( $self, $args ) = @_;
 
-    return unless exists $args->{file};
+    return unless exists $args->{column_headers};
 
     $self->SUPER::enqueue(
         {
Lines 194-200 sub format_title {
     delete $title->{title_id};
 
     # Some files appear to use coverage_notes instead of "notes" as in the KBART standard
-    if ( $title->{coverage_notes} ) {
+    if ( exists $title->{coverage_notes} ) {
         $title->{notes} = $title->{coverage_notes};
         delete $title->{coverage_notes};
     }
Lines 202-224 sub format_title {
     return $title;
 }
 
-=head3 format_file
+=head3 read_file
 
-Formats a file to provide report headers and lines to be processed
+Reads a file to provide report headers and lines to be processed
 
 =cut
 
-sub format_file {
+sub read_file {
     my ($file) = @_;
 
-    my $file_content = decode_base64( $file->{file_content} );
-    $file_content =~ s/\n/\r/g;
-    my @lines          = split /\r/, $file_content;
-    my @column_headers = split /\t/, $lines[0];
-    shift @lines;    # Remove headers row
-    my @remove_null_lines = grep $_ ne '', @lines;
+    my $file_content = defined( $file->{file_content} ) ? decode_base64( $file->{file_content} ) : "";
+    my $delimiter    = $file->{filename} =~ /\.tsv$/    ? "\t"                                   : ",";
+    my $quote_char   = $file->{filename} =~ /\.tsv$/    ? ""                                     : '"';
 
-    return ( \@column_headers, \@remove_null_lines );
+    open my $fh, "<", \$file_content or die;
+    my $csv = Text::CSV_XS->new(
+        {
+            sep_char           => $delimiter,
+            quote_char         => $quote_char,
+            binary             => 1,
+            allow_loose_quotes => 1
+        }
+    );
+    my $headers_to_check = $csv->getline($fh);
+    my $column_headers   = rescue_EBSCO_files($headers_to_check);
+    my $lines            = $csv->getline_all( $fh, 0 );
+
+    my ( $cde, $str, $pos ) = $csv->error_diag();
+    my $error = $cde ? "$cde, $str, $pos" : "";
+    warn $error if $error;
+
+    close($fh);
+
+    return ( $column_headers, $lines, $error );
 }
 
 =head3 create_title_hash_from_line_data
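
Note: a minimal sketch of how the reworked read_file is driven (mirroring the updated tests further down; the inline TSV sample is a made-up stand-in, not content from the patch):

    use MIME::Base64 qw( encode_base64 );

    # read_file expects a hashref carrying a filename (its extension selects
    # "\t" or "," as the separator) plus base64-encoded file content.
    my $file = {
        filename     => 'Test_file.tsv',
        file_content => encode_base64("publication_title\ttitle_id\nNature Plants\t4aaa7\n"),
    };
    my ( $column_headers, $rows, $error ) = Koha::BackgroundJob::ImportKBARTFile::read_file($file);

    # $column_headers: arrayref of header strings; $rows: arrayref of arrayrefs
    # (one per title row); $error: "" when Text::CSV_XS reported no problem.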
Lines 228-243 Takes a line and creates a hash of the values mapped to the column headings
 =cut
 
 sub create_title_hash_from_line_data {
-    my ( $line, $column_headers ) = @_;
+    my ( $row, $column_headers ) = @_;
 
     my %new_title;
-    my @values = split /\t/, $line;
 
-    @new_title{ @{$column_headers} } = @values;
+    @new_title{ @{$column_headers} } = @$row;
+
+    # If the file has been converted from CSV to TSV for import, then some titles containing commas will be enclosed in ""
+    my $first_char = substr( $new_title{publication_title}, 0, 1 );
+    my $last_char  = substr( $new_title{publication_title}, -1 );
+    if ( $first_char eq '"' && $last_char eq '"' ) {
+        $new_title{publication_title} =~ s/^"|"$//g;
+    }
 
     return \%new_title;
 }
 
+=head3 check_for_matching_title
+
+Checks whether this title already exists to avoid duplicates
+
+=cut
+
+sub check_for_matching_title {
+    my ($title) = @_;
+
+    my $match_parameters = {};
+    $match_parameters->{print_identifier}  = $title->{print_identifier}  if $title->{print_identifier};
+    $match_parameters->{online_identifier} = $title->{online_identifier} if $title->{online_identifier};
+
+    # Use external_id in case title exists for a different provider, we want to add it for the new provider
+    $match_parameters->{external_id} = $title->{title_id} if $title->{title_id};
+
+    # If no match parameters are provided in the file we should add the new title
+    return 0 if !%$match_parameters;
+
+    my $title_match = Koha::ERM::EHoldings::Titles->search($match_parameters)->count;
+
+    return $title_match;
+}
+
+=head3 create_linked_resource
+
+Creates a resource for a newly stored title.
+
+=cut
+
+sub create_linked_resource {
+    my ($args) = @_;
+
+    my $title      = $args->{title};
+    my $package_id = $args->{package_id};
+
+    my $title_id = $title->title_id;
+    my ( $date_first_issue_online, $date_last_issue_online ) = get_first_and_last_issue_dates($title);
+    my $resource = Koha::ERM::EHoldings::Resource->new(
+        {
+            title_id   => $title_id,
+            package_id => $package_id,
+            started_on => $date_first_issue_online,
+            ended_on   => $date_last_issue_online,
+        }
+    )->store;
+
+    return;
+}
+
+=head3 get_first_and_last_issue_dates
+
+Gets and formats a date for storing on the resource. Dates can come from files in YYYY, YYYY-MM or YYYY-MM-DD format
+
+=cut
+
+sub get_first_and_last_issue_dates {
+    my ($title) = @_;
+
+    return ( undef, undef ) if ( !$title->date_first_issue_online && !$title->date_last_issue_online );
+
+    my $date_first_issue_online =
+          $title->date_first_issue_online =~ /^\d{4}((-\d{2}-\d{2}$|-\d{2}$)|$)$/
+        ? $title->date_first_issue_online
+        : undef;
+    my $date_last_issue_online =
+        $title->date_last_issue_online =~ /^\d{4}((-\d{2}-\d{2}$|-\d{2}$)|$)$/ ? $title->date_last_issue_online : undef;
+
+    $date_first_issue_online = $date_first_issue_online . '-01-01'
+        if $date_first_issue_online && $date_first_issue_online =~ /^\d{4}$/;
+    $date_last_issue_online = $date_last_issue_online . '-01-01'
+        if $date_last_issue_online && $date_last_issue_online =~ /^\d{4}$/;
+    $date_first_issue_online = $date_first_issue_online . '-01'
+        if $date_first_issue_online && $date_first_issue_online =~ /^\d{4}-\d{2}$/;
+    $date_last_issue_online = $date_last_issue_online . '-01'
+        if $date_last_issue_online && $date_last_issue_online =~ /^\d{4}-\d{2}$/;
+
+    return ( $date_first_issue_online, $date_last_issue_online );
+}
+
 =head3 get_valid_headers
 
 Returns a list of permitted headers in a KBART phase II file
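
Note: the date handling added in get_first_and_last_issue_dates can be restated as a standalone snippet; the regexes are the ones from the patch, the input values are hypothetical:

    for my $date ( '2015', '2015-01', '2015-01-01', '01/2015' ) {
        my $norm = $date =~ /^\d{4}((-\d{2}-\d{2}$|-\d{2}$)|$)$/ ? $date : undef;
        $norm .= '-01-01' if $norm && $norm =~ /^\d{4}$/;          # YYYY    -> YYYY-01-01
        $norm .= '-01'    if $norm && $norm =~ /^\d{4}-\d{2}$/;    # YYYY-MM -> YYYY-MM-01
        print( ( $norm // 'undef' ), "\n" );    # 2015-01-01, 2015-01-01, 2015-01-01, undef
    }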
Lines 275-294 sub get_valid_headers {
     );
 }
 
-=head3 calculate_chunked_file_size
+=head3 calculate_chunked_params_size
 
 Calculates average line size to work out how many lines to chunk a large file into
-Knocks 10% off the final result to give some margin for error
+Uses only 75% of the max_allowed_packet as an upper limit
+
+=cut
+
+sub calculate_chunked_params_size {
+    my ( $params_size, $max_allowed_packet, $number_of_rows ) = @_;
+
+    my $average_line_size = $params_size / $number_of_rows;
+    my $lines_possible    = ( $max_allowed_packet * 0.75 ) / $average_line_size;
+    my $rounded_value     = floor($lines_possible);
+    return $rounded_value;
+}
+
+=head3 is_file_too_large
+
+Calculates the final size of the background job object that will need storing to check if we exceed the max_allowed_packet
+
+=cut
+
+sub is_file_too_large {
+    my ( $params_to_store, $max_allowed_packet ) = @_;
+
+    my $json           = JSON->new->utf8(0);
+    my $encoded_params = $json->encode($params_to_store);
+    my $params_size    = length $encoded_params;
+
+    # A lot more than just the params are stored in the background job table and this is difficult to calculate
+    # We should allow for no more than 75% of the max_allowed_packet to be made up of the job params to avoid db conflicts
+    return {
+        file_too_large => 1,
+        params_size    => $params_size
+    } if $params_size > ( $max_allowed_packet * 0.75 );
+
+    return {
+        file_too_large => 0,
+        params_size    => $params_size
+    };
+}
+
+=head3 rescue_EBSCO_files
+
+EBSCO have an incorrect spelling of "preceding_publication_title_id" in all of their KBART files ("preceeding" instead of "preceding").
+This is very annoying because it means all of their KBART files fail to import using the current methodology.
+There is no simple way of finding out who the vendor is before importing so all KBART files from any vendor are going to have to be checked for this spelling and corrected.
 
 =cut
 
-sub calculate_chunked_file_size {
-    my ( $file_size, $max_allowed_packet, $number_of_lines ) = @_;
+sub rescue_EBSCO_files {
+    my ($column_headers) = @_;
+
+    my ($index) = grep { @$column_headers[$_] eq 'preceeding_publication_title_id' } ( 0 .. @$column_headers - 1 );
+    @$column_headers[$index] = 'preceding_publication_title_id' if $index;
 
-    my $average_line_size = $file_size / $number_of_lines;
-    my $lines_possible    = $max_allowed_packet / $average_line_size;
-    my $moderated_value   = floor( $lines_possible * 0.9 );
-    return $moderated_value;
+    return $column_headers;
 }
 
 1;
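
Note: a worked example of the new chunking maths, using the same figures as the updated unit tests. With params totalling 500000 bytes over 50000 rows the average row size is 10 bytes; 75% of a 100000 byte max_allowed_packet leaves 75000 bytes, so floor( 75000 / 10 ) = 7500 rows per chunk:

    use POSIX qw( floor );

    my ( $params_size, $max_allowed_packet, $number_of_rows ) = ( 500000, 100000, 50000 );
    my $average_row_size = $params_size / $number_of_rows;                               # 10
    my $rows_per_chunk   = floor( ( $max_allowed_packet * 0.75 ) / $average_row_size );
    print "$rows_per_chunk\n";                                                           # 7500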
(-)a/Koha/REST/V1/ERM/EHoldings/Titles/Local.pm (-27 / +39 lines)
Lines 27-32 use Scalar::Util qw( blessed );
 use Try::Tiny qw( catch try );
 use MIME::Base64 qw( decode_base64 encode_base64 );
 use POSIX qw( floor );
+use Text::CSV_XS;
 
 =head1 API
 
Lines 287-343 sub import_from_kbart_file {
     my $c = shift or return;
 
     my $import_data = $c->req->json;
-    my $file = $import_data->{file};
-    my $package_id = $import_data->{package_id};
+    my $file        = $import_data->{file};
+    my $package_id  = $import_data->{package_id};
 
     return try {
         my @job_ids;
         my @invalid_columns;
         my $max_allowed_packet = C4::Context->dbh->selectrow_array(q{SELECT @@max_allowed_packet});
-        my $file_content       = defined( $file->{file_content} ) ? decode_base64( $file->{file_content} ) : "";
-        $file_content =~ s/\n/\r/g;
-        my @lines          = split /\r/, $file_content;
-        my @column_headers = split /\t/, $lines[0];
-        shift @lines;    # Remove headers row
-        my @remove_null_lines = grep $_ ne '', @lines;
+
+        # Check if file is in TSV or CSV format and send an error back if not
+        if ( $file->{filename} !~ /\.csv$/ && $file->{filename} !~ /\.tsv$/ ) {
+            return $c->render(
+                status  => 201,
+                openapi => { invalid_filetype => 1 }
+            );
+        }
+
+        my ( $column_headers, $rows ) = Koha::BackgroundJob::ImportKBARTFile::read_file($file);
 
         # Check that the column headers in the file match the standardised KBART phase II columns
-        # If not, return a warning before the job is queued
+        # If not, return a warning
         my @valid_headers = Koha::BackgroundJob::ImportKBARTFile::get_valid_headers();
-        foreach my $header (@column_headers) {
+        foreach my $header (@$column_headers) {
             if ( !grep { $_ eq $header } @valid_headers ) {
+                $header = 'Empty column - please remove' if $header eq '';
                 push @invalid_columns, $header;
             }
         }
         return $c->render(
             status  => 201,
-            openapi => { invalid_columns => \@invalid_columns, valid_columns => \@valid_headers }
+            openapi => { invalid_columns => \@invalid_columns, valid_columns => \@valid_headers, invalid_filetype => 0 }
         ) if scalar(@invalid_columns) > 0;
 
-        my $file_size = length($file_content);
+        my $params = {
+            column_headers => $column_headers,
+            rows           => $rows,
+            package_id     => $package_id,
+            file_name      => $file->{filename}
+        };
+        my $outcome = Koha::BackgroundJob::ImportKBARTFile::is_file_too_large( $params, $max_allowed_packet );
 
         # If the file is too large, we can break the file into smaller chunks and enqueue one job per chunk
-        if ( $file_size > $max_allowed_packet ) {
-
-            my $max_number_of_lines = Koha::BackgroundJob::ImportKBARTFile::calculate_chunked_file_size(
-                $file_size, $max_allowed_packet,
-                scalar(@remove_null_lines)
+        if ( $outcome->{file_too_large} ) {
+            my $max_number_of_rows = Koha::BackgroundJob::ImportKBARTFile::calculate_chunked_params_size(
+                $outcome->{params_size}, $max_allowed_packet,
+                scalar(@$rows)
             );
-            my @chunked_files;
-            push @chunked_files, [ splice @remove_null_lines, 0, $max_number_of_lines ] while @remove_null_lines;
 
+            my @chunked_files;
+            push @chunked_files, [ splice @$rows, 0, $max_number_of_rows ] while @$rows;
             foreach my $chunk (@chunked_files) {
-                unshift( @{$chunk}, join( "\t", @column_headers ) );
-                my $chunked_file = {
-                    filename     => $file->{filename},
-                    file_content => encode_base64( join( "\r", @{$chunk} ) )
+                my $params = {
+                    column_headers => $column_headers,
+                    rows           => $chunk,
+                    package_id     => $package_id,
+                    file_name      => $file->{filename}
                 };
-                my $params = { file => $chunked_file, package_id => $package_id };
-                my $job_id = Koha::BackgroundJob::ImportKBARTFile->new->enqueue($params);
-                push @job_ids, $job_id;
+
+                my $chunked_job_id = Koha::BackgroundJob::ImportKBARTFile->new->enqueue($params);
+                push @job_ids, $chunked_job_id;
             }
         } else {
-            my $params = { file => $file, package_id => $package_id };
             my $job_id = Koha::BackgroundJob::ImportKBARTFile->new->enqueue($params);
             push @job_ids, $job_id;
         }
Lines 351-354 sub import_from_kbart_file {
     };
 }
 
+
 1;
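
Note: the splice-based batching above drains @$rows destructively into fixed-size chunks; a toy illustration (the batch size of 3 is arbitrary, the controller uses $max_number_of_rows instead):

    my @rows = ( 1 .. 10 );
    my @chunks;
    push @chunks, [ splice @rows, 0, 3 ] while @rows;
    printf "%d chunks, last has %d row(s)\n", scalar(@chunks), scalar( @{ $chunks[-1] } );    # 4 chunks, last has 1 row(s)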
(-)a/api/v1/swagger/paths/erm_eholdings_titles.yaml (+2 lines)
Lines 529-534
               type: array
             valid_columns:
               type: array
+            invalid_filetype:
+              type: integer
           additionalProperties: false
       400:
         description: Bad parameter
Lines 2-8 Link Here
2
    <h2>{{ $__("Import from a KBART file") }}</h2>
2
    <h2>{{ $__("Import from a KBART file") }}</h2>
3
    <div class="page-section" id="files">
3
    <div class="page-section" id="files">
4
        <form @submit="addDocument($event)" class="file_upload">
4
        <form @submit="addDocument($event)" class="file_upload">
5
            <label>{{ $__("File") }}:</label>
5
            <h3>{{ $__("Requirements:") }}</h3>
6
            <ul style="margin-bottom: 1.5em">
7
                <li>{{ $__("The file must be in TSV or CSV format") }}</li>
8
                <li>
9
                    {{
10
                        $__(
11
                            "The file should not contain any additional information / header rows, e.g. a file with a single title would be structured as follows:"
12
                        )
13
                    }}
14
                    <ol>
15
                        <li>Column headings row</li>
16
                        <li>Title data row</li>
17
                    </ol>
18
                </li>
19
            </ul>
20
            <h3>{{ $__("File") }}:</h3>
6
            <div class="file_information">
21
            <div class="file_information">
7
                <span v-if="!file.filename">
22
                <span v-if="!file.filename">
8
                    {{ $__("Select a file") }}
23
                    {{ $__("Select a file") }}
Lines 44-50 Link Here
44
            </div>
59
            </div>
45
            <fieldset class="action">
60
            <fieldset class="action">
46
                <ButtonSubmit />
61
                <ButtonSubmit />
47
                <a @click="clearForm($event)" role="button" class="cancel">{{
62
                <a @click="clearForm()" role="button" class="cancel">{{
48
                    $__("Clear form")
63
                    $__("Clear form")
49
                }}</a>
64
                }}</a>
50
            </fieldset>
65
            </fieldset>
Lines 104-110 export default {
                     let message = ""
                     if (success.job_ids) {
                         if (success.job_ids.length > 1) {
-                            message += this.__(
+                            message += this.$__(
                                 "<li>Your file was too large to process in one job, the file has been split into %s jobs to meet the maximum size limits.</li>"
                             ).format(success.job_ids.length)
                         }
Lines 116-143
                         setMessage(message, true)
                     }
                     if (success.invalid_columns) {
-                        message +=
+                        message += this.$__(
                             "<p>Invalid columns were detected in your report, please check the list below:</p>"
+                        )
                         success.invalid_columns.forEach(column => {
                             message += this.$__(
                                 `<li style="font-weight: normal; font-size: medium;">%s</li>`
                             ).format(column)
                         })
-                        message +=
-                            '<p style="margin-top: 1em;">Below is a list of columns allowed in a KBART phase II report:</p>'
-                        success.valid_columns.forEach(column => {
-                            message += this.$__(
-                                `<li style="font-weight: normal; font-size: medium;">%s</li>`
-                            ).format(column)
-                        })
+                        message += this.$__(
+                            '<p style="margin-top: 1em;">For a list of compliant column headers, please click <a target="_blank" href="https://groups.niso.org/higherlogic/ws/public/download/16900/RP-9-2014_KBART.pdf" />here</p>'
+                        )
+                        setWarning(message)
+                    }
+                    if (success.invalid_filetype) {
+                        message += this.$__(
+                            "<p>The file must be in .tsv or .csv format, please convert your file and try again.</p>"
+                        )
                         setWarning(message)
                     }
                 },
                 error => {}
             )
+            this.clearForm()
         },
-        clearForm(e) {
-            e.preventDefault()
+        clearForm() {
             this.file = {
                 filename: null,
                 file_type: null,
(-)a/koha-tmpl/intranet-tmpl/prog/js/vue/components/ERM/EHoldingsLocalTitlesList.vue (-17 / +5 lines)
Lines 13-18
                     icon="plus"
                     :title="$__('Import from list')"
                 />
+                <ToolbarButton
+                    :to="{ name: 'EHoldingsLocalTitlesKBARTImport' }"
+                    icon="plus"
+                    :title="$__('Import from KBART file')"
+                />
             </Toolbar>
             <div
                 v-if="title_count > 0"
Lines 102-124 export default {
                 },
             },
             cannot_search: false,
-            toolbar_options: [
-                {
-                    to: "EHoldingsLocalTitlesFormAdd",
-                    icon: "plus",
-                    button_title: this.$__("New title"),
-                },
-                {
-                    to: "EHoldingsLocalTitlesFormImport",
-                    icon: "plus",
-                    button_title: this.$__("Import from list"),
-                },
-                {
-                    to: "EHoldingsLocalTitlesKBARTImport",
-                    icon: "plus",
-                    button_title: this.$__("Import from KBART file"),
-                },
-            ],
         }
     },
     beforeRouteEnter(to, from, next) {
(-)a/koha-tmpl/intranet-tmpl/prog/js/vue/components/ERM/EHoldingsLocalTitlesShow.vue (-1 / +3 lines)
Lines 119-125
                     <li v-if="title.title_url">
                         <label>{{ $__("Title-level URL") }}:</label>
                         <span>
-                            {{ title.title_url }}
+                            <a :href="title.title_url" target="_blank">{{
+                                title.title_url
+                            }}</a>
                         </span>
                     </li>
                     <li v-if="title.first_author">
(-)a/t/db_dependent/Koha/BackgroundJob/ImportKBARTFile.t (-27 / +60 lines)
Lines 45-60 subtest 'enqueue' => sub {
     $schema->storage->txn_rollback;
 };
 
-subtest 'calculate_chunked_file_size' => sub {
+subtest 'calculate_chunked_params_size' => sub {
 
     plan tests => 2;
 
     my $max_number_of_lines =
-        Koha::BackgroundJob::ImportKBARTFile::calculate_chunked_file_size( 500000, 100000, 50000 );
-    is( $max_number_of_lines, 9000, 'Number of lines calculated correctly' );
+        Koha::BackgroundJob::ImportKBARTFile::calculate_chunked_params_size( 500000, 100000, 50000 );
+    is( $max_number_of_lines, 7500, 'Number of lines calculated correctly' );
     my $max_number_of_lines2 =
-        Koha::BackgroundJob::ImportKBARTFile::calculate_chunked_file_size( 400000, 100000, 60000 );
-    is( $max_number_of_lines2, 13500, 'Number of lines calculated correctly' );
+        Koha::BackgroundJob::ImportKBARTFile::calculate_chunked_params_size( 400000, 100000, 60000 );
+    is( $max_number_of_lines2, 11250, 'Number of lines calculated correctly' );
 };
 
 subtest 'format_title' => sub {
Lines 74-85 subtest 'format_title' => sub {
     is( $title->{coverage_notes},        undef,        'coverage_notes has been deleted' );
 };
 
-subtest 'format_file' => sub {
+subtest 'read_file' => sub {
 
     plan tests => 6;
 
     my $file = {
-        filename     => 'Test_file.txt',
+        filename     => 'Test_file.tsv',
         file_content => encode_base64(
             'publication_title	print_identifier	online_identifier	date_first_issue_online	num_first_vol_online	num_first_issue_online	date_last_issue_online	num_last_vol_online	num_last_issue_online	title_url	first_author	title_id	embargo_info	coverage_depth	coverage_notes	publisher_name	publication_type	date_monograph_published_print	date_monograph_published_online	monograph_volume	monograph_edition	first_editor	parent_publication_title_id	preceding_publication_title_id	access_type
 Nature Plants		2055-0278	2015-01	1	1				https://www.nature.com/nplants		4aaa7		fulltext	Hybrid (Open Choice)	Nature Publishing Group UK	serial								P
Lines 87-107 Nature Astronomy	2397-3366	2017-01	1	1	https://www.nature.com/natastron	4bb
         )
     };
 
-    my ( $column_headers, $lines ) = Koha::BackgroundJob::ImportKBARTFile::format_file($file);
+    my ( $column_headers, $lines, $error ) = Koha::BackgroundJob::ImportKBARTFile::read_file($file);
 
     is( @{$column_headers},     25,                  '25 column headers found' );
     is( @{$column_headers}[0],  'publication_title', 'First header correctly extracted' );
     is( @{$column_headers}[10], 'first_author',      'Tenth header correctly extracted' );
-    is( @{$lines},              2,                   'Two lines need processing' );
-    is(
+
+    is( @{$lines}, 2, 'Two lines need processing' );
+    is_deeply(
         @{$lines}[0],
-        'Nature Plants		2055-0278	2015-01	1	1				https://www.nature.com/nplants		4aaa7		fulltext	Hybrid (Open Choice)	Nature Publishing Group UK	serial								P',
+        [
+            'Nature Plants', '', '2055-0278', '2015-01',     '1', '1', '', '', '', 'https://www.nature.com/nplants', '',
+            '4aaa7', '', 'fulltext', 'Hybrid (Open Choice)', 'Nature Publishing Group UK', 'serial', '', '', '', '',
+            '',      '', '',         'P'
+        ],
         'Line correctly identified'
     );
-    is(
+    is_deeply(
         @{$lines}[1],
-        'Nature Astronomy		2397-3366	2017-01	1	1				https://www.nature.com/natastron		4bbb0		fulltext	Hybrid (Open Choice)	Nature Publishing Group UK	serial								P',
+        [
+            'Nature Astronomy', '', '2397-3366', '2017-01', '1', '1', '', '', '', 'https://www.nature.com/natastron',
+            '', '4bbb0', '', 'fulltext', 'Hybrid (Open Choice)', 'Nature Publishing Group UK', 'serial', '', '', '',
+            '', '',      '', '',         'P'
+        ],
         'Line correctly identified'
     );
 
Lines 112-118 subtest 'create_title_hash_from_line_data' => sub {
     plan tests => 2;
 
     my $file = {
-        filename     => 'Test_file.txt',
+        filename     => 'Test_file.tsv',
         file_content => encode_base64(
             'publication_title	print_identifier	online_identifier	date_first_issue_online	num_first_vol_online	num_first_issue_online	date_last_issue_online	num_last_vol_online	num_last_issue_online	title_url	first_author	title_id	embargo_info	coverage_depth	coverage_notes	publisher_name	publication_type	date_monograph_published_print	date_monograph_published_online	monograph_volume	monograph_edition	first_editor	parent_publication_title_id	preceding_publication_title_id	access_type
 Nature Plants		2055-0278	2015-01	1	1				https://www.nature.com/nplants		4aaa7		fulltext	Hybrid (Open Choice)	Nature Publishing Group UK	serial								P
Lines 120-126 Nature Astronomy	2397-3366	2017-01	1	1	https://www.nature.com/natastron	4bb
         )
     };
 
-    my ( $column_headers, $lines ) = Koha::BackgroundJob::ImportKBARTFile::format_file($file);
+    my ( $column_headers, $lines ) = Koha::BackgroundJob::ImportKBARTFile::read_file($file);
 
     my $title_from_line1 =
         Koha::BackgroundJob::ImportKBARTFile::create_title_hash_from_line_data( @{$lines}[0], $column_headers );
Lines 191-198 subtest 'process' => sub {
 
     $schema->storage->txn_begin;
 
+    Koha::ERM::EHoldings::Packages->search->delete;
+    my $ehpackage = $builder->build_object(
+        {
+            class => 'Koha::ERM::EHoldings::Packages',
+            value => { external_id => undef }
+        }
+    );
+
     my $file = {
-        filename     => 'Test_file.txt',
+        filename     => 'Test_file.tsv',
         file_content => encode_base64(
             'publication_title	print_identifier	online_identifier	date_first_issue_online	num_first_vol_online	num_first_issue_online	date_last_issue_online	num_last_vol_online	num_last_issue_online	title_url	first_author	title_id	embargo_info	coverage_depth	coverage_notes	publisher_name	publication_type	date_monograph_published_print	date_monograph_published_online	monograph_volume	monograph_edition	first_editor	parent_publication_title_id	preceding_publication_title_id	access_type
 Nature Plants		2055-0278	2015-01	1	1				https://www.nature.com/nplants		4aaa7		fulltext	Hybrid (Open Choice)	Nature Publishing Group UK	serial								P
Lines 200-205 Nature Astronomy	2397-3366	2017-01	1	1	https://www.nature.com/natastron	4bb
         )
     };
 
+    my ( $column_headers, $rows, $error ) = Koha::BackgroundJob::ImportKBARTFile::read_file($file);
+    my $data = {
+        column_headers => $column_headers,
+        rows           => $rows,
+        package_id     => $ehpackage->package_id,
+        file_name      => $file->{filename}
+    };
+
     my $job = Koha::BackgroundJob::ImportKBARTFile->new(
         {
             status => 'new',
Lines 208-214 Nature Astronomy	2397-3366	2017-01	1	1	https://www.nature.com/natastron	4bb
         }
     )->store;
     $job = Koha::BackgroundJobs->find( $job->id );
-    my $data = { file => $file };
     my $json = $job->json->encode($data);
     $job->data($json)->store;
     $job->process($data);
Lines 251-262 Nature Astronomy	2397-3366	2017-01	1	1	https://www.nature.com/natastron	4bb
     $module->mock(
         'create_title_hash_from_line_data',
         sub {
-            my ( $line, $column_headers ) = @_;
+            my ( $row, $column_headers ) = @_;
 
             my %new_title;
-            my @values = split /\t/, $line;
 
-            @new_title{ @{$column_headers} } = @values;
+            @new_title{ @{$column_headers} } = @$row;
+
+            # If the file has been converted from CSV to TSV for import, then some titles containing commas will be enclosed in ""
+            my $first_char = substr( $new_title{publication_title}, 0, 1 );
+            my $last_char  = substr( $new_title{publication_title}, -1 );
+            if ( $first_char eq '"' && $last_char eq '"' ) {
+                $new_title{publication_title} =~ s/^"|"$//g;
+            }
 
             $new_title{title_id}          = '12345' if $new_title{publication_title} eq 'Nature Plants';
             $new_title{publication_title} = ''      if $new_title{publication_title} eq 'Nature Plants';
Lines 302-313 Nature Astronomy	2397-3366	2017-01	1	1	https://www.nature.com/natastron	4bb
     $module->mock(
         'create_title_hash_from_line_data',
         sub {
-            my ( $line, $column_headers ) = @_;
+            my ( $row, $column_headers ) = @_;
 
             my %new_title;
-            my @values = split /\t/, $line;
 
-            @new_title{ @{$column_headers} } = @values;
+            @new_title{ @{$column_headers} } = @$row;
+
+            # If the file has been converted from CSV to TSV for import, then some titles containing commas will be enclosed in ""
+            my $first_char = substr( $new_title{publication_title}, 0, 1 );
+            my $last_char  = substr( $new_title{publication_title}, -1 );
+            if ( $first_char eq '"' && $last_char eq '"' ) {
+                $new_title{publication_title} =~ s/^"|"$//g;
+            }
 
             $new_title{title_id}      = 'abcde' if $new_title{publication_title} eq 'Nature Plants';
             $new_title{unknown_field} = ''      if $new_title{publication_title} eq 'Nature Plants';
Lines 334-341 Nature Astronomy	2397-3366	2017-01	1	1	https://www.nature.com/natastron	4bb
         $job4->messages,
         [
             {
-                'type'  => 'error',
-                'title' => 'Nature Plants',
+                'type'          => 'error',
+                'title'         => 'Nature Plants',
                 'error_message' =>
                     'DBIx::Class::Row::store_column(): No such column \'unknown_field\' on Koha::Schema::Result::ErmEholdingsTitle at /kohadevbox/koha/Koha/Object.pm line 79
 ',
