
(-)a/Koha/BackgroundJob/ImportKBARTFile.pm (-3 / +14 lines)
Lines 77-82 sub process {

     try {
         my $column_headers       = $args->{column_headers};
+        my $invalid_columns      = $args->{invalid_columns};
         my $rows                 = $args->{rows};
         my $package_id           = $args->{package_id};
         my $create_linked_biblio = $args->{create_linked_biblio};
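The new invalid_columns key travels to the job alongside the existing arguments. As a rough illustration (key names are taken from this hunk, the values are made up), the data handed to process() now has this shape:

    use strict;
    use warnings;

    # Illustrative only: the argument hash the background job unpacks above.
    my $args = {
        column_headers       => [ 'publication_title', 'title_id' ],
        invalid_columns      => ['invalid_column'],    # added by this patch
        rows                 => [],                    # parsed KBART rows
        package_id           => 1,
        create_linked_biblio => 0,
    };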
Lines 95-101 sub process {

         foreach my $row ( @{$rows} ) {
             next if !$row;
-            my $new_title   = create_title_hash_from_line_data( $row, $column_headers );
+            my $new_title   = create_title_hash_from_line_data( $row, $column_headers, $invalid_columns );
             my $title_match = check_for_matching_title($new_title);

             if ($title_match) {
Lines 245-255 sub read_file {

 =head3 create_title_hash_from_line_data

 Takes a line and creates a hash of the values mapped to the column headings
+Only accepts fields that are in the list of permitted KBART fields, other fields are ignored
+(This is identified to the user on the background job status page)

 =cut

 sub create_title_hash_from_line_data {
-    my ( $row, $column_headers ) = @_;
+    my ( $row, $column_headers, $invalid_columns ) = @_;

     my %new_title;

Lines 262-267 sub create_title_hash_from_line_data {

         $new_title{publication_title} =~ s/^"|"$//g;
     }

+    # Remove any additional columns
+    foreach my $invalid_column ( @$invalid_columns ) {
+        delete $new_title{$invalid_column};
+    }
+
     return \%new_title;
 }

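The effect of the new parameter can be shown in isolation. Below is a minimal, self-contained sketch (not the patched subroutine itself) of how entries named in the invalid-columns list are dropped from the hash built for a row:

    use strict;
    use warnings;

    # Hypothetical row hash after the column headers have been mapped onto a line.
    my %new_title = (
        publication_title => 'Nature Plants',
        title_id          => '4aaa7',
        invalid_column    => 'invalid_column_data',
    );

    # Columns flagged by the controller as not being valid KBART phase II headers.
    my @invalid_columns = ('invalid_column');

    # Remove any additional columns, mirroring the loop added above.
    delete $new_title{$_} for @invalid_columns;

    print join( ', ', sort keys %new_title ), "\n";    # 'invalid_column' is gone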
Lines 281-286 sub check_for_matching_title {

     # Use external_id in case title exists for a different provider, we want to add it for the new provider
     $match_parameters->{external_id} = $title->{title_id} if $title->{title_id};

+    # We should also check the date_first_issue_online for serial publications
+    $match_parameters->{date_first_issue_online} = $title->{date_first_issue_online} if $title->{date_first_issue_online};
+
     # If no match parameters are provided in the file we should add the new title
     return 0 if !%$match_parameters;

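For orientation, here is a hedged sketch of how the match parameters are assembled with the new check: title_id maps to external_id, and date_first_issue_online is added when present, so a serial with the same external_id but a different coverage start is not mistaken for a duplicate. The helper name below is illustrative; in the patch this logic sits inline in check_for_matching_title.

    use strict;
    use warnings;

    # Illustrative helper: build the search parameters used to look for an existing title.
    sub build_match_parameters {
        my ($title) = @_;
        my $match_parameters = {};

        # Use external_id in case the title exists for a different provider
        $match_parameters->{external_id} = $title->{title_id} if $title->{title_id};

        # Also check the first-issue date for serial publications
        $match_parameters->{date_first_issue_online} = $title->{date_first_issue_online}
            if $title->{date_first_issue_online};

        return $match_parameters;
    }

    my $params = build_match_parameters( { title_id => '4aaa7', date_first_issue_online => '2015-01' } );

    # If no match parameters are provided in the file, the title is simply added as new.
    print %$params ? "search for an existing title\n" : "add as a new title\n";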
Lines 426-432 sub is_file_too_large {

 =head3 rescue_EBSCO_files

-EBSCO have an incorrect spelling of "preceding_publication_title_id" in all of their KBART files ("preceeding" instead of "preceding").
+EBSCO have an incorrect spelling for "preceding_publication_title_id" in all of their KBART files ("preceeding" instead of "preceding").
 This means all of their KBART files fail to import using the current methodology.
 There is no simple way of finding out who the vendor is before importing so all KBART files from any vendor are going to have to be checked for this spelling and corrected.

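The POD above describes a spelling correction that has to be applied to every incoming file. A minimal sketch of that kind of header fix follows; the real rescue_EBSCO_files signature is not shown in this hunk, so the helper below is an assumption:

    use strict;
    use warnings;

    # Correct EBSCO's misspelling "preceeding_publication_title_id" in a list of column headers.
    sub fix_ebsco_headers {
        my ($column_headers) = @_;
        s/preceeding_publication_title_id/preceding_publication_title_id/ for @$column_headers;
        return $column_headers;
    }

    my $headers = [ 'publication_title', 'preceeding_publication_title_id' ];
    fix_ebsco_headers($headers);
    print join( ',', @$headers ), "\n";    # misspelling corrected in place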
(-)a/Koha/REST/V1/ERM/EHoldings/Titles/Local.pm (-6 / +5 lines)
Lines 294-313 sub import_from_kbart_file {

         # Check that the column headers in the file match the standardised KBART phase II columns
         # If not, return a warning
+        my $warnings = {};
         my @valid_headers = Koha::BackgroundJob::ImportKBARTFile::get_valid_headers();
         foreach my $header (@$column_headers) {
             if ( !grep { $_ eq $header } @valid_headers ) {
-                $header = 'Empty column - please remove' if $header eq '';
+                $header = 'Empty column' if $header eq '';
                 push @invalid_columns, $header;
             }
         }
-        return $c->render(
-            status  => 201,
-            openapi => { invalid_columns => \@invalid_columns, valid_columns => \@valid_headers, invalid_filetype => 0 }
-        ) if scalar(@invalid_columns) > 0;
+        $warnings->{invalid_columns} = \@invalid_columns if scalar(@invalid_columns) > 0;

         my $params = {
             column_headers       => $column_headers,
+            invalid_columns      => \@invalid_columns,
             rows                 => $rows,
             package_id           => $package_id,
             file_name            => $file->{filename},
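The controller no longer aborts with an early 201 when it finds unexpected headers; it records them as warnings and still queues the job, passing the same list on so the background job can drop those columns from each row. A stripped-down sketch of that flow with plain data structures (no Mojolicious controller, abridged header list):

    use strict;
    use warnings;

    my @valid_headers  = ( 'publication_title', 'title_id' );                        # abridged
    my $column_headers = [ 'publication_title', 'title_id', '', 'invalid_column' ];

    my $warnings = {};
    my @invalid_columns;

    foreach my $header (@$column_headers) {
        if ( !grep { $_ eq $header } @valid_headers ) {
            $header = 'Empty column' if $header eq '';
            push @invalid_columns, $header;
        }
    }
    $warnings->{invalid_columns} = \@invalid_columns if scalar(@invalid_columns) > 0;

    # Both structures are used: the warnings go back to the client,
    # the invalid column list goes into the job parameters.
    print scalar(@invalid_columns), " invalid column(s) recorded\n";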
Lines 342-348 sub import_from_kbart_file {

         return $c->render(
             status  => 201,
-            openapi => { job_ids => \@job_ids }
+            openapi => { job_ids => \@job_ids, warnings => $warnings }
         );
     } catch {
         $c->unhandled_exception($_);
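With this change a successful request always returns 201 with the queued job ids, and any header warnings ride along in the same body (the schema for this is added in the next file). As a rough, illustrative Perl view of what the openapi payload above can contain:

    use strict;
    use warnings;

    # Illustrative values only.
    my $response_body = {
        job_ids  => [ 12, 13 ],                                    # one id per queued chunk
        warnings => { invalid_columns => ['invalid_column'] },     # present only when columns were flagged
    };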
(-)a/api/v1/swagger/paths/erm_eholdings_titles.yaml (+2 lines)
Lines 533-538

               type: array
             invalid_filetype:
               type: integer
+            warnings:
+              type: object
           additionalProperties: false
       400:
         description: Bad parameter
(-)a/koha-tmpl/intranet-tmpl/prog/js/vue/components/ERM/EHoldingsLocalTitlesKBARTImport.vue (-10 / +17 lines)
Lines 28-33

                             :id="`import_file`"
                             :name="`import_file`"
                             required
+                            ref="fileLoader"
                         />
                     </li>
                     <li>
Lines 80-88

 import ButtonSubmit from "../ButtonSubmit.vue"
 import { APIClient } from "../../fetch/api-client.js"
 import { setMessage, setWarning } from "../../messages"
+import { ref } from "vue"

 export default {
     data() {
+        const fileLoader = ref()
         return {
             file: {
                 filename: null,
Lines 91-96 export default {

             packages: [],
             package_id: null,
             create_linked_biblio: false,
+            fileLoader,
         }
     },
     beforeCreate() {
Lines 131-159 export default {

                     if (success.job_ids) {
                         if (success.job_ids.length > 1) {
                             message += this.$__(
-                                "<li>Your file was too large to process in one job, the file has been split into %s jobs to meet the maximum size limits.</li>"
+                                "<p style='font-weight: normal; font-size: medium; margin-top: 1em;'>Your file was too large to process in one job, the file has been split into %s jobs to meet the maximum size limits.</p>"
                             ).format(success.job_ids.length)
                         }
                         success.job_ids.forEach((job, i) => {
                             message += this.$__(
-                                '<li>Job for uploaded file %s has been queued, <a href="/cgi-bin/koha/admin/background_jobs.pl?op=view&id=%s" target="_blank">click here</a> to check its progress.</li>'
+                                '<li>Job %s for uploaded file has been queued, <a href="/cgi-bin/koha/admin/background_jobs.pl?op=view&id=%s" target="_blank">click here</a> to check its progress.</li>'
                             ).format(i + 1, job)
                         })
                         setMessage(message, true)
                     }
-                    if (success.invalid_columns) {
+                    if (success.warnings.invalid_columns) {
                         message += this.$__(
-                            "<p>Invalid columns were detected in your report, please check the list below:</p>"
+                            "<p style='font-weight: normal; font-size: medium; margin-top: 1em;'>Information:</p>"
                         )
-                        success.invalid_columns.forEach(column => {
-                            message += this.$__(
-                                `<li style="font-weight: normal; font-size: medium;">%s</li>`
-                            ).format(column)
+                        message += this.$__(
+                            "<p>Additional columns were detected in your report, please see the list below:</p>"
+                        )
+                        success.warnings.invalid_columns.forEach(column => {
+                            message += this.$__(`<li>%s</li>`).format(column)
                         })
                         message += this.$__(
-                            '<p style="margin-top: 1em;">For a list of compliant column headers, please click <a target="_blank" href="https://groups.niso.org/higherlogic/ws/public/download/16900/RP-9-2014_KBART.pdf" />here</p>'
+                            "<p style='margin-top: 0.1em;'>The data in these columns will not be imported.</p>"
                         )
-                        setWarning(message)
+                        setMessage(message)
                     }
                     if (success.invalid_filetype) {
                         message += this.$__(
Lines 174-179 export default {

             }
             this.package_id = null
             this.create_linked_biblio = false
+            this.$refs.fileLoader.files = null
+            this.$refs.fileLoader.value = null
         },
     },
     components: {
(-)a/t/db_dependent/Koha/BackgroundJob/ImportKBARTFile.t (-4 / +84 lines)
Lines 17-23


 use Modern::Perl;

-use Test::More tests => 6;
+use Test::More tests => 7;
 use Test::MockModule;

 use Koha::Database;
Lines 128-138 Nature Astronomy 2397-3366 2017-01 1 1 https://www.nature.com/natastron 4bb

     };

     my ( $column_headers, $lines ) = Koha::BackgroundJob::ImportKBARTFile::read_file($file);
+    my @invalid_columns;

     my $title_from_line1 =
-        Koha::BackgroundJob::ImportKBARTFile::create_title_hash_from_line_data( @{$lines}[0], $column_headers );
+        Koha::BackgroundJob::ImportKBARTFile::create_title_hash_from_line_data( @{$lines}[0], $column_headers, \@invalid_columns );
     my $title_from_line2 =
-        Koha::BackgroundJob::ImportKBARTFile::create_title_hash_from_line_data( @{$lines}[1], $column_headers );
+        Koha::BackgroundJob::ImportKBARTFile::create_title_hash_from_line_data( @{$lines}[1], $column_headers, \@invalid_columns );
+
+    my $line1_match = {
+        'coverage_depth'                  => 'fulltext',
+        'date_monograph_published_print'  => '',
+        'date_first_issue_online'         => '2015-01',
+        'date_last_issue_online'          => '',
+        'coverage_notes'                  => 'Hybrid (Open Choice)',
+        'first_editor'                    => '',
+        'date_monograph_published_online' => '',
+        'preceding_publication_title_id'  => '',
+        'num_last_issue_online'           => '',
+        'embargo_info'                    => '',
+        'access_type'                     => 'P',
+        'num_first_issue_online'          => '1',
+        'online_identifier'               => '2055-0278',
+        'title_url'                       => 'https://www.nature.com/nplants',
+        'monograph_volume'                => '',
+        'first_author'                    => '',
+        'parent_publication_title_id'     => '',
+        'num_last_vol_online'             => '',
+        'publication_title'               => 'Nature Plants',
+        'num_first_vol_online'            => '1',
+        'print_identifier'                => '',
+        'publisher_name'                  => 'Nature Publishing Group UK',
+        'title_id'                        => '4aaa7',
+        'publication_type'                => 'serial',
+        'monograph_edition'               => ''
+    };
+    my $line2_match = {
+        'date_monograph_published_online' => '',
+        'num_first_vol_online'            => '1',
+        'num_last_issue_online'           => '',
+        'preceding_publication_title_id'  => '',
+        'title_url'                       => 'https://www.nature.com/natastron',
+        'online_identifier'               => '2397-3366',
+        'print_identifier'                => '',
+        'num_last_vol_online'             => '',
+        'embargo_info'                    => '',
+        'parent_publication_title_id'     => '',
+        'publisher_name'                  => 'Nature Publishing Group UK',
+        'date_first_issue_online'         => '2017-01',
+        'monograph_volume'                => '',
+        'monograph_edition'               => '',
+        'access_type'                     => 'P',
+        'first_author'                    => '',
+        'num_first_issue_online'          => '1',
+        'first_editor'                    => '',
+        'publication_title'               => 'Nature Astronomy',
+        'date_monograph_published_print'  => '',
+        'publication_type'                => 'serial',
+        'title_id'                        => '4bbb0',
+        'coverage_depth'                  => 'fulltext',
+        'coverage_notes'                  => 'Hybrid (Open Choice)',
+        'date_last_issue_online'          => ''
+    };
+
+    is_deeply( $title_from_line1, $line1_match, 'Title hash created correctly' );
+    is_deeply( $title_from_line2, $line2_match, 'Title hash created correctly' );
+};
+
+subtest 'create_title_hash_from_line_data with invalid columns using csv' => sub {
+
+    plan tests => 2;
+
+    my $file = {
+        filename     => 'Test_file.csv',
+        file_content => encode_base64(
+            'publication_title,print_identifier,online_identifier,date_first_issue_online,num_first_vol_online,num_first_issue_online,date_last_issue_online,num_last_vol_online,num_last_issue_online,title_url,first_author,title_id,embargo_info,coverage_depth,coverage_notes,publisher_name,publication_type,date_monograph_published_print,date_monograph_published_online,monograph_volume,monograph_edition,first_editor,parent_publication_title_id,preceding_publication_title_id,access_type,invalid_column
+Nature Plants,,2055-0278,2015-01,1,1,,,,https://www.nature.com/nplants,,4aaa7,,fulltext,Hybrid (Open Choice),Nature Publishing Group UK,serial,,,,,,,,P,invalid_column_data
+Nature Astronomy,,2397-3366,2017-01,1,1,,,,https://www.nature.com/natastron,,4bbb0,,fulltext,Hybrid (Open Choice),Nature Publishing Group UK,serial,,,,,,,,P,invalid_column_data'
+        )
+    };
+
+    my ( $column_headers, $lines ) = Koha::BackgroundJob::ImportKBARTFile::read_file($file);
+    my @invalid_columns = ('invalid_column');
+
+    my $title_from_line1 =
+        Koha::BackgroundJob::ImportKBARTFile::create_title_hash_from_line_data( @{$lines}[0], $column_headers, \@invalid_columns );
+    my $title_from_line2 =
+        Koha::BackgroundJob::ImportKBARTFile::create_title_hash_from_line_data( @{$lines}[1], $column_headers, \@invalid_columns );

     my $line1_match = {
         'coverage_depth'                  => 'fulltext',
-
