From d4ee17a0281160aeb726e20b1b683f4c1b856913 Mon Sep 17 00:00:00 2001
From: Peter Weber
Date: Mon, 3 Jun 2024 16:04:34 +0200
Subject: [PATCH] tests: use black

* Uses `black` during tests
* Sourcery refactoring

Co-Authored-by: Peter Weber
---
 docker-services.yml | 2 +-
 poetry.lock | 1996 +++---
 pyproject.toml | 14 +-
 pytest.ini | 29 -
 rero_ils/__init__.py | 2 +-
 rero_ils/accounts_views.py | 60 +-
 ...53585f_correct_subjects_bf_Organization.py | 94 +-
 ...555c03fe49_correct_holdings_items_count.py | 24 +-
 ...4dc2beb_add_library_pid_to_ill_request_.py | 51 +-
 ...b0af71048a7_add_request_expiration_date.py | 34 +-
 ...54134957af7d_loan_checkout_location_pid.py | 44 +-
 .../5f0b086e4b82_patron_role_migration.py | 88 +-
 ...4a5cc96f96e_update_claimed_issue_status.py | 28 +-
 ...78_update_patron_communication_channels.py | 37 +-
 ...a7cdef99_reindex_items_with_invalid_ean.py | 34 +-
 .../8d97be2c8ad6_items_remove_claims_count.py | 24 +-
 .../90d857fb5c23_unpaid_subscription_limit.py | 47 +-
 ...d88e64_move_collection_to_his_own_table.py | 59 +-
 ...021979fe_migrate_contribution_to_entity.py | 12 +-
 .../a941628259e1_add_keys_into_loan_index.py | 22 +-
 ...5cbcad66_migrate_obsolete_country_codes.py | 55 +-
 ...48948_add_column_to_selfcheck_terminals.py | 17 +-
 rero_ils/alembic/bd78d77eb7e3_isfiction.py | 69 +-
 ...78_cipo_request_duration_field_creation.py | 33 +-
 ...elete_expired_items_temporary_locations.py | 17 +-
 .../e3eb396b39bb_migration_ill_pickup.py | 18 +-
 .../e63e5dfa2416_new_vendor_serial_contact.py | 44 +-
 .../eec683a446e5_merging_rero_ils_branches.py | 6 +-
 rero_ils/alembic/f0e7f3b80a21_initial.py | 4 +-
 ...b998b8_holdings_affiliated_to_libraries.py | 33 +-
 rero_ils/celery.py | 12 +-
 rero_ils/config.py | 5002 +++++++++--------
 rero_ils/dojson/cli.py | 15 +-
 rero_ils/dojson/utils.py | 1694 +++---
 rero_ils/es_templates/__init__.py | 4 +-
 rero_ils/facets.py | 68 +-
 rero_ils/filter.py | 67 +-
 rero_ils/jsonschemas/utils.py | 11 +-
 rero_ils/modules/__init__.py | 2 +-
 .../modules/acquisition/acq_accounts/api.py | 220 +-
 .../acquisition/acq_accounts/dumpers.py | 2 +-
 .../acquisition/acq_accounts/extensions.py | 26 +-
 .../acquisition/acq_accounts/jsonresolver.py | 8 +-
 .../acquisition/acq_accounts/listener.py | 98 +-
 .../acquisition/acq_accounts/models.py | 15 +-
 .../acquisition/acq_accounts/permissions.py | 29 +-
 .../acq_accounts/serializers/__init__.py | 34 +-
 .../acq_accounts/serializers/csv.py | 38 +-
 .../acq_accounts/serializers/json.py | 24 +-
 .../modules/acquisition/acq_accounts/utils.py | 3 +-
 .../modules/acquisition/acq_accounts/views.py | 27 +-
 .../modules/acquisition/acq_invoices/api.py | 74 +-
 .../acquisition/acq_invoices/jsonresolver.py | 10 +-
 .../acquisition/acq_invoices/models.py | 11 +-
 .../acquisition/acq_invoices/permissions.py | 27 +-
 .../acquisition/acq_invoices/serializers.py | 15 +-
 .../acquisition/acq_order_lines/api.py | 125 +-
 .../acquisition/acq_order_lines/dumpers.py | 48 +-
 .../acquisition/acq_order_lines/extensions.py | 21 +-
 .../acq_order_lines/jsonresolver.py | 8 +-
 .../acquisition/acq_order_lines/listener.py | 28 +-
 .../acquisition/acq_order_lines/models.py | 25 +-
 .../acq_order_lines/permissions.py | 27 +-
 .../acq_order_lines/serializers.py | 2 +-
 .../modules/acquisition/acq_orders/api.py | 258 +-
 .../modules/acquisition/acq_orders/dumpers.py | 51 +-
 .../acquisition/acq_orders/extensions.py | 24 +-
 .../acquisition/acq_orders/jsonresolver.py | 4 +-
 .../acquisition/acq_orders/listener.py | 116 +-
 .../modules/acquisition/acq_orders/models.py | 37 +-
 .../acquisition/acq_orders/permissions.py | 27 +-
 .../acq_orders/serializers/__init__.py | 60 +-
 .../acquisition/acq_orders/serializers/csv.py | 250 +-
 .../acq_orders/serializers/json.py | 16 +-
 .../modules/acquisition/acq_orders/utils.py | 15 +-
 .../modules/acquisition/acq_orders/views.py | 40 +-
 .../acquisition/acq_receipt_lines/api.py | 73 +-
 .../acquisition/acq_receipt_lines/dumpers.py | 21 +-
 .../acq_receipt_lines/extensions.py | 16 +-
 .../acq_receipt_lines/jsonresolver.py | 8 +-
 .../acquisition/acq_receipt_lines/listener.py | 40 +-
 .../acquisition/acq_receipt_lines/models.py | 15 +-
 .../acq_receipt_lines/permissions.py | 27 +-
 .../acq_receipt_lines/serializers.py | 11 +-
 .../modules/acquisition/acq_receipts/api.py | 140 +-
 .../acquisition/acq_receipts/dumpers.py | 10 +-
 .../acquisition/acq_receipts/extensions.py | 4 +-
 .../acquisition/acq_receipts/jsonresolver.py | 8 +-
 .../acquisition/acq_receipts/listener.py | 24 +-
 .../acquisition/acq_receipts/models.py | 17 +-
 .../acquisition/acq_receipts/permissions.py | 27 +-
 .../acquisition/acq_receipts/serializers.py | 18 +-
 .../modules/acquisition/acq_receipts/views.py | 13 +-
 rero_ils/modules/acquisition/api.py | 4 +-
 rero_ils/modules/acquisition/budgets/api.py | 49 +-
 .../acquisition/budgets/jsonresolver.py | 10 +-
 .../modules/acquisition/budgets/listener.py | 23 +-
 .../modules/acquisition/budgets/models.py | 11 +-
 .../acquisition/budgets/permissions.py | 19 +-
 .../acquisition/budgets/serializers.py | 2 +-
 rero_ils/modules/acquisition/cli.py | 78 +-
 rero_ils/modules/acquisition/dumpers.py | 67 +-
 rero_ils/modules/acquisition/exceptions.py | 6 +-
 rero_ils/modules/acquisition/rollover.py | 337 +-
 rero_ils/modules/api.py | 205 +-
 rero_ils/modules/apiharvester/cli.py | 143 +-
 rero_ils/modules/apiharvester/models.py | 13 +-
 rero_ils/modules/apiharvester/signals.py | 2 +-
 rero_ils/modules/apiharvester/tasks.py | 20 +-
 rero_ils/modules/apiharvester/utils.py | 72 +-
 rero_ils/modules/babel_extractors.py | 10 +-
 rero_ils/modules/circ_policies/api.py | 308 +-
 rero_ils/modules/circ_policies/extensions.py | 5 +-
 rero_ils/modules/circ_policies/models.py | 11 +-
 rero_ils/modules/circ_policies/permissions.py | 19 +-
 rero_ils/modules/circ_policies/views.py | 24 +-
 rero_ils/modules/cli/fixtures.py | 509 +-
 rero_ils/modules/cli/index.py | 327 +-
 rero_ils/modules/cli/utils.py | 1288 +++--
 rero_ils/modules/collections/api.py | 35 +-
 rero_ils/modules/collections/cli.py | 37 +-
 rero_ils/modules/collections/jsonresolver.py | 8 +-
 rero_ils/modules/collections/listener.py | 17 +-
 rero_ils/modules/collections/models.py | 11 +-
 rero_ils/modules/collections/permissions.py | 19 +-
 rero_ils/modules/collections/serializers.py | 12 +-
 rero_ils/modules/collections/views.py | 62 +-
 rero_ils/modules/commons/dumpers.py | 1 +
 rero_ils/modules/commons/exceptions.py | 2 +-
 rero_ils/modules/commons/identifiers.py | 102 +-
 rero_ils/modules/commons/models.py | 4 +-
 rero_ils/modules/commons/schemas.py | 5 +-
 rero_ils/modules/decorators.py | 40 +-
 rero_ils/modules/documents/api.py | 321 +-
 rero_ils/modules/documents/api_views.py | 89 +-
 .../dojson/contrib/jsontodc/__init__.py | 2 +-
 .../dojson/contrib/jsontodc/model.py | 199 +-
 .../dojson/contrib/jsontomarc21/__init__.py | 2 +-
 .../dojson/contrib/jsontomarc21/model.py | 770 +--
 .../dojson/contrib/marc21tojson/__init__.py | 10 +-
 .../contrib/marc21tojson/dnb/__init__.py | 2 +-
 .../dojson/contrib/marc21tojson/dnb/model.py | 136 +-
 .../contrib/marc21tojson/kul/__init__.py | 2 +-
 .../dojson/contrib/marc21tojson/kul/model.py | 140 +-
 .../contrib/marc21tojson/loc/__init__.py | 2 +-
 .../dojson/contrib/marc21tojson/loc/model.py | 454 +-
 .../contrib/marc21tojson/rero/__init__.py | 2 +-
 .../dojson/contrib/marc21tojson/rero/model.py | 839 +--
 .../contrib/marc21tojson/slsp/__init__.py | 2 +-
 .../dojson/contrib/marc21tojson/slsp/model.py | 298 +-
 .../contrib/marc21tojson/ugent/__init__.py | 2 +-
 .../contrib/marc21tojson/ugent/model.py | 214 +-
 .../dojson/contrib/marc21tojson/utils.py | 2126 +++---
 .../dojson/contrib/unimarctojson/__init__.py | 2 +-
 .../dojson/contrib/unimarctojson/model.py | 1262 ++---
 .../modules/documents/dumpers/__init__.py | 55 +-
 rero_ils/modules/documents/dumpers/indexer.py | 230 +-
 .../modules/documents/dumpers/replace_refs.py | 38 +-
 rero_ils/modules/documents/dumpers/title.py | 9 +-
 .../modules/documents/extensions/__init__.py | 10 +-
 .../documents/extensions/add_mef_pid.py | 11 +-
 .../documents/extensions/edition_statement.py | 26 +-
 .../extensions/provision_activities.py | 43 +-
 .../documents/extensions/series_statement.py | 57 +-
 .../modules/documents/extensions/title.py | 60 +-
 rero_ils/modules/documents/jsonresolver.py | 4 +-
 rero_ils/modules/documents/loaders/marcxml.py | 6 +-
 rero_ils/modules/documents/models.py | 17 +-
 rero_ils/modules/documents/permissions.py | 15 +-
 rero_ils/modules/documents/query.py | 62 +-
 .../modules/documents/serializers/__init__.py | 56 +-
 .../modules/documents/serializers/base.py | 304 +-
 rero_ils/modules/documents/serializers/dc.py | 79 +-
 .../modules/documents/serializers/json.py | 195 +-
 .../modules/documents/serializers/marc.py | 190 +-
 rero_ils/modules/documents/serializers/ris.py | 122 +-
 rero_ils/modules/documents/utils.py | 194 +-
 rero_ils/modules/documents/views.py | 467 +-
 rero_ils/modules/ebooks/cli.py | 58 +-
 .../ebooks/dojson/contrib/marc21/__init__.py | 2 +-
 .../ebooks/dojson/contrib/marc21/model.py | 444 +-
 rero_ils/modules/ebooks/receivers.py | 33 +-
 rero_ils/modules/ebooks/tasks.py | 60 +-
 rero_ils/modules/ebooks/utils.py | 95 +-
 rero_ils/modules/entities/api.py | 64 +-
 rero_ils/modules/entities/dumpers/__init__.py | 40 +-
 .../dumpers/authorized_acces_point.py | 3 +-
 rero_ils/modules/entities/dumpers/document.py | 26 +-
 rero_ils/modules/entities/dumpers/indexer.py | 4 +-
 rero_ils/modules/entities/fetchers.py | 11 +-
 rero_ils/modules/entities/helpers.py | 14 +-
 .../modules/entities/local_entities/api.py | 22 +-
 .../local_entities/extensions/__init__.py | 5 +-
 .../extensions/authorized_access_point.py | 6 +-
 .../extensions/local_entity_factory.py | 11 +-
 .../entities/local_entities/indexer.py | 20 +-
 .../entities/local_entities/jsonresolver.py | 4 +-
 .../modules/entities/local_entities/models.py | 8 +-
 .../entities/local_entities/permissions.py | 15 +-
 .../modules/entities/local_entities/proxy.py | 20 +-
 .../local_entities/subclasses/__init__.py | 12 +-
 .../local_entities/subclasses/organisation.py | 17 +-
 .../local_entities/subclasses/person.py | 16 +-
 .../local_entities/subclasses/place.py | 2 +-
 .../local_entities/subclasses/temporal.py | 2 +-
 .../local_entities/subclasses/topic.py | 2 +-
 .../local_entities/subclasses/work.py | 8 +-
 .../modules/entities/local_entities/views.py | 27 +-
 rero_ils/modules/entities/logger.py | 51 +-
 rero_ils/modules/entities/minters.py | 10 +-
 rero_ils/modules/entities/models.py | 24 +-
 .../modules/entities/remote_entities/api.py | 95 +-
 .../modules/entities/remote_entities/cli.py | 86 +-
 .../entities/remote_entities/models.py | 14 +-
 .../modules/entities/remote_entities/proxy.py | 143 +-
 .../entities/remote_entities/replace.py | 168 +-
 .../modules/entities/remote_entities/sync.py | 223 +-
 .../modules/entities/remote_entities/tasks.py | 50 +-
 .../modules/entities/remote_entities/utils.py | 62 +-
 .../modules/entities/remote_entities/views.py | 14 +-
 .../modules/entities/serializers/__init__.py | 2 +-
 rero_ils/modules/entities/views.py | 82 +-
 rero_ils/modules/ext.py | 204 +-
 rero_ils/modules/extensions.py | 21 +-
 rero_ils/modules/fetchers.py | 8 +-
 rero_ils/modules/files/cli.py | 71 +-
 rero_ils/modules/files/components.py | 88 +-
 rero_ils/modules/files/dumpers.py | 1 +
 rero_ils/modules/files/permissions.py | 27 +-
 rero_ils/modules/files/results.py | 5 +-
 rero_ils/modules/files/schemas.py | 4 +-
 rero_ils/modules/files/services.py | 17 +-
 rero_ils/modules/holdings/api.py | 538 +-
 rero_ils/modules/holdings/api_views.py | 164 +-
 rero_ils/modules/holdings/cli.py | 100 +-
 rero_ils/modules/holdings/dumpers.py | 6 +-
 rero_ils/modules/holdings/jsonresolver.py | 4 +-
 rero_ils/modules/holdings/listener.py | 204 +-
 rero_ils/modules/holdings/models.py | 38 +-
 rero_ils/modules/holdings/permissions.py | 25 +-
 rero_ils/modules/holdings/serializers.py | 30 +-
 rero_ils/modules/holdings/tasks.py | 23 +-
 rero_ils/modules/holdings/utils.py | 22 +-
 rero_ils/modules/holdings/views.py | 37 +-
 rero_ils/modules/ill_requests/api.py | 59 +-
 rero_ils/modules/ill_requests/cli.py | 34 +-
 rero_ils/modules/ill_requests/extensions.py | 13 +-
 rero_ils/modules/ill_requests/forms.py | 180 +-
 rero_ils/modules/ill_requests/jsonresolver.py | 4 +-
 rero_ils/modules/ill_requests/listener.py | 28 +-
 rero_ils/modules/ill_requests/models.py | 23 +-
 rero_ils/modules/ill_requests/permissions.py | 21 +-
 rero_ils/modules/ill_requests/serializers.py | 23 +-
 rero_ils/modules/ill_requests/utils.py | 18 +-
 rero_ils/modules/ill_requests/views.py | 142 +-
 rero_ils/modules/imports/api.py | 660 ++-
 rero_ils/modules/imports/exceptions.py | 2 +-
 .../modules/imports/serializers/__init__.py | 37 +-
 .../modules/imports/serializers/response.py | 8 +-
 .../imports/serializers/serializers.py | 72 +-
 rero_ils/modules/imports/views.py | 141 +-
 rero_ils/modules/indexer_utils.py | 12 +-
 rero_ils/modules/item_types/api.py | 100 +-
 rero_ils/modules/item_types/jsonresolver.py | 4 +-
 rero_ils/modules/item_types/listener.py | 39 +-
 rero_ils/modules/item_types/models.py | 11 +-
 rero_ils/modules/item_types/permissions.py | 19 +-
 rero_ils/modules/item_types/views.py | 24 +-
 rero_ils/modules/items/api/__init__.py | 13 +-
 rero_ils/modules/items/api/api.py | 151 +-
 rero_ils/modules/items/api/circulation.py | 1120 ++--
 rero_ils/modules/items/api/issue.py | 75 +-
 rero_ils/modules/items/api/record.py | 293 +-
 rero_ils/modules/items/cli.py | 233 +-
 rero_ils/modules/items/decorators.py | 31 +-
 rero_ils/modules/items/dumpers.py | 111 +-
 rero_ils/modules/items/extensions.py | 7 +-
 rero_ils/modules/items/jsonresolver.py | 4 +-
 rero_ils/modules/items/listener.py | 51 +-
 rero_ils/modules/items/models.py | 72 +-
 rero_ils/modules/items/permissions.py | 21 +-
 .../modules/items/serializers/__init__.py | 158 +-
 .../modules/items/serializers/collector.py | 350 +-
 rero_ils/modules/items/serializers/csv.py | 42 +-
 rero_ils/modules/items/serializers/json.py | 100 +-
 rero_ils/modules/items/tasks.py | 59 +-
 rero_ils/modules/items/utils.py | 69 +-
 rero_ils/modules/items/views/__init__.py | 11 +-
 rero_ils/modules/items/views/api_views.py | 295 +-
 rero_ils/modules/items/views/filters.py | 14 +-
 rero_ils/modules/items/views/rest.py | 17 +-
 rero_ils/modules/jsonresolver.py | 23 +-
 rero_ils/modules/libraries/api.py | 206 +-
 rero_ils/modules/libraries/api_views.py | 30 +-
 rero_ils/modules/libraries/dumpers.py | 64 +-
 rero_ils/modules/libraries/extensions.py | 4 +-
 rero_ils/modules/libraries/jsonresolver.py | 4 +-
 rero_ils/modules/libraries/models.py | 21 +-
 rero_ils/modules/libraries/permissions.py | 42 +-
 rero_ils/modules/libraries/tasks.py | 18 +-
 rero_ils/modules/loans/api.py | 696 +--
 rero_ils/modules/loans/api_views.py | 44 +-
 rero_ils/modules/loans/cli.py | 500 +-
 rero_ils/modules/loans/dumpers.py | 52 +-
 rero_ils/modules/loans/extensions.py | 54 +-
 rero_ils/modules/loans/jsonresolver.py | 4 +-
 rero_ils/modules/loans/listener.py | 35 +-
 rero_ils/modules/loans/logs/api.py | 98 +-
 rero_ils/modules/loans/models.py | 36 +-
 rero_ils/modules/loans/permissions.py | 22 +-
 rero_ils/modules/loans/query.py | 12 +-
 .../modules/loans/serializers/__init__.py | 49 +-
 rero_ils/modules/loans/serializers/csv.py | 74 +-
 rero_ils/modules/loans/serializers/json.py | 105 +-
 rero_ils/modules/loans/tasks.py | 38 +-
 rero_ils/modules/loans/utils.py | 106 +-
 rero_ils/modules/local_fields/api.py | 47 +-
 rero_ils/modules/local_fields/dumpers.py | 4 +-
 rero_ils/modules/local_fields/extensions.py | 1 +
 rero_ils/modules/local_fields/jsonresolver.py | 8 +-
 rero_ils/modules/local_fields/models.py | 11 +-
 rero_ils/modules/local_fields/permissions.py | 19 +-
 rero_ils/modules/locations/api.py | 131 +-
 rero_ils/modules/locations/extensions.py | 4 +-
 rero_ils/modules/locations/indexer.py | 18 +-
 rero_ils/modules/locations/jsonresolver.py | 4 +-
 rero_ils/modules/locations/models.py | 11 +-
 rero_ils/modules/locations/permissions.py | 21 +-
 .../modules/locations/serializers/__init__.py | 4 +-
 .../modules/locations/serializers/json.py | 11 +-
 rero_ils/modules/locations/tasks.py | 16 +-
 rero_ils/modules/message.py | 12 +-
 rero_ils/modules/minters.py | 6 +-
 rero_ils/modules/monitoring/api.py | 131 +-
 rero_ils/modules/monitoring/cli.py | 115 +-
 rero_ils/modules/monitoring/views.py | 264 +-
 rero_ils/modules/normalizer_stop_words.py | 29 +-
 rero_ils/modules/notifications/api.py | 91 +-
 rero_ils/modules/notifications/cli.py | 41 +-
 rero_ils/modules/notifications/dispatcher.py | 103 +-
 rero_ils/modules/notifications/extensions.py | 4 +-
 .../modules/notifications/jsonresolver.py | 4 +-
 rero_ils/modules/notifications/listener.py | 20 +-
 rero_ils/modules/notifications/logs/api.py | 58 +-
 rero_ils/modules/notifications/models.py | 69 +-
 rero_ils/modules/notifications/permissions.py | 51 +-
 .../notifications/subclasses/acq_order.py | 51 +-
 .../notifications/subclasses/at_desk.py | 53 +-
 .../notifications/subclasses/availability.py | 47 +-
 .../notifications/subclasses/booking.py | 39 +-
 .../notifications/subclasses/circulation.py | 51 +-
 .../notifications/subclasses/claim_issue.py | 47 +-
 .../notifications/subclasses/internal.py | 7 +-
 .../notifications/subclasses/recall.py | 34 +-
 .../notifications/subclasses/reminder.py | 81 +-
 .../notifications/subclasses/request.py | 32 +-
 .../notifications/subclasses/transit.py | 23 +-
 rero_ils/modules/notifications/tasks.py | 50 +-
 rero_ils/modules/notifications/utils.py | 75 +-
 rero_ils/modules/notifications/views.py | 15 +-
 rero_ils/modules/operation_logs/api.py | 100 +-
 rero_ils/modules/operation_logs/cli.py | 45 +-
 .../operation_logs/es_templates/__init__.py | 4 +-
 rero_ils/modules/operation_logs/extensions.py | 118 +-
 rero_ils/modules/operation_logs/logs/api.py | 73 +-
 rero_ils/modules/operation_logs/models.py | 6 +-
 .../modules/operation_logs/permissions.py | 9 +-
 .../modules/operation_logs/serializers.py | 32 +-
 rero_ils/modules/organisations/api.py | 87 +-
 rero_ils/modules/organisations/dumpers.py | 16 +-
 .../modules/organisations/jsonresolver.py | 4 +-
 rero_ils/modules/organisations/models.py | 8 +-
 rero_ils/modules/organisations/permissions.py | 19 +-
 .../modules/patron_transaction_events/api.py | 99 +-
 .../patron_transaction_events/jsonresolver.py | 4 +-
 .../patron_transaction_events/listener.py | 40 +-
 .../patron_transaction_events/models.py | 19 +-
 .../patron_transaction_events/permissions.py | 21 +-
 .../serializers/__init__.py | 49 +-
 .../serializers/csv.py | 78 +-
 .../serializers/json.py | 47 +-
 .../patron_transaction_events/utils.py | 11 +-
 rero_ils/modules/patron_transactions/api.py | 94 +-
 .../modules/patron_transactions/extensions.py | 59 +-
 .../patron_transactions/jsonresolver.py | 4 +-
 .../modules/patron_transactions/listener.py | 26 +-
 .../modules/patron_transactions/models.py | 29 +-
 .../patron_transactions/permissions.py | 21 +-
 .../patron_transactions/serializers.py | 32 +-
 rero_ils/modules/patron_transactions/utils.py | 143 +-
 rero_ils/modules/patron_types/api.py | 206 +-
 rero_ils/modules/patron_types/jsonresolver.py | 4 +-
 rero_ils/modules/patron_types/models.py | 8 +-
 rero_ils/modules/patron_types/permissions.py | 19 +-
 rero_ils/modules/patron_types/views.py | 24 +-
 rero_ils/modules/patrons/api.py | 471 +-
 rero_ils/modules/patrons/cli.py | 156 +-
 rero_ils/modules/patrons/dumpers.py | 29 +-
 rero_ils/modules/patrons/extensions.py | 2 +-
 rero_ils/modules/patrons/jsonresolver.py | 4 +-
 rero_ils/modules/patrons/listener.py | 24 +-
 rero_ils/modules/patrons/loaders/__init__.py | 4 +-
 rero_ils/modules/patrons/models.py | 12 +-
 rero_ils/modules/patrons/permissions.py | 47 +-
 rero_ils/modules/patrons/query.py | 9 +-
 rero_ils/modules/patrons/schemas/json.py | 37 +-
 rero_ils/modules/patrons/serializers.py | 14 +-
 rero_ils/modules/patrons/tasks.py | 29 +-
 rero_ils/modules/patrons/utils.py | 28 +-
 rero_ils/modules/patrons/views.py | 347 +-
 rero_ils/modules/permissions.py | 208 +-
 rero_ils/modules/providers.py | 14 +-
 rero_ils/modules/receivers.py | 19 +-
 rero_ils/modules/record_extensions.py | 16 +-
 rero_ils/modules/selfcheck/admin.py | 94 +-
 rero_ils/modules/selfcheck/api.py | 450 +-
 rero_ils/modules/selfcheck/cli.py | 108 +-
 rero_ils/modules/selfcheck/models.py | 9 +-
 rero_ils/modules/selfcheck/permissions.py | 7 +-
 rero_ils/modules/selfcheck/utils.py | 147 +-
 rero_ils/modules/serializers/__init__.py | 34 +-
 rero_ils/modules/serializers/base.py | 90 +-
 rero_ils/modules/serializers/mixins.py | 22 +-
 rero_ils/modules/serializers/response.py | 73 +-
 rero_ils/modules/serializers/schema.py | 3 +-
 rero_ils/modules/sru/cql_parser.py | 496 +-
 rero_ils/modules/sru/explaine.py | 118 +-
 rero_ils/modules/sru/views.py | 100 +-
 rero_ils/modules/stats/api/api.py | 16 +-
 rero_ils/modules/stats/api/indicators/base.py | 2 +-
 .../stats/api/indicators/circulation.py | 123 +-
 .../modules/stats/api/indicators/others.py | 271 +-
 .../modules/stats/api/indicators/patron.py | 114 +-
 .../modules/stats/api/indicators/requests.py | 11 +-
 rero_ils/modules/stats/api/librarian.py | 236 +-
 rero_ils/modules/stats/api/pricing.py | 231 +-
 rero_ils/modules/stats/api/report.py | 128 +-
 rero_ils/modules/stats/cli.py | 145 +-
 rero_ils/modules/stats/exceptions.py | 3 +-
 rero_ils/modules/stats/extensions.py | 21 +-
 rero_ils/modules/stats/models.py | 17 +-
 rero_ils/modules/stats/permissions.py | 40 +-
 rero_ils/modules/stats/serializers.py | 95 +-
 rero_ils/modules/stats/tasks.py | 35 +-
 rero_ils/modules/stats/views.py | 123 +-
 rero_ils/modules/stats_cfg/api.py | 32 +-
 rero_ils/modules/stats_cfg/dumpers.py | 16 +-
 rero_ils/modules/stats_cfg/jsonresolver.py | 4 +-
 rero_ils/modules/stats_cfg/models.py | 11 +-
 rero_ils/modules/stats_cfg/permissions.py | 21 +-
 .../modules/stats_cfg/serializers/__init__.py | 6 +-
 .../modules/stats_cfg/serializers/json.py | 3 +-
 rero_ils/modules/stats_cfg/views.py | 12 +-
 rero_ils/modules/tasks.py | 20 +-
 rero_ils/modules/templates/api.py | 37 +-
 rero_ils/modules/templates/extensions.py | 44 +-
 rero_ils/modules/templates/jsonresolver.py | 4 +-
 rero_ils/modules/templates/listener.py | 15 +-
 .../modules/templates/loaders/__init__.py | 4 +-
 rero_ils/modules/templates/models.py | 15 +-
 rero_ils/modules/templates/permissions.py | 31 +-
 rero_ils/modules/templates/schemas/json.py | 25 +-
 rero_ils/modules/templates/views.py | 8 +-
 rero_ils/modules/users/api.py | 136 +-
 rero_ils/modules/users/api_views.py | 30 +-
 rero_ils/modules/users/forms.py | 12 +-
 rero_ils/modules/users/listener.py | 4 +-
 rero_ils/modules/users/models.py | 32 +-
 rero_ils/modules/users/validators.py | 8 +-
 rero_ils/modules/users/views.py | 118 +-
 rero_ils/modules/utils.py | 640 +--
 rero_ils/modules/vendors/api.py | 77 +-
 rero_ils/modules/vendors/dumpers.py | 24 +-
 rero_ils/modules/vendors/jsonresolver.py | 10 +-
 rero_ils/modules/vendors/models.py | 35 +-
 rero_ils/modules/vendors/permissions.py | 19 +-
 rero_ils/modules/views.py | 73 +-
 rero_ils/oauth/scopes.py | 14 +-
 rero_ils/permissions.py | 34 +-
 rero_ils/query.py | 263 +-
 rero_ils/schedulers.py | 102 +-
 rero_ils/theme/menus.py | 248 +-
 rero_ils/theme/views.py | 186 +-
 rero_ils/theme/webpack.py | 18 +-
 rero_ils/utils.py | 7 +-
 scripts/test | 48 +-
 .../test_acq_accounts_permissions.py | 159 +-
 .../acq_accounts/test_acq_accounts_rest.py | 237 +-
 .../test_acq_accounts_serializers.py | 37 +-
 .../test_acq_invoices_permissions.py | 173 +-
 .../acq_invoices/test_acq_invoices_rest.py | 116 +-
 .../test_acq_order_lines_permissions.py | 181 +-
 .../test_acq_order_lines_rest.py | 133 +-
 .../acq_orders/test_acq_orders_permissions.py | 169 +-
 tests/api/acq_orders/test_acq_orders_rest.py | 218 +-
 .../acq_orders/test_acq_orders_serializers.py | 48 +-
 tests/api/acq_orders/test_acq_orders_views.py | 104 +-
 .../test_acq_receipt_lines_permissions.py | 172 +-
 .../test_acq_receipt_lines_rest.py | 121 +-
 .../test_acq_receipts_permissions.py | 171 +-
 .../acq_receipts/test_acq_receipts_rest.py | 101 +-
 .../acq_receipts/test_acq_receipts_views.py | 64 +-
 tests/api/acquisition/acq_utils.py | 20 +-
 .../acquisition/test_acquisition_dumpers.py | 13 +-
 .../test_acquisition_reception_workflow.py | 559 +-
 .../acquisition/test_acquisition_rollover.py | 149 +-
 .../acquisition/test_acquisition_scenarios.py | 484 +-
 .../test_acquisition_serializers.py | 75 +-
 tests/api/budgets/test_budgets_permissions.py | 78 +-
 tests/api/budgets/test_budgets_rest.py | 221 +-
 .../test_circ_policies_permissions.py | 143 +-
 .../circ_policies/test_circ_policies_rest.py | 109 +-
 .../circulation/scenarios/test_scenario_a.py | 45 +-
 .../circulation/scenarios/test_scenario_b.py | 66 +-
 .../circulation/scenarios/test_scenario_c.py | 120 +-
 .../circulation/scenarios/test_scenario_d.py | 154 +-
 .../test_actions_views_add_request.py | 62 +-
 .../test_actions_views_cancel_request.py | 45 +-
 .../test_actions_views_change_pickup.py | 77 +-
 .../circulation/test_actions_views_checkin.py | 119 +-
 .../test_actions_views_checkout.py | 87 +-
 .../test_actions_views_extend_loan_request.py | 74 +-
 .../test_actions_views_validate_request.py | 72 +-
 tests/api/circulation/test_borrow_limits.py | 472 +-
 tests/api/circulation/test_inhouse_cipo.py | 47 +-
 .../test_library_calendar_changes.py | 48 +-
 .../test_library_with_no_circulation.py | 171 +-
 .../test_locations_restrictions.py | 61 +-
 .../circulation/test_temporary_item_type.py | 53 +-
 tests/api/collections/test_collections_api.py | 7 +-
 .../test_collections_permissions.py | 204 +-
 .../api/collections/test_collections_rest.py | 54 +-
 tests/api/conftest.py | 182 +-
 tests/api/documents/test_documents_dumpers.py | 30 +-
 .../documents/test_documents_files_rest.py | 30 +-
 .../documents/test_documents_permissions.py | 124 +-
 tests/api/documents/test_documents_rest.py | 85 +-
 .../api/documents/test_export_serializers.py | 56 +-
 tests/api/documents/test_marcxml_rest_api.py | 36 +-
 .../test_local_entities_extensions.py | 29 +-
 .../test_local_entities_permissions.py | 95 +-
 .../test_local_entities_rest.py | 186 +-
 .../test_remote_entities_permissions.py | 90 +-
 .../test_remote_entities_rest.py | 152 +-
 tests/api/entities/test_entities_rest.py | 37 +-
 tests/api/entities/test_entities_search.py | 37 +-
 tests/api/files/test_files_permissions.py | 206 +-
 .../api/holdings/test_holdings_permissions.py | 216 +-
 tests/api/holdings/test_holdings_rest.py | 174 +-
 tests/api/holdings/test_patterns.py | 348 +-
 tests/api/holdings/test_provisional_items.py | 181 +-
 .../test_ill_requests_permissions.py | 95 +-
 .../ill_requests/test_ill_requests_rest.py | 245 +-
 .../item_types/test_item_types_permissions.py | 143 +-
 tests/api/item_types/test_item_types_rest.py | 235 +-
 tests/api/items/test_items_in_transit.py | 45 +-
 tests/api/items/test_items_issue.py | 152 +-
 tests/api/items/test_items_permissions.py | 174 +-
 tests/api/items/test_items_rest.py | 967 ++--
 tests/api/items/test_items_rest_views.py | 211 +-
 tests/api/items/test_items_serializer.py | 150 +-
 .../libraries/test_libraries_permissions.py | 153 +-
 tests/api/libraries/test_libraries_rest.py | 281 +-
 .../libraries/test_libraries_rest_views.py | 42 +-
 .../api/loans/test_loans_delete_item_rest.py | 41 +-
 tests/api/loans/test_loans_permissions.py | 113 +-
 tests/api/loans/test_loans_rest.py | 468 +-
 tests/api/loans/test_loans_rest_views.py | 48 +-
 tests/api/loans/test_loans_utils.py | 35 +-
 .../test_local_fields_permissions.py | 101 +-
 .../local_fields/test_local_fields_rest.py | 79 +-
 .../locations/test_locations_permissions.py | 144 +-
 tests/api/locations/test_locations_rest.py | 210 +-
 .../test_notifications_permissions.py | 212 +-
 .../notifications/test_notifications_rest.py | 921 +--
 .../test_operation_logs_permissions.py | 113 +-
 .../test_operation_logs_rest.py | 226 +-
 .../test_organisations_permissions.py | 135 +-
 .../test_organisations_rest_api.py | 121 +-
 .../test_patron_payments_rest.py | 155 +-
 ...t_patron_transaction_events_permissions.py | 171 +-
 .../test_patron_transaction_events_rest.py | 152 +-
 .../test_patron_transactions_permissions.py | 172 +-
 .../test_patron_transactions_rest.py | 245 +-
 .../test_patron_types_permissions.py | 141 +-
 .../patron_types/test_patron_types_rest.py | 249 +-
 tests/api/patrons/test_patrons_blocked.py | 77 +-
 tests/api/patrons/test_patrons_marshmallow.py | 44 +-
 tests/api/patrons/test_patrons_permissions.py | 205 +-
 tests/api/patrons/test_patrons_rest.py | 435 +-
 tests/api/patrons/test_patrons_views.py | 109 +-
 tests/api/selfcheck/test_admin.py | 64 +-
 tests/api/selfcheck/test_models.py | 36 +-
 tests/api/selfcheck/test_selfcheck.py | 367 +-
 tests/api/selfcheck/test_selfcheck_utils.py | 34 +-
 tests/api/sru/test_sru_rest.py | 104 +-
 tests/api/stats/conftest.py | 28 +-
 tests/api/stats/test_stats_permissions.py | 95 +-
 tests/api/stats/test_stats_rest.py | 90 +-
 .../stats_cfg/test_stats_cfg_permissions.py | 155 +-
 tests/api/stats_cfg/test_stats_cfg_rest.py | 19 +-
 tests/api/stats_cfg/test_stats_cfg_views.py | 11 +-
 .../templates/test_templates_marshmallow.py | 29 +-
 .../templates/test_templates_permissions.py | 474 +-
 tests/api/templates/test_templates_rest.py | 316 +-
 tests/api/test_accounts_rest_auth.py | 29 +-
 tests/api/test_availability.py | 364 +-
 tests/api/test_circ_bug.py | 112 +-
 tests/api/test_commons_api.py | 188 +-
 tests/api/test_exports.py | 58 +-
 tests/api/test_external_services.py | 987 ++--
 tests/api/test_monitoring_rest.py | 184 +-
 tests/api/test_pid_rest.py | 14 +-
 tests/api/test_record_permissions.py | 119 +-
 tests/api/test_search.py | 310 +-
 tests/api/test_serializers.py | 317 +-
 tests/api/test_tasks.py | 261 +-
 tests/api/test_translations.py | 23 +-
 tests/api/test_user_authentication.py | 174 +-
 tests/api/users/test_user_api.py | 90 +-
 tests/api/users/test_users_profile_updates.py | 82 +-
 tests/api/users/test_users_rest.py | 236 +-
 tests/api/vendors/test_vendors.py | 74 +-
 tests/api/vendors/test_vendors_permissions.py | 149 +-
 tests/conftest.py | 341 +-
 tests/e2e/test_front_page.py | 8 +-
 tests/fixtures/acquisition.py | 773 +--
 tests/fixtures/circulation.py | 1169 ++--
 tests/fixtures/mef.py | 31 +-
 tests/fixtures/metadata.py | 968 ++--
 tests/fixtures/organisations.py | 637 +--
 tests/fixtures/sip2.py | 42 +-
 tests/scheduler/conftest.py | 40 +-
 tests/scheduler/test_scheduler.py | 105 +-
 .../test_acq_accounts_jsonresolver.py | 14 +-
 .../acq_accounts/test_acq_accounts_mapping.py | 13 +-
 .../test_acq_invoices_jsonresolver.py | 14 +-
 .../acq_invoices/test_acq_invoices_mapping.py | 25 +-
 .../test_acq_order_lines_api.py | 16 +-
 .../test_acq_order_lines_jsonresolver.py | 21 +-
 .../test_acq_order_lines_mapping.py | 17 +-
 tests/ui/acq_orders/test_acq_orders_api.py | 68 +-
 .../test_acq_orders_jsonresolver.py | 14 +-
 .../ui/acq_orders/test_acq_orders_mapping.py | 13 +-
 .../test_acq_receipt_lines_api.py | 20 +-
 .../test_acq_receipt_lines_jsonresolver.py | 10 +-
 .../test_acq_receipt_lines_mapping.py | 15 +-
 .../ui/acq_receipts/test_acq_receipts_api.py | 35 +-
 .../test_acq_receipts_jsonresolver.py | 10 +-
 .../acq_receipts/test_acq_receipts_mapping.py | 21 +-
 .../apiharvester/test_apiharvester_utils.py | 122 +-
 tests/ui/budgets/test_budgets_api.py | 22 +-
 tests/ui/budgets/test_budgets_jsonresolver.py | 10 +-
 tests/ui/budgets/test_budgets_mapping.py | 9 +-
 .../circ_policies/test_circ_policies_api.py | 117 +-
 .../test_circ_policies_mapping.py | 18 +-
 .../circ_policies/test_circ_policies_utils.py | 76 +-
 .../circulation/test_actions_add_request.py | 186 +-
 .../circulation/test_actions_auto_extend.py | 83 +-
 .../test_actions_cancel_request.py | 401 +-
 tests/ui/circulation/test_actions_checkin.py | 412 +-
 tests/ui/circulation/test_actions_checkout.py | 396 +-
 .../test_actions_expired_request.py | 36 +-
 tests/ui/circulation/test_actions_extend.py | 204 +-
 .../test_actions_validate_request.py | 186 +-
 tests/ui/circulation/test_extend_external.py | 104 +-
 .../ui/circulation/test_in_transit_actions.py | 113 +-
 tests/ui/circulation/test_loan_utils.py | 91 +-
 .../collections/test_collections_mapping.py | 15 +-
 tests/ui/collections/test_collections_ui.py | 14 +-
 .../ui/collections/test_collections_utils.py | 9 +-
 tests/ui/conftest.py | 65 +-
 tests/ui/documents/conftest.py | 9 +-
 tests/ui/documents/test_documents_api.py | 298 +-
 tests/ui/documents/test_documents_filter.py | 713 +--
 .../documents/test_documents_jsonresolver.py | 12 +-
 tests/ui/documents/test_documents_mapping.py | 34 +-
 tests/ui/documents/test_documents_ui.py | 35 +-
 tests/ui/ebooks/test_ebooks_receivers.py | 97 +-
 tests/ui/ebooks/test_ebooks_utils.py | 19 +-
 .../local_entities/test_local_entities_api.py | 32 +-
 .../test_local_entities_dumpers.py | 10 +-
 .../test_local_entities_jsonresolver.py | 22 +-
 .../test_local_entities_mapping.py | 11 +-
 .../test_remote_entities_api.py | 406 +-
 .../test_remote_entities_mapping.py | 45 +-
 .../test_remote_entities_ui.py | 16 +-
 .../test_remote_entities_utils.py | 17 +-
 tests/ui/entities/test_entities_api.py | 4 +-
 tests/ui/entities/test_entities_ui.py | 122 +-
 tests/ui/files/test_files_preview.py | 37 +-
 tests/ui/holdings/test_holdings_api.py | 95 +-
 tests/ui/holdings/test_holdings_item.py | 73 +-
 .../ui/holdings/test_holdings_jsonresolver.py | 14 +-
 tests/ui/holdings/test_holdings_mapping.py | 16 +-
 tests/ui/holdings/test_holdings_patterns.py | 415 +-
 tests/ui/holdings/test_issues_reindex.py | 131 +-
 tests/ui/holdings/test_serial_claims.py | 27 +-
 .../ui/ill_requests/test_ill_requests_api.py | 67 +-
 .../test_ill_requests_jsonresolver.py | 16 +-
 .../ill_requests/test_ill_requests_mapping.py | 10 +-
 tests/ui/ill_requests/test_ill_requests_ui.py | 60 +-
 .../ill_requests/test_ill_requests_utils.py | 15 +-
 tests/ui/item_types/conftest.py | 4 +-
 tests/ui/item_types/test_item_types_api.py | 73 +-
 .../test_item_types_jsonresolver.py | 16 +-
 .../ui/item_types/test_item_types_mapping.py | 13 +-
 tests/ui/items/test_items_api.py | 240 +-
 tests/ui/items/test_items_dumpers.py | 95 +-
 tests/ui/items/test_items_jsonresolver.py | 10 +-
 tests/ui/items/test_items_mapping.py | 14 +-
 tests/ui/libraries/conftest.py | 10 +-
 tests/ui/libraries/test_libraries_api.py | 87 +-
 tests/ui/libraries/test_libraries_dumpers.py | 23 +-
 .../libraries/test_libraries_jsonresolver.py | 10 +-
 tests/ui/libraries/test_libraries_mapping.py | 21 +-
 tests/ui/loans/test_loans_api.py | 303 +-
 tests/ui/loans/test_loans_dumpers.py | 12 +-
 tests/ui/loans/test_loans_jsonresolver.py | 10 +-
 tests/ui/loans/test_loans_operation_logs.py | 122 +-
 tests/ui/local_fields/conftest.py | 2 +-
 .../ui/local_fields/test_local_fields_api.py | 73 +-
 .../test_local_fields_jsonresolver.py | 14 +-
 .../local_fields/test_local_fields_mapping.py | 26 +-
 tests/ui/locations/conftest.py | 4 +-
 tests/ui/locations/test_locations_api.py | 62 +-
 .../locations/test_locations_jsonresolver.py | 12 +-
 tests/ui/locations/test_locations_mapping.py | 12 +-
 tests/ui/locations/test_locations_other.py | 28 +-
 .../notifications/test_notifications_api.py | 122 +-
 .../test_notifications_mapping.py | 12 +-
 .../notifications/test_notifications_utils.py | 14 +-
 .../operation_logs/test_operation_logs_api.py | 162 +-
 .../organisations/test_organisations_api.py | 24 +-
 .../test_organisations_jsonresolver.py | 16 +-
 .../test_patron_transaction_events_api.py | 39 +-
 ..._patron_transaction_events_jsonresolver.py | 21 +-
 .../test_patron_transaction_events_mapping.py | 11 +-
 .../test_patron_transactions_api.py | 26 +-
 .../test_patron_transactions_jsonresolver.py | 17 +-
 .../test_patron_transactions_mapping.py | 11 +-
 tests/ui/patron_types/conftest.py | 6 +-
 .../ui/patron_types/test_patron_types_api.py | 30 +-
 .../test_patron_types_jsonresolver.py | 16 +-
 .../patron_types/test_patron_types_mapping.py | 18 +-
 tests/ui/patrons/conftest.py | 9 +-
 tests/ui/patrons/test_patrons_api.py | 274 +-
 tests/ui/patrons/test_patrons_dumpers.py | 6 +-
 tests/ui/patrons/test_patrons_jsonresolver.py | 12 +-
 tests/ui/patrons/test_patrons_mapping.py | 13 +-
 tests/ui/patrons/test_patrons_ui.py | 46 +-
 tests/ui/stats/conftest.py | 4 +-
 tests/ui/stats/test_stats_librarian.py | 174 +-
 tests/ui/stats/test_stats_pricing.py | 192 +-
 tests/ui/stats/test_stats_report.py | 50 +-
 .../test_stats_report_n_deleted_items.py | 78 +-
 tests/ui/stats/test_stats_report_n_docs.py | 202 +-
 .../stats/test_stats_report_n_ill_requests.py | 211 +-
 tests/ui/stats/test_stats_report_n_items.py | 315 +-
 tests/ui/stats/test_stats_report_n_patrons.py | 271 +-
 .../test_stats_report_n_serial_holdings.py | 160 +-
 .../test_stats_report_number_of_ciculation.py | 677 ++-
 tests/ui/stats/test_stats_tasks.py | 4 +-
 tests/ui/stats/test_stats_views.py | 30 +-
 tests/ui/stats_cfg/test_stats_cfg_api.py | 24 +-
 .../stats_cfg/test_stats_cfg_jsonresolver.py | 17 +-
 tests/ui/stats_cfg/test_stats_cfg_mapping.py | 15 +-
 tests/ui/templates/test_templates_api.py | 27 +-
 tests/ui/templates/test_templates_mapping.py | 21 +-
 tests/ui/test_api.py | 136 +-
 tests/ui/test_api_schema.py | 39 +-
 tests/ui/test_filters.py | 103 +-
 tests/ui/test_indexer_utils.py | 71 +-
 .../ui/test_invenio_celery_tasks_endpoints.py | 11 +-
 tests/ui/test_message.py | 16 +-
 tests/ui/test_patron_message.py | 67 +-
 tests/ui/test_permissions.py | 8 +-
 tests/ui/test_ui_schema.py | 18 +-
 tests/ui/test_utils_app.py | 233 +-
 tests/ui/test_views.py | 117 +-
 tests/ui/users/test_forms.py | 156 +-
 tests/ui/users/test_users_ui.py | 14 +-
 tests/ui/vendors/test_vendors_api.py | 19 +-
 tests/ui/vendors/test_vendors_jsonresolver.py | 12 +-
 tests/ui/vendors/test_vendors_mapping.py | 5 +-
 tests/unit/conftest.py | 484 +-
 tests/unit/documents/test_documents_dojson.py | 3562 +++++------
 .../documents/test_documents_dojson_dc.py | 331 +-
 .../documents/test_documents_dojson_ebooks.py | 304 +-
 .../documents/test_documents_dojson_marc21.py | 1218 ++--
 .../documents/test_documents_dojson_slsp.py | 120 +-
 .../test_documents_dojson_unimarc.py | 994 ++--
 .../documents/test_documents_jsonschema.py | 269 +-
 tests/unit/documents/test_documents_utils.py | 163 +-
 tests/unit/documents/test_identifiers.py | 47 +-
 tests/unit/test_acq_accounts_jsonschema.py | 24 +-
 tests/unit/test_acq_order_lines_jsonschema.py | 18 +-
 tests/unit/test_acq_orders_jsonschema.py | 12 +-
 .../unit/test_acq_receipt_lines_jsonschema.py | 8 +-
 tests/unit/test_babel_extractors.py | 10 +-
 tests/unit/test_budgets_jsonschema.py | 29 +-
 tests/unit/test_circ_policies_jsonschema.py | 100 +-
 tests/unit/test_cli_fixtures.py | 75 +-
 tests/unit/test_cli_index.py | 132 +-
 tests/unit/test_cli_utils.py | 47 +-
 tests/unit/test_contributions_jsonschema.py | 47 +-
 tests/unit/test_cql_parser.py | 236 +-
 tests/unit/test_csv.py | 81 +-
 tests/unit/test_holdings_jsonschema.py | 70 +-
 tests/unit/test_ill_requests_jsonschema.py | 28 +-
 tests/unit/test_item_types_jsonschema.py | 8 +-
 tests/unit/test_items_jsonschema.py | 81 +-
 tests/unit/test_libraries_jsonschema.py | 30 +-
 tests/unit/test_local_fields_jsonschema.py | 19 +-
 tests/unit/test_locations_jsonschema.py | 9 +-
 tests/unit/test_normalizer_stop_words.py | 45 +-
 tests/unit/test_notifications_jsonschema.py | 11 +-
 tests/unit/test_operation_logs_jsonschema.py | 19 +-
 tests/unit/test_organisations_jsonschema.py | 12 +-
 ...st_patron_transaction_events_jsonschema.py | 113 +-
 .../test_patron_transactions_jsonschema.py | 48 +-
tests/unit/test_patron_types_jsonschema.py | 43 +- tests/unit/test_patrons_jsonschema.py | 50 +- tests/unit/test_sru_explain.py | 25 +- tests/unit/test_stats_cfg_jsonschema.py | 362 +- tests/unit/test_templates_jsonschema.py | 21 +- tests/unit/test_users_jsonschema.py | 33 +- tests/unit/test_utils.py | 112 +- tests/unit/test_vendors_jsonschema.py | 14 +- tests/utils.py | 196 +- 830 files changed, 47307 insertions(+), 47052 deletions(-) delete mode 100644 pytest.ini diff --git a/docker-services.yml b/docker-services.yml index 143d1df238..887ab25ad8 100644 --- a/docker-services.yml +++ b/docker-services.yml @@ -33,7 +33,7 @@ services: - "INVENIO_SECRET_KEY=CHANGE_ME" - "INVENIO_SQLALCHEMY_DATABASE_URI=postgresql+psycopg2://rero-ils:rero-ils@db/rero-ils" - "INVENIO_WSGI_PROXIES=2" - - "INVENIO_RATELIMIT_STORAGE_URL=redis://cache:6379/3" + - "INVENIO_RATELIMIT_STORAGE_URI=redis://cache:6379/3" - "COLLECT_STORAGE='flask_collect.storage.file'" lb: build: ./docker/haproxy/ diff --git a/poetry.lock b/poetry.lock index 1ae9520d19..fd0c81a34d 100644 --- a/poetry.lock +++ b/poetry.lock @@ -116,22 +116,22 @@ files = [ [[package]] name = "attrs" -version = "23.2.0" +version = "24.2.0" description = "Classes Without Boilerplate" optional = false python-versions = ">=3.7" files = [ - {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, - {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, + {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"}, + {file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"}, ] [package.extras] -cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] -dev = ["attrs[tests]", "pre-commit"] -docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] -tests = ["attrs[tests-no-zope]", "zope-interface"] -tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] -tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] +benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] [[package]] name = "autoflake" @@ -150,13 +150,13 @@ tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""} [[package]] name = "babel" -version = "2.15.0" +version = "2.16.0" description = "Internationalization utilities" optional = false python-versions = ">=3.8" files = [ - {file = "Babel-2.15.0-py3-none-any.whl", hash = "sha256:08706bdad8d0a3413266ab61bd6c34d0c28d6e1e7badf40a2cebe67644e2e1fb"}, - {file = "babel-2.15.0.tar.gz", hash = "sha256:8daf0e265d05768bc6c7a314cf1321e9a123afc328cc635c18622a2f30a04413"}, + {file 
= "babel-2.16.0-py3-none-any.whl", hash = "sha256:368b5b98b37c06b7daf6696391c3240c938b37767d4584413e8438c5c435fa8b"}, + {file = "babel-2.16.0.tar.gz", hash = "sha256:d1f3554ca26605fe173f3de0c65f750f5a42f924499bf134de6423582298e316"}, ] [package.extras] @@ -235,33 +235,33 @@ files = [ [[package]] name = "black" -version = "24.4.2" +version = "24.8.0" description = "The uncompromising code formatter." optional = false python-versions = ">=3.8" files = [ - {file = "black-24.4.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dd1b5a14e417189db4c7b64a6540f31730713d173f0b63e55fabd52d61d8fdce"}, - {file = "black-24.4.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e537d281831ad0e71007dcdcbe50a71470b978c453fa41ce77186bbe0ed6021"}, - {file = "black-24.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaea3008c281f1038edb473c1aa8ed8143a5535ff18f978a318f10302b254063"}, - {file = "black-24.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:7768a0dbf16a39aa5e9a3ded568bb545c8c2727396d063bbaf847df05b08cd96"}, - {file = "black-24.4.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:257d724c2c9b1660f353b36c802ccece186a30accc7742c176d29c146df6e474"}, - {file = "black-24.4.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bdde6f877a18f24844e381d45e9947a49e97933573ac9d4345399be37621e26c"}, - {file = "black-24.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e151054aa00bad1f4e1f04919542885f89f5f7d086b8a59e5000e6c616896ffb"}, - {file = "black-24.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:7e122b1c4fb252fd85df3ca93578732b4749d9be076593076ef4d07a0233c3e1"}, - {file = "black-24.4.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:accf49e151c8ed2c0cdc528691838afd217c50412534e876a19270fea1e28e2d"}, - {file = "black-24.4.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:88c57dc656038f1ab9f92b3eb5335ee9b021412feaa46330d5eba4e51fe49b04"}, - {file = "black-24.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be8bef99eb46d5021bf053114442914baeb3649a89dc5f3a555c88737e5e98fc"}, - {file = "black-24.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:415e686e87dbbe6f4cd5ef0fbf764af7b89f9057b97c908742b6008cc554b9c0"}, - {file = "black-24.4.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bf10f7310db693bb62692609b397e8d67257c55f949abde4c67f9cc574492cc7"}, - {file = "black-24.4.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:98e123f1d5cfd42f886624d84464f7756f60ff6eab89ae845210631714f6db94"}, - {file = "black-24.4.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48a85f2cb5e6799a9ef05347b476cce6c182d6c71ee36925a6c194d074336ef8"}, - {file = "black-24.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:b1530ae42e9d6d5b670a34db49a94115a64596bc77710b1d05e9801e62ca0a7c"}, - {file = "black-24.4.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:37aae07b029fa0174d39daf02748b379399b909652a806e5708199bd93899da1"}, - {file = "black-24.4.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:da33a1a5e49c4122ccdfd56cd021ff1ebc4a1ec4e2d01594fef9b6f267a9e741"}, - {file = "black-24.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef703f83fc32e131e9bcc0a5094cfe85599e7109f896fe8bc96cc402f3eb4b6e"}, - {file = "black-24.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:b9176b9832e84308818a99a561e90aa479e73c523b3f77afd07913380ae2eab7"}, - {file = "black-24.4.2-py3-none-any.whl", hash = "sha256:d36ed1124bb81b32f8614555b34cc4259c3fbc7eec17870e8ff8ded335b58d8c"}, - {file = "black-24.4.2.tar.gz", hash 
= "sha256:c872b53057f000085da66a19c55d68f6f8ddcac2642392ad3a355878406fbd4d"}, + {file = "black-24.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:09cdeb74d494ec023ded657f7092ba518e8cf78fa8386155e4a03fdcc44679e6"}, + {file = "black-24.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:81c6742da39f33b08e791da38410f32e27d632260e599df7245cccee2064afeb"}, + {file = "black-24.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:707a1ca89221bc8a1a64fb5e15ef39cd755633daa672a9db7498d1c19de66a42"}, + {file = "black-24.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:d6417535d99c37cee4091a2f24eb2b6d5ec42b144d50f1f2e436d9fe1916fe1a"}, + {file = "black-24.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fb6e2c0b86bbd43dee042e48059c9ad7830abd5c94b0bc518c0eeec57c3eddc1"}, + {file = "black-24.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:837fd281f1908d0076844bc2b801ad2d369c78c45cf800cad7b61686051041af"}, + {file = "black-24.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:62e8730977f0b77998029da7971fa896ceefa2c4c4933fcd593fa599ecbf97a4"}, + {file = "black-24.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:72901b4913cbac8972ad911dc4098d5753704d1f3c56e44ae8dce99eecb0e3af"}, + {file = "black-24.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:7c046c1d1eeb7aea9335da62472481d3bbf3fd986e093cffd35f4385c94ae368"}, + {file = "black-24.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:649f6d84ccbae73ab767e206772cc2d7a393a001070a4c814a546afd0d423aed"}, + {file = "black-24.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2b59b250fdba5f9a9cd9d0ece6e6d993d91ce877d121d161e4698af3eb9c1018"}, + {file = "black-24.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:6e55d30d44bed36593c3163b9bc63bf58b3b30e4611e4d88a0c3c239930ed5b2"}, + {file = "black-24.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:505289f17ceda596658ae81b61ebbe2d9b25aa78067035184ed0a9d855d18afd"}, + {file = "black-24.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b19c9ad992c7883ad84c9b22aaa73562a16b819c1d8db7a1a1a49fb7ec13c7d2"}, + {file = "black-24.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1f13f7f386f86f8121d76599114bb8c17b69d962137fc70efe56137727c7047e"}, + {file = "black-24.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:f490dbd59680d809ca31efdae20e634f3fae27fba3ce0ba3208333b713bc3920"}, + {file = "black-24.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:eab4dd44ce80dea27dc69db40dab62d4ca96112f87996bca68cd75639aeb2e4c"}, + {file = "black-24.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3c4285573d4897a7610054af5a890bde7c65cb466040c5f0c8b732812d7f0e5e"}, + {file = "black-24.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e84e33b37be070ba135176c123ae52a51f82306def9f7d063ee302ecab2cf47"}, + {file = "black-24.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:73bbf84ed136e45d451a260c6b73ed674652f90a2b3211d6a35e78054563a9bb"}, + {file = "black-24.8.0-py3-none-any.whl", hash = "sha256:972085c618ee94f402da1af548a4f218c754ea7e5dc70acb168bfaca4c2542ed"}, + {file = "black-24.8.0.tar.gz", hash = "sha256:2500945420b6784c38b9ee885af039f5e7471ef284ab03fa35ecdde4688cd83f"}, ] [package.dependencies] @@ -426,63 +426,78 @@ files = [ [[package]] name = "cffi" -version = "1.16.0" +version = "1.17.0" description = "Foreign Function Interface for Python calling C 
code." optional = false python-versions = ">=3.8" files = [ - {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"}, - {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"}, - {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"}, - {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"}, - {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"}, - {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"}, - {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"}, - {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"}, - {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"}, - {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"}, - {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"}, - {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"}, - {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"}, - {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"}, - {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"}, - {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"}, - {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"}, - {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"}, - {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"}, - {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"}, - {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"}, - {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"}, - {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"}, - {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"}, - {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"}, - {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"}, - {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"}, + {file = "cffi-1.17.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f9338cc05451f1942d0d8203ec2c346c830f8e86469903d5126c1f0a13a2bcbb"}, + {file = "cffi-1.17.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a0ce71725cacc9ebf839630772b07eeec220cbb5f03be1399e0457a1464f8e1a"}, + {file = "cffi-1.17.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c815270206f983309915a6844fe994b2fa47e5d05c4c4cef267c3b30e34dbe42"}, + {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6bdcd415ba87846fd317bee0774e412e8792832e7805938987e4ede1d13046d"}, + {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a98748ed1a1df4ee1d6f927e151ed6c1a09d5ec21684de879c7ea6aa96f58f2"}, + {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0a048d4f6630113e54bb4b77e315e1ba32a5a31512c31a273807d0027a7e69ab"}, + {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24aa705a5f5bd3a8bcfa4d123f03413de5d86e497435693b638cbffb7d5d8a1b"}, + {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:856bf0924d24e7f93b8aee12a3a1095c34085600aa805693fb7f5d1962393206"}, + {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:4304d4416ff032ed50ad6bb87416d802e67139e31c0bde4628f36a47a3164bfa"}, + {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:331ad15c39c9fe9186ceaf87203a9ecf5ae0ba2538c9e898e3a6967e8ad3db6f"}, + {file = "cffi-1.17.0-cp310-cp310-win32.whl", hash = "sha256:669b29a9eca6146465cc574659058ed949748f0809a2582d1f1a324eb91054dc"}, + {file = "cffi-1.17.0-cp310-cp310-win_amd64.whl", hash = "sha256:48b389b1fd5144603d61d752afd7167dfd205973a43151ae5045b35793232aa2"}, + {file = "cffi-1.17.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c5d97162c196ce54af6700949ddf9409e9833ef1003b4741c2b39ef46f1d9720"}, + {file = "cffi-1.17.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5ba5c243f4004c750836f81606a9fcb7841f8874ad8f3bf204ff5e56332b72b9"}, + {file = "cffi-1.17.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bb9333f58fc3a2296fb1d54576138d4cf5d496a2cc118422bd77835e6ae0b9cb"}, + {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:435a22d00ec7d7ea533db494da8581b05977f9c37338c80bc86314bec2619424"}, + {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d1df34588123fcc88c872f5acb6f74ae59e9d182a2707097f9e28275ec26a12d"}, + {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:df8bb0010fdd0a743b7542589223a2816bdde4d94bb5ad67884348fa2c1c67e8"}, + {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8b5b9712783415695663bd463990e2f00c6750562e6ad1d28e072a611c5f2a6"}, + {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ffef8fd58a36fb5f1196919638f73dd3ae0db1a878982b27a9a5a176ede4ba91"}, + {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e67d26532bfd8b7f7c05d5a766d6f437b362c1bf203a3a5ce3593a645e870b8"}, + {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:45f7cd36186db767d803b1473b3c659d57a23b5fa491ad83c6d40f2af58e4dbb"}, + {file = "cffi-1.17.0-cp311-cp311-win32.whl", hash = "sha256:a9015f5b8af1bb6837a3fcb0cdf3b874fe3385ff6274e8b7925d81ccaec3c5c9"}, + {file = "cffi-1.17.0-cp311-cp311-win_amd64.whl", hash = "sha256:b50aaac7d05c2c26dfd50c3321199f019ba76bb650e346a6ef3616306eed67b0"}, + {file = "cffi-1.17.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aec510255ce690d240f7cb23d7114f6b351c733a74c279a84def763660a2c3bc"}, + {file = "cffi-1.17.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2770bb0d5e3cc0e31e7318db06efcbcdb7b31bcb1a70086d3177692a02256f59"}, + {file = "cffi-1.17.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db9a30ec064129d605d0f1aedc93e00894b9334ec74ba9c6bdd08147434b33eb"}, + {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a47eef975d2b8b721775a0fa286f50eab535b9d56c70a6e62842134cf7841195"}, + {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f3e0992f23bbb0be00a921eae5363329253c3b86287db27092461c887b791e5e"}, + {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6107e445faf057c118d5050560695e46d272e5301feffda3c41849641222a828"}, + {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb862356ee9391dc5a0b3cbc00f416b48c1b9a52d252d898e5b7696a5f9fe150"}, + {file = "cffi-1.17.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c1c13185b90bbd3f8b5963cd8ce7ad4ff441924c31e23c975cb150e27c2bf67a"}, + {file = "cffi-1.17.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:17c6d6d3260c7f2d94f657e6872591fe8733872a86ed1345bda872cfc8c74885"}, + {file = "cffi-1.17.0-cp312-cp312-win32.whl", hash = "sha256:c3b8bd3133cd50f6b637bb4322822c94c5ce4bf0d724ed5ae70afce62187c492"}, + {file = "cffi-1.17.0-cp312-cp312-win_amd64.whl", hash = "sha256:dca802c8db0720ce1c49cce1149ff7b06e91ba15fa84b1d59144fef1a1bc7ac2"}, + {file = "cffi-1.17.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:6ce01337d23884b21c03869d2f68c5523d43174d4fc405490eb0091057943118"}, + {file = "cffi-1.17.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cab2eba3830bf4f6d91e2d6718e0e1c14a2f5ad1af68a89d24ace0c6b17cced7"}, + {file = "cffi-1.17.0-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:14b9cbc8f7ac98a739558eb86fabc283d4d564dafed50216e7f7ee62d0d25377"}, + {file = 
"cffi-1.17.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b00e7bcd71caa0282cbe3c90966f738e2db91e64092a877c3ff7f19a1628fdcb"}, + {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:41f4915e09218744d8bae14759f983e466ab69b178de38066f7579892ff2a555"}, + {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4760a68cab57bfaa628938e9c2971137e05ce48e762a9cb53b76c9b569f1204"}, + {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:011aff3524d578a9412c8b3cfaa50f2c0bd78e03eb7af7aa5e0df59b158efb2f"}, + {file = "cffi-1.17.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:a003ac9edc22d99ae1286b0875c460351f4e101f8c9d9d2576e78d7e048f64e0"}, + {file = "cffi-1.17.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ef9528915df81b8f4c7612b19b8628214c65c9b7f74db2e34a646a0a2a0da2d4"}, + {file = "cffi-1.17.0-cp313-cp313-win32.whl", hash = "sha256:70d2aa9fb00cf52034feac4b913181a6e10356019b18ef89bc7c12a283bf5f5a"}, + {file = "cffi-1.17.0-cp313-cp313-win_amd64.whl", hash = "sha256:b7b6ea9e36d32582cda3465f54c4b454f62f23cb083ebc7a94e2ca6ef011c3a7"}, + {file = "cffi-1.17.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:964823b2fc77b55355999ade496c54dde161c621cb1f6eac61dc30ed1b63cd4c"}, + {file = "cffi-1.17.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:516a405f174fd3b88829eabfe4bb296ac602d6a0f68e0d64d5ac9456194a5b7e"}, + {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dec6b307ce928e8e112a6bb9921a1cb00a0e14979bf28b98e084a4b8a742bd9b"}, + {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e4094c7b464cf0a858e75cd14b03509e84789abf7b79f8537e6a72152109c76e"}, + {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2404f3de742f47cb62d023f0ba7c5a916c9c653d5b368cc966382ae4e57da401"}, + {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3aa9d43b02a0c681f0bfbc12d476d47b2b2b6a3f9287f11ee42989a268a1833c"}, + {file = "cffi-1.17.0-cp38-cp38-win32.whl", hash = "sha256:0bb15e7acf8ab35ca8b24b90af52c8b391690ef5c4aec3d31f38f0d37d2cc499"}, + {file = "cffi-1.17.0-cp38-cp38-win_amd64.whl", hash = "sha256:93a7350f6706b31f457c1457d3a3259ff9071a66f312ae64dc024f049055f72c"}, + {file = "cffi-1.17.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1a2ddbac59dc3716bc79f27906c010406155031a1c801410f1bafff17ea304d2"}, + {file = "cffi-1.17.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6327b572f5770293fc062a7ec04160e89741e8552bf1c358d1a23eba68166759"}, + {file = "cffi-1.17.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbc183e7bef690c9abe5ea67b7b60fdbca81aa8da43468287dae7b5c046107d4"}, + {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bdc0f1f610d067c70aa3737ed06e2726fd9d6f7bfee4a351f4c40b6831f4e82"}, + {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6d872186c1617d143969defeadac5a904e6e374183e07977eedef9c07c8953bf"}, + {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0d46ee4764b88b91f16661a8befc6bfb24806d885e27436fdc292ed7e6f6d058"}, + {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:6f76a90c345796c01d85e6332e81cab6d70de83b829cf1d9762d0a3da59c7932"}, + {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0e60821d312f99d3e1569202518dddf10ae547e799d75aef3bca3a2d9e8ee693"}, + {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:eb09b82377233b902d4c3fbeeb7ad731cdab579c6c6fda1f763cd779139e47c3"}, + {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:24658baf6224d8f280e827f0a50c46ad819ec8ba380a42448e24459daf809cf4"}, + {file = "cffi-1.17.0-cp39-cp39-win32.whl", hash = "sha256:0fdacad9e0d9fc23e519efd5ea24a70348305e8d7d85ecbb1a5fa66dc834e7fb"}, + {file = "cffi-1.17.0-cp39-cp39-win_amd64.whl", hash = "sha256:7cbc78dc018596315d4e7841c8c3a7ae31cc4d638c9b627f87d52e8abaaf2d29"}, + {file = "cffi-1.17.0.tar.gz", hash = "sha256:f3157624b7558b914cb039fd1af735e5e8049a87c817cc215109ad1c8779df76"}, ] [package.dependencies] @@ -792,63 +807,83 @@ tests = ["check-manifest (>=0.25)", "coverage (>=4.0)", "isort (>=4.3.3)", "pydo [[package]] name = "coverage" -version = "7.6.0" +version = "7.6.1" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.8" files = [ - {file = "coverage-7.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dff044f661f59dace805eedb4a7404c573b6ff0cdba4a524141bc63d7be5c7fd"}, - {file = "coverage-7.6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a8659fd33ee9e6ca03950cfdcdf271d645cf681609153f218826dd9805ab585c"}, - {file = "coverage-7.6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7792f0ab20df8071d669d929c75c97fecfa6bcab82c10ee4adb91c7a54055463"}, - {file = "coverage-7.6.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d4b3cd1ca7cd73d229487fa5caca9e4bc1f0bca96526b922d61053ea751fe791"}, - {file = "coverage-7.6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e7e128f85c0b419907d1f38e616c4f1e9f1d1b37a7949f44df9a73d5da5cd53c"}, - {file = "coverage-7.6.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a94925102c89247530ae1dab7dc02c690942566f22e189cbd53579b0693c0783"}, - {file = "coverage-7.6.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:dcd070b5b585b50e6617e8972f3fbbee786afca71b1936ac06257f7e178f00f6"}, - {file = "coverage-7.6.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:d50a252b23b9b4dfeefc1f663c568a221092cbaded20a05a11665d0dbec9b8fb"}, - {file = "coverage-7.6.0-cp310-cp310-win32.whl", hash = "sha256:0e7b27d04131c46e6894f23a4ae186a6a2207209a05df5b6ad4caee6d54a222c"}, - {file = "coverage-7.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:54dece71673b3187c86226c3ca793c5f891f9fc3d8aa183f2e3653da18566169"}, - {file = "coverage-7.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c7b525ab52ce18c57ae232ba6f7010297a87ced82a2383b1afd238849c1ff933"}, - {file = "coverage-7.6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4bea27c4269234e06f621f3fac3925f56ff34bc14521484b8f66a580aacc2e7d"}, - {file = "coverage-7.6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed8d1d1821ba5fc88d4a4f45387b65de52382fa3ef1f0115a4f7a20cdfab0e94"}, - {file = "coverage-7.6.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01c322ef2bbe15057bc4bf132b525b7e3f7206f071799eb8aa6ad1940bcf5fb1"}, - {file = 
"coverage-7.6.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03cafe82c1b32b770a29fd6de923625ccac3185a54a5e66606da26d105f37dac"}, - {file = "coverage-7.6.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0d1b923fc4a40c5832be4f35a5dab0e5ff89cddf83bb4174499e02ea089daf57"}, - {file = "coverage-7.6.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4b03741e70fb811d1a9a1d75355cf391f274ed85847f4b78e35459899f57af4d"}, - {file = "coverage-7.6.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a73d18625f6a8a1cbb11eadc1d03929f9510f4131879288e3f7922097a429f63"}, - {file = "coverage-7.6.0-cp311-cp311-win32.whl", hash = "sha256:65fa405b837060db569a61ec368b74688f429b32fa47a8929a7a2f9b47183713"}, - {file = "coverage-7.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:6379688fb4cfa921ae349c76eb1a9ab26b65f32b03d46bb0eed841fd4cb6afb1"}, - {file = "coverage-7.6.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f7db0b6ae1f96ae41afe626095149ecd1b212b424626175a6633c2999eaad45b"}, - {file = "coverage-7.6.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bbdf9a72403110a3bdae77948b8011f644571311c2fb35ee15f0f10a8fc082e8"}, - {file = "coverage-7.6.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cc44bf0315268e253bf563f3560e6c004efe38f76db03a1558274a6e04bf5d5"}, - {file = "coverage-7.6.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:da8549d17489cd52f85a9829d0e1d91059359b3c54a26f28bec2c5d369524807"}, - {file = "coverage-7.6.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0086cd4fc71b7d485ac93ca4239c8f75732c2ae3ba83f6be1c9be59d9e2c6382"}, - {file = "coverage-7.6.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1fad32ee9b27350687035cb5fdf9145bc9cf0a094a9577d43e909948ebcfa27b"}, - {file = "coverage-7.6.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:044a0985a4f25b335882b0966625270a8d9db3d3409ddc49a4eb00b0ef5e8cee"}, - {file = "coverage-7.6.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:76d5f82213aa78098b9b964ea89de4617e70e0d43e97900c2778a50856dac605"}, - {file = "coverage-7.6.0-cp312-cp312-win32.whl", hash = "sha256:3c59105f8d58ce500f348c5b56163a4113a440dad6daa2294b5052a10db866da"}, - {file = "coverage-7.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:ca5d79cfdae420a1d52bf177de4bc2289c321d6c961ae321503b2ca59c17ae67"}, - {file = "coverage-7.6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d39bd10f0ae453554798b125d2f39884290c480f56e8a02ba7a6ed552005243b"}, - {file = "coverage-7.6.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:beb08e8508e53a568811016e59f3234d29c2583f6b6e28572f0954a6b4f7e03d"}, - {file = "coverage-7.6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2e16f4cd2bc4d88ba30ca2d3bbf2f21f00f382cf4e1ce3b1ddc96c634bc48ca"}, - {file = "coverage-7.6.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6616d1c9bf1e3faea78711ee42a8b972367d82ceae233ec0ac61cc7fec09fa6b"}, - {file = "coverage-7.6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad4567d6c334c46046d1c4c20024de2a1c3abc626817ae21ae3da600f5779b44"}, - {file = "coverage-7.6.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d17c6a415d68cfe1091d3296ba5749d3d8696e42c37fca5d4860c5bf7b729f03"}, - {file = 
"coverage-7.6.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:9146579352d7b5f6412735d0f203bbd8d00113a680b66565e205bc605ef81bc6"}, - {file = "coverage-7.6.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:cdab02a0a941af190df8782aafc591ef3ad08824f97850b015c8c6a8b3877b0b"}, - {file = "coverage-7.6.0-cp38-cp38-win32.whl", hash = "sha256:df423f351b162a702c053d5dddc0fc0ef9a9e27ea3f449781ace5f906b664428"}, - {file = "coverage-7.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:f2501d60d7497fd55e391f423f965bbe9e650e9ffc3c627d5f0ac516026000b8"}, - {file = "coverage-7.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7221f9ac9dad9492cecab6f676b3eaf9185141539d5c9689d13fd6b0d7de840c"}, - {file = "coverage-7.6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ddaaa91bfc4477d2871442bbf30a125e8fe6b05da8a0015507bfbf4718228ab2"}, - {file = "coverage-7.6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4cbe651f3904e28f3a55d6f371203049034b4ddbce65a54527a3f189ca3b390"}, - {file = "coverage-7.6.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:831b476d79408ab6ccfadaaf199906c833f02fdb32c9ab907b1d4aa0713cfa3b"}, - {file = "coverage-7.6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46c3d091059ad0b9c59d1034de74a7f36dcfa7f6d3bde782c49deb42438f2450"}, - {file = "coverage-7.6.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:4d5fae0a22dc86259dee66f2cc6c1d3e490c4a1214d7daa2a93d07491c5c04b6"}, - {file = "coverage-7.6.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:07ed352205574aad067482e53dd606926afebcb5590653121063fbf4e2175166"}, - {file = "coverage-7.6.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:49c76cdfa13015c4560702574bad67f0e15ca5a2872c6a125f6327ead2b731dd"}, - {file = "coverage-7.6.0-cp39-cp39-win32.whl", hash = "sha256:482855914928c8175735a2a59c8dc5806cf7d8f032e4820d52e845d1f731dca2"}, - {file = "coverage-7.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:543ef9179bc55edfd895154a51792b01c017c87af0ebaae092720152e19e42ca"}, - {file = "coverage-7.6.0-pp38.pp39.pp310-none-any.whl", hash = "sha256:6fe885135c8a479d3e37a7aae61cbd3a0fb2deccb4dda3c25f92a49189f766d6"}, - {file = "coverage-7.6.0.tar.gz", hash = "sha256:289cc803fa1dc901f84701ac10c9ee873619320f2f9aff38794db4a4a0268d51"}, + {file = "coverage-7.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b06079abebbc0e89e6163b8e8f0e16270124c154dc6e4a47b413dd538859af16"}, + {file = "coverage-7.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cf4b19715bccd7ee27b6b120e7e9dd56037b9c0681dcc1adc9ba9db3d417fa36"}, + {file = "coverage-7.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61c0abb4c85b095a784ef23fdd4aede7a2628478e7baba7c5e3deba61070a02"}, + {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd21f6ae3f08b41004dfb433fa895d858f3f5979e7762d052b12aef444e29afc"}, + {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f59d57baca39b32db42b83b2a7ba6f47ad9c394ec2076b084c3f029b7afca23"}, + {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a1ac0ae2b8bd743b88ed0502544847c3053d7171a3cff9228af618a068ed9c34"}, + {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e6a08c0be454c3b3beb105c0596ebdc2371fab6bb90c0c0297f4e58fd7e1012c"}, + {file = 
"coverage-7.6.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f5796e664fe802da4f57a168c85359a8fbf3eab5e55cd4e4569fbacecc903959"}, + {file = "coverage-7.6.1-cp310-cp310-win32.whl", hash = "sha256:7bb65125fcbef8d989fa1dd0e8a060999497629ca5b0efbca209588a73356232"}, + {file = "coverage-7.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:3115a95daa9bdba70aea750db7b96b37259a81a709223c8448fa97727d546fe0"}, + {file = "coverage-7.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7dea0889685db8550f839fa202744652e87c60015029ce3f60e006f8c4462c93"}, + {file = "coverage-7.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed37bd3c3b063412f7620464a9ac1314d33100329f39799255fb8d3027da50d3"}, + {file = "coverage-7.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d85f5e9a5f8b73e2350097c3756ef7e785f55bd71205defa0bfdaf96c31616ff"}, + {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bc572be474cafb617672c43fe989d6e48d3c83af02ce8de73fff1c6bb3c198d"}, + {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c0420b573964c760df9e9e86d1a9a622d0d27f417e1a949a8a66dd7bcee7bc6"}, + {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1f4aa8219db826ce6be7099d559f8ec311549bfc4046f7f9fe9b5cea5c581c56"}, + {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:fc5a77d0c516700ebad189b587de289a20a78324bc54baee03dd486f0855d234"}, + {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b48f312cca9621272ae49008c7f613337c53fadca647d6384cc129d2996d1133"}, + {file = "coverage-7.6.1-cp311-cp311-win32.whl", hash = "sha256:1125ca0e5fd475cbbba3bb67ae20bd2c23a98fac4e32412883f9bcbaa81c314c"}, + {file = "coverage-7.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:8ae539519c4c040c5ffd0632784e21b2f03fc1340752af711f33e5be83a9d6c6"}, + {file = "coverage-7.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:95cae0efeb032af8458fc27d191f85d1717b1d4e49f7cb226cf526ff28179778"}, + {file = "coverage-7.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5621a9175cf9d0b0c84c2ef2b12e9f5f5071357c4d2ea6ca1cf01814f45d2391"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:260933720fdcd75340e7dbe9060655aff3af1f0c5d20f46b57f262ab6c86a5e8"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e2ca0ad381b91350c0ed49d52699b625aab2b44b65e1b4e02fa9df0e92ad2d"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c44fee9975f04b33331cb8eb272827111efc8930cfd582e0320613263ca849ca"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:877abb17e6339d96bf08e7a622d05095e72b71f8afd8a9fefc82cf30ed944163"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3e0cadcf6733c09154b461f1ca72d5416635e5e4ec4e536192180d34ec160f8a"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3c02d12f837d9683e5ab2f3d9844dc57655b92c74e286c262e0fc54213c216d"}, + {file = "coverage-7.6.1-cp312-cp312-win32.whl", hash = "sha256:e05882b70b87a18d937ca6768ff33cc3f72847cbc4de4491c8e73880766718e5"}, + {file = "coverage-7.6.1-cp312-cp312-win_amd64.whl", hash = 
"sha256:b5d7b556859dd85f3a541db6a4e0167b86e7273e1cdc973e5b175166bb634fdb"}, + {file = "coverage-7.6.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a4acd025ecc06185ba2b801f2de85546e0b8ac787cf9d3b06e7e2a69f925b106"}, + {file = "coverage-7.6.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a6d3adcf24b624a7b778533480e32434a39ad8fa30c315208f6d3e5542aeb6e9"}, + {file = "coverage-7.6.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0c212c49b6c10e6951362f7c6df3329f04c2b1c28499563d4035d964ab8e08c"}, + {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e81d7a3e58882450ec4186ca59a3f20a5d4440f25b1cff6f0902ad890e6748a"}, + {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78b260de9790fd81e69401c2dc8b17da47c8038176a79092a89cb2b7d945d060"}, + {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a78d169acd38300060b28d600344a803628c3fd585c912cacc9ea8790fe96862"}, + {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2c09f4ce52cb99dd7505cd0fc8e0e37c77b87f46bc9c1eb03fe3bc9991085388"}, + {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6878ef48d4227aace338d88c48738a4258213cd7b74fd9a3d4d7582bb1d8a155"}, + {file = "coverage-7.6.1-cp313-cp313-win32.whl", hash = "sha256:44df346d5215a8c0e360307d46ffaabe0f5d3502c8a1cefd700b34baf31d411a"}, + {file = "coverage-7.6.1-cp313-cp313-win_amd64.whl", hash = "sha256:8284cf8c0dd272a247bc154eb6c95548722dce90d098c17a883ed36e67cdb129"}, + {file = "coverage-7.6.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d3296782ca4eab572a1a4eca686d8bfb00226300dcefdf43faa25b5242ab8a3e"}, + {file = "coverage-7.6.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:502753043567491d3ff6d08629270127e0c31d4184c4c8d98f92c26f65019962"}, + {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a89ecca80709d4076b95f89f308544ec8f7b4727e8a547913a35f16717856cb"}, + {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a318d68e92e80af8b00fa99609796fdbcdfef3629c77c6283566c6f02c6d6704"}, + {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13b0a73a0896988f053e4fbb7de6d93388e6dd292b0d87ee51d106f2c11b465b"}, + {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4421712dbfc5562150f7554f13dde997a2e932a6b5f352edcce948a815efee6f"}, + {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:166811d20dfea725e2e4baa71fffd6c968a958577848d2131f39b60043400223"}, + {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:225667980479a17db1048cb2bf8bfb39b8e5be8f164b8f6628b64f78a72cf9d3"}, + {file = "coverage-7.6.1-cp313-cp313t-win32.whl", hash = "sha256:170d444ab405852903b7d04ea9ae9b98f98ab6d7e63e1115e82620807519797f"}, + {file = "coverage-7.6.1-cp313-cp313t-win_amd64.whl", hash = "sha256:b9f222de8cded79c49bf184bdbc06630d4c58eec9459b939b4a690c82ed05657"}, + {file = "coverage-7.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6db04803b6c7291985a761004e9060b2bca08da6d04f26a7f2294b8623a0c1a0"}, + {file = "coverage-7.6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f1adfc8ac319e1a348af294106bc6a8458a0f1633cc62a1446aebc30c5fa186a"}, + {file 
= "coverage-7.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a95324a9de9650a729239daea117df21f4b9868ce32e63f8b650ebe6cef5595b"}, + {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b43c03669dc4618ec25270b06ecd3ee4fa94c7f9b3c14bae6571ca00ef98b0d3"}, + {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8929543a7192c13d177b770008bc4e8119f2e1f881d563fc6b6305d2d0ebe9de"}, + {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:a09ece4a69cf399510c8ab25e0950d9cf2b42f7b3cb0374f95d2e2ff594478a6"}, + {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:9054a0754de38d9dbd01a46621636689124d666bad1936d76c0341f7d71bf569"}, + {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0dbde0f4aa9a16fa4d754356a8f2e36296ff4d83994b2c9d8398aa32f222f989"}, + {file = "coverage-7.6.1-cp38-cp38-win32.whl", hash = "sha256:da511e6ad4f7323ee5702e6633085fb76c2f893aaf8ce4c51a0ba4fc07580ea7"}, + {file = "coverage-7.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:3f1156e3e8f2872197af3840d8ad307a9dd18e615dc64d9ee41696f287c57ad8"}, + {file = "coverage-7.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:abd5fd0db5f4dc9289408aaf34908072f805ff7792632250dcb36dc591d24255"}, + {file = "coverage-7.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:547f45fa1a93154bd82050a7f3cddbc1a7a4dd2a9bf5cb7d06f4ae29fe94eaf8"}, + {file = "coverage-7.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:645786266c8f18a931b65bfcefdbf6952dd0dea98feee39bd188607a9d307ed2"}, + {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9e0b2df163b8ed01d515807af24f63de04bebcecbd6c3bfeff88385789fdf75a"}, + {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:609b06f178fe8e9f89ef676532760ec0b4deea15e9969bf754b37f7c40326dbc"}, + {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:702855feff378050ae4f741045e19a32d57d19f3e0676d589df0575008ea5004"}, + {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2bdb062ea438f22d99cba0d7829c2ef0af1d768d1e4a4f528087224c90b132cb"}, + {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9c56863d44bd1c4fe2abb8a4d6f5371d197f1ac0ebdee542f07f35895fc07f36"}, + {file = "coverage-7.6.1-cp39-cp39-win32.whl", hash = "sha256:6e2cd258d7d927d09493c8df1ce9174ad01b381d4729a9d8d4e38670ca24774c"}, + {file = "coverage-7.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:06a737c882bd26d0d6ee7269b20b12f14a8704807a01056c80bb881a4b2ce6ca"}, + {file = "coverage-7.6.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:e9a6e0eb86070e8ccaedfbd9d38fec54864f3125ab95419970575b42af7541df"}, + {file = "coverage-7.6.1.tar.gz", hash = "sha256:953510dfb7b12ab69d20135a0662397f077c59b1e6379a768e97c59d852ee51d"}, ] [package.dependencies] @@ -859,43 +894,38 @@ toml = ["tomli"] [[package]] name = "cryptography" -version = "42.0.8" +version = "43.0.0" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-42.0.8-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:81d8a521705787afe7a18d5bfb47ea9d9cc068206270aad0b96a725022e18d2e"}, - {file = "cryptography-42.0.8-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:961e61cefdcb06e0c6d7e3a1b22ebe8b996eb2bf50614e89384be54c48c6b63d"}, - {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3ec3672626e1b9e55afd0df6d774ff0e953452886e06e0f1eb7eb0c832e8902"}, - {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e599b53fd95357d92304510fb7bda8523ed1f79ca98dce2f43c115950aa78801"}, - {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5226d5d21ab681f432a9c1cf8b658c0cb02533eece706b155e5fbd8a0cdd3949"}, - {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:6b7c4f03ce01afd3b76cf69a5455caa9cfa3de8c8f493e0d3ab7d20611c8dae9"}, - {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:2346b911eb349ab547076f47f2e035fc8ff2c02380a7cbbf8d87114fa0f1c583"}, - {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:ad803773e9df0b92e0a817d22fd8a3675493f690b96130a5e24f1b8fabbea9c7"}, - {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2f66d9cd9147ee495a8374a45ca445819f8929a3efcd2e3df6428e46c3cbb10b"}, - {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:d45b940883a03e19e944456a558b67a41160e367a719833c53de6911cabba2b7"}, - {file = "cryptography-42.0.8-cp37-abi3-win32.whl", hash = "sha256:a0c5b2b0585b6af82d7e385f55a8bc568abff8923af147ee3c07bd8b42cda8b2"}, - {file = "cryptography-42.0.8-cp37-abi3-win_amd64.whl", hash = "sha256:57080dee41209e556a9a4ce60d229244f7a66ef52750f813bfbe18959770cfba"}, - {file = "cryptography-42.0.8-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:dea567d1b0e8bc5764b9443858b673b734100c2871dc93163f58c46a97a83d28"}, - {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4783183f7cb757b73b2ae9aed6599b96338eb957233c58ca8f49a49cc32fd5e"}, - {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0608251135d0e03111152e41f0cc2392d1e74e35703960d4190b2e0f4ca9c70"}, - {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:dc0fdf6787f37b1c6b08e6dfc892d9d068b5bdb671198c72072828b80bd5fe4c"}, - {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:9c0c1716c8447ee7dbf08d6db2e5c41c688544c61074b54fc4564196f55c25a7"}, - {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:fff12c88a672ab9c9c1cf7b0c80e3ad9e2ebd9d828d955c126be4fd3e5578c9e"}, - {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:cafb92b2bc622cd1aa6a1dce4b93307792633f4c5fe1f46c6b97cf67073ec961"}, - {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:31f721658a29331f895a5a54e7e82075554ccfb8b163a18719d342f5ffe5ecb1"}, - {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b297f90c5723d04bcc8265fc2a0f86d4ea2e0f7ab4b6994459548d3a6b992a14"}, - {file = "cryptography-42.0.8-cp39-abi3-win32.whl", hash = "sha256:2f88d197e66c65be5e42cd72e5c18afbfae3f741742070e3019ac8f4ac57262c"}, - {file = "cryptography-42.0.8-cp39-abi3-win_amd64.whl", hash = 
"sha256:fa76fbb7596cc5839320000cdd5d0955313696d9511debab7ee7278fc8b5c84a"}, - {file = "cryptography-42.0.8-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ba4f0a211697362e89ad822e667d8d340b4d8d55fae72cdd619389fb5912eefe"}, - {file = "cryptography-42.0.8-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:81884c4d096c272f00aeb1f11cf62ccd39763581645b0812e99a91505fa48e0c"}, - {file = "cryptography-42.0.8-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c9bb2ae11bfbab395bdd072985abde58ea9860ed84e59dbc0463a5d0159f5b71"}, - {file = "cryptography-42.0.8-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7016f837e15b0a1c119d27ecd89b3515f01f90a8615ed5e9427e30d9cdbfed3d"}, - {file = "cryptography-42.0.8-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5a94eccb2a81a309806027e1670a358b99b8fe8bfe9f8d329f27d72c094dde8c"}, - {file = "cryptography-42.0.8-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:dec9b018df185f08483f294cae6ccac29e7a6e0678996587363dc352dc65c842"}, - {file = "cryptography-42.0.8-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:343728aac38decfdeecf55ecab3264b015be68fc2816ca800db649607aeee648"}, - {file = "cryptography-42.0.8-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:013629ae70b40af70c9a7a5db40abe5d9054e6f4380e50ce769947b73bf3caad"}, - {file = "cryptography-42.0.8.tar.gz", hash = "sha256:8d09d05439ce7baa8e9e95b07ec5b6c886f548deb7e0f69ef25f64b3bce842f2"}, + {file = "cryptography-43.0.0-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:64c3f16e2a4fc51c0d06af28441881f98c5d91009b8caaff40cf3548089e9c74"}, + {file = "cryptography-43.0.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3dcdedae5c7710b9f97ac6bba7e1052b95c7083c9d0e9df96e02a1932e777895"}, + {file = "cryptography-43.0.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d9a1eca329405219b605fac09ecfc09ac09e595d6def650a437523fcd08dd22"}, + {file = "cryptography-43.0.0-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ea9e57f8ea880eeea38ab5abf9fbe39f923544d7884228ec67d666abd60f5a47"}, + {file = "cryptography-43.0.0-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:9a8d6802e0825767476f62aafed40532bd435e8a5f7d23bd8b4f5fd04cc80ecf"}, + {file = "cryptography-43.0.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:cc70b4b581f28d0a254d006f26949245e3657d40d8857066c2ae22a61222ef55"}, + {file = "cryptography-43.0.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:4a997df8c1c2aae1e1e5ac49c2e4f610ad037fc5a3aadc7b64e39dea42249431"}, + {file = "cryptography-43.0.0-cp37-abi3-win32.whl", hash = "sha256:6e2b11c55d260d03a8cf29ac9b5e0608d35f08077d8c087be96287f43af3ccdc"}, + {file = "cryptography-43.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:31e44a986ceccec3d0498e16f3d27b2ee5fdf69ce2ab89b52eaad1d2f33d8778"}, + {file = "cryptography-43.0.0-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:7b3f5fe74a5ca32d4d0f302ffe6680fcc5c28f8ef0dc0ae8f40c0f3a1b4fca66"}, + {file = "cryptography-43.0.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac1955ce000cb29ab40def14fd1bbfa7af2017cca696ee696925615cafd0dce5"}, + {file = "cryptography-43.0.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:299d3da8e00b7e2b54bb02ef58d73cd5f55fb31f33ebbf33bd00d9aa6807df7e"}, + {file = "cryptography-43.0.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ee0c405832ade84d4de74b9029bedb7b31200600fa524d218fc29bfa371e97f5"}, + {file = 
"cryptography-43.0.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:cb013933d4c127349b3948aa8aaf2f12c0353ad0eccd715ca789c8a0f671646f"}, + {file = "cryptography-43.0.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:fdcb265de28585de5b859ae13e3846a8e805268a823a12a4da2597f1f5afc9f0"}, + {file = "cryptography-43.0.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2905ccf93a8a2a416f3ec01b1a7911c3fe4073ef35640e7ee5296754e30b762b"}, + {file = "cryptography-43.0.0-cp39-abi3-win32.whl", hash = "sha256:47ca71115e545954e6c1d207dd13461ab81f4eccfcb1345eac874828b5e3eaaf"}, + {file = "cryptography-43.0.0-cp39-abi3-win_amd64.whl", hash = "sha256:0663585d02f76929792470451a5ba64424acc3cd5227b03921dab0e2f27b1709"}, + {file = "cryptography-43.0.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2c6d112bf61c5ef44042c253e4859b3cbbb50df2f78fa8fae6747a7814484a70"}, + {file = "cryptography-43.0.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:844b6d608374e7d08f4f6e6f9f7b951f9256db41421917dfb2d003dde4cd6b66"}, + {file = "cryptography-43.0.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:51956cf8730665e2bdf8ddb8da0056f699c1a5715648c1b0144670c1ba00b48f"}, + {file = "cryptography-43.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:aae4d918f6b180a8ab8bf6511a419473d107df4dbb4225c7b48c5c9602c38c7f"}, + {file = "cryptography-43.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:232ce02943a579095a339ac4b390fbbe97f5b5d5d107f8a08260ea2768be8cc2"}, + {file = "cryptography-43.0.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5bcb8a5620008a8034d39bce21dc3e23735dfdb6a33a06974739bfa04f853947"}, + {file = "cryptography-43.0.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:08a24a7070b2b6804c1940ff0f910ff728932a9d0e80e7814234269f9d46d069"}, + {file = "cryptography-43.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:e9c5266c432a1e23738d178e51c2c7a5e2ddf790f248be939448c0ba2021f9d1"}, + {file = "cryptography-43.0.0.tar.gz", hash = "sha256:b88075ada2d51aa9f18283532c9f60e72170041bba88d7f37e49cbb10275299e"}, ] [package.dependencies] @@ -908,7 +938,7 @@ nox = ["nox"] pep8test = ["check-sdist", "click", "mypy", "ruff"] sdist = ["build"] ssh = ["bcrypt (>=3.1.5)"] -test = ["certifi", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test = ["certifi", "cryptography-vectors (==43.0.0)", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] test-randomorder = ["pytest-randomly"] [[package]] @@ -1795,13 +1825,13 @@ scandir = ["scandir (>=1.5,<2.0)"] [[package]] name = "ftfy" -version = "6.2.0" +version = "6.2.3" description = "Fixes mojibake and other problems with Unicode, after the fact" optional = false -python-versions = ">=3.8,<4" +python-versions = "<4,>=3.8.1" files = [ - {file = "ftfy-6.2.0-py3-none-any.whl", hash = "sha256:f94a2c34b76e07475720e3096f5ca80911d152406fbde66fdb45c4d0c9150026"}, - {file = "ftfy-6.2.0.tar.gz", hash = "sha256:5e42143c7025ef97944ca2619d6b61b0619fc6654f98771d39e862c1424c75c0"}, + {file = "ftfy-6.2.3-py3-none-any.whl", hash = "sha256:f15761b023f3061a66207d33f0c0149ad40a8319fd16da91796363e2c049fdf8"}, + {file = "ftfy-6.2.3.tar.gz", hash = "sha256:79b505988f29d577a58a9069afe75553a02a46e42de6091c0660cdc67812badc"}, ] [package.dependencies] @@ -1963,13 +1993,13 @@ files = [ [[package]] name = "importlib-metadata" -version = "8.0.0" +version = "8.4.0" description = "Read metadata from Python packages" optional = false python-versions = ">=3.8" files = [ - 
{file = "importlib_metadata-8.0.0-py3-none-any.whl", hash = "sha256:15584cf2b1bf449d98ff8a6ff1abef57bf20f3ac6454f431736cd3e660921b2f"}, - {file = "importlib_metadata-8.0.0.tar.gz", hash = "sha256:188bd24e4c346d3f0a933f275c2fec67050326a856b9a359881d7c2a697e8812"}, + {file = "importlib_metadata-8.4.0-py3-none-any.whl", hash = "sha256:66f342cc6ac9818fc6ff340576acd24d65ba0b3efabb2b4ac08b598965a4a2f1"}, + {file = "importlib_metadata-8.4.0.tar.gz", hash = "sha256:9a547d3bc3608b025f93d403fdd1aae741c24fbb8314df4b155675742ce303c5"}, ] [package.dependencies] @@ -1982,21 +2012,21 @@ test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "p [[package]] name = "importlib-resources" -version = "6.4.0" +version = "6.4.3" description = "Read resources from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_resources-6.4.0-py3-none-any.whl", hash = "sha256:50d10f043df931902d4194ea07ec57960f66a80449ff867bfe782b4c486ba78c"}, - {file = "importlib_resources-6.4.0.tar.gz", hash = "sha256:cdb2b453b8046ca4e3798eb1d84f3cce1446a0e8e7b5ef4efb600f19fc398145"}, + {file = "importlib_resources-6.4.3-py3-none-any.whl", hash = "sha256:2d6dfe3b9e055f72495c2085890837fc8c758984e209115c8792bddcb762cd93"}, + {file = "importlib_resources-6.4.3.tar.gz", hash = "sha256:4a202b9b9d38563b46da59221d77bb73862ab5d79d461307bcb826d725448b98"}, ] [package.dependencies] zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["jaraco.test (>=5.4)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)", "zipp (>=3.17)"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +test = ["jaraco.test (>=5.4)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)", "zipp (>=3.17)"] [[package]] name = "infinity" @@ -2339,13 +2369,13 @@ tests = ["CairoSVG (>=1.0.20)", "Pillow (>=10.0.0)", "Sphinx (>=1.8.0)", "mock ( [[package]] name = "invenio-i18n" -version = "2.1.1" +version = "2.1.2" description = "Invenio internationalization (I18N) module." optional = false python-versions = ">=3.7" files = [ - {file = "invenio-i18n-2.1.1.tar.gz", hash = "sha256:915e011a22acfe71e83a4c1bf8ab2a7a064d977b28ba631ede0e0ab2aa0cb592"}, - {file = "invenio_i18n-2.1.1-py2.py3-none-any.whl", hash = "sha256:057a2d67b3453a7dd2e7d70881074ed5c4dc983615472467cb6b5057366246fa"}, + {file = "invenio-i18n-2.1.2.tar.gz", hash = "sha256:6f39224abb3e26c32d670ba1828f4d147acc0e8cca0671ce66862a5264ce75ef"}, + {file = "invenio_i18n-2.1.2-py2.py3-none-any.whl", hash = "sha256:ca66b3d226f963d765c71c7fcbbf7ff2f5977db3cd0b84893073bda4655f99a9"}, ] [package.dependencies] @@ -2649,13 +2679,13 @@ tests = ["invenio-admin (>=1.4.0,<2.0.0)", "pytest-black (>=0.3.0)", "pytest-inv [[package]] name = "invenio-records-permissions" -version = "0.19.2" +version = "0.20.0" description = "Permission policies for Invenio records." 
optional = false python-versions = ">=3.7" files = [ - {file = "invenio-records-permissions-0.19.2.tar.gz", hash = "sha256:3c3aa3b7924162955609432b8a97dd3119e91eba50f22e071b9e932000505ec1"}, - {file = "invenio_records_permissions-0.19.2-py2.py3-none-any.whl", hash = "sha256:2eebb9334bc1c1d9a6bae2241e86c727a1343370fce27fea93bac42fe91cfa23"}, + {file = "invenio-records-permissions-0.20.0.tar.gz", hash = "sha256:ee807199a17d217335014b7defd0c7c6d86397aaa945e31437e1e5e9eb59e16f"}, + {file = "invenio_records_permissions-0.20.0-py2.py3-none-any.whl", hash = "sha256:3d99afbcc2d1b34a5db959fce8212e446f5bb4bf19e1c899d2f81bf3ed0ad769"}, ] [package.dependencies] @@ -2739,13 +2769,13 @@ tests = ["Flask-Login (>=0.3.2)", "citeproc-py (>=0.6.0)", "citeproc-py-styles ( [[package]] name = "invenio-records-ui" -version = "1.2.0" -description = "User interface for Invenio-Records." +version = "1.2.1" +description = "\"User interface for Invenio-Records.\"" optional = false -python-versions = "*" +python-versions = ">=3.7" files = [ - {file = "invenio-records-ui-1.2.0.tar.gz", hash = "sha256:d465ed33645712f4c6144836ffca80f3773e7aec3ef596a9f95bafc14535335b"}, - {file = "invenio_records_ui-1.2.0-py2.py3-none-any.whl", hash = "sha256:2e4adc70fc2f257828c6ea99199bac55d013c7922a5e2cb3d652441304e82fd3"}, + {file = "invenio-records-ui-1.2.1.tar.gz", hash = "sha256:bf3ce5498e7300b8577a20fff502ddb22287f2648ccd9b267f12db0d417fc966"}, + {file = "invenio_records_ui-1.2.1-py2.py3-none-any.whl", hash = "sha256:eae21ceb4d95460699ea586c0bb0a48b64bd4a91f3ab2c8bf8effd41e3aa5b0c"}, ] [package.dependencies] @@ -2755,9 +2785,7 @@ invenio-pidstore = ">=1.2.0" invenio-records = ">=1.0.0" [package.extras] -all = ["Sphinx (>=1.5.1)", "invenio-access (>=1.0.0)", "invenio-accounts (>=1.3.0)", "invenio-db[mysql,postgresql,versioning] (>=1.0.0)", "pytest-invenio (>=1.4.0)"] -docs = ["Sphinx (>=1.5.1)"] -tests = ["invenio-access (>=1.0.0)", "invenio-accounts (>=1.3.0)", "invenio-db[mysql,postgresql,versioning] (>=1.0.0)", "pytest-invenio (>=1.4.0)"] +tests = ["invenio-access (>=1.0.0)", "invenio-accounts (>=1.3.0)", "invenio-db[mysql,postgresql,versioning] (>=1.0.0)", "pytest-black (>=0.3.0,<0.3.10)", "pytest-invenio (>=1.4.0)", "sphinx (>=4.5)"] [[package]] name = "invenio-rest" @@ -2783,13 +2811,13 @@ tests = ["Sphinx (>=4.5.0)", "pytest-black (>=0.3.0,<0.3.10)", "pytest-invenio ( [[package]] name = "invenio-search" -version = "2.3.1" +version = "2.4.1" description = "\"Invenio module for information retrieval.\"" optional = false python-versions = ">=3.7" files = [ - {file = "invenio-search-2.3.1.tar.gz", hash = "sha256:ecbb420d4804b3a4fca8bcc9f8b5941067b06204b5a3c62ec997270f3c0e6ba6"}, - {file = "invenio_search-2.3.1-py2.py3-none-any.whl", hash = "sha256:1f9789993bcb5339688e88ec60993802e33c6e94954cb0702d0a43c7c6f94e6b"}, + {file = "invenio-search-2.4.1.tar.gz", hash = "sha256:569df7a0db6b84951acecbd60b5871c9f06aaf61421bf493f9ce646f685eb0a6"}, + {file = "invenio_search-2.4.1-py2.py3-none-any.whl", hash = "sha256:ced2c342a69b8a27262728b00c51d014eaf32a1330499decac192fa810141d21"}, ] [package.dependencies] @@ -2847,13 +2875,13 @@ python-dateutil = ">=2.8.2" [[package]] name = "invenio-stats" -version = "4.0.2" -description = "\"Invenio module for collecting statistics.\"" +version = "4.1.0" +description = "Invenio module for collecting statistics." 
optional = false python-versions = ">=3.7" files = [ - {file = "invenio-stats-4.0.2.tar.gz", hash = "sha256:83db4d152a81e44f5813a64884ca7d7b58dccfc33aeea2abd36ad9b2a3b8fb23"}, - {file = "invenio_stats-4.0.2-py2.py3-none-any.whl", hash = "sha256:08beb0d6e6f960620472673ed1497187c4291cb7b95779f5cac5db3cf7bc5b23"}, + {file = "invenio-stats-4.1.0.tar.gz", hash = "sha256:a77512a4b25f7393ce8671ccc4f7286e1319f53baad0b4f605a383e9461ef7dd"}, + {file = "invenio_stats-4.1.0-py2.py3-none-any.whl", hash = "sha256:93c9276846b38dc96c745d5bd93af4d86510c3a1dc5e2173bb409c224fee2f52"}, ] [package.dependencies] @@ -2874,13 +2902,13 @@ tests = ["Sphinx (>=5,<6)", "invenio-accounts (>=2.1.0)", "invenio-app (>=1.0.0) [[package]] name = "invenio-theme" -version = "3.2.0" +version = "3.3.0" description = "\"Invenio standard theme.\"" optional = false python-versions = ">=3.7" files = [ - {file = "invenio-theme-3.2.0.tar.gz", hash = "sha256:111af436bbe0715a7dc3c9cb36b5dad6ac345edcc1bc65dd19b7679b189c998b"}, - {file = "invenio_theme-3.2.0-py2.py3-none-any.whl", hash = "sha256:1b7c6de8822a53418a700f82a9ea6f271b503c05b2484be74353caa3e1916249"}, + {file = "invenio-theme-3.3.0.tar.gz", hash = "sha256:1df9a12741cff13b0651cc862b7f6cf6673e5127ccca71c0e06e61ba516551c5"}, + {file = "invenio_theme-3.3.0-py2.py3-none-any.whl", hash = "sha256:d93f14a047eb3e0c6faa0bf2c16cb84304f9d03b1100d8d789af1a1de5834586"}, ] [package.dependencies] @@ -3199,29 +3227,29 @@ files = [ [[package]] name = "kombu" -version = "5.3.7" +version = "5.4.0" description = "Messaging library for Python." optional = false python-versions = ">=3.8" files = [ - {file = "kombu-5.3.7-py3-none-any.whl", hash = "sha256:5634c511926309c7f9789f1433e9ed402616b56836ef9878f01bd59267b4c7a9"}, - {file = "kombu-5.3.7.tar.gz", hash = "sha256:011c4cd9a355c14a1de8d35d257314a1d2456d52b7140388561acac3cf1a97bf"}, + {file = "kombu-5.4.0-py3-none-any.whl", hash = "sha256:c8dd99820467610b4febbc7a9e8a0d3d7da2d35116b67184418b51cc520ea6b6"}, + {file = "kombu-5.4.0.tar.gz", hash = "sha256:ad200a8dbdaaa2bbc5f26d2ee7d707d9a1fded353a0f4bd751ce8c7d9f449c60"}, ] [package.dependencies] amqp = ">=5.1.1,<6.0.0" -typing-extensions = {version = "*", markers = "python_version < \"3.10\""} -vine = "*" +typing-extensions = {version = "4.12.2", markers = "python_version < \"3.10\""} +vine = "5.1.0" [package.extras] azureservicebus = ["azure-servicebus (>=7.10.0)"] azurestoragequeues = ["azure-identity (>=1.12.0)", "azure-storage-queue (>=12.6.0)"] confluentkafka = ["confluent-kafka (>=2.2.0)"] -consul = ["python-consul2"] +consul = ["python-consul2 (==0.1.5)"] librabbitmq = ["librabbitmq (>=2.0.0)"] mongodb = ["pymongo (>=4.1.1)"] -msgpack = ["msgpack"] -pyro = ["pyro4"] +msgpack = ["msgpack (==1.0.8)"] +pyro = ["pyro4 (==4.82)"] qpid = ["qpid-python (>=0.26)", "qpid-tools (>=0.26)"] redis = ["redis (>=4.5.2,!=4.5.5,!=5.0.2)"] slmq = ["softlayer-messaging (>=1.0.3)"] @@ -3286,153 +3314,149 @@ ply = ">=3.11" [[package]] name = "lxml" -version = "5.2.2" +version = "5.3.0" description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." 
optional = false python-versions = ">=3.6" files = [ - {file = "lxml-5.2.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:364d03207f3e603922d0d3932ef363d55bbf48e3647395765f9bfcbdf6d23632"}, - {file = "lxml-5.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:50127c186f191b8917ea2fb8b206fbebe87fd414a6084d15568c27d0a21d60db"}, - {file = "lxml-5.2.2-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:74e4f025ef3db1c6da4460dd27c118d8cd136d0391da4e387a15e48e5c975147"}, - {file = "lxml-5.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:981a06a3076997adf7c743dcd0d7a0415582661e2517c7d961493572e909aa1d"}, - {file = "lxml-5.2.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aef5474d913d3b05e613906ba4090433c515e13ea49c837aca18bde190853dff"}, - {file = "lxml-5.2.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1e275ea572389e41e8b039ac076a46cb87ee6b8542df3fff26f5baab43713bca"}, - {file = "lxml-5.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5b65529bb2f21ac7861a0e94fdbf5dc0daab41497d18223b46ee8515e5ad297"}, - {file = "lxml-5.2.2-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:bcc98f911f10278d1daf14b87d65325851a1d29153caaf146877ec37031d5f36"}, - {file = "lxml-5.2.2-cp310-cp310-manylinux_2_28_ppc64le.whl", hash = "sha256:b47633251727c8fe279f34025844b3b3a3e40cd1b198356d003aa146258d13a2"}, - {file = "lxml-5.2.2-cp310-cp310-manylinux_2_28_s390x.whl", hash = "sha256:fbc9d316552f9ef7bba39f4edfad4a734d3d6f93341232a9dddadec4f15d425f"}, - {file = "lxml-5.2.2-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:13e69be35391ce72712184f69000cda04fc89689429179bc4c0ae5f0b7a8c21b"}, - {file = "lxml-5.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3b6a30a9ab040b3f545b697cb3adbf3696c05a3a68aad172e3fd7ca73ab3c835"}, - {file = "lxml-5.2.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:a233bb68625a85126ac9f1fc66d24337d6e8a0f9207b688eec2e7c880f012ec0"}, - {file = "lxml-5.2.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:dfa7c241073d8f2b8e8dbc7803c434f57dbb83ae2a3d7892dd068d99e96efe2c"}, - {file = "lxml-5.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1a7aca7964ac4bb07680d5c9d63b9d7028cace3e2d43175cb50bba8c5ad33316"}, - {file = "lxml-5.2.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ae4073a60ab98529ab8a72ebf429f2a8cc612619a8c04e08bed27450d52103c0"}, - {file = "lxml-5.2.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:ffb2be176fed4457e445fe540617f0252a72a8bc56208fd65a690fdb1f57660b"}, - {file = "lxml-5.2.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:e290d79a4107d7d794634ce3e985b9ae4f920380a813717adf61804904dc4393"}, - {file = "lxml-5.2.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:96e85aa09274955bb6bd483eaf5b12abadade01010478154b0ec70284c1b1526"}, - {file = "lxml-5.2.2-cp310-cp310-win32.whl", hash = "sha256:f956196ef61369f1685d14dad80611488d8dc1ef00be57c0c5a03064005b0f30"}, - {file = "lxml-5.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:875a3f90d7eb5c5d77e529080d95140eacb3c6d13ad5b616ee8095447b1d22e7"}, - {file = "lxml-5.2.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:45f9494613160d0405682f9eee781c7e6d1bf45f819654eb249f8f46a2c22545"}, - {file = "lxml-5.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b0b3f2df149efb242cee2ffdeb6674b7f30d23c9a7af26595099afaf46ef4e88"}, - {file = 
"lxml-5.2.2-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d28cb356f119a437cc58a13f8135ab8a4c8ece18159eb9194b0d269ec4e28083"}, - {file = "lxml-5.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:657a972f46bbefdbba2d4f14413c0d079f9ae243bd68193cb5061b9732fa54c1"}, - {file = "lxml-5.2.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b74b9ea10063efb77a965a8d5f4182806fbf59ed068b3c3fd6f30d2ac7bee734"}, - {file = "lxml-5.2.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:07542787f86112d46d07d4f3c4e7c760282011b354d012dc4141cc12a68cef5f"}, - {file = "lxml-5.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:303f540ad2dddd35b92415b74b900c749ec2010e703ab3bfd6660979d01fd4ed"}, - {file = "lxml-5.2.2-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:2eb2227ce1ff998faf0cd7fe85bbf086aa41dfc5af3b1d80867ecfe75fb68df3"}, - {file = "lxml-5.2.2-cp311-cp311-manylinux_2_28_ppc64le.whl", hash = "sha256:1d8a701774dfc42a2f0b8ccdfe7dbc140500d1049e0632a611985d943fcf12df"}, - {file = "lxml-5.2.2-cp311-cp311-manylinux_2_28_s390x.whl", hash = "sha256:56793b7a1a091a7c286b5f4aa1fe4ae5d1446fe742d00cdf2ffb1077865db10d"}, - {file = "lxml-5.2.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:eb00b549b13bd6d884c863554566095bf6fa9c3cecb2e7b399c4bc7904cb33b5"}, - {file = "lxml-5.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1a2569a1f15ae6c8c64108a2cd2b4a858fc1e13d25846be0666fc144715e32ab"}, - {file = "lxml-5.2.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:8cf85a6e40ff1f37fe0f25719aadf443686b1ac7652593dc53c7ef9b8492b115"}, - {file = "lxml-5.2.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:d237ba6664b8e60fd90b8549a149a74fcc675272e0e95539a00522e4ca688b04"}, - {file = "lxml-5.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0b3f5016e00ae7630a4b83d0868fca1e3d494c78a75b1c7252606a3a1c5fc2ad"}, - {file = "lxml-5.2.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:23441e2b5339bc54dc949e9e675fa35efe858108404ef9aa92f0456929ef6fe8"}, - {file = "lxml-5.2.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:2fb0ba3e8566548d6c8e7dd82a8229ff47bd8fb8c2da237607ac8e5a1b8312e5"}, - {file = "lxml-5.2.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:79d1fb9252e7e2cfe4de6e9a6610c7cbb99b9708e2c3e29057f487de5a9eaefa"}, - {file = "lxml-5.2.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6dcc3d17eac1df7859ae01202e9bb11ffa8c98949dcbeb1069c8b9a75917e01b"}, - {file = "lxml-5.2.2-cp311-cp311-win32.whl", hash = "sha256:4c30a2f83677876465f44c018830f608fa3c6a8a466eb223535035fbc16f3438"}, - {file = "lxml-5.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:49095a38eb333aaf44c06052fd2ec3b8f23e19747ca7ec6f6c954ffea6dbf7be"}, - {file = "lxml-5.2.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:7429e7faa1a60cad26ae4227f4dd0459efde239e494c7312624ce228e04f6391"}, - {file = "lxml-5.2.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:50ccb5d355961c0f12f6cf24b7187dbabd5433f29e15147a67995474f27d1776"}, - {file = "lxml-5.2.2-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc911208b18842a3a57266d8e51fc3cfaccee90a5351b92079beed912a7914c2"}, - {file = "lxml-5.2.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:33ce9e786753743159799fdf8e92a5da351158c4bfb6f2db0bf31e7892a1feb5"}, - {file = 
"lxml-5.2.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ec87c44f619380878bd49ca109669c9f221d9ae6883a5bcb3616785fa8f94c97"}, - {file = "lxml-5.2.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:08ea0f606808354eb8f2dfaac095963cb25d9d28e27edcc375d7b30ab01abbf6"}, - {file = "lxml-5.2.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75a9632f1d4f698b2e6e2e1ada40e71f369b15d69baddb8968dcc8e683839b18"}, - {file = "lxml-5.2.2-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:74da9f97daec6928567b48c90ea2c82a106b2d500f397eeb8941e47d30b1ca85"}, - {file = "lxml-5.2.2-cp312-cp312-manylinux_2_28_ppc64le.whl", hash = "sha256:0969e92af09c5687d769731e3f39ed62427cc72176cebb54b7a9d52cc4fa3b73"}, - {file = "lxml-5.2.2-cp312-cp312-manylinux_2_28_s390x.whl", hash = "sha256:9164361769b6ca7769079f4d426a41df6164879f7f3568be9086e15baca61466"}, - {file = "lxml-5.2.2-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:d26a618ae1766279f2660aca0081b2220aca6bd1aa06b2cf73f07383faf48927"}, - {file = "lxml-5.2.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab67ed772c584b7ef2379797bf14b82df9aa5f7438c5b9a09624dd834c1c1aaf"}, - {file = "lxml-5.2.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:3d1e35572a56941b32c239774d7e9ad724074d37f90c7a7d499ab98761bd80cf"}, - {file = "lxml-5.2.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:8268cbcd48c5375f46e000adb1390572c98879eb4f77910c6053d25cc3ac2c67"}, - {file = "lxml-5.2.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e282aedd63c639c07c3857097fc0e236f984ceb4089a8b284da1c526491e3f3d"}, - {file = "lxml-5.2.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dfdc2bfe69e9adf0df4915949c22a25b39d175d599bf98e7ddf620a13678585"}, - {file = "lxml-5.2.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4aefd911793b5d2d7a921233a54c90329bf3d4a6817dc465f12ffdfe4fc7b8fe"}, - {file = "lxml-5.2.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:8b8df03a9e995b6211dafa63b32f9d405881518ff1ddd775db4e7b98fb545e1c"}, - {file = "lxml-5.2.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f11ae142f3a322d44513de1018b50f474f8f736bc3cd91d969f464b5bfef8836"}, - {file = "lxml-5.2.2-cp312-cp312-win32.whl", hash = "sha256:16a8326e51fcdffc886294c1e70b11ddccec836516a343f9ed0f82aac043c24a"}, - {file = "lxml-5.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:bbc4b80af581e18568ff07f6395c02114d05f4865c2812a1f02f2eaecf0bfd48"}, - {file = "lxml-5.2.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:e3d9d13603410b72787579769469af730c38f2f25505573a5888a94b62b920f8"}, - {file = "lxml-5.2.2-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:38b67afb0a06b8575948641c1d6d68e41b83a3abeae2ca9eed2ac59892b36706"}, - {file = "lxml-5.2.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c689d0d5381f56de7bd6966a4541bff6e08bf8d3871bbd89a0c6ab18aa699573"}, - {file = "lxml-5.2.2-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:cf2a978c795b54c539f47964ec05e35c05bd045db5ca1e8366988c7f2fe6b3ce"}, - {file = "lxml-5.2.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:739e36ef7412b2bd940f75b278749106e6d025e40027c0b94a17ef7968d55d56"}, - {file = "lxml-5.2.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:d8bbcd21769594dbba9c37d3c819e2d5847656ca99c747ddb31ac1701d0c0ed9"}, - {file = "lxml-5.2.2-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = 
"sha256:2304d3c93f2258ccf2cf7a6ba8c761d76ef84948d87bf9664e14d203da2cd264"}, - {file = "lxml-5.2.2-cp36-cp36m-win32.whl", hash = "sha256:02437fb7308386867c8b7b0e5bc4cd4b04548b1c5d089ffb8e7b31009b961dc3"}, - {file = "lxml-5.2.2-cp36-cp36m-win_amd64.whl", hash = "sha256:edcfa83e03370032a489430215c1e7783128808fd3e2e0a3225deee278585196"}, - {file = "lxml-5.2.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:28bf95177400066596cdbcfc933312493799382879da504633d16cf60bba735b"}, - {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3a745cc98d504d5bd2c19b10c79c61c7c3df9222629f1b6210c0368177589fb8"}, - {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b590b39ef90c6b22ec0be925b211298e810b4856909c8ca60d27ffbca6c12e6"}, - {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b336b0416828022bfd5a2e3083e7f5ba54b96242159f83c7e3eebaec752f1716"}, - {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:c2faf60c583af0d135e853c86ac2735ce178f0e338a3c7f9ae8f622fd2eb788c"}, - {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:4bc6cb140a7a0ad1f7bc37e018d0ed690b7b6520ade518285dc3171f7a117905"}, - {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7ff762670cada8e05b32bf1e4dc50b140790909caa8303cfddc4d702b71ea184"}, - {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:57f0a0bbc9868e10ebe874e9f129d2917750adf008fe7b9c1598c0fbbfdde6a6"}, - {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:a6d2092797b388342c1bc932077ad232f914351932353e2e8706851c870bca1f"}, - {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:60499fe961b21264e17a471ec296dcbf4365fbea611bf9e303ab69db7159ce61"}, - {file = "lxml-5.2.2-cp37-cp37m-win32.whl", hash = "sha256:d9b342c76003c6b9336a80efcc766748a333573abf9350f4094ee46b006ec18f"}, - {file = "lxml-5.2.2-cp37-cp37m-win_amd64.whl", hash = "sha256:b16db2770517b8799c79aa80f4053cd6f8b716f21f8aca962725a9565ce3ee40"}, - {file = "lxml-5.2.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7ed07b3062b055d7a7f9d6557a251cc655eed0b3152b76de619516621c56f5d3"}, - {file = "lxml-5.2.2-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f60fdd125d85bf9c279ffb8e94c78c51b3b6a37711464e1f5f31078b45002421"}, - {file = "lxml-5.2.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a7e24cb69ee5f32e003f50e016d5fde438010c1022c96738b04fc2423e61706"}, - {file = "lxml-5.2.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23cfafd56887eaed93d07bc4547abd5e09d837a002b791e9767765492a75883f"}, - {file = "lxml-5.2.2-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:19b4e485cd07b7d83e3fe3b72132e7df70bfac22b14fe4bf7a23822c3a35bff5"}, - {file = "lxml-5.2.2-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:7ce7ad8abebe737ad6143d9d3bf94b88b93365ea30a5b81f6877ec9c0dee0a48"}, - {file = "lxml-5.2.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e49b052b768bb74f58c7dda4e0bdf7b79d43a9204ca584ffe1fb48a6f3c84c66"}, - {file = "lxml-5.2.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d14a0d029a4e176795cef99c056d58067c06195e0c7e2dbb293bf95c08f772a3"}, - {file = "lxml-5.2.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:be49ad33819d7dcc28a309b86d4ed98e1a65f3075c6acd3cd4fe32103235222b"}, - {file = 
"lxml-5.2.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:a6d17e0370d2516d5bb9062c7b4cb731cff921fc875644c3d751ad857ba9c5b1"}, - {file = "lxml-5.2.2-cp38-cp38-win32.whl", hash = "sha256:5b8c041b6265e08eac8a724b74b655404070b636a8dd6d7a13c3adc07882ef30"}, - {file = "lxml-5.2.2-cp38-cp38-win_amd64.whl", hash = "sha256:f61efaf4bed1cc0860e567d2ecb2363974d414f7f1f124b1df368bbf183453a6"}, - {file = "lxml-5.2.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:fb91819461b1b56d06fa4bcf86617fac795f6a99d12239fb0c68dbeba41a0a30"}, - {file = "lxml-5.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d4ed0c7cbecde7194cd3228c044e86bf73e30a23505af852857c09c24e77ec5d"}, - {file = "lxml-5.2.2-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54401c77a63cc7d6dc4b4e173bb484f28a5607f3df71484709fe037c92d4f0ed"}, - {file = "lxml-5.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:625e3ef310e7fa3a761d48ca7ea1f9d8718a32b1542e727d584d82f4453d5eeb"}, - {file = "lxml-5.2.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:519895c99c815a1a24a926d5b60627ce5ea48e9f639a5cd328bda0515ea0f10c"}, - {file = "lxml-5.2.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c7079d5eb1c1315a858bbf180000757db8ad904a89476653232db835c3114001"}, - {file = "lxml-5.2.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:343ab62e9ca78094f2306aefed67dcfad61c4683f87eee48ff2fd74902447726"}, - {file = "lxml-5.2.2-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:cd9e78285da6c9ba2d5c769628f43ef66d96ac3085e59b10ad4f3707980710d3"}, - {file = "lxml-5.2.2-cp39-cp39-manylinux_2_28_ppc64le.whl", hash = "sha256:546cf886f6242dff9ec206331209db9c8e1643ae642dea5fdbecae2453cb50fd"}, - {file = "lxml-5.2.2-cp39-cp39-manylinux_2_28_s390x.whl", hash = "sha256:02f6a8eb6512fdc2fd4ca10a49c341c4e109aa6e9448cc4859af5b949622715a"}, - {file = "lxml-5.2.2-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:339ee4a4704bc724757cd5dd9dc8cf4d00980f5d3e6e06d5847c1b594ace68ab"}, - {file = "lxml-5.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0a028b61a2e357ace98b1615fc03f76eb517cc028993964fe08ad514b1e8892d"}, - {file = "lxml-5.2.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:f90e552ecbad426eab352e7b2933091f2be77115bb16f09f78404861c8322981"}, - {file = "lxml-5.2.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:d83e2d94b69bf31ead2fa45f0acdef0757fa0458a129734f59f67f3d2eb7ef32"}, - {file = "lxml-5.2.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a02d3c48f9bb1e10c7788d92c0c7db6f2002d024ab6e74d6f45ae33e3d0288a3"}, - {file = "lxml-5.2.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:6d68ce8e7b2075390e8ac1e1d3a99e8b6372c694bbe612632606d1d546794207"}, - {file = "lxml-5.2.2-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:453d037e09a5176d92ec0fd282e934ed26d806331a8b70ab431a81e2fbabf56d"}, - {file = "lxml-5.2.2-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:3b019d4ee84b683342af793b56bb35034bd749e4cbdd3d33f7d1107790f8c472"}, - {file = "lxml-5.2.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:cb3942960f0beb9f46e2a71a3aca220d1ca32feb5a398656be934320804c0df9"}, - {file = "lxml-5.2.2-cp39-cp39-win32.whl", hash = "sha256:ac6540c9fff6e3813d29d0403ee7a81897f1d8ecc09a8ff84d2eea70ede1cdbf"}, - {file = "lxml-5.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:610b5c77428a50269f38a534057444c249976433f40f53e3b47e68349cca1425"}, - {file = 
"lxml-5.2.2-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b537bd04d7ccd7c6350cdaaaad911f6312cbd61e6e6045542f781c7f8b2e99d2"}, - {file = "lxml-5.2.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4820c02195d6dfb7b8508ff276752f6b2ff8b64ae5d13ebe02e7667e035000b9"}, - {file = "lxml-5.2.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2a09f6184f17a80897172863a655467da2b11151ec98ba8d7af89f17bf63dae"}, - {file = "lxml-5.2.2-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:76acba4c66c47d27c8365e7c10b3d8016a7da83d3191d053a58382311a8bf4e1"}, - {file = "lxml-5.2.2-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b128092c927eaf485928cec0c28f6b8bead277e28acf56800e972aa2c2abd7a2"}, - {file = "lxml-5.2.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ae791f6bd43305aade8c0e22f816b34f3b72b6c820477aab4d18473a37e8090b"}, - {file = "lxml-5.2.2-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a2f6a1bc2460e643785a2cde17293bd7a8f990884b822f7bca47bee0a82fc66b"}, - {file = "lxml-5.2.2-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e8d351ff44c1638cb6e980623d517abd9f580d2e53bfcd18d8941c052a5a009"}, - {file = "lxml-5.2.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bec4bd9133420c5c52d562469c754f27c5c9e36ee06abc169612c959bd7dbb07"}, - {file = "lxml-5.2.2-pp37-pypy37_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:55ce6b6d803890bd3cc89975fca9de1dff39729b43b73cb15ddd933b8bc20484"}, - {file = "lxml-5.2.2-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:8ab6a358d1286498d80fe67bd3d69fcbc7d1359b45b41e74c4a26964ca99c3f8"}, - {file = "lxml-5.2.2-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:06668e39e1f3c065349c51ac27ae430719d7806c026fec462e5693b08b95696b"}, - {file = "lxml-5.2.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9cd5323344d8ebb9fb5e96da5de5ad4ebab993bbf51674259dbe9d7a18049525"}, - {file = "lxml-5.2.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89feb82ca055af0fe797a2323ec9043b26bc371365847dbe83c7fd2e2f181c34"}, - {file = "lxml-5.2.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e481bba1e11ba585fb06db666bfc23dbe181dbafc7b25776156120bf12e0d5a6"}, - {file = "lxml-5.2.2-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:9d6c6ea6a11ca0ff9cd0390b885984ed31157c168565702959c25e2191674a14"}, - {file = "lxml-5.2.2-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3d98de734abee23e61f6b8c2e08a88453ada7d6486dc7cdc82922a03968928db"}, - {file = "lxml-5.2.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:69ab77a1373f1e7563e0fb5a29a8440367dec051da6c7405333699d07444f511"}, - {file = "lxml-5.2.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:34e17913c431f5ae01d8658dbf792fdc457073dcdfbb31dc0cc6ab256e664a8d"}, - {file = "lxml-5.2.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05f8757b03208c3f50097761be2dea0aba02e94f0dc7023ed73a7bb14ff11eb0"}, - {file = "lxml-5.2.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a520b4f9974b0a0a6ed73c2154de57cdfd0c8800f4f15ab2b73238ffed0b36e"}, - {file = "lxml-5.2.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5e097646944b66207023bc3c634827de858aebc226d5d4d6d16f0b77566ea182"}, - {file = "lxml-5.2.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = 
"sha256:b5e4ef22ff25bfd4ede5f8fb30f7b24446345f3e79d9b7455aef2836437bc38a"}, - {file = "lxml-5.2.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:ff69a9a0b4b17d78170c73abe2ab12084bdf1691550c5629ad1fe7849433f324"}, - {file = "lxml-5.2.2.tar.gz", hash = "sha256:bb2dc4898180bea79863d5487e5f9c7c34297414bad54bcd0f0852aee9cfdb87"}, + {file = "lxml-5.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:dd36439be765e2dde7660212b5275641edbc813e7b24668831a5c8ac91180656"}, + {file = "lxml-5.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ae5fe5c4b525aa82b8076c1a59d642c17b6e8739ecf852522c6321852178119d"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:501d0d7e26b4d261fca8132854d845e4988097611ba2531408ec91cf3fd9d20a"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb66442c2546446944437df74379e9cf9e9db353e61301d1a0e26482f43f0dd8"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9e41506fec7a7f9405b14aa2d5c8abbb4dbbd09d88f9496958b6d00cb4d45330"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f7d4a670107d75dfe5ad080bed6c341d18c4442f9378c9f58e5851e86eb79965"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41ce1f1e2c7755abfc7e759dc34d7d05fd221723ff822947132dc934d122fe22"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:44264ecae91b30e5633013fb66f6ddd05c006d3e0e884f75ce0b4755b3e3847b"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_28_ppc64le.whl", hash = "sha256:3c174dc350d3ec52deb77f2faf05c439331d6ed5e702fc247ccb4e6b62d884b7"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_28_s390x.whl", hash = "sha256:2dfab5fa6a28a0b60a20638dc48e6343c02ea9933e3279ccb132f555a62323d8"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:b1c8c20847b9f34e98080da785bb2336ea982e7f913eed5809e5a3c872900f32"}, + {file = "lxml-5.3.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:2c86bf781b12ba417f64f3422cfc302523ac9cd1d8ae8c0f92a1c66e56ef2e86"}, + {file = "lxml-5.3.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:c162b216070f280fa7da844531169be0baf9ccb17263cf5a8bf876fcd3117fa5"}, + {file = "lxml-5.3.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:36aef61a1678cb778097b4a6eeae96a69875d51d1e8f4d4b491ab3cfb54b5a03"}, + {file = "lxml-5.3.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f65e5120863c2b266dbcc927b306c5b78e502c71edf3295dfcb9501ec96e5fc7"}, + {file = "lxml-5.3.0-cp310-cp310-win32.whl", hash = "sha256:ef0c1fe22171dd7c7c27147f2e9c3e86f8bdf473fed75f16b0c2e84a5030ce80"}, + {file = "lxml-5.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:052d99051e77a4f3e8482c65014cf6372e61b0a6f4fe9edb98503bb5364cfee3"}, + {file = "lxml-5.3.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:74bcb423462233bc5d6066e4e98b0264e7c1bed7541fff2f4e34fe6b21563c8b"}, + {file = "lxml-5.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a3d819eb6f9b8677f57f9664265d0a10dd6551d227afb4af2b9cd7bdc2ccbf18"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b8f5db71b28b8c404956ddf79575ea77aa8b1538e8b2ef9ec877945b3f46442"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:2c3406b63232fc7e9b8783ab0b765d7c59e7c59ff96759d8ef9632fca27c7ee4"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2ecdd78ab768f844c7a1d4a03595038c166b609f6395e25af9b0f3f26ae1230f"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:168f2dfcfdedf611eb285efac1516c8454c8c99caf271dccda8943576b67552e"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa617107a410245b8660028a7483b68e7914304a6d4882b5ff3d2d3eb5948d8c"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:69959bd3167b993e6e710b99051265654133a98f20cec1d9b493b931942e9c16"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_28_ppc64le.whl", hash = "sha256:bd96517ef76c8654446fc3db9242d019a1bb5fe8b751ba414765d59f99210b79"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_28_s390x.whl", hash = "sha256:ab6dd83b970dc97c2d10bc71aa925b84788c7c05de30241b9e96f9b6d9ea3080"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:eec1bb8cdbba2925bedc887bc0609a80e599c75b12d87ae42ac23fd199445654"}, + {file = "lxml-5.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6a7095eeec6f89111d03dabfe5883a1fd54da319c94e0fb104ee8f23616b572d"}, + {file = "lxml-5.3.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:6f651ebd0b21ec65dfca93aa629610a0dbc13dbc13554f19b0113da2e61a4763"}, + {file = "lxml-5.3.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:f422a209d2455c56849442ae42f25dbaaba1c6c3f501d58761c619c7836642ec"}, + {file = "lxml-5.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:62f7fdb0d1ed2065451f086519865b4c90aa19aed51081979ecd05a21eb4d1be"}, + {file = "lxml-5.3.0-cp311-cp311-win32.whl", hash = "sha256:c6379f35350b655fd817cd0d6cbeef7f265f3ae5fedb1caae2eb442bbeae9ab9"}, + {file = "lxml-5.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:9c52100e2c2dbb0649b90467935c4b0de5528833c76a35ea1a2691ec9f1ee7a1"}, + {file = "lxml-5.3.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:e99f5507401436fdcc85036a2e7dc2e28d962550afe1cbfc07c40e454256a859"}, + {file = "lxml-5.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:384aacddf2e5813a36495233b64cb96b1949da72bef933918ba5c84e06af8f0e"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:874a216bf6afaf97c263b56371434e47e2c652d215788396f60477540298218f"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65ab5685d56914b9a2a34d67dd5488b83213d680b0c5d10b47f81da5a16b0b0e"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aac0bbd3e8dd2d9c45ceb82249e8bdd3ac99131a32b4d35c8af3cc9db1657179"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b369d3db3c22ed14c75ccd5af429086f166a19627e84a8fdade3f8f31426e52a"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c24037349665434f375645fa9d1f5304800cec574d0310f618490c871fd902b3"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:62d172f358f33a26d6b41b28c170c63886742f5b6772a42b59b4f0fa10526cb1"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_28_ppc64le.whl", hash = "sha256:c1f794c02903c2824fccce5b20c339a1a14b114e83b306ff11b597c5f71a1c8d"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_28_s390x.whl", hash = 
"sha256:5d6a6972b93c426ace71e0be9a6f4b2cfae9b1baed2eed2006076a746692288c"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:3879cc6ce938ff4eb4900d901ed63555c778731a96365e53fadb36437a131a99"}, + {file = "lxml-5.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:74068c601baff6ff021c70f0935b0c7bc528baa8ea210c202e03757c68c5a4ff"}, + {file = "lxml-5.3.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:ecd4ad8453ac17bc7ba3868371bffb46f628161ad0eefbd0a855d2c8c32dd81a"}, + {file = "lxml-5.3.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:7e2f58095acc211eb9d8b5771bf04df9ff37d6b87618d1cbf85f92399c98dae8"}, + {file = "lxml-5.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e63601ad5cd8f860aa99d109889b5ac34de571c7ee902d6812d5d9ddcc77fa7d"}, + {file = "lxml-5.3.0-cp312-cp312-win32.whl", hash = "sha256:17e8d968d04a37c50ad9c456a286b525d78c4a1c15dd53aa46c1d8e06bf6fa30"}, + {file = "lxml-5.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:c1a69e58a6bb2de65902051d57fde951febad631a20a64572677a1052690482f"}, + {file = "lxml-5.3.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8c72e9563347c7395910de6a3100a4840a75a6f60e05af5e58566868d5eb2d6a"}, + {file = "lxml-5.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e92ce66cd919d18d14b3856906a61d3f6b6a8500e0794142338da644260595cd"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d04f064bebdfef9240478f7a779e8c5dc32b8b7b0b2fc6a62e39b928d428e51"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c2fb570d7823c2bbaf8b419ba6e5662137f8166e364a8b2b91051a1fb40ab8b"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0c120f43553ec759f8de1fee2f4794452b0946773299d44c36bfe18e83caf002"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:562e7494778a69086f0312ec9689f6b6ac1c6b65670ed7d0267e49f57ffa08c4"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:423b121f7e6fa514ba0c7918e56955a1d4470ed35faa03e3d9f0e3baa4c7e492"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:c00f323cc00576df6165cc9d21a4c21285fa6b9989c5c39830c3903dc4303ef3"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_28_ppc64le.whl", hash = "sha256:1fdc9fae8dd4c763e8a31e7630afef517eab9f5d5d31a278df087f307bf601f4"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_28_s390x.whl", hash = "sha256:658f2aa69d31e09699705949b5fc4719cbecbd4a97f9656a232e7d6c7be1a367"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:1473427aff3d66a3fa2199004c3e601e6c4500ab86696edffdbc84954c72d832"}, + {file = "lxml-5.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a87de7dd873bf9a792bf1e58b1c3887b9264036629a5bf2d2e6579fe8e73edff"}, + {file = "lxml-5.3.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:0d7b36afa46c97875303a94e8f3ad932bf78bace9e18e603f2085b652422edcd"}, + {file = "lxml-5.3.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:cf120cce539453ae086eacc0130a324e7026113510efa83ab42ef3fcfccac7fb"}, + {file = "lxml-5.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:df5c7333167b9674aa8ae1d4008fa4bc17a313cc490b2cca27838bbdcc6bb15b"}, + {file = "lxml-5.3.0-cp313-cp313-win32.whl", hash = "sha256:c802e1c2ed9f0c06a65bc4ed0189d000ada8049312cfeab6ca635e39c9608957"}, + {file = "lxml-5.3.0-cp313-cp313-win_amd64.whl", 
hash = "sha256:406246b96d552e0503e17a1006fd27edac678b3fcc9f1be71a2f94b4ff61528d"}, + {file = "lxml-5.3.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:8f0de2d390af441fe8b2c12626d103540b5d850d585b18fcada58d972b74a74e"}, + {file = "lxml-5.3.0-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1afe0a8c353746e610bd9031a630a95bcfb1a720684c3f2b36c4710a0a96528f"}, + {file = "lxml-5.3.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56b9861a71575f5795bde89256e7467ece3d339c9b43141dbdd54544566b3b94"}, + {file = "lxml-5.3.0-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:9fb81d2824dff4f2e297a276297e9031f46d2682cafc484f49de182aa5e5df99"}, + {file = "lxml-5.3.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:2c226a06ecb8cdef28845ae976da407917542c5e6e75dcac7cc33eb04aaeb237"}, + {file = "lxml-5.3.0-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:7d3d1ca42870cdb6d0d29939630dbe48fa511c203724820fc0fd507b2fb46577"}, + {file = "lxml-5.3.0-cp36-cp36m-win32.whl", hash = "sha256:094cb601ba9f55296774c2d57ad68730daa0b13dc260e1f941b4d13678239e70"}, + {file = "lxml-5.3.0-cp36-cp36m-win_amd64.whl", hash = "sha256:eafa2c8658f4e560b098fe9fc54539f86528651f61849b22111a9b107d18910c"}, + {file = "lxml-5.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:cb83f8a875b3d9b458cada4f880fa498646874ba4011dc974e071a0a84a1b033"}, + {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:25f1b69d41656b05885aa185f5fdf822cb01a586d1b32739633679699f220391"}, + {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23e0553b8055600b3bf4a00b255ec5c92e1e4aebf8c2c09334f8368e8bd174d6"}, + {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ada35dd21dc6c039259596b358caab6b13f4db4d4a7f8665764d616daf9cc1d"}, + {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:81b4e48da4c69313192d8c8d4311e5d818b8be1afe68ee20f6385d0e96fc9512"}, + {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:2bc9fd5ca4729af796f9f59cd8ff160fe06a474da40aca03fcc79655ddee1a8b"}, + {file = "lxml-5.3.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:07da23d7ee08577760f0a71d67a861019103e4812c87e2fab26b039054594cc5"}, + {file = "lxml-5.3.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:ea2e2f6f801696ad7de8aec061044d6c8c0dd4037608c7cab38a9a4d316bfb11"}, + {file = "lxml-5.3.0-cp37-cp37m-win32.whl", hash = "sha256:5c54afdcbb0182d06836cc3d1be921e540be3ebdf8b8a51ee3ef987537455f84"}, + {file = "lxml-5.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:f2901429da1e645ce548bf9171784c0f74f0718c3f6150ce166be39e4dd66c3e"}, + {file = "lxml-5.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c56a1d43b2f9ee4786e4658c7903f05da35b923fb53c11025712562d5cc02753"}, + {file = "lxml-5.3.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ee8c39582d2652dcd516d1b879451500f8db3fe3607ce45d7c5957ab2596040"}, + {file = "lxml-5.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fdf3a3059611f7585a78ee10399a15566356116a4288380921a4b598d807a22"}, + {file = "lxml-5.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:146173654d79eb1fc97498b4280c1d3e1e5d58c398fa530905c9ea50ea849b22"}, + {file = "lxml-5.3.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = 
"sha256:0a7056921edbdd7560746f4221dca89bb7a3fe457d3d74267995253f46343f15"}, + {file = "lxml-5.3.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:9e4b47ac0f5e749cfc618efdf4726269441014ae1d5583e047b452a32e221920"}, + {file = "lxml-5.3.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:f914c03e6a31deb632e2daa881fe198461f4d06e57ac3d0e05bbcab8eae01945"}, + {file = "lxml-5.3.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:213261f168c5e1d9b7535a67e68b1f59f92398dd17a56d934550837143f79c42"}, + {file = "lxml-5.3.0-cp38-cp38-win32.whl", hash = "sha256:218c1b2e17a710e363855594230f44060e2025b05c80d1f0661258142b2add2e"}, + {file = "lxml-5.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:315f9542011b2c4e1d280e4a20ddcca1761993dda3afc7a73b01235f8641e903"}, + {file = "lxml-5.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1ffc23010330c2ab67fac02781df60998ca8fe759e8efde6f8b756a20599c5de"}, + {file = "lxml-5.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2b3778cb38212f52fac9fe913017deea2fdf4eb1a4f8e4cfc6b009a13a6d3fcc"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b0c7a688944891086ba192e21c5229dea54382f4836a209ff8d0a660fac06be"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:747a3d3e98e24597981ca0be0fd922aebd471fa99d0043a3842d00cdcad7ad6a"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86a6b24b19eaebc448dc56b87c4865527855145d851f9fc3891673ff97950540"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b11a5d918a6216e521c715b02749240fb07ae5a1fefd4b7bf12f833bc8b4fe70"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68b87753c784d6acb8a25b05cb526c3406913c9d988d51f80adecc2b0775d6aa"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:109fa6fede314cc50eed29e6e56c540075e63d922455346f11e4d7a036d2b8cf"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_28_ppc64le.whl", hash = "sha256:02ced472497b8362c8e902ade23e3300479f4f43e45f4105c85ef43b8db85229"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_28_s390x.whl", hash = "sha256:6b038cc86b285e4f9fea2ba5ee76e89f21ed1ea898e287dc277a25884f3a7dfe"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:7437237c6a66b7ca341e868cda48be24b8701862757426852c9b3186de1da8a2"}, + {file = "lxml-5.3.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7f41026c1d64043a36fda21d64c5026762d53a77043e73e94b71f0521939cc71"}, + {file = "lxml-5.3.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:482c2f67761868f0108b1743098640fbb2a28a8e15bf3f47ada9fa59d9fe08c3"}, + {file = "lxml-5.3.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:1483fd3358963cc5c1c9b122c80606a3a79ee0875bcac0204149fa09d6ff2727"}, + {file = "lxml-5.3.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2dec2d1130a9cda5b904696cec33b2cfb451304ba9081eeda7f90f724097300a"}, + {file = "lxml-5.3.0-cp39-cp39-win32.whl", hash = "sha256:a0eabd0a81625049c5df745209dc7fcef6e2aea7793e5f003ba363610aa0a3ff"}, + {file = "lxml-5.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:89e043f1d9d341c52bf2af6d02e6adde62e0a46e6755d5eb60dc6e4f0b8aeca2"}, + {file = "lxml-5.3.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7b1cd427cb0d5f7393c31b7496419da594fe600e6fdc4b105a54f82405e6626c"}, + {file = "lxml-5.3.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:51806cfe0279e06ed8500ce19479d757db42a30fd509940b1701be9c86a5ff9a"}, + {file = "lxml-5.3.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee70d08fd60c9565ba8190f41a46a54096afa0eeb8f76bd66f2c25d3b1b83005"}, + {file = "lxml-5.3.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:8dc2c0395bea8254d8daebc76dcf8eb3a95ec2a46fa6fae5eaccee366bfe02ce"}, + {file = "lxml-5.3.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:6ba0d3dcac281aad8a0e5b14c7ed6f9fa89c8612b47939fc94f80b16e2e9bc83"}, + {file = "lxml-5.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:6e91cf736959057f7aac7adfc83481e03615a8e8dd5758aa1d95ea69e8931dba"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:94d6c3782907b5e40e21cadf94b13b0842ac421192f26b84c45f13f3c9d5dc27"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c300306673aa0f3ed5ed9372b21867690a17dba38c68c44b287437c362ce486b"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78d9b952e07aed35fe2e1a7ad26e929595412db48535921c5013edc8aa4a35ce"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:01220dca0d066d1349bd6a1726856a78f7929f3878f7e2ee83c296c69495309e"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:2d9b8d9177afaef80c53c0a9e30fa252ff3036fb1c6494d427c066a4ce6a282f"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:20094fc3f21ea0a8669dc4c61ed7fa8263bd37d97d93b90f28fc613371e7a875"}, + {file = "lxml-5.3.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ace2c2326a319a0bb8a8b0e5b570c764962e95818de9f259ce814ee666603f19"}, + {file = "lxml-5.3.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:92e67a0be1639c251d21e35fe74df6bcc40cba445c2cda7c4a967656733249e2"}, + {file = "lxml-5.3.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd5350b55f9fecddc51385463a4f67a5da829bc741e38cf689f38ec9023f54ab"}, + {file = "lxml-5.3.0-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:4c1fefd7e3d00921c44dc9ca80a775af49698bbfd92ea84498e56acffd4c5469"}, + {file = "lxml-5.3.0-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:71a8dd38fbd2f2319136d4ae855a7078c69c9a38ae06e0c17c73fd70fc6caad8"}, + {file = "lxml-5.3.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:97acf1e1fd66ab53dacd2c35b319d7e548380c2e9e8c54525c6e76d21b1ae3b1"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:68934b242c51eb02907c5b81d138cb977b2129a0a75a8f8b60b01cb8586c7b21"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b710bc2b8292966b23a6a0121f7a6c51d45d2347edcc75f016ac123b8054d3f2"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18feb4b93302091b1541221196a2155aa296c363fd233814fa11e181adebc52f"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:3eb44520c4724c2e1a57c0af33a379eee41792595023f367ba3952a2d96c2aab"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:609251a0ca4770e5a8768ff902aa02bf636339c5a93f9349b48eb1f606f7f3e9"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:516f491c834eb320d6c843156440fe7fc0d50b33e44387fcec5b02f0bc118a4c"}, + {file = "lxml-5.3.0.tar.gz", hash = 
"sha256:4e109ca30d1edec1ac60cdbe341905dc3b8f55b16855e03a54aaf59e51ec8c6f"}, ] [package.extras] @@ -3440,17 +3464,17 @@ cssselect = ["cssselect (>=0.7)"] html-clean = ["lxml-html-clean"] html5 = ["html5lib"] htmlsoup = ["BeautifulSoup4"] -source = ["Cython (>=3.0.10)"] +source = ["Cython (>=3.0.11)"] [[package]] name = "lxml-html-clean" -version = "0.1.1" +version = "0.2.0" description = "HTML cleaner from lxml project" optional = false python-versions = "*" files = [ - {file = "lxml_html_clean-0.1.1-py3-none-any.whl", hash = "sha256:58c04176593c9caf72ec92e033d2f38859e918b3eff0cc0f8051ad27dc2ab8ef"}, - {file = "lxml_html_clean-0.1.1.tar.gz", hash = "sha256:8a644ed01dbbe132fabddb9467f077f6dad12a1d4f3a6a553e280f3815fa46df"}, + {file = "lxml_html_clean-0.2.0-py3-none-any.whl", hash = "sha256:80bdc730b288b8e68f0bf86b99f4bbef129c5ec59b694c6681422be4c1eeb3c5"}, + {file = "lxml_html_clean-0.2.0.tar.gz", hash = "sha256:47c323f39d95d4cbf4956da62929c89a79313074467efaa4821013c97bf95628"}, ] [package.dependencies] @@ -3576,13 +3600,13 @@ files = [ [[package]] name = "marshmallow" -version = "3.21.3" +version = "3.22.0" description = "A lightweight library for converting complex datatypes to and from native Python datatypes." optional = false python-versions = ">=3.8" files = [ - {file = "marshmallow-3.21.3-py3-none-any.whl", hash = "sha256:86ce7fb914aa865001a4b2092c4c2872d13bc347f3d42673272cabfdbad386f1"}, - {file = "marshmallow-3.21.3.tar.gz", hash = "sha256:4f57c5e050a54d66361e826f94fba213eb10b67b2fdb02c3e0343ce207ba1662"}, + {file = "marshmallow-3.22.0-py3-none-any.whl", hash = "sha256:71a2dce49ef901c3f97ed296ae5051135fd3febd2bf43afe0ae9a82143a494d9"}, + {file = "marshmallow-3.22.0.tar.gz", hash = "sha256:4972f529104a220bb8637d595aa4c9762afbe7f7a77d82dc58c1615d70c5823e"}, ] [package.dependencies] @@ -3590,7 +3614,7 @@ packaging = ">=17.0" [package.extras] dev = ["marshmallow[tests]", "pre-commit (>=3.5,<4.0)", "tox"] -docs = ["alabaster (==0.7.16)", "autodocsumm (==0.2.12)", "sphinx (==7.3.7)", "sphinx-issues (==4.1.0)", "sphinx-version-warning (==1.1.2)"] +docs = ["alabaster (==1.0.0)", "autodocsumm (==0.2.13)", "sphinx (==8.0.2)", "sphinx-issues (==4.1.0)", "sphinx-version-warning (==1.1.2)"] tests = ["pytest", "pytz", "simplejson"] [[package]] @@ -4307,7 +4331,6 @@ files = [ {file = "psycopg2_binary-2.9.9-cp311-cp311-win32.whl", hash = "sha256:dc4926288b2a3e9fd7b50dc6a1909a13bbdadfc67d93f3374d984e56f885579d"}, {file = "psycopg2_binary-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:b76bedd166805480ab069612119ea636f5ab8f8771e640ae103e05a4aae3e417"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:8532fd6e6e2dc57bcb3bc90b079c60de896d2128c5d9d6f24a63875a95a088cf"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b0605eaed3eb239e87df0d5e3c6489daae3f7388d455d0c0b4df899519c6a38d"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f8544b092a29a6ddd72f3556a9fcf249ec412e10ad28be6a0c0d948924f2212"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2d423c8d8a3c82d08fe8af900ad5b613ce3632a1249fd6a223941d0735fce493"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e5afae772c00980525f6d6ecf7cbca55676296b580c0e6abb407f15f3706996"}, @@ -4316,8 +4339,6 @@ files = [ {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_i686.whl", hash = 
"sha256:cb16c65dcb648d0a43a2521f2f0a2300f40639f6f8c1ecbc662141e4e3e1ee07"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:911dda9c487075abd54e644ccdf5e5c16773470a6a5d3826fda76699410066fb"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:57fede879f08d23c85140a360c6a77709113efd1c993923c59fde17aa27599fe"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-win32.whl", hash = "sha256:64cf30263844fa208851ebb13b0732ce674d8ec6a0c86a4e160495d299ba3c93"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-win_amd64.whl", hash = "sha256:81ff62668af011f9a48787564ab7eded4e9fb17a4a6a74af5ffa6a457400d2ab"}, {file = "psycopg2_binary-2.9.9-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2293b001e319ab0d869d660a704942c9e2cce19745262a8aba2115ef41a0a42a"}, {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03ef7df18daf2c4c07e2695e8cfd5ee7f748a1d54d802330985a78d2a5a6dca9"}, {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a602ea5aff39bb9fac6308e9c9d82b9a35c2bf288e184a816002c9fae930b77"}, @@ -4368,13 +4389,13 @@ files = [ [[package]] name = "pure-eval" -version = "0.2.2" +version = "0.2.3" description = "Safely evaluate AST nodes without side effects" optional = false python-versions = "*" files = [ - {file = "pure_eval-0.2.2-py3-none-any.whl", hash = "sha256:01eaab343580944bc56080ebe0a674b39ec44a945e6d09ba7db3cb8cec289350"}, - {file = "pure_eval-0.2.2.tar.gz", hash = "sha256:2b45320af6dfaa1750f543d714b6d1c520a1688dec6fd24d339063ce0aaa9ac3"}, + {file = "pure_eval-0.2.3-py3-none-any.whl", hash = "sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0"}, + {file = "pure_eval-0.2.3.tar.gz", hash = "sha256:5f4e983f40564c576c7c8635ae88db5956bb2229d7e9237d03b3c0b0190eaf42"}, ] [package.extras] @@ -4393,13 +4414,13 @@ files = [ [[package]] name = "pycodestyle" -version = "2.12.0" +version = "2.12.1" description = "Python style guide checker" optional = false python-versions = ">=3.8" files = [ - {file = "pycodestyle-2.12.0-py2.py3-none-any.whl", hash = "sha256:949a39f6b86c3e1515ba1787c2022131d165a8ad271b11370a8819aa070269e4"}, - {file = "pycodestyle-2.12.0.tar.gz", hash = "sha256:442f950141b4f43df752dd303511ffded3a04c2b6fb7f65980574f0c31e6e79c"}, + {file = "pycodestyle-2.12.1-py2.py3-none-any.whl", hash = "sha256:46f0fb92069a7c28ab7bb558f05bfc0110dac69a0cd23c61ea0040283a9d78b3"}, + {file = "pycodestyle-2.12.1.tar.gz", hash = "sha256:6838eae08bbce4f6accd5d5572075c63626a15ee3e6f842df996bf62f6d73521"}, ] [[package]] @@ -4468,13 +4489,13 @@ windows-terminal = ["colorama (>=0.4.6)"] [[package]] name = "pyjwt" -version = "2.8.0" +version = "2.9.0" description = "JSON Web Token implementation in Python" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "PyJWT-2.8.0-py3-none-any.whl", hash = "sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320"}, - {file = "PyJWT-2.8.0.tar.gz", hash = "sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de"}, + {file = "PyJWT-2.9.0-py3-none-any.whl", hash = "sha256:3b02fb0f44517787776cf48f2ae25d8e14f300e6d7545a4315cee571a415e850"}, + {file = "pyjwt-2.9.0.tar.gz", hash = "sha256:7e1e5b56cc735432a7369cbfa0efe50fa113ebecdc04ae6922deba8b84582d0c"}, ] [package.dependencies] @@ -4482,73 +4503,73 @@ cryptography = {version = ">=3.4.0", optional = true, markers = "extra == \"cryp [package.extras] crypto = ["cryptography 
(>=3.4.0)"] -dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] -docs = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] +dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx", "sphinx-rtd-theme", "zope.interface"] +docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"] tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] [[package]] name = "pymupdf" -version = "1.24.7" +version = "1.24.9" description = "A high performance Python library for data extraction, analysis, conversion & manipulation of PDF (and other) documents." optional = false python-versions = ">=3.8" files = [ - {file = "PyMuPDF-1.24.7-cp310-none-macosx_10_9_x86_64.whl", hash = "sha256:9b6984d57e127e016231b2e89b247f2b0fef07af84d4cc91e4893c6d3fde52c7"}, - {file = "PyMuPDF-1.24.7-cp310-none-macosx_11_0_arm64.whl", hash = "sha256:7b8405aac8011b445ceb1bc2d4f4c44b2c969024b301f1178399e0084541d747"}, - {file = "PyMuPDF-1.24.7-cp310-none-manylinux2014_aarch64.whl", hash = "sha256:ada392c739f43608611d430305c92d1b8922f66a07bc36a4ea0c7f3f9c1b5dd2"}, - {file = "PyMuPDF-1.24.7-cp310-none-manylinux2014_x86_64.whl", hash = "sha256:7a59e24873e6d135f9c07be9f47d98502171210957819c8ffa9a7cca1cac1fb8"}, - {file = "PyMuPDF-1.24.7-cp310-none-musllinux_1_2_x86_64.whl", hash = "sha256:bdd9a1b703e3fedd9836d54a13b89ddf772a6eaea3b1e34dcd682b8f0b5b7123"}, - {file = "PyMuPDF-1.24.7-cp310-none-win32.whl", hash = "sha256:3a2e8af6e2ef437c4e599c7e520299ea10ef02680be0a8533f78d6ee489c5a60"}, - {file = "PyMuPDF-1.24.7-cp310-none-win_amd64.whl", hash = "sha256:8293773e973cf07f5d6398699d5d98151a025c9db04a333fd55a87c8f8c3c74b"}, - {file = "PyMuPDF-1.24.7-cp311-none-macosx_10_9_x86_64.whl", hash = "sha256:11770619a1d5b90f5f81cc22c11967b2f473310fc9a8f2aa96c1aabff5971c63"}, - {file = "PyMuPDF-1.24.7-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:5e9b6018b3af45c6ca04adb0441369eefc0d66e5023f8d54befde9bb4f078c5a"}, - {file = "PyMuPDF-1.24.7-cp311-none-manylinux2014_aarch64.whl", hash = "sha256:40743cbfa40a35e873d4b8642709f45849b3315292957d836be647cdd74d1107"}, - {file = "PyMuPDF-1.24.7-cp311-none-manylinux2014_x86_64.whl", hash = "sha256:44f4c58040c496fcfc0719df17dab60348c7988d874c5e17233fa664dcf0a984"}, - {file = "PyMuPDF-1.24.7-cp311-none-musllinux_1_2_x86_64.whl", hash = "sha256:0d7e8d2155bdc6e61242bdbf5dff2f6d0118f1a3f9e5d83db334a5cf71c90c5f"}, - {file = "PyMuPDF-1.24.7-cp311-none-win32.whl", hash = "sha256:3ea0bb27512e72225822d98b6747b51b02de84cc1e17a5579d90ad957760de03"}, - {file = "PyMuPDF-1.24.7-cp311-none-win_amd64.whl", hash = "sha256:785c8a40fd5d76b47759f4ade6db0b78a799bb1fb2dd61bbe59784538299ee21"}, - {file = "PyMuPDF-1.24.7-cp312-none-macosx_10_9_x86_64.whl", hash = "sha256:999f40a6ead5aaad0c9e23b3c2ed009080bf51be9485ae122ff2cedd6487874b"}, - {file = "PyMuPDF-1.24.7-cp312-none-macosx_11_0_arm64.whl", hash = "sha256:03facd26907dcbba63429e8b56fdbabd717f22fb6d24b38bdefc591b309ce437"}, - {file = "PyMuPDF-1.24.7-cp312-none-manylinux2014_aarch64.whl", hash = "sha256:7ec6da974092de0f842f9ef0e355c58a5529c2d5ec792709d2c6fec5ffcd80b1"}, - {file = "PyMuPDF-1.24.7-cp312-none-manylinux2014_x86_64.whl", hash = "sha256:89ccd758b5b7d1e05d615e85fb5d358ff1994ff70aaa5df4f0724bd414847949"}, - {file = "PyMuPDF-1.24.7-cp312-none-musllinux_1_2_x86_64.whl", hash = "sha256:8873253ffedb7bcc8dcafa9bfe8f1efa39ac91732ccc579e1c1746eeff4ed83a"}, - {file = 
"PyMuPDF-1.24.7-cp312-none-win32.whl", hash = "sha256:5341440bfc6c89f025990d181896b74097f6b145f5e563e3d5e5fa944c79aae6"}, - {file = "PyMuPDF-1.24.7-cp312-none-win_amd64.whl", hash = "sha256:a906b57dae8f3870663f3d3a1d4a66b67cc8f5cf644bcbdd175a391f5f74f2ef"}, - {file = "PyMuPDF-1.24.7-cp38-none-macosx_10_9_x86_64.whl", hash = "sha256:b18630fead76175059b973230cc175c4ed30e9f4ea03d25963d2c74440bdb78b"}, - {file = "PyMuPDF-1.24.7-cp38-none-macosx_11_0_arm64.whl", hash = "sha256:a94d556b7ea25e6195c46dc2c4114d30e8e51233a49f0ed589f8e0190d2cc3b5"}, - {file = "PyMuPDF-1.24.7-cp38-none-manylinux2014_aarch64.whl", hash = "sha256:da82dc784e52b3635110995a75189cdef71b02d9a3a2635cd1d216133d547519"}, - {file = "PyMuPDF-1.24.7-cp38-none-manylinux2014_x86_64.whl", hash = "sha256:d95c0cc4768ce09d06c31280cda6fc7619026e615d33beee2dbd1c74ea3a1e08"}, - {file = "PyMuPDF-1.24.7-cp38-none-musllinux_1_2_x86_64.whl", hash = "sha256:5c7c964e4b3dfbcd7a0ec11f44090002967c9652cf1dbaf5e09de333c479eae3"}, - {file = "PyMuPDF-1.24.7-cp38-none-win32.whl", hash = "sha256:5812645087eaf537efb168df1a78b40b76d6b01c694b890d31e362fd007926fe"}, - {file = "PyMuPDF-1.24.7-cp38-none-win_amd64.whl", hash = "sha256:b11065d5192bdc6e80046505ed68447276d3698ec1d0a3eea8dc2533f972f8fb"}, - {file = "PyMuPDF-1.24.7-cp39-none-macosx_10_9_x86_64.whl", hash = "sha256:102d3427fcc3a47084eb2faa670de8a58e5d2061b4e61365b323d12a7bac0afd"}, - {file = "PyMuPDF-1.24.7-cp39-none-macosx_11_0_arm64.whl", hash = "sha256:5580c975a166a3fb1908fb055c0427bcd23abe7a7d3113777d15c0e5f8a2133b"}, - {file = "PyMuPDF-1.24.7-cp39-none-manylinux2014_aarch64.whl", hash = "sha256:1553df16e6e141314fa33534800aee2bc4eab1daa8057bf791584b318aa114f6"}, - {file = "PyMuPDF-1.24.7-cp39-none-manylinux2014_x86_64.whl", hash = "sha256:53cedffedc7ec5c019f8b06d0e18b1f410f4af15e0c122330b79d5e617739042"}, - {file = "PyMuPDF-1.24.7-cp39-none-musllinux_1_2_x86_64.whl", hash = "sha256:371b983ba2b396a636518e2cfaa76ea8cccd38c9bdbc942256df678e421f7063"}, - {file = "PyMuPDF-1.24.7-cp39-none-win32.whl", hash = "sha256:0a93d0f5c5b5cdc2f1718dbdf4b1c34aafc68ee4e44688b58c0a2894277ca941"}, - {file = "PyMuPDF-1.24.7-cp39-none-win_amd64.whl", hash = "sha256:f82db6fd757fb3698075e2e68e4f6df7ae89b82a96ae102e178e5b01a250359b"}, - {file = "PyMuPDF-1.24.7.tar.gz", hash = "sha256:a34ceae204f215bad51f49dd43987116c6a6269fc03d8770224f7067013b59b8"}, -] - -[package.dependencies] -PyMuPDFb = "1.24.6" + {file = "PyMuPDF-1.24.9-cp310-none-macosx_10_9_x86_64.whl", hash = "sha256:da5d9699472bfd1de52975de3eb7efaf5190ac5801b9fc6bcccde603afbe6937"}, + {file = "PyMuPDF-1.24.9-cp310-none-macosx_11_0_arm64.whl", hash = "sha256:3d1133983c7ac388a35bbab8dfc4c26a874c05edc47d2038961add2efa4639a8"}, + {file = "PyMuPDF-1.24.9-cp310-none-manylinux2014_aarch64.whl", hash = "sha256:94f2796a3dd1f0735d0717eb020d7c3c7313eaae8c9c1040022408c880931616"}, + {file = "PyMuPDF-1.24.9-cp310-none-manylinux2014_x86_64.whl", hash = "sha256:5199567353d1543e6c21c626148f8ac9ebb14ce553f2c434fcb9b00e195e1e52"}, + {file = "PyMuPDF-1.24.9-cp310-none-musllinux_1_2_x86_64.whl", hash = "sha256:c97f0b2fb201c9d9bc0f15a901641174e8896a9ae9fbe0d5bb1a6f2315cc3ced"}, + {file = "PyMuPDF-1.24.9-cp310-none-win32.whl", hash = "sha256:00499b864a56a2168254dce3d0f12048b96e9b3bdd43fecace18a1572342c8d4"}, + {file = "PyMuPDF-1.24.9-cp310-none-win_amd64.whl", hash = "sha256:f074e501e883428e7d5480f732ea6a6bd17146f10ebefb9b84957fd32b79f0d4"}, + {file = "PyMuPDF-1.24.9-cp311-none-macosx_10_9_x86_64.whl", hash = "sha256:caf43ce86790f95049a5849f2802b5c412b865cd368ece89a39a54fc84aa45cd"}, 
+ {file = "PyMuPDF-1.24.9-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:13d06161176e1d4e337f5b5e053b628e4531bab5effb269a83dc38d4deb8e659"}, + {file = "PyMuPDF-1.24.9-cp311-none-manylinux2014_aarch64.whl", hash = "sha256:7ab228dfb80002eb8612ffe71b50052d8b20d9364a3535e2fe43a0901ce41d40"}, + {file = "PyMuPDF-1.24.9-cp311-none-manylinux2014_x86_64.whl", hash = "sha256:042ad205c7ef615d9fbab7078f6fa8d14f020ed2dfe3a79d803b6171318565b5"}, + {file = "PyMuPDF-1.24.9-cp311-none-musllinux_1_2_x86_64.whl", hash = "sha256:b4495833bb0300fc885491928f2cbdf96afb569205dcc256bb4c43e3d1fde7cb"}, + {file = "PyMuPDF-1.24.9-cp311-none-win32.whl", hash = "sha256:e53370f3679a7b013c2abb801bb566882dab1fb59646d4b0a717ee0d350c5ab1"}, + {file = "PyMuPDF-1.24.9-cp311-none-win_amd64.whl", hash = "sha256:454932e9c7b9cd3057ee83dfe805f551a1382b9e216e87a32eb44c6d6843f966"}, + {file = "PyMuPDF-1.24.9-cp312-none-macosx_10_9_x86_64.whl", hash = "sha256:93cc4908259f133c9dc88f5e77329c4b2dbc03fca83126b1efffedb67ade0fb9"}, + {file = "PyMuPDF-1.24.9-cp312-none-macosx_11_0_arm64.whl", hash = "sha256:84e1516d4b3e40711b9a6dbaedd30e0a89d6a054ca408a56114ceb5a1461f0d1"}, + {file = "PyMuPDF-1.24.9-cp312-none-manylinux2014_aarch64.whl", hash = "sha256:d7cdddce8d214e65ed483a8a403da49984815e543c3ce4b539306570c4cfc453"}, + {file = "PyMuPDF-1.24.9-cp312-none-manylinux2014_x86_64.whl", hash = "sha256:de8b330900c194efeedeb97adab25520479d101fc9aed50d7323dde08698ae24"}, + {file = "PyMuPDF-1.24.9-cp312-none-musllinux_1_2_x86_64.whl", hash = "sha256:41c92d69993e7614730205b75d7999b21ca0f929d31b2bb86a4b58d3b1b0451a"}, + {file = "PyMuPDF-1.24.9-cp312-none-win32.whl", hash = "sha256:a04af6f3f5f35cb62bc7b3c2e9cfff510aa56c39c53355ecfff40b7cb9773fef"}, + {file = "PyMuPDF-1.24.9-cp312-none-win_amd64.whl", hash = "sha256:e2828a79415ae3dd90c629697ace51db7f1e81f426fc2fc034c2151dbe58be6e"}, + {file = "PyMuPDF-1.24.9-cp38-none-macosx_10_9_x86_64.whl", hash = "sha256:241913d0c76aacb05acdd8a0e82b1105883ffe6ef3bb4d9742b41d3c5e84d2db"}, + {file = "PyMuPDF-1.24.9-cp38-none-macosx_11_0_arm64.whl", hash = "sha256:ff70e26625b6cdd036e2c63b5d6c1897949c0e8b205cd756276f27baadaad340"}, + {file = "PyMuPDF-1.24.9-cp38-none-manylinux2014_aarch64.whl", hash = "sha256:8e29bc817afad511072371f24624c7c3b7485a9e656b6a65dc58fecdf5043b08"}, + {file = "PyMuPDF-1.24.9-cp38-none-manylinux2014_x86_64.whl", hash = "sha256:d17ec6920f91c43b6e777a017f3aaf44b205a3216771db9e8aa46e78a703f8f6"}, + {file = "PyMuPDF-1.24.9-cp38-none-musllinux_1_2_x86_64.whl", hash = "sha256:5cec9d17fdcbd83fa2c90190c22f652a0a51275cf75a29068eea025fff076829"}, + {file = "PyMuPDF-1.24.9-cp38-none-win32.whl", hash = "sha256:4f7b19f5c0026db49b7be17901728ed15761c5aa2031f04b01f9eb2e54f1b50e"}, + {file = "PyMuPDF-1.24.9-cp38-none-win_amd64.whl", hash = "sha256:e4c867f1cde68ff0e9c7889ea27c4c2c67df80e776f82619888bb69d1e1b27cf"}, + {file = "PyMuPDF-1.24.9-cp39-none-macosx_10_9_x86_64.whl", hash = "sha256:b4f85c24050e3778be6c7c1f4d4965fd4385281264798df7b4301b78895053fd"}, + {file = "PyMuPDF-1.24.9-cp39-none-macosx_11_0_arm64.whl", hash = "sha256:4e807010ef4e63cfb70dd88fe1fcd1d7e2b4e62ffa2b1dc53b35bc18bf939d8e"}, + {file = "PyMuPDF-1.24.9-cp39-none-manylinux2014_aarch64.whl", hash = "sha256:5dac888cc16981e385c886c26de6aabf914059215e028d14cd67767ff0c1288c"}, + {file = "PyMuPDF-1.24.9-cp39-none-manylinux2014_x86_64.whl", hash = "sha256:de55817c02e06ff75233ce2487cc5ebcbf585acd694bb69500825ee37789ac79"}, + {file = "PyMuPDF-1.24.9-cp39-none-musllinux_1_2_x86_64.whl", hash = 
"sha256:49cb22196f11c2327f6345554db48cfb2e31ed4f073ca6a872f21ddc4b0619c1"}, + {file = "PyMuPDF-1.24.9-cp39-none-win32.whl", hash = "sha256:46b1f84816c666e1c82f4249c1e815e92c462633255d72da20751eaad125d0f0"}, + {file = "PyMuPDF-1.24.9-cp39-none-win_amd64.whl", hash = "sha256:4fa45474d63715c707e3c3a6ebeeee75fd7aaa180512b75863e437f6876dfa86"}, + {file = "PyMuPDF-1.24.9.tar.gz", hash = "sha256:3692a5e824f10dc09bbddabab207f7cd5979831e48dd2f4de1be21e441767473"}, +] + +[package.dependencies] +PyMuPDFb = "1.24.9" [[package]] name = "pymupdfb" -version = "1.24.6" +version = "1.24.9" description = "MuPDF shared libraries for PyMuPDF." optional = false python-versions = ">=3.8" files = [ - {file = "PyMuPDFb-1.24.6-py3-none-macosx_10_9_x86_64.whl", hash = "sha256:21e3ed890f736def68b9a031122ae1fb854d5cb9a53aa144b6e2ca3092416a6b"}, - {file = "PyMuPDFb-1.24.6-py3-none-macosx_11_0_arm64.whl", hash = "sha256:8704d2dfadc9448ce184597d8b0f9c30143e379ac948a517f9c4db7c0c71ed51"}, - {file = "PyMuPDFb-1.24.6-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:01662584d5cfa7a91f77585f13fc23a12291cfd76a57e0a28dd5a56bf521cb2c"}, - {file = "PyMuPDFb-1.24.6-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e1f7657353529ae3f88575c83ee49eac9adea311a034b9c97248a65cee7df0e5"}, - {file = "PyMuPDFb-1.24.6-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:cebc2cedb870d1e1168e2f502eb06f05938f6df69103b0853a2b329611ec19a7"}, - {file = "PyMuPDFb-1.24.6-py3-none-win32.whl", hash = "sha256:ac4b865cd1e239db04674f85e02844a0e405f8255ee7a74dfee0d86aad0d3576"}, - {file = "PyMuPDFb-1.24.6-py3-none-win_amd64.whl", hash = "sha256:9224e088a0d3c188dea03831807789e245b812fbd071c8d498da8f7cc33142b2"}, - {file = "PyMuPDFb-1.24.6.tar.gz", hash = "sha256:f5a40b1732d65a1e519916d698858b9ce7473e23edf9001ddd085c5293d59d30"}, + {file = "PyMuPDFb-1.24.9-py3-none-macosx_10_9_x86_64.whl", hash = "sha256:3c9e694b1fb1bde37a8d3c953fbd0916e7dee8a4650142547d4f832105b17689"}, + {file = "PyMuPDFb-1.24.9-py3-none-macosx_11_0_arm64.whl", hash = "sha256:3fd74ee7969712ab457495465da0a61aab44d8cf9b71b9ef51910a8c6a90ad57"}, + {file = "PyMuPDFb-1.24.9-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:eb5b38f588963a239a8c0bca99d3d912f0c04674e3c6e7199e44cebd22840061"}, + {file = "PyMuPDFb-1.24.9-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:198f6b3713b6f980fa96c1099be0d5459c7d43c593299948f0ba528577e6bf46"}, + {file = "PyMuPDFb-1.24.9-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:ae044ebc8299f5a3ba822a6dfe97285dffd6c66cba194bc39180aa189a2755c9"}, + {file = "PyMuPDFb-1.24.9-py3-none-win32.whl", hash = "sha256:20ea17fd5799dcf7813ec099c0ce303f763e6e4ba8d0f54d5f84e4df90c3a340"}, + {file = "PyMuPDFb-1.24.9-py3-none-win_amd64.whl", hash = "sha256:c6b8adc0b9c91ff0f657440a816ad2130429a808cd53ff273f3e72532e526bdc"}, + {file = "PyMuPDFb-1.24.9.tar.gz", hash = "sha256:5505f07b3dded6e791ab7d10d01f0687e913fc75edd23fdf2825a582b6651558"}, ] [[package]] @@ -4613,6 +4634,21 @@ tomli = ">=1.0.0" [package.extras] testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] +[[package]] +name = "pytest-black" +version = "0.3.12" +description = "A pytest plugin to enable format checking with black" +optional = false +python-versions = ">=2.7" +files = [ + {file = "pytest-black-0.3.12.tar.gz", hash = "sha256:1d339b004f764d6cd0f06e690f6dd748df3d62e6fe1a692d6a5500ac2c5b75a5"}, +] + +[package.dependencies] +black = {version = "*", markers = 
"python_version >= \"3.6\""} +pytest = ">=3.5.0" +toml = "*" + [[package]] name = "pytest-black-ng" version = "0.4.1" @@ -4855,158 +4891,182 @@ files = [ [[package]] name = "pyyaml" -version = "6.0.1" +version = "6.0.2" description = "YAML parser and emitter for Python" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, - {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, - {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, - {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, - {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, - {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = 
"sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, - {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, - {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, - {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, - {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, - {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, - 
{file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, - {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, - {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, - {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, + {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, + {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, + {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, + {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, + {file = 
"PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, + {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, + {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, + {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, + {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, + {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, + {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, + {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, + {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = 
"sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, + {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, + {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, + {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, ] [[package]] name = "pyzmq" -version = "26.0.3" +version = "26.1.1" description = "Python bindings for 0MQ" optional = false python-versions = ">=3.7" files = [ - {file = "pyzmq-26.0.3-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:44dd6fc3034f1eaa72ece33588867df9e006a7303725a12d64c3dff92330f625"}, - {file = "pyzmq-26.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:acb704195a71ac5ea5ecf2811c9ee19ecdc62b91878528302dd0be1b9451cc90"}, - {file = "pyzmq-26.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5dbb9c997932473a27afa93954bb77a9f9b786b4ccf718d903f35da3232317de"}, - {file = "pyzmq-26.0.3-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6bcb34f869d431799c3ee7d516554797f7760cb2198ecaa89c3f176f72d062be"}, - {file = "pyzmq-26.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38ece17ec5f20d7d9b442e5174ae9f020365d01ba7c112205a4d59cf19dc38ee"}, - {file = "pyzmq-26.0.3-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:ba6e5e6588e49139a0979d03a7deb9c734bde647b9a8808f26acf9c547cab1bf"}, - {file = "pyzmq-26.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3bf8b000a4e2967e6dfdd8656cd0757d18c7e5ce3d16339e550bd462f4857e59"}, - {file = "pyzmq-26.0.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:2136f64fbb86451dbbf70223635a468272dd20075f988a102bf8a3f194a411dc"}, - {file = "pyzmq-26.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e8918973fbd34e7814f59143c5f600ecd38b8038161239fd1a3d33d5817a38b8"}, - {file = "pyzmq-26.0.3-cp310-cp310-win32.whl", hash = "sha256:0aaf982e68a7ac284377d051c742610220fd06d330dcd4c4dbb4cdd77c22a537"}, - {file = "pyzmq-26.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:f1a9b7d00fdf60b4039f4455afd031fe85ee8305b019334b72dcf73c567edc47"}, - {file = "pyzmq-26.0.3-cp310-cp310-win_arm64.whl", hash = "sha256:80b12f25d805a919d53efc0a5ad7c0c0326f13b4eae981a5d7b7cc343318ebb7"}, - {file = "pyzmq-26.0.3-cp311-cp311-macosx_10_15_universal2.whl", hash = 
"sha256:a72a84570f84c374b4c287183debc776dc319d3e8ce6b6a0041ce2e400de3f32"}, - {file = "pyzmq-26.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7ca684ee649b55fd8f378127ac8462fb6c85f251c2fb027eb3c887e8ee347bcd"}, - {file = "pyzmq-26.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e222562dc0f38571c8b1ffdae9d7adb866363134299264a1958d077800b193b7"}, - {file = "pyzmq-26.0.3-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f17cde1db0754c35a91ac00b22b25c11da6eec5746431d6e5092f0cd31a3fea9"}, - {file = "pyzmq-26.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b7c0c0b3244bb2275abe255d4a30c050d541c6cb18b870975553f1fb6f37527"}, - {file = "pyzmq-26.0.3-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:ac97a21de3712afe6a6c071abfad40a6224fd14fa6ff0ff8d0c6e6cd4e2f807a"}, - {file = "pyzmq-26.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:88b88282e55fa39dd556d7fc04160bcf39dea015f78e0cecec8ff4f06c1fc2b5"}, - {file = "pyzmq-26.0.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:72b67f966b57dbd18dcc7efbc1c7fc9f5f983e572db1877081f075004614fcdd"}, - {file = "pyzmq-26.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f4b6cecbbf3b7380f3b61de3a7b93cb721125dc125c854c14ddc91225ba52f83"}, - {file = "pyzmq-26.0.3-cp311-cp311-win32.whl", hash = "sha256:eed56b6a39216d31ff8cd2f1d048b5bf1700e4b32a01b14379c3b6dde9ce3aa3"}, - {file = "pyzmq-26.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:3191d312c73e3cfd0f0afdf51df8405aafeb0bad71e7ed8f68b24b63c4f36500"}, - {file = "pyzmq-26.0.3-cp311-cp311-win_arm64.whl", hash = "sha256:b6907da3017ef55139cf0e417c5123a84c7332520e73a6902ff1f79046cd3b94"}, - {file = "pyzmq-26.0.3-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:068ca17214038ae986d68f4a7021f97e187ed278ab6dccb79f837d765a54d753"}, - {file = "pyzmq-26.0.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:7821d44fe07335bea256b9f1f41474a642ca55fa671dfd9f00af8d68a920c2d4"}, - {file = "pyzmq-26.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eeb438a26d87c123bb318e5f2b3d86a36060b01f22fbdffd8cf247d52f7c9a2b"}, - {file = "pyzmq-26.0.3-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:69ea9d6d9baa25a4dc9cef5e2b77b8537827b122214f210dd925132e34ae9b12"}, - {file = "pyzmq-26.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7daa3e1369355766dea11f1d8ef829905c3b9da886ea3152788dc25ee6079e02"}, - {file = "pyzmq-26.0.3-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:6ca7a9a06b52d0e38ccf6bca1aeff7be178917893f3883f37b75589d42c4ac20"}, - {file = "pyzmq-26.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1b7d0e124948daa4d9686d421ef5087c0516bc6179fdcf8828b8444f8e461a77"}, - {file = "pyzmq-26.0.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:e746524418b70f38550f2190eeee834db8850088c834d4c8406fbb9bc1ae10b2"}, - {file = "pyzmq-26.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:6b3146f9ae6af82c47a5282ac8803523d381b3b21caeae0327ed2f7ecb718798"}, - {file = "pyzmq-26.0.3-cp312-cp312-win32.whl", hash = "sha256:2b291d1230845871c00c8462c50565a9cd6026fe1228e77ca934470bb7d70ea0"}, - {file = "pyzmq-26.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:926838a535c2c1ea21c903f909a9a54e675c2126728c21381a94ddf37c3cbddf"}, - {file = "pyzmq-26.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:5bf6c237f8c681dfb91b17f8435b2735951f0d1fad10cc5dfd96db110243370b"}, - {file = "pyzmq-26.0.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = 
"sha256:0c0991f5a96a8e620f7691e61178cd8f457b49e17b7d9cfa2067e2a0a89fc1d5"}, - {file = "pyzmq-26.0.3-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:dbf012d8fcb9f2cf0643b65df3b355fdd74fc0035d70bb5c845e9e30a3a4654b"}, - {file = "pyzmq-26.0.3-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:01fbfbeb8249a68d257f601deb50c70c929dc2dfe683b754659569e502fbd3aa"}, - {file = "pyzmq-26.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c8eb19abe87029c18f226d42b8a2c9efdd139d08f8bf6e085dd9075446db450"}, - {file = "pyzmq-26.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:5344b896e79800af86ad643408ca9aa303a017f6ebff8cee5a3163c1e9aec987"}, - {file = "pyzmq-26.0.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:204e0f176fd1d067671157d049466869b3ae1fc51e354708b0dc41cf94e23a3a"}, - {file = "pyzmq-26.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a42db008d58530efa3b881eeee4991146de0b790e095f7ae43ba5cc612decbc5"}, - {file = "pyzmq-26.0.3-cp37-cp37m-win32.whl", hash = "sha256:8d7a498671ca87e32b54cb47c82a92b40130a26c5197d392720a1bce1b3c77cf"}, - {file = "pyzmq-26.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:3b4032a96410bdc760061b14ed6a33613ffb7f702181ba999df5d16fb96ba16a"}, - {file = "pyzmq-26.0.3-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:2cc4e280098c1b192c42a849de8de2c8e0f3a84086a76ec5b07bfee29bda7d18"}, - {file = "pyzmq-26.0.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5bde86a2ed3ce587fa2b207424ce15b9a83a9fa14422dcc1c5356a13aed3df9d"}, - {file = "pyzmq-26.0.3-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:34106f68e20e6ff253c9f596ea50397dbd8699828d55e8fa18bd4323d8d966e6"}, - {file = "pyzmq-26.0.3-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ebbbd0e728af5db9b04e56389e2299a57ea8b9dd15c9759153ee2455b32be6ad"}, - {file = "pyzmq-26.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f6b1d1c631e5940cac5a0b22c5379c86e8df6a4ec277c7a856b714021ab6cfad"}, - {file = "pyzmq-26.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e891ce81edd463b3b4c3b885c5603c00141151dd9c6936d98a680c8c72fe5c67"}, - {file = "pyzmq-26.0.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:9b273ecfbc590a1b98f014ae41e5cf723932f3b53ba9367cfb676f838038b32c"}, - {file = "pyzmq-26.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b32bff85fb02a75ea0b68f21e2412255b5731f3f389ed9aecc13a6752f58ac97"}, - {file = "pyzmq-26.0.3-cp38-cp38-win32.whl", hash = "sha256:f6c21c00478a7bea93caaaef9e7629145d4153b15a8653e8bb4609d4bc70dbfc"}, - {file = "pyzmq-26.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:3401613148d93ef0fd9aabdbddb212de3db7a4475367f49f590c837355343972"}, - {file = "pyzmq-26.0.3-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:2ed8357f4c6e0daa4f3baf31832df8a33334e0fe5b020a61bc8b345a3db7a606"}, - {file = "pyzmq-26.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c1c8f2a2ca45292084c75bb6d3a25545cff0ed931ed228d3a1810ae3758f975f"}, - {file = "pyzmq-26.0.3-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:b63731993cdddcc8e087c64e9cf003f909262b359110070183d7f3025d1c56b5"}, - {file = "pyzmq-26.0.3-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b3cd31f859b662ac5d7f4226ec7d8bd60384fa037fc02aee6ff0b53ba29a3ba8"}, - {file = "pyzmq-26.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:115f8359402fa527cf47708d6f8a0f8234f0e9ca0cab7c18c9c189c194dbf620"}, - {file = 
"pyzmq-26.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:715bdf952b9533ba13dfcf1f431a8f49e63cecc31d91d007bc1deb914f47d0e4"}, - {file = "pyzmq-26.0.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:e1258c639e00bf5e8a522fec6c3eaa3e30cf1c23a2f21a586be7e04d50c9acab"}, - {file = "pyzmq-26.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:15c59e780be8f30a60816a9adab900c12a58d79c1ac742b4a8df044ab2a6d920"}, - {file = "pyzmq-26.0.3-cp39-cp39-win32.whl", hash = "sha256:d0cdde3c78d8ab5b46595054e5def32a755fc028685add5ddc7403e9f6de9879"}, - {file = "pyzmq-26.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:ce828058d482ef860746bf532822842e0ff484e27f540ef5c813d516dd8896d2"}, - {file = "pyzmq-26.0.3-cp39-cp39-win_arm64.whl", hash = "sha256:788f15721c64109cf720791714dc14afd0f449d63f3a5487724f024345067381"}, - {file = "pyzmq-26.0.3-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2c18645ef6294d99b256806e34653e86236eb266278c8ec8112622b61db255de"}, - {file = "pyzmq-26.0.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7e6bc96ebe49604df3ec2c6389cc3876cabe475e6bfc84ced1bf4e630662cb35"}, - {file = "pyzmq-26.0.3-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:971e8990c5cc4ddcff26e149398fc7b0f6a042306e82500f5e8db3b10ce69f84"}, - {file = "pyzmq-26.0.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8416c23161abd94cc7da80c734ad7c9f5dbebdadfdaa77dad78244457448223"}, - {file = "pyzmq-26.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:082a2988364b60bb5de809373098361cf1dbb239623e39e46cb18bc035ed9c0c"}, - {file = "pyzmq-26.0.3-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d57dfbf9737763b3a60d26e6800e02e04284926329aee8fb01049635e957fe81"}, - {file = "pyzmq-26.0.3-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:77a85dca4c2430ac04dc2a2185c2deb3858a34fe7f403d0a946fa56970cf60a1"}, - {file = "pyzmq-26.0.3-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4c82a6d952a1d555bf4be42b6532927d2a5686dd3c3e280e5f63225ab47ac1f5"}, - {file = "pyzmq-26.0.3-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4496b1282c70c442809fc1b151977c3d967bfb33e4e17cedbf226d97de18f709"}, - {file = "pyzmq-26.0.3-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:e4946d6bdb7ba972dfda282f9127e5756d4f299028b1566d1245fa0d438847e6"}, - {file = "pyzmq-26.0.3-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:03c0ae165e700364b266876d712acb1ac02693acd920afa67da2ebb91a0b3c09"}, - {file = "pyzmq-26.0.3-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:3e3070e680f79887d60feeda051a58d0ac36622e1759f305a41059eff62c6da7"}, - {file = "pyzmq-26.0.3-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6ca08b840fe95d1c2bd9ab92dac5685f949fc6f9ae820ec16193e5ddf603c3b2"}, - {file = "pyzmq-26.0.3-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e76654e9dbfb835b3518f9938e565c7806976c07b37c33526b574cc1a1050480"}, - {file = "pyzmq-26.0.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:871587bdadd1075b112e697173e946a07d722459d20716ceb3d1bd6c64bd08ce"}, - {file = "pyzmq-26.0.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d0a2d1bd63a4ad79483049b26514e70fa618ce6115220da9efdff63688808b17"}, - {file = "pyzmq-26.0.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:0270b49b6847f0d106d64b5086e9ad5dc8a902413b5dbbb15d12b60f9c1747a4"}, - {file = "pyzmq-26.0.3-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:703c60b9910488d3d0954ca585c34f541e506a091a41930e663a098d3b794c67"}, - {file = "pyzmq-26.0.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:74423631b6be371edfbf7eabb02ab995c2563fee60a80a30829176842e71722a"}, - {file = "pyzmq-26.0.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4adfbb5451196842a88fda3612e2c0414134874bffb1c2ce83ab4242ec9e027d"}, - {file = "pyzmq-26.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:3516119f4f9b8671083a70b6afaa0a070f5683e431ab3dc26e9215620d7ca1ad"}, - {file = "pyzmq-26.0.3.tar.gz", hash = "sha256:dba7d9f2e047dfa2bca3b01f4f84aa5246725203d6284e3790f2ca15fba6b40a"}, + {file = "pyzmq-26.1.1-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:b1bb952d1e407463c9333ea7e0c0600001e54e08ce836d4f0aff1fb3f902cf63"}, + {file = "pyzmq-26.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:65e2a18e845c6ea7ab849c70db932eaeadee5edede9e379eb21c0a44cf523b2e"}, + {file = "pyzmq-26.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:def7ae3006924b8a0c146a89ab4008310913fa903beedb95e25dea749642528e"}, + {file = "pyzmq-26.1.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a8234571df7816f99dde89c3403cb396d70c6554120b795853a8ea56fcc26cd3"}, + {file = "pyzmq-26.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18da8e84dbc30688fd2baefd41df7190607511f916be34f9a24b0e007551822e"}, + {file = "pyzmq-26.1.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:c70dab93d98b2bf3f0ac1265edbf6e7f83acbf71dabcc4611889bb0dea45bed7"}, + {file = "pyzmq-26.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:fcb90592c5d5c562e1b1a1ceccf6f00036d73c51db0271bf4d352b8d6b31d468"}, + {file = "pyzmq-26.1.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:cf4be7460a0c1bc71e9b0e64ecdd75a86386ca6afaa36641686f5542d0314e9d"}, + {file = "pyzmq-26.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4cbecda4ddbfc1e309c3be04d333f9be3fc6178b8b6592b309676f929767a15"}, + {file = "pyzmq-26.1.1-cp310-cp310-win32.whl", hash = "sha256:583f73b113b8165713b6ce028d221402b1b69483055b5aa3f991937e34dd1ead"}, + {file = "pyzmq-26.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:5e6f39ecb8eb7bfcb976c49262e8cf83ff76e082b77ca23ba90c9b6691a345be"}, + {file = "pyzmq-26.1.1-cp310-cp310-win_arm64.whl", hash = "sha256:8d042d6446cab3a1388b38596f5acabb9926b0b95c3894c519356b577a549458"}, + {file = "pyzmq-26.1.1-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:362cac2423e36966d336d79d3ec3eafeabc153ee3e7a5cf580d7e74a34b3d912"}, + {file = "pyzmq-26.1.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0841633446cb1539a832a19bb24c03a20c00887d0cedd1d891b495b07e5c5cb5"}, + {file = "pyzmq-26.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e1fcdc333afbf9918d0a614a6e10858aede7da49a60f6705a77e343fe86a317"}, + {file = "pyzmq-26.1.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cc8d655627d775475eafdcf0e49e74bcc1e5e90afd9ab813b4da98f092ed7b93"}, + {file = "pyzmq-26.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32de51744820857a6f7c3077e620ab3f607d0e4388dfead885d5124ab9bcdc5e"}, + {file = "pyzmq-26.1.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:a880240597010914ffb1d6edd04d3deb7ce6a2abf79a0012751438d13630a671"}, + 
{file = "pyzmq-26.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:26131b1cec02f941ed2d2b4b8cc051662b1c248b044eff5069df1f500bbced56"}, + {file = "pyzmq-26.1.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:ce05841322b58510607f9508a573138d995a46c7928887bc433de9cb760fd2ad"}, + {file = "pyzmq-26.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:32123ff0a6db521aadf2b95201e967a4e0d11fb89f73663a99d2f54881c07214"}, + {file = "pyzmq-26.1.1-cp311-cp311-win32.whl", hash = "sha256:e790602d7ea1d6c7d8713d571226d67de7ffe47b1e22ae2c043ebd537de1bccb"}, + {file = "pyzmq-26.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:717960855f2d6fdc2dba9df49dff31c414187bb11c76af36343a57d1f7083d9a"}, + {file = "pyzmq-26.1.1-cp311-cp311-win_arm64.whl", hash = "sha256:08956c26dbcd4fd8835cb777a16e21958ed2412317630e19f0018d49dbeeb470"}, + {file = "pyzmq-26.1.1-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:e80345900ae241c2c51bead7c9fa247bba6d4b2a83423e9791bae8b0a7f12c52"}, + {file = "pyzmq-26.1.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ec8fe214fcc45dfb0c32e4a7ad1db20244ba2d2fecbf0cbf9d5242d81ca0a375"}, + {file = "pyzmq-26.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf4e283f97688d993cb7a8acbc22889effbbb7cbaa19ee9709751f44be928f5d"}, + {file = "pyzmq-26.1.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2508bdc8ab246e5ed7c92023d4352aaad63020ca3b098a4e3f1822db202f703d"}, + {file = "pyzmq-26.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:741bdb4d96efe8192616abdc3671931d51a8bcd38c71da2d53fb3127149265d1"}, + {file = "pyzmq-26.1.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:76154943e4c4054b2591792eb3484ef1dd23d59805759f9cebd2f010aa30ee8c"}, + {file = "pyzmq-26.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9498ac427d20d0e0ef0e4bbd6200841e91640dfdf619f544ceec7f464cfb6070"}, + {file = "pyzmq-26.1.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6f34453ef3496ca3462f30435bf85f535f9550392987341f9ccc92c102825a79"}, + {file = "pyzmq-26.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:50f0669324e27cc2091ef6ab76ca7112f364b6249691790b4cffce31e73fda28"}, + {file = "pyzmq-26.1.1-cp312-cp312-win32.whl", hash = "sha256:3ee5cbf2625b94de21c68d0cefd35327c8dfdbd6a98fcc41682b4e8bb00d841f"}, + {file = "pyzmq-26.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:75bd448a28b1001b6928679015bc95dd5f172703ed30135bb9e34fc9cda0a3e7"}, + {file = "pyzmq-26.1.1-cp312-cp312-win_arm64.whl", hash = "sha256:4350233569b4bbef88595c5e77ee38995a6f1f1790fae148b578941bfffd1c24"}, + {file = "pyzmq-26.1.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:6c8087a3281c20b1d11042d372ed5a47734af05975d78e4d1d6e7bd1018535f3"}, + {file = "pyzmq-26.1.1-cp313-cp313-macosx_10_15_universal2.whl", hash = "sha256:ebef7d3fe11fe4c688f08bc0211a976c3318c097057f258428200737b9fff4da"}, + {file = "pyzmq-26.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7a5342110510045a47de1e87f5f1dcc1d9d90109522316dc9830cfc6157c800f"}, + {file = "pyzmq-26.1.1-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:af690ea4be6ca92a67c2b44a779a023bf0838e92d48497a2268175dc4a505691"}, + {file = "pyzmq-26.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc994e220c1403ae087d7f0fa45129d583e46668a019e389060da811a5a9320e"}, + {file = "pyzmq-26.1.1-cp313-cp313-manylinux_2_28_x86_64.whl", hash = 
"sha256:b8e153f5dffb0310af71fc6fc9cd8174f4c8ea312c415adcb815d786fee78179"}, + {file = "pyzmq-26.1.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:0065026e624052a51033857e5cd45a94b52946b44533f965f0bdf182460e965d"}, + {file = "pyzmq-26.1.1-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:63351392f948b5d50b9f55161994bc4feedbfb3f3cfe393d2f503dea2c3ec445"}, + {file = "pyzmq-26.1.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ffecc43b3c18e36b62fcec995761829b6ac325d8dd74a4f2c5c1653afbb4495a"}, + {file = "pyzmq-26.1.1-cp313-cp313-win32.whl", hash = "sha256:6ff14c2fae6c0c2c1c02590c5c5d75aa1db35b859971b3ca2fcd28f983d9f2b6"}, + {file = "pyzmq-26.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:85f2d2ee5ea9a8f1de86a300e1062fbab044f45b5ce34d20580c0198a8196db0"}, + {file = "pyzmq-26.1.1-cp313-cp313-win_arm64.whl", hash = "sha256:cc09b1de8b985ca5a0ca343dd7fb007267c6b329347a74e200f4654268084239"}, + {file = "pyzmq-26.1.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:bc904e86de98f8fc5bd41597da5d61232d2d6d60c4397f26efffabb961b2b245"}, + {file = "pyzmq-26.1.1-cp313-cp313t-macosx_10_15_universal2.whl", hash = "sha256:00f39c367bbd6aa8e4bc36af6510561944c619b58eb36199fa334b594a18f615"}, + {file = "pyzmq-26.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:de6f384864a959866b782e6a3896538d1424d183f2d3c7ef079f71dcecde7284"}, + {file = "pyzmq-26.1.1-cp313-cp313t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3abb15df0c763339edb27a644c19381b2425ddd1aea3dbd77c1601a3b31867b8"}, + {file = "pyzmq-26.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40908ec2dd3b29bbadc0916a0d3c87f8dbeebbd8fead8e618539f09e0506dec4"}, + {file = "pyzmq-26.1.1-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:c11a95d3f6fc7e714ccd1066f68f9c1abd764a8b3596158be92f46dd49f41e03"}, + {file = "pyzmq-26.1.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:4437af9fee7a58302dbd511cc49f0cc2b35c112a33a1111fb123cf0be45205ca"}, + {file = "pyzmq-26.1.1-cp313-cp313t-musllinux_1_1_i686.whl", hash = "sha256:76390d3d66406cb01b9681c382874400e9dfd77f30ecdea4bd1bf5226dd4aff0"}, + {file = "pyzmq-26.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:4d4c7fe5e50e269f9c63a260638488fec194a73993008618a59b54c47ef6ae72"}, + {file = "pyzmq-26.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:25d128524207f53f7aae7c5abdc2b63f8957a060b00521af5ffcd20986b5d8f4"}, + {file = "pyzmq-26.1.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:d74b925d997e4f92b042bdd7085cd0a309ee0fd7cb4dc376059bbff6b32ff34f"}, + {file = "pyzmq-26.1.1-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:732f957441e5b1c65a7509395e6b6cafee9e12df9aa5f4bf92ed266fe0ba70ee"}, + {file = "pyzmq-26.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0a45102ad7ed9f9ddf2bd699cc5df37742cf7301111cba06001b927efecb120"}, + {file = "pyzmq-26.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:9f380d5333fc7cd17423f486125dcc073918676e33db70a6a8172b19fc78d23d"}, + {file = "pyzmq-26.1.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:8eaffcd6bf6a9d00b66a2052a33fa7e6a6575427e9644395f13c3d070f2918dc"}, + {file = "pyzmq-26.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:f1483d4975ae1b387b39bb8e23d1ff32fe5621aa9e4ed3055d05e9c5613fea53"}, + {file = "pyzmq-26.1.1-cp37-cp37m-win32.whl", hash = "sha256:a83653c6bbe5887caea55e49fbd2909c14b73acf43bcc051eb60b2d514bbd46e"}, + {file = 
"pyzmq-26.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:9763a8d3f5f74ef679989b373c37cc22e8d07e56d26439205cb83edb7722357f"}, + {file = "pyzmq-26.1.1-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:2b045647caf620ce0ed6c8fd9fb6a73116f99aceed966b152a5ba1b416d25311"}, + {file = "pyzmq-26.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f66dcb6625c002f209cdc12cae1a1fec926493cd2262efe37dc6b25a30cea863"}, + {file = "pyzmq-26.1.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0cf1d980c969fb9e538f52abd2227f09e015096bc5c3ef7aa26e0d64051c1db8"}, + {file = "pyzmq-26.1.1-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:443ebf5e261a95ee9725693f2a5a71401f89b89df0e0ea58844b074067aac2f1"}, + {file = "pyzmq-26.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29de77ba1b1877fe7defc1b9140e65cbd35f72a63bc501e56c2eae55bde5fff4"}, + {file = "pyzmq-26.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2f6071ec95af145d7b659dae6786871cd85f0acc599286b6f8ba0c74592d83dd"}, + {file = "pyzmq-26.1.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6f0512fc87629ad968889176bf2165d721cd817401a281504329e2a2ed0ca6a3"}, + {file = "pyzmq-26.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5ccfcf13e80719f6a2d9c0a021d9e47d4550907a29253554be2c09582f6d7963"}, + {file = "pyzmq-26.1.1-cp38-cp38-win32.whl", hash = "sha256:809673947e95752e407aaaaf03f205ee86ebfff9ca51db6d4003dfd87b8428d1"}, + {file = "pyzmq-26.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:62b5180e23e6f581600459cd983473cd723fdc64350f606d21407c99832aaf5f"}, + {file = "pyzmq-26.1.1-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:fe73d7c89d6f803bed122135ff5783364e8cdb479cf6fe2d764a44b6349e7e0f"}, + {file = "pyzmq-26.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db1b7e2b50ef21f398036786da4c153db63203a402396d9f21e08ea61f3f8dba"}, + {file = "pyzmq-26.1.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:7c506a51cb01bb997a3f6440db0d121e5e7a32396e9948b1fdb6a7bfa67243f4"}, + {file = "pyzmq-26.1.1-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:92eca4f80e8a748d880e55d3cf57ef487692e439f12d5c5a2e1cce84aaa7f6cb"}, + {file = "pyzmq-26.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14bdbae02f72f4716b0ffe7500e9da303d719ddde1f3dcfb4c4f6cc1cf73bb02"}, + {file = "pyzmq-26.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e03be7ed17836c9434cce0668ac1e2cc9143d7169f90f46a0167f6155e176e32"}, + {file = "pyzmq-26.1.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc5df31e36e4fddd4c8b5c42daee8d54d7b529e898ac984be97bf5517de166a7"}, + {file = "pyzmq-26.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:f218179c90a12d660906e04b25a340dd63e9743000ba16232ddaf46888f269da"}, + {file = "pyzmq-26.1.1-cp39-cp39-win32.whl", hash = "sha256:7dfabc180a4da422a4b349c63077347392463a75fa07aa3be96712ed6d42c547"}, + {file = "pyzmq-26.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:c5248e6e0fcbbbc912982e99cdd51c342601f495b0fa5bd667f3bdbdbf3e170f"}, + {file = "pyzmq-26.1.1-cp39-cp39-win_arm64.whl", hash = "sha256:2ae7aa1408778dc74582a1226052b930f9083b54b64d7e6ef6ec0466cfdcdec2"}, + {file = "pyzmq-26.1.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:be3fc2b11c0c384949cf1f01f9a48555039408b0f3e877863b1754225635953e"}, + {file = "pyzmq-26.1.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48dee75c2a9fa4f4a583d4028d564a0453447ee1277a29b07acc3743c092e259"}, + {file = 
"pyzmq-26.1.1-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:23f2fe4fb567e8098ebaa7204819658195b10ddd86958a97a6058eed2901eed3"}, + {file = "pyzmq-26.1.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:472cacd16f627c06d3c8b2d374345ab74446bae913584a6245e2aa935336d929"}, + {file = "pyzmq-26.1.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:8285b25aa20fcc46f1ca4afbc39fd3d5f2fe4c4bbf7f2c7f907a214e87a70024"}, + {file = "pyzmq-26.1.1-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2067e63fd9d5c13cfe12624dab0366053e523b37a7a01678ce4321f839398939"}, + {file = "pyzmq-26.1.1-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:cc109be2ee3638035d276e18eaf66a1e1f44201c0c4bea4ee0c692766bbd3570"}, + {file = "pyzmq-26.1.1-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d0da97e65ee73261dba70469cc8f63d8da3a8a825337a2e3d246b9e95141cdd0"}, + {file = "pyzmq-26.1.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa79c528706561306938b275f89bb2c6985ce08469c27e5de05bc680df5e826f"}, + {file = "pyzmq-26.1.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:3ddbd851a3a2651fdc5065a2804d50cf2f4b13b1bcd66de8e9e855d0217d4fcd"}, + {file = "pyzmq-26.1.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d3df226ab7464684ae6706e20a5cbab717c3735a7e409b3fa598b754d49f1946"}, + {file = "pyzmq-26.1.1-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:abad7b897e960d577eb4a0f3f789c1780bc3ffe2e7c27cf317e7c90ad26acf12"}, + {file = "pyzmq-26.1.1-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c513d829a548c2d5c88983167be2b3aa537f6d1191edcdc6fcd8999e18bdd994"}, + {file = "pyzmq-26.1.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70af4c9c991714ef1c65957605a8de42ef0d0620dd5f125953c8e682281bdb80"}, + {file = "pyzmq-26.1.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:8d4234f335b0d0842f7d661d8cd50cbad0729be58f1c4deb85cd96b38fe95025"}, + {file = "pyzmq-26.1.1-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:2c0fdb7b758e0e1605157e480b00b3a599073068a37091a1c75ec65bf7498645"}, + {file = "pyzmq-26.1.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc657577f057d60dd3642c9f95f28b432889b73143140061f7c1331d02f03df6"}, + {file = "pyzmq-26.1.1-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3e3b66fe6131b4f33d239f7d4c3bfb2f8532d8644bae3b3da4f3987073edac55"}, + {file = "pyzmq-26.1.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59b57e912feef6951aec8bb03fe0faa5ad5f36962883c72a30a9c965e6d988fd"}, + {file = "pyzmq-26.1.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:146956aec7d947c5afc5e7da0841423d7a53f84fd160fff25e682361dcfb32cb"}, + {file = "pyzmq-26.1.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:9521b874fd489495865172f344e46e0159095d1f161858e3fc6e28e43ca15160"}, + {file = "pyzmq-26.1.1.tar.gz", hash = "sha256:a7db05d8b7cd1a8c6610e9e9aa55d525baae7a44a43e18bc3260eb3f92de96c6"}, ] [package.dependencies] @@ -5014,20 +5074,20 @@ cffi = {version = "*", markers = "implementation_name == \"pypy\""} [[package]] name = "redis" -version = "5.0.7" +version = "5.0.8" description = "Python client for Redis database and key-value store" optional = false python-versions = ">=3.7" files = [ - {file = "redis-5.0.7-py3-none-any.whl", hash = 
"sha256:0e479e24da960c690be5d9b96d21f7b918a98c0cf49af3b6fafaa0753f93a0db"}, - {file = "redis-5.0.7.tar.gz", hash = "sha256:8f611490b93c8109b50adc317b31bfd84fff31def3475b92e7e80bf39f48175b"}, + {file = "redis-5.0.8-py3-none-any.whl", hash = "sha256:56134ee08ea909106090934adc36f65c9bcbbaecea5b21ba704ba6fb561f8eb4"}, + {file = "redis-5.0.8.tar.gz", hash = "sha256:0c5b10d387568dfe0698c6fad6615750c24170e548ca2deac10c649d463e9870"}, ] [package.dependencies] async-timeout = {version = ">=4.0.3", markers = "python_full_version < \"3.11.3\""} [package.extras] -hiredis = ["hiredis (>=1.0.0)"] +hiredis = ["hiredis (>1.0.0)"] ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==20.0.1)", "requests (>=2.26.0)"] [[package]] @@ -5060,90 +5120,90 @@ rpds-py = ">=0.7.0" [[package]] name = "regex" -version = "2024.5.15" +version = "2024.7.24" description = "Alternative regular expression module, to replace re." optional = false python-versions = ">=3.8" files = [ - {file = "regex-2024.5.15-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a81e3cfbae20378d75185171587cbf756015ccb14840702944f014e0d93ea09f"}, - {file = "regex-2024.5.15-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7b59138b219ffa8979013be7bc85bb60c6f7b7575df3d56dc1e403a438c7a3f6"}, - {file = "regex-2024.5.15-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a0bd000c6e266927cb7a1bc39d55be95c4b4f65c5be53e659537537e019232b1"}, - {file = "regex-2024.5.15-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5eaa7ddaf517aa095fa8da0b5015c44d03da83f5bd49c87961e3c997daed0de7"}, - {file = "regex-2024.5.15-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba68168daedb2c0bab7fd7e00ced5ba90aebf91024dea3c88ad5063c2a562cca"}, - {file = "regex-2024.5.15-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6e8d717bca3a6e2064fc3a08df5cbe366369f4b052dcd21b7416e6d71620dca1"}, - {file = "regex-2024.5.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1337b7dbef9b2f71121cdbf1e97e40de33ff114801263b275aafd75303bd62b5"}, - {file = "regex-2024.5.15-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f9ebd0a36102fcad2f03696e8af4ae682793a5d30b46c647eaf280d6cfb32796"}, - {file = "regex-2024.5.15-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9efa1a32ad3a3ea112224897cdaeb6aa00381627f567179c0314f7b65d354c62"}, - {file = "regex-2024.5.15-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:1595f2d10dff3d805e054ebdc41c124753631b6a471b976963c7b28543cf13b0"}, - {file = "regex-2024.5.15-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b802512f3e1f480f41ab5f2cfc0e2f761f08a1f41092d6718868082fc0d27143"}, - {file = "regex-2024.5.15-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:a0981022dccabca811e8171f913de05720590c915b033b7e601f35ce4ea7019f"}, - {file = "regex-2024.5.15-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:19068a6a79cf99a19ccefa44610491e9ca02c2be3305c7760d3831d38a467a6f"}, - {file = "regex-2024.5.15-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:1b5269484f6126eee5e687785e83c6b60aad7663dafe842b34691157e5083e53"}, - {file = "regex-2024.5.15-cp310-cp310-win32.whl", hash = "sha256:ada150c5adfa8fbcbf321c30c751dc67d2f12f15bd183ffe4ec7cde351d945b3"}, - {file = "regex-2024.5.15-cp310-cp310-win_amd64.whl", hash = "sha256:ac394ff680fc46b97487941f5e6ae49a9f30ea41c6c6804832063f14b2a5a145"}, - {file = 
"regex-2024.5.15-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f5b1dff3ad008dccf18e652283f5e5339d70bf8ba7c98bf848ac33db10f7bc7a"}, - {file = "regex-2024.5.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c6a2b494a76983df8e3d3feea9b9ffdd558b247e60b92f877f93a1ff43d26656"}, - {file = "regex-2024.5.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a32b96f15c8ab2e7d27655969a23895eb799de3665fa94349f3b2fbfd547236f"}, - {file = "regex-2024.5.15-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:10002e86e6068d9e1c91eae8295ef690f02f913c57db120b58fdd35a6bb1af35"}, - {file = "regex-2024.5.15-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ec54d5afa89c19c6dd8541a133be51ee1017a38b412b1321ccb8d6ddbeb4cf7d"}, - {file = "regex-2024.5.15-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:10e4ce0dca9ae7a66e6089bb29355d4432caed736acae36fef0fdd7879f0b0cb"}, - {file = "regex-2024.5.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e507ff1e74373c4d3038195fdd2af30d297b4f0950eeda6f515ae3d84a1770f"}, - {file = "regex-2024.5.15-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d1f059a4d795e646e1c37665b9d06062c62d0e8cc3c511fe01315973a6542e40"}, - {file = "regex-2024.5.15-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0721931ad5fe0dda45d07f9820b90b2148ccdd8e45bb9e9b42a146cb4f695649"}, - {file = "regex-2024.5.15-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:833616ddc75ad595dee848ad984d067f2f31be645d603e4d158bba656bbf516c"}, - {file = "regex-2024.5.15-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:287eb7f54fc81546346207c533ad3c2c51a8d61075127d7f6d79aaf96cdee890"}, - {file = "regex-2024.5.15-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:19dfb1c504781a136a80ecd1fff9f16dddf5bb43cec6871778c8a907a085bb3d"}, - {file = "regex-2024.5.15-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:119af6e56dce35e8dfb5222573b50c89e5508d94d55713c75126b753f834de68"}, - {file = "regex-2024.5.15-cp311-cp311-win32.whl", hash = "sha256:1c1c174d6ec38d6c8a7504087358ce9213d4332f6293a94fbf5249992ba54efa"}, - {file = "regex-2024.5.15-cp311-cp311-win_amd64.whl", hash = "sha256:9e717956dcfd656f5055cc70996ee2cc82ac5149517fc8e1b60261b907740201"}, - {file = "regex-2024.5.15-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:632b01153e5248c134007209b5c6348a544ce96c46005d8456de1d552455b014"}, - {file = "regex-2024.5.15-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e64198f6b856d48192bf921421fdd8ad8eb35e179086e99e99f711957ffedd6e"}, - {file = "regex-2024.5.15-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68811ab14087b2f6e0fc0c2bae9ad689ea3584cad6917fc57be6a48bbd012c49"}, - {file = "regex-2024.5.15-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8ec0c2fea1e886a19c3bee0cd19d862b3aa75dcdfb42ebe8ed30708df64687a"}, - {file = "regex-2024.5.15-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d0c0c0003c10f54a591d220997dd27d953cd9ccc1a7294b40a4be5312be8797b"}, - {file = "regex-2024.5.15-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2431b9e263af1953c55abbd3e2efca67ca80a3de8a0437cb58e2421f8184717a"}, - {file = "regex-2024.5.15-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a605586358893b483976cffc1723fb0f83e526e8f14c6e6614e75919d9862cf"}, - {file = 
"regex-2024.5.15-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:391d7f7f1e409d192dba8bcd42d3e4cf9e598f3979cdaed6ab11288da88cb9f2"}, - {file = "regex-2024.5.15-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9ff11639a8d98969c863d4617595eb5425fd12f7c5ef6621a4b74b71ed8726d5"}, - {file = "regex-2024.5.15-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4eee78a04e6c67e8391edd4dad3279828dd66ac4b79570ec998e2155d2e59fd5"}, - {file = "regex-2024.5.15-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:8fe45aa3f4aa57faabbc9cb46a93363edd6197cbc43523daea044e9ff2fea83e"}, - {file = "regex-2024.5.15-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:d0a3d8d6acf0c78a1fff0e210d224b821081330b8524e3e2bc5a68ef6ab5803d"}, - {file = "regex-2024.5.15-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c486b4106066d502495b3025a0a7251bf37ea9540433940a23419461ab9f2a80"}, - {file = "regex-2024.5.15-cp312-cp312-win32.whl", hash = "sha256:c49e15eac7c149f3670b3e27f1f28a2c1ddeccd3a2812cba953e01be2ab9b5fe"}, - {file = "regex-2024.5.15-cp312-cp312-win_amd64.whl", hash = "sha256:673b5a6da4557b975c6c90198588181029c60793835ce02f497ea817ff647cb2"}, - {file = "regex-2024.5.15-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:87e2a9c29e672fc65523fb47a90d429b70ef72b901b4e4b1bd42387caf0d6835"}, - {file = "regex-2024.5.15-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c3bea0ba8b73b71b37ac833a7f3fd53825924165da6a924aec78c13032f20850"}, - {file = "regex-2024.5.15-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bfc4f82cabe54f1e7f206fd3d30fda143f84a63fe7d64a81558d6e5f2e5aaba9"}, - {file = "regex-2024.5.15-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5bb9425fe881d578aeca0b2b4b3d314ec88738706f66f219c194d67179337cb"}, - {file = "regex-2024.5.15-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:64c65783e96e563103d641760664125e91bd85d8e49566ee560ded4da0d3e704"}, - {file = "regex-2024.5.15-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cf2430df4148b08fb4324b848672514b1385ae3807651f3567871f130a728cc3"}, - {file = "regex-2024.5.15-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5397de3219a8b08ae9540c48f602996aa6b0b65d5a61683e233af8605c42b0f2"}, - {file = "regex-2024.5.15-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:455705d34b4154a80ead722f4f185b04c4237e8e8e33f265cd0798d0e44825fa"}, - {file = "regex-2024.5.15-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b2b6f1b3bb6f640c1a92be3bbfbcb18657b125b99ecf141fb3310b5282c7d4ed"}, - {file = "regex-2024.5.15-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:3ad070b823ca5890cab606c940522d05d3d22395d432f4aaaf9d5b1653e47ced"}, - {file = "regex-2024.5.15-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:5b5467acbfc153847d5adb21e21e29847bcb5870e65c94c9206d20eb4e99a384"}, - {file = "regex-2024.5.15-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:e6662686aeb633ad65be2a42b4cb00178b3fbf7b91878f9446075c404ada552f"}, - {file = "regex-2024.5.15-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:2b4c884767504c0e2401babe8b5b7aea9148680d2e157fa28f01529d1f7fcf67"}, - {file = "regex-2024.5.15-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:3cd7874d57f13bf70078f1ff02b8b0aa48d5b9ed25fc48547516c6aba36f5741"}, - {file = "regex-2024.5.15-cp38-cp38-win32.whl", hash = 
"sha256:e4682f5ba31f475d58884045c1a97a860a007d44938c4c0895f41d64481edbc9"}, - {file = "regex-2024.5.15-cp38-cp38-win_amd64.whl", hash = "sha256:d99ceffa25ac45d150e30bd9ed14ec6039f2aad0ffa6bb87a5936f5782fc1569"}, - {file = "regex-2024.5.15-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:13cdaf31bed30a1e1c2453ef6015aa0983e1366fad2667657dbcac7b02f67133"}, - {file = "regex-2024.5.15-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:cac27dcaa821ca271855a32188aa61d12decb6fe45ffe3e722401fe61e323cd1"}, - {file = "regex-2024.5.15-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7dbe2467273b875ea2de38ded4eba86cbcbc9a1a6d0aa11dcf7bd2e67859c435"}, - {file = "regex-2024.5.15-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:64f18a9a3513a99c4bef0e3efd4c4a5b11228b48aa80743be822b71e132ae4f5"}, - {file = "regex-2024.5.15-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d347a741ea871c2e278fde6c48f85136c96b8659b632fb57a7d1ce1872547600"}, - {file = "regex-2024.5.15-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1878b8301ed011704aea4c806a3cadbd76f84dece1ec09cc9e4dc934cfa5d4da"}, - {file = "regex-2024.5.15-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4babf07ad476aaf7830d77000874d7611704a7fcf68c9c2ad151f5d94ae4bfc4"}, - {file = "regex-2024.5.15-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:35cb514e137cb3488bce23352af3e12fb0dbedd1ee6e60da053c69fb1b29cc6c"}, - {file = "regex-2024.5.15-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cdd09d47c0b2efee9378679f8510ee6955d329424c659ab3c5e3a6edea696294"}, - {file = "regex-2024.5.15-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:72d7a99cd6b8f958e85fc6ca5b37c4303294954eac1376535b03c2a43eb72629"}, - {file = "regex-2024.5.15-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:a094801d379ab20c2135529948cb84d417a2169b9bdceda2a36f5f10977ebc16"}, - {file = "regex-2024.5.15-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:c0c18345010870e58238790a6779a1219b4d97bd2e77e1140e8ee5d14df071aa"}, - {file = "regex-2024.5.15-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:16093f563098448ff6b1fa68170e4acbef94e6b6a4e25e10eae8598bb1694b5d"}, - {file = "regex-2024.5.15-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:e38a7d4e8f633a33b4c7350fbd8bad3b70bf81439ac67ac38916c4a86b465456"}, - {file = "regex-2024.5.15-cp39-cp39-win32.whl", hash = "sha256:71a455a3c584a88f654b64feccc1e25876066c4f5ef26cd6dd711308aa538694"}, - {file = "regex-2024.5.15-cp39-cp39-win_amd64.whl", hash = "sha256:cab12877a9bdafde5500206d1020a584355a97884dfd388af3699e9137bf7388"}, - {file = "regex-2024.5.15.tar.gz", hash = "sha256:d3ee02d9e5f482cc8309134a91eeaacbdd2261ba111b0fef3748eeb4913e6a2c"}, + {file = "regex-2024.7.24-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:228b0d3f567fafa0633aee87f08b9276c7062da9616931382993c03808bb68ce"}, + {file = "regex-2024.7.24-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3426de3b91d1bc73249042742f45c2148803c111d1175b283270177fdf669024"}, + {file = "regex-2024.7.24-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f273674b445bcb6e4409bf8d1be67bc4b58e8b46fd0d560055d515b8830063cd"}, + {file = "regex-2024.7.24-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23acc72f0f4e1a9e6e9843d6328177ae3074b4182167e34119ec7233dfeccf53"}, + {file = 
"regex-2024.7.24-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65fd3d2e228cae024c411c5ccdffae4c315271eee4a8b839291f84f796b34eca"}, + {file = "regex-2024.7.24-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c414cbda77dbf13c3bc88b073a1a9f375c7b0cb5e115e15d4b73ec3a2fbc6f59"}, + {file = "regex-2024.7.24-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf7a89eef64b5455835f5ed30254ec19bf41f7541cd94f266ab7cbd463f00c41"}, + {file = "regex-2024.7.24-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:19c65b00d42804e3fbea9708f0937d157e53429a39b7c61253ff15670ff62cb5"}, + {file = "regex-2024.7.24-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7a5486ca56c8869070a966321d5ab416ff0f83f30e0e2da1ab48815c8d165d46"}, + {file = "regex-2024.7.24-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:6f51f9556785e5a203713f5efd9c085b4a45aecd2a42573e2b5041881b588d1f"}, + {file = "regex-2024.7.24-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:a4997716674d36a82eab3e86f8fa77080a5d8d96a389a61ea1d0e3a94a582cf7"}, + {file = "regex-2024.7.24-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:c0abb5e4e8ce71a61d9446040c1e86d4e6d23f9097275c5bd49ed978755ff0fe"}, + {file = "regex-2024.7.24-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:18300a1d78cf1290fa583cd8b7cde26ecb73e9f5916690cf9d42de569c89b1ce"}, + {file = "regex-2024.7.24-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:416c0e4f56308f34cdb18c3f59849479dde5b19febdcd6e6fa4d04b6c31c9faa"}, + {file = "regex-2024.7.24-cp310-cp310-win32.whl", hash = "sha256:fb168b5924bef397b5ba13aabd8cf5df7d3d93f10218d7b925e360d436863f66"}, + {file = "regex-2024.7.24-cp310-cp310-win_amd64.whl", hash = "sha256:6b9fc7e9cc983e75e2518496ba1afc524227c163e43d706688a6bb9eca41617e"}, + {file = "regex-2024.7.24-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:382281306e3adaaa7b8b9ebbb3ffb43358a7bbf585fa93821300a418bb975281"}, + {file = "regex-2024.7.24-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4fdd1384619f406ad9037fe6b6eaa3de2749e2e12084abc80169e8e075377d3b"}, + {file = "regex-2024.7.24-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3d974d24edb231446f708c455fd08f94c41c1ff4f04bcf06e5f36df5ef50b95a"}, + {file = "regex-2024.7.24-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a2ec4419a3fe6cf8a4795752596dfe0adb4aea40d3683a132bae9c30b81e8d73"}, + {file = "regex-2024.7.24-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eb563dd3aea54c797adf513eeec819c4213d7dbfc311874eb4fd28d10f2ff0f2"}, + {file = "regex-2024.7.24-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:45104baae8b9f67569f0f1dca5e1f1ed77a54ae1cd8b0b07aba89272710db61e"}, + {file = "regex-2024.7.24-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:994448ee01864501912abf2bad9203bffc34158e80fe8bfb5b031f4f8e16da51"}, + {file = "regex-2024.7.24-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3fac296f99283ac232d8125be932c5cd7644084a30748fda013028c815ba3364"}, + {file = "regex-2024.7.24-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7e37e809b9303ec3a179085415cb5f418ecf65ec98cdfe34f6a078b46ef823ee"}, + {file = "regex-2024.7.24-cp311-cp311-musllinux_1_2_i686.whl", hash = 
"sha256:01b689e887f612610c869421241e075c02f2e3d1ae93a037cb14f88ab6a8934c"}, + {file = "regex-2024.7.24-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f6442f0f0ff81775eaa5b05af8a0ffa1dda36e9cf6ec1e0d3d245e8564b684ce"}, + {file = "regex-2024.7.24-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:871e3ab2838fbcb4e0865a6e01233975df3a15e6fce93b6f99d75cacbd9862d1"}, + {file = "regex-2024.7.24-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c918b7a1e26b4ab40409820ddccc5d49871a82329640f5005f73572d5eaa9b5e"}, + {file = "regex-2024.7.24-cp311-cp311-win32.whl", hash = "sha256:2dfbb8baf8ba2c2b9aa2807f44ed272f0913eeeba002478c4577b8d29cde215c"}, + {file = "regex-2024.7.24-cp311-cp311-win_amd64.whl", hash = "sha256:538d30cd96ed7d1416d3956f94d54e426a8daf7c14527f6e0d6d425fcb4cca52"}, + {file = "regex-2024.7.24-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:fe4ebef608553aff8deb845c7f4f1d0740ff76fa672c011cc0bacb2a00fbde86"}, + {file = "regex-2024.7.24-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:74007a5b25b7a678459f06559504f1eec2f0f17bca218c9d56f6a0a12bfffdad"}, + {file = "regex-2024.7.24-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7df9ea48641da022c2a3c9c641650cd09f0cd15e8908bf931ad538f5ca7919c9"}, + {file = "regex-2024.7.24-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a1141a1dcc32904c47f6846b040275c6e5de0bf73f17d7a409035d55b76f289"}, + {file = "regex-2024.7.24-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80c811cfcb5c331237d9bad3bea2c391114588cf4131707e84d9493064d267f9"}, + {file = "regex-2024.7.24-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7214477bf9bd195894cf24005b1e7b496f46833337b5dedb7b2a6e33f66d962c"}, + {file = "regex-2024.7.24-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d55588cba7553f0b6ec33130bc3e114b355570b45785cebdc9daed8c637dd440"}, + {file = "regex-2024.7.24-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:558a57cfc32adcf19d3f791f62b5ff564922942e389e3cfdb538a23d65a6b610"}, + {file = "regex-2024.7.24-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a512eed9dfd4117110b1881ba9a59b31433caed0c4101b361f768e7bcbaf93c5"}, + {file = "regex-2024.7.24-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:86b17ba823ea76256b1885652e3a141a99a5c4422f4a869189db328321b73799"}, + {file = "regex-2024.7.24-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5eefee9bfe23f6df09ffb6dfb23809f4d74a78acef004aa904dc7c88b9944b05"}, + {file = "regex-2024.7.24-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:731fcd76bbdbf225e2eb85b7c38da9633ad3073822f5ab32379381e8c3c12e94"}, + {file = "regex-2024.7.24-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:eaef80eac3b4cfbdd6de53c6e108b4c534c21ae055d1dbea2de6b3b8ff3def38"}, + {file = "regex-2024.7.24-cp312-cp312-win32.whl", hash = "sha256:185e029368d6f89f36e526764cf12bf8d6f0e3a2a7737da625a76f594bdfcbfc"}, + {file = "regex-2024.7.24-cp312-cp312-win_amd64.whl", hash = "sha256:2f1baff13cc2521bea83ab2528e7a80cbe0ebb2c6f0bfad15be7da3aed443908"}, + {file = "regex-2024.7.24-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:66b4c0731a5c81921e938dcf1a88e978264e26e6ac4ec96a4d21ae0354581ae0"}, + {file = "regex-2024.7.24-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:88ecc3afd7e776967fa16c80f974cb79399ee8dc6c96423321d6f7d4b881c92b"}, + {file = "regex-2024.7.24-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:64bd50cf16bcc54b274e20235bf8edbb64184a30e1e53873ff8d444e7ac656b2"}, + {file = "regex-2024.7.24-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eb462f0e346fcf41a901a126b50f8781e9a474d3927930f3490f38a6e73b6950"}, + {file = "regex-2024.7.24-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a82465ebbc9b1c5c50738536fdfa7cab639a261a99b469c9d4c7dcbb2b3f1e57"}, + {file = "regex-2024.7.24-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:68a8f8c046c6466ac61a36b65bb2395c74451df2ffb8458492ef49900efed293"}, + {file = "regex-2024.7.24-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dac8e84fff5d27420f3c1e879ce9929108e873667ec87e0c8eeb413a5311adfe"}, + {file = "regex-2024.7.24-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba2537ef2163db9e6ccdbeb6f6424282ae4dea43177402152c67ef869cf3978b"}, + {file = "regex-2024.7.24-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:43affe33137fcd679bdae93fb25924979517e011f9dea99163f80b82eadc7e53"}, + {file = "regex-2024.7.24-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:c9bb87fdf2ab2370f21e4d5636e5317775e5d51ff32ebff2cf389f71b9b13750"}, + {file = "regex-2024.7.24-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:945352286a541406f99b2655c973852da7911b3f4264e010218bbc1cc73168f2"}, + {file = "regex-2024.7.24-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:8bc593dcce679206b60a538c302d03c29b18e3d862609317cb560e18b66d10cf"}, + {file = "regex-2024.7.24-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:3f3b6ca8eae6d6c75a6cff525c8530c60e909a71a15e1b731723233331de4169"}, + {file = "regex-2024.7.24-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c51edc3541e11fbe83f0c4d9412ef6c79f664a3745fab261457e84465ec9d5a8"}, + {file = "regex-2024.7.24-cp38-cp38-win32.whl", hash = "sha256:d0a07763776188b4db4c9c7fb1b8c494049f84659bb387b71c73bbc07f189e96"}, + {file = "regex-2024.7.24-cp38-cp38-win_amd64.whl", hash = "sha256:8fd5afd101dcf86a270d254364e0e8dddedebe6bd1ab9d5f732f274fa00499a5"}, + {file = "regex-2024.7.24-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:0ffe3f9d430cd37d8fa5632ff6fb36d5b24818c5c986893063b4e5bdb84cdf24"}, + {file = "regex-2024.7.24-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:25419b70ba00a16abc90ee5fce061228206173231f004437730b67ac77323f0d"}, + {file = "regex-2024.7.24-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:33e2614a7ce627f0cdf2ad104797d1f68342d967de3695678c0cb84f530709f8"}, + {file = "regex-2024.7.24-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d33a0021893ede5969876052796165bab6006559ab845fd7b515a30abdd990dc"}, + {file = "regex-2024.7.24-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:04ce29e2c5fedf296b1a1b0acc1724ba93a36fb14031f3abfb7abda2806c1535"}, + {file = "regex-2024.7.24-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b16582783f44fbca6fcf46f61347340c787d7530d88b4d590a397a47583f31dd"}, + {file = "regex-2024.7.24-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:836d3cc225b3e8a943d0b02633fb2f28a66e281290302a79df0e1eaa984ff7c1"}, + {file = "regex-2024.7.24-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:438d9f0f4bc64e8dea78274caa5af971ceff0f8771e1a2333620969936ba10be"}, + {file = 
"regex-2024.7.24-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:973335b1624859cb0e52f96062a28aa18f3a5fc77a96e4a3d6d76e29811a0e6e"}, + {file = "regex-2024.7.24-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:c5e69fd3eb0b409432b537fe3c6f44ac089c458ab6b78dcec14478422879ec5f"}, + {file = "regex-2024.7.24-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:fbf8c2f00904eaf63ff37718eb13acf8e178cb940520e47b2f05027f5bb34ce3"}, + {file = "regex-2024.7.24-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ae2757ace61bc4061b69af19e4689fa4416e1a04840f33b441034202b5cd02d4"}, + {file = "regex-2024.7.24-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:44fc61b99035fd9b3b9453f1713234e5a7c92a04f3577252b45feefe1b327759"}, + {file = "regex-2024.7.24-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:84c312cdf839e8b579f504afcd7b65f35d60b6285d892b19adea16355e8343c9"}, + {file = "regex-2024.7.24-cp39-cp39-win32.whl", hash = "sha256:ca5b2028c2f7af4e13fb9fc29b28d0ce767c38c7facdf64f6c2cd040413055f1"}, + {file = "regex-2024.7.24-cp39-cp39-win_amd64.whl", hash = "sha256:7c479f5ae937ec9985ecaf42e2e10631551d909f203e31308c12d703922742f9"}, + {file = "regex-2024.7.24.tar.gz", hash = "sha256:9cfd009eed1a46b27c14039ad5bbc5e71b6367c5b2e6d5f5da0ea91600817506"}, ] [[package]] @@ -5249,110 +5309,114 @@ jupyter = ["ipywidgets (>=7.5.1,<8.0.0)"] [[package]] name = "rpds-py" -version = "0.19.0" +version = "0.20.0" description = "Python bindings to Rust's persistent data structures (rpds)" optional = false python-versions = ">=3.8" files = [ - {file = "rpds_py-0.19.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:fb37bd599f031f1a6fb9e58ec62864ccf3ad549cf14bac527dbfa97123edcca4"}, - {file = "rpds_py-0.19.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3384d278df99ec2c6acf701d067147320b864ef6727405d6470838476e44d9e8"}, - {file = "rpds_py-0.19.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e54548e0be3ac117595408fd4ca0ac9278fde89829b0b518be92863b17ff67a2"}, - {file = "rpds_py-0.19.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8eb488ef928cdbc05a27245e52de73c0d7c72a34240ef4d9893fdf65a8c1a955"}, - {file = "rpds_py-0.19.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a5da93debdfe27b2bfc69eefb592e1831d957b9535e0943a0ee8b97996de21b5"}, - {file = "rpds_py-0.19.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:79e205c70afddd41f6ee79a8656aec738492a550247a7af697d5bd1aee14f766"}, - {file = "rpds_py-0.19.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:959179efb3e4a27610e8d54d667c02a9feaa86bbabaf63efa7faa4dfa780d4f1"}, - {file = "rpds_py-0.19.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a6e605bb9edcf010f54f8b6a590dd23a4b40a8cb141255eec2a03db249bc915b"}, - {file = "rpds_py-0.19.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:9133d75dc119a61d1a0ded38fb9ba40a00ef41697cc07adb6ae098c875195a3f"}, - {file = "rpds_py-0.19.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:dd36b712d35e757e28bf2f40a71e8f8a2d43c8b026d881aa0c617b450d6865c9"}, - {file = "rpds_py-0.19.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:354f3a91718489912f2e0fc331c24eaaf6a4565c080e00fbedb6015857c00582"}, - {file = "rpds_py-0.19.0-cp310-none-win32.whl", hash = "sha256:ebcbf356bf5c51afc3290e491d3722b26aaf5b6af3c1c7f6a1b757828a46e336"}, - {file = "rpds_py-0.19.0-cp310-none-win_amd64.whl", hash = 
"sha256:75a6076289b2df6c8ecb9d13ff79ae0cad1d5fb40af377a5021016d58cd691ec"}, - {file = "rpds_py-0.19.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:6d45080095e585f8c5097897313def60caa2046da202cdb17a01f147fb263b81"}, - {file = "rpds_py-0.19.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c5c9581019c96f865483d031691a5ff1cc455feb4d84fc6920a5ffc48a794d8a"}, - {file = "rpds_py-0.19.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1540d807364c84516417115c38f0119dfec5ea5c0dd9a25332dea60b1d26fc4d"}, - {file = "rpds_py-0.19.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9e65489222b410f79711dc3d2d5003d2757e30874096b2008d50329ea4d0f88c"}, - {file = "rpds_py-0.19.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9da6f400eeb8c36f72ef6646ea530d6d175a4f77ff2ed8dfd6352842274c1d8b"}, - {file = "rpds_py-0.19.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:37f46bb11858717e0efa7893c0f7055c43b44c103e40e69442db5061cb26ed34"}, - {file = "rpds_py-0.19.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:071d4adc734de562bd11d43bd134330fb6249769b2f66b9310dab7460f4bf714"}, - {file = "rpds_py-0.19.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9625367c8955e4319049113ea4f8fee0c6c1145192d57946c6ffcd8fe8bf48dd"}, - {file = "rpds_py-0.19.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e19509145275d46bc4d1e16af0b57a12d227c8253655a46bbd5ec317e941279d"}, - {file = "rpds_py-0.19.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4d438e4c020d8c39961deaf58f6913b1bf8832d9b6f62ec35bd93e97807e9cbc"}, - {file = "rpds_py-0.19.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:90bf55d9d139e5d127193170f38c584ed3c79e16638890d2e36f23aa1630b952"}, - {file = "rpds_py-0.19.0-cp311-none-win32.whl", hash = "sha256:8d6ad132b1bc13d05ffe5b85e7a01a3998bf3a6302ba594b28d61b8c2cf13aaf"}, - {file = "rpds_py-0.19.0-cp311-none-win_amd64.whl", hash = "sha256:7ec72df7354e6b7f6eb2a17fa6901350018c3a9ad78e48d7b2b54d0412539a67"}, - {file = "rpds_py-0.19.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:5095a7c838a8647c32aa37c3a460d2c48debff7fc26e1136aee60100a8cd8f68"}, - {file = "rpds_py-0.19.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6f2f78ef14077e08856e788fa482107aa602636c16c25bdf59c22ea525a785e9"}, - {file = "rpds_py-0.19.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7cc6cb44f8636fbf4a934ca72f3e786ba3c9f9ba4f4d74611e7da80684e48d2"}, - {file = "rpds_py-0.19.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cf902878b4af334a09de7a45badbff0389e7cf8dc2e4dcf5f07125d0b7c2656d"}, - {file = "rpds_py-0.19.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:688aa6b8aa724db1596514751ffb767766e02e5c4a87486ab36b8e1ebc1aedac"}, - {file = "rpds_py-0.19.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57dbc9167d48e355e2569346b5aa4077f29bf86389c924df25c0a8b9124461fb"}, - {file = "rpds_py-0.19.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b4cf5a9497874822341c2ebe0d5850fed392034caadc0bad134ab6822c0925b"}, - {file = "rpds_py-0.19.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8a790d235b9d39c70a466200d506bb33a98e2ee374a9b4eec7a8ac64c2c261fa"}, - {file = "rpds_py-0.19.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1d16089dfa58719c98a1c06f2daceba6d8e3fb9b5d7931af4a990a3c486241cb"}, - 
{file = "rpds_py-0.19.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:bc9128e74fe94650367fe23f37074f121b9f796cabbd2f928f13e9661837296d"}, - {file = "rpds_py-0.19.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c8f77e661ffd96ff104bebf7d0f3255b02aa5d5b28326f5408d6284c4a8b3248"}, - {file = "rpds_py-0.19.0-cp312-none-win32.whl", hash = "sha256:5f83689a38e76969327e9b682be5521d87a0c9e5a2e187d2bc6be4765f0d4600"}, - {file = "rpds_py-0.19.0-cp312-none-win_amd64.whl", hash = "sha256:06925c50f86da0596b9c3c64c3837b2481337b83ef3519e5db2701df695453a4"}, - {file = "rpds_py-0.19.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:52e466bea6f8f3a44b1234570244b1cff45150f59a4acae3fcc5fd700c2993ca"}, - {file = "rpds_py-0.19.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e21cc693045fda7f745c790cb687958161ce172ffe3c5719ca1764e752237d16"}, - {file = "rpds_py-0.19.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b31f059878eb1f5da8b2fd82480cc18bed8dcd7fb8fe68370e2e6285fa86da6"}, - {file = "rpds_py-0.19.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1dd46f309e953927dd018567d6a9e2fb84783963650171f6c5fe7e5c41fd5666"}, - {file = "rpds_py-0.19.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:34a01a4490e170376cd79258b7f755fa13b1a6c3667e872c8e35051ae857a92b"}, - {file = "rpds_py-0.19.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bcf426a8c38eb57f7bf28932e68425ba86def6e756a5b8cb4731d8e62e4e0223"}, - {file = "rpds_py-0.19.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f68eea5df6347d3f1378ce992d86b2af16ad7ff4dcb4a19ccdc23dea901b87fb"}, - {file = "rpds_py-0.19.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dab8d921b55a28287733263c0e4c7db11b3ee22aee158a4de09f13c93283c62d"}, - {file = "rpds_py-0.19.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:6fe87efd7f47266dfc42fe76dae89060038f1d9cb911f89ae7e5084148d1cc08"}, - {file = "rpds_py-0.19.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:535d4b52524a961d220875688159277f0e9eeeda0ac45e766092bfb54437543f"}, - {file = "rpds_py-0.19.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:8b1a94b8afc154fbe36978a511a1f155f9bd97664e4f1f7a374d72e180ceb0ae"}, - {file = "rpds_py-0.19.0-cp38-none-win32.whl", hash = "sha256:7c98298a15d6b90c8f6e3caa6457f4f022423caa5fa1a1ca7a5e9e512bdb77a4"}, - {file = "rpds_py-0.19.0-cp38-none-win_amd64.whl", hash = "sha256:b0da31853ab6e58a11db3205729133ce0df26e6804e93079dee095be3d681dc1"}, - {file = "rpds_py-0.19.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:5039e3cef7b3e7a060de468a4a60a60a1f31786da94c6cb054e7a3c75906111c"}, - {file = "rpds_py-0.19.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ab1932ca6cb8c7499a4d87cb21ccc0d3326f172cfb6a64021a889b591bb3045c"}, - {file = "rpds_py-0.19.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f2afd2164a1e85226fcb6a1da77a5c8896c18bfe08e82e8ceced5181c42d2179"}, - {file = "rpds_py-0.19.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b1c30841f5040de47a0046c243fc1b44ddc87d1b12435a43b8edff7e7cb1e0d0"}, - {file = "rpds_py-0.19.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f757f359f30ec7dcebca662a6bd46d1098f8b9fb1fcd661a9e13f2e8ce343ba1"}, - {file = "rpds_py-0.19.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15e65395a59d2e0e96caf8ee5389ffb4604e980479c32742936ddd7ade914b22"}, - {file = 
"rpds_py-0.19.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb0f6eb3a320f24b94d177e62f4074ff438f2ad9d27e75a46221904ef21a7b05"}, - {file = "rpds_py-0.19.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b228e693a2559888790936e20f5f88b6e9f8162c681830eda303bad7517b4d5a"}, - {file = "rpds_py-0.19.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2575efaa5d949c9f4e2cdbe7d805d02122c16065bfb8d95c129372d65a291a0b"}, - {file = "rpds_py-0.19.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:5c872814b77a4e84afa293a1bee08c14daed1068b2bb1cc312edbf020bbbca2b"}, - {file = "rpds_py-0.19.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:850720e1b383df199b8433a20e02b25b72f0fded28bc03c5bd79e2ce7ef050be"}, - {file = "rpds_py-0.19.0-cp39-none-win32.whl", hash = "sha256:ce84a7efa5af9f54c0aa7692c45861c1667080814286cacb9958c07fc50294fb"}, - {file = "rpds_py-0.19.0-cp39-none-win_amd64.whl", hash = "sha256:1c26da90b8d06227d7769f34915913911222d24ce08c0ab2d60b354e2d9c7aff"}, - {file = "rpds_py-0.19.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:75969cf900d7be665ccb1622a9aba225cf386bbc9c3bcfeeab9f62b5048f4a07"}, - {file = "rpds_py-0.19.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:8445f23f13339da640d1be8e44e5baf4af97e396882ebbf1692aecd67f67c479"}, - {file = "rpds_py-0.19.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5a7c1062ef8aea3eda149f08120f10795835fc1c8bc6ad948fb9652a113ca55"}, - {file = "rpds_py-0.19.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:462b0c18fbb48fdbf980914a02ee38c423a25fcc4cf40f66bacc95a2d2d73bc8"}, - {file = "rpds_py-0.19.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3208f9aea18991ac7f2b39721e947bbd752a1abbe79ad90d9b6a84a74d44409b"}, - {file = "rpds_py-0.19.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3444fe52b82f122d8a99bf66777aed6b858d392b12f4c317da19f8234db4533"}, - {file = "rpds_py-0.19.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88cb4bac7185a9f0168d38c01d7a00addece9822a52870eee26b8d5b61409213"}, - {file = "rpds_py-0.19.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6b130bd4163c93798a6b9bb96be64a7c43e1cec81126ffa7ffaa106e1fc5cef5"}, - {file = "rpds_py-0.19.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:a707b158b4410aefb6b054715545bbb21aaa5d5d0080217290131c49c2124a6e"}, - {file = "rpds_py-0.19.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:dc9ac4659456bde7c567107556ab065801622396b435a3ff213daef27b495388"}, - {file = "rpds_py-0.19.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:81ea573aa46d3b6b3d890cd3c0ad82105985e6058a4baed03cf92518081eec8c"}, - {file = "rpds_py-0.19.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3f148c3f47f7f29a79c38cc5d020edcb5ca780020fab94dbc21f9af95c463581"}, - {file = "rpds_py-0.19.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:b0906357f90784a66e89ae3eadc2654f36c580a7d65cf63e6a616e4aec3a81be"}, - {file = "rpds_py-0.19.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f629ecc2db6a4736b5ba95a8347b0089240d69ad14ac364f557d52ad68cf94b0"}, - {file = "rpds_py-0.19.0-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c6feacd1d178c30e5bc37184526e56740342fd2aa6371a28367bad7908d454fc"}, - {file = 
"rpds_py-0.19.0-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae8b6068ee374fdfab63689be0963333aa83b0815ead5d8648389a8ded593378"}, - {file = "rpds_py-0.19.0-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78d57546bad81e0da13263e4c9ce30e96dcbe720dbff5ada08d2600a3502e526"}, - {file = "rpds_py-0.19.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8b6683a37338818646af718c9ca2a07f89787551057fae57c4ec0446dc6224b"}, - {file = "rpds_py-0.19.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e8481b946792415adc07410420d6fc65a352b45d347b78fec45d8f8f0d7496f0"}, - {file = "rpds_py-0.19.0-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:bec35eb20792ea64c3c57891bc3ca0bedb2884fbac2c8249d9b731447ecde4fa"}, - {file = "rpds_py-0.19.0-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:aa5476c3e3a402c37779e95f7b4048db2cb5b0ed0b9d006983965e93f40fe05a"}, - {file = "rpds_py-0.19.0-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:19d02c45f2507b489fd4df7b827940f1420480b3e2e471e952af4d44a1ea8e34"}, - {file = "rpds_py-0.19.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a3e2fd14c5d49ee1da322672375963f19f32b3d5953f0615b175ff7b9d38daed"}, - {file = "rpds_py-0.19.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:93a91c2640645303e874eada51f4f33351b84b351a689d470f8108d0e0694210"}, - {file = "rpds_py-0.19.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5b9fc03bf76a94065299d4a2ecd8dfbae4ae8e2e8098bbfa6ab6413ca267709"}, - {file = "rpds_py-0.19.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5a4b07cdf3f84310c08c1de2c12ddadbb7a77568bcb16e95489f9c81074322ed"}, - {file = "rpds_py-0.19.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba0ed0dc6763d8bd6e5de5cf0d746d28e706a10b615ea382ac0ab17bb7388633"}, - {file = "rpds_py-0.19.0-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:474bc83233abdcf2124ed3f66230a1c8435896046caa4b0b5ab6013c640803cc"}, - {file = "rpds_py-0.19.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:329c719d31362355a96b435f4653e3b4b061fcc9eba9f91dd40804ca637d914e"}, - {file = "rpds_py-0.19.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ef9101f3f7b59043a34f1dccbb385ca760467590951952d6701df0da9893ca0c"}, - {file = "rpds_py-0.19.0-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:0121803b0f424ee2109d6e1f27db45b166ebaa4b32ff47d6aa225642636cd834"}, - {file = "rpds_py-0.19.0-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:8344127403dea42f5970adccf6c5957a71a47f522171fafaf4c6ddb41b61703a"}, - {file = "rpds_py-0.19.0-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:443cec402ddd650bb2b885113e1dcedb22b1175c6be223b14246a714b61cd521"}, - {file = "rpds_py-0.19.0.tar.gz", hash = "sha256:4fdc9afadbeb393b4bbbad75481e0ea78e4469f2e1d713a90811700830b553a9"}, + {file = "rpds_py-0.20.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3ad0fda1635f8439cde85c700f964b23ed5fc2d28016b32b9ee5fe30da5c84e2"}, + {file = "rpds_py-0.20.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9bb4a0d90fdb03437c109a17eade42dfbf6190408f29b2744114d11586611d6f"}, + {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6377e647bbfd0a0b159fe557f2c6c602c159fc752fa316572f012fc0bf67150"}, + {file = 
"rpds_py-0.20.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb851b7df9dda52dc1415ebee12362047ce771fc36914586b2e9fcbd7d293b3e"}, + {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1e0f80b739e5a8f54837be5d5c924483996b603d5502bfff79bf33da06164ee2"}, + {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5a8c94dad2e45324fc74dce25e1645d4d14df9a4e54a30fa0ae8bad9a63928e3"}, + {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8e604fe73ba048c06085beaf51147eaec7df856824bfe7b98657cf436623daf"}, + {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:df3de6b7726b52966edf29663e57306b23ef775faf0ac01a3e9f4012a24a4140"}, + {file = "rpds_py-0.20.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cf258ede5bc22a45c8e726b29835b9303c285ab46fc7c3a4cc770736b5304c9f"}, + {file = "rpds_py-0.20.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:55fea87029cded5df854ca7e192ec7bdb7ecd1d9a3f63d5c4eb09148acf4a7ce"}, + {file = "rpds_py-0.20.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ae94bd0b2f02c28e199e9bc51485d0c5601f58780636185660f86bf80c89af94"}, + {file = "rpds_py-0.20.0-cp310-none-win32.whl", hash = "sha256:28527c685f237c05445efec62426d285e47a58fb05ba0090a4340b73ecda6dee"}, + {file = "rpds_py-0.20.0-cp310-none-win_amd64.whl", hash = "sha256:238a2d5b1cad28cdc6ed15faf93a998336eb041c4e440dd7f902528b8891b399"}, + {file = "rpds_py-0.20.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:ac2f4f7a98934c2ed6505aead07b979e6f999389f16b714448fb39bbaa86a489"}, + {file = "rpds_py-0.20.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:220002c1b846db9afd83371d08d239fdc865e8f8c5795bbaec20916a76db3318"}, + {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d7919548df3f25374a1f5d01fbcd38dacab338ef5f33e044744b5c36729c8db"}, + {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:758406267907b3781beee0f0edfe4a179fbd97c0be2e9b1154d7f0a1279cf8e5"}, + {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3d61339e9f84a3f0767b1995adfb171a0d00a1185192718a17af6e124728e0f5"}, + {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1259c7b3705ac0a0bd38197565a5d603218591d3f6cee6e614e380b6ba61c6f6"}, + {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c1dc0f53856b9cc9a0ccca0a7cc61d3d20a7088201c0937f3f4048c1718a209"}, + {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7e60cb630f674a31f0368ed32b2a6b4331b8350d67de53c0359992444b116dd3"}, + {file = "rpds_py-0.20.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:dbe982f38565bb50cb7fb061ebf762c2f254ca3d8c20d4006878766e84266272"}, + {file = "rpds_py-0.20.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:514b3293b64187172bc77c8fb0cdae26981618021053b30d8371c3a902d4d5ad"}, + {file = "rpds_py-0.20.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d0a26ffe9d4dd35e4dfdd1e71f46401cff0181c75ac174711ccff0459135fa58"}, + {file = "rpds_py-0.20.0-cp311-none-win32.whl", hash = "sha256:89c19a494bf3ad08c1da49445cc5d13d8fefc265f48ee7e7556839acdacf69d0"}, + {file = "rpds_py-0.20.0-cp311-none-win_amd64.whl", hash = 
"sha256:c638144ce971df84650d3ed0096e2ae7af8e62ecbbb7b201c8935c370df00a2c"}, + {file = "rpds_py-0.20.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a84ab91cbe7aab97f7446652d0ed37d35b68a465aeef8fc41932a9d7eee2c1a6"}, + {file = "rpds_py-0.20.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:56e27147a5a4c2c21633ff8475d185734c0e4befd1c989b5b95a5d0db699b21b"}, + {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2580b0c34583b85efec8c5c5ec9edf2dfe817330cc882ee972ae650e7b5ef739"}, + {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b80d4a7900cf6b66bb9cee5c352b2d708e29e5a37fe9bf784fa97fc11504bf6c"}, + {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:50eccbf054e62a7b2209b28dc7a22d6254860209d6753e6b78cfaeb0075d7bee"}, + {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:49a8063ea4296b3a7e81a5dfb8f7b2d73f0b1c20c2af401fb0cdf22e14711a96"}, + {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea438162a9fcbee3ecf36c23e6c68237479f89f962f82dae83dc15feeceb37e4"}, + {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:18d7585c463087bddcfa74c2ba267339f14f2515158ac4db30b1f9cbdb62c8ef"}, + {file = "rpds_py-0.20.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d4c7d1a051eeb39f5c9547e82ea27cbcc28338482242e3e0b7768033cb083821"}, + {file = "rpds_py-0.20.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e4df1e3b3bec320790f699890d41c59d250f6beda159ea3c44c3f5bac1976940"}, + {file = "rpds_py-0.20.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2cf126d33a91ee6eedc7f3197b53e87a2acdac63602c0f03a02dd69e4b138174"}, + {file = "rpds_py-0.20.0-cp312-none-win32.whl", hash = "sha256:8bc7690f7caee50b04a79bf017a8d020c1f48c2a1077ffe172abec59870f1139"}, + {file = "rpds_py-0.20.0-cp312-none-win_amd64.whl", hash = "sha256:0e13e6952ef264c40587d510ad676a988df19adea20444c2b295e536457bc585"}, + {file = "rpds_py-0.20.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:aa9a0521aeca7d4941499a73ad7d4f8ffa3d1affc50b9ea11d992cd7eff18a29"}, + {file = "rpds_py-0.20.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4a1f1d51eccb7e6c32ae89243cb352389228ea62f89cd80823ea7dd1b98e0b91"}, + {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a86a9b96070674fc88b6f9f71a97d2c1d3e5165574615d1f9168ecba4cecb24"}, + {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6c8ef2ebf76df43f5750b46851ed1cdf8f109d7787ca40035fe19fbdc1acc5a7"}, + {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b74b25f024b421d5859d156750ea9a65651793d51b76a2e9238c05c9d5f203a9"}, + {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57eb94a8c16ab08fef6404301c38318e2c5a32216bf5de453e2714c964c125c8"}, + {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1940dae14e715e2e02dfd5b0f64a52e8374a517a1e531ad9412319dc3ac7879"}, + {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d20277fd62e1b992a50c43f13fbe13277a31f8c9f70d59759c88f644d66c619f"}, + {file = "rpds_py-0.20.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:06db23d43f26478303e954c34c75182356ca9aa7797d22c5345b16871ab9c45c"}, + 
{file = "rpds_py-0.20.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b2a5db5397d82fa847e4c624b0c98fe59d2d9b7cf0ce6de09e4d2e80f8f5b3f2"}, + {file = "rpds_py-0.20.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5a35df9f5548fd79cb2f52d27182108c3e6641a4feb0f39067911bf2adaa3e57"}, + {file = "rpds_py-0.20.0-cp313-none-win32.whl", hash = "sha256:fd2d84f40633bc475ef2d5490b9c19543fbf18596dcb1b291e3a12ea5d722f7a"}, + {file = "rpds_py-0.20.0-cp313-none-win_amd64.whl", hash = "sha256:9bc2d153989e3216b0559251b0c260cfd168ec78b1fac33dd485750a228db5a2"}, + {file = "rpds_py-0.20.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:f2fbf7db2012d4876fb0d66b5b9ba6591197b0f165db8d99371d976546472a24"}, + {file = "rpds_py-0.20.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1e5f3cd7397c8f86c8cc72d5a791071431c108edd79872cdd96e00abd8497d29"}, + {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce9845054c13696f7af7f2b353e6b4f676dab1b4b215d7fe5e05c6f8bb06f965"}, + {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c3e130fd0ec56cb76eb49ef52faead8ff09d13f4527e9b0c400307ff72b408e1"}, + {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b16aa0107ecb512b568244ef461f27697164d9a68d8b35090e9b0c1c8b27752"}, + {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa7f429242aae2947246587d2964fad750b79e8c233a2367f71b554e9447949c"}, + {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af0fc424a5842a11e28956e69395fbbeab2c97c42253169d87e90aac2886d751"}, + {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b8c00a3b1e70c1d3891f0db1b05292747f0dbcfb49c43f9244d04c70fbc40eb8"}, + {file = "rpds_py-0.20.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:40ce74fc86ee4645d0a225498d091d8bc61f39b709ebef8204cb8b5a464d3c0e"}, + {file = "rpds_py-0.20.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:4fe84294c7019456e56d93e8ababdad5a329cd25975be749c3f5f558abb48253"}, + {file = "rpds_py-0.20.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:338ca4539aad4ce70a656e5187a3a31c5204f261aef9f6ab50e50bcdffaf050a"}, + {file = "rpds_py-0.20.0-cp38-none-win32.whl", hash = "sha256:54b43a2b07db18314669092bb2de584524d1ef414588780261e31e85846c26a5"}, + {file = "rpds_py-0.20.0-cp38-none-win_amd64.whl", hash = "sha256:a1862d2d7ce1674cffa6d186d53ca95c6e17ed2b06b3f4c476173565c862d232"}, + {file = "rpds_py-0.20.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:3fde368e9140312b6e8b6c09fb9f8c8c2f00999d1823403ae90cc00480221b22"}, + {file = "rpds_py-0.20.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9824fb430c9cf9af743cf7aaf6707bf14323fb51ee74425c380f4c846ea70789"}, + {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:11ef6ce74616342888b69878d45e9f779b95d4bd48b382a229fe624a409b72c5"}, + {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c52d3f2f82b763a24ef52f5d24358553e8403ce05f893b5347098014f2d9eff2"}, + {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9d35cef91e59ebbeaa45214861874bc6f19eb35de96db73e467a8358d701a96c"}, + {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d72278a30111e5b5525c1dd96120d9e958464316f55adb030433ea905866f4de"}, + {file = 
"rpds_py-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4c29cbbba378759ac5786730d1c3cb4ec6f8ababf5c42a9ce303dc4b3d08cda"}, + {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6632f2d04f15d1bd6fe0eedd3b86d9061b836ddca4c03d5cf5c7e9e6b7c14580"}, + {file = "rpds_py-0.20.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:d0b67d87bb45ed1cd020e8fbf2307d449b68abc45402fe1a4ac9e46c3c8b192b"}, + {file = "rpds_py-0.20.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:ec31a99ca63bf3cd7f1a5ac9fe95c5e2d060d3c768a09bc1d16e235840861420"}, + {file = "rpds_py-0.20.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:22e6c9976e38f4d8c4a63bd8a8edac5307dffd3ee7e6026d97f3cc3a2dc02a0b"}, + {file = "rpds_py-0.20.0-cp39-none-win32.whl", hash = "sha256:569b3ea770c2717b730b61998b6c54996adee3cef69fc28d444f3e7920313cf7"}, + {file = "rpds_py-0.20.0-cp39-none-win_amd64.whl", hash = "sha256:e6900ecdd50ce0facf703f7a00df12374b74bbc8ad9fe0f6559947fb20f82364"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:617c7357272c67696fd052811e352ac54ed1d9b49ab370261a80d3b6ce385045"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:9426133526f69fcaba6e42146b4e12d6bc6c839b8b555097020e2b78ce908dcc"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:deb62214c42a261cb3eb04d474f7155279c1a8a8c30ac89b7dcb1721d92c3c02"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fcaeb7b57f1a1e071ebd748984359fef83ecb026325b9d4ca847c95bc7311c92"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d454b8749b4bd70dd0a79f428731ee263fa6995f83ccb8bada706e8d1d3ff89d"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d807dc2051abe041b6649681dce568f8e10668e3c1c6543ebae58f2d7e617855"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3c20f0ddeb6e29126d45f89206b8291352b8c5b44384e78a6499d68b52ae511"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b7f19250ceef892adf27f0399b9e5afad019288e9be756d6919cb58892129f51"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:4f1ed4749a08379555cebf4650453f14452eaa9c43d0a95c49db50c18b7da075"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:dcedf0b42bcb4cfff4101d7771a10532415a6106062f005ab97d1d0ab5681c60"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:39ed0d010457a78f54090fafb5d108501b5aa5604cc22408fc1c0c77eac14344"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:bb273176be34a746bdac0b0d7e4e2c467323d13640b736c4c477881a3220a989"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f918a1a130a6dfe1d7fe0f105064141342e7dd1611f2e6a21cd2f5c8cb1cfb3e"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:f60012a73aa396be721558caa3a6fd49b3dd0033d1675c6d59c4502e870fcf0c"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d2b1ad682a3dfda2a4e8ad8572f3100f95fad98cb99faf37ff0ddfe9cbf9d03"}, + {file = 
"rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:614fdafe9f5f19c63ea02817fa4861c606a59a604a77c8cdef5aa01d28b97921"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fa518bcd7600c584bf42e6617ee8132869e877db2f76bcdc281ec6a4113a53ab"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f0475242f447cc6cb8a9dd486d68b2ef7fbee84427124c232bff5f63b1fe11e5"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f90a4cd061914a60bd51c68bcb4357086991bd0bb93d8aa66a6da7701370708f"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:def7400461c3a3f26e49078302e1c1b38f6752342c77e3cf72ce91ca69fb1bc1"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:65794e4048ee837494aea3c21a28ad5fc080994dfba5b036cf84de37f7ad5074"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:faefcc78f53a88f3076b7f8be0a8f8d35133a3ecf7f3770895c25f8813460f08"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:5b4f105deeffa28bbcdff6c49b34e74903139afa690e35d2d9e3c2c2fba18cec"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:fdfc3a892927458d98f3d55428ae46b921d1f7543b89382fdb483f5640daaec8"}, + {file = "rpds_py-0.20.0.tar.gz", hash = "sha256:d72a210824facfdaf8768cf2d7ca25a042c30320b3020de2fa04640920d4e121"}, ] [[package]] @@ -5382,51 +5446,37 @@ python-versions = ">=3.6" files = [ {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b42169467c42b692c19cf539c38d4602069d8c1505e97b86387fcf7afb766e1d"}, {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:07238db9cbdf8fc1e9de2489a4f68474e70dffcb32232db7c08fa61ca0c7c462"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:d92f81886165cb14d7b067ef37e142256f1c6a90a65cd156b063a43da1708cfd"}, {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:fff3573c2db359f091e1589c3d7c5fc2f86f5bdb6f24252c2d8e539d4e45f412"}, - {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux_2_24_aarch64.whl", hash = "sha256:aa2267c6a303eb483de8d02db2871afb5c5fc15618d894300b88958f729ad74f"}, - {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:840f0c7f194986a63d2c2465ca63af8ccbbc90ab1c6001b1978f05119b5e7334"}, - {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:024cfe1fc7c7f4e1aff4a81e718109e13409767e4f871443cbff3dba3578203d"}, {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win32.whl", hash = "sha256:c69212f63169ec1cfc9bb44723bf2917cbbd8f6191a00ef3410f5a7fe300722d"}, {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win_amd64.whl", hash = "sha256:cabddb8d8ead485e255fe80429f833172b4cadf99274db39abc080e068cbcc31"}, {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:bef08cd86169d9eafb3ccb0a39edb11d8e25f3dae2b28f5c52fd997521133069"}, {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:b16420e621d26fdfa949a8b4b47ade8810c56002f5389970db4ddda51dbff248"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:b5edda50e5e9e15e54a6a8a0070302b00c518a9d32accc2346ad6c984aacd279"}, {file = 
"ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:25c515e350e5b739842fc3228d662413ef28f295791af5e5110b543cf0b57d9b"}, - {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux_2_24_aarch64.whl", hash = "sha256:1707814f0d9791df063f8c19bb51b0d1278b8e9a2353abbb676c2f685dee6afe"}, - {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:46d378daaac94f454b3a0e3d8d78cafd78a026b1d71443f4966c696b48a6d899"}, - {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:09b055c05697b38ecacb7ac50bdab2240bfca1a0c4872b0fd309bb07dc9aa3a9"}, {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win32.whl", hash = "sha256:53a300ed9cea38cf5a2a9b069058137c2ca1ce658a874b79baceb8f892f915a7"}, {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win_amd64.whl", hash = "sha256:c2a72e9109ea74e511e29032f3b670835f8a59bbdc9ce692c5b4ed91ccf1eedb"}, {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:ebc06178e8821efc9692ea7544aa5644217358490145629914d8020042c24aa1"}, {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:edaef1c1200c4b4cb914583150dcaa3bc30e592e907c01117c08b13a07255ec2"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:7048c338b6c86627afb27faecf418768acb6331fc24cfa56c93e8c9780f815fa"}, {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d176b57452ab5b7028ac47e7b3cf644bcfdc8cacfecf7e71759f7f51a59e5c92"}, - {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux_2_24_aarch64.whl", hash = "sha256:1dc67314e7e1086c9fdf2680b7b6c2be1c0d8e3a8279f2e993ca2a7545fecf62"}, - {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3213ece08ea033eb159ac52ae052a4899b56ecc124bb80020d9bbceeb50258e9"}, - {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:aab7fd643f71d7946f2ee58cc88c9b7bfc97debd71dcc93e03e2d174628e7e2d"}, - {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-win32.whl", hash = "sha256:5c365d91c88390c8d0a8545df0b5857172824b1c604e867161e6b3d59a827eaa"}, - {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-win_amd64.whl", hash = "sha256:1758ce7d8e1a29d23de54a16ae867abd370f01b5a69e1a3ba75223eaa3ca1a1b"}, {file = "ruamel.yaml.clib-0.2.8-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a5aa27bad2bb83670b71683aae140a1f52b0857a2deff56ad3f6c13a017a26ed"}, {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c58ecd827313af6864893e7af0a3bb85fd529f862b6adbefe14643947cfe2942"}, {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_12_0_arm64.whl", hash = "sha256:f481f16baec5290e45aebdc2a5168ebc6d35189ae6fea7a58787613a25f6e875"}, - {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux_2_24_aarch64.whl", hash = "sha256:77159f5d5b5c14f7c34073862a6b7d34944075d9f93e681638f6d753606c6ce6"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:3fcc54cb0c8b811ff66082de1680b4b14cf8a81dce0d4fbf665c2265a81e07a1"}, {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7f67a1ee819dc4562d444bbafb135832b0b909f81cc90f7aa00260968c9ca1b3"}, - {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4ecbf9c3e19f9562c7fdd462e8d18dd902a47ca046a2e64dba80699f0b6c09b7"}, - {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:87ea5ff66d8064301a154b3933ae406b0863402a799b16e4a1d24d9fbbcbe0d3"}, {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-win32.whl", hash = "sha256:75e1ed13e1f9de23c5607fe6bd1aeaae21e523b32d83bb33918245361e9cc51b"}, {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-win_amd64.whl", hash = "sha256:3f215c5daf6a9d7bbed4a0a4f760f3113b10e82ff4c5c44bec20a68c8014f675"}, {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1b617618914cb00bf5c34d4357c37aa15183fa229b24767259657746c9077615"}, {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:a6a9ffd280b71ad062eae53ac1659ad86a17f59a0fdc7699fd9be40525153337"}, - {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux_2_24_aarch64.whl", hash = "sha256:305889baa4043a09e5b76f8e2a51d4ffba44259f6b4c72dec8ca56207d9c6fe1"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:665f58bfd29b167039f714c6998178d27ccd83984084c286110ef26b230f259f"}, {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:700e4ebb569e59e16a976857c8798aee258dceac7c7d6b50cab63e080058df91"}, - {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e2b4c44b60eadec492926a7270abb100ef9f72798e18743939bdbf037aab8c28"}, - {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e79e5db08739731b0ce4850bed599235d601701d5694c36570a99a0c5ca41a9d"}, {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-win32.whl", hash = "sha256:955eae71ac26c1ab35924203fda6220f84dce57d6d7884f189743e2abe3a9fbe"}, {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-win_amd64.whl", hash = "sha256:56f4252222c067b4ce51ae12cbac231bce32aee1d33fbfc9d17e5b8d6966c312"}, {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:03d1162b6d1df1caa3a4bd27aa51ce17c9afc2046c31b0ad60a0a96ec22f8001"}, {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:bba64af9fa9cebe325a62fa398760f5c7206b215201b0ec825005f1b18b9bccf"}, - {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux_2_24_aarch64.whl", hash = "sha256:a1a45e0bb052edf6a1d3a93baef85319733a888363938e1fc9924cb00c8df24c"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:9eb5dee2772b0f704ca2e45b1713e4e5198c18f515b52743576d196348f374d3"}, {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:da09ad1c359a728e112d60116f626cc9f29730ff3e0e7db72b9a2dbc2e4beed5"}, - {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:184565012b60405d93838167f425713180b949e9d8dd0bbc7b49f074407c5a8b"}, - {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a75879bacf2c987c003368cf14bed0ffe99e8e85acfa6c0bfffc21a090f16880"}, {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-win32.whl", hash = "sha256:84b554931e932c46f94ab306913ad7e11bba988104c5cff26d90d03f68258cd5"}, {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-win_amd64.whl", hash = "sha256:25ac8c08322002b06fa1d49d1646181f0b2c72f5cbc15a85e80b4c30a544bb15"}, {file = "ruamel.yaml.clib-0.2.8.tar.gz", hash = "sha256:beb2e0404003de9a4cab9753a8805a8fe9320ee6673136ed7f04255fe60bb512"}, @@ -5471,13 +5521,13 @@ urllib3 = "*" [[package]] name = "sentry-sdk" -version = "1.45.0" +version = "1.45.1" description = "Python client for Sentry (https://sentry.io)" optional = false python-versions = "*" files = [ - {file = "sentry-sdk-1.45.0.tar.gz", hash = "sha256:509aa9678c0512344ca886281766c2e538682f8acfa50fd8d405f8c417ad0625"}, - 
{file = "sentry_sdk-1.45.0-py2.py3-none-any.whl", hash = "sha256:1ce29e30240cc289a027011103a8c83885b15ef2f316a60bcc7c5300afa144f1"}, + {file = "sentry_sdk-1.45.1-py2.py3-none-any.whl", hash = "sha256:608887855ccfe39032bfd03936e3a1c4f4fc99b3a4ac49ced54a4220de61c9c1"}, + {file = "sentry_sdk-1.45.1.tar.gz", hash = "sha256:a16c997c0f4e3df63c0fc5e4207ccb1ab37900433e0f72fef88315d317829a26"}, ] [package.dependencies] @@ -5551,109 +5601,121 @@ requests = ">=1.1.0" [[package]] name = "simplejson" -version = "3.19.2" +version = "3.19.3" description = "Simple, fast, extensible JSON encoder/decoder for Python" optional = false -python-versions = ">=2.5, !=3.0.*, !=3.1.*, !=3.2.*" -files = [ - {file = "simplejson-3.19.2-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:3471e95110dcaf901db16063b2e40fb394f8a9e99b3fe9ee3acc6f6ef72183a2"}, - {file = "simplejson-3.19.2-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:3194cd0d2c959062b94094c0a9f8780ffd38417a5322450a0db0ca1a23e7fbd2"}, - {file = "simplejson-3.19.2-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:8a390e56a7963e3946ff2049ee1eb218380e87c8a0e7608f7f8790ba19390867"}, - {file = "simplejson-3.19.2-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:1537b3dd62d8aae644f3518c407aa8469e3fd0f179cdf86c5992792713ed717a"}, - {file = "simplejson-3.19.2-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:a8617625369d2d03766413bff9e64310feafc9fc4f0ad2b902136f1a5cd8c6b0"}, - {file = "simplejson-3.19.2-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:2c433a412e96afb9a3ce36fa96c8e61a757af53e9c9192c97392f72871e18e69"}, - {file = "simplejson-3.19.2-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:f1c70249b15e4ce1a7d5340c97670a95f305ca79f376887759b43bb33288c973"}, - {file = "simplejson-3.19.2-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:287e39ba24e141b046812c880f4619d0ca9e617235d74abc27267194fc0c7835"}, - {file = "simplejson-3.19.2-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:6f0a0b41dd05eefab547576bed0cf066595f3b20b083956b1405a6f17d1be6ad"}, - {file = "simplejson-3.19.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:2f98d918f7f3aaf4b91f2b08c0c92b1774aea113334f7cde4fe40e777114dbe6"}, - {file = "simplejson-3.19.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7d74beca677623481810c7052926365d5f07393c72cbf62d6cce29991b676402"}, - {file = "simplejson-3.19.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7f2398361508c560d0bf1773af19e9fe644e218f2a814a02210ac2c97ad70db0"}, - {file = "simplejson-3.19.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ad331349b0b9ca6da86064a3599c425c7a21cd41616e175ddba0866da32df48"}, - {file = "simplejson-3.19.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:332c848f02d71a649272b3f1feccacb7e4f7e6de4a2e6dc70a32645326f3d428"}, - {file = "simplejson-3.19.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:25785d038281cd106c0d91a68b9930049b6464288cea59ba95b35ee37c2d23a5"}, - {file = "simplejson-3.19.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18955c1da6fc39d957adfa346f75226246b6569e096ac9e40f67d102278c3bcb"}, - {file = "simplejson-3.19.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:11cc3afd8160d44582543838b7e4f9aa5e97865322844b75d51bf4e0e413bb3e"}, - {file = "simplejson-3.19.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b01fda3e95d07a6148702a641e5e293b6da7863f8bc9b967f62db9461330562c"}, - {file = 
"simplejson-3.19.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:778331444917108fa8441f59af45886270d33ce8a23bfc4f9b192c0b2ecef1b3"}, - {file = "simplejson-3.19.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9eb117db8d7ed733a7317c4215c35993b815bf6aeab67523f1f11e108c040672"}, - {file = "simplejson-3.19.2-cp310-cp310-win32.whl", hash = "sha256:39b6d79f5cbfa3eb63a869639cfacf7c41d753c64f7801efc72692c1b2637ac7"}, - {file = "simplejson-3.19.2-cp310-cp310-win_amd64.whl", hash = "sha256:5675e9d8eeef0aa06093c1ff898413ade042d73dc920a03e8cea2fb68f62445a"}, - {file = "simplejson-3.19.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ed628c1431100b0b65387419551e822987396bee3c088a15d68446d92f554e0c"}, - {file = "simplejson-3.19.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:adcb3332979cbc941b8fff07181f06d2b608625edc0a4d8bc3ffc0be414ad0c4"}, - {file = "simplejson-3.19.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:08889f2f597ae965284d7b52a5c3928653a9406d88c93e3161180f0abc2433ba"}, - {file = "simplejson-3.19.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef7938a78447174e2616be223f496ddccdbf7854f7bf2ce716dbccd958cc7d13"}, - {file = "simplejson-3.19.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a970a2e6d5281d56cacf3dc82081c95c1f4da5a559e52469287457811db6a79b"}, - {file = "simplejson-3.19.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:554313db34d63eac3b3f42986aa9efddd1a481169c12b7be1e7512edebff8eaf"}, - {file = "simplejson-3.19.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d36081c0b1c12ea0ed62c202046dca11438bee48dd5240b7c8de8da62c620e9"}, - {file = "simplejson-3.19.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a3cd18e03b0ee54ea4319cdcce48357719ea487b53f92a469ba8ca8e39df285e"}, - {file = "simplejson-3.19.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:66e5dc13bfb17cd6ee764fc96ccafd6e405daa846a42baab81f4c60e15650414"}, - {file = "simplejson-3.19.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:972a7833d4a1fcf7a711c939e315721a88b988553fc770a5b6a5a64bd6ebeba3"}, - {file = "simplejson-3.19.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3e74355cb47e0cd399ead3477e29e2f50e1540952c22fb3504dda0184fc9819f"}, - {file = "simplejson-3.19.2-cp311-cp311-win32.whl", hash = "sha256:1dd4f692304854352c3e396e9b5f0a9c9e666868dd0bdc784e2ac4c93092d87b"}, - {file = "simplejson-3.19.2-cp311-cp311-win_amd64.whl", hash = "sha256:9300aee2a8b5992d0f4293d88deb59c218989833e3396c824b69ba330d04a589"}, - {file = "simplejson-3.19.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b8d940fd28eb34a7084877747a60873956893e377f15a32ad445fe66c972c3b8"}, - {file = "simplejson-3.19.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:4969d974d9db826a2c07671273e6b27bc48e940738d768fa8f33b577f0978378"}, - {file = "simplejson-3.19.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c594642d6b13d225e10df5c16ee15b3398e21a35ecd6aee824f107a625690374"}, - {file = "simplejson-3.19.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2f5a398b5e77bb01b23d92872255e1bcb3c0c719a3be40b8df146570fe7781a"}, - {file = "simplejson-3.19.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:176a1b524a3bd3314ed47029a86d02d5a95cc0bee15bd3063a1e1ec62b947de6"}, - {file = 
"simplejson-3.19.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3c7363a8cb8c5238878ec96c5eb0fc5ca2cb11fc0c7d2379863d342c6ee367a"}, - {file = "simplejson-3.19.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:346820ae96aa90c7d52653539a57766f10f33dd4be609206c001432b59ddf89f"}, - {file = "simplejson-3.19.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:de9a2792612ec6def556d1dc621fd6b2073aff015d64fba9f3e53349ad292734"}, - {file = "simplejson-3.19.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:1c768e7584c45094dca4b334af361e43b0aaa4844c04945ac7d43379eeda9bc2"}, - {file = "simplejson-3.19.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:9652e59c022e62a5b58a6f9948b104e5bb96d3b06940c6482588176f40f4914b"}, - {file = "simplejson-3.19.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9c1a4393242e321e344213a90a1e3bf35d2f624aa8b8f6174d43e3c6b0e8f6eb"}, - {file = "simplejson-3.19.2-cp312-cp312-win32.whl", hash = "sha256:7cb98be113911cb0ad09e5523d0e2a926c09a465c9abb0784c9269efe4f95917"}, - {file = "simplejson-3.19.2-cp312-cp312-win_amd64.whl", hash = "sha256:6779105d2fcb7fcf794a6a2a233787f6bbd4731227333a072d8513b252ed374f"}, - {file = "simplejson-3.19.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:061e81ea2d62671fa9dea2c2bfbc1eec2617ae7651e366c7b4a2baf0a8c72cae"}, - {file = "simplejson-3.19.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4280e460e51f86ad76dc456acdbfa9513bdf329556ffc8c49e0200878ca57816"}, - {file = "simplejson-3.19.2-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:11c39fbc4280d7420684494373b7c5904fa72a2b48ef543a56c2d412999c9e5d"}, - {file = "simplejson-3.19.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bccb3e88ec26ffa90f72229f983d3a5d1155e41a1171190fa723d4135523585b"}, - {file = "simplejson-3.19.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1bb5b50dc6dd671eb46a605a3e2eb98deb4a9af787a08fcdddabe5d824bb9664"}, - {file = "simplejson-3.19.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:d94245caa3c61f760c4ce4953cfa76e7739b6f2cbfc94cc46fff6c050c2390c5"}, - {file = "simplejson-3.19.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:d0e5ffc763678d48ecc8da836f2ae2dd1b6eb2d27a48671066f91694e575173c"}, - {file = "simplejson-3.19.2-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:d222a9ed082cd9f38b58923775152003765016342a12f08f8c123bf893461f28"}, - {file = "simplejson-3.19.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:8434dcdd347459f9fd9c526117c01fe7ca7b016b6008dddc3c13471098f4f0dc"}, - {file = "simplejson-3.19.2-cp36-cp36m-win32.whl", hash = "sha256:c9ac1c2678abf9270e7228133e5b77c6c3c930ad33a3c1dfbdd76ff2c33b7b50"}, - {file = "simplejson-3.19.2-cp36-cp36m-win_amd64.whl", hash = "sha256:92c4a4a2b1f4846cd4364855cbac83efc48ff5a7d7c06ba014c792dd96483f6f"}, - {file = "simplejson-3.19.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0d551dc931638e2102b8549836a1632e6e7cf620af3d093a7456aa642bff601d"}, - {file = "simplejson-3.19.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:73a8a4653f2e809049999d63530180d7b5a344b23a793502413ad1ecea9a0290"}, - {file = "simplejson-3.19.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:40847f617287a38623507d08cbcb75d51cf9d4f9551dd6321df40215128325a3"}, - {file = "simplejson-3.19.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be893258d5b68dd3a8cba8deb35dc6411db844a9d35268a8d3793b9d9a256f80"}, - {file = "simplejson-3.19.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9eb3cff1b7d71aa50c89a0536f469cb8d6dcdd585d8f14fb8500d822f3bdee4"}, - {file = "simplejson-3.19.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d0f402e787e6e7ee7876c8b05e2fe6464820d9f35ba3f172e95b5f8b699f6c7f"}, - {file = "simplejson-3.19.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:fbbcc6b0639aa09b9649f36f1bcb347b19403fe44109948392fbb5ea69e48c3e"}, - {file = "simplejson-3.19.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:2fc697be37585eded0c8581c4788fcfac0e3f84ca635b73a5bf360e28c8ea1a2"}, - {file = "simplejson-3.19.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0b0a3eb6dd39cce23801a50c01a0976971498da49bc8a0590ce311492b82c44b"}, - {file = "simplejson-3.19.2-cp37-cp37m-win32.whl", hash = "sha256:49f9da0d6cd17b600a178439d7d2d57c5ef01f816b1e0e875e8e8b3b42db2693"}, - {file = "simplejson-3.19.2-cp37-cp37m-win_amd64.whl", hash = "sha256:c87c22bd6a987aca976e3d3e23806d17f65426191db36d40da4ae16a6a494cbc"}, - {file = "simplejson-3.19.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:9e4c166f743bb42c5fcc60760fb1c3623e8fda94f6619534217b083e08644b46"}, - {file = "simplejson-3.19.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0a48679310e1dd5c9f03481799311a65d343748fe86850b7fb41df4e2c00c087"}, - {file = "simplejson-3.19.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c0521e0f07cb56415fdb3aae0bbd8701eb31a9dfef47bb57206075a0584ab2a2"}, - {file = "simplejson-3.19.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d2d5119b1d7a1ed286b8af37357116072fc96700bce3bec5bb81b2e7057ab41"}, - {file = "simplejson-3.19.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2c1467d939932901a97ba4f979e8f2642415fcf02ea12f53a4e3206c9c03bc17"}, - {file = "simplejson-3.19.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:49aaf4546f6023c44d7e7136be84a03a4237f0b2b5fb2b17c3e3770a758fc1a0"}, - {file = "simplejson-3.19.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60848ab779195b72382841fc3fa4f71698a98d9589b0a081a9399904487b5832"}, - {file = "simplejson-3.19.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0436a70d8eb42bea4fe1a1c32d371d9bb3b62c637969cb33970ad624d5a3336a"}, - {file = "simplejson-3.19.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:49e0e3faf3070abdf71a5c80a97c1afc059b4f45a5aa62de0c2ca0444b51669b"}, - {file = "simplejson-3.19.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:ff836cd4041e16003549449cc0a5e372f6b6f871eb89007ab0ee18fb2800fded"}, - {file = "simplejson-3.19.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:3848427b65e31bea2c11f521b6fc7a3145d6e501a1038529da2391aff5970f2f"}, - {file = "simplejson-3.19.2-cp38-cp38-win32.whl", hash = "sha256:3f39bb1f6e620f3e158c8b2eaf1b3e3e54408baca96a02fe891794705e788637"}, - {file = "simplejson-3.19.2-cp38-cp38-win_amd64.whl", hash = "sha256:0405984f3ec1d3f8777c4adc33eac7ab7a3e629f3b1c05fdded63acc7cf01137"}, - {file = "simplejson-3.19.2-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:445a96543948c011a3a47c8e0f9d61e9785df2544ea5be5ab3bc2be4bd8a2565"}, - {file = "simplejson-3.19.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4a8c3cc4f9dfc33220246760358c8265dad6e1104f25f0077bbca692d616d358"}, - {file = "simplejson-3.19.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:af9c7e6669c4d0ad7362f79cb2ab6784d71147503e62b57e3d95c4a0f222c01c"}, - {file = "simplejson-3.19.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:064300a4ea17d1cd9ea1706aa0590dcb3be81112aac30233823ee494f02cb78a"}, - {file = "simplejson-3.19.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9453419ea2ab9b21d925d0fd7e3a132a178a191881fab4169b6f96e118cc25bb"}, - {file = "simplejson-3.19.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9e038c615b3906df4c3be8db16b3e24821d26c55177638ea47b3f8f73615111c"}, - {file = "simplejson-3.19.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16ca9c90da4b1f50f089e14485db8c20cbfff2d55424062791a7392b5a9b3ff9"}, - {file = "simplejson-3.19.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1018bd0d70ce85f165185d2227c71e3b1e446186f9fa9f971b69eee223e1e3cd"}, - {file = "simplejson-3.19.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:e8dd53a8706b15bc0e34f00e6150fbefb35d2fd9235d095b4f83b3c5ed4fa11d"}, - {file = "simplejson-3.19.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:2d022b14d7758bfb98405672953fe5c202ea8a9ccf9f6713c5bd0718eba286fd"}, - {file = "simplejson-3.19.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:febffa5b1eda6622d44b245b0685aff6fb555ce0ed734e2d7b1c3acd018a2cff"}, - {file = "simplejson-3.19.2-cp39-cp39-win32.whl", hash = "sha256:4edcd0bf70087b244ba77038db23cd98a1ace2f91b4a3ecef22036314d77ac23"}, - {file = "simplejson-3.19.2-cp39-cp39-win_amd64.whl", hash = "sha256:aad7405c033d32c751d98d3a65801e2797ae77fac284a539f6c3a3e13005edc4"}, - {file = "simplejson-3.19.2-py3-none-any.whl", hash = "sha256:bcedf4cae0d47839fee7de344f96b5694ca53c786f28b5f773d4f0b265a159eb"}, - {file = "simplejson-3.19.2.tar.gz", hash = "sha256:9eb442a2442ce417801c912df68e1f6ccfcd41577ae7274953ab3ad24ef7d82c"}, +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.5" +files = [ + {file = "simplejson-3.19.3-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:f39caec26007a2d0efab6b8b1d74873ede9351962707afab622cc2285dd26ed0"}, + {file = "simplejson-3.19.3-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:83c87706265ae3028e8460d08b05f30254c569772e859e5ba61fe8af2c883468"}, + {file = "simplejson-3.19.3-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:0b5ddd2c7d1d3f4d23224bc8a04bbf1430ae9a8149c05b90f8fc610f7f857a23"}, + {file = "simplejson-3.19.3-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:ad0e0b1ce9bd3edb5cf64b5b5b76eacbfdac8c5367153aeeec8a8b1407f68342"}, + {file = "simplejson-3.19.3-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:93be280fc69a952c76e261036312c20b910e7fa9e234f1d89bdfe3fa34f8a023"}, + {file = "simplejson-3.19.3-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:6d43e24b88c80f997081503f693be832fc90854f278df277dd54f8a4c847ab61"}, + {file = "simplejson-3.19.3-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:2876027ebdd599d730d36464debe84619b0368e9a642ca6e7c601be55aed439e"}, + {file = "simplejson-3.19.3-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:0766ca6222b410e08e0053a0dda3606cafb3973d5d00538307f631bb59743396"}, + {file = 
"simplejson-3.19.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:50d8b742d74c449c4dcac570d08ce0f21f6a149d2d9cf7652dbf2ba9a1bc729a"}, + {file = "simplejson-3.19.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dd011fc3c1d88b779645495fdb8189fb318a26981eebcce14109460e062f209b"}, + {file = "simplejson-3.19.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:637c4d4b81825c1f4d651e56210bd35b5604034b192b02d2d8f17f7ce8c18f42"}, + {file = "simplejson-3.19.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f56eb03bc9e432bb81adc8ecff2486d39feb371abb442964ffb44f6db23b332"}, + {file = "simplejson-3.19.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ef59a53be400c1fad2c914b8d74c9d42384fed5174f9321dd021b7017fd40270"}, + {file = "simplejson-3.19.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:72e8abbc86fcac83629a030888b45fed3a404d54161118be52cb491cd6975d3e"}, + {file = "simplejson-3.19.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8efb03ca77bd7725dfacc9254df00d73e6f43013cf39bd37ef1a8ed0ebb5165"}, + {file = "simplejson-3.19.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:add8850db04b98507a8b62d248a326ecc8561e6d24336d1ca5c605bbfaab4cad"}, + {file = "simplejson-3.19.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:fc3dc9fb413fc34c396f52f4c87de18d0bd5023804afa8ab5cc224deeb6a9900"}, + {file = "simplejson-3.19.3-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:4dfa420bb9225dd33b6efdabde7c6a671b51150b9b1d9c4e5cd74d3b420b3fe1"}, + {file = "simplejson-3.19.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7b5c472099b39b274dcde27f1113db8d818c9aa3ba8f78cbb8ad04a4c1ac2118"}, + {file = "simplejson-3.19.3-cp310-cp310-win32.whl", hash = "sha256:817abad79241ed4a507b3caf4d3f2be5079f39d35d4c550a061988986bffd2ec"}, + {file = "simplejson-3.19.3-cp310-cp310-win_amd64.whl", hash = "sha256:dd5b9b1783e14803e362a558680d88939e830db2466f3fa22df5c9319f8eea94"}, + {file = "simplejson-3.19.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e88abff510dcff903a18d11c2a75f9964e768d99c8d147839913886144b2065e"}, + {file = "simplejson-3.19.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:934a50a614fb831614db5dbfba35127ee277624dda4d15895c957d2f5d48610c"}, + {file = "simplejson-3.19.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:212fce86a22188b0c7f53533b0f693ea9605c1a0f02c84c475a30616f55a744d"}, + {file = "simplejson-3.19.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d9e8f836688a8fabe6a6b41b334aa550a6823f7b4ac3d3712fc0ad8655be9a8"}, + {file = "simplejson-3.19.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:23228037dc5d41c36666384062904d74409a62f52283d9858fa12f4c22cffad1"}, + {file = "simplejson-3.19.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0791f64fed7d4abad639491f8a6b1ba56d3c604eb94b50f8697359b92d983f36"}, + {file = "simplejson-3.19.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4f614581b61a26fbbba232a1391f6cee82bc26f2abbb6a0b44a9bba25c56a1c"}, + {file = "simplejson-3.19.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1df0aaf1cb787fdf34484ed4a1f0c545efd8811f6028623290fef1a53694e597"}, + {file = "simplejson-3.19.3-cp311-cp311-musllinux_1_2_i686.whl", hash = 
"sha256:951095be8d4451a7182403354c22ec2de3e513e0cc40408b689af08d02611588"}, + {file = "simplejson-3.19.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:2a954b30810988feeabde843e3263bf187697e0eb5037396276db3612434049b"}, + {file = "simplejson-3.19.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c40df31a75de98db2cdfead6074d4449cd009e79f54c1ebe5e5f1f153c68ad20"}, + {file = "simplejson-3.19.3-cp311-cp311-win32.whl", hash = "sha256:7e2a098c21ad8924076a12b6c178965d88a0ad75d1de67e1afa0a66878f277a5"}, + {file = "simplejson-3.19.3-cp311-cp311-win_amd64.whl", hash = "sha256:c9bedebdc5fdad48af8783022bae307746d54006b783007d1d3c38e10872a2c6"}, + {file = "simplejson-3.19.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:66a0399e21c2112acacfebf3d832ebe2884f823b1c7e6d1363f2944f1db31a99"}, + {file = "simplejson-3.19.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6ef9383c5e05f445be60f1735c1816163c874c0b1ede8bb4390aff2ced34f333"}, + {file = "simplejson-3.19.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:42e5acf80d4d971238d4df97811286a044d720693092b20a56d5e56b7dcc5d09"}, + {file = "simplejson-3.19.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0b0efc7279d768db7c74d3d07f0b5c81280d16ae3fb14e9081dc903e8360771"}, + {file = "simplejson-3.19.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0552eb06e7234da892e1d02365cd2b7b2b1f8233aa5aabdb2981587b7cc92ea0"}, + {file = "simplejson-3.19.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5bf6a3b9a7d7191471b464fe38f684df10eb491ec9ea454003edb45a011ab187"}, + {file = "simplejson-3.19.3-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7017329ca8d4dca94ad5e59f496e5fc77630aecfc39df381ffc1d37fb6b25832"}, + {file = "simplejson-3.19.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:67a20641afebf4cfbcff50061f07daad1eace6e7b31d7622b6fa2c40d43900ba"}, + {file = "simplejson-3.19.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:dd6a7dabcc4c32daf601bc45e01b79175dde4b52548becea4f9545b0a4428169"}, + {file = "simplejson-3.19.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:08f9b443a94e72dd02c87098c96886d35790e79e46b24e67accafbf13b73d43b"}, + {file = "simplejson-3.19.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fa97278ae6614346b5ca41a45a911f37a3261b57dbe4a00602048652c862c28b"}, + {file = "simplejson-3.19.3-cp312-cp312-win32.whl", hash = "sha256:ef28c3b328d29b5e2756903aed888960bc5df39b4c2eab157ae212f70ed5bf74"}, + {file = "simplejson-3.19.3-cp312-cp312-win_amd64.whl", hash = "sha256:1e662336db50ad665777e6548b5076329a94a0c3d4a0472971c588b3ef27de3a"}, + {file = "simplejson-3.19.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:0959e6cb62e3994b5a40e31047ff97ef5c4138875fae31659bead691bed55896"}, + {file = "simplejson-3.19.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7a7bfad839c624e139a4863007233a3f194e7c51551081f9789cba52e4da5167"}, + {file = "simplejson-3.19.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:afab2f7f2486a866ff04d6d905e9386ca6a231379181a3838abce1f32fbdcc37"}, + {file = "simplejson-3.19.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d00313681015ac498e1736b304446ee6d1c72c5b287cd196996dad84369998f7"}, + {file = "simplejson-3.19.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d936ae682d5b878af9d9eb4d8bb1fdd5e41275c8eb59ceddb0aeed857bb264a2"}, + 
{file = "simplejson-3.19.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01c6657485393f2e9b8177c77a7634f13ebe70d5e6de150aae1677d91516ce6b"}, + {file = "simplejson-3.19.3-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a6a750d3c7461b1c47cfc6bba8d9e57a455e7c5f80057d2a82f738040dd1129"}, + {file = "simplejson-3.19.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ea7a4a998c87c5674a27089e022110a1a08a7753f21af3baf09efe9915c23c3c"}, + {file = "simplejson-3.19.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:6300680d83a399be2b8f3b0ef7ef90b35d2a29fe6e9c21438097e0938bbc1564"}, + {file = "simplejson-3.19.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:ab69f811a660c362651ae395eba8ce84f84c944cea0df5718ea0ba9d1e4e7252"}, + {file = "simplejson-3.19.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:256e09d0f94d9c3d177d9e95fd27a68c875a4baa2046633df387b86b652f5747"}, + {file = "simplejson-3.19.3-cp313-cp313-win32.whl", hash = "sha256:2c78293470313aefa9cfc5e3f75ca0635721fb016fb1121c1c5b0cb8cc74712a"}, + {file = "simplejson-3.19.3-cp313-cp313-win_amd64.whl", hash = "sha256:3bbcdc438dc1683b35f7a8dc100960c721f922f9ede8127f63bed7dfded4c64c"}, + {file = "simplejson-3.19.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:89b35433186e977fa86ff1fd179c1fadff39cfa3afa1648dab0b6ca53153acd9"}, + {file = "simplejson-3.19.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d43c2d7504eda566c50203cdc9dc043aff6f55f1b7dae0dcd79dfefef9159d1c"}, + {file = "simplejson-3.19.3-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6890ff9cf0bd2e1d487e2a8869ebd620a44684c0a9667fa5ee751d099d5d84c8"}, + {file = "simplejson-3.19.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1069143a8fb3905e1bc0696c62be7e3adf812e9f1976ac9ae15b05112ff57cc9"}, + {file = "simplejson-3.19.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb324bb903330cbb35d87cce367a12631cd5720afa06e5b9c906483970946da6"}, + {file = "simplejson-3.19.3-cp36-cp36m-musllinux_1_2_aarch64.whl", hash = "sha256:0a32859d45d7b85fb803bb68f6bee14526991a1190269116c33399fa0daf9bbf"}, + {file = "simplejson-3.19.3-cp36-cp36m-musllinux_1_2_i686.whl", hash = "sha256:23833ee7e791ec968b744dfee2a2d39df7152050051096caf4296506d75608d8"}, + {file = "simplejson-3.19.3-cp36-cp36m-musllinux_1_2_ppc64le.whl", hash = "sha256:d73efb03c5b39249c82488a994f0998f9e4399e3d085209d2120503305ba77a8"}, + {file = "simplejson-3.19.3-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:7923878b7a0142d39763ec2dbecff3053c1bedd3653585a8474666e420fe83f5"}, + {file = "simplejson-3.19.3-cp36-cp36m-win32.whl", hash = "sha256:7355c7203353c36d46c4e7b6055293b3d2be097bbc5e2874a2b8a7259f0325dd"}, + {file = "simplejson-3.19.3-cp36-cp36m-win_amd64.whl", hash = "sha256:d1b8b4d6379fe55f471914345fe6171d81a18649dacf3248abfc9c349b4442eb"}, + {file = "simplejson-3.19.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d36608557b4dcd7a62c29ad4cd7c5a1720bbf7dc942eff9dc42d2c542a5f042d"}, + {file = "simplejson-3.19.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7137e69c6781ecf23afab064be94a277236c9cba31aa48ff1a0ec3995c69171e"}, + {file = "simplejson-3.19.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:76f8c28fe2d426182405b18ddf3001fce47835a557dc15c3d8bdea01c03361da"}, + {file = "simplejson-3.19.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff7bc1bbdaa3e487c9469128bf39408e91f5573901cb852e03af378d3582c52d"}, + {file = "simplejson-3.19.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0782cb9bf827f0c488b6aa0f2819f618308a3caf2973cfd792e45d631bec4db"}, + {file = "simplejson-3.19.3-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:6fea0716c593dabb4392c4996d4e902a83b2428e6da82938cf28a523a11eb277"}, + {file = "simplejson-3.19.3-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:8f41bb5370b34f63171e65fdb00e12be1d83675cecb23e627df26f4c88dfc021"}, + {file = "simplejson-3.19.3-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:37105d1d708365b91165e1a6e505bdecc88637091348cf4b6adcdcb4f5a5fb8b"}, + {file = "simplejson-3.19.3-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:b9198c1f1f8910a3b86b60f4fe2556d9d28d3fefe35bffe6be509a27402e694d"}, + {file = "simplejson-3.19.3-cp37-cp37m-win32.whl", hash = "sha256:bc164f32dd9691e7082ce5df24b4cf8c6c394bbf9bdeeb5d843127cd07ab8ad2"}, + {file = "simplejson-3.19.3-cp37-cp37m-win_amd64.whl", hash = "sha256:1bd41f2cb1a2c57656ceff67b12d005cb255c728265e222027ad73193a04005a"}, + {file = "simplejson-3.19.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0733ecd95ae03ae718ec74aad818f5af5f3155d596f7b242acbc1621e765e5fb"}, + {file = "simplejson-3.19.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4a0710d1a5e41c4f829caa1572793dd3130c8d65c2b194c24ff29c4c305c26e0"}, + {file = "simplejson-3.19.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1a53a07320c5ff574d8b1a89c937ce33608832f166f39dff0581ac43dc979abd"}, + {file = "simplejson-3.19.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1773cabfba66a6337b547e45dafbd471b09487370bcab75bd28f626520410d29"}, + {file = "simplejson-3.19.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7c0104b4b7d2c75ccedbf1d9d5a3bd2daa75e51053935a44ba012e2fd4c43752"}, + {file = "simplejson-3.19.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c49eeb94b8f09dc8a5843c156a22b8bde6aa1ddc65ca8ddc62dddcc001e6a2d"}, + {file = "simplejson-3.19.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3dc5c1a85ff388e98ea877042daec3d157b6db0d85bac6ba5498034689793e7e"}, + {file = "simplejson-3.19.3-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:49549e3d81ab4a58424405aa545602674d8c35c20e986b42bb8668e782a94bac"}, + {file = "simplejson-3.19.3-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:e1a1452ad5723ff129b081e3c8aa4ba56b8734fee4223355ed7b815a7ece69bc"}, + {file = "simplejson-3.19.3-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:d0d5a63f1768fed7e78cf55712dee81f5a345e34d34224f3507ebf71df2b754d"}, + {file = "simplejson-3.19.3-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:7e062767ac165df9a46963f5735aa4eee0089ec1e48b3f2ec46182754b96f55e"}, + {file = "simplejson-3.19.3-cp38-cp38-win32.whl", hash = "sha256:56134bbafe458a7b21f6fddbf889d36bec6d903718f4430768e3af822f8e27c2"}, + {file = "simplejson-3.19.3-cp38-cp38-win_amd64.whl", hash = "sha256:bcde83a553a96dc7533736c547bddaa35414a2566ab0ecf7d3964fc4bdb84c11"}, + {file = "simplejson-3.19.3-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:b5587feda2b65a79da985ae6d116daf6428bf7489992badc29fc96d16cd27b05"}, + {file = "simplejson-3.19.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e0d2b00ecbcd1a3c5ea1abc8bb99a26508f758c1759fd01c3be482a3655a176f"}, + {file = "simplejson-3.19.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:32a3ada8f3ea41db35e6d37b86dade03760f804628ec22e4fe775b703d567426"}, + {file = "simplejson-3.19.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f455672f4738b0f47183c5896e3606cd65c9ddee3805a4d18e8c96aa3f47c84"}, + {file = "simplejson-3.19.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2b737a5fefedb8333fa50b8db3dcc9b1d18fd6c598f89fa7debff8b46bf4e511"}, + {file = "simplejson-3.19.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb47ee773ce67476a960e2db4a0a906680c54f662521550828c0cc57d0099426"}, + {file = "simplejson-3.19.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eed8cd98a7b24861da9d3d937f5fbfb6657350c547528a117297fe49e3960667"}, + {file = "simplejson-3.19.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:619756f1dd634b5bdf57d9a3914300526c3b348188a765e45b8b08eabef0c94e"}, + {file = "simplejson-3.19.3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:dd7230d061e755d60a4d5445bae854afe33444cdb182f3815cff26ac9fb29a15"}, + {file = "simplejson-3.19.3-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:101a3c8392028cd704a93c7cba8926594e775ca3c91e0bee82144e34190903f1"}, + {file = "simplejson-3.19.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1e557712fc79f251673aeb3fad3501d7d4da3a27eff0857af2e1d1afbbcf6685"}, + {file = "simplejson-3.19.3-cp39-cp39-win32.whl", hash = "sha256:0bc5544e3128891bf613b9f71813ee2ec9c11574806f74dd8bb84e5e95bf64a2"}, + {file = "simplejson-3.19.3-cp39-cp39-win_amd64.whl", hash = "sha256:06662392e4913dc8846d6a71a6d5de86db5fba244831abe1dd741d62a4136764"}, + {file = "simplejson-3.19.3-py3-none-any.whl", hash = "sha256:49cc4c7b940d43bd12bf87ec63f28cbc4964fc4e12c031cc8cd01650f43eb94e"}, + {file = "simplejson-3.19.3.tar.gz", hash = "sha256:8e086896c36210ab6050f2f9f095a5f1e03c83fa0e7f296d6cba425411364680"}, ] [[package]] @@ -5692,13 +5754,13 @@ files = [ [[package]] name = "soupsieve" -version = "2.5" +version = "2.6" description = "A modern CSS selector implementation for Beautiful Soup." 
optional = false python-versions = ">=3.8" files = [ - {file = "soupsieve-2.5-py3-none-any.whl", hash = "sha256:eaa337ff55a1579b6549dc679565eac1e3d000563bcb1c8ab0d0fefbc0c2cdc7"}, - {file = "soupsieve-2.5.tar.gz", hash = "sha256:5663d5a7b3bfaeee0bc4372e7fc48f9cff4940b3eec54a6451cc5299f1097690"}, + {file = "soupsieve-2.6-py3-none-any.whl", hash = "sha256:e72c4ff06e4fb6e4b5a9f0f55fe6e81514581fca1515028625d0f299c602ccc9"}, + {file = "soupsieve-2.6.tar.gz", hash = "sha256:e2e68417777af359ec65daac1057404a3c8a5455bb8abc36f1a9866ab1a51abb"}, ] [[package]] @@ -5748,49 +5810,49 @@ test = ["cython", "html5lib", "pytest (>=4.6)", "typed_ast"] [[package]] name = "sphinxcontrib-applehelp" -version = "1.0.8" +version = "2.0.0" description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" optional = false python-versions = ">=3.9" files = [ - {file = "sphinxcontrib_applehelp-1.0.8-py3-none-any.whl", hash = "sha256:cb61eb0ec1b61f349e5cc36b2028e9e7ca765be05e49641c97241274753067b4"}, - {file = "sphinxcontrib_applehelp-1.0.8.tar.gz", hash = "sha256:c40a4f96f3776c4393d933412053962fac2b84f4c99a7982ba42e09576a70619"}, + {file = "sphinxcontrib_applehelp-2.0.0-py3-none-any.whl", hash = "sha256:4cd3f0ec4ac5dd9c17ec65e9ab272c9b867ea77425228e68ecf08d6b28ddbdb5"}, + {file = "sphinxcontrib_applehelp-2.0.0.tar.gz", hash = "sha256:2f29ef331735ce958efa4734873f084941970894c6090408b079c61b2e1c06d1"}, ] [package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] +lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] standalone = ["Sphinx (>=5)"] test = ["pytest"] [[package]] name = "sphinxcontrib-devhelp" -version = "1.0.6" +version = "2.0.0" description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp documents" optional = false python-versions = ">=3.9" files = [ - {file = "sphinxcontrib_devhelp-1.0.6-py3-none-any.whl", hash = "sha256:6485d09629944511c893fa11355bda18b742b83a2b181f9a009f7e500595c90f"}, - {file = "sphinxcontrib_devhelp-1.0.6.tar.gz", hash = "sha256:9893fd3f90506bc4b97bdb977ceb8fbd823989f4316b28c3841ec128544372d3"}, + {file = "sphinxcontrib_devhelp-2.0.0-py3-none-any.whl", hash = "sha256:aefb8b83854e4b0998877524d1029fd3e6879210422ee3780459e28a1f03a8a2"}, + {file = "sphinxcontrib_devhelp-2.0.0.tar.gz", hash = "sha256:411f5d96d445d1d73bb5d52133377b4248ec79db5c793ce7dbe59e074b4dd1ad"}, ] [package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] +lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] standalone = ["Sphinx (>=5)"] test = ["pytest"] [[package]] name = "sphinxcontrib-htmlhelp" -version = "2.0.5" +version = "2.1.0" description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" optional = false python-versions = ">=3.9" files = [ - {file = "sphinxcontrib_htmlhelp-2.0.5-py3-none-any.whl", hash = "sha256:393f04f112b4d2f53d93448d4bce35842f62b307ccdc549ec1585e950bc35e04"}, - {file = "sphinxcontrib_htmlhelp-2.0.5.tar.gz", hash = "sha256:0dc87637d5de53dd5eec3a6a01753b1ccf99494bd756aafecd74b4fa9e729015"}, + {file = "sphinxcontrib_htmlhelp-2.1.0-py3-none-any.whl", hash = "sha256:166759820b47002d22914d64a075ce08f4c46818e17cfc9470a9786b759b19f8"}, + {file = "sphinxcontrib_htmlhelp-2.1.0.tar.gz", hash = "sha256:c9e2916ace8aad64cc13a0d233ee22317f2b9025b9cf3295249fa985cc7082e9"}, ] [package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] +lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] standalone = ["Sphinx (>=5)"] test = ["html5lib", "pytest"] @@ -5810,89 +5872,87 @@ test = ["flake8", "mypy", "pytest"] [[package]] name = 
"sphinxcontrib-qthelp" -version = "1.0.7" +version = "2.0.0" description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp documents" optional = false python-versions = ">=3.9" files = [ - {file = "sphinxcontrib_qthelp-1.0.7-py3-none-any.whl", hash = "sha256:e2ae3b5c492d58fcbd73281fbd27e34b8393ec34a073c792642cd8e529288182"}, - {file = "sphinxcontrib_qthelp-1.0.7.tar.gz", hash = "sha256:053dedc38823a80a7209a80860b16b722e9e0209e32fea98c90e4e6624588ed6"}, + {file = "sphinxcontrib_qthelp-2.0.0-py3-none-any.whl", hash = "sha256:b18a828cdba941ccd6ee8445dbe72ffa3ef8cbe7505d8cd1fa0d42d3f2d5f3eb"}, + {file = "sphinxcontrib_qthelp-2.0.0.tar.gz", hash = "sha256:4fe7d0ac8fc171045be623aba3e2a8f613f8682731f9153bb2e40ece16b9bbab"}, ] [package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] +lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] standalone = ["Sphinx (>=5)"] -test = ["pytest"] +test = ["defusedxml (>=0.7.1)", "pytest"] [[package]] name = "sphinxcontrib-serializinghtml" -version = "1.1.10" +version = "2.0.0" description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)" optional = false python-versions = ">=3.9" files = [ - {file = "sphinxcontrib_serializinghtml-1.1.10-py3-none-any.whl", hash = "sha256:326369b8df80a7d2d8d7f99aa5ac577f51ea51556ed974e7716cfd4fca3f6cb7"}, - {file = "sphinxcontrib_serializinghtml-1.1.10.tar.gz", hash = "sha256:93f3f5dc458b91b192fe10c397e324f262cf163d79f3282c158e8436a2c4511f"}, + {file = "sphinxcontrib_serializinghtml-2.0.0-py3-none-any.whl", hash = "sha256:6e2cb0eef194e10c27ec0023bfeb25badbbb5868244cf5bc5bdc04e4464bf331"}, + {file = "sphinxcontrib_serializinghtml-2.0.0.tar.gz", hash = "sha256:e9d912827f872c029017a53f0ef2180b327c3f7fd23c87229f7a8e8b70031d4d"}, ] [package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] +lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] standalone = ["Sphinx (>=5)"] test = ["pytest"] [[package]] name = "sqlalchemy" -version = "1.4.52" +version = "1.4.53" description = "Database Abstraction Library" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ - {file = "SQLAlchemy-1.4.52-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:f68016f9a5713684c1507cc37133c28035f29925c75c0df2f9d0f7571e23720a"}, - {file = "SQLAlchemy-1.4.52-cp310-cp310-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24bb0f81fbbb13d737b7f76d1821ec0b117ce8cbb8ee5e8641ad2de41aa916d3"}, - {file = "SQLAlchemy-1.4.52-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e93983cc0d2edae253b3f2141b0a3fb07e41c76cd79c2ad743fc27eb79c3f6db"}, - {file = "SQLAlchemy-1.4.52-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:84e10772cfc333eb08d0b7ef808cd76e4a9a30a725fb62a0495877a57ee41d81"}, - {file = "SQLAlchemy-1.4.52-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:427988398d2902de042093d17f2b9619a5ebc605bf6372f7d70e29bde6736842"}, - {file = "SQLAlchemy-1.4.52-cp310-cp310-win32.whl", hash = "sha256:1296f2cdd6db09b98ceb3c93025f0da4835303b8ac46c15c2136e27ee4d18d94"}, - {file = "SQLAlchemy-1.4.52-cp310-cp310-win_amd64.whl", hash = "sha256:80e7f697bccc56ac6eac9e2df5c98b47de57e7006d2e46e1a3c17c546254f6ef"}, - {file = "SQLAlchemy-1.4.52-cp311-cp311-macosx_10_9_universal2.whl", hash = 
"sha256:2f251af4c75a675ea42766880ff430ac33291c8d0057acca79710f9e5a77383d"}, - {file = "SQLAlchemy-1.4.52-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb8f9e4c4718f111d7b530c4e6fb4d28f9f110eb82e7961412955b3875b66de0"}, - {file = "SQLAlchemy-1.4.52-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afb1672b57f58c0318ad2cff80b384e816735ffc7e848d8aa51e0b0fc2f4b7bb"}, - {file = "SQLAlchemy-1.4.52-cp311-cp311-win32.whl", hash = "sha256:6e41cb5cda641f3754568d2ed8962f772a7f2b59403b95c60c89f3e0bd25f15e"}, - {file = "SQLAlchemy-1.4.52-cp311-cp311-win_amd64.whl", hash = "sha256:5bed4f8c3b69779de9d99eb03fd9ab67a850d74ab0243d1be9d4080e77b6af12"}, - {file = "SQLAlchemy-1.4.52-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:49e3772eb3380ac88d35495843daf3c03f094b713e66c7d017e322144a5c6b7c"}, - {file = "SQLAlchemy-1.4.52-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:618827c1a1c243d2540314c6e100aee7af09a709bd005bae971686fab6723554"}, - {file = "SQLAlchemy-1.4.52-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de9acf369aaadb71a725b7e83a5ef40ca3de1cf4cdc93fa847df6b12d3cd924b"}, - {file = "SQLAlchemy-1.4.52-cp312-cp312-win32.whl", hash = "sha256:763bd97c4ebc74136ecf3526b34808c58945023a59927b416acebcd68d1fc126"}, - {file = "SQLAlchemy-1.4.52-cp312-cp312-win_amd64.whl", hash = "sha256:f12aaf94f4d9679ca475975578739e12cc5b461172e04d66f7a3c39dd14ffc64"}, - {file = "SQLAlchemy-1.4.52-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:853fcfd1f54224ea7aabcf34b227d2b64a08cbac116ecf376907968b29b8e763"}, - {file = "SQLAlchemy-1.4.52-cp36-cp36m-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f98dbb8fcc6d1c03ae8ec735d3c62110949a3b8bc6e215053aa27096857afb45"}, - {file = "SQLAlchemy-1.4.52-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e135fff2e84103bc15c07edd8569612ce317d64bdb391f49ce57124a73f45c5"}, - {file = "SQLAlchemy-1.4.52-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5b5de6af8852500d01398f5047d62ca3431d1e29a331d0b56c3e14cb03f8094c"}, - {file = "SQLAlchemy-1.4.52-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3491c85df263a5c2157c594f54a1a9c72265b75d3777e61ee13c556d9e43ffc9"}, - {file = "SQLAlchemy-1.4.52-cp36-cp36m-win32.whl", hash = "sha256:427c282dd0deba1f07bcbf499cbcc9fe9a626743f5d4989bfdfd3ed3513003dd"}, - {file = "SQLAlchemy-1.4.52-cp36-cp36m-win_amd64.whl", hash = "sha256:ca5ce82b11731492204cff8845c5e8ca1a4bd1ade85e3b8fcf86e7601bfc6a39"}, - {file = "SQLAlchemy-1.4.52-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:29d4247313abb2015f8979137fe65f4eaceead5247d39603cc4b4a610936cd2b"}, - {file = "SQLAlchemy-1.4.52-cp37-cp37m-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a752bff4796bf22803d052d4841ebc3c55c26fb65551f2c96e90ac7c62be763a"}, - {file = "SQLAlchemy-1.4.52-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7ea11727feb2861deaa293c7971a4df57ef1c90e42cb53f0da40c3468388000"}, - {file = "SQLAlchemy-1.4.52-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:d913f8953e098ca931ad7f58797f91deed26b435ec3756478b75c608aa80d139"}, - {file = "SQLAlchemy-1.4.52-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a251146b921725547ea1735b060a11e1be705017b568c9f8067ca61e6ef85f20"}, - {file = "SQLAlchemy-1.4.52-cp37-cp37m-win32.whl", hash = "sha256:1f8e1c6a6b7f8e9407ad9afc0ea41c1f65225ce505b79bc0342159de9c890782"}, - {file = "SQLAlchemy-1.4.52-cp37-cp37m-win_amd64.whl", hash = "sha256:346ed50cb2c30f5d7a03d888e25744154ceac6f0e6e1ab3bc7b5b77138d37710"}, - {file = "SQLAlchemy-1.4.52-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:4dae6001457d4497736e3bc422165f107ecdd70b0d651fab7f731276e8b9e12d"}, - {file = "SQLAlchemy-1.4.52-cp38-cp38-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a5d2e08d79f5bf250afb4a61426b41026e448da446b55e4770c2afdc1e200fce"}, - {file = "SQLAlchemy-1.4.52-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bbce5dd7c7735e01d24f5a60177f3e589078f83c8a29e124a6521b76d825b85"}, - {file = "SQLAlchemy-1.4.52-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:bdb7b4d889631a3b2a81a3347c4c3f031812eb4adeaa3ee4e6b0d028ad1852b5"}, - {file = "SQLAlchemy-1.4.52-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c294ae4e6bbd060dd79e2bd5bba8b6274d08ffd65b58d106394cb6abbf35cf45"}, - {file = "SQLAlchemy-1.4.52-cp38-cp38-win32.whl", hash = "sha256:bcdfb4b47fe04967669874fb1ce782a006756fdbebe7263f6a000e1db969120e"}, - {file = "SQLAlchemy-1.4.52-cp38-cp38-win_amd64.whl", hash = "sha256:7d0dbc56cb6af5088f3658982d3d8c1d6a82691f31f7b0da682c7b98fa914e91"}, - {file = "SQLAlchemy-1.4.52-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:a551d5f3dc63f096ed41775ceec72fdf91462bb95abdc179010dc95a93957800"}, - {file = "SQLAlchemy-1.4.52-cp39-cp39-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ab773f9ad848118df7a9bbabca53e3f1002387cdbb6ee81693db808b82aaab0"}, - {file = "SQLAlchemy-1.4.52-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2de46f5d5396d5331127cfa71f837cca945f9a2b04f7cb5a01949cf676db7d1"}, - {file = "SQLAlchemy-1.4.52-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7027be7930a90d18a386b25ee8af30514c61f3852c7268899f23fdfbd3107181"}, - {file = "SQLAlchemy-1.4.52-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99224d621affbb3c1a4f72b631f8393045f4ce647dd3262f12fe3576918f8bf3"}, - {file = "SQLAlchemy-1.4.52-cp39-cp39-win32.whl", hash = "sha256:c124912fd4e1bb9d1e7dc193ed482a9f812769cb1e69363ab68e01801e859821"}, - {file = "SQLAlchemy-1.4.52-cp39-cp39-win_amd64.whl", hash = "sha256:2c286fab42e49db23c46ab02479f328b8bdb837d3e281cae546cc4085c83b680"}, - {file = "SQLAlchemy-1.4.52.tar.gz", hash = "sha256:80e63bbdc5217dad3485059bdf6f65a7d43f33c8bde619df5c220edf03d87296"}, + {file = "SQLAlchemy-1.4.53-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:b61ac5457d91b5629a3dea2b258deb4cdd35ac8f6fa2031d2b9b2fff5b3396da"}, + {file = "SQLAlchemy-1.4.53-cp310-cp310-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:1a96aa8d425047551676b0e178ddb0683421e78eda879ab55775128b2e612cae"}, + {file = "SQLAlchemy-1.4.53-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e10ac36f0b994235c13388b39598bf27219ec8bdea5be99bdac612b01cbe525"}, + {file = "SQLAlchemy-1.4.53-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:437592b341a3229dd0443c9c803b0bf0a466f8f539014fef6cdb9c06b7edb7f9"}, + {file = "SQLAlchemy-1.4.53-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:784272ceb5eb71421fea9568749bcbe8bd019261a0e2e710a7efa76057af2499"}, + {file = "SQLAlchemy-1.4.53-cp310-cp310-win32.whl", hash = "sha256:122d7b5722df1a24402c6748bbb04687ef981493bb559d0cc0beffe722e0e6ed"}, + {file = "SQLAlchemy-1.4.53-cp310-cp310-win_amd64.whl", hash = "sha256:4604d42b2abccba266d3f5bbe883684b5df93e74054024c70d3fbb5eea45e530"}, + {file = "SQLAlchemy-1.4.53-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:fb8e15dfa47f5de11ab073e12aadd6b502cfb7ac4bafd18bd18cfd1c7d13dbbc"}, + {file = "SQLAlchemy-1.4.53-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc8be4df55e8fde3006d9cb1f6b3df2ba26db613855dc4df2c0fcd5ec15cb3b7"}, + {file = "SQLAlchemy-1.4.53-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86b11640251f9a9789fd96cd6e5d176b1c230230c70ad40299bcbcc568451b4c"}, + {file = "SQLAlchemy-1.4.53-cp311-cp311-win32.whl", hash = "sha256:cd534c716f86bdf95b7b984a34ee278c91d1b1d7d183e7e5ff878600b1696046"}, + {file = "SQLAlchemy-1.4.53-cp311-cp311-win_amd64.whl", hash = "sha256:6dd06572872ca13ef5a90306a3e5af787498ddaa17fb00109b1243642646cd69"}, + {file = "SQLAlchemy-1.4.53-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:2774c24c405136c3ef472e2352bdca7330659d481fbf2283f996c0ef9eb90f22"}, + {file = "SQLAlchemy-1.4.53-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68a614765197b3d13a730d631a78c3bb9b3b72ba58ed7ab295d58d517464e315"}, + {file = "SQLAlchemy-1.4.53-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d13d4dfbc6e52363886b47cf02cf68c5d2a37c468626694dc210d7e97d4ad330"}, + {file = "SQLAlchemy-1.4.53-cp312-cp312-win32.whl", hash = "sha256:197065b91456574d70b6459bfa62bc0b52a4960a29ef923c375ec427274a3e05"}, + {file = "SQLAlchemy-1.4.53-cp312-cp312-win_amd64.whl", hash = "sha256:421306c4b936b0271a3ce2dc074928d5ece4a36f9c482daa5770f44ecfc3a883"}, + {file = "SQLAlchemy-1.4.53-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:13fc34b35d8ddb3fbe3f8fcfdf6c2546e676187f0fb20f5774da362ddaf8fa2d"}, + {file = "SQLAlchemy-1.4.53-cp36-cp36m-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:626be971ff89541cfd3e70b54be00b57a7f8557204decb6223ce0428fec058f3"}, + {file = "SQLAlchemy-1.4.53-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:991e42fdfec561ebc6a4fae7161a86d129d6069fa14210b96b8dd752afa7059c"}, + {file = "SQLAlchemy-1.4.53-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:95123f3a1e0e8020848fd32ba751db889a01a44e4e4fef7e58c87ddd0b2fca59"}, + {file = "SQLAlchemy-1.4.53-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:c58e011e9e6373b3a091d83f20601fb335a3b4bace80bfcb914ac168aad3b70d"}, + {file = "SQLAlchemy-1.4.53-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:670c7769bf5dcae9aff331247b5d82fe635c63731088a46ce68ba2ba519ef36e"}, + {file = "SQLAlchemy-1.4.53-cp37-cp37m-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07ba54f09033d387ae9df8d62cbe211ed7304e0bfbece1f8c55e21db9fae5c11"}, + {file = "SQLAlchemy-1.4.53-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a38834b4c183c33daf58544281395aad2e985f0b47cca1e88ea5ada88344e63"}, + {file = "SQLAlchemy-1.4.53-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:616492f5315128a847f293a7c552f3561ac7e996d2aa5dc46bef4fb0d3781f1d"}, + {file = "SQLAlchemy-1.4.53-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0cf8c0af9563892c6632f7343bc393dfce6eeef8e4d10c5fadba9c0390520bd"}, + {file = "SQLAlchemy-1.4.53-cp37-cp37m-win32.whl", hash = "sha256:c05fe05941424c2f3747a8952381b7725e24cba2ca00141380e54789d5b616b6"}, + {file = "SQLAlchemy-1.4.53-cp37-cp37m-win_amd64.whl", hash = "sha256:93e90aa3e3b2f8e8cbae4d5509f8e0cf82972378d323c740a8df1c1e9f484172"}, + {file = "SQLAlchemy-1.4.53-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:9d7368df54d3ed45a18955f6cec38ebe075290594ac0d5c87a8ddaff7e10de27"}, + {file = "SQLAlchemy-1.4.53-cp38-cp38-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89d8ac4158ef68eea8bb0f6dd0583127d9aa8720606964ba8eee20b254f9c83a"}, + {file = "SQLAlchemy-1.4.53-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16bb9fa4d00b4581b14d9f0e2224dc7745b854aa4687738279af0f48f7056c98"}, + {file = "SQLAlchemy-1.4.53-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4fe5168d0249c23f537950b6d75935ff2709365a113e29938a979aec36668ecf"}, + {file = "SQLAlchemy-1.4.53-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b8608d162d3bd29d807aab32c3fb6e2f8e225a43d1c54c917fed38513785380"}, + {file = "SQLAlchemy-1.4.53-cp38-cp38-win32.whl", hash = "sha256:a9d4d132198844bd6828047135ce7b887687c92925049a2468a605fc775c7a1a"}, + {file = "SQLAlchemy-1.4.53-cp38-cp38-win_amd64.whl", hash = "sha256:c15d1f1fcf1f9bec0499ae1d9132b950fcc7730f2d26d10484c8808b4e077816"}, + {file = "SQLAlchemy-1.4.53-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:edf094a20a386ff2ec73de65ef18014b250259cb860edc61741e240ca22d6981"}, + {file = "SQLAlchemy-1.4.53-cp39-cp39-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:83a9c3514ff19d9d30d8a8d378b24cd1dfa5528d20891481cb5f196117db6a48"}, + {file = "SQLAlchemy-1.4.53-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eaaeedbceb4dfd688fff2faf25a9a87a391f548811494f7bff7fa701b639abc3"}, + {file = "SQLAlchemy-1.4.53-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d021699b9007deb7aa715629078830c99a5fec2753d9bdd5ff33290d363ef755"}, + {file = "SQLAlchemy-1.4.53-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:0465b8a68f8f4de754c1966c45b187ac784ad97bc9747736f913130f0e1adea0"}, + {file = "SQLAlchemy-1.4.53-cp39-cp39-win32.whl", hash = "sha256:5f67b9e9dcac3241781e96575468d55a42332157dee04bdbf781df573dff5f85"}, + {file = "SQLAlchemy-1.4.53-cp39-cp39-win_amd64.whl", hash = "sha256:a8c2f2a0b2c4e3b86eb58c9b6bb98548205eea2fba9dae4edfd29dc6aebbe95a"}, + {file = "SQLAlchemy-1.4.53.tar.gz", hash = "sha256:5e6ab710c4c064755fd92d1a417bef360228a19bdf0eee32b03aa0f5f8e9fe0d"}, ] [package.dependencies] @@ -5903,17 +5963,17 @@ aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] asyncio = ["greenlet (!=0.4.17)"] asyncmy = ["asyncmy (>=0.2.3,!=0.2.4)", "greenlet (!=0.4.17)"] -mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2)"] +mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2)", "mariadb (>=1.0.1,!=1.1.2)"] mssql = ["pyodbc"] -mssql-pymssql = ["pymssql"] -mssql-pyodbc = ["pyodbc"] +mssql-pymssql = ["pymssql", "pymssql"] +mssql-pyodbc = ["pyodbc", "pyodbc"] mypy = ["mypy (>=0.910)", "sqlalchemy2-stubs"] mysql = ["mysqlclient (>=1.4.0)", "mysqlclient (>=1.4.0,<2)"] -mysql-connector = ["mysql-connector-python"] +mysql-connector = ["mysql-connector-python", "mysql-connector-python"] oracle = ["cx_oracle (>=7)", "cx_oracle (>=7,<8)"] postgresql = ["psycopg2 (>=2.7)"] -postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] -postgresql-pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)"] +postgresql-asyncpg = ["asyncpg", "asyncpg", "greenlet (!=0.4.17)", "greenlet (!=0.4.17)"] +postgresql-pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)", "pg8000 (>=1.16.6,!=1.29.0)"] postgresql-psycopg2binary = ["psycopg2-binary"] postgresql-psycopg2cffi = ["psycopg2cffi"] pymysql = ["pymysql", "pymysql (<1)"] @@ -6065,13 +6125,13 @@ test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0, [[package]] name = "types-python-dateutil" -version = "2.9.0.20240316" +version = "2.9.0.20240821" description = "Typing stubs for python-dateutil" optional = false python-versions = ">=3.8" files = [ - {file = "types-python-dateutil-2.9.0.20240316.tar.gz", hash = "sha256:5d2f2e240b86905e40944dd787db6da9263f0deabef1076ddaed797351ec0202"}, - {file = "types_python_dateutil-2.9.0.20240316-py3-none-any.whl", hash = "sha256:6b8cb66d960771ce5ff974e9dd45e38facb81718cc1e208b10b1baccbfdbee3b"}, + {file = "types-python-dateutil-2.9.0.20240821.tar.gz", hash = "sha256:9649d1dcb6fef1046fb18bebe9ea2aa0028b160918518c34589a46045f6ebd98"}, + {file = "types_python_dateutil-2.9.0.20240821-py3-none-any.whl", hash = "sha256:f5889fcb4e63ed4aaa379b44f93c32593d50b9a94c9a60a0c854d8cc3511cd57"}, ] [[package]] @@ -6509,13 +6569,13 @@ files = [ [[package]] name = "zipp" -version = "3.19.2" +version = "3.20.0" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.8" files = [ - {file = "zipp-3.19.2-py3-none-any.whl", hash = "sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c"}, - {file = "zipp-3.19.2.tar.gz", hash = "sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19"}, + {file = "zipp-3.20.0-py3-none-any.whl", hash = "sha256:58da6168be89f0be59beb194da1250516fdaa062ccebd30127ac65d30045e10d"}, + {file = "zipp-3.20.0.tar.gz", hash = "sha256:0145e43d89664cfe1a2e533adc75adafed82fe2da404b4bbb6b026c0157bdb31"}, ] [package.extras] @@ -6542,4 +6602,4 @@ sip2 = ["invenio-sip2"] [metadata] lock-version = "2.0" python-versions = ">= 3.9, <3.10" -content-hash = 
"0308b222592f5942bbc6da3482b74e3e4c91ca980c60da9b9202afbe83ba9b36" +content-hash = "359d48f6dd4bb2be817b6286d8bb2e725e682467cf9067b194946e3824a17e55" diff --git a/pyproject.toml b/pyproject.toml index 9e8e40303b..1de88d2dac 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -122,9 +122,10 @@ rero-invenio-files = ">=1.0.0,<2.0.0" ## Python packages development dependencies (order matters) #---------------------------------------------------------- ## Default from Invenio -pytest-black-ng = ">=0.4.0" pytest-invenio = ">=2.1.6,<3.0.0" pydocstyle = ">=6.1.1" +pytest-black = ">=0.3.2" +pytest-black-ng = ">=0.4.0" Sphinx = ">=4.5.0" Flask-Debugtoolbar = ">=0.10.1" ## RERO ILS specific python packages @@ -468,6 +469,17 @@ server = {cmd = "./scripts/server", help = "Starts the server "} setup = {cmd = "./scripts/setup", help = "Runs setup"} update = {cmd = "./scripts/update", help = "Runs update"} +[tool.isort] +profile = "black" + +[tool.pytest] +addopts = "--color=yes --black --isort --pydocstyle --doctest-glob=\"*.rst\" --doctest-modules --cov=rero_ils --cov-report=term-missing --ignore=setup.py --ignore=docs/conf.py --ignore=rero_ils/config.py -m \"not external\"" +testpaths = "docs tests rero_ils" +# custom markers +markers = "external: mark a test as dealing with external services." +# not displaying all the PendingDeprecationWarnings from invenio +filterwarnings = "ignore::PendingDeprecationWarning" + [build-system] requires = ["poetry>=0.12"] build-backend = "poetry.masonry.api" diff --git a/pytest.ini b/pytest.ini deleted file mode 100644 index 4a986764db..0000000000 --- a/pytest.ini +++ /dev/null @@ -1,29 +0,0 @@ -# -*- coding: utf-8 -*- -# -# RERO ILS -# Copyright (C) 2019 RERO -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as published by -# the Free Software Foundation, version 3 of the License. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see . - -[pytest] -live_server_scope = module -addopts = --color=yes --pycodestyle --pydocstyle --doctest-glob="*.rst" --doctest-modules --cov=rero_ils --cov-report=term-missing --ignore=setup.py --ignore=docs/conf.py --ignore=rero_ils/config.py -m "not external" -testpaths = docs tests rero_ils - -# custom markers -markers = - external: mark a test as dealing with external services. 
- -# not displaying all the PendingDeprecationWarnings from invenio -filterwarnings = - ignore::PendingDeprecationWarning diff --git a/rero_ils/__init__.py b/rero_ils/__init__.py index a279f0df8d..b23a285a74 100644 --- a/rero_ils/__init__.py +++ b/rero_ils/__init__.py @@ -21,4 +21,4 @@ from .version import __version__ -__all__ = ('__version__', ) +__all__ = ("__version__",) diff --git a/rero_ils/accounts_views.py b/rero_ils/accounts_views.py index 54006e288b..27c6a4a0f6 100644 --- a/rero_ils/accounts_views.py +++ b/rero_ils/accounts_views.py @@ -24,8 +24,7 @@ from flask_security.confirmable import requires_confirmation from flask_security.utils import get_message, verify_and_update_password from invenio_accounts.utils import change_user_password -from invenio_accounts.views.rest import \ - ChangePasswordView as BaseChangePasswordView +from invenio_accounts.views.rest import ChangePasswordView as BaseChangePasswordView from invenio_accounts.views.rest import LoginView as CoreLoginView from invenio_accounts.views.rest import _abort, _commit, use_args, use_kwargs from marshmallow import Schema, fields, validates, validates_schema @@ -34,11 +33,9 @@ from rero_ils.modules.patrons.api import Patron, current_librarian from rero_ils.modules.users.api import User -from rero_ils.modules.utils import PasswordValidatorException, \ - password_validator +from rero_ils.modules.utils import PasswordValidatorException, password_validator -current_datastore = LocalProxy( - lambda: current_app.extensions['security'].datastore) +current_datastore = LocalProxy(lambda: current_app.extensions["security"].datastore) # @@ -46,8 +43,8 @@ # def validate_password(password): """Validate the password.""" - length = current_app.config.get('RERO_ILS_PASSWORD_MIN_LENGTH', 8) - special_char = current_app.config.get('RERO_ILS_PASSWORD_SPECIAL_CHAR') + length = current_app.config.get("RERO_ILS_PASSWORD_MIN_LENGTH", 8) + special_char = current_app.config.get("RERO_ILS_PASSWORD_SPECIAL_CHAR") try: password_validator(password, length=length, special_char=special_char) except PasswordValidatorException as pve: @@ -57,15 +54,15 @@ def validate_password(password): def validate_passwords(password, confirm_password): """Validate that the 2 passwords are identical.""" if password != confirm_password: - raise ValidationError(_('The 2 passwords are not identical.')) + raise ValidationError(_("The 2 passwords are not identical.")) class LoginView(CoreLoginView): """invenio-accounts Login REST View.""" post_args = { - 'email': fields.String(required=True), - 'password': fields.String(required=True) + "email": fields.String(required=True), + "password": fields.String(required=True), } @classmethod @@ -79,7 +76,7 @@ def post(self, **kwargs): """Verify and login a user.""" user = self.get_user(**kwargs) if not user: - _abort(_('INVALID_USER_OR_PASSWORD')) + _abort(_("INVALID_USER_OR_PASSWORD")) self.verify_login(user, **kwargs) self.login_user(user) return self.success_response(user) @@ -87,11 +84,11 @@ def post(self, **kwargs): def verify_login(self, user, password=None, **kwargs): """Verify the login via password.""" if not user.password or not verify_and_update_password(password, user): - _abort(_('INVALID_USER_OR_PASSWORD')) + _abort(_("INVALID_USER_OR_PASSWORD")) if requires_confirmation(user): - _abort(get_message('CONFIRMATION_REQUIRED')[0]) + _abort(get_message("CONFIRMATION_REQUIRED")[0]) if not user.is_active: - _abort(get_message('DISABLED_ACCOUNT')[0]) + _abort(get_message("DISABLED_ACCOUNT")[0]) class PasswordPassword(Schema): 
@@ -109,14 +106,13 @@ def validate_password(self, value): @validates_schema def validate_passwords(self, data, **kwargs): """Validate that the 2 passwords are identical.""" - validate_passwords(data['new_password'], data['new_password_confirm']) + validate_passwords(data["new_password"], data["new_password_confirm"]) class UsernamePassword(Schema): """Args validation when a professional change a password for a user.""" - username = fields.String(required=True, - validate=[validate.Length(min=1, max=128)]) + username = fields.String(required=True, validate=[validate.Length(min=1, max=128)]) new_password = fields.String(required=True) new_password_confirm = fields.String(required=True) @@ -128,22 +124,22 @@ def validate_password(self, value): @validates_schema def validate_passwords(self, data, **kwargs): """Validate that the 2 passwords are identical.""" - validate_passwords(data['new_password'], data['new_password_confirm']) + validate_passwords(data["new_password"], data["new_password_confirm"]) def make_password_schema(request): """Select the right args validation depending on the context.""" # Filter based on 'fields' query parameter - fields = request.args.get('fields', None) - only = fields.split(',') if fields else None + fields = request.args.get("fields", None) + only = fields.split(",") if fields else None # Respect partial updates for PATCH requests - partial = request.method == 'PATCH' - if request.json.get('username'): - return UsernamePassword(only=only, - partial=partial, context={"request": request}) + partial = request.method == "PATCH" + if request.json.get("username"): + return UsernamePassword( + only=only, partial=partial, context={"request": request} + ) # Add current request to the schema's context - return PasswordPassword(only=only, - partial=partial, context={"request": request}) + return PasswordPassword(only=only, partial=partial, context={"request": request}) class ChangePasswordView(BaseChangePasswordView): @@ -158,21 +154,21 @@ def verify_permission(self, username, **args): patrons = Patron.get_patrons_by_user(user.user) # logged user is not librarian or no patron account match the logged # user organisation - if not current_librarian or current_librarian.organisation_pid not in \ - [ptrn.organisation_pid for ptrn in patrons]: + if not current_librarian or current_librarian.organisation_pid not in [ + ptrn.organisation_pid for ptrn in patrons + ]: return current_app.login_manager.unauthorized() def change_password_for_user(self, username, new_password, **kwargs): """Perform change password for a specific user.""" after_this_request(_commit) user = User.get_by_username(username) - change_user_password(user=user.user, - password=new_password) + change_user_password(user=user.user, password=new_password) @use_args(make_password_schema) def post(self, args): """Change user password.""" - if flask_request.json.get('username'): + if flask_request.json.get("username"): self.verify_permission(**args) self.change_password_for_user(**args) else: diff --git a/rero_ils/alembic/0387b753585f_correct_subjects_bf_Organization.py b/rero_ils/alembic/0387b753585f_correct_subjects_bf_Organization.py index bb3e621e69..6dd086d354 100644 --- a/rero_ils/alembic/0387b753585f_correct_subjects_bf_Organization.py +++ b/rero_ils/alembic/0387b753585f_correct_subjects_bf_Organization.py @@ -25,73 +25,85 @@ from rero_ils.modules.documents.api import Document, DocumentsSearch -LOGGER = getLogger('alembic') +LOGGER = getLogger("alembic") # revision identifiers, used by Alembic. 
-revision = '0387b753585f' -down_revision = 'ce4923ba5286' +revision = "0387b753585f" +down_revision = "ce4923ba5286" branch_labels = () depends_on = None def upgrade(): """Change subjects bf:Organization to bf:Organisation.""" - query = DocumentsSearch().filter('bool', should=[ - Q('term', subjects__type='bf:Organization'), - Q('term', subjects_imported__type='bf:Organization') - ]) + query = DocumentsSearch().filter( + "bool", + should=[ + Q("term", subjects__type="bf:Organization"), + Q("term", subjects_imported__type="bf:Organization"), + ], + ) - LOGGER.info(f'Upgrade to {revision}') - LOGGER.info(f'Documents to change: {query.count()}') - pids = [hit.pid for hit in query.source('pid').scan()] + LOGGER.info(f"Upgrade to {revision}") + LOGGER.info(f"Documents to change: {query.count()}") + pids = [hit.pid for hit in query.source("pid").scan()] errors = 0 idx = 0 for idx, pid in enumerate(pids, 1): - LOGGER.info(f'{idx} * Change document: {pid}') + LOGGER.info(f"{idx} * Change document: {pid}") doc = Document.get_record_by_pid(pid) - for subject in doc.get('subjects', []): - if subject['type'] == 'bf:Organization': - subject['type'] = 'bf:Organisation' - for subjects_imported in doc.get('subjects_imported', []): - if subjects_imported['type'] == 'bf:Organization': - subjects_imported['type'] = 'bf:Organisation' + for subject in doc.get("subjects", []): + if subject["type"] == "bf:Organization": + subject["type"] = "bf:Organisation" + for subjects_imported in doc.get("subjects_imported", []): + if subjects_imported["type"] == "bf:Organization": + subjects_imported["type"] = "bf:Organisation" try: doc.update(data=doc, dbcommit=True, reindex=True) except Exception as err: - LOGGER.error(f'\tError: {err}') + LOGGER.error(f"\tError: {err}") errors += 1 - LOGGER.info(f'Updated: {idx} Errors: {errors}') + LOGGER.info(f"Updated: {idx} Errors: {errors}") def downgrade(): """Change subjects bf:Organisation to bf:Organization.""" - query = DocumentsSearch().filter('bool', should=[ - Q('bool', must=[ - Q('term', subjects__type='bf:Organisation'), - Q('exists', field='subjects.preferred_name') - ]), - Q('bool', must=[ - Q('term', subjects_imported__type='bf:Organisation'), - Q('exists', field='subjects_imported.preferred_name') - ]) - ]) - LOGGER.info(f'Downgrade to {down_revision}') - LOGGER.info(f'Documents to change: {query.count()}') - pids = [hit.pid for hit in query.source('pid').scan()] + query = DocumentsSearch().filter( + "bool", + should=[ + Q( + "bool", + must=[ + Q("term", subjects__type="bf:Organisation"), + Q("exists", field="subjects.preferred_name"), + ], + ), + Q( + "bool", + must=[ + Q("term", subjects_imported__type="bf:Organisation"), + Q("exists", field="subjects_imported.preferred_name"), + ], + ), + ], + ) + LOGGER.info(f"Downgrade to {down_revision}") + LOGGER.info(f"Documents to change: {query.count()}") + pids = [hit.pid for hit in query.source("pid").scan()] errors = 0 idx = 0 for idx, pid in enumerate(pids, 1): - LOGGER.info(f'{idx} * Change document: {pid}') + LOGGER.info(f"{idx} * Change document: {pid}") doc = Document.get_record_by_pid(pid) - for subject in doc.get('subjects', []): - if subject['type'] == 'bf:Organisation': - subject['type'] = 'bf:Organization' - for subjects_imported in doc.get('subjects_imported', []): - if subjects_imported['type'] == 'bf:Organisation': - subjects_imported['type'] = 'bf:Organization' + for subject in doc.get("subjects", []): + if subject["type"] == "bf:Organisation": + subject["type"] = "bf:Organization" + for subjects_imported in 
doc.get("subjects_imported", []): + if subjects_imported["type"] == "bf:Organisation": + subjects_imported["type"] = "bf:Organization" try: doc.update(data=doc, dbcommit=True, reindex=True) except Exception as err: - LOGGER.error(f'\tError: {err}') + LOGGER.error(f"\tError: {err}") errors += 1 - LOGGER.info(f'Updated: {idx} Errors: {errors}') + LOGGER.info(f"Updated: {idx} Errors: {errors}") diff --git a/rero_ils/alembic/05555c03fe49_correct_holdings_items_count.py b/rero_ils/alembic/05555c03fe49_correct_holdings_items_count.py index be8339416e..6537dac435 100644 --- a/rero_ils/alembic/05555c03fe49_correct_holdings_items_count.py +++ b/rero_ils/alembic/05555c03fe49_correct_holdings_items_count.py @@ -22,24 +22,24 @@ from rero_ils.modules.holdings.api import Holding, HoldingsSearch # revision identifiers, used by Alembic. -revision = '05555c03fe49' -down_revision = 'b90f8b148948' +revision = "05555c03fe49" +down_revision = "b90f8b148948" branch_labels = () depends_on = None -LOGGER = getLogger('alembic') +LOGGER = getLogger("alembic") def upgrade(): """Upgrade index holdings.""" - errors = upgrade_downgrade('upgrade') - LOGGER.info(f'upgraded to version: {revision} errors: {errors}') + errors = upgrade_downgrade("upgrade") + LOGGER.info(f"upgraded to version: {revision} errors: {errors}") def downgrade(): """Downgrade index holdings.""" - errors = upgrade_downgrade('downgrade') - LOGGER.info(f'downgraded to version: {down_revision} errors: {errors}') + errors = upgrade_downgrade("downgrade") + LOGGER.info(f"downgraded to version: {down_revision} errors: {errors}") def upgrade_downgrade(action): @@ -48,20 +48,18 @@ def upgrade_downgrade(action): Correct items_count and public_items_count for holdings of type serial. :param str action: upgrade or downgrade. """ - query = HoldingsSearch()\ - .filter('term', holdings_type='serial') \ - .source(['pid']) + query = HoldingsSearch().filter("term", holdings_type="serial").source(["pid"]) ids = [(h.meta.id, h.pid) for h in query.scan()] - LOGGER.info(f'Indexing {len(ids)} records ....') + LOGGER.info(f"Indexing {len(ids)} records ....") errors = 0 for idx, (id, pid) in enumerate(ids): - LOGGER.info(f'{idx} * Reindex holding: {pid}.') + LOGGER.info(f"{idx} * Reindex holding: {pid}.") try: hold = Holding.get_record(id) hold.reindex() except Exception as err: - LOGGER.error(f'{idx} * Reindex holding: {pid} {err}') + LOGGER.error(f"{idx} * Reindex holding: {pid} {err}") errors += 1 return errors diff --git a/rero_ils/alembic/21a994dc2beb_add_library_pid_to_ill_request_.py b/rero_ils/alembic/21a994dc2beb_add_library_pid_to_ill_request_.py index 34ca8e1034..a430f30c99 100644 --- a/rero_ils/alembic/21a994dc2beb_add_library_pid_to_ill_request_.py +++ b/rero_ils/alembic/21a994dc2beb_add_library_pid_to_ill_request_.py @@ -26,12 +26,12 @@ from rero_ils.modules.operation_logs.api import OperationLogsSearch # revision identifiers, used by Alembic. -revision = '21a994dc2beb' -down_revision = '74ab9da9f078' +revision = "21a994dc2beb" +down_revision = "74ab9da9f078" branch_labels = () depends_on = None -LOGGER = getLogger('alembic') +LOGGER = getLogger("alembic") def upgrade(): @@ -39,47 +39,46 @@ def upgrade(): For all ill request operation logs, we will add a library pid. 
""" - query = RecordsSearch(index=LoanOperationLog.index_name) \ - .filter('term', record__type='illr') \ - .filter('bool', must_not=[ - Q('exists', field='ill_request.library_pid') - ]) - pids = [hit.pid for hit in query.source('pid').scan()] - LOGGER.info(f'Upgrade operation logs illr :: {len(pids)}') + query = ( + RecordsSearch(index=LoanOperationLog.index_name) + .filter("term", record__type="illr") + .filter("bool", must_not=[Q("exists", field="ill_request.library_pid")]) + ) + pids = [hit.pid for hit in query.source("pid").scan()] + LOGGER.info(f"Upgrade operation logs illr :: {len(pids)}") errors = 0 for idx, pid in enumerate(pids, 1): record = LoanOperationLog.get_record(pid) - ill_request_pid = record['record']['value'] + ill_request_pid = record["record"]["value"] if ill_request := ILLRequest.get_record_by_pid(ill_request_pid): try: - record['ill_request']['library_pid'] = \ - ill_request.get_library().pid - LoanOperationLog.update(pid, record['date'], record) + record["ill_request"]["library_pid"] = ill_request.get_library().pid + LoanOperationLog.update(pid, record["date"], record) except Exception as err: - LOGGER.error(f'{idx:<10} {pid} {err}') + LOGGER.error(f"{idx:<10} {pid} {err}") errors += 1 else: - LOGGER.error( - f'{idx:<10} {pid} ill request not found {ill_request_pid}') + LOGGER.error(f"{idx:<10} {pid} ill request not found {ill_request_pid}") errors += 1 OperationLogsSearch.flush_and_refresh() - LOGGER.info(f'Changed: {idx} Errors: {errors}') + LOGGER.info(f"Changed: {idx} Errors: {errors}") def downgrade(): """Downgrade ill request operation logs records.""" - query = RecordsSearch(index=LoanOperationLog.index_name)\ - .filter('term', record__type='illr') - pids = [hit.pid for hit in query.source('pid').scan()] - LOGGER.info(f'Downgrade operation logs illr :: {len(pids)}') + query = RecordsSearch(index=LoanOperationLog.index_name).filter( + "term", record__type="illr" + ) + pids = [hit.pid for hit in query.source("pid").scan()] + LOGGER.info(f"Downgrade operation logs illr :: {len(pids)}") errors = 0 for idx, pid in enumerate(pids, 1): record = LoanOperationLog.get_record(pid) - record['ill_request'].pop('library_pid', None) + record["ill_request"].pop("library_pid", None) try: - LoanOperationLog.update(pid, record['date'], record) + LoanOperationLog.update(pid, record["date"], record) except Exception as err: - LOGGER.error(f'{idx:<10} {pid} {err}') + LOGGER.error(f"{idx:<10} {pid} {err}") errors += 1 OperationLogsSearch.flush_and_refresh() - LOGGER.info(f'Changed: {idx} Errors: {errors}') + LOGGER.info(f"Changed: {idx} Errors: {errors}") diff --git a/rero_ils/alembic/2b0af71048a7_add_request_expiration_date.py b/rero_ils/alembic/2b0af71048a7_add_request_expiration_date.py index a3d42d355c..aa2c8731ed 100644 --- a/rero_ils/alembic/2b0af71048a7_add_request_expiration_date.py +++ b/rero_ils/alembic/2b0af71048a7_add_request_expiration_date.py @@ -30,20 +30,22 @@ from rero_ils.modules.loans.models import LoanState # revision identifiers, used by Alembic. 
-revision = '2b0af71048a7' -down_revision = 'cc7ffbe1e078' +revision = "2b0af71048a7" +down_revision = "cc7ffbe1e078" branch_labels = () depends_on = None -LOGGER = getLogger('alembic') +LOGGER = getLogger("alembic") def upgrade(): """Update loans records.""" - query = current_circulation.loan_search_cls() \ - .filter('term', state=LoanState.ITEM_AT_DESK) \ - .filter('bool', must_not=[Q('exists', field='request_expire_date')]) \ - .source('pid') + query = ( + current_circulation.loan_search_cls() + .filter("term", state=LoanState.ITEM_AT_DESK) + .filter("bool", must_not=[Q("exists", field="request_expire_date")]) + .source("pid") + ) loan_pids = [hit.pid for hit in query.scan()] ids = [] for pid in loan_pids: @@ -51,21 +53,21 @@ def upgrade(): trans_date = ciso8601.parse_datetime(loan.transaction_date) expire_date = trans_date + timedelta(days=10) expire_date = expire_date.replace( - hour=23, minute=59, second=00, microsecond=000, - tzinfo=None) + hour=23, minute=59, second=0, microsecond=0, tzinfo=None + ) - expire_date = pytz.timezone('Europe/Zurich').localize(expire_date) - loan['request_expire_date'] = expire_date.isoformat() - loan['request_start_date'] = datetime.now().isoformat() + expire_date = pytz.timezone("Europe/Zurich").localize(expire_date) + loan["request_expire_date"] = expire_date.isoformat() + loan["request_start_date"] = datetime.now().isoformat() loan.update(loan, dbcommit=True, reindex=False) - LOGGER.info(f' * Updated loan#{loan.pid}') + LOGGER.info(f" * Updated loan#{loan.pid}") ids.append(loan.id) if len(ids): - LOGGER.info(f'Indexing {len(ids)} records ....') + LOGGER.info(f"Indexing {len(ids)} records ....") indexer = LoansIndexer() indexer.bulk_index(ids) count = indexer.process_bulk_queue() - LOGGER.info(f'{count} records indexed.') - LOGGER.info(f'TOTAL :: {len(ids)}') + LOGGER.info(f"{count} records indexed.") + LOGGER.info(f"TOTAL :: {len(ids)}") def downgrade(): diff --git a/rero_ils/alembic/54134957af7d_loan_checkout_location_pid.py b/rero_ils/alembic/54134957af7d_loan_checkout_location_pid.py index dc7783f688..0fe6be0846 100644 --- a/rero_ils/alembic/54134957af7d_loan_checkout_location_pid.py +++ b/rero_ils/alembic/54134957af7d_loan_checkout_location_pid.py @@ -26,12 +26,12 @@ from rero_ils.modules.loans.models import LoanState # revision identifiers, used by Alembic. 
""" - query = current_circulation.loan_search_cls() \ - .filter('term', state=LoanState.ITEM_ON_LOAN) \ - .filter('bool', must_not=[ - Q('exists', field='checkout_location_pid') - ]) \ - .source(['pid', 'transaction_location_pid']) + query = ( + current_circulation.loan_search_cls() + .filter("term", state=LoanState.ITEM_ON_LOAN) + .filter("bool", must_not=[Q("exists", field="checkout_location_pid")]) + .source(["pid", "transaction_location_pid"]) + ) loans_hits = [hit for hit in query.scan()] ids = [] for hit in loans_hits: loan = Loan.get_record_by_pid(hit.pid) - loan['checkout_location_pid'] = hit.transaction_location_pid + loan["checkout_location_pid"] = hit.transaction_location_pid loan.update(loan, dbcommit=True, reindex=False) - LOGGER.info(f' * Upgrade loan#{loan.pid}') + LOGGER.info(f" * Upgrade loan#{loan.pid}") ids.append(loan.id) _indexing_records(ids) - LOGGER.info(f'TOTAL :: {len(ids)}') + LOGGER.info(f"TOTAL :: {len(ids)}") def downgrade(): """Downgrade Loan records removing `checkout_location_pid` field.""" - query = current_circulation.loan_search_cls() \ - .filter('exists', field='checkout_location_pid') \ - .source('pid') + query = ( + current_circulation.loan_search_cls() + .filter("exists", field="checkout_location_pid") + .source("pid") + ) loans_hits = [hit for hit in query.scan()] ids = [] for hit in loans_hits: loan = Loan.get_record_by_pid(hit.pid) - del loan['checkout_location_pid'] + del loan["checkout_location_pid"] loan.update(loan, dbcommit=True, reindex=False) - LOGGER.info(f' * Downgrade loan#{loan.pid}') + LOGGER.info(f" * Downgrade loan#{loan.pid}") ids.append(loan.id) _indexing_records(ids) - LOGGER.info(f'TOTAL :: {len(ids)}') + LOGGER.info(f"TOTAL :: {len(ids)}") def _indexing_records(record_ids): @@ -81,13 +83,13 @@ def _indexing_records(record_ids): if not record_ids: return - LOGGER.info(f'Indexing {len(record_ids)} records ....') + LOGGER.info(f"Indexing {len(record_ids)} records ....") indexer = LoansIndexer() chunks = [ - record_ids[x:x + indexing_chunck_size] + record_ids[x : x + indexing_chunck_size] for x in range(0, len(record_ids), indexing_chunck_size) ] for chuncked_ids in chunks: indexer.bulk_index(chuncked_ids) count = indexer.process_bulk_queue() - LOGGER.info(f'{count} records indexed.') + LOGGER.info(f"{count} records indexed.") diff --git a/rero_ils/alembic/5f0b086e4b82_patron_role_migration.py b/rero_ils/alembic/5f0b086e4b82_patron_role_migration.py index eb6f10d954..e4788457e7 100644 --- a/rero_ils/alembic/5f0b086e4b82_patron_role_migration.py +++ b/rero_ils/alembic/5f0b086e4b82_patron_role_migration.py @@ -26,12 +26,12 @@ from rero_ils.modules.users.models import UserRole # revision identifiers, used by Alembic. 
-revision = '5f0b086e4b82' -down_revision = 'eec683a446e5' +revision = "5f0b086e4b82" +down_revision = "eec683a446e5" branch_labels = () depends_on = None -LOGGER = getLogger('alembic') +LOGGER = getLogger("alembic") indexing_chunck_size = 100 @@ -39,49 +39,53 @@ def upgrade(): """Upgrade database.""" def get_new_roles(roles): - if 'system_librarian' in roles: - roles.remove('system_librarian') + if "system_librarian" in roles: + roles.remove("system_librarian") roles.append(UserRole.FULL_PERMISSIONS) - if 'librarian' in roles: - roles.remove('librarian') - roles.extend([ - UserRole.PROFESSIONAL_READ_ONLY, - UserRole.ACQUISITION_MANAGER, - UserRole.CATALOG_MANAGER, - UserRole.CIRCULATION_MANAGER, - UserRole.USER_MANAGER - ]) + if "librarian" in roles: + roles.remove("librarian") + roles.extend( + [ + UserRole.PROFESSIONAL_READ_ONLY, + UserRole.ACQUISITION_MANAGER, + UserRole.CATALOG_MANAGER, + UserRole.CIRCULATION_MANAGER, + UserRole.USER_MANAGER, + ] + ) return roles # add new roles new_roles = { - UserRole.PROFESSIONAL_READ_ONLY: 'Professional: Read_only', - UserRole.ACQUISITION_MANAGER: 'Professional: Acquisition manager', - UserRole.FULL_PERMISSIONS: 'Professional: Full permissions', - UserRole.CATALOG_MANAGER: 'Professional: Catalog manager', - UserRole.CIRCULATION_MANAGER: 'Professional: Circulation manager', - UserRole.LIBRARY_ADMINISTRATOR: 'Professional: Library administrator', - UserRole.USER_MANAGER: 'Professional: User manager' + UserRole.PROFESSIONAL_READ_ONLY: "Professional: Read_only", + UserRole.ACQUISITION_MANAGER: "Professional: Acquisition manager", + UserRole.FULL_PERMISSIONS: "Professional: Full permissions", + UserRole.CATALOG_MANAGER: "Professional: Catalog manager", + UserRole.CIRCULATION_MANAGER: "Professional: Circulation manager", + UserRole.LIBRARY_ADMINISTRATOR: "Professional: Library administrator", + UserRole.USER_MANAGER: "Professional: User manager", } for name, description in new_roles.items(): _datastore.create_role(name=name, description=description) - LOGGER.info(f'--> [{name}] role created') + LOGGER.info(f"--> [{name}] role created") _datastore.commit() # assign new roles - query = PatronsSearch()\ - .filter('terms', roles=['librarian', 'system_librarian'])\ + query = ( + PatronsSearch() + .filter("terms", roles=["librarian", "system_librarian"]) .source(False) + ) patron_uuids = [] for hit in query.scan(): patron = Patron.get_record(hit.meta.id) - original_roles = patron.get('roles') + original_roles = patron.get("roles") migrated_roles = get_new_roles(original_roles) - LOGGER.info(f'* Updating ptrn#{patron.pid} [{patron.formatted_name}]') - LOGGER.info(f'\t - Original roles are : [{original_roles}]') - LOGGER.info(f'\t - New roles are : [{migrated_roles}]') - patron['roles'] = migrated_roles + LOGGER.info(f"* Updating ptrn#{patron.pid} [{patron.formatted_name}]") + LOGGER.info(f"\t - Original roles are : [{original_roles}]") + LOGGER.info(f"\t - New roles are : [{migrated_roles}]") + patron["roles"] = migrated_roles patron.update(patron, dbcommit=True, reindex=False) patron_uuids.append(hit.meta.id) @@ -93,23 +97,23 @@ def downgrade(): def get_original_roles(roles): if UserRole.FULL_PERMISSIONS in roles: - return ['system_librarian'] + return ["system_librarian"] elif any(role in UserRole.LIBRARIAN_ROLES for role in roles): - return ['librarian'] + return ["librarian"] - query = PatronsSearch()\ - .filter('terms', roles=UserRole.PROFESSIONAL_ROLES)\ - .source(False) + query = ( + PatronsSearch().filter("terms", 
roles=UserRole.PROFESSIONAL_ROLES).source(False) + ) patron_uuids = [] for hit in query.scan(): patron = Patron.get_record(hit.meta.id) - current_roles = patron.get('roles') + current_roles = patron.get("roles") original_roles = get_original_roles(current_roles) - LOGGER.info(f'* Updating ptrn#{patron.pid} [{patron.formatted_name}]') - LOGGER.info(f'\t - Current roles are : {current_roles}') - LOGGER.info(f'\t - Original roles are : {original_roles}') - patron['roles'] = original_roles + LOGGER.info(f"* Updating ptrn#{patron.pid} [{patron.formatted_name}]") + LOGGER.info(f"\t - Current roles are : {current_roles}") + LOGGER.info(f"\t - Original roles are : {original_roles}") + patron["roles"] = original_roles patron.update(patron, dbcommit=True, reindex=False) patron_uuids.append(hit.meta.id) @@ -121,13 +125,13 @@ def _indexing_records(record_ids): if not record_ids: return - LOGGER.info(f'Indexing {len(record_ids)} records ....') + LOGGER.info(f"Indexing {len(record_ids)} records ....") indexer = PatronsIndexer() chunks = [ - record_ids[x:x + indexing_chunck_size] + record_ids[x : x + indexing_chunck_size] for x in range(0, len(record_ids), indexing_chunck_size) ] for chuncked_ids in chunks: indexer.bulk_index(chuncked_ids) count = indexer.process_bulk_queue() - LOGGER.info(f'{count} records indexed.') + LOGGER.info(f"{count} records indexed.") diff --git a/rero_ils/alembic/64a5cc96f96e_update_claimed_issue_status.py b/rero_ils/alembic/64a5cc96f96e_update_claimed_issue_status.py index 193165476d..4f0bbf2f25 100644 --- a/rero_ils/alembic/64a5cc96f96e_update_claimed_issue_status.py +++ b/rero_ils/alembic/64a5cc96f96e_update_claimed_issue_status.py @@ -26,30 +26,29 @@ from rero_ils.modules.items.models import ItemIssueStatus # revision identifiers, used by Alembic. -revision = '64a5cc96f96e' -down_revision = 'e63e5dfa2416' +revision = "64a5cc96f96e" +down_revision = "e63e5dfa2416" branch_labels = () depends_on = None -LOGGER = getLogger('alembic') +LOGGER = getLogger("alembic") chunck_size = 1000 def upgrade(): """Update item issue obsolete 'claimed' status.""" - query = ItemsSearch() \ - .filter('term', issue__status='claimed') \ - .source(False) + query = ItemsSearch().filter("term", issue__status="claimed").source(False) uuids = [hit.meta.id for hit in query.scan()] for idx, uuid in enumerate(uuids, 1): record = Item.get_record(uuid) - LOGGER.info(f'[{idx}/{len(uuids)}] Processing Item#{record.pid} ' - f'[{record.id}]...') + LOGGER.info( + f"[{idx}/{len(uuids)}] Processing Item#{record.pid} " f"[{record.id}]..." + ) status = ItemIssueStatus.LATE - if record['issue'].get('received_date'): + if record["issue"].get("received_date"): status = ItemIssueStatus.RECEIVED - record['issue']['status'] = status - record.get('issue', {}).pop('claims_count', None) + record["issue"]["status"] = status + record.get("issue", {}).pop("claims_count", None) record.update(record, commit=True) if idx % chunck_size == 0: # commit DB changes every 1000 changes. 
db.session.commit() @@ -69,15 +68,14 @@ def _indexing_records(record_ids): if not record_ids: return - LOGGER.info(f'Indexing {len(record_ids)} records ....') + LOGGER.info(f"Indexing {len(record_ids)} records ....") indexer = ItemsIndexer() chunks = [ - record_ids[x:x + chunck_size] - for x in range(0, len(record_ids), chunck_size) + record_ids[x : x + chunck_size] for x in range(0, len(record_ids), chunck_size) ] total_indexed = 0 for chuncked_ids in chunks: indexer.bulk_index(chuncked_ids) _, count = indexer.process_bulk_queue() total_indexed += count[0] - LOGGER.info(f'{total_indexed}/{len(record_ids)} records indexed.') + LOGGER.info(f"{total_indexed}/{len(record_ids)} records indexed.") diff --git a/rero_ils/alembic/74ab9da9f078_update_patron_communication_channels.py b/rero_ils/alembic/74ab9da9f078_update_patron_communication_channels.py index db080d2ed8..ef21ed137f 100644 --- a/rero_ils/alembic/74ab9da9f078_update_patron_communication_channels.py +++ b/rero_ils/alembic/74ab9da9f078_update_patron_communication_channels.py @@ -26,36 +26,39 @@ from rero_ils.modules.patrons.models import CommunicationChannel # revision identifiers, used by Alembic. -revision = '74ab9da9f078' -down_revision = '0387b753585f' +revision = "74ab9da9f078" +down_revision = "0387b753585f" branch_labels = () depends_on = None -LOGGER = getLogger('alembic') +LOGGER = getLogger("alembic") def upgrade(): """Fix incorrectly set patron communication channels.""" - query = PatronsSearch()\ - .filter('term', - patron__communication_channel=CommunicationChannel.EMAIL)\ - .filter('bool', must_not=[ - Q('exists', field='patron.additional_communication_email')])\ - .filter('bool', must_not=[Q('exists', field='email')])\ - .source(includes='pid') - pids = [(hit['pid'], hit.meta.id) for hit in query.scan()] + query = ( + PatronsSearch() + .filter("term", patron__communication_channel=CommunicationChannel.EMAIL) + .filter( + "bool", + must_not=[Q("exists", field="patron.additional_communication_email")], + ) + .filter("bool", must_not=[Q("exists", field="email")]) + .source(includes="pid") + ) + pids = [(hit["pid"], hit.meta.id) for hit in query.scan()] errors = 0 ids = [] for idx, (pid, id) in enumerate(pids, 1): if patron := Patron.get_record_by_pid(pid): ids.append(id) try: - patron['patron']['communication_channel'] = \ - CommunicationChannel.MAIL - db.session.query(patron.model_cls).filter_by( - id=patron.id).update({patron.model_cls.json: patron}) + patron["patron"]["communication_channel"] = CommunicationChannel.MAIL + db.session.query(patron.model_cls).filter_by(id=patron.id).update( + {patron.model_cls.json: patron} + ) except Exception as err: - LOGGER.error(f'{idx} * Update patron: {pid} {err}') + LOGGER.error(f"{idx} * Update patron: {pid} {err}") errors += 1 if ids: # commit session @@ -65,7 +68,7 @@ def upgrade(): indexer.bulk_index(ids) indexer.process_bulk_queue() - LOGGER.info(f'upgraded to version: {revision} errors: {errors}') + LOGGER.info(f"upgraded to version: {revision} errors: {errors}") def downgrade(): diff --git a/rero_ils/alembic/8145a7cdef99_reindex_items_with_invalid_ean.py b/rero_ils/alembic/8145a7cdef99_reindex_items_with_invalid_ean.py index f31ebdea53..5bb181aa01 100644 --- a/rero_ils/alembic/8145a7cdef99_reindex_items_with_invalid_ean.py +++ b/rero_ils/alembic/8145a7cdef99_reindex_items_with_invalid_ean.py @@ -25,24 +25,32 @@ from rero_ils.modules.documents.api import DocumentsIndexer, DocumentsSearch # revision identifiers, used by Alembic. 
-revision = '8145a7cdef99' -down_revision = '5f0b086e4b82' +revision = "8145a7cdef99" +down_revision = "5f0b086e4b82" branch_labels = () depends_on = None -LOGGER = getLogger('alembic') +LOGGER = getLogger("alembic") indexing_chunck_size = 1000 def upgrade(): """Upgrade database.""" - query = DocumentsSearch().filter( - 'nested', - path='nested_identifiers', - query=Q('bool', must=[ - Q('term', nested_identifiers__type='bf:Ean'), - ~Q('exists', field='nested_identifiers.value') - ])).source(False) + query = ( + DocumentsSearch() + .filter( + "nested", + path="nested_identifiers", + query=Q( + "bool", + must=[ + Q("term", nested_identifiers__type="bf:Ean"), + ~Q("exists", field="nested_identifiers.value"), + ], + ), + ) + .source(False) + ) uuids = [hit.meta.id for hit in query.scan()] _indexing_records(uuids) @@ -56,13 +64,13 @@ def _indexing_records(record_ids): if not record_ids: return - LOGGER.info(f'Indexing {len(record_ids)} records ....') + LOGGER.info(f"Indexing {len(record_ids)} records ....") indexer = DocumentsIndexer() chunks = [ - record_ids[x:x + indexing_chunck_size] + record_ids[x : x + indexing_chunck_size] for x in range(0, len(record_ids), indexing_chunck_size) ] for chuncked_ids in chunks: indexer.bulk_index(chuncked_ids) _, (indexer_count, error_count) = indexer.process_bulk_queue() - LOGGER.info(f'{indexer_count} records indexed, {error_count} errors') + LOGGER.info(f"{indexer_count} records indexed, {error_count} errors") diff --git a/rero_ils/alembic/8d97be2c8ad6_items_remove_claims_count.py b/rero_ils/alembic/8d97be2c8ad6_items_remove_claims_count.py index b9f68b5ed2..ae1fdb596f 100644 --- a/rero_ils/alembic/8d97be2c8ad6_items_remove_claims_count.py +++ b/rero_ils/alembic/8d97be2c8ad6_items_remove_claims_count.py @@ -25,26 +25,25 @@ from rero_ils.modules.items.api import Item, ItemsIndexer, ItemsSearch # revision identifiers, used by Alembic. -revision = '8d97be2c8ad6' -down_revision = '64a5cc96f96e' +revision = "8d97be2c8ad6" +down_revision = "64a5cc96f96e" branch_labels = () depends_on = None -LOGGER = getLogger('alembic') +LOGGER = getLogger("alembic") chunck_size = 1000 def upgrade(): """Remove unused 'claims_count' items fields.""" - query = ItemsSearch()\ - .filter('exists', field='issue.claims_count')\ - .source(False) + query = ItemsSearch().filter("exists", field="issue.claims_count").source(False) uuids = [hit.meta.id for hit in query.scan()] for idx, uuid in enumerate(uuids, 1): record = Item.get_record(uuid) - LOGGER.info(f'[{idx}/{len(uuids)}] Processing Item#{record.pid} ' - f'[{record.id}]...') - record.get('issue', {}).pop('claims_count', None) + LOGGER.info( + f"[{idx}/{len(uuids)}] Processing Item#{record.pid} " f"[{record.id}]..." 
+ ) + record.get("issue", {}).pop("claims_count", None) record.update(record, commit=True) if idx % chunck_size == 0: db.session.commit() @@ -63,15 +62,14 @@ def _indexing_records(record_ids): if not record_ids: return - LOGGER.info(f'Indexing {len(record_ids)} records ....') + LOGGER.info(f"Indexing {len(record_ids)} records ....") indexer = ItemsIndexer() chunks = [ - record_ids[x:x + chunck_size] - for x in range(0, len(record_ids), chunck_size) + record_ids[x : x + chunck_size] for x in range(0, len(record_ids), chunck_size) ] total_indexed = 0 for chuncked_ids in chunks: indexer.bulk_index(chuncked_ids) _, count = indexer.process_bulk_queue() total_indexed += count[0] - LOGGER.info(f'{total_indexed}/{len(record_ids)} records indexed.') + LOGGER.info(f"{total_indexed}/{len(record_ids)} records indexed.") diff --git a/rero_ils/alembic/90d857fb5c23_unpaid_subscription_limit.py b/rero_ils/alembic/90d857fb5c23_unpaid_subscription_limit.py index c6682b007d..f3510af2ec 100644 --- a/rero_ils/alembic/90d857fb5c23_unpaid_subscription_limit.py +++ b/rero_ils/alembic/90d857fb5c23_unpaid_subscription_limit.py @@ -20,15 +20,18 @@ from logging import getLogger -from rero_ils.modules.patron_types.api import PatronType, PatronTypesIndexer, \ - PatronTypesSearch +from rero_ils.modules.patron_types.api import ( + PatronType, + PatronTypesIndexer, + PatronTypesSearch, +) -revision = '90d857fb5c23' -down_revision = '2b0af71048a7' +revision = "90d857fb5c23" +down_revision = "2b0af71048a7" branch_labels = () depends_on = None -LOGGER = getLogger('alembic') +LOGGER = getLogger("alembic") def upgrade(): @@ -38,45 +41,47 @@ records that don't already define such a limit. The default value for existing records will be `False` to not generate unstable behavior. 
""" - query = PatronTypesSearch()\ - .exclude('exists', field='limits.unpaid_subscription')\ - .source('pid') + query = ( + PatronTypesSearch() + .exclude("exists", field="limits.unpaid_subscription") + .source("pid") + ) patron_type_pids = [hit.pid for hit in query.scan()] ids = [] for pid in patron_type_pids: record = PatronType.get_record_by_pid(pid) - record\ - .setdefault('limits', {})\ - .setdefault('unpaid_subscription', False) + record.setdefault("limits", {}).setdefault("unpaid_subscription", False) record.update(record, dbcommit=True, reindex=False) - LOGGER.info(f' * Updated PatronType#{record.pid}') + LOGGER.info(f" * Updated PatronType#{record.pid}") ids.append(record.id) _indexing_records(ids) - LOGGER.info(f'TOTAL :: {len(ids)}') + LOGGER.info(f"TOTAL :: {len(ids)}") def downgrade(): """Downgrade patron type records.""" - query = PatronTypesSearch()\ - .filter('exists', field='limits.unpaid_subscription')\ - .source('pid') + query = ( + PatronTypesSearch() + .filter("exists", field="limits.unpaid_subscription") + .source("pid") + ) patron_type_pids = [hit.pid for hit in query.scan()] ids = [] for pid in patron_type_pids: record = PatronType.get_record_by_pid(pid) - del record['limits']['unpaid_subscription'] + del record["limits"]["unpaid_subscription"] record.update(record, dbcommit=True, reindex=False) - LOGGER.info(f' * Updated PatronType#{record.pid}') + LOGGER.info(f" * Updated PatronType#{record.pid}") ids.append(record.id) _indexing_records(ids) - LOGGER.info(f'TOTAL :: {len(ids)}') + LOGGER.info(f"TOTAL :: {len(ids)}") def _indexing_records(record_ids): """Indexing some record based on record uuid.""" if len(record_ids): - LOGGER.info(f'Indexing {len(record_ids)} records ....') + LOGGER.info(f"Indexing {len(record_ids)} records ....") indexer = PatronTypesIndexer() indexer.bulk_index(record_ids) count = indexer.process_bulk_queue() - LOGGER.info(f'{count} records indexed.') + LOGGER.info(f"{count} records indexed.") diff --git a/rero_ils/alembic/9e3145d88e64_move_collection_to_his_own_table.py b/rero_ils/alembic/9e3145d88e64_move_collection_to_his_own_table.py index 3e154d661d..3b482edd48 100644 --- a/rero_ils/alembic/9e3145d88e64_move_collection_to_his_own_table.py +++ b/rero_ils/alembic/9e3145d88e64_move_collection_to_his_own_table.py @@ -27,13 +27,13 @@ from rero_ils.modules.collections.models import CollectionMetadata # revision identifiers, used by Alembic. 
-revision = '9e3145d88e64' -down_revision = 'f0e7f3b80a21' +revision = "9e3145d88e64" +down_revision = "f0e7f3b80a21" branch_labels = () depends_on = None -LOGGER = getLogger('alembic') -SCHEMA = 'https://bib.rero.ch/schemas/collections/collection-v0.0.1.json' +LOGGER = getLogger("alembic") +SCHEMA = "https://bib.rero.ch/schemas/collections/collection-v0.0.1.json" def upgrade(): @@ -41,35 +41,46 @@ CollectionMetadata.metadata.create_all(bind=db.engine) assert CollectionMetadata.query.count() == 0 results = RecordMetadata.query.filter( - RecordMetadata.json['$schema'].as_string() == SCHEMA).all() - collections = [{ - 'id': col.id, - 'json': col.json, - 'created': col.created, - 'updated': col.updated, - 'version_id': col.version_id - } for col in results] + RecordMetadata.json["$schema"].as_string() == SCHEMA + ).all() + collections = [ + { + "id": col.id, + "json": col.json, + "created": col.created, + "updated": col.updated, + "version_id": col.version_id, + } + for col in results + ] op.bulk_insert(CollectionMetadata.__table__, collections) for col in results: db.session.delete(col) db.session.commit() - LOGGER.info('migrate %s' % len(collections)) + LOGGER.info(f"migrated {len(collections)} collections") def downgrade(): """Downgrade database.""" - assert RecordMetadata.query.filter( - RecordMetadata.json['$schema'].as_string() == SCHEMA).count() == 0 + assert ( + RecordMetadata.query.filter( + RecordMetadata.json["$schema"].as_string() == SCHEMA + ).count() + == 0 + ) results = CollectionMetadata.query.all() - collections = [{ - 'id': col.id, - 'json': col.json, - 'created': col.created, - 'updated': col.updated, - 'version_id': col.version_id - } for col in results] + collections = [ + { + "id": col.id, + "json": col.json, + "created": col.created, + "updated": col.updated, + "version_id": col.version_id, + } + for col in results + ] op.bulk_insert(RecordMetadata.__table__, collections) # need to close the session before removing the table db.session.close() - op.drop_table('collection_metadata') - LOGGER.info('migrate %s record collection' % len(collections)) + op.drop_table("collection_metadata") + LOGGER.info(f"migrated {len(collections)} collection records") diff --git a/rero_ils/alembic/a710021979fe_migrate_contribution_to_entity.py b/rero_ils/alembic/a710021979fe_migrate_contribution_to_entity.py index d8aeca7393..cc51b23d69 100644 --- a/rero_ils/alembic/a710021979fe_migrate_contribution_to_entity.py +++ b/rero_ils/alembic/a710021979fe_migrate_contribution_to_entity.py @@ -21,19 +21,19 @@ from alembic import op # revision identifiers, used by Alembic. 
-revision = 'a710021979fe' -down_revision = '8145a7cdef99' +revision = "a710021979fe" +down_revision = "8145a7cdef99" branch_labels = () depends_on = None def upgrade(): """Upgrade database.""" - op.rename_table('contribution_id', 'entity_id') - op.rename_table('contribution_metadata', 'entity_metadata') + op.rename_table("contribution_id", "entity_id") + op.rename_table("contribution_metadata", "entity_metadata") def downgrade(): """Downgrade database.""" - op.rename_table('entity_id', 'contribution_id') - op.rename_table('entity_metadata', 'contribution_metadata') + op.rename_table("entity_id", "contribution_id") + op.rename_table("entity_metadata", "contribution_metadata") diff --git a/rero_ils/alembic/a941628259e1_add_keys_into_loan_index.py b/rero_ils/alembic/a941628259e1_add_keys_into_loan_index.py index 3c405f0e9a..4d020fc6cf 100644 --- a/rero_ils/alembic/a941628259e1_add_keys_into_loan_index.py +++ b/rero_ils/alembic/a941628259e1_add_keys_into_loan_index.py @@ -24,12 +24,12 @@ from rero_ils.modules.loans.api import LoansIndexer from rero_ils.modules.loans.models import LoanState -revision = 'a941628259e1' -down_revision = '21a994dc2beb' +revision = "a941628259e1" +down_revision = "21a994dc2beb" branch_labels = () depends_on = None -LOGGER = getLogger('alembic') +LOGGER = getLogger("alembic") indexing_chunck_size = 1000 @@ -37,10 +37,12 @@ def upgrade(): """Reindex 'opened' loans to add some keys into ES index.""" # Keep only loan without `location_pid` field (these loans are already # indexed with correct data) - query = current_circulation.loan_search_cls() \ - .exclude('terms', state=LoanState.CONCLUDED) \ - .exclude('exists', field='location_pid') \ - .source('pid') + query = ( + current_circulation.loan_search_cls() + .exclude("terms", state=LoanState.CONCLUDED) + .exclude("exists", field="location_pid") + .source("pid") + ) loan_uuids = [hit.meta.id for hit in query.scan()] _indexing_records(loan_uuids) @@ -54,13 +56,13 @@ def _indexing_records(record_ids): if not record_ids: return - LOGGER.info(f'Indexing {len(record_ids)} records ....') + LOGGER.info(f"Indexing {len(record_ids)} records ....") indexer = LoansIndexer() chunks = [ - record_ids[x:x + indexing_chunck_size] + record_ids[x : x + indexing_chunck_size] for x in range(0, len(record_ids), indexing_chunck_size) ] for chuncked_ids in chunks: indexer.bulk_index(chuncked_ids) count = indexer.process_bulk_queue() - LOGGER.info(f'{count} records indexed.') + LOGGER.info(f"{count} records indexed.") diff --git a/rero_ils/alembic/add75cbcad66_migrate_obsolete_country_codes.py b/rero_ils/alembic/add75cbcad66_migrate_obsolete_country_codes.py index 406bbf700e..ebb9381e64 100644 --- a/rero_ils/alembic/add75cbcad66_migrate_obsolete_country_codes.py +++ b/rero_ils/alembic/add75cbcad66_migrate_obsolete_country_codes.py @@ -31,12 +31,12 @@ # revision identifiers, used by Alembic. 
-revision = 'add75cbcad66' -down_revision = 'e3eb396b39bb' +revision = "add75cbcad66" +down_revision = "e3eb396b39bb" branch_labels = () depends_on = None -LOGGER = getLogger('alembic') +LOGGER = getLogger("alembic") def upgrade(): @@ -49,24 +49,24 @@ def upgrade(): def fix_documents(pids, old_country, new_country): for pid in pids: doc = Document.get_record_by_pid(pid) - for provision in doc.get('provisionActivity'): - for place in provision.get('place'): - if place.get('country') == old_country: - place['country'] = new_country + for provision in doc.get("provisionActivity"): + for place in provision.get("place"): + if place.get("country") == old_country: + place["country"] = new_country LOGGER.info( - f'Doc {pid}: replacing {old_country} by' - f' {new_country}') + f"Doc {pid}: replacing {old_country} by" f" {new_country}" + ) doc.replace(doc, commit=True, dbcommit=True, reindex=True) def fix_patrons(pids, old_country, new_country): for pid in pids: ptrn = Patron.get_record_by_pid(pid) - if address := ptrn.get('second_address', {}): - if address.get('country') == old_country: - address['country'] = new_country + if address := ptrn.get("second_address", {}): + if address.get("country") == old_country: + address["country"] = new_country LOGGER.info( - f'Patron {pid}: replacing {old_country} by' - f' {new_country}') + f"Patron {pid}: replacing {old_country} by" f" {new_country}" + ) ptrn.replace(ptrn, commit=True, dbcommit=True, reindex=True) def fix_users(query, old_country, new_country): @@ -74,27 +74,32 @@ def fix_users(query, old_country, new_country): profile.country = new_country db.session.merge(profile) LOGGER.info( - f'User {profile.last_name}, {profile.first_name}' - f': replacing {old_country} by {new_country}') + f"User {profile.last_name}, {profile.first_name}" + f": replacing {old_country} by {new_country}" + ) db.session.commit() for old_country, new_country in _OBSOLETE_COUNTRIES_MAPPING.items(): - LOGGER.info(f'Processing {old_country}') + LOGGER.info(f"Processing {old_country}") if doc_pids := [ - hit.pid for hit in DocumentsSearch() - .filter('term', provisionActivity__place__country=old_country) - .source('pid').scan() + hit.pid + for hit in DocumentsSearch() + .filter("term", provisionActivity__place__country=old_country) + .source("pid") + .scan() ]: - LOGGER.info(f'Found {len(doc_pids)} documents with {old_country}.') + LOGGER.info(f"Found {len(doc_pids)} documents with {old_country}.") fix_documents(doc_pids, old_country, new_country) if ptrn_pids := [ - hit.pid for hit in PatronsSearch() - .filter('term', second_address__country=old_country) - .source('pid').scan() + hit.pid + for hit in PatronsSearch() + .filter("term", second_address__country=old_country) + .source("pid") + .scan() ]: - LOGGER.info(f'Found {len(ptrn_pids)} patrons with {old_country}.') + LOGGER.info(f"Found {len(ptrn_pids)} patrons with {old_country}.") fix_patrons(ptrn_pids, old_country, new_country) # query = UserProfile.query.filter_by(country=old_country) diff --git a/rero_ils/alembic/b90f8b148948_add_column_to_selfcheck_terminals.py b/rero_ils/alembic/b90f8b148948_add_column_to_selfcheck_terminals.py index 77261633fb..c97617b4fd 100644 --- a/rero_ils/alembic/b90f8b148948_add_column_to_selfcheck_terminals.py +++ b/rero_ils/alembic/b90f8b148948_add_column_to_selfcheck_terminals.py @@ -24,26 +24,27 @@ from alembic import op # revision identifiers, used by Alembic. 
-revision = 'b90f8b148948' -down_revision = '54134957af7d' +revision = "b90f8b148948" +down_revision = "54134957af7d" branch_labels = () depends_on = None -LOGGER = getLogger('alembic') +LOGGER = getLogger("alembic") def upgrade(): """Upgrade database.""" # ### commands auto generated by Alembic - please adjust! ### - op.add_column('selfcheck_terminals', sa.Column('comments', sa.Text(), - nullable=True)) - LOGGER.info(f'column added.') + op.add_column( + "selfcheck_terminals", sa.Column("comments", sa.Text(), nullable=True) + ) + LOGGER.info(f"column added.") # ### end Alembic commands ### def downgrade(): """Downgrade database.""" # ### commands auto generated by Alembic - please adjust! ### - op.drop_column('selfcheck_terminals', 'comments') - LOGGER.info(f'column dropped.') + op.drop_column("selfcheck_terminals", "comments") + LOGGER.info(f"column dropped.") # ### end Alembic commands ### diff --git a/rero_ils/alembic/bd78d77eb7e3_isfiction.py b/rero_ils/alembic/bd78d77eb7e3_isfiction.py index 29082eb9e1..941d3842b6 100644 --- a/rero_ils/alembic/bd78d77eb7e3_isfiction.py +++ b/rero_ils/alembic/bd78d77eb7e3_isfiction.py @@ -21,17 +21,16 @@ from invenio_db import db -from rero_ils.modules.documents.api import Document, DocumentsIndexer, \ - DocumentsSearch +from rero_ils.modules.documents.api import Document, DocumentsIndexer, DocumentsSearch from rero_ils.modules.documents.models import DocumentFictionType # revision identifiers, used by Alembic. -revision = 'bd78d77eb7e3' -down_revision = ('fc45b1b998b8', 'a941628259e1') +revision = "bd78d77eb7e3" +down_revision = ("fc45b1b998b8", "a941628259e1") branch_labels = () depends_on = None -LOGGER = getLogger('alembic') +LOGGER = getLogger("alembic") DEBUG = False @@ -47,17 +46,19 @@ def dbcommit_and_bulk_index(uuids, idx): indexer = DocumentsIndexer() indexer.bulk_index(uuids) count = indexer.process_bulk_queue() - LOGGER.info(f'{count[1]} records indexed.') + LOGGER.info(f"{count[1]} records indexed.") return [] def upgrade(): """Upgrade database.""" # fiction statement: fiction - FICTIONS_TERMS = ['Fictions', 'Films de fiction'] - query = DocumentsSearch() \ - .filter('terms', facet_genre_form_en=FICTIONS_TERMS) \ - .exclude('exists', field='fiction_statement') + FICTIONS_TERMS = ["Fictions", "Films de fiction"] + query = ( + DocumentsSearch() + .filter("terms", facet_genre_form_en=FICTIONS_TERMS) + .exclude("exists", field="fiction_statement") + ) ids = [hit.meta.id for hit in query.source().scan()] LOGGER.info(f'Add fiction_statement="fiction" to documents: {len(ids)}') uuids = [] @@ -67,54 +68,50 @@ def upgrade(): uuids.append(_id) if DEBUG: LOGGER.info( - f'{idx:<10} {doc.pid:<10} ' - 'add fiction_statement="fiction"' + f"{idx:<10} {doc.pid:<10} " 'add fiction_statement="fiction"' ) - doc['fiction_statement'] = DocumentFictionType.Fiction.value + doc["fiction_statement"] = DocumentFictionType.Fiction.value doc.update(data=doc, commit=True, dbcommit=False, reindex=False) if len(uuids) >= 1000: uuids = dbcommit_and_bulk_index(uuids, idx) dbcommit_and_bulk_index(uuids, idx) # fiction statement: non-fiction - query = DocumentsSearch() \ - .exclude('term', harvested=True) \ - .exclude('terms', facet_genre_form_en=FICTIONS_TERMS) \ - .exclude('exists', field='fiction_statement') \ - .filter('exists', field='subjects') + query = ( + DocumentsSearch() + .exclude("term", harvested=True) + .exclude("terms", facet_genre_form_en=FICTIONS_TERMS) + .exclude("exists", field="fiction_statement") + .filter("exists", field="subjects") + ) ids = 
[hit.meta.id for hit in query.source().scan()] - LOGGER.info( - f'Add fiction_statement="non_fiction" to documents: {len(ids)}') + LOGGER.info(f'Add fiction_statement="non_fiction" to documents: {len(ids)}') uuids = [] for idx, _id in enumerate(ids, 1): if doc := Document.get_record(_id): uuids.append(_id) if DEBUG: LOGGER.info( - f'{idx:<10} {doc.pid:<10} ' - 'add fiction_statement="non_fiction"' + f"{idx:<10} {doc.pid:<10} " 'add fiction_statement="non_fiction"' ) - doc['fiction_statement'] = DocumentFictionType.NonFiction.value + doc["fiction_statement"] = DocumentFictionType.NonFiction.value doc.update(data=doc, commit=True, dbcommit=False, reindex=False) if len(uuids) >= 1000: uuids = dbcommit_and_bulk_index(uuids, idx) dbcommit_and_bulk_index(uuids, idx) DocumentsSearch().flush_and_refresh() # fiction statement: unspecified - query = DocumentsSearch() \ - .exclude('exists', field='fiction_statement') + query = DocumentsSearch().exclude("exists", field="fiction_statement") ids = [hit.meta.id for hit in query.source().scan()] - LOGGER.info( - f'Add fiction_statement="unspecified" to documents: {len(ids)}') + LOGGER.info(f'Add fiction_statement="unspecified" to documents: {len(ids)}') uuids = [] for idx, _id in enumerate(ids, 1): if doc := Document.get_record(_id): uuids.append(_id) if DEBUG: LOGGER.info( - f'{idx:<10} {doc.pid:<10} ' - 'add fiction_statement="unspecified"' + f"{idx:<10} {doc.pid:<10} " 'add fiction_statement="unspecified"' ) - doc['fiction_statement'] = DocumentFictionType.Unspecified.value + doc["fiction_statement"] = DocumentFictionType.Unspecified.value doc.update(data=doc, commit=True, dbcommit=False, reindex=False) if len(uuids) >= 1000: uuids = dbcommit_and_bulk_index(uuids, idx) @@ -123,21 +120,17 @@ def upgrade(): def downgrade(): """Downgrade database.""" - query = DocumentsSearch() \ - .filter('exists', field='fiction_statement') + query = DocumentsSearch().filter("exists", field="fiction_statement") ids = [hit.meta.id for hit in query.source().scan()] - LOGGER.info(f'Remove fiction_statement from documents: {len(ids)}') + LOGGER.info(f"Remove fiction_statement from documents: {len(ids)}") uuids = [] idx = 0 for idx, _id in enumerate(ids, 1): if doc := Document.get_record(_id): uuids.append(_id) if DEBUG: - LOGGER.info( - f'{idx:<10} {doc.pid:<10} ' - 'remove fiction_statement' - ) - doc.pop('fiction_statement', None) + LOGGER.info(f"{idx:<10} {doc.pid:<10} " "remove fiction_statement") + doc.pop("fiction_statement", None) doc.update(data=doc, commit=True, dbcommit=False, reindex=False) if len(uuids) >= 1000: uuids = dbcommit_and_bulk_index(uuids, idx) diff --git a/rero_ils/alembic/cc7ffbe1e078_cipo_request_duration_field_creation.py b/rero_ils/alembic/cc7ffbe1e078_cipo_request_duration_field_creation.py index 46b41243a5..71d4fa4486 100644 --- a/rero_ils/alembic/cc7ffbe1e078_cipo_request_duration_field_creation.py +++ b/rero_ils/alembic/cc7ffbe1e078_cipo_request_duration_field_creation.py @@ -23,37 +23,40 @@ from rero_ils.modules.circ_policies.api import CircPoliciesSearch, CircPolicy # revision identifiers, used by Alembic. 
-revision = 'cc7ffbe1e078' -down_revision = '9e3145d88e64' +revision = "cc7ffbe1e078" +down_revision = "9e3145d88e64" branch_labels = () depends_on = None -LOGGER = getLogger('alembic') +LOGGER = getLogger("alembic") def upgrade(): """Update circulation policy records.""" - query = CircPoliciesSearch() \ - .filter('term', allow_requests=True) \ - .source(['pid']).scan() + query = ( + CircPoliciesSearch().filter("term", allow_requests=True).source(["pid"]).scan() + ) for hit in query: cipo = CircPolicy.get_record_by_pid(hit.pid) - cipo['pickup_hold_duration'] = 10 # default value is 10 days + cipo["pickup_hold_duration"] = 10 # default value is 10 days cipo.update(cipo, dbcommit=True, reindex=True) - LOGGER.info(f' * Updated cipo#{cipo.pid}') + LOGGER.info(f" * Updated cipo#{cipo.pid}") CircPoliciesSearch.flush_and_refresh() - LOGGER.info(f'upgrade to {revision}') + LOGGER.info(f"upgrade to {revision}") def downgrade(): """Reset circulation policy records.""" - query = CircPoliciesSearch() \ - .filter('exists', field='pickup_hold_duration') \ - .source(['pid']).scan() + query = ( + CircPoliciesSearch() + .filter("exists", field="pickup_hold_duration") + .source(["pid"]) + .scan() + ) for hit in query: cipo = CircPolicy.get_record_by_pid(hit.pid) - del cipo['pickup_hold_duration'] + del cipo["pickup_hold_duration"] cipo.update(cipo, dbcommit=True, reindex=True) - LOGGER.info(f' * Updated cipo#{cipo.pid}') + LOGGER.info(f" * Updated cipo#{cipo.pid}") CircPoliciesSearch.flush_and_refresh() - LOGGER.info(f'downgrade to revision {down_revision}') + LOGGER.info(f"downgrade to revision {down_revision}") diff --git a/rero_ils/alembic/ce4923ba5286_delete_expired_items_temporary_locations.py b/rero_ils/alembic/ce4923ba5286_delete_expired_items_temporary_locations.py index 5d8f568bb3..96e347cca9 100644 --- a/rero_ils/alembic/ce4923ba5286_delete_expired_items_temporary_locations.py +++ b/rero_ils/alembic/ce4923ba5286_delete_expired_items_temporary_locations.py @@ -21,27 +21,26 @@ from rero_ils.modules.items.api import Item, ItemsSearch -revision = 'ce4923ba5286' -down_revision = '05555c03fe49' +revision = "ce4923ba5286" +down_revision = "05555c03fe49" branch_labels = () depends_on = None -LOGGER = getLogger('alembic') +LOGGER = getLogger("alembic") def index_items_with_temporary_location(): """Index items with temporary location.""" - query = ItemsSearch() \ - .filter('exists', field='temporary_location').source(['pid']) + query = ItemsSearch().filter("exists", field="temporary_location").source(["pid"]) ids = [(hit.meta.id, hit.pid) for hit in query.scan()] errors = 0 for idx, (id, pid) in enumerate(ids): - LOGGER.info(f'{idx} * Reindex item: {pid}') + LOGGER.info(f"{idx} * Reindex item: {pid}") try: item = Item.get_record(id) item.reindex() except Exception as err: - LOGGER.error(f'{idx} * Reindex item: {pid} {err}') + LOGGER.error(f"{idx} * Reindex item: {pid} {err}") errors += 1 return errors @@ -49,10 +48,10 @@ def index_items_with_temporary_location(): def upgrade(): """Index items with temporary location.""" errors = index_items_with_temporary_location() - LOGGER.info(f'upgraded to version: {revision} errors: {errors}') + LOGGER.info(f"upgraded to version: {revision} errors: {errors}") def downgrade(): """Index items with temporary location.""" errors = index_items_with_temporary_location() - LOGGER.info(f'downgraded to version: {down_revision} errors: {errors}') + LOGGER.info(f"downgraded to version: {down_revision} errors: {errors}") diff --git 
a/rero_ils/alembic/e3eb396b39bb_migration_ill_pickup.py b/rero_ils/alembic/e3eb396b39bb_migration_ill_pickup.py index c65088263f..30dcf57ef7 100644 --- a/rero_ils/alembic/e3eb396b39bb_migration_ill_pickup.py +++ b/rero_ils/alembic/e3eb396b39bb_migration_ill_pickup.py @@ -22,12 +22,12 @@ from rero_ils.modules.locations.api import Location, LocationsSearch -revision = 'e3eb396b39bb' -down_revision = '8145a7cdef99' +revision = "e3eb396b39bb" +down_revision = "8145a7cdef99" branch_labels = () depends_on = None -LOGGER = getLogger('alembic') +LOGGER = getLogger("alembic") def upgrade(): @@ -35,17 +35,15 @@ def upgrade(): Assign ill pickup on locations that are pickup. """ - query = LocationsSearch() \ - .filter('term', is_pickup=True) \ - .source(['pid']) + query = LocationsSearch().filter("term", is_pickup=True).source(["pid"]) hits = list(query.scan()) for hit in hits: location = Location.get_record_by_pid(hit.pid) - location['is_ill_pickup'] = True - location['ill_pickup_name'] = location['pickup_name'] + location["is_ill_pickup"] = True + location["ill_pickup_name"] = location["pickup_name"] location.update(location, dbcommit=True, reindex=True) - LOGGER.info(f' * Upgrade location#{location.pid}') - LOGGER.info(f'TOTAL :: {len(hits)}') + LOGGER.info(f" * Upgrade location#{location.pid}") + LOGGER.info(f"TOTAL :: {len(hits)}") def downgrade(): diff --git a/rero_ils/alembic/e63e5dfa2416_new_vendor_serial_contact.py b/rero_ils/alembic/e63e5dfa2416_new_vendor_serial_contact.py index 579f940198..4e32ed45d4 100644 --- a/rero_ils/alembic/e63e5dfa2416_new_vendor_serial_contact.py +++ b/rero_ils/alembic/e63e5dfa2416_new_vendor_serial_contact.py @@ -24,12 +24,12 @@ from rero_ils.modules.vendors.api import Vendor, VendorsIndexer, VendorsSearch from rero_ils.modules.vendors.models import VendorContactType -revision = 'e63e5dfa2416' -down_revision = 'add75cbcad66' +revision = "e63e5dfa2416" +down_revision = "add75cbcad66" branch_labels = () depends_on = None -LOGGER = getLogger('alembic') +LOGGER = getLogger("alembic") indexing_chunck_size = 1000 @@ -39,25 +39,25 @@ def upgrade(): uuids_to_reindex = [] for uuid in uuids: record = Vendor.get_record(uuid) - record.setdefault('communication_language', 'fre') + record.setdefault("communication_language", "fre") changes = False - if contact_info := record.pop('default_contact', None): - contact_info['type'] = VendorContactType.DEFAULT - record.setdefault('contacts', []).append(contact_info) + if contact_info := record.pop("default_contact", None): + contact_info["type"] = VendorContactType.DEFAULT + record.setdefault("contacts", []).append(contact_info) changes = True - if contact_info := record.pop('order_contact', None): - contact_info['type'] = VendorContactType.ORDER - if record.get('contacts', [{}])[0].get('city'): - contact_info.setdefault('city', record['contacts'][0]['city']) - record.setdefault('contacts', []).append(contact_info) + if contact_info := record.pop("order_contact", None): + contact_info["type"] = VendorContactType.ORDER + if record.get("contacts", [{}])[0].get("city"): + contact_info.setdefault("city", record["contacts"][0]["city"]) + record.setdefault("contacts", []).append(contact_info) changes = True if changes: - LOGGER.info(f'* Updating vendor#{record.pid} [{uuid}]...') + LOGGER.info(f"* Updating vendor#{record.pid} [{uuid}]...") try: record.update(record, dbcommit=True, reindex=False) uuids_to_reindex.append(uuid) except Exception as e: - LOGGER.error(f'Error for pid {record.pid}: {e}') + LOGGER.error(f"Error for pid 
{record.pid}: {e}") def downgrade(): @@ -67,16 +67,16 @@ def downgrade(): for uuid in uuids: record = Vendor.get_record(uuid) changes = False - for contact in record.pop('contacts', []): - contact_type = contact.pop('type', None) + for contact in record.pop("contacts", []): + contact_type = contact.pop("type", None) if contact_type == VendorContactType.DEFAULT: - record['default_contact'] = contact + record["default_contact"] = contact changes = True if contact == VendorContactType.ORDER: - record['order_contact'] = contact + record["order_contact"] = contact changes = True if changes: - LOGGER.info(f'* Updating vendor#{record.pid} [{uuid}]...') + LOGGER.info(f"* Updating vendor#{record.pid} [{uuid}]...") record.update(record, dbcommit=True, reindex=False) uuids_to_reindex.append(uuid) _indexing_records(uuids_to_reindex) @@ -87,10 +87,10 @@ def _indexing_records(record_ids): if not record_ids: return - LOGGER.info(f'Indexing {len(record_ids)} records ....') + LOGGER.info(f"Indexing {len(record_ids)} records ....") indexer = VendorsIndexer() chunks = [ - record_ids[x:x + indexing_chunck_size] + record_ids[x : x + indexing_chunck_size] for x in range(0, len(record_ids), indexing_chunck_size) ] total_indexed = 0 @@ -98,4 +98,4 @@ def _indexing_records(record_ids): indexer.bulk_index(chuncked_ids) _, count = indexer.process_bulk_queue() total_indexed += count[0] - LOGGER.info(f'{total_indexed}/{len(record_ids)} records indexed.') + LOGGER.info(f"{total_indexed}/{len(record_ids)} records indexed.") diff --git a/rero_ils/alembic/eec683a446e5_merging_rero_ils_branches.py b/rero_ils/alembic/eec683a446e5_merging_rero_ils_branches.py index 16712fd686..64007b641b 100644 --- a/rero_ils/alembic/eec683a446e5_merging_rero_ils_branches.py +++ b/rero_ils/alembic/eec683a446e5_merging_rero_ils_branches.py @@ -21,13 +21,13 @@ from logging import getLogger # revision identifiers, used by Alembic. -revision = 'eec683a446e5' -down_revision = ('fc45b1b998b8', 'a941628259e1') +revision = "eec683a446e5" +down_revision = ("fc45b1b998b8", "a941628259e1") branch_labels = () depends_on = None -LOGGER = getLogger('alembic') +LOGGER = getLogger("alembic") def upgrade(): diff --git a/rero_ils/alembic/f0e7f3b80a21_initial.py b/rero_ils/alembic/f0e7f3b80a21_initial.py index d43f19a33a..306bc662e0 100644 --- a/rero_ils/alembic/f0e7f3b80a21_initial.py +++ b/rero_ils/alembic/f0e7f3b80a21_initial.py @@ -20,9 +20,9 @@ # revision identifiers, used by Alembic. -revision = 'f0e7f3b80a21' +revision = "f0e7f3b80a21" down_revision = None -branch_labels = ('rero_ils',) +branch_labels = ("rero_ils",) depends_on = None diff --git a/rero_ils/alembic/fc45b1b998b8_holdings_affiliated_to_libraries.py b/rero_ils/alembic/fc45b1b998b8_holdings_affiliated_to_libraries.py index 027bf24d01..4d053694d8 100644 --- a/rero_ils/alembic/fc45b1b998b8_holdings_affiliated_to_libraries.py +++ b/rero_ils/alembic/fc45b1b998b8_holdings_affiliated_to_libraries.py @@ -21,39 +21,34 @@ from logging import getLogger -from rero_ils.modules.organisations.api import Organisation, \ - OrganisationsSearch +from rero_ils.modules.organisations.api import Organisation, OrganisationsSearch # revision identifiers, used by Alembic. 
-revision = 'fc45b1b998b8' -down_revision = '74ab9da9f078' +revision = "fc45b1b998b8" +down_revision = "74ab9da9f078" branch_labels = () depends_on = None -LOGGER = getLogger('alembic') +LOGGER = getLogger("alembic") def upgrade(): """Upgrade organisations online_harvested_source to list.""" - query = OrganisationsSearch() \ - .filter('exists', field='online_harvested_source') - LOGGER.info(f'Update Organisations: {query.count()}') - for idx, hit in enumerate(query.source('pid').scan()): + query = OrganisationsSearch().filter("exists", field="online_harvested_source") + LOGGER.info(f"Update Organisations: {query.count()}") + for idx, hit in enumerate(query.source("pid").scan()): org = Organisation.get_record_by_pid(hit.pid) - org['online_harvested_source'] = [org['online_harvested_source']] - LOGGER.info( - f'{idx:<3} org: {org.pid} -> {org["online_harvested_source"]}') + org["online_harvested_source"] = [org["online_harvested_source"]] + LOGGER.info(f'{idx:<3} org: {org.pid} -> {org["online_harvested_source"]}') org.update(data=org, dbcommit=True, reindex=True) def downgrade(): """Downgrade organisations online_harvested_source to string.""" - query = OrganisationsSearch() \ - .filter('exists', field='online_harvested_source') - LOGGER.info(f'Downgrad Organisations: {query.count()}') - for idx, hit in enumerate(query.source('pid').scan()): + query = OrganisationsSearch().filter("exists", field="online_harvested_source") + LOGGER.info(f"Downgrade Organisations: {query.count()}") + for idx, hit in enumerate(query.source("pid").scan()): org = Organisation.get_record_by_pid(hit.pid) - org['online_harvested_source'] = org['online_harvested_source'][0] - LOGGER.info( - f'{idx:<3} org: {org.pid} -> {org["online_harvested_source"]}') + org["online_harvested_source"] = org["online_harvested_source"][0] + LOGGER.info(f'{idx:<3} org: {org.pid} -> {org["online_harvested_source"]}') org.update(data=org, dbcommit=True, reindex=True) diff --git a/rero_ils/celery.py b/rero_ils/celery.py index 84e59171e0..a530e9c690 100644 --- a/rero_ils/celery.py +++ b/rero_ils/celery.py @@ -26,10 +26,12 @@ # load .env and .flaskenv load_dotenv() -celery = create_celery_app(create_ui( - SENTRY_TRANSPORT='raven.transport.http.HTTPTransport', - RATELIMIT_ENABLED=False, -)) +celery = create_celery_app( + create_ui( + SENTRY_TRANSPORT="raven.transport.http.HTTPTransport", + RATELIMIT_ENABLED=False, + ) +) """Celery application for Invenio. Overrides SENTRY_TRANSPORT with synchronous HTTP transport since Celery does not deal nicely with the default threaded transport. @@ -37,4 +39,4 @@ # Trigger an app log message upon import. This makes Sentry logging # work with `get_task_logger(__name__)`. 
-celery.flask_app.logger.info('Created Celery app') +celery.flask_app.logger.info("Created Celery app") diff --git a/rero_ils/config.py b/rero_ils/config.py index 6ec69ed886..595fae2d1d 100644 --- a/rero_ils/config.py +++ b/rero_ils/config.py @@ -31,55 +31,68 @@ from celery.schedules import crontab from flask import request -from invenio_circulation.pidstore.pids import CIRCULATION_LOAN_FETCHER, \ - CIRCULATION_LOAN_MINTER, CIRCULATION_LOAN_PID_TYPE -from invenio_circulation.search.api import LoansSearch -from invenio_circulation.transitions.transitions import CreatedToPending, \ - ItemAtDeskToItemOnLoan, ItemInTransitHouseToItemReturned, \ - ItemOnLoanToItemInTransitHouse, ItemOnLoanToItemOnLoan, \ - ItemOnLoanToItemReturned, PendingToItemAtDesk, \ - PendingToItemInTransitPickup, ToCancelled, ToItemOnLoan +from invenio_circulation.pidstore.pids import ( + CIRCULATION_LOAN_FETCHER, + CIRCULATION_LOAN_MINTER, + CIRCULATION_LOAN_PID_TYPE, +) +from invenio_circulation.transitions.transitions import ( + CreatedToPending, + ItemAtDeskToItemOnLoan, + ItemInTransitHouseToItemReturned, + ItemOnLoanToItemInTransitHouse, + ItemOnLoanToItemOnLoan, + ItemOnLoanToItemReturned, + PendingToItemAtDesk, + PendingToItemInTransitPickup, + ToCancelled, + ToItemOnLoan, +) from invenio_records_rest.facets import range_filter, terms_filter from invenio_records_rest.utils import allow_all, deny_all from rero_ils.modules.acquisition.acq_accounts.api import AcqAccount -from rero_ils.modules.acquisition.acq_accounts.permissions import \ - AcqAccountPermissionPolicy +from rero_ils.modules.acquisition.acq_accounts.permissions import ( + AcqAccountPermissionPolicy, +) from rero_ils.modules.acquisition.acq_invoices.api import AcquisitionInvoice -from rero_ils.modules.acquisition.acq_invoices.permissions import \ - AcqInvoicePermissionPolicy +from rero_ils.modules.acquisition.acq_invoices.permissions import ( + AcqInvoicePermissionPolicy, +) from rero_ils.modules.acquisition.acq_order_lines.api import AcqOrderLine -from rero_ils.modules.acquisition.acq_order_lines.permissions import \ - AcqOrderLinePermissionPolicy +from rero_ils.modules.acquisition.acq_order_lines.permissions import ( + AcqOrderLinePermissionPolicy, +) from rero_ils.modules.acquisition.acq_orders.api import AcqOrder -from rero_ils.modules.acquisition.acq_orders.permissions import \ - AcqOrderPermissionPolicy +from rero_ils.modules.acquisition.acq_orders.permissions import AcqOrderPermissionPolicy from rero_ils.modules.acquisition.acq_receipt_lines.api import AcqReceiptLine -from rero_ils.modules.acquisition.acq_receipt_lines.permissions import \ - AcqReceiptLinePermissionPolicy +from rero_ils.modules.acquisition.acq_receipt_lines.permissions import ( + AcqReceiptLinePermissionPolicy, +) from rero_ils.modules.acquisition.acq_receipts.api import AcqReceipt -from rero_ils.modules.acquisition.acq_receipts.permissions import \ - AcqReceiptPermissionPolicy +from rero_ils.modules.acquisition.acq_receipts.permissions import ( + AcqReceiptPermissionPolicy, +) from rero_ils.modules.acquisition.budgets.api import Budget -from rero_ils.modules.acquisition.budgets.permissions import \ - BudgetPermissionPolicy +from rero_ils.modules.acquisition.budgets.permissions import BudgetPermissionPolicy from rero_ils.modules.entities.local_entities.api import LocalEntity -from rero_ils.modules.entities.local_entities.permissions import \ - LocalEntityPermissionPolicy +from rero_ils.modules.entities.local_entities.permissions import ( + LocalEntityPermissionPolicy, +) from 
rero_ils.modules.entities.models import EntityFieldWithRef, EntityType from rero_ils.modules.entities.remote_entities.api import RemoteEntity -from rero_ils.modules.entities.remote_entities.permissions import \ - RemoteEntityPermissionPolicy +from rero_ils.modules.entities.remote_entities.permissions import ( + RemoteEntityPermissionPolicy, +) +from rero_ils.modules.loans.api import LoansSearch from .modules.circ_policies.api import CircPolicy -from .modules.circ_policies.permissions import \ - CirculationPolicyPermissionPolicy +from .modules.circ_policies.permissions import CirculationPolicyPermissionPolicy from .modules.collections.api import Collection from .modules.collections.permissions import CollectionPermissionPolicy from .modules.documents.api import Document from .modules.documents.permissions import DocumentPermissionPolicy -from .modules.documents.query import acquisition_filter, \ - nested_identified_filter +from .modules.documents.query import acquisition_filter, nested_identified_filter from .modules.holdings.api import Holding from .modules.holdings.models import HoldingCirculationAction from .modules.holdings.permissions import HoldingsPermissionPolicy @@ -90,25 +103,30 @@ from .modules.items.api import Item from .modules.items.models import ItemCirculationAction from .modules.items.permissions import ItemPermissionPolicy -from .modules.items.utils import item_location_retriever, \ - same_location_validator +from .modules.items.utils import item_location_retriever, same_location_validator from .modules.libraries.api import Library from .modules.libraries.permissions import LibraryPermissionPolicy from .modules.loans.api import Loan from .modules.loans.models import LoanState from .modules.loans.permissions import LoanPermissionPolicy from .modules.loans.query import misc_status_filter -from .modules.loans.utils import can_be_requested, get_default_loan_duration, \ - get_extension_params, is_item_available_for_checkout, \ - loan_build_document_ref, loan_build_item_ref, loan_build_patron_ref, \ - validate_item_pickup_transaction_locations, validate_loan_duration +from .modules.loans.utils import ( + can_be_requested, + get_default_loan_duration, + get_extension_params, + is_item_available_for_checkout, + loan_build_document_ref, + loan_build_item_ref, + loan_build_patron_ref, + validate_item_pickup_transaction_locations, + validate_loan_duration, +) from .modules.local_fields.api import LocalField from .modules.local_fields.permissions import LocalFieldPermissionPolicy from .modules.locations.api import Location from .modules.locations.permissions import LocationPermissionPolicy from .modules.notifications.api import Notification -from .modules.notifications.dispatcher import \ - Dispatcher as NotificationDispatcher +from .modules.notifications.dispatcher import Dispatcher as NotificationDispatcher from .modules.notifications.models import NotificationType from .modules.notifications.permissions import NotificationPermissionPolicy from .modules.operation_logs.api import OperationLog @@ -116,12 +134,12 @@ from .modules.organisations.api import Organisation from .modules.organisations.permissions import OrganisationPermissionPolicy from .modules.patron_transaction_events.api import PatronTransactionEvent -from .modules.patron_transaction_events.permissions import \ - PatronTransactionEventPermissionPolicy +from .modules.patron_transaction_events.permissions import ( + PatronTransactionEventPermissionPolicy, +) from .modules.patron_transaction_events.utils import 
total_facet_filter_builder from .modules.patron_transactions.api import PatronTransaction -from .modules.patron_transactions.permissions import \ - PatronTransactionPermissionPolicy +from .modules.patron_transactions.permissions import PatronTransactionPermissionPolicy from .modules.patron_types.api import PatronType from .modules.patron_types.permissions import PatronTypePermissionPolicy from .modules.patrons.api import Patron @@ -132,22 +150,29 @@ from .modules.stats.api.api import Stat from .modules.stats.permissions import StatisticsPermissionPolicy from .modules.stats_cfg.api import StatConfiguration -from .modules.stats_cfg.permissions import \ - StatisticsConfigurationPermissionPolicy +from .modules.stats_cfg.permissions import StatisticsConfigurationPermissionPolicy from .modules.templates.permissions import TemplatePermissionPolicy -from .modules.users.api import get_profile_countries, \ - get_readonly_profile_fields +from .modules.users.api import get_profile_countries, get_readonly_profile_fields from .modules.users.models import UserRole from .modules.vendors.api import Vendor from .modules.vendors.permissions import VendorPermissionPolicy -from .permissions import librarian_delete_permission_factory, \ - librarian_permission_factory, librarian_update_permission_factory, \ - wiki_edit_ui_permission, wiki_edit_view_permission -from .query import and_i18n_term_filter, and_term_filter, \ - exclude_terms_filter, i18n_terms_filter, or_terms_filter_by_criteria +from .permissions import ( + librarian_delete_permission_factory, + librarian_permission_factory, + librarian_update_permission_factory, + wiki_edit_ui_permission, + wiki_edit_view_permission, +) +from .query import ( + and_i18n_term_filter, + and_term_filter, + exclude_terms_filter, + i18n_terms_filter, + or_terms_filter_by_criteria, +) from .utils import TranslatedList, get_current_language -APP_THEME = ['bootstrap3'] +APP_THEME = ["bootstrap3"] def _(x): @@ -158,14 +183,16 @@ def _(x): # Personalized homepage RERO_ILS_PERSONALIZED_CSS_BY_VIEW = True RERO_ILS_PERSONALIZED_HOMEPAGE_BY_VIEW = False -RERO_ILS_HOMEPAGE_GENERAL_BLOCK = 'rero_ils/_frontpage_block_test.html' -RERO_ILS_HOMEPAGE_GENERAL_SLOGAN = 'rero_ils/_frontpage_slogan_test.html' +RERO_ILS_HOMEPAGE_GENERAL_BLOCK = "rero_ils/_frontpage_block_test.html" +RERO_ILS_HOMEPAGE_GENERAL_SLOGAN = "rero_ils/_frontpage_slogan_test.html" #: Link to privacy and data protection policy for the instance -RERO_ILS_PRIVACY_POLICY_URL = 'https://www.rero.ch/legal/privacy/declaration_protection_donnees_RERO-ILS.pdf' +RERO_ILS_PRIVACY_POLICY_URL = ( + "https://www.rero.ch/legal/privacy/declaration_protection_donnees_RERO-ILS.pdf" +) # ILL Request config RERO_ILS_ILL_REQUEST_ON_GLOBAL_VIEW = True -RERO_ILS_ILL_DEFAULT_SOURCE = 'RERO +' +RERO_ILS_ILL_DEFAULT_SOURCE = "RERO +" RERO_ILS_ILL_HIDE_MONTHS = 6 # DOCUMENT ADVANCED SEARCH @@ -174,39 +201,39 @@ def _(x): # Rate limiting # ============= #: Storage for ratelimiter. -RATELIMIT_STORAGE_URL = 'redis://localhost:6379/3' -RATELIMIT_DEFAULT = '5000/second' +RATELIMIT_STORAGE_URI = "redis://localhost:6379/3" +RATELIMIT_DEFAULT = "5000/second" RATELIMIT_ENABLED = False # I18N # ==== #: Default language -BABEL_DEFAULT_LANGUAGE = 'en' -RERO_ILS_APP_DEFAULT_LANGUAGE = 'eng' +BABEL_DEFAULT_LANGUAGE = "en" +RERO_ILS_APP_DEFAULT_LANGUAGE = "eng" #: Default time zone -BABEL_DEFAULT_TIMEZONE = 'Europe/Zurich' +BABEL_DEFAULT_TIMEZONE = "Europe/Zurich" #: Other supported languages (do not include the default language in list). 
I18N_LANGUAGES = [ - ('fr', _('French')), - ('de', _('German')), - ('it', _('Italian')), + ("fr", _("French")), + ("de", _("German")), + ("it", _("Italian")), ] # Define the default system currency in use. Each organisation can override -# this parameter using the 'default_currency' field -RERO_ILS_DEFAULT_CURRENCY = 'CHF' +# this parameter using the "default_currency" field +RERO_ILS_DEFAULT_CURRENCY = "CHF" # Base templates # ============== #: Global base template. -BASE_TEMPLATE = 'rero_ils/page.html' +BASE_TEMPLATE = "rero_ils/page.html" #: Cover page base template (used for e.g. login/sign-up). -COVER_TEMPLATE = 'rero_ils/page_cover.html' +COVER_TEMPLATE = "rero_ils/page_cover.html" #: Footer base template. -FOOTER_TEMPLATE = 'rero_ils/footer.html' +FOOTER_TEMPLATE = "rero_ils/footer.html" #: Header base template. -HEADER_TEMPLATE = 'rero_ils/header.html' +HEADER_TEMPLATE = "rero_ils/header.html" #: Settings base template -SETTINGS_TEMPLATE = 'rero_ils/page_settings.html' +SETTINGS_TEMPLATE = "rero_ils/page_settings.html" #: Admin base template # ADMIN_BASE_TEMPLATE = BASE_TEMPLATE @@ -219,24 +246,24 @@ def _(x): # Miscellaneous variables around templates # ======================= #: Template for security pages. -SECURITY_LOGIN_USER_TEMPLATE = 'rero_ils/login_user.html' -SECURITY_REGISTER_USER_TEMPLATE = 'rero_ils/register_user.html' -SECURITY_FORGOT_PASSWORD_TEMPLATE = 'rero_ils/forgot_password.html' -SECURITY_RESET_PASSWORD_TEMPLATE = 'rero_ils/reset_password.html' -RERO_ILS_SEARCH_TEMPLATE = 'rero_ils/search.html' +SECURITY_LOGIN_USER_TEMPLATE = "rero_ils/login_user.html" +SECURITY_REGISTER_USER_TEMPLATE = "rero_ils/register_user.html" +SECURITY_FORGOT_PASSWORD_TEMPLATE = "rero_ils/forgot_password.html" +SECURITY_RESET_PASSWORD_TEMPLATE = "rero_ils/reset_password.html" +RERO_ILS_SEARCH_TEMPLATE = "rero_ils/search.html" # Theme configuration # =================== #: Brand logo. -THEME_LOGO = 'images/logo-rero-plus.svg' +THEME_LOGO = "images/logo-rero-plus.svg" #: Site name -THEME_SITENAME = _('rero-ils') +THEME_SITENAME = _("rero-ils") #: Use default frontpage. THEME_FRONTPAGE = False #: Frontpage title. -THEME_FRONTPAGE_TITLE = _('rero-ils') +THEME_FRONTPAGE_TITLE = _("rero-ils") #: Frontpage template. -THEME_FRONTPAGE_TEMPLATE = 'rero_ils/frontpage.html' +THEME_FRONTPAGE_TEMPLATE = "rero_ils/frontpage.html" #: Theme base template. THEME_BASE_TEMPLATE = BASE_TEMPLATE #: Cover page theme template (used for e.g. login/sign-up). @@ -248,15 +275,15 @@ def _(x): #: Settings page template used for e.g. display user settings views. THEME_SETTINGS_TEMPLATE = SETTINGS_TEMPLATE #: Template for error pages. -THEME_ERROR_TEMPLATE = 'rero_ils/page_error.html' +THEME_ERROR_TEMPLATE = "rero_ils/page_error.html" # External CSS for each organisation customization # For production: replace "test" with "prod" in the url -RERO_ILS_THEME_ORGANISATION_CSS_ENDPOINT = 'https://resources.rero.ch/bib/test/css/' +RERO_ILS_THEME_ORGANISATION_CSS_ENDPOINT = "https://resources.rero.ch/bib/test/css/" #: Template for including a tracking code for web analytics.
-THEME_TRACKINGCODE_TEMPLATE = 'rero_ils/trackingcode.html' -THEME_JAVASCRIPT_TEMPLATE = 'rero_ils/javascript.html' +THEME_TRACKINGCODE_TEMPLATE = "rero_ils/trackingcode.html" +THEME_JAVASCRIPT_TEMPLATE = "rero_ils/javascript.html" -WEBPACKEXT_PROJECT = 'rero_ils.theme.webpack:project' +WEBPACKEXT_PROJECT = "rero_ils.theme.webpack:project" # Email configuration # =================== @@ -270,7 +297,7 @@ def _(x): # Assets # ====== #: Static files collection method (defaults to symbolic link to files). -COLLECT_STORAGE = 'flask_collect.storage.link' +COLLECT_STORAGE = "flask_collect.storage.link" # Accounts # ======== @@ -279,10 +306,10 @@ def _(x): #: Email subject for account registration emails. SECURITY_EMAIL_SUBJECT_REGISTER = _("Welcome") #: Email subjects for password reset -SECURITY_EMAIL_SUBJECT_PASSWORD_RESET = _('RERO ID password reset') -SECURITY_EMAIL_SUBJECT_PASSWORD_NOTICE = _('Your RERO ID password has been reset') +SECURITY_EMAIL_SUBJECT_PASSWORD_RESET = _("RERO ID password reset") +SECURITY_EMAIL_SUBJECT_PASSWORD_NOTICE = _("Your RERO ID password has been reset") #: Redis session storage URL. -ACCOUNTS_SESSION_REDIS_URL = 'redis://localhost:6379/1' +ACCOUNTS_SESSION_REDIS_URL = "redis://localhost:6379/1" #: Enable session/user id request tracing. This feature will add X-Session-ID #: and X-User-ID headers to HTTP response. You MUST ensure that NGINX (or other #: proxies) removes these headers again before sending the response to the @@ -303,11 +330,7 @@ def _(x): ACCOUNTS_USER_PROFILE_SCHEMA = UserProfile RERO_PUBLIC_USERPROFILES_READONLY = False -RERO_PUBLIC_USERPROFILES_READONLY_FIELDS = [ - 'first_name', - 'last_name', - 'birth_date' -] +RERO_PUBLIC_USERPROFILES_READONLY_FIELDS = ["first_name", "last_name", "birth_date"] #: USER PROFILES USERPROFILES_READ_ONLY = False @@ -315,7 +338,7 @@ def _(x): # Disable User Profiles USERPROFILES = True USERPROFILES_COUNTRIES = get_profile_countries -USERPROFILES_DEFAULT_COUNTRY = 'sz' +USERPROFILES_DEFAULT_COUNTRY = "sz" USERPROFILES_READONLY_FIELDS = get_readonly_profile_fields # Custom login view @@ -330,183 +353,184 @@ def _(x): "send_confirmation": "rero_ils.accounts_views:DisabledAuthApiView", "confirm_email": "rero_ils.accounts_views:DisabledAuthApiView", "sessions_list": "rero_ils.accounts_views:DisabledAuthApiView", - "sessions_item": "rero_ils.accounts_views:DisabledAuthApiView" + "sessions_item": "rero_ils.accounts_views:DisabledAuthApiView", } ACCOUNTS_REGISTER_BLUEPRINT = True """Needed to generate reset password link.""" -SECURITY_LOGIN_URL = '/signin/' +SECURITY_LOGIN_URL = "/signin/" """URL endpoint for login.""" -SECURITY_LOGOUT_URL = '/signout/' +SECURITY_LOGOUT_URL = "/signout/" """URL endpoint for logout.""" # Celery configuration # ==================== #: URL of message broker for Celery (default is RabbitMQ). -CELERY_BROKER_URL = 'amqp://guest:guest@localhost:5672/' +CELERY_BROKER_URL = "amqp://guest:guest@localhost:5672/" #: URL of backend for result storage (default is Redis). -CELERY_RESULT_BACKEND = 'redis://localhost:6379/2' +CELERY_RESULT_BACKEND = "redis://localhost:6379/2" #: Scheduled tasks configuration (aka cronjobs). CELERY_BEAT_SCHEDULE = { - 'scheduler-timestamp': { - 'task': ('rero_ils.modules.tasks.scheduler_timestamp'), - 'schedule': timedelta(minutes=1), - 'enabled': False + "scheduler-timestamp": { + "task": ("rero_ils.modules.tasks.scheduler_timestamp"), + "schedule": timedelta(minutes=1), + "enabled": False, # Save a timestamp so we can externally test the timestamp changed # every minute. If the timestamp is not changing, the scheduler # is not working. }, - 'bulk-indexer': { - 'task': 'rero_ils.modules.tasks.process_bulk_queue', - 'schedule': timedelta(minutes=1), - 'enabled': False + "bulk-indexer": { + "task": "rero_ils.modules.tasks.process_bulk_queue", + "schedule": timedelta(minutes=1), + "enabled": False, }, - 'accounts': { - 'task': 'invenio_accounts.tasks.clean_session_table', - 'schedule': timedelta(minutes=60), - 'enabled': False + "accounts": { + "task": "invenio_accounts.tasks.clean_session_table", + "schedule": timedelta(minutes=60), + "enabled": False, }, - 'ebooks-harvester': { - 'task': 'invenio_oaiharvester.tasks.list_records_from_dates', - 'schedule': crontab(minute=22, hour=22), - 'kwargs': {'name': 'ebooks'}, - 'enabled': False + "ebooks-harvester": { + "task": "invenio_oaiharvester.tasks.list_records_from_dates", + "schedule": crontab(minute=22, hour=22), + "kwargs": {"name": "ebooks"}, + "enabled": False, }, - 'notification-creation': { - 'task': 'rero_ils.modules.notifications.tasks.create_notifications', - 'schedule': crontab(minute=0, hour=5), # Every day at 05:00 UTC, - 'kwargs': { - 'types': [NotificationType.DUE_SOON, NotificationType.OVERDUE] - }, - 'enabled': False, + "notification-creation": { + "task": "rero_ils.modules.notifications.tasks.create_notifications", + "schedule": crontab(minute=0, hour=5), # Every day at 05:00 UTC, + "kwargs": {"types": [NotificationType.DUE_SOON, NotificationType.OVERDUE]}, + "enabled": False, # TODO: in production set this up once a day }, - 'notification-dispatch-availability': { - 'task': 'rero_ils.modules.notifications.tasks.process_notifications', - 'schedule': timedelta(minutes=15), - 'kwargs': { - 'notification_type': NotificationType.AVAILABILITY - }, - 'enabled': False, + "notification-dispatch-availability": { + "task": "rero_ils.modules.notifications.tasks.process_notifications", + "schedule": timedelta(minutes=15), + "kwargs": {"notification_type": NotificationType.AVAILABILITY}, + "enabled": False, }, - 'notification-dispatch-recall': { - 'task': 'rero_ils.modules.notifications.tasks.process_notifications', - 'schedule': timedelta(minutes=15), - 'kwargs': { - 'notification_type': NotificationType.RECALL - }, - 'enabled': False, + "notification-dispatch-recall": { + "task": "rero_ils.modules.notifications.tasks.process_notifications", + "schedule": timedelta(minutes=15), + "kwargs": {"notification_type": NotificationType.RECALL}, + "enabled": False, }, - 'claims-creation': { - 'task': 'rero_ils.modules.items.tasks.process_late_issues', - 'schedule': crontab(minute=0, hour=6), # Every day at 06:00 UTC, - 'enabled': False + "claims-creation": { + "task": "rero_ils.modules.items.tasks.process_late_issues", + "schedule": crontab(minute=0, hour=6), # Every day at 06:00 UTC, + "enabled": False, }, - 'clean_obsolete_temporary_item_types_and_locations': { - 'task': ('rero_ils.modules.items.tasks' - '.clean_obsolete_temporary_item_types_and_locations'), - 'schedule': crontab(minute=15, hour=2), # Every day at 02:15 UTC, - 'enabled': False + "clean_obsolete_temporary_item_types_and_locations": { + "task": ( + "rero_ils.modules.items.tasks" + ".clean_obsolete_temporary_item_types_and_locations" + ), + "schedule": crontab(minute=15, hour=2), # Every day at 02:15 UTC, + "enabled": False, }, - 'cancel-expired-request': { - 'task': 'rero_ils.modules.loans.tasks.cancel_expired_request_task', - 'schedule': crontab(minute=15, hour=3), # Every day at 03:15 UTC, - 'enabled': False + "cancel-expired-request": {
"task": "rero_ils.modules.loans.tasks.cancel_expired_request_task", + "schedule": crontab(minute=15, hour=3), # Every day at 03:15 UTC, + "enabled": False, }, - 'automatic_renewal': { - 'task': 'rero_ils.modules.loans.tasks.automatic_renewal', - 'schedule': crontab(minute=30, hour=3), # Every day at 03:30 UTC - 'enabled': False + "automatic_renewal": { + "task": "rero_ils.modules.loans.tasks.automatic_renewal", + "schedule": crontab(minute=30, hour=3), # Every day at 03:30 UTC + "enabled": False, }, - 'anonymize-loans': { - 'task': 'rero_ils.modules.loans.tasks.loan_anonymizer', - 'schedule': crontab(minute=0, hour=7), # Every day at 07:00 UTC, - 'enabled': False + "anonymize-loans": { + "task": "rero_ils.modules.loans.tasks.loan_anonymizer", + "schedule": crontab(minute=0, hour=7), # Every day at 07:00 UTC, + "enabled": False, }, - 'clear_and_renew_subscriptions': { - 'task': ('rero_ils.modules.patrons.tasks' - '.task_clear_and_renew_subscriptions'), - 'schedule': crontab(minute=2, hour=2), # Every day at 02:02 UTC, - 'enabled': False + "clear_and_renew_subscriptions": { + "task": ( + "rero_ils.modules.patrons.tasks" ".task_clear_and_renew_subscriptions" + ), + "schedule": crontab(minute=2, hour=2), # Every day at 02:02 UTC, + "enabled": False, }, - 'delete_standard_holdings_having_no_items': { - 'task': ('rero_ils.modules.holdings.tasks' - '.delete_standard_holdings_having_no_items'), - 'schedule': crontab(minute=30, hour=4), # Every day at 04:30 UTC, - 'enabled': False + "delete_standard_holdings_having_no_items": { + "task": ( + "rero_ils.modules.holdings.tasks" + ".delete_standard_holdings_having_no_items" + ), + "schedule": crontab(minute=30, hour=4), # Every day at 04:30 UTC, + "enabled": False, }, - 'collect-stats-billing': { - 'task': ('rero_ils.modules.stats.tasks.collect_stats_billing'), - 'schedule': crontab(minute=0, hour=1), # Every day at 01:00 UTC, - 'enabled': False + "collect-stats-billing": { + "task": ("rero_ils.modules.stats.tasks.collect_stats_billing"), + "schedule": crontab(minute=0, hour=1), # Every day at 01:00 UTC, + "enabled": False, }, - 'collect-stats-librarian': { - 'task': ('rero_ils.modules.stats.tasks.collect_stats_librarian'), - 'schedule': crontab(minute=30, hour=1, day_of_month='1'), # First day of the month at 01:30 UTC, - 'enabled': False + "collect-stats-librarian": { + "task": ("rero_ils.modules.stats.tasks.collect_stats_librarian"), + "schedule": crontab( + minute=30, hour=1, day_of_month="1" + ), # First day of the month at 01:30 UTC, + "enabled": False, }, - 'collect-stats-report-month': { - 'task': ('rero_ils.modules.stats.tasks.collect_stats_reports'), - 'schedule': crontab(minute=0, hour=1, day_of_month='1'), # First day of the month at 01:30 UTC, - 'kwargs': { - 'frequency': 'month' - }, - 'enabled': False + "collect-stats-report-month": { + "task": ("rero_ils.modules.stats.tasks.collect_stats_reports"), + "schedule": crontab( + minute=0, hour=1, day_of_month="1" + ), # First day of the month at 01:30 UTC, + "kwargs": {"frequency": "month"}, + "enabled": False, }, - 'collect-stats-report-year': { - 'task': ('rero_ils.modules.stats.tasks.collect_stats_reports'), - 'schedule': crontab(minute=0, hour=1, day_of_month='1', month_of_year='1'), # First day of the month at 01:30 UTC, - 'kwargs': { - 'frequency': 'year' - }, - 'enabled': False + "collect-stats-report-year": { + "task": ("rero_ils.modules.stats.tasks.collect_stats_reports"), + "schedule": crontab( + minute=0, hour=1, day_of_month="1", month_of_year="1" + ), # First day of the month at 
01:30 UTC, + "kwargs": {"frequency": "year"}, + "enabled": False, }, - 'delete-provisional-items': { - 'task': 'rero_ils.modules.items.tasks.delete_provisional_items', - 'schedule': crontab(minute=0, hour=3), # Every day at 03:00 UTC, - 'enabled': False, + "delete-provisional-items": { + "task": "rero_ils.modules.items.tasks.delete_provisional_items", + "schedule": crontab(minute=0, hour=3), # Every day at 03:00 UTC, + "enabled": False, }, - 'delete-loans-created': { - 'task': 'rero_ils.modules.loans.tasks.delete_loans_created', - 'schedule': crontab(minute=0, hour=5), # Every day at 05:00 UTC, - 'enabled': False, + "delete-loans-created": { + "task": "rero_ils.modules.loans.tasks.delete_loans_created", + "schedule": crontab(minute=0, hour=5), # Every day at 05:00 UTC, + "enabled": False, }, - 'sync-entities': { - 'task': 'rero_ils.modules.entities.remote_entities.tasks.sync_entities', - 'schedule': crontab(minute=0, hour=1), # Every day at 01:00 UTC, - 'enabled': False, + "sync-entities": { + "task": "rero_ils.modules.entities.remote_entities.tasks.sync_entities", + "schedule": crontab(minute=0, hour=1), # Every day at 01:00 UTC, + "enabled": False, }, - 'replace-identified-by': { - 'task': 'rero_ils.modules.entities.remote_entities.tasks.replace_identified_by', - 'schedule': crontab(minute=0, hour=3, day_of_week=6), # Every Saturday at 03:00 UTC, - 'enabled': False, + "replace-identified-by": { + "task": "rero_ils.modules.entities.remote_entities.tasks.replace_identified_by", + "schedule": crontab( + minute=0, hour=3, day_of_week=6 + ), # Every Saturday at 03:00 UTC, + "enabled": False, }, - # 'mef-harvester': { - # 'task': 'rero_ils.modules.apiharvester.tasks.harvest_records', - # 'schedule': timedelta(minutes=60), - # 'kwargs': {'name': 'mef', 'enabled': False), - # 'enabled': False, + # "mef-harvester": { + # "task": "rero_ils.modules.apiharvester.tasks.harvest_records", + # "schedule": timedelta(minutes=60), + # "kwargs": {"name": "mef", "enabled": False), + # "enabled": False, # }, } CELERY_BROKER_HEARTBEAT = 0 INDEXER_BULK_REQUEST_TIMEOUT = 60 -CELERY_BEAT_SCHEDULER = 'rero_ils.schedulers.RedisScheduler' -CELERY_REDIS_SCHEDULER_URL = 'redis://localhost:6379/4' +CELERY_BEAT_SCHEDULER = "rero_ils.schedulers.RedisScheduler" +CELERY_REDIS_SCHEDULER_URL = "redis://localhost:6379/4" -RERO_IMPORT_CACHE = 'redis://localhost:6379/5' +RERO_IMPORT_CACHE = "redis://localhost:6379/5" RERO_IMPORT_CACHE_EXPIRE = 10 # Database # ======== #: Database URI including user and password -SQLALCHEMY_DATABASE_URI = ( - 'postgresql+psycopg2://rero-ils:rero-ils@localhost/rero-ils' -) +SQLALCHEMY_DATABASE_URI = "postgresql+psycopg2://rero-ils:rero-ils@localhost/rero-ils" #: Disable Versioning due to Bad Performance DB_VERSIONING = False #: Disable warning @@ -519,7 +543,7 @@ def _(x): #: Secret key - each installation (dev, production, ...) needs a separate key. #: It should be changed before deploying. -SECRET_KEY = 'CHANGE_ME' +SECRET_KEY = "CHANGE_ME" #: Max upload size for form data via application/mulitpart-formdata. MAX_CONTENT_LENGTH = 100 * 1024 * 1024 # 100 MiB #: For dev. 
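The CELERY_BEAT_SCHEDULE entries above mix two schedule types from `celery.schedules`: fixed intervals expressed as `timedelta` objects and cron-style recurrences expressed with `crontab`. A short sketch restating the recurrences used in this file, with the values copied from the entries above:

from datetime import timedelta

from celery.schedules import crontab

EVERY_MINUTE = timedelta(minutes=1)  # scheduler-timestamp, bulk-indexer
DAILY_05_UTC = crontab(minute=0, hour=5)  # notification-creation
MONTHLY_0130_UTC = crontab(minute=30, hour=1, day_of_month="1")  # collect-stats-librarian
WEEKLY_SAT_03_UTC = crontab(minute=0, hour=3, day_of_week=6)  # replace-identified-by
YEARLY_0100_UTC = crontab(
    minute=0, hour=1, day_of_month="1", month_of_year="1"
)  # collect-stats-report-year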
Set to false when testing on localhost in no debug mode @@ -528,46 +552,37 @@ def _(x): APP_DEFAULT_SECURE_HEADERS = { # disabled as https is not used by the application: # https is done by the haproxy - 'force_https': False, - 'force_https_permanent': False, - 'force_file_save': False, - 'frame_options': 'sameorigin', - 'frame_options_allow_from': None, - 'strict_transport_security': True, - 'strict_transport_security_preload': False, - 'strict_transport_security_max_age': 31556926, # One year in seconds - 'strict_transport_security_include_subdomains': True, - 'content_security_policy': { - 'default-src': ['*'], - 'font-src': ['*', 'data:'], - 'img-src': [ - '*', - "'self'", - 'data:', - 'blob:' - ], - 'style-src': [ - '*', - "'self'", - "'unsafe-inline'" - ], - 'script-src': [ + "force_https": False, + "force_https_permanent": False, + "force_file_save": False, + "frame_options": "sameorigin", + "frame_options_allow_from": None, + "strict_transport_security": True, + "strict_transport_security_preload": False, + "strict_transport_security_max_age": 31556926, # One year in seconds + "strict_transport_security_include_subdomains": True, + "content_security_policy": { + "default-src": ["*"], + "font-src": ["*", "data:"], + "img-src": ["*", "'self'", "data:", "blob:"], + "style-src": ["*", "'self'", "'unsafe-inline'"], + "script-src": [ "'self'", "'unsafe-eval'", "'unsafe-inline'", - # '*.rero.ch', - 'https://www.googletagmanager.com', - 'https://www.google-analytics.com', - 'https://services.test.rero.ch', - 'https://services.rero.ch', - 'https://cdn.jsdelivr.net', - 'https://www.babelio.com' - ] + # "*.rero.ch", + "https://www.googletagmanager.com", + "https://www.google-analytics.com", + "https://services.test.rero.ch", + "https://services.rero.ch", + "https://cdn.jsdelivr.net", + "https://www.babelio.com", + ], }, - 'content_security_policy_report_uri': None, - 'content_security_policy_report_only': False, - 'session_cookie_secure': True, - 'session_cookie_http_only': True, + "content_security_policy_report_uri": None, + "content_security_policy_report_only": False, + "session_cookie_secure": True, + "session_cookie_http_only": True, } #: Sets cookie with the secure flag (by default False) SESSION_COOKIE_SECURE = False @@ -575,7 +590,7 @@ def _(x): #: provided, the allowed hosts variable is set to localhost. In production it #: should be set to the correct host and it is strongly recommended to only #: route correct hosts to the application. 
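The `content_security_policy` mapping in APP_DEFAULT_SECURE_HEADERS above is not sent verbatim: Invenio hands these settings to Flask-Talisman, which flattens the mapping into a single Content-Security-Policy header value. A hand-rolled sketch of that flattening, for illustration only (the real work happens inside Flask-Talisman), using a subset of the directives above:

# Illustration only: Flask-Talisman performs this flattening internally.
csp = {
    "default-src": ["*"],
    "img-src": ["*", "'self'", "data:", "blob:"],
    "script-src": ["'self'", "https://cdn.jsdelivr.net"],
}
header_value = "; ".join(
    f"{directive} {' '.join(sources)}" for directive, sources in csp.items()
)
print(header_value)
# default-src *; img-src * 'self' data: blob:; script-src 'self' https://cdn.jsdelivr.net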
-APP_ALLOWED_HOSTS = ['localhost', '127.0.0.1'] +APP_ALLOWED_HOSTS = ["localhost", "127.0.0.1"] # Previewers # ========== @@ -619,15 +634,13 @@ def _(x): RECORDS_REST_DEFAULT_READ_PERMISSION_FACTORY = librarian_permission_factory """Default read permission factory: check if the record exists.""" -RECORDS_REST_DEFAULT_UPDATE_PERMISSION_FACTORY = \ - librarian_update_permission_factory +RECORDS_REST_DEFAULT_UPDATE_PERMISSION_FACTORY = librarian_update_permission_factory """Default update permission factory: reject any request.""" -RECORDS_REST_DEFAULT_DELETE_PERMISSION_FACTORY = \ - librarian_delete_permission_factory +RECORDS_REST_DEFAULT_DELETE_PERMISSION_FACTORY = librarian_delete_permission_factory """Default delete permission factory: reject any request.""" -REST_MIMETYPE_QUERY_ARG_NAME = 'format' +REST_MIMETYPE_QUERY_ARG_NAME = "format" """Name of the query argument to specify the mimetype wanted for the output.""" MAX_RESULT_WINDOW = 100000 @@ -635,1776 +648,2227 @@ def _(x): RECORDS_REST_ENDPOINTS = dict( coll=dict( - pid_type='coll', - pid_minter='collection_id', - pid_fetcher='collection_id', - search_class='rero_ils.modules.collections.api:CollectionsSearch', - search_index='collections', - indexer_class='rero_ils.modules.collections.api:CollectionsIndexer', + pid_type="coll", + pid_minter="collection_id", + pid_fetcher="collection_id", + search_class="rero_ils.modules.collections.api:CollectionsSearch", + search_index="collections", + indexer_class="rero_ils.modules.collections.api:CollectionsIndexer", record_serializers={ - 'application/json': 'rero_ils.modules.serializers:json_v1_response' + "application/json": "rero_ils.modules.serializers:json_v1_response" }, search_serializers={ - 'application/json': 'rero_ils.modules.serializers:json_v1_search', - 'application/rero+json': 'rero_ils.modules.collections.serializers:json_coll_search' + "application/json": "rero_ils.modules.serializers:json_v1_search", + "application/rero+json": "rero_ils.modules.collections.serializers:json_coll_search", }, record_loaders={ - 'application/json': lambda: Collection(request.get_json()), + "application/json": lambda: Collection(request.get_json()), }, - record_class='rero_ils.modules.collections.api:Collection', - list_route='/collections/', - item_route='/collections/', - default_media_type='application/json', + record_class="rero_ils.modules.collections.api:Collection", + list_route="/collections/", + item_route="/collections/", + default_media_type="application/json", max_result_window=MAX_RESULT_WINDOW, - search_factory_imp='rero_ils.query:view_search_collection_factory', - list_permission_factory_imp=lambda record: CollectionPermissionPolicy('search', record=record), - read_permission_factory_imp=lambda record: CollectionPermissionPolicy('read', record=record), - create_permission_factory_imp=lambda record: CollectionPermissionPolicy('create', record=record), - update_permission_factory_imp=lambda record: CollectionPermissionPolicy('update', record=record), - delete_permission_factory_imp=lambda record: CollectionPermissionPolicy('delete', record=record) + search_factory_imp="rero_ils.query:view_search_collection_factory", + list_permission_factory_imp=lambda record: CollectionPermissionPolicy( + "search", record=record + ), + read_permission_factory_imp=lambda record: CollectionPermissionPolicy( + "read", record=record + ), + create_permission_factory_imp=lambda record: CollectionPermissionPolicy( + "create", record=record + ), + update_permission_factory_imp=lambda record: 
CollectionPermissionPolicy( + "update", record=record + ), + delete_permission_factory_imp=lambda record: CollectionPermissionPolicy( + "delete", record=record + ), ), doc=dict( - pid_type='doc', - pid_minter='document_id', - pid_fetcher='document_id', - search_class='rero_ils.modules.documents.api:DocumentsSearch', - search_index='documents', - indexer_class='rero_ils.modules.documents.api:DocumentsIndexer', + pid_type="doc", + pid_minter="document_id", + pid_fetcher="document_id", + search_class="rero_ils.modules.documents.api:DocumentsSearch", + search_index="documents", + indexer_class="rero_ils.modules.documents.api:DocumentsIndexer", record_serializers={ - 'application/json': 'rero_ils.modules.serializers:json_v1_response', - 'application/rero+json': 'rero_ils.modules.documents.serializers:json_doc_response', - 'application/export+json': 'rero_ils.modules.documents.serializers:json_export_doc_response', - 'application/x-research-info-systems': 'rero_ils.modules.documents.serializers:ris_doc_response' + "application/json": "rero_ils.modules.serializers:json_v1_response", + "application/rero+json": "rero_ils.modules.documents.serializers:json_doc_response", + "application/export+json": "rero_ils.modules.documents.serializers:json_export_doc_response", + "application/x-research-info-systems": "rero_ils.modules.documents.serializers:ris_doc_response", }, record_serializers_aliases={ - 'json': 'application/json', - 'rero': 'application/rero+json', - 'raw': 'application/export+json', - 'ris': 'application/x-research-info-systems' + "json": "application/json", + "rero": "application/rero+json", + "raw": "application/export+json", + "ris": "application/x-research-info-systems", }, search_serializers={ - 'application/json': 'rero_ils.modules.serializers:json_v1_search', - 'application/rero+json': 'rero_ils.modules.documents.serializers:json_doc_search', - 'application/export+json': 'rero_ils.modules.documents.serializers:json_export_doc_search', - 'application/x-research-info-systems': 'rero_ils.modules.documents.serializers:ris_doc_search' + "application/json": "rero_ils.modules.serializers:json_v1_search", + "application/rero+json": "rero_ils.modules.documents.serializers:json_doc_search", + "application/export+json": "rero_ils.modules.documents.serializers:json_export_doc_search", + "application/x-research-info-systems": "rero_ils.modules.documents.serializers:ris_doc_search", }, search_serializers_aliases={ - 'json': 'application/json', - 'rero': 'application/rero+json', - 'raw': 'application/export+json', - 'ris': 'application/x-research-info-systems' + "json": "application/json", + "rero": "application/rero+json", + "raw": "application/export+json", + "ris": "application/x-research-info-systems", }, record_loaders={ - 'application/marcxml+xml': 'rero_ils.modules.documents.loaders:marcxml_loader', - 'application/json': lambda: Document(request.get_json()), + "application/marcxml+xml": "rero_ils.modules.documents.loaders:marcxml_loader", + "application/json": lambda: Document(request.get_json()), }, - list_route='/documents/', - record_class='rero_ils.modules.documents.api:Document', - item_route=('/documents/'), + list_route="/documents/", + record_class="rero_ils.modules.documents.api:Document", + item_route=( + "/documents/" + ), # suggesters=dict( # title={ - # 'completion': { - # 'field': 'title_suggest', - # 'size': 10, - # 'skip_duplicates': True + # "completion": { + # "field": "title_suggest", + # "size": 10, + # "skip_duplicates": True # } # }, # ), - 
default_media_type='application/json', + default_media_type="application/json", max_result_window=MAX_RESULT_WINDOW, - search_factory_imp='rero_ils.query:documents_search_factory', - list_permission_factory_imp=lambda record: DocumentPermissionPolicy('search', record=record), - read_permission_factory_imp=lambda record: DocumentPermissionPolicy('read', record=record), - create_permission_factory_imp=lambda record: DocumentPermissionPolicy('create', record=record), - update_permission_factory_imp=lambda record: DocumentPermissionPolicy('update', record=record), - delete_permission_factory_imp=lambda record: DocumentPermissionPolicy('delete', record=record) + search_factory_imp="rero_ils.query:documents_search_factory", + list_permission_factory_imp=lambda record: DocumentPermissionPolicy( + "search", record=record + ), + read_permission_factory_imp=lambda record: DocumentPermissionPolicy( + "read", record=record + ), + create_permission_factory_imp=lambda record: DocumentPermissionPolicy( + "create", record=record + ), + update_permission_factory_imp=lambda record: DocumentPermissionPolicy( + "update", record=record + ), + delete_permission_factory_imp=lambda record: DocumentPermissionPolicy( + "delete", record=record + ), ), illr=dict( - pid_type='illr', - pid_minter='ill_request_id', - pid_fetcher='ill_request_id', - search_class='rero_ils.modules.ill_requests.api:ILLRequestsSearch', - search_index='ill_requests', - indexer_class='rero_ils.modules.ill_requests.api:ILLRequestsIndexer', + pid_type="illr", + pid_minter="ill_request_id", + pid_fetcher="ill_request_id", + search_class="rero_ils.modules.ill_requests.api:ILLRequestsSearch", + search_index="ill_requests", + indexer_class="rero_ils.modules.ill_requests.api:ILLRequestsIndexer", record_serializers={ - 'application/json': 'rero_ils.modules.serializers:json_v1_response', - 'application/rero+json': 'rero_ils.modules.ill_requests.serializers:json_ill_request_response' + "application/json": "rero_ils.modules.serializers:json_v1_response", + "application/rero+json": "rero_ils.modules.ill_requests.serializers:json_ill_request_response", }, search_serializers_aliases={ - 'json': 'application/json', - 'rero+json': 'application/rero+json', + "json": "application/json", + "rero+json": "application/rero+json", }, search_serializers={ - 'application/json': 'rero_ils.modules.serializers:json_v1_search', - 'application/rero+json': 'rero_ils.modules.ill_requests.serializers:json_ill_request_search' + "application/json": "rero_ils.modules.serializers:json_v1_search", + "application/rero+json": "rero_ils.modules.ill_requests.serializers:json_ill_request_search", }, record_loaders={ - 'application/json': lambda: ILLRequest(request.get_json()), + "application/json": lambda: ILLRequest(request.get_json()), }, - record_class='rero_ils.modules.ill_requests.api:ILLRequest', - list_route='/ill_requests/', - item_route='/ill_requests/', - default_media_type='application/json', + record_class="rero_ils.modules.ill_requests.api:ILLRequest", + list_route="/ill_requests/", + item_route="/ill_requests/", + default_media_type="application/json", max_result_window=MAX_RESULT_WINDOW, - search_factory_imp='rero_ils.query:ill_request_search_factory', - list_permission_factory_imp=lambda record: ILLRequestPermissionPolicy('search', record=record), - read_permission_factory_imp=lambda record: ILLRequestPermissionPolicy('read', record=record), - create_permission_factory_imp=lambda record: ILLRequestPermissionPolicy('create', record=record), - 
update_permission_factory_imp=lambda record: ILLRequestPermissionPolicy('update', record=record), - delete_permission_factory_imp=lambda record: ILLRequestPermissionPolicy('delete', record=record) + search_factory_imp="rero_ils.query:ill_request_search_factory", + list_permission_factory_imp=lambda record: ILLRequestPermissionPolicy( + "search", record=record + ), + read_permission_factory_imp=lambda record: ILLRequestPermissionPolicy( + "read", record=record + ), + create_permission_factory_imp=lambda record: ILLRequestPermissionPolicy( + "create", record=record + ), + update_permission_factory_imp=lambda record: ILLRequestPermissionPolicy( + "update", record=record + ), + delete_permission_factory_imp=lambda record: ILLRequestPermissionPolicy( + "delete", record=record + ), ), item=dict( - pid_type='item', - pid_minter='item_id', - pid_fetcher='item_id', - search_class='rero_ils.modules.items.api:ItemsSearch', - search_index='items', - indexer_class='rero_ils.modules.items.api:ItemsIndexer', + pid_type="item", + pid_minter="item_id", + pid_fetcher="item_id", + search_class="rero_ils.modules.items.api:ItemsSearch", + search_index="items", + indexer_class="rero_ils.modules.items.api:ItemsIndexer", record_serializers={ - 'application/json': 'rero_ils.modules.serializers:json_v1_response', - 'application/rero+json': 'rero_ils.modules.items.serializers:json_item_response' + "application/json": "rero_ils.modules.serializers:json_v1_response", + "application/rero+json": "rero_ils.modules.items.serializers:json_item_response", }, search_serializers_aliases={ - 'json': 'application/json', - 'rero': 'application/rero+json', - 'csv': 'text/csv', + "json": "application/json", + "rero": "application/rero+json", + "csv": "text/csv", }, search_serializers={ - 'application/json': 'rero_ils.modules.serializers:json_v1_search', - 'application/rero+json': 'rero_ils.modules.items.serializers:json_item_search', - 'text/csv': 'rero_ils.modules.items.serializers:csv_item_search', + "application/json": "rero_ils.modules.serializers:json_v1_search", + "application/rero+json": "rero_ils.modules.items.serializers:json_item_search", + "text/csv": "rero_ils.modules.items.serializers:csv_item_search", }, - list_route='/items/', + list_route="/items/", record_loaders={ - 'application/json': lambda: Item(request.get_json()), + "application/json": lambda: Item(request.get_json()), }, - record_class='rero_ils.modules.items.api:Item', - item_route='/items/', - default_media_type='application/json', + record_class="rero_ils.modules.items.api:Item", + item_route="/items/", + default_media_type="application/json", max_result_window=MAX_RESULT_WINDOW, - search_factory_imp='rero_ils.query:items_search_factory', - list_permission_factory_imp=lambda record: ItemPermissionPolicy('search', record=record), - read_permission_factory_imp=lambda record: ItemPermissionPolicy('read', record=record), - create_permission_factory_imp=lambda record: ItemPermissionPolicy('create', record=record), - update_permission_factory_imp=lambda record: ItemPermissionPolicy('update', record=record), - delete_permission_factory_imp=lambda record: ItemPermissionPolicy('delete', record=record) + search_factory_imp="rero_ils.query:items_search_factory", + list_permission_factory_imp=lambda record: ItemPermissionPolicy( + "search", record=record + ), + read_permission_factory_imp=lambda record: ItemPermissionPolicy( + "read", record=record + ), + create_permission_factory_imp=lambda record: ItemPermissionPolicy( + "create", record=record + ), + 
update_permission_factory_imp=lambda record: ItemPermissionPolicy( + "update", record=record + ), + delete_permission_factory_imp=lambda record: ItemPermissionPolicy( + "delete", record=record + ), ), itty=dict( - pid_type='itty', - pid_minter='item_type_id', - pid_fetcher='item_type_id', - search_class='rero_ils.modules.item_types.api:ItemTypesSearch', - search_index='item_types', - indexer_class='rero_ils.modules.item_types.api:ItemTypesIndexer', + pid_type="itty", + pid_minter="item_type_id", + pid_fetcher="item_type_id", + search_class="rero_ils.modules.item_types.api:ItemTypesSearch", + search_index="item_types", + indexer_class="rero_ils.modules.item_types.api:ItemTypesIndexer", record_serializers={ - 'application/json': 'rero_ils.modules.serializers:json_v1_response' + "application/json": "rero_ils.modules.serializers:json_v1_response" }, record_serializers_aliases={ - 'json': 'application/json', + "json": "application/json", }, search_serializers={ - 'application/json': 'rero_ils.modules.serializers:json_v1_search' + "application/json": "rero_ils.modules.serializers:json_v1_search" }, - list_route='/item_types/', + list_route="/item_types/", record_loaders={ - 'application/json': lambda: ItemType(request.get_json()), + "application/json": lambda: ItemType(request.get_json()), }, - record_class='rero_ils.modules.item_types.api:ItemType', - item_route=('/item_types/'), - default_media_type='application/json', + record_class="rero_ils.modules.item_types.api:ItemType", + item_route=( + "/item_types/" + ), + default_media_type="application/json", max_result_window=MAX_RESULT_WINDOW, - search_factory_imp='rero_ils.query:organisation_search_factory', - list_permission_factory_imp=lambda record: ItemTypePermissionPolicy('search', record=record), - read_permission_factory_imp=lambda record: ItemTypePermissionPolicy('read', record=record), - create_permission_factory_imp=lambda record: ItemTypePermissionPolicy('create', record=record), - update_permission_factory_imp=lambda record: ItemTypePermissionPolicy('update', record=record), - delete_permission_factory_imp=lambda record: ItemTypePermissionPolicy('delete', record=record) + search_factory_imp="rero_ils.query:organisation_search_factory", + list_permission_factory_imp=lambda record: ItemTypePermissionPolicy( + "search", record=record + ), + read_permission_factory_imp=lambda record: ItemTypePermissionPolicy( + "read", record=record + ), + create_permission_factory_imp=lambda record: ItemTypePermissionPolicy( + "create", record=record + ), + update_permission_factory_imp=lambda record: ItemTypePermissionPolicy( + "update", record=record + ), + delete_permission_factory_imp=lambda record: ItemTypePermissionPolicy( + "delete", record=record + ), ), stat=dict( - pid_type='stat', - pid_minter='stat_id', - pid_fetcher='stat_id', - search_class='rero_ils.modules.stats.api.api:StatsSearch', - search_index='stats', - indexer_class='rero_ils.modules.stats.api.api:StatsIndexer', + pid_type="stat", + pid_minter="stat_id", + pid_fetcher="stat_id", + search_class="rero_ils.modules.stats.api.api:StatsSearch", + search_index="stats", + indexer_class="rero_ils.modules.stats.api.api:StatsIndexer", record_serializers={ - 'application/json': 'rero_ils.modules.serializers:json_v1_response', - 'text/csv': 'rero_ils.modules.stats.serializers:csv_v1_response', - }, - record_serializers_aliases={ - 'json': 'application/json', - 'csv': 'text/csv' + "application/json": "rero_ils.modules.serializers:json_v1_response", + "text/csv": 
"rero_ils.modules.stats.serializers:csv_v1_response", }, + record_serializers_aliases={"json": "application/json", "csv": "text/csv"}, search_serializers={ - 'application/json': 'rero_ils.modules.serializers:json_v1_search' + "application/json": "rero_ils.modules.serializers:json_v1_search" }, - list_route='/stats/', + list_route="/stats/", record_loaders={ - 'application/json': lambda: Stat(request.get_json()), + "application/json": lambda: Stat(request.get_json()), }, - record_class='rero_ils.modules.stats.api.api:Stat', - item_route='/stats/', - default_media_type='application/json', - search_factory_imp='rero_ils.query:organisation_search_factory', + record_class="rero_ils.modules.stats.api.api:Stat", + item_route="/stats/", + default_media_type="application/json", + search_factory_imp="rero_ils.query:organisation_search_factory", max_result_window=MAX_RESULT_WINDOW, - list_permission_factory_imp=lambda record: StatisticsPermissionPolicy('search', record=record), - read_permission_factory_imp=lambda record: StatisticsPermissionPolicy('read', record=record), - create_permission_factory_imp=lambda record: StatisticsPermissionPolicy('create', record=record), - update_permission_factory_imp=lambda record: StatisticsPermissionPolicy('update', record=record), - delete_permission_factory_imp=lambda record: StatisticsPermissionPolicy('delete', record=record) + list_permission_factory_imp=lambda record: StatisticsPermissionPolicy( + "search", record=record + ), + read_permission_factory_imp=lambda record: StatisticsPermissionPolicy( + "read", record=record + ), + create_permission_factory_imp=lambda record: StatisticsPermissionPolicy( + "create", record=record + ), + update_permission_factory_imp=lambda record: StatisticsPermissionPolicy( + "update", record=record + ), + delete_permission_factory_imp=lambda record: StatisticsPermissionPolicy( + "delete", record=record + ), ), stacfg=dict( - pid_type='stacfg', - pid_minter='stat_cfg_id', - pid_fetcher='stat_cfg_id', - search_class='rero_ils.modules.stats_cfg.api:StatsConfigurationSearch', - search_index='stats_cfg', - indexer_class=\ - 'rero_ils.modules.stats_cfg.api:StatsConfigurationIndexer', + pid_type="stacfg", + pid_minter="stat_cfg_id", + pid_fetcher="stat_cfg_id", + search_class="rero_ils.modules.stats_cfg.api:StatsConfigurationSearch", + search_index="stats_cfg", + indexer_class="rero_ils.modules.stats_cfg.api:StatsConfigurationIndexer", record_serializers={ - 'application/json': 'rero_ils.modules.serializers:json_v1_response' - }, - record_serializers_aliases={ - 'json': 'application/json' + "application/json": "rero_ils.modules.serializers:json_v1_response" }, + record_serializers_aliases={"json": "application/json"}, search_serializers_aliases={ - 'json': 'application/json', - 'rero+json': 'application/json', + "json": "application/json", + "rero+json": "application/json", }, search_serializers={ - 'application/json': 'rero_ils.modules.serializers:json_v1_search', - 'application/rero+json': 'rero_ils.modules.stats_cfg.serializers:json_search' + "application/json": "rero_ils.modules.serializers:json_v1_search", + "application/rero+json": "rero_ils.modules.stats_cfg.serializers:json_search", }, - list_route='/stats_cfg/', + list_route="/stats_cfg/", record_loaders={ - 'application/json': lambda: StatConfiguration(request.get_json()), + "application/json": lambda: StatConfiguration(request.get_json()), }, - record_class='rero_ils.modules.stats_cfg.api:StatConfiguration', - item_route=('/stats_cfg/'), - 
default_media_type='application/json', + record_class="rero_ils.modules.stats_cfg.api:StatConfiguration", + item_route=( + "/stats_cfg/" + ), + default_media_type="application/json", max_result_window=MAX_RESULT_WINDOW, - search_factory_imp='rero_ils.query:organisation_search_factory', - list_permission_factory_imp=lambda record: StatisticsConfigurationPermissionPolicy('search', record=record), - read_permission_factory_imp=lambda record: StatisticsConfigurationPermissionPolicy('read', record=record), - create_permission_factory_imp=lambda record: StatisticsConfigurationPermissionPolicy('create', record=record), - update_permission_factory_imp=lambda record: StatisticsConfigurationPermissionPolicy('update', record=record), - delete_permission_factory_imp=lambda record: StatisticsConfigurationPermissionPolicy('delete', record=record) + search_factory_imp="rero_ils.query:organisation_search_factory", + list_permission_factory_imp=lambda record: StatisticsConfigurationPermissionPolicy( + "search", record=record + ), + read_permission_factory_imp=lambda record: StatisticsConfigurationPermissionPolicy( + "read", record=record + ), + create_permission_factory_imp=lambda record: StatisticsConfigurationPermissionPolicy( + "create", record=record + ), + update_permission_factory_imp=lambda record: StatisticsConfigurationPermissionPolicy( + "update", record=record + ), + delete_permission_factory_imp=lambda record: StatisticsConfigurationPermissionPolicy( + "delete", record=record + ), ), hold=dict( - pid_type='hold', - pid_minter='holding_id', - pid_fetcher='holding_id', - search_class='rero_ils.modules.holdings.api:HoldingsSearch', - search_index='holdings', - indexer_class='rero_ils.modules.holdings.api:HoldingsIndexer', + pid_type="hold", + pid_minter="holding_id", + pid_fetcher="holding_id", + search_class="rero_ils.modules.holdings.api:HoldingsSearch", + search_index="holdings", + indexer_class="rero_ils.modules.holdings.api:HoldingsIndexer", record_serializers={ - 'application/json': 'rero_ils.modules.serializers:json_v1_response' + "application/json": "rero_ils.modules.serializers:json_v1_response" }, record_serializers_aliases={ - 'json': 'application/json', + "json": "application/json", }, search_serializers={ - 'application/json': 'rero_ils.modules.serializers:json_v1_search', - 'application/rero+json': 'rero_ils.modules.holdings.serializers:json_holdings_search' + "application/json": "rero_ils.modules.serializers:json_v1_search", + "application/rero+json": "rero_ils.modules.holdings.serializers:json_holdings_search", }, - list_route='/holdings/', + list_route="/holdings/", record_loaders={ - 'application/json': lambda: Holding(request.get_json()), + "application/json": lambda: Holding(request.get_json()), }, - record_class='rero_ils.modules.holdings.api:Holding', - item_route='/holdings/', - default_media_type='application/json', + record_class="rero_ils.modules.holdings.api:Holding", + item_route="/holdings/", + default_media_type="application/json", max_result_window=MAX_RESULT_WINDOW, - search_factory_imp='rero_ils.query:holdings_search_factory', - list_permission_factory_imp=lambda record: HoldingsPermissionPolicy('search', record=record), - read_permission_factory_imp=lambda record: HoldingsPermissionPolicy('read', record=record), - create_permission_factory_imp=lambda record: HoldingsPermissionPolicy('create', record=record), - update_permission_factory_imp=lambda record: HoldingsPermissionPolicy('update', record=record), - delete_permission_factory_imp=lambda record: 
HoldingsPermissionPolicy('delete', record=record) + search_factory_imp="rero_ils.query:holdings_search_factory", + list_permission_factory_imp=lambda record: HoldingsPermissionPolicy( + "search", record=record + ), + read_permission_factory_imp=lambda record: HoldingsPermissionPolicy( + "read", record=record + ), + create_permission_factory_imp=lambda record: HoldingsPermissionPolicy( + "create", record=record + ), + update_permission_factory_imp=lambda record: HoldingsPermissionPolicy( + "update", record=record + ), + delete_permission_factory_imp=lambda record: HoldingsPermissionPolicy( + "delete", record=record + ), ), lofi=dict( - pid_type='lofi', - pid_minter='local_field_id', - pid_fetcher='local_field_id', - search_class='rero_ils.modules.local_fields.api:LocalFieldsSearch', - search_index='local_fields', - indexer_class='rero_ils.modules.local_fields.api:LocalFieldsIndexer', + pid_type="lofi", + pid_minter="local_field_id", + pid_fetcher="local_field_id", + search_class="rero_ils.modules.local_fields.api:LocalFieldsSearch", + search_index="local_fields", + indexer_class="rero_ils.modules.local_fields.api:LocalFieldsIndexer", record_serializers={ - 'application/json': 'rero_ils.modules.serializers:json_v1_response' + "application/json": "rero_ils.modules.serializers:json_v1_response" }, record_serializers_aliases={ - 'json': 'application/json', + "json": "application/json", }, search_serializers={ - 'application/json': 'rero_ils.modules.serializers:json_v1_search' + "application/json": "rero_ils.modules.serializers:json_v1_search" }, record_loaders={ - 'application/json': lambda: LocalField(request.get_json()), + "application/json": lambda: LocalField(request.get_json()), }, - record_class='rero_ils.modules.local_fields.api:LocalField', - list_route='/local_fields/', - item_route='/local_fields/', - default_media_type='application/json', + record_class="rero_ils.modules.local_fields.api:LocalField", + list_route="/local_fields/", + item_route="/local_fields/", + default_media_type="application/json", max_result_window=MAX_RESULT_WINDOW, - search_factory_imp='rero_ils.query:organisation_search_factory', - list_permission_factory_imp=lambda record: LocalFieldPermissionPolicy('search', record=record), - read_permission_factory_imp=lambda record: LocalFieldPermissionPolicy('read', record=record), - create_permission_factory_imp=lambda record: LocalFieldPermissionPolicy('create', record=record), - update_permission_factory_imp=lambda record: LocalFieldPermissionPolicy('update', record=record), - delete_permission_factory_imp=lambda record: LocalFieldPermissionPolicy('delete', record=record) + search_factory_imp="rero_ils.query:organisation_search_factory", + list_permission_factory_imp=lambda record: LocalFieldPermissionPolicy( + "search", record=record + ), + read_permission_factory_imp=lambda record: LocalFieldPermissionPolicy( + "read", record=record + ), + create_permission_factory_imp=lambda record: LocalFieldPermissionPolicy( + "create", record=record + ), + update_permission_factory_imp=lambda record: LocalFieldPermissionPolicy( + "update", record=record + ), + delete_permission_factory_imp=lambda record: LocalFieldPermissionPolicy( + "delete", record=record + ), ), ptrn=dict( - pid_type='ptrn', - pid_minter='patron_id', - pid_fetcher='patron_id', - search_class='rero_ils.modules.patrons.api:PatronsSearch', - search_index='patrons', - indexer_class='rero_ils.modules.patrons.api:PatronsIndexer', + pid_type="ptrn", + pid_minter="patron_id", + pid_fetcher="patron_id", + 
search_class="rero_ils.modules.patrons.api:PatronsSearch", + search_index="patrons", + indexer_class="rero_ils.modules.patrons.api:PatronsIndexer", record_serializers={ - 'application/json': 'rero_ils.modules.serializers:json_v1_response' + "application/json": "rero_ils.modules.serializers:json_v1_response" }, record_serializers_aliases={ - 'json': 'application/json', - 'rero+json': 'application/rero+json', + "json": "application/json", + "rero+json": "application/rero+json", }, search_serializers={ - 'application/json': 'rero_ils.modules.serializers:json_v1_search', - 'application/rero+json': 'rero_ils.modules.patrons.serializers:json_patron_search' - }, - list_route='/patrons/', - record_loaders={ - 'application/json': 'rero_ils.modules.patrons.loaders:json_v1' - }, - record_class='rero_ils.modules.patrons.api:Patron', - item_route='/patrons/', - default_media_type='application/json', + "application/json": "rero_ils.modules.serializers:json_v1_search", + "application/rero+json": "rero_ils.modules.patrons.serializers:json_patron_search", + }, + list_route="/patrons/", + record_loaders={"application/json": "rero_ils.modules.patrons.loaders:json_v1"}, + record_class="rero_ils.modules.patrons.api:Patron", + item_route="/patrons/", + default_media_type="application/json", max_result_window=MAX_RESULT_WINDOW, - search_factory_imp='rero_ils.query:organisation_search_factory', - list_permission_factory_imp=lambda record: PatronPermissionPolicy('search', record=record), - read_permission_factory_imp=lambda record: PatronPermissionPolicy('read', record=record), - create_permission_factory_imp=lambda record: PatronPermissionPolicy('create', record=record), - update_permission_factory_imp=lambda record: PatronPermissionPolicy('update', record=record), - delete_permission_factory_imp=lambda record: PatronPermissionPolicy('delete', record=record) + search_factory_imp="rero_ils.query:organisation_search_factory", + list_permission_factory_imp=lambda record: PatronPermissionPolicy( + "search", record=record + ), + read_permission_factory_imp=lambda record: PatronPermissionPolicy( + "read", record=record + ), + create_permission_factory_imp=lambda record: PatronPermissionPolicy( + "create", record=record + ), + update_permission_factory_imp=lambda record: PatronPermissionPolicy( + "update", record=record + ), + delete_permission_factory_imp=lambda record: PatronPermissionPolicy( + "delete", record=record + ), ), pttr=dict( - pid_type='pttr', - pid_minter='patron_transaction_id', - pid_fetcher='patron_transaction_id', - search_class='rero_ils.modules.patron_transactions.api:PatronTransactionsSearch', - search_index='patron_transactions', - indexer_class='rero_ils.modules.patron_transactions.api:PatronTransactionsIndexer', + pid_type="pttr", + pid_minter="patron_transaction_id", + pid_fetcher="patron_transaction_id", + search_class="rero_ils.modules.patron_transactions.api:PatronTransactionsSearch", + search_index="patron_transactions", + indexer_class="rero_ils.modules.patron_transactions.api:PatronTransactionsIndexer", record_serializers={ - 'application/json': 'rero_ils.modules.serializers:json_v1_response' + "application/json": "rero_ils.modules.serializers:json_v1_response" }, record_serializers_aliases={ - 'json': 'application/json', + "json": "application/json", }, search_serializers={ - 'application/json': 'rero_ils.modules.serializers:json_v1_search', - 'application/rero+json': 'rero_ils.modules.patron_transactions.serializers:json_pttr_search' + "application/json": 
"rero_ils.modules.serializers:json_v1_search", + "application/rero+json": "rero_ils.modules.patron_transactions.serializers:json_pttr_search", }, search_serializers_aliases={ - 'json': 'application/json', - 'rero': 'application/rero+json' + "json": "application/json", + "rero": "application/rero+json", }, record_loaders={ - 'application/json': lambda: PatronTransaction(request.get_json()), + "application/json": lambda: PatronTransaction(request.get_json()), }, - record_class='rero_ils.modules.patron_transactions.api:PatronTransaction', - list_route='/patron_transactions/', - item_route=('/patron_transactions/'), - default_media_type='application/json', + record_class="rero_ils.modules.patron_transactions.api:PatronTransaction", + list_route="/patron_transactions/", + item_route=( + "/patron_transactions/" + ), + default_media_type="application/json", max_result_window=MAX_RESULT_WINDOW, - search_factory_imp='rero_ils.query:patron_transactions_search_factory', - list_permission_factory_imp=lambda record: PatronTransactionPermissionPolicy('search', record=record), - read_permission_factory_imp=lambda record: PatronTransactionPermissionPolicy('read', record=record), - create_permission_factory_imp=lambda record: PatronTransactionPermissionPolicy('create', record=record), - update_permission_factory_imp=lambda record: PatronTransactionPermissionPolicy('update', record=record), - delete_permission_factory_imp=lambda record: PatronTransactionPermissionPolicy('delete', record=record) + search_factory_imp="rero_ils.query:patron_transactions_search_factory", + list_permission_factory_imp=lambda record: PatronTransactionPermissionPolicy( + "search", record=record + ), + read_permission_factory_imp=lambda record: PatronTransactionPermissionPolicy( + "read", record=record + ), + create_permission_factory_imp=lambda record: PatronTransactionPermissionPolicy( + "create", record=record + ), + update_permission_factory_imp=lambda record: PatronTransactionPermissionPolicy( + "update", record=record + ), + delete_permission_factory_imp=lambda record: PatronTransactionPermissionPolicy( + "delete", record=record + ), ), ptre=dict( - pid_type='ptre', - pid_minter='patron_transaction_event_id', - pid_fetcher='patron_transaction_event_id', - search_class=('rero_ils.modules.patron_transaction_events.api:' - 'PatronTransactionEventsSearch'), - search_index='patron_transaction_events', - indexer_class='rero_ils.modules.patron_transaction_events.api:PatronTransactionEventsIndexer', + pid_type="ptre", + pid_minter="patron_transaction_event_id", + pid_fetcher="patron_transaction_event_id", + search_class=( + "rero_ils.modules.patron_transaction_events.api:" + "PatronTransactionEventsSearch" + ), + search_index="patron_transaction_events", + indexer_class="rero_ils.modules.patron_transaction_events.api:PatronTransactionEventsIndexer", record_serializers={ - 'application/json': 'rero_ils.modules.serializers:json_v1_response' - }, - record_serializers_aliases={ - 'json': 'application/json' + "application/json": "rero_ils.modules.serializers:json_v1_response" }, + record_serializers_aliases={"json": "application/json"}, search_serializers={ - 'application/json': 'rero_ils.modules.serializers:json_v1_search', - 'application/rero+json': 'rero_ils.modules.patron_transaction_events.serializers:json_ptre_search' + "application/json": "rero_ils.modules.serializers:json_v1_search", + "application/rero+json": "rero_ils.modules.patron_transaction_events.serializers:json_ptre_search", }, search_serializers_aliases={ - 'json': 
-            'rero': 'application/rero+json'
+            "json": "application/json",
+            "rero": "application/rero+json",
         },
         record_loaders={
-            'application/json': lambda: PatronTransactionEvent(request.get_json()),
+            "application/json": lambda: PatronTransactionEvent(request.get_json()),
         },
-        record_class='rero_ils.modules.patron_transaction_events.api:PatronTransactionEvent',
-        list_route='/patron_transaction_events/',
-        item_route=('/patron_transaction_events/'),
-        default_media_type='application/json',
+        record_class="rero_ils.modules.patron_transaction_events.api:PatronTransactionEvent",
+        list_route="/patron_transaction_events/",
+        item_route=(
+            "/patron_transaction_events/"
+        ),
+        default_media_type="application/json",
         max_result_window=MAX_RESULT_WINDOW,
-        search_factory_imp='rero_ils.query:patron_transactions_search_factory',
-        list_permission_factory_imp=lambda record: PatronTransactionEventPermissionPolicy('search', record=record),
-        read_permission_factory_imp=lambda record: PatronTransactionEventPermissionPolicy('read', record=record),
-        create_permission_factory_imp=lambda record: PatronTransactionEventPermissionPolicy('create', record=record),
-        update_permission_factory_imp=lambda record: PatronTransactionEventPermissionPolicy('update', record=record),
-        delete_permission_factory_imp=lambda record: PatronTransactionEventPermissionPolicy('delete', record=record)
+        search_factory_imp="rero_ils.query:patron_transactions_search_factory",
+        list_permission_factory_imp=lambda record: PatronTransactionEventPermissionPolicy(
+            "search", record=record
+        ),
+        read_permission_factory_imp=lambda record: PatronTransactionEventPermissionPolicy(
+            "read", record=record
+        ),
+        create_permission_factory_imp=lambda record: PatronTransactionEventPermissionPolicy(
+            "create", record=record
+        ),
+        update_permission_factory_imp=lambda record: PatronTransactionEventPermissionPolicy(
+            "update", record=record
+        ),
+        delete_permission_factory_imp=lambda record: PatronTransactionEventPermissionPolicy(
+            "delete", record=record
+        ),
     ),
     ptty=dict(
-        pid_type='ptty',
-        pid_minter='patron_type_id',
-        pid_fetcher='patron_type_id',
-        search_class='rero_ils.modules.patron_types.api:PatronTypesSearch',
-        search_index='patron_types',
-        indexer_class='rero_ils.modules.patron_types.api:PatronTypesIndexer',
+        pid_type="ptty",
+        pid_minter="patron_type_id",
+        pid_fetcher="patron_type_id",
+        search_class="rero_ils.modules.patron_types.api:PatronTypesSearch",
+        search_index="patron_types",
+        indexer_class="rero_ils.modules.patron_types.api:PatronTypesIndexer",
         record_serializers={
-            'application/json': 'rero_ils.modules.serializers:json_v1_response'
+            "application/json": "rero_ils.modules.serializers:json_v1_response"
         },
         record_serializers_aliases={
-            'json': 'application/json',
+            "json": "application/json",
         },
         search_serializers={
-            'application/json': 'rero_ils.modules.serializers:json_v1_search'
+            "application/json": "rero_ils.modules.serializers:json_v1_search"
         },
-        list_route='/patron_types/',
+        list_route="/patron_types/",
         record_loaders={
-            'application/json': lambda: PatronType(request.get_json()),
+            "application/json": lambda: PatronType(request.get_json()),
         },
-        record_class='rero_ils.modules.patron_types.api:PatronType',
-        item_route=('/patron_types/'),
-        default_media_type='application/json',
+        record_class="rero_ils.modules.patron_types.api:PatronType",
+        item_route=(
+            "/patron_types/"
+        ),
+        default_media_type="application/json",
         max_result_window=MAX_RESULT_WINDOW,
-        search_factory_imp='rero_ils.query:organisation_search_factory',
-        list_permission_factory_imp=lambda record: PatronTypePermissionPolicy('search', record=record),
-        read_permission_factory_imp=lambda record: PatronTypePermissionPolicy('read', record=record),
-        create_permission_factory_imp=lambda record: PatronTypePermissionPolicy('create', record=record),
-        update_permission_factory_imp=lambda record: PatronTypePermissionPolicy('update', record=record),
-        delete_permission_factory_imp=lambda record: PatronTypePermissionPolicy('delete', record=record)
+        search_factory_imp="rero_ils.query:organisation_search_factory",
+        list_permission_factory_imp=lambda record: PatronTypePermissionPolicy(
+            "search", record=record
+        ),
+        read_permission_factory_imp=lambda record: PatronTypePermissionPolicy(
+            "read", record=record
+        ),
+        create_permission_factory_imp=lambda record: PatronTypePermissionPolicy(
+            "create", record=record
+        ),
+        update_permission_factory_imp=lambda record: PatronTypePermissionPolicy(
+            "update", record=record
+        ),
+        delete_permission_factory_imp=lambda record: PatronTypePermissionPolicy(
+            "delete", record=record
+        ),
     ),
     org=dict(
-        pid_type='org',
-        pid_minter='organisation_id',
-        pid_fetcher='organisation_id',
-        search_class='rero_ils.modules.organisations.api:OrganisationsSearch',
-        search_index='organisations',
-        indexer_class=('rero_ils.modules.organisations.api:'
-                       'OrganisationsIndexer'),
+        pid_type="org",
+        pid_minter="organisation_id",
+        pid_fetcher="organisation_id",
+        search_class="rero_ils.modules.organisations.api:OrganisationsSearch",
+        search_index="organisations",
+        indexer_class=("rero_ils.modules.organisations.api:" "OrganisationsIndexer"),
         record_serializers={
-            'application/json': 'rero_ils.modules.serializers:json_v1_response'
+            "application/json": "rero_ils.modules.serializers:json_v1_response"
         },
         record_serializers_aliases={
-            'json': 'application/json',
+            "json": "application/json",
         },
         search_serializers={
-            'application/json': 'rero_ils.modules.serializers:json_v1_search'
+            "application/json": "rero_ils.modules.serializers:json_v1_search"
         },
-        list_route='/organisations/',
+        list_route="/organisations/",
         record_loaders={
-            'application/json': lambda: Organisation(request.get_json()),
+            "application/json": lambda: Organisation(request.get_json()),
         },
-        record_class='rero_ils.modules.organisations.api:Organisation',
-        item_route='/organisations/',
-        default_media_type='application/json',
+        record_class="rero_ils.modules.organisations.api:Organisation",
+        item_route="/organisations/",
+        default_media_type="application/json",
         max_result_window=MAX_RESULT_WINDOW,
-        search_factory_imp='rero_ils.query:organisation_organisation_search_factory',
-        list_permission_factory_imp=lambda record: OrganisationPermissionPolicy('search', record=record),
-        read_permission_factory_imp=lambda record: OrganisationPermissionPolicy('read', record=record),
-        create_permission_factory_imp=lambda record: OrganisationPermissionPolicy('create', record=record),
-        update_permission_factory_imp=lambda record: OrganisationPermissionPolicy('update', record=record),
-        delete_permission_factory_imp=lambda record: OrganisationPermissionPolicy('delete', record=record)
+        search_factory_imp="rero_ils.query:organisation_organisation_search_factory",
+        list_permission_factory_imp=lambda record: OrganisationPermissionPolicy(
+            "search", record=record
+        ),
+        read_permission_factory_imp=lambda record: OrganisationPermissionPolicy(
+            "read", record=record
+        ),
+        create_permission_factory_imp=lambda record: OrganisationPermissionPolicy(
+            "create", record=record
+        ),
+        update_permission_factory_imp=lambda record: OrganisationPermissionPolicy(
+            "update", record=record
+        ),
+        delete_permission_factory_imp=lambda record: OrganisationPermissionPolicy(
+            "delete", record=record
+        ),
     ),
     lib=dict(
-        pid_type='lib',
-        pid_minter='library_id',
-        pid_fetcher='library_id',
-        search_class='rero_ils.modules.libraries.api:LibrariesSearch',
-        search_index='libraries',
-        indexer_class='rero_ils.modules.libraries.api:LibrariesIndexer',
+        pid_type="lib",
+        pid_minter="library_id",
+        pid_fetcher="library_id",
+        search_class="rero_ils.modules.libraries.api:LibrariesSearch",
+        search_index="libraries",
+        indexer_class="rero_ils.modules.libraries.api:LibrariesIndexer",
         record_serializers={
-            'application/json': 'rero_ils.modules.serializers:json_v1_response'
+            "application/json": "rero_ils.modules.serializers:json_v1_response"
         },
         record_serializers_aliases={
-            'json': 'application/json',
+            "json": "application/json",
         },
         search_serializers={
-            'application/json': 'rero_ils.modules.serializers:json_v1_search'
+            "application/json": "rero_ils.modules.serializers:json_v1_search"
         },
-        list_route='/libraries/',
+        list_route="/libraries/",
         record_loaders={
-            'application/json': lambda: Library(request.get_json()),
+            "application/json": lambda: Library(request.get_json()),
         },
-        record_class='rero_ils.modules.libraries.api:Library',
-        item_route='/libraries/',
-        default_media_type='application/json',
+        record_class="rero_ils.modules.libraries.api:Library",
+        item_route="/libraries/",
+        default_media_type="application/json",
         max_result_window=MAX_RESULT_WINDOW,
-        search_factory_imp='rero_ils.query:organisation_search_factory',
-        list_permission_factory_imp=lambda record: LibraryPermissionPolicy('search', record=record),
-        read_permission_factory_imp=lambda record: LibraryPermissionPolicy('read', record=record),
-        create_permission_factory_imp=lambda record: LibraryPermissionPolicy('create', record=record),
-        update_permission_factory_imp=lambda record: LibraryPermissionPolicy('update', record=record),
-        delete_permission_factory_imp=lambda record: LibraryPermissionPolicy('delete', record=record)
+        search_factory_imp="rero_ils.query:organisation_search_factory",
+        list_permission_factory_imp=lambda record: LibraryPermissionPolicy(
+            "search", record=record
+        ),
+        read_permission_factory_imp=lambda record: LibraryPermissionPolicy(
+            "read", record=record
+        ),
+        create_permission_factory_imp=lambda record: LibraryPermissionPolicy(
+            "create", record=record
+        ),
+        update_permission_factory_imp=lambda record: LibraryPermissionPolicy(
+            "update", record=record
+        ),
+        delete_permission_factory_imp=lambda record: LibraryPermissionPolicy(
+            "delete", record=record
+        ),
     ),
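Most endpoints point search_factory_imp at rero_ils.query:organisation_search_factory, which scopes every search to the caller's organisation. A simplified sketch of the idea with plain Elasticsearch query dicts; the field name organisation.pid is an assumption here, not taken from the source:

    # Sketch: wrap the user query in a bool filter on the organisation pid.
    def organisation_search_factory(query, organisation_pid):
        return {
            "query": {
                "bool": {
                    "must": [query],
                    "filter": [{"term": {"organisation.pid": organisation_pid}}],
                }
            }
        }

    scoped = organisation_search_factory({"match": {"title": "python"}}, "org1")
    assert scoped["query"]["bool"]["filter"][0]["term"]["organisation.pid"] == "org1"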
     loc=dict(
-        pid_type='loc',
-        pid_minter='location_id',
-        pid_fetcher='location_id',
-        search_class='rero_ils.modules.locations.api:LocationsSearch',
-        search_index='locations',
-        indexer_class='rero_ils.modules.locations.api:LocationsIndexer',
+        pid_type="loc",
+        pid_minter="location_id",
+        pid_fetcher="location_id",
+        search_class="rero_ils.modules.locations.api:LocationsSearch",
+        search_index="locations",
+        indexer_class="rero_ils.modules.locations.api:LocationsIndexer",
         record_serializers={
-            'application/json': 'rero_ils.modules.serializers:json_v1_response'
+            "application/json": "rero_ils.modules.serializers:json_v1_response"
         },
         record_serializers_aliases={
-            'json': 'application/json',
+            "json": "application/json",
         },
         search_serializers={
-            'application/json': 'rero_ils.modules.serializers:json_v1_search',
-            'application/rero+json': 'rero_ils.modules.locations.serializers:json_loc_search'
+            "application/json": "rero_ils.modules.serializers:json_v1_search",
+            "application/rero+json": "rero_ils.modules.locations.serializers:json_loc_search",
         },
         search_serializers_aliases={
-            'json': 'application/json',
-            'rero': 'application/rero+json'
+            "json": "application/json",
+            "rero": "application/rero+json",
         },
-        list_route='/locations/',
+        list_route="/locations/",
         record_loaders={
-            'application/json': lambda: Location(request.get_json()),
+            "application/json": lambda: Location(request.get_json()),
         },
-        record_class='rero_ils.modules.locations.api:Location',
-        item_route='/locations/',
-        default_media_type='application/json',
+        record_class="rero_ils.modules.locations.api:Location",
+        item_route="/locations/",
+        default_media_type="application/json",
         max_result_window=MAX_RESULT_WINDOW,
-        search_factory_imp='rero_ils.query:viewcode_patron_search_factory',
-        list_permission_factory_imp=lambda record: LocationPermissionPolicy('search', record=record),
-        read_permission_factory_imp=lambda record: LocationPermissionPolicy('read', record=record),
-        create_permission_factory_imp=lambda record: LocationPermissionPolicy('create', record=record),
-        update_permission_factory_imp=lambda record: LocationPermissionPolicy('update', record=record),
-        delete_permission_factory_imp=lambda record: LocationPermissionPolicy('delete', record=record)
+        search_factory_imp="rero_ils.query:viewcode_patron_search_factory",
+        list_permission_factory_imp=lambda record: LocationPermissionPolicy(
+            "search", record=record
+        ),
+        read_permission_factory_imp=lambda record: LocationPermissionPolicy(
+            "read", record=record
+        ),
+        create_permission_factory_imp=lambda record: LocationPermissionPolicy(
+            "create", record=record
+        ),
+        update_permission_factory_imp=lambda record: LocationPermissionPolicy(
+            "update", record=record
+        ),
+        delete_permission_factory_imp=lambda record: LocationPermissionPolicy(
+            "delete", record=record
+        ),
     ),
     rement=dict(
-        pid_type='rement',
-        pid_minter='remote_entity_id',
-        pid_fetcher='remote_entity_id',
-        search_class='rero_ils.modules.entities.remote_entities.api:RemoteEntitiesSearch',
-        search_index='remote_entities',
-        indexer_class='rero_ils.modules.entities.remote_entities.api:RemoteEntitiesIndexer',
+        pid_type="rement",
+        pid_minter="remote_entity_id",
+        pid_fetcher="remote_entity_id",
+        search_class="rero_ils.modules.entities.remote_entities.api:RemoteEntitiesSearch",
+        search_index="remote_entities",
+        indexer_class="rero_ils.modules.entities.remote_entities.api:RemoteEntitiesIndexer",
         record_serializers={
-            'application/json': 'rero_ils.modules.serializers:json_v1_response'
+            "application/json": "rero_ils.modules.serializers:json_v1_response"
         },
         record_serializers_aliases={
-            'json': 'application/json',
+            "json": "application/json",
         },
         search_serializers={
-            'application/json': 'rero_ils.modules.serializers:json_v1_search'
-        },
-        search_serializers_aliases={
-            'json': 'application/json'
+            "application/json": "rero_ils.modules.serializers:json_v1_search"
         },
-        list_route='/remote_entities/',
+        search_serializers_aliases={"json": "application/json"},
+        list_route="/remote_entities/",
         record_loaders={
-            'application/json': lambda: RemoteEntity(request.get_json()),
+            "application/json": lambda: RemoteEntity(request.get_json()),
         },
-        record_class='rero_ils.modules.entities.remote_entities.api:RemoteEntity',
-        item_route='/remote_entities/',
-        default_media_type='application/json',
+        record_class="rero_ils.modules.entities.remote_entities.api:RemoteEntity",
+        item_route="/remote_entities/",
+        default_media_type="application/json",
         max_result_window=MAX_RESULT_WINDOW,
-        search_factory_imp='rero_ils.query:remote_entity_view_search_factory',
-        list_permission_factory_imp=lambda record: RemoteEntityPermissionPolicy('search', record=record),
-        read_permission_factory_imp=lambda record: RemoteEntityPermissionPolicy('read', record=record),
-        create_permission_factory_imp=lambda record: RemoteEntityPermissionPolicy('create', record=record),
-        update_permission_factory_imp=lambda record: RemoteEntityPermissionPolicy('update', record=record),
-        delete_permission_factory_imp=lambda record: RemoteEntityPermissionPolicy('delete', record=record)
+        search_factory_imp="rero_ils.query:remote_entity_view_search_factory",
+        list_permission_factory_imp=lambda record: RemoteEntityPermissionPolicy(
+            "search", record=record
+        ),
+        read_permission_factory_imp=lambda record: RemoteEntityPermissionPolicy(
+            "read", record=record
+        ),
+        create_permission_factory_imp=lambda record: RemoteEntityPermissionPolicy(
+            "create", record=record
+        ),
+        update_permission_factory_imp=lambda record: RemoteEntityPermissionPolicy(
+            "update", record=record
+        ),
+        delete_permission_factory_imp=lambda record: RemoteEntityPermissionPolicy(
+            "delete", record=record
+        ),
     ),
     locent=dict(
-        pid_type='locent',
-        pid_minter='local_entity_id',
-        pid_fetcher='local_entity_id',
-        search_class='rero_ils.modules.entities.local_entities.api:LocalEntitiesSearch',
-        search_index='local_entities',
-        indexer_class='rero_ils.modules.entities.local_entities.indexer:LocalEntitiesIndexer',
+        pid_type="locent",
+        pid_minter="local_entity_id",
+        pid_fetcher="local_entity_id",
+        search_class="rero_ils.modules.entities.local_entities.api:LocalEntitiesSearch",
+        search_index="local_entities",
+        indexer_class="rero_ils.modules.entities.local_entities.indexer:LocalEntitiesIndexer",
         record_serializers={
-            'application/json': 'rero_ils.modules.serializers:json_v1_response'
+            "application/json": "rero_ils.modules.serializers:json_v1_response"
         },
         record_serializers_aliases={
-            'json': 'application/json',
+            "json": "application/json",
         },
         search_serializers={
-            'application/json': 'rero_ils.modules.serializers:json_v1_search'
-        },
-        search_serializers_aliases={
-            'json': 'application/json'
+            "application/json": "rero_ils.modules.serializers:json_v1_search"
         },
-        list_route='/local_entities/',
+        search_serializers_aliases={"json": "application/json"},
+        list_route="/local_entities/",
         record_loaders={
-            'application/json': lambda: LocalEntity(request.get_json()),
+            "application/json": lambda: LocalEntity(request.get_json()),
         },
-        record_class='rero_ils.modules.entities.local_entities.api:LocalEntity',
-        item_route='/local_entities/',
-        default_media_type='application/json',
+        record_class="rero_ils.modules.entities.local_entities.api:LocalEntity",
+        item_route="/local_entities/",
+        default_media_type="application/json",
         max_result_window=MAX_RESULT_WINDOW,
-        search_factory_imp='rero_ils.query:remote_entity_view_search_factory',
-        list_permission_factory_imp=lambda record: LocalEntityPermissionPolicy('search', record=record),
-        read_permission_factory_imp=lambda record: LocalEntityPermissionPolicy('read', record=record),
-        create_permission_factory_imp=lambda record: LocalEntityPermissionPolicy('create', record=record),
-        update_permission_factory_imp=lambda record: LocalEntityPermissionPolicy('update', record=record),
-        delete_permission_factory_imp=lambda record: LocalEntityPermissionPolicy('delete', record=record)
+        search_factory_imp="rero_ils.query:remote_entity_view_search_factory",
+        list_permission_factory_imp=lambda record: LocalEntityPermissionPolicy(
+            "search", record=record
+        ),
+        read_permission_factory_imp=lambda record: LocalEntityPermissionPolicy(
+            "read", record=record
+        ),
+        create_permission_factory_imp=lambda record: LocalEntityPermissionPolicy(
+            "create", record=record
+        ),
+        update_permission_factory_imp=lambda record: LocalEntityPermissionPolicy(
+            "update", record=record
+        ),
+        delete_permission_factory_imp=lambda record: LocalEntityPermissionPolicy(
+            "delete", record=record
+        ),
     ),
     ent=dict(
-        pid_type='ent',
-        pid_minter='entity_id',  # This is mandatory for invenio-records-rest but not used
-        pid_fetcher='entity_id',
-        search_index='entities',
+        pid_type="ent",
+        pid_minter="entity_id",  # This is mandatory for invenio-records-rest but not used
+        pid_fetcher="entity_id",
+        search_index="entities",
         record_serializers={
-            'application/json': 'rero_ils.modules.serializers:json_v1_response'
+            "application/json": "rero_ils.modules.serializers:json_v1_response"
         },
         # This is mandatory for invenio-records-rest but not used
         search_serializers={
-            'application/json': 'rero_ils.modules.entities.serializers:json_entities_search'
+            "application/json": "rero_ils.modules.entities.serializers:json_entities_search"
         },
-        search_serializers_aliases={
-            'json': 'application/json'
-        },
-        list_route='/entities/',
-        item_route='/entities/',  # mandatory for invenio-records-rest (only used for permissions)
-        default_media_type='application/json',
+        search_serializers_aliases={"json": "application/json"},
+        list_route="/entities/",
+        item_route="/entities/",  # mandatory for invenio-records-rest (only used for permissions)
+        default_media_type="application/json",
         max_result_window=MAX_RESULT_WINDOW,
-        search_factory_imp='rero_ils.query:remote_entity_view_search_factory',
+        search_factory_imp="rero_ils.query:remote_entity_view_search_factory",
         list_permission_factory_imp=allow_all,
         read_permission_factory_imp=deny_all,
         create_permission_factory_imp=deny_all,
         update_permission_factory_imp=deny_all,
-        delete_permission_factory_imp=deny_all
+        delete_permission_factory_imp=deny_all,
     ),
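The serializer maps drive content negotiation: keys are the media types a client can request via the Accept header, and the *_aliases maps let a query string like ?format=rero select application/rero+json. A toy resolver is a reasonable mental model of the lookup order (the real logic lives in invenio-records-rest, so treat this as an assumption):

    # Sketch: resolve a serializer from an alias or an Accept media type.
    serializers = {
        "application/json": "json_v1_search",
        "application/rero+json": "json_acor_search",
    }
    aliases = {"json": "application/json", "rero": "application/rero+json"}

    def pick_serializer(accept=None, fmt=None):
        # In this toy version ?format=... wins over the Accept header.
        media_type = aliases.get(fmt, accept or "application/json")
        return serializers[media_type]

    assert pick_serializer(fmt="rero") == "json_acor_search"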
     cipo=dict(
-        pid_type='cipo',
-        pid_minter='circ_policy_id',
-        pid_fetcher='circ_policy_id',
-        search_class='rero_ils.modules.circ_policies.api:CircPoliciesSearch',
-        search_index='circ_policies',
-        indexer_class='rero_ils.modules.circ_policies.api:CircPoliciesIndexer',
+        pid_type="cipo",
+        pid_minter="circ_policy_id",
+        pid_fetcher="circ_policy_id",
+        search_class="rero_ils.modules.circ_policies.api:CircPoliciesSearch",
+        search_index="circ_policies",
+        indexer_class="rero_ils.modules.circ_policies.api:CircPoliciesIndexer",
         record_serializers={
-            'application/json': 'rero_ils.modules.serializers:json_v1_response'
+            "application/json": "rero_ils.modules.serializers:json_v1_response"
         },
         record_serializers_aliases={
-            'json': 'application/json',
+            "json": "application/json",
         },
         search_serializers={
-            'application/json': 'rero_ils.modules.serializers:json_v1_search'
+            "application/json": "rero_ils.modules.serializers:json_v1_search"
         },
         record_loaders={
-            'application/json': lambda: CircPolicy(request.get_json()),
-        },
-        list_route='/circ_policies/',
-        record_class='rero_ils.modules.circ_policies.api:CircPolicy',
-        item_route=('/circ_policies/'),
-        default_media_type='application/json',
+            "application/json": lambda: CircPolicy(request.get_json()),
+        },
+        list_route="/circ_policies/",
+        record_class="rero_ils.modules.circ_policies.api:CircPolicy",
+        item_route=(
+            "/circ_policies/"
+        ),
+        default_media_type="application/json",
         max_result_window=MAX_RESULT_WINDOW,
-        search_factory_imp='rero_ils.query:organisation_search_factory',
-        list_permission_factory_imp=lambda record: CirculationPolicyPermissionPolicy('search', record=record),
-        read_permission_factory_imp=lambda record: CirculationPolicyPermissionPolicy('read', record=record),
-        create_permission_factory_imp=lambda record: CirculationPolicyPermissionPolicy('create', record=record),
-        update_permission_factory_imp=lambda record: CirculationPolicyPermissionPolicy('update', record=record),
-        delete_permission_factory_imp=lambda record: CirculationPolicyPermissionPolicy('delete', record=record)
+        search_factory_imp="rero_ils.query:organisation_search_factory",
+        list_permission_factory_imp=lambda record: CirculationPolicyPermissionPolicy(
+            "search", record=record
+        ),
+        read_permission_factory_imp=lambda record: CirculationPolicyPermissionPolicy(
+            "read", record=record
+        ),
+        create_permission_factory_imp=lambda record: CirculationPolicyPermissionPolicy(
+            "create", record=record
+        ),
+        update_permission_factory_imp=lambda record: CirculationPolicyPermissionPolicy(
+            "update", record=record
+        ),
+        delete_permission_factory_imp=lambda record: CirculationPolicyPermissionPolicy(
+            "delete", record=record
+        ),
     ),
     notif=dict(
-        pid_type='notif',
-        pid_minter='notification_id',
-        pid_fetcher='notification_id',
-        search_class='rero_ils.modules.notifications.api:NotificationsSearch',
-        search_index='notifications',
-        indexer_class=('rero_ils.modules.notifications.api:'
-                       'NotificationsIndexer'),
+        pid_type="notif",
+        pid_minter="notification_id",
+        pid_fetcher="notification_id",
+        search_class="rero_ils.modules.notifications.api:NotificationsSearch",
+        search_index="notifications",
+        indexer_class=("rero_ils.modules.notifications.api:" "NotificationsIndexer"),
         record_serializers={
-            'application/json': 'rero_ils.modules.serializers:json_v1_response'
+            "application/json": "rero_ils.modules.serializers:json_v1_response"
         },
         record_serializers_aliases={
-            'json': 'application/json',
+            "json": "application/json",
         },
         search_serializers={
-            'application/json': 'rero_ils.modules.serializers:json_v1_search'
+            "application/json": "rero_ils.modules.serializers:json_v1_search"
         },
         record_loaders={
-            'application/json': lambda: Notification(request.get_json()),
-        },
-        list_route='/notifications/',
-        record_class='rero_ils.modules.notifications.api:Notification',
-        item_route=('/notifications/'),
-        default_media_type='application/json',
+            "application/json": lambda: Notification(request.get_json()),
+        },
+        list_route="/notifications/",
+        record_class="rero_ils.modules.notifications.api:Notification",
+        item_route=(
+            "/notifications/"
+        ),
+        default_media_type="application/json",
         max_result_window=MAX_RESULT_WINDOW,
-        search_factory_imp='rero_ils.query:organisation_search_factory',
-        list_permission_factory_imp=lambda record: NotificationPermissionPolicy('search', record=record),
-        read_permission_factory_imp=lambda record: NotificationPermissionPolicy('read', record=record),
-        create_permission_factory_imp=lambda record: NotificationPermissionPolicy('create', record=record),
-        update_permission_factory_imp=lambda record: NotificationPermissionPolicy('update', record=record),
-        delete_permission_factory_imp=lambda record: NotificationPermissionPolicy('delete', record=record)
+        search_factory_imp="rero_ils.query:organisation_search_factory",
+        list_permission_factory_imp=lambda record: NotificationPermissionPolicy(
+            "search", record=record
+        ),
+        read_permission_factory_imp=lambda record: NotificationPermissionPolicy(
+            "read", record=record
+        ),
+        create_permission_factory_imp=lambda record: NotificationPermissionPolicy(
+            "create", record=record
+        ),
+        update_permission_factory_imp=lambda record: NotificationPermissionPolicy(
+            "update", record=record
+        ),
+        delete_permission_factory_imp=lambda record: NotificationPermissionPolicy(
+            "delete", record=record
+        ),
     ),
     vndr=dict(
-        pid_type='vndr',
-        pid_minter='vendor_id',
-        pid_fetcher='vendor_id',
-        search_class='rero_ils.modules.vendors.api:VendorsSearch',
-        search_index='vendors',
-        indexer_class='rero_ils.modules.vendors.api:VendorsIndexer',
+        pid_type="vndr",
+        pid_minter="vendor_id",
+        pid_fetcher="vendor_id",
+        search_class="rero_ils.modules.vendors.api:VendorsSearch",
+        search_index="vendors",
+        indexer_class="rero_ils.modules.vendors.api:VendorsIndexer",
         record_serializers={
-            'application/json': 'rero_ils.modules.serializers:json_v1_response',
-            'application/rero+json': 'rero_ils.modules.acquisition.acq_accounts.serializers:json_acq_account_response'
+            "application/json": "rero_ils.modules.serializers:json_v1_response",
+            "application/rero+json": "rero_ils.modules.acquisition.acq_accounts.serializers:json_acq_account_response",
         },
         record_serializers_aliases={
-            'json': 'application/json',
-            'rero': 'application/rero+json'
+            "json": "application/json",
+            "rero": "application/rero+json",
         },
         search_serializers={
-            'application/json': 'rero_ils.modules.serializers:json_v1_search'
+            "application/json": "rero_ils.modules.serializers:json_v1_search"
         },
         record_loaders={
-            'application/json': lambda: Vendor(request.get_json()),
+            "application/json": lambda: Vendor(request.get_json()),
         },
-        record_class='rero_ils.modules.vendors.api:Vendor',
-        list_route='/vendors/',
-        item_route=('/vendors/'),
-        default_media_type='application/json',
+        record_class="rero_ils.modules.vendors.api:Vendor",
+        list_route="/vendors/",
+        item_route=(
+            "/vendors/"
+        ),
+        default_media_type="application/json",
         max_result_window=MAX_RESULT_WINDOW,
-        search_factory_imp='rero_ils.query:organisation_search_factory',
-        list_permission_factory_imp=lambda record: VendorPermissionPolicy('search', record=record),
-        read_permission_factory_imp=lambda record: VendorPermissionPolicy('read', record=record),
-        create_permission_factory_imp=lambda record: VendorPermissionPolicy('create', record=record),
-        update_permission_factory_imp=lambda record: VendorPermissionPolicy('update', record=record),
-        delete_permission_factory_imp=lambda record: VendorPermissionPolicy('delete', record=record)
+        search_factory_imp="rero_ils.query:organisation_search_factory",
+        list_permission_factory_imp=lambda record: VendorPermissionPolicy(
+            "search", record=record
+        ),
+        read_permission_factory_imp=lambda record: VendorPermissionPolicy(
+            "read", record=record
+        ),
+        create_permission_factory_imp=lambda record: VendorPermissionPolicy(
+            "create", record=record
+        ),
+        update_permission_factory_imp=lambda record: VendorPermissionPolicy(
+            "update", record=record
+        ),
+        delete_permission_factory_imp=lambda record: VendorPermissionPolicy(
+            "delete", record=record
+        ),
     ),
     acac=dict(
-        pid_type='acac',
-        pid_minter='acq_account_id',
-        pid_fetcher='acq_account_id',
-        search_class='rero_ils.modules.acquisition.acq_accounts.api:AcqAccountsSearch',
-        search_index='acq_accounts',
-        indexer_class='rero_ils.modules.acquisition.acq_accounts.api:AcqAccountsIndexer',
+        pid_type="acac",
+        pid_minter="acq_account_id",
+        pid_fetcher="acq_account_id",
+        search_class="rero_ils.modules.acquisition.acq_accounts.api:AcqAccountsSearch",
+        search_index="acq_accounts",
+        indexer_class="rero_ils.modules.acquisition.acq_accounts.api:AcqAccountsIndexer",
         record_serializers={
-            'application/json': 'rero_ils.modules.serializers:json_v1_response',
-            'application/rero+json': 'rero_ils.modules.acquisition.acq_accounts.serializers:json_acq_account_response'
+            "application/json": "rero_ils.modules.serializers:json_v1_response",
+            "application/rero+json": "rero_ils.modules.acquisition.acq_accounts.serializers:json_acq_account_response",
         },
         record_serializers_aliases={
-            'json': 'application/json',
-            'rero': 'application/rero+json'
+            "json": "application/json",
+            "rero": "application/rero+json",
         },
         search_serializers={
-            'application/json': 'rero_ils.modules.serializers:json_v1_search',
-            'application/rero+json': 'rero_ils.modules.acquisition.acq_accounts.serializers:json_acq_account_search'
-        },
-        search_serializers_aliases={
-            'json': 'application/json'
+            "application/json": "rero_ils.modules.serializers:json_v1_search",
+            "application/rero+json": "rero_ils.modules.acquisition.acq_accounts.serializers:json_acq_account_search",
         },
+        search_serializers_aliases={"json": "application/json"},
         record_loaders={
-            'application/json': lambda: AcqAccount(request.get_json()),
-        },
-        record_class='rero_ils.modules.acquisition.acq_accounts.api:AcqAccount',
-        list_route='/acq_accounts/',
-        item_route=('/acq_accounts/'),
-        default_media_type='application/json',
+            "application/json": lambda: AcqAccount(request.get_json()),
+        },
+        record_class="rero_ils.modules.acquisition.acq_accounts.api:AcqAccount",
+        list_route="/acq_accounts/",
+        item_route=(
+            "/acq_accounts/"
+        ),
+        default_media_type="application/json",
         max_result_window=MAX_RESULT_WINDOW,
-        search_factory_imp='rero_ils.query:acq_accounts_search_factory',
-        list_permission_factory_imp=lambda record: AcqAccountPermissionPolicy('search', record=record),
-        read_permission_factory_imp=lambda record: AcqAccountPermissionPolicy('read', record=record),
-        create_permission_factory_imp=lambda record: AcqAccountPermissionPolicy('create', record=record),
-        update_permission_factory_imp=lambda record: AcqAccountPermissionPolicy('update', record=record),
-        delete_permission_factory_imp=lambda record: AcqAccountPermissionPolicy('delete', record=record)
+        search_factory_imp="rero_ils.query:acq_accounts_search_factory",
+        list_permission_factory_imp=lambda record: AcqAccountPermissionPolicy(
+            "search", record=record
+        ),
+        read_permission_factory_imp=lambda record: AcqAccountPermissionPolicy(
+            "read", record=record
+        ),
+        create_permission_factory_imp=lambda record: AcqAccountPermissionPolicy(
+            "create", record=record
+        ),
+        update_permission_factory_imp=lambda record: AcqAccountPermissionPolicy(
+            "update", record=record
+        ),
+        delete_permission_factory_imp=lambda record: AcqAccountPermissionPolicy(
+            "delete", record=record
+        ),
     ),
     budg=dict(
-        pid_type='budg',
-        pid_minter='budget_id',
-        pid_fetcher='budget_id',
-        search_class='rero_ils.modules.acquisition.budgets.api:BudgetsSearch',
-        search_index='budgets',
-        indexer_class='rero_ils.modules.acquisition.budgets.api:BudgetsIndexer',
+        pid_type="budg",
+        pid_minter="budget_id",
pid_fetcher="budget_id", + search_class="rero_ils.modules.acquisition.budgets.api:BudgetsSearch", + search_index="budgets", + indexer_class="rero_ils.modules.acquisition.budgets.api:BudgetsIndexer", record_serializers={ - 'application/json': 'rero_ils.modules.serializers:json_v1_response', - 'application/rero+json': 'rero_ils.modules.acquisition.budgets.serializers:json_budg_record' + "application/json": "rero_ils.modules.serializers:json_v1_response", + "application/rero+json": "rero_ils.modules.acquisition.budgets.serializers:json_budg_record", }, record_serializers_aliases={ - 'json': 'application/json', - 'rero': 'application/rero+json', + "json": "application/json", + "rero": "application/rero+json", }, search_serializers={ - 'application/json': 'rero_ils.modules.serializers:json_v1_search' + "application/json": "rero_ils.modules.serializers:json_v1_search" }, record_loaders={ - 'application/json': lambda: Budget(request.get_json()), + "application/json": lambda: Budget(request.get_json()), }, - record_class='rero_ils.modules.acquisition.budgets.api:Budget', - list_route='/budgets/', - item_route=('/budgets/'), - default_media_type='application/json', + record_class="rero_ils.modules.acquisition.budgets.api:Budget", + list_route="/budgets/", + item_route=( + "/budgets/" + ), + default_media_type="application/json", max_result_window=MAX_RESULT_WINDOW, - search_factory_imp='rero_ils.query:organisation_search_factory', - list_permission_factory_imp=lambda record: BudgetPermissionPolicy('search', record=record), - read_permission_factory_imp=lambda record: BudgetPermissionPolicy('read', record=record), - create_permission_factory_imp=lambda record: BudgetPermissionPolicy('create', record=record), - update_permission_factory_imp=lambda record: BudgetPermissionPolicy('update', record=record), - delete_permission_factory_imp=lambda record: BudgetPermissionPolicy('delete', record=record) + search_factory_imp="rero_ils.query:organisation_search_factory", + list_permission_factory_imp=lambda record: BudgetPermissionPolicy( + "search", record=record + ), + read_permission_factory_imp=lambda record: BudgetPermissionPolicy( + "read", record=record + ), + create_permission_factory_imp=lambda record: BudgetPermissionPolicy( + "create", record=record + ), + update_permission_factory_imp=lambda record: BudgetPermissionPolicy( + "update", record=record + ), + delete_permission_factory_imp=lambda record: BudgetPermissionPolicy( + "delete", record=record + ), ), acor=dict( - pid_type='acor', - pid_minter='acq_order_id', - pid_fetcher='acq_order_id', - search_class='rero_ils.modules.acquisition.acq_orders.api:AcqOrdersSearch', - search_index='acq_orders', - indexer_class='rero_ils.modules.acquisition.acq_orders.api:AcqOrdersIndexer', + pid_type="acor", + pid_minter="acq_order_id", + pid_fetcher="acq_order_id", + search_class="rero_ils.modules.acquisition.acq_orders.api:AcqOrdersSearch", + search_index="acq_orders", + indexer_class="rero_ils.modules.acquisition.acq_orders.api:AcqOrdersIndexer", record_serializers={ - 'application/json': 'rero_ils.modules.serializers:json_v1_response', - 'application/rero+json': 'rero_ils.modules.acquisition.acq_orders.serializers:json_acor_record' + "application/json": "rero_ils.modules.serializers:json_v1_response", + "application/rero+json": "rero_ils.modules.acquisition.acq_orders.serializers:json_acor_record", }, record_serializers_aliases={ - 'json': 'application/json', - 'rero': 'application/rero+json', + "json": "application/json", + "rero": 
"application/rero+json", }, search_serializers={ - 'application/json': 'rero_ils.modules.serializers:json_v1_search', - 'application/rero+json': 'rero_ils.modules.acquisition.acq_orders.serializers:json_acor_search' + "application/json": "rero_ils.modules.serializers:json_v1_search", + "application/rero+json": "rero_ils.modules.acquisition.acq_orders.serializers:json_acor_search", }, search_serializers_aliases={ - 'json': 'application/json', - 'rero': 'application/rero+json' + "json": "application/json", + "rero": "application/rero+json", }, record_loaders={ - 'application/json': lambda: AcqOrder(request.get_json()), + "application/json": lambda: AcqOrder(request.get_json()), }, - record_class='rero_ils.modules.acquisition.acq_orders.api:AcqOrder', - list_route='/acq_orders/', - item_route=('/acq_orders/'), - default_media_type='application/json', + record_class="rero_ils.modules.acquisition.acq_orders.api:AcqOrder", + list_route="/acq_orders/", + item_route=( + "/acq_orders/" + ), + default_media_type="application/json", max_result_window=MAX_RESULT_WINDOW, - search_factory_imp='rero_ils.query:organisation_search_factory', - list_permission_factory_imp=lambda record: AcqOrderPermissionPolicy('search', record=record), - read_permission_factory_imp=lambda record: AcqOrderPermissionPolicy('read', record=record), - create_permission_factory_imp=lambda record: AcqOrderPermissionPolicy('create', record=record), - update_permission_factory_imp=lambda record: AcqOrderPermissionPolicy('update', record=record), - delete_permission_factory_imp=lambda record: AcqOrderPermissionPolicy('delete', record=record) + search_factory_imp="rero_ils.query:organisation_search_factory", + list_permission_factory_imp=lambda record: AcqOrderPermissionPolicy( + "search", record=record + ), + read_permission_factory_imp=lambda record: AcqOrderPermissionPolicy( + "read", record=record + ), + create_permission_factory_imp=lambda record: AcqOrderPermissionPolicy( + "create", record=record + ), + update_permission_factory_imp=lambda record: AcqOrderPermissionPolicy( + "update", record=record + ), + delete_permission_factory_imp=lambda record: AcqOrderPermissionPolicy( + "delete", record=record + ), ), acol=dict( - pid_type='acol', - pid_minter='acq_order_line_id', - pid_fetcher='acq_order_line_id', - search_class='rero_ils.modules.acquisition.acq_order_lines.api:AcqOrderLinesSearch', - search_index='acq_order_lines', - indexer_class=('rero_ils.modules.acquisition.acq_order_lines.api:' - 'AcqOrderLinesIndexer'), + pid_type="acol", + pid_minter="acq_order_line_id", + pid_fetcher="acq_order_line_id", + search_class="rero_ils.modules.acquisition.acq_order_lines.api:AcqOrderLinesSearch", + search_index="acq_order_lines", + indexer_class=( + "rero_ils.modules.acquisition.acq_order_lines.api:" "AcqOrderLinesIndexer" + ), record_serializers={ - 'application/json': 'rero_ils.modules.serializers:json_v1_response', - 'application/rero+json': 'rero_ils.modules.acquisition.acq_order_lines.serializers:json_acol_record' + "application/json": "rero_ils.modules.serializers:json_v1_response", + "application/rero+json": "rero_ils.modules.acquisition.acq_order_lines.serializers:json_acol_record", }, record_serializers_aliases={ - 'json': 'application/json', - 'rero': 'application/rero+json', + "json": "application/json", + "rero": "application/rero+json", }, search_serializers={ - 'application/json': 'rero_ils.modules.serializers:json_v1_search' + "application/json": "rero_ils.modules.serializers:json_v1_search" }, record_loaders={ - 
-            'application/json': lambda: AcqOrderLine(request.get_json()),
-        },
-        record_class='rero_ils.modules.acquisition.acq_order_lines.api:AcqOrderLine',
-        list_route='/acq_order_lines/',
-        item_route=('/acq_order_lines/'),
-        default_media_type='application/json',
+            "application/json": lambda: AcqOrderLine(request.get_json()),
+        },
+        record_class="rero_ils.modules.acquisition.acq_order_lines.api:AcqOrderLine",
+        list_route="/acq_order_lines/",
+        item_route=(
+            "/acq_order_lines/"
+        ),
+        default_media_type="application/json",
         max_result_window=MAX_RESULT_WINDOW,
-        search_factory_imp='rero_ils.query:organisation_search_factory',
-        list_permission_factory_imp=lambda record: AcqOrderLinePermissionPolicy('search', record=record),
-        read_permission_factory_imp=lambda record: AcqOrderLinePermissionPolicy('read', record=record),
-        create_permission_factory_imp=lambda record: AcqOrderLinePermissionPolicy('create', record=record),
-        update_permission_factory_imp=lambda record: AcqOrderLinePermissionPolicy('update', record=record),
-        delete_permission_factory_imp=lambda record: AcqOrderLinePermissionPolicy('delete', record=record)
+        search_factory_imp="rero_ils.query:organisation_search_factory",
+        list_permission_factory_imp=lambda record: AcqOrderLinePermissionPolicy(
+            "search", record=record
+        ),
+        read_permission_factory_imp=lambda record: AcqOrderLinePermissionPolicy(
+            "read", record=record
+        ),
+        create_permission_factory_imp=lambda record: AcqOrderLinePermissionPolicy(
+            "create", record=record
+        ),
+        update_permission_factory_imp=lambda record: AcqOrderLinePermissionPolicy(
+            "update", record=record
+        ),
+        delete_permission_factory_imp=lambda record: AcqOrderLinePermissionPolicy(
+            "delete", record=record
+        ),
     ),
     acre=dict(
-        pid_type='acre',
-        pid_minter='acq_receipt_id',
-        pid_fetcher='acq_receipt_id',
-        search_class='rero_ils.modules.acquisition.acq_receipts.api:AcqReceiptsSearch',
-        search_index='acq_receipts',
-        indexer_class='rero_ils.modules.acquisition.acq_receipts.api:AcqReceiptsIndexer',
+        pid_type="acre",
+        pid_minter="acq_receipt_id",
+        pid_fetcher="acq_receipt_id",
+        search_class="rero_ils.modules.acquisition.acq_receipts.api:AcqReceiptsSearch",
+        search_index="acq_receipts",
+        indexer_class="rero_ils.modules.acquisition.acq_receipts.api:AcqReceiptsIndexer",
         record_serializers={
-            'application/json': 'rero_ils.modules.serializers:json_v1_response',
-            'application/rero+json': 'rero_ils.modules.acquisition.acq_receipts.serializers:json_acre_record'
+            "application/json": "rero_ils.modules.serializers:json_v1_response",
+            "application/rero+json": "rero_ils.modules.acquisition.acq_receipts.serializers:json_acre_record",
         },
         record_serializers_aliases={
-            'json': 'application/json',
-            'rero': 'application/rero+json',
+            "json": "application/json",
+            "rero": "application/rero+json",
         },
         search_serializers={
-            'application/json': 'rero_ils.modules.serializers:json_v1_search',
+            "application/json": "rero_ils.modules.serializers:json_v1_search",
         },
         record_loaders={
-            'application/json': lambda: AcqReceipt(request.get_json()),
-        },
-        record_class='rero_ils.modules.acquisition.acq_receipts.api:AcqReceipt',
-        list_route='/acq_receipts/',
-        item_route=('/acq_receipts/'),
-        default_media_type='application/json',
+            "application/json": lambda: AcqReceipt(request.get_json()),
+        },
+        record_class="rero_ils.modules.acquisition.acq_receipts.api:AcqReceipt",
+        list_route="/acq_receipts/",
+        item_route=(
+            "/acq_receipts/"
+        ),
+        default_media_type="application/json",
         max_result_window=MAX_RESULT_WINDOW,
-        search_factory_imp='rero_ils.query:organisation_search_factory',
-        list_permission_factory_imp=lambda record: AcqReceiptPermissionPolicy('search', record=record),
-        read_permission_factory_imp=lambda record: AcqReceiptPermissionPolicy('read', record=record),
-        create_permission_factory_imp=lambda record: AcqReceiptPermissionPolicy('create', record=record),
-        update_permission_factory_imp=lambda record: AcqReceiptPermissionPolicy('update', record=record),
-        delete_permission_factory_imp=lambda record: AcqReceiptPermissionPolicy('delete', record=record)
+        search_factory_imp="rero_ils.query:organisation_search_factory",
+        list_permission_factory_imp=lambda record: AcqReceiptPermissionPolicy(
+            "search", record=record
+        ),
+        read_permission_factory_imp=lambda record: AcqReceiptPermissionPolicy(
+            "read", record=record
+        ),
+        create_permission_factory_imp=lambda record: AcqReceiptPermissionPolicy(
+            "create", record=record
+        ),
+        update_permission_factory_imp=lambda record: AcqReceiptPermissionPolicy(
+            "update", record=record
+        ),
+        delete_permission_factory_imp=lambda record: AcqReceiptPermissionPolicy(
+            "delete", record=record
+        ),
     ),
     acrl=dict(
-        pid_type='acrl',
-        pid_minter='acq_receipt_line_id',
-        pid_fetcher='acq_receipt_line_id',
-        search_class='rero_ils.modules.acquisition.acq_receipt_lines.api:AcqReceiptLinesSearch',
-        search_index='acq_receipt_lines',
-        indexer_class='rero_ils.modules.acquisition.acq_receipt_lines.api:AcqReceiptLinesIndexer',
+        pid_type="acrl",
+        pid_minter="acq_receipt_line_id",
+        pid_fetcher="acq_receipt_line_id",
+        search_class="rero_ils.modules.acquisition.acq_receipt_lines.api:AcqReceiptLinesSearch",
+        search_index="acq_receipt_lines",
+        indexer_class="rero_ils.modules.acquisition.acq_receipt_lines.api:AcqReceiptLinesIndexer",
         record_serializers={
-            'application/json': 'rero_ils.modules.serializers:json_v1_response',
-            'application/rero+json': 'rero_ils.modules.acquisition.acq_receipt_lines.serializers:json_acrl_record'
+            "application/json": "rero_ils.modules.serializers:json_v1_response",
+            "application/rero+json": "rero_ils.modules.acquisition.acq_receipt_lines.serializers:json_acrl_record",
         },
         record_serializers_aliases={
-            'json': 'application/json',
-            'rero': 'application/rero+json'
+            "json": "application/json",
+            "rero": "application/rero+json",
         },
         search_serializers={
-            'application/json': 'rero_ils.modules.serializers:json_v1_search',
-            'application/rero+json': 'rero_ils.modules.acquisition.acq_receipt_lines.serializers:json_acrl_search'
+            "application/json": "rero_ils.modules.serializers:json_v1_search",
+            "application/rero+json": "rero_ils.modules.acquisition.acq_receipt_lines.serializers:json_acrl_search",
         },
         search_serializers_aliases={
-            'json': 'application/json',
-            'rero': 'application/rero+json'
+            "json": "application/json",
+            "rero": "application/rero+json",
         },
         record_loaders={
-            'application/json': lambda: AcqReceiptLine(request.get_json()),
-        },
-        record_class='rero_ils.modules.acquisition.acq_receipt_lines.api:AcqReceiptLine',
-        list_route='/acq_receipt_lines/',
-        item_route=('/acq_receipt_lines/'),
-        default_media_type='application/json',
+            "application/json": lambda: AcqReceiptLine(request.get_json()),
+        },
+        record_class="rero_ils.modules.acquisition.acq_receipt_lines.api:AcqReceiptLine",
+        list_route="/acq_receipt_lines/",
+        item_route=(
+            "/acq_receipt_lines/"
+        ),
+        default_media_type="application/json",
         max_result_window=MAX_RESULT_WINDOW,
-        search_factory_imp='rero_ils.query:organisation_search_factory',
-        list_permission_factory_imp=lambda record: AcqReceiptLinePermissionPolicy('search', record=record),
-        read_permission_factory_imp=lambda record: AcqReceiptLinePermissionPolicy('read', record=record),
-        create_permission_factory_imp=lambda record: AcqReceiptLinePermissionPolicy('create', record=record),
-        update_permission_factory_imp=lambda record: AcqReceiptLinePermissionPolicy('update', record=record),
-        delete_permission_factory_imp=lambda record: AcqReceiptLinePermissionPolicy('delete', record=record)
+        search_factory_imp="rero_ils.query:organisation_search_factory",
+        list_permission_factory_imp=lambda record: AcqReceiptLinePermissionPolicy(
+            "search", record=record
+        ),
+        read_permission_factory_imp=lambda record: AcqReceiptLinePermissionPolicy(
+            "read", record=record
+        ),
+        create_permission_factory_imp=lambda record: AcqReceiptLinePermissionPolicy(
+            "create", record=record
+        ),
+        update_permission_factory_imp=lambda record: AcqReceiptLinePermissionPolicy(
+            "update", record=record
+        ),
+        delete_permission_factory_imp=lambda record: AcqReceiptLinePermissionPolicy(
+            "delete", record=record
+        ),
     ),
     acin=dict(
-        pid_type='acin',
-        pid_minter='acq_invoice_id',
-        pid_fetcher='acq_invoice_id',
-        search_class='rero_ils.modules.acquisition.acq_invoices.api:AcquisitionInvoicesSearch',
-        search_index='acq_invoices',
-        indexer_class='rero_ils.modules.acquisition.acq_invoices.api:AcquisitionInvoicesIndexer',
+        pid_type="acin",
+        pid_minter="acq_invoice_id",
+        pid_fetcher="acq_invoice_id",
+        search_class="rero_ils.modules.acquisition.acq_invoices.api:AcquisitionInvoicesSearch",
+        search_index="acq_invoices",
+        indexer_class="rero_ils.modules.acquisition.acq_invoices.api:AcquisitionInvoicesIndexer",
         record_serializers={
-            'application/json': 'rero_ils.modules.serializers:json_v1_response',
-            'application/rero+json': 'rero_ils.modules.acquisition.acq_invoices.serializers:json_acq_invoice_record'
+            "application/json": "rero_ils.modules.serializers:json_v1_response",
+            "application/rero+json": "rero_ils.modules.acquisition.acq_invoices.serializers:json_acq_invoice_record",
         },
         record_serializers_aliases={
-            'json': 'application/json',
+            "json": "application/json",
         },
         search_serializers={
-            'application/json': 'rero_ils.modules.serializers:json_v1_search',
-            'application/rero+json': 'rero_ils.modules.acquisition.acq_invoices.serializers:json_acq_invoice_search'
+            "application/json": "rero_ils.modules.serializers:json_v1_search",
+            "application/rero+json": "rero_ils.modules.acquisition.acq_invoices.serializers:json_acq_invoice_search",
        },
         record_loaders={
-            'application/json': lambda: AcquisitionInvoice(request.get_json()),
-        },
-        record_class='rero_ils.modules.acquisition.acq_invoices.api:AcquisitionInvoice',
-        list_route='/acq_invoices/',
-        item_route=('/acq_invoices/'),
-        default_media_type='application/json',
+            "application/json": lambda: AcquisitionInvoice(request.get_json()),
+        },
+        record_class="rero_ils.modules.acquisition.acq_invoices.api:AcquisitionInvoice",
+        list_route="/acq_invoices/",
+        item_route=(
+            "/acq_invoices/"
+        ),
+        default_media_type="application/json",
         max_result_window=MAX_RESULT_WINDOW,
-        search_factory_imp='rero_ils.query:organisation_search_factory',
-        list_permission_factory_imp=lambda record: AcqInvoicePermissionPolicy('search', record=record),
-        read_permission_factory_imp=lambda record: AcqInvoicePermissionPolicy('read', record=record),
-        create_permission_factory_imp=lambda record: AcqInvoicePermissionPolicy('create', record=record),
-        update_permission_factory_imp=lambda record: AcqInvoicePermissionPolicy('update', record=record),
-        delete_permission_factory_imp=lambda record: AcqInvoicePermissionPolicy('delete', record=record)
+        search_factory_imp="rero_ils.query:organisation_search_factory",
+        list_permission_factory_imp=lambda record: AcqInvoicePermissionPolicy(
+            "search", record=record
+        ),
+        read_permission_factory_imp=lambda record: AcqInvoicePermissionPolicy(
+            "read", record=record
+        ),
+        create_permission_factory_imp=lambda record: AcqInvoicePermissionPolicy(
+            "create", record=record
+        ),
+        update_permission_factory_imp=lambda record: AcqInvoicePermissionPolicy(
+            "update", record=record
+        ),
+        delete_permission_factory_imp=lambda record: AcqInvoicePermissionPolicy(
+            "delete", record=record
+        ),
     ),
     tmpl=dict(
-        pid_type='tmpl',
-        pid_minter='template_id',
-        pid_fetcher='template_id',
-        search_class='rero_ils.modules.templates.api:TemplatesSearch',
-        search_index='templates',
-        indexer_class='rero_ils.modules.templates.api:TemplatesIndexer',
+        pid_type="tmpl",
+        pid_minter="template_id",
+        pid_fetcher="template_id",
+        search_class="rero_ils.modules.templates.api:TemplatesSearch",
+        search_index="templates",
+        indexer_class="rero_ils.modules.templates.api:TemplatesIndexer",
         record_serializers={
-            'application/json': 'rero_ils.modules.serializers:json_v1_response'
+            "application/json": "rero_ils.modules.serializers:json_v1_response"
         },
         record_serializers_aliases={
-            'json': 'application/json',
+            "json": "application/json",
         },
         search_serializers={
-            'application/json': 'rero_ils.modules.serializers:json_v1_search'
+            "application/json": "rero_ils.modules.serializers:json_v1_search"
         },
         record_loaders={
-            'application/json': 'rero_ils.modules.templates.loaders:json_v1'
+            "application/json": "rero_ils.modules.templates.loaders:json_v1"
         },
-        list_route='/templates/',
-        record_class='rero_ils.modules.templates.api:Template',
-        item_route='/templates/',
-        default_media_type='application/json',
+        list_route="/templates/",
+        record_class="rero_ils.modules.templates.api:Template",
+        item_route="/templates/",
+        default_media_type="application/json",
         max_result_window=MAX_RESULT_WINDOW,
-        search_factory_imp='rero_ils.query:templates_search_factory',
-        list_permission_factory_imp=lambda record: TemplatePermissionPolicy('search', record=record),
-        read_permission_factory_imp=lambda record: TemplatePermissionPolicy('read', record=record),
-        create_permission_factory_imp=lambda record: TemplatePermissionPolicy('create', record=record),
-        update_permission_factory_imp=lambda record: TemplatePermissionPolicy('update', record=record),
-        delete_permission_factory_imp=lambda record: TemplatePermissionPolicy('delete', record=record)
+        search_factory_imp="rero_ils.query:templates_search_factory",
+        list_permission_factory_imp=lambda record: TemplatePermissionPolicy(
+            "search", record=record
+        ),
+        read_permission_factory_imp=lambda record: TemplatePermissionPolicy(
+            "read", record=record
+        ),
+        create_permission_factory_imp=lambda record: TemplatePermissionPolicy(
+            "create", record=record
+        ),
+        update_permission_factory_imp=lambda record: TemplatePermissionPolicy(
+            "update", record=record
+        ),
+        delete_permission_factory_imp=lambda record: TemplatePermissionPolicy(
+            "delete", record=record
+        ),
     ),
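Every endpoint caps paging with max_result_window=MAX_RESULT_WINDOW, mirroring Elasticsearch's index.max_result_window safeguard: a paged search must keep from + size within the cap. A small guard clause shows the arithmetic (the value 10000 is only Elasticsearch's default and an assumption here; the actual constant is defined elsewhere in this file):

    MAX_RESULT_WINDOW = 10000  # assumed value for the sketch

    def es_offset(page, size):
        # Elasticsearch rejects searches where from + size > max_result_window.
        offset = (page - 1) * size
        if offset + size > MAX_RESULT_WINDOW:
            raise ValueError("beyond the result window; use search_after/scroll")
        return offset

    assert es_offset(page=3, size=20) == 40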
search_index="operation_logs", record_serializers={ - 'application/json': 'rero_ils.modules.serializers:json_v1_response' + "application/json": "rero_ils.modules.serializers:json_v1_response" }, record_serializers_aliases={ - 'json': 'application/json', - 'rero': 'application/rero+json' + "json": "application/json", + "rero": "application/rero+json", }, search_serializers={ - 'application/json': 'rero_ils.modules.serializers:json_v1_search', - 'application/rero+json': 'rero_ils.modules.operation_logs.serializers:json_oplogs_search' + "application/json": "rero_ils.modules.serializers:json_v1_search", + "application/rero+json": "rero_ils.modules.operation_logs.serializers:json_oplogs_search", }, record_loaders={ - 'application/json': lambda: OperationLog(request.get_json()), + "application/json": lambda: OperationLog(request.get_json()), }, - record_class='rero_ils.modules.operation_logs.api:OperationLog', - list_route='/operation_logs/', + record_class="rero_ils.modules.operation_logs.api:OperationLog", + list_route="/operation_logs/", # TODO: create a converter for es id, not used for the moment. - item_route='/operation_logs/', - default_media_type='application/json', + item_route="/operation_logs/", + default_media_type="application/json", max_result_window=MAX_RESULT_WINDOW, - search_factory_imp='rero_ils.query:operation_logs_search_factory', - list_permission_factory_imp=lambda record: OperationLogPermissionPolicy('search', record=record), - read_permission_factory_imp=lambda record: OperationLogPermissionPolicy('read', record=record), - create_permission_factory_imp=lambda record: OperationLogPermissionPolicy('create', record=record), - update_permission_factory_imp=lambda record: OperationLogPermissionPolicy('update', record=record), - delete_permission_factory_imp=lambda record: OperationLogPermissionPolicy('delete', record=record) - ) + search_factory_imp="rero_ils.query:operation_logs_search_factory", + list_permission_factory_imp=lambda record: OperationLogPermissionPolicy( + "search", record=record + ), + read_permission_factory_imp=lambda record: OperationLogPermissionPolicy( + "read", record=record + ), + create_permission_factory_imp=lambda record: OperationLogPermissionPolicy( + "create", record=record + ), + update_permission_factory_imp=lambda record: OperationLogPermissionPolicy( + "update", record=record + ), + delete_permission_factory_imp=lambda record: OperationLogPermissionPolicy( + "delete", record=record + ), + ), ) # Default view code for all organisations view # TODO: Should be taken into angular -RERO_ILS_SEARCH_GLOBAL_VIEW_CODE = 'global' -RERO_ILS_SEARCH_GLOBAL_NAME = _('Global catalog') +RERO_ILS_SEARCH_GLOBAL_VIEW_CODE = "global" +RERO_ILS_SEARCH_GLOBAL_NAME = _("Global catalog") # Default number of results in facet RERO_ILS_DEFAULT_AGGREGATION_SIZE = 30 # Number of aggregation by index name RERO_ILS_AGGREGATION_SIZE = { - 'documents': 50, - 'organisations': 10, - 'collections': 20, - 'entities': 20 + "documents": 50, + "organisations": 10, + "collections": 20, + "entities": 20, } -DOCUMENTS_AGGREGATION_SIZE = RERO_ILS_AGGREGATION_SIZE.get('documents', RERO_ILS_DEFAULT_AGGREGATION_SIZE) -PTRE_AGGREGATION_SIZE = RERO_ILS_AGGREGATION_SIZE.get('patron_transaction_events', RERO_ILS_DEFAULT_AGGREGATION_SIZE) -ACQ_ORDER_AGGREGATION_SIZE = RERO_ILS_AGGREGATION_SIZE.get('acq_orders', RERO_ILS_DEFAULT_AGGREGATION_SIZE) -ENTITIES_AGGREGATION_SIZE = RERO_ILS_AGGREGATION_SIZE.get('entities', RERO_ILS_DEFAULT_AGGREGATION_SIZE) +DOCUMENTS_AGGREGATION_SIZE = 
 RECORDS_REST_FACETS = dict(
     documents=dict(
         i18n_aggs=dict(
             author=dict(
-                en=dict(terms=dict(field='facet_contribution_en', size=DOCUMENTS_AGGREGATION_SIZE)),
-                fr=dict(terms=dict(field='facet_contribution_fr', size=DOCUMENTS_AGGREGATION_SIZE)),
-                de=dict(terms=dict(field='facet_contribution_de', size=DOCUMENTS_AGGREGATION_SIZE)),
-                it=dict(terms=dict(field='facet_contribution_it', size=DOCUMENTS_AGGREGATION_SIZE)),
+                en=dict(
+                    terms=dict(
+                        field="facet_contribution_en", size=DOCUMENTS_AGGREGATION_SIZE
+                    )
+                ),
+                fr=dict(
+                    terms=dict(
+                        field="facet_contribution_fr", size=DOCUMENTS_AGGREGATION_SIZE
+                    )
+                ),
+                de=dict(
+                    terms=dict(
+                        field="facet_contribution_de", size=DOCUMENTS_AGGREGATION_SIZE
+                    )
+                ),
+                it=dict(
+                    terms=dict(
+                        field="facet_contribution_it", size=DOCUMENTS_AGGREGATION_SIZE
+                    )
+                ),
             ),
             subject=dict(
-                en=dict(terms=dict(field='facet_subject_en', size=DOCUMENTS_AGGREGATION_SIZE)),
-                fr=dict(terms=dict(field='facet_subject_fr', size=DOCUMENTS_AGGREGATION_SIZE)),
-                de=dict(terms=dict(field='facet_subject_de', size=DOCUMENTS_AGGREGATION_SIZE)),
-                it=dict(terms=dict(field='facet_subject_it', size=DOCUMENTS_AGGREGATION_SIZE)),
+                en=dict(
+                    terms=dict(
+                        field="facet_subject_en", size=DOCUMENTS_AGGREGATION_SIZE
+                    )
+                ),
+                fr=dict(
+                    terms=dict(
+                        field="facet_subject_fr", size=DOCUMENTS_AGGREGATION_SIZE
+                    )
+                ),
+                de=dict(
+                    terms=dict(
+                        field="facet_subject_de", size=DOCUMENTS_AGGREGATION_SIZE
+                    )
+                ),
+                it=dict(
+                    terms=dict(
+                        field="facet_subject_it", size=DOCUMENTS_AGGREGATION_SIZE
+                    )
+                ),
             ),
             genreForm=dict(
-                en=dict(terms=dict(field='facet_genre_form_en', size=DOCUMENTS_AGGREGATION_SIZE)),
-                fr=dict(terms=dict(field='facet_genre_form_fr', size=DOCUMENTS_AGGREGATION_SIZE)),
-                de=dict(terms=dict(field='facet_genre_form_de', size=DOCUMENTS_AGGREGATION_SIZE)),
-                it=dict(terms=dict(field='facet_genre_form_it', size=DOCUMENTS_AGGREGATION_SIZE)),
+                en=dict(
+                    terms=dict(
+                        field="facet_genre_form_en", size=DOCUMENTS_AGGREGATION_SIZE
+                    )
+                ),
+                fr=dict(
+                    terms=dict(
+                        field="facet_genre_form_fr", size=DOCUMENTS_AGGREGATION_SIZE
+                    )
+                ),
+                de=dict(
+                    terms=dict(
+                        field="facet_genre_form_de", size=DOCUMENTS_AGGREGATION_SIZE
+                    )
+                ),
+                it=dict(
+                    terms=dict(
+                        field="facet_genre_form_it", size=DOCUMENTS_AGGREGATION_SIZE
+                    )
+                ),
             ),
         ),
         aggs=dict(
             # The organisation or library facet is defined dynamically during the query (query.py)
             document_type=dict(
-                terms=dict(field='type.main_type', size=DOCUMENTS_AGGREGATION_SIZE),
+                terms=dict(field="type.main_type", size=DOCUMENTS_AGGREGATION_SIZE),
                 aggs=dict(
-                    document_subtype=dict(terms=dict(field='type.subtype', size=DOCUMENTS_AGGREGATION_SIZE))
-                )
+                    document_subtype=dict(
+                        terms=dict(
+                            field="type.subtype", size=DOCUMENTS_AGGREGATION_SIZE
+                        )
+                    )
+                ),
+            ),
+            language=dict(
+                terms=dict(field="language.value", size=DOCUMENTS_AGGREGATION_SIZE)
             ),
-            language=dict(terms=dict(field='language.value', size=DOCUMENTS_AGGREGATION_SIZE)),
             organisation=dict(
-                terms=dict(field='organisation_pid', size=DOCUMENTS_AGGREGATION_SIZE, min_doc_count=0),
+                terms=dict(
+                    field="organisation_pid",
+                    size=DOCUMENTS_AGGREGATION_SIZE,
+                    min_doc_count=0,
+                ),
                 aggs=dict(
                     library=dict(
-                        terms=dict(field='library_pid', size=DOCUMENTS_AGGREGATION_SIZE, min_doc_count=0),
+                        terms=dict(
+                            field="library_pid",
+                            size=DOCUMENTS_AGGREGATION_SIZE,
+                            min_doc_count=0,
+                        ),
                         aggs=dict(
-                            location=dict(terms=dict(field='location_pid', size=DOCUMENTS_AGGREGATION_SIZE, min_doc_count=0))
-                        )
+                            location=dict(
+                                terms=dict(
+                                    field="location_pid",
+                                    size=DOCUMENTS_AGGREGATION_SIZE,
+                                    min_doc_count=0,
+                                )
+                            )
+                        ),
                     )
-                )
+                ),
+            ),
+            status=dict(
+                terms=dict(
+                    field="holdings.items.status", size=DOCUMENTS_AGGREGATION_SIZE
+                )
+            ),
+            intendedAudience=dict(
+                terms=dict(
+                    field="intendedAudience.value", size=DOCUMENTS_AGGREGATION_SIZE
                 )
             ),
-            status=dict(terms=dict(field='holdings.items.status', size=DOCUMENTS_AGGREGATION_SIZE)),
-            intendedAudience=dict(terms=dict(field='intendedAudience.value', size=DOCUMENTS_AGGREGATION_SIZE)),
             year=dict(
                 filter=dict(bool=dict(filter=[])),
                 aggs=dict(
-                    year_min=dict(min=dict(field='provisionActivity.startDate')),
-                    year_max=dict(max=dict(field='provisionActivity.startDate'))
-                )
+                    year_min=dict(min=dict(field="provisionActivity.startDate")),
+                    year_max=dict(max=dict(field="provisionActivity.startDate")),
+                ),
             ),
             acquisition=dict(
-                nested=dict(path='holdings.items.acquisition'),
+                nested=dict(path="holdings.items.acquisition"),
                 aggs=dict(
-                    date_min=dict(min=dict(field='holdings.items.acquisition.date', format='yyyy-MM-dd')),
-                    date_max=dict(max=dict(field='holdings.items.acquisition.date', format='yyyy-MM-dd'))
-                )
+                    date_min=dict(
+                        min=dict(
+                            field="holdings.items.acquisition.date", format="yyyy-MM-dd"
+                        )
+                    ),
+                    date_max=dict(
+                        max=dict(
+                            field="holdings.items.acquisition.date", format="yyyy-MM-dd"
+                        )
+                    ),
+                ),
             ),
             fiction_statement=dict(
                 terms=dict(field="fiction_statement", size=DOCUMENTS_AGGREGATION_SIZE)
             ),
         ),
         filters={
-            _('online'): or_terms_filter_by_criteria({
-                'electronicLocator.type': ['versionOfResource', 'resource'],
-                'holdings.holdings_type': ['electronic'],
-                '_exists_': 'files'
-            }),
-            _('not_online'): or_terms_filter_by_criteria({
-                'holdings.holdings_type': ['standard', 'serial']
-            }),
-            _('author'): and_i18n_term_filter('facet_contribution'),
-            _('subject'): and_i18n_term_filter('facet_subject'),
+            _("online"): or_terms_filter_by_criteria(
+                {
+                    "electronicLocator.type": ["versionOfResource", "resource"],
+                    "holdings.holdings_type": ["electronic"],
+                    "_exists_": "files",
+                }
+            ),
+            _("not_online"): or_terms_filter_by_criteria(
+                {"holdings.holdings_type": ["standard", "serial"]}
+            ),
+            _("author"): and_i18n_term_filter("facet_contribution"),
+            _("subject"): and_i18n_term_filter("facet_subject"),
             # This filter is used with timestamp
-            _('acquisition'): acquisition_filter(),
+            _("acquisition"): acquisition_filter(),
             # This filter is only used for constructed queries
             # --> Ex: &new_acquisition=2020-01-01:2021-01-01
-            _('new_acquisition'): acquisition_filter(),
-            _('identifiers'): nested_identified_filter(),
+            _("new_acquisition"): acquisition_filter(),
+            _("identifiers"): nested_identified_filter(),
         },
         post_filters={
-            _('document_type'): {
-                _('document_type'): terms_filter('type.main_type'),
-                _('document_subtype'): terms_filter('type.subtype')
+            _("document_type"): {
+                _("document_type"): terms_filter("type.main_type"),
+                _("document_subtype"): terms_filter("type.subtype"),
             },
-            _('language'): terms_filter('language.value'),
-            _('organisation'): {
-                _('organisation'): terms_filter(
-                    'organisation_pid'
-                ),
-                _('library'): terms_filter('library_pid'),
-                _('location'): terms_filter('location_pid')
+            _("language"): terms_filter("language.value"),
+            _("organisation"): {
+                _("organisation"): terms_filter("organisation_pid"),
+                _("library"): terms_filter("library_pid"),
+                _("location"): terms_filter("location_pid"),
             },
-            _('status'): terms_filter('holdings.items.status'),
-            _('genreForm'): i18n_terms_filter('facet_genre_form'),
-            _('intendedAudience'): terms_filter('intendedAudience.value'),
-            _('year'): range_filter('provisionActivity.startDate'),
-            _('fiction_statement'): terms_filter('fiction_statement')
-        }
-
+            _("status"): terms_filter("holdings.items.status"),
+            _("genreForm"): i18n_terms_filter("facet_genre_form"),
+            _("intendedAudience"): terms_filter("intendedAudience.value"),
+            _("year"): range_filter("provisionActivity.startDate"),
+            _("fiction_statement"): terms_filter("fiction_statement"),
+        },
     ),
     items=dict(
         aggs=dict(
             document_type=dict(
-                terms=dict(field='document.document_type.main_type', size=DOCUMENTS_AGGREGATION_SIZE),
+                terms=dict(
+                    field="document.document_type.main_type",
+                    size=DOCUMENTS_AGGREGATION_SIZE,
+                ),
                 aggs=dict(
-                    document_subtype=dict(terms=dict(field='document.document_type.subtype', size=DOCUMENTS_AGGREGATION_SIZE))
+                    document_subtype=dict(
+                        terms=dict(
+                            field="document.document_type.subtype",
+                            size=DOCUMENTS_AGGREGATION_SIZE,
+                        )
+                    )
+                ),
+            ),
+            library=dict(
+                terms=dict(field="library.pid", size=RERO_ILS_DEFAULT_AGGREGATION_SIZE)
+            ),
+            location=dict(
+                terms=dict(field="location.pid", size=RERO_ILS_DEFAULT_AGGREGATION_SIZE)
+            ),
+            item_type=dict(
+                terms=dict(
+                    field="item_type.pid", size=RERO_ILS_DEFAULT_AGGREGATION_SIZE
+                )
+            ),
+            temporary_location=dict(
+                terms=dict(
+                    field="temporary_location.pid",
+                    size=RERO_ILS_DEFAULT_AGGREGATION_SIZE,
+                )
+            ),
+            temporary_item_type=dict(
+                terms=dict(
+                    field="temporary_item_type.pid",
+                    size=RERO_ILS_DEFAULT_AGGREGATION_SIZE,
+                )
+            ),
+            status=dict(
+                terms=dict(field="status", size=RERO_ILS_DEFAULT_AGGREGATION_SIZE)
+            ),
+            vendor=dict(
+                terms=dict(field="vendor.pid", size=RERO_ILS_DEFAULT_AGGREGATION_SIZE)
+            ),
+            claims_count=dict(
+                terms=dict(
+                    field="issue.claims.counter", size=RERO_ILS_DEFAULT_AGGREGATION_SIZE
                 )
             ),
-            library=dict(terms=dict(field='library.pid', size=RERO_ILS_DEFAULT_AGGREGATION_SIZE)),
-            location=dict(terms=dict(field='location.pid', size=RERO_ILS_DEFAULT_AGGREGATION_SIZE)),
-            item_type=dict(terms=dict(field='item_type.pid', size=RERO_ILS_DEFAULT_AGGREGATION_SIZE)),
-            temporary_location=dict(terms=dict(field='temporary_location.pid', size=RERO_ILS_DEFAULT_AGGREGATION_SIZE)),
-            temporary_item_type=dict(terms=dict(field='temporary_item_type.pid', size=RERO_ILS_DEFAULT_AGGREGATION_SIZE)),
-            status=dict(terms=dict(field='status', size=RERO_ILS_DEFAULT_AGGREGATION_SIZE)),
-            vendor=dict(terms=dict(field='vendor.pid', size=RERO_ILS_DEFAULT_AGGREGATION_SIZE)),
-            claims_count=dict(terms=dict(field='issue.claims.counter', size=RERO_ILS_DEFAULT_AGGREGATION_SIZE)),
-            claims_date=dict(date_histogram=dict(field='issue.claims.dates', calendar_interval='1d', format='yyyy-MM-dd')),
-            current_requests=dict(max=dict(field='current_pending_requests'))
+            claims_date=dict(
+                date_histogram=dict(
+                    field="issue.claims.dates",
+                    calendar_interval="1d",
+                    format="yyyy-MM-dd",
+                )
+            ),
+            current_requests=dict(max=dict(field="current_pending_requests")),
         ),
         filters={
-            _('document_type'): and_term_filter('document.document_type.main_type'),
-            _('document_subtype'): and_term_filter('document.document_type.subtype'),
-            _('library'): and_term_filter('library.pid'),
-            _('location'): and_term_filter('location.pid'),
-            _('item_type'): and_term_filter('item_type.pid'),
-            _('temporary_item_type'): and_term_filter('temporary_item_type.pid'),
-            _('temporary_location'): and_term_filter('temporary_location.pid'),
-            _('status'): and_term_filter('status'),
-            _('vendor'): and_term_filter('vendor.pid'),
-            _('or_issue_status'): terms_filter('issue.status'), # to allow multiple filters support, in this case to filter by "late or claimed"
-            _('claims_count'): and_term_filter('issue.claims.counter'),
-            _('claims_date'): range_filter('issue.claims.dates', format='epoch_millis', start_date_math='/d', end_date_math='/d'),
-            _('current_requests'): range_filter('current_pending_requests')
-        }
+            _("document_type"): and_term_filter("document.document_type.main_type"),
+            _("document_subtype"): and_term_filter("document.document_type.subtype"),
+            _("library"): and_term_filter("library.pid"),
+            _("location"): and_term_filter("location.pid"),
+            _("item_type"): and_term_filter("item_type.pid"),
+            _("temporary_item_type"): and_term_filter("temporary_item_type.pid"),
+            _("temporary_location"): and_term_filter("temporary_location.pid"),
+            _("status"): and_term_filter("status"),
+            _("vendor"): and_term_filter("vendor.pid"),
+            _("or_issue_status"): terms_filter(
+                "issue.status"
+            ),  # to allow multiple filters support, in this case to filter by "late or claimed"
+            _("claims_count"): and_term_filter("issue.claims.counter"),
+            _("claims_date"): range_filter(
+                "issue.claims.dates",
+                format="epoch_millis",
+                start_date_math="/d",
+                end_date_math="/d",
+            ),
+            _("current_requests"): range_filter("current_pending_requests"),
+        },
     ),
     loans=dict(
         aggs=dict(
             owner_library=dict(
-                terms=dict(field='library_pid', size=DOCUMENTS_AGGREGATION_SIZE),
+                terms=dict(field="library_pid", size=DOCUMENTS_AGGREGATION_SIZE),
                 aggs=dict(
                     owner_location=dict(
-                        terms=dict(field='location_pid', size=DOCUMENTS_AGGREGATION_SIZE)
+                        terms=dict(
+                            field="location_pid", size=DOCUMENTS_AGGREGATION_SIZE
+                        )
                     )
-                )
+                ),
             ),
             pickup_library=dict(
-                terms=dict(field='pickup_library_pid', size=DOCUMENTS_AGGREGATION_SIZE),
+                terms=dict(field="pickup_library_pid", size=DOCUMENTS_AGGREGATION_SIZE),
                 aggs=dict(
                     pickup_location=dict(
-                        terms=dict(field='pickup_location_pid', size=DOCUMENTS_AGGREGATION_SIZE)
+                        terms=dict(
+                            field="pickup_location_pid", size=DOCUMENTS_AGGREGATION_SIZE
+                        )
                     )
-                )
+                ),
             ),
             transaction_library=dict(
-                terms=dict(field='transaction_library_pid', size=DOCUMENTS_AGGREGATION_SIZE),
+                terms=dict(
+                    field="transaction_library_pid", size=DOCUMENTS_AGGREGATION_SIZE
+                ),
                 aggs=dict(
                     transaction_location=dict(
-                        terms=dict(field='transaction_location_pid', size=DOCUMENTS_AGGREGATION_SIZE)
+                        terms=dict(
+                            field="transaction_location_pid",
+                            size=DOCUMENTS_AGGREGATION_SIZE,
+                        )
                     )
-                )
+                ),
             ),
             patron_type=dict(
-                terms=dict(field='patron_type_pid', size=DOCUMENTS_AGGREGATION_SIZE)
-            ),
-            status=dict(
-                terms=dict(field='state', size=DOCUMENTS_AGGREGATION_SIZE)
+                terms=dict(field="patron_type_pid", size=DOCUMENTS_AGGREGATION_SIZE)
             ),
+            status=dict(terms=dict(field="state", size=DOCUMENTS_AGGREGATION_SIZE)),
             misc_status=dict(
                 filters=dict(
                     filters=dict(
-                        overdue=dict(range=dict(end_date=dict(lt='now/d'))),
-                        expired_request=dict(range=dict(request_expire_date=dict(lt='now/d')))
+                        overdue=dict(range=dict(end_date=dict(lt="now/d"))),
+                        expired_request=dict(
+                            range=dict(request_expire_date=dict(lt="now/d"))
+                        ),
                     )
                 )
             ),
             request_expire_date=dict(
                 date_histogram=dict(
-                    field='request_expire_date',
-                    calendar_interval='1d',
-                    format='yyyy-MM-dd'
+                    field="request_expire_date",
+                    calendar_interval="1d",
+                    format="yyyy-MM-dd",
                 )
             ),
             end_date=dict(
                 date_histogram=dict(
-                    field='end_date',
-                    calendar_interval='1d',
-                    format='yyyy-MM-dd'
+                    field="end_date", calendar_interval="1d", format="yyyy-MM-dd"
                 )
-            )
+            ),
         ),
         filters={
-            _('owner_library'): and_term_filter('library_pid'),
-            _('owner_location'): and_term_filter('location_pid'),
-            _('pickup_library'): and_term_filter('pickup_library_pid'),
-            _('pickup_location'): and_term_filter('pickup_location_pid'),
-            _('transaction_library'): and_term_filter('transaction_library_pid'),
-            _('transaction_location'): and_term_filter('transaction_location_pid'),
-            _('status'): and_term_filter('state'),
-            _('misc_status'): misc_status_filter(),
-            _('patron_type'): and_term_filter('patron_type_pid'),
-            _('request_expire_date'): range_filter(
-                'request_expire_date',
-                format='epoch_millis',
-                start_date_math='/d',
-                end_date_math='/d'
-            ),
-            _('end_date'): range_filter(
-                'end_date',
-                format='epoch_millis',
-                start_date_math='/d',
-                end_date_math='/d'
-            ),
-            _('exclude_status'): exclude_terms_filter('state')
-        }
+            _("owner_library"): and_term_filter("library_pid"),
+            _("owner_location"): and_term_filter("location_pid"),
+            _("pickup_library"): and_term_filter("pickup_library_pid"),
+            _("pickup_location"): and_term_filter("pickup_location_pid"),
+            _("transaction_library"): and_term_filter("transaction_library_pid"),
+            _("transaction_location"): and_term_filter("transaction_location_pid"),
+            _("status"): and_term_filter("state"),
+            _("misc_status"): misc_status_filter(),
+            _("patron_type"): and_term_filter("patron_type_pid"),
+            _("request_expire_date"): range_filter(
+                "request_expire_date",
+                format="epoch_millis",
+                start_date_math="/d",
+                end_date_math="/d",
+            ),
+            _("end_date"): range_filter(
+                "end_date",
+                format="epoch_millis",
+                start_date_math="/d",
+                end_date_math="/d",
+            ),
+            _("exclude_status"): exclude_terms_filter("state"),
+        },
     ),
     patrons=dict(
         aggs=dict(
             roles=dict(
                 terms=dict(
-                    field='roles',
+                    field="roles",
                     # This does not take into account
                     # env variable or instance config file
                     size=RERO_ILS_AGGREGATION_SIZE.get(
-                        'patrons', RERO_ILS_DEFAULT_AGGREGATION_SIZE)
+                        "patrons", RERO_ILS_DEFAULT_AGGREGATION_SIZE
+                    ),
                 )
             ),
             city=dict(
                 terms=dict(
-                    field='facet_city',
+                    field="facet_city",
                     # This does not take into account
                     # env variable or instance config file
                     size=RERO_ILS_AGGREGATION_SIZE.get(
-                        'facet_city', RERO_ILS_DEFAULT_AGGREGATION_SIZE)
+                        "facet_city", RERO_ILS_DEFAULT_AGGREGATION_SIZE
+                    ),
                 )
             ),
             patron_type=dict(
                 terms=dict(
-                    field='patron.type.pid',
+                    field="patron.type.pid",
                     # This does not take into account
                     # env variable or instance config file
                     size=RERO_ILS_AGGREGATION_SIZE.get(
-                        'patron__type', RERO_ILS_DEFAULT_AGGREGATION_SIZE)
+                        "patron__type", RERO_ILS_DEFAULT_AGGREGATION_SIZE
+                    ),
                 )
-            )
+            ),
         ),
         filters={
-            _('roles'): and_term_filter('roles'),
-            _('city'): and_term_filter('facet_city'),
-            _('patron_type'): and_term_filter('patron.type.pid'),
-            _('blocked'): and_term_filter('patron.blocked'),
-            _('expired'): patron_expired()
+            _("roles"): and_term_filter("roles"),
+            _("city"): and_term_filter("facet_city"),
+            _("patron_type"): and_term_filter("patron.type.pid"),
+            _("blocked"): and_term_filter("patron.blocked"),
+            _("expired"): patron_expired(),
         },
     ),
     acq_accounts=dict(
         aggs=dict(
             library=dict(
                 terms=dict(
-                    field='library.pid',
+                    field="library.pid",
                     size=RERO_ILS_AGGREGATION_SIZE.get(
-                        'acq_accounts', RERO_ILS_DEFAULT_AGGREGATION_SIZE)
+                        "acq_accounts", RERO_ILS_DEFAULT_AGGREGATION_SIZE
+                    ),
                 )
             ),
             budget=dict(
                 terms=dict(
-                    field='budget',
+                    field="budget",
                     size=RERO_ILS_AGGREGATION_SIZE.get(
-                        'budget', RERO_ILS_DEFAULT_AGGREGATION_SIZE)
+                        "budget", RERO_ILS_DEFAULT_AGGREGATION_SIZE
+                    ),
                 )
-            )
+            ),
         ),
         filters={
-            _('library'): and_term_filter('library.pid'),
-            _('budget'): and_term_filter('budget')
+            _("library"): and_term_filter("library.pid"),
+            _("budget"): and_term_filter("budget"),
         },
     ),
     acq_invoices=dict(
         aggs=dict(
             library=dict(
                 terms=dict(
-                    field='library.pid',
+                    field="library.pid",
                     size=RERO_ILS_AGGREGATION_SIZE.get(
-                        'acq_invoices', RERO_ILS_DEFAULT_AGGREGATION_SIZE)
+                        "acq_invoices", RERO_ILS_DEFAULT_AGGREGATION_SIZE
+                    ),
                 )
             ),
             status=dict(
                 terms=dict(
-                    field='invoice_status',
+                    field="invoice_status",
                     size=RERO_ILS_AGGREGATION_SIZE.get(
-                        'acq_invoices', RERO_ILS_DEFAULT_AGGREGATION_SIZE)
+                        "acq_invoices", RERO_ILS_DEFAULT_AGGREGATION_SIZE
+                    ),
                 )
-            )
+            ),
         ),
         filters={
-            _('library'): and_term_filter('library.pid'),
-            _('status'): and_term_filter('invoice_status')
+            _("library"): and_term_filter("library.pid"),
+            _("status"): and_term_filter("invoice_status"),
         },
     ),
     acq_orders=dict(
         aggs=dict(
-            library=dict(terms=dict(field='library.pid', size=ACQ_ORDER_AGGREGATION_SIZE)),
-            vendor=dict(terms=dict(field='vendor.pid',size=ACQ_ORDER_AGGREGATION_SIZE)),
-            type=dict(terms=dict(field='type', size=ACQ_ORDER_AGGREGATION_SIZE)),
-            status=dict(terms=dict(field='status', size=ACQ_ORDER_AGGREGATION_SIZE)),
-            account=dict(terms=dict(field='order_lines.account.pid', size=ACQ_ORDER_AGGREGATION_SIZE)),
-            budget=dict(terms=dict(field='budget.pid', size=ACQ_ORDER_AGGREGATION_SIZE)),
+            library=dict(
+                terms=dict(field="library.pid", size=ACQ_ORDER_AGGREGATION_SIZE)
+            ),
+            vendor=dict(
+                terms=dict(field="vendor.pid", size=ACQ_ORDER_AGGREGATION_SIZE)
+            ),
+            type=dict(terms=dict(field="type", size=ACQ_ORDER_AGGREGATION_SIZE)),
+            status=dict(terms=dict(field="status", size=ACQ_ORDER_AGGREGATION_SIZE)),
+            account=dict(
+                terms=dict(
+                    field="order_lines.account.pid", size=ACQ_ORDER_AGGREGATION_SIZE
+                )
+            ),
+            budget=dict(
+                terms=dict(field="budget.pid", size=ACQ_ORDER_AGGREGATION_SIZE)
+            ),
             order_date=dict(
                 date_histogram=dict(
-                    field='order_lines.order_date',
-                    calendar_interval='1d',
-                    format='yyyy-MM-dd'
+                    field="order_lines.order_date",
+                    calendar_interval="1d",
+                    format="yyyy-MM-dd",
                 )
             ),
             receipt_date=dict(
                 date_histogram=dict(
-                    field='receipts.receipt_date',
-                    calendar_interval='1d',
-                    format='yyyy-MM-dd'
+                    field="receipts.receipt_date",
+                    calendar_interval="1d",
+                    format="yyyy-MM-dd",
                 )
-            )
+            ),
         ),
         filters={
-            _('library'): and_term_filter('library.pid'),
-            _('vendor'): and_term_filter('vendor.pid'),
-            _('type'): and_term_filter('type'),
-            _('account'): and_term_filter('order_lines.account.pid'),
-            _('budget'): and_term_filter('budget.pid'),
-            _('order_date'): range_filter(
-                'order_lines.order_date',
-                format='epoch_millis',
-                start_date_math='/d',
-                end_date_math='/d'
-            ),
-            _('receipt_date'): range_filter(
-                'receipts.receipt_date',
-                format='epoch_millis',
-                start_date_math='/d',
-                end_date_math='/d'
-            )
+            _("library"): and_term_filter("library.pid"),
+            _("vendor"): and_term_filter("vendor.pid"),
+            _("type"): and_term_filter("type"),
+            _("account"): and_term_filter("order_lines.account.pid"),
+            _("budget"): and_term_filter("budget.pid"),
+            _("order_date"): range_filter(
+                "order_lines.order_date",
+                format="epoch_millis",
+                start_date_math="/d",
+                end_date_math="/d",
+            ),
+            _("receipt_date"): range_filter(
+                "receipts.receipt_date",
+                format="epoch_millis",
+                start_date_math="/d",
+                end_date_math="/d",
+            ),
         },
         post_filters={
-            _('status'): terms_filter('status'),
-        }
+            _("status"): terms_filter("status"),
+        },
     ),
     entities=dict(
         aggs=dict(
             resource_type=dict(
-                terms=dict(
-                    field='resource_type',
-                    size=ENTITIES_AGGREGATION_SIZE
-                )
-            ),
-            type=dict(
-                terms=dict(
-                    field='type',
-                    size=ENTITIES_AGGREGATION_SIZE
-                )
+                terms=dict(field="resource_type", size=ENTITIES_AGGREGATION_SIZE)
             ),
+            type=dict(terms=dict(field="type", size=ENTITIES_AGGREGATION_SIZE)),
             source_catalog=dict(
-                terms=dict(
-                    field='source_catalog',
-                    size=ENTITIES_AGGREGATION_SIZE
-                )
+                terms=dict(field="source_catalog", size=ENTITIES_AGGREGATION_SIZE)
             ),
         ),
         filters={
-            _('resource_type'): and_term_filter('resource_type'),
-            _('type'): and_term_filter('type'),
-            _('source_catalog'): and_term_filter('source_catalog'),
-        }
+            _("resource_type"): and_term_filter("resource_type"),
+            _("type"): and_term_filter("type"),
+            _("source_catalog"): and_term_filter("source_catalog"),
+        },
     ),
     remote_entities=dict(
         aggs=dict(
-            sources=dict(
-                terms=dict(
-                    field='sources',
-                    size=ENTITIES_AGGREGATION_SIZE
-                )
-            ),
-            type=dict(
-                terms=dict(
-                    field='type',
-                    size=ENTITIES_AGGREGATION_SIZE
-                )
-            )
+            sources=dict(terms=dict(field="sources", size=ENTITIES_AGGREGATION_SIZE)),
+            type=dict(terms=dict(field="type", size=ENTITIES_AGGREGATION_SIZE)),
         ),
         filters={
-            _('sources'): and_term_filter('sources'),
-            _('type'): and_term_filter('type')
-        }
+            _("sources"): and_term_filter("sources"),
+            _("type"): and_term_filter("type"),
+        },
     ),
     local_entities=dict(
         aggs=dict(
             source_catalog=dict(
-                terms=dict(
-                    field='source_catalog',
-                    size=ENTITIES_AGGREGATION_SIZE
-                )
+                terms=dict(field="source_catalog", size=ENTITIES_AGGREGATION_SIZE)
             ),
-            type=dict(
-                terms=dict(
-                    field='type',
-                    size=ENTITIES_AGGREGATION_SIZE
-                )
-            )
+            type=dict(terms=dict(field="type", size=ENTITIES_AGGREGATION_SIZE)),
         ),
         filters={
-            _('sources'): and_term_filter('sources'),
-            _('type'): and_term_filter('type')
-        }
+            _("sources"): and_term_filter("sources"),
+            _("type"): and_term_filter("type"),
+        },
     ),
     templates=dict(
         aggs=dict(
             type=dict(
                 terms=dict(
-                    field='template_type',
+                    field="template_type",
                     # This does not take into account
                     # env variable or instance config file
                     size=RERO_ILS_AGGREGATION_SIZE.get(
-                        'templates', RERO_ILS_DEFAULT_AGGREGATION_SIZE)
+                        "templates", RERO_ILS_DEFAULT_AGGREGATION_SIZE
+                    ),
                 )
             ),
             visibility=dict(
                 terms=dict(
-                    field='visibility',
+                    field="visibility",
                     # This does not take into account
                     # env variable or instance config file
                     size=RERO_ILS_AGGREGATION_SIZE.get(
-                        'visibility', RERO_ILS_DEFAULT_AGGREGATION_SIZE)
+                        "visibility", RERO_ILS_DEFAULT_AGGREGATION_SIZE
+                    ),
                 )
-            )
+            ),
         ),
         filters={
-            _('type'): and_term_filter('template_type'),
-            _('visibility'): and_term_filter('visibility')
-        }
+            _("type"): and_term_filter("template_type"),
+            _("visibility"): and_term_filter("visibility"),
+        },
     ),
     collections=dict(
         aggs=dict(
             type=dict(
                 terms=dict(
-                    field='collection_type',
+                    field="collection_type",
                     # This does not take into account
                     # env variable or instance config file
                     size=RERO_ILS_AGGREGATION_SIZE.get(
-                        'collections', RERO_ILS_DEFAULT_AGGREGATION_SIZE)
+                        "collections", RERO_ILS_DEFAULT_AGGREGATION_SIZE
+                    ),
                 )
             ),
             library=dict(
                 terms=dict(
-                    field='libraries.pid',
+                    field="libraries.pid",
                     # This does not take into account
                     # env variable or instance config file
                     size=RERO_ILS_AGGREGATION_SIZE.get(
-                        'collections', RERO_ILS_DEFAULT_AGGREGATION_SIZE)
+                        "collections", RERO_ILS_DEFAULT_AGGREGATION_SIZE
+                    ),
                 )
             ),
             subject=dict(
                 terms=dict(
-                    field='subjects.name',
+                    field="subjects.name",
                     # This does not take into account
                     # env variable or instance config file
                     size=RERO_ILS_AGGREGATION_SIZE.get(
-                        'collections', RERO_ILS_DEFAULT_AGGREGATION_SIZE)
+                        "collections", RERO_ILS_DEFAULT_AGGREGATION_SIZE
+                    ),
                 )
             ),
             teacher=dict(
                 terms=dict(
-                    field='teachers.facet',
+                    field="teachers.facet",
                     # This does not take into account
                     # env variable or instance config file
                     size=RERO_ILS_AGGREGATION_SIZE.get(
-                        'collections', RERO_ILS_DEFAULT_AGGREGATION_SIZE)
+                        "collections", RERO_ILS_DEFAULT_AGGREGATION_SIZE
+                    ),
                 )
-            )
+            ),
         ),
         filters={
-            _('type'): and_term_filter('collection_type'),
-            _('library'): and_term_filter('libraries.pid'),
-            _('subject'): and_term_filter('subjects.name'),
-            _('teacher'): and_term_filter('teachers.facet')
-        }
+            _("type"): and_term_filter("collection_type"),
+            _("library"): and_term_filter("libraries.pid"),
+            _("subject"): and_term_filter("subjects.name"),
+            _("teacher"): and_term_filter("teachers.facet"),
+        },
     ),
     ill_requests=dict(
         aggs=dict(
             request_status=dict(
                 terms=dict(
-                    field='status',
+                    field="status",
                     size=RERO_ILS_AGGREGATION_SIZE.get(
-                        'ill_requests', RERO_ILS_DEFAULT_AGGREGATION_SIZE)
+                        "ill_requests", RERO_ILS_DEFAULT_AGGREGATION_SIZE
+                    ),
                 )
             ),
             loan_status=dict(
                 terms=dict(
-                    field='loan_status',
+                    field="loan_status",
                     size=RERO_ILS_AGGREGATION_SIZE.get(
-                        'ill_requests', RERO_ILS_DEFAULT_AGGREGATION_SIZE)
+                        "ill_requests", RERO_ILS_DEFAULT_AGGREGATION_SIZE
+                    ),
                 )
             ),
             requester=dict(
                 terms=dict(
-                    field='patron.facet',
+                    field="patron.facet",
                     size=RERO_ILS_AGGREGATION_SIZE.get(
-                        'ill_requests', RERO_ILS_DEFAULT_AGGREGATION_SIZE)
+                        "ill_requests", RERO_ILS_DEFAULT_AGGREGATION_SIZE
+                    ),
                 )
             ),
             library=dict(
                 terms=dict(
-                    field='library.pid',
+                    field="library.pid",
                     size=RERO_ILS_AGGREGATION_SIZE.get(
-                        'ill_requests', RERO_ILS_DEFAULT_AGGREGATION_SIZE)
+                        "ill_requests", RERO_ILS_DEFAULT_AGGREGATION_SIZE
+                    ),
                 )
-            )
+            ),
         ),
         filters={
-            _('request_status'): and_term_filter('status'),
-            _('loan_status'): and_term_filter('loan_status'),
-            _('requester'): and_term_filter('patron.facet'),
-            _('library'): and_term_filter('library.pid')
-        }
-    ),
-    patron_transactions=dict(
-        aggs=dict(
-            total=dict(
-                sum=dict(
-                    field='total_amount'
-                )
-            )
-        )
+            _("request_status"): and_term_filter("status"),
+            _("loan_status"): and_term_filter("loan_status"),
+            _("requester"): and_term_filter("patron.facet"),
+            _("library"): and_term_filter("library.pid"),
+        },
     ),
+    patron_transactions=dict(aggs=dict(total=dict(sum=dict(field="total_amount")))),
     patron_transaction_events=dict(
         aggs=dict(
-            category=dict(terms=dict(field='category', size=PTRE_AGGREGATION_SIZE)),
+            category=dict(terms=dict(field="category", size=PTRE_AGGREGATION_SIZE)),
             owning_library=dict(
-                terms=dict(field='owning_library.pid', size=PTRE_AGGREGATION_SIZE),
-                aggs=dict(owning_location=dict(terms=dict(field='owning_location.pid', size=PTRE_AGGREGATION_SIZE)))
+                terms=dict(field="owning_library.pid", size=PTRE_AGGREGATION_SIZE),
+                aggs=dict(
+                    owning_location=dict(
+                        terms=dict(
+                            field="owning_location.pid", size=PTRE_AGGREGATION_SIZE
+                        )
+                    )
+                ),
+            ),
+            patron_type=dict(
+                terms=dict(field="patron_type.pid", size=PTRE_AGGREGATION_SIZE)
             ),
-            patron_type=dict(terms=dict(field='patron_type.pid', size=PTRE_AGGREGATION_SIZE)),
             total=dict(
                 filter=total_facet_filter_builder,
                 aggs=dict(
-                    payment=dict(sum=dict(field='amount', script=dict(source='Math.round(_value*100)/100.00'))),
+                    payment=dict(
+                        sum=dict(
+                            field="amount",
+                            script=dict(source="Math.round(_value*100)/100.00"),
+                        )
+                    ),
                     subtype=dict(
-                        terms=dict(field='subtype', size=PTRE_AGGREGATION_SIZE),
-                        aggs=dict(subtotal=dict(sum=dict(
-                            field='amount',
-                            script=dict(source='Math.round(_value*100)/100.00')
-                        )))
-                    )
-                )
+                        terms=dict(field="subtype", size=PTRE_AGGREGATION_SIZE),
+                        aggs=dict(
+                            subtotal=dict(
+                                sum=dict(
+                                    field="amount",
+                                    script=dict(source="Math.round(_value*100)/100.00"),
+                                )
+                            )
+                        ),
+                    ),
+                ),
             ),
             transaction_date=dict(
-                date_histogram=dict(field='creation_date', calendar_interval='1d', format='yyyy-MM-dd')
+                date_histogram=dict(
+                    field="creation_date", calendar_interval="1d", format="yyyy-MM-dd"
+                )
+            ),
+            transaction_library=dict(
+                terms=dict(field="library.pid", size=PTRE_AGGREGATION_SIZE)
             ),
-            transaction_library=dict(terms=dict(field='library.pid', size=PTRE_AGGREGATION_SIZE)),
             type=dict(
-                terms=dict(field='type', size=PTRE_AGGREGATION_SIZE),
-                aggs=dict(subtype=dict(terms=dict(field='subtype', size=PTRE_AGGREGATION_SIZE)))
-            )
+                terms=dict(field="type", size=PTRE_AGGREGATION_SIZE),
+                aggs=dict(
+                    subtype=dict(
+                        terms=dict(field="subtype", size=PTRE_AGGREGATION_SIZE)
+                    )
+                ),
+            ),
         ),
         filters={
-            'item': and_term_filter('item.pid'),
-            'patron_type': and_term_filter('patron_type.pid'),
-            'transaction_library': and_term_filter('library.pid'),
-            'transaction_date': range_filter(
-                'creation_date',
-                format='epoch_millis',
-                start_date_math='/d',
-                end_date_math='/d'
+            "item": and_term_filter("item.pid"),
+            "patron_type": and_term_filter("patron_type.pid"),
+            "transaction_library": and_term_filter("library.pid"),
+            "transaction_date": range_filter(
+                "creation_date",
+                format="epoch_millis",
+                start_date_math="/d",
+                end_date_math="/d",
             ),
         },
         post_filters={
-            'owning_library': {
-                'owning_library': terms_filter('owning_library.pid'),
-                'owning_location': terms_filter('owning_location.pid')
+            "owning_library": {
+                "owning_library": terms_filter("owning_library.pid"),
+                "owning_location": terms_filter("owning_location.pid"),
             },
-            'category': terms_filter('category'),
-            'type': {
-                'type': terms_filter('type'),
-                'subtype': terms_filter('subtype')
-            }
-        }
+            "category": terms_filter("category"),
+            "type": {"type": terms_filter("type"), "subtype": terms_filter("subtype")},
+        },
     ),
     stats_cfg=dict(
         aggs=dict(
             category=dict(
                 terms=dict(
-                    field='category.type',
+                    field="category.type",
                     size=RERO_ILS_AGGREGATION_SIZE.get(
-                        'stats_cfg', RERO_ILS_DEFAULT_AGGREGATION_SIZE)
+                        "stats_cfg", RERO_ILS_DEFAULT_AGGREGATION_SIZE
+                    ),
+                ),
+                aggs=dict(
+                    indicator=dict(
+                        terms=dict(
+                            field="category.indicator.type",
+                            size=DOCUMENTS_AGGREGATION_SIZE,
+                        )
+                    )
                 ),
-                aggs=dict(
-                    indicator=dict(terms=dict(field='category.indicator.type', size=DOCUMENTS_AGGREGATION_SIZE))
-                )
             ),
             frequency=dict(
-                terms=dict(field='frequency', size=DOCUMENTS_AGGREGATION_SIZE),
-
+                terms=dict(field="frequency", size=DOCUMENTS_AGGREGATION_SIZE),
+            ),
+            library=dict(
+                terms=dict(field="library.pid", size=RERO_ILS_DEFAULT_AGGREGATION_SIZE)
             ),
-            library=dict(terms=dict(field='library.pid', size=RERO_ILS_DEFAULT_AGGREGATION_SIZE))
         ),
         filters={
-            _('category'): and_term_filter('category.type'),
-            _('indicator'): and_term_filter('category.indicator.type'),
-            _('frequency'): and_term_filter('frequency'),
-            _('library'): and_term_filter('library.pid'),
-            _('active'): and_term_filter('is_active')
-        }
-    )
+            _("category"): and_term_filter("category.type"),
+            _("indicator"): and_term_filter("category.indicator.type"),
+            _("frequency"): and_term_filter("frequency"),
+            _("library"): and_term_filter("library.pid"),
+            _("active"): and_term_filter("is_active"),
+        },
+    ),
 )
 # Elasticsearch fields boosting by index
 RERO_ILS_QUERY_BOOSTING = {
-    'documents': [
-        'autocomplete_title^3',
-        'title.*^3',
+    "documents": [
+        "autocomplete_title^3",
+        "title.*^3",
         # the fulltext fields are removed by default
-        'fulltext^6',
-        'fulltext.*^6',
-        'contribution.entity.authorized_access_point_fr*^2',
-        'contribution.entity.authorized_access_point_en*^2',
-        'contribution.entity.authorized_access_point_de*^2',
-        'contribution.entity.authorized_access_point_it*^2',
-        'contribution.entity.authorized_access_point^2',
-        'subjects.entity.authorized_access_point_fr*^2',
-        'subjects.entity.authorized_access_point_en*^2',
-        'subjects.entity.authorized_access_point_de*^2',
-        'subjects.entity.authorized_access_point_it*^2',
-        'subjects.entity.authorized_access_point^2',
-        'provisionActivity.startDate^2',
-        '*'
+        "fulltext^6",
+        "fulltext.*^6",
+        "contribution.entity.authorized_access_point_fr*^2",
+        "contribution.entity.authorized_access_point_en*^2",
+        "contribution.entity.authorized_access_point_de*^2",
+        "contribution.entity.authorized_access_point_it*^2",
+        "contribution.entity.authorized_access_point^2",
+        "subjects.entity.authorized_access_point_fr*^2",
+        "subjects.entity.authorized_access_point_en*^2",
+        "subjects.entity.authorized_access_point_de*^2",
+        "subjects.entity.authorized_access_point_it*^2",
+        "subjects.entity.authorized_access_point^2",
+        "provisionActivity.startDate^2",
+        "*",
     ],
-    'patrons': [
-        'barcode^3',
-        '*'
-    ]
+    "patrons": ["barcode^3", "*"],
 }
title="Most recent", default_order="asc", order=4 ), ) - RECORDS_REST_DEFAULT_SORT[index] = dict( - query='bestmatch', noquery='mostrecent') + RECORDS_REST_DEFAULT_SORT[index] = dict(query="bestmatch", noquery="mostrecent") # ------ ACQUISITION ACCOUNTS SORT -RECORDS_REST_SORT_OPTIONS['acq_accounts']['name'] = dict( - fields=['name.raw'], title='Account name', - default_order='asc' +RECORDS_REST_SORT_OPTIONS["acq_accounts"]["name"] = dict( + fields=["name.raw"], title="Account name", default_order="asc" ) -RECORDS_REST_SORT_OPTIONS['acq_accounts']['depth'] = dict( - fields=['depth'], title='Depth', - default_order='asc' +RECORDS_REST_SORT_OPTIONS["acq_accounts"]["depth"] = dict( + fields=["depth"], title="Depth", default_order="asc" ) -RECORDS_REST_DEFAULT_SORT['acq_accounts'] = dict( - query='bestmatch', noquery='name') +RECORDS_REST_DEFAULT_SORT["acq_accounts"] = dict(query="bestmatch", noquery="name") # ------ ACQUISITION ORDERS SORT -RECORDS_REST_SORT_OPTIONS['acq_orders']['receipt_date'] = dict( - fields=['-receipts.receipt_date'], title='Receipt date', - default_order='desc' +RECORDS_REST_SORT_OPTIONS["acq_orders"]["receipt_date"] = dict( + fields=["-receipts.receipt_date"], title="Receipt date", default_order="desc" ) -RECORDS_REST_SORT_OPTIONS['acq_orders']['order_date_new'] = dict( - fields=['-order_date'], title='Order date (newest)', - default_order='desc' +RECORDS_REST_SORT_OPTIONS["acq_orders"]["order_date_new"] = dict( + fields=["-order_date"], title="Order date (newest)", default_order="desc" ) -RECORDS_REST_SORT_OPTIONS['acq_orders']['order_date_old'] = dict( - fields=['order_date'], title='Order date (oldest)', - default_order='asc' +RECORDS_REST_SORT_OPTIONS["acq_orders"]["order_date_old"] = dict( + fields=["order_date"], title="Order date (oldest)", default_order="asc" ) -RECORDS_REST_SORT_OPTIONS['acq_orders']['reference_asc'] = dict( - fields=['reference.sort'], title='Reference (asc)', - default_order='asc' +RECORDS_REST_SORT_OPTIONS["acq_orders"]["reference_asc"] = dict( + fields=["reference.sort"], title="Reference (asc)", default_order="asc" ) -RECORDS_REST_SORT_OPTIONS['acq_orders']['reference_desc'] = dict( - fields=['-reference.sort'], title='Reference (desc)', - default_order='desc' +RECORDS_REST_SORT_OPTIONS["acq_orders"]["reference_desc"] = dict( + fields=["-reference.sort"], title="Reference (desc)", default_order="desc" +) +RECORDS_REST_DEFAULT_SORT["acq_orders"] = dict( + query="bestmatch", noquery="receipt_date" ) -RECORDS_REST_DEFAULT_SORT['acq_orders'] = dict( - query='bestmatch', noquery='receipt_date') # ------ ACQUISITION ORDER LINES SORT -RECORDS_REST_SORT_OPTIONS['acq_order_lines'] = dict( - pid=dict( - fields=['_id'], title='Order line PID', default_order='asc' - ), +RECORDS_REST_SORT_OPTIONS["acq_order_lines"] = dict( + pid=dict(fields=["_id"], title="Order line PID", default_order="asc"), priority=dict( - title='priority', - fields=['-priority', '_created'], - default_order='asc', - order=1 - ) + title="priority", fields=["-priority", "_created"], default_order="asc", order=1 + ), +) +RECORDS_REST_DEFAULT_SORT["acq_order_lines"] = dict( + query="bestmatch", noquery="priority" ) -RECORDS_REST_DEFAULT_SORT['acq_order_lines'] = dict( - query='bestmatch', noquery='priority') # ------ ACQUISITION RECEIPTS SORT -RECORDS_REST_SORT_OPTIONS['acq_receipts']['receipt_date'] = dict( - fields=['-receipt_lines.receipt_date'], title='Receipt date', - default_order='desc' +RECORDS_REST_SORT_OPTIONS["acq_receipts"]["receipt_date"] = dict( + 
fields=["-receipt_lines.receipt_date"], title="Receipt date", default_order="desc" +) +RECORDS_REST_DEFAULT_SORT["acq_receipts"] = dict( + query="bestmatch", noquery="receipt_date" ) -RECORDS_REST_DEFAULT_SORT['acq_receipts'] = dict( - query='bestmatch', noquery='receipt_date') # ------ ACQUISITION RECEIPT LINES SORT -RECORDS_REST_SORT_OPTIONS['acq_receipt_lines']['receipt_date'] = dict( - fields=['-receipt_date'], title='Receipt date', - default_order='desc' +RECORDS_REST_SORT_OPTIONS["acq_receipt_lines"]["receipt_date"] = dict( + fields=["-receipt_date"], title="Receipt date", default_order="desc" +) +RECORDS_REST_DEFAULT_SORT["acq_receipt_lines"] = dict( + query="bestmatch", noquery="receipt_date" ) -RECORDS_REST_DEFAULT_SORT['acq_receipt_lines'] = dict( - query='bestmatch', noquery='receipt_date') # ------ BUDGETS SORT -RECORDS_REST_SORT_OPTIONS['budgets']['name'] = dict( - fields=['budget_name'], title='Budget name', - default_order='asc' +RECORDS_REST_SORT_OPTIONS["budgets"]["name"] = dict( + fields=["budget_name"], title="Budget name", default_order="asc" ) # ------ CIRCULATION POLICIES SORT -RECORDS_REST_SORT_OPTIONS['circ_policies']['name'] = dict( - fields=['circ_policy_name'], title='Circulation policy name', - default_order='asc' +RECORDS_REST_SORT_OPTIONS["circ_policies"]["name"] = dict( + fields=["circ_policy_name"], title="Circulation policy name", default_order="asc" ) -RECORDS_REST_DEFAULT_SORT['circ_policies'] = dict( - query='bestmatch', noquery='name') +RECORDS_REST_DEFAULT_SORT["circ_policies"] = dict(query="bestmatch", noquery="name") # ------ COLLECTIONS SORT -RECORDS_REST_SORT_OPTIONS['collections']['start_date'] = dict( - fields=['start_date', 'title_sort'], title='Start date and title', - default_order='asc' +RECORDS_REST_SORT_OPTIONS["collections"]["start_date"] = dict( + fields=["start_date", "title_sort"], + title="Start date and title", + default_order="asc", ) -RECORDS_REST_SORT_OPTIONS['collections']['title'] = dict( - fields=['title_sort'], title='title', - default_order='asc' +RECORDS_REST_SORT_OPTIONS["collections"]["title"] = dict( + fields=["title_sort"], title="title", default_order="asc" ) -RECORDS_REST_DEFAULT_SORT['collections'] = dict( - query='bestmatch', noquery='start_date') +RECORDS_REST_DEFAULT_SORT["collections"] = dict(query="bestmatch", noquery="start_date") # ------ ENTITIES SORT -RECORDS_REST_SORT_OPTIONS['entities']['fr_name'] = dict( - fields=['authorized_access_point_fr.sort'], - default_order='asc' +RECORDS_REST_SORT_OPTIONS["entities"]["fr_name"] = dict( + fields=["authorized_access_point_fr.sort"], default_order="asc" ) -RECORDS_REST_SORT_OPTIONS['entities']['de_name'] = dict( - fields=['authorized_access_point_de.sort'], - default_order='asc' +RECORDS_REST_SORT_OPTIONS["entities"]["de_name"] = dict( + fields=["authorized_access_point_de.sort"], default_order="asc" ) -RECORDS_REST_SORT_OPTIONS['entities']['en_name'] = dict( - fields=['authorized_access_point_en.sort'], - default_order='asc' +RECORDS_REST_SORT_OPTIONS["entities"]["en_name"] = dict( + fields=["authorized_access_point_en.sort"], default_order="asc" ) -RECORDS_REST_SORT_OPTIONS['entities']['it_name'] = dict( - fields=['authorized_access_point_it.sort'], - default_order='asc' +RECORDS_REST_SORT_OPTIONS["entities"]["it_name"] = dict( + fields=["authorized_access_point_it.sort"], default_order="asc" ) # ------ DOCUMENTS SORT -RECORDS_REST_SORT_OPTIONS['documents']['title'] = dict( - fields=['sort_title'], title='Document title', - default_order='asc' 
+RECORDS_REST_SORT_OPTIONS["documents"]["title"] = dict( + fields=["sort_title"], title="Document title", default_order="asc" ) -RECORDS_REST_SORT_OPTIONS['documents']['pub_date_new'] = dict( - fields=['-sort_date_new'], title='Document date (newest)', - default_order='desc' +RECORDS_REST_SORT_OPTIONS["documents"]["pub_date_new"] = dict( + fields=["-sort_date_new"], title="Document date (newest)", default_order="desc" ) -RECORDS_REST_SORT_OPTIONS['documents']['pub_date_old'] = dict( - fields=['sort_date_old'], title='Document date (oldest)', - default_order='asc' +RECORDS_REST_SORT_OPTIONS["documents"]["pub_date_old"] = dict( + fields=["sort_date_old"], title="Document date (oldest)", default_order="asc" ) # ------ HOLDINGS SORT -RECORDS_REST_SORT_OPTIONS['holdings']['library_location'] = dict( - fields=['library.pid', 'location.pid'], - title='Holdings library location sort', - default_order='asc' +RECORDS_REST_SORT_OPTIONS["holdings"]["library_location"] = dict( + fields=["library.pid", "location.pid"], + title="Holdings library location sort", + default_order="asc", +) +RECORDS_REST_SORT_OPTIONS["holdings"]["organisation_library_location"] = dict( + fields=["organisation.pid", "library.pid", "location.pid"], + title="Holdings Organisation library location sort", + default_order="asc", ) -RECORDS_REST_SORT_OPTIONS['holdings']['organisation_library_location'] = dict( - fields=['organisation.pid', 'library.pid', 'location.pid'], - title='Holdings Organisation library location sort', - default_order='asc' +RECORDS_REST_DEFAULT_SORT["holdings"] = dict( + query="bestmatch", noquery="library_location" ) -RECORDS_REST_DEFAULT_SORT['holdings'] = dict( - query='bestmatch', noquery='library_location') # ------ ITEMS SORT -RECORDS_REST_SORT_OPTIONS['items']['barcode'] = dict( - fields=['barcode'], title='Barcode', - default_order='asc' +RECORDS_REST_SORT_OPTIONS["items"]["barcode"] = dict( + fields=["barcode"], title="Barcode", default_order="asc" ) -RECORDS_REST_SORT_OPTIONS['items']['call_number'] = dict( - fields=['call_number.raw'], title='Call Number', - default_order='asc' +RECORDS_REST_SORT_OPTIONS["items"]["call_number"] = dict( + fields=["call_number.raw"], title="Call Number", default_order="asc" ) -RECORDS_REST_SORT_OPTIONS['items']['second_call_number'] = dict( - fields=['second_call_number.raw'], title='Second call Number', - default_order='asc' +RECORDS_REST_SORT_OPTIONS["items"]["second_call_number"] = dict( + fields=["second_call_number.raw"], title="Second call Number", default_order="asc" ) -RECORDS_REST_SORT_OPTIONS['items']['issue_expected_date'] = dict( - fields=['issue.expected_date'], title='Issue expected date', - default_order='asc' +RECORDS_REST_SORT_OPTIONS["items"]["issue_expected_date"] = dict( + fields=["issue.expected_date"], title="Issue expected date", default_order="asc" ) -RECORDS_REST_SORT_OPTIONS['items']['issue_sort_date'] = dict( - fields=['issue.sort_date'], title='Issue chronology date', - default_order='asc' +RECORDS_REST_SORT_OPTIONS["items"]["issue_sort_date"] = dict( + fields=["issue.sort_date"], title="Issue chronology date", default_order="asc" ) -RECORDS_REST_SORT_OPTIONS['items']['enumeration_chronology'] = dict( - fields=['enumerationAndChronology.sort'], title='Enumeration and Chronology', - default_order='asc' +RECORDS_REST_SORT_OPTIONS["items"]["enumeration_chronology"] = dict( + fields=["enumerationAndChronology.sort"], + title="Enumeration and Chronology", + default_order="asc", ) -RECORDS_REST_SORT_OPTIONS['items']['library'] = dict( - 
fields=['library.pid'], title='Library', - default_order='asc' +RECORDS_REST_SORT_OPTIONS["items"]["library"] = dict( + fields=["library.pid"], title="Library", default_order="asc" ) -RECORDS_REST_SORT_OPTIONS['items']['current_requests'] = dict( - fields=['-current_pending_requests'], title='Current pending requests', - default_order='desc' +RECORDS_REST_SORT_OPTIONS["items"]["current_requests"] = dict( + fields=["-current_pending_requests"], + title="Current pending requests", + default_order="desc", ) -RECORDS_REST_DEFAULT_SORT['items'] = dict( - query='bestmatch', noquery='enumeration_chronology') +RECORDS_REST_DEFAULT_SORT["items"] = dict( + query="bestmatch", noquery="enumeration_chronology" +) # ------ ITEM TYPES SORT -RECORDS_REST_SORT_OPTIONS['item_types']['name'] = dict( - fields=['item_type_name'], title='Item type name', - default_order='asc' +RECORDS_REST_SORT_OPTIONS["item_types"]["name"] = dict( + fields=["item_type_name"], title="Item type name", default_order="asc" ) -RECORDS_REST_DEFAULT_SORT['item_types'] = dict( - query='bestmatch', noquery='name') +RECORDS_REST_DEFAULT_SORT["item_types"] = dict(query="bestmatch", noquery="name") # ------ LIBRARIES SORT -RECORDS_REST_SORT_OPTIONS['libraries']['name'] = dict( - fields=['library_name'], title='Library name', - default_order='asc' +RECORDS_REST_SORT_OPTIONS["libraries"]["name"] = dict( + fields=["library_name"], title="Library name", default_order="asc" ) -RECORDS_REST_SORT_OPTIONS['libraries']['code'] = dict( - fields=['code'], title='Library code', - default_order='asc' +RECORDS_REST_SORT_OPTIONS["libraries"]["code"] = dict( + fields=["code"], title="Library code", default_order="asc" ) -RECORDS_REST_DEFAULT_SORT['libraries'] = dict( - query='bestmatch', noquery='name') +RECORDS_REST_DEFAULT_SORT["libraries"] = dict(query="bestmatch", noquery="name") # ------ LOANS SORT -RECORDS_REST_SORT_OPTIONS['loans']['transactiondate'] = dict( - fields=['-transaction_date'], title='Transaction date', - default_order='desc' +RECORDS_REST_SORT_OPTIONS["loans"]["transactiondate"] = dict( + fields=["-transaction_date"], title="Transaction date", default_order="desc" ) -RECORDS_REST_SORT_OPTIONS['loans']['duedate'] = dict( - fields=['end_date'], title='Due date', - default_order='asc' +RECORDS_REST_SORT_OPTIONS["loans"]["duedate"] = dict( + fields=["end_date"], title="Due date", default_order="asc" ) -RECORDS_REST_DEFAULT_SORT['loans'] = dict( - query='bestmatch', noquery='transactiondate') +RECORDS_REST_DEFAULT_SORT["loans"] = dict(query="bestmatch", noquery="transactiondate") # ------ LOCATIONS SORT -RECORDS_REST_SORT_OPTIONS['locations']['name'] = dict( - fields=['location_name'], title='Location name', - default_order='asc' +RECORDS_REST_SORT_OPTIONS["locations"]["name"] = dict( + fields=["location_name"], title="Location name", default_order="asc" ) -RECORDS_REST_SORT_OPTIONS['locations']['pickup_name'] = dict( - fields=['pickup_name.keyword'], title='Pickup Location name', - default_order='asc' +RECORDS_REST_SORT_OPTIONS["locations"]["pickup_name"] = dict( + fields=["pickup_name.keyword"], title="Pickup Location name", default_order="asc" ) -RECORDS_REST_DEFAULT_SORT['locations'] = dict( - query='bestmatch', noquery='name') +RECORDS_REST_DEFAULT_SORT["locations"] = dict(query="bestmatch", noquery="name") # ------ PATRONS SORT -RECORDS_REST_SORT_OPTIONS['patrons']['full_name'] = dict( - fields=['last_name_sort', 'first_name_sort'], title='Patron fullname', - default_order='asc' +RECORDS_REST_SORT_OPTIONS["patrons"]["full_name"] = 
dict( + fields=["last_name_sort", "first_name_sort"], + title="Patron fullname", + default_order="asc", ) -RECORDS_REST_DEFAULT_SORT['patrons'] = dict( - query='bestmatch', noquery='full_name') +RECORDS_REST_DEFAULT_SORT["patrons"] = dict(query="bestmatch", noquery="full_name") # ------ PATRON TRANSACTIONS SORT -RECORDS_REST_SORT_OPTIONS['patron_transactions']['creation_date'] = dict( - fields=['creation_date'], title='Patron transaction creation date', - default_order='asc' +RECORDS_REST_SORT_OPTIONS["patron_transactions"]["creation_date"] = dict( + fields=["creation_date"], + title="Patron transaction creation date", + default_order="asc", ) -RECORDS_REST_DEFAULT_SORT['patrons'] = dict( - query='mostrecent', noquery='creation_date') +RECORDS_REST_DEFAULT_SORT["patrons"] = dict(query="mostrecent", noquery="creation_date") # ------ PATRON TYPES SORT -RECORDS_REST_SORT_OPTIONS['patron_types']['name'] = dict( - fields=['patron_type_name'], title='Patron type name', - default_order='asc' +RECORDS_REST_SORT_OPTIONS["patron_types"]["name"] = dict( + fields=["patron_type_name"], title="Patron type name", default_order="asc" ) -RECORDS_REST_DEFAULT_SORT['patron_types'] = dict( - query='bestmatch', noquery='name') +RECORDS_REST_DEFAULT_SORT["patron_types"] = dict(query="bestmatch", noquery="name") # ------ TEMPLATES SORT -RECORDS_REST_SORT_OPTIONS['templates']['name'] = dict( - fields=['name_sort'], title='Template name', - default_order='asc' +RECORDS_REST_SORT_OPTIONS["templates"]["name"] = dict( + fields=["name_sort"], title="Template name", default_order="asc" ) -RECORDS_REST_DEFAULT_SORT['templates'] = dict( - query='bestmatch', noquery='name') +RECORDS_REST_DEFAULT_SORT["templates"] = dict(query="bestmatch", noquery="name") # ------ VENDORS SORT -RECORDS_REST_SORT_OPTIONS['vendors']['name_asc'] = dict( - fields=['name.sort'], title='Vendor name (asc)', - default_order='asc' +RECORDS_REST_SORT_OPTIONS["vendors"]["name_asc"] = dict( + fields=["name.sort"], title="Vendor name (asc)", default_order="asc" ) -RECORDS_REST_SORT_OPTIONS['vendors']['name_desc'] = dict( - fields=['-name.sort'], title='Vendor name (desc)', - default_order='desc' +RECORDS_REST_SORT_OPTIONS["vendors"]["name_desc"] = dict( + fields=["-name.sort"], title="Vendor name (desc)", default_order="desc" ) -RECORDS_REST_DEFAULT_SORT['vendors'] = dict( - query='bestmatch', noquery='name_asc') +RECORDS_REST_DEFAULT_SORT["vendors"] = dict(query="bestmatch", noquery="name_asc") # ------ PATRON TRANSACTION SORT -RECORDS_REST_SORT_OPTIONS['patron_transaction_events']['created'] = dict( - fields=['_created'], title='Transaction date', default_order='asc' +RECORDS_REST_SORT_OPTIONS["patron_transaction_events"]["created"] = dict( + fields=["_created"], title="Transaction date", default_order="asc" ) -RECORDS_REST_SORT_OPTIONS['patron_transaction_events']['amount'] = dict( - fields=['amount'], title='Amount date', default_order='desc' +RECORDS_REST_SORT_OPTIONS["patron_transaction_events"]["amount"] = dict( + fields=["amount"], title="Amount date", default_order="desc" ) -RECORDS_REST_DEFAULT_SORT['acq_accounts'] = dict(query='bestmatch', noquery='created') +RECORDS_REST_DEFAULT_SORT["acq_accounts"] = dict(query="bestmatch", noquery="created") # ============================================================================= @@ -2722,250 +3139,247 @@ def _(x): evaluate the need. Keys are the name use to define the need into `invenio_access.actions`. 
""" -ACCESS_CACHE = 'invenio_cache:current_cache' +ACCESS_CACHE = "invenio_cache:current_cache" -RERO_ILS_EXPOSED_NEED_FILTER = dict( - regexp=r'.*-((?!read|update|delete).)*$' -) +RERO_ILS_EXPOSED_NEED_FILTER = dict(regexp=r".*-((?!read|update|delete).)*$") RERO_ILS_PERMISSIONS_ACTIONS = [ # resource basic permissions -------------------------- - 'rero_ils.modules.acquisition.acq_accounts.permissions:access_action', - 'rero_ils.modules.acquisition.acq_accounts.permissions:search_action', - 'rero_ils.modules.acquisition.acq_accounts.permissions:read_action', - 'rero_ils.modules.acquisition.acq_accounts.permissions:create_action', - 'rero_ils.modules.acquisition.acq_accounts.permissions:update_action', - 'rero_ils.modules.acquisition.acq_accounts.permissions:delete_action', - 'rero_ils.modules.acquisition.acq_invoices.permissions:access_action', - 'rero_ils.modules.acquisition.acq_invoices.permissions:search_action', - 'rero_ils.modules.acquisition.acq_invoices.permissions:read_action', - 'rero_ils.modules.acquisition.acq_invoices.permissions:create_action', - 'rero_ils.modules.acquisition.acq_invoices.permissions:update_action', - 'rero_ils.modules.acquisition.acq_invoices.permissions:delete_action', - 'rero_ils.modules.acquisition.acq_order_lines.permissions:access_action', - 'rero_ils.modules.acquisition.acq_order_lines.permissions:search_action', - 'rero_ils.modules.acquisition.acq_order_lines.permissions:read_action', - 'rero_ils.modules.acquisition.acq_order_lines.permissions:create_action', - 'rero_ils.modules.acquisition.acq_order_lines.permissions:update_action', - 'rero_ils.modules.acquisition.acq_order_lines.permissions:delete_action', - 'rero_ils.modules.acquisition.acq_orders.permissions:access_action', - 'rero_ils.modules.acquisition.acq_orders.permissions:search_action', - 'rero_ils.modules.acquisition.acq_orders.permissions:read_action', - 'rero_ils.modules.acquisition.acq_orders.permissions:create_action', - 'rero_ils.modules.acquisition.acq_orders.permissions:update_action', - 'rero_ils.modules.acquisition.acq_orders.permissions:delete_action', - 'rero_ils.modules.acquisition.acq_receipts.permissions:access_action', - 'rero_ils.modules.acquisition.acq_receipts.permissions:search_action', - 'rero_ils.modules.acquisition.acq_receipts.permissions:read_action', - 'rero_ils.modules.acquisition.acq_receipts.permissions:create_action', - 'rero_ils.modules.acquisition.acq_receipts.permissions:update_action', - 'rero_ils.modules.acquisition.acq_receipts.permissions:delete_action', - 'rero_ils.modules.acquisition.acq_receipt_lines.permissions:access_action', - 'rero_ils.modules.acquisition.acq_receipt_lines.permissions:search_action', - 'rero_ils.modules.acquisition.acq_receipt_lines.permissions:read_action', - 'rero_ils.modules.acquisition.acq_receipt_lines.permissions:create_action', - 'rero_ils.modules.acquisition.acq_receipt_lines.permissions:update_action', - 'rero_ils.modules.acquisition.acq_receipt_lines.permissions:delete_action', - 'rero_ils.modules.acquisition.budgets.permissions:access_action', - 'rero_ils.modules.acquisition.budgets.permissions:search_action', - 'rero_ils.modules.acquisition.budgets.permissions:read_action', - 'rero_ils.modules.acquisition.budgets.permissions:create_action', - 'rero_ils.modules.acquisition.budgets.permissions:update_action', - 'rero_ils.modules.acquisition.budgets.permissions:delete_action', - 'rero_ils.modules.circ_policies.permissions:access_action', - 'rero_ils.modules.circ_policies.permissions:search_action', - 
'rero_ils.modules.circ_policies.permissions:read_action', - 'rero_ils.modules.circ_policies.permissions:create_action', - 'rero_ils.modules.circ_policies.permissions:update_action', - 'rero_ils.modules.circ_policies.permissions:delete_action', - 'rero_ils.modules.collections.permissions:access_action', - 'rero_ils.modules.collections.permissions:search_action', - 'rero_ils.modules.collections.permissions:read_action', - 'rero_ils.modules.collections.permissions:create_action', - 'rero_ils.modules.collections.permissions:update_action', - 'rero_ils.modules.collections.permissions:delete_action', - 'rero_ils.modules.documents.permissions:access_action', - 'rero_ils.modules.documents.permissions:search_action', - 'rero_ils.modules.documents.permissions:read_action', - 'rero_ils.modules.documents.permissions:create_action', - 'rero_ils.modules.documents.permissions:update_action', - 'rero_ils.modules.documents.permissions:delete_action', - 'rero_ils.modules.files.permissions:access_action', - 'rero_ils.modules.files.permissions:search_action', - 'rero_ils.modules.files.permissions:read_action', - 'rero_ils.modules.files.permissions:create_action', - 'rero_ils.modules.files.permissions:update_action', - 'rero_ils.modules.files.permissions:delete_action', - 'rero_ils.modules.entities.local_entities.permissions.access_action', - 'rero_ils.modules.entities.local_entities.permissions.search_action', - 'rero_ils.modules.entities.local_entities.permissions.read_action', - 'rero_ils.modules.entities.local_entities.permissions.create_action', - 'rero_ils.modules.entities.local_entities.permissions.update_action', - 'rero_ils.modules.entities.local_entities.permissions.delete_action', - 'rero_ils.modules.holdings.permissions:access_action', - 'rero_ils.modules.holdings.permissions:search_action', - 'rero_ils.modules.holdings.permissions:read_action', - 'rero_ils.modules.holdings.permissions:create_action', - 'rero_ils.modules.holdings.permissions:update_action', - 'rero_ils.modules.holdings.permissions:delete_action', - 'rero_ils.modules.items.permissions:access_action', - 'rero_ils.modules.items.permissions:search_action', - 'rero_ils.modules.items.permissions:read_action', - 'rero_ils.modules.items.permissions:create_action', - 'rero_ils.modules.items.permissions:update_action', - 'rero_ils.modules.items.permissions:delete_action', - 'rero_ils.modules.ill_requests.permissions:access_action', - 'rero_ils.modules.ill_requests.permissions:search_action', - 'rero_ils.modules.ill_requests.permissions:read_action', - 'rero_ils.modules.ill_requests.permissions:create_action', - 'rero_ils.modules.ill_requests.permissions:update_action', - 'rero_ils.modules.ill_requests.permissions:delete_action', - 'rero_ils.modules.item_types.permissions:access_action', - 'rero_ils.modules.item_types.permissions:search_action', - 'rero_ils.modules.item_types.permissions:read_action', - 'rero_ils.modules.item_types.permissions:create_action', - 'rero_ils.modules.item_types.permissions:update_action', - 'rero_ils.modules.item_types.permissions:delete_action', - 'rero_ils.modules.libraries.permissions:access_action', - 'rero_ils.modules.libraries.permissions:search_action', - 'rero_ils.modules.libraries.permissions:read_action', - 'rero_ils.modules.libraries.permissions:create_action', - 'rero_ils.modules.libraries.permissions:update_action', - 'rero_ils.modules.libraries.permissions:delete_action', - 'rero_ils.modules.loans.permissions:access_action', - 'rero_ils.modules.loans.permissions:search_action', - 
'rero_ils.modules.loans.permissions:read_action', - 'rero_ils.modules.locations.permissions:access_action', - 'rero_ils.modules.locations.permissions:search_action', - 'rero_ils.modules.locations.permissions:read_action', - 'rero_ils.modules.locations.permissions:create_action', - 'rero_ils.modules.locations.permissions:update_action', - 'rero_ils.modules.locations.permissions:delete_action', - 'rero_ils.modules.local_fields.permissions:access_action', - 'rero_ils.modules.local_fields.permissions:search_action', - 'rero_ils.modules.local_fields.permissions:read_action', - 'rero_ils.modules.local_fields.permissions:create_action', - 'rero_ils.modules.local_fields.permissions:update_action', - 'rero_ils.modules.local_fields.permissions:delete_action', - 'rero_ils.modules.notifications.permissions:access_action', - 'rero_ils.modules.notifications.permissions:search_action', - 'rero_ils.modules.notifications.permissions:read_action', - 'rero_ils.modules.notifications.permissions:create_action', - 'rero_ils.modules.notifications.permissions:update_action', - 'rero_ils.modules.notifications.permissions:delete_action', - 'rero_ils.modules.operation_logs.permissions:access_action', - 'rero_ils.modules.operation_logs.permissions:search_action', - 'rero_ils.modules.operation_logs.permissions:read_action', - 'rero_ils.modules.organisations.permissions:access_action', - 'rero_ils.modules.organisations.permissions:search_action', - 'rero_ils.modules.organisations.permissions:read_action', - 'rero_ils.modules.organisations.permissions:create_action', - 'rero_ils.modules.organisations.permissions:update_action', - 'rero_ils.modules.organisations.permissions:delete_action', - 'rero_ils.modules.patrons.permissions:access_action', - 'rero_ils.modules.patrons.permissions:search_action', - 'rero_ils.modules.patrons.permissions:read_action', - 'rero_ils.modules.patrons.permissions:create_action', - 'rero_ils.modules.patrons.permissions:update_action', - 'rero_ils.modules.patrons.permissions:delete_action', - 'rero_ils.modules.patron_transactions.permissions:access_action', - 'rero_ils.modules.patron_transactions.permissions:search_action', - 'rero_ils.modules.patron_transactions.permissions:read_action', - 'rero_ils.modules.patron_transactions.permissions:create_action', - 'rero_ils.modules.patron_transactions.permissions:update_action', - 'rero_ils.modules.patron_transactions.permissions:delete_action', - 'rero_ils.modules.patron_transaction_events.permissions:access_action', - 'rero_ils.modules.patron_transaction_events.permissions:search_action', - 'rero_ils.modules.patron_transaction_events.permissions:read_action', - 'rero_ils.modules.patron_transaction_events.permissions:create_action', - 'rero_ils.modules.patron_transaction_events.permissions:update_action', - 'rero_ils.modules.patron_transaction_events.permissions:delete_action', - 'rero_ils.modules.patron_types.permissions:access_action', - 'rero_ils.modules.patron_types.permissions:search_action', - 'rero_ils.modules.patron_types.permissions:read_action', - 'rero_ils.modules.patron_types.permissions:create_action', - 'rero_ils.modules.patron_types.permissions:update_action', - 'rero_ils.modules.patron_types.permissions:delete_action', - 'rero_ils.modules.stats.permissions:access_action', - 'rero_ils.modules.stats.permissions:search_action', - 'rero_ils.modules.stats.permissions:read_action', - 'rero_ils.modules.stats_cfg.permissions:access_action', - 'rero_ils.modules.stats_cfg.permissions:search_action', - 
'rero_ils.modules.stats_cfg.permissions:read_action', - 'rero_ils.modules.stats_cfg.permissions:create_action', - 'rero_ils.modules.stats_cfg.permissions:update_action', - 'rero_ils.modules.stats_cfg.permissions:delete_action', - 'rero_ils.modules.templates.permissions:access_action', - 'rero_ils.modules.templates.permissions:search_action', - 'rero_ils.modules.templates.permissions:read_action', - 'rero_ils.modules.templates.permissions:create_action', - 'rero_ils.modules.templates.permissions:update_action', - 'rero_ils.modules.templates.permissions:delete_action', - 'rero_ils.modules.vendors.permissions:access_action', - 'rero_ils.modules.vendors.permissions:search_action', - 'rero_ils.modules.vendors.permissions:read_action', - 'rero_ils.modules.vendors.permissions:create_action', - 'rero_ils.modules.vendors.permissions:update_action', - 'rero_ils.modules.vendors.permissions:delete_action', + "rero_ils.modules.acquisition.acq_accounts.permissions:access_action", + "rero_ils.modules.acquisition.acq_accounts.permissions:search_action", + "rero_ils.modules.acquisition.acq_accounts.permissions:read_action", + "rero_ils.modules.acquisition.acq_accounts.permissions:create_action", + "rero_ils.modules.acquisition.acq_accounts.permissions:update_action", + "rero_ils.modules.acquisition.acq_accounts.permissions:delete_action", + "rero_ils.modules.acquisition.acq_invoices.permissions:access_action", + "rero_ils.modules.acquisition.acq_invoices.permissions:search_action", + "rero_ils.modules.acquisition.acq_invoices.permissions:read_action", + "rero_ils.modules.acquisition.acq_invoices.permissions:create_action", + "rero_ils.modules.acquisition.acq_invoices.permissions:update_action", + "rero_ils.modules.acquisition.acq_invoices.permissions:delete_action", + "rero_ils.modules.acquisition.acq_order_lines.permissions:access_action", + "rero_ils.modules.acquisition.acq_order_lines.permissions:search_action", + "rero_ils.modules.acquisition.acq_order_lines.permissions:read_action", + "rero_ils.modules.acquisition.acq_order_lines.permissions:create_action", + "rero_ils.modules.acquisition.acq_order_lines.permissions:update_action", + "rero_ils.modules.acquisition.acq_order_lines.permissions:delete_action", + "rero_ils.modules.acquisition.acq_orders.permissions:access_action", + "rero_ils.modules.acquisition.acq_orders.permissions:search_action", + "rero_ils.modules.acquisition.acq_orders.permissions:read_action", + "rero_ils.modules.acquisition.acq_orders.permissions:create_action", + "rero_ils.modules.acquisition.acq_orders.permissions:update_action", + "rero_ils.modules.acquisition.acq_orders.permissions:delete_action", + "rero_ils.modules.acquisition.acq_receipts.permissions:access_action", + "rero_ils.modules.acquisition.acq_receipts.permissions:search_action", + "rero_ils.modules.acquisition.acq_receipts.permissions:read_action", + "rero_ils.modules.acquisition.acq_receipts.permissions:create_action", + "rero_ils.modules.acquisition.acq_receipts.permissions:update_action", + "rero_ils.modules.acquisition.acq_receipts.permissions:delete_action", + "rero_ils.modules.acquisition.acq_receipt_lines.permissions:access_action", + "rero_ils.modules.acquisition.acq_receipt_lines.permissions:search_action", + "rero_ils.modules.acquisition.acq_receipt_lines.permissions:read_action", + "rero_ils.modules.acquisition.acq_receipt_lines.permissions:create_action", + "rero_ils.modules.acquisition.acq_receipt_lines.permissions:update_action", + 
"rero_ils.modules.acquisition.acq_receipt_lines.permissions:delete_action", + "rero_ils.modules.acquisition.budgets.permissions:access_action", + "rero_ils.modules.acquisition.budgets.permissions:search_action", + "rero_ils.modules.acquisition.budgets.permissions:read_action", + "rero_ils.modules.acquisition.budgets.permissions:create_action", + "rero_ils.modules.acquisition.budgets.permissions:update_action", + "rero_ils.modules.acquisition.budgets.permissions:delete_action", + "rero_ils.modules.circ_policies.permissions:access_action", + "rero_ils.modules.circ_policies.permissions:search_action", + "rero_ils.modules.circ_policies.permissions:read_action", + "rero_ils.modules.circ_policies.permissions:create_action", + "rero_ils.modules.circ_policies.permissions:update_action", + "rero_ils.modules.circ_policies.permissions:delete_action", + "rero_ils.modules.collections.permissions:access_action", + "rero_ils.modules.collections.permissions:search_action", + "rero_ils.modules.collections.permissions:read_action", + "rero_ils.modules.collections.permissions:create_action", + "rero_ils.modules.collections.permissions:update_action", + "rero_ils.modules.collections.permissions:delete_action", + "rero_ils.modules.documents.permissions:access_action", + "rero_ils.modules.documents.permissions:search_action", + "rero_ils.modules.documents.permissions:read_action", + "rero_ils.modules.documents.permissions:create_action", + "rero_ils.modules.documents.permissions:update_action", + "rero_ils.modules.documents.permissions:delete_action", + "rero_ils.modules.files.permissions:access_action", + "rero_ils.modules.files.permissions:search_action", + "rero_ils.modules.files.permissions:read_action", + "rero_ils.modules.files.permissions:create_action", + "rero_ils.modules.files.permissions:update_action", + "rero_ils.modules.files.permissions:delete_action", + "rero_ils.modules.entities.local_entities.permissions.access_action", + "rero_ils.modules.entities.local_entities.permissions.search_action", + "rero_ils.modules.entities.local_entities.permissions.read_action", + "rero_ils.modules.entities.local_entities.permissions.create_action", + "rero_ils.modules.entities.local_entities.permissions.update_action", + "rero_ils.modules.entities.local_entities.permissions.delete_action", + "rero_ils.modules.holdings.permissions:access_action", + "rero_ils.modules.holdings.permissions:search_action", + "rero_ils.modules.holdings.permissions:read_action", + "rero_ils.modules.holdings.permissions:create_action", + "rero_ils.modules.holdings.permissions:update_action", + "rero_ils.modules.holdings.permissions:delete_action", + "rero_ils.modules.items.permissions:access_action", + "rero_ils.modules.items.permissions:search_action", + "rero_ils.modules.items.permissions:read_action", + "rero_ils.modules.items.permissions:create_action", + "rero_ils.modules.items.permissions:update_action", + "rero_ils.modules.items.permissions:delete_action", + "rero_ils.modules.ill_requests.permissions:access_action", + "rero_ils.modules.ill_requests.permissions:search_action", + "rero_ils.modules.ill_requests.permissions:read_action", + "rero_ils.modules.ill_requests.permissions:create_action", + "rero_ils.modules.ill_requests.permissions:update_action", + "rero_ils.modules.ill_requests.permissions:delete_action", + "rero_ils.modules.item_types.permissions:access_action", + "rero_ils.modules.item_types.permissions:search_action", + "rero_ils.modules.item_types.permissions:read_action", + 
"rero_ils.modules.item_types.permissions:create_action", + "rero_ils.modules.item_types.permissions:update_action", + "rero_ils.modules.item_types.permissions:delete_action", + "rero_ils.modules.libraries.permissions:access_action", + "rero_ils.modules.libraries.permissions:search_action", + "rero_ils.modules.libraries.permissions:read_action", + "rero_ils.modules.libraries.permissions:create_action", + "rero_ils.modules.libraries.permissions:update_action", + "rero_ils.modules.libraries.permissions:delete_action", + "rero_ils.modules.loans.permissions:access_action", + "rero_ils.modules.loans.permissions:search_action", + "rero_ils.modules.loans.permissions:read_action", + "rero_ils.modules.locations.permissions:access_action", + "rero_ils.modules.locations.permissions:search_action", + "rero_ils.modules.locations.permissions:read_action", + "rero_ils.modules.locations.permissions:create_action", + "rero_ils.modules.locations.permissions:update_action", + "rero_ils.modules.locations.permissions:delete_action", + "rero_ils.modules.local_fields.permissions:access_action", + "rero_ils.modules.local_fields.permissions:search_action", + "rero_ils.modules.local_fields.permissions:read_action", + "rero_ils.modules.local_fields.permissions:create_action", + "rero_ils.modules.local_fields.permissions:update_action", + "rero_ils.modules.local_fields.permissions:delete_action", + "rero_ils.modules.notifications.permissions:access_action", + "rero_ils.modules.notifications.permissions:search_action", + "rero_ils.modules.notifications.permissions:read_action", + "rero_ils.modules.notifications.permissions:create_action", + "rero_ils.modules.notifications.permissions:update_action", + "rero_ils.modules.notifications.permissions:delete_action", + "rero_ils.modules.operation_logs.permissions:access_action", + "rero_ils.modules.operation_logs.permissions:search_action", + "rero_ils.modules.operation_logs.permissions:read_action", + "rero_ils.modules.organisations.permissions:access_action", + "rero_ils.modules.organisations.permissions:search_action", + "rero_ils.modules.organisations.permissions:read_action", + "rero_ils.modules.organisations.permissions:create_action", + "rero_ils.modules.organisations.permissions:update_action", + "rero_ils.modules.organisations.permissions:delete_action", + "rero_ils.modules.patrons.permissions:access_action", + "rero_ils.modules.patrons.permissions:search_action", + "rero_ils.modules.patrons.permissions:read_action", + "rero_ils.modules.patrons.permissions:create_action", + "rero_ils.modules.patrons.permissions:update_action", + "rero_ils.modules.patrons.permissions:delete_action", + "rero_ils.modules.patron_transactions.permissions:access_action", + "rero_ils.modules.patron_transactions.permissions:search_action", + "rero_ils.modules.patron_transactions.permissions:read_action", + "rero_ils.modules.patron_transactions.permissions:create_action", + "rero_ils.modules.patron_transactions.permissions:update_action", + "rero_ils.modules.patron_transactions.permissions:delete_action", + "rero_ils.modules.patron_transaction_events.permissions:access_action", + "rero_ils.modules.patron_transaction_events.permissions:search_action", + "rero_ils.modules.patron_transaction_events.permissions:read_action", + "rero_ils.modules.patron_transaction_events.permissions:create_action", + "rero_ils.modules.patron_transaction_events.permissions:update_action", + "rero_ils.modules.patron_transaction_events.permissions:delete_action", + 
"rero_ils.modules.patron_types.permissions:access_action", + "rero_ils.modules.patron_types.permissions:search_action", + "rero_ils.modules.patron_types.permissions:read_action", + "rero_ils.modules.patron_types.permissions:create_action", + "rero_ils.modules.patron_types.permissions:update_action", + "rero_ils.modules.patron_types.permissions:delete_action", + "rero_ils.modules.stats.permissions:access_action", + "rero_ils.modules.stats.permissions:search_action", + "rero_ils.modules.stats.permissions:read_action", + "rero_ils.modules.stats_cfg.permissions:access_action", + "rero_ils.modules.stats_cfg.permissions:search_action", + "rero_ils.modules.stats_cfg.permissions:read_action", + "rero_ils.modules.stats_cfg.permissions:create_action", + "rero_ils.modules.stats_cfg.permissions:update_action", + "rero_ils.modules.stats_cfg.permissions:delete_action", + "rero_ils.modules.templates.permissions:access_action", + "rero_ils.modules.templates.permissions:search_action", + "rero_ils.modules.templates.permissions:read_action", + "rero_ils.modules.templates.permissions:create_action", + "rero_ils.modules.templates.permissions:update_action", + "rero_ils.modules.templates.permissions:delete_action", + "rero_ils.modules.vendors.permissions:access_action", + "rero_ils.modules.vendors.permissions:search_action", + "rero_ils.modules.vendors.permissions:read_action", + "rero_ils.modules.vendors.permissions:create_action", + "rero_ils.modules.vendors.permissions:update_action", + "rero_ils.modules.vendors.permissions:delete_action", # additional permissions ------------------------------ - 'rero_ils.modules.acquisition.acq_accounts.permissions:transfer_action', - 'rero_ils.modules.permissions:permission_management', - 'rero_ils.modules.permissions:access_ui_admin', - 'rero_ils.modules.permissions:access_circulation', - 'rero_ils.modules.permissions:can_use_debug_mode', - 'rero_ils.modules.items.permissions:late_issue_management' + "rero_ils.modules.acquisition.acq_accounts.permissions:transfer_action", + "rero_ils.modules.permissions:permission_management", + "rero_ils.modules.permissions:access_ui_admin", + "rero_ils.modules.permissions:access_circulation", + "rero_ils.modules.permissions:can_use_debug_mode", + "rero_ils.modules.items.permissions:late_issue_management", ] # Detailed View Configuration # =========================== RECORDS_UI_ENDPOINTS = { - 'coll': dict( - pid_type='coll', - route='//collections/', - template='rero_ils/detailed_view_collections.html', - record_class='rero_ils.modules.collections.api:Collection', - view_imp='rero_ils.modules.collections.views.collection_view_method' - ), - 'doc': dict( - pid_type='doc', - route='//documents/', - template='rero_ils/detailed_view_documents.html', - record_class='rero_ils.modules.documents.api:Document', - view_imp='rero_ils.modules.documents.views.doc_item_view_method', - - ), - 'doc_export': dict( - pid_type='doc', - route='//documents//export/', - view_imp='invenio_records_ui.views.export', - template='rero_ils/export_documents.html', - record_class='rero_ils.modules.documents.api:Document', - ), - 'stats': dict( - pid_type='stat', - route='/stats/', - template='rero_ils/detailed_view_stats.html', - record_class='rero_ils.modules.stats.api.api:Stat', - view_imp='rero_ils.modules.stats.views.stats_view_method', - permission_factory_imp='rero_ils.modules.stats.permissions:stats_ui_permission_factory', + "coll": dict( + pid_type="coll", + route="//collections/", + template="rero_ils/detailed_view_collections.html", + 
record_class="rero_ils.modules.collections.api:Collection", + view_imp="rero_ils.modules.collections.views.collection_view_method", + ), + "doc": dict( + pid_type="doc", + route="//documents/", + template="rero_ils/detailed_view_documents.html", + record_class="rero_ils.modules.documents.api:Document", + view_imp="rero_ils.modules.documents.views.doc_item_view_method", + ), + "doc_export": dict( + pid_type="doc", + route="//documents//export/", + view_imp="invenio_records_ui.views.export", + template="rero_ils/export_documents.html", + record_class="rero_ils.modules.documents.api:Document", + ), + "stats": dict( + pid_type="stat", + route="/stats/", + template="rero_ils/detailed_view_stats.html", + record_class="rero_ils.modules.stats.api.api:Stat", + view_imp="rero_ils.modules.stats.views.stats_view_method", + permission_factory_imp="rero_ils.modules.stats.permissions:stats_ui_permission_factory", ), "recid_preview": dict( pid_type="recid", route="/records/preview//", view_imp="rero_invenio_files.records.previewer.preview", record_class="rero_invenio_files.records.api:RecordWithFile", - ) + ), } RECORDS_UI_EXPORT_FORMATS = { - 'doc': { - 'json': dict( - title='JSON', - serializer='invenio_records_rest.serializers:json_v1', + "doc": { + "json": dict( + title="JSON", + serializer="invenio_records_rest.serializers:json_v1", order=1, ), - 'ris': dict( - title='RIS (Endnote, Zotero, ...)', - serializer='rero_ils.modules.documents.serializers:ris_serializer', + "ris": dict( + title="RIS (Endnote, Zotero, ...)", + serializer="rero_ils.modules.documents.serializers:ris_serializer", order=2, ), } @@ -2973,37 +3387,37 @@ def _(x): RERO_ILS_DEFAULT_JSON_SCHEMA = { - 'acac': '/acq_accounts/acq_account-v0.0.1.json', - 'acol': '/acq_order_lines/acq_order_line-v0.0.1.json', - 'acor': '/acq_orders/acq_order-v0.0.1.json', - 'acre': '/acq_receipts/acq_receipt-v0.0.1.json', - 'acrl': '/acq_receipt_lines/acq_receipt_line-v0.0.1.json', - 'acin': '/acq_invoices/acq_invoice-v0.0.1.json', - 'budg': '/budgets/budget-v0.0.1.json', - 'cipo': '/circ_policies/circ_policy-v0.0.1.json', - 'coll': '/collections/collection-v0.0.1.json', - 'rement': '/remote_entities/remote_entity-v0.0.1.json', - 'doc': '/documents/document-v0.0.1.json', - 'hold': '/holdings/holding-v0.0.1.json', - 'illr': '/ill_requests/ill_request-v0.0.1.json', - 'item': '/items/item-v0.0.1.json', - 'itty': '/item_types/item_type-v0.0.1.json', - 'lib': '/libraries/library-v0.0.1.json', - 'loc': '/locations/location-v0.0.1.json', - 'locent': '/local_entities/local_entity-v0.0.1.json', - 'lofi': '/local_fields/local_field-v0.0.1.json', - 'notif': '/notifications/notification-v0.0.1.json', - 'org': '/organisations/organisation-v0.0.1.json', - 'pttr': '/patron_transactions/patron_transaction-v0.0.1.json', - 'ptty': '/patron_types/patron_type-v0.0.1.json', - 'ptre': '/patron_transaction_events/patron_transaction_event-v0.0.1.json', - 'ptrn': '/patrons/patron-v0.0.1.json', - 'stat': '/stats/stat-v0.0.1.json', - 'stacfg': '/stats_cfg/stat_cfg-v0.0.1.json', - 'tmpl': '/templates/template-v0.0.1.json', - 'oplg': '/operation_logs/operation_log-v0.0.1.json', - 'vndr': '/vendors/vendor-v0.0.1.json', - 'user': '/users/user-v0.0.1.json' + "acac": "/acq_accounts/acq_account-v0.0.1.json", + "acol": "/acq_order_lines/acq_order_line-v0.0.1.json", + "acor": "/acq_orders/acq_order-v0.0.1.json", + "acre": "/acq_receipts/acq_receipt-v0.0.1.json", + "acrl": "/acq_receipt_lines/acq_receipt_line-v0.0.1.json", + "acin": "/acq_invoices/acq_invoice-v0.0.1.json", + "budg": 
"/budgets/budget-v0.0.1.json", + "cipo": "/circ_policies/circ_policy-v0.0.1.json", + "coll": "/collections/collection-v0.0.1.json", + "rement": "/remote_entities/remote_entity-v0.0.1.json", + "doc": "/documents/document-v0.0.1.json", + "hold": "/holdings/holding-v0.0.1.json", + "illr": "/ill_requests/ill_request-v0.0.1.json", + "item": "/items/item-v0.0.1.json", + "itty": "/item_types/item_type-v0.0.1.json", + "lib": "/libraries/library-v0.0.1.json", + "loc": "/locations/location-v0.0.1.json", + "locent": "/local_entities/local_entity-v0.0.1.json", + "lofi": "/local_fields/local_field-v0.0.1.json", + "notif": "/notifications/notification-v0.0.1.json", + "org": "/organisations/organisation-v0.0.1.json", + "pttr": "/patron_transactions/patron_transaction-v0.0.1.json", + "ptty": "/patron_types/patron_type-v0.0.1.json", + "ptre": "/patron_transaction_events/patron_transaction_event-v0.0.1.json", + "ptrn": "/patrons/patron-v0.0.1.json", + "stat": "/stats/stat-v0.0.1.json", + "stacfg": "/stats_cfg/stat_cfg-v0.0.1.json", + "tmpl": "/templates/template-v0.0.1.json", + "oplg": "/operation_logs/operation_log-v0.0.1.json", + "vndr": "/vendors/vendor-v0.0.1.json", + "user": "/users/user-v0.0.1.json", } # Operation Log Configuration @@ -3011,11 +3425,11 @@ def _(x): # TODO: this can be removed as it is used only for the UI as it has been # refactored using extensions RERO_ILS_ENABLE_OPERATION_LOG = { - 'documents': 'doc', - 'holdings': 'hold', - 'items': 'item', - 'ill_requests': 'illr', - 'local_entities': 'locent' + "documents": "doc", + "holdings": "hold", + "items": "item", + "ill_requests": "illr", + "local_entities": "locent", } RERO_ILS_ENABLE_OPERATION_LOG_VALIDATION = False @@ -3034,32 +3448,27 @@ def _(x): LOGGING_SENTRY_CELERY = True ROLLOVER_LOGGING_CONFIG = { - 'version': 1, - 'disable_existing_loggers': False, - 'formatters': { - 'standard': { - 'format': '%(asctime)s [%(levelname)s] :: %(message)s' - } + "version": 1, + "disable_existing_loggers": False, + "formatters": { + "standard": {"format": "%(asctime)s [%(levelname)s] :: %(message)s"} }, - 'handlers': { - 'console': { - 'class': 'logging.StreamHandler', - 'formatter': 'standard' - }, - 'file': { - 'class': 'logging.handlers.RotatingFileHandler', - 'filename': 'logs/rollover_log', - 'backupCount': 10, - 'formatter': 'standard' - } + "handlers": { + "console": {"class": "logging.StreamHandler", "formatter": "standard"}, + "file": { + "class": "logging.handlers.RotatingFileHandler", + "filename": "logs/rollover_log", + "backupCount": 10, + "formatter": "standard", + }, }, - 'loggers': { - 'rero_ils.modules.acquisition.rollover': { - 'handlers': ['console', 'file'], - 'level': 'INFO', - 'propagate': False + "loggers": { + "rero_ils.modules.acquisition.rollover": { + "handlers": ["console", "file"], + "level": "INFO", + "propagate": False, } - } + }, } @@ -3073,18 +3482,15 @@ def _(x): # Define which files should be considered as a template file. Each full_path # file matching one of the specific regular expression will be considered. -RERO_ILS_NOTIFICATIONS_ALLOWED_TEMPLATE_FILES = [ - '*.txt', - '*.tpl.*' -] +RERO_ILS_NOTIFICATIONS_ALLOWED_TEMPLATE_FILES = ["*.txt", "*.tpl.*"] # Define functions to use when we would send a notification. Each key will be # the communication channel, value is the function to call. The used functions # should accept one positional argument. 
RERO_ILS_COMMUNICATION_DISPATCHER_FUNCTIONS = { CommunicationChannel.EMAIL: NotificationDispatcher.send_notification_by_email, CommunicationChannel.MAIL: NotificationDispatcher.send_mail_for_printing, - # 'sms': not_yet_implemented - # 'telepathy': self.madness_mind + # "sms": not_yet_implemented + # "telepathy": self.madness_mind # ... } @@ -3093,8 +3499,8 @@ def _(x): #: Supercharge flask_security invalid password or user message. #: flask_security uses its own translation domain, so we need #: translate the message on demand with a custom list class. -SECURITY_MSG_INVALID_PASSWORD = TranslatedList(('INVALID_USER_OR_PASSWORD', 'error')) -SECURITY_MSG_USER_DOES_NOT_EXIST = TranslatedList(('INVALID_USER_OR_PASSWORD', 'error')) +SECURITY_MSG_INVALID_PASSWORD = TranslatedList(("INVALID_USER_OR_PASSWORD", "error")) +SECURITY_MSG_USER_DOES_NOT_EXIST = TranslatedList(("INVALID_USER_OR_PASSWORD", "error")) #: Allow password change by users. SECURITY_CHANGEABLE = True @@ -3124,26 +3530,26 @@ def _(x): # Misc INDEXER_REPLACE_REFS = True -INDEXER_RECORD_TO_INDEX = 'rero_ils.modules.indexer_utils.record_to_index' +INDEXER_RECORD_TO_INDEX = "rero_ils.modules.indexer_utils.record_to_index" #: Trigger delay for celery tasks to index referenced records. RERO_ILS_INDEXER_TASK_DELAY = timedelta(seconds=2) -RERO_ILS_APP_URL_SCHEME = 'https' -RERO_ILS_APP_HOST = 'bib.rero.ch' +RERO_ILS_APP_URL_SCHEME = "https" +RERO_ILS_APP_HOST = "bib.rero.ch" #: Actual URL used to construct links in notifications for example -RERO_ILS_APP_URL = 'https://bib.rero.ch' +RERO_ILS_APP_URL = "https://bib.rero.ch" -RERO_ILS_PERMALINK_RERO_URL = 'http://data.rero.ch/01-{identifier}' +RERO_ILS_PERMALINK_RERO_URL = "http://data.rero.ch/01-{identifier}" # Flag to determine the state of ILS # Show or hide message on red RERO_ILS_STATE_PRODUCTION = False -RERO_ILS_STATE_MESSAGE = _('This is a TEST VERSION.') -RERO_ILS_STATE_LINK_MESSAGE = _('Go to the production site.') +RERO_ILS_STATE_MESSAGE = _("This is a TEST VERSION.") +RERO_ILS_STATE_LINK_MESSAGE = _("Go to the production site.") RERO_ILS_STATE_LINK = RERO_ILS_APP_URL # robots.txt response -RERO_ILS_ROBOTS = 'User-Agent: *\nDisallow: /\n' +RERO_ILS_ROBOTS = "User-Agent: *\nDisallow: /\n" #: Git commit hash. If set, a link to github commit page #: is displayed on RERO-ILS frontpage. RERO_ILS_APP_GIT_HASH = None @@ -3153,83 +3559,75 @@ def _(x): #: RERO_ILS MEF specific configurations. 
RERO_ILS_MEF_CONFIG = { - 'agents': { - 'base_url': os.environ.get('RERO_ILS_MEF_AGENTS_URL', 'https://mef.rero.ch/api/agents'), - 'sources': ['idref', 'gnd'] + "agents": { + "base_url": os.environ.get( + "RERO_ILS_MEF_AGENTS_URL", "https://mef.rero.ch/api/agents" + ), + "sources": ["idref", "gnd"], }, - 'concepts': { - 'base_url': os.environ.get('RERO_ILS_MEF_CONCEPTS_URL', 'https://mef.rero.ch/api/concepts'), - 'sources': ['idref'] + "concepts": { + "base_url": os.environ.get( + "RERO_ILS_MEF_CONCEPTS_URL", "https://mef.rero.ch/api/concepts" + ), + "sources": ["idref"], }, - 'concepts-genreForm': { - 'base_url': os.environ.get('RERO_ILS_MEF_CONCEPTS_URL', 'https://mef.rero.ch/api/concepts'), - 'sources': ['idref'], - 'filters': [ - {'idref.bnf_type': 'genre/forme Rameau'} - ] + "concepts-genreForm": { + "base_url": os.environ.get( + "RERO_ILS_MEF_CONCEPTS_URL", "https://mef.rero.ch/api/concepts" + ), + "sources": ["idref"], + "filters": [{"idref.bnf_type": "genre/forme Rameau"}], }, - 'places': { - 'base_url': os.environ.get('RERO_ILS_MEF_PLACES_URL', 'https://mef.rero.ch/api/places'), - 'sources': ['idref'] + "places": { + "base_url": os.environ.get( + "RERO_ILS_MEF_PLACES_URL", "https://mef.rero.ch/api/places" + ), + "sources": ["idref"], }, } RERO_ILS_ENTITY_TYPES = { - 'bf:Person': 'agents', - 'bf:Organisation': 'agents', - 'bf:Topic': 'concepts', - 'bf:Temporal': 'concepts', - 'bf:Place': 'places' + "bf:Person": "agents", + "bf:Organisation": "agents", + "bf:Topic": "concepts", + "bf:Temporal": "concepts", + "bf:Place": "places", } # The absolute path to put the agent synchronization logs, default is the # instance path -# RERO_ILS_MEF_SYNC_LOG_DIR = '/var/logs/reroils' +# RERO_ILS_MEF_SYNC_LOG_DIR = "/var/logs/reroils" -RERO_ILS_APP_HELP_PAGE = ( - 'https://github.com/rero/rero-ils/wiki/Public-demo-help' -) +RERO_ILS_APP_HELP_PAGE = "https://github.com/rero/rero-ils/wiki/Public-demo-help" #: Cover service -RERO_ILS_THUMBNAIL_SERVICE_URL = 'https://services.test.rero.ch/cover' +RERO_ILS_THUMBNAIL_SERVICE_URL = "https://services.test.rero.ch/cover" #: Entities -RERO_ILS_AGENTS_SOURCES = ['idref', 'gnd', 'rero'] -RERO_ILS_AGENTS_SOURCES_EXCLUDE_LINK = ['rero'] +RERO_ILS_AGENTS_SOURCES = ["idref", "gnd", "rero"] +RERO_ILS_AGENTS_SOURCES_EXCLUDE_LINK = ["rero"] RERO_ILS_AGENTS_LABEL_ORDER = { - 'fallback': 'fr', - 'fr': ['idref', 'rero', 'gnd'], - 'de': ['gnd', 'idref', 'rero'], + "fallback": "fr", + "fr": ["idref", "rero", "gnd"], + "de": ["gnd", "idref", "rero"], } RERO_ILS_DEFAULT_SUGGESTION_LIMIT = 10 RERO_ILS_APP_ENTITIES_FIELDS_REF = [ EntityFieldWithRef.CONTRIBUTION, EntityFieldWithRef.GENRE_FORM, - EntityFieldWithRef.SUBJECTS + EntityFieldWithRef.SUBJECTS, ] RERO_ILS_APP_ENTITIES_TYPES_FIELDS = { EntityType.ORGANISATION: [ EntityFieldWithRef.CONTRIBUTION, - EntityFieldWithRef.SUBJECTS - ], - EntityType.PERSON: [ - EntityFieldWithRef.CONTRIBUTION, - EntityFieldWithRef.SUBJECTS - ], - EntityType.PLACE: [ - EntityFieldWithRef.SUBJECTS - ], - EntityType.TEMPORAL: [ - EntityFieldWithRef.SUBJECTS - ], - EntityType.TOPIC: [ EntityFieldWithRef.SUBJECTS, - EntityFieldWithRef.GENRE_FORM ], - EntityType.WORK: [ - EntityFieldWithRef.SUBJECTS - ] + EntityType.PERSON: [EntityFieldWithRef.CONTRIBUTION, EntityFieldWithRef.SUBJECTS], + EntityType.PLACE: [EntityFieldWithRef.SUBJECTS], + EntityType.TEMPORAL: [EntityFieldWithRef.SUBJECTS], + EntityType.TOPIC: [EntityFieldWithRef.SUBJECTS, EntityFieldWithRef.GENRE_FORM], + EntityType.WORK: [EntityFieldWithRef.SUBJECTS], } # 
============================================================================= @@ -3249,21 +3647,21 @@ def _(x): UserRole.CIRCULATION_MANAGER, UserRole.CATALOG_MANAGER, UserRole.USER_MANAGER, - UserRole.ACQUISITION_MANAGER + UserRole.ACQUISITION_MANAGER, }, - UserRole.USER_MANAGER: {UserRole.PATRON} + UserRole.USER_MANAGER: {UserRole.PATRON}, } # JSONSchemas # =========== """Default json schema host.""" -JSONSCHEMAS_HOST = 'bib.rero.ch' +JSONSCHEMAS_HOST = "bib.rero.ch" """Default schema endpoint.""" JSONSCHEMAS_ENDPOINT = "/schemas" """Whether to resolve $ref before serving a schema.""" JSONSCHEMAS_REPLACE_REFS = False """Loader class used in ``JSONRef`` when replacing ``$ref``.""" -JSONSCHEMAS_LOADER_CLS = 'rero_ils.jsonschemas.utils.JsonLoader' +JSONSCHEMAS_LOADER_CLS = "rero_ils.jsonschemas.utils.JsonLoader" """Register the endpoints on the API app.""" JSONSCHEMAS_REGISTER_ENDPOINTS_API = False """Register the endpoints on the UI app.""" @@ -3271,7 +3669,7 @@ def _(x): # OAI-PMH # ======= -OAISERVER_ID_PREFIX = 'oai:bib.rero.ch:' +OAISERVER_ID_PREFIX = "oai:bib.rero.ch:" # ============================================================================= # RERO_ILS LOANS SPECIAL CONFIGURATION @@ -3284,7 +3682,7 @@ def _(x): # ============================================================================= # ANONYMIZATION PROCESS CONFIGURATION # ============================================================================= -# Specify the delay (in days) under which no loan can't be anonymized anyway (for circulation management process). +# Specify the delay (in days) under which no loan can"t be anonymized anyway (for circulation management process). RERO_ILS_ANONYMISATION_MIN_TIME_LIMIT = 3 * 365 / 12 # Specify the delay (in days) when a loan should be anonymized anyway after it concluded. 
RERO_ILS_ANONYMISATION_MAX_TIME_LIMIT = 6 * 365 / 12 @@ -3294,8 +3692,7 @@ def _(x): CIRCULATION_PATRON_EXISTS = Patron.get_record_by_pid CIRCULATION_ITEM_LOCATION_RETRIEVER = item_location_retriever -CIRCULATION_DOCUMENT_RETRIEVER_FROM_ITEM = \ - Item.get_document_pid_by_item_pid_object +CIRCULATION_DOCUMENT_RETRIEVER_FROM_ITEM = Item.get_document_pid_by_item_pid_object CIRCULATION_ITEMS_RETRIEVER_FROM_DOCUMENT = Item.get_items_pid_by_document_pid CIRCULATION_DOCUMENT_EXISTS = Document.get_record_by_pid @@ -3304,24 +3701,21 @@ def _(x): CIRCULATION_PATRON_REF_BUILDER = loan_build_patron_ref CIRCULATION_DOCUMENT_REF_BUILDER = loan_build_document_ref -CIRCULATION_TRANSACTION_LOCATION_VALIDATOR = \ - Location.transaction_location_validator -CIRCULATION_TRANSACTION_USER_VALIDATOR = \ - Patron.transaction_user_validator +CIRCULATION_TRANSACTION_LOCATION_VALIDATOR = Location.transaction_location_validator +CIRCULATION_TRANSACTION_USER_VALIDATOR = Patron.transaction_user_validator -CIRCULATION_LOAN_LOCATIONS_VALIDATION = \ - validate_item_pickup_transaction_locations +CIRCULATION_LOAN_LOCATIONS_VALIDATION = validate_item_pickup_transaction_locations CIRCULATION_SAME_LOCATION_VALIDATOR = same_location_validator """Validates the item, pickup and transaction locations of pending loans.""" # This is needed for absolute URL (url_for) -# SERVER_NAME = 'localhost:5000' +# SERVER_NAME = "localhost:5000" # CIRCULATION_REST_ENDPOINTS = {} """Default circulation policies when performing an action on a Loan.""" -_LOANID_CONVERTER = 'pid(loanid,record_class="invenio_circulation.api:Loan")' +_LOANID_CONVERTER = "pid(loanid,record_class='invenio_circulation.api:Loan')" """Loan PID url converter.""" CIRCULATION_REST_ENDPOINTS = dict( @@ -3330,157 +3724,134 @@ def _(x): pid_minter=CIRCULATION_LOAN_MINTER, pid_fetcher=CIRCULATION_LOAN_FETCHER, search_class=LoansSearch, - search_index='loans', - indexer_class='rero_ils.modules.loans.api:LoansIndexer', + search_index="loans", + indexer_class="rero_ils.modules.loans.api:LoansIndexer", record_serializers={ - 'application/json': 'invenio_records_rest.serializers:json_v1_response', + "application/json": "invenio_records_rest.serializers:json_v1_response", }, record_serializers_aliases={ - 'json': 'application/json', - 'rero': 'application/rero+json' + "json": "application/json", + "rero": "application/rero+json", }, search_serializers={ - 'application/json': 'invenio_records_rest.serializers:json_v1_search', - 'application/rero+json': 'rero_ils.modules.loans.serializers:json_loan_search' + "application/json": "invenio_records_rest.serializers:json_v1_search", + "application/rero+json": "rero_ils.modules.loans.serializers:json_loan_search", }, search_serializers_aliases={ - 'json': 'application/json', - 'rero': 'application/rero+json' + "json": "application/json", + "rero": "application/rero+json", }, record_loaders={ - 'application/json': lambda: Loan(request.get_json()), + "application/json": lambda: Loan(request.get_json()), }, - record_class='rero_ils.modules.loans.api:Loan', - search_factory_imp='rero_ils.query:circulation_search_factory', - list_route='/loans/', - item_route=f'/loans/<{_LOANID_CONVERTER}:pid_value>', - default_media_type='application/json', + record_class="rero_ils.modules.loans.api:Loan", + search_factory_imp="rero_ils.query:circulation_search_factory", + list_route="/loans/", + item_route=f"/loans/<{_LOANID_CONVERTER}:pid_value>", + default_media_type="application/json", max_result_window=MAX_RESULT_WINDOW, error_handlers=dict(), - 
list_permission_factory_imp=lambda record: LoanPermissionPolicy('search', record=record), - read_permission_factory_imp=lambda record: LoanPermissionPolicy('read', record=record), - create_permission_factory_imp=lambda record: LoanPermissionPolicy('create', record=record), - update_permission_factory_imp=lambda record: LoanPermissionPolicy('update', record=record), - delete_permission_factory_imp=lambda record: LoanPermissionPolicy('delete', record=record) + list_permission_factory_imp=lambda record: LoanPermissionPolicy( + "search", record=record + ), + read_permission_factory_imp=lambda record: LoanPermissionPolicy( + "read", record=record + ), + create_permission_factory_imp=lambda record: LoanPermissionPolicy( + "create", record=record + ), + update_permission_factory_imp=lambda record: LoanPermissionPolicy( + "update", record=record + ), + delete_permission_factory_imp=lambda record: LoanPermissionPolicy( + "delete", record=record + ), ) ) """Disable Circulation REST API.""" CIRCULATION_LOAN_TRANSITIONS = { - 'CREATED': [ - dict( - dest=LoanState.PENDING, - trigger='request', - transition=CreatedToPending - ), + "CREATED": [ + dict(dest=LoanState.PENDING, trigger="request", transition=CreatedToPending), dict( dest=LoanState.ITEM_ON_LOAN, - trigger='checkout', + trigger="checkout", transition=ToItemOnLoan, - assign_item=False + assign_item=False, ), ], - 'PENDING': [ + "PENDING": [ dict( dest=LoanState.ITEM_AT_DESK, transition=PendingToItemAtDesk, - trigger='validate_request' + trigger="validate_request", ), dict( dest=LoanState.ITEM_IN_TRANSIT_FOR_PICKUP, transition=PendingToItemInTransitPickup, - trigger='validate_request' - ), - dict( - dest=LoanState.ITEM_ON_LOAN, - transition=ToItemOnLoan, - trigger='checkout' + trigger="validate_request", ), - dict( - dest=LoanState.CANCELLED, - trigger='cancel', - transition=ToCancelled - ) + dict(dest=LoanState.ITEM_ON_LOAN, transition=ToItemOnLoan, trigger="checkout"), + dict(dest=LoanState.CANCELLED, trigger="cancel", transition=ToCancelled), ], - 'ITEM_AT_DESK': [ + "ITEM_AT_DESK": [ dict( dest=LoanState.ITEM_ON_LOAN, transition=ItemAtDeskToItemOnLoan, - trigger='checkout' + trigger="checkout", ), - dict( - dest=LoanState.CANCELLED, - trigger='cancel', - transition=ToCancelled - ) + dict(dest=LoanState.CANCELLED, trigger="cancel", transition=ToCancelled), ], - 'ITEM_IN_TRANSIT_FOR_PICKUP': [ - dict( - dest=LoanState.ITEM_AT_DESK, - trigger='receive' - ), - dict( - dest=LoanState.CANCELLED, - trigger='cancel', - transition=ToCancelled - ) + "ITEM_IN_TRANSIT_FOR_PICKUP": [ + dict(dest=LoanState.ITEM_AT_DESK, trigger="receive"), + dict(dest=LoanState.CANCELLED, trigger="cancel", transition=ToCancelled), ], - 'ITEM_ON_LOAN': [ + "ITEM_ON_LOAN": [ dict( dest=LoanState.ITEM_RETURNED, transition=ItemOnLoanToItemReturned, - trigger='checkin', - assign_item=False + trigger="checkin", + assign_item=False, ), dict( dest=LoanState.ITEM_IN_TRANSIT_TO_HOUSE, transition=ItemOnLoanToItemInTransitHouse, - trigger='checkin' + trigger="checkin", ), dict( dest=LoanState.ITEM_ON_LOAN, transition=ItemOnLoanToItemOnLoan, - trigger='extend' + trigger="extend", ), - dict( - dest=LoanState.CANCELLED, - trigger='cancel', - transition=ToCancelled - ) + dict(dest=LoanState.CANCELLED, trigger="cancel", transition=ToCancelled), ], - 'ITEM_IN_TRANSIT_TO_HOUSE': [ + "ITEM_IN_TRANSIT_TO_HOUSE": [ dict( dest=LoanState.ITEM_RETURNED, transition=ItemInTransitHouseToItemReturned, - trigger='receive', - assign_item=False + trigger="receive", + assign_item=False, ), - dict( - 
dest=LoanState.CANCELLED, - trigger='cancel', - transition=ToCancelled - ) + dict(dest=LoanState.CANCELLED, trigger="cancel", transition=ToCancelled), ], - 'ITEM_RETURNED': [], - 'CANCELLED': [], + "ITEM_RETURNED": [], + "CANCELLED": [], } CIRCULATION_POLICIES = dict( checkout=dict( duration_default=get_default_loan_duration, duration_validate=validate_loan_duration, - item_can_circulate=is_item_available_for_checkout + item_can_circulate=is_item_available_for_checkout, ), extension=dict( from_end_date=False, duration_default=partial( - get_extension_params, parameter_name='duration_default'), - max_count=partial( - get_extension_params, parameter_name='max_count'), + get_extension_params, parameter_name="duration_default" + ), + max_count=partial(get_extension_params, parameter_name="max_count"), ), - request=dict( - can_be_requested=can_be_requested - ) + request=dict(can_be_requested=can_be_requested), ) CIRCULATION_ACTIONS_VALIDATION = { @@ -3489,18 +3860,18 @@ def _(x): Item.can_request, CircPolicy.can_request, Patron.can_request, - PatronType.can_request + PatronType.can_request, ], ItemCirculationAction.EXTEND: [ Loan.can_extend, Patron.can_extend, - PatronType.can_extend + PatronType.can_extend, ], ItemCirculationAction.CHECKOUT: [ Patron.can_checkout, CircPolicy.allow_checkout, - PatronType.can_checkout - ] + PatronType.can_checkout, + ], } HOLDING_CIRCULATION_ACTIONS_VALIDATION = { @@ -3509,29 +3880,21 @@ def _(x): Holding.can_request, CircPolicy.can_request, Patron.can_request, - PatronType.can_request + PatronType.can_request, ] } # WIKI # ==== -WIKI_CONTENT_DIR = './wiki' -WIKI_INDEX_DIR = './wiki/_index' -WIKI_URL_PREFIX = '/help' -WIKI_LANGUAGES = { - 'en': 'English', - 'fr': 'French', - 'de': 'German', - 'it': 'Italian' -} +WIKI_CONTENT_DIR = "./wiki" +WIKI_INDEX_DIR = "./wiki/_index" +WIKI_URL_PREFIX = "/help" +WIKI_LANGUAGES = {"en": "English", "fr": "French", "de": "German", "it": "Italian"} WIKI_CURRENT_LANGUAGE = get_current_language -WIKI_UPLOAD_FOLDER = os.path.join(WIKI_CONTENT_DIR, 'files') -WIKI_BASE_TEMPLATE = 'rero_ils/page_wiki.html' +WIKI_UPLOAD_FOLDER = os.path.join(WIKI_CONTENT_DIR, "files") +WIKI_BASE_TEMPLATE = "rero_ils/page_wiki.html" WIKI_EDIT_VIEW_PERMISSION = wiki_edit_view_permission WIKI_EDIT_UI_PERMISSION = wiki_edit_ui_permission -WIKI_MARKDOWN_EXTENSIONS = set(( - 'extra', - 'markdown_captions' -)) +WIKI_MARKDOWN_EXTENSIONS = set(("extra", "markdown_captions")) # IMPORT FROM EXTERNAL SOURCE CONFIGURATION # ============================================================================= @@ -3550,111 +3913,103 @@ def _(x): RERO_IMPORT_REST_ENDPOINTS = dict( loc=dict( - key='loc', - import_class='rero_ils.modules.imports.api:LoCImport', + key="loc", + import_class="rero_ils.modules.imports.api:LoCImport", import_size=50, - label='Library of Congress', - weight=70 + label="Library of Congress", + weight=70, ), bnf=dict( - key='bnf', - import_class='rero_ils.modules.imports.api:BnfImport', + key="bnf", + import_class="rero_ils.modules.imports.api:BnfImport", import_size=50, - label='BNF', - weight=20 + label="BNF", + weight=20, ), dnb=dict( - key='dnb', - import_class='rero_ils.modules.imports.api:DNBImport', + key="dnb", + import_class="rero_ils.modules.imports.api:DNBImport", import_size=50, - label='DNB', - weight=20 + label="DNB", + weight=20, ), slsp=dict( - key='slsp', - import_class='rero_ils.modules.imports.api:SLSPImport', + key="slsp", + import_class="rero_ils.modules.imports.api:SLSPImport", import_size=50, - label='SLSP', - weight=15 + 
label="SLSP", + weight=15, ), ugent=dict( - key='ugent', - import_class='rero_ils.modules.imports.api:UGentImport', + key="ugent", + import_class="rero_ils.modules.imports.api:UGentImport", import_size=50, - label='UGent', - weight=30 + label="UGent", + weight=30, ), kul=dict( - key='kul', - import_class='rero_ils.modules.imports.api:KULImport', + key="kul", + import_class="rero_ils.modules.imports.api:KULImport", import_size=50, - label='KULeuven', - weight=30 + label="KULeuven", + weight=30, ), sudoc=dict( - key='sudoc', - import_class='rero_ils.modules.imports.api:SUDOCImport', + key="sudoc", + import_class="rero_ils.modules.imports.api:SUDOCImport", import_size=50, - label='SUDOC', - weight=20 + label="SUDOC", + weight=20, ), renouvaud=dict( - key='renouvaud', - import_class='rero_ils.modules.imports.api:RenouvaudImport', + key="renouvaud", + import_class="rero_ils.modules.imports.api:RenouvaudImport", import_size=50, - label='Renouvaud', - weight=20 + label="Renouvaud", + weight=20, ), ) # RERO_ILS_IMPORT_6XX_TARGET_ATTRIBUTE -# Use attribute 'subjects' or 'subjects_imported' +# Use attribute "subjects" or "subjects_imported" # ============================================================================= -RERO_ILS_IMPORT_6XX_TARGET_ATTRIBUTE = 'subjects_imported' +RERO_ILS_IMPORT_6XX_TARGET_ATTRIBUTE = "subjects_imported" # STREAMED EXPORT RECORDS # ============================================================================= RERO_INVENIO_BASE_EXPORT_REST_ENDPOINTS = dict( acq_account=dict( - resource=RECORDS_REST_ENDPOINTS.get('acac'), - default_media_type='text/csv', + resource=RECORDS_REST_ENDPOINTS.get("acac"), + default_media_type="text/csv", search_serializers={ - 'text/csv': 'rero_ils.modules.acquisition.acq_accounts.serializers:csv_acq_account_search', + "text/csv": "rero_ils.modules.acquisition.acq_accounts.serializers:csv_acq_account_search", }, - search_serializers_aliases={ - 'csv': 'text/csv' - } + search_serializers_aliases={"csv": "text/csv"}, ), acq_order=dict( - resource=RECORDS_REST_ENDPOINTS.get('acor'), - default_media_type='text/csv', + resource=RECORDS_REST_ENDPOINTS.get("acor"), + default_media_type="text/csv", search_serializers={ - 'text/csv': 'rero_ils.modules.acquisition.acq_orders.serializers:csv_acor_search', + "text/csv": "rero_ils.modules.acquisition.acq_orders.serializers:csv_acor_search", }, - search_serializers_aliases={ - 'csv': 'text/csv' - } + search_serializers_aliases={"csv": "text/csv"}, ), loan=dict( - resource=CIRCULATION_REST_ENDPOINTS.get('loanid'), - default_media_type='text/csv', + resource=CIRCULATION_REST_ENDPOINTS.get("loanid"), + default_media_type="text/csv", search_serializers={ - 'text/csv': 'rero_ils.modules.loans.serializers:csv_stream_search', + "text/csv": "rero_ils.modules.loans.serializers:csv_stream_search", }, - search_serializers_aliases={ - 'csv': 'text/csv' - } + search_serializers_aliases={"csv": "text/csv"}, ), patron_transaction_events=dict( - resource=RECORDS_REST_ENDPOINTS.get('ptre'), - default_media_type='text/csv', + resource=RECORDS_REST_ENDPOINTS.get("ptre"), + default_media_type="text/csv", search_serializers={ - 'text/csv': 'rero_ils.modules.patron_transaction_events.serializers:csv_ptre_search', + "text/csv": "rero_ils.modules.patron_transaction_events.serializers:csv_ptre_search", }, - search_serializers_aliases={ - 'csv': 'text/csv' - } - ) + search_serializers_aliases={"csv": "text/csv"}, + ), ) # SRU @@ -3674,41 +4029,39 @@ def _(x): SIP2_SUPPORT_ONLINE_STATUS = True SIP2_SUPPORT_OFFLINE_STATUS = True 
SIP2_SUPPORT_STATUS_UPDATE = True -SIP2_DATE_FORMAT = '%Y%m%d %H%M%S' +SIP2_DATE_FORMAT = "%Y%m%d %H%M%S" SIP2_PERMISSIONS_FACTORY = seflcheck_permission_factory SIP2_REMOTE_ACTION_HANDLERS = dict( rero_ils=dict( - login_handler='rero_ils.modules.selfcheck.api:selfcheck_login', - logout_handler='rero_ils.modules.selfcheck.api:selfcheck_logout', - system_status_handler='rero_ils.modules.selfcheck.api:system_status', + login_handler="rero_ils.modules.selfcheck.api:selfcheck_login", + logout_handler="rero_ils.modules.selfcheck.api:selfcheck_logout", + system_status_handler="rero_ils.modules.selfcheck.api:system_status", patron_handlers=dict( - validate_patron='rero_ils.modules.selfcheck.api:validate_patron_account', - authorize_patron='rero_ils.modules.selfcheck.api:authorize_patron', - enable_patron='rero_ils.modules.selfcheck.api:enable_patron', - patron_status='rero_ils.modules.selfcheck.api:patron_status', - account='rero_ils.modules.selfcheck.api:patron_information' - ), - item_handlers=dict( - item='rero_ils.modules.selfcheck.api:item_information' + validate_patron="rero_ils.modules.selfcheck.api:validate_patron_account", + authorize_patron="rero_ils.modules.selfcheck.api:authorize_patron", + enable_patron="rero_ils.modules.selfcheck.api:enable_patron", + patron_status="rero_ils.modules.selfcheck.api:patron_status", + account="rero_ils.modules.selfcheck.api:patron_information", ), + item_handlers=dict(item="rero_ils.modules.selfcheck.api:item_information"), circulation_handlers=dict( - checkout='rero_ils.modules.selfcheck.api:selfcheck_checkout', - checkin='rero_ils.modules.selfcheck.api:selfcheck_checkin', - renew='rero_ils.modules.selfcheck.api:selfcheck_renew', - ) + checkout="rero_ils.modules.selfcheck.api:selfcheck_checkout", + checkin="rero_ils.modules.selfcheck.api:selfcheck_checkin", + renew="rero_ils.modules.selfcheck.api:selfcheck_renew", + ), ) ) #: see invenio_sip2.models.SelfcheckMediaType SIP2_MEDIA_TYPES = dict( - docmaintype_book='BOOK', - docmaintype_article='MAGAZINE', - docmaintype_serial='MAGAZINE', - docmaintype_series='BOUND_JOURNAL', - docmaintype_audio='AUDIO', - docmaintype_movie_series='VIDEO', + docmaintype_book="BOOK", + docmaintype_article="MAGAZINE", + docmaintype_serial="MAGAZINE", + docmaintype_series="BOUND_JOURNAL", + docmaintype_audio="AUDIO", + docmaintype_movie_series="VIDEO", ) # SIP2 summary field mapper @@ -3716,14 +4069,14 @@ def _(x): # patron information message response. # see invenio_sip2.models.SelfcheckSummary for available summary fields SIP2_SUMMARY_FIELDS = { - LoanState.PENDING: 'unavailable_hold_items', - LoanState.ITEM_AT_DESK: 'hold_items', - LoanState.ITEM_IN_TRANSIT_FOR_PICKUP: 'unavailable_hold_items', - LoanState.ITEM_ON_LOAN: 'charged_items' + LoanState.PENDING: "unavailable_hold_items", + LoanState.ITEM_AT_DESK: "hold_items", + LoanState.ITEM_IN_TRANSIT_FOR_PICKUP: "unavailable_hold_items", + LoanState.ITEM_ON_LOAN: "charged_items", } # OAuth base template -OAUTH2SERVER_COVER_TEMPLATE = 'rero_ils/oauth/base.html' +OAUTH2SERVER_COVER_TEMPLATE = "rero_ils/oauth/base.html" # STOP WORDS # Disregarded articles for sorting processes @@ -3732,73 +4085,125 @@ def _(x): RERO_ILS_STOP_WORDS_ACTIVATE = True # PUNCTUATION RERO_ILS_STOP_WORDS_PUNCTUATION = [ - r'\[', r'\]', '"', ',', ';', ':', r'\.', '_', - r'\?', r'\!', r'\*', r'\+', '\n' + r"\[", + r"\]", + '"', + ",", + ";", + ":", + r"\.", + "_", + r"\?", + r"\!", + r"\*", + r"\+", + "\n", ] + # STOP WORDS BY LANGUAGE # Possibility to add a default configuration with a "default" entry. 
# This default configuration will be used if the language is not present RERO_ILS_STOP_WORDS = { - 'dan': ["de", "den", "det", "en", "et"], - 'dut': [ - "d'", "de", "den", "der", "des", "het", "'s", "'t", "een", - "eene", "eener", "eens", "ene", "'n"], - 'eng': ["a", "an", "the"], - 'epo': ["la", "l'", "unu"], - 'fre': ["de", "des", "du", "l'", "la", "le", "les", "un", "une"], - 'ger': [ - "das", "dem", "den", "der", "des", "die", - "ein", "eine", "einem", "einen", "einer", "eines"], - 'hun': ["a", "az", "egy"], - 'ita': [ - "gli", "i", "il", "l'", "la", "le", "li", "lo", - "un", "un'", "una", "uno"], - 'nor': ["de", "dei", "den", "det", "ei", "en", "et"], - 'por': ["a", "as", "o", "os", "um", "uma", "umas", "uns"], - 'spa': ["el", "la", "las", "lo", "los", "un", "una", "unas", "unos"], - 'swe': ["de", "den", "det", "en", "ett"] + "dan": ["de", "den", "det", "en", "et"], + "dut": [ + "d'", + "de", + "den", + "der", + "des", + "het", + "'s", + "'t", + "een", + "eene", + "eener", + "eens", + "ene", + "'n", + ], + "eng": ["a", "an", "the"], + "epo": ["la", "l'", "unu"], + "fre": ["de", "des", "du", "l'", "la", "le", "les", "un", "une"], + "ger": [ + "das", + "dem", + "den", + "der", + "des", + "die", + "ein", + "eine", + "einem", + "einen", + "einer", + "eines", + ], + "hun": ["a", "az", "egy"], + "ita": ["gli", "i", "il", "l'", "la", "le", "li", "lo", "un", "un'", "una", "uno"], + "nor": ["de", "dei", "den", "det", "ei", "en", "et"], + "por": ["a", "as", "o", "os", "um", "uma", "umas", "uns"], + "spa": ["el", "la", "las", "lo", "los", "un", "una", "unas", "unos"], + "swe": ["de", "den", "det", "en", "ett"], } # LANGUAGE MAPPING # ================ -RERO_ILS_LANGUAGE_MAPPING = { - 'dum': 'dut' # neerlandais -} +RERO_ILS_LANGUAGE_MAPPING = {"dum": "dut"} # neerlandais # EXPORT MAPPING # ================ RERO_ILS_EXPORT_MAPPER = { - 'ris': { - 'doctype_mapping': { - 'BOOK': lambda m, s: (m == 'docmaintype_book' - and s not in ['docsubtype_manuscript', - 'docsubtype_thesis', - 'docsubtype_e-book'] - ) or m in ['docmaintype_children', - 'docmaintype_comic', - 'docmaintype_leaf'], - 'ART': lambda m, s: m == 'docmaintype_image' - or s == 'docsubtype_kamishibai', - 'JOUR': lambda m, s: m == 'docmaintype_article', - 'MUSIC': lambda m, s: m == 'docmaintype_audio' - and s == 'docsubtype_music', - 'EBOOK': lambda m, s: m == 'docmaintype_book' - and s == 'docsubtype_e-book', - 'MANSCPT': lambda m, s: m == 'docmaintype_book' - and s == 'docsubtype_manuscript', - 'THES': lambda m, s: m == 'docmaintype_book' - and s == 'docsubtype_thesis', - 'SOUND': lambda m, s: m == 'docmaintype_audio' - and not s == 'docsubtype_music', - 'MAP': lambda m, s: m == 'docmaintype_map', - 'VIDEO': lambda m, s: m == 'docmaintype_movie_series', - 'JFULL': lambda m, s: m == 'docmaintype_serial', - 'SER': lambda m, s: m == 'docmaintype_series', - }, - 'export_fields': [ - 'TY', 'ID', 'TI', 'T2', 'AU', 'A2', 'DA', 'SP', 'EP', 'CY', 'LA', - 'PB', 'SN', 'UR', 'KW', 'ET', 'DO', 'VL', 'IS', 'PP', 'Y1', 'PY' - ] + "ris": { + "doctype_mapping": { + "BOOK": lambda m, s: ( + m == "docmaintype_book" + and s + not in [ + "docsubtype_manuscript", + "docsubtype_thesis", + "docsubtype_e-book", + ] + ) + or m in ["docmaintype_children", "docmaintype_comic", "docmaintype_leaf"], + "ART": lambda m, s: m == "docmaintype_image" + or s == "docsubtype_kamishibai", + "JOUR": lambda m, s: m == "docmaintype_article", + "MUSIC": lambda m, s: m == "docmaintype_audio" and s == "docsubtype_music", + "EBOOK": lambda m, s: m == "docmaintype_book" and s == 
"docsubtype_e-book", + "MANSCPT": lambda m, s: m == "docmaintype_book" + and s == "docsubtype_manuscript", + "THES": lambda m, s: m == "docmaintype_book" and s == "docsubtype_thesis", + "SOUND": lambda m, s: m == "docmaintype_audio" + and not s == "docsubtype_music", + "MAP": lambda m, s: m == "docmaintype_map", + "VIDEO": lambda m, s: m == "docmaintype_movie_series", + "JFULL": lambda m, s: m == "docmaintype_serial", + "SER": lambda m, s: m == "docmaintype_series", + }, + "export_fields": [ + "TY", + "ID", + "TI", + "T2", + "AU", + "A2", + "DA", + "SP", + "EP", + "CY", + "LA", + "PB", + "SN", + "UR", + "KW", + "ET", + "DO", + "VL", + "IS", + "PP", + "Y1", + "PY", + ], } } @@ -3806,8 +4211,9 @@ def _(x): # ============================== RERO_ILS_PASSWORD_MIN_LENGTH = 8 RERO_ILS_PASSWORD_SPECIAL_CHAR = False -RERO_ILS_PASSWORD_GENERATOR = 'rero_ils.modules.utils:password_generator' -RERO_ILS_PASSWORD_VALIDATOR = 'rero_ils.modules.utils:password_validator' +RERO_ILS_PASSWORD_GENERATOR = "rero_ils.modules.utils:password_generator" +RERO_ILS_PASSWORD_VALIDATOR = "rero_ils.modules.utils:password_validator" + # ADVANCED SEARCH CONFIG # ====================== @@ -3817,176 +4223,140 @@ def search_type(field): :param: field: the field key. :return: a list of options. """ - output = []; + output = [] if field not in [ - 'canton', 'country', 'rdaCarrierType', 'rdaContentType', - 'rdaMediaType' + "canton", + "country", + "rdaCarrierType", + "rdaContentType", + "rdaMediaType", ]: - output.append({'label': _('contains'), 'value': 'contains'}) - if field not in ['identifiedBy', 'isbn', 'issn']: - output.append({'label': _('phrase'), 'value': 'phrase'}) + output.append({"label": _("contains"), "value": "contains"}) + if field not in ["identifiedBy", "isbn", "issn"]: + output.append({"label": _("phrase"), "value": "phrase"}) return output + RERO_ILS_APP_ADVANCED_SEARCH_CONFIG = [ { - 'label': _('Title'), - 'value': 'title', - 'field': 'title.*', - 'options': { - 'search_type': search_type('title') - } + "label": _("Title"), + "value": "title", + "field": "title.*", + "options": {"search_type": search_type("title")}, }, { - 'label': _('Responsibility statement'), - 'value': 'responsibilityStatement', - 'field': 'responsibilityStatement.value', - 'options': { - 'search_type': search_type('responsibilityStatement') - } + "label": _("Responsibility statement"), + "value": "responsibilityStatement", + "field": "responsibilityStatement.value", + "options": {"search_type": search_type("responsibilityStatement")}, }, { - 'label': _('Contribution'), - 'value': 'contribution', - 'field': 'contribution.entity.*', - 'options': { - 'search_type': search_type('contribution') - } + "label": _("Contribution"), + "value": "contribution", + "field": "contribution.entity.*", + "options": {"search_type": search_type("contribution")}, }, { - 'label': _('Country'), - 'value': 'country', - 'field': 'provisionActivity.place.country', - 'options': { - 'search_type': search_type('country') - } + "label": _("Country"), + "value": "country", + "field": "provisionActivity.place.country", + "options": {"search_type": search_type("country")}, }, { - 'label': _('Canton'), - 'value': 'canton', - 'field': 'provisionActivity.place.canton', - 'options': { - 'search_type': search_type('canton') - } + "label": _("Canton"), + "value": "canton", + "field": "provisionActivity.place.canton", + "options": {"search_type": search_type("canton")}, }, { - 'label': _('Provision activity statement'), - 'value': 'provisionActivityStatement', - 'field': 
'provisionActivity._text.value', - 'options': { - 'search_type': search_type('provisionActivityStatement') - } + "label": _("Provision activity statement"), + "value": "provisionActivityStatement", + "field": "provisionActivity._text.value", + "options": {"search_type": search_type("provisionActivityStatement")}, }, { - 'label': _('Series statement'), - 'value': 'seriesStatement', - 'field': 'seriesStatement.*', - 'options': { - 'search_type': search_type('seriesStatement') - } + "label": _("Series statement"), + "value": "seriesStatement", + "field": "seriesStatement.*", + "options": {"search_type": search_type("seriesStatement")}, }, { - 'label': _('Identifier'), - 'value': 'identifiedBy', - 'field': 'identifiedBy.value', - 'options': { - 'search_type': search_type('identifiedBy') - } + "label": _("Identifier"), + "value": "identifiedBy", + "field": "identifiedBy.value", + "options": {"search_type": search_type("identifiedBy")}, }, { - 'label': _('ISBN'), - 'value': 'isbn', - 'field': 'isbn', - 'options': { - 'search_type': search_type('isbn') - } + "label": _("ISBN"), + "value": "isbn", + "field": "isbn", + "options": {"search_type": search_type("isbn")}, }, { - 'label': _('ISSN'), - 'value': 'issn', - 'field': 'issn', - 'options': { - 'search_type': search_type('issn') - } + "label": _("ISSN"), + "value": "issn", + "field": "issn", + "options": {"search_type": search_type("issn")}, }, { - 'label': _('Genre, form'), - 'value': 'genreForm', - 'field': 'genreForm.entity.*', - 'options': { - 'search_type': search_type('genreForm') - } + "label": _("Genre, form"), + "value": "genreForm", + "field": "genreForm.entity.*", + "options": {"search_type": search_type("genreForm")}, }, { - 'label': _('Subject'), - 'value': 'subjects', - 'field': 'subjects.entity.*', - 'options': { - 'search_type': search_type('subjects') - } + "label": _("Subject"), + "value": "subjects", + "field": "subjects.entity.*", + "options": {"search_type": search_type("subjects")}, }, { - 'label': _('Call number'), - 'value': 'callNumber', - 'field': 'call_numbers', - 'options': { - 'search_type': search_type('callNumber') - } + "label": _("Call number"), + "value": "callNumber", + "field": "call_numbers", + "options": {"search_type": search_type("callNumber")}, }, { - 'label': _('Local fields (document)'), - 'value': 'documentLocalFields', - 'field': 'local_fields.*', - 'options': { - 'search_type': search_type('documentLocalFields') - } + "label": _("Local fields (document)"), + "value": "documentLocalFields", + "field": "local_fields.*", + "options": {"search_type": search_type("documentLocalFields")}, }, { - 'label': _('Local fields (holdings)'), - 'value': 'holdingsLocalFields', - 'field': 'holdings.local_fields.*', - 'options': { - 'search_type': search_type('holdingsLocalFields') - } + "label": _("Local fields (holdings)"), + "value": "holdingsLocalFields", + "field": "holdings.local_fields.*", + "options": {"search_type": search_type("holdingsLocalFields")}, }, { - 'label': _('Local fields (items)'), - 'value': 'itemLocalFields', - 'field': 'holdings.items.local_fields.*', - 'options': { - 'search_type': search_type('itemLocalFields') - } + "label": _("Local fields (items)"), + "value": "itemLocalFields", + "field": "holdings.items.local_fields.*", + "options": {"search_type": search_type("itemLocalFields")}, }, { - 'label': _('Classification'), - 'value': 'classification', - 'field': 'classification.*', - 'options': { - 'search_type': search_type('classification') - } + "label": _("Classification"), + "value": 
"classification", + "field": "classification.*", + "options": {"search_type": search_type("classification")}, }, { - 'label': _('RDA content type'), - 'value': 'rdaContentType', - 'field': 'contentMediaCarrier.contentType', - 'options': { - 'search_type': search_type('rdaContentType') - } + "label": _("RDA content type"), + "value": "rdaContentType", + "field": "contentMediaCarrier.contentType", + "options": {"search_type": search_type("rdaContentType")}, }, { - 'label': _('RDA media type'), - 'value': 'rdaMediaType', - 'field': 'contentMediaCarrier.mediaType', - 'options': { - 'search_type': search_type('rdaMediaType') - } + "label": _("RDA media type"), + "value": "rdaMediaType", + "field": "contentMediaCarrier.mediaType", + "options": {"search_type": search_type("rdaMediaType")}, }, { - 'label': _('RDA carrier type'), - 'value': 'rdaCarrierType', - 'field': 'contentMediaCarrier.carrierType', - 'options': { - 'search_type': search_type('rdaCarrierType') - } + "label": _("RDA carrier type"), + "value": "rdaCarrierType", + "field": "contentMediaCarrier.carrierType", + "options": {"search_type": search_type("rdaCarrierType")}, }, ] @@ -3994,9 +4364,7 @@ def search_type(field): # ========================= RERO_ILS_APP_BABELTHEQUE_ENABLED_VIEWS = [] -FILES_REST_STORAGE_CLASS_LIST = { - "L": "Local" -} +FILES_REST_STORAGE_CLASS_LIST = {"L": "Local"} MAX_CONTENT_LENGTH = 500 * 1024 * 1024 @@ -4005,4 +4373,6 @@ def search_type(field): RECORDS_REFRESOLVER_STORE = "rero_ils.modules.utils.refresolver_store" RERO_FILES_RECORD_SERVICE_CONFIG = "rero_ils.modules.files.services.RecordServiceConfig" -RERO_FILES_RECORD_FILE_SERVICE_CONFIG = "rero_ils.modules.files.services.RecordFileServiceConfig" +RERO_FILES_RECORD_FILE_SERVICE_CONFIG = ( + "rero_ils.modules.files.services.RecordFileServiceConfig" +) diff --git a/rero_ils/dojson/cli.py b/rero_ils/dojson/cli.py index aae1cf1e5c..05dc748030 100644 --- a/rero_ils/dojson/cli.py +++ b/rero_ils/dojson/cli.py @@ -23,12 +23,13 @@ from flask import current_app from werkzeug.local import LocalProxy -_datastore = LocalProxy(lambda: current_app.extensions['security'].datastore) +_datastore = LocalProxy(lambda: current_app.extensions["security"].datastore) -@click.command('reverse') +@click.command("reverse") def reverse(): """Reverse the order of the data.""" + def processor(iterator): items = [] for item in iterator: @@ -39,10 +40,14 @@ def processor(iterator): return processor -@click.command('head') -@click.argument('max', type=click.INT,) +@click.command("head") +@click.argument( + "max", + type=click.INT, +) def head(max): """Take only the first max items.""" + def processor(iterator): n = 0 for item in iterator: @@ -55,5 +60,5 @@ def processor(iterator): def pretty_json_dump(iterator): - """Dump JSON from iteraror.""" + """Dump JSON from iterator.""" return json.dumps(list(iterator), indent=2) diff --git a/rero_ils/dojson/utils.py b/rero_ils/dojson/utils.py index d46faf49c9..4234a0c289 100644 --- a/rero_ils/dojson/utils.py +++ b/rero_ils/dojson/utils.py @@ -33,301 +33,340 @@ from pkg_resources import resource_string _UNIMARC_LANGUAGES_SCRIPTS = { - 'ba': 'latn', # Latin - 'ca': 'cyrl', # Cyrillic - 'da': 'jpan', # Japanese - undefined writing - 'db': 'hani', # Japanese - Kanji - 'dc': 'hrkt', # Japanese - Kana - 'ea': 'hani', # Chinese characters (Chinese, Japanese, Korean) - 'fa': 'arab', # Arabic - 'ga': 'grek', # Greek - 'ha': 'hebr', # Hebrew - 'ia': 'thai', # Thai - 'ja': 'deva', # devanagari - 'ka': 'kore', # Korean - 'la': 'taml', # Tamil - 'ma': 
'geor', # Georgian - 'mb': 'armn', # Armenian - 'zz': 'zyyy' # other + "ba": "latn", # Latin + "ca": "cyrl", # Cyrillic + "da": "jpan", # Japanese - undefined writing + "db": "hani", # Japanese - Kanji + "dc": "hrkt", # Japanese - Kana + "ea": "hani", # Chinese characters (Chinese, Japanese, Korean) + "fa": "arab", # Arabic + "ga": "grek", # Greek + "ha": "hebr", # Hebrew + "ia": "thai", # Thai + "ja": "deva", # devanagari + "ka": "kore", # Korean + "la": "taml", # Tamil + "ma": "geor", # Georgian + "mb": "armn", # Armenian + "zz": "zyyy", # other } _LANGUAGES_SCRIPTS = { - 'armn': ('arm', ), - 'arab': ('ara', 'per'), - 'cyrl': ('bel', 'chu', 'mac', 'rus', 'srp', 'ukr'), - 'deva': ('awa', 'bho', 'bra', 'doi', 'hin', 'kas', 'kok', 'mag', 'mai', - 'mar', 'mun', 'nep', 'pli', 'pra', 'raj', 'san', 'sat', 'snd'), - 'geor': ('geo', ), - 'grek': ('grc', 'gre'), - 'hani': ('chi', 'jpn'), - 'hebr': ('heb', 'lad', 'yid'), - 'hrkt': ('jpn', ), - 'jpan': ('jpn', ), - 'kore': ('kor', ), - 'taml': ('tam', ), - 'thai': ('tha', ), - 'zyyy': ('chi', ) + "armn": ("arm",), + "arab": ("ara", "per"), + "cyrl": ("bel", "chu", "mac", "rus", "srp", "ukr"), + "deva": ( + "awa", + "bho", + "bra", + "doi", + "hin", + "kas", + "kok", + "mag", + "mai", + "mar", + "mun", + "nep", + "pli", + "pra", + "raj", + "san", + "sat", + "snd", + ), + "geor": ("geo",), + "grek": ("grc", "gre"), + "hani": ("chi", "jpn"), + "hebr": ("heb", "lad", "yid"), + "hrkt": ("jpn",), + "jpan": ("jpn",), + "kore": ("kor",), + "taml": ("tam",), + "thai": ("tha",), + "zyyy": ("chi",), } -_SCRIPT_PER_LANG_ASIA = { - 'jpn': 'jpan', - 'kor': 'kore', - 'chi': 'hani' -} +_SCRIPT_PER_LANG_ASIA = {"jpn": "jpan", "kor": "kore", "chi": "hani"} _SCRIPT_PER_LANG_NOT_ASIA = { - 'arm': 'armn', - 'geo': 'geor', - 'gre': 'grek', - 'grc': 'grek', - 'ara': 'arab', - 'per': 'arab', - 'bel': 'cyrl', - 'rus': 'cyrl', - 'mac': 'cyrl', - 'srp': 'cyrl', - 'tha': 'thai', - 'ukr': 'cyrl', - 'chu': 'cyrl', - 'yid': 'hebr', - 'heb': 'hebr', - 'lad': 'hebr', - 'chi': 'hani' + "arm": "armn", + "geo": "geor", + "gre": "grek", + "grc": "grek", + "ara": "arab", + "per": "arab", + "bel": "cyrl", + "rus": "cyrl", + "mac": "cyrl", + "srp": "cyrl", + "tha": "thai", + "ukr": "cyrl", + "chu": "cyrl", + "yid": "hebr", + "heb": "hebr", + "lad": "hebr", + "chi": "hani", } _SCRIPT_PER_CODE = { - '(S': 'grek', - '(3': 'arab', - '(B': 'latn', - '(N': 'cyrl', - '(2': 'hebr' + "(S": "grek", + "(3": "arab", + "(B": "latn", + "(N": "cyrl", + "(2": "hebr", } _ILLUSTRATIVE_CONTENT_REGEXP = { - 'illustrations': - re.compile( - r'ill?(\.|\s|:|,|;|s\.|us.*)|ill$|iil|^il$|^il(\.)|' - r'fig(\.|\s|,|ur|s)|fig$|abb(\.|\s|,|ild)|abb$|bild|zeichn|' - r'front(\.|is|esp|\s|,|s)|front$|dessin', - re.IGNORECASE), - 'maps': - re.compile( - r'cartes?|cartogra|cartin|cart\.|carta(\s|s)|carta$|maps?|kart', - re.IGNORECASE), - 'portraits': - re.compile(r'port(\.|r|\s|s)|portr$|ritr', re.IGNORECASE), - 'graphs': - re.compile(r'gra(ph|f)(\.)|^gra(ph|f)|\sgra(ph|f)|diag', - re.IGNORECASE), - 'photographs': - re.compile(r'(f|ph)oto(g|s|\s|,|typ|\.)|(f|ph)oto^', re.IGNORECASE), - 'facsimiles': re.compile(r'fa(c|k)', re.IGNORECASE), - 'coats of arms': re.compile(r'armoirie|arms|wappe|stemm', re.IGNORECASE), - 'genealogical tables': re.compile(r'genea|généa', re.IGNORECASE), - 'plans': re.compile(r'plan[^c]|plan$|piant', re.IGNORECASE), - 'forms': re.compile(r'form[^a|e]|modul', re.IGNORECASE), - 'illuminations': - re.compile(r'enlum|illum|miniatur|buchmale', re.IGNORECASE), - 'samples': 
re.compile(r'sample|échant|muster|campion', re.IGNORECASE) + "illustrations": re.compile( + r"ill?(\.|\s|:|,|;|s\.|us.*)|ill$|iil|^il$|^il(\.)|" + r"fig(\.|\s|,|ur|s)|fig$|abb(\.|\s|,|ild)|abb$|bild|zeichn|" + r"front(\.|is|esp|\s|,|s)|front$|dessin", + re.IGNORECASE, + ), + "maps": re.compile( + r"cartes?|cartogra|cartin|cart\.|carta(\s|s)|carta$|maps?|kart", re.IGNORECASE + ), + "portraits": re.compile(r"port(\.|r|\s|s)|portr$|ritr", re.IGNORECASE), + "graphs": re.compile(r"gra(ph|f)(\.)|^gra(ph|f)|\sgra(ph|f)|diag", re.IGNORECASE), + "photographs": re.compile(r"(f|ph)oto(g|s|\s|,|typ|\.)|(f|ph)oto^", re.IGNORECASE), + "facsimiles": re.compile(r"fa(c|k)", re.IGNORECASE), + "coats of arms": re.compile(r"armoirie|arms|wappe|stemm", re.IGNORECASE), + "genealogical tables": re.compile(r"genea|généa", re.IGNORECASE), + "plans": re.compile(r"plan[^c]|plan$|piant", re.IGNORECASE), + "forms": re.compile(r"form[^a|e]|modul", re.IGNORECASE), + "illuminations": re.compile(r"enlum|illum|miniatur|buchmale", re.IGNORECASE), + "samples": re.compile(r"sample|échant|muster|campion", re.IGNORECASE), } _PRODUCTION_METHOD_FROM_EXTENT_AND_PHYSICAL_DETAILS = { - 'rdapm:1001': re.compile(r'blueline', re.IGNORECASE), - 'rdapm:1002': re.compile(r'cyano|blaudr|bluepr', re.IGNORECASE), - 'rdapm:1003': re.compile(r'collot|lichtdr|(ph|f)otot', re.IGNORECASE), - 'rdapm:1004': re.compile(r'daguerr', re.IGNORECASE), - 'rdapm:1005': re.compile(r'stich|engrav|grav', re.IGNORECASE), - 'rdapm:1006': re.compile(r'eauforte|radier|etch', re.IGNORECASE), - 'rdapm:1007': re.compile(r'litho', re.IGNORECASE), - 'rdapm:1008': re.compile(r'(ph|f)oto[ck]o', re.IGNORECASE), - 'rdapm:1009': re.compile(r'photograv|fotograv|photoengrav', re.IGNORECASE), + "rdapm:1001": re.compile(r"blueline", re.IGNORECASE), + "rdapm:1002": re.compile(r"cyano|blaudr|bluepr", re.IGNORECASE), + "rdapm:1003": re.compile(r"collot|lichtdr|(ph|f)otot", re.IGNORECASE), + "rdapm:1004": re.compile(r"daguerr", re.IGNORECASE), + "rdapm:1005": re.compile(r"stich|engrav|grav", re.IGNORECASE), + "rdapm:1006": re.compile(r"eauforte|radier|etch", re.IGNORECASE), + "rdapm:1007": re.compile(r"litho", re.IGNORECASE), + "rdapm:1008": re.compile(r"(ph|f)oto[ck]o", re.IGNORECASE), + "rdapm:1009": re.compile(r"photograv|fotograv|photoengrav", re.IGNORECASE), # The rdapm:1010 extraction is done only from PHYSICAL_DETAILS by the code # 'rdapm:1010': r'impr|druck|print|offset|s[ée]riegr' - 'rdapm:1011': re.compile(r'white print', re.IGNORECASE), - 'rdapm:1012': re.compile(r'grav.+?sur bois|holzschn|woodc', re.IGNORECASE), - 'rdapm:1014': re.compile(r'hélio|helio', re.IGNORECASE), - 'rdapm:1015': re.compile(r'brûl|einbren|burn', re.IGNORECASE), - 'rdapm:1016': re.compile(r'inscript|inscrib', re.IGNORECASE), - 'rdapm:1017': re.compile(r'estamp|stempel|stamping|lino', re.IGNORECASE), - 'rdapm:1018': re.compile(r'emboss|präg', re.IGNORECASE), - 'rdapm:1019': re.compile(r'point rigide|solid dot', re.IGNORECASE), - 'rdapm:1020': re.compile(r'thermog|schwell|swell|minolta', re.IGNORECASE), - 'rdapm:1021': re.compile(r'thermof|va[ck]uum|moul.+?vide', re.IGNORECASE) + "rdapm:1011": re.compile(r"white print", re.IGNORECASE), + "rdapm:1012": re.compile(r"grav.+?sur bois|holzschn|woodc", re.IGNORECASE), + "rdapm:1014": re.compile(r"hélio|helio", re.IGNORECASE), + "rdapm:1015": re.compile(r"brûl|einbren|burn", re.IGNORECASE), + "rdapm:1016": re.compile(r"inscript|inscrib", re.IGNORECASE), + "rdapm:1017": re.compile(r"estamp|stempel|stamping|lino", re.IGNORECASE), + "rdapm:1018": 
re.compile(r"emboss|präg", re.IGNORECASE), + "rdapm:1019": re.compile(r"point rigide|solid dot", re.IGNORECASE), + "rdapm:1020": re.compile(r"thermog|schwell|swell|minolta", re.IGNORECASE), + "rdapm:1021": re.compile(r"thermof|va[ck]uum|moul.+?vide", re.IGNORECASE), } _COLOR_CONTENT_REGEXP = { # monochrom - 'rdacc:1002': re.compile( - r'noir|black|schwarz|nero|n\.\set|schw|b\&w|' - r'b\/n|s\/w|^n\set\sb|\sn\set\sb', - re.IGNORECASE - ), - + "rdacc:1002": re.compile( + r"noir|black|schwarz|nero|n\.\set|schw|b\&w|" r"b\/n|s\/w|^n\set\sb|\sn\set\sb", + re.IGNORECASE, + ), # polychrome - 'rdacc:1003': re.compile( - r'cou?l(\.|,|eur|ou?r|\s)|cou?l$|farb', - re.IGNORECASE - ), + "rdacc:1003": re.compile(r"cou?l(\.|,|eur|ou?r|\s)|cou?l$|farb", re.IGNORECASE), } _CANTON = [ - 'ag', 'ai', 'ar', 'be', 'bl', 'bs', 'fr', 'ge', 'gl', 'gr', 'ju', 'lu', - 'ne', 'nw', 'ow', 'sg', 'sh', 'so', 'sz', 'tg', 'ti', 'ur', 'vd', 'vs', - 'zg', 'zh' + "ag", + "ai", + "ar", + "be", + "bl", + "bs", + "fr", + "ge", + "gl", + "gr", + "ju", + "lu", + "ne", + "nw", + "ow", + "sg", + "sh", + "so", + "sz", + "tg", + "ti", + "ur", + "vd", + "vs", + "zg", + "zh", ] _OBSOLETE_COUNTRIES_MAPPING = { - 'cn': 'xxc', - 'err': 'er', - 'lir': 'li', - 'lvr': 'lv', - 'uk': 'xxk', - 'unr': 'un', - 'us': 'xxu', - 'ur': 'xxr', - 'ys': 'ye' + "cn": "xxc", + "err": "er", + "lir": "li", + "lvr": "lv", + "uk": "xxk", + "unr": "un", + "us": "xxu", + "ur": "xxr", + "ys": "ye", } # field 336 mapping _CONTENT_TYPE_MAPPING = { - 'cri': 'rdaco:1002', - 'crm': 'rdaco:1003', - 'crt': 'rdaco:1004', - 'crn': 'rdaco:1005', - 'cod': 'rdaco:1007', - 'crd': 'rdaco:1001', - 'crf': 'rdaco:1006', - 'tdi': 'rdaco:1023', - 'tdm': 'rdaco:1022', - 'sti': 'rdaco:1014', - 'tci': 'rdaco:1015', - 'prm': 'rdaco:1011', - 'ntv': 'rdaco:1009', - 'tcn': 'rdaco:1017', - 'tdf': 'rdaco:1021', - 'tcf': 'rdaco:1019', - 'ntm': 'rdaco:1010', - 'tcm': 'rdaco:1016', - 'cop': 'rdaco:1008', - 'snd': 'rdaco:1012', - 'txt': 'rdaco:1020', - 'tct': 'rdaco:1018', - 'spw': 'rdaco:1013', - 'xxx': 'other' + "cri": "rdaco:1002", + "crm": "rdaco:1003", + "crt": "rdaco:1004", + "crn": "rdaco:1005", + "cod": "rdaco:1007", + "crd": "rdaco:1001", + "crf": "rdaco:1006", + "tdi": "rdaco:1023", + "tdm": "rdaco:1022", + "sti": "rdaco:1014", + "tci": "rdaco:1015", + "prm": "rdaco:1011", + "ntv": "rdaco:1009", + "tcn": "rdaco:1017", + "tdf": "rdaco:1021", + "tcf": "rdaco:1019", + "ntm": "rdaco:1010", + "tcm": "rdaco:1016", + "cop": "rdaco:1008", + "snd": "rdaco:1012", + "txt": "rdaco:1020", + "tct": "rdaco:1018", + "spw": "rdaco:1013", + "xxx": "other", } # field 337 $b and field 338 (first char of $b) mapping _MEDIA_TYPE_MAPPING = { - 's': 'rdamt:1001', - 'h': 'rdamt:1002', - 'c': 'rdamt:1003', - 'p': 'rdamt:1004', - 'g': 'rdamt:1005', - 'm': 'rdamt:1005', # only in 338 (first char of $b) - 'e': 'rdamt:1006', - 'n': 'rdamt:1007', - 'v': 'rdamt:1008', - 'x': 'other', - 'z': 'other' # only in 338 (first char of $b) + "s": "rdamt:1001", + "h": "rdamt:1002", + "c": "rdamt:1003", + "p": "rdamt:1004", + "g": "rdamt:1005", + "m": "rdamt:1005", # only in 338 (first char of $b) + "e": "rdamt:1006", + "n": "rdamt:1007", + "v": "rdamt:1008", + "x": "other", + "z": "other", # only in 338 (first char of $b) } # field 338 mapping _CARRIER_TYPE_MAPPING = { - 'zu': 'unspecified', - 'sg': 'rdact:1002', - 'se': 'rdact:1003', - 'sd': 'rdact:1004', - 'si': 'rdact:1005', - 'sq': 'rdact:1006', - 'ss': 'rdact:1007', - 'st': 'rdact:1008', - 'sw': 'rdact:1071', - 'sz': 'other', - 'ha': 'rdact:1021', - 'he': 'rdact:1022', - 
'hf': 'rdact:1023', - 'hb': 'rdact:1024', - 'hc': 'rdact:1025', - 'hd': 'rdact:1026', - 'hh': 'rdact:1027', - 'hg': 'rdact:1028', - 'hj': 'rdact:1056', - 'hz': 'other', - 'ck': 'rdact:1011', - 'cb': 'rdact:1012', - 'cd': 'rdact:1013', - 'ce': 'rdact:1014', - 'ca': 'rdact:1015', - 'cf': 'rdact:1016', - 'ch': 'rdact:1017', - 'cr': 'rdact:1018', - 'cz': 'other', - 'pp': 'rdact:1030', - 'pz': 'other', - 'mc': 'rdact:1032', - 'mf': 'rdact:1033', - 'mr': 'rdact:1034', - 'gd': 'rdact:1035', - 'gf': 'rdact:1036', - 'gc': 'rdact:1037', - 'gt': 'rdact:1039', - 'gs': 'rdact:1040', - 'mo': 'rdact:1069', - 'mz': 'other', - 'eh': 'rdact:1042', - 'es': 'rdact:1043', - 'ez': 'other', - 'no': 'rdact:1045', - 'nn': 'rdact:1046', - 'na': 'rdact:1047', - 'nb': 'rdact:1048', - 'nc': 'rdact:1049', - 'nr': 'rdact:1059', - 'nz': 'other', - 'vc': 'rdact:1051', - 'vf': 'rdact:1052', - 'vr': 'rdact:1053', - 'vd': 'rdact:1060', - 'vz': 'other' + "zu": "unspecified", + "sg": "rdact:1002", + "se": "rdact:1003", + "sd": "rdact:1004", + "si": "rdact:1005", + "sq": "rdact:1006", + "ss": "rdact:1007", + "st": "rdact:1008", + "sw": "rdact:1071", + "sz": "other", + "ha": "rdact:1021", + "he": "rdact:1022", + "hf": "rdact:1023", + "hb": "rdact:1024", + "hc": "rdact:1025", + "hd": "rdact:1026", + "hh": "rdact:1027", + "hg": "rdact:1028", + "hj": "rdact:1056", + "hz": "other", + "ck": "rdact:1011", + "cb": "rdact:1012", + "cd": "rdact:1013", + "ce": "rdact:1014", + "ca": "rdact:1015", + "cf": "rdact:1016", + "ch": "rdact:1017", + "cr": "rdact:1018", + "cz": "other", + "pp": "rdact:1030", + "pz": "other", + "mc": "rdact:1032", + "mf": "rdact:1033", + "mr": "rdact:1034", + "gd": "rdact:1035", + "gf": "rdact:1036", + "gc": "rdact:1037", + "gt": "rdact:1039", + "gs": "rdact:1040", + "mo": "rdact:1069", + "mz": "other", + "eh": "rdact:1042", + "es": "rdact:1043", + "ez": "other", + "no": "rdact:1045", + "nn": "rdact:1046", + "na": "rdact:1047", + "nb": "rdact:1048", + "nc": "rdact:1049", + "nr": "rdact:1059", + "nz": "other", + "vc": "rdact:1051", + "vf": "rdact:1052", + "vr": "rdact:1053", + "vd": "rdact:1060", + "vz": "other", } _ENCODING_LEVEL_MAPPING = { - ' ': 'Full level', - '1': 'Full level, material not examined', - '2': 'Less-than-full level, material not examined', - '3': 'Abbreviated level', - '4': 'Core level', - '5': 'Partial (preliminary) level', - '7': 'Minimal level', - '8': 'Prepublication level', - 'u': 'Unknown', - 'z': 'Not applicable' + " ": "Full level", + "1": "Full level, material not examined", + "2": "Less-than-full level, material not examined", + "3": "Abbreviated level", + "4": "Core level", + "5": "Partial (preliminary) level", + "7": "Minimal level", + "8": "Prepublication level", + "u": "Unknown", + "z": "Not applicable", } -_CONTRIBUTION_TAGS = ['100', '600', '610', '611', '630', '650', '651', - '655', '700', '701', '702', '703', '710', '711', '712'] +_CONTRIBUTION_TAGS = [ + "100", + "600", + "610", + "611", + "630", + "650", + "651", + "655", + "700", + "701", + "702", + "703", + "710", + "711", + "712", +] schema_in_bytes = resource_string( - 'rero_ils.jsonschemas', - 'common/languages-v0.0.1.json' + "rero_ils.jsonschemas", "common/languages-v0.0.1.json" ) -schema = jsonref.loads(schema_in_bytes.decode('utf8')) -_LANGUAGES = schema['language']['enum'] +schema = jsonref.loads(schema_in_bytes.decode("utf8")) +_LANGUAGES = schema["language"]["enum"] schema_in_bytes = resource_string( - 'rero_ils.jsonschemas', - 'common/countries-v0.0.1.json' + "rero_ils.jsonschemas", "common/countries-v0.0.1.json" ) 
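The RDA vocabulary tables above are consulted as plain dictionaries during MARC conversion, with "other" reserved for codes outside the vocabulary. A minimal sketch of that lookup pattern, using an excerpt of the mapping and invented sample codes (the resolve_carrier_type helper is illustrative, not part of this patch):

# Illustrative sketch: resolve a MARC 338 $b code against a carrier
# type table like _CARRIER_TYPE_MAPPING above, defaulting to "other".
CARRIER_TYPE_MAPPING = {"sd": "rdact:1004", "sz": "other"}  # excerpt


def resolve_carrier_type(code_338):
    """Map a 338 $b code to an RDA carrier type identifier."""
    return CARRIER_TYPE_MAPPING.get(code_338, "other")


assert resolve_carrier_type("sd") == "rdact:1004"  # known code
assert resolve_carrier_type("xx") == "other"  # unmapped code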
-schema = jsonref.loads(schema_in_bytes.decode('utf8')) -_COUNTRIES = schema['country']['enum'] +schema = jsonref.loads(schema_in_bytes.decode("utf8")) +_COUNTRIES = schema["country"]["enum"] -re_identified = re.compile(r'\((.*)\)(.*)') +re_identified = re.compile(r"\((.*)\)(.*)") def error_print(*args): """Error printing to sdtout.""" - msg = ''.join(str(arg) + '\t' for arg in args) + msg = "".join(str(arg) + "\t" for arg in args) msg.strip() click.echo(msg) sys.stdout.flush() @@ -346,17 +385,16 @@ def not_repetitive(bibid, reroid, key, value, subfield, default=None): """Get the first value if the value is a list or tuple.""" data = value.get(subfield, default) if isinstance(data, (list, tuple)): - error_print( - 'WARNING NOT REPETITIVE:', bibid, reroid, key, subfield, value) + error_print("WARNING NOT REPETITIVE:", bibid, reroid, key, subfield, value) data = data[0] return data def get_field_link_data(value): """Get field link data from subfield $6.""" - subfield_6 = value.get('6', '') - tag_link = subfield_6.split('-') - link = tag_link[1] if len(tag_link) == 2 else '' + subfield_6 = value.get("6", "") + tag_link = subfield_6.split("-") + link = tag_link[1] if len(tag_link) == 2 else "" return tag_link, link @@ -368,19 +406,19 @@ def get_field_items(value): return utils.iteritems(value) -def build_string_from_subfields(value, subfield_selection, separator=' '): +def build_string_from_subfields(value, subfield_selection, separator=" "): """Build a string parsing the selected subfields in order.""" items = get_field_items(value) - return separator.join([ - remove_special_characters(value) - for key, value in items if key in subfield_selection - ]) + return separator.join( + [ + remove_special_characters(value) + for key, value in items + if key in subfield_selection + ] + ) -def remove_trailing_punctuation( - data, - punctuation=',', - spaced_punctuation=':;/-'): +def remove_trailing_punctuation(data, punctuation=",", spaced_punctuation=":;/-"): """Remove trailing punctuation from data. :param data: string to process @@ -397,25 +435,23 @@ def remove_trailing_punctuation( """ # escape chars: .[]^- if punctuation: - punctuation = re.sub(r'([\.\[\]\^\\-])', r'\\\1', punctuation) + punctuation = re.sub(r"([\.\[\]\^\\-])", r"\\\1", punctuation) if spaced_punctuation: - spaced_punctuation = \ - re.sub(r'([\.\[\]\^\\-])', r'\\\1', spaced_punctuation) + spaced_punctuation = re.sub(r"([\.\[\]\^\\-])", r"\\\1", spaced_punctuation) return re.sub( - fr'([{punctuation}]|\s+[{spaced_punctuation}])$', - '', - data.rstrip()).rstrip() + rf"([{punctuation}]|\s+[{spaced_punctuation}])$", "", data.rstrip() + ).rstrip() -def remove_special_characters(value, chars=['\u0098', '\u009C']): +def remove_special_characters(value, chars=["\u0098", "\u009C"]): """Remove special characters from a string. :params value: string to clean. :returns: a cleaned string. 
""" for char in chars: - value = value.replace(char, '') + value = value.replace(char, "") return value @@ -437,8 +473,8 @@ def get_mef_link(bibid, reroid, entity_type, ids, key): if not ids: return try: - entity_types = current_app.config.get('RERO_ILS_ENTITY_TYPES', {}) - mef_config = current_app.config.get('RERO_ILS_MEF_CONFIG') + entity_types = current_app.config.get("RERO_ILS_ENTITY_TYPES", {}) + mef_config = current_app.config.get("RERO_ILS_MEF_CONFIG") # Try to get RERO_ILS_ENTITY_TYPES and RERO_ILS_MEF_CONFIG from current app # In the dojson cli is no current app and we have to get the value directly # from config.py @@ -446,16 +482,16 @@ def get_mef_link(bibid, reroid, entity_type, ids, key): from rero_ils.config import RERO_ILS_ENTITY_TYPES as entity_types from rero_ils.config import RERO_ILS_MEF_CONFIG as mef_config entity_type = entity_types.get(entity_type) - mef_url = mef_config.get(entity_type, {}).get('base_url') + mef_url = mef_config.get(entity_type, {}).get("base_url") if not mef_url: return - sources = mef_config.get(entity_type, {}).get('sources') + sources = mef_config.get(entity_type, {}).get("sources") has_no_de_101 = True for id_ in ids: # see if we have a $0 with (DE-101) if match := re_identified.match(id_): with contextlib.suppress(IndexError): - if match.group(1).lower() == 'de-101': + if match.group(1).lower() == "de-101": has_no_de_101 = False break for id_ in ids: @@ -463,44 +499,57 @@ def get_mef_link(bibid, reroid, entity_type, ids, key): match = re_identified.search(id_) else: match = re_identified.search(id_[0]) - if match and len(match.groups()) == 2 \ - and key[:3] in _CONTRIBUTION_TAGS: + if match and len(match.groups()) == 2 and key[:3] in _CONTRIBUTION_TAGS: match_type = match.group(1).lower() match_value = match.group(2) - if match_type == 'de-101': - match_type = 'gnd' - elif match_type == 'de-588' and has_no_de_101: - match_type = 'gnd' + if match_type == "de-101": + match_type = "gnd" + elif match_type == "de-588" and has_no_de_101: + match_type = "gnd" match_value = get_gnd_de_101(match_value) if match_type and match_type in sources: - url = f'{mef_url}/mef/latest/{match_type}:{match_value}' + url = f"{mef_url}/mef/latest/{match_type}:{match_value}" response = requests_retry_session().get(url) status_code = response.status_code total = 0 if status_code == requests.codes.ok: - if value := response.json().get(match_type, {}).get('pid'): + if value := response.json().get(match_type, {}).get("pid"): if match_value != value: - error_print(f'INFO GET MEF {entity_type}:', - bibid, reroid, key, id_, 'NEW', - f'({match_type.upper()}){value}') - return f'{mef_url}/{match_type}/{value}' - error_print('WARNING GET MEF CONTRIBUTION:', - bibid, reroid, key, id_, url, status_code, total) + error_print( + f"INFO GET MEF {entity_type}:", + bibid, + reroid, + key, + id_, + "NEW", + f"({match_type.upper()}){value}", + ) + return f"{mef_url}/{match_type}/{value}" + error_print( + "WARNING GET MEF CONTRIBUTION:", + bibid, + reroid, + key, + id_, + url, + status_code, + total, + ) # if we have a viaf id, look for the contributor in MEF elif match_type == "viaf": - url = f'{mef_url}/mef?q=viaf_pid:{match_value}' + url = f"{mef_url}/mef?q=viaf_pid:{match_value}" response = requests_retry_session().get(url) status_code = response.status_code if status_code == requests.codes.ok: resp = response.json() with contextlib.suppress(IndexError, KeyError): - mdata = resp['hits']['hits'][0]['metadata'] - for source in ['idref', 'gnd']: - if match_value := mdata.get(source, 
{}).get('pid'): + mdata = resp["hits"]["hits"][0]["metadata"] + for source in ["idref", "gnd"]: + if match_value := mdata.get(source, {}).get("pid"): match_type = source break elif match: - error_print('ERROR GET MEF CONTRIBUTION:', bibid, reroid, key, id_) + error_print("ERROR GET MEF CONTRIBUTION:", bibid, reroid, key, id_) def add_note(new_note, data): @@ -511,11 +560,11 @@ def add_note(new_note, data): :param data: the object data on which the new note will be added :type data: object """ - if new_note and new_note.get('label') and new_note.get('noteType'): - notes = data.get('note', []) + if new_note and new_note.get("label") and new_note.get("noteType"): + notes = data.get("note", []) if new_note not in notes: notes.append(new_note) - data['note'] = notes + data["note"] = notes def add_data_and_sort_list(key, new_data, data): @@ -555,9 +604,8 @@ def join_alternate_graphic_data(alt_gr_1, alt_gr_2, join_str): for idx, data in enumerate(alt_gr_1): new_data = deepcopy(data) with contextlib.suppress(Exception): - if str_to_join := alt_gr_2[idx]['value']: - new_data['value'] = \ - join_str.join((new_data['value'], str_to_join)) + if str_to_join := alt_gr_2[idx]["value"]: + new_data["value"] = join_str.join((new_data["value"], str_to_join)) new_alt_gr_data.append(new_data) return new_alt_gr_data @@ -577,14 +625,13 @@ def __init__(self): The regular expression patterns needed to extract book formats are build by this constructor. """ - self._format_values = \ - (1, 2, 4, 8, 12, 16, 18, 24, 32, 36, 48, 64, 72, 96, 128) + self._format_values = (1, 2, 4, 8, 12, 16, 18, 24, 32, 36, 48, 64, 72, 96, 128) self._book_format_code_and_regexp = {} self._specific_for_1248 = { - 1: 'plano', - 2: r'fol[i.\s°⁰)]|fol', - 4: 'quarto', - 8: 'octavo' + 1: "plano", + 2: r"fol[i.\s°⁰)]|fol", + 4: "quarto", + 8: "octavo", } def _buid_regexp(value): @@ -596,27 +643,27 @@ def _buid_regexp(value): :rtype: str """ # generic regexp valid for all values - regexp = \ - fr'(^|[^\d]){value}\s?[°⁰º]|in(-|-gr\.)*\s*{value}($|[^\d])' + regexp = rf"(^|[^\d]){value}\s?[°⁰º]|in(-|-gr\.)*\s*{value}($|[^\d])" # add specific value regexp if value in self._specific_for_1248: - regexp = '|'.join([regexp, self._specific_for_1248[value]]) + regexp = "|".join([regexp, self._specific_for_1248[value]]) else: - additional = fr'[^\d]{value}mo|^{value}mo' - regexp = '|'.join([regexp, additional]) - return f'({regexp})' + additional = rf"[^\d]{value}mo|^{value}mo" + regexp = "|".join([regexp, additional]) + return f"({regexp})" def _populate_regexp(): """Populate all the expression patterns.""" for value in self._format_values: self._book_format_code_and_regexp[value] = {} - format_code = 'in-plano' + format_code = "in-plano" if value > 1: # {value}ᵒ (U+1d52 MODIFIER LETTER SMALL O) - format_code = f'{value}ᵒ' - self._book_format_code_and_regexp[value]['code'] = format_code - self._book_format_code_and_regexp[value]['regexp'] = \ - re.compile(_buid_regexp(value), re.IGNORECASE) + format_code = f"{value}ᵒ" + self._book_format_code_and_regexp[value]["code"] = format_code + self._book_format_code_and_regexp[value]["regexp"] = re.compile( + _buid_regexp(value), re.IGNORECASE + ) _populate_regexp() @@ -630,10 +677,9 @@ def extract_book_formats_from(self, subfield_data): """ book_formats = [] for value in self._format_values: - regexp = self._book_format_code_and_regexp[value]['regexp'] + regexp = self._book_format_code_and_regexp[value]["regexp"] if regexp.search(subfield_data): - book_formats.append( - 
self._book_format_code_and_regexp[value]['code']) + book_formats.append(self._book_format_code_and_regexp[value]["code"]) return book_formats @@ -647,33 +693,29 @@ class ReroIlsOverdo(Overdo): _blob_record = None leader = None - record_type = '' # LDR 06 - bib_level = '?' # LDR 07 + record_type = "" # LDR 06 + bib_level = "?" # LDR 07 extract_description_subfield = None extract_series_statement_subfield = None def __init__(self, bases=None, entry_point_group=None): """Reroilsoverdo init.""" - super().__init__( - bases=bases, entry_point_group=entry_point_group) + super().__init__(bases=bases, entry_point_group=entry_point_group) def do(self, blob, ignore_missing=True, exception_handlers=None): """Translate blob values and instantiate new model instance.""" self._blob_record = blob - self.leader = blob.get('leader', '') + self.leader = blob.get("leader", "") if self.leader: self.record_type = self.leader[6] # LDR 06 self.bib_level = self.leader[7] # LDR 07 result = super().do( - blob, - ignore_missing=ignore_missing, - exception_handlers=exception_handlers + blob, ignore_missing=ignore_missing, exception_handlers=exception_handlers ) - if not result.get('provisionActivity'): + if not result.get("provisionActivity"): self.default_provision_activity(result) - error_print( - 'WARNING PROVISION ACTIVITY:', self.bib_id, self.rero_id) + error_print("WARNING PROVISION ACTIVITY:", self.bib_id, self.rero_id) return result @@ -681,57 +723,63 @@ def build_place(self): """Build place data for provisionActivity.""" place = {} if self.cantons: - place['canton'] = self.cantons[0] + place["canton"] = self.cantons[0] if self.country: - place['country'] = self.country + place["country"] = self.country if self.links_from_752: - place['identifiedBy'] = self.links_from_752[0] + place["identifiedBy"] = self.links_from_752[0] return place def default_provision_activity(self, result): """Create default provisionActivity.""" places = [] - publication = { - 'type': 'bf:Publication' - } + publication = {"type": "bf:Publication"} if place := self.build_place(): places.append(place) # parce le link skipping the fist (already used by build_place) for i in range(1, len(self.links_from_752)): - place = { - 'country': 'xx', - 'identifiedBy': self.links_from_752[i] - } + place = {"country": "xx", "identifiedBy": self.links_from_752[i]} places.append(place) if places: - publication['place'] = places - result['provisionActivity'] = [publication] + publication["place"] = places + result["provisionActivity"] = [publication] - if self.date_type_from_008 in ['q', 'n']: - result['provisionActivity'][0][ - 'note' - ] = 'Date(s) uncertain or unknown' + if self.date_type_from_008 in ["q", "n"]: + result["provisionActivity"][0]["note"] = "Date(s) uncertain or unknown" start_date = make_year(self.date1_from_008) if not start_date or start_date > 2050: - error_print('WARNING START DATE 008:', self.bib_id, - self.rero_id, self.date1_from_008) + error_print( + "WARNING START DATE 008:", + self.bib_id, + self.rero_id, + self.date1_from_008, + ) start_date = 2050 - result['provisionActivity'][0]['note'] = \ - 'Date not available and automatically set to 2050' - result['provisionActivity'][0]['startDate'] = start_date + result["provisionActivity"][0][ + "note" + ] = "Date not available and automatically set to 2050" + result["provisionActivity"][0]["startDate"] = start_date if end_date := make_year(self.date2_from_008): if end_date > 2050: - error_print('WARNING END DATE 008:', self.bib_id, - self.rero_id, self.date1_from_008) + error_print( 
+ "WARNING END DATE 008:", + self.bib_id, + self.rero_id, + self.date1_from_008, + ) else: - result['provisionActivity'][0]['endDate'] = end_date + result["provisionActivity"][0]["endDate"] = end_date if original_date := make_year(self.original_date_from_008): if original_date > 2050: - error_print('WARNING ORIGINAL DATE 008:', self.bib_id, - self.rero_id, self.original_date_from_008) + error_print( + "WARNING ORIGINAL DATE 008:", + self.bib_id, + self.rero_id, + self.original_date_from_008, + ) else: - result['provisionActivity'][0]['original_date'] = original_date + result["provisionActivity"][0]["original_date"] = original_date def get_fields(self, tag=None): """Get all fields having the given tag value.""" @@ -740,38 +788,39 @@ def get_fields(self, tag=None): for blob_key, blob_value in items: tag_value = blob_key[:3] if (tag_value == tag) or not tag: - field_data = {'tag': tag_value} + field_data = {"tag": tag_value} if len(blob_key) == 3: # if control field - field_data['data'] = blob_value.rstrip() + field_data["data"] = blob_value.rstrip() else: - field_data['ind1'] = blob_key[3:4] - field_data['ind2'] = blob_key[4:5] - field_data['subfields'] = blob_value + field_data["ind1"] = blob_key[3:4] + field_data["ind2"] = blob_key[4:5] + field_data["subfields"] = blob_value fields.append(field_data) return fields def get_control_field_data(self, field): """Get control fields data.""" field_data = None - if int(field['tag']) < 10: - field_data = field['data'] + if int(field["tag"]) < 10: + field_data = field["data"] else: - raise ValueError('control field expected (tag < 01x)') + raise ValueError("control field expected (tag < 01x)") return field_data def get_subfields(self, field, code=None): """Get all subfields having the given subfield code value.""" - if int(field['tag']) < 10: - raise ValueError('data field expected (tag >= 01x)') - items = get_field_items(field.get('subfields', {})) + if int(field["tag"]) < 10: + raise ValueError("data field expected (tag >= 01x)") + items = get_field_items(field.get("subfields", {})) return [ - subfield_data for subfield_code, subfield_data in items + subfield_data + for subfield_code, subfield_data in items if (subfield_code == code) or not code ] def build_value_with_alternate_graphic( - self, tag, code, label, index, link, - punct=None, spaced_punct=None): + self, tag, code, label, index, link, punct=None, spaced_punct=None + ): """ Build the data structure for alternate graphical representation. 
@@ -796,11 +845,11 @@ def build_value_with_alternate_graphic( } ] """ + def clean_punctuation(value, punct, spaced_punct): return remove_trailing_punctuation( - value, - punctuation=punct, - spaced_punctuation=spaced_punct) + value, punctuation=punct, spaced_punctuation=spaced_punct + ) # build_value_with_alternate_graphic starts here @@ -808,19 +857,22 @@ def clean_punctuation(value, punct, spaced_punct): value = clean_punctuation(label, punct, spaced_punct).strip() if value: value = remove_special_characters(value) - data = [{'value': value}] + data = [{"value": value}] else: - error_print('WARNING NO VALUE:', self.bib_id, self.rero_id, tag, - code, label) + error_print( + "WARNING NO VALUE:", self.bib_id, self.rero_id, tag, code, label + ) with contextlib.suppress(Exception): alt_gr = self.alternate_graphic[tag][link] - subfield = self.get_subfields(alt_gr['field'])[index] + subfield = self.get_subfields(alt_gr["field"])[index] value = clean_punctuation(subfield, punct, spaced_punct) if value: - data.append({ - 'value': value, - 'language': self.get_language_script(alt_gr['script']) - }) + data.append( + { + "value": value, + "language": self.get_language_script(alt_gr["script"]), + } + ) return data or None def extract_description_from_marc_field(self, key, value, data): @@ -849,55 +901,48 @@ def extract_description_from_marc_field(self, key, value, data): extent_and_physical_detail_data = [] extent = [] physical_details = [] - physical_details_str = '' - if value.get('a'): - extent = utils.force_list(value.get('a', []))[0] + physical_details_str = "" + if value.get("a"): + extent = utils.force_list(value.get("a", []))[0] extent_and_physical_detail_data.append(extent) - data['extent'] = remove_trailing_punctuation( - data=extent, - punctuation=':;', - spaced_punctuation=':;' + data["extent"] = remove_trailing_punctuation( + data=extent, punctuation=":;", spaced_punctuation=":;" ) - if not data['extent']: - data.pop('extent') + if not data["extent"]: + data.pop("extent") # extract the duration - circa_env = r'\s*(ca\.?|env\.?)?\s*\d+' - hour_min = r'(h|St(d|\.|u)|[mM]in)' + circa_env = r"\s*(ca\.?|env\.?)?\s*\d+" + hour_min = r"(h|St(d|\.|u)|[mM]in)" regexp = re.compile( - fr'(\((\[?{circa_env}\]?\s*{hour_min}.*?)\))|' - fr'(\[({circa_env}\s*{hour_min}.*?)\])', - re.IGNORECASE + rf"(\((\[?{circa_env}\]?\s*{hour_min}.*?)\))|" + rf"(\[({circa_env}\s*{hour_min}.*?)\])", + re.IGNORECASE, ) match = regexp.search(extent) if match and match.group(1): - duration = match.group(1).strip('()') - add_data_and_sort_list('duration', [duration], data) + duration = match.group(1).strip("()") + add_data_and_sort_list("duration", [duration], data) - subfield_code = self.extract_description_subfield['physical_detail'] + subfield_code = self.extract_description_subfield["physical_detail"] for physical_detail in utils.force_list(value.get(subfield_code, [])): - physical_detail = remove_trailing_punctuation( - physical_detail, ':;', ':;') + physical_detail = remove_trailing_punctuation(physical_detail, ":;", ":;") physical_details.append(physical_detail) extent_and_physical_detail_data.append(physical_detail) # to avoid empty note after removing punctuation if physical_detail: add_note( - dict( - noteType='otherPhysicalDetails', - label=physical_detail - ), - data) + dict(noteType="otherPhysicalDetails", label=physical_detail), data + ) - physical_details_str = '|'.join(physical_details) - extent_and_physical_detail_str = \ - '|'.join(extent_and_physical_detail_data) + physical_details_str = 
"|".join(physical_details) + extent_and_physical_detail_str = "|".join(extent_and_physical_detail_data) color_content_set = set() for key in _COLOR_CONTENT_REGEXP: regexp = _COLOR_CONTENT_REGEXP[key] if regexp.search(physical_details_str): color_content_set.add(key) - add_data_and_sort_list('colorContent', color_content_set, data) + add_data_and_sort_list("colorContent", color_content_set, data) production_method_set = set() for key in _PRODUCTION_METHOD_FROM_EXTENT_AND_PHYSICAL_DETAILS: @@ -907,68 +952,62 @@ def extract_description_from_marc_field(self, key, value, data): # extract build illustrativeContent data # remove 'couv. ill' and the extra '|' resulting of the remove - physical_detail_ill_str = \ - re.sub(r'couv\. ill', '', physical_details_str) - physical_detail_ill_str = \ - re.sub(r'\|\||^\||\|$', '', physical_detail_ill_str) + physical_detail_ill_str = re.sub(r"couv\. ill", "", physical_details_str) + physical_detail_ill_str = re.sub(r"\|\||^\||\|$", "", physical_detail_ill_str) illustration_set = set() for key in _ILLUSTRATIVE_CONTENT_REGEXP: regexp = _ILLUSTRATIVE_CONTENT_REGEXP[key] if regexp.search(physical_detail_ill_str): illustration_set.add(key) - add_data_and_sort_list('illustrativeContent', illustration_set, data) + add_data_and_sort_list("illustrativeContent", illustration_set, data) # remove 'rdapm:1005' if specific production_method exists - if ('rdapm:1005') in production_method_set: - del_set = \ - set(('rdapm:1009', 'rdapm:1012', 'rdapm:1014', 'rdapm:1016')) + if ("rdapm:1005") in production_method_set: + del_set = set(("rdapm:1009", "rdapm:1012", "rdapm:1014", "rdapm:1016")) if production_method_set.intersection(del_set): - production_method_set.remove('rdapm:1005') + production_method_set.remove("rdapm:1005") # extract production_method from physical_details only if re.search( - r'impr|druck|print|offset|s[ée]riegr', - physical_details_str, - re.IGNORECASE): - production_method_set.add('rdapm:1010') + r"impr|druck|print|offset|s[ée]riegr", physical_details_str, re.IGNORECASE + ): + production_method_set.add("rdapm:1010") # build productionMethod data - add_data_and_sort_list('productionMethod', production_method_set, data) + add_data_and_sort_list("productionMethod", production_method_set, data) # extract book_format from $c book_formats = [] tool = BookFormatExtraction() - subfield_code = self.extract_description_subfield['book_format'] + subfield_code = self.extract_description_subfield["book_format"] for dimension in utils.force_list(value.get(subfield_code, [])): formats = tool.extract_book_formats_from(dimension) for book_format in formats: book_formats.append(book_format) dim = remove_trailing_punctuation( - data=dimension.rstrip(), - punctuation='+,:;&.' + data=dimension.rstrip(), punctuation="+,:;&." 
) if dim: - add_data_and_sort_list( - 'dimensions', utils.force_list(dim), data) - add_data_and_sort_list('bookFormat', book_formats, data) + add_data_and_sort_list("dimensions", utils.force_list(dim), data) + add_data_and_sort_list("bookFormat", book_formats, data) # extract accompanyingMaterial note from $e - if value.get('e'): + if value.get("e"): material_notes = [] if isinstance(self, ReroIlsMarc21Overdo): - material_note = utils.force_list(value.get('e', []))[0] - material_notes = material_note.split('+') + material_note = utils.force_list(value.get("e", []))[0] + material_notes = material_note.split("+") elif isinstance(self, ReroIlsUnimarcOverdo): - material_notes = utils.force_list(value.get('e', [])) + material_notes = utils.force_list(value.get("e", [])) for material_note in material_notes: if material_note: add_note( dict( - noteType='accompanyingMaterial', - label=material_note.strip() + noteType="accompanyingMaterial", label=material_note.strip() ), - data) + data, + ) def extract_series_statement_from_marc_field(self, key, value, data): """Extract the seriesStatement data from marc field data. @@ -993,67 +1032,69 @@ def extract_series_statement_from_marc_field(self, key, value, data): subseries = [] count = 0 tag = key[:3] - series_title_subfield_code = \ - self.extract_series_statement_subfield[tag]['series_title'] - series_enumeration_subfield_code = \ - self.extract_series_statement_subfield[tag]['series_enumeration'] - subfield_selection = \ - {series_title_subfield_code, series_enumeration_subfield_code} - subfield_visited = '' + series_title_subfield_code = self.extract_series_statement_subfield[tag][ + "series_title" + ] + series_enumeration_subfield_code = self.extract_series_statement_subfield[tag][ + "series_enumeration" + ] + subfield_selection = { + series_title_subfield_code, + series_enumeration_subfield_code, + } + subfield_visited = "" for blob_key, blob_value in items: if blob_key in subfield_selection: subfield_visited += blob_key value_data = self.build_value_with_alternate_graphic( - tag, blob_key, blob_value, index, link, ',.', ':;/-=') + tag, blob_key, blob_value, index, link, ",.", ":;/-=" + ) if blob_key == series_title_subfield_code: count += 1 if count == 1: - series['seriesTitle'] = value_data + series["seriesTitle"] = value_data else: - subseries.append({'subseriesTitle': value_data}) + subseries.append({"subseriesTitle": value_data}) elif blob_key == series_enumeration_subfield_code: if count == 1: - if 'seriesEnumeration' in series: - series['seriesEnumeration'] = \ - join_alternate_graphic_data( - alt_gr_1=series['seriesEnumeration'], - alt_gr_2=value_data, - join_str=', ' - ) + if "seriesEnumeration" in series: + series["seriesEnumeration"] = join_alternate_graphic_data( + alt_gr_1=series["seriesEnumeration"], + alt_gr_2=value_data, + join_str=", ", + ) else: - series['seriesEnumeration'] = value_data + series["seriesEnumeration"] = value_data elif count > 1: - if 'subseriesEnumeration' in subseries[count-2]: - alt_gr_1 = \ - subseries[count-2]['subseriesEnumeration'] - subseries[count-2]['subseriesEnumeration'] = \ + if "subseriesEnumeration" in subseries[count - 2]: + alt_gr_1 = subseries[count - 2]["subseriesEnumeration"] + subseries[count - 2]["subseriesEnumeration"] = ( join_alternate_graphic_data( alt_gr_1=alt_gr_1, alt_gr_2=value_data, - join_str=', ' + join_str=", ", ) + ) else: - subseries[count-2]['subseriesEnumeration'] = \ - value_data - if blob_key != '__order__': + subseries[count - 2]["subseriesEnumeration"] = value_data + if 
blob_key != "__order__": index += 1 - error_msg = '' - regexp = re.compile(fr'^[^{series_title_subfield_code}]') + error_msg = "" + regexp = re.compile(rf"^[^{series_title_subfield_code}]") if regexp.search(subfield_visited): error_msg = ( - f'missing leading subfield ${series_title_subfield_code} ' - f'in field {tag}' + f"missing leading subfield ${series_title_subfield_code} " + f"in field {tag}" ) - error_print('ERROR BAD FIELD FORMAT:', self.bib_id, self.rero_id, - error_msg) + error_print("ERROR BAD FIELD FORMAT:", self.bib_id, self.rero_id, error_msg) else: if subseries: - series['subseriesStatement'] = subseries - series_statement = data.get('seriesStatement', []) + series["subseriesStatement"] = subseries + series_statement = data.get("seriesStatement", []) if series: series_statement.append(series) - data['seriesStatement'] = series_statement + data["seriesStatement"] = series_statement class ReroIlsMarc21Overdo(ReroIlsOverdo): @@ -1062,15 +1103,15 @@ class ReroIlsMarc21Overdo(ReroIlsOverdo): This class adds RERO Marc21 properties and functions to the ReroIlsOverdo. """ - bib_id = '' - field_008_data = '' + bib_id = "" + field_008_data = "" lang_from_008 = None date1_from_008 = None date2_from_008 = None original_date_from_008 = None - date = {'start_date'} - date_type_from_008 = '' - serial_type = '' # 008 pos 21 + date = {"start_date"} + date_type_from_008 = "" + serial_type = "" # 008 pos 21 langs_from_041_a = [] langs_from_041_h = [] alternate_graphic = {} @@ -1082,30 +1123,14 @@ class ReroIlsMarc21Overdo(ReroIlsOverdo): def __init__(self, bases=None, entry_point_group=None): """Reroilsmarc21overdo init.""" - super().__init__( - bases=bases, entry_point_group=entry_point_group) + super().__init__(bases=bases, entry_point_group=entry_point_group) self.count = 0 - self.extract_description_subfield = { - 'physical_detail': 'b', - 'book_format': 'c' - } + self.extract_description_subfield = {"physical_detail": "b", "book_format": "c"} self.extract_series_statement_subfield = { - '490': { - 'series_title': 'a', - 'series_enumeration': 'v' - }, - '773': { - 'series_title': 't', - 'series_enumeration': 'g' - }, - '800': { - 'series_title': 't', - 'series_enumeration': 'v' - }, - '830': { - 'series_title': 'a', - 'series_enumeration': 'v' - } + "490": {"series_title": "a", "series_enumeration": "v"}, + "773": {"series_title": "t", "series_enumeration": "g"}, + "800": {"series_title": "t", "series_enumeration": "v"}, + "830": {"series_title": "a", "series_enumeration": "v"}, } def do(self, blob, ignore_missing=True, exception_handlers=None): @@ -1115,48 +1140,48 @@ def do(self, blob, ignore_missing=True, exception_handlers=None): try: # extract record leader self._blob_record = blob - self.leader = blob.get('leader', '') + self.leader = blob.get("leader", "") try: - self.bib_id = self.get_fields(tag='001')[0]['data'] + self.bib_id = self.get_fields(tag="001")[0]["data"] except Exception: - self.bib_id = '???' + self.bib_id = "???" try: - fields_035 = self.get_fields(tag='035') - self.rero_id = self.get_subfields(fields_035[0], 'a')[0] + fields_035 = self.get_fields(tag="035") + self.rero_id = self.get_subfields(fields_035[0], "a")[0] except Exception: - self.rero_id = '???' - self.field_008_data = '' + self.rero_id = "???" 
+ self.field_008_data = "" self.date1_from_008 = None self.date2_from_008 = None self.original_date_from_008 = None - self.date_type_from_008 = '' - self.date = {'start_date': None} - self.serial_type = '' + self.date_type_from_008 = "" + self.date = {"start_date": None} + self.serial_type = "" self.is_top_level_record = False - fields_008 = self.get_fields(tag='008') + fields_008 = self.get_fields(tag="008") if fields_008: self.field_008_data = self.get_control_field_data( - fields_008[0]).rstrip() + fields_008[0] + ).rstrip() try: self.serial_type = self.field_008_data[21] except Exception as err: - error_print('ERROR SERIAL TYPE:', self.bib_id, - self.rero_id, err) + error_print("ERROR SERIAL TYPE:", self.bib_id, self.rero_id, err) self.date1_from_008 = self.field_008_data[7:11] self.date2_from_008 = self.field_008_data[11:15] self.date_type_from_008 = self.field_008_data[6] - if self.date_type_from_008 == 'r': + if self.date_type_from_008 == "r": self.original_date_from_008 = self.date2_from_008 self.admin_meta_data = {} - enc_level = '' + enc_level = "" if self.leader: enc_level = self.leader[17] # LDR 17 if enc_level in _ENCODING_LEVEL_MAPPING: encoding_level = _ENCODING_LEVEL_MAPPING[enc_level] else: - encoding_level = _ENCODING_LEVEL_MAPPING['u'] - self.admin_meta_data['encodingLevel'] = encoding_level + encoding_level = _ENCODING_LEVEL_MAPPING["u"] + self.admin_meta_data["encodingLevel"] = encoding_level self.init_lang() self.init_country() @@ -1166,81 +1191,85 @@ def do(self, blob, ignore_missing=True, exception_handlers=None): # get notes from 019 $a or $b and # identifiy a top level record (has 019 $a Niveau supérieur) - regexp = re.compile(r'Niveau sup[eé]rieur', re.IGNORECASE) - fields_019 = self.get_fields(tag='019') + regexp = re.compile(r"Niveau sup[eé]rieur", re.IGNORECASE) + fields_019 = self.get_fields(tag="019") notes_from_019_and_351 = [] for field_019 in fields_019: - note = '' - for subfield_a in self.get_subfields(field_019, 'a'): - note += ' | ' + subfield_a + note = "" + for subfield_a in self.get_subfields(field_019, "a"): + note += " | " + subfield_a if regexp.search(subfield_a): self.is_top_level_record = True - for subfield_b in self.get_subfields(field_019, 'b'): - note += ' | ' + subfield_b - for subfield_9 in self.get_subfields(field_019, '9'): - note += ' (' + subfield_9 + ')' + for subfield_b in self.get_subfields(field_019, "b"): + note += " | " + subfield_b + for subfield_9 in self.get_subfields(field_019, "9"): + note += " (" + subfield_9 + ")" break if note: notes_from_019_and_351.append(note[3:]) - fields_351 = self.get_fields(tag='351') + fields_351 = self.get_fields(tag="351") for field_351 in fields_351: - note = ' | '.join(self.get_subfields(field_351, 'c')) + note = " | ".join(self.get_subfields(field_351, "c")) if note: notes_from_019_and_351.append(note) if notes_from_019_and_351: - self.admin_meta_data['note'] = notes_from_019_and_351 + self.admin_meta_data["note"] = notes_from_019_and_351 - fields_040 = self.get_fields(tag='040') + fields_040 = self.get_fields(tag="040") for field_040 in fields_040: - for subfield_a in self.get_subfields(field_040, 'a'): - self.admin_meta_data['source'] = subfield_a - for subfield_b in self.get_subfields(field_040, 'b'): + for subfield_a in self.get_subfields(field_040, "a"): + self.admin_meta_data["source"] = subfield_a + for subfield_b in self.get_subfields(field_040, "b"): if subfield_b in _LANGUAGES: - self.admin_meta_data[ - 'descriptionLanguage'] = subfield_b + 
self.admin_meta_data["descriptionLanguage"] = subfield_b else: error_print( - 'WARNING NOT A LANGUAGE 040:', + "WARNING NOT A LANGUAGE 040:", self.bib_id, self.rero_id, - subfield_b + subfield_b, ) description_modifier = [] - for subfield_d in self.get_subfields(field_040, 'd'): + for subfield_d in self.get_subfields(field_040, "d"): description_modifier.append(subfield_d) if description_modifier: - self.admin_meta_data['descriptionModifier'] = \ - description_modifier + self.admin_meta_data["descriptionModifier"] = description_modifier description_conventions = [] - for subfield_e in self.get_subfields(field_040, 'e'): + for subfield_e in self.get_subfields(field_040, "e"): description_conventions.append(subfield_e) if description_conventions: - self.admin_meta_data['descriptionConventions'] = \ + self.admin_meta_data["descriptionConventions"] = ( description_conventions + ) # build the list of links from field 752 self.links_from_752 = [] - fields_752 = self.get_fields(tag='752') + fields_752 = self.get_fields(tag="752") for field_752 in fields_752: - subfields_d = self.get_subfields(field_752, 'd') + subfields_d = self.get_subfields(field_752, "d") if subfields_d: - identifier = build_identifier(field_752['subfields']) + identifier = build_identifier(field_752["subfields"]) if identifier: self.links_from_752.append(identifier) # check presence of specific fields - self.has_field_490 = len(self.get_fields(tag='490')) > 0 - self.has_field_580 = len(self.get_fields(tag='580')) > 0 + self.has_field_490 = len(self.get_fields(tag="490")) > 0 + self.has_field_580 = len(self.get_fields(tag="580")) > 0 result = super().do( blob, ignore_missing=ignore_missing, - exception_handlers=exception_handlers + exception_handlers=exception_handlers, ) except Exception as err: - error_print('ERROR DO:', self.bib_id, self.rero_id, self.count, - f'{err} {traceback.format_exception_only}') + error_print( + "ERROR DO:", + self.bib_id, + self.rero_id, + self.count, + f"{err} {traceback.format_exception_only}", + ) traceback.print_exc() raise Exception(err) return result @@ -1248,39 +1277,44 @@ def do(self, blob, ignore_missing=True, exception_handlers=None): def get_link_data(self, subfields_6_data): """Extract link and script data from subfields $6 data.""" link = None - tag, extra_data = subfields_6_data.split('-') + tag, extra_data = subfields_6_data.split("-") if extra_data: - link_and_script_data = extra_data.split('/') + link_and_script_data = extra_data.split("/") link = link_and_script_data[0] try: script_code = link_and_script_data[1] - except Exception as err: - script_code = 'latn' + except Exception: + script_code = "latn" try: script_dir = link_and_script_data[2] - except Exception as err: - script_dir = '' + except Exception: + script_dir = "" return tag, link, script_code, script_dir def init_country(self): """Initialization country (008 and 044).""" self.country = None self.cantons = [] - if fields_044 := self.get_fields(tag='044'): + if fields_044 := self.get_fields(tag="044"): field_044 = fields_044[0] - for cantons_code in self.get_subfields(field_044, 'c'): + for cantons_code in self.get_subfields(field_044, "c"): try: - if canton := cantons_code.split('-')[1].strip(): + if canton := cantons_code.split("-")[1].strip(): if canton in _CANTON: self.cantons.append(canton) else: - error_print('WARNING INIT CANTONS:', self.bib_id, - self.rero_id, cantons_code) - except Exception as err: - error_print('WARNING INIT CANTONS:', self.bib_id, - self.rero_id, cantons_code) + error_print( + "WARNING INIT 
CANTONS:", + self.bib_id, + self.rero_id, + cantons_code, + ) + except Exception: + error_print( + "WARNING INIT CANTONS:", self.bib_id, self.rero_id, cantons_code + ) if self.cantons: - self.country = 'sz' + self.country = "sz" # We did not find a country in 044 trying 008. if not self.country: with contextlib.suppress(Exception): @@ -1290,12 +1324,14 @@ def init_country(self): self.country = _OBSOLETE_COUNTRIES_MAPPING[self.country] # We did not find a country set it to 'xx' if self.country not in _COUNTRIES: - error_print('WARNING NOT A COUNTRY:', self.bib_id, self.rero_id, - self.country) - self.country = 'xx' + error_print( + "WARNING NOT A COUNTRY:", self.bib_id, self.rero_id, self.country + ) + self.country = "xx" def init_lang(self): """Initialization languages (008 and 041).""" + def init_lang_from(fields_041, code): """Construct list of language codes from data.""" langs_from_041 = [] @@ -1307,10 +1343,10 @@ def init_lang_from(fields_041, code): langs_from_041.append(lang_from_041) else: error_print( - 'WARNING NOT A LANGUAGE 041:', + "WARNING NOT A LANGUAGE 041:", self.bib_id, self.rero_id, - lang_from_041 + lang_from_041, ) return langs_from_041 @@ -1321,20 +1357,19 @@ def init_lang_from(fields_041, code): self.lang_from_008 = self.field_008_data[35:38] if self.lang_from_008 not in _LANGUAGES: error_print( - 'WARNING NOT A LANGUAGE 008:', + "WARNING NOT A LANGUAGE 008:", self.bib_id, self.rero_id, - self.lang_from_008 + self.lang_from_008, ) - self.lang_from_008 = 'und' - except Exception as err: - self.lang_from_008 = 'und' - error_print("WARNING: set 008 language to 'und'", self.bib_id, - self.rero_id) + self.lang_from_008 = "und" + except Exception: + self.lang_from_008 = "und" + error_print("WARNING: set 008 language to 'und'", self.bib_id, self.rero_id) - fields_041 = self.get_fields(tag='041') - self.langs_from_041_a = init_lang_from(fields_041, code='a') - self.langs_from_041_h = init_lang_from(fields_041, code='h') + fields_041 = self.get_fields(tag="041") + self.langs_from_041_a = init_lang_from(fields_041, code="a") + self.langs_from_041_h = init_lang_from(fields_041, code="h") def init_date(self): """Initialization start and end date. @@ -1344,17 +1379,17 @@ def init_date(self): 3. get dates from 773 $g 4. set start_date to 2050 """ - if self.date_type_from_008 in ['q', 'n']: - self.date['note'] = 'Date(s) uncertain or unknown' + if self.date_type_from_008 in ["q", "n"]: + self.date["note"] = "Date(s) uncertain or unknown" start_date = make_year(self.date1_from_008) if not (start_date and start_date >= -9999 and start_date <= 2050): start_date = None if not start_date: - fields_264 = self.get_fields('264') - for ind2 in ['1', '0', '2', '4', '3']: + fields_264 = self.get_fields("264") + for ind2 in ["1", "0", "2", "4", "3"]: for field_264 in fields_264: - if ind2 == field_264['ind2']: - if subfields_c := self.get_subfields(field_264, 'c'): + if ind2 == field_264["ind2"]: + if subfields_c := self.get_subfields(field_264, "c"): year = re.search(r"(-?\d{1,4})", subfields_c[0]) if year: year = int(year.group(0)) @@ -1367,9 +1402,9 @@ def init_date(self): # Inner loop was broken, break the outer. 
break if not start_date: - fields_773 = self.get_fields('773') + fields_773 = self.get_fields("773") for field_773 in fields_773: - if subfields_g := self.get_subfields(field_773, 'g'): + if subfields_g := self.get_subfields(field_773, "g"): year = re.search(r"(-?\d{4})", subfields_g[0]) if year: year = int(year.group(0)) @@ -1377,15 +1412,18 @@ def init_date(self): start_date = year if not start_date: start_date = 2050 - self.date['note'] = \ - 'Date not available and automatically set to 2050' - error_print('WARNING START DATE 264:', self.bib_id, self.rero_id, - self.date1_from_008) - self.date['start_date'] = start_date + self.date["note"] = "Date not available and automatically set to 2050" + error_print( + "WARNING START DATE 264:", + self.bib_id, + self.rero_id, + self.date1_from_008, + ) + self.date["start_date"] = start_date end_date = make_year(self.date2_from_008) if end_date and end_date >= -9999 and end_date <= 2050: - self.date['end_date'] = end_date + self.date["end_date"] = end_date def init_alternate_graphic(self): """Initialization of alternate graphic representation. @@ -1395,13 +1433,14 @@ def init_alternate_graphic(self): link code (from $6) of the linked_data field. The language script is extracted from $6 and used to qualify the alternate graphic value. """ + def get_script_from_lang(asian=False): """Initialization of alternate graphic representation.""" script = None - default_script = 'zyyy' + default_script = "zyyy" script_per_lang = _SCRIPT_PER_LANG_NOT_ASIA if asian: - default_script = 'hani' + default_script = "hani" script_per_lang = _SCRIPT_PER_LANG_ASIA script = script_per_lang.get(self.lang_from_008) if not script: @@ -1415,32 +1454,28 @@ def get_script_from_lang(asian=False): # function init_alternate_graphic start here self.alternate_graphic = {} - fields_880 = self.get_fields(tag='880') + fields_880 = self.get_fields(tag="880") for field_880 in fields_880: try: - subfields_6 = self.get_subfields(field_880, code='6') + subfields_6 = self.get_subfields(field_880, code="6") for subfield_6 in subfields_6: - tag, link, script_code, script_dir = self.get_link_data( - subfield_6) + tag, link, script_code, script_dir = self.get_link_data(subfield_6) tag_data = self.alternate_graphic.get(tag, {}) link_data = tag_data.get(link, {}) - if script_code == '$1': + if script_code == "$1": script = get_script_from_lang(asian=True) elif script_code in _SCRIPT_PER_CODE: script = _SCRIPT_PER_CODE[script_code] else: script = get_script_from_lang() - link_data['script'] = script - link_data['field'] = field_880 - if script_dir == 'r': - link_data['right_to_left'] = True + link_data["script"] = script + link_data["field"] = field_880 + if script_dir == "r": + link_data["right_to_left"] = True tag_data[link] = link_data self.alternate_graphic[tag] = tag_data except Exception as error: - click.secho( - f'Error in init_alternate_graphic: {error}', - fg='red' - ) + click.secho(f"Error in init_alternate_graphic: {error}", fg="red") def get_language_script(self, script_code): """Build the `language-script` code. 
@@ -1456,16 +1491,25 @@ def get_language_script(self, script_code): :rtype: str """ if script_code in _LANGUAGES_SCRIPTS: - languages = ([self.lang_from_008] + self.langs_from_041_a + - self.langs_from_041_h) + languages = ( + [self.lang_from_008] + self.langs_from_041_a + self.langs_from_041_h + ) for lang in languages: if lang in _LANGUAGES_SCRIPTS[script_code]: - return '-'.join([lang, script_code]) - error_print('WARNING LANGUAGE SCRIPTS:', self.bib_id, self.rero_id, - script_code, '008:', self.lang_from_008, - '041$a:', self.langs_from_041_a, - '041$h:', self.langs_from_041_h) - return '-'.join(['und', script_code]) + return "-".join([lang, script_code]) + error_print( + "WARNING LANGUAGE SCRIPTS:", + self.bib_id, + self.rero_id, + script_code, + "008:", + self.lang_from_008, + "041$a:", + self.langs_from_041_a, + "041$h:", + self.langs_from_041_h, + ) + return "-".join(["und", script_code]) def build_variant_title_data(self, string_set): """Build variant title data form fields 246. @@ -1476,67 +1520,61 @@ def build_variant_title_data(self, string_set): :rtype: list """ variant_list = [] - fields_246 = self.get_fields(tag='246') + fields_246 = self.get_fields(tag="246") for field_246 in fields_246: variant_data = {} - subfield_246_a = '' - if subfields_246_a := self.get_subfields(field_246, 'a'): + subfield_246_a = "" + if subfields_246_a := self.get_subfields(field_246, "a"): subfield_246_a = subfields_246_a[0] subfield_246_a_cleaned = remove_trailing_punctuation( - subfield_246_a, ',.', ':;/-=') + subfield_246_a, ",.", ":;/-=" + ) if subfield_246_a_cleaned not in string_set: # parse all subfields in order index = 1 - items = get_field_items(field_246['subfields']) + items = get_field_items(field_246["subfields"]) tag_link, link = get_field_link_data(field_246) - part_list = TitlePartList( - part_number_code='n', - part_name_code='p' - ) + part_list = TitlePartList(part_number_code="n", part_name_code="p") - subfield_selection = {'a', 'n', 'p'} + subfield_selection = {"a", "n", "p"} for blob_key, blob_value in items: if blob_key in subfield_selection: - if blob_key == 'a': - subfield_a_parts = blob_value.split(':') + if blob_key == "a": + subfield_a_parts = blob_value.split(":") part_index = 0 for subfield_a_part in subfield_a_parts: - value_data = \ - self.build_value_with_alternate_graphic( - '246', - blob_key, - subfield_a_part, - index, - link, - ',.', - ':;/-=', - ) - if value_data: - if part_index == 0: - variant_data['type'] = \ - 'bf:VariantTitle' - variant_data['mainTitle'] = value_data - else: - variant_data['subtitle'] = value_data - part_index += 1 - elif blob_key in ['n', 'p']: - value_data = \ - self.build_value_with_alternate_graphic( - '246', + value_data = self.build_value_with_alternate_graphic( + "246", blob_key, - blob_value, + subfield_a_part, index, link, - ',.', - ':;/-=', + ",.", + ":;/-=", ) + if value_data: + if part_index == 0: + variant_data["type"] = "bf:VariantTitle" + variant_data["mainTitle"] = value_data + else: + variant_data["subtitle"] = value_data + part_index += 1 + elif blob_key in ["n", "p"]: + value_data = self.build_value_with_alternate_graphic( + "246", + blob_key, + blob_value, + index, + link, + ",.", + ":;/-=", + ) if value_data: - part_list.update_part( - value_data, blob_key, blob_value) - if blob_key != '__order__': + part_list.update_part(value_data, blob_key, blob_value) + if blob_key != "__order__": index += 1 if the_part_list := part_list.get_part_list(): - variant_data['part'] = the_part_list + variant_data["part"] = the_part_list 
if variant_data: variant_list.append(variant_data) return variant_list @@ -1544,80 +1582,81 @@ def build_variant_title_data(self, string_set): def init_content_media_carrier_type(self): """Initialization content/media/carrier type (336, 337 and 338).""" content_media_carrier_type_per_tag = { - '336': 'contentType', - '337': 'mediaType', - '338': 'carrierType' + "336": "contentType", + "337": "mediaType", + "338": "carrierType", } content_media_carrier_map_per_tag = { - '336': _CONTENT_TYPE_MAPPING, - '337': _MEDIA_TYPE_MAPPING, - '338': _CARRIER_TYPE_MAPPING + "336": _CONTENT_TYPE_MAPPING, + "337": _MEDIA_TYPE_MAPPING, + "338": _CARRIER_TYPE_MAPPING, } content_media_carrier_type = {} - media_type_from_unlinked_337 = '' - for tag in ['336', '337', '338']: # parsing tag in the right order + media_type_from_unlinked_337 = "" + for tag in ["336", "337", "338"]: # parsing tag in the right order type_key = content_media_carrier_type_per_tag[tag] fields = self.get_fields(tag=tag) for field in fields: - subfields_8 = self.get_subfields(field, '8') or ['0'] - for subfield_b in self.get_subfields(field, 'b'): + subfields_8 = self.get_subfields(field, "8") or ["0"] + for subfield_b in self.get_subfields(field, "b"): type_found = False for link in subfields_8: linked_data = content_media_carrier_type.get(link, {}) - if tag == '336': - linked_data_type_value = \ - linked_data.get(type_key, []) - type_value = \ - content_media_carrier_map_per_tag[tag].get( - subfield_b, None) - if type_value and \ - type_value not in linked_data_type_value: + if tag == "336": + linked_data_type_value = linked_data.get(type_key, []) + type_value = content_media_carrier_map_per_tag[tag].get( + subfield_b, None + ) + if type_value and type_value not in linked_data_type_value: linked_data_type_value.append(type_value) linked_data[type_key] = linked_data_type_value type_found = True else: - if link == '0' and tag == '337': - media_type_from_unlinked_337 = \ + if link == "0" and tag == "337": + media_type_from_unlinked_337 = ( content_media_carrier_map_per_tag[tag].get( - subfield_b, None) - linked_data_type_value = \ - linked_data.get(type_key, '') - if type_value := content_media_carrier_map_per_tag[ - tag - ].get(subfield_b, None): + subfield_b, None + ) + ) + linked_data_type_value = linked_data.get(type_key, "") + if type_value := content_media_carrier_map_per_tag[tag].get( + subfield_b, None + ): linked_data_type_value = type_value linked_data[type_key] = linked_data_type_value type_found = True - if tag == '338': - media_type_from_338 = \ - _MEDIA_TYPE_MAPPING.get(subfield_b[0]) + if tag == "338": + media_type_from_338 = _MEDIA_TYPE_MAPPING.get( + subfield_b[0] + ) if media_type_from_338: - linked_data['mediaTypeFrom338'] = \ - media_type_from_338 + linked_data["mediaTypeFrom338"] = ( + media_type_from_338 + ) if type_found: content_media_carrier_type[link] = linked_data break # subfield $b in not repetitive self.content_media_carrier_type = [] for link, value in content_media_carrier_type.items(): - media_type = value.get('mediaType', None) - media_type_from_338 = value.get('mediaTypeFrom338', None) + media_type = value.get("mediaType", None) + media_type_from_338 = value.get("mediaTypeFrom338", None) # set mediaType from 338 if not get it form 337 if media_type_from_338: if not media_type: - value['mediaType'] = media_type_from_338 + value["mediaType"] = media_type_from_338 elif media_type_from_338 != media_type: - value['mediaType'] = media_type_from_338 + value["mediaType"] = media_type_from_338 error_print( - 
'WARNING MEDIA TYPE:', - self.bib_id, self.rero_id, media_type) + "WARNING MEDIA TYPE:", self.bib_id, self.rero_id, media_type + ) if media_type_from_338 and not media_type: - value['mediaType'] = media_type_from_338 - value.pop('mediaTypeFrom338', None) - if 'contentType' in value: - if media_type_from_unlinked_337 and 'mediaType' not in value: - value['mediaType'] = media_type_from_unlinked_337 + value["mediaType"] = media_type_from_338 + value.pop("mediaTypeFrom338", None) + if "contentType" in value: + if media_type_from_unlinked_337 and "mediaType" not in value: + value["mediaType"] = media_type_from_unlinked_337 self.content_media_carrier_type.append(value) @@ -1627,26 +1666,22 @@ class ReroIlsUnimarcOverdo(ReroIlsOverdo): This class adds UNIMARC properties and functions to the ReroIlsOverdo. """ - bib_id = '' - rero_id = 'unimarc' + bib_id = "" + rero_id = "unimarc" lang_from_101 = None alternate_graphic = {} - serial_type = '' + serial_type = "" def __init__(self, bases=None, entry_point_group=None): """Constructor.""" - super().__init__( - bases=bases, entry_point_group=entry_point_group) + super().__init__(bases=bases, entry_point_group=entry_point_group) self.count = 0 self.extract_description_subfield = { - 'physical_detail': 'c', - 'book_format': 'd', + "physical_detail": "c", + "book_format": "d", } self.extract_series_statement_subfield = { - '225': { - 'series_title': 'a', - 'series_enumeration': 'v' - } + "225": {"series_title": "a", "series_enumeration": "v"} } def do(self, blob, ignore_missing=True, exception_handlers=None): @@ -1656,37 +1691,37 @@ def do(self, blob, ignore_missing=True, exception_handlers=None): try: self._blob_record = blob try: - self.bib_id = self.get_fields(tag='001')[0]['data'] - except Exception as err: - self.bib_id = '???' + self.bib_id = self.get_fields(tag="001")[0]["data"] + except Exception: + self.bib_id = "???" 
- if fields_101 := self.get_fields(tag='101'): - field_101_a = self.get_subfields(fields_101[0], 'a') - field_101_g = self.get_subfields(fields_101[0], 'g') + if fields_101 := self.get_fields(tag="101"): + field_101_a = self.get_subfields(fields_101[0], "a") + field_101_g = self.get_subfields(fields_101[0], "g") if field_101_a: self.lang_from_101 = field_101_a[0] if field_101_g: self.lang_from_101 = field_101_g[0] - if fields_110 := self.get_fields(tag='110'): - field_110_a = self.get_subfields(fields_110[0], 'a') + if fields_110 := self.get_fields(tag="110"): + field_110_a = self.get_subfields(fields_110[0], "a") if field_110_a and len(field_110_a[0]) > 0: self.serial_type = field_110_a[0][0] - enc_level = self.leader[17] if self.leader else '' + enc_level = self.leader[17] if self.leader else "" encoding_level = ( _ENCODING_LEVEL_MAPPING[enc_level] if enc_level in _ENCODING_LEVEL_MAPPING - else _ENCODING_LEVEL_MAPPING['u'] + else _ENCODING_LEVEL_MAPPING["u"] ) - self.admin_meta_data = {'encodingLevel': encoding_level} + self.admin_meta_data = {"encodingLevel": encoding_level} result = super().do( blob, ignore_missing=ignore_missing, - exception_handlers=exception_handlers + exception_handlers=exception_handlers, ) except Exception as err: - error_print('ERROR:', self.bib_id, self.rero_id, self.count, err) + error_print("ERROR:", self.bib_id, self.rero_id, self.count, err) traceback.print_exc() return result @@ -1711,12 +1746,18 @@ def get_language_script(self, unimarc_script_code): if script_code in _LANGUAGES_SCRIPTS: lang = self.lang_from_101 if lang in _LANGUAGES_SCRIPTS[script_code]: - return '-'.join([self.lang_from_101, script_code]) - error_print('WARNING LANGUAGE SCRIPTS:', self.bib_id, - self.rero_id, script_code, '101:', - self.lang_from_101, '101$a or $g:', - self.lang_from_101) - return '-'.join(['und', script_code]) + return "-".join([self.lang_from_101, script_code]) + error_print( + "WARNING LANGUAGE SCRIPTS:", + self.bib_id, + self.rero_id, + script_code, + "101:", + self.lang_from_101, + "101$a or $g:", + self.lang_from_101, + ) + return "-".join(["und", script_code]) def get_alt_graphic_fields(self, tag=None): """Get all alternate graphic fields having the given tag value. 
@@ -1732,22 +1773,26 @@ def get_alt_graphic_fields(self, tag=None): field_data = {} tag_value = blob_key[:3] if (tag_value == tag) or not tag: - field_data['tag'] = tag_value + field_data["tag"] = tag_value if len(blob_key) == 3: # if control field - field_data['data'] = blob_value.rstrip() + field_data["data"] = blob_value.rstrip() else: - field_data['ind1'] = blob_key[3:4] - field_data['ind2'] = blob_key[4:5] - field_data['subfields'] = blob_value - subfields_6 = self.get_subfields(field_data, '6') - subfields_7 = self.get_subfields(field_data, '7') + field_data["ind1"] = blob_key[3:4] + field_data["ind2"] = blob_key[4:5] + field_data["subfields"] = blob_value + subfields_6 = self.get_subfields(field_data, "6") + subfields_7 = self.get_subfields(field_data, "7") # alternate graphic link code start with 'a' - if subfields_6 and subfields_6[0][0] == 'a' \ - and subfields_7 and subfields_7[0] != 'ba': # ba=latin + if ( + subfields_6 + and subfields_6[0][0] == "a" + and subfields_7 + and subfields_7[0] != "ba" + ): # ba=latin tag_data = self.alternate_graphic.get(tag, {}) tag_data[subfields_6[0]] = {} - tag_data[subfields_6[0]]['field'] = field_data - tag_data[subfields_6[0]]['script'] = subfields_7[0] + tag_data[subfields_6[0]]["field"] = field_data + tag_data[subfields_6[0]]["script"] = subfields_7[0] self.alternate_graphic[tag] = tag_data else: fields.append(field_data) @@ -1791,26 +1836,22 @@ def update_part(self, value_data, subfield_code, subfield_data): :param subfield_data: part number or name depending of `subfield_code` :type subfield_data: str """ + def remove_last_dot(value): """Removes last dot from value if there are no other dots.""" - if value.count('.') == 1: - value = value.rstrip('.') + if value.count(".") == 1: + value = value.rstrip(".") return value value_data = remove_last_dot(value_data) if self.part_number_waiting_name: if subfield_code == self.part_name_code: self.part_list.append( - dict( - partNumber=self.part_number_waiting_name, - partName=value_data - ) + dict(partNumber=self.part_number_waiting_name, partName=value_data) ) self.part_number_waiting_name = {} else: - self.part_list.append( - dict(partNumber=self.part_number_waiting_name) - ) + self.part_list.append(dict(partNumber=self.part_number_waiting_name)) self.part_number_waiting_name = value_data else: if subfield_code == self.part_number_code: @@ -1830,14 +1871,13 @@ def get_part_list(self): :rtype: list """ if self.part_number_waiting_name: - self.part_list.append( - dict(partNumber=self.part_number_waiting_name) - ) + self.part_list.append(dict(partNumber=self.part_number_waiting_name)) return self.part_list def extract_subtitle_and_parallel_titles_from_field_245_b( - parallel_title_data, field_245_a_end_with_equal): + parallel_title_data, field_245_a_end_with_equal +): """Extracts subtitle and parallel titles from field 245 $b. This function retrieves the subtitle and the parallel title list @@ -1872,95 +1912,101 @@ def remove_leading_article(string, max_article_len=4): with the given 'max_article_len' chars. An empty string is returned if no leading word is removed. 
""" - last_rest = '' - for sep in ("'", ' '): + last_rest = "" + for sep in ("'", " "): first, sep, rest = string.partition(sep) len_rest = len(rest) - if len(first) <= max_article_len \ - and len_rest > 0 and len_rest > len(last_rest): + if ( + len(first) <= max_article_len + and len_rest > 0 + and len_rest > len(last_rest) + ): last_rest = rest return last_rest - data_std = '' - data_lang = '' - lang = '' + data_std = "" + data_lang = "" + lang = "" main_subtitle = [] parallel_titles = [] pararalel_title_string_set = set() for parallel_title_value in parallel_title_data: - value = parallel_title_value.get('value', '') - lang = parallel_title_value.get('language', '') + value = parallel_title_value.get("value", "") + lang = parallel_title_value.get("language", "") if lang: data_lang = value else: data_std = value - data_std_items = data_std.split('=') + data_std_items = data_std.split("=") data_lang_items = [] if data_lang: - data_lang_items = data_lang.split('=') + data_lang_items = data_lang.split("=") index = 0 out_data_dict = {} for data_std in data_std_items: if index == 0 and not field_245_a_end_with_equal: if data_std.rstrip(): - main_subtitle.append({'value': data_std.rstrip()}) + main_subtitle.append({"value": data_std.rstrip()}) if ( lang and index < len(data_lang_items) and data_lang_items[index].rstrip() ): - main_subtitle.append({ - 'value': data_lang_items[index].rstrip(), - 'language': lang - }) + main_subtitle.append( + {"value": data_lang_items[index].rstrip(), "language": lang} + ) else: main_title = [] subtitle = [] - data_value = \ - remove_trailing_punctuation(data_std.lstrip(), ',.', ':;/-=') - pararalel_title_str, sep, subtitle_str = data_value.partition(':') + data_value = remove_trailing_punctuation(data_std.lstrip(), ",.", ":;/-=") + pararalel_title_str, sep, subtitle_str = data_value.partition(":") pararalel_title_str = pararalel_title_str.strip() subtitle_str = subtitle_str.strip() - data_lang_value = '' - pararalel_title_altgr_str = '' - subtitle_altgr_str = '' + data_lang_value = "" + pararalel_title_altgr_str = "" + subtitle_altgr_str = "" if pararalel_title_str: - out_data_dict = {'type': 'bf:ParallelTitle'} - main_title.append({'value': pararalel_title_str}) + out_data_dict = {"type": "bf:ParallelTitle"} + main_title.append({"value": pararalel_title_str}) if lang: try: data_lang_value = remove_trailing_punctuation( - data_lang_items[index].lstrip(), ',.', ':;/-=') - except Exception as err: - data_lang_value = '[missing data]' - pararalel_title_altgr_str, sep, subtitle_altgr_str = \ - data_lang_value.partition(':') + data_lang_items[index].lstrip(), ",.", ":;/-=" + ) + except Exception: + data_lang_value = "[missing data]" + pararalel_title_altgr_str, sep, subtitle_altgr_str = ( + data_lang_value.partition(":") + ) if pararalel_title_altgr_str: - main_title.append({ - 'value': pararalel_title_altgr_str.strip(), - 'language': lang, - }) - pararalel_title_without_article = \ - remove_leading_article(pararalel_title_str) + main_title.append( + { + "value": pararalel_title_altgr_str.strip(), + "language": lang, + } + ) + pararalel_title_without_article = remove_leading_article( + pararalel_title_str + ) if pararalel_title_without_article: - pararalel_title_string_set.add( - pararalel_title_without_article - ) + pararalel_title_string_set.add(pararalel_title_without_article) pararalel_title_string_set.add(pararalel_title_str) if subtitle_str: - subtitle.append({'value': subtitle_str}) + subtitle.append({"value": subtitle_str}) if lang and subtitle_altgr_str: - 
subtitle.append({ - 'value': subtitle_altgr_str.strip(), - 'language': lang, - }) + subtitle.append( + { + "value": subtitle_altgr_str.strip(), + "language": lang, + } + ) if main_title: - out_data_dict['mainTitle'] = main_title + out_data_dict["mainTitle"] = main_title if subtitle: - out_data_dict['subtitle'] = subtitle + out_data_dict["subtitle"] = subtitle index += 1 if out_data_dict: parallel_titles.append(out_data_dict) @@ -1974,39 +2020,38 @@ def build_responsibility_data(responsibility_data): :return: a list of responsibility :rtype: list """ - data_std = '' - data_lang = '' - lang = '' + data_std = "" + data_lang = "" + lang = "" responsibilities = [] for responsibility_value in responsibility_data: - value = responsibility_value.get('value', '') - lang = responsibility_value.get('language', '') + value = responsibility_value.get("value", "") + lang = responsibility_value.get("language", "") if lang: data_lang = value else: data_std = value - data_std_items = data_std.split(';') + data_std_items = data_std.split(";") data_lang_items = [] if data_lang: - data_lang_items = data_lang.split(';') + data_lang_items = data_lang.split(";") index = 0 for data_std in data_std_items: out_data = [] - data_value = remove_trailing_punctuation( - data_std.lstrip(), ',.', ':;/-=') + data_value = remove_trailing_punctuation(data_std.lstrip(), ",.", ":;/-=") if data_value: - out_data.append({'value': data_value}) + out_data.append({"value": data_value}) if lang: try: - data_lang_value = \ - remove_trailing_punctuation( - data_lang_items[index].lstrip(), ',.', ':;/-=') + data_lang_value = remove_trailing_punctuation( + data_lang_items[index].lstrip(), ",.", ":;/-=" + ) if not data_lang_value: - raise Exception('missing data') - except Exception as err: - data_lang_value = '[missing data]' - out_data.append({'value': data_lang_value, 'language': lang}) + raise Exception("missing data") + except Exception: + data_lang_value = "[missing data]" + out_data.append({"value": data_lang_value, "language": lang}) index += 1 responsibilities.append(out_data) return responsibilities @@ -2027,19 +2072,20 @@ def get_gnd_de_101(de_588): from rero_ils.modules.utils import requests_retry_session url = ( - 'https://services.dnb.de/sru/authorities?version=1.1' - f'&operation=searchRetrieve&query=identifier%3D{de_588}' - '&recordSchema=oai_dc' + "https://services.dnb.de/sru/authorities?version=1.1" + f"&operation=searchRetrieve&query=identifier%3D{de_588}" + "&recordSchema=oai_dc" ) try: response = requests_retry_session().get(url) if response.status_code == requests.codes.ok: result = xmltodict.parse(response.text) with contextlib.suppress(Exception): - return result['searchRetrieveResponse']['records']['record'][ - 'recordData']['dc']['dc:identifier']['#text'] + return result["searchRetrieveResponse"]["records"]["record"][ + "recordData" + ]["dc"]["dc:identifier"]["#text"] except Exception as err: - current_app.logger.warning(f'get_gnd_de_101 de_588: {de_588} | {err}') + current_app.logger.warning(f"get_gnd_de_101 de_588: {de_588} | {err}") def build_identifier(data): @@ -2049,32 +2095,32 @@ def build_identifier(data): :returns: identifiedBy from $0 or None. 
""" sources_mapping = { - 'RERO': 'RERO', - 'RERO-RAMEAU': 'RERO', - 'IDREF': 'IdRef', - 'GND': 'GND', - 'DE-101': 'GND' + "RERO": "RERO", + "RERO-RAMEAU": "RERO", + "IDREF": "IdRef", + "GND": "GND", + "DE-101": "GND", } result = {} - if datas_0 := utils.force_list(data.get('0')): + if datas_0 := utils.force_list(data.get("0")): has_no_de_101 = True for data_0 in datas_0: # see if we have a $0 with (DE-101) if match := re_identified.match(data_0): with contextlib.suppress(IndexError): - if match.group(1).upper() == 'DE-101': + if match.group(1).upper() == "DE-101": has_no_de_101 = False break for data_0 in datas_0: if match := re_identified.match(data_0): with contextlib.suppress(IndexError): - result['value'] = match.group(2) + result["value"] = match.group(2) source = match.group(1) if identifier_type := sources_mapping.get(source.upper()): - result['type'] = identifier_type + result["type"] = identifier_type return result - elif source.upper() == 'DE-588' and has_no_de_101: + elif source.upper() == "DE-588" and has_no_de_101: if idn := get_gnd_de_101(match.group(2)): - result['value'] = idn - result['type'] = 'GND' + result["value"] = idn + result["type"] = "GND" return result diff --git a/rero_ils/es_templates/__init__.py b/rero_ils/es_templates/__init__.py index c20fe3984a..4de57b7051 100644 --- a/rero_ils/es_templates/__init__.py +++ b/rero_ils/es_templates/__init__.py @@ -20,6 +20,4 @@ def list_es_templates(): """Elasticsearch templates path.""" - return [ - 'rero_ils.es_templates' - ] + return ["rero_ils.es_templates"] diff --git a/rero_ils/facets.py b/rero_ils/facets.py index b90f0f2eec..9272199e46 100644 --- a/rero_ils/facets.py +++ b/rero_ils/facets.py @@ -46,21 +46,20 @@ def default_facets_factory(search, index): urlkwargs = MultiDict() # Check if facets configuration are defined for this index. If not, then we # can't build any facets for this index, just return the current search. - if index not in current_app.config.get('RECORDS_REST_FACETS', {}): + if index not in current_app.config.get("RECORDS_REST_FACETS", {}): return search, urlkwargs - facets = current_app.config['RECORDS_REST_FACETS'].get(index) - all_aggs, aggs = facets.get('aggs', {}), {} + facets = current_app.config["RECORDS_REST_FACETS"].get(index) + all_aggs, aggs = facets.get("aggs", {}), {} # i18n aggregations. # some aggregations' configuration are different depending on language # use for the search. Load the correct configuration for these # aggregations. - interface_language = request.args.get('lang', current_i18n.language) - default_language = current_app.config.get('BABEL_DEFAULT_LANGUAGE') + interface_language = request.args.get("lang", current_i18n.language) + default_language = current_app.config.get("BABEL_DEFAULT_LANGUAGE") for facet_name, facet_body in facets.get("i18n_aggs", {}).items(): - aggr = facet_body.get(interface_language, - facet_body.get(default_language)) + aggr = facet_body.get(interface_language, facet_body.get(default_language)) all_aggs[facet_name] = aggr # Get selected facets @@ -68,7 +67,7 @@ def default_facets_factory(search, index): # 'facets' query string argument to determine which facets it wants to be # built. If this argument isn't defined, all facets defined into the # configuration will be built. - selected_facets = request.args.getlist('facets') or all_aggs.keys() + selected_facets = request.args.getlist("facets") or all_aggs.keys() selected_facets = make_comma_list_a_list(selected_facets) # Filter to keep only configuration about selected facets. 
@@ -82,18 +81,19 @@ def default_facets_factory(search, index): # If no facet field are found, skip this aggregation, because we can't # determine which field used to filter the query facet_field = next( - (facet_body.get(k)['field'] - for k in ['terms', 'date_histogram'] - if k in facet_body), - None + ( + facet_body.get(k)["field"] + for k in ["terms", "date_histogram"] + if k in facet_body + ), + None, ) facet_filter = None if facet_field: # get DSL expression of post_filters, # both single post filters and group of post filters filters, filters_group, urlkwargs = _create_filter_dsl( - urlkwargs, - facets.get('post_filters', {}) + urlkwargs, facets.get("post_filters", {}) ) # create the filter to inject in the facet facet_filter = _facet_filter( @@ -102,8 +102,8 @@ def default_facets_factory(search, index): # Check if 'filter' is defined into the facet configuration. If yes, # then add this filter to the facet filter previously created. - if 'filter' in facet_body: - agg_filter = obj_or_import_string(facet_body.pop('filter')) + if "filter" in facet_body: + agg_filter = obj_or_import_string(facet_body.pop("filter")) if callable(agg_filter): agg_filter = agg_filter(search, urlkwargs) if facet_filter: @@ -117,17 +117,15 @@ def default_facets_factory(search, index): # add a nested aggs_facet in the facet aggs (OK search) if facet_field: facet_body = dict(aggs=dict(aggs_facet=facet_body)) - facet_body['filter'] = facet_filter.to_dict() + facet_body["filter"] = facet_filter.to_dict() aggs[facet_name] = facet_body search = _aggregations(search, aggs) # Query filter - search, urlkwargs = _query_filter( - search, urlkwargs, facets.get('filters', {})) + search, urlkwargs = _query_filter(search, urlkwargs, facets.get("filters", {})) # Post filter - search, urlkwargs = _post_filter( - search, urlkwargs, facets.get('post_filters', {})) + search, urlkwargs = _post_filter(search, urlkwargs, facets.get("post_filters", {})) return search, urlkwargs @@ -153,8 +151,7 @@ def _create_filter_dsl(urlkwargs, definitions): filters_group[name] = [] for f_name, f_filter_factory in filter_factory.items(): # the url parameters values for the facet f_name of the group - values = request.values.getlist(f_name, type=text_type) - if values: + if values := request.values.getlist(f_name, type=text_type): # pass the values to f_filter_factory to obtain the # DSL expression and append it to filters_group filters_group[name].append(f_filter_factory(values)) @@ -162,16 +159,14 @@ def _create_filter_dsl(urlkwargs, definitions): if v not in urlkwargs.getlist(f_name): urlkwargs.add(f_name, v) # create a filter DSL expression for single filters - else: - # the url parameters values for the single facet name - values = request.values.getlist(name, type=text_type) - if values: - # pass the values to the filter_factory to obtain the - # DSL expression and append it to filters - filters.append(filter_factory(values)) - for v in values: - if v not in urlkwargs.getlist(name): - urlkwargs.add(name, v) + # the url parameters values for the single facet name + elif values := request.values.getlist(name, type=text_type): + # pass the values to the filter_factory to obtain the + # DSL expression and append it to filters + filters.append(filter_factory(values)) + for v in values: + if v not in urlkwargs.getlist(name): + urlkwargs.add(name, v) return filters, filters_group, urlkwargs @@ -188,14 +183,13 @@ def _post_filter(search, urlkwargs, definitions): :param definitions: the filters dictionary :returns: """ - filters, filters_group, 
urlkwargs = \ - _create_filter_dsl(urlkwargs, definitions) + filters, filters_group, urlkwargs = _create_filter_dsl(urlkwargs, definitions) for filter_ in filters: search = search.post_filter(filter_) for _, filter_ in filters_group.items(): - q = Q('bool', should=filter_) + q = Q("bool", should=filter_) search = search.post_filter(q) return search, urlkwargs @@ -227,5 +221,5 @@ def _facet_filter(index, filters, filters_group, facet_name, facet_field): for name_group, filters in filters_group.items(): if facet_name != name_group and filters: - q &= Q('bool', should=filters) + q &= Q("bool", should=filters) return q if q != Q() else None diff --git a/rero_ils/filter.py b/rero_ils/filter.py index 57a922dadb..86e31ed88d 100644 --- a/rero_ils/filter.py +++ b/rero_ils/filter.py @@ -34,7 +34,7 @@ from .modules.utils import extracted_data_from_ref -def get_record_by_ref(ref, type='es_record'): +def get_record_by_ref(ref, type="es_record"): """Get record by ref. :param ref: The json $ref. Ex: {$ref: 'xxxxx'}. @@ -44,8 +44,9 @@ def get_record_by_ref(ref, type='es_record'): return extracted_data_from_ref(ref, data=type) -def node_assets(package, patterns=[ - 'runtime*.js', 'polyfills*.js', 'main*.js'], _type='js', tags=''): +def node_assets( + package, patterns=["runtime*.js", "polyfills*.js", "main*.js"], _type="js", tags="" +): """Generate the node assets html code. :param package: The node package path relative to node_modules. @@ -54,17 +55,17 @@ def node_assets(package, patterns=[ "param tags: additional script, link, html tags such as 'defer', etc. "return" html link, script code """ - package_path = os.path.join( - current_app.static_folder, 'node_modules', package) + package_path = os.path.join(current_app.static_folder, "node_modules", package) def to_html(value): - value = re.sub(r'(.*?)\/static', '/static', value) + value = re.sub(r"(.*?)\/static", "/static", value) # default: js html_code = f'' # styles - if _type == 'css': + if _type == "css": html_code = f'' return html_code + output_files = [] for pattern in patterns: files = glob.glob(os.path.join(package_path, pattern)) @@ -72,14 +73,19 @@ def to_html(value): class HTMLSafe: def __html__(): - return Markup('\n'.join(output_files)) + return Markup("\n".join(output_files)) + return HTMLSafe def format_date_filter( - date_str, date_format='full', time_format='medium', - locale=None, delimiter=', ', timezone=None, - timezone_default='utc' + date_str, + date_format="full", + time_format="medium", + locale=None, + delimiter=", ", + timezone=None, + timezone_default="utc", ): """Format the date to the given locale. 
@@ -101,24 +107,26 @@ def format_date_filter( locale = current_i18n.locale.language # Date formatting in GB English (DD/MM/YYYY) - if locale == 'en': - locale += '_GB' + if locale == "en": + locale += "_GB" if timezone: tzinfo = timezone else: - tzinfo = current_app.config.get( - 'BABEL_DEFAULT_TIMEZONE', timezone_default) + tzinfo = current_app.config.get("BABEL_DEFAULT_TIMEZONE", timezone_default) - datetimetz = format_datetime(dateparser.parse( - date_str, locales=['en']), tzinfo=tzinfo, locale='en') + datetimetz = format_datetime( + dateparser.parse(date_str, locales=["en"]), tzinfo=tzinfo, locale="en" + ) if date_format: date = format_date( - dateparser.parse(datetimetz), format=date_format, locale=locale) + dateparser.parse(datetimetz), format=date_format, locale=locale + ) if time_format: time = format_time( - dateparser.parse(datetimetz), format=time_format, locale=locale) + dateparser.parse(datetimetz), format=time_format, locale=locale + ) return delimiter.join(filter(None, [date, time])) @@ -128,7 +136,7 @@ def to_pretty_json(value): value, sort_keys=True, indent=4, - separators=(',', ': '), + separators=(",", ": "), ensure_ascii=False, ) @@ -140,16 +148,15 @@ def jsondumps(data): def text_to_id(text): """Text to id.""" - return re.sub(r'\W', '', text) + return re.sub(r"\W", "", text) -def empty_data(data, replacement_string='No data'): +def empty_data(data, replacement_string="No data"): """Return default string if no data.""" if data: return data - else: - msg = f'{replacement_string}' - return Markup(msg) + msg = f'{replacement_string}' + return Markup(msg) def address_block(metadata, language=None): @@ -172,10 +179,10 @@ def address_block(metadata, language=None): :return: the formatted address. """ try: - tpl_file = f'rero_ils/address_block/{language}.tpl.txt' + tpl_file = f"rero_ils/address_block/{language}.tpl.txt" return render_template(tpl_file, data=metadata) except TemplateNotFound: - tpl_file = 'rero_ils/address_block/eng.tpl.txt' + tpl_file = "rero_ils/address_block/eng.tpl.txt" return render_template(tpl_file, data=metadata) @@ -188,7 +195,7 @@ def message_filter(key): return Message.get(key) -def translate(data, prefix='', separator=', '): +def translate(data, prefix="", separator=", "): """Translate data. :param data: the data to translate @@ -198,7 +205,7 @@ def translate(data, prefix='', separator=', '): """ if data: if isinstance(data, list): - translated = [_(f'{prefix}{item}') for item in data] + translated = [_(f"{prefix}{item}") for item in data] return separator.join(translated) elif isinstance(data, str): - return _(f'{prefix}{data}') + return _(f"{prefix}{data}") diff --git a/rero_ils/jsonschemas/utils.py b/rero_ils/jsonschemas/utils.py index 06ce25da0a..13da675e38 100644 --- a/rero_ils/jsonschemas/utils.py +++ b/rero_ils/jsonschemas/utils.py @@ -52,9 +52,8 @@ def get_remote_json(self, uri, **kwargs): :param kwargs: Keyword arguments passed to json.loads(). :returns: resolved json schema. 
""" - path = current_jsonschemas.url_to_path(uri) - if path: - result = current_jsonschemas.get_schema(path=path) - else: - result = super().get_remote_json(uri, **kwargs) - return result + return ( + current_jsonschemas.get_schema(path=path) + if (path := current_jsonschemas.url_to_path(uri)) + else super().get_remote_json(uri, **kwargs) + ) diff --git a/rero_ils/modules/__init__.py b/rero_ils/modules/__init__.py index a596c42b35..eb51f04938 100644 --- a/rero_ils/modules/__init__.py +++ b/rero_ils/modules/__init__.py @@ -19,4 +19,4 @@ from .ext import REROILSAPP -__all__ = ('REROILSAPP') +__all__ = "REROILSAPP" diff --git a/rero_ils/modules/acquisition/acq_accounts/api.py b/rero_ils/modules/acquisition/acq_accounts/api.py index 9d4c8ed01b..b71eaf0cbe 100644 --- a/rero_ils/modules/acquisition/acq_accounts/api.py +++ b/rero_ils/modules/acquisition/acq_accounts/api.py @@ -23,31 +23,25 @@ from elasticsearch_dsl import Q from flask_babel import gettext as _ -from rero_ils.modules.acquisition.acq_invoices.api import \ - AcquisitionInvoicesSearch -from rero_ils.modules.acquisition.acq_order_lines.api import \ - AcqOrderLinesSearch -from rero_ils.modules.acquisition.acq_order_lines.models import \ - AcqOrderLineStatus -from rero_ils.modules.acquisition.acq_receipt_lines.api import \ - AcqReceiptLinesSearch +from rero_ils.modules.acquisition.acq_invoices.api import AcquisitionInvoicesSearch +from rero_ils.modules.acquisition.acq_order_lines.api import AcqOrderLinesSearch +from rero_ils.modules.acquisition.acq_order_lines.models import AcqOrderLineStatus +from rero_ils.modules.acquisition.acq_receipt_lines.api import AcqReceiptLinesSearch from rero_ils.modules.acquisition.acq_receipts.api import AcqReceiptsSearch from rero_ils.modules.acquisition.api import AcquisitionIlsRecord from rero_ils.modules.api import IlsRecordsIndexer, IlsRecordsSearch from rero_ils.modules.fetchers import id_fetcher from rero_ils.modules.minters import id_minter from rero_ils.modules.providers import Provider -from rero_ils.modules.utils import extracted_data_from_ref, get_objects, \ - sorted_pids +from rero_ils.modules.utils import extracted_data_from_ref, get_objects, sorted_pids from .extensions import ParentAccountDistributionCheck -from .models import AcqAccountExceedanceType, AcqAccountIdentifier, \ - AcqAccountMetadata +from .models import AcqAccountExceedanceType, AcqAccountIdentifier, AcqAccountMetadata AcqAccountProvider = type( - 'AcqAccountProvider', + "AcqAccountProvider", (Provider,), - dict(identifier=AcqAccountIdentifier, pid_type='acac') + dict(identifier=AcqAccountIdentifier, pid_type="acac"), ) acq_account_id_minter = partial(id_minter, provider=AcqAccountProvider) acq_account_id_fetcher = partial(id_fetcher, provider=AcqAccountProvider) @@ -59,9 +53,9 @@ class AcqAccountsSearch(IlsRecordsSearch): class Meta: """Search only on acquisition account index.""" - index = 'acq_accounts' + index = "acq_accounts" doc_types = None - fields = ('*', ) + fields = ("*",) facets = {} default_filter = None @@ -76,31 +70,18 @@ class AcqAccount(AcquisitionIlsRecord): model_cls = AcqAccountMetadata pids_exist_check = { - 'required': { - 'lib': 'library', - 'budg': 'budget' - }, - 'not_required': { - 'parent': 'acq_account', - 'org': 'organisation' - } + "required": {"lib": "library", "budg": "budget"}, + "not_required": {"parent": "acq_account", "org": "organisation"}, } - _extensions = [ - ParentAccountDistributionCheck() - ] + _extensions = [ParentAccountDistributionCheck()] @classmethod - def create(cls, data, 
id_=None, delete_pid=False, - dbcommit=True, reindex=True, **kwargs): + def create( + cls, data, id_=None, delete_pid=False, dbcommit=True, reindex=True, **kwargs + ): """Create Acquisition Order Line record.""" - # In order to check if some fields value are unique we need to ensure - # that object has been indexed just after the creation. - # TODO :: Maybe a better approach should be pass by `resource.service` - # and/or always use REST API. - record = super().create( - data, id_, delete_pid, dbcommit, reindex, **kwargs) - return record + return super().create(data, id_, delete_pid, dbcommit, reindex, **kwargs) def __hash__(self): """Builtin function to return an hash for this account.""" @@ -110,22 +91,22 @@ def __hash__(self): @property def name(self): """Shortcut for acquisition account name.""" - return self.get('name') + return self.get("name") @property def library_pid(self): """Shortcut for acquisition account library PID.""" - return extracted_data_from_ref(self.get('library')) + return extracted_data_from_ref(self.get("library")) @property def library(self): """Shortcut to get related library.""" - return extracted_data_from_ref(self.get('library'), data='record') + return extracted_data_from_ref(self.get("library"), data="record") @property def budget(self): """Shortcut to get related budget.""" - return extracted_data_from_ref(self.get('budget'), data='record') + return extracted_data_from_ref(self.get("budget"), data="record") @property def organisation_pid(self): @@ -135,7 +116,7 @@ def organisation_pid(self): @property def parent_pid(self): """Shortcut to get the parent acquisition account pid.""" - if parent := self.get('parent'): + if parent := self.get("parent"): return extracted_data_from_ref(parent) @property @@ -160,7 +141,7 @@ def parent(self): @property def is_root(self): """Check if the account is a root account.""" - return 'parent' not in self + return "parent" not in self @property def depth(self): @@ -180,7 +161,7 @@ def is_active(self): To know if an account is is_active, we need to check the related budget. This budget has an 'is_active' field. """ - budget = extracted_data_from_ref(self.get('budget'), data='record') + budget = extracted_data_from_ref(self.get("budget"), data="record") return budget.is_active if budget else False @property @@ -198,20 +179,20 @@ def encumbrance_amount(self): status_list = [ AcqOrderLineStatus.APPROVED, AcqOrderLineStatus.ORDERED, - AcqOrderLineStatus.PARTIALLY_RECEIVED + AcqOrderLineStatus.PARTIALLY_RECEIVED, ] - query = AcqOrderLinesSearch()\ - .filter('term', acq_account__pid=self.pid)\ - .filter('terms', status=status_list)\ - - query.aggs.metric('total_amount', 'sum', - field='total_unreceived_amount') + query = ( + AcqOrderLinesSearch() + .filter("term", acq_account__pid=self.pid) + .filter("terms", status=status_list) + ) + query.aggs.metric("total_amount", "sum", field="total_unreceived_amount") results = query.execute() self_amount = results.aggregations.total_amount.value # Encumbrance of children accounts - query = AcqAccountsSearch().filter('term', parent__pid=self.pid) - query.aggs.metric('total', 'sum', field='encumbrance_amount.total') + query = AcqAccountsSearch().filter("term", parent__pid=self.pid) + query.aggs.metric("total", "sum", field="encumbrance_amount.total") results = query.execute() children_amount = results.aggregations.total.value @@ -228,34 +209,33 @@ def expenditure_amount(self): expenditure amount. 
""" # Expenditure of this account - search = AcqReceiptLinesSearch() \ - .filter('term', acq_account__pid=self.pid) - search.aggs.metric('sum_receipt_lines', 'sum', field='total_amount') + search = AcqReceiptLinesSearch().filter("term", acq_account__pid=self.pid) + search.aggs.metric("sum_receipt_lines", "sum", field="total_amount") results = search.execute() lines_expenditure = results.aggregations.sum_receipt_lines.value - receipt_expenditure = 0 - search = AcqReceiptsSearch() \ - .filter( - 'nested', - path='amount_adjustments', - query=Q( - 'bool', must=[ - Q('match', - amount_adjustments__acq_account__pid=self.pid) - ] - ) + search = AcqReceiptsSearch().filter( + "nested", + path="amount_adjustments", + query=Q( + "bool", must=[Q("match", amount_adjustments__acq_account__pid=self.pid)] + ), + ) + receipt_expenditure = sum( + sum( + [ + adjustment.amount + for adjustment in hit.amount_adjustments + if adjustment.acq_account.pid == self.pid + ] ) - for hit in search.scan(): - receipt_expenditure += sum([ - adjustment.amount for adjustment in hit.amount_adjustments - if adjustment.acq_account.pid == self.pid - ]) + for hit in search.scan() + ) self_amount = lines_expenditure + receipt_expenditure # Expenditure of children accounts - query = AcqAccountsSearch().filter('term', parent__pid=self.pid) - query.aggs.metric('total', 'sum', field='expenditure_amount.total') + query = AcqAccountsSearch().filter("term", parent__pid=self.pid) + query.aggs.metric("total", "sum", field="expenditure_amount.total") results = query.execute() children_amount = round(results.aggregations.total.value, 2) return round(self_amount, 2), round(children_amount, 2) @@ -273,17 +253,18 @@ def remaining_balance(self): :return: A tuple with self balance and total balance """ - initial_amount = self.get('allocated_amount') + initial_amount = self.get("allocated_amount") encumbrance = self.encumbrance_amount expenditure = self.expenditure_amount - self_balance = initial_amount \ - - self.distribution \ - - encumbrance[0] \ - - expenditure[0] - total_balance = initial_amount \ - - sum(list(self.encumbrance_amount)) \ + self_balance = ( + initial_amount - self.distribution - encumbrance[0] - expenditure[0] + ) + total_balance = ( + initial_amount + - sum(list(self.encumbrance_amount)) - sum(list(self.expenditure_amount)) + ) return round(self_balance, 2), round(total_balance, 2) @@ -297,8 +278,8 @@ def distribution(self): be distributed by the parent account is 4000. The distribution cannot exceed the allocated amount of the account. """ - query = AcqAccountsSearch().filter('term', parent__pid=self.pid) - query.aggs.metric('total_amount', 'sum', field='allocated_amount') + query = AcqAccountsSearch().filter("term", parent__pid=self.pid) + query.aggs.metric("total_amount", "sum", field="allocated_amount") results = query.execute() return round(results.aggregations.total_amount.value, 2) @@ -311,10 +292,10 @@ def get_exceedance(self, exceed_type): """ rate = 0 if exceed_type == AcqAccountExceedanceType.ENCUMBRANCE: - rate = self.get('encumbrance_exceedance', 0) + rate = self.get("encumbrance_exceedance", 0) elif exceed_type == AcqAccountExceedanceType.EXPENDITURE: - rate = self.get('expenditure_exceedance', 0) - return round(self['allocated_amount'] * rate) / 100 + rate = self.get("expenditure_exceedance", 0) + return round(self["allocated_amount"] * rate) / 100 def transfer_fund(self, target_account, amount): """Transfer funds between two accounts. 
@@ -331,9 +312,9 @@ def transfer_fund(self, target_account, amount):
         #   * target account == source account
         #   * requested transfer amount is available on source account.
         if self == target_account:
-            raise ValueError(_('Cannot transfer fund to myself.'))
+            raise ValueError(_("Cannot transfer fund to myself."))
         if amount > self.remaining_balance[0]:
-            msg = _('Not enough available money from source account.')
+            msg = _("Not enough available money from source account.")
             raise ValueError(msg)

         # CASE 1 : target account is an ancestor of source account.
@@ -341,10 +322,10 @@ def transfer_fund(self, target_account, amount):
         #   ancestor accounts until reaching the target account
         source_ancestors = list(self.get_ancestors())
         if target_account in source_ancestors:
-            for acc in ([self] + source_ancestors):
+            for acc in [self] + source_ancestors:
                 if acc == target_account:
                     break
-                acc['allocated_amount'] -= amount
+                acc["allocated_amount"] -= amount
                 acc.update(acc, dbcommit=True, reindex=False)
             self.reindex()  # index myself and all my ancestors
             return

@@ -358,18 +339,17 @@ def transfer_fund(self, target_account, amount):
         #   to increase the allocated amount.
         target_ancestors = list(target_account.get_ancestors())
         common_ancestors = list(
-            set([self] + source_ancestors) &
-            set([target_account] + target_ancestors)
+            set([self] + source_ancestors) & set([target_account] + target_ancestors)
         )
         common_ancestor = None
         if common_ancestors:
             common_ancestor = max(common_ancestors, key=lambda a: a.depth)
         # If we found a common ancestor, we are in the same tree
         if common_ancestor:
-            for acc in ([self] + source_ancestors):
+            for acc in [self] + source_ancestors:
                 if acc == common_ancestor:
                     break
-                acc['allocated_amount'] -= amount
+                acc["allocated_amount"] -= amount
                 # Note : We need to reindex during update, to update parent
                 # account balances. Without this reindex, the pre_commit hook
                 # will detect a problem
@@ -381,7 +361,7 @@ def transfer_fund(self, target_account, amount):
                     break
                 ancestors_to_apply.append(acc)
             for acc in reversed([target_account] + ancestors_to_apply):
-                acc['allocated_amount'] += amount
+                acc["allocated_amount"] += amount
                 acc.update(acc, dbcommit=True, reindex=False)
             target_account.reindex()
             return

@@ -391,11 +371,11 @@ def transfer_fund(self, target_account, amount):
         #     the allocated amount.
         #   * from target root account to target account, increase the
         #     allocated amount.
-        for acc in ([self] + source_ancestors):
-            acc['allocated_amount'] -= amount
+        for acc in [self] + source_ancestors:
+            acc["allocated_amount"] -= amount
             acc.update(acc, dbcommit=True, reindex=True)
         for acc in reversed([target_account] + target_ancestors):
-            acc['allocated_amount'] += amount
+            acc["allocated_amount"] += amount
             acc.update(acc, dbcommit=True, reindex=False)
         target_account.reindex()

@@ -414,8 +394,8 @@ def get_children(self, output=None):
         :param output: output method. 'count' or None
         :return a generator of children accounts (or length).
         """
-        query = AcqAccountsSearch().filter('term', parent__pid=self.pid)
-        if output == 'count':
+        query = AcqAccountsSearch().filter("term", parent__pid=self.pid)
+        if output == "count":
             return query.count()
         return get_objects(AcqAccount, query)

@@ -426,20 +406,20 @@ def get_links_to_me(self, get_pids=False):
                          if False count of linked records
         """
         links = {}
-        order_lines_query = AcqOrderLinesSearch()\
-            .filter('term', acq_account__pid=self.pid)
-        children_query = AcqAccountsSearch()\
-            .filter('term', parent__pid=self.pid)
-        invoices_query = AcquisitionInvoicesSearch()\
-            .filter('term', invoice_items__acq_account__pid=self.pid)
-        receipts_query = AcqReceiptsSearch()\
-            .filter('nested',
-                    path='amount_adjustments',
-                    query=Q(
-                        'bool',
-                        must=[Q('match',
-                                amount_adjustments__acq_account__pid=self.pid)]
-                    ))
+        order_lines_query = AcqOrderLinesSearch().filter(
+            "term", acq_account__pid=self.pid
+        )
+        children_query = AcqAccountsSearch().filter("term", parent__pid=self.pid)
+        invoices_query = AcquisitionInvoicesSearch().filter(
+            "term", invoice_items__acq_account__pid=self.pid
+        )
+        receipts_query = AcqReceiptsSearch().filter(
+            "nested",
+            path="amount_adjustments",
+            query=Q(
+                "bool", must=[Q("match", amount_adjustments__acq_account__pid=self.pid)]
+            ),
+        )

         if get_pids:
             order_lines = sorted_pids(order_lines_query)
@@ -453,13 +433,13 @@ def get_links_to_me(self, get_pids=False):
             receipts = receipts_query.count()

         if order_lines:
-            links['acq_order_lines'] = order_lines
+            links["acq_order_lines"] = order_lines
         if children:
-            links['acq_accounts'] = children
+            links["acq_accounts"] = children
         if invoices:
-            links['acq_invoices'] = invoices
+            links["acq_invoices"] = invoices
         if receipts:
-            links['acq_receipts'] = receipts
+            links["acq_receipts"] = receipts
         return links

     def reasons_not_to_delete(self):
@@ -467,10 +447,10 @@ def reasons_not_to_delete(self):
         cannot_delete = {}
         # Note: not possible to delete records attached to rolled_over budget.
         if not self.is_active:
-            cannot_delete['links'] = {'rolled_over': True}
+            cannot_delete["links"] = {"rolled_over": True}
             return cannot_delete
         if links := self.get_links_to_me():
-            cannot_delete['links'] = links
+            cannot_delete["links"] = links
         return cannot_delete

@@ -498,4 +478,4 @@ def bulk_index(self, record_id_iterator):

         :param record_id_iterator: Iterator yielding record UUIDs.
         """
-        super().bulk_index(record_id_iterator, doc_type='acac')
+        super().bulk_index(record_id_iterator, doc_type="acac")
""" # Keep only some attributes from AcqOrderLine object initial dump - for attr in ['pid', 'name', 'number']: + for attr in ["pid", "name", "number"]: if value := record.get(attr): data.update({attr: value}) return {k: v for k, v in data.items() if v} diff --git a/rero_ils/modules/acquisition/acq_accounts/extensions.py b/rero_ils/modules/acquisition/acq_accounts/extensions.py index 0454d9e4ab..f3493b7246 100644 --- a/rero_ils/modules/acquisition/acq_accounts/extensions.py +++ b/rero_ils/modules/acquisition/acq_accounts/extensions.py @@ -28,28 +28,34 @@ class ParentAccountDistributionCheck(RecordExtension): def _check_balance(self, record): """Check if parent balance has enough money.""" original_record = record.__class__.get_record_by_pid(record.pid) - amount_to_check = record.get('allocated_amount') + amount_to_check = record.get("allocated_amount") if original_record: - amount_to_check -= original_record.get('allocated_amount') + amount_to_check -= original_record.get("allocated_amount") parent = record.parent # If we grow the allocated amount: # - Either record is a root account. In this case, nothing to check! # - Either record has parent, we need to check if parent has enough # balance to do that. - if amount_to_check > 0 and parent: - if parent.remaining_balance[0] < amount_to_check: - msg = _('Parent account available amount too low') - raise ValidationError(msg) + if ( + amount_to_check > 0 + and parent + and parent.remaining_balance[0] < amount_to_check + ): + msg = _("Parent account available amount too low") + raise ValidationError(msg) # If we decrease the allocated amount: # - Either record doesn't have any children : nothing to check! # - Either record has child : we need to decrease more the record # self balance (money still available for this account) - if amount_to_check < 0 and record.get_children(output='count'): - if original_record.remaining_balance[0] < abs(amount_to_check): - msg = _('Remaining balance too low') - raise ValidationError(msg) + if ( + amount_to_check < 0 + and record.get_children(output="count") + and original_record.remaining_balance[0] < abs(amount_to_check) + ): + msg = _("Remaining balance too low") + raise ValidationError(msg) pre_commit = _check_balance pre_create = _check_balance diff --git a/rero_ils/modules/acquisition/acq_accounts/jsonresolver.py b/rero_ils/modules/acquisition/acq_accounts/jsonresolver.py index d82f887af0..cbb2cf55cb 100644 --- a/rero_ils/modules/acquisition/acq_accounts/jsonresolver.py +++ b/rero_ils/modules/acquisition/acq_accounts/jsonresolver.py @@ -22,13 +22,13 @@ from invenio_pidstore.models import PersistentIdentifier, PIDStatus -@jsonresolver.route('/api/acq_accounts/', host='bib.rero.ch') +@jsonresolver.route("/api/acq_accounts/", host="bib.rero.ch") def acq_account_resolver(pid): """Resolver for acq_account record.""" - persistent_id = PersistentIdentifier.get('acac', pid) + persistent_id = PersistentIdentifier.get("acac", pid) if persistent_id.status == PIDStatus.REGISTERED: return dict(pid=persistent_id.pid_value) current_app.logger.error( - f'Doc resolver error: /api/acq_accounts/{pid} {persistent_id}' + f"Doc resolver error: /api/acq_accounts/{pid} {persistent_id}" ) - raise Exception('unable to resolve') + raise Exception("unable to resolve") diff --git a/rero_ils/modules/acquisition/acq_accounts/listener.py b/rero_ils/modules/acquisition/acq_accounts/listener.py index f3a25bbdb7..cc88019157 100644 --- a/rero_ils/modules/acquisition/acq_accounts/listener.py +++ 
b/rero_ils/modules/acquisition/acq_accounts/listener.py @@ -21,8 +21,15 @@ from .models import AcqAccountExceedanceType -def enrich_acq_account_data(sender, json=None, record=None, index=None, - doc_type=None, arguments=None, **dummy_kwargs): +def enrich_acq_account_data( + sender, + json=None, + record=None, + index=None, + doc_type=None, + arguments=None, + **dummy_kwargs, +): """Signal sent before a record is indexed. :param json: The dumped record dictionary which can be modified. @@ -30,54 +37,45 @@ def enrich_acq_account_data(sender, json=None, record=None, index=None, :param index: The index in which the record will be indexed. :param doc_type: The doc_type for the record. """ - if index.split('-')[0] == AcqAccountsSearch.Meta.index: - account = record - if not isinstance(record, AcqAccount): - account = AcqAccount.get_record_by_pid(record.get('pid')) + if index.split("-")[0] != AcqAccountsSearch.Meta.index: + return + account = record + if not isinstance(record, AcqAccount): + account = AcqAccount.get_record_by_pid(record.get("pid")) - # compute the exceedance amounts - amount = account.get('allocated_amount', 0) - if amount: - if 'encumbrance_exceedance' in account: - json['encumbrance_exceedance'] = dict( - value=account.get('encumbrance_exceedance'), - amount=account.get_exceedance( - AcqAccountExceedanceType.ENCUMBRANCE) - ) - if 'expenditure_exceedance' in account: - json['expenditure_exceedance'] = dict( - value=account.get('expenditure_exceedance'), - amount=account.get_exceedance( - AcqAccountExceedanceType.EXPENDITURE) - ) - else: - json.pop('encumbrance_exceedance', None) - json.pop('expenditure_exceedance', None) + if amount := account.get("allocated_amount", 0): + if "encumbrance_exceedance" in account: + json["encumbrance_exceedance"] = dict( + value=account.get("encumbrance_exceedance"), + amount=account.get_exceedance(AcqAccountExceedanceType.ENCUMBRANCE), + ) + if "expenditure_exceedance" in account: + json["expenditure_exceedance"] = dict( + value=account.get("expenditure_exceedance"), + amount=account.get_exceedance(AcqAccountExceedanceType.EXPENDITURE), + ) + else: + json.pop("encumbrance_exceedance", None) + json.pop("expenditure_exceedance", None) - # encumbrance, expenditure and balance amounts - (self_amount, children_amount) = account.encumbrance_amount - json['encumbrance_amount'] = dict( - self=self_amount, - children=children_amount, - total=self_amount + children_amount - ) - (self_amount, children_amount) = account.expenditure_amount - json['expenditure_amount'] = dict( - self=self_amount, - children=children_amount, - total=self_amount + children_amount - ) - (self_amount, total_amount) = account.remaining_balance - json['remaining_balance'] = dict( - self=self_amount, - total=total_amount - ) + # encumbrance, expenditure and balance amounts + (self_amount, children_amount) = account.encumbrance_amount + json["encumbrance_amount"] = dict( + self=self_amount, + children=children_amount, + total=self_amount + children_amount, + ) + (self_amount, children_amount) = account.expenditure_amount + json["expenditure_amount"] = dict( + self=self_amount, + children=children_amount, + total=self_amount + children_amount, + ) + (self_amount, total_amount) = account.remaining_balance + json["remaining_balance"] = dict(self=self_amount, total=total_amount) - # additional fields for ES - json['is_active'] = account.is_active - json['depth'] = account.depth - json['distribution'] = account.distribution - json['organisation'] = dict( - pid=account.organisation_pid, - 
type='org' - ) + # additional fields for ES + json["is_active"] = account.is_active + json["depth"] = account.depth + json["distribution"] = account.distribution + json["organisation"] = dict(pid=account.organisation_pid, type="org") diff --git a/rero_ils/modules/acquisition/acq_accounts/models.py b/rero_ils/modules/acquisition/acq_accounts/models.py index 539efff965..5b12ef6042 100644 --- a/rero_ils/modules/acquisition/acq_accounts/models.py +++ b/rero_ils/modules/acquisition/acq_accounts/models.py @@ -27,23 +27,24 @@ class AcqAccountIdentifier(RecordIdentifier): """Sequence generator for acquisition account identifiers.""" - __tablename__ = 'acq_account_id' - __mapper_args__ = {'concrete': True} + __tablename__ = "acq_account_id" + __mapper_args__ = {"concrete": True} recid = db.Column( - db.BigInteger().with_variant(db.Integer, 'sqlite'), - primary_key=True, autoincrement=True, + db.BigInteger().with_variant(db.Integer, "sqlite"), + primary_key=True, + autoincrement=True, ) class AcqAccountMetadata(db.Model, RecordMetadataBase): """AcqAccount record metadata.""" - __tablename__ = 'acq_account_metadata' + __tablename__ = "acq_account_metadata" class AcqAccountExceedanceType: """Type of exceedance about an acquisition account.""" - ENCUMBRANCE = 'encumbrance' - EXPENDITURE = 'expenditure' + ENCUMBRANCE = "encumbrance" + EXPENDITURE = "expenditure" diff --git a/rero_ils/modules/acquisition/acq_accounts/permissions.py b/rero_ils/modules/acquisition/acq_accounts/permissions.py index cb1e105560..35521c66be 100644 --- a/rero_ils/modules/acquisition/acq_accounts/permissions.py +++ b/rero_ils/modules/acquisition/acq_accounts/permissions.py @@ -19,20 +19,23 @@ """Permissions for Acquisition account.""" from invenio_access import action_factory -from rero_ils.modules.permissions import AllowedByAction, \ - AllowedByActionRestrictByManageableLibrary, DisallowedIfRollovered, \ - RecordPermissionPolicy +from rero_ils.modules.permissions import ( + AllowedByAction, + AllowedByActionRestrictByManageableLibrary, + DisallowedIfRollovered, + RecordPermissionPolicy, +) from .api import AcqAccount # Actions to control acquisition accounts resource policies -search_action = action_factory('acac-search') -read_action = action_factory('acac-read') -create_action = action_factory('acac-create') -update_action = action_factory('acac-update') -delete_action = action_factory('acac-delete') -access_action = action_factory('acac-access') -transfer_action = action_factory('acac-transfer') +search_action = action_factory("acac-search") +read_action = action_factory("acac-read") +create_action = action_factory("acac-create") +update_action = action_factory("acac-update") +delete_action = action_factory("acac-delete") +access_action = action_factory("acac-access") +transfer_action = action_factory("acac-transfer") class AcqAccountPermissionPolicy(RecordPermissionPolicy): @@ -42,13 +45,13 @@ class AcqAccountPermissionPolicy(RecordPermissionPolicy): can_read = [AllowedByActionRestrictByManageableLibrary(read_action)] can_create = [ AllowedByActionRestrictByManageableLibrary(create_action), - DisallowedIfRollovered(AcqAccount) + DisallowedIfRollovered(AcqAccount), ] can_update = [ AllowedByActionRestrictByManageableLibrary(update_action), - DisallowedIfRollovered(AcqAccount) + DisallowedIfRollovered(AcqAccount), ] can_delete = [ AllowedByActionRestrictByManageableLibrary(delete_action), - DisallowedIfRollovered(AcqAccount) + DisallowedIfRollovered(AcqAccount), ] diff --git 
a/rero_ils/modules/acquisition/acq_accounts/serializers/__init__.py b/rero_ils/modules/acquisition/acq_accounts/serializers/__init__.py index 69067cc501..e38d311443 100644 --- a/rero_ils/modules/acquisition/acq_accounts/serializers/__init__.py +++ b/rero_ils/modules/acquisition/acq_accounts/serializers/__init__.py @@ -20,36 +20,40 @@ from invenio_records_rest.serializers.response import record_responsify -from rero_ils.modules.serializers import RecordSchemaJSONV1, \ - search_responsify, search_responsify_file +from rero_ils.modules.serializers import ( + RecordSchemaJSONV1, + search_responsify, + search_responsify_file, +) from .csv import AcqAccountCSVSerializer from .json import AcqAccountJSONSerializer __all__ = [ - 'json_acq_account_search', - 'json_acq_account_response', - 'csv_acq_account_search' + "json_acq_account_search", + "json_acq_account_response", + "csv_acq_account_search", ] """JSON v1 serializer.""" _json = AcqAccountJSONSerializer(RecordSchemaJSONV1) -json_acq_account_search = search_responsify(_json, 'application/rero+json') -json_acq_account_response = record_responsify(_json, 'application/rero+json') +json_acq_account_search = search_responsify(_json, "application/rero+json") +json_acq_account_response = record_responsify(_json, "application/rero+json") """CSV serializer.""" _csv = AcqAccountCSVSerializer( csv_included_fields=[ - 'account_pid', 'account_name', 'account_number', - 'account_allocated_amount', 'account_available_amount', - 'account_current_encumbrance', 'account_current_expenditure', - 'account_available_balance' + "account_pid", + "account_name", + "account_number", + "account_allocated_amount", + "account_available_amount", + "account_current_encumbrance", + "account_current_expenditure", + "account_available_balance", ] ) csv_acq_account_search = search_responsify_file( - _csv, - 'text/csv', - file_extension='csv', - file_prefix='export-accounts' + _csv, "text/csv", file_extension="csv", file_prefix="export-accounts" ) diff --git a/rero_ils/modules/acquisition/acq_accounts/serializers/csv.py b/rero_ils/modules/acquisition/acq_accounts/serializers/csv.py index 3638ab1dad..8a0cc83ed7 100644 --- a/rero_ils/modules/acquisition/acq_accounts/serializers/csv.py +++ b/rero_ils/modules/acquisition/acq_accounts/serializers/csv.py @@ -27,8 +27,9 @@ class AcqAccountCSVSerializer(CSVSerializer): """Mixin serializing records as CSV.""" - def serialize_search(self, pid_fetcher, search_result, links=None, - item_links_factory=None): + def serialize_search( + self, pid_fetcher, search_result, links=None, item_links_factory=None + ): """Serialize a search result. :param pid_fetcher: Persistent identifier fetcher. @@ -36,33 +37,34 @@ def serialize_search(self, pid_fetcher, search_result, links=None, :param links: Dictionary of links to add to response. :param item_links_factory: Factory function for record links. 
""" + def generate_csv(): headers = dict.fromkeys(self.csv_included_fields) # write the CSV output in memory line = Line() - writer = csv.DictWriter( - line, quoting=csv.QUOTE_ALL, fieldnames=headers) + writer = csv.DictWriter(line, quoting=csv.QUOTE_ALL, fieldnames=headers) writer.writeheader() yield line.read() for result in search_result: account = result.to_dict() csv_data = { - 'account_pid': account['pid'], - 'account_name': account.get('name'), - 'account_number': account.get('number'), - 'account_allocated_amount': - account.get('allocated_amount'), - 'account_available_amount': - account.get('allocated_amount', 0) - - account.get('distribution', 0), - 'account_current_encumbrance': - account.get('encumbrance_amount', {}).get('self'), - 'account_current_expenditure': - account.get('expenditure_amount', {}).get('self'), - 'account_available_balance': - account.get('remaining_balance').get('self') + "account_pid": account["pid"], + "account_name": account.get("name"), + "account_number": account.get("number"), + "account_allocated_amount": account.get("allocated_amount"), + "account_available_amount": account.get("allocated_amount", 0) + - account.get("distribution", 0), + "account_current_encumbrance": account.get( + "encumbrance_amount", {} + ).get("self"), + "account_current_expenditure": account.get( + "expenditure_amount", {} + ).get("self"), + "account_available_balance": account.get("remaining_balance").get( + "self" + ), } # write csv data data = self.process_dict(csv_data) diff --git a/rero_ils/modules/acquisition/acq_accounts/serializers/json.py b/rero_ils/modules/acquisition/acq_accounts/serializers/json.py index 90ddf5f185..ad63d2c1b8 100644 --- a/rero_ils/modules/acquisition/acq_accounts/serializers/json.py +++ b/rero_ils/modules/acquisition/acq_accounts/serializers/json.py @@ -29,29 +29,31 @@ class AcqAccountJSONSerializer(ACQJSONSerializer): def preprocess_record(self, pid, record, links_factory=None, **kwargs): """Prepare a record and persistent identifier for serialization.""" # Add some ES stored keys into response - query = AcqAccountsSearch().filter('term', pid=record.pid).source() + query = AcqAccountsSearch().filter("term", pid=record.pid).source() if hit := next(query.scan(), None): hit_metadata = hit.to_dict() - keys = ['depth', 'distribution', 'is_active', - 'encumbrance_exceedance', 'expenditure_exceedance', - 'encumbrance_amount', 'expenditure_amount', - 'remaining_balance'] + keys = [ + "depth", + "distribution", + "is_active", + "encumbrance_exceedance", + "expenditure_exceedance", + "encumbrance_amount", + "expenditure_amount", + "remaining_balance", + ] for key in keys: value = hit_metadata.get(key) if value is not None: record[key] = value return super().preprocess_record( - pid=pid, - record=record, - links_factory=links_factory, - kwargs=kwargs + pid=pid, record=record, links_factory=links_factory, kwargs=kwargs ) def _postprocess_search_aggregations(self, aggregations: dict) -> None: """Post-process aggregations from a search result.""" JSONSerializer.enrich_bucket_with_data( - aggregations.get('library', {}).get('buckets', []), - LibrariesSearch, 'name' + aggregations.get("library", {}).get("buckets", []), LibrariesSearch, "name" ) super()._postprocess_search_aggregations(aggregations) diff --git a/rero_ils/modules/acquisition/acq_accounts/utils.py b/rero_ils/modules/acquisition/acq_accounts/utils.py index f05ec70542..ba7782d41f 100644 --- a/rero_ils/modules/acquisition/acq_accounts/utils.py +++ b/rero_ils/modules/acquisition/acq_accounts/utils.py 
diff --git a/rero_ils/modules/acquisition/acq_accounts/utils.py b/rero_ils/modules/acquisition/acq_accounts/utils.py
index f05ec70542..ba7782d41f 100644
--- a/rero_ils/modules/acquisition/acq_accounts/utils.py
+++ b/rero_ils/modules/acquisition/acq_accounts/utils.py
@@ -25,8 +25,9 @@ def sort_accounts_as_tree(accounts):
     :param accounts: the accounts to sort.
     :return: the same account list sorted as a hierarchical tree.
     """
+
     def sort_by_name_key(acc):
-        return acc.get('name')
+        return acc.get("name")

     def _get_children_account(acc):
         children = filter(lambda a: a.parent_pid == acc.pid, accounts)
diff --git a/rero_ils/modules/acquisition/acq_accounts/views.py b/rero_ils/modules/acquisition/acq_accounts/views.py
index f151637c2a..666945af6f 100644
--- a/rero_ils/modules/acquisition/acq_accounts/views.py
+++ b/rero_ils/modules/acquisition/acq_accounts/views.py
@@ -22,19 +22,14 @@

 from flask import Blueprint, jsonify, request

-from rero_ils.modules.decorators import check_logged_as_librarian, \
-    jsonify_error
+from rero_ils.modules.decorators import check_logged_as_librarian, jsonify_error

 from .api import AcqAccount

-api_blueprint = Blueprint(
-    'api_acq_account',
-    __name__,
-    url_prefix='/acq_accounts'
-)
+api_blueprint = Blueprint("api_acq_account", __name__, url_prefix="/acq_accounts")


-@api_blueprint.route('/transfer_funds', methods=['GET'])
+@api_blueprint.route("/transfer_funds", methods=["GET"])
 @check_logged_as_librarian
 @jsonify_error
 def transfer_funds():
@@ -57,20 +52,20 @@ def transfer_funds():
     #   * source: source account pid. Account must exists and active.
     #   * target: target account pid. Account must exists and active.
     #   * amount: the amount to transfer. Must be a positive number.
-    for arg_name in ['source', 'target', 'amount']:
+    for arg_name in ["source", "target", "amount"]:
         if arg_name not in request.args:
             raise KeyError(f"'{arg_name}' argument is required !")

-    source_acq = AcqAccount.get_record_by_pid(request.args['source'])
+    source_acq = AcqAccount.get_record_by_pid(request.args["source"])
     if source_acq is None:
-        raise ValueError('Unable to load source account.')
+        raise ValueError("Unable to load source account.")
     elif not source_acq.is_active:
-        raise ValueError('Source account isn\'t active.')
-    target_acq = AcqAccount.get_record_by_pid(request.args['target'])
+        raise ValueError("Source account isn't active.")
+    target_acq = AcqAccount.get_record_by_pid(request.args["target"])
     if target_acq is None:
-        raise ValueError('Unable to load target account.')
+        raise ValueError("Unable to load target account.")
     elif not target_acq.is_active:
-        raise ValueError('Target account isn\'t active.')
-    amount = float(request.args['amount'])
+        raise ValueError("Target account isn't active.")
+    amount = float(request.args["amount"])
     if amount < 0:
         raise ValueError("'amount' should be a positive number.")
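[Annotation, not part of the diff: the `transfer_funds` view validates three
query-string arguments (source, target, amount) before delegating to
`AcqAccount.transfer_fund`. A hypothetical client call, assuming the
blueprint is mounted under /api and an already authenticated librarian
session -- host, pids and cookie are placeholders, not real values:

    import requests

    response = requests.get(
        "https://ils.example.org/api/acq_accounts/transfer_funds",
        params={"source": "acac1", "target": "acac2", "amount": "100"},
        cookies={"session": "<librarian-session-cookie>"},
    )
]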
diff --git a/rero_ils/modules/acquisition/acq_invoices/api.py b/rero_ils/modules/acquisition/acq_invoices/api.py
index 1346e7bb58..3fe8c661ad 100644
--- a/rero_ils/modules/acquisition/acq_invoices/api.py
+++ b/rero_ils/modules/acquisition/acq_invoices/api.py
@@ -32,15 +32,14 @@

 # provider
 AcquisitionInvoiceProvider = type(
-    'AcqInvoiceProvider',
+    "AcqInvoiceProvider",
     (Provider,),
-    dict(identifier=AcquisitionInvoiceIdentifier, pid_type='acin')
+    dict(identifier=AcquisitionInvoiceIdentifier, pid_type="acin"),
 )
 # minter
 acq_invoice_id_minter = partial(id_minter, provider=AcquisitionInvoiceProvider)
 # fetcher
-acq_invoice_id_fetcher = partial(
-    id_fetcher, provider=AcquisitionInvoiceProvider)
+acq_invoice_id_fetcher = partial(id_fetcher, provider=AcquisitionInvoiceProvider)


 class AcquisitionInvoicesSearch(IlsRecordsSearch):
@@ -49,9 +48,9 @@ class AcquisitionInvoicesSearch(IlsRecordsSearch):
     class Meta:
         """Search only on acq_invoice index."""

-        index = 'acq_invoices'
+        index = "acq_invoices"
         doc_types = None
-        fields = ('*', )
+        fields = ("*",)
         facets = {}

         default_filter = None
@@ -65,24 +64,18 @@ class AcquisitionInvoice(AcquisitionIlsRecord):
     provider = AcquisitionInvoiceProvider
     model_cls = AcquisitionInvoiceMetadata
     pids_exist_check = {
-        'required': {
-            'lib': 'library',
-            'vndr': 'vendor'
-        },
-        'not_required': {
-            'org': 'organisation'
-        }
+        "required": {"lib": "library", "vndr": "vendor"},
+        "not_required": {"org": "organisation"},
     }

     @classmethod
-    def create(cls, data, id_=None, delete_pid=False,
-               dbcommit=False, reindex=False, **kwargs):
+    def create(
+        cls, data, id_=None, delete_pid=False, dbcommit=False, reindex=False, **kwargs
+    ):
         """Create acquisition invoice record."""
         cls._acquisition_invoice_build_org_ref(data)
         cls._build_total_amount_of_invoice(data)
-        record = super().create(
-            data, id_, delete_pid, dbcommit, reindex, **kwargs)
-        return record
+        return super().create(data, id_, delete_pid, dbcommit, reindex, **kwargs)

     def update(self, data, commit=True, dbcommit=True, reindex=True):
         """Update Acquisition Invoice record."""
@@ -94,21 +87,22 @@ def update(self, data, commit=True, dbcommit=True, reindex=True):
     def _build_total_amount_of_invoice(cls, data):
         """Build total amount for invoice."""
         invoice_price = 0
-        for idx, item in enumerate(data.get('invoice_items')):
+        for idx, item in enumerate(data.get("invoice_items")):
             # build total price for each invoice line item
             invoiceLine = InvoiceLine(item)
-            data['invoice_items'][idx]['total_price'] = invoiceLine.total_price
-            invoice_price += data['invoice_items'][idx]['total_price']
+            data["invoice_items"][idx]["total_price"] = invoiceLine.total_price
+            invoice_price += data["invoice_items"][idx]["total_price"]

         # check if discount percentage
-        if data.get('discount', {}).get('percentage'):
+        if data.get("discount", {}).get("percentage"):
             invoice_price -= cls._calculate_percentage_discount(
-                invoice_price, data.get('discount').get('percentage'))
+                invoice_price, data.get("discount").get("percentage")
+            )
         # check if discount amount
-        if data.get('discount', {}).get('amount'):
-            invoice_price -= data.get('discount').get('amount')
+        if data.get("discount", {}).get("amount"):
+            invoice_price -= data.get("discount").get("amount")
         # set invoice price
-        data['invoice_price'] = invoice_price
+        data["invoice_price"] = invoice_price

     @classmethod
     def _calculate_percentage_discount(cls, amount, percentage):
@@ -118,30 +112,30 @@ def _calculate_percentage_discount(cls, amount, percentage):

     @classmethod
     def _acquisition_invoice_build_org_ref(cls, data):
         """Build $ref for the organisation of the acquisition invoice."""
-        library_pid = data.get('library', {}).get('pid')
-        if not library_pid:
-            library_pid = data.get('library').get(
-                '$ref').split('libraries/')[1]
-        org_pid = Library.get_record_by_pid(library_pid).organisation_pid \
+        library_pid = (
+            data.get("library", {}).get("pid")
+            or data.get("library").get("$ref").split("libraries/")[1]
+        )
+        org_pid = (
+            Library.get_record_by_pid(library_pid).organisation_pid
             or cls.organisation_pid
-        data['organisation'] = {
-            '$ref': f'{get_base_url()}/api/organisations/{org_pid}'
-        }
+        )
+        data["organisation"] = {"$ref": f"{get_base_url()}/api/organisations/{org_pid}"}

     @property
     def organisation_pid(self):
         """Shortcut for acquisition invoice organisation pid."""
-        return extracted_data_from_ref(self.get('organisation'))
+        return extracted_data_from_ref(self.get("organisation"))

     @property
     def library_pid(self):
         """Shortcut for acquisition order library pid."""
-        return extracted_data_from_ref(self.get('library'))
+        return extracted_data_from_ref(self.get("library"))

     @property
     def vendor_pid(self):
         """Shortcut for acquisition order vendor pid."""
-        return extracted_data_from_ref(self.get('vendor'))
+        return extracted_data_from_ref(self.get("vendor"))

     @property
     def is_active(self):
@@ -160,7 +154,7 @@ def bulk_index(self, record_id_iterator):

         :param record_id_iterator: Iterator yielding record UUIDs.
         """
-        super().bulk_index(record_id_iterator, doc_type='acin')
+        super().bulk_index(record_id_iterator, doc_type="acin")


 class InvoiceLine(object):
@@ -173,6 +167,6 @@ def __init__(self, data):
     @property
     def total_price(self):
         """Build total price for invoice line."""
-        total_price = self.data['price'] * self.data['quantity']
-        total_price -= self.data.get('discount', 0)
+        total_price = self.data["price"] * self.data["quantity"]
+        total_price -= self.data.get("discount", 0)
         return total_price
diff --git a/rero_ils/modules/acquisition/acq_invoices/jsonresolver.py b/rero_ils/modules/acquisition/acq_invoices/jsonresolver.py
index 4eb2a71cf6..9b529f56ca 100644
--- a/rero_ils/modules/acquisition/acq_invoices/jsonresolver.py
+++ b/rero_ils/modules/acquisition/acq_invoices/jsonresolver.py
@@ -22,14 +22,14 @@
 from invenio_pidstore.models import PersistentIdentifier, PIDStatus


-@jsonresolver.route('/api/acq_invoices/<pid>', host='bib.rero.ch')
+@jsonresolver.route("/api/acq_invoices/<pid>", host="bib.rero.ch")
 def acquisition_invoice_resolver(pid):
     """Resolver for acq_invoice record."""
-    persistent_id = PersistentIdentifier.get('acin', pid)
+    persistent_id = PersistentIdentifier.get("acin", pid)
     if persistent_id.status == PIDStatus.REGISTERED:
         return dict(pid=persistent_id.pid_value)
     current_app.logger.error(
-        'Acquisition invoice resolver error: /api/acq_invoices/'
-        f'{pid} {persistent_id}'
+        "Acquisition invoice resolver error: /api/acq_invoices/"
+        f"{pid} {persistent_id}"
     )
-    raise Exception('unable to resolve')
+    raise Exception("unable to resolve")
diff --git a/rero_ils/modules/acquisition/acq_invoices/models.py b/rero_ils/modules/acquisition/acq_invoices/models.py
index 896a09e51c..a138d84a9e 100644
--- a/rero_ils/modules/acquisition/acq_invoices/models.py
+++ b/rero_ils/modules/acquisition/acq_invoices/models.py
@@ -27,16 +27,17 @@ class AcquisitionInvoiceIdentifier(RecordIdentifier):
     """Sequence generator for Acquisition Invoice identifiers."""

-    __tablename__ = 'acq_invoice_id'
-    __mapper_args__ = {'concrete': True}
+    __tablename__ = "acq_invoice_id"
+    __mapper_args__ = {"concrete": True}

     recid = db.Column(
-        db.BigInteger().with_variant(db.Integer, 'sqlite'),
-        primary_key=True, autoincrement=True,
+        db.BigInteger().with_variant(db.Integer, "sqlite"),
+        primary_key=True,
+        autoincrement=True,
     )


 class AcquisitionInvoiceMetadata(db.Model, RecordMetadataBase):
     """AcquisitionInvoice record metadata."""

-    __tablename__ = 'acq_invoice_metadata'
+    __tablename__ = "acq_invoice_metadata"
diff --git a/rero_ils/modules/acquisition/acq_invoices/permissions.py b/rero_ils/modules/acquisition/acq_invoices/permissions.py
index df12e84890..b6c9e54eae 100644
--- a/rero_ils/modules/acquisition/acq_invoices/permissions.py
+++ b/rero_ils/modules/acquisition/acq_invoices/permissions.py
@@ -20,19 +20,22 @@

 from invenio_access import action_factory

-from rero_ils.modules.permissions import AllowedByAction, \
-    AllowedByActionRestrictByManageableLibrary, DisallowedIfRollovered, \
-    RecordPermissionPolicy
+from rero_ils.modules.permissions import (
+    AllowedByAction,
+    AllowedByActionRestrictByManageableLibrary,
+    DisallowedIfRollovered,
+    RecordPermissionPolicy,
+)

 from .api import AcquisitionInvoice

 # Actions to control acquisition invoices resource policies
-search_action = action_factory('acin-search')
-read_action = action_factory('acin-read')
-create_action = action_factory('acin-create')
-update_action = action_factory('acin-update')
-delete_action = action_factory('acin-delete')
-access_action = action_factory('acin-access')
+search_action = action_factory("acin-search")
+read_action = action_factory("acin-read")
+create_action = action_factory("acin-create")
+update_action = action_factory("acin-update")
+delete_action = action_factory("acin-delete")
+access_action = action_factory("acin-access")


 class AcqInvoicePermissionPolicy(RecordPermissionPolicy):
@@ -42,13 +45,13 @@ class AcqInvoicePermissionPolicy(RecordPermissionPolicy):
     can_read = [AllowedByActionRestrictByManageableLibrary(read_action)]
     can_create = [
         AllowedByActionRestrictByManageableLibrary(create_action),
-        DisallowedIfRollovered(AcquisitionInvoice)
+        DisallowedIfRollovered(AcquisitionInvoice),
     ]
     can_update = [
         AllowedByActionRestrictByManageableLibrary(update_action),
-        DisallowedIfRollovered(AcquisitionInvoice)
+        DisallowedIfRollovered(AcquisitionInvoice),
     ]
     can_delete = [
         AllowedByActionRestrictByManageableLibrary(delete_action),
-        DisallowedIfRollovered(AcquisitionInvoice)
+        DisallowedIfRollovered(AcquisitionInvoice),
     ]
diff --git a/rero_ils/modules/acquisition/acq_invoices/serializers.py b/rero_ils/modules/acquisition/acq_invoices/serializers.py
index 5276719549..161d65f36a 100644
--- a/rero_ils/modules/acquisition/acq_invoices/serializers.py
+++ b/rero_ils/modules/acquisition/acq_invoices/serializers.py
@@ -20,8 +20,12 @@
 from invenio_records_rest.serializers.response import record_responsify

 from rero_ils.modules.libraries.api import LibrariesSearch
-from rero_ils.modules.serializers import ACQJSONSerializer, JSONSerializer, \
-    RecordSchemaJSONV1, search_responsify
+from rero_ils.modules.serializers import (
+    ACQJSONSerializer,
+    JSONSerializer,
+    RecordSchemaJSONV1,
+    search_responsify,
+)


 class AcquisitionInvoiceJSONSerializer(ACQJSONSerializer):
@@ -30,12 +34,11 @@ class AcquisitionInvoiceJSONSerializer(ACQJSONSerializer):
     def _postprocess_search_aggregations(self, aggregations: dict) -> None:
         """Post-process aggregations from a search result."""
         JSONSerializer.enrich_bucket_with_data(
-            aggregations.get('library', {}).get('buckets', []),
-            LibrariesSearch, 'name'
+            aggregations.get("library", {}).get("buckets", []), LibrariesSearch, "name"
         )
         super()._postprocess_search_aggregations(aggregations)


 _json = AcquisitionInvoiceJSONSerializer(RecordSchemaJSONV1)
-json_acq_invoice_search = search_responsify(_json, 'application/rero+json')
-json_acq_invoice_record = record_responsify(_json, 'application/rero+json')
+json_acq_invoice_search = search_responsify(_json, "application/rero+json")
+json_acq_invoice_record = record_responsify(_json, "application/rero+json")
diff --git a/rero_ils/modules/acquisition/acq_order_lines/api.py b/rero_ils/modules/acquisition/acq_order_lines/api.py
index 9186ff43b1..7073b5c787 100644
--- a/rero_ils/modules/acquisition/acq_order_lines/api.py
+++ b/rero_ils/modules/acquisition/acq_order_lines/api.py
@@ -30,18 +30,16 @@
 from rero_ils.modules.fetchers import id_fetcher
 from rero_ils.modules.minters import id_minter
 from rero_ils.modules.providers import Provider
-from rero_ils.modules.utils import extracted_data_from_ref, get_ref_for_pid, \
-    sorted_pids
+from rero_ils.modules.utils import extracted_data_from_ref, get_ref_for_pid, sorted_pids

 from .extensions import AcqOrderLineValidationExtension
-from .models import AcqOrderLineIdentifier, AcqOrderLineMetadata, \
-    AcqOrderLineStatus
+from .models import AcqOrderLineIdentifier, AcqOrderLineMetadata, AcqOrderLineStatus

 # provider
 AcqOrderLineProvider = type(
-    'AcqOrderLineProvider',
+    "AcqOrderLineProvider",
     (Provider,),
-    dict(identifier=AcqOrderLineIdentifier, pid_type='acol')
+    dict(identifier=AcqOrderLineIdentifier, pid_type="acol"),
 )
 # minter
 acq_order_line_id_minter = partial(id_minter, provider=AcqOrderLineProvider)
@@ -55,9 +53,9 @@ class AcqOrderLinesSearch(IlsRecordsSearch):
     class Meta:
         """Search only on Acquisition Order Line index."""

-        index = 'acq_order_lines'
+        index = "acq_order_lines"
         doc_types = None
-        fields = ('*', )
+        fields = ("*",)
         facets = {}

         default_filter = None
@@ -71,19 +69,11 @@ class AcqOrderLine(AcquisitionIlsRecord):
     provider = AcqOrderLineProvider
     model_cls = AcqOrderLineMetadata
     pids_exist_check = {
-        'required': {
-            'doc': 'document',
-            'acac': 'acq_account',
-            'acor': 'acq_order'
-        },
-        'not_required': {
-            'org': 'organisation'
-        }
+        "required": {"doc": "document", "acac": "acq_account", "acor": "acq_order"},
+        "not_required": {"org": "organisation"},
     }

-    _extensions = [
-        AcqOrderLineValidationExtension()
-    ]
+    _extensions = [AcqOrderLineValidationExtension()]

     # API METHODS =============================================================
     # Overriding the `IlsRecord` default behavior for create and update
@@ -96,23 +86,22 @@ def extended_validation(self, **kwargs):
         - notes array has multiple notes with same type
         """
         # NOTES fields testing
-        note_types = [note.get('type') for note in self.get('notes', [])]
+        note_types = [note.get("type") for note in self.get("notes", [])]
         if len(note_types) != len(set(note_types)):
-            return _('Can not have multiple notes of the same type.')
+            return _("Can not have multiple notes of the same type.")

         return True

     @classmethod
-    def create(cls, data, id_=None, delete_pid=False,
-               dbcommit=True, reindex=True, **kwargs):
+    def create(
+        cls, data, id_=None, delete_pid=False, dbcommit=True, reindex=True, **kwargs
+    ):
         """Create Acquisition Order Line record."""
         # TODO : should be used into `pre_create` hook extensions but seems not
         # work as expected.
         cls._build_additional_refs(data)
         cls._build_total_amount(data)
-        record = super().create(
-            data, id_, delete_pid, dbcommit, reindex, **kwargs)
-        return record
+        return super().create(data, id_, delete_pid, dbcommit, reindex, **kwargs)

     def update(self, data, commit=True, dbcommit=True, reindex=True):
         """Update Acquisition Order Line record."""
@@ -120,7 +109,7 @@ def update(self, data, commit=True, dbcommit=True, reindex=True):
         original_record = self.__class__.get_record(self.id)

         new_data = deepcopy(dict(self))
-        new_data.update(data)
+        new_data |= data
         self._build_additional_refs(new_data)
         self._build_total_amount(new_data)
         super().update(new_data, commit, dbcommit, reindex)
@@ -135,19 +124,16 @@ def update(self, data, commit=True, dbcommit=True, reindex=True):
     @classmethod
     def _build_additional_refs(cls, data):
         """Build $ref for the organisation of the acquisition order."""
-        order = extracted_data_from_ref(data.get('acq_order'), data='record')
-        if order:
-            data['library'] = {
-                '$ref': get_ref_for_pid('lib', order.library_pid)
-            }
-            data['organisation'] = {
-                '$ref': get_ref_for_pid('org', order.organisation_pid)
+        if order := extracted_data_from_ref(data.get("acq_order"), data="record"):
+            data["library"] = {"$ref": get_ref_for_pid("lib", order.library_pid)}
+            data["organisation"] = {
+                "$ref": get_ref_for_pid("org", order.organisation_pid)
             }

     @classmethod
     def _build_total_amount(cls, data):
         """Build total amount for order line."""
-        data['total_amount'] = data['amount'] * data['quantity']
+        data["total_amount"] = data["amount"] * data["quantity"]

     # GETTER & SETTER =========================================================
     #   * Define some properties as shortcut to quickly access object attrs.
@@ -156,32 +142,32 @@ def _build_total_amount(cls, data):
     @property
     def order_pid(self):
         """Shortcut for acquisition order pid."""
-        return extracted_data_from_ref(self.get('acq_order'))
+        return extracted_data_from_ref(self.get("acq_order"))

     @property
     def order(self):
         """Shortcut to the order of the order line."""
-        return extracted_data_from_ref(self.get('acq_order'), data='record')
+        return extracted_data_from_ref(self.get("acq_order"), data="record")

     @property
     def order_date(self):
         """Shortcut for acquisition order send date."""
-        return self.get('order_date')
+        return self.get("order_date")

     @property
     def is_cancelled(self):
         """Shortcut for acquisition order is_cancelled falg."""
-        return self.get('is_cancelled')
+        return self.get("is_cancelled")

     @property
     def account_pid(self):
         """Shortcut to the account pid related to this order line."""
-        return extracted_data_from_ref(self.get('acq_account'))
+        return extracted_data_from_ref(self.get("acq_account"))

     @property
     def account(self):
         """Shortcut to the account object related to this order line."""
-        return extracted_data_from_ref(self.get('acq_account'), data='record')
+        return extracted_data_from_ref(self.get("acq_account"), data="record")

     @property
     def is_active(self):
@@ -195,12 +181,12 @@ def is_active(self):
     @property
     def document_pid(self):
         """Shortcut to the document pid related to this order line."""
-        return extracted_data_from_ref(self.get('document'))
+        return extracted_data_from_ref(self.get("document"))

     @property
     def document(self):
         """Shortcut to the document object related to this order line."""
-        return extracted_data_from_ref(self.get('document'), data='record')
+        return extracted_data_from_ref(self.get("document"), data="record")

     @property
     def organisation_pid(self):
@@ -214,7 +200,7 @@ def quantity(self):
         This comes from the metadata of the order line that represent the
         number of items to order or already ordered.
         """
-        return self.get('quantity')
+        return self.get("quantity")

     @property
     def received_quantity(self):
@@ -223,11 +209,12 @@ def received_quantity(self):
         The received quantity is number of quantity received for the resource
         acq_receipt_line and for the corresponding acq_line_order.
         """
-        from rero_ils.modules.acquisition.acq_receipt_lines.api import \
-            AcqReceiptLinesSearch
-        search = AcqReceiptLinesSearch()\
-            .filter('term', acq_order_line__pid=self.pid)
-        search.aggs.metric('sum_order_line_recieved', 'sum', field='quantity')
+        from rero_ils.modules.acquisition.acq_receipt_lines.api import (
+            AcqReceiptLinesSearch,
+        )
+
+        search = AcqReceiptLinesSearch().filter("term", acq_order_line__pid=self.pid)
+        search.aggs.metric("sum_order_line_recieved", "sum", field="quantity")
         results = search.execute()
         return results.aggregations.sum_order_line_recieved.value

@@ -239,11 +226,12 @@ def unreceived_quantity(self):
     @cached_property
     def receipt_date(self):
         """Get the first reception date for one item of this order line."""
-        from rero_ils.modules.acquisition.acq_receipt_lines.api import \
-            AcqReceiptLinesSearch
-        search = AcqReceiptLinesSearch() \
-            .filter('term', acq_order_line__pid=self.pid)
-        search.aggs.metric('min_receipt_date', 'min', field='receipt_date')
+        from rero_ils.modules.acquisition.acq_receipt_lines.api import (
+            AcqReceiptLinesSearch,
+        )
+
+        search = AcqReceiptLinesSearch().filter("term", acq_order_line__pid=self.pid)
+        search.aggs.metric("min_receipt_date", "min", field="receipt_date")
         results = search.execute()
         epoch = results.aggregations.min_receipt_date.value / 1000
         return datetime.fromtimestamp(epoch)
@@ -262,8 +250,11 @@ def status(self):
         """
         if self.is_cancelled:
             return AcqOrderLineStatus.CANCELLED
-        status = AcqOrderLineStatus.ORDERED \
-            if self.order_date else AcqOrderLineStatus.APPROVED
+        status = (
+            AcqOrderLineStatus.ORDERED
+            if self.order_date
+            else AcqOrderLineStatus.APPROVED
+        )
         received_quantity = self.received_quantity
         # not use the property to prevent an extra ES call
         unreceived_quantity = self.quantity - received_quantity
@@ -286,8 +277,9 @@ def get_note(self, note_type):
         :return the note content if exists, otherwise returns None.
         """
         note = [
-            note.get('content') for note in self.get('notes', [])
-            if note.get('type') == note_type
+            note.get("content")
+            for note in self.get("notes", [])
+            if note.get("type") == note_type
         ]
         return next(iter(note), None)

@@ -297,16 +289,17 @@ def get_links_to_me(self, get_pids=False):
         :param get_pids: if True list of linked pids
                          if False count of linked records
         """
-        from rero_ils.modules.acquisition.acq_receipt_lines.api import \
-            AcqReceiptLinesSearch
+        from rero_ils.modules.acquisition.acq_receipt_lines.api import (
+            AcqReceiptLinesSearch,
+        )
+
         links = {}
-        query = AcqReceiptLinesSearch()\
-            .filter('term', acq_order_line__pid=self.pid)
+        query = AcqReceiptLinesSearch().filter("term", acq_order_line__pid=self.pid)
         receipt_lines = sorted_pids(query) if get_pids else query.count()
         if receipt_lines:
-            links['acq_receipt_lines'] = receipt_lines
+            links["acq_receipt_lines"] = receipt_lines
         return links

@@ -315,10 +308,10 @@ def reasons_not_to_delete(self):
         cannot_delete = {}
         # Note: not possible to delete records attached to rolled_over budget.
         if not self.is_active:
-            cannot_delete['links'] = {'rolled_over': True}
+            cannot_delete["links"] = {"rolled_over": True}
             return cannot_delete
         if links := self.get_links_to_me():
-            cannot_delete['links'] = links
+            cannot_delete["links"] = links
         return cannot_delete

@@ -349,4 +342,4 @@ def bulk_index(self, record_id_iterator):

         :param record_id_iterator: Iterator yielding record UUIDs.
         """
-        super().bulk_index(record_id_iterator, doc_type='acol')
+        super().bulk_index(record_id_iterator, doc_type="acol")
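[Annotation, not part of the diff: `_build_total_amount` above stores a
derived `total_amount` on the order line, and the validation extension later
in this patch recomputes it with an optional discount. A minimal sketch of
that derivation, with invented numbers:

    # Illustration only -- the values are invented, not taken from the patch.
    amount = 25.0          # unit price of the ordered document
    quantity = 4
    discount_amount = 10.0
    # as in AcqOrderLineValidationExtension._check_balance()
    total_amount = amount * quantity - discount_amount
    assert total_amount == 90.0
]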
""" # Dumps AcqOrderLine acquisition - data.update({ - 'quantity': record.get('quantity'), - 'amount': record.get('amount'), - 'note': record.get_note(AcqOrderLineNoteType.VENDOR), - 'account': record.account.dumps(dumper=AcqAccountGenericDumper()), - 'document': record.document.dumps( - dumper=document_acquisition_dumper) - }) + data.update( + { + "quantity": record.get("quantity"), + "amount": record.get("amount"), + "note": record.get_note(AcqOrderLineNoteType.VENDOR), + "account": record.account.dumps(dumper=AcqAccountGenericDumper()), + "document": record.document.dumps(dumper=document_acquisition_dumper), + } + ) data = {k: v for k, v in data.items() if v} return data diff --git a/rero_ils/modules/acquisition/acq_order_lines/extensions.py b/rero_ils/modules/acquisition/acq_order_lines/extensions.py index a764ddbc1d..6bec83e452 100644 --- a/rero_ils/modules/acquisition/acq_order_lines/extensions.py +++ b/rero_ils/modules/acquisition/acq_order_lines/extensions.py @@ -22,8 +22,7 @@ from invenio_records.extensions import RecordExtension from jsonschema import ValidationError -from rero_ils.modules.acquisition.acq_accounts.models import \ - AcqAccountExceedanceType +from rero_ils.modules.acquisition.acq_accounts.models import AcqAccountExceedanceType class AcqOrderLineValidationExtension(RecordExtension): @@ -33,8 +32,9 @@ class AcqOrderLineValidationExtension(RecordExtension): def _check_balance(record): """Check if parent account balance has enough money.""" # compute the total amount of the order line - record['total_amount'] = record['amount'] * record['quantity'] \ - - record.get('discount_amount', 0) + record["total_amount"] = record["amount"] * record["quantity"] - record.get( + "discount_amount", 0 + ) original_record = record.__class__.get_record_by_pid(record.pid) @@ -44,9 +44,9 @@ def _check_balance(record): # record total_amount. # - either the account changes : in such case, we need to the check # if the new destination account balance accept this order line. - amount_to_check = record.get('total_amount', 0) + amount_to_check = record.get("total_amount", 0) if original_record and original_record.account == record.account: - amount_to_check -= original_record.get('total_amount', 0) + amount_to_check -= original_record.get("total_amount", 0) # If we decease the total amount of this order line, no need to check. # There will just be more available money on the related account. Enjoy @@ -57,10 +57,11 @@ def _check_balance(record): # ValidationError. 
if amount_to_check > 0: account = record.account - available_money = account.remaining_balance[0] \ - + account.get_exceedance(AcqAccountExceedanceType.ENCUMBRANCE) + available_money = account.remaining_balance[0] + account.get_exceedance( + AcqAccountExceedanceType.ENCUMBRANCE + ) if available_money < amount_to_check: - msg = _('Parent account available amount too low') + msg = _("Parent account available amount too low") raise ValidationError(msg) @staticmethod @@ -68,7 +69,7 @@ def _check_harvested(record): """Harvested document cannot be linked to an order line.""" related_document = record.document if related_document and related_document.harvested: - msg = _('Cannot link to an harvested document') + msg = _("Cannot link to an harvested document") raise ValidationError(msg) # INVENIO EXTENSION HOOKS ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/rero_ils/modules/acquisition/acq_order_lines/jsonresolver.py b/rero_ils/modules/acquisition/acq_order_lines/jsonresolver.py index 12f1f8ef7c..53a577668d 100644 --- a/rero_ils/modules/acquisition/acq_order_lines/jsonresolver.py +++ b/rero_ils/modules/acquisition/acq_order_lines/jsonresolver.py @@ -22,13 +22,13 @@ from invenio_pidstore.models import PersistentIdentifier, PIDStatus -@jsonresolver.route('/api/acq_order_lines/', host='bib.rero.ch') +@jsonresolver.route("/api/acq_order_lines/", host="bib.rero.ch") def acq_order_line_resolver(pid): """Resolver for Acquisition Order Line record.""" - persistent_id = PersistentIdentifier.get('acol', pid) + persistent_id = PersistentIdentifier.get("acol", pid) if persistent_id.status == PIDStatus.REGISTERED: return dict(pid=persistent_id.pid_value) current_app.logger.error( - f'Doc resolver error: /api/acq_order_lines/{pid} {persistent_id}' + f"Doc resolver error: /api/acq_order_lines/{pid} {persistent_id}" ) - raise Exception('unable to resolve') + raise Exception("unable to resolve") diff --git a/rero_ils/modules/acquisition/acq_order_lines/listener.py b/rero_ils/modules/acquisition/acq_order_lines/listener.py index 7e29b23142..f850c139d7 100644 --- a/rero_ils/modules/acquisition/acq_order_lines/listener.py +++ b/rero_ils/modules/acquisition/acq_order_lines/listener.py @@ -18,12 +18,21 @@ """Signals connector for Order lines.""" -from rero_ils.modules.acquisition.acq_order_lines.api import AcqOrderLine, \ - AcqOrderLinesSearch +from rero_ils.modules.acquisition.acq_order_lines.api import ( + AcqOrderLine, + AcqOrderLinesSearch, +) -def enrich_acq_order_line_data(sender, json=None, record=None, index=None, - doc_type=None, arguments=None, **dummy_kwargs): +def enrich_acq_order_line_data( + sender, + json=None, + record=None, + index=None, + doc_type=None, + arguments=None, + **dummy_kwargs +): """Signal sent before a record is indexed. :param json: The dumped record dictionary which can be modified. @@ -31,12 +40,11 @@ def enrich_acq_order_line_data(sender, json=None, record=None, index=None, :param index: The index in which the record will be indexed. :param doc_type: The doc_type for the record. 
""" - if index.split('-')[0] == AcqOrderLinesSearch.Meta.index: + if index.split("-")[0] == AcqOrderLinesSearch.Meta.index: if not isinstance(record, AcqOrderLine): - record = AcqOrderLine.get_record_by_pid(record.get('pid')) + record = AcqOrderLine.get_record_by_pid(record.get("pid")) unreceived_quantity = record.unreceived_quantity # other dynamic keys - json['total_unreceived_amount'] = \ - unreceived_quantity * record['amount'] - json['status'] = record.status - json['received_quantity'] = record.received_quantity + json["total_unreceived_amount"] = unreceived_quantity * record["amount"] + json["status"] = record.status + json["received_quantity"] = record.received_quantity diff --git a/rero_ils/modules/acquisition/acq_order_lines/models.py b/rero_ils/modules/acquisition/acq_order_lines/models.py index 04ac9011b7..8ff340004b 100644 --- a/rero_ils/modules/acquisition/acq_order_lines/models.py +++ b/rero_ils/modules/acquisition/acq_order_lines/models.py @@ -27,29 +27,30 @@ class AcqOrderLineIdentifier(RecordIdentifier): """Sequence generator for Acquisition Order Line identifiers.""" - __tablename__ = 'acq_order_line_id' - __mapper_args__ = {'concrete': True} + __tablename__ = "acq_order_line_id" + __mapper_args__ = {"concrete": True} recid = db.Column( - db.BigInteger().with_variant(db.Integer, 'sqlite'), - primary_key=True, autoincrement=True, + db.BigInteger().with_variant(db.Integer, "sqlite"), + primary_key=True, + autoincrement=True, ) class AcqOrderLineMetadata(db.Model, RecordMetadataBase): """AcqOrderLine record metadata.""" - __tablename__ = 'acq_order_line_metadata' + __tablename__ = "acq_order_line_metadata" class AcqOrderLineStatus: """Available statuses about an Acquisition Order Line.""" - APPROVED = 'approved' - CANCELLED = 'cancelled' - ORDERED = 'ordered' - RECEIVED = 'received' - PARTIALLY_RECEIVED = 'partially_received' + APPROVED = "approved" + CANCELLED = "cancelled" + ORDERED = "ordered" + RECEIVED = "received" + PARTIALLY_RECEIVED = "partially_received" RECEIVED_STATUSES = [RECEIVED, PARTIALLY_RECEIVED] @@ -57,5 +58,5 @@ class AcqOrderLineStatus: class AcqOrderLineNoteType: """Type of acquisition order line note.""" - STAFF = 'staff_note' - VENDOR = 'vendor_note' + STAFF = "staff_note" + VENDOR = "vendor_note" diff --git a/rero_ils/modules/acquisition/acq_order_lines/permissions.py b/rero_ils/modules/acquisition/acq_order_lines/permissions.py index 20a65accfc..83f95fe760 100644 --- a/rero_ils/modules/acquisition/acq_order_lines/permissions.py +++ b/rero_ils/modules/acquisition/acq_order_lines/permissions.py @@ -19,19 +19,22 @@ """Permissions for Acquisition order line.""" from invenio_access import action_factory -from rero_ils.modules.permissions import AllowedByAction, \ - AllowedByActionRestrictByManageableLibrary, DisallowedIfRollovered, \ - RecordPermissionPolicy +from rero_ils.modules.permissions import ( + AllowedByAction, + AllowedByActionRestrictByManageableLibrary, + DisallowedIfRollovered, + RecordPermissionPolicy, +) from .api import AcqOrderLine # Actions to control acquisition order lines resource policies -search_action = action_factory('acol-search') -read_action = action_factory('acol-read') -create_action = action_factory('acol-create') -update_action = action_factory('acol-update') -delete_action = action_factory('acol-delete') -access_action = action_factory('acol-access') +search_action = action_factory("acol-search") +read_action = action_factory("acol-read") +create_action = action_factory("acol-create") +update_action = 
action_factory("acol-update") +delete_action = action_factory("acol-delete") +access_action = action_factory("acol-access") class AcqOrderLinePermissionPolicy(RecordPermissionPolicy): @@ -41,13 +44,13 @@ class AcqOrderLinePermissionPolicy(RecordPermissionPolicy): can_read = [AllowedByActionRestrictByManageableLibrary(read_action)] can_create = [ AllowedByActionRestrictByManageableLibrary(create_action), - DisallowedIfRollovered(AcqOrderLine) + DisallowedIfRollovered(AcqOrderLine), ] can_update = [ AllowedByActionRestrictByManageableLibrary(update_action), - DisallowedIfRollovered(AcqOrderLine) + DisallowedIfRollovered(AcqOrderLine), ] can_delete = [ AllowedByActionRestrictByManageableLibrary(delete_action), - DisallowedIfRollovered(AcqOrderLine) + DisallowedIfRollovered(AcqOrderLine), ] diff --git a/rero_ils/modules/acquisition/acq_order_lines/serializers.py b/rero_ils/modules/acquisition/acq_order_lines/serializers.py index 4af6f67a72..e9bd2681ff 100644 --- a/rero_ils/modules/acquisition/acq_order_lines/serializers.py +++ b/rero_ils/modules/acquisition/acq_order_lines/serializers.py @@ -22,4 +22,4 @@ from rero_ils.modules.serializers import ACQJSONSerializer, RecordSchemaJSONV1 _json = ACQJSONSerializer(RecordSchemaJSONV1) -json_acol_record = record_responsify(_json, 'application/rero+json') +json_acol_record = record_responsify(_json, "application/rero+json") diff --git a/rero_ils/modules/acquisition/acq_orders/api.py b/rero_ils/modules/acquisition/acq_orders/api.py index f08165c6a8..0e88776c72 100644 --- a/rero_ils/modules/acquisition/acq_orders/api.py +++ b/rero_ils/modules/acquisition/acq_orders/api.py @@ -23,12 +23,12 @@ from flask_babel import gettext as _ from invenio_records_rest.utils import obj_or_import_string -from rero_ils.modules.acquisition.acq_order_lines.api import AcqOrderLine, \ - AcqOrderLinesSearch -from rero_ils.modules.acquisition.acq_order_lines.models import \ - AcqOrderLineStatus -from rero_ils.modules.acquisition.acq_receipts.api import AcqReceipt, \ - AcqReceiptsSearch +from rero_ils.modules.acquisition.acq_order_lines.api import ( + AcqOrderLine, + AcqOrderLinesSearch, +) +from rero_ils.modules.acquisition.acq_order_lines.models import AcqOrderLineStatus +from rero_ils.modules.acquisition.acq_receipts.api import AcqReceipt, AcqReceiptsSearch from rero_ils.modules.acquisition.api import AcquisitionIlsRecord from rero_ils.modules.acquisition.budgets.api import Budget from rero_ils.modules.api import IlsRecordsIndexer, IlsRecordsSearch @@ -38,18 +38,22 @@ from rero_ils.modules.notifications.dispatcher import Dispatcher from rero_ils.modules.notifications.models import NotificationType from rero_ils.modules.providers import Provider -from rero_ils.modules.utils import extracted_data_from_ref, \ - get_endpoint_configuration, get_objects, get_ref_for_pid, sorted_pids +from rero_ils.modules.utils import ( + extracted_data_from_ref, + get_endpoint_configuration, + get_objects, + get_ref_for_pid, + sorted_pids, +) -from .extensions import AcquisitionOrderCompleteDataExtension, \ - AcquisitionOrderExtension +from .extensions import AcquisitionOrderCompleteDataExtension, AcquisitionOrderExtension from .models import AcqOrderIdentifier, AcqOrderMetadata, AcqOrderStatus # provider AcqOrderProvider = type( - 'AcqOrderProvider', + "AcqOrderProvider", (Provider,), - dict(identifier=AcqOrderIdentifier, pid_type='acor') + dict(identifier=AcqOrderIdentifier, pid_type="acor"), ) # minter acq_order_id_minter = partial(id_minter, provider=AcqOrderProvider) @@ -63,9 +67,9 @@ class 
AcqOrdersSearch(IlsRecordsSearch): class Meta: """Search only on acq_order index.""" - index = 'acq_orders' + index = "acq_orders" doc_types = None - fields = ('*', ) + fields = ("*",) facets = {} default_filter = None @@ -74,23 +78,15 @@ class Meta: class AcqOrder(AcquisitionIlsRecord): """AcqOrder class.""" - _extensions = [ - AcquisitionOrderExtension(), - AcquisitionOrderCompleteDataExtension() - ] + _extensions = [AcquisitionOrderExtension(), AcquisitionOrderCompleteDataExtension()] minter = acq_order_id_minter fetcher = acq_order_id_fetcher provider = AcqOrderProvider model_cls = AcqOrderMetadata pids_exist_check = { - 'required': { - 'lib': 'library', - 'vndr': 'vendor' - }, - 'not_required': { - 'org': 'organisation' - } + "required": {"lib": "library", "vndr": "vendor"}, + "not_required": {"org": "organisation"}, } def extended_validation(self, **kwargs): @@ -100,47 +96,47 @@ def extended_validation(self, **kwargs): - notes array has multiple notes with same type """ # NOTES fields testing - note_types = [note.get('type') for note in self.get('notes', [])] + note_types = [note.get("type") for note in self.get("notes", [])] if len(note_types) != len(set(note_types)): - return _('Can not have multiple notes of the same type.') + return _("Can not have multiple notes of the same type.") return True @classmethod - def create(cls, data, id_=None, delete_pid=False, - dbcommit=False, reindex=False, **kwargs): + def create( + cls, data, id_=None, delete_pid=False, dbcommit=False, reindex=False, **kwargs + ): """Create acquisition order record.""" # TODO : should be used into `pre_create` hook extensions but seems not # work as expected. AcquisitionOrderCompleteDataExtension.populate_currency(data) - return super().create(data, id_, delete_pid, dbcommit, reindex, - **kwargs) + return super().create(data, id_, delete_pid, dbcommit, reindex, **kwargs) @property def vendor(self): """Shortcut for vendor.""" - return extracted_data_from_ref(self.get('vendor'), data='record') + return extracted_data_from_ref(self.get("vendor"), data="record") @property def organisation_pid(self): """Shortcut for acquisition order organisation pid.""" - library = extracted_data_from_ref(self.get('library'), data='record') + library = extracted_data_from_ref(self.get("library"), data="record") return library.organisation_pid @property def library_pid(self): """Shortcut for acquisition order library pid.""" - return extracted_data_from_ref(self.get('library')) + return extracted_data_from_ref(self.get("library")) @property def library(self): """Shortcut for acquisition order library.""" - return extracted_data_from_ref(self.get('library'), data='record') + return extracted_data_from_ref(self.get("library"), data="record") @property def vendor_pid(self): """Shortcut for acquisition order vendor pid.""" - return extracted_data_from_ref(self.get('vendor')) + return extracted_data_from_ref(self.get("vendor")) @property def status(self): @@ -161,8 +157,8 @@ def status(self): RECEIVED: if all related order lines has RECEIVED status. 
""" status = AcqOrderStatus.PENDING - search = AcqOrderLinesSearch().filter('term', acq_order__pid=self.pid) - search.aggs.bucket('status', 'terms', field='status') + search = AcqOrderLinesSearch().filter("term", acq_order__pid=self.pid) + search.aggs.bucket("status", "terms", field="status") results = search.execute() statuses = [hit.key for hit in results.aggregations.status.buckets] @@ -172,69 +168,76 @@ def status(self): statuses.remove(AcqOrderLineStatus.CANCELLED) if len(statuses) > 1: - if any(s in AcqOrderLineStatus.RECEIVED_STATUSES - for s in statuses): + if any(s in AcqOrderLineStatus.RECEIVED_STATUSES for s in statuses): status = AcqOrderStatus.PARTIALLY_RECEIVED elif AcqOrderLineStatus.ORDERED in statuses: status = AcqOrderStatus.ORDERED elif len(statuses) == 1: - map = { + status_map = { AcqOrderLineStatus.APPROVED: AcqOrderStatus.PENDING, AcqOrderLineStatus.ORDERED: AcqOrderStatus.ORDERED, AcqOrderLineStatus.RECEIVED: AcqOrderStatus.RECEIVED, - AcqOrderLineStatus.PARTIALLY_RECEIVED: - AcqOrderStatus.PARTIALLY_RECEIVED, + AcqOrderLineStatus.PARTIALLY_RECEIVED: AcqOrderStatus.PARTIALLY_RECEIVED, AcqOrderLineStatus.CANCELLED: AcqOrderStatus.CANCELLED, } - if statuses[0] in map: - status = map[statuses[0]] + if statuses[0] in status_map: + status = status_map[statuses[0]] return status @property def order_date(self): """Get the order date of this order.""" - result = AcqOrderLinesSearch()\ - .filter('term', acq_order__pid=self.pid)\ - .filter('exists', field='order_date')\ - .source(['order_date']).scan() + result = ( + AcqOrderLinesSearch() + .filter("term", acq_order__pid=self.pid) + .filter("exists", field="order_date") + .source(["order_date"]) + .scan() + ) dates = [hit.order_date for hit in result] return next(iter(dates or []), None) @property def item_quantity(self): """Get the total of item quantity for this order.""" - search = AcqOrderLinesSearch() \ - .filter('term', acq_order__pid=self.pid) \ - .exclude('term', status=AcqOrderLineStatus.CANCELLED) - search.aggs.metric('total_quantity', 'sum', field='quantity') + search = ( + AcqOrderLinesSearch() + .filter("term", acq_order__pid=self.pid) + .exclude("term", status=AcqOrderLineStatus.CANCELLED) + ) + search.aggs.metric("total_quantity", "sum", field="quantity") results = search.execute() return results.aggregations.total_quantity.value @property def item_received_quantity(self): """Get the total of received item quantity for this order.""" - search = AcqOrderLinesSearch() \ - .filter('term', acq_order__pid=self.pid) \ - .exclude('term', status=AcqOrderLineStatus.CANCELLED) - search.aggs.metric('total_quantity', 'sum', field='received_quantity') + search = ( + AcqOrderLinesSearch() + .filter("term", acq_order__pid=self.pid) + .exclude("term", status=AcqOrderLineStatus.CANCELLED) + ) + search.aggs.metric("total_quantity", "sum", field="received_quantity") results = search.execute() return results.aggregations.total_quantity.value @property def previous_order(self): """Try to find the previous order in the order history.""" - if prev_version := self.get('previousVersion'): + if prev_version := self.get("previousVersion"): prev_pid = extracted_data_from_ref(prev_version) return AcqOrder.get_record_by_pid(prev_pid) @property def next_order(self): """Try to find an acquisition order representing next order.""" - query = AcqOrdersSearch() \ - .filter('term', previousVersion__pid=self.pid)\ - .source('pid') + query = ( + AcqOrdersSearch() + .filter("term", previousVersion__pid=self.pid) + .source("pid") + ) if hit := 
next(query.scan(), None): return AcqOrder.get_record(hit.meta.id) @@ -250,7 +253,7 @@ def budget(self): if self.pid and (line := next(self.get_order_lines(), None)): return line.account.budget org = self.library.get_organisation() - return Budget.get_record_by_pid(org.get('current_budget_pid')) + return Budget.get_record_by_pid(org.get("current_budget_pid")) @property def is_active(self): @@ -278,8 +281,9 @@ def get_note(self, note_type): :return the note content if exists, otherwise returns None. """ note = [ - note.get('content') for note in self.get('notes', []) - if note.get('type') == note_type + note.get("content") + for note in self.get("notes", []) + if note.get("type") == note_type ] return next(iter(note), None) @@ -289,10 +293,10 @@ def get_receipts(self, output=None): :param output: output method. 'count', 'query' or None. :return a generator of related AcqReceipts. """ - query = AcqReceiptsSearch().filter('term', acq_order__pid=self.pid) - if output == 'count': + query = AcqReceiptsSearch().filter("term", acq_order__pid=self.pid) + if output == "count": return query.count() - elif output == 'query': + elif output == "query": return query else: return get_objects(AcqReceipt, query) @@ -310,21 +314,22 @@ def get_related_notes(self, resource_filters=None): """ # Add here the SearchClass where to search about notes related to this # AcqOrder. - related_resources = ['acol', 'acre', 'acrl'] + related_resources = ["acol", "acre", "acrl"] resource_filters = resource_filters or related_resources for resource_acronym in resource_filters: # search about config for this acronym. If not found : continue config = get_endpoint_configuration(resource_acronym) if not config: continue - record_cls = obj_or_import_string(config['record_class']) - source_search_class = obj_or_import_string(config['search_class']) + record_cls = obj_or_import_string(config["record_class"]) + source_search_class = obj_or_import_string(config["search_class"]) search_class = source_search_class() - query = search_class \ - .filter('term', acq_order__pid=self.pid) \ - .filter('exists', field='notes') \ - .source(['notes', 'pid']) + query = ( + search_class.filter("term", acq_order__pid=self.pid) + .filter("exists", field="notes") + .source(["notes", "pid"]) + ) for hit in query.scan(): for note in hit.notes: yield note, record_cls, hit.pid @@ -334,11 +339,10 @@ def get_related_orders(self, output=None): :param output: output method : 'count', 'query' or None. """ - query = AcqOrdersSearch()\ - .filter('term', previousVersion__pid=self.pid) - if output == 'count': + query = AcqOrdersSearch().filter("term", previousVersion__pid=self.pid) + if output == "count": return query.count() - elif output == 'query': + elif output == "query": return query else: return get_objects(AcqOrder, query) @@ -357,42 +361,43 @@ def preserve_order(query): :param: query: the es query. :return the es query. 
""" - return query.params(preserve_order=True) \ - .sort({'pid': {"order": "asc"}}) + return query.params(preserve_order=True).sort({"pid": {"order": "asc"}}) - query = AcqOrderLinesSearch().filter('term', acq_order__pid=self.pid) + query = AcqOrderLinesSearch().filter("term", acq_order__pid=self.pid) if includes: - query = query.filter('terms', status=includes) + query = query.filter("terms", status=includes) - if output == 'count': + if output == "count": return query.count() - elif output == 'query': + elif output == "query": return preserve_order(query) else: return get_objects(AcqOrderLine, preserve_order(query)) def get_order_provisional_total_amount(self): """Get provisional total amount of this order.""" - search = AcqOrderLinesSearch()\ - .filter('term', acq_order__pid=self.pid) \ - .exclude('term', status=AcqOrderLineStatus.CANCELLED) + search = ( + AcqOrderLinesSearch() + .filter("term", acq_order__pid=self.pid) + .exclude("term", status=AcqOrderLineStatus.CANCELLED) + ) search.aggs.metric( - 'order_total_amount', - 'sum', - field='total_amount', - script={'source': 'Math.round(_value*100)/100.00'} + "order_total_amount", + "sum", + field="total_amount", + script={"source": "Math.round(_value*100)/100.00"}, ) results = search.execute() return round(results.aggregations.order_total_amount.value, 2) def get_order_expenditure_total_amount(self): """Get total amount of known expenditures of this order.""" - search = AcqReceiptsSearch().filter('term', acq_order__pid=self.pid) + search = AcqReceiptsSearch().filter("term", acq_order__pid=self.pid) search.aggs.metric( - 'receipt_total_amount', - 'sum', - field='total_amount', - script={'source': 'Math.round(_value*100)/100.00'} + "receipt_total_amount", + "sum", + field="total_amount", + script={"source": "Math.round(_value*100)/100.00"}, ) results = search.execute() return round(results.aggregations.receipt_total_amount.value, 2) @@ -405,14 +410,14 @@ def get_account_statement(self): related item quantity. """ return { - 'provisional': { - 'total_amount': self.get_order_provisional_total_amount(), - 'quantity': self.item_quantity, + "provisional": { + "total_amount": self.get_order_provisional_total_amount(), + "quantity": self.item_quantity, + }, + "expenditure": { + "total_amount": self.get_order_expenditure_total_amount(), + "quantity": self.item_received_quantity, }, - 'expenditure': { - 'total_amount': self.get_order_expenditure_total_amount(), - 'quantity': self.item_received_quantity - } } def get_links_to_me(self, get_pids=False): @@ -421,11 +426,11 @@ def get_links_to_me(self, get_pids=False): :param get_pids: if True list of related record pids, if False count of related records. """ - output = 'query' if get_pids else 'count' + output = "query" if get_pids else "count" links = { - 'orders': self.get_related_orders(output=output), - 'order_lines': self.get_order_lines(output=output), - 'receipts': self.get_receipts(output=output), + "orders": self.get_related_orders(output=output), + "order_lines": self.get_order_lines(output=output), + "receipts": self.get_receipts(output=output), } links = {k: v for k, v in links.items() if v} if get_pids: @@ -437,20 +442,18 @@ def reasons_not_to_delete(self): cannot_delete = {} # Note: not possible to delete records attached to rolled_over budget. 
if not self.is_active: - cannot_delete['links'] = {'rolled_over': True} + cannot_delete["links"] = {"rolled_over": True} return cannot_delete links = self.get_links_to_me() # The link with AcqOrderLine resources isn't a reason to not delete # an AcqOrder. Indeed, when we delete an AcqOrder, we also delete all # related AcqOrderLines (cascade delete). Check the extension # ``pre_delete`` hook. - links.pop('order_lines', None) + links.pop("order_lines", None) if self.status != AcqOrderStatus.PENDING: - cannot_delete['others'] = { - _('Order status is %s') % _(self.status): True - } + cannot_delete["others"] = {_("Order status is %s") % _(self.status): True} if links: - cannot_delete['links'] = links + cannot_delete["links"] = links return cannot_delete def send_order(self, emails=None): @@ -466,26 +469,26 @@ def send_order(self, emails=None): """ # Create the notification and dispatch it synchronously. record = { - 'creation_date': datetime.now(timezone.utc).isoformat(), - 'notification_type': NotificationType.ACQUISITION_ORDER, - 'context': { - 'order': {'$ref': get_ref_for_pid('acor', self.pid)}, - 'recipients': emails - } + "creation_date": datetime.now(timezone.utc).isoformat(), + "notification_type": NotificationType.ACQUISITION_ORDER, + "context": { + "order": {"$ref": get_ref_for_pid("acor", self.pid)}, + "recipients": emails, + }, } notif = Notification.create(data=record, dbcommit=True, reindex=True) dispatcher_result = Dispatcher.dispatch_notifications( - notification_pids=[notif.get('pid')]) + notification_pids=[notif.get("pid")] + ) # If the dispatcher result is correct, update the order_lines status # and reindex myself. Reload the notification to obtain the right # notification metadata (status, process_date, ...) - if dispatcher_result.get('sent', 0): - order_date = datetime.now().strftime('%Y-%m-%d') - order_lines = self.get_order_lines( - includes=[AcqOrderLineStatus.APPROVED]) + if dispatcher_result.get("sent", 0): + order_date = datetime.now().strftime("%Y-%m-%d") + order_lines = self.get_order_lines(includes=[AcqOrderLineStatus.APPROVED]) for order_line in order_lines: - order_line['order_date'] = order_date + order_line["order_date"] = order_date order_line.update(order_line, dbcommit=True, reindex=True) self.reindex() notif = Notification.get_record(notif.id) @@ -503,15 +506,14 @@ def bulk_index(self, record_id_iterator): :param record_id_iterator: Iterator yielding record UUIDs. """ - super().bulk_index(record_id_iterator, doc_type='acor') + super().bulk_index(record_id_iterator, doc_type="acor") def delete(self, record): """Delete a record from indexer. First delete order lines from the ES index, then delete the order. 
""" - es_query = AcqOrderLinesSearch()\ - .filter('term', acq_order__pid=record.pid) + es_query = AcqOrderLinesSearch().filter("term", acq_order__pid=record.pid) if es_query.count(): es_query.delete() AcqOrderLinesSearch.flush_and_refresh() diff --git a/rero_ils/modules/acquisition/acq_orders/dumpers.py b/rero_ils/modules/acquisition/acq_orders/dumpers.py index ade2d778c1..57dd3f63d0 100644 --- a/rero_ils/modules/acquisition/acq_orders/dumpers.py +++ b/rero_ils/modules/acquisition/acq_orders/dumpers.py @@ -21,18 +21,16 @@ from invenio_records.dumpers import Dumper as InvenioRecordsDumper -from rero_ils.modules.acquisition.acq_order_lines.dumpers import \ - AcqOrderLineNotificationDumper -from rero_ils.modules.acquisition.acq_order_lines.models import \ - AcqOrderLineStatus +from rero_ils.modules.acquisition.acq_order_lines.dumpers import ( + AcqOrderLineNotificationDumper, +) +from rero_ils.modules.acquisition.acq_order_lines.models import AcqOrderLineStatus from rero_ils.modules.acquisition.acq_orders.models import AcqOrderNoteType from rero_ils.modules.libraries.api import Library -from rero_ils.modules.libraries.dumpers import \ - LibraryAcquisitionNotificationDumper +from rero_ils.modules.libraries.dumpers import LibraryAcquisitionNotificationDumper from rero_ils.modules.utils import get_ref_for_pid from rero_ils.modules.vendors.api import Vendor -from rero_ils.modules.vendors.dumpers import \ - VendorAcquisitionNotificationDumper +from rero_ils.modules.vendors.dumpers import VendorAcquisitionNotificationDumper class AcqOrderNotificationDumper(InvenioRecordsDumper): @@ -44,22 +42,23 @@ def dump(self, record, data): :param record: The record to dump. :param data: The initial dump data passed in by ``record.dumps()``. """ - today = date.today().strftime('%Y-%m-%d') - data.update({ - 'reference': record.get('reference'), - 'order_date': record.order_date or today, - 'note': record.get_note(AcqOrderNoteType.VENDOR), - }) + today = date.today().strftime("%Y-%m-%d") + data.update( + { + "reference": record.get("reference"), + "order_date": record.order_date or today, + "note": record.get_note(AcqOrderNoteType.VENDOR), + } + ) library = Library.get_record_by_pid(record.library_pid) - data['library'] = library.dumps( - dumper=LibraryAcquisitionNotificationDumper()) + data["library"] = library.dumps(dumper=LibraryAcquisitionNotificationDumper()) vendor = Vendor.get_record_by_pid(record.vendor_pid) - data['vendor'] = vendor.dumps( - dumper=VendorAcquisitionNotificationDumper()) - data['order_lines'] = [ + data["vendor"] = vendor.dumps(dumper=VendorAcquisitionNotificationDumper()) + data["order_lines"] = [ order_line.dumps(dumper=AcqOrderLineNotificationDumper()) for order_line in record.get_order_lines( - includes=[AcqOrderLineStatus.APPROVED]) + includes=[AcqOrderLineStatus.APPROVED] + ) ] data = {k: v for k, v in data.items() if v} return data @@ -74,13 +73,13 @@ def dump(self, record, data): :param record: The record to dump. :param data: The initial dump data passed in by ``record.dumps()``. 
""" - label = record.get('reference') + label = record.get("reference") if budget := record.budget: label = budget.name return { - '$ref': get_ref_for_pid('acor', record.pid), - 'label': label, - 'description': record.get('reference'), - 'created': record.created.isoformat(), - 'updated': record.updated.isoformat() + "$ref": get_ref_for_pid("acor", record.pid), + "label": label, + "description": record.get("reference"), + "created": record.created.isoformat(), + "updated": record.updated.isoformat(), } diff --git a/rero_ils/modules/acquisition/acq_orders/extensions.py b/rero_ils/modules/acquisition/acq_orders/extensions.py index 5a4b5fe865..c2a418a8b8 100644 --- a/rero_ils/modules/acquisition/acq_orders/extensions.py +++ b/rero_ils/modules/acquisition/acq_orders/extensions.py @@ -32,10 +32,9 @@ def pre_dump(self, record, dumper=None): :param dumper: the record dumper. """ if record.order_date: - record['order_date'] = record.order_date - record['account_statement'] = \ - record.get_account_statement() - record['status'] = record.status + record["order_date"] = record.order_date + record["account_statement"] = record.get_account_statement() + record["status"] = record.status def pre_load(self, data, loader=None): """Called before a record is loaded. @@ -43,9 +42,9 @@ def pre_load(self, data, loader=None): :param data: the data to load. :param loader: the record loader. """ - data.pop('account_statement', None) - data.pop('status', None) - data.pop('order_date', None) + data.pop("account_statement", None) + data.pop("status", None) + data.pop("order_date", None) def pre_delete(self, record, force=False): """Called before a record is deleted. @@ -67,10 +66,9 @@ class AcquisitionOrderCompleteDataExtension(RecordExtension): @staticmethod def populate_currency(record): """Add vendor currency to order data.""" - vendor = record.get('vendor') - if vendor: - vendor = extracted_data_from_ref(vendor, data='record') - record['currency'] = vendor.get('currency') + if vendor := record.get("vendor"): + vendor = extracted_data_from_ref(vendor, data="record") + record["currency"] = vendor.get("currency") # TODO : This hook doesn't work as expected now. # The record is well updated with currency key, but this key isn't store @@ -94,8 +92,8 @@ def post_create(self, record): :param record: the record metadata. 
""" - if not record.get('reference'): - record['reference'] = f'ORDER-{record.pid}' + if not record.get("reference"): + record["reference"] = f"ORDER-{record.pid}" record.update(record, dbcommit=True, reindex=True) def pre_commit(self, record): diff --git a/rero_ils/modules/acquisition/acq_orders/jsonresolver.py b/rero_ils/modules/acquisition/acq_orders/jsonresolver.py index 5bd916a5ab..c93e7d393c 100644 --- a/rero_ils/modules/acquisition/acq_orders/jsonresolver.py +++ b/rero_ils/modules/acquisition/acq_orders/jsonresolver.py @@ -22,7 +22,7 @@ from rero_ils.modules.jsonresolver import resolve_json_refs -@jsonresolver.route('/api/acq_orders/', host='bib.rero.ch') +@jsonresolver.route("/api/acq_orders/", host="bib.rero.ch") def acq_order_resolver(pid): """Resolver for acquisition order record.""" - return resolve_json_refs('acor', pid) + return resolve_json_refs("acor", pid) diff --git a/rero_ils/modules/acquisition/acq_orders/listener.py b/rero_ils/modules/acquisition/acq_orders/listener.py index ac51d690a2..c3c712e1dc 100644 --- a/rero_ils/modules/acquisition/acq_orders/listener.py +++ b/rero_ils/modules/acquisition/acq_orders/listener.py @@ -18,16 +18,21 @@ """Signals connector for acquisition order.""" -from rero_ils.modules.acquisition.acq_order_lines.dumpers import \ - AcqOrderLineESDumper -from rero_ils.modules.acquisition.acq_receipts.dumpers import \ - AcqReceiptESDumper +from rero_ils.modules.acquisition.acq_order_lines.dumpers import AcqOrderLineESDumper +from rero_ils.modules.acquisition.acq_receipts.dumpers import AcqReceiptESDumper from .api import AcqOrdersSearch -def enrich_acq_order_data(sender, json=None, record=None, index=None, - doc_type=None, arguments=None, **dummy_kwargs): +def enrich_acq_order_data( + sender, + json=None, + record=None, + index=None, + doc_type=None, + arguments=None, + **dummy_kwargs, +): """Signal sent before a record is indexed. :param json: The dumped record dictionary which can be modified. @@ -35,59 +40,56 @@ def enrich_acq_order_data(sender, json=None, record=None, index=None, :param index: The index in which the record will be indexed. :param doc_type: The document type of the record. """ - if index.split('-')[0] == AcqOrdersSearch.Meta.index: - - # NOTES PERFORMING ---------------------------------------------------- - # We include notes from multiple sources into the AcqOrder ES index - # to allow search on each terms of any notes related to this parent - # resource : - # - AcqOrder self notes. - # - related `AcqOrderLine` notes. - # - related `AcqReceipt` notes. - # - `AcqReceiptLine` notes related to `AcqReceipts`. - # So for any notes, we will include a `source` attribute to know the - # origin of the note. - for note in json.get('notes', []): - note['source'] = { - 'pid': record.pid, - 'type': record.provider.pid_type + if index.split("-")[0] != AcqOrdersSearch.Meta.index: + return + # NOTES PERFORMING ---------------------------------------------------- + # We include notes from multiple sources into the AcqOrder ES index + # to allow search on each terms of any notes related to this parent + # resource : + # - AcqOrder self notes. + # - related `AcqOrderLine` notes. + # - related `AcqReceipt` notes. + # - `AcqReceiptLine` notes related to `AcqReceipts`. + # So for any notes, we will include a `source` attribute to know the + # origin of the note. 
+ for note in json.get("notes", []): + note["source"] = {"pid": record.pid, "type": record.provider.pid_type} + for note, source_class, resource_pid in record.get_related_notes(): + json.setdefault("notes", []).append( + { + "type": note["type"], + "content": note["content"], + "source": { + "pid": resource_pid, + "type": source_class.provider.pid_type, + }, } - for note, source_class, resource_pid in record.get_related_notes(): - json.setdefault('notes', []).append({ - 'type': note['type'], - 'content': note['content'], - 'source': { - 'pid': resource_pid, - 'type': source_class.provider.pid_type - } - }) + ) - # RELATED ORDER LINES METADATA ---------------------------------------- - order_line_dumper = AcqOrderLineESDumper() - json['order_lines'] = [ - order_line.dumps(dumper=order_line_dumper) - for order_line in record.get_order_lines() - ] + # RELATED ORDER LINES METADATA ---------------------------------------- + order_line_dumper = AcqOrderLineESDumper() + json["order_lines"] = [ + order_line.dumps(dumper=order_line_dumper) + for order_line in record.get_order_lines() + ] - # RELATED RECEIPTS ---------------------------------------------------- - receipt_dumper = AcqReceiptESDumper() - json['receipts'] = [ - receipt.dumps(dumper=receipt_dumper) - for receipt in record.get_receipts() - ] + # RELATED RECEIPTS ---------------------------------------------------- + receipt_dumper = AcqReceiptESDumper() + json["receipts"] = [ + receipt.dumps(dumper=receipt_dumper) for receipt in record.get_receipts() + ] - # RELATED BUDGET ------------------------------------------------------ - if budget := record.budget: - json['budget'] = { - 'pid': budget.pid, - 'type': 'budg' - } + # RELATED BUDGET ------------------------------------------------------ + if budget := record.budget: + json["budget"] = {"pid": budget.pid, "type": "budg"} - # ADD OTHERS DYNAMIC KEYS --------------------------------------------- - json.update({ - 'status': record.status, - 'organisation': { - 'pid': record.organisation_pid, - 'type': 'org', - } - }) + # ADD OTHERS DYNAMIC KEYS --------------------------------------------- + json.update( + { + "status": record.status, + "organisation": { + "pid": record.organisation_pid, + "type": "org", + }, + } + ) diff --git a/rero_ils/modules/acquisition/acq_orders/models.py b/rero_ils/modules/acquisition/acq_orders/models.py index b6bd87e5d4..360d2ed6d5 100644 --- a/rero_ils/modules/acquisition/acq_orders/models.py +++ b/rero_ils/modules/acquisition/acq_orders/models.py @@ -27,44 +27,45 @@ class AcqOrderIdentifier(RecordIdentifier): """Sequence generator for acquisition order identifiers.""" - __tablename__ = 'acq_order_id' - __mapper_args__ = {'concrete': True} + __tablename__ = "acq_order_id" + __mapper_args__ = {"concrete": True} recid = db.Column( - db.BigInteger().with_variant(db.Integer, 'sqlite'), - primary_key=True, autoincrement=True, + db.BigInteger().with_variant(db.Integer, "sqlite"), + primary_key=True, + autoincrement=True, ) class AcqOrderMetadata(db.Model, RecordMetadataBase): """AcqOrder record metadata.""" - __tablename__ = 'acq_order_metadata' + __tablename__ = "acq_order_metadata" class AcqOrderType: """Type of acquisition order.""" - MONOGRAPH = 'monograph' - MONOGRAPHIC_SET = 'monographic_set' - SERIAL = 'serial' - STANDING_ORDER = 'standing_order' - PLANNED_ORDER = 'planned_order' - MULTI_VOLUME = 'multi_volume' + MONOGRAPH = "monograph" + MONOGRAPHIC_SET = "monographic_set" + SERIAL = "serial" + STANDING_ORDER = "standing_order" + PLANNED_ORDER = 
"planned_order" + MULTI_VOLUME = "multi_volume" class AcqOrderStatus: """Available statuses for an acquisition order.""" - CANCELLED = 'cancelled' - ORDERED = 'ordered' - PENDING = 'pending' - PARTIALLY_RECEIVED = 'partially_received' - RECEIVED = 'received' + CANCELLED = "cancelled" + ORDERED = "ordered" + PENDING = "pending" + PARTIALLY_RECEIVED = "partially_received" + RECEIVED = "received" class AcqOrderNoteType: """Type of acquisition order note.""" - STAFF = 'staff_note' - VENDOR = 'vendor_note' + STAFF = "staff_note" + VENDOR = "vendor_note" diff --git a/rero_ils/modules/acquisition/acq_orders/permissions.py b/rero_ils/modules/acquisition/acq_orders/permissions.py index b67aa5a9ad..7d090b7d2f 100644 --- a/rero_ils/modules/acquisition/acq_orders/permissions.py +++ b/rero_ils/modules/acquisition/acq_orders/permissions.py @@ -19,19 +19,22 @@ """Permissions for Acquisition order.""" from invenio_access import action_factory -from rero_ils.modules.permissions import AllowedByAction, \ - AllowedByActionRestrictByManageableLibrary, DisallowedIfRollovered, \ - RecordPermissionPolicy +from rero_ils.modules.permissions import ( + AllowedByAction, + AllowedByActionRestrictByManageableLibrary, + DisallowedIfRollovered, + RecordPermissionPolicy, +) from .api import AcqOrder # Actions to control acquisition orders resource policies -search_action = action_factory('acor-search') -read_action = action_factory('acor-read') -create_action = action_factory('acor-create') -update_action = action_factory('acor-update') -delete_action = action_factory('acor-delete') -access_action = action_factory('acor-access') +search_action = action_factory("acor-search") +read_action = action_factory("acor-read") +create_action = action_factory("acor-create") +update_action = action_factory("acor-update") +delete_action = action_factory("acor-delete") +access_action = action_factory("acor-access") class AcqOrderPermissionPolicy(RecordPermissionPolicy): @@ -41,13 +44,13 @@ class AcqOrderPermissionPolicy(RecordPermissionPolicy): can_read = [AllowedByActionRestrictByManageableLibrary(read_action)] can_create = [ AllowedByActionRestrictByManageableLibrary(create_action), - DisallowedIfRollovered(AcqOrder) + DisallowedIfRollovered(AcqOrder), ] can_update = [ AllowedByActionRestrictByManageableLibrary(update_action), - DisallowedIfRollovered(AcqOrder) + DisallowedIfRollovered(AcqOrder), ] can_delete = [ AllowedByActionRestrictByManageableLibrary(delete_action), - DisallowedIfRollovered(AcqOrder) + DisallowedIfRollovered(AcqOrder), ] diff --git a/rero_ils/modules/acquisition/acq_orders/serializers/__init__.py b/rero_ils/modules/acquisition/acq_orders/serializers/__init__.py index f8a20eed0e..17a8870f07 100644 --- a/rero_ils/modules/acquisition/acq_orders/serializers/__init__.py +++ b/rero_ils/modules/acquisition/acq_orders/serializers/__init__.py @@ -20,42 +20,56 @@ from invenio_records_rest.serializers.response import record_responsify -from rero_ils.modules.serializers import RecordSchemaJSONV1, \ - search_responsify, search_responsify_file +from rero_ils.modules.serializers import ( + RecordSchemaJSONV1, + search_responsify, + search_responsify_file, +) from .csv import AcqOrderCSVSerializer from .json import AcqOrderJSONSerializer -__all__ = [ - 'json_acor_search', - 'json_acor_record', - 'csv_acor_search' -] +__all__ = ["json_acor_search", "json_acor_record", "csv_acor_search"] """JSON serializer.""" _json = AcqOrderJSONSerializer(RecordSchemaJSONV1) -json_acor_search = search_responsify(_json, 'application/rero+json') 
-json_acor_record = record_responsify(_json, 'application/rero+json') +json_acor_search = search_responsify(_json, "application/rero+json") +json_acor_record = record_responsify(_json, "application/rero+json") """CSV serializer.""" _csv = AcqOrderCSVSerializer( csv_included_fields=[ - 'order_pid', 'order_reference', 'order_date', 'order_staff_note', - 'order_vendor_note', 'order_type', 'order_status', 'vendor_name', - 'document_pid', 'document_creator', 'document_title', - 'document_publisher', 'document_publication_year', - 'document_edition_statement', 'document_series_statement', - 'document_isbn', 'account_name', 'account_number', - 'order_lines_priority', 'order_lines_notes', 'order_lines_status', - 'ordered_quantity', 'ordered_unit_price', 'ordered_amount', - 'receipt_reference', 'received_quantity', 'received_amount', - 'receipt_date' + "order_pid", + "order_reference", + "order_date", + "order_staff_note", + "order_vendor_note", + "order_type", + "order_status", + "vendor_name", + "document_pid", + "document_creator", + "document_title", + "document_publisher", + "document_publication_year", + "document_edition_statement", + "document_series_statement", + "document_isbn", + "account_name", + "account_number", + "order_lines_priority", + "order_lines_notes", + "order_lines_status", + "ordered_quantity", + "ordered_unit_price", + "ordered_amount", + "receipt_reference", + "received_quantity", + "received_amount", + "receipt_date", ] ) csv_acor_search = search_responsify_file( - _csv, - 'text/csv', - file_extension='csv', - file_prefix='export-orders' + _csv, "text/csv", file_extension="csv", file_prefix="export-orders" ) diff --git a/rero_ils/modules/acquisition/acq_orders/serializers/csv.py b/rero_ils/modules/acquisition/acq_orders/serializers/csv.py index ea9d64b9f6..9ae121e43f 100644 --- a/rero_ils/modules/acquisition/acq_orders/serializers/csv.py +++ b/rero_ils/modules/acquisition/acq_orders/serializers/csv.py @@ -25,10 +25,8 @@ from invenio_records_rest.serializers.csv import CSVSerializer, Line from rero_ils.modules.acquisition.acq_accounts.api import AcqAccountsSearch -from rero_ils.modules.acquisition.acq_order_lines.api import \ - AcqOrderLinesSearch -from rero_ils.modules.acquisition.acq_receipt_lines.api import \ - AcqReceiptLinesSearch +from rero_ils.modules.acquisition.acq_order_lines.api import AcqOrderLinesSearch +from rero_ils.modules.acquisition.acq_receipt_lines.api import AcqReceiptLinesSearch from rero_ils.modules.acquisition.acq_receipts.api import AcqReceiptsSearch from rero_ils.modules.commons.identifiers import IdentifierStatus from rero_ils.modules.documents.api import DocumentsSearch @@ -37,16 +35,35 @@ from rero_ils.utils import get_i18n_supported_languages creator_role_filter = [ - 'rsp', 'cre', 'enj', 'dgs', 'prg', 'dsr', 'ctg', 'cmp', 'inv', 'com', - 'pht', 'ivr', 'art', 'ive', 'chr', 'aut', 'arc', 'fmk', 'pra', 'csl' + "rsp", + "cre", + "enj", + "dgs", + "prg", + "dsr", + "ctg", + "cmp", + "inv", + "com", + "pht", + "ivr", + "art", + "ive", + "chr", + "aut", + "arc", + "fmk", + "pra", + "csl", ] class AcqOrderCSVSerializer(CSVSerializer): """Mixin serializing records as CSV.""" - def serialize_search(self, pid_fetcher, search_result, links=None, - item_links_factory=None): + def serialize_search( + self, pid_fetcher, search_result, links=None, item_links_factory=None + ): """Serialize a search result. :param pid_fetcher: Persistent identifier fetcher. 
@@ -61,14 +78,14 @@ def serialize_search(self, pid_fetcher, search_result, links=None, # language language = request.args.get("lang", current_i18n.language) if not language or language not in get_i18n_supported_languages(): - language = current_app.config.get('BABEL_DEFAULT_LANGUAGE', 'en') + language = current_app.config.get("BABEL_DEFAULT_LANGUAGE", "en") order_fields = { - 'order_pid': 'pid', - 'order_reference': 'reference', - 'order_date': 'order_date', - 'order_type': 'type', - 'order_status': 'status' + "order_pid": "pid", + "order_reference": "reference", + "order_date": "order_date", + "order_type": "type", + "order_status": "status", } def generate_csv(): @@ -82,8 +99,8 @@ def batch(results): pids = [] for result in results: data = result.to_dict() - pids.append(data['pid']) - records[data['pid']] = data + pids.append(data["pid"]) + records[data["pid"]] = data if len(records) % chunk_size == 0: yield pids, records pids = [] @@ -93,14 +110,20 @@ def batch(results): def get_linked_records_by_order_pids(order_pids): """Get linked resources for the given order pids.""" order_line_fields = [ - 'pid', 'quantity', 'amount', 'total_amount', 'status', - 'priority', 'notes', 'document', 'acq_account', - 'acq_order' + "pid", + "quantity", + "amount", + "total_amount", + "status", + "priority", + "notes", + "document", + "acq_account", + "acq_order", ] - records = AcqOrderLinesSearch() \ - .get_records_by_terms(terms=order_pids, - key='acq_order__pid', - fields=order_line_fields) + records = AcqOrderLinesSearch().get_records_by_terms( + terms=order_pids, key="acq_order__pid", fields=order_line_fields + ) order_lines = {} doc_pids = [] account_pids = [] @@ -112,48 +135,58 @@ def get_linked_records_by_order_pids(order_pids): docs = get_documents_by_pids(doc_pids) accounts = get_accounts_by_pids(account_pids) receipt_lines = get_receipt_lines_by_order_line_pids( - list(order_lines.keys())) + list(order_lines.keys()) + ) return order_lines, docs, accounts, receipt_lines def get_documents_by_pids(doc_pids): """Get documents for the given pids.""" fields = [ - 'pid', 'contribution', 'editionStatement', 'identifiedBy', - 'provisionActivity', 'seriesStatement', 'title' + "pid", + "contribution", + "editionStatement", + "identifiedBy", + "provisionActivity", + "seriesStatement", + "title", ] - records = DocumentsSearch() \ - .get_records_by_terms(terms=doc_pids, fields=fields) + records = DocumentsSearch().get_records_by_terms( + terms=doc_pids, fields=fields + ) return { record.pid: OrderDocumentFormatter( - record=record.to_dict(), - language=language).format() + record=record.to_dict(), language=language + ).format() for record in records } def get_accounts_by_pids(account_pids): """Get accounts for the given pids.""" - fields = ['pid', 'name', 'number'] - return AcqAccountsSearch()\ - .get_records_by_terms(terms=account_pids, fields=fields, - as_dict=True) + fields = ["pid", "name", "number"] + return AcqAccountsSearch().get_records_by_terms( + terms=account_pids, fields=fields, as_dict=True + ) def get_receipts_by_order_pids(order_pids): """Get receipts for the given pids.""" - fields = ['pid', 'reference'] - return AcqReceiptsSearch() \ - .get_records_by_terms(terms=order_pids, - key='acq_order__pid', - fields=fields, - as_dict=True) + fields = ["pid", "reference"] + return AcqReceiptsSearch().get_records_by_terms( + terms=order_pids, key="acq_order__pid", fields=fields, as_dict=True + ) def get_receipt_lines_by_order_line_pids(order_lines_pids): """Get receipts for the given order lines 
pids.""" - fields = ['pid', 'quantity', 'receipt_date', 'total_amount', - 'acq_order_line', 'acq_receipt'] - receipt_line_results = AcqReceiptLinesSearch() \ - .get_records_by_terms(terms=order_lines_pids, - key='acq_order_line__pid', - fields=fields) + fields = [ + "pid", + "quantity", + "receipt_date", + "total_amount", + "acq_order_line", + "acq_receipt", + ] + receipt_line_results = AcqReceiptLinesSearch().get_records_by_terms( + terms=order_lines_pids, key="acq_order_line__pid", fields=fields + ) # organize receipt lines by order line pid receipt_lines = {} for record in receipt_line_results: @@ -165,100 +198,101 @@ def get_receipt_lines_by_order_line_pids(order_lines_pids): def get_vendors_by_pids(vendor_pids): """Get vendors for the given pids.""" - fields = ['pid', 'name'] - return VendorsSearch() \ - .get_records_by_terms(terms=vendor_pids, fields=fields, - as_dict=True) + fields = ["pid", "name"] + return VendorsSearch().get_records_by_terms( + terms=vendor_pids, fields=fields, as_dict=True + ) headers = dict.fromkeys(self.csv_included_fields) # write the CSV output in memory line = Line() - writer = csv.DictWriter(line, - quoting=csv.QUOTE_ALL, - fieldnames=headers) + writer = csv.DictWriter(line, quoting=csv.QUOTE_ALL, fieldnames=headers) writer.writeheader() yield line.read() for pids, order_batch_results in batch(search_result): - order_lines, documents, accounts, receipt_lines = \ + order_lines, documents, accounts, receipt_lines = ( get_linked_records_by_order_pids(pids) + ) receipts = get_receipts_by_order_pids(pids) # vendors - vendor_pids = [order['vendor']['pid'] - for order in order_batch_results.values() - if order['vendor']['pid'] not in vendors] + vendor_pids = [ + order["vendor"]["pid"] + for order in order_batch_results.values() + if order["vendor"]["pid"] not in vendors + ] vendors.update(get_vendors_by_pids(vendor_pids)) # prepare export based on order lines for order_line_pid, order_line in order_lines.items(): - order_pid = order_line['acq_order']['pid'] + order_pid = order_line["acq_order"]["pid"] order_data = order_batch_results[order_pid] - vendor_data = vendors.get(order_data['vendor']['pid']) + vendor_data = vendors.get(order_data["vendor"]["pid"]) - csv_data = { - k: order_data.get(f) for k, f in order_fields.items() - } + csv_data = {k: order_data.get(f) for k, f in order_fields.items()} # Update csv data with vendor - csv_data['vendor_name'] = vendor_data.get('name') + csv_data["vendor_name"] = vendor_data.get("name") # extract order notes order_notes = filter( - lambda x: x.get('source').get('type') == 'acor', - order_data.get('notes', {}) + lambda x: x.get("source").get("type") == "acor", + order_data.get("notes", {}), ) for note in order_notes: - note_type = note.get('type') - column_name = f'order_{note_type}' - csv_data[column_name] = note.get('content') + note_type = note.get("type") + column_name = f"order_{note_type}" + csv_data[column_name] = note.get("content") # update csv data with document infos - csv_data.update( - documents.get(order_line['document']['pid'])) + csv_data.update(documents.get(order_line["document"]["pid"])) # update csv data with account infos - account = accounts.get(order_line['acq_account']['pid']) - csv_data.update({ - 'account_name': account.get('name'), - 'account_number': account.get('number'), - }) + account = accounts.get(order_line["acq_account"]["pid"]) + csv_data.update( + { + "account_name": account.get("name"), + "account_number": account.get("number"), + } + ) # update csv data with order line infos - 
csv_data.update({ - 'order_lines_priority': order_line.get('priority'), - 'order_lines_notes': ' | '.join( - f"{note['type']}: {note['content']}" - for note in order_line.get('notes', []) - ), - 'order_lines_status': order_line['status'], - 'ordered_quantity': order_line['quantity'], - 'ordered_unit_price': order_line['amount'], - 'ordered_amount': order_line['total_amount'], - - }) + csv_data.update( + { + "order_lines_priority": order_line.get("priority"), + "order_lines_notes": " | ".join( + f"{note['type']}: {note['content']}" + for note in order_line.get("notes", []) + ), + "order_lines_status": order_line["status"], + "ordered_quantity": order_line["quantity"], + "ordered_unit_price": order_line["amount"], + "ordered_amount": order_line["total_amount"], + } + ) # if we are receipt lines, we need to iterate on # and return csv row receipt_line_data = receipt_lines.get(order_line_pid) if receipt_line_data: for receipt_line in receipt_line_data: - receipt = receipts\ - .get(receipt_line['acq_receipt']['pid']) - csv_data.update({ - 'received_amount': - receipt_line['total_amount'], - 'received_quantity': - receipt_line['quantity'], - 'receipt_reference': receipt['reference'], - 'receipt_date': receipt_line['receipt_date'], - }) + receipt = receipts.get(receipt_line["acq_receipt"]["pid"]) + csv_data.update( + { + "received_amount": receipt_line["total_amount"], + "received_quantity": receipt_line["quantity"], + "receipt_reference": receipt["reference"], + "receipt_date": receipt_line["receipt_date"], + } + ) writer.writerow(self.process_dict(csv_data)) yield line.read() else: # write csv data writer.writerow(self.process_dict(csv_data)) yield line.read() + # return streamed content return stream_with_context(generate_csv()) @@ -267,25 +301,29 @@ class OrderDocumentFormatter(DocumentFormatter): """Document formatter class for orders.""" # separator between multiple values - _separator = ' | ' + _separator = " | " def __init__(self, record, language=None, _include_fields=None): """Initialize RIS formatter with the specific record.""" super().__init__(record) - self._language = language or current_app\ - .config.get('BABEL_DEFAULT_LANGUAGE', 'en') + self._language = language or current_app.config.get( + "BABEL_DEFAULT_LANGUAGE", "en" + ) self._include_fields = _include_fields or [ - 'document_pid', 'document_creator', 'document_title', - 'document_publisher', 'document_publication_year', - 'document_edition_statement', 'document_series_statement', - 'document_isbn' + "document_pid", + "document_creator", + "document_title", + "document_publisher", + "document_publication_year", + "document_edition_statement", + "document_series_statement", + "document_isbn", ] def post_process(self, data): """Post process data.""" # join multiple values in data if needed.""" - return self._separator.join(map(str, data)) \ - if isinstance(data, list) else data + return self._separator.join(map(str, data)) if isinstance(data, list) else data def _get_isbn(self, states=None): """Return ISBN identifiers for the given states.""" diff --git a/rero_ils/modules/acquisition/acq_orders/serializers/json.py b/rero_ils/modules/acquisition/acq_orders/serializers/json.py index 2e72c17d97..c0accbd4a1 100644 --- a/rero_ils/modules/acquisition/acq_orders/serializers/json.py +++ b/rero_ils/modules/acquisition/acq_orders/serializers/json.py @@ -30,23 +30,21 @@ class AcqOrderJSONSerializer(ACQJSONSerializer): def _postprocess_search_aggregations(self, aggregations: dict) -> None: """Post-process aggregations from a search 
result.""" JSONSerializer.enrich_bucket_with_data( - aggregations.get('library', {}).get('buckets', []), - LibrariesSearch, 'name' + aggregations.get("library", {}).get("buckets", []), LibrariesSearch, "name" ) JSONSerializer.enrich_bucket_with_data( - aggregations.get('vendor', {}).get('buckets', []), - VendorsSearch, 'name' + aggregations.get("vendor", {}).get("buckets", []), VendorsSearch, "name" ) JSONSerializer.enrich_bucket_with_data( - aggregations.get('account', {}).get('buckets', []), - AcqAccountsSearch, 'name' + aggregations.get("account", {}).get("buckets", []), + AcqAccountsSearch, + "name", ) JSONSerializer.enrich_bucket_with_data( - aggregations.get('budget', {}).get('buckets', []), - BudgetsSearch, 'name' + aggregations.get("budget", {}).get("buckets", []), BudgetsSearch, "name" ) # Add configuration for order_date and receipt_date buckets - for aggr_name in ['order_date', 'receipt_date']: + for aggr_name in ["order_date", "receipt_date"]: aggr = aggregations.get(aggr_name, {}) JSONSerializer.add_date_range_configuration(aggr) diff --git a/rero_ils/modules/acquisition/acq_orders/utils.py b/rero_ils/modules/acquisition/acq_orders/utils.py index 05ce4680ad..2aaf78d126 100644 --- a/rero_ils/modules/acquisition/acq_orders/utils.py +++ b/rero_ils/modules/acquisition/acq_orders/utils.py @@ -53,11 +53,12 @@ def get_recipient_suggestions(order): suggestions = {} if (vendor := order.vendor) and (email := vendor.order_email): suggestions.setdefault(email, set()).update([RecipientType.TO]) - if settings := (order.library or {}).get('acquisition_settings'): - if email := settings.get('shipping_informations', {}).get('email'): - suggestions.setdefault(email, set())\ - .update([RecipientType.CC, RecipientType.REPLY_TO]) - if email := settings.get('billing_informations', {}).get('email'): + if settings := (order.library or {}).get("acquisition_settings"): + if email := settings.get("shipping_informations", {}).get("email"): + suggestions.setdefault(email, set()).update( + [RecipientType.CC, RecipientType.REPLY_TO] + ) + if email := settings.get("billing_informations", {}).get("email"): suggestions.setdefault(email, set()) if email := current_librarian.user.email: suggestions.setdefault(email, set()) @@ -67,8 +68,8 @@ def get_recipient_suggestions(order): # return a recipient suggestion array. 
cleaned_suggestions = [] for recipient_address, recipient_types in suggestions.items(): - suggestion = {'address': recipient_address} + suggestion = {"address": recipient_address} if recipient_types: - suggestion['type'] = list(recipient_types) + suggestion["type"] = list(recipient_types) cleaned_suggestions.append(suggestion) return cleaned_suggestions diff --git a/rero_ils/modules/acquisition/acq_orders/views.py b/rero_ils/modules/acquisition/acq_orders/views.py index 96f8c06590..5095d60047 100644 --- a/rero_ils/modules/acquisition/acq_orders/views.py +++ b/rero_ils/modules/acquisition/acq_orders/views.py @@ -21,22 +21,18 @@ from flask import request as flask_request from jinja2 import TemplateNotFound -from rero_ils.modules.decorators import check_logged_as_librarian, \ - jsonify_error +from rero_ils.modules.decorators import check_logged_as_librarian, jsonify_error from .api import AcqOrder from .dumpers import AcqOrderHistoryDumper, AcqOrderNotificationDumper from .utils import get_history, get_recipient_suggestions api_blueprint = Blueprint( - 'api_order', - __name__, - url_prefix='/acq_order', - template_folder='templates' + "api_order", __name__, url_prefix="/acq_order", template_folder="templates" ) -@api_blueprint.route('/<order_pid>/history', methods=['GET']) +@api_blueprint.route("/<order_pid>/history", methods=["GET"]) @check_logged_as_librarian @jsonify_error def order_history(order_pid): @@ -48,12 +44,12 @@ def order_history(order_pid): data = [] for idx, acq_order in enumerate(get_history(order), 1): dump_data = acq_order.dumps(dumper=AcqOrderHistoryDumper()) - dump_data['order'] = idx + dump_data["order"] = idx data.append(dump_data) return jsonify(data) -@api_blueprint.route('/<order_pid>/acquisition_order/preview', methods=['GET']) +@api_blueprint.route("/<order_pid>/acquisition_order/preview", methods=["GET"]) @check_logged_as_librarian @jsonify_error def order_notification_preview(order_pid): @@ -62,28 +58,26 @@ def order_notification_preview(order_pid): if not order: abort(404, "Acquisition order not found") - response = {'recipient_suggestions': get_recipient_suggestions(order)} + response = {"recipient_suggestions": get_recipient_suggestions(order)} order_data = order.dumps(dumper=AcqOrderNotificationDumper()) - language = order_data.get('vendor', {}).get('language') + language = order_data.get("vendor", {}).get("language") try: - tmpl_file = f'rero_ils/vendor_order_mail/{language}.tpl.txt' - response['preview'] = render_template(tmpl_file, order=order_data) + tmpl_file = f"rero_ils/vendor_order_mail/{language}.tpl.txt" + response["preview"] = render_template(tmpl_file, order=order_data) except TemplateNotFound: # If the corresponding translated template isn't found, use the english # template as default template - msg = 'None "vendor_order_mail" template found for ' \ f'"{language}" language' + msg = 'No "vendor_order_mail" template found for ' f'"{language}" language' current_app.logger.error(msg) - response['message'] = [{'type': 'error', 'content': msg}] - language = current_app.config.get( - 'RERO_ILS_APP_DEFAULT_LANGUAGE', 'eng') - tmpl_file = f'rero_ils/vendor_order_mail/{language}.tpl.txt' - response['preview'] = render_template(tmpl_file, order=order_data) + response["message"] = [{"type": "error", "content": msg}] + language = current_app.config.get("RERO_ILS_APP_DEFAULT_LANGUAGE", "eng") + tmpl_file = f"rero_ils/vendor_order_mail/{language}.tpl.txt" + response["preview"] = render_template(tmpl_file, order=order_data) return jsonify(response) -@api_blueprint.route('/<order_pid>/send_order', 
methods=['POST']) +@api_blueprint.route("/<order_pid>/send_order", methods=["POST"]) @check_logged_as_librarian @jsonify_error def send_order(order_pid): @@ -99,9 +93,9 @@ def send_order(order_pid): abort(404, "Acquisition order not found") data = flask_request.get_json() - emails = data.get('emails') + emails = data.get("emails") if not emails: abort(400, "Missing recipients emails.") notifications = order.send_order(emails=emails) - response = {'data': notifications} + response = {"data": notifications} return jsonify(response) diff --git a/rero_ils/modules/acquisition/acq_receipt_lines/api.py b/rero_ils/modules/acquisition/acq_receipt_lines/api.py index 1b69856509..0de0522986 100644 --- a/rero_ils/modules/acquisition/acq_receipt_lines/api.py +++ b/rero_ils/modules/acquisition/acq_receipt_lines/api.py @@ -30,22 +30,22 @@ from rero_ils.modules.providers import Provider from rero_ils.modules.utils import extracted_data_from_ref, get_ref_for_pid -from .extensions import AcqReceiptLineValidationExtension, \ - AcquisitionReceiptLineCompleteDataExtension +from .extensions import ( + AcqReceiptLineValidationExtension, + AcquisitionReceiptLineCompleteDataExtension, +) from .models import AcqReceiptLineIdentifier, AcqReceiptLineMetadata # provider AcqReceiptLineProvider = type( - 'AcqReceiptLineProvider', + "AcqReceiptLineProvider", (Provider,), - dict(identifier=AcqReceiptLineIdentifier, pid_type='acrl') + dict(identifier=AcqReceiptLineIdentifier, pid_type="acrl"), ) # minter -acq_receipt_line_id_minter = partial( - id_minter, provider=AcqReceiptLineProvider) +acq_receipt_line_id_minter = partial(id_minter, provider=AcqReceiptLineProvider) # fetcher -acq_receipt_line_id_fetcher = partial( - id_fetcher, provider=AcqReceiptLineProvider) +acq_receipt_line_id_fetcher = partial(id_fetcher, provider=AcqReceiptLineProvider) class AcqReceiptLinesSearch(IlsRecordsSearch): @@ -54,9 +54,9 @@ class Meta: """Search only on acq_receipt_lines index.""" - index = 'acq_receipt_lines' + index = "acq_receipt_lines" doc_types = None - fields = ('*', ) + fields = ("*",) facets = {} default_filter = None @@ -72,18 +72,18 @@ class AcqReceiptLine(AcquisitionIlsRecord): _extensions = [ AcquisitionReceiptLineCompleteDataExtension(), - AcqReceiptLineValidationExtension() + AcqReceiptLineValidationExtension(), ] @classmethod - def create(cls, data, id_=None, delete_pid=False, - dbcommit=True, reindex=True, **kwargs): + def create( + cls, data, id_=None, delete_pid=False, dbcommit=True, reindex=True, **kwargs + ): """Create Acquisition Receipt Line record.""" # TODO : should be used into `pre_create` hook extensions but seems not # work as expected. 
cls._build_additional_refs(data) - return super().create( - data, id_, delete_pid, dbcommit, reindex, **kwargs) + return super().create(data, id_, delete_pid, dbcommit, reindex, **kwargs) def update(self, data, commit=True, dbcommit=True, reindex=True): """Update Acquisition Receipt Line record.""" @@ -91,7 +91,7 @@ def update(self, data, commit=True, dbcommit=True, reindex=True): original_record = self.__class__.get_record(self.id) new_data = deepcopy(dict(self)) - new_data.update(data) + new_data |= data self._build_additional_refs(new_data) super().update(new_data, commit, dbcommit, reindex) @@ -100,15 +100,11 @@ def update(self, data, commit=True, dbcommit=True, reindex=True): @classmethod def _build_additional_refs(cls, data): """Build $ref for the organisation and library the acq receipt line.""" - receipt = extracted_data_from_ref( - data.get('acq_receipt'), data='record') - if receipt: - data['organisation'] = { - '$ref': get_ref_for_pid('org', receipt.organisation_pid) - } - data['library'] = { - '$ref': get_ref_for_pid('lib', receipt.library_pid) + if receipt := extracted_data_from_ref(data.get("acq_receipt"), data="record"): + data["organisation"] = { + "$ref": get_ref_for_pid("org", receipt.organisation_pid) } + data["library"] = {"$ref": get_ref_for_pid("lib", receipt.library_pid)} # GETTER & SETTER ========================================================= # * Define some properties as shortcut to quickly access object attrs. @@ -116,25 +112,22 @@ def _build_additional_refs(cls, data): @cached_property def receipt(self): """Shortcut to the receipt of the receipt line.""" - return extracted_data_from_ref(self.get('acq_receipt'), data='record') + return extracted_data_from_ref(self.get("acq_receipt"), data="record") @property def library_pid(self): """Shortcut for acquisition receipt line library pid.""" - return extracted_data_from_ref(self.get('library')) + return extracted_data_from_ref(self.get("library")) @property def order_line_pid(self): """Shortcut for related acquisition order line pid.""" - return extracted_data_from_ref(self.get('acq_order_line')) + return extracted_data_from_ref(self.get("acq_order_line")) @cached_property def order_line(self): """Shortcut for related acquisition order line record.""" - return extracted_data_from_ref( - self.get('acq_order_line'), - data='record' - ) + return extracted_data_from_ref(self.get("acq_order_line"), data="record") @property def is_active(self): @@ -153,25 +146,24 @@ def acq_account_pid(self): @property def receipt_pid(self): """Shortcut for related acquisition receipt pid.""" - return extracted_data_from_ref(self.get('acq_receipt')) + return extracted_data_from_ref(self.get("acq_receipt")) @property def amount(self): """Shortcut for related acquisition amount.""" - return self.get('amount') + return self.get("amount") @property def total_amount(self): """Shortcut for related acquisition total_amount.""" - vat_factor = (100 + self.get('vat_rate', 0)) / 100 - total = self.amount * self.receipt.exchange_rate * self.quantity * \ - vat_factor + vat_factor = (100 + self.get("vat_rate", 0)) / 100 + total = self.amount * self.receipt.exchange_rate * self.quantity * vat_factor return round(total, 2) @property def quantity(self): """Shortcut for related acquisition quantity.""" - return self.get('quantity') + return self.get("quantity") @property def organisation_pid(self): @@ -187,8 +179,9 @@ def get_note(self, note_type): :return the note content if exists, otherwise returns None. 
""" note = [ - note.get('content') for note in self.get('notes', []) - if note.get('type') == note_type + note.get("content") + for note in self.get("notes", []) + if note.get("type") == note_type ] return next(iter(note), None) @@ -197,7 +190,7 @@ def reasons_not_to_delete(self): cannot_delete = {} # Note: not possible to delete records attached to rolled_over budget. if not self.is_active: - cannot_delete['links'] = {'rolled_over': True} + cannot_delete["links"] = {"rolled_over": True} return cannot_delete diff --git a/rero_ils/modules/acquisition/acq_receipt_lines/dumpers.py b/rero_ils/modules/acquisition/acq_receipt_lines/dumpers.py index 63000b788c..5caed9a455 100644 --- a/rero_ils/modules/acquisition/acq_receipt_lines/dumpers.py +++ b/rero_ils/modules/acquisition/acq_receipt_lines/dumpers.py @@ -38,27 +38,26 @@ def dump(self, record, data): :param data: The initial dump data passed in by ``record.dumps()``. """ # Keep only some attributes from AcqReceiptLine object initial dump. - for attr in ['pid', 'receipt_date', 'amount', 'quantity', 'vat_rate']: + for attr in ["pid", "receipt_date", "amount", "quantity", "vat_rate"]: if value := record.get(attr): data.update({attr: value}) - if notes := record.get('notes', []): - data['notes'] = [note['content'] for note in notes] + if notes := record.get("notes", []): + data["notes"] = [note["content"] for note in notes] order_line = record.order_line # Add acq_account information's: pid - data['acq_account'] = {'pid': order_line.account_pid} + data["acq_account"] = {"pid": order_line.account_pid} # Add document information's: pid, formatted title and ISBN identifiers # (remove None values from document metadata) document = order_line.document identifiers = document.get_identifiers( - filters=[IdentifierType.ISBN], - with_alternatives=True + filters=[IdentifierType.ISBN], with_alternatives=True ) identifiers = [identifier.normalize() for identifier in identifiers] - data['document'] = { - 'pid': document.pid, - 'title': TitleExtension.format_text(document.get('title', [])), - 'identifiers': identifiers + data["document"] = { + "pid": document.pid, + "title": TitleExtension.format_text(document.get("title", [])), + "identifiers": identifiers, } - data['document'] = {k: v for k, v in data['document'].items() if v} + data["document"] = {k: v for k, v in data["document"].items() if v} return data diff --git a/rero_ils/modules/acquisition/acq_receipt_lines/extensions.py b/rero_ils/modules/acquisition/acq_receipt_lines/extensions.py index 66a1eea219..04d111ac53 100644 --- a/rero_ils/modules/acquisition/acq_receipt_lines/extensions.py +++ b/rero_ils/modules/acquisition/acq_receipt_lines/extensions.py @@ -16,6 +16,8 @@ # along with this program. If not, see . """Acquisition Receipt line record extensions.""" + +import contextlib from datetime import datetime from flask_babel import gettext as _ @@ -35,8 +37,8 @@ def post_init(self, record, data, model=None, **kwargs): :param data: The dict passed to the record's constructor :param model: The model class used for initialization. 
""" - if not record.get('receipt_date'): - record['receipt_date'] = datetime.now().strftime('%Y-%m-%d') + if not record.get("receipt_date"): + record["receipt_date"] = datetime.now().strftime("%Y-%m-%d") class AcqReceiptLineValidationExtension(RecordExtension): @@ -52,20 +54,14 @@ def _check_received_quantity(record): return original_quantity = 0 - try: + with contextlib.suppress(NoResultFound): original_record = record.__class__.get_record(record.id) original_quantity = original_record.quantity - except NoResultFound: - # it's probably because the record isn't yet into DB (but `id` - # field is already populated for very next integration) - # As the record isn't yet into DB, the original_quantity keep 0 - pass - quantity_to_check = record.quantity - original_quantity already_received_quantity = record.order_line.received_quantity new_total_quantity = quantity_to_check + already_received_quantity if new_total_quantity > record.order_line.quantity: - msg = _('Received quantity is grower than ordered quantity') + msg = _("Received quantity is grower than ordered quantity") raise ValidationError(msg) # INVENIO EXTENSION HOOKS ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/rero_ils/modules/acquisition/acq_receipt_lines/jsonresolver.py b/rero_ils/modules/acquisition/acq_receipt_lines/jsonresolver.py index 98e7670857..5282d96748 100644 --- a/rero_ils/modules/acquisition/acq_receipt_lines/jsonresolver.py +++ b/rero_ils/modules/acquisition/acq_receipt_lines/jsonresolver.py @@ -23,13 +23,13 @@ from invenio_pidstore.models import PersistentIdentifier, PIDStatus -@jsonresolver.route('/api/acq_receipt_lines/', host='bib.rero.ch') +@jsonresolver.route("/api/acq_receipt_lines/", host="bib.rero.ch") def acq_receipt_line_resolver(pid): """Resolver for acquisition receipt line record.""" - persistent_id = PersistentIdentifier.get('acrl', pid) + persistent_id = PersistentIdentifier.get("acrl", pid) if persistent_id.status == PIDStatus.REGISTERED: return dict(pid=persistent_id.pid_value) current_app.logger.error( - f'Doc resolver error: /api/acq_receipt_lines/{pid} {persistent_id}' + f"Doc resolver error: /api/acq_receipt_lines/{pid} {persistent_id}" ) - raise Exception('unable to resolve') + raise Exception("unable to resolve") diff --git a/rero_ils/modules/acquisition/acq_receipt_lines/listener.py b/rero_ils/modules/acquisition/acq_receipt_lines/listener.py index 7cc0b0df98..1632434c2b 100644 --- a/rero_ils/modules/acquisition/acq_receipt_lines/listener.py +++ b/rero_ils/modules/acquisition/acq_receipt_lines/listener.py @@ -18,13 +18,21 @@ """Signals connector for Acq receipt lines.""" -from rero_ils.modules.acquisition.acq_receipt_lines.api import \ - AcqReceiptLine, AcqReceiptLinesSearch +from rero_ils.modules.acquisition.acq_receipt_lines.api import ( + AcqReceiptLine, + AcqReceiptLinesSearch, +) -def enrich_acq_receipt_line_data(sender, json=None, record=None, index=None, - doc_type=None, arguments=None, **dummy_kwargs - ): +def enrich_acq_receipt_line_data( + sender, + json=None, + record=None, + index=None, + doc_type=None, + arguments=None, + **dummy_kwargs +): """Signal sent before a record is indexed. :param json: The dumped record dictionary which can be modified. @@ -32,18 +40,14 @@ def enrich_acq_receipt_line_data(sender, json=None, record=None, index=None, :param index: The index in which the record will be indexed. :param doc_type: The doc_type for the record. 
""" - if index.split('-')[0] == AcqReceiptLinesSearch.Meta.index: + if index.split("-")[0] == AcqReceiptLinesSearch.Meta.index: if not isinstance(record, AcqReceiptLine): - record = AcqReceiptLine.get_record_by_pid(record.get('pid')) + record = AcqReceiptLine.get_record_by_pid(record.get("pid")) # other dynamic keys - json.update({ - 'acq_account': { - 'pid': record.order_line.account_pid, - 'type': 'acac' - }, - 'document': { - 'pid': record.order_line.document_pid, - 'type': 'doc' - }, - 'total_amount': record.total_amount - }) + json.update( + { + "acq_account": {"pid": record.order_line.account_pid, "type": "acac"}, + "document": {"pid": record.order_line.document_pid, "type": "doc"}, + "total_amount": record.total_amount, + } + ) diff --git a/rero_ils/modules/acquisition/acq_receipt_lines/models.py b/rero_ils/modules/acquisition/acq_receipt_lines/models.py index 8163a4c046..98571e0b89 100644 --- a/rero_ils/modules/acquisition/acq_receipt_lines/models.py +++ b/rero_ils/modules/acquisition/acq_receipt_lines/models.py @@ -28,23 +28,24 @@ class AcqReceiptLineIdentifier(RecordIdentifier): """Sequence generator for acquisition receipt line identifiers.""" - __tablename__ = 'acq_receipt_line_id' - __mapper_args__ = {'concrete': True} + __tablename__ = "acq_receipt_line_id" + __mapper_args__ = {"concrete": True} recid = db.Column( - db.BigInteger().with_variant(db.Integer, 'sqlite'), - primary_key=True, autoincrement=True, + db.BigInteger().with_variant(db.Integer, "sqlite"), + primary_key=True, + autoincrement=True, ) class AcqReceiptLineMetadata(db.Model, RecordMetadataBase): """AcqReceiptLine record metadata.""" - __tablename__ = 'acq_receipt_line_metadata' + __tablename__ = "acq_receipt_line_metadata" class AcqReceiptLineNoteType: """Type of acquisition receipt line note.""" - STAFF = 'staff_note' - RECEIPT = 'receipt_note' + STAFF = "staff_note" + RECEIPT = "receipt_note" diff --git a/rero_ils/modules/acquisition/acq_receipt_lines/permissions.py b/rero_ils/modules/acquisition/acq_receipt_lines/permissions.py index 4381cd0c8d..c0a4989041 100644 --- a/rero_ils/modules/acquisition/acq_receipt_lines/permissions.py +++ b/rero_ils/modules/acquisition/acq_receipt_lines/permissions.py @@ -19,19 +19,22 @@ """Permissions for Acquisition receipt line.""" from invenio_access import action_factory -from rero_ils.modules.permissions import AllowedByAction, \ - AllowedByActionRestrictByManageableLibrary, DisallowedIfRollovered, \ - RecordPermissionPolicy +from rero_ils.modules.permissions import ( + AllowedByAction, + AllowedByActionRestrictByManageableLibrary, + DisallowedIfRollovered, + RecordPermissionPolicy, +) from .api import AcqReceiptLine # Actions to control acquisition receipt lines resource policies -search_action = action_factory('acrl-search') -read_action = action_factory('acrl-read') -create_action = action_factory('acrl-create') -update_action = action_factory('acrl-update') -delete_action = action_factory('acrl-delete') -access_action = action_factory('acrl-access') +search_action = action_factory("acrl-search") +read_action = action_factory("acrl-read") +create_action = action_factory("acrl-create") +update_action = action_factory("acrl-update") +delete_action = action_factory("acrl-delete") +access_action = action_factory("acrl-access") class AcqReceiptLinePermissionPolicy(RecordPermissionPolicy): @@ -41,13 +44,13 @@ class AcqReceiptLinePermissionPolicy(RecordPermissionPolicy): can_read = [AllowedByActionRestrictByManageableLibrary(read_action)] can_create = [ 
AllowedByActionRestrictByManageableLibrary(create_action), - DisallowedIfRollovered(AcqReceiptLine) + DisallowedIfRollovered(AcqReceiptLine), ] can_update = [ AllowedByActionRestrictByManageableLibrary(update_action), - DisallowedIfRollovered(AcqReceiptLine) + DisallowedIfRollovered(AcqReceiptLine), ] can_delete = [ AllowedByActionRestrictByManageableLibrary(delete_action), - DisallowedIfRollovered(AcqReceiptLine) + DisallowedIfRollovered(AcqReceiptLine), ] diff --git a/rero_ils/modules/acquisition/acq_receipt_lines/serializers.py b/rero_ils/modules/acquisition/acq_receipt_lines/serializers.py index 5cba14a65d..b1253df43e 100644 --- a/rero_ils/modules/acquisition/acq_receipt_lines/serializers.py +++ b/rero_ils/modules/acquisition/acq_receipt_lines/serializers.py @@ -20,9 +20,12 @@ from invenio_records_rest.serializers.response import record_responsify -from rero_ils.modules.serializers import ACQJSONSerializer, \ - RecordSchemaJSONV1, search_responsify +from rero_ils.modules.serializers import ( + ACQJSONSerializer, + RecordSchemaJSONV1, + search_responsify, +) _json = ACQJSONSerializer(RecordSchemaJSONV1) -json_acrl_search = search_responsify(_json, 'application/rero+json') -json_acrl_record = record_responsify(_json, 'application/rero+json') +json_acrl_search = search_responsify(_json, "application/rero+json") +json_acrl_record = record_responsify(_json, "application/rero+json") diff --git a/rero_ils/modules/acquisition/acq_receipts/api.py b/rero_ils/modules/acquisition/acq_receipts/api.py index 041c1aa428..d53344ef19 100644 --- a/rero_ils/modules/acquisition/acq_receipts/api.py +++ b/rero_ils/modules/acquisition/acq_receipts/api.py @@ -21,27 +21,35 @@ from copy import deepcopy from functools import partial -from rero_ils.modules.acquisition.acq_receipt_lines.api import \ - AcqReceiptLine, AcqReceiptLinesSearch +from rero_ils.modules.acquisition.acq_receipt_lines.api import ( + AcqReceiptLine, + AcqReceiptLinesSearch, +) from rero_ils.modules.acquisition.api import AcquisitionIlsRecord from rero_ils.modules.api import IlsRecordsIndexer, IlsRecordsSearch from rero_ils.modules.extensions import DecimalAmountExtension from rero_ils.modules.fetchers import id_fetcher from rero_ils.modules.minters import id_minter from rero_ils.modules.providers import Provider -from rero_ils.modules.utils import extracted_data_from_ref, get_objects, \ - get_ref_for_pid, sorted_pids +from rero_ils.modules.utils import ( + extracted_data_from_ref, + get_objects, + get_ref_for_pid, + sorted_pids, +) -from .extensions import AcqReceiptExtension, \ - AcquisitionReceiptCompleteDataExtension -from .models import AcqReceiptIdentifier, AcqReceiptLineCreationStatus, \ - AcqReceiptMetadata +from .extensions import AcqReceiptExtension, AcquisitionReceiptCompleteDataExtension +from .models import ( + AcqReceiptIdentifier, + AcqReceiptLineCreationStatus, + AcqReceiptMetadata, +) # provider AcqReceiptProvider = type( - 'AcqReceiptProvider', + "AcqReceiptProvider", (Provider,), - dict(identifier=AcqReceiptIdentifier, pid_type='acre') + dict(identifier=AcqReceiptIdentifier, pid_type="acre"), ) # minter acq_receipt_id_minter = partial(id_minter, provider=AcqReceiptProvider) @@ -55,9 +63,9 @@ class AcqReceiptsSearch(IlsRecordsSearch): class Meta: """Search only on acq_order index.""" - index = 'acq_receipts' + index = "acq_receipts" doc_types = None - fields = ('*', ) + fields = ("*",) facets = {} default_filter = None @@ -75,14 +83,16 @@ class AcqReceipt(AcquisitionIlsRecord): AcqReceiptExtension(), 
AcquisitionReceiptCompleteDataExtension(), DecimalAmountExtension( - callback=lambda rec: - [adj['amount'] for adj in rec.get('amount_adjustments', [])] + callback=lambda rec: [ + adj["amount"] for adj in rec.get("amount_adjustments", []) + ] ), ] @classmethod - def create(cls, data, id_=None, delete_pid=False, - dbcommit=True, reindex=True, **kwargs): + def create( + cls, data, id_=None, delete_pid=False, dbcommit=True, reindex=True, **kwargs + ): """Create Acquisition receipt record. :param data: a dict data to create the record. @@ -94,8 +104,7 @@ def create(cls, data, id_=None, delete_pid=False, :returns: the created record """ cls._build_additional_refs(data) - record = super().create( - data, id_, delete_pid, dbcommit, reindex, **kwargs) + record = super().create(data, id_, delete_pid, dbcommit, reindex, **kwargs) # reindex the related account if necessary if reindex: for account in record.get_adjustment_accounts(): @@ -120,7 +129,7 @@ def update(self, data, commit=True, dbcommit=True, reindex=True): original_accounts = self.get_adjustment_accounts() new_data = deepcopy(dict(self)) - new_data.update(data) + new_data |= data self._build_additional_refs(new_data) record = super().update(new_data, commit, dbcommit, reindex) if reindex: @@ -134,17 +143,13 @@ def update(self, data, commit=True, dbcommit=True, reindex=True): @classmethod def _build_additional_refs(cls, data): """Build $ref for the organisation of the acquisition receipt.""" - order = extracted_data_from_ref(data.get('acq_order'), data='record') - if order: - data['library'] = { - '$ref': get_ref_for_pid('lib', order.library_pid) - } - data['organisation'] = { - '$ref': get_ref_for_pid('org', order.organisation_pid) + if order := extracted_data_from_ref(data.get("acq_order"), data="record"): + data["library"] = {"$ref": get_ref_for_pid("lib", order.library_pid)} + data["organisation"] = { + "$ref": get_ref_for_pid("org", order.organisation_pid) } - def create_receipt_lines(self, receipt_lines=None, dbcommit=True, - reindex=True): + def create_receipt_lines(self, receipt_lines=None, dbcommit=True, reindex=True): """Create multiple receipt lines. :param receipt_lines: a list of dicts to create the records. 
@@ -160,19 +165,18 @@ def create_receipt_lines(self, receipt_lines=None, dbcommit=True, receipt_lines = receipt_lines or [] for receipt_line in receipt_lines: record = { - 'data': receipt_line, - 'status': AcqReceiptLineCreationStatus.SUCCESS - } - receipt_line['acq_receipt'] = { - '$ref': get_ref_for_pid('acre', self.pid) + "data": receipt_line, + "status": AcqReceiptLineCreationStatus.SUCCESS, } + receipt_line["acq_receipt"] = {"$ref": get_ref_for_pid("acre", self.pid)} try: - line = AcqReceiptLine.create(receipt_line, dbcommit=dbcommit, - reindex=reindex) - record['receipt_line'] = line + line = AcqReceiptLine.create( + receipt_line, dbcommit=dbcommit, reindex=reindex + ) + record["receipt_line"] = line except Exception as error: - record['status'] = AcqReceiptLineCreationStatus.FAILURE - record['error_message'] = str(error) + record["status"] = AcqReceiptLineCreationStatus.FAILURE + record["error_message"] = str(error) created_receipt_lines.append(record) return created_receipt_lines @@ -184,16 +188,18 @@ def get_links_to_me(self, get_pids=False): if False count of linked records """ from ..acq_receipt_lines.api import AcqReceiptLinesSearch + links = {} - receipt_lines_query = AcqReceiptLinesSearch() \ - .filter('term', acq_receipt__pid=self.pid) + receipt_lines_query = AcqReceiptLinesSearch().filter( + "term", acq_receipt__pid=self.pid + ) if get_pids: acq_receipt_lines = sorted_pids(receipt_lines_query) else: acq_receipt_lines = receipt_lines_query.count() if acq_receipt_lines: - links['acq_receipt_lines'] = acq_receipt_lines + links["acq_receipt_lines"] = acq_receipt_lines return links def reasons_not_to_delete(self): @@ -201,7 +207,7 @@ def reasons_not_to_delete(self): cannot_delete = {} # Note: not possible to delete records attached to rolled_over budget. if not self.is_active: - cannot_delete['links'] = {'rolled_over': True} + cannot_delete["links"] = {"rolled_over": True} return cannot_delete # Note : linked receipt lines aren't yet a reason to keep the record. # These lines will be deleted with the record. @@ -215,12 +221,12 @@ def reasons_not_to_delete(self): @property def order_pid(self): """Shortcut for related acquisition order pid.""" - return extracted_data_from_ref(self.get('acq_order')) + return extracted_data_from_ref(self.get("acq_order")) @property def order(self): """Shortcut to the related order.""" - return extracted_data_from_ref(self.get('acq_order'), data='record') + return extracted_data_from_ref(self.get("acq_order"), data="record") @property def is_active(self): @@ -234,12 +240,12 @@ def is_active(self): @property def amount_adjustments(self): """Shortcut to get receipt amount adjustments.""" - return self.get('amount_adjustments', []) + return self.get("amount_adjustments", []) @property def exchange_rate(self): """Shortcut to get receipt exchange_rate.""" - return self.get('exchange_rate') + return self.get("exchange_rate") @property def total_amount(self): @@ -252,40 +258,36 @@ def total_amount(self): :return the receipt total amount rounded on 0.01. 
""" # Compute the total of all related receipt line - search = AcqReceiptLinesSearch() \ - .filter('term', acq_receipt__pid=self.pid) + search = AcqReceiptLinesSearch().filter("term", acq_receipt__pid=self.pid) search.aggs.metric( - 'receipt_total_amount', - 'sum', - field='total_amount', - script={ - 'source': 'Math.round(_value*100)/100.00' - } + "receipt_total_amount", + "sum", + field="total_amount", + script={"source": "Math.round(_value*100)/100.00"}, ) results = search.execute() total = results.aggregations.receipt_total_amount.value # Add the sum of all adjustments - total += sum(fee.get('amount') for fee in self.amount_adjustments) + total += sum(fee.get("amount") for fee in self.amount_adjustments) return round(total, 2) @property def total_item_quantity(self): """Get the number of items related to this receipt.""" - search = AcqReceiptLinesSearch() \ - .filter('term', acq_receipt__pid=self.pid) - search.aggs.metric('quantity', 'sum', field='quantity') + search = AcqReceiptLinesSearch().filter("term", acq_receipt__pid=self.pid) + search.aggs.metric("quantity", "sum", field="quantity") results = search.execute() return results.aggregations.quantity.value @property def library_pid(self): """Shortcut for acquisition receipt library pid.""" - return extracted_data_from_ref(self.get('library')) + return extracted_data_from_ref(self.get("library")) @property def organisation_pid(self): """Shortcut for acquisition receipt organisation pid.""" - return extracted_data_from_ref(self.get('organisation')) + return extracted_data_from_ref(self.get("organisation")) def get_receipt_lines(self, output=None): """Get all receipt lines related to this receipt. @@ -293,12 +295,11 @@ def get_receipt_lines(self, output=None): :param output: output method. 'count', 'pids' or None. :return a generator of related order lines (or length). """ - query = AcqReceiptLinesSearch()\ - .filter('term', acq_receipt__pid=self.pid) + query = AcqReceiptLinesSearch().filter("term", acq_receipt__pid=self.pid) - if output == 'count': + if output == "count": return query.count() - elif output == 'pids': + elif output == "pids": return sorted_pids(query) else: return get_objects(AcqReceiptLine, query) @@ -311,17 +312,18 @@ def get_note(self, note_type): :return the note content if exists, otherwise returns None. """ note = [ - note.get('content') for note in self.get('notes', []) - if note.get('type') == note_type + note.get("content") + for note in self.get("notes", []) + if note.get("type") == note_type ] return next(iter(note), None) def get_adjustment_accounts(self): """Get the list of adjustment account pid related to this receipt.""" - return set([ - extracted_data_from_ref(adj.get('acq_account'), data='record') + return { + extracted_data_from_ref(adj.get("acq_account"), data="record") for adj in self.amount_adjustments - ]) + } class AcqReceiptsIndexer(IlsRecordsIndexer): diff --git a/rero_ils/modules/acquisition/acq_receipts/dumpers.py b/rero_ils/modules/acquisition/acq_receipts/dumpers.py index 6d29e3e1f4..08be98c21c 100644 --- a/rero_ils/modules/acquisition/acq_receipts/dumpers.py +++ b/rero_ils/modules/acquisition/acq_receipts/dumpers.py @@ -35,11 +35,11 @@ def dump(self, record, data): :param data: The initial dump data passed in by ``record.dumps()``. 
""" metadata = { - 'pid': record.pid, - 'reference': record.get('reference'), - 'receipt_date': list(set([ - line.get('receipt_date') for line in record.get_receipt_lines() - ])) + "pid": record.pid, + "reference": record.get("reference"), + "receipt_date": list( + {line.get("receipt_date") for line in record.get_receipt_lines()} + ), } metadata = {k: v for k, v in metadata.items() if v} data.update(metadata) diff --git a/rero_ils/modules/acquisition/acq_receipts/extensions.py b/rero_ils/modules/acquisition/acq_receipts/extensions.py index dbcb670d92..c11243ea43 100644 --- a/rero_ils/modules/acquisition/acq_receipts/extensions.py +++ b/rero_ils/modules/acquisition/acq_receipts/extensions.py @@ -46,6 +46,6 @@ def post_create(self, record): :param record: the record metadata. """ - if not record.get('reference'): - record['reference'] = f'RECEIPT-{record.pid}' + if not record.get("reference"): + record["reference"] = f"RECEIPT-{record.pid}" record.update(record, dbcommit=True, reindex=True) diff --git a/rero_ils/modules/acquisition/acq_receipts/jsonresolver.py b/rero_ils/modules/acquisition/acq_receipts/jsonresolver.py index 0b8fd90fc5..a4907ec0e4 100644 --- a/rero_ils/modules/acquisition/acq_receipts/jsonresolver.py +++ b/rero_ils/modules/acquisition/acq_receipts/jsonresolver.py @@ -23,13 +23,13 @@ from invenio_pidstore.models import PersistentIdentifier, PIDStatus -@jsonresolver.route('/api/acq_receipts/', host='bib.rero.ch') +@jsonresolver.route("/api/acq_receipts/", host="bib.rero.ch") def acq_receipt_resolver(pid): """Resolver for acquisition receipt record.""" - persistent_id = PersistentIdentifier.get('acre', pid) + persistent_id = PersistentIdentifier.get("acre", pid) if persistent_id.status == PIDStatus.REGISTERED: return dict(pid=persistent_id.pid_value) current_app.logger.error( - f'Doc resolver error: /api/acq_receipts/{pid} {persistent_id}' + f"Doc resolver error: /api/acq_receipts/{pid} {persistent_id}" ) - raise Exception('unable to resolve') + raise Exception("unable to resolve") diff --git a/rero_ils/modules/acquisition/acq_receipts/listener.py b/rero_ils/modules/acquisition/acq_receipts/listener.py index 4cfec99b5f..ef034d36f4 100644 --- a/rero_ils/modules/acquisition/acq_receipts/listener.py +++ b/rero_ils/modules/acquisition/acq_receipts/listener.py @@ -18,14 +18,22 @@ """Signals connector for acquisition receipt.""" -from rero_ils.modules.acquisition.acq_receipt_lines.dumpers import \ - AcqReceiptLineESDumper +from rero_ils.modules.acquisition.acq_receipt_lines.dumpers import ( + AcqReceiptLineESDumper, +) from .api import AcqReceiptsSearch -def enrich_acq_receipt_data(sender, json=None, record=None, index=None, - doc_type=None, arguments=None, **dummy_kwargs): +def enrich_acq_receipt_data( + sender, + json=None, + record=None, + index=None, + doc_type=None, + arguments=None, + **dummy_kwargs +): """Signal sent before a record is indexed. :param json: The dumped record dictionary which can be modified. @@ -33,12 +41,12 @@ def enrich_acq_receipt_data(sender, json=None, record=None, index=None, :param index: The index in which the record will be indexed. :param doc_type: The document type of the record. 
""" - if index.split('-')[0] == AcqReceiptsSearch.Meta.index: + if index.split("-")[0] == AcqReceiptsSearch.Meta.index: # add related order lines metadata - json['receipt_lines'] = [ + json["receipt_lines"] = [ receipt_line.dumps(dumper=AcqReceiptLineESDumper()) for receipt_line in record.get_receipt_lines() ] # other dynamic keys - json['total_amount'] = record.total_amount - json['quantity'] = record.total_item_quantity + json["total_amount"] = record.total_amount + json["quantity"] = record.total_item_quantity diff --git a/rero_ils/modules/acquisition/acq_receipts/models.py b/rero_ils/modules/acquisition/acq_receipts/models.py index 7ac839ade5..4aef4675b6 100644 --- a/rero_ils/modules/acquisition/acq_receipts/models.py +++ b/rero_ils/modules/acquisition/acq_receipts/models.py @@ -28,29 +28,30 @@ class AcqReceiptIdentifier(RecordIdentifier): """Sequence generator for acquisition receipt identifiers.""" - __tablename__ = 'acq_receipt_id' - __mapper_args__ = {'concrete': True} + __tablename__ = "acq_receipt_id" + __mapper_args__ = {"concrete": True} recid = db.Column( - db.BigInteger().with_variant(db.Integer, 'sqlite'), - primary_key=True, autoincrement=True, + db.BigInteger().with_variant(db.Integer, "sqlite"), + primary_key=True, + autoincrement=True, ) class AcqReceiptMetadata(db.Model, RecordMetadataBase): """AcqReceipt record metadata.""" - __tablename__ = 'acq_receipt_metadata' + __tablename__ = "acq_receipt_metadata" class AcqReceiptNoteType: """Type of acquisition receipt note.""" - STAFF = 'staff_note' + STAFF = "staff_note" class AcqReceiptLineCreationStatus: """Status following an attempt to create a receipt line.""" - SUCCESS = 'success' - FAILURE = 'failure' + SUCCESS = "success" + FAILURE = "failure" diff --git a/rero_ils/modules/acquisition/acq_receipts/permissions.py b/rero_ils/modules/acquisition/acq_receipts/permissions.py index 0de63b5c3e..0eebba1d1a 100644 --- a/rero_ils/modules/acquisition/acq_receipts/permissions.py +++ b/rero_ils/modules/acquisition/acq_receipts/permissions.py @@ -19,19 +19,22 @@ """Permissions for Acquisition receipt.""" from invenio_access import action_factory -from rero_ils.modules.permissions import AllowedByAction, \ - AllowedByActionRestrictByManageableLibrary, DisallowedIfRollovered, \ - RecordPermissionPolicy +from rero_ils.modules.permissions import ( + AllowedByAction, + AllowedByActionRestrictByManageableLibrary, + DisallowedIfRollovered, + RecordPermissionPolicy, +) from .api import AcqReceipt # Actions to control acquisition receipts resource policies -search_action = action_factory('acre-search') -read_action = action_factory('acre-read') -create_action = action_factory('acre-create') -update_action = action_factory('acre-update') -delete_action = action_factory('acre-delete') -access_action = action_factory('acre-access') +search_action = action_factory("acre-search") +read_action = action_factory("acre-read") +create_action = action_factory("acre-create") +update_action = action_factory("acre-update") +delete_action = action_factory("acre-delete") +access_action = action_factory("acre-access") class AcqReceiptPermissionPolicy(RecordPermissionPolicy): @@ -41,13 +44,13 @@ class AcqReceiptPermissionPolicy(RecordPermissionPolicy): can_read = [AllowedByActionRestrictByManageableLibrary(read_action)] can_create = [ AllowedByActionRestrictByManageableLibrary(create_action), - DisallowedIfRollovered(AcqReceipt) + DisallowedIfRollovered(AcqReceipt), ] can_update = [ AllowedByActionRestrictByManageableLibrary(update_action), - 
DisallowedIfRollovered(AcqReceipt) + DisallowedIfRollovered(AcqReceipt), ] can_delete = [ AllowedByActionRestrictByManageableLibrary(delete_action), - DisallowedIfRollovered(AcqReceipt) + DisallowedIfRollovered(AcqReceipt), ] diff --git a/rero_ils/modules/acquisition/acq_receipts/serializers.py b/rero_ils/modules/acquisition/acq_receipts/serializers.py index a3a563aaf3..17fe04092c 100644 --- a/rero_ils/modules/acquisition/acq_receipts/serializers.py +++ b/rero_ils/modules/acquisition/acq_receipts/serializers.py @@ -20,8 +20,9 @@ from invenio_records_rest.serializers.response import record_responsify -from rero_ils.modules.acquisition.acq_receipt_lines.dumpers import \ - AcqReceiptLineESDumper +from rero_ils.modules.acquisition.acq_receipt_lines.dumpers import ( + AcqReceiptLineESDumper, +) from rero_ils.modules.serializers import ACQJSONSerializer, RecordSchemaJSONV1 @@ -31,18 +32,19 @@ class AcqReceiptReroJSONSerializer(ACQJSONSerializer): def preprocess_record(self, pid, record, links_factory=None, **kwargs): """Prepare a record and persistent identifier for serialization.""" # add some dynamic key related to the record. - record['total_amount'] = record.total_amount - record['quantity'] = record.total_item_quantity - record['receipt_lines'] = [ + record["total_amount"] = record.total_amount + record["quantity"] = record.total_item_quantity + record["receipt_lines"] = [ receipt_line.dumps(dumper=AcqReceiptLineESDumper()) for receipt_line in record.get_receipt_lines() ] # add currency to avoid to load related order_line->order to get it - record['currency'] = record.order.get('currency') + record["currency"] = record.order.get("currency") return super().preprocess_record( - pid=pid, record=record, links_factory=links_factory, kwargs=kwargs) + pid=pid, record=record, links_factory=links_factory, kwargs=kwargs + ) _json = AcqReceiptReroJSONSerializer(RecordSchemaJSONV1) -json_acre_record = record_responsify(_json, 'application/rero+json') +json_acre_record = record_responsify(_json, "application/rero+json") diff --git a/rero_ils/modules/acquisition/acq_receipts/views.py b/rero_ils/modules/acquisition/acq_receipts/views.py index 07923ceb2d..c094d6d567 100644 --- a/rero_ils/modules/acquisition/acq_receipts/views.py +++ b/rero_ils/modules/acquisition/acq_receipts/views.py @@ -21,18 +21,14 @@ from flask import request as flask_request from rero_ils.modules.acquisition.acq_receipts.api import AcqReceipt -from rero_ils.modules.decorators import check_logged_as_librarian, \ - jsonify_error +from rero_ils.modules.decorators import check_logged_as_librarian, jsonify_error api_blueprint = Blueprint( - 'api_receipt', - __name__, - url_prefix='/acq_receipt', - template_folder='templates' + "api_receipt", __name__, url_prefix="/acq_receipt", template_folder="templates" ) -@api_blueprint.route('//lines', methods=['POST']) +@api_blueprint.route("//lines", methods=["POST"]) @check_logged_as_librarian @jsonify_error def lines(receipt_pid): @@ -54,6 +50,5 @@ def lines(receipt_pid): receipt_lines = flask_request.get_json() if not receipt_lines: abort(400, "Missing receipt lines data.") - created_receipt_lines = receipt.create_receipt_lines( - receipt_lines=receipt_lines) + created_receipt_lines = receipt.create_receipt_lines(receipt_lines=receipt_lines) return jsonify(response=created_receipt_lines) diff --git a/rero_ils/modules/acquisition/api.py b/rero_ils/modules/acquisition/api.py index be90686e65..a01eb855e2 100644 --- a/rero_ils/modules/acquisition/api.py +++ b/rero_ils/modules/acquisition/api.py @@ 
-28,8 +28,8 @@ class AcquisitionIlsRecord(IlsRecord, ABC): def __str__(self): """Human-readable record string representation.""" - output = f'[{self.provider.pid_type}#{self.pid}]' - if 'name' in self: + output = f"[{self.provider.pid_type}#{self.pid}]" + if "name" in self: output += f" {self['name']}" return output diff --git a/rero_ils/modules/acquisition/budgets/api.py b/rero_ils/modules/acquisition/budgets/api.py index 98c55a0333..8de8a33253 100644 --- a/rero_ils/modules/acquisition/budgets/api.py +++ b/rero_ils/modules/acquisition/budgets/api.py @@ -18,12 +18,13 @@ """API for manipulating budgets.""" + +import contextlib from functools import partial from elasticsearch import NotFoundError -from rero_ils.modules.acquisition.acq_accounts.api import AcqAccount, \ - AcqAccountsSearch +from rero_ils.modules.acquisition.acq_accounts.api import AcqAccount, AcqAccountsSearch from rero_ils.modules.acquisition.api import AcquisitionIlsRecord from rero_ils.modules.api import IlsRecordsIndexer, IlsRecordsSearch from rero_ils.modules.fetchers import id_fetcher @@ -36,9 +37,7 @@ # provider BudgetProvider = type( - 'BudgetProvider', - (Provider,), - dict(identifier=BudgetIdentifier, pid_type='budg') + "BudgetProvider", (Provider,), dict(identifier=BudgetIdentifier, pid_type="budg") ) # minter budget_id_minter = partial(id_minter, provider=BudgetProvider) @@ -52,9 +51,9 @@ class BudgetsSearch(IlsRecordsSearch): class Meta: """Search only on budget index.""" - index = 'budgets' + index = "budgets" doc_types = None - fields = ('*', ) + fields = ("*",) facets = {} default_filter = None @@ -67,30 +66,24 @@ class Budget(AcquisitionIlsRecord): fetcher = budget_id_fetcher provider = BudgetProvider model_cls = BudgetMetadata - pids_exist_check = { - 'required': { - 'org': 'organisation' - } - } + pids_exist_check = {"required": {"org": "organisation"}} @property def name(self): """Shortcut for budget name.""" - return self.get('name') + return self.get("name") @property def is_active(self): """Check if the budget should be considered as active.""" - return self.get('is_active', False) + return self.get("is_active", False) def get_related_accounts(self): """Get account related to this budget. :rtype: an `AcqAccount` generator """ - query = AcqAccountsSearch() \ - .filter('term', budget__pid=self.pid) \ - .source(False) + query = AcqAccountsSearch().filter("term", budget__pid=self.pid).source(False) for hit in query.scan(): yield AcqAccount.get_record(hit.meta.id) @@ -101,10 +94,10 @@ def get_links_to_me(self, get_pids=False): if False count of linked records """ links = {} - query = AcqAccountsSearch().filter('term', budget__pid=self.pid) + query = AcqAccountsSearch().filter("term", budget__pid=self.pid) acq_accounts = sorted_pids(query) if get_pids else query.count() if acq_accounts: - links['acq_accounts'] = acq_accounts + links["acq_accounts"] = acq_accounts return links def reasons_not_to_delete(self): @@ -112,20 +105,20 @@ def reasons_not_to_delete(self): cannot_delete = {} # Note: not possible to delete records attached to rolled_over budget. 
if not self.is_active: - cannot_delete['links'] = {'rolled_over': True} + cannot_delete["links"] = {"rolled_over": True} return cannot_delete if others := self.reasons_to_keep(): - cannot_delete['others'] = others + cannot_delete["others"] = others if links := self.get_links_to_me(): - cannot_delete['links'] = links + cannot_delete["links"] = links return cannot_delete def reasons_to_keep(self): """Reasons aside from record_links to keep a budget.""" others = {} organisation = Organisation.get_record_by_pid(self.organisation_pid) - if organisation.get('current_budget_pid') == self.pid: - others['is_default'] = True + if organisation.get("current_budget_pid") == self.pid: + others["is_default"] = True return others @@ -144,7 +137,7 @@ def bulk_index(self, record_id_iterator): :param record_id_iterator: Iterator yielding record UUIDs. """ - super().bulk_index(record_id_iterator, doc_type='budg') + super().bulk_index(record_id_iterator, doc_type="budg") @classmethod def _check_is_active_changed(cls, record): @@ -155,10 +148,8 @@ def _check_is_active_changed(cls, record): :param record: the record to index. """ - try: + with contextlib.suppress(NotFoundError): original_record = BudgetsSearch().get_record_by_pid(record.pid) - if record.is_active != original_record['is_active']: + if record.is_active != original_record["is_active"]: for account in record.get_related_accounts(): account.reindex() - except NotFoundError: - pass diff --git a/rero_ils/modules/acquisition/budgets/jsonresolver.py b/rero_ils/modules/acquisition/budgets/jsonresolver.py index 551b022865..9061f8d6f3 100644 --- a/rero_ils/modules/acquisition/budgets/jsonresolver.py +++ b/rero_ils/modules/acquisition/budgets/jsonresolver.py @@ -22,13 +22,11 @@ from invenio_pidstore.models import PersistentIdentifier, PIDStatus -@jsonresolver.route('/api/budgets/', host='bib.rero.ch') +@jsonresolver.route("/api/budgets/", host="bib.rero.ch") def budget_resolver(pid): """Resolver for budget record.""" - persistent_id = PersistentIdentifier.get('budg', pid) + persistent_id = PersistentIdentifier.get("budg", pid) if persistent_id.status == PIDStatus.REGISTERED: return dict(pid=persistent_id.pid_value) - current_app.logger.error( - f'Doc resolver error: /api/budgets/{pid} {persistent_id}' - ) - raise Exception('unable to resolve') + current_app.logger.error(f"Doc resolver error: /api/budgets/{pid} {persistent_id}") + raise Exception("unable to resolve") diff --git a/rero_ils/modules/acquisition/budgets/listener.py b/rero_ils/modules/acquisition/budgets/listener.py index e2ce9e8645..4654dc3e06 100644 --- a/rero_ils/modules/acquisition/budgets/listener.py +++ b/rero_ils/modules/acquisition/budgets/listener.py @@ -18,8 +18,10 @@ """Signals connector for Budgets.""" -from rero_ils.modules.acquisition.acq_accounts.api import AcqAccountsIndexer, \ - AcqAccountsSearch +from rero_ils.modules.acquisition.acq_accounts.api import ( + AcqAccountsIndexer, + AcqAccountsSearch, +) from rero_ils.modules.acquisition.budgets.api import Budget from rero_ils.modules.tasks import process_bulk_queue @@ -28,14 +30,13 @@ def budget_is_active_changed(sender, record=None, *args, **kwargs): """Reindex related account if is_active field changes.""" if isinstance(record, Budget): ori_record = Budget.get_record_by_pid(record.pid) - if ori_record['is_active'] != record['is_active']: - # the `is_active` flag changed, we need to reindex all accounts - # related to this budget - uuids = [] - search = AcqAccountsSearch()\ - .filter('term', budget__pid=record.pid)\ - 
.source().scan() - for hit in search: - uuids.append(hit.meta.id) + if ori_record["is_active"] != record["is_active"]: + search = ( + AcqAccountsSearch() + .filter("term", budget__pid=record.pid) + .source() + .scan() + ) + uuids = [hit.meta.id for hit in search] AcqAccountsIndexer().bulk_index(uuids) process_bulk_queue.apply_async() diff --git a/rero_ils/modules/acquisition/budgets/models.py b/rero_ils/modules/acquisition/budgets/models.py index c4c2320e0c..c4097de943 100644 --- a/rero_ils/modules/acquisition/budgets/models.py +++ b/rero_ils/modules/acquisition/budgets/models.py @@ -27,16 +27,17 @@ class BudgetIdentifier(RecordIdentifier): """Sequence generator for Budget identifiers.""" - __tablename__ = 'budget_id' - __mapper_args__ = {'concrete': True} + __tablename__ = "budget_id" + __mapper_args__ = {"concrete": True} recid = db.Column( - db.BigInteger().with_variant(db.Integer, 'sqlite'), - primary_key=True, autoincrement=True, + db.BigInteger().with_variant(db.Integer, "sqlite"), + primary_key=True, + autoincrement=True, ) class BudgetMetadata(db.Model, RecordMetadataBase): """Budget record metadata.""" - __tablename__ = 'budget_metadata' + __tablename__ = "budget_metadata" diff --git a/rero_ils/modules/acquisition/budgets/permissions.py b/rero_ils/modules/acquisition/budgets/permissions.py index 2cab4e488e..1895389d29 100644 --- a/rero_ils/modules/acquisition/budgets/permissions.py +++ b/rero_ils/modules/acquisition/budgets/permissions.py @@ -19,16 +19,19 @@ """Permissions for Budgets.""" from invenio_access import action_factory -from rero_ils.modules.permissions import AllowedByAction, \ - AllowedByActionRestrictByOrganisation, RecordPermissionPolicy +from rero_ils.modules.permissions import ( + AllowedByAction, + AllowedByActionRestrictByOrganisation, + RecordPermissionPolicy, +) # Actions to control budget resource policies -search_action = action_factory('budg-search') -read_action = action_factory('budg-read') -create_action = action_factory('budg-create') -update_action = action_factory('budg-update') -delete_action = action_factory('budg-delete') -access_action = action_factory('budg-access') +search_action = action_factory("budg-search") +read_action = action_factory("budg-read") +create_action = action_factory("budg-create") +update_action = action_factory("budg-update") +delete_action = action_factory("budg-delete") +access_action = action_factory("budg-access") class BudgetPermissionPolicy(RecordPermissionPolicy): diff --git a/rero_ils/modules/acquisition/budgets/serializers.py b/rero_ils/modules/acquisition/budgets/serializers.py index d24381681a..e61dd78062 100644 --- a/rero_ils/modules/acquisition/budgets/serializers.py +++ b/rero_ils/modules/acquisition/budgets/serializers.py @@ -23,4 +23,4 @@ from rero_ils.modules.serializers import ACQJSONSerializer, RecordSchemaJSONV1 _json = ACQJSONSerializer(RecordSchemaJSONV1) -json_budg_record = record_responsify(_json, 'application/rero+json') +json_budg_record = record_responsify(_json, "application/rero+json") diff --git a/rero_ils/modules/acquisition/cli.py b/rero_ils/modules/acquisition/cli.py index bcb6708dbb..9db1f166c5 100644 --- a/rero_ils/modules/acquisition/cli.py +++ b/rero_ils/modules/acquisition/cli.py @@ -33,61 +33,65 @@ def acquisition(): """Acquisition management commands.""" -@acquisition.command('rollover') -@click.argument('origin_budget_pid', - type=str) -@click.option('-d', '--destination', - 'dest_budget_pid', - type=str, - help='Destination budget pid') -@click.option('-i', '--interactive', - 
'interactive', - is_flag=True, - default=False, - help='interactive mode') -@click.option('--new-budget', - is_flag=True, - default=False, - help='Create a new destination budget resource') -@click.option('--budget-name', - 'budget_name', - help='The new budget name') -@click.option('--budget-start-date', - 'budget_start_date', - help='The new budget start-date') -@click.option('--budget-end-date', - 'budget_end_date', - help='The new budget end-date') +@acquisition.command("rollover") +@click.argument("origin_budget_pid", type=str) +@click.option( + "-d", "--destination", "dest_budget_pid", type=str, help="Destination budget pid" +) +@click.option( + "-i", + "--interactive", + "interactive", + is_flag=True, + default=False, + help="interactive mode", +) +@click.option( + "--new-budget", + is_flag=True, + default=False, + help="Create a new destination budget resource", +) +@click.option("--budget-name", "budget_name", help="The new budget name") +@click.option( + "--budget-start-date", "budget_start_date", help="The new budget start-date" +) +@click.option("--budget-end-date", "budget_end_date", help="The new budget end-date") @with_appcontext def rollover( - origin_budget_pid, dest_budget_pid, + origin_budget_pid, + dest_budget_pid, interactive, - new_budget, budget_name, budget_start_date, budget_end_date + new_budget, + budget_name, + budget_start_date, + budget_end_date, ): """CLI to run rollover process between two acquisition budgets.""" # Check parameters if not dest_budget_pid and not new_budget: - raise UsageError('destination budget OR new budget is required') + raise UsageError("destination budget OR new budget is required") original_budget = Budget.get_record_by_pid(origin_budget_pid) # Try to create the destination budget if required if new_budget: if not all([budget_name, budget_start_date, budget_end_date]): - raise UsageError('budget name, start-date, end-date are required') - org_ref = get_ref_for_pid('org', original_budget.organisation_pid) + raise UsageError("budget name, start-date, end-date are required") + org_ref = get_ref_for_pid("org", original_budget.organisation_pid) data = { - 'organisation': {'$ref': org_ref}, - 'is_active': False, - 'name': budget_name, - 'start_date': budget_start_date, - 'end_date': budget_end_date + "organisation": {"$ref": org_ref}, + "is_active": False, + "name": budget_name, + "start_date": budget_start_date, + "end_date": budget_end_date, } destination_budget = Budget.create(data, dbcommit=True, reindex=True) if not destination_budget: - raise BudgetDoesNotExist('Unable to create new budget') + raise BudgetDoesNotExist("Unable to create new budget") dest_budget_pid = destination_budget.pid destination_budget = Budget.get_record_by_pid(dest_budget_pid) rollover_runner = AcqRollover( - original_budget, destination_budget, is_interactive=interactive) + original_budget, destination_budget, is_interactive=interactive + ) rollover_runner.run() diff --git a/rero_ils/modules/acquisition/dumpers.py b/rero_ils/modules/acquisition/dumpers.py index 230ad24f96..a46f48d71e 100644 --- a/rero_ils/modules/acquisition/dumpers.py +++ b/rero_ils/modules/acquisition/dumpers.py @@ -21,11 +21,15 @@ from invenio_records.dumpers import Dumper from rero_ils.modules.commons.dumpers import MultiDumper -from rero_ils.modules.commons.identifiers import IdentifierType, \ - QualifierIdentifierRenderer +from rero_ils.modules.commons.identifiers import ( + IdentifierType, + QualifierIdentifierRenderer, +) from rero_ils.modules.documents.dumpers import TitleDumper -from 
rero_ils.modules.documents.extensions import \ - ProvisionActivitiesExtension, SeriesStatementExtension +from rero_ils.modules.documents.extensions import ( + ProvisionActivitiesExtension, + SeriesStatementExtension, +) class DocumentAcquisitionDumper(Dumper): @@ -38,27 +42,32 @@ def dump(self, record, data): :param data: The initial dump data passed in by ``record.dumps()``. """ # provision activity ------------------------ - provision_activities = filter(None, [ - ProvisionActivitiesExtension.format_text(activity) - for activity in record.get('provisionActivity', []) - ]) + provision_activities = filter( + None, + [ + ProvisionActivitiesExtension.format_text(activity) + for activity in record.get("provisionActivity", []) + ], + ) provision_activity = next(iter(provision_activities or []), None) if provision_activity: - provision_activity = provision_activity[0]['value'] + provision_activity = provision_activity[0]["value"] # series statement -------------------------- - series_statements = filter(None, [ - SeriesStatementExtension.format_text(statement) - for statement in record.get('seriesStatement', []) - ]) + series_statements = filter( + None, + [ + SeriesStatementExtension.format_text(statement) + for statement in record.get("seriesStatement", []) + ], + ) series_statement = next(iter(series_statements or []), None) if series_statement: - series_statement = series_statement[0]['value'] + series_statement = series_statement[0]["value"] # identifiers ------------------------------- identifiers = record.get_identifiers( - filters=[IdentifierType.ISBN, IdentifierType.EAN], - with_alternatives=True + filters=[IdentifierType.ISBN, IdentifierType.EAN], with_alternatives=True ) # keep only EAN identifiers - only EAN identifiers should be included # into acquisition notification. @@ -69,19 +78,23 @@ def dump(self, record, data): if identifier.type == IdentifierType.EAN ] - data.update({ - 'identifiers': identifiers, - 'provision_activity': provision_activity, - 'serie_statement': series_statement - }) + data.update( + { + "identifiers": identifiers, + "provision_activity": provision_activity, + "serie_statement": series_statement, + } + ) data = {k: v for k, v in data.items() if v} return data # specific acquisition dumper -document_acquisition_dumper = MultiDumper(dumpers=[ - # make a fresh copy - Dumper(), - TitleDumper(), - DocumentAcquisitionDumper() -]) +document_acquisition_dumper = MultiDumper( + dumpers=[ + # make a fresh copy + Dumper(), + TitleDumper(), + DocumentAcquisitionDumper(), + ] +) diff --git a/rero_ils/modules/acquisition/exceptions.py b/rero_ils/modules/acquisition/exceptions.py index ea3370929f..e5581614d1 100644 --- a/rero_ils/modules/acquisition/exceptions.py +++ b/rero_ils/modules/acquisition/exceptions.py @@ -39,7 +39,7 @@ def __init__(self, pid_value, *args, **kwargs): def __str__(self): """Exception as string.""" - return f'Budget#{self.pid_value} is inactive' + return f"Budget#{self.pid_value} is inactive" class IncompatibleBudgetError(RolloverError): @@ -53,7 +53,7 @@ def __init__(self, pid1_value, pid2_value, *args, **kwargs): def __str__(self): """Exception as string.""" - return f'Budget#{self.pid1} isn\' compatible with Budget#{self.pid2}' + return f"Budget#{self.pid1} isn't compatible with Budget#{self.pid2}" class BudgetNotEmptyError(RolloverError): @@ -66,4 +66,4 @@ def __init__(self, pid, *args, **kwargs): def __str__(self): """Exception as string.""" - return f'Budget#{self.pid} are some linked children resources.'
+ return f"Budget#{self.pid} are some linked children resources." diff --git a/rero_ils/modules/acquisition/rollover.py b/rero_ils/modules/acquisition/rollover.py index dc1563d585..346ba17979 100644 --- a/rero_ils/modules/acquisition/rollover.py +++ b/rero_ils/modules/acquisition/rollover.py @@ -26,22 +26,23 @@ from elasticsearch_dsl import Q from flask import current_app -from rero_ils.modules.acquisition.acq_accounts.api import AcqAccount, \ - AcqAccountsSearch -from rero_ils.modules.acquisition.acq_accounts.utils import \ - sort_accounts_as_tree -from rero_ils.modules.acquisition.acq_order_lines.api import AcqOrderLine, \ - AcqOrderLinesSearch -from rero_ils.modules.acquisition.acq_order_lines.models import \ - AcqOrderLineStatus -from rero_ils.modules.acquisition.acq_orders.api import AcqOrder, \ - AcqOrdersSearch -from rero_ils.modules.acquisition.acq_orders.models import AcqOrderStatus, \ - AcqOrderType +from rero_ils.modules.acquisition.acq_accounts.api import AcqAccount, AcqAccountsSearch +from rero_ils.modules.acquisition.acq_accounts.utils import sort_accounts_as_tree +from rero_ils.modules.acquisition.acq_order_lines.api import ( + AcqOrderLine, + AcqOrderLinesSearch, +) +from rero_ils.modules.acquisition.acq_order_lines.models import AcqOrderLineStatus +from rero_ils.modules.acquisition.acq_orders.api import AcqOrder, AcqOrdersSearch +from rero_ils.modules.acquisition.acq_orders.models import AcqOrderStatus, AcqOrderType from rero_ils.modules.acquisition.budgets.api import Budget -from rero_ils.modules.acquisition.exceptions import BudgetDoesNotExist, \ - BudgetNotEmptyError, InactiveBudgetError, IncompatibleBudgetError, \ - RolloverError +from rero_ils.modules.acquisition.exceptions import ( + BudgetDoesNotExist, + BudgetNotEmptyError, + InactiveBudgetError, + IncompatibleBudgetError, + RolloverError, +) from rero_ils.modules.libraries.api import Library from rero_ils.modules.libraries.models import AccountTransferOption from rero_ils.modules.organisations.api import Organisation @@ -111,9 +112,15 @@ class AcqRollover: """ - def __init__(self, original_budget, destination_budget=None, - logging_config=None, is_interactive=True, - propagate_errors=False, **kwargs): + def __init__( + self, + original_budget, + destination_budget=None, + logging_config=None, + is_interactive=True, + propagate_errors=False, + **kwargs, + ): """Initialization. :param original_budget: the `Budget` resource related to resources to @@ -141,10 +148,10 @@ def __init__(self, original_budget, destination_budget=None, self.propagate_errors = propagate_errors # Set special logging configuration for rollover process - default_config = current_app.config.get('ROLLOVER_LOGGING_CONFIG') + default_config = current_app.config.get("ROLLOVER_LOGGING_CONFIG") logging.config.dictConfig(logging_config or default_config) self.logger = logging.getLogger(__name__) - self.logger.info('ROLLOVER PROCESS ==================================') + self.logger.info("ROLLOVER PROCESS ==================================") self.original_budget = original_budget if destination_budget is None: @@ -167,7 +174,7 @@ def run(self): """Run the rollover process.""" log = self.logger try: - log.info('start running....') + log.info("start running....") log.info("parameters ::") log.info(f"\torigin : {self.original_budget}") log.info(f"\tdestination : {self.destination_budget}") @@ -181,25 +188,27 @@ def run(self): # rollover setting. 
orig_accounts = AcqRollover._get_accounts(self.original_budget.pid) if not orig_accounts: - raise RolloverError('Unable to find any account to rollovered') + raise RolloverError("Unable to find any account to rollover") log.info("original accounts ::") columns = [ - ('ACCOUNT', 60), # title, max_length, alignment - ('AMOUNT', 16, 'right'), - ('ENCUMBRANCE', 16, 'right'), - ('MIGRATION_SETTING', 30) + ("ACCOUNT", 60), # title, max_length, alignment + ("AMOUNT", 16, "right"), + ("ENCUMBRANCE", 16, "right"), + ("MIGRATION_SETTING", 30), ] rows = [] for account in orig_accounts: - padding = ' ' * account.depth + padding = " " * account.depth label = f"[#{account.pid}] {account.name}" - rows.append(( - padding + label, - str(account.get('allocated_amount')), - str(account.encumbrance_amount[0]), - self._get_rollover_migration_setting(account) - )) - self._draw_data_table(columns, rows, padding=' ') + rows.append( + ( + padding + label, + str(account.get("allocated_amount")), + str(account.encumbrance_amount[0]), + self._get_rollover_migration_setting(account), + ) + ) + self._draw_data_table(columns, rows, padding=" ") # STEP#2 :: Get user confirmation # If interactive mode is activated, ask user for a confirmation # confirm this table, a random password key will be asked to # avoid bad/quick confirmation click if self.is_interactive: - if not self._confirm('Are you agree ?', default="no"): - raise RolloverError("User doesn\'t agree") - key_confirm = ''.join(random.choices(string.ascii_letters, - k=10)) - log.info("To continue, please enter the confirmation " - f"key [{key_confirm}] :: ") + if not self._confirm("Do you agree?", default="no"): + raise RolloverError("User doesn't agree") + key_confirm = "".join(random.choices(string.ascii_letters, k=10)) + log.info( + "To continue, please enter the confirmation " + f"key [{key_confirm}] :: " + ) key = input() if key != key_confirm: raise RolloverError("Confirmation key mismatch") @@ -222,9 +232,10 @@ # * Get orders to migrate. # * Get orders lines to migrate. accounts = { - account.pid: account for account in orig_accounts - if self._get_rollover_migration_setting(account) != - AccountTransferOption.NO_TRANSFER + account.pid: account + for account in orig_accounts + if self._get_rollover_migration_setting(account) + != AccountTransferOption.NO_TRANSFER } account_pids = list(accounts.keys()) orders = { @@ -233,12 +244,13 @@ } order_pids = list(orders.keys()) to_migrate = { - 'accounts': accounts.values(), - 'orders': orders.values(), - 'order_lines': AcqRollover._get_opened_order_lines( - order_pids, account_pids) + "accounts": accounts.values(), + "orders": orders.values(), + "order_lines": AcqRollover._get_opened_order_lines( + order_pids, account_pids + ), } - log.info('Resources to migrate (according rollover settings) :') + log.info("Resources to migrate (according to rollover settings) :") log.info(f"\t#AcqAccount : {len(to_migrate['accounts'])}") log.info(f"\t#AcqOrder : {len(to_migrate['orders'])}") log.info(f"\t#AcqOrderLine : {len(to_migrate['order_lines'])}") @@ -247,54 +259,56 @@ # Try to validate acquisition object that will be rollovered.
# This check should prevent any problem during rollover process # that could cause a huge and slow rollover aborting process - log.info('Starting data validation process ...') + log.info("Starting data validation process ...") self._validate_data_to_migrate(to_migrate) # STEP#5 :: Proceed to rollover - log.info('Starting resources migrations ...') - self._migrate_accounts(to_migrate['accounts']) - self._migrate_orders(to_migrate['orders']) - self._migrate_order_lines(to_migrate['order_lines']) + log.info("Starting resources migrations ...") + self._migrate_accounts(to_migrate["accounts"]) + self._migrate_orders(to_migrate["orders"]) + self._migrate_order_lines(to_migrate["order_lines"]) # STEP#6 :: compare new budget account table with previous version. log.info("Completed process comparison table ::") columns = [ - ('ACCOUNT', 60), # title, max_length - ('AMOUNT', 16, 'right'), - ('ENCUMBRANCE', 16, 'right'), - ('SUCCESS ?', 11, 'center'), - ('NEW AMOUNT', 20, 'right'), - ('NEW ENCUMBRANCE', 20, 'right'), + ("ACCOUNT", 60), # title, max_length + ("AMOUNT", 16, "right"), + ("ENCUMBRANCE", 16, "right"), + ("SUCCESS ?", 11, "center"), + ("NEW AMOUNT", 20, "right"), + ("NEW ENCUMBRANCE", 20, "right"), ] rows = [] errors = 0 for account in accounts.values(): - padding = ' ' * account.depth + padding = " " * account.depth label = f"[#{account.pid}] {account.name}" - n_acc_pid = self._mapping_table['accounts'][account.pid] + n_acc_pid = self._mapping_table["accounts"][account.pid] new_acc = AcqAccount.get_record_by_pid(n_acc_pid) - rollover_status = 'OK' - if account.encumbrance_amount[0] != \ - new_acc.encumbrance_amount[0]: - rollover_status = '!! ERR !!' + rollover_status = "OK" + if account.encumbrance_amount[0] != new_acc.encumbrance_amount[0]: + rollover_status = "!! ERR !!" errors += 1 - rows.append(( - padding + label, - str(account.get('allocated_amount')), - str(account.encumbrance_amount[0]), - rollover_status, - str(new_acc.get('allocated_amount')), - str(new_acc.encumbrance_amount[0]), - )) - self._draw_data_table(columns, rows, padding=' ') + rows.append( + ( + padding + label, + str(account.get("allocated_amount")), + str(account.encumbrance_amount[0]), + rollover_status, + str(new_acc.get("allocated_amount")), + str(new_acc.encumbrance_amount[0]), + ) + ) + self._draw_data_table(columns, rows, padding=" ") if errors: raise RolloverError(f"{errors} detected on completion table.") # STEP#7 :: user confirmation that all seems correct for it - if self.is_interactive: - if not self._confirm('Are you agree ?', default="no"): - raise RolloverError("User doesn\'t agree") + if self.is_interactive and not self._confirm( + "Do you agree?", default="no" + ): + raise RolloverError("User doesn't agree") self._update_budgets(False, True) self._update_organisation() log.info("Rollover complete....
it's time for 🍺🍺🍺🍹 party !") @@ -318,7 +332,7 @@ def _validate_data_to_migrate(self, data): # Testing acquisition order lines # - the unreceived_quantity for each order line should be > 0 log.info(" Testing order lines ...") - for line in data.get('order_lines', []): + for line in data.get("order_lines", []): if line.unreceived_quantity == 0: log.warning(f"\t* Unreceived quantity for {str(line)} is 0 !") error_count += 1 @@ -326,8 +340,9 @@ def _validate_data_to_migrate(self, data): log.warning(f"\t* {str(line)} related to harvested document !") error_count += 1 if error_count: - raise RolloverError(f"Data validation failed : {error_count} " - f"error(s) found") + raise RolloverError( + f"Data validation failed : {error_count} " f"error(s) found" + ) def _migrate_accounts(self, accounts): """Migrate a list of account to the destination budget. @@ -337,37 +352,39 @@ def _migrate_accounts(self, accounts): """ log = self.logger log.info(" Migrating accounts ...") - self._mapping_table['accounts'] = {} - new_budget_ref = get_ref_for_pid('budg', self.destination_budget.pid) + self._mapping_table["accounts"] = {} + new_budget_ref = get_ref_for_pid("budg", self.destination_budget.pid) for idx, acc in enumerate(accounts, 1): data = deepcopy(acc) - data['budget']['$ref'] = new_budget_ref + data["budget"]["$ref"] = new_budget_ref # Try to find the new parent account (checking the temporary # mapping table). This is possible because we sorted the accounts # in hierarchical tree, so root/parent account should be migrated # before children accounts. if old_parent_pid := acc.parent_pid: - p_pid = self._mapping_table.get('accounts').get(old_parent_pid) - if not p_pid: + if p_pid := self._mapping_table.get("accounts").get(old_parent_pid): + data["parent"]["$ref"] = get_ref_for_pid("acac", p_pid) + else: raise RolloverError( - f'Unable to find new parent account for {str(acc)}' - f' : parent pid was {old_parent_pid}' + f"Unable to find new parent account for {str(acc)}" + f" : parent pid was {old_parent_pid}" ) - data['parent']['$ref'] = get_ref_for_pid('acac', p_pid) # Create the new account. # If create failed :: raise an error. # If success :: fill the mapping table AND the stack of new obj. try: new_account = AcqAccount.create( - data, dbcommit=True, reindex=True, delete_pid=True) + data, dbcommit=True, reindex=True, delete_pid=True + ) self._stack.append(new_account) - self._mapping_table['accounts'][acc.pid] = new_account.pid + self._mapping_table["accounts"][acc.pid] = new_account.pid old_label = truncate(str(acc), 55).ljust(57) new_label = truncate(str(new_account), 55) log.info(f"\t* (#{idx}) migrate {old_label} --> {new_label}") except Exception as e: - raise RolloverError(f'Account creation failed on ' - f'[acac#{acc.pid}] :: {str(e)}') from e + raise RolloverError( + f"Account creation failed on " f"[acac#{acc.pid}] :: {str(e)}" + ) from e def _migrate_orders(self, orders): """Migrate a list of orders. @@ -377,28 +394,28 @@ def _migrate_orders(self, orders): """ log = self.logger log.info(" Migrating orders ...") - self._mapping_table['orders'] = {} + self._mapping_table["orders"] = {} for idx, order in enumerate(orders, 1): data = deepcopy(order) # Add a relation between the new order and the previous one. # This is useful to navigate in order history. - data['previousVersion'] = { - '$ref': get_ref_for_pid('acor', order.pid) - } + data["previousVersion"] = {"$ref": get_ref_for_pid("acor", order.pid)} # Create the new order. # If create failed :: raise an error. 
# If success :: fill the mapping table AND the stack of new obj. try: new_order = AcqOrder.create( - data, dbcommit=True, reindex=True, delete_pid=True) + data, dbcommit=True, reindex=True, delete_pid=True + ) self._stack.append(new_order) - self._mapping_table['orders'][order.pid] = new_order.pid + self._mapping_table["orders"][order.pid] = new_order.pid old_label = truncate(str(order), 55).ljust(57) new_label = truncate(str(new_order), 55) log.info(f"\t* (#{idx}) migrate {old_label} --> {new_label}") except Exception as e: - raise RolloverError(f'Order creation failed on ' - f'[acor#{order.pid}] :: {str(e)}') from e + raise RolloverError( + f"Order creation failed on " f"[acor#{order.pid}] :: {str(e)}" + ) from e def _migrate_order_lines(self, order_lines): """Migrate a list of order lines. @@ -408,45 +425,47 @@ def _migrate_order_lines(self, order_lines): """ log = self.logger log.info(" Migrating order lines ...") - self._mapping_table['order_lines'] = {} + self._mapping_table["order_lines"] = {} for idx, line in enumerate(order_lines, 1): data = deepcopy(line) # Try to find the new parent pids (checking the temporary # mapping table). o_order_pid = line.order_pid - p_order_pid = self._mapping_table.get('orders').get(o_order_pid) + p_order_pid = self._mapping_table.get("orders").get(o_order_pid) if not p_order_pid: raise RolloverError( - f'Unable to find new parent order for {str(line)}' - f' : parent pid was {p_order_pid}' + f"Unable to find new parent order for {str(line)}" + f" : parent pid was {p_order_pid}" ) o_acc_pid = line.account_pid - p_acc_pid = self._mapping_table.get('accounts').get(o_acc_pid) + p_acc_pid = self._mapping_table.get("accounts").get(o_acc_pid) if not p_acc_pid: raise RolloverError( - f'Unable to find new parent account for {str(line)}' - f' : parent pid was {p_acc_pid}' + f"Unable to find new parent account for {str(line)}" + f" : parent pid was {p_acc_pid}" ) - data['acq_order']['$ref'] = get_ref_for_pid('acor', p_order_pid) - data['acq_account']['$ref'] = get_ref_for_pid('acac', p_acc_pid) + data["acq_order"]["$ref"] = get_ref_for_pid("acor", p_order_pid) + data["acq_account"]["$ref"] = get_ref_for_pid("acac", p_acc_pid) # Update specific order line fields - data['quantity'] = line.unreceived_quantity - del data['total_amount'] + data["quantity"] = line.unreceived_quantity + del data["total_amount"] # Create the new order line. # If create failed :: raise an error. # If success :: fill the mapping table AND the stack of new obj. try: new_line = AcqOrderLine.create( - data, dbcommit=True, reindex=True, delete_pid=True) + data, dbcommit=True, reindex=True, delete_pid=True + ) self._stack.append(new_line) - self._mapping_table['order_lines'][line.pid] = new_line.pid + self._mapping_table["order_lines"][line.pid] = new_line.pid old_label = truncate(str(line), 55).ljust(57) new_label = truncate(str(new_line), 55) log.info(f"\t* (#{idx}) migrate {old_label} --> {new_label}") except Exception as e: - raise RolloverError(f'Order line creation failed on ' - f'[acol#{line.pid}] :: {str(e)}') from e + raise RolloverError( + f"Order line creation failed on " f"[acol#{line.pid}] :: {str(e)}" + ) from e def _update_budgets(self, orig_state=False, dest_state=False): """Update rollover budgets to activate/deactivate them. 
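The three `_migrate_*` methods above share one idiom: deep-copy the source record, rewrite its `$ref` links through the temporary `self._mapping_table`, create the new record with `dbcommit=True, reindex=True, delete_pid=True`, and push it onto `self._stack` so a failed rollover can be undone in reverse order. A minimal self-contained sketch of that idiom follows; `make_ref` and `create` are simplified stand-ins for `get_ref_for_pid()` and the `*.create()` classmethods, not the rero_ils API itself:

from copy import deepcopy


def migrate(records, mapping, stack, make_ref, create):
    """Remap records onto their new parents, keeping an undo stack."""
    for record in records:
        data = deepcopy(record)
        if old_parent := data.get("parent_pid"):
            # Parents are migrated before children (accounts were sorted
            # as a tree), so the mapping already knows the new parent pid.
            data["parent"] = {"$ref": make_ref(mapping[old_parent])}
        new_record = create(data)
        stack.append(new_record)  # consumed in reverse order on rollback
        mapping[record["pid"]] = new_record["pid"]
    return mapping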
@@ -456,15 +475,15 @@ def _update_budgets(self, orig_state=False, dest_state=False): """ self.logger.info("\tUpdating budget resources...") orig_data = deepcopy(self.original_budget) - orig_data['is_active'] = orig_state + orig_data["is_active"] = orig_state self.original_budget.update(orig_data, dbcommit=True, reindex=True) - state_str = 'activated' if orig_state else 'deactivated' + state_str = "activated" if orig_state else "deactivated" self.logger.info(f"\t * Original budget is now {state_str}") dest_data = deepcopy(self.destination_budget) - dest_data['is_active'] = dest_state + dest_data["is_active"] = dest_state self.destination_budget.update(dest_data, dbcommit=True, reindex=True) - state_str = 'activated' if dest_state else 'deactivated' + state_str = "activated" if dest_state else "deactivated" self.logger.info(f"\t * Destination budget is now {state_str}") def _update_organisation(self): @@ -472,10 +491,12 @@ def _update_organisation(self): self.logger.info("\tUpdating organisation current budget...") org_pid = self.destination_budget.organisation_pid org = Organisation.get_record_by_pid(org_pid) - org['current_budget_pid'] = self.destination_budget.pid + org["current_budget_pid"] = self.destination_budget.pid org = org.update(org, dbcommit=True, reindex=True) - self.logger.info(f"\t * Current organisation budget is now " - f"{org.get('current_budget_pid')}") + self.logger.info( + f"\t * Current organisation budget is now " + f"{org.get('current_budget_pid')}" + ) # PRIVATE METHODS ========================================================= # These methods are used during the rollover process. They shouldn't be @@ -491,11 +512,11 @@ def _abort_rollover(self, message=None): """ if message: self.logger.warning(message) - self.logger.warning('Aborting rollover process !') + self.logger.warning("Aborting rollover process !") self._update_budgets(True, False) if not self._stack: return - self.logger.info('Purging created resources...') + self.logger.info("Purging created resources...") for obj in reversed(self._stack): obj.delete(force=True, dbcommit=True, delindex=True) self.logger.info(f"\t* object {str(obj)} deleted") @@ -517,7 +538,7 @@ def _confirm(self, question, default="yes"): elif default == "no": prompt = " [y/N] " else: - raise ValueError("invalid default answer: '%s'" % default) + raise ValueError(f"invalid default answer: '{default}'") while True: self.logger.info(question + prompt) @@ -538,17 +559,17 @@ def _create_new_budget(self, **kwargs): """ org_pid = self.original_budget.organisation_pid data = { - 'organisation': {'$ref': get_ref_for_pid('org', org_pid)}, - 'is_active': False + "organisation": {"$ref": get_ref_for_pid("org", org_pid)}, + "is_active": False, } - for required_param in ['name', 'start_date', 'end_date']: - assert required_param in kwargs, f'{required_param} param required' + for required_param in ["name", "start_date", "end_date"]: + assert required_param in kwargs, f"{required_param} param required" data[required_param] = kwargs[required_param] if budget := Budget.create(data, dbcommit=True, reindex=True): self._stack.append(budget) return budget - def _draw_data_table(self, columns, rows=None, padding=''): + def _draw_data_table(self, columns, rows=None, padding=""): """Draw data as a table using ASCII characters. :param columns: the column headers. Each column is a tuple that must @@ -568,19 +589,21 @@ def _get_rollover_migration_setting(self, account): :param account: the account to analyze. 
:return: the migration setting ; 'ALLOCATED_AMOUNT' by default. """ - return self._get_library(account.library_pid) \ - .get('rollover_settings', {}) \ - .get('account_transfer', AccountTransferOption.ALLOCATED_AMOUNT) + return ( + self._get_library(account.library_pid) + .get("rollover_settings", {}) + .get("account_transfer", AccountTransferOption.ALLOCATED_AMOUNT) + ) def _get_library(self, library_pid): """Get a `Library` resources from cache or load it. :param library_pid (string): the library_pid to get/load. """ - if library_pid not in self._cache.get('library', {}): + if library_pid not in self._cache.get("library", {}): library = Library.get_record_by_pid(library_pid) - self._cache.setdefault('library', {})[library_pid] = library - return self._cache['library'][library_pid] + self._cache.setdefault("library", {})[library_pid] = library + return self._cache["library"][library_pid] def _validate(self): """Validate the rollover parameters. @@ -618,11 +641,14 @@ def _get_accounts(budget_pid): :param budget_pid (string): the budget pid to filter. :return: the sorted list of `AcqAccount`. """ - query = AcqAccountsSearch() \ - .filter('term', budget__pid=budget_pid) \ - .params(preserve_order=True) \ - .sort({'depth': {'order': 'asc'}}) \ - .source(False).scan() + query = ( + AcqAccountsSearch() + .filter("term", budget__pid=budget_pid) + .params(preserve_order=True) + .sort({"depth": {"order": "asc"}}) + .source(False) + .scan() + ) return sort_accounts_as_tree( [AcqAccount.get_record(hit.meta.id) for hit in query] ) @@ -644,15 +670,17 @@ def _get_orders_to_migrate(account_pids): open_status = [ AcqOrderStatus.ORDERED, AcqOrderStatus.PENDING, - AcqOrderStatus.PARTIALLY_RECEIVED + AcqOrderStatus.PARTIALLY_RECEIVED, ] - filters = Q('terms', status=open_status) - filters |= Q('term', type=AcqOrderType.STANDING_ORDER) + filters = Q("terms", status=open_status) + filters |= Q("term", type=AcqOrderType.STANDING_ORDER) - query = AcqOrdersSearch() \ - .filter('terms', order_lines__account__pid=account_pids) \ - .filter(filters) \ + query = ( + AcqOrdersSearch() + .filter("terms", order_lines__account__pid=account_pids) + .filter(filters) .source(False) + ) return [AcqOrder.get_record(hit.meta.id) for hit in query.scan()] @staticmethod @@ -671,11 +699,14 @@ def _get_opened_order_lines(order_pids, account_pids): open_status = [ AcqOrderLineStatus.APPROVED, AcqOrderLineStatus.ORDERED, - AcqOrderLineStatus.PARTIALLY_RECEIVED + AcqOrderLineStatus.PARTIALLY_RECEIVED, ] - query = AcqOrderLinesSearch() \ - .filter('terms', acq_account__pid=account_pids) \ - .filter('terms', acq_order__pid=order_pids) \ - .filter('terms', status=open_status) \ - .source(False).scan() + query = ( + AcqOrderLinesSearch() + .filter("terms", acq_account__pid=account_pids) + .filter("terms", acq_order__pid=order_pids) + .filter("terms", status=open_status) + .source(False) + .scan() + ) return [AcqOrderLine.get_record(hit.meta.id) for hit in query] diff --git a/rero_ils/modules/api.py b/rero_ils/modules/api.py index 61cc2d864f..a38400cf91 100644 --- a/rero_ils/modules/api.py +++ b/rero_ils/modules/api.py @@ -49,8 +49,8 @@ ils_record_format_checker = FormatChecker() -@ils_record_format_checker.checks('email') -@ils_record_format_checker.checks('idn-email') +@ils_record_format_checker.checks("email") +@ils_record_format_checker.checks("idn-email") def _strong_email_validation(instance) -> bool: """Allow to validate an email address (only email format, not DNS).""" if not isinstance(instance, str): @@ -59,7 +59,7 @@ def 
_strong_email_validation(instance) -> bool: # into `email.validator.ts` file into @rero/ng-core. The best solution # should be to use a configuration setting available through an API, but # it should be a little overkill. - email_regexp = r'\b[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Z|a-z]{2,}\b' + email_regexp = r"\b[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Z|a-z]{2,}\b" return bool(re.fullmatch(email_regexp, instance)) @@ -94,9 +94,9 @@ class IlsRecordsSearch(RecordsSearch): class Meta: """Search only on item index.""" - index = 'records' + index = "records" doc_types = None - fields = ('*', ) + fields = ("*",) facets = {} default_filter = None @@ -117,7 +117,7 @@ def get_record_by_pid(self, pid, fields=None): """ if hit := next(self.get_records_by_pids([pid], fields), None): return hit - raise NotFoundError(f'Record not found pid: {pid}') + raise NotFoundError(f"Record not found pid: {pid}") def get_records_by_pids(self, pids, fields=None): """Get ES hits by pids. @@ -129,13 +129,12 @@ def get_records_by_pids(self, pids, fields=None): found. """ assert type(pids) is list - query = self.filter('terms', pid=pids) + query = self.filter("terms", pid=pids) if fields: query = query.source(includes=fields) return query.scan() - def get_records_by_terms(self, terms, key='pid', fields=None, - as_dict=False): + def get_records_by_terms(self, terms, key="pid", fields=None, as_dict=False): """Search records by terms. :param terms: list of term to search @@ -145,15 +144,12 @@ def get_records_by_terms(self, terms, key='pid', fields=None, :return: dictionary if as_dict is true else return generator :rtype: dictionary or generator """ - fields = fields or '*' + fields = fields or "*" params = {key: terms} - query = self.filter('terms', **params).source(includes=fields) + query = self.filter("terms", **params).source(includes=fields) if as_dict: - return { - result.pid: result.to_dict() - for result in query.scan() - } + return {result.pid: result.to_dict() for result in query.scan()} return query.scan() @@ -164,7 +160,7 @@ class IlsRecord(Record): minter = None fetcher = None provider = None - object_type = 'rec' + object_type = "rec" pids_exist_check = None pid_check = True @@ -173,11 +169,11 @@ class IlsRecord(Record): @classmethod def get_indexer_class(cls): """Get the indexer from config.""" - endpoints = current_app.config['RECORDS_REST_ENDPOINTS'] - endpoints.update(current_app.config['CIRCULATION_REST_ENDPOINTS']) + endpoints = current_app.config["RECORDS_REST_ENDPOINTS"] + endpoints.update(current_app.config["CIRCULATION_REST_ENDPOINTS"]) try: indexer = obj_or_import_string( - endpoints[cls.provider.pid_type]['indexer_class'] + endpoints[cls.provider.pid_type]["indexer_class"] ) except Exception: # provide default indexer if no indexer is defined in config. @@ -190,7 +186,7 @@ def _validate(self, **kwargs): extended validation per record class and test of pid existence. 
""" - if self.get('_draft'): + if self.get("_draft"): # No validation is needed for draft records return self @@ -198,19 +194,22 @@ def _validate(self, **kwargs): validation_message = self.extended_validation(**kwargs) # We only like to run pids_exist_check if validation_message is True # and not a string with error from extended_validation - if validation_message is True and self.pid_check and \ - self.pids_exist_check: + if validation_message is True and self.pid_check and self.pids_exist_check: from .utils import pids_exists_in_data - validation_message = pids_exists_in_data( - info=f'{self.provider.pid_type} ({self.pid})', - data=self, - required=self.pids_exist_check.get('required', {}), - not_required=self.pids_exist_check.get('not_required', {}) - ) or True + + validation_message = ( + pids_exists_in_data( + info=f"{self.provider.pid_type} ({self.pid})", + data=self, + required=self.pids_exist_check.get("required", {}), + not_required=self.pids_exist_check.get("not_required", {}), + ) + or True + ) if validation_message is not True: if not isinstance(validation_message, list): validation_message = [validation_message] - raise ValidationError(';'.join(validation_message)) + raise ValidationError(";".join(validation_message)) return json def extended_validation(self, **kwargs): @@ -221,25 +220,34 @@ def extended_validation(self, **kwargs): return True @classmethod - def create(cls, data, id_=None, delete_pid=False, - dbcommit=False, reindex=False, pidcheck=True, **kwargs): + def create( + cls, + data, + id_=None, + delete_pid=False, + dbcommit=False, + reindex=False, + pidcheck=True, + **kwargs, + ): """Create a new ils record.""" assert cls.minter assert cls.provider - if '$schema' not in data: + if "$schema" not in data: pid_type = cls.provider.pid_type - schemas = current_app.config.get('RERO_ILS_DEFAULT_JSON_SCHEMA') + schemas = current_app.config.get("RERO_ILS_DEFAULT_JSON_SCHEMA") if pid_type in schemas: from .utils import get_schema_for_resource - data['$schema'] = get_schema_for_resource(pid_type) - pid = data.get('pid') + + data["$schema"] = get_schema_for_resource(pid_type) + pid = data.get("pid") if delete_pid and pid: - del data['pid'] + del data["pid"] elif pid: if test_rec := cls.get_record_by_pid(pid): raise IlsRecordError.PidAlreadyUsed( - f'PidAlreadyUsed {cls.provider.pid_type} ' - f'{test_rec.pid} {test_rec.id}' + f"PidAlreadyUsed {cls.provider.pid_type} " + f"{test_rec.pid} {test_rec.id}" ) if not id_: id_ = uuid4() @@ -251,7 +259,8 @@ def create(cls, data, id_=None, delete_pid=False, record.dbcommit(reindex) except Exception as err: current_app.logger.warning( - f'CREATE WARNING: {cls.__name__}: {err} data:{data}') + f"CREATE WARNING: {cls.__name__}: {err} data:{data}" + ) raise return record @@ -259,17 +268,15 @@ def create(cls, data, id_=None, delete_pid=False, def get_record_by_pid(cls, pid, with_deleted=False, verbose=False): """Get ILS record by pid value.""" if verbose: - click.echo(f'\t\tget_record_by_pid: {cls.__name__} {pid}') + click.echo(f"\t\tget_record_by_pid: {cls.__name__} {pid}") if pid: assert cls.provider try: persistent_identifier = PersistentIdentifier.get( - cls.provider.pid_type, - pid + cls.provider.pid_type, pid ) return super().get_record( - persistent_identifier.object_uuid, - with_deleted=with_deleted + persistent_identifier.object_uuid, with_deleted=with_deleted ) # TODO: is it better to raise a error or to return None? 
except (NoResultFound, PIDDoesNotExistError): @@ -296,10 +303,7 @@ def record_pid_exists(cls, pid): """ assert cls.provider try: - PersistentIdentifier.get( - cls.provider.pid_type, - pid - ) + PersistentIdentifier.get(cls.provider.pid_type, pid) return True except (NoResultFound, PIDDoesNotExistError): @@ -316,10 +320,7 @@ def get_id_by_pid(cls, pid): """Get uuid by pid.""" assert cls.provider try: - persistent_identifier = PersistentIdentifier.get( - cls.provider.pid_type, - pid - ) + persistent_identifier = PersistentIdentifier.get(cls.provider.pid_type, pid) return persistent_identifier.object_uuid except Exception: return None @@ -328,17 +329,13 @@ def get_id_by_pid(cls, pid): def get_persistent_identifier(cls, id): """Get Persistent Identifier.""" return PersistentIdentifier.get_by_object( - cls.provider.pid_type, - cls.object_type, - id + cls.provider.pid_type, cls.object_type, id ) @classmethod def _get_all(cls, with_deleted=False): """Get all persistent identifier records.""" - query = PersistentIdentifier.query.filter_by( - pid_type=cls.provider.pid_type - ) + query = PersistentIdentifier.query.filter_by(pid_type=cls.provider.pid_type) if not with_deleted: query = query.filter_by(status=PIDStatus.REGISTERED) return query @@ -350,7 +347,7 @@ def get_all_pids(cls, with_deleted=False, limit=100000): if limit: count = query.count() # slower, less memory - query = query.order_by(text('pid_value')).limit(limit) + query = query.order_by(text("pid_value")).limit(limit) offset = 0 while offset < count: for identifier in query.offset(offset): @@ -367,7 +364,7 @@ def get_all_ids(cls, with_deleted=False, limit=100000): query = cls._get_all(with_deleted=with_deleted) if limit: # slower, less memory - query = query.order_by(text('pid_value')).limit(limit) + query = query.order_by(text("pid_value")).limit(limit) offset = 0 count = cls.count(with_deleted=with_deleted) while offset < count: @@ -421,20 +418,22 @@ def update(self, data, commit=False, dbcommit=False, reindex=False): :param reindex: reindex the record. :returns: the modified record """ - if pid := data.get('pid'): + if pid := data.get("pid"): db_record = self.get_record(self.id) if pid != db_record.pid: raise IlsRecordError.PidChange( - f'{self.__class__.__name__} changed pid from ' - f'{db_record.pid} to {pid}') + f"{self.__class__.__name__} changed pid from " + f"{db_record.pid} to {pid}" + ) record = self # Add schema if missing. 
- if not data.get('$schema'): + if not data.get("$schema"): pid_type = self.provider.pid_type from .utils import get_schema_for_resource + if schema := get_schema_for_resource(pid_type): - data['$schema'] = schema + data["$schema"] = schema super().update(data) if commit or dbcommit: @@ -447,12 +446,11 @@ def update(self, data, commit=False, dbcommit=False, reindex=False): def replace(self, data, commit=True, dbcommit=False, reindex=False): """Replace data in record.""" new_data = deepcopy(data) - pid = new_data.get('pid') + pid = new_data.get("pid") if not pid: - raise IlsRecordError.PidMissing(f'missing pid={self.pid}') + raise IlsRecordError.PidMissing(f"missing pid={self.pid}") self.clear() - return self.update( - new_data, commit=commit, dbcommit=dbcommit, reindex=reindex) + return self.update(new_data, commit=commit, dbcommit=dbcommit, reindex=reindex) def revert(self, revision_id, reindex=False): """Revert the record to a specific revision.""" @@ -499,13 +497,13 @@ def delete_from_index(self): indexer().delete(self) except NotFoundError: current_app.logger.warning( - f'Can not delete from index {self.__class__.__name__}' - f': {self.pid}') + f"Can not delete from index {self.__class__.__name__}" f": {self.pid}" + ) @property def pid(self): """Get ils record pid value.""" - return self.get('pid') + return self.get("pid") @property def persistent_identifier(self): @@ -536,7 +534,7 @@ def can_delete(self): @property def organisation_pid(self): """Get organisation pid for circulation policy.""" - return extracted_data_from_ref(self.get('organisation')) + return extracted_data_from_ref(self.get("organisation")) @classmethod def get_metadata_identifier_names(cls): @@ -569,22 +567,21 @@ class IlsRecordsIndexer(RecordIndexer): def index(self, record): """Indexing a record.""" - return super().index(record, arguments=dict(refresh='true')) + return super().index(record, arguments=dict(refresh="true")) def delete(self, record): """Delete a record. :param record: Record instance. """ - return super().delete(record, refresh='true') + return super().delete(record, refresh="true") def bulk_index(self, record_id_iterator, doc_type=None): """Bulk index records. :param record_id_iterator: Iterator yielding record UUIDs. """ - self._bulk_op( - record_id_iterator, op_type='index', doc_type=doc_type) + self._bulk_op(record_id_iterator, op_type="index", doc_type=doc_type) def process_bulk_queue(self, search_bulk_kwargs=None, stats_only=True): """Process bulk indexing queue. 
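For orientation, indexing above is two-phased: `bulk_index()` only publishes record UUIDs to the message queue, and nothing reaches Elasticsearch until `process_bulk_queue()` consumes the queue. A plausible caller could look like this (a sketch built only from the methods shown in this file, not code from the repository):

from rero_ils.modules.api import IlsRecordsIndexer


def reindex_all(record_cls):
    """Queue every record of one resource type, then flush the queue."""
    indexer = IlsRecordsIndexer()
    # get_all_ids() yields the record UUIDs (see IlsRecord above).
    indexer.bulk_index(record_cls.get_all_ids(), doc_type="rec")
    # Messages are consumed and bulk-sent to Elasticsearch only here.
    indexer.process_bulk_queue()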
@@ -603,7 +600,7 @@ def process_bulk_queue(self, search_bulk_kwargs=None, stats_only=True): routing_key=self.mq_routing_key, ) - req_timeout = current_app.config['INDEXER_BULK_REQUEST_TIMEOUT'] + req_timeout = current_app.config["INDEXER_BULK_REQUEST_TIMEOUT"] search_bulk_kwargs = search_bulk_kwargs or {} @@ -613,7 +610,7 @@ def process_bulk_queue(self, search_bulk_kwargs=None, stats_only=True): stats_only=stats_only, request_timeout=req_timeout, expand_action_callback=search.helpers.expand_action, - **search_bulk_kwargs + **search_bulk_kwargs, ) consumer.close() @@ -625,7 +622,7 @@ def _get_record_class(self, payload): from .utils import get_record_class_from_schema_or_pid_type # take the first defined doc type for finding the class - pid_type = payload.get('doc_type', 'rec') + pid_type = payload.get("doc_type", "rec") return get_record_class_from_schema_or_pid_type(pid_type=pid_type) # @@ -642,12 +639,7 @@ def _bulk_op(self, record_id_iterator, op_type, index=None, doc_type=None): """ with self.create_producer() as producer: for rec in record_id_iterator: - data = dict( - id=str(rec), - op=op_type, - index=index, - doc_type=doc_type - ) + data = dict(id=str(rec), op=op_type, index=index, doc_type=doc_type) producer.publish( data, declare=[self.mq_queue], @@ -663,7 +655,7 @@ def _actionsiter(self, message_iterator): payload = message.decode() try: indexer = self._get_record_class(payload).get_indexer_class() - if payload['op'] == 'delete': + if payload["op"] == "delete": yield indexer()._delete_action(payload=payload) else: yield indexer()._index_action(payload=payload) @@ -676,7 +668,7 @@ def _actionsiter(self, message_iterator): f"Failed to {payload['op']}" f" {payload.get('doc_type'), 'rec'} " f"{payload.get('pid')}:{payload.get('id')}", - exc_info=True + exc_info=True, ) def _index_action(self, payload): @@ -685,26 +677,25 @@ def _index_action(self, payload): :param payload: Decoded message body. :return: Dictionary defining an Elasticsearch bulk 'index' action. """ - record = self.record_cls.get_record(payload['id']) + record = self.record_cls.get_record(payload["id"]) index = self.record_to_index(record) arguments = {} - index = payload.get('index') or index + index = payload.get("index") or index body = self._prepare_record(record, index, arguments) action = { - '_op_type': 'index', - '_index': index, - '_id': str(record.id), - '_version': record.revision_id, - '_version_type': self._version_type, - '_source': body + "_op_type": "index", + "_index": index, + "_id": str(record.id), + "_version": record.revision_id, + "_version_type": self._version_type, + "_source": body, } action.update(arguments) return action - def _prepare_record( - self, record, index, arguments=None, **kwargs): + def _prepare_record(self, record, index, arguments=None, **kwargs): """Prepare record data for indexing. :param record: The record to prepare. @@ -713,7 +704,7 @@ def _prepare_record( :param **kwargs: Extra parameters. :return: The record metadata. """ - if not getattr(record, 'enable_jsonref', True): + if not getattr(record, "enable_jsonref", True): # If dumper is None, dumps() will use the default configured dumper # on the Record class. data = record.dumps(dumper=self.record_dumper) @@ -724,17 +715,19 @@ def _prepare_record( # records. Also, we're adding extra information into the record # like _created and _updated afterwards, which the Record.dumps() # have no control over. 
- if current_app.config.get('INDEXER_REPLACE_REFS'): + if current_app.config.get("INDEXER_REPLACE_REFS"): data = record.replace_refs().dumps() # Original code # data = copy.deepcopy(record.replace_refs()) else: data = record.dumps() - data['_created'] = pytz.utc.localize(record.created).isoformat() \ - if record.created else None - data['_updated'] = pytz.utc.localize(record.updated).isoformat() \ - if record.updated else None + data["_created"] = ( + pytz.utc.localize(record.created).isoformat() if record.created else None + ) + data["_updated"] = ( + pytz.utc.localize(record.updated).isoformat() if record.updated else None + ) # Allow modification of data prior to sending to Elasticsearch. before_record_index.send( @@ -743,7 +736,7 @@ def _prepare_record( record=record, index=index, arguments={} if arguments is None else arguments, - **kwargs + **kwargs, ) return data @@ -762,7 +755,7 @@ def index(self, indexer_class, referenced): indexer = indexer_class() for r in referenced: try: - record_to_index = r['record'] + record_to_index = r["record"] indexer.index(record_to_index) except Exception as err: current_app.logger.error( diff --git a/rero_ils/modules/apiharvester/cli.py b/rero_ils/modules/apiharvester/cli.py index 7f6d5d17c0..81a3e4ffdf 100644 --- a/rero_ils/modules/apiharvester/cli.py +++ b/rero_ils/modules/apiharvester/cli.py @@ -30,7 +30,7 @@ from .models import ApiHarvestConfig from .utils import api_source -datastore = LocalProxy(lambda: current_app.extensions['security'].datastore) +datastore = LocalProxy(lambda: current_app.extensions["security"].datastore) @click.group() @@ -38,99 +38,122 @@ def apiharvester(): """Api harvester commands.""" -@apiharvester.command('source') -@click.argument('name') -@click.option('-U', '--url', default='', help='Url') -@click.option('-m', '--mimetype', default='', help='Mimetype') -@click.option('-s', '--size', default=-1, type=int, help='Size') -@click.option('-c', '--comment', default='', help='Comment') -@click.option( - '-u', '--update', is_flag=True, default=False, help='Update config' -) +@apiharvester.command("source") +@click.argument("name") +@click.option("-U", "--url", default="", help="Url") +@click.option("-m", "--mimetype", default="", help="Mimetype") +@click.option("-s", "--size", default=-1, type=int, help="Size") +@click.option("-c", "--comment", default="", help="Comment") +@click.option("-u", "--update", is_flag=True, default=False, help="Update config") @with_appcontext def api_source_config(name, url, mimetype, size, comment, update): """Add or Update ApiHarvestConfig.""" - click.echo(f'ApiHarvesterConfig: {name} ', nl=False) + click.echo(f"ApiHarvesterConfig: {name} ", nl=False) msg = api_source( - name=name, - url=url, - mimetype=mimetype, - size=size, - comment=comment, - update=update + name=name, url=url, mimetype=mimetype, size=size, comment=comment, update=update ) click.echo(msg) -@apiharvester.command('sources') -@click.argument('configfile', type=click.File('rb')) -@click.option( - '-u', '--update', is_flag=True, default=False, help='Update config' -) +@apiharvester.command("sources") +@click.argument("configfile", type=click.File("rb")) +@click.option("-u", "--update", is_flag=True, default=False, help="Update config") @with_appcontext def api_source_config_from_file(configfile, update): """Add or update ApiHarvestConfigs from file.""" configs = yaml.load(configfile, Loader=yaml.FullLoader) for name, values in sorted(configs.items()): - url = values.get('url', '') - mimetype = values.get('mimetype', '') - size 
= values.get('size', 100) - comment = values.get('comment', '') - click.echo(f'ApiHarvesterConfig: {name} {url} ', nl=False) + url = values.get("url", "") + mimetype = values.get("mimetype", "") + size = values.get("size", 100) + comment = values.get("comment", "") + click.echo(f"ApiHarvesterConfig: {name} {url} ", nl=False) msg = api_source( name=name, url=url, mimetype=mimetype, size=size, comment=comment, - update=update + update=update, ) click.echo(msg) -@apiharvester.command('harvest') -@click.option('-n', '--name', default=None, - help='Name of persistent configuration to use.') -@click.option('-f', '--from-date', default=None, - help='The lower bound date for the harvesting (optional).') -@click.option('-u', '--url', default=None, - help='The upper bound date for the harvesting (optional).') -@click.option('-k', '--enqueue', is_flag=True, default=False, - help='Enqueue harvesting and return immediately.') -@click.option('--signals/--no-signals', default=True, - help='Signals sent with Api harvesting results.') -@click.option('-s', '--size', type=int, default=0, - help='Size of chunks (optional).') -@click.option('-m', '--max_results', type=int, default=0, - help='maximum of records to harvest (optional).') -@click.option('-v', '--verbose', 'verbose', is_flag=True, default=False) +@apiharvester.command("harvest") +@click.option( + "-n", "--name", default=None, help="Name of persistent configuration to use." +) +@click.option( + "-f", + "--from-date", + default=None, + help="The lower bound date for the harvesting (optional).", +) +@click.option( + "-u", + "--url", + default=None, + help="The upper bound date for the harvesting (optional).", +) +@click.option( + "-k", + "--enqueue", + is_flag=True, + default=False, + help="Enqueue harvesting and return immediately.", +) +@click.option( + "--signals/--no-signals", + default=True, + help="Signals sent with Api harvesting results.", +) +@click.option("-s", "--size", type=int, default=0, help="Size of chunks (optional).") +@click.option( + "-m", + "--max_results", + type=int, + default=0, + help="maximum of records to harvest (optional).", +) +@click.option("-v", "--verbose", "verbose", is_flag=True, default=False) @with_appcontext -def harvest(name, from_date, url, enqueue, signals, size, max_results, - verbose): +def harvest(name, from_date, url, enqueue, signals, size, max_results, verbose): """Harvest api.""" if name: - click.secho(f'Harvest api: {name}', fg='green') + click.secho(f"Harvest api: {name}", fg="green") elif url: - click.secho(f'Harvest api: {url}', fg='green') + click.secho(f"Harvest api: {url}", fg="green") if enqueue: - harvest_records.delay(url=url, name=name, from_date=from_date, - signals=signals, size=size, - max_results=max_results, verbose=verbose) + harvest_records.delay( + url=url, + name=name, + from_date=from_date, + signals=signals, + size=size, + max_results=max_results, + verbose=verbose, + ) else: - harvest_records(url=url, name=name, from_date=from_date, - signals=signals, size=size, max_results=max_results, - verbose=verbose) + harvest_records( + url=url, + name=name, + from_date=from_date, + signals=signals, + size=size, + max_results=max_results, + verbose=verbose, + ) -@apiharvester.command('info') +@apiharvester.command("info") @with_appcontext def info(): """List infos for tasks.""" apis = ApiHarvestConfig.query.all() for api in apis: click.echo(api.name) - click.echo(f'\tlastrun : {api.lastrun}') - click.echo(f'\turl : {api.url}') - click.echo(f'\tmimetype : {api.mimetype}') - click.echo(f'\tsize 
: {api.size}') - click.echo(f'\tcomment : {api.comment}') + click.echo(f"\tlastrun : {api.lastrun}") + click.echo(f"\turl : {api.url}") + click.echo(f"\tmimetype : {api.mimetype}") + click.echo(f"\tsize : {api.size}") + click.echo(f"\tcomment : {api.comment}") diff --git a/rero_ils/modules/apiharvester/models.py b/rero_ils/modules/apiharvester/models.py index 383845c7d2..c657a127ea 100644 --- a/rero_ils/modules/apiharvester/models.py +++ b/rero_ils/modules/apiharvester/models.py @@ -29,18 +29,19 @@ class ApiHarvestConfig(RecordIdentifier): """Sequence generator for Document identifiers.""" - __tablename__ = 'apiharvester_config' - __mapper_args__ = {'concrete': True} + __tablename__ = "apiharvester_config" + __mapper_args__ = {"concrete": True} id = db.Column(db.Integer, primary_key=True) - url = db.Column(db.String(255), nullable=False, server_default='') + url = db.Column(db.String(255), nullable=False, server_default="") name = db.Column(db.String(255), nullable=False) mimetype = db.Column(db.String(255), nullable=False) size = db.Column(db.Integer, nullable=False) comment = db.Column(db.Text, nullable=True) - default_last_run = datetime.strptime('1900-1-1', '%Y-%m-%d') - lastrun = db.Column(db.DateTime, default=pytz.utc.localize( - default_last_run), nullable=True) + default_last_run = datetime.strptime("1900-1-1", "%Y-%m-%d") + lastrun = db.Column( + db.DateTime, default=pytz.utc.localize(default_last_run), nullable=True + ) def save(self): """Save object to persistent storage.""" diff --git a/rero_ils/modules/apiharvester/signals.py b/rero_ils/modules/apiharvester/signals.py index a361d2e602..f24e2b621c 100644 --- a/rero_ils/modules/apiharvester/signals.py +++ b/rero_ils/modules/apiharvester/signals.py @@ -21,4 +21,4 @@ _signals = Namespace() -apiharvest_part = _signals.signal('apiharvest_part') +apiharvest_part = _signals.signal("apiharvest_part") diff --git a/rero_ils/modules/apiharvester/tasks.py b/rero_ils/modules/apiharvester/tasks.py index 021b79c241..fdc6134fba 100644 --- a/rero_ils/modules/apiharvester/tasks.py +++ b/rero_ils/modules/apiharvester/tasks.py @@ -26,8 +26,15 @@ @shared_task(ignore_result=True) -def harvest_records(url=None, name=None, from_date=None, signals=True, size=0, - max_results=0, verbose=False): +def harvest_records( + url=None, + name=None, + from_date=None, + signals=True, + size=0, + max_results=0, + verbose=False, +): """Harvest records.""" config = ApiHarvestConfig.query.filter_by(name=name).first() if config: @@ -40,7 +47,12 @@ def harvest_records(url=None, name=None, from_date=None, signals=True, size=0, size = config.size for next, records in get_records( - url=url, name=name, from_date=from_date, size=size, - max_results=max_results, signals=signals, verbose=verbose + url=url, + name=name, + from_date=from_date, + size=size, + max_results=max_results, + signals=signals, + verbose=verbose, ): pass diff --git a/rero_ils/modules/apiharvester/utils.py b/rero_ils/modules/apiharvester/utils.py index 44e5c412ef..3365da4b80 100644 --- a/rero_ils/modules/apiharvester/utils.py +++ b/rero_ils/modules/apiharvester/utils.py @@ -29,79 +29,83 @@ from .signals import apiharvest_part -def api_source(name, url='', mimetype='', size=100, comment='', update=False): +def api_source(name, url="", mimetype="", size=100, comment="", update=False): """Add ApiHarvesterConfig.""" with current_app.app_context(): source = ApiHarvestConfig.query.filter_by(name=name).first() if not source: source = ApiHarvestConfig( - name=name, - url=url, - mimetype=mimetype, - size=100, - 
comment=comment + name=name, url=url, mimetype=mimetype, size=100, comment=comment ) source.save() db.session.commit() - return 'Added' + return "Added" elif update: source.name = name msg = [] - if url != '': + if url != "": source.url = url - msg.append(f'url:{url}') - if mimetype != '': + msg.append(f"url:{url}") + if mimetype != "": source.mimetype = mimetype - msg.append(f'mimetype:{mimetype}') + msg.append(f"mimetype:{mimetype}") if size != -1: source.size = size - msg.append(f'size:{size}') - if comment != '': + msg.append(f"size:{size}") + if comment != "": source.comment = comment - msg.append(f'comment:{comment}') + msg.append(f"comment:{comment}") db.session.commit() return f'Updated: {", ".join(msg)}' - return 'Not Updated' + return "Not Updated" def extract_records(data): """Extract a record from REST data.""" records = [] - hits = data.get('hits', {}).get('hits', {}) + hits = data.get("hits", {}).get("hits", {}) for hit in hits: # pid = data.get('id', '') # updated = data.get('updated', '') # links = data.get('links', {}).get('self', '') - record = hit.get('metadata', '') + record = hit.get("metadata", "") records.append(record) return records -def get_records(url=None, name=None, from_date=None, max_results=0, size=100, - signals=True, verbose=False, **kwargs): +def get_records( + url=None, + name=None, + from_date=None, + max_results=0, + size=100, + signals=True, + verbose=False, + **kwargs, +): """Harvest multiple records from invenio api.""" - url += f'/?size={size}' + url += f"/?size={size}" if from_date: if isinstance(from_date, str): from_date = parser.parse(from_date) from_date = from_date.isoformat() # we have to urlencode the : from the time with \: - from_date = from_date.replace(':', '%5C:') - url += f'&q=_updated:>{from_date}' - url += f'&size={size}' + from_date = from_date.replace(":", "%5C:") + url += f"&q=_updated:>{from_date}" + url += f"&size={size}" if verbose: - click.echo(f'Get records from {url}') + click.echo(f"Get records from {url}") try: count = 0 request = requests.get(url) data = request.json() - total = data['hits']['total']['value'] - click.echo(f'API records found: {total}') + total = data["hits"]["total"]["value"] + click.echo(f"API records found: {total}") - next_url = data.get('links', {}).get('self', True) + next_url = data.get("links", {}).get("self", True) while next_url and (count < max_results or max_results == 0): records = extract_records(data) count += len(records) @@ -113,17 +117,11 @@ def get_records(url=None, name=None, from_date=None, max_results=0, size=100, data = request.json() if signals: apiharvest_part.send( - records=records, - name=name, - url=next, - verbose=verbose, - **kwargs) + records=records, name=name, url=next, verbose=verbose, **kwargs + ) else: yield next_url, records - next_url = data.get('links', {}).get('next', None) + next_url = data.get("links", {}).get("next", None) except Exception as error: - click.secho( - f'Harvesting API ConnectionRefusedError: {error}', - fg='red' - ) + click.secho(f"Harvesting API ConnectionRefusedError: {error}", fg="red") yield url, [] diff --git a/rero_ils/modules/babel_extractors.py b/rero_ils/modules/babel_extractors.py index 1f15ed78d2..f453c14cf2 100644 --- a/rero_ils/modules/babel_extractors.py +++ b/rero_ils/modules/babel_extractors.py @@ -25,7 +25,7 @@ KEY_VAL_REGEX = re.compile(r'"(.*?)"\s*:\s*"(.*?)"') -def translate(data, keys=['title']): +def translate(data, keys=["title"]): """Translate strings in a data structure.""" to_return = data if isinstance(data, dict): @@ 
-40,17 +40,17 @@ def translate(data, keys=['title']): return to_return -def extract(fileobj, keys=['title']): +def extract(fileobj, keys=["title"]): """Extract translation from a json file.""" translations = [] line = 1 for v in fileobj: - for match in KEY_VAL_REGEX.finditer(v.decode('utf-8')): + for match in KEY_VAL_REGEX.finditer(v.decode("utf-8")): k_match, v_match = match.groups() # if k_match in keys and v_match: for regexkey in keys: if re.match(regexkey, k_match): - translations.append((line, 'gettext', v_match, [])) + translations.append((line, "gettext", v_match, [])) continue line += 1 return translations @@ -70,5 +70,5 @@ def extract_json(fileobj, keywords, comment_tags, options): tuples :rtype: ``iterator`` """ - keys_to_translate = eval(options.get('keys_to_translate', "['title']")) + keys_to_translate = eval(options.get("keys_to_translate", "['title']")) return extract(fileobj, keys_to_translate) diff --git a/rero_ils/modules/circ_policies/api.py b/rero_ils/modules/circ_policies/api.py index 3ef2820f76..9c0618f157 100644 --- a/rero_ils/modules/circ_policies/api.py +++ b/rero_ils/modules/circ_policies/api.py @@ -30,20 +30,19 @@ from rero_ils.modules.libraries.api import Library from rero_ils.modules.minters import id_minter from rero_ils.modules.providers import Provider -from rero_ils.modules.utils import extracted_data_from_ref, \ - get_patron_from_arguments +from rero_ils.modules.utils import extracted_data_from_ref, get_patron_from_arguments from .extensions import CircPolicyFieldsExtension from .models import CircPolicyIdentifier, CircPolicyMetadata -DUE_SOON_REMINDER_TYPE = 'due_soon' -OVERDUE_REMINDER_TYPE = 'overdue' +DUE_SOON_REMINDER_TYPE = "due_soon" +OVERDUE_REMINDER_TYPE = "overdue" # cipo provider CircPolicyProvider = type( - 'CircPolicyProvider', + "CircPolicyProvider", (Provider,), - dict(identifier=CircPolicyIdentifier, pid_type='cipo') + dict(identifier=CircPolicyIdentifier, pid_type="cipo"), ) # cipo minter circ_policy_id_minter = partial(id_minter, provider=CircPolicyProvider) @@ -57,9 +56,9 @@ class CircPoliciesSearch(IlsRecordsSearch): class Meta: """Search only on Circulation policies index.""" - index = 'circ_policies' + index = "circ_policies" doc_types = None - fields = ('*', ) + fields = ("*",) facets = {} default_filter = None @@ -73,20 +72,18 @@ class CircPolicy(IlsRecord): provider = CircPolicyProvider model_cls = CircPolicyMetadata pids_exist_check = { - 'required': { - 'org': 'organisation', + "required": { + "org": "organisation", }, - 'not_required': { + "not_required": { # TODO: this is in list of settings # 'lib': 'library', # list # 'ptty': 'patron_type', # settings list # 'itty': 'item_type' # setings list - } + }, } - _extensions = [ - CircPolicyFieldsExtension() - ] + _extensions = [CircPolicyFieldsExtension()] def extended_validation(self, **kwargs): """Validate record against schema. 
@@ -98,42 +95,43 @@ def extended_validation(self, **kwargs): from ..patron_types.api import PatronType # Only one default policy by organisation - if self.get('is_default', False): - default_cipo = CircPolicy.get_default_circ_policy( - self.organisation_pid) + if self.get("is_default", False): + default_cipo = CircPolicy.get_default_circ_policy(self.organisation_pid) if default_cipo and default_cipo.pid != self.pid: - return 'CircPolicy: already a default policy for this org' + return "CircPolicy: already a default policy for this org" - for library in self.get('libraries', []): + for library in self.get("libraries", []): library_pid = extracted_data_from_ref(library) if not Library.get_record_by_pid(library_pid): return f"CircPolicy: no library: {library.get('pid')}" # check all patron_types & item_types from settings belongs to the # same organisation than the cipo - org = self.get('organisation') - for setting in self.get('settings', []): - patron_type_pid = extracted_data_from_ref(setting.get( - 'patron_type' - )) + org = self.get("organisation") + for setting in self.get("settings", []): + patron_type_pid = extracted_data_from_ref(setting.get("patron_type")) patron_type = PatronType.get_record_by_pid(patron_type_pid) - patron_type_org = patron_type.get('organisation') - item_type_pid = extracted_data_from_ref(setting.get('item_type')) + patron_type_org = patron_type.get("organisation") + item_type_pid = extracted_data_from_ref(setting.get("item_type")) item_type = ItemType.get_record_by_pid(item_type_pid) - item_type_org = item_type.get('organisation') + item_type_org = item_type.get("organisation") if patron_type_org != org or item_type_org != org: - return 'CircPolicy: PatronType ItemType Org diff' + return "CircPolicy: PatronType ItemType Org diff" # check reminders :: # 1) only one "before" reminder can be defined. # 2) each delay of "after" reminder must be unique. - reminders = self.get('reminders', []) - due_soon_reminders = [r for r in reminders - if r.get('type') == DUE_SOON_REMINDER_TYPE] + reminders = self.get("reminders", []) + due_soon_reminders = [ + r for r in reminders if r.get("type") == DUE_SOON_REMINDER_TYPE + ] if len(due_soon_reminders) > 1: return 'Only one "due soon" reminder can be defined by CircPolicy' - overdue_reminder_delays = [r.get('days_delay') for r in reminders - if r.get('type') == OVERDUE_REMINDER_TYPE] + overdue_reminder_delays = [ + r.get("days_delay") + for r in reminders + if r.get("type") == OVERDUE_REMINDER_TYPE + ] unique_delays = set(overdue_reminder_delays) if len(unique_delays) != len(overdue_reminder_delays): return 'Delay for "overdue" reminder should be unique.' @@ -142,40 +140,51 @@ def extended_validation(self, **kwargs): # 1) None interval can overlap other one. # 2) Only the last interval can omit an upper limit. intervals = sorted( - self.get('overdue_fees', {}).get('intervals', []), - key=lambda interval: interval.get('from') + self.get("overdue_fees", {}).get("intervals", []), + key=lambda interval: interval.get("from"), ) last_lower_limit = -1 last_upper_limit = 0 for interval in intervals: - lower_limit = interval.get('from', 0) - upper_limit = interval.get('to') + lower_limit = interval.get("from", 0) + upper_limit = interval.get("to") if upper_limit is None and interval != intervals[-1]: - return 'Only the last interval can omit the upper limit.' + return "Only the last interval can omit the upper limit." 
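+            # Worked example (hypothetical data): sorted intervals
+            # [1-10], [11-20], [21-None] are valid, since every lower
+            # limit exceeds the previous upper limit and only the last
+            # interval omits "to"; a second interval starting at 10
+            # would be caught by the overlap checks below.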
if lower_limit <= last_upper_limit: - return 'Another interval covers this lower limit interval ' \ - f':: [{lower_limit}-{upper_limit}]' + return ( + "Another interval covers this lower limit interval " + f":: [{lower_limit}-{upper_limit}]" + ) if upper_limit and upper_limit <= last_lower_limit: - return 'Another interval covers this upper limit interval ' \ - f':: [{lower_limit}-{upper_limit}]' + return ( + "Another interval covers this upper limit interval " + f":: [{lower_limit}-{upper_limit}]" + ) last_lower_limit = lower_limit last_upper_limit = upper_limit # Check renewal duration # If renewals are enabled, a renewal duration is required. - if self.get('number_renewals') and not self.get('renewal_duration'): - return 'A renewal duration is required if renewals are enabled.' + if self.get("number_renewals") and not self.get("renewal_duration"): + return "A renewal duration is required if renewals are enabled." return True @classmethod - def create(cls, data, id_=None, delete_pid=False, - dbcommit=True, reindex=True, pidcheck=True, **kwargs): + def create( + cls, + data, + id_=None, + delete_pid=False, + dbcommit=True, + reindex=True, + pidcheck=True, + **kwargs, + ): """Create a new circulation policy record.""" # default behavior is to reindex the record. Needed to check that there # is only one default policy by organisation - return super().create( - data, id_, delete_pid, dbcommit, reindex, **kwargs) + return super().create(data, id_, delete_pid, dbcommit, reindex, **kwargs) @classmethod def exist_name_and_organisation_pid(cls, name, organisation_pid): @@ -185,10 +194,13 @@ def exist_name_and_organisation_pid(cls, name, organisation_pid): :param organisation_pid: the organisation pid. :return `True` if name already exists, `False` otherwise. """ - result = CircPoliciesSearch()\ - .filter('term', circ_policy_name=name)\ - .filter('term', organisation__pid=organisation_pid)\ - .source().scan() + result = ( + CircPoliciesSearch() + .filter("term", circ_policy_name=name) + .filter("term", organisation__pid=organisation_pid) + .source() + .scan() + ) try: return next(result) except StopIteration: @@ -205,20 +217,25 @@ def get_circ_policy_by_LPI(cls, org_pid, lib_pid, ptty_pid, itty_pid): :return the first circulation policy corresponding to criteria ; `None` if no policy found. """ - result = CircPoliciesSearch()\ - .filter('term', policy_library_level=True)\ - .filter('term', organisation__pid=org_pid)\ - .filter('term', libraries__pid=lib_pid)\ - .filter('nested', - path='settings', - query=Q( - 'bool', - must=[ - Q('match', settings__patron_type__pid=ptty_pid), - Q('match', settings__item_type__pid=itty_pid) - ] - ))\ - .source('pid').scan() + result = ( + CircPoliciesSearch() + .filter("term", policy_library_level=True) + .filter("term", organisation__pid=org_pid) + .filter("term", libraries__pid=lib_pid) + .filter( + "nested", + path="settings", + query=Q( + "bool", + must=[ + Q("match", settings__patron_type__pid=ptty_pid), + Q("match", settings__item_type__pid=itty_pid), + ], + ), + ) + .source("pid") + .scan() + ) try: return CircPolicy.get_record_by_pid(next(result).pid) except StopIteration: @@ -234,19 +251,24 @@ def get_circ_policy_by_OPI(cls, org_pid, ptty_pid, itty_pid): :return the first circulation policy corresponding to criteria ; `None` if no policy found. 
""" - result = CircPoliciesSearch()\ - .filter('term', policy_library_level=False)\ - .filter('term', organisation__pid=org_pid)\ - .filter('nested', - path='settings', - query=Q( - 'bool', - must=[ - Q('match', settings__patron_type__pid=ptty_pid), - Q('match', settings__item_type__pid=itty_pid) - ] - ))\ - .source('pid').scan() + result = ( + CircPoliciesSearch() + .filter("term", policy_library_level=False) + .filter("term", organisation__pid=org_pid) + .filter( + "nested", + path="settings", + query=Q( + "bool", + must=[ + Q("match", settings__patron_type__pid=ptty_pid), + Q("match", settings__item_type__pid=itty_pid), + ], + ), + ) + .source("pid") + .scan() + ) try: return CircPolicy.get_record_by_pid(next(result).pid) except StopIteration: @@ -260,18 +282,22 @@ def get_default_circ_policy(cls, organisation_pid): :return the first circulation policy corresponding to criteria ; `None` if no policy found. """ - result = CircPoliciesSearch()\ - .filter('term', organisation__pid=organisation_pid)\ - .filter('term', is_default=True)\ - .source('pid').scan() + result = ( + CircPoliciesSearch() + .filter("term", organisation__pid=organisation_pid) + .filter("term", is_default=True) + .source("pid") + .scan() + ) try: return CircPolicy.get_record_by_pid(next(result).pid) except StopIteration: return None @classmethod - def provide_circ_policy(cls, organisation_pid, library_pid, - patron_type_pid, item_type_pid): + def provide_circ_policy( + cls, organisation_pid, library_pid, patron_type_pid, item_type_pid + ): """Return a circ policy for library/patron/item. :param organisation_pid: the organisation_pid. @@ -280,29 +306,21 @@ def provide_circ_policy(cls, organisation_pid, library_pid, :param item_type_pid: the item_type pid. :return the best circulation policy corresponding to criteria. 
""" - LPI_policy = CircPolicy.get_circ_policy_by_LPI( - organisation_pid, - library_pid, - patron_type_pid, - item_type_pid - ) - if LPI_policy: + if LPI_policy := CircPolicy.get_circ_policy_by_LPI( + organisation_pid, library_pid, patron_type_pid, item_type_pid + ): return LPI_policy - PI_policy = CircPolicy.get_circ_policy_by_OPI( - organisation_pid, - patron_type_pid, - item_type_pid - ) - if PI_policy: + if PI_policy := CircPolicy.get_circ_policy_by_OPI( + organisation_pid, patron_type_pid, item_type_pid + ): return PI_policy return CircPolicy.get_default_circ_policy(organisation_pid) def reasons_to_keep(self): """Reasons aside from record_links to keep a circ policy.""" others = {} - is_default = self.get('is_default') - if is_default: - others['is_default'] = is_default + if is_default := self.get("is_default"): + others["is_default"] = is_default return others def reasons_not_to_delete(self): @@ -311,22 +329,25 @@ def reasons_not_to_delete(self): others = self.reasons_to_keep() links = self.get_links_to_me() if others: - cannot_delete['others'] = others + cannot_delete["others"] = others if links: - cannot_delete['links'] = links + cannot_delete["links"] = links return cannot_delete @property def can_checkout(self): """Shortcut to know if circulation policy allow checkout.""" - return 'checkout_duration' in self + return "checkout_duration" in self @property def due_soon_interval_days(self): """Get number of days to check if loan is considered as due_soon.""" - reminder = [r for r in self.get('reminders', []) - if r.get('type') == DUE_SOON_REMINDER_TYPE] - return reminder[0].get('days_delay') if reminder else 1 + reminder = [ + r + for r in self.get("reminders", []) + if r.get("type") == DUE_SOON_REMINDER_TYPE + ] + return reminder[0].get("days_delay") if reminder else 1 @property def initial_overdue_days(self): @@ -338,21 +359,20 @@ def initial_overdue_days(self): intervals = self.get_overdue_intervals() reminder = self.get_reminder(reminder_type=OVERDUE_REMINDER_TYPE) if intervals or reminder: - return min([ - intervals[0].get('from') if intervals else sys.maxsize, - reminder.get('days_delay') if reminder else sys.maxsize - ]) + return min( + [ + intervals[0].get("from") if intervals else sys.maxsize, + reminder.get("days_delay") if reminder else sys.maxsize, + ] + ) def get_overdue_intervals(self): """Return sorted overdue intervals for this circulation policy.""" - intervals = self.get('overdue_fees', {}).get('intervals', []) + intervals = self.get("overdue_fees", {}).get("intervals", []) if intervals: - intervals = sorted( - intervals, - key=lambda interval: interval.get('from') - ) - if 'to' not in intervals[-1]: - intervals[-1]['to'] = float('+inf') + intervals = sorted(intervals, key=lambda interval: interval.get("from")) + if "to" not in intervals[-1]: + intervals[-1]["to"] = float("+inf") return intervals def get_reminders(self, reminder_type=DUE_SOON_REMINDER_TYPE, limit=None): @@ -364,9 +384,11 @@ def get_reminders(self, reminder_type=DUE_SOON_REMINDER_TYPE, limit=None): """ if limit is None: limit = math.inf - for reminder in self.get('reminders', []): - if reminder.get('type') == reminder_type \ - and reminder.get('days_delay') <= limit: + for reminder in self.get("reminders", []): + if ( + reminder.get("type") == reminder_type + and reminder.get("days_delay") <= limit + ): yield reminder def get_reminder(self, reminder_type=DUE_SOON_REMINDER_TYPE, idx=0): @@ -396,30 +418,31 @@ def can_request(cls, record, **kwargs): # none patron get be load from kwargs argument. 
This check can't # be relevant --> return True by default return True, [] - if 'patron' not in patron.get('roles', []): + if "patron" not in patron.get("roles", []): # without 'patron' role, we can't find any patron_type and so we # can't find any corresponding cipo --> return False return False, ["Patron doesn't have the correct role"] - library_pid = kwargs['library'].pid if kwargs.get('library') \ - else record.library_pid + library_pid = ( + kwargs["library"].pid if kwargs.get("library") else record.library_pid + ) if isinstance(record, Item): - record_circulation_category_pid = \ - record.item_type_circulation_category_pid + record_circulation_category_pid = record.item_type_circulation_category_pid elif isinstance(record, Holding): - record_circulation_category_pid = \ - record.circulation_category_pid + record_circulation_category_pid = record.circulation_category_pid else: - raise Exception(f'This resource cannot be \ - requested : {record.__class__.__name__}') + raise Exception( + "This resource cannot be requested: " + f"{record.__class__.__name__}" + ) cipo = cls.provide_circ_policy( - record.organisation_pid, - library_pid, - patron.patron_type_pid, - record_circulation_category_pid - ) - if not cipo.get('allow_requests', False): + record.organisation_pid, + library_pid, + patron.patron_type_pid, + record_circulation_category_pid, + ) + if not cipo.get("allow_requests", False): return False, ["Circulation policy disallows the operation."] return True, [] @@ -437,17 +460,18 @@ def allow_checkout(cls, item, **kwargs): # none patron get be load from kwargs argument. This check can't # be relevant --> return True by default return True, [] - if 'patron' not in patron.get('roles', []): + if "patron" not in patron.get("roles", []): # without 'patron' role, we can't find any patron_type and so we # can't find any corresponding cipo --> return False return False, ["Patron doesn't have the correct role"] - library_pid = kwargs['library'].pid if kwargs.get('library') \ - else item.library_pid + library_pid = ( + kwargs["library"].pid if kwargs.get("library") else item.library_pid + ) cipo = cls.provide_circ_policy( item.organisation_pid, library_pid, patron.patron_type_pid, - item.item_type_circulation_category_pid + item.item_type_circulation_category_pid, ) if not cipo.can_checkout: return False, ["Circulation policy disallows the operation."] @@ -464,4 +488,4 @@ def bulk_index(self, record_id_iterator): :param record_id_iterator: Iterator yielding record UUIDs. """ - super().bulk_index(record_id_iterator, doc_type='cipo') + super().bulk_index(record_id_iterator, doc_type="cipo") diff --git a/rero_ils/modules/circ_policies/extensions.py b/rero_ils/modules/circ_policies/extensions.py index ba8fdbe174..ee1722b0d0 100644 --- a/rero_ils/modules/circ_policies/extensions.py +++ b/rero_ils/modules/circ_policies/extensions.py @@ -32,9 +32,8 @@ def _pickup_hold_duration_check(self, record): :param record: the record to check. 
""" - if not record.get('allow_requests') \ - and 'pickup_hold_duration' in record: - del record['pickup_hold_duration'] + if not record.get("allow_requests") and "pickup_hold_duration" in record: + del record["pickup_hold_duration"] pre_commit = _pickup_hold_duration_check pre_create = _pickup_hold_duration_check diff --git a/rero_ils/modules/circ_policies/models.py b/rero_ils/modules/circ_policies/models.py index acf7ae9379..8962b70c2a 100644 --- a/rero_ils/modules/circ_policies/models.py +++ b/rero_ils/modules/circ_policies/models.py @@ -27,16 +27,17 @@ class CircPolicyIdentifier(RecordIdentifier): """Sequence generator for circultion policies identifiers.""" - __tablename__ = 'circ_policy_id' - __mapper_args__ = {'concrete': True} + __tablename__ = "circ_policy_id" + __mapper_args__ = {"concrete": True} recid = db.Column( - db.BigInteger().with_variant(db.Integer, 'sqlite'), - primary_key=True, autoincrement=True, + db.BigInteger().with_variant(db.Integer, "sqlite"), + primary_key=True, + autoincrement=True, ) class CircPolicyMetadata(db.Model, RecordMetadataBase): """CircPolicy record metadata.""" - __tablename__ = 'circ_policy_metadata' + __tablename__ = "circ_policy_metadata" diff --git a/rero_ils/modules/circ_policies/permissions.py b/rero_ils/modules/circ_policies/permissions.py index b3caefd209..6f0811b704 100644 --- a/rero_ils/modules/circ_policies/permissions.py +++ b/rero_ils/modules/circ_policies/permissions.py @@ -19,15 +19,18 @@ """Circulation policies permissions.""" from invenio_access import action_factory -from rero_ils.modules.permissions import AllowedByAction, \ - AllowedByActionRestrictByOrganisation, RecordPermissionPolicy +from rero_ils.modules.permissions import ( + AllowedByAction, + AllowedByActionRestrictByOrganisation, + RecordPermissionPolicy, +) -search_action = action_factory('cipo-search') -read_action = action_factory('cipo-read') -create_action = action_factory('cipo-create') -update_action = action_factory('cipo-update') -delete_action = action_factory('cipo-delete') -access_action = action_factory('cipo-access') +search_action = action_factory("cipo-search") +read_action = action_factory("cipo-read") +create_action = action_factory("cipo-create") +update_action = action_factory("cipo-update") +delete_action = action_factory("cipo-delete") +access_action = action_factory("cipo-access") class CirculationPolicyPermissionPolicy(RecordPermissionPolicy): diff --git a/rero_ils/modules/circ_policies/views.py b/rero_ils/modules/circ_policies/views.py index 09e897da3d..4b06455dd3 100644 --- a/rero_ils/modules/circ_policies/views.py +++ b/rero_ils/modules/circ_policies/views.py @@ -26,26 +26,20 @@ from ..patrons.api import current_librarian blueprint = Blueprint( - 'circ_policies', + "circ_policies", __name__, - template_folder='templates', - static_folder='static', + template_folder="templates", + static_folder="static", ) -@blueprint.route('/circ_policies/name/validate/', methods=["GET"]) +@blueprint.route("/circ_policies/name/validate/", methods=["GET"]) @check_logged_as_librarian def name_validate(name): """Circulation policy name validation.""" - response = { - 'name': None - } - circ_policy = CircPolicy.exist_name_and_organisation_pid( - name, - current_librarian.organisation.pid - ) - if circ_policy: - response = { - 'name': circ_policy.name - } + response = {"name": None} + if circ_policy := CircPolicy.exist_name_and_organisation_pid( + name, current_librarian.organisation.pid + ): + response = {"name": circ_policy.name} return jsonify(response) diff 
--git a/rero_ils/modules/cli/fixtures.py b/rero_ils/modules/cli/fixtures.py index 17342c559f..f2b8a96539 100644 --- a/rero_ils/modules/cli/fixtures.py +++ b/rero_ils/modules/cli/fixtures.py @@ -44,18 +44,31 @@ from ..ill_requests.cli import create_ill_requests from ..items.cli import create_items, reindex_items from ..loans.cli import create_loans, load_virtua_transactions -from ..operation_logs.cli import create_operation_logs, \ - destroy_operation_logs, dump_operation_logs +from ..operation_logs.cli import ( + create_operation_logs, + destroy_operation_logs, + dump_operation_logs, +) from ..patrons.cli import import_users from ..providers import append_fixtures_new_identifiers -from ..utils import JsonWriter, bulk_load_metadata, bulk_load_pids, \ - bulk_load_pidstore, bulk_save_metadata, bulk_save_pids, \ - bulk_save_pidstore, csv_metadata_line, csv_pidstore_line, \ - get_record_class_from_schema_or_pid_type, get_schema_for_resource, \ - number_records_in_file, read_json_record - -_datastore = LocalProxy(lambda: current_app.extensions['security'].datastore) -_records_state = LocalProxy(lambda: current_app.extensions['invenio-records']) +from ..utils import ( + JsonWriter, + bulk_load_metadata, + bulk_load_pids, + bulk_load_pidstore, + bulk_save_metadata, + bulk_save_pids, + bulk_save_pidstore, + csv_metadata_line, + csv_pidstore_line, + get_record_class_from_schema_or_pid_type, + get_schema_for_resource, + number_records_in_file, + read_json_record, +) + +_datastore = LocalProxy(lambda: current_app.extensions["security"].datastore) +_records_state = LocalProxy(lambda: current_app.extensions["invenio-records"]) @click.group() @@ -106,53 +119,64 @@ def load_system_role_policies(data): return True -@fixtures.command('import_role_policies') +@fixtures.command("import_role_policies") @with_appcontext -@click.argument('infile', type=click.File('r'), default=sys.stdin) +@click.argument("infile", type=click.File("r"), default=sys.stdin) def import_role_policies(infile): """Import the action roles policies. :param infile: Json file """ if load_role_policies(json.load(infile)): - click.secho('Success', fg='green') + click.secho("Success", fg="green") -@fixtures.command('import_system_role_policies') +@fixtures.command("import_system_role_policies") @with_appcontext -@click.argument('infile', type=click.File('r'), default=sys.stdin) +@click.argument("infile", type=click.File("r"), default=sys.stdin) def import_system_role_policies(infile): """Import the action system roles policies. 
:param infile: Json file """ if load_system_role_policies(json.load(infile)): - click.secho('Success', fg='green') - - -@fixtures.command('create') -@click.option('-u', '--create_or_update', 'create_or_update', is_flag=True, - default=False) -@click.option('-a', '--append', 'append', is_flag=True, default=False) -@click.option('-r', '--reindex', 'reindex', is_flag=True, default=False) -@click.option('-c', '--dbcommit', 'dbcommit', is_flag=True, default=False) -@click.option('-C', '--commit', 'commit', default=100000) -@click.option('-v', '--verbose/--no-verbose', 'verbose', - is_flag=True, default=True) -@click.option('-d', '--debug', 'debug', is_flag=True, default=False) -@click.option('-s', '--schema', 'schema', default=None) -@click.option('-t', '--pid_type', 'pid_type', default=None) -@click.option('-l', '--lazy', 'lazy', is_flag=True, default=False) -@click.option('-o', '--dont-stop', 'dont_stop_on_error', - is_flag=True, default=False) -@click.option('-P', '--pid-check', 'pid_check', - is_flag=True, default=False) -@click.option('-e', '--save_errors', 'save_errors', type=click.File('w')) -@click.argument('infile', type=click.File('r'), default=sys.stdin) + click.secho("Success", fg="green") + + +@fixtures.command("create") +@click.option( + "-u", "--create_or_update", "create_or_update", is_flag=True, default=False +) +@click.option("-a", "--append", "append", is_flag=True, default=False) +@click.option("-r", "--reindex", "reindex", is_flag=True, default=False) +@click.option("-c", "--dbcommit", "dbcommit", is_flag=True, default=False) +@click.option("-C", "--commit", "commit", default=100000) +@click.option("-v", "--verbose/--no-verbose", "verbose", is_flag=True, default=True) +@click.option("-d", "--debug", "debug", is_flag=True, default=False) +@click.option("-s", "--schema", "schema", default=None) +@click.option("-t", "--pid_type", "pid_type", default=None) +@click.option("-l", "--lazy", "lazy", is_flag=True, default=False) +@click.option("-o", "--dont-stop", "dont_stop_on_error", is_flag=True, default=False) +@click.option("-P", "--pid-check", "pid_check", is_flag=True, default=False) +@click.option("-e", "--save_errors", "save_errors", type=click.File("w")) +@click.argument("infile", type=click.File("r"), default=sys.stdin) @with_appcontext -def create(infile, create_or_update, append, reindex, dbcommit, commit, - verbose, debug, schema, pid_type, lazy, dont_stop_on_error, - pid_check, save_errors): +def create( + infile, + create_or_update, + append, + reindex, + dbcommit, + commit, + verbose, + debug, + schema, + pid_type, + lazy, + dont_stop_on_error, + pid_check, + save_errors, +): """Load REROILS record. 
:param infile: Json file @@ -168,81 +192,71 @@ def create(infile, create_or_update, append, reindex, dbcommit, commit, :param pidcheck: check pids :param save_errors: save error records to file """ - click.secho( - f'Loading {pid_type} records from {infile.name}.', - fg='green' - ) + click.secho(f"Loading {pid_type} records from {infile.name}.", fg="green") record_class = get_record_class_from_schema_or_pid_type(pid_type=pid_type) if save_errors: - errors = 0 name, ext = os.path.splitext(infile.name) - err_file_name = f'{name}_errors{ext}' + err_file_name = f"{name}_errors{ext}" error_file = JsonWriter(err_file_name) pids = [] - if lazy: - # try to lazy read json file (slower, better memory management) - records = read_json_record(infile) - else: - # load everything in memory (faster, bad memory management) - records = json.load(infile) + records = read_json_record(infile) if lazy else json.load(infile) count = 0 now = datetime.now(timezone.utc) - order_date = now.strftime('%Y-%m-%d') + order_date = now.strftime("%Y-%m-%d") year = str(now.year) for count, record in enumerate(records, 1): - if pid_type == 'budg' and not record.get('name'): + if pid_type == "budg" and not record.get("name"): # ensure a budget is created for the current year - record['name'] = year - record['start_date'] = f'{year}-01-01' - record['end_date'] = f'{year}-12-31' - elif pid_type == 'acol' and record.pop('send_now', None): + record["name"] = year + record["start_date"] = f"{year}-01-01" + record["end_date"] = f"{year}-12-31" + elif pid_type == "acol" and record.pop("send_now", None): # ensure all orders are sent - record['order_date'] = f'{order_date}' - elif pid_type == 'acrl' and record.pop('receive_now', None): + record["order_date"] = f"{order_date}" + elif pid_type == "acrl" and record.pop("receive_now", None): # ensure all receipt lines are received - record['receipt_date'] = f'{order_date}' + record["receipt_date"] = f"{order_date}" if schema: - record['$schema'] = schema + record["$schema"] = schema try: - pid = record.get('pid') - msg = 'created' + pid = record.get("pid") + msg = "created" db_record = record_class.get_record_by_pid(pid) if create_or_update and db_record: # case when record already exist in database db_record = record_class.get_record_by_pid(pid) - rec = db_record.update( - record, dbcommit=dbcommit, reindex=reindex) - msg = 'updated' - elif create_or_update and pid and not db_record \ - and record_class.record_pid_exists(pid): + rec = db_record.update(record, dbcommit=dbcommit, reindex=reindex) + msg = "updated" + elif ( + create_or_update + and pid + and not db_record + and record_class.record_pid_exists(pid) + ): # case when record not in db but pid is reserved presist_id = PersistentIdentifier.get( - record_class.provider.pid_type, pid) - rec = record_class.create( - record, dbcommit=dbcommit, reindex=reindex) + record_class.provider.pid_type, pid + ) + rec = record_class.create(record, dbcommit=dbcommit, reindex=reindex) if presist_id.status != PIDStatus.REGISTERED: presist_id.register() presist_id.assign(record_class.object_type, rec.id) - msg = 'created' + msg = "created" else: # case when record and pid are not in db rec = record_class.create( - record, dbcommit=dbcommit, reindex=reindex, - pidcheck=pid_check) + record, dbcommit=dbcommit, reindex=reindex, pidcheck=pid_check + ) if append: pids.append(rec.pid) if verbose: - click.echo( - f'{count: <8} {pid_type} {msg} {rec.pid}:{rec.id}') + click.echo(f"{count: <8} {pid_type} {msg} {rec.pid}:{rec.id}") except Exception as err: - 
pid = record.get('pid', '???') - click.secho( - f'{count: <8} {type} create error {pid}: {err}', - fg='red' - ) + pid = record.get("pid", "???") + click.secho(f"{count: <8} {pid_type} create error {pid}: {err}", fg="red") if debug: traceback.print_exc() @@ -253,30 +267,25 @@ def create(infile, create_or_update, append, reindex, dbcommit, commit, db.session.flush() if count > 0 and count % commit == 0: if verbose: - click.echo(f'DB commit: {count}') + click.echo(f"DB commit: {count}") db.session.commit() - click.echo(f'DB commit: {count}') + click.echo(f"DB commit: {count}") db.session.commit() if append: - click.secho(f'Append fixtures new identifiers: {len(pids)}') + click.secho(f"Append fixtures new identifiers: {len(pids)}") identifier = record_class.provider.identifier try: append_fixtures_new_identifiers( - identifier, - sorted(pids, key=lambda x: int(x)), - pid_type + identifier, sorted(pids, key=lambda x: int(x)), pid_type ) except Exception as err: - click.secho( - f'ERROR append fixtures new identifiers: {err}', - fg='red' - ) + click.secho(f"ERROR append fixtures new identifiers: {err}", fg="red") -@fixtures.command('count') -@click.option('-l', '--lazy', 'lazy', is_flag=True, default=False) -@click.argument('infile', type=click.File('r'), default=sys.stdin) +@fixtures.command("count") +@click.option("-l", "--lazy", "lazy", is_flag=True, default=False) +@click.argument("infile", type=click.File("r"), default=sys.stdin) def count_cli(infile, lazy): """Count records in file. @@ -284,127 +293,110 @@ def count_cli(infile, lazy): :param lazy: lazy reads file :return: count of records """ - click.secho( - f'Count records from {infile.name}.', - fg='green' - ) - if lazy: - # try to lazy read json file (slower, better memory management) - records = read_json_record(infile) - else: - # load everything in memory (faster, bad memory management) - records = json.load(infile) - count = 0 - for record in records: - count += 1 - click.echo(f'Count: {count}') - - -@fixtures.command('create_csv') -@click.argument('record_type') -@click.argument('json_file') -@click.argument('output_directory') -@click.option('-l', '--lazy', 'lazy', is_flag=True, default=False) -@click.option('-v', '--verbose', 'verbose', is_flag=True, default=False) -@click.option('-p', '--create_pid', 'create_pid', is_flag=True, default=False) + click.secho(f"Count records from {infile.name}.", fg="green") + records = read_json_record(infile) if lazy else json.load(infile) + count = sum(1 for _ in records) + click.echo(f"Count: {count}") + + +@fixtures.command("create_csv") +@click.argument("record_type") +@click.argument("json_file") +@click.argument("output_directory") +@click.option("-l", "--lazy", "lazy", is_flag=True, default=False) +@click.option("-v", "--verbose", "verbose", is_flag=True, default=False) +@click.option("-p", "--create_pid", "create_pid", is_flag=True, default=False) @with_appcontext -def create_csv(record_type, json_file, output_directory, lazy, verbose, - create_pid): +def create_csv(record_type, json_file, output_directory, lazy, verbose, create_pid): """Create csv files from json. :param verbose: Verbose. 
""" - click.secho( - f"Create CSV files for: {record_type} from: {json_file}", - fg='green' - ) + click.secho(f"Create CSV files for: {record_type} from: {json_file}", fg="green") - path = current_jsonschemas.url_to_path( - get_schema_for_resource(record_type) - ) + path = current_jsonschemas.url_to_path(get_schema_for_resource(record_type)) add_schema = get_schema_for_resource(record_type) schema = current_jsonschemas.get_schema(path=path) schema = _records_state.replace_refs(schema) count = 0 errors_count = 0 with open(json_file) as infile: - if lazy: - # try to lazy read json file (slower, better memory management) - records = read_json_record(infile) - else: - # load everything in memory (faster, bad memory management) - records = json.load(infile) - + records = read_json_record(infile) if lazy else json.load(infile) file_name_pidstore = os.path.join( - output_directory, f'{record_type}_pidstore.csv') - click.secho(f'\t{file_name_pidstore}', fg='green') - file_pidstore = open(file_name_pidstore, 'w') - file_name_metadata = os.path.join( - output_directory, f'{record_type}_metadata.csv' + output_directory, f"{record_type}_pidstore.csv" ) - click.secho(f'\t{file_name_metadata}', fg='green') - file_metadata = open(file_name_metadata, 'w') - file_name_pids = os.path.join( - output_directory, f'{record_type}_pids.csv') - click.secho(f'\t{file_name_pids}', fg='green') - file_pids = open(file_name_pids, 'w') - file_name_errors = os.path.join( - output_directory, f'{record_type}_errors.json') - file_errors = open(file_name_errors, 'w') - file_errors.write('[') - - for count, record in enumerate(records, 1): - pid = record.get('pid') - if create_pid: - pid = str(count) - record['pid'] = pid - uuid = str(uuid4()) - if verbose: - click.secho(f'{count}\t{record_type}\t{pid}:{uuid}') - date = str(datetime.utcnow()) - record['$schema'] = add_schema - try: - validate(record, schema) - file_metadata.write( - csv_metadata_line(record, uuid, date) - ) - file_pidstore.write( - csv_pidstore_line(record_type, pid, uuid, date) - ) - file_pids.write(pid + '\n') - except Exception as err: - click.secho( - f'{count}\t{record_type}: Error validate in record: ', - fg='red') - click.secho(str(err)) - if errors_count > 0: - file_errors.write(',') - errors_count += 1 - file_errors.write('\n') - for line in json.dumps(record, indent=2).split('\n'): - file_errors.write(' ' + line + '\n') - - file_pidstore.close() + click.secho(f"\t{file_name_pidstore}", fg="green") + with open(file_name_pidstore, "w") as file_pidstore: + file_name_metadata = os.path.join( + output_directory, f"{record_type}_metadata.csv" + ) + click.secho(f"\t{file_name_metadata}", fg="green") + file_metadata = open(file_name_metadata, "w") + file_name_pids = os.path.join(output_directory, f"{record_type}_pids.csv") + click.secho(f"\t{file_name_pids}", fg="green") + file_pids = open(file_name_pids, "w") + file_name_errors = os.path.join( + output_directory, f"{record_type}_errors.json" + ) + file_errors = open(file_name_errors, "w") + file_errors.write("[") + + for count, record in enumerate(records, 1): + pid = record.get("pid") + if create_pid: + pid = str(count) + record["pid"] = pid + uuid = str(uuid4()) + if verbose: + click.secho(f"{count}\t{record_type}\t{pid}:{uuid}") + date = str(datetime.utcnow()) + record["$schema"] = add_schema + try: + validate(record, schema) + file_metadata.write(csv_metadata_line(record, uuid, date)) + file_pidstore.write(csv_pidstore_line(record_type, pid, uuid, date)) + file_pids.write(pid + "\n") + except Exception 
as err: + click.secho( + f"{count}\t{record_type}: Error validate in record: ", fg="red" + ) + click.secho(str(err)) + if errors_count > 0: + file_errors.write(",") + errors_count += 1 + file_errors.write("\n") + for line in json.dumps(record, indent=2).split("\n"): + file_errors.write(f" {line}\n") + file_metadata.close() file_pids.close() - file_errors.write('\n]') + file_errors.write("\n]") file_errors.close() if errors_count == 0: os.remove(file_name_errors) - click.secho( - f'Created: {count-errors_count} Errors: {errors_count}', - fg='yellow' - ) - - -@fixtures.command('bulk_load') -@click.argument('record_type') -@click.argument('csv_metadata_file') -@click.option('-c', '--bulk_count', 'bulkcount', default=0, type=int, - help='Set the bulk load chunk size.') -@click.option('-r', '--reindex', 'reindex', help='add record to reindex.', - is_flag=True, default=False) -@click.option('-v', '--verbose', 'verbose', is_flag=True, default=False) + click.secho(f"Created: {count-errors_count} Errors: {errors_count}", fg="yellow") + + +@fixtures.command("bulk_load") +@click.argument("record_type") +@click.argument("csv_metadata_file") +@click.option( + "-c", + "--bulk_count", + "bulkcount", + default=0, + type=int, + help="Set the bulk load chunk size.", +) +@click.option( + "-r", + "--reindex", + "reindex", + help="add record to reindex.", + is_flag=True, + default=False, +) +@click.option("-v", "--verbose", "verbose", is_flag=True, default=False) @with_appcontext def bulk_load(record_type, csv_metadata_file, bulkcount, reindex, verbose): """Agency record management. @@ -417,34 +409,44 @@ def bulk_load(record_type, csv_metadata_file, bulkcount, reindex, verbose): if bulkcount > 0: bulk_count = bulkcount else: - bulk_count = current_app.config.get('BULK_CHUNK_COUNT', 100000) + bulk_count = current_app.config.get("BULK_CHUNK_COUNT", 100000) - message = f'Load {record_type} CSV files into database.' - click.secho(message, fg='green') + message = f"Load {record_type} CSV files into database." 
+ click.secho(message, fg="green") file_name_metadata = csv_metadata_file - file_name_pidstore = file_name_metadata.replace('metadata', 'pidstore') - file_name_pids = file_name_metadata.replace('metadata', 'pids') - - record_counts = number_records_in_file(file_name_pidstore, 'csv') - message = f' Number of records to load: {record_counts}' - click.secho(message, fg='green') - - click.secho(f' Load pids: {file_name_pids}') - bulk_load_pids(pid_type=record_type, ids=file_name_pids, - bulk_count=bulk_count, verbose=verbose) - click.secho(f' Load pidstore: {file_name_pidstore}') - bulk_load_pidstore(pid_type=record_type, pidstore=file_name_pidstore, - bulk_count=bulk_count, verbose=verbose) - click.secho(f' Load metatada: {file_name_metadata}') - bulk_load_metadata(pid_type=record_type, metadata=file_name_metadata, - bulk_count=bulk_count, verbose=verbose, reindex=reindex) - - -@fixtures.command('bulk_save') -@click.argument('output_directory') -@click.option('-t', '--pid_types', multiple=True, default=['all']) -@click.option('-d', '--deployment', 'deployment', is_flag=True, default=False) -@click.option('-v', '--verbose', 'verbose', is_flag=True, default=False) + file_name_pidstore = file_name_metadata.replace("metadata", "pidstore") + file_name_pids = file_name_metadata.replace("metadata", "pids") + + record_counts = number_records_in_file(file_name_pidstore, "csv") + message = f" Number of records to load: {record_counts}" + click.secho(message, fg="green") + + click.secho(f" Load pids: {file_name_pids}") + bulk_load_pids( + pid_type=record_type, ids=file_name_pids, bulk_count=bulk_count, verbose=verbose + ) + click.secho(f" Load pidstore: {file_name_pidstore}") + bulk_load_pidstore( + pid_type=record_type, + pidstore=file_name_pidstore, + bulk_count=bulk_count, + verbose=verbose, + ) + click.secho(f" Load metadata: {file_name_metadata}") + bulk_load_metadata( + pid_type=record_type, + metadata=file_name_metadata, + bulk_count=bulk_count, + verbose=verbose, + reindex=reindex, + ) + + +@fixtures.command("bulk_save") +@click.argument("output_directory") +@click.option("-t", "--pid_types", multiple=True, default=["all"]) +@click.option("-d", "--deployment", "deployment", is_flag=True, default=False) +@click.option("-v", "--verbose", "verbose", is_flag=True, default=False) @with_appcontext def bulk_save(pid_types, output_directory, deployment, verbose): """Record dump. :param pid_types: Record PID types. default=//all// :param verbose: Verbose. """ - file_name_tmp_pidstore = os.path.join( - output_directory, - 'tmp_pidstore.csv' - ) + file_name_tmp_pidstore = os.path.join(output_directory, "tmp_pidstore.csv") try: os.remove(file_name_tmp_pidstore) except OSError: pass all_pid_types = [] - endpoints = current_app.config.get('RECORDS_REST_ENDPOINTS') + endpoints = current_app.config.get("RECORDS_REST_ENDPOINTS") for endpoint in endpoints: all_pid_types.append(endpoint) - if pid_types[0] == 'all': + if pid_types[0] == "all": pid_types = all_pid_types for p_type in pid_types: if p_type not in all_pid_types: - click.secho( - f'Error {p_type} does not exist!', - fg='red' - ) + click.secho(f"Error {p_type} does not exist!", fg="red") continue # TODO: do we have to save loanid and how we can save it? 
- if p_type in ['loanid', 'oplg']: + if p_type in ["loanid", "oplg"]: continue click.secho( - f'Save {p_type} CSV files to directory: {output_directory}', - fg='green' + f"Save {p_type} CSV files to directory: {output_directory}", fg="green" ) - file_prefix = endpoints[p_type].get('search_index') - if p_type in ['doc', 'hold', 'item', 'count']: + file_prefix = endpoints[p_type].get("search_index") + if p_type in ["doc", "hold", "item", "count"]: if deployment: - file_prefix += '_big' + file_prefix += "_big" else: - file_prefix += '_small' + file_prefix += "_small" file_name_metadata = os.path.join( - output_directory, - f'{file_prefix}_metadata.csv' + output_directory, f"{file_prefix}_metadata.csv" ) - bulk_save_metadata(pid_type=p_type, file_name=file_name_metadata, - verbose=verbose) - file_name_pidstore = os.path.join( - output_directory, - f'{file_prefix}_pidstore.csv' + bulk_save_metadata( + pid_type=p_type, file_name=file_name_metadata, verbose=verbose ) - count = bulk_save_pidstore(pid_type=p_type, - file_name=file_name_pidstore, - file_name_tmp=file_name_tmp_pidstore, - verbose=verbose) - - file_name_pids = os.path.join( - output_directory, - f'{file_prefix}_pids.csv' + file_name_pidstore = os.path.join( + output_directory, f"{file_prefix}_pidstore.csv" ) - bulk_save_pids(pid_type=p_type, file_name=file_name_pids, - verbose=verbose) - click.secho( - f'Saved records: {count}', - fg='yellow' + count = bulk_save_pidstore( + pid_type=p_type, + file_name=file_name_pidstore, + file_name_tmp=file_name_tmp_pidstore, + verbose=verbose, ) + + file_name_pids = os.path.join(output_directory, f"{file_prefix}_pids.csv") + bulk_save_pids(pid_type=p_type, file_name=file_name_pids, verbose=verbose) + click.secho(f"Saved records: {count}", fg="yellow") try: os.remove(file_name_tmp_pidstore) except OSError: diff --git a/rero_ils/modules/cli/index.py b/rero_ils/modules/cli/index.py index 03568970db..e1ee93a4d5 100644 --- a/rero_ils/modules/cli/index.py +++ b/rero_ils/modules/cli/index.py @@ -25,7 +25,6 @@ import click import dateparser -from celery.messaging import establish_connection from flask import current_app from flask.cli import with_appcontext from invenio_pidstore.models import PersistentIdentifier, PIDStatus @@ -36,10 +35,10 @@ from jsonpatch import make_patch from kombu import Queue -from .utils import get_record_class_from_schema_or_pid_type from ..api import IlsRecordsIndexer from ..monitoring.api import Monitoring from ..tasks import process_bulk_queue +from .utils import get_record_class_from_schema_or_pid_type def abort_if_false(ctx, param, value): @@ -55,13 +54,12 @@ def connect_queue(connection, name): :returns: connected queue. 
""" if name: - exchange = current_app.config.get('INDEXER_MQ_EXCHANGE') + exchange = current_app.config.get("INDEXER_MQ_EXCHANGE") exchange = exchange(connection) queue = Queue(name, exchange=exchange, routing_key=name) else: - queue = current_app.config['INDEXER_MQ_QUEUE'] - connected_queue = queue(connection) - return connected_queue + queue = current_app.config["INDEXER_MQ_QUEUE"] + return queue(connection) @click.group() @@ -70,47 +68,63 @@ def index(): @index.command() -@click.option('--delayed', '-d', is_flag=True, - help='Run indexing in background.') -@click.option('--concurrency', '-c', default=1, type=int, - help='Number of concurrent indexing tasks to start.') -@click.option('--with_stats', is_flag=True, default=False, - help='report number of success and list failed error response.') -@click.option('--queue', '-q', type=str, default=None, - help='Name of the celery queue used to put the tasks into.') -@click.option('--version-type', help='Elasticsearch version type to use.') -@click.option('--raise-on-error/--skip-errors', default=True, - help='Controls if ES bulk indexing errors raise an exception.') +@click.option("--delayed", "-d", is_flag=True, help="Run indexing in background.") +@click.option( + "--concurrency", + "-c", + default=1, + type=int, + help="Number of concurrent indexing tasks to start.", +) +@click.option( + "--with_stats", + is_flag=True, + default=False, + help="report number of success and list failed error response.", +) +@click.option( + "--queue", + "-q", + type=str, + default=None, + help="Name of the celery queue used to put the tasks into.", +) +@click.option("--version-type", help="Elasticsearch version type to use.") +@click.option( + "--raise-on-error/--skip-errors", + default=True, + help="Controls if ES bulk indexing errors raise an exception.", +) @with_appcontext -def run(delayed, concurrency, with_stats, version_type=None, queue=None, - raise_on_error=True): +def run( + delayed, concurrency, with_stats, version_type=None, queue=None, raise_on_error=True +): """Run bulk record indexing.""" if delayed: click.secho( - f'Starting {concurrency} tasks for indexing records ' - f'({queue})...', - fg='green' + f"Starting {concurrency} tasks for indexing records " f"({queue})...", + fg="green", ) celery_kwargs = { - 'kwargs': { - 'version_type': version_type, - 'queue': queue, - 'search_bulk_kwargs': {'raise_on_error': raise_on_error}, - 'stats_only': not with_stats + "kwargs": { + "version_type": version_type, + "queue": queue, + "search_bulk_kwargs": {"raise_on_error": raise_on_error}, + "stats_only": not with_stats, } } - for _ in range(0, concurrency): + for _ in range(concurrency): process_id = process_bulk_queue.apply_async(**celery_kwargs) - click.secho(f'Index async ({queue}): {process_id}', fg='yellow') + click.secho(f"Index async ({queue}): {process_id}", fg="yellow") else: if queue: - click.secho(f'Indexing records ({queue})...', fg='green') + click.secho(f"Indexing records ({queue})...", fg="green") else: - click.secho(f'Indexing records ...', fg='green') + click.secho("Indexing records ...", fg="green") connected_queue = None if queue: - connection = establish_connection() + connection = current_app.extensions["invenio-celery"].celery.connection() connected_queue = connect_queue(connection, queue) indexer = IlsRecordsIndexer( version_type=version_type, @@ -118,22 +132,25 @@ def run(delayed, concurrency, with_stats, version_type=None, queue=None, routing_key=queue, ) name, count = indexer.process_bulk_queue( - 
search_bulk_kwargs={'raise_on_error': raise_on_error}, - stats_only=(not with_stats) - ) - click.secho( - f'"{name}" indexed: {count[0]} error: {count[1]}', fg='yellow') + search_bulk_kwargs={"raise_on_error": raise_on_error}, + stats_only=(not with_stats), + ) + click.secho(f'"{name}" indexed: {count[0]} error: {count[1]}', fg="yellow") @index.command() -@click.option('--yes-i-know', is_flag=True, callback=abort_if_false, - expose_value=False, - prompt='Do you really want to reindex records?') -@click.option('-t', '--pid-types', multiple=True) -@click.option('-f', '--from_date', 'from_date') -@click.option('-u', '--until_date', 'until_date') -@click.option('-d', '--direct', 'direct', is_flag=True, default=False) -@click.option('-q', '--queue', 'queue', default='indexer') +@click.option( + "--yes-i-know", + is_flag=True, + callback=abort_if_false, + expose_value=False, + prompt="Do you really want to reindex records?", +) +@click.option("-t", "--pid-types", multiple=True) +@click.option("-f", "--from_date", "from_date") +@click.option("-u", "--until_date", "until_date") +@click.option("-d", "--direct", "direct", is_flag=True, default=False) +@click.option("-q", "--queue", "queue", default="indexer") @with_appcontext def reindex(pid_types, from_date, until_date, direct, queue): """Reindex records. @@ -144,72 +161,74 @@ def reindex(pid_types, from_date, until_date, direct, queue): :param direct: Use record class for indexing. :param queue: Queue name to use. """ - endpoints = current_app.config.get('RECORDS_REST_ENDPOINTS') + endpoints = current_app.config.get("RECORDS_REST_ENDPOINTS") if not pid_types: pid_types = list(endpoints) for pid_type in pid_types: if pid_type in endpoints: - msg = f'Sending {pid_type} to indexing queue ' - msg += f'({queue}): ' if queue else ': ' + msg = f"Sending {pid_type} to indexing queue " + msg += f"({queue}): " if queue else ": " if direct: - msg = f'Indexing {pid_type}: ' - click.secho(msg, fg='green', nl=False) + msg = f"Indexing {pid_type}: " + click.secho(msg, fg="green", nl=False) query = None - record_cls = obj_or_import_string( - endpoints[pid_type].get('record_class')) + record_cls = obj_or_import_string(endpoints[pid_type].get("record_class")) if from_date or until_date: model_cls = record_cls.model_cls if model_cls != RecordMetadata: - query = model_cls.query \ - .filter(model_cls.is_deleted.is_(False)) \ - .with_entities(model_cls.id) \ + query = ( + model_cls.query.filter(model_cls.is_deleted.is_(False)) + .with_entities(model_cls.id) .order_by(model_cls.created) + ) if from_date: query = query.filter( - model_cls.updated > dateparser.parse(from_date)) + model_cls.updated > dateparser.parse(from_date) + ) if until_date: query = query.filter( - model_cls.updated <= dateparser.parse(until_date)) + model_cls.updated <= dateparser.parse(until_date) + ) else: - query = PersistentIdentifier.query \ - .filter_by(object_type='rec', status=PIDStatus.REGISTERED)\ - .filter_by(pid_type=pid_type) \ + query = ( + PersistentIdentifier.query.filter_by( + object_type="rec", status=PIDStatus.REGISTERED + ) + .filter_by(pid_type=pid_type) .with_entities(PersistentIdentifier.object_uuid) + ) if query: - click.echo(f'{query.count()}') + click.echo(f"{query.count()}") if direct: for idx, id in enumerate((x[0] for x in query), 1): - msg = f'{idx}\t{id}\t' + msg = f"{idx}\t{id}\t" try: rec = record_cls.get_record(id) - msg += f'{rec.pid}' + msg += f"{rec.pid}" rec.reindex() except Exception as err: - msg += f'\t{err}' + msg += f"\t{err}" click.echo(msg) else: - 
exchange = current_app.config.get('INDEXER_MQ_EXCHANGE') - simple_queue = Queue( - queue, exchange=exchange, routing_key=queue) - indxer = IlsRecordsIndexer( - queue=simple_queue, routing_key=queue) - indxer.bulk_index( - (x[0] for x in query), doc_type=pid_type) + exchange = current_app.config.get("INDEXER_MQ_EXCHANGE") + simple_queue = Queue(queue, exchange=exchange, routing_key=queue) + indexer = IlsRecordsIndexer(queue=simple_queue, routing_key=queue) + indexer.bulk_index((x[0] for x in query), doc_type=pid_type) else: - click.echo('Can not index by date.') + click.echo("Cannot index by date.") else: - click.secho(f'ERROR type does not exist: {pid_type}', fg='red') + click.secho(f"ERROR type does not exist: {pid_type}", fg="red") if not direct: msg = 'Execute "invenio reroils index run' - if queue != 'indexer': - msg = f'{msg} -q {queue}' + if queue != "indexer": + msg = f"{msg} -q {queue}" msg = f'{msg}" command to process the queue!' - click.secho(msg, fg='yellow') + click.secho(msg, fg="yellow") -@index.command('reindex_missing') -@click.option('-t', '--pid-types', multiple=True, required=True) -@click.option('-v', '--verbose', 'verbose', is_flag=True, default=False) +@index.command("reindex_missing") +@click.option("-t", "--pid-types", multiple=True, required=True) +@click.option("-v", "--verbose", "verbose", is_flag=True, default=False) @with_appcontext def reindex_missing(pid_types, verbose): """Index all missing records. @@ -217,88 +236,83 @@ def reindex_missing(pid_types, verbose): :param pid_type: Pid type. """ for p_type in pid_types: - click.secho( - f'Indexing missing {p_type}: ', - fg='green', - nl=False - ) - record_class = get_record_class_from_schema_or_pid_type( - pid_type=p_type - ) + click.secho(f"Indexing missing {p_type}: ", fg="green", nl=False) + record_class = get_record_class_from_schema_or_pid_type(pid_type=p_type) if not record_class: click.secho( - 'ERROR pid type does not exist!', - fg='red', + "ERROR pid type does not exist!", + fg="red", ) continue monitoring = Monitoring(time_delta=0) - pids_es, pids_db, pids_es_double, index = \ - monitoring.get_es_db_missing_pids(p_type) + pids_es, pids_db, pids_es_double, index = monitoring.get_es_db_missing_pids( + p_type + ) click.secho( - f'{len(pids_db)}', - fg='green', + f"{len(pids_db)}", + fg="green", ) for idx, pid in enumerate(pids_db, 1): record = record_class.get_record_by_pid(pid) if record: record.reindex() if verbose: - click.secho(f'{idx}\t{p_type}\t{pid}') + click.secho(f"{idx}\t{p_type}\t{pid}") else: if verbose: - click.secho(f'NOT FOUND: {idx}\t{p_type}\t{pid}', fg='red') + click.secho(f"NOT FOUND: {idx}\t{p_type}\t{pid}", fg="red") @index.command() -@click.option('--force', is_flag=True, default=False) +@click.option("--force", is_flag=True, default=False) @with_appcontext @search_version_check def init(force): """Initialize registered templates, aliases and mappings.""" # TODO: to remove once it is fixed in invenio-search module - click.secho('Putting templates...', fg='green', bold=True, file=sys.stderr) + click.secho("Putting templates...", fg="green", bold=True, file=sys.stderr) with click.progressbar( - current_search.put_templates(ignore=[400] if force else None), - length=len(current_search.templates)) as bar: + current_search.put_templates(ignore=[400] if force else None), + length=len(current_search.templates), + ) as bar: for response in bar: bar.label = response - click.secho('Creating indexes...', fg='green', bold=True, file=sys.stderr) + click.secho("Creating indexes...", fg="green", 
bold=True, file=sys.stderr) with click.progressbar( - current_search.create(ignore=[400] if force else None), - length=len(current_search.mappings)) as bar: + current_search.create(ignore=[400] if force else None), + length=len(current_search.mappings), + ) as bar: for name, response in bar: bar.label = name -@index.command('switch_index') +@index.command("switch_index") @with_appcontext @search_version_check -@click.argument('old') -@click.argument('new') +@click.argument("old") +@click.argument("new") def switch_index(old, new): """Switch index using the elasticsearch aliases. :param old: full name of the old index :param new: full name of the fresh created index """ - aliases = current_search_client.indices.get_alias().get(old)\ - .get('aliases').keys() + aliases = current_search_client.indices.get_alias().get(old).get("aliases").keys() for alias in aliases: current_search_client.indices.put_alias(new, alias) current_search_client.indices.delete_alias(old, alias) - click.secho('Sucessfully switched.', fg='green') + click.secho("Successfully switched.", fg="green") -@index.command('create_index') +@index.command("create_index") @with_appcontext @search_version_check @click.option( - '-t', '--templates/--no-templates', 'templates', is_flag=True, - default=True) -@click.option( - '-v', '--verbose/--no-verbose', 'verbose', is_flag=True, default=False) -@click.argument('resource') -@click.argument('index') + "-t", "--templates/--no-templates", "templates", is_flag=True, default=True +) +@click.option("-v", "--verbose/--no-verbose", "verbose", is_flag=True, default=False) +@click.argument("resource") +@click.argument("index") def create_index(resource, index, verbose, templates): """Create a new index based on the mapping of a given resource. @@ -310,28 +324,25 @@ def create_index(resource, index, verbose, templates): if templates: tbody = current_search_client.indices.get_template() for tmpl in current_search.put_templates(): - click.secho(f'file:{tmpl[0]}, ok: {tmpl[1]}', fg='green') + click.secho(f"file:{tmpl[0]}, ok: {tmpl[1]}", fg="green") new_tbody = current_search_client.indices.get_template() - patch = make_patch(new_tbody, tbody) - if patch: - click.secho('Templates are updated.', fg='green') + if patch := make_patch(new_tbody, tbody): + click.secho("Templates are updated.", fg="green") if verbose: - click.secho('Diff in templates', fg='green') + click.secho("Diff in templates", fg="green") click.echo(patch) else: - click.secho('Templates did not changed.', fg='yellow') + click.secho("Templates did not change.", fg="yellow") - f_mapping = [ - v for v in current_search.aliases.get(resource).values()].pop() + f_mapping = list(current_search.aliases.get(resource).values()).pop() mapping = json.load(open(f_mapping)) current_search_client.indices.create(index, mapping) - click.secho(f'Index {index} has been created.', fg='green') + click.secho(f"Index {index} has been created.", fg="green") -@index.command('update_mapping') -@click.option('--aliases', '-a', multiple=True, help='all if not specified') -@click.option( - '-s', '--settings/--no-settings', 'settings', is_flag=True, default=False) +@index.command("update_mapping") +@click.option("--aliases", "-a", multiple=True, help="all if not specified") +@click.option("-s", "--settings/--no-settings", "settings", is_flag=True, default=False) @with_appcontext @search_version_check def update_mapping(aliases, settings): @@ -339,27 +350,24 @@ def update_mapping(aliases, settings): if not aliases: aliases = current_search.aliases.keys() for 
alias in aliases: - for index, f_mapping in iter( - current_search.aliases.get(alias).items() - ): + for index, f_mapping in iter(current_search.aliases.get(alias).items()): mapping = json.load(open(f_mapping)) try: - if mapping.get('settings') and settings: + if mapping.get("settings") and settings: current_search_client.indices.close(index=index) current_search_client.indices.put_settings( - body=mapping.get('settings'), index=index) + body=mapping.get("settings"), index=index + ) current_search_client.indices.open(index=index) res = current_search_client.indices.put_mapping( - body=mapping.get('mappings'), index=index) + body=mapping.get("mappings"), index=index + ) except Exception as excep: - click.secho( - f'error: {excep}', fg='red') - if res.get('acknowledged'): - click.secho( - f'index: {index} has been sucessfully updated', fg='green') + click.secho(f"error: {excep}", fg="red") + if res.get("acknowledged"): + click.secho(f"index: {index} has been successfully updated", fg="green") else: - click.secho( - f'error: {res}', fg='red') + click.secho(f"error: {res}", fg="red") @index.group() @@ -367,46 +375,40 @@ def queue(): """Manage indexing queue.""" -@queue.command('init') -@click.option('-n', '--name', default=None) +@queue.command("init") +@click.option("-n", "--name", default=None) @with_appcontext def init_queue(name): """Initialize indexing queue. :papram name: Name of queue. """ - with establish_connection() as connection: + with current_app.extensions["invenio-celery"].celery.connection() as connection: queue = connect_queue(connection, name) result = queue.declare() - click.secho( - f'Queue has been initialized: {result}', - fg='green' - ) + click.secho(f"Queue has been initialized: {result}", fg="green") -@queue.command('purge') -@click.option('-n', '--name', default=None) +@queue.command("purge") +@click.option("-n", "--name", default=None) @with_appcontext def purge_queue(name): """Purge indexing queue. :papram name: Name of queue. """ - with establish_connection() as connection: + with current_app.extensions["invenio-celery"].celery.connection() as connection: queue = connect_queue(connection, name) try: result = queue.purge() except Exception as err: result = err - click.secho( - f'Queue has been purged: {queue.name} {result}', - fg='green' - ) + click.secho(f"Queue has been purged: {queue.name} {result}", fg="green") -@queue.command('delete') -@click.option('-n', '--name', default=None) -@click.option('-f', '--force', is_flag=True, default=True) +@queue.command("delete") +@click.option("-n", "--name", default=None) +@click.option("-f", "--force", is_flag=True, default=True) @with_appcontext def delete_queue(name, force): """Delete indexing queue. @@ -414,14 +416,11 @@ def delete_queue(name, force): :papram name: Name of queue. :papram force: Force delete the queue. 
""" - with establish_connection() as connection: + with current_app.extensions["invenio-celery"].celery.connection() as connection: queue = connect_queue(connection, name) - error = '' + error = "" try: queue.delete(if_empty=force) except Exception as err: error = err - click.secho( - f'Queue has been deleted: {queue.name} {error}', - fg='green' - ) + click.secho(f"Queue has been deleted: {queue.name} {error}", fg="green") diff --git a/rero_ils/modules/cli/utils.py b/rero_ils/modules/cli/utils.py index 1046107f54..ed16198565 100644 --- a/rero_ils/modules/cli/utils.py +++ b/rero_ils/modules/cli/utils.py @@ -18,8 +18,10 @@ """Click command-line utilities.""" + from __future__ import absolute_import, print_function +import contextlib import difflib import itertools import json @@ -63,18 +65,27 @@ from rero_ils.modules.files.cli import load_files from rero_ils.modules.items.api import Item from rero_ils.modules.libraries.api import Library -from rero_ils.modules.loans.tasks import \ - delete_loans_created as task_delete_loans_created +from rero_ils.modules.loans.tasks import ( + delete_loans_created as task_delete_loans_created, +) from rero_ils.modules.local_fields.api import LocalField from rero_ils.modules.locations.api import Location from rero_ils.modules.patrons.cli import users_validate -from rero_ils.modules.selfcheck.cli import create_terminal, list_terminal, \ - update_terminal -from rero_ils.modules.utils import JsonWriter, extracted_data_from_ref, \ - get_record_class_from_schema_or_pid_type, get_schema_for_resource, \ - read_json_record, read_xml_record +from rero_ils.modules.selfcheck.cli import ( + create_terminal, + list_terminal, + update_terminal, +) +from rero_ils.modules.utils import ( + JsonWriter, + extracted_data_from_ref, + get_record_class_from_schema_or_pid_type, + get_schema_for_resource, + read_json_record, + read_xml_record, +) -_records_state = LocalProxy(lambda: current_app.extensions['invenio-records']) +_records_state = LocalProxy(lambda: current_app.extensions["invenio-records"]) def queue_count(): @@ -93,20 +104,14 @@ def queue_count(): def wait_empty_tasks(delay, verbose=False): """Wait for tasks to be empty.""" if verbose: - spinner = itertools.cycle(['-', '\\', '|', '/']) - click.echo( - f'Waiting: {next(spinner)}\r', - nl=False - ) + spinner = itertools.cycle(["-", "\\", "|", "/"]) + click.echo(f"Waiting: {next(spinner)}\r", nl=False) count = queue_count() sleep(5) count += queue_count() while count: if verbose: - click.echo( - f'Waiting: {next(spinner)}\r', - nl=False - ) + click.echo(f"Waiting: {next(spinner)}\r", nl=False) sleep(delay) count = queue_count() sleep(5) @@ -125,87 +130,96 @@ def utils(): utils.add_command(update_terminal) -@utils.command('wait_empty_tasks') -@click.option('-d', '--delay', 'delay', default=3) +@utils.command("wait_empty_tasks") +@click.option("-d", "--delay", "delay", default=3) @with_appcontext def wait_empty_tasks_cli(delay): """Wait for tasks to be empty.""" wait_empty_tasks(delay=delay, verbose=True) - click.secho('No active celery tasks.', fg='green') + click.secho("No active celery tasks.", fg="green") -@utils.command('show') -@click.argument('pid_value', nargs=1) -@click.option('-t', '--pid-type', 'pid-type, default(document_id)', - default='document_id') +@utils.command("show") +@click.argument("pid_value", nargs=1) +@click.option( + "-t", "--pid-type", "pid-type, default(document_id)", default="document_id" +) @with_appcontext def show(pid_value, pid_type): """Show records.""" - record = 
PersistentIdentifier.query.filter_by(pid_type=pid_type, - pid_value=pid_value).first() + record = PersistentIdentifier.query.filter_by( + pid_type=pid_type, pid_value=pid_value + ).first() recitem = Record.get_record(record.object_uuid) click.echo(json.dumps(recitem.dumps(), indent=2)) -@utils.command('check_json') -@click.argument('paths', nargs=-1) +@utils.command("check_json") +@click.argument("paths", nargs=-1) @click.option( - '-r', '--replace', 'replace', is_flag=True, default=False, - help='change file in place default=False' + "-r", + "--replace", + "replace", + is_flag=True, + default=False, + help="change file in place default=False", ) @click.option( - '-s', '--sort-keys', 'sort_keys', is_flag=True, default=False, - help='order keys during replacement default=False' + "-s", + "--sort-keys", + "sort_keys", + is_flag=True, + default=False, + help="order keys during replacement default=False", ) @click.option( - '-i', '--indent', 'indent', type=click.INT, default=2, - help='indent default=2' + "-i", "--indent", "indent", type=click.INT, default=2, help="indent default=2" ) -@click.option('-v', '--verbose', 'verbose', is_flag=True, default=False) +@click.option("-v", "--verbose", "verbose", is_flag=True, default=False) def check_json(paths, replace, indent, sort_keys, verbose): """Check json files.""" - click.secho('Testing JSON indentation.', fg='green') + click.secho("Testing JSON indentation.", fg="green") files_list = [] for path in paths: if os.path.isfile(path): files_list.append(path) elif os.path.isdir(path): - files_list = files_list + glob(os.path.join(path, '**/*.json'), - recursive=True) + files_list = files_list + glob( + os.path.join(path, "**/*.json"), recursive=True + ) if not paths: - files_list = glob('**/*.json', recursive=True) + files_list = glob("**/*.json", recursive=True) tot_error_cnt = 0 for path_file in files_list: error_cnt = 0 try: fname = path_file - with open(fname, 'r') as opened_file: + with open(fname, "r") as opened_file: json_orig = opened_file.read().rstrip() opened_file.seek(0) - json_file = json.load(opened_file, - object_pairs_hook=OrderedDict) + json_file = json.load(opened_file, object_pairs_hook=OrderedDict) json_dump = json.dumps(json_file, indent=indent).rstrip() if json_dump != json_orig: error_cnt = 1 if replace: - with open(fname, 'w') as opened_file: - opened_file.write(json.dumps(json_file, - indent=indent, - sort_keys=sort_keys)) - click.echo(fname + ': ', nl=False) - click.secho('File replaced', fg='yellow') + with open(fname, "w") as opened_file: + opened_file.write( + json.dumps(json_file, indent=indent, sort_keys=sort_keys) + ) + click.echo(fname + ": ", nl=False) + click.secho("File replaced", fg="yellow") else: if error_cnt == 0: if verbose: - click.echo(fname + ': ', nl=False) - click.secho('Well indented', fg='green') + click.echo(fname + ": ", nl=False) + click.secho("Well indented", fg="green") else: - click.echo(fname + ': ', nl=False) - click.secho('Bad indentation', fg='red') + click.echo(fname + ": ", nl=False) + click.secho("Bad indentation", fg="red") except ValueError as error: - click.echo(fname + ': ', nl=False) - click.secho('Invalid JSON', fg='red', nl=False) - click.echo(f' -- {error}') + click.echo(fname + ": ", nl=False) + click.secho("Invalid JSON", fg="red", nl=False) + click.echo(f" -- {error}") error_cnt = 1 tot_error_cnt += error_cnt @@ -213,20 +227,20 @@ def check_json(paths, replace, indent, sort_keys, verbose): sys.exit(tot_error_cnt) -@utils.command('schedules') +@utils.command("schedules") 
@with_appcontext def schedules(): """List harvesting schedules.""" - celery_ext = current_app.extensions.get('invenio-celery') + celery_ext = current_app.extensions.get("invenio-celery") for key, value in celery_ext.celery.conf.beat_schedule.items(): - click.echo(key + '\t', nl=False) + click.echo(key + "\t", nl=False) click.echo(value) @utils.command() -@click.argument('infile', type=click.File('r'), default=sys.stdin) -@click.option('-v', '--verbose', 'verbose', is_flag=True, default=False) -@click.option('-d', '--debug', 'debug', is_flag=True, default=False) +@click.argument("infile", type=click.File("r"), default=sys.stdin) +@click.option("-v", "--verbose", "verbose", is_flag=True, default=False) +@click.option("-d", "--debug", "debug", is_flag=True, default=False) @with_appcontext def validate_documents_with_items_lofis(infile, verbose, debug): """Validate REROILS records with items. @@ -235,104 +249,97 @@ def validate_documents_with_items_lofis(infile, verbose, debug): :param verbose: verbose print :param debug: print traceback """ + def print_error(err, record_type, debug): """Print error.""" if debug: trace = traceback.format_exc(1) else: - trace = "\n".join(traceback.format_exc(1).split('\n')[:6]) - click.secho( - f'Error {err.args[0]} in {record_type}:\n{trace}', - fg='red' - ) + trace = "\n".join(traceback.format_exc(1).split("\n")[:6]) + click.secho(f"Error {err.args[0]} in {record_type}:\n{trace}", fg="red") - def validate_lofi(idx, local_field, lofi_schema, verbose, debug, - local_field_errors): + def validate_lofi( + idx, local_field, lofi_schema, verbose, debug, local_field_errors + ): """Validate local fields.""" if verbose: - click.echo(f'\t{idx:<4} local_field validate') + click.echo(f"\t{idx:<4} local_field validate") try: - if not local_field.get('pid'): - local_field['pid'] = f'dummy_{count}' + if not local_field.get("pid"): + local_field["pid"] = f"dummy_{count}" validate(local_field, lofi_schema) except Exception as err: local_field_errors += 1 - print_error(err, 'local field', debug) + print_error(err, "local field", debug) return local_field_errors def add_org_lib_doc(item): """Add organisation, library and document to item for validation.""" - item['pid'] = 'dummy' - if not item.get('location'): + item["pid"] = "dummy" + if not item.get("location"): raise ValueError('No "location" in item') - location_pid = extracted_data_from_ref(item.get('location')) + location_pid = extracted_data_from_ref(item.get("location")) location = Location.get_record_by_pid(location_pid) if not location: - raise ValueError(f'Location not found: {location_pid}') + raise ValueError(f"Location not found: {location_pid}") library = Library.get_record_by_pid(location.library_pid) if not library: - raise ValueError(f'Library not found: {location.library_pid}') - item['organisation'] = library.get('organisation') - item['library'] = location.get('library') - item['document']['$ref'] = item[ - 'document']['$ref'].replace('{document_pid}', 'dummy_1') + raise ValueError(f"Library not found: {location.library_pid}") + item["organisation"] = library.get("organisation") + item["library"] = location.get("library") + item["document"]["$ref"] = item["document"]["$ref"].replace( + "{document_pid}", "dummy_1" + ) return item - click.secho( - f'Validate documents items lofis from {infile.name}.', - fg='green' - ) + click.secho(f"Validate documents items lofis from {infile.name}.", fg="green") # document schema - schema_path = current_jsonschemas.url_to_path( - get_schema_for_resource('doc')) + 
schema_path = current_jsonschemas.url_to_path(get_schema_for_resource("doc")) schema = current_jsonschemas.get_schema(path=schema_path) doc_schema = _records_state.replace_refs(schema) # item schema - schema_path = current_jsonschemas.url_to_path( - get_schema_for_resource('item')) + schema_path = current_jsonschemas.url_to_path(get_schema_for_resource("item")) schema = current_jsonschemas.get_schema(path=schema_path) item_schema = _records_state.replace_refs(schema) # local field schema - schema_path = current_jsonschemas.url_to_path( - get_schema_for_resource('lofi')) + schema_path = current_jsonschemas.url_to_path(get_schema_for_resource("lofi")) schema = current_jsonschemas.get_schema(path=schema_path) lofi_schema = _records_state.replace_refs(schema) - doc_pid = next( - DocumentsSearch().filter('match_all').source('pid').scan()).pid + doc_pid = next(DocumentsSearch().filter("match_all").source("pid").scan()).pid document_errors = 0 item_errors = 0 local_field_errors = 0 # Documents for count, record in enumerate(read_json_record(infile), 1): - items = record.pop('items', []) - local_field_docs = record.pop('local_fields', []) - if pid := record.get('pid'): + items = record.pop("items", []) + local_field_docs = record.pop("local_fields", []) + if pid := record.get("pid"): if verbose: - click.echo(f'{count: <8} document use {doc_pid}') + click.echo(f"{count: <8} document use {doc_pid}") else: - record['pid'] = f'dummy_{count}' - pid = record.get('pid') + record["pid"] = f"dummy_{count}" + pid = record.get("pid") if verbose: - click.echo(f'{count: <7} document validate {pid}') + click.echo(f"{count: <7} document validate {pid}") try: validate(record, doc_schema) except ValidationError as err: document_errors += 1 - print_error(err, 'document', debug) + print_error(err, "document", debug) for idx, item in enumerate(items, 1): - local_field_items = item.pop('local_fields', []) - if item_pid := item.get('pid'): + local_field_items = item.pop("local_fields", []) + if item_pid := item.get("pid"): if verbose: - click.echo(f'\t{idx:<4} item use {item_pid}') + click.echo(f"\t{idx:<4} item use {item_pid}") else: if verbose: - click.echo(f'\t{idx:<4} item validate') + click.echo(f"\t{idx:<4} item validate") try: validate(add_org_lib_doc(item), item_schema) except Exception as err: item_errors += 1 - print_error(err, 'item', debug) + print_error(err, "item", debug) # Local fields for items for idx, local_field in enumerate(local_field_items, 1): local_field_errors = validate_lofi( @@ -341,7 +348,7 @@ def add_org_lib_doc(item): lofi_schema=lofi_schema, verbose=verbose, debug=debug, - local_field_errors=local_field_errors + local_field_errors=local_field_errors, ) # Local fields for documents for idx, local_field in enumerate(local_field_docs, 1): @@ -351,31 +358,30 @@ def add_org_lib_doc(item): lofi_schema=lofi_schema, verbose=verbose, debug=debug, - local_field_errors=local_field_errors + local_field_errors=local_field_errors, ) - color = 'green' + color = "green" if document_errors or item_errors: - color = 'red' + color = "red" click.secho( - f'Errors documents: {document_errors} ' - f'items: {item_errors} ' - f'local fields: {local_field_errors}', - fg=color + f"Errors documents: {document_errors} " + f"items: {item_errors} " + f"local fields: {local_field_errors}", + fg=color, ) @utils.command() -@click.argument('infile', type=click.File('r'), default=sys.stdin) -@click.option('-o', '--dont-stop', 'dont_stop_on_error', - is_flag=True, default=False) -@click.option('-e', '--save_errors', 
'save_errors', is_flag=True, - default=False) -@click.option('-c', '--commit', 'commit', is_flag=True, default=False) -@click.option('-v', '--verbose', 'verbose', is_flag=True, default=False) -@click.option('-d', '--debug', 'debug', is_flag=True, default=False) +@click.argument("infile", type=click.File("r"), default=sys.stdin) +@click.option("-o", "--dont-stop", "dont_stop_on_error", is_flag=True, default=False) +@click.option("-e", "--save_errors", "save_errors", is_flag=True, default=False) +@click.option("-c", "--commit", "commit", is_flag=True, default=False) +@click.option("-v", "--verbose", "verbose", is_flag=True, default=False) +@click.option("-d", "--debug", "debug", is_flag=True, default=False) @with_appcontext -def create_documents_with_items_lofis(infile, dont_stop_on_error, - save_errors, commit, verbose, debug): +def create_documents_with_items_lofis( + infile, dont_stop_on_error, save_errors, commit, verbose, debug +): """Load REROILS record with items. :param infile: Json file @@ -387,39 +393,45 @@ def create_documents_with_items_lofis(infile, dont_stop_on_error, :param debug: print traceback """ - def create_lofi(count, local_field, parent_pid, file_lofi, commit, debug, - save_errors, error_file_lofi, dont_stop_on_error, counts): + def create_lofi( + count, + local_field, + parent_pid, + file_lofi, + commit, + debug, + save_errors, + error_file_lofi, + dont_stop_on_error, + counts, + ): """Create local field.""" try: # change the parent pid # "parent": { # "$ref": # "https://bib.rero.ch/api/documents/{parent_pid}"} - local_field['parent']['$ref'] = local_field['parent'][ - '$ref'].format(parent_pid=parent_pid) + local_field["parent"]["$ref"] = local_field["parent"]["$ref"].format( + parent_pid=parent_pid + ) local_field_rec = LocalField.create( - data=local_field, - delete_pid=True, - dbcommit=commit, - reindex=commit + data=local_field, delete_pid=True, dbcommit=commit, reindex=commit ) - counts['created']['lofis'] += 1 + counts["created"]["lofis"] += 1 file_lofi.write(local_field_rec) if verbose: - parent = ' '.join( - local_field['parent']['$ref'].split('/')[-2:]) + parent = " ".join(local_field["parent"]["$ref"].split("/")[-2:]) click.echo( - f'\t - {count: <8}' - ' local field created ' - f' {local_field_rec.pid} : {local_field_rec.id}' - f' parent: {parent}' + f"\t - {count: <8}" + " local field created " + f" {local_field_rec.pid} : {local_field_rec.id}" + f" parent: {parent}" ) except Exception as err: - counts['errors']['lofis'] += 1 + counts["errors"]["lofis"] += 1 click.secho( - f'\t - {count_lofi: <8}' - f' local field create error {err.args[0]}', - fg='red' + f"\t - {count_lofi: <8} local field create error {err.args[0]}", + fg="red", ) if debug: traceback.print_exc() @@ -429,118 +441,98 @@ def create_lofi(count, local_field, parent_pid, file_lofi, commit, debug, sys.exit(1) return counts - click.secho( - f'Loading documents items lofis from {infile.name}.', - fg='green' - ) + click.secho(f"Loading documents items lofis from {infile.name}.", fg="green") name, ext = os.path.splitext(infile.name) - file_document = JsonWriter(f'{name}_documents{ext}') - file_item = JsonWriter(f'{name}_items{ext}') - file_lofi = JsonWriter(f'{name}_lofis{ext}') + file_document = JsonWriter(f"{name}_documents{ext}") + file_item = JsonWriter(f"{name}_items{ext}") + file_lofi = JsonWriter(f"{name}_lofis{ext}") if save_errors: - error_file_doc = JsonWriter(f'{name}_documents_errors{ext}') - error_file_item = JsonWriter(f'{name}_items_errors{ext}') - error_file_lofi =
JsonWriter(f'{name}_lofis_errors{ext}') + error_file_doc = JsonWriter(f"{name}_documents_errors{ext}") + error_file_item = JsonWriter(f"{name}_items_errors{ext}") + error_file_lofi = JsonWriter(f"{name}_lofis_errors{ext}") # Documents # If we don't have a pid we will create the document # No update will be made if a pid exists counts = { - 'created': { - 'docs': 0, - 'items': 0, - 'lofis': 0 - }, - 'used': { - 'docs': 0, - 'items': 0 - }, - 'errors': { - 'docs': 0, - 'items': 0, - 'lofis': 0 - } + "created": {"docs": 0, "items": 0, "lofis": 0}, + "used": {"docs": 0, "items": 0}, + "errors": {"docs": 0, "items": 0, "lofis": 0}, } for count, record in enumerate(read_json_record(infile), 1): try: - items = record.pop('items', []) - doc_local_fields = record.pop('local_fields', []) - if doc_pid := record.get('pid'): + items = record.pop("items", []) + doc_local_fields = record.pop("local_fields", []) + if doc_pid := record.get("pid"): if verbose: - click.echo(f'{count: <8} document use {doc_pid}') - counts['used']['docs'] += 1 + click.echo(f"{count: <8} document use {doc_pid}") + counts["used"]["docs"] += 1 else: # find existing document by ISBN def filter_isbn(identified_by): """Filter identified_by for type bf:Isbn.""" - return identified_by.get('type') == 'bf:Isbn' + return identified_by.get("type") == "bf:Isbn" filtered_identified_by = filter( - filter_isbn, - record.get('identifiedBy', []) + filter_isbn, record.get("identifiedBy", []) ) isbns = set() for identified_by in filtered_identified_by: - isbn = identified_by['value'] + isbn = identified_by["value"] isbns.add(isbn) isbns = list(isbns) - search = DocumentsSearch().filter('terms', isbn=isbns) + search = DocumentsSearch().filter("terms", isbn=isbns) exists = search.count() rec = Document.create( - data=record, - dbcommit=commit, - reindex=commit, - delete_pid=True + data=record, dbcommit=commit, reindex=commit, delete_pid=True ) - counts['created']['docs'] += 1 + counts["created"]["docs"] += 1 doc_pid = rec.pid file_document.write(rec) if verbose: click.echo( - f'{count: <8} document created ' - f'{rec.pid} : {rec.id} {exists}') + f"{count: <8} document created " + f"{rec.pid} : {rec.id} {exists}" + ) item_pids = [] # Items # If we don't have a pid we will create the item # No update will be made if a pid exists for count_item, item in enumerate(items, 1): try: - if item_pid := item.get('pid'): + if item_pid := item.get("pid"): if verbose: - click.echo( - f'\t - {count_item: <8} item use {item_pid}') - counts['used']['items'] += 1 + click.echo(f"\t - {count_item: <8} item use {item_pid}") + counts["used"]["items"] += 1 else: # change the document pid # "document": { # "$ref": # "https://bib.rero.ch/api/documents/{document_pid}"} - item['document']['$ref'] = item['document'][ - '$ref'].format(document_pid=doc_pid) - item_local_fields = item.pop('local_fields', []) + item["document"]["$ref"] = item["document"]["$ref"].format( + document_pid=doc_pid + ) + item_local_fields = item.pop("local_fields", []) item_rec = Item.create( - data=item, - delete_pid=True, - dbcommit=commit, - reindex=commit + data=item, delete_pid=True, dbcommit=commit, reindex=commit ) - counts['created']['items'] += 1 + counts["created"]["items"] += 1 item_pid = item_rec.pid item_pids.append(item_rec.pid) file_item.write(item_rec) if verbose: click.echo( - f'\t - {count_item: <8}' - ' item created' - f' {item_rec.pid} : {item_rec.id}') + f"\t - {count_item: <8}" + " item created" + f" {item_rec.pid} : {item_rec.id}" + ) except Exception as err: -
counts['errors']['items'] += 1 + counts["errors"]["items"] += 1 click.secho( - f'\t - {count_item: <8}' - f' item create error {err.args[0]}', - fg='red' + f"\t - {count_item: <8} item create error {err.args[0]}", + fg="red", ) if debug: traceback.print_exc() @@ -560,7 +552,7 @@ def filter_isbn(identified_by): save_errors=save_errors, error_file_lofi=error_file_lofi, dont_stop_on_error=dont_stop_on_error, - counts=counts + counts=counts, ) # Local fields for documents for count_lofi, local_field in enumerate(doc_local_fields, 1): @@ -574,15 +566,15 @@ def filter_isbn(identified_by): save_errors=save_errors, error_file_lofi=error_file_lofi, dont_stop_on_error=dont_stop_on_error, - counts=counts + counts=counts, ) except Exception as err: - counts['errors']['docs'] += 1 + counts["errors"]["docs"] += 1 click.secho( - f'{count: <8} create error {doc_pid}' + f"{count: <8} create error {doc_pid}" f' {record.get("pid")}: {err.args[0]}', - fg='red' + fg="red", ) if debug: traceback.print_exc() @@ -591,20 +583,20 @@ def filter_isbn(identified_by): error_file_doc.write(record) if not dont_stop_on_error: sys.exit(1) - click.secho(f'Counts: {counts}', fg='green') + click.secho(f"Counts: {counts}", fg="green") for count_type, infos in counts.items(): - click.secho(f'\t {count_type}', fg='green') + click.secho(f"\t {count_type}", fg="green") for info, count in infos.items(): - click.secho(f'\t\t {info:<5}: {count}', fg='green') + click.secho(f"\t\t {info:<5}: {count}", fg="green") -@utils.command('check_license') -@click.argument('configfile', type=click.File('r'), default=sys.stdin) -@click.option('-v', '--verbose', 'verbose', is_flag=True, default=False) -@click.option('-p', '--progress', 'progress', is_flag=True, default=False) +@utils.command("check_license") +@click.argument("configfile", type=click.File("r"), default=sys.stdin) +@click.option("-v", "--verbose", "verbose", is_flag=True, default=False) +@click.option("-p", "--progress", "progress", is_flag=True, default=False) def check_license(configfile, verbose, progress): """Check licenses.""" - click.secho('Testing licenses in files.', fg='green') + click.secho("Testing licenses in files.", fg="green") def get_files(paths, extensions, recursive=True): """Get files from paths.""" @@ -615,8 +607,7 @@ def get_files(paths, extensions, recursive=True): elif os.path.isdir(path): for extension in extensions: files_list += glob( - os.path.join(path, f'**/*.{extension}'), - recursive=recursive + os.path.join(path, f"**/*.{extension}"), recursive=recursive ) return files_list @@ -628,63 +619,53 @@ def delete_prefix(prefix, line): def is_copyright(line): """Line is copyright.""" - if line.startswith('Copyright (C)'): - return True - return False + return line.startswith("Copyright (C)") def get_line(lines, index, prefix): """Get line on index.""" line = delete_prefix(prefix, lines[index]) - return line, index+1 + return line, index + 1 def show_diff(linenbr, text, n_text): """Show string diffs.""" seqm = difflib.SequenceMatcher( - None, - text.replace(' ', '◼︎'), - n_text.replace(' ', '◼︎') + None, text.replace(" ", "◼︎"), n_text.replace(" ", "◼︎") ) - click.echo(f'{linenbr}: ', nl=False) + click.echo(f"{linenbr}: ", nl=False) for opcode, a0, a1, b0, b1 in seqm.get_opcodes(): - if opcode == 'equal': + if opcode == "equal": click.echo(seqm.a[a0:a1], nl=False) - elif opcode == 'insert': - click.secho(seqm.b[b0:b1], fg='red', nl=False) - elif opcode == 'delete': - click.secho(seqm.a[a0:a1], fg='blue', nl=False) - elif opcode == 'replace': + elif
opcode == "insert": + click.secho(seqm.b[b0:b1], fg="red", nl=False) + elif opcode == "delete": + click.secho(seqm.a[a0:a1], fg="blue", nl=False) + elif opcode == "replace": # seqm.a[a0:a1] -> seqm.b[b0:b1] - click.secho(seqm.b[b0:b1], fg='green', nl=False) + click.secho(seqm.b[b0:b1], fg="green", nl=False) click.echo() - def test_file(file_name, extensions, extension, license_lines, - verbose, progress): + def test_file(file_name, extensions, extension, license_lines, verbose, progress): """Test the license in file.""" if progress: - click.secho('License test: ', fg='green', nl=False) + click.secho("License test: ", fg="green", nl=False) click.echo(file_name) - with open(file_name, 'r') as file: + with open(file_name, "r") as file: result = test_license( file=file, extension=extensions[extension], license_lines=license_lines, - verbose=verbose + verbose=verbose, ) if result != []: - click.secho( - f'License error in {file_name} in lines {result}', - fg='red' - ) + click.secho(f"License error in {file_name} in lines {result}", fg="red") # We have an error return 1 # No error found return 0 def is_slash_directive(file, line): - is_js_file = file.name.split('.')[-1] == 'js' - if is_js_file and re.search(triple_slash, line): - return True - return False + is_js_file = file.name.split(".")[-1] == "js" + return bool(is_js_file and re.search(triple_slash, line)) def test_license(file, extension, license_lines, verbose): """Test the license in file.""" @@ -692,21 +673,20 @@ def test_license(file, extension, license_lines, verbose): lines = [line.rstrip() for line in file] linenbr = 0 linemaxnbr = len(lines) - prefix = extension.get('prefix') + prefix = extension.get("prefix") line, linenbr = get_line(lines, linenbr, prefix) # Get over Shebang lines or Triple-Slash Directives (for Javascript # files) - while lines[linenbr-1].startswith('#!') or \ - is_slash_directive(file, lines[linenbr-1]): + while lines[linenbr - 1].startswith("#!") or is_slash_directive( + file, lines[linenbr - 1] + ): # get over Shebang line, linenbr = get_line(lines, linenbr, prefix) - if extension.get('top'): - # read the top - if line not in extension.get('top'): - if verbose: - for t in extension['top']: - show_diff(linenbr, t, line) - lines_with_errors.append(linenbr+1) + if extension.get("top") and line not in extension.get("top"): + if verbose: + for t in extension["top"]: + show_diff(linenbr, t, line) + lines_with_errors.append(linenbr + 1) line, linenbr = get_line(lines, linenbr, prefix) for license_line in license_lines: # compare the license lines @@ -714,7 +694,7 @@ def test_license(file, extension, license_lines, verbose): while is_copyright(line): line, linenbr = get_line(lines, linenbr, prefix) linenbr -= 1 - line = 'Copyright (C)' + line = "Copyright (C)" if license_line != line: if verbose: show_diff(linenbr, license_line, line) @@ -726,37 +706,33 @@ def test_license(file, extension, license_lines, verbose): return lines_with_errors config = yaml.safe_load(configfile) - file_extensions = config['file_extensions'] + file_extensions = config["file_extensions"] extensions = {} for file_extension in file_extensions: - for ext in file_extension.split(','): + for ext in file_extension.split(","): extensions.setdefault(ext.strip(), file_extensions[file_extension]) # create recursive file list files_list = get_files( - paths=config['directories']['recursive'], - extensions=extensions, - recursive=True + paths=config["directories"]["recursive"], extensions=extensions, recursive=True ) # add flat file list files_list += 
get_files( - paths=config['directories']['flat'], - extensions=extensions, - recursive=False + paths=config["directories"]["flat"], extensions=extensions, recursive=False ) # remove excluded files exclude_list = [] - for ext in config['directories']['exclude']: + for ext in config["directories"]["exclude"]: exclude_list += get_files( - paths=config['directories']['exclude'][ext], + paths=config["directories"]["exclude"][ext], extensions=[ext], - recursive=True + recursive=True, ) files_list = list(set(files_list) - set(exclude_list)) # set regexp expression for Triple-Slash directives - triple_slash = r'^/// $' + triple_slash = r"^/// $" - license_lines = config['license_text'].split('\n') + license_lines = config["license_text"].split("\n") tot_error_cnt = 0 for file_name in files_list: # test every file @@ -767,43 +743,38 @@ def test_license(file, extension, license_lines, verbose): extension=extension, license_lines=license_lines, verbose=verbose, - progress=progress + progress=progress, ) - for extension in config['files']: + for extension in config["files"]: # test each file - for file_name in config['files'][extension]: + for file_name in config["files"][extension]: tot_error_cnt += test_file( file_name=file_name, extensions=extensions, extension=extension, license_lines=license_lines, verbose=verbose, - progress=progress + progress=progress, ) sys.exit(tot_error_cnt) -@utils.command('validate') -@click.argument('jsonfile', type=click.File('r')) -@click.argument('type', default='doc') -@click.option('-v', '--verbose', 'verbose', is_flag=True, default=False) -@click.option('-d', '--debug', 'debug', is_flag=True, default=False) -@click.option('-e', '--error_file', 'error_file_name', default=None, - help='error file') -@click.option('-o', '--ok_file', 'ok_file_name', default=None, - help='ok file') +@utils.command("validate") +@click.argument("jsonfile", type=click.File("r")) +@click.argument("type", default="doc") +@click.option("-v", "--verbose", "verbose", is_flag=True, default=False) +@click.option("-d", "--debug", "debug", is_flag=True, default=False) +@click.option("-e", "--error_file", "error_file_name", default=None, help="error file") +@click.option("-o", "--ok_file", "ok_file_name", default=None, help="ok file") @with_appcontext -def check_validate(jsonfile, type, verbose, debug, error_file_name, - ok_file_name): +def check_validate(jsonfile, type, verbose, debug, error_file_name, ok_file_name): """Check record validation.""" click.secho( - f'Testing json schema for file: {jsonfile.name} type: {type}', - fg='green' + f"Testing json schema for file: {jsonfile.name} type: {type}", fg="green" ) - schema_path = current_jsonschemas.url_to_path( - get_schema_for_resource(type)) + schema_path = current_jsonschemas.url_to_path(get_schema_for_resource(type)) schema = current_jsonschemas.get_schema(path=schema_path) schema = _records_state.replace_refs(schema) @@ -815,29 +786,26 @@ def check_validate(jsonfile, type, verbose, debug, error_file_name, ok_file = JsonWriter(ok_file_name) for count, data in enumerate(datas, 1): if verbose: - click.echo(f'\tTest record: {count}') - if not data.get('$schema'): - scheme = current_app.config.get('JSONSCHEMAS_URL_SCHEME') - host = current_app.config.get('JSONSCHEMAS_HOST') - endpoint = current_app.config.get('JSONSCHEMAS_ENDPOINT') - url_schema = f'{scheme}://{host}{endpoint}{schema_path}' - data['$schema'] = url_schema - if not data.get('pid'): + click.echo(f"\tTest record: {count}") + if not data.get("$schema"): + scheme =
current_app.config.get("JSONSCHEMAS_URL_SCHEME") + host = current_app.config.get("JSONSCHEMAS_HOST") + endpoint = current_app.config.get("JSONSCHEMAS_ENDPOINT") + url_schema = f"{scheme}://{host}{endpoint}{schema_path}" + data["$schema"] = url_schema + if not data.get("pid"): # create dummy pid in data - data['pid'] = 'dummy' + data["pid"] = "dummy" try: validate(data, schema) if ok_file_name: - if data['pid'] == 'dummy': - del data['pid'] + if data["pid"] == "dummy": + del data["pid"] ok_file.write(data) except ValidationError: - trace_lines = traceback.format_exc(1).split('\n') + trace_lines = traceback.format_exc(1).split("\n") trace = trace_lines[5].strip() - click.secho( - f'Error validate in record: {count} {trace}', - fg='red' - ) + click.secho(f"Error validate in record: {count} {trace}", fg="red") if error_file_name: error_file.write(data) if debug: @@ -847,138 +815,147 @@ def check_validate(jsonfile, type, verbose, debug, error_file_name, def error_record(pid, record, notes): """Error record.""" error_rec = { - "adminMetadata": { - "encodingLevel": "Full level", - "note": notes - }, - "issuance": { - "main_type": "rdami:1001", - "subtype": "materialUnit" - }, - "language": [{ - "type": "bf:Language", - "value": "und" - }], + "adminMetadata": {"encodingLevel": "Full level", "note": notes}, + "issuance": {"main_type": "rdami:1001", "subtype": "materialUnit"}, + "language": [{"type": "bf:Language", "value": "und"}], "pid": pid, - "provisionActivity": [{ - "startDate": int(datetime.now().year), - "type": "bf:Publication"} + "provisionActivity": [ + {"startDate": int(datetime.now().year), "type": "bf:Publication"} + ], + "title": [ + {"mainTitle": [{"value": f"ERROR DOCUMENT {pid}"}], "type": "bf:Title"} ], - "title": [{ - "mainTitle": [{"value": f"ERROR DOCUMENT {pid}"}], - "type": "bf:Title" - }], "type": [{"main_type": "docmaintype_other"}], - "_masked": True - } + "_masked": True, + } if record: - schema = record.get('$schema') - if schema and schema != 'dummy': - error_rec['$schema'] = schema - identified_by = record.get('identifiedBy') - if identified_by: - error_rec['identifiedBy'] = identified_by + schema = record.get("$schema") + if schema and schema != "dummy": + error_rec["$schema"] = schema + if identified_by := record.get("identifiedBy"): + error_rec["identifiedBy"] = identified_by return error_rec -def do_worker(marc21records, results, pid_required, debug, dojson, - schema=None): +def do_worker(marc21records, results, pid_required, debug, dojson, schema=None): """Worker for marc21 to json transformation.""" if dojson: dojson = obj_or_import_string( - f'rero_ils.modules.documents.dojson.' - f'contrib.marc21tojson.{dojson}:marc21') + f"rero_ils.modules.documents.dojson." 
+ f"contrib.marc21tojson.{dojson}:marc21" + ) else: dojson = marc21 for data in marc21records: - data_json = data['json'] - pid = data_json.get('001', '???') + data_json = data["json"] + pid = data_json.get("001", "???") record = {} try: record = dojson.do(data_json) if not record.get("$schema"): # create dummy schema in data - record["$schema"] = 'dummy' + record["$schema"] = "dummy" if not pid_required and not record.get("pid"): # create dummy pid in data - record["pid"] = 'dummy' + record["pid"] = "dummy" if schema: - items = record.pop('items', None) - local_fields = record.pop('local_fields', None) + items = record.pop("items", None) + local_fields = record.pop("local_fields", None) validate(record, schema) if items: # TODO: items validation # for item in items: # validate(item, _records_state.replace_refs(schema)) - record['items'] = items + record["items"] = items if local_fields: # TODO: local fields validation # for local_field in local_fields: # validate(local_field, True) - record['local_fields'] = local_fields - if record["$schema"] == 'dummy': + record["local_fields"] = local_fields + if record["$schema"] == "dummy": del record["$schema"] - if not pid_required and record["pid"] == 'dummy': + if not pid_required and record["pid"] == "dummy": del record["pid"] - results.append({ - 'status': True, - 'record': record - }) + results.append({"status": True, "record": record}) except ValidationError as err: if debug: pprint(record) - trace_lines = traceback.format_exc(1).split('\n') + trace_lines = traceback.format_exc(1).split("\n") trace = trace_lines[5].strip() - field_035 = data_json.get('035__', {}) + field_035 = data_json.get("035__", {}) if isinstance(field_035, tuple): field_035 = field_035[0] - rero_pid = field_035.get('a', 'UNKNOWN'), - msg = f'ERROR:\t{pid}\t{rero_pid}\t{err.args[0]}\t-\t{trace}' - click.secho(msg, fg='red') - results.append({ - 'pid': pid, - 'status': False, - 'data': data['xml'], - 'record': error_record( - pid, - record, - [f'{err.args[0]}', f'{trace}'] - ) - }) + rero_pid = (field_035.get("a", "UNKNOWN"),) + msg = f"ERROR:\t{pid}\t{rero_pid}\t{err.args[0]}\t-\t{trace}" + click.secho(msg, fg="red") + results.append( + { + "pid": pid, + "status": False, + "data": data["xml"], + "record": error_record(pid, record, [f"{err.args[0]}", f"{trace}"]), + } + ) except Exception as err: - field_035 = data_json.get('035__', {}) + field_035 = data_json.get("035__", {}) if isinstance(field_035, tuple): field_035 = field_035[0] - rero_pid = field_035.get('a', 'UNKNOWN'), - msg = f'ERROR:\t{pid}\t{rero_pid}\t{err.args[0]}' - click.secho(msg, fg='red') + rero_pid = (field_035.get("a", "UNKNOWN"),) + msg = f"ERROR:\t{pid}\t{rero_pid}\t{err.args[0]}" + click.secho(msg, fg="red") if debug: traceback.print_exc() - results.append({ - 'pid': pid, - 'status': False, - 'data': data['xml'], - 'record': error_record( - pid, - record, - [f'{err.args[0]}'] - ) - }) + results.append( + { + "pid": pid, + "status": False, + "data": data["xml"], + "record": error_record(pid, record, [f"{err.args[0]}"]), + } + ) -class Marc21toJson(): +class Marc21toJson: """Class for Marc21 recorts to Json transformation.""" - __slots__ = ['xml_file', 'json_file_ok', 'xml_file_error', 'parallel', - 'chunk', 'dojson', 'verbose', 'debug', 'pid_required', - 'count', 'count_ok', 'count_ko', 'ctx', - 'results', 'active_buffer', 'buffer', - 'schema', 'pid_mapping', 'pids', 'error_records'] - - def __init__(self, xml_file, json_file_ok, xml_file_error, - parallel=8, chunk=10000, dojson=None, - verbose=False, 
debug=False, pid_required=False, schema=None, - pid_mapping=None, error_records=False): + __slots__ = [ + "xml_file", + "json_file_ok", + "xml_file_error", + "parallel", + "chunk", + "dojson", + "verbose", + "debug", + "pid_required", + "count", + "count_ok", + "count_ko", + "ctx", + "results", + "active_buffer", + "buffer", + "schema", + "pid_mapping", + "pids", + "error_records", + ] + + def __init__( + self, + xml_file, + json_file_ok, + xml_file_error, + parallel=8, + chunk=10000, + dojson=None, + verbose=False, + debug=False, + pid_required=False, + schema=None, + pid_mapping=None, + error_records=False, + ): """Constructor.""" self.count = 0 self.count_ok = 0 @@ -992,8 +969,7 @@ def __init__(self, xml_file, json_file_ok, xml_file_error, self.schema = schema self.dojson = dojson if verbose: - click.echo( - f'Main process pid: {multiprocessing.current_process().pid}') + click.echo(f"Main process pid: {multiprocessing.current_process().pid}") self.debug = debug if debug: multiprocessing.log_to_stderr(logging.DEBUG) @@ -1001,18 +977,17 @@ def __init__(self, xml_file, json_file_ok, xml_file_error, self.pid_required = pid_required self.pids = {} if pid_mapping: - click.echo(f'Read pid mapping: {pid_mapping.name}') + click.echo(f"Read pid mapping: {pid_mapping.name}") datas = read_json_record(pid_mapping) - self.pids = { - data['bib_id']: data['document_pid'] for data in datas} - click.echo(f' Found pids: {len(self.pids)}') - self.ctx = multiprocessing.get_context('spawn') + self.pids = {data["bib_id"]: data["document_pid"] for data in datas} + click.echo(f" Found pids: {len(self.pids)}") + self.ctx = multiprocessing.get_context("spawn") manager = self.ctx.Manager() self.results = manager.list() self.active_buffer = 0 self.buffer = [] - for index in range(parallel): - self.buffer.append({'process': None, 'records': []}) + for _ in range(parallel): + self.buffer.append({"process": None, "records": []}) self.start() def counts(self): @@ -1023,9 +998,9 @@ def write_results(self): """Write results from multiprocess to file.""" while self.results: value = self.results.pop(0) - status = value.get('status') - data = value.get('data') - record = value.get('record') + status = value.get("status") + data = value.get("data") + record = value.get("record") if status: self.count_ok += 1 else: @@ -1037,16 +1012,15 @@ def write_results(self): def wait_free_process(self): """Wait for next process to finish.""" index = (self.active_buffer + 1) % self.parallel - process = self.buffer[index]['process'] - if process: + if process := self.buffer[index]["process"]: process.join() # reset data for finished jobs for index in range(self.parallel): - process = self.buffer[index].get('process') + process = self.buffer[index].get("process") if process and process.exitcode is not None: - del self.buffer[index]['process'] + del self.buffer[index]["process"] self.buffer[index].clear() - self.buffer[index] = {'process': None, 'records': []} + self.buffer[index] = {"process": None, "records": []} def next_active_buffer(self): """Set the next active buffer index.""" @@ -1054,7 +1028,7 @@ def next_active_buffer(self): def wait_all_free_process(self): """Wait for all processes to finish.""" - for index in range(self.parallel): + for _ in range(self.parallel): self.wait_free_process() self.next_active_buffer() @@ -1062,8 +1036,14 @@ def start_new_process(self): """Start a new process in context.""" new_process = self.ctx.Process( target=do_worker, - args=(self.active_records, self.results, self.pid_required, - self.debug, 
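# A minimal sketch of the spawn-context + Manager pattern that Marc21toJson's
# start_new_process relies on (the worker and its payload here are
# illustrative, not codebase API): a shared proxy list collects results from
# a child process created in a "spawn" context.
import multiprocessing

def worker(records, results):
    # each worker appends its outcome to the shared proxy list
    results.extend({"status": True, "record": r} for r in records)

if __name__ == "__main__":
    ctx = multiprocessing.get_context("spawn")
    results = ctx.Manager().list()  # shared between parent and workers
    process = ctx.Process(target=worker, args=([1, 2, 3], results))
    process.start()
    process.join()
    print(list(results))  # -> [{'status': True, 'record': 1}, ...]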
self.dojson, self.schema) + args=( + self.active_records, + self.results, + self.pid_required, + self.debug, + self.dojson, + self.schema, + ), ) self.wait_free_process() new_process.start() @@ -1074,7 +1054,7 @@ def start_new_process(self): else: start = self.count - len(self.active_records) + 1 pid = new_process.pid - click.echo(f'Start process: {pid} records: {start}..{self.count}') + click.echo(f"Start process: {pid} records: {start}..{self.count}") self.next_active_buffer() def write_start(self): @@ -1086,35 +1066,33 @@ def write_start(self): def write_stop(self): """Write finishing lines to files.""" - self.xml_file_error.write(b'\n') + self.xml_file_error.write(b"\n") def start(self): """Start the transformation.""" - click.echo('Start processing ...') + click.echo("Start processing ...") self.write_start() for marc21xml in read_xml_record(self.xml_file): # change 001 to REROILS:pid if we have a mapping. if self.pids: for child in marc21xml: - if child.attrib.get('tag') == '001': + if child.attrib.get("tag") == "001": old_pid = child.text - pid = self.pids.get(old_pid) - if pid: - new_pid = f'REROILS:{pid}' + if pid := self.pids.get(old_pid): + new_pid = f"REROILS:{pid}" child.text = new_pid else: - click.echo( - f'ERROR: No pid mapping for {old_pid}') + click.echo(f"ERROR: No pid mapping for {old_pid}") break marc21json_record = create_record(marc21xml) - self.active_records.append({ - 'json': marc21json_record, - 'xml': etree.tostring( - marc21xml, - pretty_print=True, - encoding='UTF-8' - ).strip() - }) + self.active_records.append( + { + "json": marc21json_record, + "xml": etree.tostring( + marc21xml, pretty_print=True, encoding="UTF-8" + ).strip(), + } + ) self.count += 1 if len(self.active_records) % self.chunk == 0: self.write_results() @@ -1132,169 +1110,187 @@ def start(self): @property def active_process(self): """Get the active process.""" - return self.buffer[self.active_buffer]['process'] + return self.buffer[self.active_buffer]["process"] @active_process.setter def active_process(self, process): """Set the active process.""" - self.buffer[self.active_buffer]['process'] = process + self.buffer[self.active_buffer]["process"] = process @property def active_records(self): """Get the active records.""" - return self.buffer[self.active_buffer]['records'] - - -@utils.command('marc21tojson') -@click.argument('xml_file', type=click.File('r')) -@click.argument('json_file_ok') -@click.argument('xml_file_error', type=click.File('wb')) -@click.option('-p', '--parallel', 'parallel', default=8) -@click.option('-c', '--chunk', 'chunk', default=10000) -@click.option('-v', '--verbose', 'verbose', is_flag=True, default=False) -@click.option('-d', '--debug', 'debug', is_flag=True, default=False) -@click.option('-r', '--pid_required', 'pid_required', is_flag=True, - default=False) -@click.option('-t', '--transformation', 'transformation', default=None) -@click.option('-P', '--pid_mapping', 'pid_mapping', type=click.File('r'), - default=None) -@click.option('-e', '--error_records', 'error_records', is_flag=True, - default=False) + return self.buffer[self.active_buffer]["records"] + + +@utils.command("marc21tojson") +@click.argument("xml_file", type=click.File("r")) +@click.argument("json_file_ok") +@click.argument("xml_file_error", type=click.File("wb")) +@click.option("-p", "--parallel", "parallel", default=8) +@click.option("-c", "--chunk", "chunk", default=10000) +@click.option("-v", "--verbose", "verbose", is_flag=True, default=False) +@click.option("-d", "--debug", "debug", 
is_flag=True, default=False) +@click.option("-r", "--pid_required", "pid_required", is_flag=True, default=False) +@click.option("-t", "--transformation", "transformation", default=None) +@click.option("-P", "--pid_mapping", "pid_mapping", type=click.File("r"), default=None) +@click.option("-e", "--error_records", "error_records", is_flag=True, default=False) @with_appcontext -def marc21json(xml_file, json_file_ok, xml_file_error, parallel, chunk, - verbose, debug, pid_required, transformation, pid_mapping, - error_records): +def marc21json( + xml_file, + json_file_ok, + xml_file_error, + parallel, + chunk, + verbose, + debug, + pid_required, + transformation, + pid_mapping, + error_records, +): """Convert xml file to json with dojson.""" - click.secho('Marc21 to Json transform: ', fg='green', nl=False) + click.secho("Marc21 to Json transform: ", fg="green", nl=False) if pid_required and verbose: - click.secho(' (validation tests pid) ', nl=False) + click.secho(" (validation tests pid) ", nl=False) click.secho(xml_file.name) json_file_ok = JsonWriter(json_file_ok) - path = current_jsonschemas.url_to_path(get_schema_for_resource('doc')) + path = current_jsonschemas.url_to_path(get_schema_for_resource("doc")) schema = current_jsonschemas.get_schema(path=path) schema = _records_state.replace_refs(schema) - transform = Marc21toJson(xml_file, json_file_ok, xml_file_error, parallel, - chunk, transformation, verbose, debug, - pid_required, schema, pid_mapping, error_records) + transform = Marc21toJson( + xml_file, + json_file_ok, + xml_file_error, + parallel, + chunk, + transformation, + verbose, + debug, + pid_required, + schema, + pid_mapping, + error_records, + ) count, count_ok, count_ko = transform.counts() - click.secho('Total records: ', fg='green', nl=False) + click.secho("Total records: ", fg="green", nl=False) click.secho(str(count), nl=False) - click.secho('-', nl=False) + click.secho("-", nl=False) click.secho(str(count_ok + count_ko)) - click.secho('Records transformed: ', fg='green', nl=False) + click.secho("Records transformed: ", fg="green", nl=False) click.secho(str(count_ok)) if count_ko: - click.secho('Records with errors: ', fg='red', nl=False) + click.secho("Records with errors: ", fg="red", nl=False) click.secho(str(count_ko)) @utils.command() -@click.argument('pid_file', type=click.File('r')) -@click.argument('xml_file_in', type=click.File('r')) -@click.argument('xml_file_out', type=click.File('wb')) -@click.option('-t', '--tag', 'tag', default='001') -@click.option('-p', '--progress', 'progress', is_flag=True, default=False) -@click.option('-v', '--verbose', 'verbose', is_flag=True, default=False) -def extract_from_xml(pid_file, xml_file_in, xml_file_out, tag, progress, - verbose): +@click.argument("pid_file", type=click.File("r")) +@click.argument("xml_file_in", type=click.File("r")) +@click.argument("xml_file_out", type=click.File("wb")) +@click.option("-t", "--tag", "tag", default="001") +@click.option("-p", "--progress", "progress", is_flag=True, default=False) +@click.option("-v", "--verbose", "verbose", is_flag=True, default=False) +def extract_from_xml(pid_file, xml_file_in, xml_file_out, tag, progress, verbose): """Extracts xml records with pids.""" - click.secho('Extract pids from xml: ', fg='green') - click.secho(f'PID file : {pid_file.name}') - click.secho(f'XML file in : {xml_file_in.name}') - click.secho(f'XML file out: {xml_file_out.name}') + click.secho("Extract pids from xml: ", fg="green") + click.secho(f"PID file : {pid_file.name}") + click.secho(f"XML 
file in : {xml_file_in.name}") + click.secho(f"XML file out: {xml_file_out.name}") - pids = {} found_pids = {} - for line in pid_file: - pids[line.strip()] = 0 + pids = {line.strip(): 0 for line in pid_file} count = len(pids) - click.secho(f'Search pids count: {count}') + click.secho(f"Search pids count: {count}") xml_file_out.write(b'\n') - xml_file_out.write( - b'\n\n' - ) + xml_file_out.write(b'\n\n') found = 0 for idx, xml in enumerate(read_xml_record(xml_file_in), 1): for child in xml: - is_controlfield = child.tag == 'controlfield' - is_tag = child.get('tag') == tag + is_controlfield = child.tag == "controlfield" + is_tag = child.get("tag") == tag if is_controlfield and is_tag: if progress: - click.secho( - f'{idx:>10} {repr(child.text):>20}\r', nl=False) + click.secho(f"{idx:>10} {repr(child.text):>20}\r", nl=False) if pids.get(child.text, -1) >= 0: found += 1 pids[child.text] += 1 data = etree.tostring( - xml, - pretty_print=True, - encoding='UTF-8' + xml, pretty_print=True, encoding="UTF-8" ).strip() xml_file_out.write(data) found_pids[child.text] = True if verbose: - click.secho(f'Found: {child.text} on position: {idx}') + click.secho(f"Found: {child.text} on position: {idx}") break - xml_file_out.write(b'\n') + xml_file_out.write(b"\n") if count != found: - click.secho(f'Count: {count} Found: {found}', fg='red') + click.secho(f"Count: {count} Found: {found}", fg="red") for key, value in pids.items(): if value == 0: click.secho(key) @utils.command() -@click.argument('pid_file', type=click.File('r')) -@click.argument('json_file_in', type=click.File('r')) -@click.argument('json_file_out') -@click.option('-t', '--tag', 'tag', default='pid') -@click.option('-p', '--progress', 'progress', is_flag=True, default=False) -@click.option('-v', '--verbose', 'verbose', is_flag=True, default=False) -def extract_from_json(pid_file, json_file_in, json_file_out, tag, progress, - verbose): +@click.argument("pid_file", type=click.File("r")) +@click.argument("json_file_in", type=click.File("r")) +@click.argument("json_file_out") +@click.option("-t", "--tag", "tag", default="pid") +@click.option("-p", "--progress", "progress", is_flag=True, default=False) +@click.option("-v", "--verbose", "verbose", is_flag=True, default=False) +def extract_from_json(pid_file, json_file_in, json_file_out, tag, progress, verbose): """Extracts json records with pids.""" - click.secho('Extract pids from json: ', fg='green') - click.secho(f'PID file : {pid_file.name}') - click.secho(f'JSON file in : {json_file_in.name}') - click.secho(f'JSON file out: {json_file_out}') + click.secho("Extract pids from json: ", fg="green") + click.secho(f"PID file : {pid_file.name}") + click.secho(f"JSON file in : {json_file_in.name}") + click.secho(f"JSON file out: {json_file_out}") - pids = {} - found_pids = {} - for line in pid_file: - pids[line.strip()] = 0 + pids = {line.strip(): 0 for line in pid_file} count = len(pids) - click.secho(f'Search pids count: {count}') + click.secho(f"Search pids count: {count}") out = JsonWriter(json_file_out) found = 0 for idx, data in enumerate(read_json_record(json_file_in), 1): pid = data.get(tag) if progress: - click.secho(f'{idx:>10} {pid:>20}\r', nl=False) + click.secho(f"{idx:>10} {pid:>20}\r", nl=False) if pid in pids: found += 1 pids[pid] += 1 out.write(data) if verbose: - click.secho(f'Found: {pid} on position: {idx}') + click.secho(f"Found: {pid} on position: {idx}") if count != found: - click.secho(f'Count: {count} Found: {found}', fg='red') + click.secho(f"Count: {count} Found: {found}", 
fg="red") for key, value in pids.items(): if value == 0: click.secho(key) -@utils.command('reserve_pid_range') -@click.option('-t', '--pid_type', 'pid_type', default=None, - help='pid type of the resource') -@click.option('-n', '--records_number', 'records_number', default=None, - help='Number of records to load') -@click.option('-u', '--unused', 'unused', is_flag=True, default=False, - help='Set unused (gaps) pids status to NEW ') +@utils.command("reserve_pid_range") +@click.option( + "-t", "--pid_type", "pid_type", default=None, help="pid type of the resource" +) +@click.option( + "-n", + "--records_number", + "records_number", + default=None, + help="Number of records to load", +) +@click.option( + "-u", + "--unused", + "unused", + is_flag=True, + default=False, + help="Set unused (gaps) pids status to NEW ", +) @with_appcontext def reserve_pid_range(pid_type, records_number, unused): """Reserve a range of pids for future records loading. @@ -1304,47 +1300,52 @@ def reserve_pid_range(pid_type, records_number, unused): - records_number: number of new records(with pids) to load. - unused: set that the status of unused (gaps) pids to NEW. """ - click.secho('Reserving pids for loading "%s" records' % - pid_type, fg='green') + click.secho(f'Reserving pids for loading "{pid_type}" records', fg="green") try: records_number = int(records_number) except ValueError: - raise ValueError('Parameter records_number must be integer.') + raise ValueError("Parameter records_number must be integer.") record_class = get_record_class_from_schema_or_pid_type(pid_type=pid_type) if not record_class: - raise AttributeError('Invalid pid type.') + raise AttributeError("Invalid pid type.") identifier = record_class.provider.identifier reserved_pids = [] - for number in range(0, records_number): + for _ in range(records_number): pid = identifier.next() reserved_pids.append(pid) - record_class.provider.create(pid_type, pid_value=pid, - status=PIDStatus.RESERVED) + record_class.provider.create(pid_type, pid_value=pid, status=PIDStatus.RESERVED) db.session.commit() min_pid = min(reserved_pids) max_pid = max(reserved_pids) - click.secho(f'reserved_pids range, from: {min_pid} to: {max_pid}') + click.secho(f"reserved_pids range, from: {min_pid} to: {max_pid}") if unused: for pid in range(1, identifier.max()): if not db.session.query( - identifier.query.filter(identifier.recid == pid).exists() + identifier.query.filter(identifier.recid == pid).exists() ).scalar(): - record_class.provider.create(pid_type, pid_value=pid, - status=PIDStatus.NEW) + record_class.provider.create( + pid_type, pid_value=pid, status=PIDStatus.NEW + ) db.session.add(identifier(recid=pid)) db.session.commit() -@utils.command('check_pid_dependencies') -@click.option('-i', '--dependency_file', 'dependency_file', - type=click.File('r'), default='./data/pid_dependencies_big.json') -@click.option('-d', '--directory', 'directory', default='./data') -@click.option('-v', '--verbose', 'verbose', is_flag=True, default=False) +@utils.command("check_pid_dependencies") +@click.option( + "-i", + "--dependency_file", + "dependency_file", + type=click.File("r"), + default="./data/pid_dependencies_big.json", +) +@click.option("-d", "--directory", "directory", default="./data") +@click.option("-v", "--verbose", "verbose", is_flag=True, default=False) def check_pid_dependencies(dependency_file, directory, verbose): """Check record dependencies.""" - class Dependencies(): + + class Dependencies: """Class for dependencies checking.""" test_data = {} @@ -1354,8 +1355,8 
@@ def __init__(self, directory, verbose=False): self.directory = directory self.verbose = verbose self.record = {} - self.name = '' - self.pid = '0' + self.name = "" + self.pid = "0" self.dependencies_pids = [] self.dependencies = set() self.missing = 0 @@ -1363,30 +1364,28 @@ def __init__(self, directory, verbose=False): def get_pid(self, data): """Get pid from end of $ref string.""" - return data['$ref'].split('/')[-1] + return data["$ref"].split("/")[-1] def get_pid_type(self, data): """Get pid and type from end of $ref string.""" - data_split = data['$ref'].split('/') + data_split = data["$ref"].split("/") return data_split[-1], data_split[-2] def get_ref_pids(self, data, dependency_name): """Get pids from data.""" pids = [] - try: + with contextlib.suppress(Exception): if isinstance(data[dependency_name], list): for dat in data[dependency_name]: pids.append(self.get_pid(dat)) else: pids = [self.get_pid(data[dependency_name])] - except Exception as err: - pass return pids def get_ref_type_pids(self, data, dependency_name, ref_type): """Get pids from data.""" pids = [] - try: + with contextlib.suppress(Exception): if isinstance(data[dependency_name], list): for dat in data[dependency_name]: pid, pid_type = self.get_pid_type(dat) @@ -1396,70 +1395,59 @@ def get_ref_type_pids(self, data, dependency_name, ref_type): pid, pid_type = self.get_pid_type(data[dependency_name]) if pid_type == ref_type: pids.append(pid) - except Exception as err: - pass return pids def add_pids_to_dependencies(self, dependency_name, pids, optional): """Add pids to dependencies_pids.""" - if not (pids or optional): + if pids or optional: + self.dependencies_pids.append({dependency_name: pids}) + self.dependencies.add(dependency_name) + + else: click.secho( - f'{self.name}: dependencies not found: {dependency_name}', - fg='red' + f"{self.name}: dependencies not found: {dependency_name}", fg="red" ) self.not_found += 1 - else: - self.dependencies_pids.append({ - dependency_name: pids - }) - self.dependencies.add(dependency_name) def set_dependencies_pids(self, dependencies): """Get all dependencies and pids.""" self.dependencies_pids = [] for dependency in dependencies: - dependency_ref = dependency.get('ref') - dependency_refs = dependency.get('refs') + dependency_ref = dependency.get("ref") + dependency_refs = dependency.get("refs") if not dependency_ref: - dependency_ref = dependency['name'] - sublist = dependency.get('sublist', []) + dependency_ref = dependency["name"] + sublist = dependency.get("sublist", []) for sub in sublist: - datas = self.record.get(dependency['name'], []) - if not (datas or dependency.get('optional')): + datas = self.record.get(dependency["name"], []) + if not datas and not dependency.get("optional"): click.secho( - f'{self.name}: sublist not found: ' - f'{dependency["name"]}', - fg='red' + f'{self.name}: sublist not found: {dependency["name"]}', + fg="red", ) self.not_found += 1 else: for data in datas: - dependency_ref = sub.get('ref') - if not dependency_ref: - dependency_ref = sub['name'] + dependency_ref = sub.get("ref") or sub["name"] self.add_pids_to_dependencies( dependency_ref, - self.get_ref_pids(data, sub['name']), - sub.get('optional') + self.get_ref_pids(data, sub["name"]), + sub.get("optional"), ) if not sublist: if dependency_refs: for ref, ref_type in dependency_refs.items(): pids = self.get_ref_type_pids( - self.record, - dependency['name'], - ref_type + self.record, dependency["name"], ref_type ) self.add_pids_to_dependencies( - ref, - pids, -
dependency.get('optional') + ref, pids, dependency.get("optional") ) else: self.add_pids_to_dependencies( dependency_ref, - self.get_ref_pids(self.record, dependency['name']), - dependency.get('optional') + self.get_ref_pids(self.record, dependency["name"]), + dependency.get("optional"), ) def test_dependencies(self): @@ -1471,52 +1459,43 @@ def test_dependencies(self): self.test_data[key][value] except Exception: click.secho( - f'{self.name}: {self.pid} missing ' - f'{key}: {value}', - fg='red' + f"{self.name}: {self.pid} missing {key}: {value}", + fg="red", ) self.missing += 1 def init_and_test_data(self, test): """Init data and test data.""" - self.name = test['name'] - file_name = os.path.join(self.directory, test['filename']) + self.name = test["name"] + file_name = os.path.join(self.directory, test["filename"]) self.test_data.setdefault(self.name, {}) - with open(file_name, 'r') as infile: + with open(file_name, "r") as infile: if self.verbose: - click.echo(f'{self.name}: {file_name}') + click.echo(f"{self.name}: {file_name}") records = read_json_record(infile) for idx, self.record in enumerate(records, 1): - self.pid = self.record.get('pid', idx) + self.pid = self.record.get("pid", idx) if self.test_data[self.name].get(self.pid): - click.secho( - f'Double pid in {self.name}: {self.pid}', - fg='red' - ) + click.secho(f"Double pid in {self.name}: {self.pid}", fg="red") else: self.test_data[self.name][self.pid] = {} - self.set_dependencies_pids( - test.get('dependencies', []) - ) + self.set_dependencies_pids(test.get("dependencies", [])) self.test_dependencies() if self.verbose: for dependency in self.dependencies: - click.echo(f'\tTested dependency: {dependency}') + click.echo(f"\tTested dependency: {dependency}") def run_tests(self, tests): """Run the tests.""" for test in tests: self.init_and_test_data(test) if self.missing: - click.secho(f'Missing relations: {self.missing}', fg='red') + click.secho(f"Missing relations: {self.missing}", fg="red") if self.not_found: - click.secho(f'Relation not found: {self.not_found}', fg='red') + click.secho(f"Relation not found: {self.not_found}", fg="red") # start of tests - click.secho( - f'Check dependencies {dependency_file.name}: {directory}', - fg='green' - ) + click.secho(f"Check dependencies {dependency_file.name}: {directory}", fg="green") dependency_tests = Dependencies(directory, verbose=verbose) tests = json.load(dependency_file) dependency_tests.run_tests(tests) @@ -1524,35 +1503,34 @@ def run_tests(self, tests): sys.exit(dependency_tests.missing + dependency_tests.not_found) -@utils.command('dump_es_mappings') -@click.option('-v', '--verbose', 'verbose', is_flag=True, default=False) -@click.option('-o', '--outfile', 'outfile', type=click.File('w'), default=None) +@utils.command("dump_es_mappings") +@click.option("-v", "--verbose", "verbose", is_flag=True, default=False) +@click.option("-o", "--outfile", "outfile", type=click.File("w"), default=None) @with_appcontext def dump_es_mappings(verbose, outfile): """Dumps ES mappings.""" - click.secho('Dump ES mappings:', fg='green') - aliases = current_search.client.indices.get_alias('*') + click.secho("Dump ES mappings:", fg="green") + aliases = current_search.client.indices.get_alias("*") mappings = current_search.client.indices.get_mapping() for alias in sorted(aliases): - if alias[0] != '.': - mapping = mappings.get(alias, {}).get('mappings') + if alias[0] != ".": + mapping = mappings.get(alias, {}).get("mappings") click.echo(alias) if verbose or not outfile:
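# The "." prefix test above skips Elasticsearch system indices (for example
# .kibana). A sketch of the same dump loop against a bare client; the client
# construction and URL are assumptions, while indices.get_alias("*") mirrors
# the call used in dump_es_mappings:
from elasticsearch import Elasticsearch

client = Elasticsearch("http://localhost:9200")  # assumed local node
for alias in sorted(client.indices.get_alias("*")):
    if not alias.startswith("."):  # skip hidden/system indices
        print(alias)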
print(json.dumps(mapping, indent=2)) if outfile: - outfile.write(f'{alias}\n') + outfile.write(f"{alias}\n") json.dump(mapping, outfile, indent=2) - outfile.write('\n') + outfile.write("\n") -@utils.command('export') -@click.option('-v', '--verbose', 'verbose', is_flag=True, default=False) -@click.option('-t', '--pid_type', 'pid_type', default='doc') -@click.option('-o', '--outfile', 'outfile_name', required=True) -@click.option('-i', '--pidfile', 'pidfile', type=click.File('r'), - default=None) -@click.option('-I', '--indent', 'indent', type=click.INT, default=2) -@click.option('-s', '--schema', 'schema', is_flag=True, default=False) +@utils.command("export") +@click.option("-v", "--verbose", "verbose", is_flag=True, default=False) +@click.option("-t", "--pid_type", "pid_type", default="doc") +@click.option("-o", "--outfile", "outfile_name", required=True) +@click.option("-i", "--pidfile", "pidfile", type=click.File("r"), default=None) +@click.option("-I", "--indent", "indent", type=click.INT, default=2) +@click.option("-s", "--schema", "schema", is_flag=True, default=False) @with_appcontext def export(verbose, pid_type, outfile_name, pidfile, indent, schema): """Export REROILS record. @@ -1564,7 +1542,7 @@ def export(verbose, pid_type, outfile_name, pidfile, indent, schema): :param indent: indent for output :param schema: do not delete $schema """ - click.secho(f'Export {pid_type} records: {outfile_name}', fg='green') + click.secho(f"Export {pid_type} records: {outfile_name}", fg="green") outfile = JsonWriter(outfile_name) record_class = get_record_class_from_schema_or_pid_type(pid_type=pid_type) @@ -1573,26 +1551,24 @@ def export(verbose, pid_type, outfile_name, pidfile, indent, schema): else: pids = record_class.get_all_pids() - agents_sources = current_app.config.get('RERO_ILS_AGENTS_SOURCES', []) + agents_sources = current_app.config.get("RERO_ILS_AGENTS_SOURCES", []) for count, pid in enumerate(pids, 1): try: rec = record_class.get_record_by_pid(pid) if verbose: - click.echo( - f'{count: <8} {pid_type} export {rec.pid}:{rec.id}') + click.echo(f"{count: <8} {pid_type} export {rec.pid}:{rec.id}") if not schema: - rec.pop('$schema', None) + rec.pop("$schema", None) if isinstance(rec, RemoteEntity): for agent_source in agents_sources: - rec.get(agent_source, {}).pop('$schema', None) + rec.get(agent_source, {}).pop("$schema", None) outfile.write(rec) except Exception as err: click.echo(err) - click.echo(f'ERROR: Can not export pid:{pid}') + click.echo(f"ERROR: Can not export pid:{pid}") -def create_personal( - name, user_id, scopes=None, is_internal=False, access_token=None): +def create_personal(name, user_id, scopes=None, is_internal=False, access_token=None): """Create a personal access token. A token that is bound to a specific user and which doesn't expire, i.e. 
@@ -1614,14 +1590,13 @@ def create_personal( user_id=user_id, is_internal=True, is_confidential=False, - _default_scopes=scopes + _default_scopes=scopes, ) client.gen_salt() if not access_token: access_token = gen_salt( - current_app.config.get( - 'OAUTH2SERVER_TOKEN_PERSONAL_SALT_LEN') + current_app.config.get("OAUTH2SERVER_TOKEN_PERSONAL_SALT_LEN") ) token = Token( client_id=client.client_id, @@ -1640,55 +1615,66 @@ def create_personal( @utils.command() -@click.option('-n', '--name', required=True) -@click.option( - '-u', '--user', required=True, callback=process_user, - help='User ID or email.') +@click.option("-n", "--name", required=True) @click.option( - '-s', '--scope', 'scopes', multiple=True, callback=process_scopes) -@click.option('-i', '--internal', is_flag=True) + "-u", "--user", required=True, callback=process_user, help="User ID or email." +) +@click.option("-s", "--scope", "scopes", multiple=True, callback=process_scopes) +@click.option("-i", "--internal", is_flag=True) @click.option( - '-t', '--access_token', 'access_token', required=False, - help='personalized access_token.') + "-t", + "--access_token", + "access_token", + required=False, + help="personalized access_token.", ) @with_appcontext def token_create(name, user, scopes, internal, access_token): """Create a personal OAuth token.""" if user: token = create_personal( - name, user.id, scopes=scopes, is_internal=internal, - access_token=access_token) + name, + user.id, + scopes=scopes, + is_internal=internal, + access_token=access_token, ) db.session.commit() - click.secho(token.access_token, fg='blue') + click.secho(token.access_token, fg="blue") else: - click.secho('No user found', fg='red') + click.secho("No user found", fg="red") -@utils.command('add_cover_urls') -@click.option('-v', '--verbose', 'verbose', is_flag=True, default=False) +@utils.command("add_cover_urls") +@click.option("-v", "--verbose", "verbose", is_flag=True, default=False) @with_appcontext def add_cover_urls(verbose): """Add cover urls to all documents with isbns.""" - click.secho('Add cover urls.', fg='green') - search = DocumentsSearch() \ - .filter('term', identifiedBy__type='bf:Isbn') \ - .filter('bool', must_not=[ - Q('term', electronicLocator__content='coverImage')]) \ - .params(preserve_order=True) \ - .sort({'pid': {"order": "asc"}}) \ - .source('pid') + click.secho("Add cover urls.", fg="green") + search = ( + DocumentsSearch() + .filter("term", identifiedBy__type="bf:Isbn") + .filter("bool", must_not=[Q("term", electronicLocator__content="coverImage")]) + .params(preserve_order=True) + .sort({"pid": {"order": "asc"}}) + .source("pid") + ) for idx, hit in enumerate(search.scan()): pid = hit.pid record = Document.get_record_by_pid(pid) url = get_cover_art(record=record, save_cover_url=True) if verbose: - click.echo(f'{idx}:\tdocument: {pid}\t{url}') + click.echo(f"{idx}:\tdocument: {pid}\t{url}") @utils.command() -@click.option('-v', '--verbose', is_flag=True, default=False, - help='Verbose print.') -@click.option('-h', '--hours', default=1, - help='How many houres befor now not to delete default=1.') +@click.option("-v", "--verbose", is_flag=True, default=False, help="Verbose print.") +@click.option( + "-h", + "--hours", + default=1, + help="How many hours before now not to delete (default=1).", ) @with_appcontext def delete_loans_created(verbose, hours): """Delete loans with state CREATED.""" diff --git a/rero_ils/modules/collections/api.py b/rero_ils/modules/collections/api.py index b080e63163..d593f656a4 100644 ---
a/rero_ils/modules/collections/api.py +++ b/rero_ils/modules/collections/api.py @@ -22,18 +22,18 @@ from rero_ils.modules.items.api import Item -from .models import CollectionIdentifier, CollectionMetadata from ..api import IlsRecord, IlsRecordsIndexer, IlsRecordsSearch from ..fetchers import id_fetcher from ..minters import id_minter from ..providers import Provider from ..utils import extracted_data_from_ref +from .models import CollectionIdentifier, CollectionMetadata # provider CollectionProvider = type( - 'CollectionProvider', + "CollectionProvider", (Provider,), - dict(identifier=CollectionIdentifier, pid_type='coll') + dict(identifier=CollectionIdentifier, pid_type="coll"), ) # minter collection_id_minter = partial(id_minter, provider=CollectionProvider) @@ -47,7 +47,7 @@ class CollectionsSearch(IlsRecordsSearch): class Meta: """Search only on collection index.""" - index = 'collections' + index = "collections" doc_types = None def active_by_item_pid(self, item_pid): @@ -56,10 +56,11 @@ def active_by_item_pid(self, item_pid): :param item_pid: string - the item to filter with. :return: An ElasticSearch query to get hits related the entity. """ - return self \ - .filter('term', items__pid=item_pid) \ - .filter('range', end_date={'gte': datetime.now(timezone.utc)}) \ - .sort({'end_date': {'order': 'asc'}}) + return ( + self.filter("term", items__pid=item_pid) + .filter("range", end_date={"gte": datetime.now(timezone.utc)}) + .sort({"end_date": {"order": "asc"}}) + ) class Collection(IlsRecord): @@ -70,11 +71,11 @@ class Collection(IlsRecord): provider = CollectionProvider model_cls = CollectionMetadata pids_exist_check = { - 'not_required': { - 'doc': 'document', - 'lib': 'library', - 'loc': 'location', - 'item': 'item' + "not_required": { + "doc": "document", + "lib": "library", + "loc": "location", + "item": "item", } } @@ -85,15 +86,15 @@ def get_items(self): :return: list of items linked to collection """ items = [] - for item in self.get('items', []): + for item in self.get("items", []): item_pid = extracted_data_from_ref(item) item = Item.get_record_by_pid(item_pid) # inherit holdings first call number when possible if first_call_number := item.issue_inherited_first_call_number: - item['call_number'] = first_call_number + item["call_number"] = first_call_number # inherit holdings second call number when possible if second_call_number := item.issue_inherited_second_call_number: - item['second_call_number'] = second_call_number + item["second_call_number"] = second_call_number items.append(item) return items @@ -109,4 +110,4 @@ def bulk_index(self, record_id_iterator): :param record_id_iterator: Iterator yielding record UUIDs. 
""" - super().bulk_index(record_id_iterator, doc_type='coll') + super().bulk_index(record_id_iterator, doc_type="coll") diff --git a/rero_ils/modules/collections/cli.py b/rero_ils/modules/collections/cli.py index c9287a031f..08d2f03773 100644 --- a/rero_ils/modules/collections/cli.py +++ b/rero_ils/modules/collections/cli.py @@ -30,39 +30,36 @@ from rero_ils.modules.utils import extracted_data_from_ref, get_ref_for_pid -@click.command('create_collections') -@click.option('-f', '--requests_file', 'input_file', help='Request input file') +@click.command("create_collections") +@click.option("-f", "--requests_file", "input_file", help="Request input file") @with_appcontext def create_collections(input_file, max_item=10): """Create collections.""" organisation_items = {} - with open(input_file, 'r', encoding='utf-8') as request_file: + with open(input_file, "r", encoding="utf-8") as request_file: collections = json.load(request_file) for collection_data in collections: organisation_pid = extracted_data_from_ref( - collection_data.get('organisation').get('$ref')) + collection_data.get("organisation").get("$ref") + ) if organisation_pid not in organisation_items: - organisation_items[organisation_pid] =\ - get_items_by_organisation_pid(organisation_pid) + organisation_items[organisation_pid] = get_items_by_organisation_pid( + organisation_pid + ) items = random.choices( - organisation_items[organisation_pid], - k=random.randint(1, max_item) + organisation_items[organisation_pid], k=random.randint(1, max_item) ) - collection_data['items'] = [] + collection_data["items"] = [] for item_pid in items: - ref = get_ref_for_pid('items', item_pid) - collection_data['items'].append({'$ref': ref}) - request = Collection.create( - collection_data, - dbcommit=True, - reindex=True - ) - click.echo(f'\tCollection: #{request.pid}') + ref = get_ref_for_pid("items", item_pid) + collection_data["items"].append({"$ref": ref}) + request = Collection.create(collection_data, dbcommit=True, reindex=True) + click.echo(f"\tCollection: #{request.pid}") def get_items_by_organisation_pid(organisation_pid): """Get items by organisation pid.""" - query = ItemsSearch().filter( - 'term', organisation__pid=organisation_pid)\ - .source('pid') + query = ( + ItemsSearch().filter("term", organisation__pid=organisation_pid).source("pid") + ) return [item.pid for item in query.scan()] diff --git a/rero_ils/modules/collections/jsonresolver.py b/rero_ils/modules/collections/jsonresolver.py index be77dc7e6b..a8ecedac75 100644 --- a/rero_ils/modules/collections/jsonresolver.py +++ b/rero_ils/modules/collections/jsonresolver.py @@ -22,13 +22,13 @@ from invenio_pidstore.models import PersistentIdentifier, PIDStatus -@jsonresolver.route('/api/collections/', host='bib.rero.ch') +@jsonresolver.route("/api/collections/", host="bib.rero.ch") def collection_resolver(pid): """Resolver for collection record.""" - persistent_id = PersistentIdentifier.get('coll', pid) + persistent_id = PersistentIdentifier.get("coll", pid) if persistent_id.status == PIDStatus.REGISTERED: return dict(pid=persistent_id.pid_value) current_app.logger.error( - f'Doc resolver error: /api/collections/{pid} {persistent_id}' + f"Doc resolver error: /api/collections/{pid} {persistent_id}" ) - raise Exception('unable to resolve') + raise Exception("unable to resolve") diff --git a/rero_ils/modules/collections/listener.py b/rero_ils/modules/collections/listener.py index 336a739958..14bf0e6e1b 100644 --- a/rero_ils/modules/collections/listener.py +++ 
b/rero_ils/modules/collections/listener.py @@ -17,12 +17,13 @@ """Signals connector for Collection.""" -from .api import Collection, CollectionsSearch from ..utils import extracted_data_from_ref +from .api import Collection, CollectionsSearch -def enrich_collection_data(sender, json=None, record=None, index=None, - doc_type=None, **dummy_kwargs): +def enrich_collection_data( + sender, json=None, record=None, index=None, doc_type=None, **dummy_kwargs +): """Signal sent before a record is indexed. :param json: The dumped record dictionary which can be modified. @@ -30,11 +31,11 @@ def enrich_collection_data(sender, json=None, record=None, index=None, :param index: The index in which the record will be indexed. :param doc_type: The doc_type for the record. """ - if index.split('-')[0] == CollectionsSearch.Meta.index: + if index.split("-")[0] == CollectionsSearch.Meta.index: collection = record if not isinstance(record, Collection): - collection = Collection.get_record_by_pid(record.get('pid')) - json['organisation'] = { - 'pid': extracted_data_from_ref(collection.get('organisation')), - 'type': 'org' + collection = Collection.get_record_by_pid(record.get("pid")) + json["organisation"] = { + "pid": extracted_data_from_ref(collection.get("organisation")), + "type": "org", } diff --git a/rero_ils/modules/collections/models.py b/rero_ils/modules/collections/models.py index 2b05315354..424874f799 100644 --- a/rero_ils/modules/collections/models.py +++ b/rero_ils/modules/collections/models.py @@ -27,16 +27,17 @@ class CollectionIdentifier(RecordIdentifier): """Sequence generator for collection identifiers.""" - __tablename__ = 'collection_id' - __mapper_args__ = {'concrete': True} + __tablename__ = "collection_id" + __mapper_args__ = {"concrete": True} recid = db.Column( - db.BigInteger().with_variant(db.Integer, 'sqlite'), - primary_key=True, autoincrement=True, + db.BigInteger().with_variant(db.Integer, "sqlite"), + primary_key=True, + autoincrement=True, ) class CollectionMetadata(db.Model, RecordMetadataBase): """Collection record metadata.""" - __tablename__ = 'collection_metadata' + __tablename__ = "collection_metadata" diff --git a/rero_ils/modules/collections/permissions.py b/rero_ils/modules/collections/permissions.py index 8de1b4330b..cc6de8c86f 100644 --- a/rero_ils/modules/collections/permissions.py +++ b/rero_ils/modules/collections/permissions.py @@ -19,16 +19,19 @@ """Collection permissions.""" from invenio_access import action_factory -from rero_ils.modules.permissions import AllowedByAction, \ - AllowedByActionRestrictByOrganisation, RecordPermissionPolicy +from rero_ils.modules.permissions import ( + AllowedByAction, + AllowedByActionRestrictByOrganisation, + RecordPermissionPolicy, +) # Actions to control Items policies for CRUD operations -search_action = action_factory('coll-search') -read_action = action_factory('coll-read') -create_action = action_factory('coll-create') -update_action = action_factory('coll-update') -delete_action = action_factory('coll-delete') -access_action = action_factory('coll-access') +search_action = action_factory("coll-search") +read_action = action_factory("coll-read") +create_action = action_factory("coll-create") +update_action = action_factory("coll-update") +delete_action = action_factory("coll-delete") +access_action = action_factory("coll-access") class CollectionPermissionPolicy(RecordPermissionPolicy): diff --git a/rero_ils/modules/collections/serializers.py b/rero_ils/modules/collections/serializers.py index 2f5e8bf7b6..90f0b71046 100644 
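(A usage sketch, not part of the patch: the factory-built action needs above are meant to be checked through invenio-access, the same pattern this patch already uses in rero_ils/modules/decorators.py. The helper name below is invented for illustration, and the check assumes an application context with a loaded identity.)

    from invenio_access import Permission

    from rero_ils.modules.collections.permissions import search_action


    def can_search_collections() -> bool:
        # True only when the current identity provides the "coll-search" need
        return Permission(search_action).can()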
--- a/rero_ils/modules/collections/serializers.py +++ b/rero_ils/modules/collections/serializers.py @@ -18,8 +18,11 @@ """Collection serialization.""" from rero_ils.modules.libraries.api import LibrariesSearch -from rero_ils.modules.serializers import JSONSerializer, RecordSchemaJSONV1, \ - search_responsify +from rero_ils.modules.serializers import ( + JSONSerializer, + RecordSchemaJSONV1, + search_responsify, +) class CollectionJSONSerializer(JSONSerializer): @@ -28,11 +31,10 @@ class CollectionJSONSerializer(JSONSerializer): def _postprocess_search_aggregations(self, aggregations: dict) -> None: """Post-process aggregations from a search result.""" JSONSerializer.enrich_bucket_with_data( - aggregations.get('library', {}).get('buckets', []), - LibrariesSearch, 'name' + aggregations.get("library", {}).get("buckets", []), LibrariesSearch, "name" ) super()._postprocess_search_aggregations(aggregations) _json = CollectionJSONSerializer(RecordSchemaJSONV1) -json_coll_search = search_responsify(_json, 'application/rero+json') +json_coll_search = search_responsify(_json, "application/rero+json") diff --git a/rero_ils/modules/collections/views.py b/rero_ils/modules/collections/views.py index f2e732d97c..7b70b13a29 100644 --- a/rero_ils/modules/collections/views.py +++ b/rero_ils/modules/collections/views.py @@ -30,10 +30,10 @@ from ..utils import extracted_data_from_ref blueprint = Blueprint( - 'collections', + "collections", __name__, - template_folder='templates', - static_folder='static', + template_folder="templates", + static_folder="static", ) @@ -43,57 +43,41 @@ def collection_view_method(pid, record, template=None, **kwargs): Sends record_viewed signal and renders template. :param pid: PID object. """ - record_viewed.send( - current_app._get_current_object(), pid=pid, record=record) + record_viewed.send(current_app._get_current_object(), pid=pid, record=record) - viewcode = kwargs['viewcode'] - org_pid = Organisation.get_record_by_viewcode(viewcode)['pid'] + viewcode = kwargs["viewcode"] + org_pid = Organisation.get_record_by_viewcode(viewcode)["pid"] rec = record libraries = [] - if org_pid != extracted_data_from_ref(record.get('organisation')): - abort( - 404, 'The collections is not referenced for this organisation' - ) + if org_pid != extracted_data_from_ref(record.get("organisation")): + abort(404, "The collection is not referenced for this organisation") # Get items and document title - rec['items'] = record.get_items() - for item in rec['items']: - document_pid = extracted_data_from_ref(item.get('document')) - item['document'] = Document.get_record_by_pid(document_pid) + rec["items"] = record.get_items() + for item in rec["items"]: + document_pid = extracted_data_from_ref(item.get("document")) + item["document"] = Document.get_record_by_pid(document_pid) # Get libraries names - if rec.get('libraries'): - for library in rec.get('libraries'): + if rec.get("libraries"): + for library in rec.get("libraries"): library_pid = extracted_data_from_ref(library) - libraries.append( - Library.get_record_by_pid(library_pid).get('name') - ) - rec['libraries'] = ', '.join(libraries) + libraries.append(Library.get_record_by_pid(library_pid).get("name")) + rec["libraries"] = ", ".join(libraries) # Format date - rec['date'] = _start_end_date( - record.get('start_date'), record.get('end_date')) + rec["date"] = _start_end_date(record.get("start_date"), record.get("end_date")) - return render_template( - template, - record=rec, - viewcode=viewcode - ) + return render_template(template, record=rec, 
viewcode=viewcode) def _start_end_date(start_date, end_date): """Format date.""" start = format_date_filter( - start_date, - date_format='short', - time_format=None, - locale='fr' + start_date, date_format="short", time_format=None, locale="fr" ) end = format_date_filter( - end_date, - date_format='short', - time_format=None, - locale='fr' + end_date, date_format="short", time_format=None, locale="fr" ) - return f'{start} - {end}' + return f"{start} - {end}" @blueprint.app_template_filter() @@ -104,6 +88,6 @@ def get_teachers(record): :return: list of teachers of the collection """ teachers = filter( - None, [teacher.get('name') for teacher in record.get('teachers', [])] + None, [teacher.get("name") for teacher in record.get("teachers", [])] ) - return ', '.join(teachers) + return ", ".join(teachers) diff --git a/rero_ils/modules/commons/dumpers.py b/rero_ils/modules/commons/dumpers.py index f7b9627050..0078e1997a 100644 --- a/rero_ils/modules/commons/dumpers.py +++ b/rero_ils/modules/commons/dumpers.py @@ -73,5 +73,6 @@ def dump(self, record, data): :return a dict with dumped data. """ from copy import deepcopy + data = deepcopy(_records_state.replace_refs(data)) return data diff --git a/rero_ils/modules/commons/exceptions.py b/rero_ils/modules/commons/exceptions.py index 522d0f35eb..f09d333cd3 100644 --- a/rero_ils/modules/commons/exceptions.py +++ b/rero_ils/modules/commons/exceptions.py @@ -33,7 +33,7 @@ def __init__(self, record_cls, record_pid): def __repr__(self): """String representation of the exception.""" - return f'{self.record_cls.__name__}#{self.record_pid} not found' + return f"{self.record_cls.__name__}#{self.record_pid} not found" class MissingDataException(KeyError): diff --git a/rero_ils/modules/commons/identifiers.py b/rero_ils/modules/commons/identifiers.py index 01a4d07dc9..177e7bec77 100644 --- a/rero_ils/modules/commons/identifiers.py +++ b/rero_ils/modules/commons/identifiers.py @@ -48,8 +48,16 @@ from dataclasses import dataclass, field from typing import Optional, TypeVar -from isbnlib import NotValidISBNError, canonical, ean13, is_isbn10, mask, \ - notisbn, to_isbn10, to_isbn13 +from isbnlib import ( + NotValidISBNError, + canonical, + ean13, + is_isbn10, + mask, + notisbn, + to_isbn10, + to_isbn13, +) class InvalidIdentifierException(Exception): @@ -59,44 +67,39 @@ class InvalidIdentifierException(Exception): class IdentifierType: """Type of identifier.""" - AUDIO_ISSUE_NUMBER = 'bf:AudioIssueNumber' - DOI = 'bf:Doi' - EAN = 'bf:Ean' - GTIN_14 = 'bf:Gtin14Number' - IDENTIFIER = 'bf:Identifier' - ISAN = 'bf:Isan' - ISBN = 'bf:Isbn' - ISMN = 'bf:Ismn' - ISRC = 'bf:Isrc' - ISSN = 'bf:Issn' - L_ISSN = 'bf:IssnL' - LCCN = 'bf:Lccn' - LOCAL = 'bf:Local' - MATRIX_NUMBER = 'bf:MatrixNumber' - MUSIC_DISTRIBUTOR_NUMBER = 'bf:MusicDistributorNumber' - MUSIC_PLATE = 'bf:MusicPlate' - MUSIC_PUBLISHER_NUMBER = 'bf:MusicPublisherNumber' - PUBLISHER_NUMBER = 'bf:PublisherNumber' - UPC = 'bf:Upc' - URN = 'bf:Urn' - VIDEO_RECORDING_NUMBER = 'bf:VideoRecordingNumber' - URI = 'uri' + AUDIO_ISSUE_NUMBER = "bf:AudioIssueNumber" + DOI = "bf:Doi" + EAN = "bf:Ean" + GTIN_14 = "bf:Gtin14Number" + IDENTIFIER = "bf:Identifier" + ISAN = "bf:Isan" + ISBN = "bf:Isbn" + ISMN = "bf:Ismn" + ISRC = "bf:Isrc" + ISSN = "bf:Issn" + L_ISSN = "bf:IssnL" + LCCN = "bf:Lccn" + LOCAL = "bf:Local" + MATRIX_NUMBER = "bf:MatrixNumber" + MUSIC_DISTRIBUTOR_NUMBER = "bf:MusicDistributorNumber" + MUSIC_PLATE = "bf:MusicPlate" + MUSIC_PUBLISHER_NUMBER = "bf:MusicPublisherNumber" + PUBLISHER_NUMBER = 
"bf:PublisherNumber" + UPC = "bf:Upc" + URN = "bf:Urn" + VIDEO_RECORDING_NUMBER = "bf:VideoRecordingNumber" + URI = "uri" class IdentifierStatus: """Status of identifier.""" UNDEFINED = None - INVALID = 'invalid' - CANCELLED = 'cancelled' - INVALID_OR_CANCELLED = 'invalid or cancelled' + INVALID = "invalid" + CANCELLED = "cancelled" + INVALID_OR_CANCELLED = "invalid or cancelled" - ALL = [ - UNDEFINED, - INVALID, - CANCELLED, - INVALID_OR_CANCELLED - ] + ALL = [UNDEFINED, INVALID, CANCELLED, INVALID_OR_CANCELLED] # ============================================================================= @@ -106,7 +109,7 @@ class IdentifierStatus: # * `ISBNIdentifier` class represent any ISBN identifier (isbn-10, isbn-13) # * `EANIdentifier` class to represent an isbn without any hyphens # ============================================================================= -DocIdentifier = TypeVar('DocIdentifier') +DocIdentifier = TypeVar("DocIdentifier") @dataclass(repr=False) @@ -124,7 +127,7 @@ def __post_init__(self): """Post initialization dataclass magic function.""" if self.status == IdentifierStatus.UNDEFINED and not self.is_valid(): self.status = IdentifierStatus.INVALID - if hasattr(self, '__type__'): + if hasattr(self, "__type__"): self.type = self.__type__ if not self.type: raise InvalidIdentifierException("'type' is a required property.") @@ -146,7 +149,7 @@ def __str__(self) -> str: def to_dict(self): """Expose identifier as a dictionary.""" data = self.__dict__ - data.pop('__type__', None) + data.pop("__type__", None) return data def normalize(self) -> str: @@ -169,12 +172,12 @@ def dump(self) -> dict: """Dump this identifier.""" status = self.status if self.is_valid() else IdentifierStatus.INVALID data = { - 'type': self.type, - 'value': self.normalize(), - 'note': self.note, - 'qualifier': self.qualifier, - 'source': self.source, - 'status': status + "type": self.type, + "value": self.normalize(), + "note": self.note, + "qualifier": self.qualifier, + "source": self.source, + "status": status, } return {k: v for k, v in data.items() if v} @@ -186,7 +189,7 @@ def render(self, **kwargs) -> str: representation. :return: the string representation of the identifier. 
""" - render_class = kwargs.pop('render_class', DefaultIdentifierRenderer()) + render_class = kwargs.pop("render_class", DefaultIdentifierRenderer()) return render_class.render(self, **kwargs) def get_alternatives(self) -> list[DocIdentifier]: @@ -242,6 +245,7 @@ class EANIdentifier(Identifier): def normalize(self) -> str: """Get the normalized value for this EAN.""" return canonical(self.value) or self.value + __str__ = normalize def is_valid(self) -> bool: @@ -273,6 +277,7 @@ def get_alternatives(self) -> list[Identifier]: # >> xxx, t.2 # ============================================================================= + class IdentifierRenderer(ABC): """Identifier renderer class.""" @@ -297,7 +302,7 @@ def render(self, identifier: Identifier, **kwargs) -> str: """Get the string representation of an identifier.""" output = str(identifier) if identifier.qualifier: - output += f', {identifier.qualifier}' + output += f", {identifier.qualifier}" return output @@ -305,12 +310,13 @@ def render(self, identifier: Identifier, **kwargs) -> str: # FACTORY # ============================================================================= + class IdentifierFactory: """Factory to build `Identifier` object from dictionary.""" _mapping_table = { IdentifierType.ISBN: ISBNIdentifier, - IdentifierType.EAN: EANIdentifier + IdentifierType.EAN: EANIdentifier, } @staticmethod @@ -320,9 +326,9 @@ def create_identifier(data) -> Identifier: :param data: the dictionary representing the identifier. :return the created Identifier. """ - if 'type' not in data: + if "type" not in data: raise AttributeError("'type' is a required property.") - if data['type'] in IdentifierFactory._mapping_table: - return IdentifierFactory._mapping_table[data['type']](**data) + if data["type"] in IdentifierFactory._mapping_table: + return IdentifierFactory._mapping_table[data["type"]](**data) return Identifier(**data) diff --git a/rero_ils/modules/commons/models.py b/rero_ils/modules/commons/models.py index 1709206c3d..0415f33f14 100644 --- a/rero_ils/modules/commons/models.py +++ b/rero_ils/modules/commons/models.py @@ -22,5 +22,5 @@ class NoteTypes: """List of note type.""" - PUBLIC_NOTE = 'public_note' - STAFF_NOTE = 'staff_note' + PUBLIC_NOTE = "public_note" + STAFF_NOTE = "staff_note" diff --git a/rero_ils/modules/commons/schemas.py b/rero_ils/modules/commons/schemas.py index 65145f858b..87f3d7a813 100644 --- a/rero_ils/modules/commons/schemas.py +++ b/rero_ils/modules/commons/schemas.py @@ -33,12 +33,15 @@ def http_applicable_method(*http_methods): function will be applicable. If request method isn't in this list, the decorated function will be skipped/uncalled. """ + def inner(func): @wraps(func) def wrapper(*args, **kwargs): if request.method in http_methods: return func(*args, **kwargs) + return wrapper + return inner @@ -46,7 +49,7 @@ class RefSchema(Schema): """Schema to describe a reference to another resources.""" # TODO : find a way to validate the `$ref` using a variable pattern. 
- ref = SanitizedUnicode(data_key='$ref', attribute='$ref') + ref = SanitizedUnicode(data_key="$ref", attribute="$ref") class NoteSchema(Schema): diff --git a/rero_ils/modules/decorators.py b/rero_ils/modules/decorators.py index a0b9f12df6..5a960ab9ae 100644 --- a/rero_ils/modules/decorators.py +++ b/rero_ils/modules/decorators.py @@ -25,20 +25,24 @@ from invenio_access import Permission from werkzeug.exceptions import HTTPException -from rero_ils.permissions import librarian_permission, login_and_librarian, \ - login_and_patron +from rero_ils.permissions import ( + librarian_permission, + login_and_librarian, + login_and_patron, +) from .permissions import PermissionContext def check_authentication(fn): """Decorator to check authentication for permissions HTTP API.""" + @wraps(fn) def decorated_view(*args, **kwargs): if not current_user.is_authenticated: - return jsonify({'status': 'error: Unauthorized'}), 401 + return jsonify({"status": "error: Unauthorized"}), 401 if not librarian_permission.require().can(): - return jsonify({'status': 'error: Forbidden'}), 403 + return jsonify({"status": "error: Forbidden"}), 403 return fn(*args, **kwargs) return decorated_view @@ -50,10 +54,12 @@ def check_logged_as_librarian(fn): If no user is connected: return 401 (unauthorized) If current logged user isn't `librarian`: return 403 (forbidden) """ + @wraps(fn) def wrapper(*args, **kwargs): login_and_librarian() return fn(*args, **kwargs) + return wrapper @@ -63,6 +69,7 @@ def check_logged_as_patron(fn): If no user is connected: redirect the user to sign-in page If current logged user isn't `patron`: return 403 (forbidden) """ + @wraps(fn) def wrapper(*args, **kwargs): status, code, redirect_url = login_and_patron() @@ -72,15 +79,17 @@ def wrapper(*args, **kwargs): return redirect(redirect_url) else: abort(code) + return wrapper def check_logged_user_authentication(func): """Decorator to check authentication for user HTTP API.""" + @wraps(func) def decorated_view(*args, **kwargs): if not current_user.is_authenticated: - return jsonify({'status': 'error: Unauthorized'}), 401 + return jsonify({"status": "error: Unauthorized"}), 401 return func(*args, **kwargs) return decorated_view @@ -92,15 +101,18 @@ def check_permission(actions): :param actions: List of `ActionNeed` to test. If one permission failed then the access should be unauthorized. """ + def inner(func): @wraps(func) def wrapper(*args, **kwargs): for action in actions: permission = Permission(action) if not permission.can(): - return jsonify({'status': 'error: Unauthorized'}), 401 + return jsonify({"status": "error: Unauthorized"}), 401 return func(*args, **kwargs) + return wrapper + return inner @@ -113,14 +125,15 @@ def parse_permission_payload(func): :raises KeyError - If a required parameter isn't available. """ + @wraps(func) def wrapper(*args, **kwargs): data = request.get_json() or {} - kwargs['method'] = 'deny' if request.method == 'DELETE' else 'allow' + kwargs["method"] = "deny" if request.method == "DELETE" else "allow" # define required parameters depending on request context. - required_arguments = ['context', 'permission'] - if data.get('context') == PermissionContext.BY_ROLE: - required_arguments.extend(['role_name']) + required_arguments = ["context", "permission"] + if data.get("context") == PermissionContext.BY_ROLE: + required_arguments.extend(["role_name"]) # check parameter exists and fill the keyword argument with them. 
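(Aside, illustrative only: how the `check_permission` decorator defined above is meant to guard a JSON endpoint. The blueprint, route and the pairing with `read_action` are hypothetical; any factory-built action need would work.)

    from flask import Blueprint, jsonify

    from rero_ils.modules.collections.permissions import read_action
    from rero_ils.modules.decorators import check_permission

    example_blueprint = Blueprint("example", __name__)


    @example_blueprint.route("/collections/<pid>/example")
    @check_permission([read_action])
    def collection_example(pid):
        # reached only when every listed permission check passes;
        # otherwise the decorator answers 401 with a JSON error status
        return jsonify({"pid": pid})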
for param_name in required_arguments: try: @@ -128,19 +141,22 @@ def wrapper(*args, **kwargs): except KeyError: abort(400, f"'{param_name}' argument required") return func(*args, **kwargs) + return wrapper def jsonify_error(func): """Jsonify errors.""" + @wraps(func) def decorated_view(*args, **kwargs): try: return func(*args, **kwargs) except HTTPException as httpe: - return jsonify({'message': f'{httpe}'}), httpe.code + return jsonify({"message": f"{httpe}"}), httpe.code except Exception as error: # raise error # current_app.logger.error(str(error)) - return jsonify({'message': f'{error}'}), 400 + return jsonify({"message": f"{error}"}), 400 + return decorated_view diff --git a/rero_ils/modules/documents/api.py b/rero_ils/modules/documents/api.py index 043078bc47..16df66a5cd 100644 --- a/rero_ils/modules/documents/api.py +++ b/rero_ils/modules/documents/api.py @@ -29,31 +29,30 @@ from invenio_search import current_search_client from jsonschema.exceptions import ValidationError -from rero_ils.modules.acquisition.acq_order_lines.api import \ - AcqOrderLinesSearch +from rero_ils.modules.acquisition.acq_order_lines.api import AcqOrderLinesSearch from rero_ils.modules.api import IlsRecord, IlsRecordsIndexer, IlsRecordsSearch -from rero_ils.modules.commons.identifiers import IdentifierFactory, \ - IdentifierType +from rero_ils.modules.commons.identifiers import IdentifierFactory, IdentifierType from rero_ils.modules.fetchers import id_fetcher -from rero_ils.modules.local_fields.extensions import \ - DeleteRelatedLocalFieldExtension +from rero_ils.modules.local_fields.extensions import DeleteRelatedLocalFieldExtension from rero_ils.modules.minters import id_minter -from rero_ils.modules.operation_logs.extensions import \ - OperationLogObserverExtension +from rero_ils.modules.operation_logs.extensions import OperationLogObserverExtension from rero_ils.modules.organisations.api import Organisation from rero_ils.modules.providers import Provider from rero_ils.modules.utils import sorted_pids from .dumpers import document_indexer_dumper, document_replace_refs_dumper -from .extensions import AddMEFPidExtension, EditionStatementExtension, \ - ProvisionActivitiesExtension, SeriesStatementExtension, TitleExtension +from .extensions import ( + AddMEFPidExtension, + EditionStatementExtension, + ProvisionActivitiesExtension, + SeriesStatementExtension, + TitleExtension, +) from .models import DocumentIdentifier, DocumentMetadata # provider DocumentProvider = type( - 'DocumentProvider', - (Provider,), - dict(identifier=DocumentIdentifier, pid_type='doc') + "DocumentProvider", (Provider,), dict(identifier=DocumentIdentifier, pid_type="doc") ) # minter document_id_minter = partial(id_minter, provider=DocumentProvider) @@ -67,15 +66,16 @@ class DocumentsSearch(IlsRecordsSearch): class Meta: """Search only on documents index.""" - index = 'documents' + index = "documents" doc_types = None - fields = ('*', ) + fields = ("*",) facets = {} default_filter = None - def by_entity(self, entity, subjects=True, imported_subjects=True, - genre_forms=True): + def by_entity( + self, entity, subjects=True, imported_subjects=True, genre_forms=True + ): """Build a search to get hits related to an entity. :param entity: the entity record to search. @@ -85,17 +85,17 @@ def by_entity(self, entity, subjects=True, imported_subjects=True, :returns: An ElasticSearch query to get hits related the entity. 
:rtype: `elasticsearch_dsl.Search` """ - field = f'contribution.entity.pids.{entity.resource_type}' - filters = Q('term', **{field: entity.pid}) + field = f"contribution.entity.pids.{entity.resource_type}" + filters = Q("term", **{field: entity.pid}) if subjects: - field = f'subjects.entity.pids.{entity.resource_type}' - filters |= Q('term', **{field: entity.pid}) + field = f"subjects.entity.pids.{entity.resource_type}" + filters |= Q("term", **{field: entity.pid}) if imported_subjects: - field = f'subjects_imported.pids.{entity.resource_type}' - filters |= Q('term', **{field: entity.pid}) + field = f"subjects_imported.pids.{entity.resource_type}" + filters |= Q("term", **{field: entity.pid}) if genre_forms: - field = f'genreForm.entity.pids.{entity.resource_type}' - filters |= Q('term', **{field: entity.pid}) + field = f"genreForm.entity.pids.{entity.resource_type}" + filters |= Q("term", **{field: entity.pid}) return self.filter(filters) def by_library_pid(self, library_pid): @@ -105,8 +105,7 @@ def by_library_pid(self, library_pid): :returns: An ElasticSearch query to get hits related the entity. :rtype: `elasticsearch_dsl.Search` """ - return self.filter( - 'term', holdings__organisation__library_pid=library_pid) + return self.filter("term", holdings__organisation__library_pid=library_pid) class Document(IlsRecord): @@ -121,12 +120,12 @@ class Document(IlsRecord): _extensions = [ OperationLogObserverExtension(), - AddMEFPidExtension('subjects', 'contribution', 'genreForm'), + AddMEFPidExtension("subjects", "contribution", "genreForm"), ProvisionActivitiesExtension(), SeriesStatementExtension(), EditionStatementExtension(), TitleExtension(), - DeleteRelatedLocalFieldExtension() + DeleteRelatedLocalFieldExtension(), ] def _validate(self, **kwargs): @@ -135,30 +134,45 @@ def _validate(self, **kwargs): if self.pid_check: from ..utils import pids_exists_in_data - validation_message = pids_exists_in_data( - info=f'{self.provider.pid_type} ({self.pid})', - data=self, - required={}, - not_required={'doc': [ - 'supplement', 'supplementTo', 'otherEdition', - 'otherPhysicalFormat', 'issuedWith', 'precededBy', - 'succeededBy', 'relatedTo', 'hasReproduction', - 'reproductionOf' - ]} - ) or True + + validation_message = ( + pids_exists_in_data( + info=f"{self.provider.pid_type} ({self.pid})", + data=self, + required={}, + not_required={ + "doc": [ + "supplement", + "supplementTo", + "otherEdition", + "otherPhysicalFormat", + "issuedWith", + "precededBy", + "succeededBy", + "relatedTo", + "hasReproduction", + "reproductionOf", + ] + }, + ) + or True + ) if validation_message is True: # also test partOf - if part_of := self.get('partOf', []): + if part_of := self.get("partOf", []): # make a list of refs for easier testing - part_of_documents = [doc['document'] for doc in part_of] - validation_message = pids_exists_in_data( - info=f'{self.provider.pid_type} ({self.pid})', - data={'partOf': part_of_documents}, - required={}, - not_required={'doc': 'partOf'} - ) or True + part_of_documents = [doc["document"] for doc in part_of] + validation_message = ( + pids_exists_in_data( + info=f"{self.provider.pid_type} ({self.pid})", + data={"partOf": part_of_documents}, + required={}, + not_required={"doc": "partOf"}, + ) + or True + ) if validation_message is not True: - raise ValidationError(';'.join(validation_message)) + raise ValidationError(";".join(validation_message)) return json @classmethod @@ -175,16 +189,15 @@ def get_n_available_holdings(cls, pid, org_pid=None): holding_query = 
HoldingsSearch().available_query() # filter by the current document - filters = Q('term', document__pid=pid) + filters = Q("term", document__pid=pid) # filter by organisation if org_pid: - filters &= Q('term', organisation__pid=org_pid) + filters &= Q("term", organisation__pid=org_pid) holding_query = holding_query.filter(filters) # get the number of electronic holdings - n_electronic_holdings = holding_query\ - .filter('term', holdings_type='electronic') + n_electronic_holdings = holding_query.filter("term", holdings_type="electronic") return holding_query.count(), n_electronic_holdings.count() @@ -202,15 +215,13 @@ def get_available_item_pids(cls, pid, org_pid=None): items_query = ItemsSearch().available_query() # filter by the current document - filters = Q('term', document__pid=pid) + filters = Q("term", document__pid=pid) # filter by organisation if org_pid: - filters &= Q('term', organisation__pid=org_pid) + filters &= Q("term", organisation__pid=org_pid) - return [ - hit.pid for hit in items_query.filter(filters).source('pid').scan() - ] + return [hit.pid for hit in items_query.filter(filters).source("pid").scan()] @classmethod def get_item_pids_with_active_loan(cls, pid, org_pid=None): @@ -225,17 +236,15 @@ def get_item_pids_with_active_loan(cls, pid, org_pid=None): loan_query = LoansSearch().unavailable_query() # filter by the current document - filters = Q('term', document_pid=pid) + filters = Q("term", document_pid=pid) # filter by organisation if org_pid: - filters &= Q('term', organisation__pid=org_pid) + filters &= Q("term", organisation__pid=org_pid) loan_query = loan_query.filter(filters) - return [ - hit.item_pid.value for hit in loan_query.source('item_pid').scan() - ] + return [hit.item_pid.value for hit in loan_query.source("item_pid").scan()] @classmethod def is_available(cls, pid, view_code=None): @@ -249,14 +258,14 @@ def is_available(cls, pid, view_code=None): """ # get the organisation pid corresponding to the view code org_pid = None - if view_code != current_app.config.get( - 'RERO_ILS_SEARCH_GLOBAL_VIEW_CODE'): - org_pid = Organisation.get_record_by_viewcode(view_code)['pid'] + if view_code != current_app.config.get("RERO_ILS_SEARCH_GLOBAL_VIEW_CODE"): + org_pid = Organisation.get_record_by_viewcode(view_code)["pid"] # -------------- Holdings -------------------- # get the number of available and electronic holdings - n_available_holdings, n_electronic_holdings = \ - cls.get_n_available_holdings(pid, org_pid) + n_available_holdings, n_electronic_holdings = cls.get_n_available_holdings( + pid, org_pid + ) # available if an electronic holding exists if n_electronic_holdings: @@ -276,8 +285,7 @@ def is_available(cls, pid, view_code=None): # --------------- Loans ------------------- # get item pids that have active loans - unavailable_item_pids = \ - cls.get_item_pids_with_active_loan(pid, org_pid) + unavailable_item_pids = cls.get_item_pids_with_active_loan(pid, org_pid) # available if at least one item don't have active loan return bool(set(available_item_pids) - set(unavailable_item_pids)) @@ -285,7 +293,7 @@ def is_available(cls, pid, view_code=None): @property def harvested(self): """Is this record harvested from an external service.""" - return self.get('harvested', False) + return self.get("harvested", False) @property def can_edit(self): @@ -303,29 +311,32 @@ def get_links_to_me(self, get_pids=False): from ..items.api import ItemsSearch from ..loans.models import LoanState from ..local_fields.api import LocalFieldsSearch - hold_query = 
HoldingsSearch().filter('term', document__pid=self.pid) - item_query = ItemsSearch().filter('term', document__pid=self.pid) + + hold_query = HoldingsSearch().filter("term", document__pid=self.pid) + item_query = ItemsSearch().filter("term", document__pid=self.pid) loan_query = search_by_pid( document_pid=self.pid, - exclude_states=[LoanState.CANCELLED, LoanState.ITEM_RETURNED] + exclude_states=[LoanState.CANCELLED, LoanState.ITEM_RETURNED], ) file_query = self.get_records_files_query().source() - acq_order_lines_query = AcqOrderLinesSearch() \ - .filter('term', document__pid=self.pid) - local_fields_query = LocalFieldsSearch()\ - .get_local_fields(self.provider.pid_type, self.pid) + acq_order_lines_query = AcqOrderLinesSearch().filter( + "term", document__pid=self.pid + ) + local_fields_query = LocalFieldsSearch().get_local_fields( + self.provider.pid_type, self.pid + ) relation_types = { - 'partOf': 'partOf.document.pid', - 'supplement': 'supplement.pid', - 'supplementTo': 'supplementTo.pid', - 'otherEdition': 'otherEdition.pid', - 'otherPhysicalFormat': 'otherPhysicalFormat.pid', - 'issuedWith': 'issuedWith.pid', - 'precededBy': 'precededBy.pid', - 'succeededBy': 'succeededBy.pid', - 'relatedTo': 'relatedTo.pid', - 'hasReproduction': 'hasReproduction.pid', - 'reproductionOf': 'reproductionOf.pid' + "partOf": "partOf.document.pid", + "supplement": "supplement.pid", + "supplementTo": "supplementTo.pid", + "otherEdition": "otherEdition.pid", + "otherPhysicalFormat": "otherPhysicalFormat.pid", + "issuedWith": "issuedWith.pid", + "precededBy": "precededBy.pid", + "succeededBy": "succeededBy.pid", + "relatedTo": "relatedTo.pid", + "hasReproduction": "hasReproduction.pid", + "reproductionOf": "reproductionOf.pid", } if get_pids: @@ -337,8 +348,7 @@ def get_links_to_me(self, get_pids=False): local_fields = sorted_pids(local_fields_query) documents = {} for relation, relation_es in relation_types.items(): - doc_query = DocumentsSearch() \ - .filter({'term': {relation_es: self.pid}}) + doc_query = DocumentsSearch().filter({"term": {relation_es: self.pid}}) if pids := sorted_pids(doc_query): documents[relation] = pids else: @@ -350,24 +360,23 @@ def get_links_to_me(self, get_pids=False): local_fields = local_fields_query.count() documents = 0 for relation_es in relation_types.values(): - doc_query = DocumentsSearch() \ - .filter({'term': {relation_es: self.pid}}) + doc_query = DocumentsSearch().filter({"term": {relation_es: self.pid}}) documents += doc_query.count() links = { - 'holdings': holdings, - 'items': items, - 'files': files, - 'loans': loans, - 'acq_order_lines': acq_order_lines, - 'documents': documents, - 'local_fields': local_fields + "holdings": holdings, + "items": items, + "files": files, + "loans": loans, + "acq_order_lines": acq_order_lines, + "documents": documents, + "local_fields": local_fields, } return {k: v for k, v in links.items() if v} def get_records_files_query(self, lib_pids=None): """Creates an es query to retrieves the record files.""" - ext = current_app.extensions['rero-invenio-files'] + ext = current_app.extensions["rero-invenio-files"] sfr = ext.records_service search = sfr.search_request( system_identity, dict(size=1), sfr.record_cls, sfr.config.search @@ -375,20 +384,19 @@ def get_records_files_query(self, lib_pids=None): # required to avoid exception during the `count()` call # TODO: remove this once the issue is solved search._params = {} - search = search.source(['uuid', 'id'])\ - .filter('term', metadata__document__pid=self.pid) + search = 
search.source(["uuid", "id"]).filter( + "term", metadata__document__pid=self.pid + ) # filter by library pids if lib_pids: - search = search.filter('terms', metadata__library__pid=lib_pids) + search = search.filter("terms", metadata__library__pid=lib_pids) return search def get_records_files(self, lib_pids=None): """Get the record files linked to the current document.""" - ext = current_app.extensions['rero-invenio-files'] + ext = current_app.extensions["rero-invenio-files"] sfr = ext.records_service - for rec in (self.get_records_files_query( - lib_pids=lib_pids).source().scan() - ): + for rec in self.get_records_files_query(lib_pids=lib_pids).source().scan(): yield sfr.record_cls.get_record(rec.uuid) def reasons_not_to_delete(self): @@ -396,24 +404,27 @@ def reasons_not_to_delete(self): cannot_delete = {} links = self.get_links_to_me() # related LocalFields isn't a reason to block suppression - links.pop('local_fields', None) + links.pop("local_fields", None) if links: - cannot_delete['links'] = links + cannot_delete["links"] = links if self.harvested: - cannot_delete['others'] = dict(harvested=True) + cannot_delete["others"] = dict(harvested=True) return cannot_delete def index_entities(self, bulk=False): """Index all attached entities.""" - from rero_ils.modules.entities.remote_entities.api import \ - RemoteEntitiesIndexer, RemoteEntity + from rero_ils.modules.entities.remote_entities.api import ( + RemoteEntitiesIndexer, + RemoteEntity, + ) from ..tasks import process_bulk_queue + entities_ids = [] - fields = ('contribution', 'subjects', 'genreForm') + fields = ("contribution", "subjects", "genreForm") for field in fields: for entity in self.get(field, []): - if ent_pid := entity['entity'].get('pid'): + if ent_pid := entity["entity"].get("pid"): if bulk: uid = RemoteEntity.get_id_by_pid(ent_pid) entities_ids.append(uid) @@ -431,9 +442,12 @@ def get_all_serial_pids(cls): a serial document has mode_of_issuance main_type equal to rdami:1003 """ - es_documents = DocumentsSearch()\ - .filter('term', issuance__main_type="rdami:1003")\ - .source(['pid']).scan() + es_documents = ( + DocumentsSearch() + .filter("term", issuance__main_type="rdami:1003") + .source(["pid"]) + .scan() + ) for es_document in es_documents: yield es_document.pid @@ -445,11 +459,14 @@ def get_document_pids_by_issn(cls, issn_number: str): :return: the pids of the record having the given ISSN :rtype: generator """ - criteria = Q('term', nested_identifiers__type=IdentifierType.ISSN) - criteria &= Q('term', nested_identifiers__value__raw=issn_number) - es_documents = DocumentsSearch()\ - .filter('nested', path='nested_identifiers', query=criteria)\ - .source('pid').scan() + criteria = Q("term", nested_identifiers__type=IdentifierType.ISSN) + criteria &= Q("term", nested_identifiers__value__raw=issn_number) + es_documents = ( + DocumentsSearch() + .filter("nested", path="nested_identifiers", query=criteria) + .source("pid") + .scan() + ) for es_document in es_documents: yield es_document.pid @@ -466,8 +483,8 @@ def get_identifiers(self, filters=None, with_alternatives=False): filters = [] or filters identifiers = { IdentifierFactory.create_identifier(data) - for data in self.get('identifiedBy', []) - if not filters or data.get('type') in filters + for data in self.get("identifiedBy", []) + if not filters or data.get("type") in filters } if with_alternatives: for identifier in list(identifiers): @@ -477,10 +494,10 @@ def get_identifiers(self, filters=None, with_alternatives=False): @property def document_type(self): """Get 
first document type of document.""" - document_type = 'docmaintype_other' - if document_types := self.get('type', []): - document_type = document_types[0]['main_type'] - if document_subtype := document_types[0].get('subtype'): + document_type = "docmaintype_other" + if document_types := self.get("type", []): + document_type = document_types[0]["main_type"] + if document_subtype := document_types[0].get("subtype"): document_type = document_subtype return document_type @@ -488,33 +505,30 @@ def document_type(self): def document_types(self): """All types of document.""" document_types = [] - for document_type in self.get('type', []): - main_type = document_type.get('main_type') - if sub_type := document_type.get('subtype'): + for document_type in self.get("type", []): + main_type = document_type.get("main_type") + if sub_type := document_type.get("subtype"): main_type = sub_type document_types.append(main_type) - return document_types or ['docmaintype_other'] + return document_types or ["docmaintype_other"] def add_cover_url(self, url, dbcommit=False, reindex=False): """Adds electronicLocator with coverImage to document.""" - electronic_locators = self.get('electronicLocator', []) + electronic_locators = self.get("electronicLocator", []) for electronic_locator in electronic_locators: - e_content = electronic_locator.get('content') - e_type = electronic_locator.get('type') + e_content = electronic_locator.get("content") + e_type = electronic_locator.get("type") if ( - e_content == 'coverImage' - and e_type == 'relatedResource' - and electronic_locator.get('url') == url + e_content == "coverImage" + and e_type == "relatedResource" + and electronic_locator.get("url") == url ): return self, False - electronic_locators.append({ - 'content': 'coverImage', - 'type': 'relatedResource', - 'url': url - }) - self['electronicLocator'] = electronic_locators - self = self.update( - data=self, commit=True, dbcommit=dbcommit, reindex=reindex) + electronic_locators.append( + {"content": "coverImage", "type": "relatedResource", "url": url} + ) + self["electronicLocator"] = electronic_locators + self = self.update(data=self, commit=True, dbcommit=dbcommit, reindex=reindex) return self, True def resolve(self): @@ -543,9 +557,8 @@ def _es_document(cls, record): :returns: the elasticsearch document or {} """ try: - es_item = current_search_client.get( - DocumentsSearch.Meta.index, record.id) - return es_item['_source'] + es_item = current_search_client.get(DocumentsSearch.Meta.index, record.id) + return es_item["_source"] except NotFoundError: return {} @@ -565,10 +578,8 @@ def index(self, record): # has been changed # the comparison should be done on the dumps as _text is # added for indexing - if not es_document \ - or (record.dumps().get('title') != es_document.get('title')): - search = DocumentsSearch().filter( - 'term', partOf__document__pid=record.pid) + if not es_document or (record.dumps().get("title") != es_document.get("title")): + search = DocumentsSearch().filter("term", partOf__document__pid=record.pid) if ids := [doc.meta.id for doc in search.source().scan()]: # reindex in background as the list can be huge self.bulk_index(ids) @@ -579,4 +590,4 @@ def bulk_index(self, record_id_iterator): :param record_id_iterator: Iterator yielding record UUIDs. 
""" - super().bulk_index(record_id_iterator, doc_type='doc') + super().bulk_index(record_id_iterator, doc_type="doc") diff --git a/rero_ils/modules/documents/api_views.py b/rero_ils/modules/documents/api_views.py index 5f299d1a95..21325e45b9 100644 --- a/rero_ils/modules/documents/api_views.py +++ b/rero_ils/modules/documents/api_views.py @@ -27,38 +27,30 @@ from rero_ils.modules.decorators import check_logged_as_librarian +from ..utils import cached from .api import Document from .utils import get_remote_cover -from ..utils import cached -api_blueprint = Blueprint( - 'api_documents', - __name__, - url_prefix='/document' -) +api_blueprint = Blueprint("api_documents", __name__, url_prefix="/document") -@api_blueprint.route('/cover/') +@api_blueprint.route("/cover/") @cached(timeout=300, query_string=True) def cover(isbn): """Document cover service.""" return jsonify(get_remote_cover(isbn)) -@api_blueprint.route('//availability', methods=['GET']) +@api_blueprint.route("//availability", methods=["GET"]) def document_availability(pid): """HTTP GET request for document availability.""" if not Document.record_pid_exists(pid): abort(404) - view_code = flask_request.args.get('view_code') - if not view_code: - view_code = 'global' - return jsonify({ - 'available': Document.is_available(pid, view_code) - }) + view_code = flask_request.args.get("view_code") or "global" + return jsonify({"available": Document.is_available(pid, view_code)}) -@api_blueprint.route('/advanced-search-config') +@api_blueprint.route("/advanced-search-config") @cached(timeout=300, query_string=True) @check_logged_as_librarian def advanced_search_config(): @@ -66,49 +58,58 @@ def advanced_search_config(): def sort_medias(a, b): """Sort only media start with rda in label.""" - a, b = a['label'], b['label'] - if a.startswith('rda') and b.startswith('rda'): + a, b = a["label"], b["label"] + if a.startswith("rda") and b.startswith("rda"): return a > b - elif a.startswith('rda'): + elif a.startswith("rda"): return -1 - elif b.startswith('rda'): + elif b.startswith("rda"): return 1 else: return a > b try: - cantons = current_jsonschemas.get_schema('common/cantons-v0.0.1.json') - countries = current_jsonschemas.get_schema( - 'common/countries-v0.0.1.json') + cantons = current_jsonschemas.get_schema("common/cantons-v0.0.1.json") + countries = current_jsonschemas.get_schema("common/countries-v0.0.1.json") medias = current_jsonschemas.get_schema( - 'documents/document_content_media_carrier-v0.0.1.json') + "documents/document_content_media_carrier-v0.0.1.json" + ) except JSONSchemaNotFound: abort(404) - media_items = medias['contentMediaCarrier']['items']['oneOf'] + media_items = medias["contentMediaCarrier"]["items"]["oneOf"] media_types = [] carrier_types = [] for item in media_items: - if rda_type := item.get('properties', {}).get('mediaType', {}): - data = rda_type.get('title') - media_types.append({'label': data, 'value': data}) - if rda_type := item.get('properties', {}).get('carrierType', {}): - for option in rda_type.get('widget', {}).get('formlyConfig', {})\ - .get('props', []).get('options'): + if rda_type := item.get("properties", {}).get("mediaType", {}): + data = rda_type.get("title") + media_types.append({"label": data, "value": data}) + if rda_type := item.get("properties", {}).get("carrierType", {}): + for option in ( + rda_type.get("widget", {}) + .get("formlyConfig", {}) + .get("props", []) + .get("options") + ): if option not in carrier_types: carrier_types.append(option) - return jsonify({ - 'fieldsConfig': 
current_app.config.get( - 'RERO_ILS_APP_ADVANCED_SEARCH_CONFIG', []), - 'fieldsData': { - 'country': countries['country']['widget']['formlyConfig'] - ['props']['options'], - 'canton': cantons['canton']['widget']['formlyConfig'] - ['props']['options'], - 'rdaContentType': medias['definitions']['contentType']['items'] - ['widget']['formlyConfig']['props']['options'], - 'rdaMediaType': sorted(media_types, key=cmp_to_key(sort_medias)), - 'rdaCarrierType': sorted( - carrier_types, key=cmp_to_key(sort_medias)) + return jsonify( + { + "fieldsConfig": current_app.config.get( + "RERO_ILS_APP_ADVANCED_SEARCH_CONFIG", [] + ), + "fieldsData": { + "country": countries["country"]["widget"]["formlyConfig"]["props"][ + "options" + ], + "canton": cantons["canton"]["widget"]["formlyConfig"]["props"][ + "options" + ], + "rdaContentType": medias["definitions"]["contentType"]["items"][ + "widget" + ]["formlyConfig"]["props"]["options"], + "rdaMediaType": sorted(media_types, key=cmp_to_key(sort_medias)), + "rdaCarrierType": sorted(carrier_types, key=cmp_to_key(sort_medias)), + }, } - }) + ) diff --git a/rero_ils/modules/documents/dojson/contrib/jsontodc/__init__.py b/rero_ils/modules/documents/dojson/contrib/jsontodc/__init__.py index d6fb1b4d90..0a5a25d879 100644 --- a/rero_ils/modules/documents/dojson/contrib/jsontodc/__init__.py +++ b/rero_ils/modules/documents/dojson/contrib/jsontodc/__init__.py @@ -19,4 +19,4 @@ from .model import dublincore -__all__ = ('dublincore') +__all__ = "dublincore" diff --git a/rero_ils/modules/documents/dojson/contrib/jsontodc/model.py b/rero_ils/modules/documents/dojson/contrib/jsontodc/model.py index 969f914778..93894240a2 100644 --- a/rero_ils/modules/documents/dojson/contrib/jsontodc/model.py +++ b/rero_ils/modules/documents/dojson/contrib/jsontodc/model.py @@ -22,15 +22,13 @@ from rero_ils.modules.documents.extensions import TitleExtension from rero_ils.modules.entities.models import EntityType -from rero_ils.modules.entities.remote_entities.utils import \ - get_entity_localized_value +from rero_ils.modules.entities.remote_entities.utils import get_entity_localized_value class DublinCoreOverdo(Overdo): """Specialized Overdo for Dublin Core.""" - def do(self, blob, ignore_missing=True, exception_handlers=None, - language='fr'): + def do(self, blob, ignore_missing=True, exception_handlers=None, language="fr"): """Translate blob values and instantiate new model instance. 
Raises ``MissingRule`` when no rule matched and ``ignore_missing`` @@ -49,188 +47,193 @@ def do(self, blob, ignore_missing=True, exception_handlers=None, self.language = language result = super().do( - blob, - ignore_missing=ignore_missing, - exception_handlers=exception_handlers + blob, ignore_missing=ignore_missing, exception_handlers=exception_handlers ) - titles = blob.get('title', []) - bf_titles = list(filter(lambda t: t['type'] == 'bf:Title', titles)) + titles = blob.get("title", []) + bf_titles = list(filter(lambda t: t["type"] == "bf:Title", titles)) - text = TitleExtension.format_text( + if text := TitleExtension.format_text( titles=bf_titles, - responsabilities=blob.get('responsibilityStatement', []), - with_subtitle=True - ) - if text: - result['titles'] = [text] - - pid = blob.get('pid') - if pid: - identifiers = result.get('identifiers', []) - identifiers.insert(0, f'bf:Local|{pid}') + responsabilities=blob.get("responsibilityStatement", []), + with_subtitle=True, + ): + result["titles"] = [text] + + if pid := blob.get("pid"): + identifiers = result.get("identifiers", []) + identifiers.insert(0, f"bf:Local|{pid}") return result dublincore = DublinCoreOverdo() CREATOR_ROLES = [ - 'aut', 'cmp', 'pht', 'ape', 'aqt', 'arc', 'art', 'aus', 'chr', 'cll', - 'com', 'drt', 'dsr', 'enj', 'fmk', 'inv', 'ive', 'ivr', 'lbt', 'lsa', - 'lyr', 'pra', 'prg', 'rsp', 'scl' + "aut", + "cmp", + "pht", + "ape", + "aqt", + "arc", + "art", + "aus", + "chr", + "cll", + "com", + "drt", + "dsr", + "enj", + "fmk", + "inv", + "ive", + "ivr", + "lbt", + "lsa", + "lyr", + "pra", + "prg", + "rsp", + "scl", ] # creator and contributor -@dublincore.over('creators', 'contribution') +@dublincore.over("creators", "contribution") @utils.for_each_value @utils.ignore_value def json_to_contributors(self, key, value): """Get creators and contributors data.""" authorized_access_point = get_entity_localized_value( - entity=value.get('entity', {}), - key='authorized_access_point', - language=dublincore.language + entity=value.get("entity", {}), + key="authorized_access_point", + language=dublincore.language, ) result = authorized_access_point if result is None: - result = value.get('entity', {}).get('authorized_access_point') + result = value.get("entity", {}).get("authorized_access_point") if result: - if value.get('role') in CREATOR_ROLES: + if value.get("role") in CREATOR_ROLES: return result - contributors = self.get('contributors', []) + contributors = self.get("contributors", []) contributors.append(result) # save contributors directly into self - self['contributors'] = contributors + self["contributors"] = contributors -@dublincore.over('descriptions', - '^(summary|note|dissertation|supplementaryContent)') +@dublincore.over("descriptions", "^(summary|note|dissertation|supplementaryContent)") @utils.ignore_value def json_to_descriptions(self, key, value): """Get descriptions data.""" - descriptions = self.get('descriptions', []) + descriptions = self.get("descriptions", []) for data in utils.force_list(value): - if key == 'supplementaryContent': + if key == "supplementaryContent": descriptions.append(data) - elif key == 'summary': - descriptions += [label['value'] for label in data.get('label', [])] - else: - label = data.get('label') - if label: - descriptions.append(label) + elif key == "summary": + descriptions += [label["value"] for label in data.get("label", [])] + elif label := data.get("label"): + descriptions.append(label) if descriptions: # write the discriptions directly into self - self['descriptions'] = 
descriptions + self["descriptions"] = descriptions -@dublincore.over('languages', '^language') +@dublincore.over("languages", "^language") @utils.ignore_value def json_to_languages(self, key, value): """Get languages data.""" - languages = [language.get('value') for language in utils.force_list(value)] + languages = [language.get("value") for language in utils.force_list(value)] return languages or None -@dublincore.over('publishers', '^provisionActivity') +@dublincore.over("publishers", "^provisionActivity") @utils.ignore_value def json_to_dates(self, key, value): """Get publishers data and date.""" - publishers = self.get('publisher', []) - dates = self.get('dates', []) + publishers = self.get("publisher", []) + dates = self.get("dates", []) for data in value: # only take the first date: - if data.get('type') == 'bf:Publication' and not self.get('date'): - start_date = str(data.get('startDate', '')) + if data.get("type") == "bf:Publication" and not self.get("date"): + start_date = str(data.get("startDate", "")) date = [start_date] - end_date = str(data.get('endDate', '')) - if end_date: + if end_date := str(data.get("endDate", "")): date.append(end_date) if date: - dates.append('-'.join(date)) - statements = data.get('statement', []) + dates.append("-".join(date)) + statements = data.get("statement", []) for statement in statements: - if statement['type'] == 'bf:Agent': + if statement["type"] == "bf:Agent": # TODO: witch value do we need to take? - publishers.append(statement['label'][0].get('value')) + publishers.append(statement["label"][0].get("value")) if dates: # write the dates directly into self - self['dates'] = dates + self["dates"] = dates if publishers: # write the publishers directly into self - self['publishers'] = publishers + self["publishers"] = publishers -@dublincore.over('types', '^type') +@dublincore.over("types", "^type") @utils.for_each_value @utils.ignore_value def json_to_types(self, key, value): """Get types data.""" - main_type = value.get('main_type') - subtype_type = value.get('subtype') - if subtype_type: - return ' / '.join([_(main_type), _(subtype_type)]) + main_type = value.get("main_type") + if subtype_type := value.get("subtype"): + return " / ".join([_(main_type), _(subtype_type)]) else: return _(main_type) -@dublincore.over('identifiers', '^identifiedBy') +@dublincore.over("identifiers", "^identifiedBy") @utils.for_each_value @utils.ignore_value def json_to_identifiers(self, key, value): """Get identifiers data.""" - itype = value.get('type') - identifier_value = value.get('value') - source = value.get('source') - if source: - return f'{itype}|{identifier_value}({source})' - return f'{itype}|{identifier_value}' - - -@dublincore.over('relations', - '^(issuedWith|otherEdition|otherPhysicalFormat|precededBy|' - 'relatedTo|succeededBy|supplement|supplementTo)') + itype = value.get("type") + identifier_value = value.get("value") + if source := value.get("source"): + return f"{itype}|{identifier_value}({source})" + return f"{itype}|{identifier_value}" + + +@dublincore.over( + "relations", + "^(issuedWith|otherEdition|otherPhysicalFormat|precededBy|" + "relatedTo|succeededBy|supplement|supplementTo)", +) @utils.for_each_value @utils.ignore_value def json_to_relations(self, key, value): """Get relations data.""" - label = value.get('label') - if label: + if label := value.get("label"): return label - else: - # TODO: make shure the $ref was replaced and had a _text field. 
- titles = [] - for title in value.get('title', []): - titles.append(title['_text']) - if titles: - return ', '.join(titles) + if titles := [title["_text"] for title in value.get("title", [])]: + return ", ".join(titles) -@dublincore.over('subjects', '^subjects') +@dublincore.over("subjects", "^subjects") @utils.for_each_value @utils.ignore_value def json_to_subject(self, key, value): """Get subject data.""" - result = '' - _type = value.get('type') + result = "" + _type = value.get("type") if _type in [EntityType.PERSON, EntityType.ORGANISATION, EntityType.PLACE]: - # TODO: set the language - authorized_access_point = get_entity_localized_value( + if authorized_access_point := get_entity_localized_value( entity=value, - key='authorized_access_point', - language=dublincore.language - ) - if authorized_access_point: + key="authorized_access_point", + language=dublincore.language, + ): result = authorized_access_point else: - result = value.get('preferred_name') + result = value.get("preferred_name") elif _type == EntityType.WORK: work = [] - creator = value.get('creator') - if creator: + if creator := value.get("creator"): work.append(creator) - work.append(value.get('title')) - result = '. - '.join(work) + work.append(value.get("title")) + result = ". - ".join(work) elif _type in [EntityType.TOPIC, EntityType.TEMPORAL]: - result = value.get('term') + result = value.get("term") return result or None diff --git a/rero_ils/modules/documents/dojson/contrib/jsontomarc21/__init__.py b/rero_ils/modules/documents/dojson/contrib/jsontomarc21/__init__.py index 767382030e..892b88eb86 100644 --- a/rero_ils/modules/documents/dojson/contrib/jsontomarc21/__init__.py +++ b/rero_ils/modules/documents/dojson/contrib/jsontomarc21/__init__.py @@ -19,4 +19,4 @@ from .model import to_marc21 -__all__ = ('to_marc21') +__all__ = "to_marc21" diff --git a/rero_ils/modules/documents/dojson/contrib/jsontomarc21/model.py b/rero_ils/modules/documents/dojson/contrib/jsontomarc21/model.py index fee913afcc..5c64e51cbf 100644 --- a/rero_ils/modules/documents/dojson/contrib/jsontomarc21/model.py +++ b/rero_ils/modules/documents/dojson/contrib/jsontomarc21/model.py @@ -27,8 +27,10 @@ from rero_ils.modules.documents.utils import display_alternate_graphic_first from rero_ils.modules.documents.views import create_title_responsibilites from rero_ils.modules.entities.models import EntityType -from rero_ils.modules.entities.remote_entities.api import \ - RemoteEntitiesSearch, RemoteEntity +from rero_ils.modules.entities.remote_entities.api import ( + RemoteEntitiesSearch, + RemoteEntity, +) from rero_ils.modules.holdings.api import Holding, HoldingsSearch from rero_ils.modules.items.api import Item, ItemsSearch from rero_ils.modules.libraries.api import Library @@ -61,21 +63,27 @@ def replace_contribution_sources(contribution, source_order): :returns: contribution entity with localized values. 
""" refs = [] - entity = contribution.get('entity') + entity = contribution.get("entity") for source in source_order: if source_data := entity.get(source): - refs.append({ - 'source': source, - 'pid': source_data['pid'] - }) - for key in ['type', 'preferred_name', 'numeration', - 'qualifier', 'date_of_birth', 'date_of_death', - 'subordinate_unit', 'conference', 'conference_number', - 'conference_date', 'conference_place']: + refs.append({"source": source, "pid": source_data["pid"]}) + for key in [ + "type", + "preferred_name", + "numeration", + "qualifier", + "date_of_birth", + "date_of_death", + "subordinate_unit", + "conference", + "conference_number", + "conference_date", + "conference_place", + ]: entity = set_value(source_data, entity, key) entity.pop(source) - entity['refs'] = refs - contribution['entity'] = entity + entity["refs"] = refs + contribution["entity"] = entity return contribution @@ -89,14 +97,11 @@ def replace_concept_sources(concept, source_order): refs = [] for source in source_order: if source_data := concept.get(source): - refs.append({ - 'source': source, - 'pid': source_data['pid'] - }) - for key in ['type', 'authorized_access_point']: + refs.append({"source": source, "pid": source_data["pid"]}) + for key in ["type", "authorized_access_point"]: concept = set_value(source_data, concept, key) concept.pop(source) - concept['refs'] = refs + concept["refs"] = refs return concept @@ -112,58 +117,64 @@ def do_contribution(contribution, source_order): is it a conference :rtype: tuple(dict, str, bool, bool) """ - roles = contribution.get('role', []) - entity = contribution.get('entity') - if pid := entity.get('pid'): + roles = contribution.get("role", []) + entity = contribution.get("entity") + if pid := entity.get("pid"): # we have a $ref, get the real entity - ref = entity.get('$ref') + ref = entity.get("$ref") if entity_db := RemoteEntity.get_record_by_pid(pid): contribution = replace_contribution_sources( - contribution={'entity': entity_db}, - source_order=source_order + contribution={"entity": entity_db}, source_order=source_order ) # We got an entity from db. Replace the used entity with this one. 
- entity = contribution['entity'] + entity = contribution["entity"] else: - error_print(f'No entity found for pid:{pid} {ref}') + error_print(f"No entity found for pid:{pid} {ref}") return None, None, False, False - if not (preferred_name := entity.get('preferred_name')): - preferred_name = entity.get( - f'authorized_access_point_{to_marc21.language}') + if not (preferred_name := entity.get("preferred_name")): + preferred_name = entity.get(f"authorized_access_point_{to_marc21.language}") result = {} conference = False surname = False - result = add_value(result, 'a', preferred_name) - entity_type = entity.get('type') + result = add_value(result, "a", preferred_name) + entity_type = entity.get("type") if entity_type == EntityType.PERSON: - if ',' in preferred_name: + if "," in preferred_name: surname = True - result = add_value(result, 'b', entity.get('numeration')) - result = add_value(result, 'c', entity.get('qualifier')) - - dates = ' - '.join([ - entity['date_of_birth'][:4] if len( - entity.get('date_of_birth', '')) > 3 else '', - entity['date_of_death'][:4] if len( - entity.get('date_of_death', '')) > 3 else '' - ]) - if dates != ' - ': - result = add_value(result, 'd', dates) + result = add_value(result, "b", entity.get("numeration")) + result = add_value(result, "c", entity.get("qualifier")) + + dates = " - ".join( + [ + ( + entity["date_of_birth"][:4] + if len(entity.get("date_of_birth", "")) > 3 + else "" + ), + ( + entity["date_of_death"][:4] + if len(entity.get("date_of_death", "")) > 3 + else "" + ), + ] + ) + if dates != " - ": + result = add_value(result, "d", dates) elif entity_type == EntityType.ORGANISATION: - if entity.get('conference'): + if entity.get("conference"): conference = True - result = add_values(result, 'b', entity.get('subordinate_unit')) - result = add_value(result, 'n', entity.get('conference_number')) - result = add_value(result, 'd', entity.get('conference_date')) - result = add_value(result, 'c', entity.get('conference_place')) - result = add_values(result, '4', roles) - refs = entity.get('refs', []) + result = add_values(result, "b", entity.get("subordinate_unit")) + result = add_value(result, "n", entity.get("conference_number")) + result = add_value(result, "d", entity.get("conference_date")) + result = add_value(result, "c", entity.get("conference_place")) + result = add_values(result, "4", roles) + refs = entity.get("refs", []) if refs: - result['0'] = [] + result["0"] = [] for ref in refs: - result['__order__'].append('0') - result['0'].append(f'({ref["source"]}){ref["pid"]}') + result["__order__"].append("0") + result["0"].append(f'({ref["source"]}){ref["pid"]}') return result, entity_type, surname, conference @@ -175,30 +186,28 @@ def do_concept(entity, source_order): :returns: result marc dictionary """ authorized_access_point = None - if pid := entity.get('pid'): - ref = entity.get('$ref') + if pid := entity.get("pid"): + ref = entity.get("$ref") # we have a $ref, get the real entity if entity := RemoteEntity.get_record_by_pid(pid): - entity = replace_concept_sources( - concept=entity, - source_order=source_order - ) - authorized_access_point = entity.get('authorized_access_point') + entity = replace_concept_sources(concept=entity, source_order=source_order) + authorized_access_point = entity.get("authorized_access_point") else: - error_print(f'No entity found for pid:{pid} {ref}') + error_print(f"No entity found for pid:{pid} {ref}") return None else: authorized_access_point = entity.get( - f'authorized_access_point_{to_marc21.language}' - ) or 
entity.get('authorized_access_point') + f"authorized_access_point_{to_marc21.language}" + ) or entity.get("authorized_access_point") result = {} if authorized_access_point: - result = add_value(result, 'a', authorized_access_point) + result = add_value(result, "a", authorized_access_point) return result -def get_holdings_items(document_pid, organisation_pids=None, library_pids=None, - location_pids=None): +def get_holdings_items( + document_pid, organisation_pids=None, library_pids=None, location_pids=None +): """Create Holding and Item informations. :param document_pid: document pid to use for holdings search @@ -209,6 +218,7 @@ def get_holdings_items(document_pid, organisation_pids=None, library_pids=None, :returns: list of holding informations with associated organisation, library and location pid, name informations. """ + def get_name(resource, pid): """Get name from resource. @@ -219,35 +229,32 @@ def get_name(resource, pid): """ data = resource.get_record_by_pid(pid) if data: - return data.get('name') + return data.get("name") results = [] if document_pid: holding_pids = Holding.get_holdings_pid_by_document_pid( - document_pid=document_pid, - with_masked=False + document_pid=document_pid, with_masked=False ) holding_pids = list(holding_pids) organisations = {} libraries = {} locations = {} - query = HoldingsSearch().filter('terms', pid=holding_pids) + query = HoldingsSearch().filter("terms", pid=holding_pids) if organisation_pids: - query = query.filter({ - 'terms': {'organisation.pid': organisation_pids}}) + query = query.filter({"terms": {"organisation.pid": organisation_pids}}) if library_pids: - query = query.filter({ - 'terms': {'library.pid': library_pids}}) + query = query.filter({"terms": {"library.pid": library_pids}}) if location_pids: - query = query.filter({ - 'terms': {'location.pid': location_pids}}) + query = query.filter({"terms": {"location.pid": location_pids}}) for hit in query.scan(): holding = hit.to_dict() organisation_pid = hit.organisation.pid if organisation_pid not in organisations: - organisations[organisation_pid] = get_name(Organisation, - organisation_pid) + organisations[organisation_pid] = get_name( + Organisation, organisation_pid + ) library_pid = hit.library.pid if library_pid not in libraries: libraries[library_pid] = get_name(Library, library_pid) @@ -256,52 +263,40 @@ def get_name(resource, pid): locations[location_pid] = get_name(Location, location_pid) result = { - 'organisation': { - 'pid': organisation_pid, - 'name': organisations[organisation_pid] - }, - 'library': { - 'pid': library_pid, - 'name': libraries[library_pid] + "organisation": { + "pid": organisation_pid, + "name": organisations[organisation_pid], }, - 'location': { - 'pid': location_pid, - 'name': locations[location_pid] + "library": {"pid": library_pid, "name": libraries[library_pid]}, + "location": {"pid": location_pid, "name": locations[location_pid]}, + "holdings": { + "call_number": holding.get("call_number"), + "second_call_number": holding.get("second_call_number"), + "enumerationAndChronology": holding.get("enumerationAndChronology"), + "electronic_location": holding.get("electronic_location", []), + "notes": holding.get("notes", []), + "supplementaryContent": holding.get("supplementaryContent"), + "index": holding.get("index"), + "missing_issues": holding.get("missing_issues"), }, - 'holdings': { - 'call_number': holding.get('call_number'), - 'second_call_number': holding.get('second_call_number'), - 'enumerationAndChronology': holding.get( - 
'enumerationAndChronology'), - 'electronic_location': holding.get( - 'electronic_location', []), - 'notes': holding.get('notes', []), - 'supplementaryContent': holding.get( - 'supplementaryContent'), - 'index': holding.get('index'), - 'missing_issues': holding.get('missing_issues'), - } } - if hit.holdings_type == 'standard': + if hit.holdings_type == "standard": item_pids = Item.get_items_pid_by_holding_pid( - hit.pid, - with_masked=False + hit.pid, with_masked=False ) - item_hits = ItemsSearch() \ - .filter('terms', pid=list(item_pids)) \ - .scan() + item_hits = ItemsSearch().filter("terms", pid=list(item_pids)).scan() for item_hit in item_hits: item_data = item_hit.to_dict() item_result = result - item_result['item'] = { - 'barcode': item_data.get('barcode'), - 'all_number': item_data.get('all_number'), - 'second_call_number': item_data.get( - 'second_call_number'), - 'enumerationAndChronology': item_data.get( - 'enumerationAndChronology'), - 'url': item_data.get('url'), - 'notes': item_data.get('notes', []), + item_result["item"] = { + "barcode": item_data.get("barcode"), + "all_number": item_data.get("all_number"), + "second_call_number": item_data.get("second_call_number"), + "enumerationAndChronology": item_data.get( + "enumerationAndChronology" + ), + "url": item_data.get("url"), + "notes": item_data.get("notes", []), } results.append(item_result) else: @@ -309,12 +304,23 @@ def get_name(resource, pid): return results -ORDER = ['leader', 'pid', 'date_and_time_of_latest_transaction', - 'fixed_length_data_elements', 'identifiedBy', - 'title_responsibility', 'provisionActivity', - 'copyrightDate', 'physical_description', 'subjects', 'genreForm', - 'contribution', 'type', 'holdings_items'] -LEADER = '00000cam a2200000zu 4500' +ORDER = [ + "leader", + "pid", + "date_and_time_of_latest_transaction", + "fixed_length_data_elements", + "identifiedBy", + "title_responsibility", + "provisionActivity", + "copyrightDate", + "physical_description", + "subjects", + "genreForm", + "contribution", + "type", + "holdings_items", +] +LEADER = "00000cam a2200000zu 4500" class ToMarc21Overdo(Underdo): @@ -322,9 +328,17 @@ class ToMarc21Overdo(Underdo): responsibility_statement = {} - def do(self, blob, language='en', ignore_missing=True, - exception_handlers=None, with_holdings_items=False, - organisation_pids=None, library_pids=None, location_pids=None): + def do( + self, + blob, + language="en", + ignore_missing=True, + exception_handlers=None, + with_holdings_items=False, + organisation_pids=None, + library_pids=None, + location_pids=None, + ): """Translate blob values and instantiate new model instance. 
Raises ``MissingRule`` when no rule matched and ``ignore_missing`` @@ -347,110 +361,106 @@ def do(self, blob, language='en', ignore_missing=True, """ # TODO: real leader self.language = language - blob['leader'] = LEADER + blob["leader"] = LEADER # create fixed_length_data_elements for 008 - created = date_string_to_utc(blob['_created']).strftime('%y%m%d') - fixed_data = f'{created}|||||||||xx#|||||||||||||||||||||c' - fiction = blob.get('fiction_statement') + created = date_string_to_utc(blob["_created"]).strftime("%y%m%d") + fixed_data = f"{created}|||||||||xx#|||||||||||||||||||||c" + fiction = blob.get("fiction_statement") if fiction == DocumentFictionType.Fiction.value: - fixed_data = f'{fixed_data[:33]}1{fixed_data[34:]}' + fixed_data = f"{fixed_data[:33]}1{fixed_data[34:]}" elif fiction == DocumentFictionType.NonFiction.value: - fixed_data = f'{fixed_data[:33]}0{fixed_data[34:]}' - provision_activity = blob.get('provisionActivity', []) + fixed_data = f"{fixed_data[:33]}0{fixed_data[34:]}" + provision_activity = blob.get("provisionActivity", []) for p_activity in provision_activity: - if p_activity.get('type') == 'bf:Publication': - end_date = str(p_activity.get('endDate', '')) + if p_activity.get("type") == "bf:Publication": + end_date = str(p_activity.get("endDate", "")) if end_date: - fixed_data = \ - f'{fixed_data[:11]}{end_date}{fixed_data[15:]}' - start_date = str(p_activity.get('startDate', '')) + fixed_data = f"{fixed_data[:11]}{end_date}{fixed_data[15:]}" + start_date = str(p_activity.get("startDate", "")) if start_date: - type_of_date = 's' + type_of_date = "s" if end_date: - type_of_date = 'm' + type_of_date = "m" fixed_data = ( - f'{fixed_data[:6]}{type_of_date}' - f'{start_date}{fixed_data[11:]}' + f"{fixed_data[:6]}{type_of_date}" + f"{start_date}{fixed_data[11:]}" ) break - language = utils.force_list(blob.get('language')) + language = utils.force_list(blob.get("language")) if language: - language = language[0].get('value') - fixed_data = f'{fixed_data[:35]}{language}{fixed_data[38:]}' - blob['fixed_length_data_elements'] = fixed_data + language = language[0].get("value") + fixed_data = f"{fixed_data[:35]}{language}{fixed_data[38:]}" + blob["fixed_length_data_elements"] = fixed_data # Add date and time of latest transaction - updated = date_string_to_utc(blob['_updated']) - blob['date_and_time_of_latest_transaction'] = updated.strftime( - '%Y%m%d%H%M%S.0') + updated = date_string_to_utc(blob["_updated"]) + blob["date_and_time_of_latest_transaction"] = updated.strftime("%Y%m%d%H%M%S.0") # Add responsibilityStatement to title - if blob.get('title'): - blob['title_responsibility'] = { - 'titles': blob.get('title', {}), - 'responsibility': ' ; '.join(create_title_responsibilites( - blob.get('responsibilityStatement', []) - )) + if blob.get("title"): + blob["title_responsibility"] = { + "titles": blob.get("title", {}), + "responsibility": " ; ".join( + create_title_responsibilites( + blob.get("responsibilityStatement", []) + ) + ), } # Fix ContributionsSearch # Try to get RERO_ILS_AGENTS_LABEL_ORDER from current app # In the dojson cli is no current app and we have to get the value # directly from config.py try: - order = current_app.config.get('RERO_ILS_AGENTS_LABEL_ORDER', []) + order = current_app.config.get("RERO_ILS_AGENTS_LABEL_ORDER", []) except Exception: from rero_ils.config import RERO_ILS_AGENTS_LABEL_ORDER as order - self.source_order = order.get( - self.language, - order.get(order['fallback'], []) - ) + self.source_order = order.get(self.language, 
order.get(order["fallback"], [])) if with_holdings_items: # add holdings items informations get_holdings_items - blob['holdings_items'] = get_holdings_items( - document_pid=blob.get('pid'), + blob["holdings_items"] = get_holdings_items( + document_pid=blob.get("pid"), organisation_pids=organisation_pids, library_pids=library_pids, - location_pids=location_pids + location_pids=location_pids, ) # Physical Description physical_description = {} - extent = blob.get('extent') - durations = ', '.join(blob.get('duration', [])) + extent = blob.get("extent") + durations = ", ".join(blob.get("duration", [])) if extent: if durations: - if f'({durations})' in extent: - physical_description['extent'] = extent + if f"({durations})" in extent: + physical_description["extent"] = extent else: - physical_description['extent'] = f'{extent} ({durations})' + physical_description["extent"] = f"{extent} ({durations})" else: - physical_description['extent'] = extent - note = blob.get('note', []) + physical_description["extent"] = extent + note = blob.get("note", []) other_physical_details = [] for value in note: - if value['noteType'] == 'otherPhysicalDetails': - other_physical_details.append(value['label']) + if value["noteType"] == "otherPhysicalDetails": + other_physical_details.append(value["label"]) if not other_physical_details: - for value in blob.get('productionMethod', []): + for value in blob.get("productionMethod", []): other_physical_details.append(translate(value)) - for value in blob.get('illustrativeContent', []): + for value in blob.get("illustrativeContent", []): other_physical_details.append(value) - for value in blob.get('colorContent', []): + for value in blob.get("colorContent", []): other_physical_details.append(translate(value)) if other_physical_details: - physical_description['other_physical_details'] = \ - ' ; '.join(other_physical_details) - accompanying_material = ' ; '.join( - [v.get('label') for v in note - if v['noteType'] == 'accompanyingMaterial'] + physical_description["other_physical_details"] = " ; ".join( + other_physical_details + ) + accompanying_material = " ; ".join( + [v.get("label") for v in note if v["noteType"] == "accompanyingMaterial"] ) if accompanying_material: - physical_description['accompanying_material'] = \ - accompanying_material - dimensions = blob.get('dimensions', []) - book_formats = blob.get('bookFormat', []) + physical_description["accompanying_material"] = accompanying_material + dimensions = blob.get("dimensions", []) + book_formats = blob.get("bookFormat", []) upper_book_formats = [v.upper() for v in book_formats] new_dimensions = [] for dimension in dimensions: @@ -463,10 +473,10 @@ def do(self, blob, language='en', ignore_missing=True, for book_format in book_formats: new_dimensions.append(book_format) if new_dimensions: - physical_description['dimensions'] = ' ; '.join(new_dimensions) + physical_description["dimensions"] = " ; ".join(new_dimensions) if physical_description: - blob['physical_description'] = physical_description + blob["physical_description"] = physical_description # Add order keys = {} @@ -474,17 +484,15 @@ def do(self, blob, language='en', ignore_missing=True, count = 1 if isinstance(value, (list, set, tuple)): count = len(value) - keys.setdefault(key, count-1) + keys.setdefault(key, count - 1) keys[key] += 1 order = [] for key in ORDER: for count in range(keys.get(key, 0)): order.append(key) - blob['__order__'] = order + blob["__order__"] = order result = super().do( - blob, - ignore_missing=ignore_missing, - 
exception_handlers=exception_handlers + blob, ignore_missing=ignore_missing, exception_handlers=exception_handlers ) return result @@ -492,7 +500,7 @@ def do(self, blob, language='en', ignore_missing=True, def add_value(result, sub_tag, value): """Add value with tag to result.""" if value: - result.setdefault('__order__', []).append(sub_tag) + result.setdefault("__order__", []).append(sub_tag) result[sub_tag] = value return result @@ -501,7 +509,7 @@ def add_values(result, sub_tag, values): """Add values with tag to result.""" if values: for _ in range(len(values)): - result.setdefault('__order__', []).append(sub_tag) + result.setdefault("__order__", []).append(sub_tag) result[sub_tag] = values return result @@ -509,155 +517,158 @@ def add_values(result, sub_tag, values): to_marc21 = ToMarc21Overdo() -@to_marc21.over('leader', '^leader') +@to_marc21.over("leader", "^leader") def reverse_leader(self, key, value): """Reverse - leader.""" assert len(value) == 24 return value -@to_marc21.over('001', '^pid') +@to_marc21.over("001", "^pid") def reverse_pid(self, key, value): """Reverse - pid.""" return [value] -@to_marc21.over('005', '^date_and_time_of_latest_transaction') +@to_marc21.over("005", "^date_and_time_of_latest_transaction") def reverse_latest_transaction(self, key, value): """Reverse - date and time of latest transaction.""" return value -@to_marc21.over('008', '^fixed_length_data_elements') +@to_marc21.over("008", "^fixed_length_data_elements") def reverse_fixed_length_data_elements(self, key, value): """Reverse - fixed length data elements.""" return [value] -@to_marc21.over('02X', '^identifiedBy') +@to_marc21.over("02X", "^identifiedBy") @utils.reverse_for_each_value @utils.ignore_value def reverse_identified_by(self, key, value): """Reverse - identified by.""" - status = value.get('status') - qualifier = value.get('qualifier') - identified_by_type = value['type'] - identified_by_value = value['value'] + status = value.get("status") + qualifier = value.get("qualifier") + identified_by_type = value["type"] + identified_by_value = value["value"] result = {} - if identified_by_type == 'bf:Isbn': - subfield = 'z' if status else 'a' - result['__order__'] = [subfield] + if identified_by_type == "bf:Isbn": + subfield = "z" if status else "a" + result["__order__"] = [subfield] result[subfield] = identified_by_value if qualifier: - result['__order__'].append('q') - result['q'] = qualifier - self.append(('020__', utils.GroupableOrderedDict(result))) + result["__order__"].append("q") + result["q"] = qualifier + self.append(("020__", utils.GroupableOrderedDict(result))) return None -@to_marc21.over('245', '^title_responsibility') +@to_marc21.over("245", "^title_responsibility") @utils.ignore_value def reverse_title(self, key, value): """Reverse - title.""" + def get_part(parts, new_parts): """Create part list.""" for part in new_parts: part_numbers = [] - for part_number in part.get('partNumber', []): - language = part_number.get('language', 'default') + for part_number in part.get("partNumber", []): + language = part_number.get("language", "default") if display_alternate_graphic_first(language): - part_numbers.insert(0, part_number['value']) + part_numbers.insert(0, part_number["value"]) else: - part_numbers.append(part_number['value']) + part_numbers.append(part_number["value"]) part_names = [] - for part_name in part.get('partName', []): - language = part_name.get('language', 'default') + for part_name in part.get("partName", []): + language = part_name.get("language", "default") if 
display_alternate_graphic_first(language): - part_names.insert(0, part_name['value']) + part_names.insert(0, part_name["value"]) else: - part_names.append(part_name['value']) - parts.append({ - 'part_number': '. '.join(part_numbers), - 'part_name': '. '.join(part_names) - }) + part_names.append(part_name["value"]) + parts.append( + { + "part_number": ". ".join(part_numbers), + "part_name": ". ".join(part_names), + } + ) return parts result = None - titles = value.get('titles') - responsibility = value.get('responsibility') + titles = value.get("titles") + responsibility = value.get("responsibility") main_titles = [] sub_titles = [] main_titles_parallel = [] sub_titles_parallel = [] parts = [] for title in titles: - if title.get('type') == 'bf:Title': - for main_title in title.get('mainTitle'): + if title.get("type") == "bf:Title": + for main_title in title.get("mainTitle"): if display_alternate_graphic_first( - main_title.get('language', 'default')): - main_titles.insert(0, main_title['value']) + main_title.get("language", "default") + ): + main_titles.insert(0, main_title["value"]) else: - main_titles.append(main_title['value']) - for sub_title in title.get('subtitle', []): + main_titles.append(main_title["value"]) + for sub_title in title.get("subtitle", []): if display_alternate_graphic_first( - sub_title.get('language', 'default')): - sub_titles.insert(0, sub_title['value']) + sub_title.get("language", "default") + ): + sub_titles.insert(0, sub_title["value"]) else: - sub_titles.append(sub_title['value']) - if title.get('type') == 'bf:ParallelTitle': - for main_title in title.get('mainTitle'): + sub_titles.append(sub_title["value"]) + if title.get("type") == "bf:ParallelTitle": + for main_title in title.get("mainTitle"): if display_alternate_graphic_first( - main_title.get('language', 'default')): - main_titles_parallel.insert(0, main_title['value']) + main_title.get("language", "default") + ): + main_titles_parallel.insert(0, main_title["value"]) else: - main_titles_parallel.append(main_title['value']) - for sub_title in title.get('subtitle', []): + main_titles_parallel.append(main_title["value"]) + for sub_title in title.get("subtitle", []): if display_alternate_graphic_first( - sub_title.get('language', 'default')): - sub_titles_parallel.insert(0, sub_title['value']) + sub_title.get("language", "default") + ): + sub_titles_parallel.insert(0, sub_title["value"]) else: - sub_titles_parallel.append(sub_title['value']) - parts = get_part(parts, title.get('part', [])) + sub_titles_parallel.append(sub_title["value"]) + parts = get_part(parts, title.get("part", [])) - result = { - '__order__': ['a'], - '$ind1': '0', - 'a': '. '.join(main_titles) - } + result = {"__order__": ["a"], "$ind1": "0", "a": ". ".join(main_titles)} if sub_titles: - result['__order__'].append('b') - result['b'] = '. '.join(sub_titles) + result["__order__"].append("b") + result["b"] = ". ".join(sub_titles) if main_titles_parallel: - if result.get('b'): - result['b'] += f' = {". ".join(main_titles_parallel)}' + if result.get("b"): + result["b"] += f' = {". ".join(main_titles_parallel)}' else: - result['__order__'].append('b') - result['b'] = '. '.join(main_titles_parallel) + result["__order__"].append("b") + result["b"] = ". ".join(main_titles_parallel) if sub_titles_parallel: - if result.get('b'): - result['b'] += f' : {". ".join(sub_titles_parallel)}' + if result.get("b"): + result["b"] += f' : {". ".join(sub_titles_parallel)}' else: - result['__order__'].append('b') - result['b'] = '. 
'.join(sub_titles_parallel) + result["__order__"].append("b") + result["b"] = ". ".join(sub_titles_parallel) if responsibility: - result['__order__'].append('c') - result['c'] = responsibility + result["__order__"].append("c") + result["c"] = responsibility for part in parts: - part_number = part.get('part_number') + part_number = part.get("part_number") if part_number: - result['__order__'].append('n') - result.setdefault('n', []) - result['n'].append(part_number) - part_name = part.get('part_name') + result["__order__"].append("n") + result.setdefault("n", []) + result["n"].append(part_number) + part_name = part.get("part_name") if part_name: - result['__order__'].append('p') - result.setdefault('p', []) - result['p'].append(part_name) + result["__order__"].append("p") + result.setdefault("p", []) + result["p"].append(part_name) return result or None -@to_marc21.over('264', '^(provisionActivity|copyrightDate)') +@to_marc21.over("264", "^(provisionActivity|copyrightDate)") @utils.reverse_for_each_value @utils.ignore_value def reverse_provision_activity(self, key, value): @@ -673,59 +684,59 @@ def reverse_provision_activity(self, key, value): # * $a = [label] si type=bf:Date # Pour chaque "copyrightDate" : # * 264 ind2=4 $a = [copyrightDate] - if key == 'copyrightDate': + if key == "copyrightDate": result = { - '$ind2': '4', + "$ind2": "4", } - result = add_value(result, 'a', value) + result = add_value(result, "a", value) return result else: data = {} order = [] - for statement in value.get('statement', []): - statement_type = statement.get('type') - subfield = 'a' - if statement_type == 'bf:Agent': - subfield = 'b' - elif statement_type == 'Date': - subfield = 'c' - for label in statement.get('label'): + for statement in value.get("statement", []): + statement_type = statement.get("type") + subfield = "a" + if statement_type == "bf:Agent": + subfield = "b" + elif statement_type == "Date": + subfield = "c" + for label in statement.get("label"): order.append(subfield) data.setdefault(subfield, []) - data[subfield].append(label['value']) + data[subfield].append(label["value"]) # only take the first label break if data: - provision_activity_type = value.get('type') - ind2 = '' - if provision_activity_type == 'bf:Publication': - ind2 = '1' - elif provision_activity_type == 'bf:Distribution': - ind2 = '2' - elif provision_activity_type == 'bf:Manufacture': - ind2 = '3' - elif provision_activity_type == 'bf:Production': - ind2 = '0' - result = {'$ind2': ind2} + provision_activity_type = value.get("type") + ind2 = "" + if provision_activity_type == "bf:Publication": + ind2 = "1" + elif provision_activity_type == "bf:Distribution": + ind2 = "2" + elif provision_activity_type == "bf:Manufacture": + ind2 = "3" + elif provision_activity_type == "bf:Production": + ind2 = "0" + result = {"$ind2": ind2} for key, value in data.items(): result = add_values(result, key, value) - result['__order__'] = order + result["__order__"] = order return result -@to_marc21.over('300', '^physical_description') +@to_marc21.over("300", "^physical_description") @utils.ignore_value def reverse_physical_description(self, key, value): """Reverse - physical_description.""" result = {} - add_value(result, 'a', value.get('extent')) - add_value(result, 'b', value.get('other_physical_details')) - add_value(result, 'c', value.get('dimensions')) - add_value(result, 'e', value.get('accompanying_material')) + add_value(result, "a", value.get("extent")) + add_value(result, "b", value.get("other_physical_details")) + add_value(result, 
"c", value.get("dimensions")) + add_value(result, "e", value.get("accompanying_material")) return result or None -@to_marc21.over('6XX', '^subjects') +@to_marc21.over("6XX", "^subjects") @utils.reverse_for_each_value @utils.ignore_value def reverse_subjects(self, key, value): @@ -735,74 +746,71 @@ def reverse_subjects(self, key, value): Sujet Organisation > 610 OU 611 Conference Sujet Concept > 650 """ + def add_identified_by(result, identified_by): """Adds $2 and $0 to result.""" - result = add_value(result, '2', identified_by['type'].lower()) - result = add_value(result, '0', identified_by['value']) + result = add_value(result, "2", identified_by["type"].lower()) + result = add_value(result, "0", identified_by["value"]) return result - if entity := value.get('entity'): + if entity := value.get("entity"): tag = None - entity_type = entity.get('type') - if entity_pid := entity.get('pid'): - query = RemoteEntitiesSearch().filter('term', pid=entity_pid) + entity_type = entity.get("type") + if entity_pid := entity.get("pid"): + query = RemoteEntitiesSearch().filter("term", pid=entity_pid) if query.count(): - entity_type = next(query.source('type').scan()).type + entity_type = next(query.source("type").scan()).type if entity_type in [EntityType.PERSON, EntityType.ORGANISATION]: result, entity_type, surname, conference = do_contribution( - contribution={'entity': entity}, - source_order=to_marc21.source_order + contribution={"entity": entity}, source_order=to_marc21.source_order ) if entity_type == EntityType.PERSON: - tag = '6001_' if surname else '6000_' + tag = "6001_" if surname else "6000_" elif entity_type == EntityType.ORGANISATION: - tag = '611__' if conference else '610__' + tag = "611__" if conference else "610__" elif entity_type == EntityType.TOPIC: - result = do_concept( - entity=entity, - source_order=to_marc21.source_order - ) - tag = '650__' + result = do_concept(entity=entity, source_order=to_marc21.source_order) + tag = "650__" elif entity_type == EntityType.WORK: # TODO: to change in the future if $ref's are used. if authorized_access_point := entity.get( - f'authorized_access_point_{to_marc21.language}' - ) or entity.get('authorized_access_point'): + f"authorized_access_point_{to_marc21.language}" + ) or entity.get("authorized_access_point"): result = {} - result = add_value(result, 't', authorized_access_point) - if identified_by := entity.get('identifiedBy'): + result = add_value(result, "t", authorized_access_point) + if identified_by := entity.get("identifiedBy"): result = add_identified_by(result, identified_by) - self.append(('600__', utils.GroupableOrderedDict(result))) + self.append(("600__", utils.GroupableOrderedDict(result))) return elif entity_type == EntityType.PLACE: # TODO: to change in the future if $ref's are used. if authorized_access_point := entity.get( - f'authorized_access_point_{to_marc21.language}' - ) or entity.get('authorized_access_point'): + f"authorized_access_point_{to_marc21.language}" + ) or entity.get("authorized_access_point"): result = {} - result = add_value(result, 'a', authorized_access_point) - if identified_by := entity.get('identifiedBy'): + result = add_value(result, "a", authorized_access_point) + if identified_by := entity.get("identifiedBy"): result = add_identified_by(result, identified_by) - self.append(('651__', utils.GroupableOrderedDict(result))) + self.append(("651__", utils.GroupableOrderedDict(result))) return elif entity_type == EntityType.TEMPORAL: # TODO: to change in the future if $ref's are used. 
if authorized_access_point := entity.get( - f'authorized_access_point_{to_marc21.language}' - ) or entity.get('authorized_access_point'): + f"authorized_access_point_{to_marc21.language}" + ) or entity.get("authorized_access_point"): result = {} - result = add_value(result, 'a', authorized_access_point) - if identified_by := entity.get('identifiedBy'): + result = add_value(result, "a", authorized_access_point) + if identified_by := entity.get("identifiedBy"): result = add_identified_by(result, identified_by) - self.append(('648_7', utils.GroupableOrderedDict(result))) + self.append(("648_7", utils.GroupableOrderedDict(result))) return else: - error_print(f'No entity type found: {entity}') + error_print(f"No entity type found: {entity}") if tag and result: self.append((tag, utils.GroupableOrderedDict(result))) -@to_marc21.over('655', '^genreForm') +@to_marc21.over("655", "^genreForm") @utils.reverse_for_each_value @utils.ignore_value def reverse_genre_form(self, key, value): @@ -810,89 +818,89 @@ def reverse_genre_form(self, key, value): Genre / Forme > 655 - Genre ou forme """ - if value.get('entity'): + if value.get("entity"): if result := do_concept( - entity=value.get('entity'), source_order=to_marc21.source_order + entity=value.get("entity"), source_order=to_marc21.source_order ): - self.append(('655__', utils.GroupableOrderedDict(result))) + self.append(("655__", utils.GroupableOrderedDict(result))) -@to_marc21.over('7XX', '^contribution') +@to_marc21.over("7XX", "^contribution") @utils.reverse_for_each_value @utils.ignore_value def reverse_contribution(self, key, value): """Reverse - contribution.""" result, entity_type, surname, conference = do_contribution( - contribution=value, - source_order=to_marc21.source_order + contribution=value, source_order=to_marc21.source_order ) tag = None if entity_type == EntityType.PERSON: - tag = '7001_' if surname else '7000_' + tag = "7001_" if surname else "7000_" elif entity_type == EntityType.ORGANISATION: - tag = '711__' if conference else '710__' + tag = "711__" if conference else "710__" if tag and result: self.append((tag, utils.GroupableOrderedDict(result))) -@to_marc21.over('900', '^type') +@to_marc21.over("900", "^type") @utils.reverse_for_each_value @utils.ignore_value def reverse_type(self, key, value): """Reverse - type.""" - result = { - '__order__': ['a'], - 'a': value.get('main_type') - } - if subtype_type := value.get('subtype'): - result['__order__'] = ['a', 'b'] - result['b'] = subtype_type + result = {"__order__": ["a"], "a": value.get("main_type")} + if subtype_type := value.get("subtype"): + result["__order__"] = ["a", "b"] + result["b"] = subtype_type return result -@to_marc21.over('949', '^holdings_items') +@to_marc21.over("949", "^holdings_items") @utils.reverse_for_each_value @utils.ignore_value def reverse_holdings_items(self, key, value): """Reverse - holdings or items.""" - note_types_to_display = ['general_note', 'patrimonial_note', - 'provenance_note', 'binding_note', - 'condition_note'] + note_types_to_display = [ + "general_note", + "patrimonial_note", + "provenance_note", + "binding_note", + "condition_note", + ] result = { - '__order__': ['0', '1', '2', '3', '4', '5'], - '0': value['organisation']['pid'], - '1': value['organisation']['name'], - '2': value['library']['pid'], - '3': value['library']['name'], - '4': value['location']['pid'], - '5': value['location']['name'], + "__order__": ["0", "1", "2", "3", "4", "5"], + "0": value["organisation"]["pid"], + "1": value["organisation"]["name"], + "2": 
value["library"]["pid"], + "3": value["library"]["name"], + "4": value["location"]["pid"], + "5": value["location"]["name"], } - holdings = value.get('holdings', {}) - add_value(result, 'B', holdings.get('call_number')) - add_value(result, 'C', holdings.get('second_call_number')) - add_value(result, 'D', holdings.get('enumerationAndChronology')) - uris = [data['uri'] for data in holdings.get('electronic_location')] - add_values(result, 'E', uris) + holdings = value.get("holdings", {}) + add_value(result, "B", holdings.get("call_number")) + add_value(result, "C", holdings.get("second_call_number")) + add_value(result, "D", holdings.get("enumerationAndChronology")) + uris = [data["uri"] for data in holdings.get("electronic_location")] + add_values(result, "E", uris) if notes := [ - note['content'] - for note in holdings.get('notes', []) - if note['type'] in note_types_to_display + note["content"] + for note in holdings.get("notes", []) + if note["type"] in note_types_to_display ]: - add_values(result, 'F', notes) - add_value(result, 'G', holdings.get('supplementaryContent')) - add_value(result, 'H', holdings.get('index')) - add_value(result, 'I', holdings.get('missing_issues')) - - item = value.get('item', {}) - add_value(result, 'a', item.get('barcode')) - add_value(result, 'b', item.get('call_number')) - add_value(result, 'c', item.get('enumerationAndChronology')) - add_value(result, 'e', item.get('url')) + add_values(result, "F", notes) + add_value(result, "G", holdings.get("supplementaryContent")) + add_value(result, "H", holdings.get("index")) + add_value(result, "I", holdings.get("missing_issues")) + + item = value.get("item", {}) + add_value(result, "a", item.get("barcode")) + add_value(result, "b", item.get("call_number")) + add_value(result, "c", item.get("enumerationAndChronology")) + add_value(result, "e", item.get("url")) if notes := [ - note['content'] - for note in item.get('notes', []) - if note['type'] in note_types_to_display + note["content"] + for note in item.get("notes", []) + if note["type"] in note_types_to_display ]: - add_values(result, 'f', notes) + add_values(result, "f", notes) return result diff --git a/rero_ils/modules/documents/dojson/contrib/marc21tojson/__init__.py b/rero_ils/modules/documents/dojson/contrib/marc21tojson/__init__.py index 8584a52fbd..e6f2ebbf24 100644 --- a/rero_ils/modules/documents/dojson/contrib/marc21tojson/__init__.py +++ b/rero_ils/modules/documents/dojson/contrib/marc21tojson/__init__.py @@ -25,5 +25,11 @@ from .slsp.model import marc21 as marc21_slsp from .ugent.model import marc21 as marc21_ugent -__all__ = ('marc21_dnb', 'marc21_kul', 'marc21_loc', 'marc21_rero', - 'marc21_slsp', 'marc21_ugent') +__all__ = ( + "marc21_dnb", + "marc21_kul", + "marc21_loc", + "marc21_rero", + "marc21_slsp", + "marc21_ugent", +) diff --git a/rero_ils/modules/documents/dojson/contrib/marc21tojson/dnb/__init__.py b/rero_ils/modules/documents/dojson/contrib/marc21tojson/dnb/__init__.py index a3b4d2d419..e713aebbba 100644 --- a/rero_ils/modules/documents/dojson/contrib/marc21tojson/dnb/__init__.py +++ b/rero_ils/modules/documents/dojson/contrib/marc21tojson/dnb/__init__.py @@ -20,4 +20,4 @@ from .model import marc21 -__all__ = ('marc21') +__all__ = "marc21" diff --git a/rero_ils/modules/documents/dojson/contrib/marc21tojson/dnb/model.py b/rero_ils/modules/documents/dojson/contrib/marc21tojson/dnb/model.py index b4344ddf49..65ef44ed1f 100644 --- a/rero_ils/modules/documents/dojson/contrib/marc21tojson/dnb/model.py +++ 
b/rero_ils/modules/documents/dojson/contrib/marc21tojson/dnb/model.py @@ -24,33 +24,54 @@ from rero_ils.modules.documents.models import DocumentFictionType from ..loc import marc21_to_subjects_6XX as marc21_to_subjects_6XX_loc -from ..utils import do_abbreviated_title, \ - do_acquisition_terms_from_field_037, do_classification, do_contribution, \ - do_copyright_date, do_credits, do_dissertation, do_edition_statement, \ - do_electronic_locator_from_field_856, do_frequency_field_310_321, \ - do_identified_by_from_field_020, do_identified_by_from_field_022, \ - do_identified_by_from_field_024, do_identified_by_from_field_028, \ - do_identified_by_from_field_035, do_intended_audience, do_issuance, \ - do_language, do_notes_and_original_title, do_part_of, \ - do_provision_activity, do_scale_and_cartographic, do_sequence_numbering, \ - do_specific_document_relation, do_summary, do_table_of_contents, \ - do_temporal_coverage, do_title, do_type, \ - do_usage_and_access_policy_from_field_506_540, do_work_access_point, \ - do_work_access_point_240 +from ..utils import ( + do_abbreviated_title, + do_acquisition_terms_from_field_037, + do_classification, + do_contribution, + do_copyright_date, + do_credits, + do_dissertation, + do_edition_statement, + do_electronic_locator_from_field_856, + do_frequency_field_310_321, + do_identified_by_from_field_020, + do_identified_by_from_field_022, + do_identified_by_from_field_024, + do_identified_by_from_field_028, + do_identified_by_from_field_035, + do_intended_audience, + do_issuance, + do_language, + do_notes_and_original_title, + do_part_of, + do_provision_activity, + do_scale_and_cartographic, + do_sequence_numbering, + do_specific_document_relation, + do_summary, + do_table_of_contents, + do_temporal_coverage, + do_title, + do_type, + do_usage_and_access_policy_from_field_506_540, + do_work_access_point, + do_work_access_point_240, +) marc21 = ReroIlsMarc21Overdo() -@marc21.over('issuance', 'leader') +@marc21.over("issuance", "leader") @utils.ignore_value def marc21_to_type_and_issuance(self, key, value): """Get document type, content/Media/Carrier type and mode of issuance.""" do_issuance(self, marc21) do_type(self, marc21) - self['fiction_statement'] = DocumentFictionType.Unspecified.value + self["fiction_statement"] = DocumentFictionType.Unspecified.value -@marc21.over('language', '^008') +@marc21.over("language", "^008") @utils.ignore_value def marc21_to_language(self, key, value): """Get languages. 
@@ -59,15 +80,15 @@ def marc21_to_language(self, key, value): """ language = do_language(self, marc21) # is fiction - self['fiction_statement'] = DocumentFictionType.Unspecified.value - if value[33] in ['1', 'd', 'f', 'j', 'p']: - self['fiction_statement'] = DocumentFictionType.Fiction.value - elif value[33] in ['0', 'e', 'h', 'i', 's']: - self['fiction_statement'] = DocumentFictionType.NonFiction.value + self["fiction_statement"] = DocumentFictionType.Unspecified.value + if value[33] in ["1", "d", "f", "j", "p"]: + self["fiction_statement"] = DocumentFictionType.Fiction.value + elif value[33] in ["0", "e", "h", "i", "s"]: + self["fiction_statement"] = DocumentFictionType.NonFiction.value return language or None -@marc21.over('title', '(^210|^222)..') +@marc21.over("title", "(^210|^222)..") @utils.ignore_value def marc21_to_abbreviated_title(self, key, value): """Get abbreviated title data.""" @@ -75,7 +96,7 @@ def marc21_to_abbreviated_title(self, key, value): return title_list or None -@marc21.over('title', '^245..') +@marc21.over("title", "^245..") @utils.ignore_value def marc21_to_title(self, key, value): """Get title data.""" @@ -83,7 +104,7 @@ def marc21_to_title(self, key, value): return title_list or None -@marc21.over('contribution', '(^100|^700|^710|^711)..') +@marc21.over("contribution", "(^100|^700|^710|^711)..") @utils.for_each_value @utils.ignore_value def marc21_to_contribution(self, key, value): @@ -91,7 +112,7 @@ def marc21_to_contribution(self, key, value): return do_contribution(self, marc21, key, value) -@marc21.over('relation', '(770|772|775|776|777|780|785|787|533|534)..') +@marc21.over("relation", "(770|772|775|776|777|780|785|787|533|534)..") @utils.for_each_value @utils.ignore_value def marc21_to_specific_document_relation(self, key, value): @@ -99,7 +120,7 @@ def marc21_to_specific_document_relation(self, key, value): do_specific_document_relation(self, marc21, key, value) -@marc21.over('copyrightDate', '^26[04].4') +@marc21.over("copyrightDate", "^26[04].4") @utils.ignore_value def marc21_to_copyright_date(self, key, value): """Get Copyright Date.""" @@ -107,7 +128,7 @@ def marc21_to_copyright_date(self, key, value): return copyright_dates or None -@marc21.over('editionStatement', '^250..') +@marc21.over("editionStatement", "^250..") @utils.for_each_value @utils.ignore_value def marc21_to_edition_statement(self, key, value): @@ -120,7 +141,7 @@ def marc21_to_edition_statement(self, key, value): return edition_data or None -@marc21.over('provisionActivity', '^26[04].[_0-3]') +@marc21.over("provisionActivity", "^26[04].[_0-3]") @utils.for_each_value @utils.ignore_value def marc21_to_provision_activity(self, key, value): @@ -134,7 +155,7 @@ def marc21_to_provision_activity(self, key, value): return publication or None -@marc21.over('extent', '^300..') +@marc21.over("extent", "^300..") @utils.ignore_value def marc21_to_description(self, key, value): """Get physical description. 
@@ -157,7 +178,7 @@ def marc21_to_description(self, key, value): marc21.extract_description_from_marc_field(key, value, self) -@marc21.over('seriesStatement', '^490..') +@marc21.over("seriesStatement", "^490..") @utils.for_each_value @utils.ignore_value def marc21_to_series_statement(self, key, value): @@ -169,7 +190,7 @@ def marc21_to_series_statement(self, key, value): marc21.extract_series_statement_from_marc_field(key, value, self) -@marc21.over('tableOfContents', '^505..') +@marc21.over("tableOfContents", "^505..") @utils.for_each_value @utils.ignore_value def marc21_to_table_of_contents(self, key, value): @@ -177,7 +198,7 @@ def marc21_to_table_of_contents(self, key, value): do_table_of_contents(self, value) -@marc21.over('usageAndAccessPolicy', '^(506|540)..') +@marc21.over("usageAndAccessPolicy", "^(506|540)..") @utils.for_each_value @utils.ignore_value def marc21_to_usage_and_access_policy_from_field_506_540(self, key, value): @@ -185,7 +206,7 @@ def marc21_to_usage_and_access_policy_from_field_506_540(self, key, value): return do_usage_and_access_policy_from_field_506_540(marc21, key, value) -@marc21.over('frequency', '^(310|321)..') +@marc21.over("frequency", "^(310|321)..") @utils.for_each_value @utils.ignore_value def marc21_to_frequency_field_310_321(self, key, value): @@ -193,7 +214,7 @@ def marc21_to_frequency_field_310_321(self, key, value): return do_frequency_field_310_321(marc21, key, value) -@marc21.over('dissertation', '^502..') +@marc21.over("dissertation", "^502..") @utils.for_each_value @utils.ignore_value def marc21_to_dissertation(self, key, value): @@ -202,7 +223,7 @@ def marc21_to_dissertation(self, key, value): return do_dissertation(marc21, value) -@marc21.over('summary', '^520..') +@marc21.over("summary", "^520..") @utils.for_each_value @utils.ignore_value def marc21_to_abstract(self, key, value): @@ -210,7 +231,7 @@ def marc21_to_abstract(self, key, value): return do_summary(marc21, value) -@marc21.over('intendedAudience', '^521..') +@marc21.over("intendedAudience", "^521..") @utils.for_each_value @utils.ignore_value def marc21_to_intended_audience(self, key, value): @@ -218,58 +239,57 @@ def marc21_to_intended_audience(self, key, value): do_intended_audience(self, value) -@marc21.over('identifiedBy', '^020..') +@marc21.over("identifiedBy", "^020..") @utils.ignore_value def marc21_to_identified_by_from_field_020(self, key, value): """Get identifier from field 020.""" do_identified_by_from_field_020(self, marc21, key, value) -@marc21.over('identifiedBy', '^022..') +@marc21.over("identifiedBy", "^022..") @utils.ignore_value def marc21_to_identified_by_from_field_022(self, key, value): """Get identifier from field 022.""" do_identified_by_from_field_022(self, value) -@marc21.over('identifiedBy', '^024..') +@marc21.over("identifiedBy", "^024..") @utils.ignore_value def marc21_to_identified_by_from_field_024(self, key, value): """Get identifier from field 024.""" do_identified_by_from_field_024(self, marc21, key, value) -@marc21.over('identifiedBy', '^028..') +@marc21.over("identifiedBy", "^028..") @utils.ignore_value def marc21_to_identified_by_from_field_028(self, key, value): """Get identifier from field 028.""" do_identified_by_from_field_028(self, marc21, key, value) -@marc21.over('identifiedBy', '^035..') +@marc21.over("identifiedBy", "^035..") @utils.ignore_value def marc21_to_identified_by_from_field_035(self, key, value): """Get identifier from field 035.""" do_identified_by_from_field_035(self, marc21, key, value) -@marc21.over('acquisitionTerms', 
'^037..') +@marc21.over("acquisitionTerms", "^037..") @utils.ignore_value def marc21_to_acquisition_terms_from_field_037(self, key, value): """Get acquisition terms field 037.""" do_acquisition_terms_from_field_037(self, value) -@marc21.over('electronicLocator', '^856..') +@marc21.over("electronicLocator", "^856..") @utils.ignore_value def marc21_to_electronicLocator_from_field_856(self, key, value): """Get electronicLocator from field 856.""" - electronic_locators = do_electronic_locator_from_field_856( - self, marc21, key, value) + electronic_locators = do_electronic_locator_from_field_856(self, marc21, key, value) return electronic_locators or None -@marc21.over('note', '^(500|510|530|545|555|580)..') +@marc21.over("note", "^(500|510|530|545|555|580)..") @utils.for_each_value @utils.ignore_value def marc21_to_notes_and_original_title(self, key, value): @@ -277,7 +297,7 @@ def marc21_to_notes_and_original_title(self, key, value): do_notes_and_original_title(self, key, value) -@marc21.over('credits', '^(508|511)..') +@marc21.over("credits", "^(508|511)..") @utils.for_each_value @utils.ignore_value def marc21_to_credits(self, key, value): @@ -285,16 +305,16 @@ def marc21_to_credits(self, key, value): return do_credits(key, value) -@marc21.over('supplementaryContent', '^504..') +@marc21.over("supplementaryContent", "^504..") @utils.for_each_value @utils.ignore_value def marc21_to_supplementary_content(self, key, value): """Get notes and original title.""" - if value.get('a'): - return utils.force_list(value.get('a'))[0] + if value.get("a"): + return utils.force_list(value.get("a"))[0] -@marc21.over('subjects', '^(600|610|611|630|650|651|655)..') +@marc21.over("subjects", "^(600|610|611|630|650|651|655)..") @utils.for_each_value @utils.ignore_value def marc21_to_subjects_6XX(self, key, value): @@ -302,7 +322,7 @@ def marc21_to_subjects_6XX(self, key, value): return marc21_to_subjects_6XX_loc(self, key, value) -@marc21.over('sequence_numbering', '^362..') +@marc21.over("sequence_numbering", "^362..") @utils.for_each_value @utils.ignore_value def marc21_to_sequence_numbering(self, key, value): @@ -310,7 +330,7 @@ def marc21_to_sequence_numbering(self, key, value): do_sequence_numbering(self, value) -@marc21.over('classification', '^(050|060|080|082)..') +@marc21.over("classification", "^(050|060|080|082)..") @utils.for_each_value @utils.ignore_value def marc21_to_classification(self, key, value): @@ -318,7 +338,7 @@ def marc21_to_classification(self, key, value): do_classification(self, key, value) -@marc21.over('part_of', '^(773|800|830)..') +@marc21.over("part_of", "^(773|800|830)..") @utils.for_each_value @utils.ignore_value def marc21_to_part_of(self, key, value): @@ -346,7 +366,7 @@ def marc21_to_part_of(self, key, value): do_part_of(self, marc21, key, value) -@marc21.over('work_access_point', '(^130..|^730..)') +@marc21.over("work_access_point", "(^130..|^730..)") @utils.for_each_value @utils.ignore_value def marc21_to_work_access_point(self, key, value): @@ -354,7 +374,7 @@ def marc21_to_work_access_point(self, key, value): return do_work_access_point(marc21, key, value) -@marc21.over('work_access_point', '(^240..)') +@marc21.over("work_access_point", "(^240..)") @utils.for_each_value @utils.ignore_value def marc21_to_work_access_point_240(self, key, value): @@ -362,7 +382,7 @@ def marc21_to_work_access_point_240(self, key, value): return do_work_access_point_240(marc21, key, value) -@marc21.over('scale_cartographicAttributes', '^255..') +@marc21.over("scale_cartographicAttributes", 
"^255..") @utils.for_each_value @utils.ignore_value def marc21_to_scale_cartographic_attributes(self, key, value): @@ -370,7 +390,7 @@ def marc21_to_scale_cartographic_attributes(self, key, value): do_scale_and_cartographic(self, marc21, key, value) -@marc21.over('temporalCoverage', '^045..') +@marc21.over("temporalCoverage", "^045..") @utils.for_each_value @utils.ignore_value def marc21_to_temporal_coverage(self, key, value): diff --git a/rero_ils/modules/documents/dojson/contrib/marc21tojson/kul/__init__.py b/rero_ils/modules/documents/dojson/contrib/marc21tojson/kul/__init__.py index a3b4d2d419..e713aebbba 100644 --- a/rero_ils/modules/documents/dojson/contrib/marc21tojson/kul/__init__.py +++ b/rero_ils/modules/documents/dojson/contrib/marc21tojson/kul/__init__.py @@ -20,4 +20,4 @@ from .model import marc21 -__all__ = ('marc21') +__all__ = "marc21" diff --git a/rero_ils/modules/documents/dojson/contrib/marc21tojson/kul/model.py b/rero_ils/modules/documents/dojson/contrib/marc21tojson/kul/model.py index 37fae55c87..45e39d7189 100644 --- a/rero_ils/modules/documents/dojson/contrib/marc21tojson/kul/model.py +++ b/rero_ils/modules/documents/dojson/contrib/marc21tojson/kul/model.py @@ -24,34 +24,55 @@ from rero_ils.modules.documents.models import DocumentFictionType from ..loc import marc21_to_subjects_6XX as marc21_to_subjects_6XX_loc -from ..utils import do_abbreviated_title, \ - do_acquisition_terms_from_field_037, do_classification, do_contribution, \ - do_copyright_date, do_credits, do_dissertation, do_edition_statement, \ - do_electronic_locator_from_field_856, do_frequency_field_310_321, \ - do_identified_by_from_field_010, do_identified_by_from_field_020, \ - do_identified_by_from_field_022, do_identified_by_from_field_024, \ - do_identified_by_from_field_028, do_identified_by_from_field_035, \ - do_intended_audience, do_issuance, do_language, \ - do_notes_and_original_title, do_part_of, do_provision_activity, \ - do_scale_and_cartographic, do_sequence_numbering, \ - do_specific_document_relation, do_summary, do_table_of_contents, \ - do_temporal_coverage, do_title, do_type, \ - do_usage_and_access_policy_from_field_506_540, do_work_access_point, \ - do_work_access_point_240 +from ..utils import ( + do_abbreviated_title, + do_acquisition_terms_from_field_037, + do_classification, + do_contribution, + do_copyright_date, + do_credits, + do_dissertation, + do_edition_statement, + do_electronic_locator_from_field_856, + do_frequency_field_310_321, + do_identified_by_from_field_010, + do_identified_by_from_field_020, + do_identified_by_from_field_022, + do_identified_by_from_field_024, + do_identified_by_from_field_028, + do_identified_by_from_field_035, + do_intended_audience, + do_issuance, + do_language, + do_notes_and_original_title, + do_part_of, + do_provision_activity, + do_scale_and_cartographic, + do_sequence_numbering, + do_specific_document_relation, + do_summary, + do_table_of_contents, + do_temporal_coverage, + do_title, + do_type, + do_usage_and_access_policy_from_field_506_540, + do_work_access_point, + do_work_access_point_240, +) marc21 = ReroIlsMarc21Overdo() -@marc21.over('issuance', 'leader') +@marc21.over("issuance", "leader") @utils.ignore_value def marc21_to_type_and_issuance(self, key, value): """Get document type, content/Media/Carrier type and mode of issuance.""" do_issuance(self, marc21) do_type(self, marc21) - self['fiction_statement'] = DocumentFictionType.Unspecified.value + self["fiction_statement"] = DocumentFictionType.Unspecified.value 
-@marc21.over('language', '^008')
+@marc21.over("language", "^008")
 @utils.ignore_value
 def marc21_to_language(self, key, value):
     """Get languages.
@@ -60,15 +81,15 @@ def marc21_to_language(self, key, value):
     """
     language = do_language(self, marc21)
     # is fiction
-    self['fiction_statement'] = DocumentFictionType.Unspecified.value
-    if value[33] in ['1', 'd', 'f', 'j', 'p']:
-        self['fiction_statement'] = DocumentFictionType.Fiction.value
-    elif value[33] in ['0', 'e', 'h', 'i', 's']:
-        self['fiction_statement'] = DocumentFictionType.NonFiction.value
+    self["fiction_statement"] = DocumentFictionType.Unspecified.value
+    if value[33] in ["1", "d", "f", "j", "p"]:
+        self["fiction_statement"] = DocumentFictionType.Fiction.value
+    elif value[33] in ["0", "e", "h", "i", "s"]:
+        self["fiction_statement"] = DocumentFictionType.NonFiction.value
     return language or None

-@marc21.over('title', '(^210|^222)..')
+@marc21.over("title", "(^210|^222)..")
 @utils.ignore_value
 def marc21_to_abbreviated_title(self, key, value):
     """Get abbreviated title data."""
@@ -76,7 +97,7 @@ def marc21_to_abbreviated_title(self, key, value):
     return title_list or None

-@marc21.over('title', '^245..')
+@marc21.over("title", "^245..")
 @utils.ignore_value
 def marc21_to_title(self, key, value):
     """Get title data."""
@@ -84,7 +105,7 @@ def marc21_to_title(self, key, value):
     return title_list or None

-@marc21.over('contribution', '(^100|^700|^710|^711)..')
+@marc21.over("contribution", "(^100|^700|^710|^711)..")
 @utils.for_each_value
 @utils.ignore_value
 def marc21_to_contribution(self, key, value):
@@ -92,7 +113,7 @@ def marc21_to_contribution(self, key, value):
     return do_contribution(self, marc21, key, value)

-@marc21.over('relation', '(770|772|775|776|777|780|785|787|533|534)..')
+@marc21.over("relation", "(770|772|775|776|777|780|785|787|533|534)..")
 @utils.for_each_value
 @utils.ignore_value
 def marc21_to_specific_document_relation(self, key, value):
@@ -100,7 +121,7 @@ def marc21_to_specific_document_relation(self, key, value):
     do_specific_document_relation(self, marc21, key, value)

-@marc21.over('copyrightDate', '^26[04].4')
+@marc21.over("copyrightDate", "^26[04].4")
 @utils.ignore_value
 def marc21_to_copyright_date(self, key, value):
     """Get Copyright Date."""
@@ -108,7 +129,7 @@ def marc21_to_copyright_date(self, key, value):
     return copyright_dates or None

-@marc21.over('editionStatement', '^250..')
+@marc21.over("editionStatement", "^250..")
 @utils.for_each_value
 @utils.ignore_value
 def marc21_to_edition_statement(self, key, value):
@@ -121,7 +142,7 @@ def marc21_to_edition_statement(self, key, value):
     return edition_data or None

-@marc21.over('provisionActivity', '^26[04].[_0-3]')
+@marc21.over("provisionActivity", "^26[04].[_0-3]")
 @utils.for_each_value
 @utils.ignore_value
 def marc21_to_provision_activity(self, key, value):
@@ -135,7 +156,7 @@ def marc21_to_provision_activity(self, key, value):
     return publication or None

-@marc21.over('extent', '^300..')
+@marc21.over("extent", "^300..")
 @utils.ignore_value
 def marc21_to_description(self, key, value):
     """Get physical description.
@@ -158,7 +179,7 @@ def marc21_to_description(self, key, value):
     marc21.extract_description_from_marc_field(key, value, self)

-@marc21.over('seriesStatement', '^490..')
+@marc21.over("seriesStatement", "^490..")
 @utils.for_each_value
 @utils.ignore_value
 def marc21_to_series_statement(self, key, value):
@@ -170,7 +191,7 @@ def marc21_to_series_statement(self, key, value):
     # marc21.extract_series_statement_from_marc_field(key, value, self)

-@marc21.over('tableOfContents', '^505..')
+@marc21.over("tableOfContents", "^505..")
 @utils.for_each_value
 @utils.ignore_value
 def marc21_to_tableOfContents(self, key, value):
@@ -178,7 +199,7 @@ def marc21_to_tableOfContents(self, key, value):
     do_table_of_contents(self, value)

-@marc21.over('usageAndAccessPolicy', '^(506|540)..')
+@marc21.over("usageAndAccessPolicy", "^(506|540)..")
 @utils.for_each_value
 @utils.ignore_value
 def marc21_to_usage_and_access_policy_from_field_506_540(self, key, value):
@@ -186,7 +207,7 @@ def marc21_to_usage_and_access_policy_from_field_506_540(self, key, value):
     return do_usage_and_access_policy_from_field_506_540(marc21, key, value)

-@marc21.over('frequency', '^(310|321)..')
+@marc21.over("frequency", "^(310|321)..")
 @utils.for_each_value
 @utils.ignore_value
 def marc21_to_frequency_field_310_321(self, key, value):
@@ -194,7 +215,7 @@ def marc21_to_frequency_field_310_321(self, key, value):
     return do_frequency_field_310_321(marc21, key, value)

-@marc21.over('dissertation', '^502..')
+@marc21.over("dissertation", "^502..")
 @utils.for_each_value
 @utils.ignore_value
 def marc21_to_dissertation(self, key, value):
@@ -203,7 +224,7 @@ def marc21_to_dissertation(self, key, value):
     return do_dissertation(marc21, value)

-@marc21.over('summary', '^520..')
+@marc21.over("summary", "^520..")
 @utils.for_each_value
 @utils.ignore_value
 def marc21_to_summary(self, key, value):
@@ -211,7 +232,7 @@ def marc21_to_summary(self, key, value):
     return do_summary(marc21, value)

-@marc21.over('intendedAudience', '^521..')
+@marc21.over("intendedAudience", "^521..")
 @utils.for_each_value
 @utils.ignore_value
 def marc21_to_intended_audience(self, key, value):
@@ -219,65 +240,64 @@ def marc21_to_intended_audience(self, key, value):
     do_intended_audience(self, value)

-@marc21.over('identifiedBy', '^010..')
+@marc21.over("identifiedBy", "^010..")
 @utils.ignore_value
 def marc21_to_identified_by_from_field_010(self, key, value):
     """Get identifier from field 010."""
     do_identified_by_from_field_010(self, marc21, key, value)

-@marc21.over('identifiedBy', '^020..')
+@marc21.over("identifiedBy", "^020..")
 @utils.ignore_value
 def marc21_to_identified_by_from_field_020(self, key, value):
     """Get identifier from field 020."""
     do_identified_by_from_field_020(self, marc21, key, value)

-@marc21.over('identifiedBy', '^022..')
+@marc21.over("identifiedBy", "^022..")
 @utils.ignore_value
 def marc21_to_identified_by_from_field_022(self, key, value):
     """Get identifier from field 022."""
     do_identified_by_from_field_022(self, value)

-@marc21.over('identifiedBy', '^024..')
+@marc21.over("identifiedBy", "^024..")
 @utils.ignore_value
 def marc21_to_identified_by_from_field_024(self, key, value):
     """Get identifier from field 024."""
     do_identified_by_from_field_024(self, marc21, key, value)

-@marc21.over('identifiedBy', '^028..')
+@marc21.over("identifiedBy", "^028..")
 @utils.ignore_value
 def marc21_to_identified_by_from_field_028(self, key, value):
     """Get identifier from field 028."""
     do_identified_by_from_field_028(self, marc21, key, value)

-@marc21.over('identifiedBy', '^035..')
+@marc21.over("identifiedBy", "^035..") @utils.ignore_value def marc21_to_identified_by_from_field_035(self, key, value): """Get identifier from field 035.""" do_identified_by_from_field_035(self, marc21, key, value) -@marc21.over('acquisitionTerms', '^037..') +@marc21.over("acquisitionTerms", "^037..") @utils.ignore_value def marc21_to_acquisition_terms_from_field_037(self, key, value): """Get acquisition terms field 037.""" do_acquisition_terms_from_field_037(self, value) -@marc21.over('electronicLocator', '^856..') +@marc21.over("electronicLocator", "^856..") @utils.ignore_value def marc21_to_electronicLocator_from_field_856(self, key, value): """Get electronicLocator from field 856.""" - electronic_locators = do_electronic_locator_from_field_856( - self, marc21, key, value) + electronic_locators = do_electronic_locator_from_field_856(self, marc21, key, value) return electronic_locators or None -@marc21.over('note', '^(500|510|530|545|555|580)..') +@marc21.over("note", "^(500|510|530|545|555|580)..") @utils.for_each_value @utils.ignore_value def marc21_to_notes_and_original_title(self, key, value): @@ -285,7 +305,7 @@ def marc21_to_notes_and_original_title(self, key, value): do_notes_and_original_title(self, key, value) -@marc21.over('credits', '^(508|511)..') +@marc21.over("credits", "^(508|511)..") @utils.for_each_value @utils.ignore_value def marc21_to_notes_and_credits(self, key, value): @@ -293,16 +313,16 @@ def marc21_to_notes_and_credits(self, key, value): return do_credits(key, value) -@marc21.over('supplementaryContent', '^504..') +@marc21.over("supplementaryContent", "^504..") @utils.for_each_value @utils.ignore_value def marc21_to_supplementary_content(self, key, value): """Get notes and original title.""" - if value.get('a'): - return utils.force_list(value.get('a'))[0] + if value.get("a"): + return utils.force_list(value.get("a"))[0] -@marc21.over('subjects', '^(600|610|611|630|650|651|655)..') +@marc21.over("subjects", "^(600|610|611|630|650|651|655)..") @utils.for_each_value @utils.ignore_value def marc21_to_subjects_6XX(self, key, value): @@ -310,7 +330,7 @@ def marc21_to_subjects_6XX(self, key, value): return marc21_to_subjects_6XX_loc(self, key, value) -@marc21.over('sequence_numbering', '^362..') +@marc21.over("sequence_numbering", "^362..") @utils.for_each_value @utils.ignore_value def marc21_to_sequence_numbering(self, key, value): @@ -318,7 +338,7 @@ def marc21_to_sequence_numbering(self, key, value): do_sequence_numbering(self, value) -@marc21.over('classification', '^(050|060|080|082)..') +@marc21.over("classification", "^(050|060|080|082)..") @utils.for_each_value @utils.ignore_value def marc21_to_classification(self, key, value): @@ -326,7 +346,7 @@ def marc21_to_classification(self, key, value): do_classification(self, key, value) -@marc21.over('part_of', '^(773|800|830)..') +@marc21.over("part_of", "^(773|800|830)..") @utils.for_each_value @utils.ignore_value def marc21_to_part_of(self, key, value): @@ -354,7 +374,7 @@ def marc21_to_part_of(self, key, value): do_part_of(self, marc21, key, value) -@marc21.over('work_access_point', '(^130..|^730..)') +@marc21.over("work_access_point", "(^130..|^730..)") @utils.for_each_value @utils.ignore_value def marc21_to_work_access_point(self, key, value): @@ -362,7 +382,7 @@ def marc21_to_work_access_point(self, key, value): return do_work_access_point(marc21, key, value) -@marc21.over('work_access_point', '(^240..)') +@marc21.over("work_access_point", "(^240..)") @utils.for_each_value @utils.ignore_value def 
marc21_to_work_access_point_240(self, key, value): @@ -370,7 +390,7 @@ def marc21_to_work_access_point_240(self, key, value): return do_work_access_point_240(marc21, key, value) -@marc21.over('scale_cartographicAttributes', '^255..') +@marc21.over("scale_cartographicAttributes", "^255..") @utils.for_each_value @utils.ignore_value def marc21_to_scale_cartographic_attributes(self, key, value): @@ -378,7 +398,7 @@ def marc21_to_scale_cartographic_attributes(self, key, value): do_scale_and_cartographic(self, marc21, key, value) -@marc21.over('temporalCoverage', '^045..') +@marc21.over("temporalCoverage", "^045..") @utils.for_each_value @utils.ignore_value def marc21_to_temporal_coverage(self, key, value): diff --git a/rero_ils/modules/documents/dojson/contrib/marc21tojson/loc/__init__.py b/rero_ils/modules/documents/dojson/contrib/marc21tojson/loc/__init__.py index c9eabbda0c..febb5bd437 100644 --- a/rero_ils/modules/documents/dojson/contrib/marc21tojson/loc/__init__.py +++ b/rero_ils/modules/documents/dojson/contrib/marc21tojson/loc/__init__.py @@ -20,4 +20,4 @@ from .model import marc21, marc21_to_subjects_6XX -__all__ = ('marc21', 'marc21_to_subjects_6XX') +__all__ = ("marc21", "marc21_to_subjects_6XX") diff --git a/rero_ils/modules/documents/dojson/contrib/marc21tojson/loc/model.py b/rero_ils/modules/documents/dojson/contrib/marc21tojson/loc/model.py index 591639c9af..3e470bef28 100644 --- a/rero_ils/modules/documents/dojson/contrib/marc21tojson/loc/model.py +++ b/rero_ils/modules/documents/dojson/contrib/marc21tojson/loc/model.py @@ -25,77 +25,104 @@ from dojson import utils from flask import current_app -from rero_ils.dojson.utils import ReroIlsMarc21Overdo, TitlePartList, \ - build_identifier, build_string_from_subfields, get_field_items, \ - get_mef_link, remove_trailing_punctuation +from rero_ils.dojson.utils import ( + ReroIlsMarc21Overdo, + TitlePartList, + build_identifier, + build_string_from_subfields, + get_field_items, + get_mef_link, + remove_trailing_punctuation, +) from rero_ils.modules.documents.models import DocumentFictionType from rero_ils.modules.entities.models import EntityType -from ..utils import do_abbreviated_title, \ - do_acquisition_terms_from_field_037, do_classification, do_contribution, \ - do_copyright_date, do_credits, do_dissertation, do_edition_statement, \ - do_electronic_locator_from_field_856, do_frequency_field_310_321, \ - do_identified_by_from_field_010, do_identified_by_from_field_020, \ - do_identified_by_from_field_022, do_identified_by_from_field_024, \ - do_identified_by_from_field_028, do_identified_by_from_field_035, \ - do_intended_audience, do_issuance, do_language, \ - do_notes_and_original_title, do_part_of, do_provision_activity, \ - do_scale_and_cartographic, do_sequence_numbering, \ - do_specific_document_relation, do_summary, do_table_of_contents, \ - do_temporal_coverage, do_title, do_type, \ - do_usage_and_access_policy_from_field_506_540, perform_subdivisions +from ..utils import ( + do_abbreviated_title, + do_acquisition_terms_from_field_037, + do_classification, + do_contribution, + do_copyright_date, + do_credits, + do_dissertation, + do_edition_statement, + do_electronic_locator_from_field_856, + do_frequency_field_310_321, + do_identified_by_from_field_010, + do_identified_by_from_field_020, + do_identified_by_from_field_022, + do_identified_by_from_field_024, + do_identified_by_from_field_028, + do_identified_by_from_field_035, + do_intended_audience, + do_issuance, + do_language, + do_notes_and_original_title, + do_part_of, 
+ do_provision_activity, + do_scale_and_cartographic, + do_sequence_numbering, + do_specific_document_relation, + do_summary, + do_table_of_contents, + do_temporal_coverage, + do_title, + do_type, + do_usage_and_access_policy_from_field_506_540, + perform_subdivisions, +) marc21 = ReroIlsMarc21Overdo() _WORK_ACCESS_POINT = { - 'a': 'title', - 'f': 'date_of_work', - 'g': 'miscellaneous_information', - 'k': 'form_subdivision', - 'l': 'language', - 'm': 'medium_of_performance_for_music', - 'n': 'partNumber', - 'o': 'arranged_statement_for_music', - 'p': 'partName', - 'q': 'q', - 'r': 'key_for_music', - 's': 'miscellaneous_information' + "a": "title", + "f": "date_of_work", + "g": "miscellaneous_information", + "k": "form_subdivision", + "l": "language", + "m": "medium_of_performance_for_music", + "n": "partNumber", + "o": "arranged_statement_for_music", + "p": "partName", + "q": "q", + "r": "key_for_music", + "s": "miscellaneous_information", } def remove_punctuation(data, with_dot=False): """Remove punctuation from data.""" with contextlib.suppress(Exception): - if data[-1:] == ',': + if data[-1:] == ",": data = data[:-1] - if data[-1:] == '.' and with_dot: + if data[-1:] == "." and with_dot: data = data[:-1] - if data[-1:] == ':': + if data[-1:] == ":": data = data[:-1] - if data[-2:] == ' :': + if data[-2:] == " :": data = data[:-2] - if data[-1:] == ';': + if data[-1:] == ";": data = data[:-1] - if data[-2:] == ' ;': + if data[-2:] == " ;": data = data[:-2] - if data[-2:] == ' /': + if data[-2:] == " /": data = data[:-2] - if data[-2:] == ' -': + if data[-2:] == " -": data = data[:-2] return data -@marc21.over('issuance', 'leader') +@marc21.over("issuance", "leader") @utils.ignore_value def marc21_to_type_and_issuance(self, key, value): """Get document type, content/Media/Carrier type and mode of issuance.""" do_issuance(self, marc21) do_type(self, marc21) - self['fiction_statement'] = DocumentFictionType.Unspecified.value + self["fiction_statement"] = DocumentFictionType.Unspecified.value -@marc21.over('language', '^008') +@marc21.over("language", "^008") @utils.ignore_value def marc21_to_language(self, key, value): """Get languages. 
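# Usage sketch for the `remove_punctuation` helper reformatted above: each
# call strips at most one trailing ISBD separator per matching branch. The
# sample strings below are invented for illustration and are not part of
# the patch:

assert remove_punctuation("Paris,") == "Paris"
assert remove_punctuation("a novel /") == "a novel"
assert remove_punctuation("2nd ed.", with_dot=True) == "2nd ed"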
@@ -106,7 +133,7 @@ def marc21_to_language(self, key, value):
     return language or None

-@marc21.over('title', '(^210|^222)..')
+@marc21.over("title", "(^210|^222)..")
 @utils.ignore_value
 def marc21_to_abbreviated_title(self, key, value):
     """Get abbreviated title data."""
@@ -114,7 +141,7 @@ def marc21_to_abbreviated_title(self, key, value):
     return title_list or None

-@marc21.over('work_access_point', '(^130|^240|^730)..')
+@marc21.over("work_access_point", "(^130|^240|^730)..")
 @utils.for_each_value
 @utils.ignore_value
 def marc21_to_work_access_point(self, key, value):
@@ -134,19 +161,19 @@ def marc21_to_work_access_point(self, key, value):
     work = {}
     tag_key = key[:3]
-    part_list = TitlePartList(part_number_code='n', part_name_code='p')
-    part_selection = {'n', 'p'}
+    part_list = TitlePartList(part_number_code="n", part_name_code="p")
+    part_selection = {"n", "p"}
     items = get_field_items(value)
-    if tag_key in ['130', '730']:
-        work_selection = {'a', 'f', 'k', 'l', 'm', 'o', 'r'}
+    if tag_key in ["130", "730"]:
+        work_selection = {"a", "f", "k", "l", "m", "o", "r"}

-        miscellaneous_selection = {'g', 's'}
+        miscellaneous_selection = {"g", "s"}
         miscellaneous_parts = []
         # _WORK_ACCESS_POINT.get(subfield_code)
         for blob_key, blob_value in items:
             if blob_key in work_selection:
-                if blob_key in {'k', 'm'}:
+                if blob_key in {"k", "m"}:
                     datas = work.get(_WORK_ACCESS_POINT.get(blob_key), [])
                     datas.append(blob_value)
                     work[_WORK_ACCESS_POINT.get(blob_key)] = datas
@@ -157,65 +184,61 @@ def marc21_to_work_access_point(self, key, value):
             if blob_key in part_selection:
                 part_list.update_part(blob_value, blob_key, blob_value)
         if miscellaneous_parts:
-            work['miscellaneous_information'] = '. '.join(miscellaneous_parts)
-    if tag_key == '240':
+            work["miscellaneous_information"] = ". ".join(miscellaneous_parts)
+    if tag_key == "240":
         for blob_key, blob_value in items:
-            if blob_key in {'a'}:
+            if blob_key in {"a"}:
                 # work[_WORK_ACCESS_POINT.get('a')] = value.get('a')
                 work[_WORK_ACCESS_POINT.get(blob_key)] = blob_value
             if blob_key in part_selection:
                 part_list.update_part(blob_value, blob_key, blob_value)
-    if field_100 := marc21.get_fields('100'):
+    if field_100 := marc21.get_fields("100"):
         agent = {}
-        for blob_key, blob_value in field_100[0].get('subfields').items():
-            agent['type'] = EntityType.PERSON
-            if blob_key == 'a':
+        for blob_key, blob_value in field_100[0].get("subfields").items():
+            agent["type"] = EntityType.PERSON
+            if blob_key == "a":
                 # numeration = not_repetitive(
                 #     marc21.bib_id, marc21.bib_id, blob_key, blob_value, 'b')
-                agent['preferred_name'] = remove_trailing_punctuation(
-                    blob_value)
-            if blob_key == 'b':
+                agent["preferred_name"] = remove_trailing_punctuation(blob_value)
+            if blob_key == "b":
                 # numeration = not_repetitive(
                 #     marc21.bib_id, marc21.bib_id, blob_key, blob_value, 'b')
-                agent['numeration'] = remove_trailing_punctuation(
-                    blob_value)
-            if blob_key == 'c':
+                agent["numeration"] = remove_trailing_punctuation(blob_value)
+            if blob_key == "c":
                 # qualifier = not_repetitive(
                 #     marc21.bib_id, marc21.bib_id, blob_key, blob_value, 'c')
-                agent['qualifier'] = remove_trailing_punctuation(
-                    blob_value)
-            if blob_key == 'd':
+                agent["qualifier"] = remove_trailing_punctuation(blob_value)
+            if blob_key == "d":
                 # date = not_repetitive(
                 #     marc21.bib_id, marc21.bib_id, blob_key, blob_value, 'd')
-                date = blob_value.rstrip(',')
-                dates = remove_trailing_punctuation(date).split(
-                    '-')
+                date = blob_value.rstrip(",")
+                dates = remove_trailing_punctuation(date).split("-")
                 with contextlib.suppress(Exception):
                     if date_of_birth := dates[0].strip():
-                        agent['date_of_birth'] = date_of_birth
+                        agent["date_of_birth"] = date_of_birth
                 with contextlib.suppress(Exception):
                     if date_of_death := dates[1].strip():
-                        agent['date_of_death'] = date_of_death
-            if blob_key == 'q':
+                        agent["date_of_death"] = date_of_death
+            if blob_key == "q":
                 # fuller_form_of_name = not_repetitive(
                 #     marc21.bib_id, marc21.bib_id, blob_key, blob_value, 'q')
-                agent['fuller_form_of_name'] = remove_trailing_punctuation(
-                    blob_value
-                ).lstrip('(').rstrip(')')
-        work['agent'] = agent
+                agent["fuller_form_of_name"] = (
+                    remove_trailing_punctuation(blob_value).lstrip("(").rstrip(")")
+                )
+        work["agent"] = agent
     if the_part_list := part_list.get_part_list():
-        work['part'] = the_part_list
+        work["part"] = the_part_list
     if work:
-        work_access_points = self.get('work_access_point', [])
+        work_access_points = self.get("work_access_point", [])
         work_access_points.append(work)
-        self['work_access_point'] = work_access_points
+        self["work_access_point"] = work_access_points

-@marc21.over('title', '^245..')
+@marc21.over("title", "^245..")
 @utils.ignore_value
 def marc21_to_title(self, key, value):
     """Get title data."""
@@ -223,7 +246,7 @@ def marc21_to_title(self, key, value):
     return title_list or None

-@marc21.over('contribution', '(^100|^700|^710|^711)..')
+@marc21.over("contribution", "(^100|^700|^710|^711)..")
 @utils.for_each_value
 @utils.ignore_value
 def marc21_to_contribution(self, key, value):
@@ -231,7 +254,7 @@ def marc21_to_contribution(self, key, value):
     return do_contribution(self, marc21, key, value)

-@marc21.over('relation', '(770|772|775|776|777|780|785|787|533|534)..')
+@marc21.over("relation", "(770|772|775|776|777|780|785|787|533|534)..")
 @utils.for_each_value
 @utils.ignore_value
 def marc21_to_specific_document_relation(self, key, value):
@@ -239,7 +262,7 @@ def marc21_to_specific_document_relation(self, key, value):
     do_specific_document_relation(self, marc21, key, value)

-@marc21.over('copyrightDate', '^26[04].4')
+@marc21.over("copyrightDate", "^26[04].4")
 @utils.ignore_value
 def marc21_to_copyright_date(self, key, value):
     """Get Copyright Date."""
@@ -247,7 +270,7 @@ def marc21_to_copyright_date(self, key, value):
     return copyright_dates or None

-@marc21.over('editionStatement', '^250..')
+@marc21.over("editionStatement", "^250..")
 @utils.for_each_value
 @utils.ignore_value
 def marc21_to_edition_statement(self, key, value):
@@ -260,7 +283,7 @@ def marc21_to_edition_statement(self, key, value):
     return edition_data or None

-@marc21.over('provisionActivity', '^26[04].[_0-3]')
+@marc21.over("provisionActivity", "^26[04].[_0-3]")
 @utils.for_each_value
 @utils.ignore_value
 def marc21_to_provision_activity(self, key, value):
@@ -274,7 +297,7 @@ def marc21_to_provision_activity(self, key, value):
     return publication or None

-@marc21.over('extent', '^300..')
+@marc21.over("extent", "^300..")
 @utils.ignore_value
 def marc21_to_description(self, key, value):
     """Get physical description.
@@ -297,7 +320,7 @@ def marc21_to_description(self, key, value):
     marc21.extract_description_from_marc_field(key, value, self)

-@marc21.over('series', '^4[49]0..')
+@marc21.over("series", "^4[49]0..")
 @utils.for_each_value
 @utils.ignore_value
 def marc21_to_series(self, key, value):
@@ -306,7 +329,7 @@ def marc21_to_series(self, key, value):
     series.name: [490$a repetitive]
     series.number: [490$v repetitive]
     """
-    if key[:3] == '490':
+    if key[:3] == "490":
         marc21.extract_series_statement_from_marc_field(key, value, self)
     else:
         # we have a 440 series statement
@@ -328,63 +351,60 @@ def marc21_to_series(self, key, value):
         remove final punctuation "." or "," or ";" in seriesTitle
         """
-        if value.get('a'):
+        if value.get("a"):
             subseriesStatement = {}
             subfield_a = remove_punctuation(
-                utils.force_list(value.get('a'))[0],
-                with_dot=True
+                utils.force_list(value.get("a"))[0], with_dot=True
             )
-            series = {'seriesTitle': [{'value': subfield_a}]}
+            series = {"seriesTitle": [{"value": subfield_a}]}

-            if value.get('p'):
+            if value.get("p"):
                 """
                 440$n$p = subseriesTitle
                 440$v = subseriesEnumeration
                 """
-                string_build = build_string_from_subfields(value, 'np', ' ')
-                subseriesStatement['subseriesTitle'] = [{
-                    'value': remove_punctuation(string_build,
-                                                with_dot=True).rstrip()
-                }]
-                if value.get('v'):
+                string_build = build_string_from_subfields(value, "np", " ")
+                subseriesStatement["subseriesTitle"] = [
+                    {"value": remove_punctuation(string_build, with_dot=True).rstrip()}
+                ]
+                if value.get("v"):
                     parts = [
-                        remove_punctuation(subfield_v) for subfield_v
-                        in utils.force_list(value.get('v'))
+                        remove_punctuation(subfield_v)
+                        for subfield_v in utils.force_list(value.get("v"))
                     ]
-                    subseriesStatement['subseriesEnumeration'] = [{
-                        'value': '/'.join(parts)
-                    }]
-                series['subseriesStatement'] = [subseriesStatement]
-            elif value.get('n'):
-                if value.get('v'):
-                    string_build = build_string_from_subfields(
-                        value, 'nv')
-                    series['seriesEnumeration'] = [{
-                        'value': remove_punctuation(string_build,
-                                                    with_dot=True).rstrip()
-                    }]
+                    subseriesStatement["subseriesEnumeration"] = [
+                        {"value": "/".join(parts)}
+                    ]
+                series["subseriesStatement"] = [subseriesStatement]
+            elif value.get("n"):
+                if value.get("v"):
+                    string_build = build_string_from_subfields(value, "nv")
+                    series["seriesEnumeration"] = [
+                        {
+                            "value": remove_punctuation(
+                                string_build, with_dot=True
+                            ).rstrip()
+                        }
+                    ]
                 else:
-                    subseriesStatement['subseriesTitle'] = [{
-                        'value': ''.join(
-                            utils.force_list(value.get('n')))
-                    }]
-                    series['subseriesStatement'] = [subseriesStatement]
-            elif value.get('v'):
+                    subseriesStatement["subseriesTitle"] = [
+                        {"value": "".join(utils.force_list(value.get("n")))}
+                    ]
+                    series["subseriesStatement"] = [subseriesStatement]
+            elif value.get("v"):
                 parts = [
-                    remove_punctuation(subfield_v) for subfield_v in
-                    utils.force_list(value.get('v'))
+                    remove_punctuation(subfield_v)
+                    for subfield_v in utils.force_list(value.get("v"))
                 ]
-                series['seriesEnumeration'] = [{
-                    'value': '/'.join(parts)
-                }]
-        self['seriesStatement'] = self.get('seriesStatement', [])
-        self['seriesStatement'].append(series)
+                series["seriesEnumeration"] = [{"value": "/".join(parts)}]
+        self["seriesStatement"] = self.get("seriesStatement", [])
+        self["seriesStatement"].append(series)
     return None

-@marc21.over('tableOfContents', '^505..')
+@marc21.over("tableOfContents", "^505..")
 @utils.for_each_value
 @utils.ignore_value
 def marc21_to_table_of_contents(self, key, value):
@@ -392,7 +412,7 @@ def marc21_to_table_of_contents(self, key, value):
     do_table_of_contents(self, value)

-@marc21.over('usageAndAccessPolicy', '^(506|540)..')
+@marc21.over("usageAndAccessPolicy", "^(506|540)..")
 @utils.for_each_value
 @utils.ignore_value
 def marc21_to_usage_and_access_policy_from_field_506_540(self, key, value):
@@ -400,7 +420,7 @@ def marc21_to_usage_and_access_policy_from_field_506_540(self, key, value):
     return do_usage_and_access_policy_from_field_506_540(marc21, key, value)

-@marc21.over('frequency', '^(310|321)..')
+@marc21.over("frequency", "^(310|321)..")
 @utils.for_each_value
 @utils.ignore_value
 def marc21_to_frequency_field_310_321(self, key, value):
@@ -408,7 +428,7 @@ def marc21_to_frequency_field_310_321(self, key, value):
     return do_frequency_field_310_321(marc21, key, value)

-@marc21.over('dissertation', '^502..')
+@marc21.over("dissertation", "^502..")
 @utils.for_each_value
 @utils.ignore_value
 def marc21_to_dissertation(self, key, value):
@@ -417,7 +437,7 @@ def marc21_to_dissertation(self, key, value):
     return do_dissertation(marc21, value)

-@marc21.over('summary', '^520..')
+@marc21.over("summary", "^520..")
 @utils.for_each_value
 @utils.ignore_value
 def marc21_to_summary(self, key, value):
@@ -425,7 +445,7 @@ def marc21_to_summary(self, key, value):
     return do_summary(marc21, value)

-@marc21.over('intendedAudience', '^521..')
+@marc21.over("intendedAudience", "^521..")
 @utils.for_each_value
 @utils.ignore_value
 def marc21_to_intended_audience(self, key, value):
@@ -433,65 +453,64 @@ def marc21_to_intended_audience(self, key, value):
     do_intended_audience(self, value)

-@marc21.over('identifiedBy', '^010..')
+@marc21.over("identifiedBy", "^010..")
 @utils.ignore_value
 def marc21_to_identified_by_from_field_010(self, key, value):
     """Get identifier from field 010."""
     do_identified_by_from_field_010(self, marc21, key, value)

-@marc21.over('identifiedBy', '^020..')
+@marc21.over("identifiedBy", "^020..")
 @utils.ignore_value
 def marc21_to_identified_by_from_field_020(self, key, value):
     """Get identifier from field 020."""
     do_identified_by_from_field_020(self, marc21, key, value)

-@marc21.over('identifiedBy', '^022..')
+@marc21.over("identifiedBy", "^022..")
 @utils.ignore_value
 def marc21_to_identified_by_from_field_022(self, key, value):
     """Get identifier from field 022."""
     do_identified_by_from_field_022(self, value)

-@marc21.over('identifiedBy', '^024..')
+@marc21.over("identifiedBy", "^024..")
 @utils.ignore_value
 def marc21_to_identified_by_from_field_024(self, key, value):
     """Get identifier from field 024."""
     do_identified_by_from_field_024(self, marc21, key, value)

-@marc21.over('identifiedBy', '^028..')
+@marc21.over("identifiedBy", "^028..")
 @utils.ignore_value
 def marc21_to_identified_by_from_field_028(self, key, value):
     """Get identifier from field 028."""
     do_identified_by_from_field_028(self, marc21, key, value)

-@marc21.over('identifiedBy', '^035..')
+@marc21.over("identifiedBy", "^035..")
 @utils.ignore_value
 def marc21_to_identified_by_from_field_035(self, key, value):
     """Get identifier from field 035."""
     do_identified_by_from_field_035(self, marc21, key, value)

-@marc21.over('acquisitionTerms', '^037..')
+@marc21.over("acquisitionTerms", "^037..")
 @utils.ignore_value
 def marc21_to_acquisition_terms_from_field_037(self, key, value):
     """Get acquisition terms field 037."""
     do_acquisition_terms_from_field_037(self, value)

-@marc21.over('electronicLocator', '^856..')
+@marc21.over("electronicLocator", "^856..")
 @utils.ignore_value
 def marc21_to_electronicLocator_from_field_856(self, key, value):
     """Get electronicLocator from field 856."""
-    electronic_locators = do_electronic_locator_from_field_856(
-        self, marc21, key, value)
+    electronic_locators = do_electronic_locator_from_field_856(self, marc21, key, value)
     return electronic_locators or None

-@marc21.over('note', '^(500|510|530|545|555|580)..')
+@marc21.over("note", "^(500|510|530|545|555|580)..")
 @utils.for_each_value
 @utils.ignore_value
 def marc21_to_notes_and_original_title_5XX(self, key, value):
@@ -499,7 +518,7 @@ def marc21_to_notes_and_original_title_5XX(self, key, value):
     do_notes_and_original_title(self, key, value)

-@marc21.over('credits', '^(508|511)..')
+@marc21.over("credits", "^(508|511)..")
 @utils.for_each_value
 @utils.ignore_value
 def marc21_to_notes_and_original_title_508(self, key, value):
@@ -507,16 +526,16 @@ def marc21_to_notes_and_original_title_508(self, key, value):
     return do_credits(key, value)

-@marc21.over('supplementaryContent', '^504..')
+@marc21.over("supplementaryContent", "^504..")
 @utils.for_each_value
 @utils.ignore_value
 def marc21_to_notes_and_original_title_504(self, key, value):
     """Get notes and original title."""
-    if value.get('a'):
-        return utils.force_list(value.get('a'))[0]
+    if value.get("a"):
+        return utils.force_list(value.get("a"))[0]

-@marc21.over('subjects', '^(600|610|611|630|650|651|655)..')
+@marc21.over("subjects", "^(600|610|611|630|650|651|655)..")
 @utils.for_each_value
 @utils.ignore_value
 def marc21_to_subjects_6XX(self, key, value):
@@ -528,127 +547,128 @@ def marc21_to_subjects_6XX(self, key, value):
     subjects_imported : for 6xx having indicator 2 '0' or '2'
     """
     type_per_tag = {
-        '600': EntityType.PERSON,
-        '610': EntityType.ORGANISATION,
-        '611': EntityType.ORGANISATION,
-        '600t': EntityType.WORK,
-        '610t': EntityType.WORK,
-        '611t': EntityType.WORK,
-        '630': EntityType.WORK,
-        '650': EntityType.TOPIC,  # or bf:Temporal, changed by code
-        '651': EntityType.PLACE,
-        '655': EntityType.TOPIC
+        "600": EntityType.PERSON,
+        "610": EntityType.ORGANISATION,
+        "611": EntityType.ORGANISATION,
+        "600t": EntityType.WORK,
+        "610t": EntityType.WORK,
+        "611t": EntityType.WORK,
+        "630": EntityType.WORK,
+        "650": EntityType.TOPIC,  # or bf:Temporal, changed by code
+        "651": EntityType.PLACE,
+        "655": EntityType.TOPIC,
     }
     subfield_code_per_tag = {
-        '600': 'abcd',
-        '610': 'ab',
-        '611': 'acden',
-        '600t': 'tpn',
-        '610t': 'tpn',
-        '611t': 't',
-        '630': 'apn',
-        '650': 'a',
-        '651': 'a',
-        '655': 'a'
-    }
-    conference_per_tag = {
-        '610': False,
-        '611': True
-    }
-    source_per_indicator_2 = {
-        '0': 'LCSH',
-        '2': 'MeSH'
+        "600": "abcd",
+        "610": "ab",
+        "611": "acden",
+        "600t": "tpn",
+        "610t": "tpn",
+        "611t": "t",
+        "630": "apn",
+        "650": "a",
+        "651": "a",
+        "655": "a",
     }
+    conference_per_tag = {"610": False, "611": True}
+    source_per_indicator_2 = {"0": "LCSH", "2": "MeSH"}

     indicator_2 = key[4]
     tag_key = key[:3]
-    subfields_2 = utils.force_list(value.get('2'))
+    subfields_2 = utils.force_list(value.get("2"))
     subfield_2 = subfields_2[0] if subfields_2 else None
-    subfields_a = utils.force_list(value.get('a', []))
+    subfields_a = utils.force_list(value.get("a", []))

     # Try to get RERO_ILS_IMPORT_6XX_TARGET_ATTRIBUTE from the current app.
     # In the dojson CLI there is no current app, so we get the value directly
     # from config.py
     try:
         config_field_key = current_app.config.get(
-            'RERO_ILS_IMPORT_6XX_TARGET_ATTRIBUTE',
-            'subjects_imported'
+            "RERO_ILS_IMPORT_6XX_TARGET_ATTRIBUTE", "subjects_imported"
        )
     except Exception:
-        from rero_ils.config import \
-            RERO_ILS_IMPORT_6XX_TARGET_ATTRIBUTE as config_field_key
+        from rero_ils.config import (
+            RERO_ILS_IMPORT_6XX_TARGET_ATTRIBUTE as config_field_key,
+        )

-    if subfield_2 in ['rero', 'gnd', 'idref']:
-        if tag_key in ['600', '610', '611'] and value.get('t'):
-            tag_key += 't'
+    if subfield_2 in ["rero", "gnd", "idref"]:
+        if tag_key in ["600", "610", "611"] and value.get("t"):
+            tag_key += "t"
         data_type = type_per_tag[tag_key]
         # `data_type` is Temporal if tag is 650 and a $a starts with a digit.
-        if tag_key == '650':
+        if tag_key == "650":
             for subfield_a in subfields_a:
                 if subfield_a[0].isdigit():
-                    data_type = 'bf:Temporal'
+                    data_type = "bf:Temporal"
                     break
         subject = {
-            'type': data_type,
+            "type": data_type,
         }
         string_build = build_string_from_subfields(
-            value, subfield_code_per_tag[tag_key])
-        if tag_key == '655':
+            value, subfield_code_per_tag[tag_key]
+        )
+        if tag_key == "655":
             # remove the square brackets
-            string_build = re.sub(r'^\[(.*)\]$', r'\1', string_build)
-        subject['authorized_access_point'] = string_build
+            string_build = re.sub(r"^\[(.*)\]$", r"\1", string_build)
+        subject["authorized_access_point"] = string_build
         # if tag_key in ['610', '611']:
         #     subject['conference'] = conference_per_tag[tag_key]
         # elif tag_key in ['600t', '610t', '611t']:
-        if tag_key in ['600t', '610t', '611t']:
+        if tag_key in ["600t", "610t", "611t"]:
             creator_tag_key = tag_key[:3]  # to keep only tag: 600, 610, 611
             creator = remove_trailing_punctuation(
                 build_string_from_subfields(
-                    value, subfield_code_per_tag[creator_tag_key]), '.', '.')
+                    value, subfield_code_per_tag[creator_tag_key]
+                ),
+                ".",
+                ".",
+            )
             if creator:
-                subject['authorized_access_point'] = \
+                subject["authorized_access_point"] = (
                     f'{creator}. {subject["authorized_access_point"]}'
+                )

-        field_key = 'genreForm' if tag_key == '655' else config_field_key
-
-        if field_key != 'subjects_imported' and (ref := get_mef_link(
-                bibid=marc21.bib_id,
-                reroid=marc21.bib_id,
-                entity_type=data_type,
-                ids=utils.force_list(value.get('0')),
-                key=key
-        )):
-            subject = {
-                '$ref': ref
-            }
+        field_key = "genreForm" if tag_key == "655" else config_field_key
+
+        if field_key != "subjects_imported" and (
+            ref := get_mef_link(
+                bibid=marc21.bib_id,
+                reroid=marc21.bib_id,
+                entity_type=data_type,
+                ids=utils.force_list(value.get("0")),
+                key=key,
+            )
+        ):
+            subject = {"$ref": ref}
         else:
             identifier = build_identifier(value)
             if identifier:
-                subject['identifiedBy'] = identifier
-            if field_key != 'genreForm':
+                subject["identifiedBy"] = identifier
+            if field_key != "genreForm":
                 perform_subdivisions(subject, value)

-        if subject.get('$ref') or subject.get('authorized_access_point'):
+        if subject.get("$ref") or subject.get("authorized_access_point"):
             self.setdefault(field_key, []).append(dict(entity=subject))
-    elif indicator_2 in ['0', '2']:
+    elif indicator_2 in ["0", "2"]:
         term_string = build_string_from_subfields(
-            value, 'abcdefghijklmnopqrstuw', ' - ')
+            value, "abcdefghijklmnopqrstuw", " - "
+        )
         if term_string:
             data = {
-                'type': type_per_tag[tag_key],
-                'source': source_per_indicator_2[indicator_2],
-                'authorized_access_point': term_string.rstrip('.')
+                "type": type_per_tag[tag_key],
+                "source": source_per_indicator_2[indicator_2],
+                "authorized_access_point": term_string.rstrip("."),
             }
             perform_subdivisions(data, value)
             if data:
                 self.setdefault(config_field_key, []).append(dict(entity=data))

-@marc21.over('sequence_numbering', '^362..')
+@marc21.over("sequence_numbering", "^362..")
 @utils.for_each_value
 @utils.ignore_value
 def marc21_to_sequence_numbering(self, key, value):
@@ -656,7 +676,7 @@ def marc21_to_sequence_numbering(self, key, value):
     do_sequence_numbering(self, value)

-@marc21.over('classification', '^(050|060|080|082)..')
+@marc21.over("classification", "^(050|060|080|082)..")
 @utils.for_each_value
 @utils.ignore_value
 def marc21_to_classification(self, key, value):
@@ -664,7 +684,7 @@ def marc21_to_classification(self, key, value):
     do_classification(self, key, value)

-@marc21.over('part_of', '^(773|800|830)..')
+@marc21.over("part_of", "^(773|800|830)..") @utils.for_each_value @utils.ignore_value def marc21_to_part_of(self, key, value): @@ -692,7 +712,7 @@ def marc21_to_part_of(self, key, value): do_part_of(self, marc21, key, value) -@marc21.over('scale_cartographicAttributes', '^255..') +@marc21.over("scale_cartographicAttributes", "^255..") @utils.for_each_value @utils.ignore_value def marc21_to_scale_cartographic_attributes(self, key, value): @@ -700,7 +720,7 @@ def marc21_to_scale_cartographic_attributes(self, key, value): do_scale_and_cartographic(self, marc21, key, value) -@marc21.over('temporalCoverage', '^045..') +@marc21.over("temporalCoverage", "^045..") @utils.for_each_value @utils.ignore_value def marc21_to_temporal_coverage(self, key, value): diff --git a/rero_ils/modules/documents/dojson/contrib/marc21tojson/rero/__init__.py b/rero_ils/modules/documents/dojson/contrib/marc21tojson/rero/__init__.py index f45479fa83..9547a74462 100644 --- a/rero_ils/modules/documents/dojson/contrib/marc21tojson/rero/__init__.py +++ b/rero_ils/modules/documents/dojson/contrib/marc21tojson/rero/__init__.py @@ -19,4 +19,4 @@ from .model import marc21 -__all__ = ('marc21') +__all__ = "marc21" diff --git a/rero_ils/modules/documents/dojson/contrib/marc21tojson/rero/model.py b/rero_ils/modules/documents/dojson/contrib/marc21tojson/rero/model.py index 24a25f57aa..c4ad9fc551 100644 --- a/rero_ils/modules/documents/dojson/contrib/marc21tojson/rero/model.py +++ b/rero_ils/modules/documents/dojson/contrib/marc21tojson/rero/model.py @@ -25,26 +25,51 @@ from dojson import utils from dojson.utils import GroupableOrderedDict -from rero_ils.dojson.utils import ReroIlsMarc21Overdo, build_identifier, \ - build_string_from_subfields, error_print, get_field_items, get_mef_link, \ - not_repetitive, re_identified, remove_trailing_punctuation +from rero_ils.dojson.utils import ( + ReroIlsMarc21Overdo, + build_identifier, + build_string_from_subfields, + error_print, + get_field_items, + get_mef_link, + not_repetitive, + re_identified, + remove_trailing_punctuation, +) from rero_ils.modules.documents.models import DocumentFictionType from rero_ils.modules.documents.utils import create_authorized_access_point from rero_ils.modules.entities.models import EntityType -from ..utils import _CONTRIBUTION_ROLE, do_abbreviated_title, \ - do_acquisition_terms_from_field_037, do_copyright_date, do_credits, \ - do_dissertation, do_edition_statement, \ - do_electronic_locator_from_field_856, do_frequency_field_310_321, \ - do_identified_by_from_field_020, do_identified_by_from_field_022, \ - do_identified_by_from_field_024, do_identified_by_from_field_028, \ - do_identified_by_from_field_035, do_intended_audience, do_issuance, \ - do_notes_and_original_title, do_provision_activity, \ - do_scale_and_cartographic, do_sequence_numbering, \ - do_specific_document_relation, do_summary, do_table_of_contents, \ - do_temporal_coverage, do_title, \ - do_usage_and_access_policy_from_field_506_540, do_work_access_point, \ - perform_subdivisions +from ..utils import ( + _CONTRIBUTION_ROLE, + do_abbreviated_title, + do_acquisition_terms_from_field_037, + do_copyright_date, + do_credits, + do_dissertation, + do_edition_statement, + do_electronic_locator_from_field_856, + do_frequency_field_310_321, + do_identified_by_from_field_020, + do_identified_by_from_field_022, + do_identified_by_from_field_024, + do_identified_by_from_field_028, + do_identified_by_from_field_035, + do_intended_audience, + do_issuance, + do_notes_and_original_title, + 
+    do_provision_activity,
+    do_scale_and_cartographic,
+    do_sequence_numbering,
+    do_specific_document_relation,
+    do_summary,
+    do_table_of_contents,
+    do_temporal_coverage,
+    do_title,
+    do_usage_and_access_policy_from_field_506_540,
+    do_work_access_point,
+    perform_subdivisions,
+)

 class MyReroIlsMarc21Overdo(ReroIlsMarc21Overdo):
@@ -57,48 +82,44 @@ def do(self, blob, ignore_missing=True, exception_handlers=None):
         """Translate blob values and instantiate new model instance."""
         result = super().do(blob, ignore_missing, exception_handlers)
         # add fiction
-        if 'genreForm' in result and 'harvested' not in result:
-            for genre_form in result.get('genreForm', []):
-                entity = genre_form['entity']
-                if (
-                    entity['type'] == 'bf:Topic' and
-                    entity['authorized_access_point'] in
-                    ['Fictions', 'Films de fiction']
-                ):
-                    result['fiction_statement'] = \
-                        DocumentFictionType.Fiction.value
-        if 'fiction_statement' not in result and 'subjects' in result:
-            result['fiction_statement'] = \
-                DocumentFictionType.NonFiction.value
-        if 'fiction_statement' not in result:
-            result['fiction_statement'] = DocumentFictionType.Unspecified.value
+        if "genreForm" in result and "harvested" not in result:
+            for genre_form in result.get("genreForm", []):
+                entity = genre_form["entity"]
+                if entity["type"] == "bf:Topic" and entity[
+                    "authorized_access_point"
+                ] in ["Fictions", "Films de fiction"]:
+                    result["fiction_statement"] = DocumentFictionType.Fiction.value
+        if "fiction_statement" not in result and "subjects" in result:
+            result["fiction_statement"] = DocumentFictionType.NonFiction.value
+        if "fiction_statement" not in result:
+            result["fiction_statement"] = DocumentFictionType.Unspecified.value
         return result

 marc21 = MyReroIlsMarc21Overdo()

-_CONTAINS_FACTUM_REGEXP = re.compile(r'factum')
+_CONTAINS_FACTUM_REGEXP = re.compile(r"factum")

-@marc21.over('issuance', 'leader')
+@marc21.over("issuance", "leader")
 @utils.ignore_value
 def marc21_to_type_and_issuance(self, key, value):
     """Get document type, content/Media/Carrier type and mode of issuance."""
     do_issuance(self, marc21)

-@marc21.over('pid', '^001')
+@marc21.over("pid", "^001")
 @utils.ignore_value
 def marc21_to_pid(self, key, value):
     """Get pid.

     If 001 starts with 'REROILS:' save as pid.
     """
-    value = value.strip().split(':')
-    return value[1] if value[0] == 'REROILS' else None
+    value = value.strip().split(":")
+    return value[1] if value[0] == "REROILS" else None

-@marc21.over('language', '^008')
+@marc21.over("language", "^008")
 @utils.ignore_value
 def marc21_to_language(self, key, value):
     """Get languages.
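# Behaviour sketch for `marc21_to_pid` above: field 001 yields a pid only when
# the value carries the "REROILS:" prefix. `sketch_pid` mirrors the converter
# body; the sample identifiers are invented for illustration:

def sketch_pid(value):
    """Return the part after 'REROILS:', or None for foreign identifiers."""
    parts = value.strip().split(":")
    return parts[1] if parts[0] == "REROILS" else None

assert sketch_pid("REROILS:123456") == "123456"
assert sketch_pid("ocm01234567") is None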
@@ -106,38 +127,32 @@ def marc21_to_language(self, key, value):
     languages: 008 and 041 [$a, repetitive]
     """
     lang_codes = []
-    language = self.get('language', [])
+    language = self.get("language", [])
     if marc21.lang_from_008:
-        language.append({
-            'value': marc21.lang_from_008,
-            'type': 'bf:Language'
-        })
+        language.append({"value": marc21.lang_from_008, "type": "bf:Language"})
         lang_codes.append(marc21.lang_from_008)
     for lang_value in marc21.langs_from_041_a:
         if lang_value not in lang_codes:
-            language.append({
-                'value': lang_value.strip(),
-                'type': 'bf:Language'
-            })
+            language.append({"value": lang_value.strip(), "type": "bf:Language"})
             lang_codes.append(lang_value)
     # language note
-    if fields_546 := marc21.get_fields(tag='546'):
-        subfields_546_a = marc21.get_subfields(fields_546[0], 'a')
+    if fields_546 := marc21.get_fields(tag="546"):
+        subfields_546_a = marc21.get_subfields(fields_546[0], "a")
         if subfields_546_a and language:
-            language[0]['note'] = subfields_546_a[0]
+            language[0]["note"] = subfields_546_a[0]
     # if not language:
     #     error_print('ERROR LANGUAGE:', marc21.bib_id, 'set to "und"')
     #     language = [{'value': 'und', 'type': 'bf:Language'}]
     # is fiction
-    self['fiction_statement'] = DocumentFictionType.Unspecified.value
-    if value[33] in ['1', 'd', 'f', 'j', 'p']:
-        self['fiction_statement'] = DocumentFictionType.Fiction.value
-    elif value[33] in ['0', 'e', 'h', 'i', 's']:
-        self['fiction_statement'] = DocumentFictionType.NonFiction.value
+    self["fiction_statement"] = DocumentFictionType.Unspecified.value
+    if value[33] in ["1", "d", "f", "j", "p"]:
+        self["fiction_statement"] = DocumentFictionType.Fiction.value
+    elif value[33] in ["0", "e", "h", "i", "s"]:
+        self["fiction_statement"] = DocumentFictionType.NonFiction.value
     return language or None

-@marc21.over('title', '^245..')
+@marc21.over("title", "^245..")
 @utils.ignore_value
 def marc21_to_title(self, key, value):
     """Get title data."""
@@ -146,144 +161,162 @@ def marc21_to_title(self, key, value):
     return title_list or None

-@marc21.over('contribution', '(^100|^700|^710|^711)..')
+@marc21.over("contribution", "(^100|^700|^710|^711)..")
 @utils.for_each_value
 @utils.ignore_value
 def marc21_to_contribution(self, key, value):
     """Get contribution."""
     # exclude work access points
-    if key[:3] in ['700', '710'] and value.get('t'):
+    if key[:3] in ["700", "710"] and value.get("t"):
         if work_access_point := do_work_access_point(marc21, key, value):
-            self.setdefault('work_access_point', [])
-            self['work_access_point'].append(work_access_point)
+            self.setdefault("work_access_point", [])
+            self["work_access_point"].append(work_access_point)
         return None

     agent = {}
     if ref := get_mef_link(
         bibid=marc21.bib_id,
         reroid=marc21.rero_id,
         entity_type=EntityType.PERSON,
-        ids=utils.force_list(value.get('0')),
-        key=key
+        ids=utils.force_list(value.get("0")),
+        key=key,
     ):
-        agent['$ref'] = ref
+        agent["$ref"] = ref

     # we do not have a $ref
     agent_data = {}
-    if not agent.get('$ref') and value.get('a'):
-        if value.get('a'):
+    if not agent.get("$ref") and value.get("a"):
+        if value.get("a"):
             if name := not_repetitive(
-                    marc21.bib_id,
-                    marc21.rero_id,
-                    key, value, 'a').rstrip('.'):
-                agent_data['preferred_name'] = name
+                marc21.bib_id, marc21.rero_id, key, value, "a"
+            ).rstrip("."):
+                agent_data["preferred_name"] = name

     # 100|700 Person
-    if key[:3] in ['100', '700']:
-        agent_data['type'] = EntityType.PERSON
-        if value.get('b'):
+    if key[:3] in ["100", "700"]:
+        agent_data["type"] = EntityType.PERSON
+        if value.get("b"):
             numeration = not_repetitive(
-                marc21.bib_id, marc21.rero_id, key, value, 'b')
+                marc21.bib_id, marc21.rero_id, key, value, "b"
+            )
             if numeration := remove_trailing_punctuation(numeration):
-                agent_data['numeration'] = numeration
-        if value.get('c'):
+                agent_data["numeration"] = numeration
+        if value.get("c"):
             qualifier = not_repetitive(
-                marc21.bib_id, marc21.rero_id, key, value, 'c')
-            agent_data['qualifier'] = \
-                remove_trailing_punctuation(qualifier)
-        if value.get('d'):
-            date = not_repetitive(
-                marc21.bib_id, marc21.rero_id, key, value, 'd')
-            date = date.rstrip(',')
-            dates = remove_trailing_punctuation(date).split('-')
+                marc21.bib_id, marc21.rero_id, key, value, "c"
+            )
+            agent_data["qualifier"] = remove_trailing_punctuation(qualifier)
+        if value.get("d"):
+            date = not_repetitive(marc21.bib_id, marc21.rero_id, key, value, "d")
+            date = date.rstrip(",")
+            dates = remove_trailing_punctuation(date).split("-")
             with contextlib.suppress(Exception):
                 if date_of_birth := dates[0].strip():
-                    agent_data['date_of_birth'] = date_of_birth
+                    agent_data["date_of_birth"] = date_of_birth
             with contextlib.suppress(Exception):
                 if date_of_death := dates[1].strip():
-                    agent_data['date_of_death'] = date_of_death
-        if value.get('q'):
+                    agent_data["date_of_death"] = date_of_death
+        if value.get("q"):
             fuller_form_of_name = not_repetitive(
-                marc21.bib_id, marc21.rero_id, key, value, 'q')
-            if fuller_form_of_name := remove_trailing_punctuation(
-                    fuller_form_of_name).lstrip('(').rstrip(')'):
-                agent_data['fuller_form_of_name'] = fuller_form_of_name
+                marc21.bib_id, marc21.rero_id, key, value, "q"
+            )
+            if (
+                fuller_form_of_name := remove_trailing_punctuation(
+                    fuller_form_of_name
+                )
+                .lstrip("(")
+                .rstrip(")")
+            ):
+                agent_data["fuller_form_of_name"] = fuller_form_of_name

         if identifier := build_identifier(value):
-            agent_data['identifiedBy'] = identifier
+            agent_data["identifiedBy"] = identifier

-    elif key[:3] in ['710', '711']:
-        agent_data['type'] = EntityType.ORGANISATION
-        agent_data['conference'] = key[:3] == '711'
-        if value.get('b'):
+    elif key[:3] in ["710", "711"]:
+        agent_data["type"] = EntityType.ORGANISATION
+        agent_data["conference"] = key[:3] == "711"
+        if value.get("b"):
             subordinate_units = [
-                subordinate_unit.rstrip('.') for subordinate_unit
-                in utils.force_list(value.get('b'))
+                subordinate_unit.rstrip(".")
+                for subordinate_unit in utils.force_list(value.get("b"))
             ]
-            agent_data['subordinate_unit'] = subordinate_units
-        if value.get('e'):
-            subordinate_units = agent_data.get('subordinate_unit', [])
-            for subordinate_unit in utils.force_list(value.get('e')):
-                subordinate_units.append(subordinate_unit.rstrip('.'))
-            agent_data['subordinate_unit'] = subordinate_units
-        if value.get('n'):
+            agent_data["subordinate_unit"] = subordinate_units
+        if value.get("e"):
+            subordinate_units = agent_data.get("subordinate_unit", [])
+            for subordinate_unit in utils.force_list(value.get("e")):
+                subordinate_units.append(subordinate_unit.rstrip("."))
+            agent_data["subordinate_unit"] = subordinate_units
+        if value.get("n"):
             numbering = not_repetitive(
-                marc21.bib_id, marc21.rero_id, key, value, 'n')
-            if numbering := remove_trailing_punctuation(
-                    numbering).lstrip('(').rstrip(')'):
-                agent_data['numbering'] = numbering
-        if value.get('d'):
+                marc21.bib_id, marc21.rero_id, key, value, "n"
+            )
+            if (
+                numbering := remove_trailing_punctuation(numbering)
+                .lstrip("(")
+                .rstrip(")")
+            ):
+                agent_data["numbering"] = numbering
+        if value.get("d"):
             conference_date = not_repetitive(
-                marc21.bib_id, marc21.rero_id, key, value, 'd')
-            if conference_date := remove_trailing_punctuation(
-                    conference_date).lstrip('(').rstrip(')'):
-                agent_data['conference_date'] = conference_date
-        if value.get('c'):
-            place = not_repetitive(
-                marc21.bib_id, marc21.rero_id, key, value, 'c')
-            if place := remove_trailing_punctuation(
-                    place).lstrip('(').rstrip(')'):
-                agent_data['place'] = place
+                marc21.bib_id, marc21.rero_id, key, value, "d"
+            )
+            if (
+                conference_date := remove_trailing_punctuation(conference_date)
+                .lstrip("(")
+                .rstrip(")")
+            ):
+                agent_data["conference_date"] = conference_date
+        if value.get("c"):
+            place = not_repetitive(marc21.bib_id, marc21.rero_id, key, value, "c")
+            if place := remove_trailing_punctuation(place).lstrip("(").rstrip(")"):
+                agent_data["place"] = place

         if identifier := build_identifier(value):
-            agent_data['identifiedBy'] = identifier
+            agent_data["identifiedBy"] = identifier

     if agent_data:
-        agent['type'] = agent_data['type']
-        agent['authorized_access_point'] = \
-            create_authorized_access_point(agent_data)
-        if agent_data.get('identifiedBy'):
-            agent['identifiedBy'] = agent_data['identifiedBy']
-    if value.get('4'):
+        agent["type"] = agent_data["type"]
+        agent["authorized_access_point"] = create_authorized_access_point(agent_data)
+        if agent_data.get("identifiedBy"):
+            agent["identifiedBy"] = agent_data["identifiedBy"]
+    if value.get("4"):
         roles = []
-        for role in utils.force_list(value.get('4')):
+        for role in utils.force_list(value.get("4")):
             if len(role) != 3:
-                error_print('WARNING CONTRIBUTION ROLE LENGTH:',
-                            marc21.bib_id, marc21.rero_id, role)
+                error_print(
+                    "WARNING CONTRIBUTION ROLE LENGTH:",
+                    marc21.bib_id,
+                    marc21.rero_id,
+                    role,
+                )
                 role = role[:3]
-            if role == 'sce':
-                error_print('WARNING CONTRIBUTION ROLE SCE:',
-                            marc21.bib_id, marc21.rero_id,
-                            'sce --> aus')
-                role = 'aus'
+            if role == "sce":
+                error_print(
+                    "WARNING CONTRIBUTION ROLE SCE:",
+                    marc21.bib_id,
+                    marc21.rero_id,
+                    "sce --> aus",
+                )
+                role = "aus"
             role = role.lower()
             if role not in _CONTRIBUTION_ROLE:
-                error_print('WARNING CONTRIBUTION ROLE DEFINITION:',
-                            marc21.bib_id, marc21.rero_id, role)
-                role = 'ctb'
+                error_print(
+                    "WARNING CONTRIBUTION ROLE DEFINITION:",
+                    marc21.bib_id,
+                    marc21.rero_id,
+                    role,
+                )
+                role = "ctb"
             roles.append(role)
-    elif key[:3] == '100':
-        roles = ['cre']
-    elif key[:3] == '711':
-        roles = ['aut']
+    elif key[:3] == "100":
+        roles = ["cre"]
+    elif key[:3] == "711":
+        roles = ["aut"]
     else:
-        roles = ['ctb']
+        roles = ["ctb"]
     if agent:
-        return {
-            'entity': agent,
-            'role': list(set(roles))
-        }
+        return {"entity": agent, "role": list(set(roles))}

-@marc21.over('relation', '(770|772|775|776|777|780|785|787|533|534)..')
+@marc21.over("relation", "(770|772|775|776|777|780|785|787|533|534)..")
 @utils.for_each_value
 @utils.ignore_value
 def marc21_to_specific_document_relation(self, key, value):
@@ -291,7 +324,7 @@ def marc21_to_specific_document_relation(self, key, value):
     do_specific_document_relation(self, marc21, key, value)

-@marc21.over('copyrightDate', '^264.4')
+@marc21.over("copyrightDate", "^264.4")
 @utils.ignore_value
 def marc21_to_copyright_date(self, key, value):
     """Get Copyright Date."""
@@ -299,7 +332,7 @@ def marc21_to_copyright_date(self, key, value):
     return copyright_dates or None

-@marc21.over('title', '(^210|^222)..')
+@marc21.over("title", "(^210|^222)..")
 @utils.ignore_value
 def marc21_to_abbreviated_title(self, key, value):
     """Get abbreviated title data."""
@@ -307,7 +340,7 @@ def marc21_to_abbreviated_title(self, key, value):
     return title_list or None
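# Behaviour sketch for the subfield $4 relator-code normalization above.
# `sketch_role` mirrors the loop body; the `known` tuple stands in for the
# real _CONTRIBUTION_ROLE list, and the sample codes are invented:

def sketch_role(role, known=("aut", "aus", "cre", "ctb")):
    """Normalize a relator code the way marc21_to_contribution does."""
    if len(role) != 3:
        role = role[:3]  # over-long codes are truncated (with a warning)
    if role == "sce":
        role = "aus"  # legacy 'sce' is remapped before lower-casing
    role = role.lower()
    return role if role in known else "ctb"  # unknown codes fall back to ctb

assert sketch_role("sce") == "aus"
assert sketch_role("AUT") == "aut"
assert sketch_role("unknown") == "ctb"  # truncated to "unk", not a known code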
-@marc21.over('editionStatement', '^250..')
+@marc21.over("editionStatement", "^250..")
 @utils.for_each_value
 @utils.ignore_value
 def marc21_to_edition_statement(self, key, value):
@@ -320,7 +353,7 @@ def marc21_to_edition_statement(self, key, value):
     return edition_data or None

-@marc21.over('provisionActivity', '^(260..|264.[_0-3])')
+@marc21.over("provisionActivity", "^(260..|264.[_0-3])")
 @utils.for_each_value
 @utils.ignore_value
 def marc21_to_provision_activity(self, key, value):
@@ -334,7 +367,7 @@ def marc21_to_provision_activity(self, key, value):
     return publication or None

-@marc21.over('extent', '^300..')
+@marc21.over("extent", "^300..")
 @utils.ignore_value
 def marc21_to_description(self, key, value):
     """Get physical description.
@@ -357,20 +390,20 @@ def marc21_to_description(self, key, value):
     marc21.extract_description_from_marc_field(key, value, self)

-@marc21.over('type', '^339..')
+@marc21.over("type", "^339..")
 @utils.for_each_value
 @utils.ignore_value
 def marc21_to_type(self, key, value):
     """Get document type."""
     document_type = {}
-    if main_type := value.get('a'):
+    if main_type := value.get("a"):
         document_type["main_type"] = main_type
-    if sub_type := value.get('b'):
+    if sub_type := value.get("b"):
         document_type["subtype"] = sub_type
     return document_type or None

-@marc21.over('seriesStatement', '^490..')
+@marc21.over("seriesStatement", "^490..")
 @utils.for_each_value
 @utils.ignore_value
 def marc21_to_series_statement(self, key, value):
@@ -382,7 +415,7 @@ def marc21_to_series_statement(self, key, value):
     marc21.extract_series_statement_from_marc_field(key, value, self)

-@marc21.over('tableOfContents', '^505..')
+@marc21.over("tableOfContents", "^505..")
 @utils.for_each_value
 @utils.ignore_value
 def marc21_to_table_of_contents(self, key, value):
@@ -390,7 +423,7 @@ def marc21_to_table_of_contents(self, key, value):
     do_table_of_contents(self, value)

-@marc21.over('usageAndAccessPolicy', '^(506|540)..')
+@marc21.over("usageAndAccessPolicy", "^(506|540)..")
 @utils.for_each_value
 @utils.ignore_value
 def marc21_to_usage_and_access_policy_from_field_506_540(self, key, value):
@@ -398,7 +431,7 @@ def marc21_to_usage_and_access_policy_from_field_506_540(self, key, value):
     return do_usage_and_access_policy_from_field_506_540(marc21, key, value)

-@marc21.over('frequency', '^(310|321)..')
+@marc21.over("frequency", "^(310|321)..")
 @utils.for_each_value
 @utils.ignore_value
 def marc21_to_frequency_field_310_321(self, key, value):
@@ -406,7 +439,7 @@ def marc21_to_frequency_field_310_321(self, key, value):
     return do_frequency_field_310_321(marc21, key, value)

-@marc21.over('dissertation', '^502..')
+@marc21.over("dissertation", "^502..")
 @utils.for_each_value
 @utils.ignore_value
 def marc21_to_dissertation(self, key, value):
@@ -415,7 +448,7 @@ def marc21_to_dissertation(self, key, value):
     return do_dissertation(marc21, value)

-@marc21.over('summary', '^520..')
+@marc21.over("summary", "^520..")
 @utils.for_each_value
 @utils.ignore_value
 def marc21_to_summary(self, key, value):
@@ -423,7 +456,7 @@ def marc21_to_summary(self, key, value):
     return do_summary(marc21, value)

-@marc21.over('intendedAudience', '^521..')
+@marc21.over("intendedAudience", "^521..")
 @utils.for_each_value
 @utils.ignore_value
 def marc21_to_intended_audience(self, key, value):
@@ -431,78 +464,78 @@ def marc21_to_intended_audience(self, key, value):
     do_intended_audience(self, value)

-@marc21.over('identifiedBy', '^020..')
+@marc21.over("identifiedBy", "^020..")
 @utils.ignore_value
 def marc21_to_identified_by_from_field_020(self, key, value):
     """Get identifier from field 020."""
     do_identified_by_from_field_020(self, marc21, key, value)

-@marc21.over('identifiedBy', '^022..')
+@marc21.over("identifiedBy", "^022..")
 @utils.ignore_value
 def marc21_to_identified_by_from_field_022(self, key, value):
     """Get identifier from field 022."""
     do_identified_by_from_field_022(self, value)

-@marc21.over('identifiedBy', '^024..')
+@marc21.over("identifiedBy", "^024..")
 @utils.ignore_value
 def marc21_to_identified_by_from_field_024(self, key, value):
     """Get identifier from field 024."""
     do_identified_by_from_field_024(self, marc21, key, value)

-@marc21.over('identifiedBy', '^028..')
+@marc21.over("identifiedBy", "^028..")
 @utils.ignore_value
 def marc21_to_identified_by_from_field_028(self, key, value):
     """Get identifier from field 028."""
     do_identified_by_from_field_028(self, marc21, key, value)

-@marc21.over('identifiedBy', '^035..')
+@marc21.over("identifiedBy", "^035..")
 @utils.ignore_value
 def marc21_to_identified_by_from_field_035(self, key, value):
     """Get identifier from field 035."""
-    do_identified_by_from_field_035(self, marc21, key, value, source='RERO')
+    do_identified_by_from_field_035(self, marc21, key, value, source="RERO")

-@marc21.over('identifiedBy', '^930..')
+@marc21.over("identifiedBy", "^930..")
 @utils.ignore_value
 def marc21_to_identified_by_from_field_930(self, key, value):
     """Get identifier from field 930."""
-    if subfield_a := not_repetitive(marc21.bib_id, marc21.rero_id, key, value,
-                                    'a', default='').strip():
+    if subfield_a := not_repetitive(
+        marc21.bib_id, marc21.rero_id, key, value, "a", default=""
+    ).strip():
         identifier = {}
         if match := re_identified.match(subfield_a):
             # match.group(1) : parentheses content
-            identifier['source'] = match.group(1)
+            identifier["source"] = match.group(1)
             # value without parenthesis and parentheses content
-            identifier['value'] = match.group(2)
+            identifier["value"] = match.group(2)
         else:
-            identifier['value'] = subfield_a
-        identifier['type'] = 'bf:Local'
-        identified_by = self.get('identifiedBy', [])
+            identifier["value"] = subfield_a
+        identifier["type"] = "bf:Local"
+        identified_by = self.get("identifiedBy", [])
         identified_by.append(identifier)
-        self['identifiedBy'] = identified_by
+        self["identifiedBy"] = identified_by

-@marc21.over('acquisitionTerms', '^037..')
+@marc21.over("acquisitionTerms", "^037..")
 @utils.ignore_value
 def marc21_to_acquisition_terms_from_field_037(self, key, value):
     """Get acquisition terms field 037."""
     do_acquisition_terms_from_field_037(self, value)

-@marc21.over('electronicLocator', '^856..')
+@marc21.over("electronicLocator", "^856..")
 @utils.ignore_value
 def marc21_to_electronicLocator_from_field_856(self, key, value):
     """Get electronicLocator from field 856."""
-    electronic_locators = do_electronic_locator_from_field_856(
-        self, marc21, key, value)
+    electronic_locators = do_electronic_locator_from_field_856(self, marc21, key, value)
     return electronic_locators or None

-@marc21.over('note', '^(500|510|530|545|555|580)..')
+@marc21.over("note", "^(500|510|530|545|555|580)..")
 @utils.for_each_value
 @utils.ignore_value
 def marc21_to_notes_and_original_title(self, key, value):
@@ -510,7 +543,7 @@ def marc21_to_notes_and_original_title(self, key, value):
     do_notes_and_original_title(self, key, value)

-@marc21.over('credits', '^(508|511)..')
+@marc21.over("credits", "^(508|511)..")
 @utils.for_each_value
 @utils.ignore_value
 def marc21_to_credits(self, key, value):
@@ -518,16 +551,16 @@ def marc21_to_credits(self, key, value):
     return do_credits(key, value)

-@marc21.over('supplementaryContent', '^504..')
+@marc21.over("supplementaryContent", "^504..")
 @utils.for_each_value
 @utils.ignore_value
 def marc21_to_supplementary_content(self, key, value):
     """Get notes and original title."""
-    if value.get('a'):
-        return utils.force_list(value.get('a'))[0]
+    if value.get("a"):
+        return utils.force_list(value.get("a"))[0]

-@marc21.over('subjects', '^(600|610|611|630|650|651|655)..')
+@marc21.over("subjects", "^(600|610|611|630|650|651|655)..")
 @utils.for_each_value
 @utils.ignore_value
 def marc21_to_subjects(self, key, value):
@@ -539,172 +572,180 @@ def marc21_to_subjects(self, key, value):
     subjects_imported : for 6xx having indicator 2 '0' or '2'
     """
     type_per_tag = {
-        '600': EntityType.PERSON,
-        '610': EntityType.ORGANISATION,
-        '611': EntityType.ORGANISATION,
-        '600t': EntityType.WORK,
-        '610t': EntityType.WORK,
-        '611t': EntityType.WORK,
-        '630': EntityType.WORK,
-        '650': EntityType.TOPIC,  # or bf:Temporal, changed by code
-        '651': EntityType.PLACE,
-        '655': EntityType.TOPIC
+        "600": EntityType.PERSON,
+        "610": EntityType.ORGANISATION,
+        "611": EntityType.ORGANISATION,
+        "600t": EntityType.WORK,
+        "610t": EntityType.WORK,
+        "611t": EntityType.WORK,
+        "630": EntityType.WORK,
+        "650": EntityType.TOPIC,  # or bf:Temporal, changed by code
+        "651": EntityType.PLACE,
+        "655": EntityType.TOPIC,
     }
-    source_per_indicator_2 = {
-        '0': 'LCSH',
-        '2': 'MeSH'
-    }
+    source_per_indicator_2 = {"0": "LCSH", "2": "MeSH"}

     indicator_2 = key[4]
     tag_key = key[:3]
-    subfields_2 = utils.force_list(value.get('2'))
+    subfields_2 = utils.force_list(value.get("2"))
     subfield_2 = subfields_2[0] if subfields_2 else None
-    subfields_a = utils.force_list(value.get('a', []))
+    subfields_a = utils.force_list(value.get("a", []))

-    if subfield_2 == 'rero':
-        if tag_key in ['600', '610', '611'] and value.get('t'):
-            tag_key += 't'
+    if subfield_2 == "rero":
+        if tag_key in ["600", "610", "611"] and value.get("t"):
+            tag_key += "t"
         data_type = type_per_tag[tag_key]
         # `data_type` is Temporal if tag is 650 and a $a starts with a digit.
-        if tag_key == '650':
+        if tag_key == "650":
             for subfield_a in subfields_a:
                 if subfield_a[0].isdigit():
                     data_type = EntityType.TEMPORAL
                     break
-        subject = {
-            'type': data_type,
-            'source': subfield_2
-        }
+        subject = {"type": data_type, "source": subfield_2}

         subfield_code_per_tag = {
-            '600': 'abcd',
-            '610': 'ab',
-            '611': 'acden',
-            '600t': 'tpn',
-            '610t': 'tpn',
-            '611t': 't',
-            '630': 'apn',
-            '650': 'a',
-            '651': 'a',
-            '655': 'a'
+            "600": "abcd",
+            "610": "ab",
+            "611": "acden",
+            "600t": "tpn",
+            "610t": "tpn",
+            "611t": "t",
+            "630": "apn",
+            "650": "a",
+            "651": "a",
+            "655": "a",
         }

         string_build = build_string_from_subfields(
-            value, subfield_code_per_tag[tag_key])
-        if tag_key == '655':
+            value, subfield_code_per_tag[tag_key]
+        )
+        if tag_key == "655":
             # remove the square brackets
-            string_build = re.sub(r'^\[(.*)\]$', r'\1', string_build)
-        subject['authorized_access_point'] = string_build
+            string_build = re.sub(r"^\[(.*)\]$", r"\1", string_build)
+        subject["authorized_access_point"] = string_build

-        if tag_key in ['600t', '610t', '611t']:
+        if tag_key in ["600t", "610t", "611t"]:
             creator_tag_key = tag_key[:3]  # to keep only tag: 600, 610, 611
-            subject['authorized_access_point'] = remove_trailing_punctuation(
-                build_string_from_subfields(
-                    value, subfield_code_per_tag[creator_tag_key]), '.', '.'
-            ) + '. 
' + subject['authorized_access_point'] - field_key = 'genreForm' if tag_key == '655' else 'subjects' - subfields_0 = utils.force_list(value.get('0')) - if field_key != 'subjects_imported' and (ref := get_mef_link( - bibid=marc21.bib_id, - reroid=marc21.rero_id, - entity_type=data_type, - ids=utils.force_list(subfields_0), - key=key - )): + subject["authorized_access_point"] = ( + remove_trailing_punctuation( + build_string_from_subfields( + value, subfield_code_per_tag[creator_tag_key] + ), + ".", + ".", + ) + + ". " + + subject["authorized_access_point"] + ) + field_key = "genreForm" if tag_key == "655" else "subjects" + subfields_0 = utils.force_list(value.get("0")) + if field_key != "subjects_imported" and ( + ref := get_mef_link( + bibid=marc21.bib_id, + reroid=marc21.rero_id, + entity_type=data_type, + ids=utils.force_list(subfields_0), + key=key, + ) + ): subject = { - '$ref': ref, + "$ref": ref, } else: if identifier := build_identifier(value): - subject['identifiedBy'] = identifier - if field_key != 'genreForm': + subject["identifiedBy"] = identifier + if field_key != "genreForm": perform_subdivisions(subject, value) - if subject.get('$ref') or subject.get('authorized_access_point'): + if subject.get("$ref") or subject.get("authorized_access_point"): subjects = self.get(field_key, []) subjects.append(dict(entity=subject)) self[field_key] = subjects - elif subfield_2 == 'rerovoc' or indicator_2 in ['0', '2']: + elif subfield_2 == "rerovoc" or indicator_2 in ["0", "2"]: if term_string := build_string_from_subfields( - value, 'abcdefghijklmnopqrstuw', ' - '): - source = 'rerovoc' if subfield_2 == 'rerovoc' \ + value, "abcdefghijklmnopqrstuw", " - " + ): + source = ( + "rerovoc" + if subfield_2 == "rerovoc" else source_per_indicator_2[indicator_2] + ) subject_imported = { - 'type': type_per_tag[tag_key], - 'source': source, - 'authorized_access_point': term_string + "type": type_per_tag[tag_key], + "source": source, + "authorized_access_point": term_string, } perform_subdivisions(subject_imported, value) - subjects_imported = self.get('subjects_imported', []) + subjects_imported = self.get("subjects_imported", []) if subject_imported: subjects_imported.append(dict(entity=subject_imported)) - self['subjects_imported'] = subjects_imported + self["subjects_imported"] = subjects_imported -@marc21.over('subjects_imported', '^919..') +@marc21.over("subjects_imported", "^919..") @utils.for_each_value @utils.ignore_value def marc21_to_subjects_imported(self, key, value): """Get subject and genreForm_imported imported from 919 (L53, L54).""" - specific_contains_regexp = \ - re.compile(r'\[(carte postale|affiche|document photographique)\]') + specific_contains_regexp = re.compile( + r"\[(carte postale|affiche|document photographique)\]" + ) contains_specific_voc_regexp = re.compile( - r'^(chrero|rerovoc|ram|rameau|gnd|rerovoc|gatbegr|gnd-content)$') + r"^(chrero|rerovoc|ram|rameau|gnd|rerovoc|gatbegr|gnd-content)$" + ) - subfields_2 = utils.force_list(value.get('2')) - term_string = '' + subfields_2 = utils.force_list(value.get("2")) + term_string = "" data_imported = None - field_key = 'subjects_imported' + field_key = "subjects_imported" if subfields_2: subfield_2 = subfields_2[0] if contains_specific_voc_regexp.search(subfield_2): add_data_imported = False - if subfield_2 == 'chrero': - subfields_9 = utils.force_list(value.get('9')) + if subfield_2 == "chrero": + subfields_9 = utils.force_list(value.get("9")) subfield_9 = subfields_9[0] - if subfields_v := utils.force_list(value.get('v')): + 
if subfields_v := utils.force_list(value.get("v")): subfield_v = subfields_v[0] match = specific_contains_regexp.search(subfield_v) if match: - contains_655_regexp = re.compile(r'655') + contains_655_regexp = re.compile(r"655") match = contains_655_regexp.search(subfield_9) add_data_imported = True if match: - field_key = 'genreForm_imported' + field_key = "genreForm_imported" else: add_data_imported = True - if subfield_2 in ['gatbegr', 'gnd-content']: - field_key = 'genreForm_imported' + if subfield_2 in ["gatbegr", "gnd-content"]: + field_key = "genreForm_imported" if add_data_imported: term_string = build_string_from_subfields( - value, - 'abcdefghijklmnopqrstuvwxyz', ' - ') + value, "abcdefghijklmnopqrstuvwxyz", " - " + ) data_imported = { - 'type': EntityType.TOPIC, - 'source': subfield_2, - 'authorized_access_point': term_string + "type": EntityType.TOPIC, + "source": subfield_2, + "authorized_access_point": term_string, } elif term_string := build_string_from_subfields( - value, 'abcdefghijklmnopqrstuvwxyz', ' - '): + value, "abcdefghijklmnopqrstuvwxyz", " - " + ): data_imported = { - 'type': EntityType.TOPIC, - 'authorized_access_point': term_string + "type": EntityType.TOPIC, + "authorized_access_point": term_string, } if data_imported: subjects_or_genre_form_imported_imported = self.get(field_key, []) - subjects_or_genre_form_imported_imported.append( - dict(entity=data_imported)) + subjects_or_genre_form_imported_imported.append(dict(entity=data_imported)) self[field_key] = subjects_or_genre_form_imported_imported -@marc21.over('sequence_numbering', '^362..') +@marc21.over("sequence_numbering", "^362..") @utils.for_each_value @utils.ignore_value def marc21_to_sequence_numbering(self, key, value): @@ -712,69 +753,68 @@ def marc21_to_sequence_numbering(self, key, value): do_sequence_numbering(self, value) -@marc21.over('classification', '^(050|060|080|082|980)..') +@marc21.over("classification", "^(050|060|080|082|980)..") @utils.for_each_value @utils.ignore_value def marc21_to_classification(self, key, value): """Get classification and subject from 980.""" classification_type_per_tag = { - '050': 'bf:ClassificationLcc', - '060': 'bf:ClassificationNlm', - '080': 'bf:ClassificationUdc', - '082': 'bf:ClassificationDdc', + "050": "bf:ClassificationLcc", + "060": "bf:ClassificationNlm", + "080": "bf:ClassificationUdc", + "082": "bf:ClassificationDdc", } def get_classif_type_and_subdivision_codes_from_980_2(subfield_2): if not subfield_2: return None, None classification_type_per_tag_980_2 = { - 'brp': 'classification_brunetparguez', - 'dr-sys': 'classification_droit', - 'musi': 'classification_musicale_instruments', - 'musg': 'classification_musicale_genres' - } - subdivision_subfield_codes_per_tag_980_2 = { - 'brp': {'d'}, - 'musg': {'d', 'e'} + "brp": "classification_brunetparguez", + "dr-sys": "classification_droit", + "musi": "classification_musicale_instruments", + "musg": "classification_musicale_genres", } + subdivision_subfield_codes_per_tag_980_2 = {"brp": {"d"}, "musg": {"d", "e"}} classification_type = None subdivision_subfield_codes = None for key in classification_type_per_tag_980_2: - regexp = re.compile(fr'{key}', re.IGNORECASE) + regexp = re.compile(rf"{key}", re.IGNORECASE) if regexp.search(subfield_2): classification_type = classification_type_per_tag_980_2[key] if key in subdivision_subfield_codes_per_tag_980_2: - subdivision_subfield_codes = \ + subdivision_subfield_codes = ( subdivision_subfield_codes_per_tag_980_2[key] + ) break return classification_type, 
subdivision_subfield_codes tag = key[:3] indicator1 = key[3] indicator2 = key[4] - subfields_a = utils.force_list(value.get('a', [])) - subfields_2 = utils.force_list(value.get('2')) + subfields_a = utils.force_list(value.get("a", [])) + subfields_2 = utils.force_list(value.get("2")) subfield_2 = None if subfields_2: subfield_2 = subfields_2[0] for subfield_a in subfields_a: classification = {} - classification['classificationPortion'] = subfield_a - if tag == '980': + classification["classificationPortion"] = subfield_a + if tag == "980": if subfield_2 and _CONTAINS_FACTUM_REGEXP.search(subfield_2): subject = { - 'type': EntityType.PERSON, - 'authorized_access_point': subfield_a, - 'source': 'Factum' + "type": EntityType.PERSON, + "authorized_access_point": subfield_a, + "source": "Factum", } - subjects = self.get('subjects', []) + subjects = self.get("subjects", []) subjects.append(dict(entity=subject)) - self['subjects'] = subjects + self["subjects"] = subjects - classif_type, subdivision_subfield_codes = \ + classif_type, subdivision_subfield_codes = ( get_classif_type_and_subdivision_codes_from_980_2(subfield_2) + ) if classif_type: - classification['type'] = classif_type + classification["type"] = classif_type if subdivision_subfield_codes: items = get_field_items(value) subdivision = [] @@ -782,62 +822,62 @@ def get_classif_type_and_subdivision_codes_from_980_2(subfield_2): if blob_key in subdivision_subfield_codes: subdivision.append(blob_value) if subdivision: - classification['subdivision'] = subdivision + classification["subdivision"] = subdivision else: # avoid classification if type not found classification = None else: - classification['type'] = classification_type_per_tag[tag] - if tag == '050' and indicator2 == '0': - classification['assigner'] = 'LOC' - if tag == '060' and indicator2 == '0': - classification['assigner'] = 'NLM' - if tag == '080': - subfields_x = utils.force_list(value.get('x')) + classification["type"] = classification_type_per_tag[tag] + if tag == "050" and indicator2 == "0": + classification["assigner"] = "LOC" + if tag == "060" and indicator2 == "0": + classification["assigner"] = "NLM" + if tag == "080": + subfields_x = utils.force_list(value.get("x")) if subfields_x: - classification['subdivision'] = [] + classification["subdivision"] = [] for subfield_x in subfields_x: - classification['subdivision'].append(subfield_x) + classification["subdivision"].append(subfield_x) edition = None - if indicator1 == '0': - edition = 'Full edition' - elif indicator1 == '1': - edition = 'Abridged edition' + if indicator1 == "0": + edition = "Full edition" + elif indicator1 == "1": + edition = "Abridged edition" if subfield_2: if edition: - edition += ', ' + subfield_2 + edition += ", " + subfield_2 else: edition = subfield_2 if edition: - classification['edition'] = edition - elif tag == '082': - subfields_q = utils.force_list(value.get('q')) + classification["edition"] = edition + elif tag == "082": + subfields_q = utils.force_list(value.get("q")) subfield_q = None edition = None if subfields_q: subfield_q = subfields_q[0] - if indicator2 == '0': - classification['assigner'] = 'LOC' + if indicator2 == "0": + classification["assigner"] = "LOC" elif subfield_q: - classification['assigner'] = subfield_q - if indicator1 == '0': - edition = 'Full edition' - elif indicator1 == '1': - edition = 'Abridged edition' + classification["assigner"] = subfield_q + if indicator1 == "0": + edition = "Full edition" + elif indicator1 == "1": + edition = "Abridged edition" if 
subfield_2: if edition: - edition += ', ' + subfield_2 + edition += ", " + subfield_2 else: edition = subfield_2 if edition: - classification['edition'] = edition - classification_list = self.get('classification', []) + classification["edition"] = edition + classification_list = self.get("classification", []) if classification: classification_list.append(classification) - self['classification'] = classification_list + self["classification"] = classification_list -@marc21.over('part_of', '^(773|800|830)..') +@marc21.over("part_of", "^(773|800|830)..") @utils.for_each_value @utils.ignore_value def marc21_to_part_of(self, key, value): @@ -869,14 +909,14 @@ class Numbering(object): def __init__(self): """Constructor method.""" self._numbering = {} - self._year_regexp = re.compile(r'^\d{4}') - self._string_regexp = re.compile(r'.*') - self._pages_regexp = re.compile(r'^\d+(-\d+)?$') + self._year_regexp = re.compile(r"^\d{4}") + self._string_regexp = re.compile(r".*") + self._pages_regexp = re.compile(r"^\d+(-\d+)?$") self._pattern_per_key = { - 'year': self._year_regexp, - 'pages': self._pages_regexp, - 'issue': self._string_regexp, - 'volume': self._string_regexp + "year": self._year_regexp, + "pages": self._pages_regexp, + "issue": self._string_regexp, + "volume": self._string_regexp, } def add_numbering_value(self, key, value): @@ -892,16 +932,16 @@ def add_numbering_value(self, key, value): """ if self._pattern_per_key[key].search(value): self._numbering[key] = value - elif key != 'year': - self._numbering['discard'] = True + elif key != "year": + self._numbering["discard"] = True def has_year(self): """Check if `year` key is present in `Numbering` data.""" - return 'year' in self._numbering + return "year" in self._numbering def is_valid(self): """Check if `Numbering` data is valid.""" - return self._numbering and 'discard' not in self._numbering + return self._numbering and "discard" not in self._numbering def get(self): """Get the `Numbering` data object.""" @@ -919,125 +959,120 @@ def add_author_to_subfield_t(value): author = None pending_g_values = [] pending_v_values = [] - match = re.compile(r'\. -$') # match the trailing '. -' - subfield_selection = {'a', 't', 'g', 'v'} + match = re.compile(r"\. -$") # match the trailing '. -' + subfield_selection = {"a", "t", "g", "v"} for blob_key, blob_value in items: if blob_key in subfield_selection: - if blob_key == 'a': + if blob_key == "a": # remove the trailing '. 
-' - author = match.sub('', blob_value) + author = match.sub("", blob_value) # reverse first name and last name - author_parts = author.split(',') - author = ' '.join(reversed(author_parts)).strip() - subfield_selection.remove('a') - elif blob_key == 't': + author_parts = author.split(",") + author = " ".join(reversed(author_parts)).strip() + subfield_selection.remove("a") + elif blob_key == "t": subfield_t = blob_value if author: - subfield_t += f' / {author}' - new_data.append(('t', subfield_t)) - elif blob_key == 'g': + subfield_t += f" / {author}" + new_data.append(("t", subfield_t)) + elif blob_key == "g": pending_g_values.append(blob_value) - elif blob_key == 'v': + elif blob_key == "v": pending_v_values.append(blob_value) - new_data.extend(('g', g_value) for g_value in pending_g_values) - new_data.extend(('v', v_value) for v_value in pending_v_values) + new_data.extend(("g", g_value) for g_value in pending_g_values) + new_data.extend(("v", v_value) for v_value in pending_v_values) return GroupableOrderedDict(tuple(new_data)) - if key[:3] == '800' and value.get('t'): + if key[:3] == "800" and value.get("t"): if work_access_point := do_work_access_point(marc21, key, value): - self.setdefault('work_access_point', []) - self['work_access_point'].append(work_access_point) + self.setdefault("work_access_point", []) + self["work_access_point"].append(work_access_point) part_of = {} numbering_list = [] - subfield_w = not_repetitive(marc21.bib_id, marc21.rero_id, - key, value, 'w', default='').strip() + subfield_w = not_repetitive( + marc21.bib_id, marc21.rero_id, key, value, "w", default="" + ).strip() if subfield_w: - match = re.compile(r'^REROILS:') - pid = match.sub('', subfield_w) - part_of['document'] = { - '$ref': f'https://bib.rero.ch/api/documents/{pid}' - } - if key[:3] == '773': + match = re.compile(r"^REROILS:") + pid = match.sub("", subfield_w) + part_of["document"] = {"$ref": f"https://bib.rero.ch/api/documents/{pid}"} + if key[:3] == "773": discard_numbering = False - for subfield_g in utils.force_list(value.get('g', [])): + for subfield_g in utils.force_list(value.get("g", [])): numbering = Numbering() - values = subfield_g.strip().split('/') - numbering.add_numbering_value('year', values[0][:4]) + values = subfield_g.strip().split("/") + numbering.add_numbering_value("year", values[0][:4]) if len(values) == 1 and not numbering.has_year(): if values[0]: - numbering.add_numbering_value('pages', values[0]) + numbering.add_numbering_value("pages", values[0]) elif len(values) == 2: if numbering.has_year(): if values[1]: - numbering.add_numbering_value('pages', values[1]) + numbering.add_numbering_value("pages", values[1]) else: if values[0]: - numbering.add_numbering_value('volume', values[0]) + numbering.add_numbering_value("volume", values[0]) if values[1]: - numbering.add_numbering_value('issue', values[1]) + numbering.add_numbering_value("issue", values[1]) elif len(values) == 3: if not numbering.has_year() and values[0]: - numbering.add_numbering_value('volume', values[0]) + numbering.add_numbering_value("volume", values[0]) if values[1]: - numbering.add_numbering_value('issue', values[1]) + numbering.add_numbering_value("issue", values[1]) if values[2]: - numbering.add_numbering_value('pages', values[2]) + numbering.add_numbering_value("pages", values[2]) elif len(values) == 4: if numbering.has_year(): if values[1]: - numbering.add_numbering_value('volume', values[1]) + numbering.add_numbering_value("volume", values[1]) if values[2]: - numbering.add_numbering_value('issue', 
values[2]) + numbering.add_numbering_value("issue", values[2]) if values[3]: - numbering.add_numbering_value('pages', values[3]) + numbering.add_numbering_value("pages", values[3]) else: discard_numbering = True if not discard_numbering and numbering.is_valid(): numbering_list.append(numbering.get()) else: # 800, 830 - for subfield_v in utils.force_list(value.get('v', [])): + for subfield_v in utils.force_list(value.get("v", [])): numbering = Numbering() if subfield_v: - numbering.add_numbering_value('volume', str(subfield_v)) + numbering.add_numbering_value("volume", str(subfield_v)) if numbering.is_valid(): numbering_list.append(numbering.get()) - if 'document' in part_of: + if "document" in part_of: if numbering_list: - part_of['numbering'] = numbering_list - self['partOf'] = self.get('partOf', []) - if part_of not in self['partOf']: - self['partOf'].append(part_of) + part_of["numbering"] = numbering_list + self["partOf"] = self.get("partOf", []) + if part_of not in self["partOf"]: + self["partOf"].append(part_of) else: # no link found - if key[:3] == '773': + if key[:3] == "773": if not marc21.has_field_580: # the author in subfield $a is appended to subfield $t value = add_author_to_subfield_t(value) # create a seriesStatement instead of a partOf - marc21.extract_series_statement_from_marc_field( - key, value, self - ) + marc21.extract_series_statement_from_marc_field(key, value, self) else: # 800, 830 if not marc21.has_field_490: # create a seriesStatement instead of a partOf - if key[:3] == '800': + if key[:3] == "800": # the author in subfield $a is appended to subfield $t value = add_author_to_subfield_t(value) - marc21.extract_series_statement_from_marc_field( - key, value, self - ) + marc21.extract_series_statement_from_marc_field(key, value, self) -@marc21.over('_masked', '^099..') +@marc21.over("_masked", "^099..") def marc21_to_masked(self, key, value): """Get masked. 

     masked: [099$a masked]
     """
-    return value.get('a') == 'masked'
+    return value.get("a") == "masked"


-@marc21.over('work_access_point', '(^130..|^730..)')
+@marc21.over("work_access_point", "(^130..|^730..)")
 @utils.for_each_value
 @utils.ignore_value
 def marc21_to_work_access_point(self, key, value):
@@ -1045,7 +1080,7 @@ def marc21_to_work_access_point(self, key, value):
     return do_work_access_point(marc21, key, value)


-@marc21.over('scale_cartographicAttributes', '^255..')
+@marc21.over("scale_cartographicAttributes", "^255..")
 @utils.for_each_value
 @utils.ignore_value
 def marc21_to_scale_cartographic_attributes(self, key, value):
@@ -1053,7 +1088,7 @@ def marc21_to_scale_cartographic_attributes(self, key, value):
     do_scale_and_cartographic(self, marc21, key, value)


-@marc21.over('temporalCoverage', '^045..')
+@marc21.over("temporalCoverage", "^045..")
 @utils.for_each_value
 @utils.ignore_value
 def marc21_to_temporal_coverage(self, key, value):
diff --git a/rero_ils/modules/documents/dojson/contrib/marc21tojson/slsp/__init__.py b/rero_ils/modules/documents/dojson/contrib/marc21tojson/slsp/__init__.py
index a3b4d2d419..e713aebbba 100644
--- a/rero_ils/modules/documents/dojson/contrib/marc21tojson/slsp/__init__.py
+++ b/rero_ils/modules/documents/dojson/contrib/marc21tojson/slsp/__init__.py
@@ -20,4 +20,4 @@
 from .model import marc21


-__all__ = ('marc21')
+__all__ = "marc21"
diff --git a/rero_ils/modules/documents/dojson/contrib/marc21tojson/slsp/model.py b/rero_ils/modules/documents/dojson/contrib/marc21tojson/slsp/model.py
index 6b566dc44e..3535626e29 100644
--- a/rero_ils/modules/documents/dojson/contrib/marc21tojson/slsp/model.py
+++ b/rero_ils/modules/documents/dojson/contrib/marc21tojson/slsp/model.py
@@ -22,50 +22,77 @@
 from dojson import utils
 from flask import current_app

-from rero_ils.dojson.utils import ReroIlsMarc21Overdo, build_identifier, \
-    build_string_from_subfields, get_mef_link, remove_trailing_punctuation
+from rero_ils.dojson.utils import (
+    ReroIlsMarc21Overdo,
+    build_identifier,
+    build_string_from_subfields,
+    get_mef_link,
+    remove_trailing_punctuation,
+)
 from rero_ils.modules.documents.models import DocumentFictionType
 from rero_ils.modules.entities.models import EntityType

-from ..utils import do_abbreviated_title, \
-    do_acquisition_terms_from_field_037, do_classification, do_contribution, \
-    do_copyright_date, do_credits, do_dissertation, do_edition_statement, \
-    do_electronic_locator_from_field_856, do_frequency_field_310_321, \
-    do_identified_by_from_field_010, do_identified_by_from_field_020, \
-    do_identified_by_from_field_022, do_identified_by_from_field_024, \
-    do_identified_by_from_field_028, do_identified_by_from_field_035, \
-    do_intended_audience, do_issuance, do_language, \
-    do_notes_and_original_title, do_part_of, do_provision_activity, \
-    do_scale_and_cartographic, do_sequence_numbering, \
-    do_specific_document_relation, do_summary, do_table_of_contents, \
-    do_temporal_coverage, do_title, do_type, \
-    do_usage_and_access_policy_from_field_506_540, do_work_access_point, \
-    do_work_access_point_240, perform_subdivisions
+from ..utils import (
+    do_abbreviated_title,
+    do_acquisition_terms_from_field_037,
+    do_classification,
+    do_contribution,
+    do_copyright_date,
+    do_credits,
+    do_dissertation,
+    do_edition_statement,
+    do_electronic_locator_from_field_856,
+    do_frequency_field_310_321,
+    do_identified_by_from_field_010,
+    do_identified_by_from_field_020,
+    do_identified_by_from_field_022,
+    do_identified_by_from_field_024,
+
do_identified_by_from_field_028, + do_identified_by_from_field_035, + do_intended_audience, + do_issuance, + do_language, + do_notes_and_original_title, + do_part_of, + do_provision_activity, + do_scale_and_cartographic, + do_sequence_numbering, + do_specific_document_relation, + do_summary, + do_table_of_contents, + do_temporal_coverage, + do_title, + do_type, + do_usage_and_access_policy_from_field_506_540, + do_work_access_point, + do_work_access_point_240, + perform_subdivisions, +) marc21 = ReroIlsMarc21Overdo() -@marc21.over('issuance', 'leader') +@marc21.over("issuance", "leader") @utils.ignore_value def marc21_to_type_and_issuance(self, key, value): """Get document type, content/Media/Carrier type and mode of issuance.""" do_issuance(self, marc21) do_type(self, marc21) - self['fiction_statement'] = DocumentFictionType.Unspecified.value + self["fiction_statement"] = DocumentFictionType.Unspecified.value -@marc21.over('pid', '^001') +@marc21.over("pid", "^001") @utils.ignore_value def marc21_to_pid(self, key, value): """Get pid. If 001 starts with 'REROILS:' save as pid. """ - value = value.strip().split(':') - return value[1] if value[0] == 'REROILS' else None + value = value.strip().split(":") + return value[1] if value[0] == "REROILS" else None -@marc21.over('language', '^008') +@marc21.over("language", "^008") @utils.ignore_value def marc21_to_language(self, key, value): """Get languages. @@ -74,15 +101,15 @@ def marc21_to_language(self, key, value): """ language = do_language(self, marc21) # is fiction - self['fiction_statement'] = DocumentFictionType.Unspecified.value - if value[33] in ['1', 'd', 'f', 'j', 'p']: - self['fiction_statement'] = DocumentFictionType.Fiction.value - elif value[33] in ['0', 'e', 'h', 'i', 's']: - self['fiction_statement'] = DocumentFictionType.NonFiction.value + self["fiction_statement"] = DocumentFictionType.Unspecified.value + if value[33] in ["1", "d", "f", "j", "p"]: + self["fiction_statement"] = DocumentFictionType.Fiction.value + elif value[33] in ["0", "e", "h", "i", "s"]: + self["fiction_statement"] = DocumentFictionType.NonFiction.value return language or None -@marc21.over('title', '(^210|^222)..') +@marc21.over("title", "(^210|^222)..") @utils.ignore_value def marc21_to_abbreviated_title(self, key, value): """Get abbreviated title data.""" @@ -90,7 +117,7 @@ def marc21_to_abbreviated_title(self, key, value): return title_list or None -@marc21.over('title', '^245..') +@marc21.over("title", "^245..") @utils.ignore_value def marc21_to_title(self, key, value): """Get title data.""" @@ -98,7 +125,7 @@ def marc21_to_title(self, key, value): return title_list or None -@marc21.over('contribution', '(^100|^700|^710|^711)..') +@marc21.over("contribution", "(^100|^700|^710|^711)..") @utils.for_each_value @utils.ignore_value def marc21_to_contribution(self, key, value): @@ -106,7 +133,7 @@ def marc21_to_contribution(self, key, value): return do_contribution(self, marc21, key, value) -@marc21.over('relation', '(770|772|775|776|777|780|785|787|533|534)..') +@marc21.over("relation", "(770|772|775|776|777|780|785|787|533|534)..") @utils.for_each_value @utils.ignore_value def marc21_to_specific_document_relation(self, key, value): @@ -114,7 +141,7 @@ def marc21_to_specific_document_relation(self, key, value): do_specific_document_relation(self, marc21, key, value) -@marc21.over('copyrightDate', '^26[04].4') +@marc21.over("copyrightDate", "^26[04].4") @utils.ignore_value def marc21_to_copyright_date(self, key, value): """Get Copyright Date.""" @@ -122,7 
+149,7 @@ def marc21_to_copyright_date(self, key, value): return copyright_dates or None -@marc21.over('editionStatement', '^250..') +@marc21.over("editionStatement", "^250..") @utils.for_each_value @utils.ignore_value def marc21_to_edition_statement(self, key, value): @@ -135,7 +162,7 @@ def marc21_to_edition_statement(self, key, value): return edition_data or None -@marc21.over('provisionActivity', '^26[04].[_0-3]') +@marc21.over("provisionActivity", "^26[04].[_0-3]") @utils.for_each_value @utils.ignore_value def marc21_to_provision_activity(self, key, value): @@ -149,7 +176,7 @@ def marc21_to_provision_activity(self, key, value): return publication or None -@marc21.over('extent', '^300..') +@marc21.over("extent", "^300..") @utils.ignore_value def marc21_to_description(self, key, value): """Get physical description. @@ -172,7 +199,7 @@ def marc21_to_description(self, key, value): marc21.extract_description_from_marc_field(key, value, self) -@marc21.over('seriesStatement', '^490..') +@marc21.over("seriesStatement", "^490..") @utils.for_each_value @utils.ignore_value def marc21_to_series_statement(self, key, value): @@ -184,7 +211,7 @@ def marc21_to_series_statement(self, key, value): marc21.extract_series_statement_from_marc_field(key, value, self) -@marc21.over('tableOfContents', '^505..') +@marc21.over("tableOfContents", "^505..") @utils.for_each_value @utils.ignore_value def marc21_to_table_of_contents(self, key, value): @@ -192,7 +219,7 @@ def marc21_to_table_of_contents(self, key, value): do_table_of_contents(self, value) -@marc21.over('usageAndAccessPolicy', '^(506|540)..') +@marc21.over("usageAndAccessPolicy", "^(506|540)..") @utils.for_each_value @utils.ignore_value def marc21_to_usage_and_access_policy_from_field_506_540(self, key, value): @@ -200,7 +227,7 @@ def marc21_to_usage_and_access_policy_from_field_506_540(self, key, value): return do_usage_and_access_policy_from_field_506_540(marc21, key, value) -@marc21.over('frequency', '^(310|321)..') +@marc21.over("frequency", "^(310|321)..") @utils.for_each_value @utils.ignore_value def marc21_to_frequency_field_310_321(self, key, value): @@ -208,7 +235,7 @@ def marc21_to_frequency_field_310_321(self, key, value): return do_frequency_field_310_321(marc21, key, value) -@marc21.over('dissertation', '^502..') +@marc21.over("dissertation", "^502..") @utils.for_each_value @utils.ignore_value def marc21_to_dissertation(self, key, value): @@ -217,7 +244,7 @@ def marc21_to_dissertation(self, key, value): return do_dissertation(marc21, value) -@marc21.over('summary', '^520..') +@marc21.over("summary", "^520..") @utils.for_each_value @utils.ignore_value def marc21_to_abstract(self, key, value): @@ -225,7 +252,7 @@ def marc21_to_abstract(self, key, value): return do_summary(marc21, value) -@marc21.over('intendedAudience', '^521..') +@marc21.over("intendedAudience", "^521..") @utils.for_each_value @utils.ignore_value def marc21_to_intended_audience(self, key, value): @@ -233,65 +260,64 @@ def marc21_to_intended_audience(self, key, value): do_intended_audience(self, value) -@marc21.over('identifiedBy', '^010..') +@marc21.over("identifiedBy", "^010..") @utils.ignore_value def marc21_to_identified_by_from_field_010(self, key, value): """Get identifier from field 010.""" do_identified_by_from_field_010(self, marc21, key, value) -@marc21.over('identifiedBy', '^020..') +@marc21.over("identifiedBy", "^020..") @utils.ignore_value def marc21_to_identified_by_from_field_020(self, key, value): """Get identifier from field 020.""" 
do_identified_by_from_field_020(self, marc21, key, value) -@marc21.over('identifiedBy', '^022..') +@marc21.over("identifiedBy", "^022..") @utils.ignore_value def marc21_to_identified_by_from_field_022(self, key, value): """Get identifier from field 022.""" do_identified_by_from_field_022(self, value) -@marc21.over('identifiedBy', '^024..') +@marc21.over("identifiedBy", "^024..") @utils.ignore_value def marc21_to_identified_by_from_field_024(self, key, value): """Get identifier from field 024.""" do_identified_by_from_field_024(self, marc21, key, value) -@marc21.over('identifiedBy', '^028..') +@marc21.over("identifiedBy", "^028..") @utils.ignore_value def marc21_to_identified_by_from_field_028(self, key, value): """Get identifier from field 028.""" do_identified_by_from_field_028(self, marc21, key, value) -@marc21.over('identifiedBy', '^035..') +@marc21.over("identifiedBy", "^035..") @utils.ignore_value def marc21_to_identified_by_from_field_035(self, key, value): """Get identifier from field 035.""" do_identified_by_from_field_035(self, marc21, key, value) -@marc21.over('acquisitionTerms', '^037..') +@marc21.over("acquisitionTerms", "^037..") @utils.ignore_value def marc21_to_acquisition_terms_from_field_037(self, key, value): """Get acquisition terms field 037.""" do_acquisition_terms_from_field_037(self, value) -@marc21.over('electronicLocator', '^856..') +@marc21.over("electronicLocator", "^856..") @utils.ignore_value def marc21_to_electronicLocator_from_field_856(self, key, value): """Get electronicLocator from field 856.""" - electronic_locators = do_electronic_locator_from_field_856( - self, marc21, key, value) + electronic_locators = do_electronic_locator_from_field_856(self, marc21, key, value) return electronic_locators or None -@marc21.over('note', '^(500|510|530|545|555|580)..') +@marc21.over("note", "^(500|510|530|545|555|580)..") @utils.for_each_value @utils.ignore_value def marc21_to_notes_and_original_title(self, key, value): @@ -299,7 +325,7 @@ def marc21_to_notes_and_original_title(self, key, value): do_notes_and_original_title(self, key, value) -@marc21.over('credits', '^(508|511)..') +@marc21.over("credits", "^(508|511)..") @utils.for_each_value @utils.ignore_value def marc21_to_credits(self, key, value): @@ -307,16 +333,16 @@ def marc21_to_credits(self, key, value): return do_credits(key, value) -@marc21.over('supplementaryContent', '^504..') +@marc21.over("supplementaryContent", "^504..") @utils.for_each_value @utils.ignore_value def marc21_to_supplementary_content(self, key, value): """Get notes and original title.""" - if value.get('a'): - return utils.force_list(value.get('a'))[0] + if value.get("a"): + return utils.force_list(value.get("a"))[0] -@marc21.over('subjects', '^(600|610|611|630|650|651|655)..') +@marc21.over("subjects", "^(600|610|611|630|650|651|655)..") @utils.for_each_value @utils.ignore_value def marc21_to_subjects_6XX(self, key, value): @@ -328,124 +354,128 @@ def marc21_to_subjects_6XX(self, key, value): subjects_imported : for 6xx having indicator 2 '0' or '2' """ type_per_tag = { - '600': EntityType.PERSON, - '610': EntityType.ORGANISATION, - '611': EntityType.ORGANISATION, - '600t': EntityType.WORK, - '610t': EntityType.WORK, - '611t': EntityType.WORK, - '630': EntityType.WORK, - '650': EntityType.TOPIC, # or bf:Temporal, changed by code - '651': EntityType.PLACE, - '655': EntityType.TOPIC + "600": EntityType.PERSON, + "610": EntityType.ORGANISATION, + "611": EntityType.ORGANISATION, + "600t": EntityType.WORK, + "610t": EntityType.WORK, + 
"611t": EntityType.WORK, + "630": EntityType.WORK, + "650": EntityType.TOPIC, # or bf:Temporal, changed by code + "651": EntityType.PLACE, + "655": EntityType.TOPIC, } - source_per_indicator_2 = { - '0': 'LCSH', - '2': 'MeSH' - } + source_per_indicator_2 = {"0": "LCSH", "2": "MeSH"} indicator_2 = key[4] tag_key = key[:3] - subfields_2 = utils.force_list(value.get('2')) + subfields_2 = utils.force_list(value.get("2")) subfield_2 = subfields_2[0] if subfields_2 else None - subfields_a = utils.force_list(value.get('a', [])) + subfields_a = utils.force_list(value.get("a", [])) # Try to get RERO_ILS_IMPORT_6XX_TARGET_ATTRIBUTE from current app # In the dojson cli is no current app and we have to get the value directly # from config.py try: config_field_key = current_app.config.get( - 'RERO_ILS_IMPORT_6XX_TARGET_ATTRIBUTE', - 'subjects_imported' + "RERO_ILS_IMPORT_6XX_TARGET_ATTRIBUTE", "subjects_imported" ) except Exception: - from rero_ils.config import \ - RERO_ILS_IMPORT_6XX_TARGET_ATTRIBUTE as config_field_key + from rero_ils.config import ( + RERO_ILS_IMPORT_6XX_TARGET_ATTRIBUTE as config_field_key, + ) - if subfield_2 in ['rero', 'gnd', 'idref']: - if tag_key in ['600', '610', '611'] and value.get('t'): - tag_key += 't' + if subfield_2 in ["rero", "gnd", "idref"]: + if tag_key in ["600", "610", "611"] and value.get("t"): + tag_key += "t" data_type = type_per_tag[tag_key] # `data_type` is Temporal if tag is 650 and a $a start with digit. - if tag_key == '650': + if tag_key == "650": for subfield_a in subfields_a: if subfield_a[0].isdigit(): data_type = EntityType.TEMPORAL break subject = { - 'type': data_type, + "type": data_type, } subfield_code_per_tag = { - '600': 'abcd', - '610': 'ab', - '611': 'acden', - '600t': 'tpn', - '610t': 'tpn', - '611t': 't', - '630': 'apn', - '650': 'a', - '651': 'a', - '655': 'a' + "600": "abcd", + "610": "ab", + "611": "acden", + "600t": "tpn", + "610t": "tpn", + "611t": "t", + "630": "apn", + "650": "a", + "651": "a", + "655": "a", } string_build = build_string_from_subfields( - value, subfield_code_per_tag[tag_key]) - if tag_key == '655': + value, subfield_code_per_tag[tag_key] + ) + if tag_key == "655": # remove the square brackets - string_build = re.sub(r'^\[(.*)\]$', r'\1', string_build) - subject['authorized_access_point'] = string_build + string_build = re.sub(r"^\[(.*)\]$", r"\1", string_build) + subject["authorized_access_point"] = string_build - conference_per_tag = { - '610': False, - '611': True - } + conference_per_tag = {"610": False, "611": True} # if tag_key in ['610', '611']: # subject['conference'] = conference_per_tag[tag_key] # elif tag_key in ['600t', '610t', '611t']: - if tag_key in ['600t', '610t', '611t']: + if tag_key in ["600t", "610t", "611t"]: creator_tag_key = tag_key[:3] # to keep only tag: 600, 610, 611 creator = remove_trailing_punctuation( build_string_from_subfields( - value, subfield_code_per_tag[creator_tag_key]), '.', '.') + value, subfield_code_per_tag[creator_tag_key] + ), + ".", + ".", + ) if creator: - subject['authorized_access_point'] = \ + subject["authorized_access_point"] = ( f'{creator}. 
{subject["authorized_access_point"]}' - field_key = 'genreForm' if tag_key == '655' else config_field_key - if field_key != 'subjects_imported' and (ref := get_mef_link( - bibid=marc21.bib_id, - reroid=marc21.rero_id, - entity_type=data_type, - ids=utils.force_list(value.get('0')), - key=key - )): - subject = { - '$ref': ref - } + ) + field_key = "genreForm" if tag_key == "655" else config_field_key + if field_key != "subjects_imported" and ( + ref := get_mef_link( + bibid=marc21.bib_id, + reroid=marc21.rero_id, + entity_type=data_type, + ids=utils.force_list(value.get("0")), + key=key, + ) + ): + subject = {"$ref": ref} else: if identifier := build_identifier(value): - sub_2 = next(iter(utils.force_list(value.get('2') or [])), '') - if data_type == EntityType.TOPIC and sub_2.lower() == 'rero': - identifier['type'] = 'RERO' - subject['identifiedBy'] = identifier - if field_key != 'genreForm': + sub_2 = next(iter(utils.force_list(value.get("2") or [])), "") + if data_type == EntityType.TOPIC and sub_2.lower() == "rero": + identifier["type"] = "RERO" + subject["identifiedBy"] = identifier + if field_key != "genreForm": perform_subdivisions(subject, value) - if subject.get('$ref') or subject.get('authorized_access_point'): + if subject.get("$ref") or subject.get("authorized_access_point"): self.setdefault(field_key, []).append(dict(entity=subject)) - elif subfield_2 == 'rerovoc' or indicator_2 in ['0', '2']: + elif subfield_2 == "rerovoc" or indicator_2 in ["0", "2"]: term_string = build_string_from_subfields( - value, 'abcdefghijklmnopqrstuw', ' - ') + value, "abcdefghijklmnopqrstuw", " - " + ) if term_string: - source = 'rerovoc' if subfield_2 == 'rerovoc' \ + source = ( + "rerovoc" + if subfield_2 == "rerovoc" else source_per_indicator_2[indicator_2] + ) data = { - 'type': type_per_tag[tag_key], - 'source': source, - 'authorized_access_point': term_string + "type": type_per_tag[tag_key], + "source": source, + "authorized_access_point": term_string, } perform_subdivisions(data, value) @@ -453,7 +483,7 @@ def marc21_to_subjects_6XX(self, key, value): self.setdefault(config_field_key, []).append(dict(entity=data)) -@marc21.over('sequence_numbering', '^362..') +@marc21.over("sequence_numbering", "^362..") @utils.for_each_value @utils.ignore_value def marc21_to_sequence_numbering(self, key, value): @@ -461,7 +491,7 @@ def marc21_to_sequence_numbering(self, key, value): do_sequence_numbering(self, value) -@marc21.over('classification', '^(050|060|080|082)..') +@marc21.over("classification", "^(050|060|080|082)..") @utils.for_each_value @utils.ignore_value def marc21_to_classification(self, key, value): @@ -469,7 +499,7 @@ def marc21_to_classification(self, key, value): do_classification(self, key, value) -@marc21.over('part_of', '^(773|800|830)..') +@marc21.over("part_of", "^(773|800|830)..") @utils.for_each_value @utils.ignore_value def marc21_to_part_of(self, key, value): @@ -497,7 +527,7 @@ def marc21_to_part_of(self, key, value): do_part_of(self, marc21, key, value) -@marc21.over('work_access_point', '(^130..|^730..)') +@marc21.over("work_access_point", "(^130..|^730..)") @utils.for_each_value @utils.ignore_value def marc21_to_work_access_point(self, key, value): @@ -505,7 +535,7 @@ def marc21_to_work_access_point(self, key, value): return do_work_access_point(marc21, key, value) -@marc21.over('work_access_point', '(^240..)') +@marc21.over("work_access_point", "(^240..)") @utils.for_each_value @utils.ignore_value def marc21_to_work_access_point_240(self, key, value): @@ -513,7 +543,7 @@ def 
marc21_to_work_access_point_240(self, key, value): return do_work_access_point_240(marc21, key, value) -@marc21.over('scale_cartographicAttributes', '^255..') +@marc21.over("scale_cartographicAttributes", "^255..") @utils.for_each_value @utils.ignore_value def marc21_to_scale_cartographic_attributes(self, key, value): @@ -521,7 +551,7 @@ def marc21_to_scale_cartographic_attributes(self, key, value): do_scale_and_cartographic(self, marc21, key, value) -@marc21.over('temporalCoverage', '^045..') +@marc21.over("temporalCoverage", "^045..") @utils.for_each_value @utils.ignore_value def marc21_to_temporal_coverage(self, key, value): diff --git a/rero_ils/modules/documents/dojson/contrib/marc21tojson/ugent/__init__.py b/rero_ils/modules/documents/dojson/contrib/marc21tojson/ugent/__init__.py index a3b4d2d419..e713aebbba 100644 --- a/rero_ils/modules/documents/dojson/contrib/marc21tojson/ugent/__init__.py +++ b/rero_ils/modules/documents/dojson/contrib/marc21tojson/ugent/__init__.py @@ -20,4 +20,4 @@ from .model import marc21 -__all__ = ('marc21') +__all__ = "marc21" diff --git a/rero_ils/modules/documents/dojson/contrib/marc21tojson/ugent/model.py b/rero_ils/modules/documents/dojson/contrib/marc21tojson/ugent/model.py index d91b300da9..2ebd99f058 100644 --- a/rero_ils/modules/documents/dojson/contrib/marc21tojson/ugent/model.py +++ b/rero_ils/modules/documents/dojson/contrib/marc21tojson/ugent/model.py @@ -21,39 +21,59 @@ from dojson import utils from flask import current_app -from rero_ils.dojson.utils import ReroIlsMarc21Overdo, \ - build_string_from_subfields +from rero_ils.dojson.utils import ReroIlsMarc21Overdo, build_string_from_subfields from rero_ils.modules.documents.models import DocumentFictionType from rero_ils.modules.entities.models import EntityType -from ..utils import do_abbreviated_title, \ - do_acquisition_terms_from_field_037, do_classification, do_contribution, \ - do_copyright_date, do_dissertation, do_edition_statement, \ - do_electronic_locator_from_field_856, do_frequency_field_310_321, \ - do_identified_by_from_field_010, do_identified_by_from_field_020, \ - do_identified_by_from_field_022, do_identified_by_from_field_024, \ - do_identified_by_from_field_028, do_identified_by_from_field_035, \ - do_intended_audience, do_issuance, do_language, \ - do_notes_and_original_title, do_part_of, do_provision_activity, \ - do_scale_and_cartographic, do_sequence_numbering, \ - do_specific_document_relation, do_summary, do_table_of_contents, \ - do_temporal_coverage, do_title, do_type, \ - do_usage_and_access_policy_from_field_506_540, do_work_access_point, \ - do_work_access_point_240, perform_subdivisions +from ..utils import ( + do_abbreviated_title, + do_acquisition_terms_from_field_037, + do_classification, + do_contribution, + do_copyright_date, + do_dissertation, + do_edition_statement, + do_electronic_locator_from_field_856, + do_frequency_field_310_321, + do_identified_by_from_field_010, + do_identified_by_from_field_020, + do_identified_by_from_field_022, + do_identified_by_from_field_024, + do_identified_by_from_field_028, + do_identified_by_from_field_035, + do_intended_audience, + do_issuance, + do_language, + do_notes_and_original_title, + do_part_of, + do_provision_activity, + do_scale_and_cartographic, + do_sequence_numbering, + do_specific_document_relation, + do_summary, + do_table_of_contents, + do_temporal_coverage, + do_title, + do_type, + do_usage_and_access_policy_from_field_506_540, + do_work_access_point, + do_work_access_point_240, + perform_subdivisions, 
+) marc21 = ReroIlsMarc21Overdo() -@marc21.over('issuance', 'leader') +@marc21.over("issuance", "leader") @utils.ignore_value def marc21_to_type_and_issuance(self, key, value): """Get document type, content/Media/Carrier type and mode of issuance.""" do_issuance(self, marc21) do_type(self, marc21) - self['fiction_statement'] = DocumentFictionType.Unspecified.value + self["fiction_statement"] = DocumentFictionType.Unspecified.value -@marc21.over('language', '^008') +@marc21.over("language", "^008") @utils.ignore_value def marc21_to_language(self, key, value): """Get languages. @@ -62,15 +82,15 @@ def marc21_to_language(self, key, value): """ language = do_language(self, marc21) # is fiction - self['fiction_statement'] = DocumentFictionType.Unspecified.value - if value[33] in ['1', 'd', 'f', 'j', 'p']: - self['fiction_statement'] = DocumentFictionType.Fiction.value - elif value[33] in ['0', 'e', 'h', 'i', 's']: - self['fiction_statement'] = DocumentFictionType.NonFiction.value + self["fiction_statement"] = DocumentFictionType.Unspecified.value + if value[33] in ["1", "d", "f", "j", "p"]: + self["fiction_statement"] = DocumentFictionType.Fiction.value + elif value[33] in ["0", "e", "h", "i", "s"]: + self["fiction_statement"] = DocumentFictionType.NonFiction.value return language or None -@marc21.over('title', '(^210|^222)....') +@marc21.over("title", "(^210|^222)....") @utils.ignore_value def marc21_to_abbreviated_title(self, key, value): """Get abbreviated title data.""" @@ -78,7 +98,7 @@ def marc21_to_abbreviated_title(self, key, value): return title_list or None -@marc21.over('title', '^245..') +@marc21.over("title", "^245..") @utils.ignore_value def marc21_to_title(self, key, value): """Get title data.""" @@ -86,7 +106,7 @@ def marc21_to_title(self, key, value): return title_list or None -@marc21.over('contribution', '(^100|^700|^710|^711)..') +@marc21.over("contribution", "(^100|^700|^710|^711)..") @utils.for_each_value @utils.ignore_value def marc21_to_contribution(self, key, value): @@ -94,7 +114,7 @@ def marc21_to_contribution(self, key, value): return do_contribution(self, marc21, key, value) -@marc21.over('relation', '(770|772|775|776|777|780|785|787|533|534)..') +@marc21.over("relation", "(770|772|775|776|777|780|785|787|533|534)..") @utils.for_each_value @utils.ignore_value def marc21_to_specific_document_relation(self, key, value): @@ -102,7 +122,7 @@ def marc21_to_specific_document_relation(self, key, value): do_specific_document_relation(self, marc21, key, value) -@marc21.over('copyrightDate', '^26[04].4') +@marc21.over("copyrightDate", "^26[04].4") @utils.ignore_value def marc21_to_copyright_date(self, key, value): """Get Copyright Date.""" @@ -110,7 +130,7 @@ def marc21_to_copyright_date(self, key, value): return copyright_dates or None -@marc21.over('editionStatement', '^250..') +@marc21.over("editionStatement", "^250..") @utils.for_each_value @utils.ignore_value def marc21_to_edition_statement(self, key, value): @@ -123,7 +143,7 @@ def marc21_to_edition_statement(self, key, value): return edition_data or None -@marc21.over('provisionActivity', '^26[04].[_0-3]') +@marc21.over("provisionActivity", "^26[04].[_0-3]") @utils.for_each_value @utils.ignore_value def marc21_to_provision_activity(self, key, value): @@ -137,7 +157,7 @@ def marc21_to_provision_activity(self, key, value): return publication or None -@marc21.over('extent', '^300..') +@marc21.over("extent", "^300..") @utils.ignore_value def marc21_to_description(self, key, value): """Get physical description. 
@@ -160,7 +180,7 @@ def marc21_to_description(self, key, value):
     marc21.extract_description_from_marc_field(key, value, self)


-@marc21.over('seriesStatement', '^490..')
+@marc21.over("seriesStatement", "^490..")
 @utils.for_each_value
 @utils.ignore_value
 def marc21_to_series_statement(self, key, value):
@@ -172,7 +192,7 @@ def marc21_to_series_statement(self, key, value):
     marc21.extract_series_statement_from_marc_field(key, value, self)


-@marc21.over('tableOfContents', '^505..')
+@marc21.over("tableOfContents", "^505..")
 @utils.for_each_value
 @utils.ignore_value
 def marc21_to_table_of_contents(self, key, value):
@@ -180,7 +200,7 @@ def marc21_to_table_of_contents(self, key, value):
     do_table_of_contents(self, value)


-@marc21.over('usageAndAccessPolicy', '^(506|540)..')
+@marc21.over("usageAndAccessPolicy", "^(506|540)..")
 @utils.for_each_value
 @utils.ignore_value
 def marc21_to_usage_and_access_policy_from_field_506_540(self, key, value):
@@ -188,7 +208,7 @@ def marc21_to_usage_and_access_policy_from_field_506_540(self, key, value):
     return do_usage_and_access_policy_from_field_506_540(marc21, key, value)


-@marc21.over('frequency', '^(310|321)..')
+@marc21.over("frequency", "^(310|321)..")
 @utils.for_each_value
 @utils.ignore_value
 def marc21_to_frequency_field_310_321(self, key, value):
@@ -196,7 +216,7 @@ def marc21_to_frequency_field_310_321(self, key, value):
     return do_frequency_field_310_321(marc21, key, value)


-@marc21.over('dissertation', '^502..')
+@marc21.over("dissertation", "^502..")
 @utils.for_each_value
 @utils.ignore_value
 def marc21_to_dissertation(self, key, value):
@@ -205,7 +225,7 @@ def marc21_to_dissertation(self, key, value):
     return do_dissertation(marc21, value)


-@marc21.over('summary', '^520..')
+@marc21.over("summary", "^520..")
 @utils.for_each_value
 @utils.ignore_value
 def marc21_to_abstract(self, key, value):
@@ -213,7 +233,7 @@ def marc21_to_abstract(self, key, value):
     return do_summary(marc21, value)


-@marc21.over('intendedAudience', '^521..')
+@marc21.over("intendedAudience", "^521..")
 @utils.for_each_value
 @utils.ignore_value
 def marc21_to_intended_audience(self, key, value):
@@ -221,56 +241,56 @@ def marc21_to_intended_audience(self, key, value):
     do_intended_audience(self, value)


-@marc21.over('identifiedBy', '^010..')
+@marc21.over("identifiedBy", "^010..")
 @utils.ignore_value
 def marc21_to_identified_by_from_field_010(self, key, value):
     """Get identifier from field 010."""
     do_identified_by_from_field_010(self, marc21, key, value)


-@marc21.over('identifiedBy', '^020..')
+@marc21.over("identifiedBy", "^020..")
 @utils.ignore_value
 def marc21_to_identified_by_from_field_020(self, key, value):
     """Get identifier from field 020."""
     do_identified_by_from_field_020(self, marc21, key, value)


-@marc21.over('identifiedBy', '^022..')
+@marc21.over("identifiedBy", "^022..")
 @utils.ignore_value
 def marc21_to_identified_by_from_field_022(self, key, value):
     """Get identifier from field 022."""
     do_identified_by_from_field_022(self, value)


-@marc21.over('identifiedBy', '^024..')
+@marc21.over("identifiedBy", "^024..")
 @utils.ignore_value
 def marc21_to_identified_by_from_field_024(self, key, value):
     """Get identifier from field 024."""
     do_identified_by_from_field_024(self, marc21, key, value)


-@marc21.over('identifiedBy', '^028..')
+@marc21.over("identifiedBy", "^028..")
 @utils.ignore_value
 def marc21_to_identified_by_from_field_028(self, key, value):
     """Get identifier from field 028."""
     do_identified_by_from_field_028(self, marc21, key, value)


-@marc21.over('identifiedBy', '^035..')
+@marc21.over("identifiedBy", "^035..")
 @utils.ignore_value
 def marc21_to_identified_by_from_field_035(self, key, value):
     """Get identifier from field 035."""
     do_identified_by_from_field_035(self, marc21, key, value)


-@marc21.over('acquisitionTerms', '^037..')
+@marc21.over("acquisitionTerms", "^037..")
 @utils.ignore_value
 def marc21_to_acquisition_terms_from_field_037(self, key, value):
     """Get acquisition terms field 037."""
     do_acquisition_terms_from_field_037(self, value)


-@marc21.over('note', '^(500|510|530|545|555|580)..')
+@marc21.over("note", "^(500|510|530|545|555|580)..")
 @utils.for_each_value
 @utils.ignore_value
 def marc21_to_notes_and_original_title(self, key, value):
@@ -278,31 +298,31 @@ def marc21_to_notes_and_original_title(self, key, value):
     do_notes_and_original_title(self, key, value)


-@marc21.over('credits', '^(508|511)..')
+@marc21.over("credits", "^(508|511)..")
 @utils.for_each_value
 @utils.ignore_value
 def marc21_to_credits(self, key, value):
     """Get notes and original title."""
     subfield_a = None
-    if value.get('a'):
-        subfield_a = utils.force_list(value.get('a'))[0]
-        if key[:3] == '511':
-            subfield_a = f'Participants ou interprètes: {subfield_a}'
-        credits = self.get('credits', [])
+    if value.get("a"):
+        subfield_a = utils.force_list(value.get("a"))[0]
+        if key[:3] == "511":
+            subfield_a = f"Participants ou interprètes: {subfield_a}"
+        credits = self.get("credits", [])
         credits.append(subfield_a)
-        self['credits'] = credits
+        self["credits"] = credits


-@marc21.over('supplementaryContent', '^504..')
+@marc21.over("supplementaryContent", "^504..")
 @utils.for_each_value
 @utils.ignore_value
 def marc21_to_supplementary_content(self, key, value):
     """Get notes and original title."""
-    if value.get('a'):
-        return utils.force_list(value.get('a'))[0]
+    if value.get("a"):
+        return utils.force_list(value.get("a"))[0]


-@marc21.over('subjects', '^(600|610|611|630|650|651|655)..')
+@marc21.over("subjects", "^(600|610|611|630|650|651|655)..")
 @utils.for_each_value
 @utils.ignore_value
 def marc21_to_subjects_6XX(self, key, value):
@@ -314,61 +334,56 @@ def marc21_to_subjects_6XX(self, key, value):
     subjects_imported : for 6xx having indicator 2 '0' or '2'
     """
     type_per_tag = {
-        '600': EntityType.PERSON,
-        '610': EntityType.ORGANISATION,
-        '611': EntityType.ORGANISATION,
-        '600t': EntityType.WORK,
-        '610t': EntityType.WORK,
-        '611t': EntityType.WORK,
-        '630': EntityType.WORK,
-        '650': EntityType.TOPIC,  # or bf:Temporal, changed by code
-        '651': EntityType.PLACE,
-        '655': EntityType.TOPIC
+        "600": EntityType.PERSON,
+        "610": EntityType.ORGANISATION,
+        "611": EntityType.ORGANISATION,
+        "600t": EntityType.WORK,
+        "610t": EntityType.WORK,
+        "611t": EntityType.WORK,
+        "630": EntityType.WORK,
+        "650": EntityType.TOPIC,  # or bf:Temporal, changed by code
+        "651": EntityType.PLACE,
+        "655": EntityType.TOPIC,
     }
-    conference_per_tag = {
-        '610': False,
-        '611': True
-    }
-    source_per_indicator_2 = {
-        '7': 'LCSH',
-        '2': 'MeSH'
-    }
+    conference_per_tag = {"610": False, "611": True}
+    source_per_indicator_2 = {"7": "LCSH", "2": "MeSH"}

     indicator_2 = key[4]
     tag_key = key[:3]
-    subfields_2 = utils.force_list(value.get('2'))
+    subfields_2 = utils.force_list(value.get("2"))
     subfield_2 = subfields_2[0] if subfields_2 else None
     # Try to get RERO_ILS_IMPORT_6XX_TARGET_ATTRIBUTE from current app
     # In the dojson cli is no current app and we have to get the value directly
     # from config.py
     try:
         config_field_key = current_app.config.get(
-            'RERO_ILS_IMPORT_6XX_TARGET_ATTRIBUTE',
-            'subjects_imported'
+            "RERO_ILS_IMPORT_6XX_TARGET_ATTRIBUTE", "subjects_imported"
         )
     except Exception:
-        from rero_ils.config import \
-            RERO_ILS_IMPORT_6XX_TARGET_ATTRIBUTE as config_field_key
+        from rero_ils.config import (
+            RERO_ILS_IMPORT_6XX_TARGET_ATTRIBUTE as config_field_key,
+        )

-    if subfield_2 == 'lcsh' or indicator_2 in ['0', '2', '7']:
+    if subfield_2 == "lcsh" or indicator_2 in ["0", "2", "7"]:
         term_string = build_string_from_subfields(
-            value, 'abcdefghijklmnopqrstuw', ' - ')
+            value, "abcdefghijklmnopqrstuw", " - "
+        )
         if term_string:
-            source = 'LCSH' if subfield_2 == 'lcsh' else \
-                source_per_indicator_2[indicator_2]
+            source = (
+                "LCSH" if subfield_2 == "lcsh" else source_per_indicator_2[indicator_2]
+            )
             data = {
-                'type': type_per_tag[tag_key],
-                'source': source,
-                'authorized_access_point': term_string.rstrip('.')
+                "type": type_per_tag[tag_key],
+                "source": source,
+                "authorized_access_point": term_string.rstrip("."),
             }
             perform_subdivisions(data, value)
             if data:
-                self.setdefault(config_field_key, []).append(
-                    dict(entity=data))
+                self.setdefault(config_field_key, []).append(dict(entity=data))


-@marc21.over('sequence_numbering', '^362..')
+@marc21.over("sequence_numbering", "^362..")
 @utils.for_each_value
 @utils.ignore_value
 def marc21_to_sequence_numbering(self, key, value):
@@ -376,7 +391,7 @@ def marc21_to_sequence_numbering(self, key, value):
     do_sequence_numbering(self, value)


-@marc21.over('classification', '^(050|060|080|082)..')
+@marc21.over("classification", "^(050|060|080|082)..")
 @utils.for_each_value
 @utils.ignore_value
 def marc21_to_classification(self, key, value):
@@ -384,16 +399,15 @@ def marc21_to_classification(self, key, value):
     do_classification(self, key, value)


-@marc21.over('electronicLocator', '^856..')
+@marc21.over("electronicLocator", "^856..")
 @utils.ignore_value
 def marc21_to_electronicLocator_from_field_856(self, key, value):
     """Get electronicLocator from field 856."""
-    electronic_locators = do_electronic_locator_from_field_856(
-        self, marc21, key, value)
+    electronic_locators = do_electronic_locator_from_field_856(self, marc21, key, value)
     return electronic_locators or None


-@marc21.over('part_of', '^(773|800|830)..')
+@marc21.over("part_of", "^(773|800|830)..")
 @utils.for_each_value
 @utils.ignore_value
 def marc21_to_part_of(self, key, value):
@@ -421,7 +435,7 @@ def marc21_to_part_of(self, key, value):
     do_part_of(self, marc21, key, value)


-@marc21.over('work_access_point', '(^130..|^700.2|^710.2|^730..)')
+@marc21.over("work_access_point", "(^130..|^700.2|^710.2|^730..)")
 @utils.for_each_value
 @utils.ignore_value
 def marc21_to_work_access_point(self, key, value):
@@ -429,7 +443,7 @@ def marc21_to_work_access_point(self, key, value):
     return do_work_access_point(marc21, key, value)


-@marc21.over('work_access_point', '(^240..)')
+@marc21.over("work_access_point", "(^240..)")
 @utils.for_each_value
 @utils.ignore_value
 def marc21_to_work_access_point_240(self, key, value):
@@ -437,7 +451,7 @@ def marc21_to_work_access_point_240(self, key, value):
     return do_work_access_point_240(marc21, key, value)


-@marc21.over('scale_cartographicAttributes', '^255..')
+@marc21.over("scale_cartographicAttributes", "^255..")
 @utils.for_each_value
 @utils.ignore_value
 def marc21_to_scale_cartographic_attributes(self, key, value):
@@ -445,7 +459,7 @@ def marc21_to_scale_cartographic_attributes(self, key, value):
     do_scale_and_cartographic(self, marc21, key, value)


-@marc21.over('temporalCoverage', '^045..')
+@marc21.over("temporalCoverage", "^045..")
 @utils.for_each_value
@utils.ignore_value def marc21_to_temporal_coverage(self, key, value): diff --git a/rero_ils/modules/documents/dojson/contrib/marc21tojson/utils.py b/rero_ils/modules/documents/dojson/contrib/marc21tojson/utils.py index 722f36f839..d5fb6ed54d 100644 --- a/rero_ils/modules/documents/dojson/contrib/marc21tojson/utils.py +++ b/rero_ils/modules/documents/dojson/contrib/marc21tojson/utils.py @@ -27,316 +27,449 @@ from dojson.utils import GroupableOrderedDict from iso639 import find -from rero_ils.dojson.utils import _LANGUAGES, TitlePartList, add_note, \ - build_identifier, build_responsibility_data, build_string_from_subfields, \ - error_print, extract_subtitle_and_parallel_titles_from_field_245_b, \ - get_field_items, get_field_link_data, get_mef_link, not_repetitive, \ - re_identified, remove_trailing_punctuation +from rero_ils.dojson.utils import ( + _LANGUAGES, + TitlePartList, + add_note, + build_identifier, + build_responsibility_data, + build_string_from_subfields, + error_print, + extract_subtitle_and_parallel_titles_from_field_245_b, + get_field_items, + get_field_link_data, + get_mef_link, + not_repetitive, + re_identified, + remove_trailing_punctuation, +) from rero_ils.modules.documents.utils import create_authorized_access_point from rero_ils.modules.entities.models import EntityType _DOCUMENT_RELATION_PER_TAG = { - '770': 'supplement', - '772': 'supplementTo', - '775': 'otherEdition', - '776': 'otherPhysicalFormat', - '777': 'issuedWith', - '780': 'precededBy', - '785': 'succeededBy', - '787': 'relatedTo', - '533': 'hasReproduction', - '534': 'reproductionOf' + "770": "supplement", + "772": "supplementTo", + "775": "otherEdition", + "776": "otherPhysicalFormat", + "777": "issuedWith", + "780": "precededBy", + "785": "succeededBy", + "787": "relatedTo", + "533": "hasReproduction", + "534": "reproductionOf", } -_REPRODUCTION_SUBFIELDS_PER_TAG = { - '533': 'abcdemn', - '534': 'cep' -} +_REPRODUCTION_SUBFIELDS_PER_TAG = {"533": "abcdemn", "534": "cep"} _ISSUANCE_MAIN_TYPE_PER_BIB_LEVEL = { - 'a': 'rdami:1001', - 'b': 'rdami:1003', - 'c': 'rdami:1001', - 'd': 'rdami:1001', - 'i': 'rdami:1004', - 'm': 'rdami:1001', # rdami:1002 if top_level record - 's': 'rdami:1003' + "a": "rdami:1001", + "b": "rdami:1003", + "c": "rdami:1001", + "d": "rdami:1001", + "i": "rdami:1004", + "m": "rdami:1001", # rdami:1002 if top_level record + "s": "rdami:1003", } _ISSUANCE_SUBTYPE_PER_BIB_LEVEL = { - 'a': 'article', - 'b': 'serialInSerial', - 'c': 'privateFile', - 'd': 'privateSubfile' + "a": "article", + "b": "serialInSerial", + "c": "privateFile", + "d": "privateSubfile", } _ISSUANCE_SUBTYPE_PER_SERIAL_TYPE = { - 'd': 'updatingWebsite', - 'w': 'updatingWebsite', - 'l': 'updatingLoose-leaf', - 'm': 'monographicSeries', - 'p': 'periodical' + "d": "updatingWebsite", + "w": "updatingWebsite", + "l": "updatingLoose-leaf", + "m": "monographicSeries", + "p": "periodical", } _CONTRIBUTION_ROLE = [ - 'aut', 'cmp', 'ctb', 'edt', 'hnr', 'ill', 'pht', 'prf', 'trl', 'abr', - 'act', 'adi', 'adp', 'aft', 'anm', 'ann', 'ape', 'apl', 'aqt', 'arc', - 'arr', 'art', 'ato', 'auc', 'aui', 'aus', 'bkd', 'bnd', 'brd', 'brl', - 'bsl', 'cas', 'chr', 'cll', 'clr', 'clt', 'cwt', 'cmm', 'cnd', 'cng', - 'cns', 'col', 'com', 'cor', 'cou', 'cre', 'crt', 'csl', 'cst', 'ctg', - 'ctr', 'cur', 'dfd', 'dgg', 'dgs', 'dnc', 'dnr', 'dpt', 'drm', 'drt', - 'dsr', 'dst', 'dte', 'dto', 'dub', 'edm', 'egr', 'enj', 'etr', 'exp', - 'fac', 'fds', 'fmd', 'fmk', 'fmo', 'fmp', 'his', 'hst', 'ill', 'ilu', - 'ins', 'inv', 'isb', 'itr', 'ive', 'ivr', 
'jud', 'jug', 'lbt', 'lgd', - 'lsa', 'ltg', 'lyr', 'med', 'mfr', 'mod', 'msd', 'mtk', 'mus', 'nrt', - 'orm', 'osp', 'oth', 'own', 'pan', 'pat', 'pbd', 'pbl', 'plt', 'ppm', - 'ppt', 'pra', 'pre', 'prg', 'prm', 'prn', 'pro', 'prs', 'prt', 'ptf', - 'rcd', 'rce', 'rcp', 'rdd', 'res', 'rpc', 'rsp', 'rsr', 'scl', 'sds', - 'sgd', 'sll', 'sng', 'spk', 'spn', 'srv', 'stl', 'tch', 'tld', 'tlp', - 'trc', 'vac', 'vdg', 'wac', 'wal', 'wat', 'win', 'wpr', 'wst' + "aut", + "cmp", + "ctb", + "edt", + "hnr", + "ill", + "pht", + "prf", + "trl", + "abr", + "act", + "adi", + "adp", + "aft", + "anm", + "ann", + "ape", + "apl", + "aqt", + "arc", + "arr", + "art", + "ato", + "auc", + "aui", + "aus", + "bkd", + "bnd", + "brd", + "brl", + "bsl", + "cas", + "chr", + "cll", + "clr", + "clt", + "cwt", + "cmm", + "cnd", + "cng", + "cns", + "col", + "com", + "cor", + "cou", + "cre", + "crt", + "csl", + "cst", + "ctg", + "ctr", + "cur", + "dfd", + "dgg", + "dgs", + "dnc", + "dnr", + "dpt", + "drm", + "drt", + "dsr", + "dst", + "dte", + "dto", + "dub", + "edm", + "egr", + "enj", + "etr", + "exp", + "fac", + "fds", + "fmd", + "fmk", + "fmo", + "fmp", + "his", + "hst", + "ill", + "ilu", + "ins", + "inv", + "isb", + "itr", + "ive", + "ivr", + "jud", + "jug", + "lbt", + "lgd", + "lsa", + "ltg", + "lyr", + "med", + "mfr", + "mod", + "msd", + "mtk", + "mus", + "nrt", + "orm", + "osp", + "oth", + "own", + "pan", + "pat", + "pbd", + "pbl", + "plt", + "ppm", + "ppt", + "pra", + "pre", + "prg", + "prm", + "prn", + "pro", + "prs", + "prt", + "ptf", + "rcd", + "rce", + "rcp", + "rdd", + "res", + "rpc", + "rsp", + "rsr", + "scl", + "sds", + "sgd", + "sll", + "sng", + "spk", + "spn", + "srv", + "stl", + "tch", + "tld", + "tlp", + "trc", + "vac", + "vdg", + "wac", + "wal", + "wat", + "win", + "wpr", + "wst", ] _INTENDED_AUDIENCE_REGEXP = { # understanding_level - 'target_understanding_children': - re.compile(r'^(Enfants*|Kinder)$', re.IGNORECASE), - 'target_understanding_children_0_3': - re.compile( - r'(Enfants* \(0-3 ans\)|Kinder \(0-3 Jahre\))', re.IGNORECASE), - 'target_understanding_children_3_6': - re.compile( - r'(Enfants* \(3-6 ans\)|Kinder \(3-6 Jahre\))', re.IGNORECASE), - 'target_understanding_children_6_9': - re.compile( - r'(Enfants* \(6-9 ans\)|Kinder \(6-9 Jahre\))', re.IGNORECASE), - 'target_understanding_children_9_12': - re.compile( - r'(Enfants* \(9-12 ans\)|Kinder \(9-12 Jahre\))', re.IGNORECASE), - 'target_understanding_teenagers': - re.compile(r'^(Adolescents*|Jugendliche)$', re.IGNORECASE), - 'target_understanding_teenagers_12_15': - re.compile( - r'(Adolescents* \(12-15 ans\)|Jugendliche \(12-15 Jahre\))', - re.IGNORECASE), - 'target_understanding_teenagers_15_18': - re.compile( - r'(Adolescents* \(15-18 ans\)|Jugendliche \(15-18 Jahre\))', - re.IGNORECASE), - 'target_understanding_secondary_level_2': - re.compile( - r'(Degré secondaire 2|Weiterführende Schulen)', re.IGNORECASE), - 'target_understanding_tertiary': - re.compile(r'(Tertiaire|Tertiär)', re.IGNORECASE), - 'target_understanding_apprentices': - re.compile(r'(Apprentis*|Lehrlinge)', re.IGNORECASE), - 'target_understanding_bachelor_students': - re.compile( - r'(Etudiants* niveau Bachelor|Studierende Bachelor)', - re.IGNORECASE), - 'target_understanding_master_students': - re.compile( - r'(Etudiants* niveau Master|Studierende Master)', re.IGNORECASE), - 'target_understanding_doctoral_students': - re.compile(r'(Doctorants*|Doktoranden)', re.IGNORECASE), - 'target_understanding_beginners': - re.compile(r'(Débutants*|Anfänger)', re.IGNORECASE), - 
'target_understanding_intermediaries': - re.compile(r'(Intermédiaires*|Mittelstufe)', re.IGNORECASE), - 'target_understanding_advanced': - re.compile(r'(Avancés*|Fortgeschrittene)', re.IGNORECASE), - 'target_understanding_specialists': - re.compile(r'(Spécialistes*|Spezialisten)', re.IGNORECASE), - 'target_understanding_adults': - re.compile(r'^(Adultes*|Erwachsene)$', re.IGNORECASE), - 'target_understanding_allophone_adults': - re.compile( - r'(Adultes* allophones*|Fremdsprachige Erwachsene)', - re.IGNORECASE), - 'target_understanding_all_audience': - re.compile(r'(Tous publics*|Alle Zielgruppen)', re.IGNORECASE), - 'target_understanding_teachers_harmos_degree': - re.compile( - r'(Enseignants* \(degré Harmos\)|Lehrpersonen \(Harmos\))', - re.IGNORECASE), - 'target_understanding_teachers_secondary_level_2': - re.compile( - r'(Enseignants* Degré secondaire 2|Lehrpersonen Sek II)', - re.IGNORECASE), - 'target_understanding_hep_trainers': - re.compile(r'(Formateurs* HEP|PH-Dozierende)', re.IGNORECASE), - 'target_understanding_parents': - re.compile(r'(Parents*|Eltern)', re.IGNORECASE), - 'target_understanding_caregivers': - re.compile(r'(Soignants*|Pflegepersonal)', re.IGNORECASE), + "target_understanding_children": re.compile(r"^(Enfants*|Kinder)$", re.IGNORECASE), + "target_understanding_children_0_3": re.compile( + r"(Enfants* \(0-3 ans\)|Kinder \(0-3 Jahre\))", re.IGNORECASE + ), + "target_understanding_children_3_6": re.compile( + r"(Enfants* \(3-6 ans\)|Kinder \(3-6 Jahre\))", re.IGNORECASE + ), + "target_understanding_children_6_9": re.compile( + r"(Enfants* \(6-9 ans\)|Kinder \(6-9 Jahre\))", re.IGNORECASE + ), + "target_understanding_children_9_12": re.compile( + r"(Enfants* \(9-12 ans\)|Kinder \(9-12 Jahre\))", re.IGNORECASE + ), + "target_understanding_teenagers": re.compile( + r"^(Adolescents*|Jugendliche)$", re.IGNORECASE + ), + "target_understanding_teenagers_12_15": re.compile( + r"(Adolescents* \(12-15 ans\)|Jugendliche \(12-15 Jahre\))", re.IGNORECASE + ), + "target_understanding_teenagers_15_18": re.compile( + r"(Adolescents* \(15-18 ans\)|Jugendliche \(15-18 Jahre\))", re.IGNORECASE + ), + "target_understanding_secondary_level_2": re.compile( + r"(Degré secondaire 2|Weiterführende Schulen)", re.IGNORECASE + ), + "target_understanding_tertiary": re.compile(r"(Tertiaire|Tertiär)", re.IGNORECASE), + "target_understanding_apprentices": re.compile( + r"(Apprentis*|Lehrlinge)", re.IGNORECASE + ), + "target_understanding_bachelor_students": re.compile( + r"(Etudiants* niveau Bachelor|Studierende Bachelor)", re.IGNORECASE + ), + "target_understanding_master_students": re.compile( + r"(Etudiants* niveau Master|Studierende Master)", re.IGNORECASE + ), + "target_understanding_doctoral_students": re.compile( + r"(Doctorants*|Doktoranden)", re.IGNORECASE + ), + "target_understanding_beginners": re.compile( + r"(Débutants*|Anfänger)", re.IGNORECASE + ), + "target_understanding_intermediaries": re.compile( + r"(Intermédiaires*|Mittelstufe)", re.IGNORECASE + ), + "target_understanding_advanced": re.compile( + r"(Avancés*|Fortgeschrittene)", re.IGNORECASE + ), + "target_understanding_specialists": re.compile( + r"(Spécialistes*|Spezialisten)", re.IGNORECASE + ), + "target_understanding_adults": re.compile( + r"^(Adultes*|Erwachsene)$", re.IGNORECASE + ), + "target_understanding_allophone_adults": re.compile( + r"(Adultes* allophones*|Fremdsprachige Erwachsene)", re.IGNORECASE + ), + "target_understanding_all_audience": re.compile( + r"(Tous publics*|Alle Zielgruppen)", re.IGNORECASE + ), + 
"target_understanding_teachers_harmos_degree": re.compile( + r"(Enseignants* \(degré Harmos\)|Lehrpersonen \(Harmos\))", re.IGNORECASE + ), + "target_understanding_teachers_secondary_level_2": re.compile( + r"(Enseignants* Degré secondaire 2|Lehrpersonen Sek II)", re.IGNORECASE + ), + "target_understanding_hep_trainers": re.compile( + r"(Formateurs* HEP|PH-Dozierende)", re.IGNORECASE + ), + "target_understanding_parents": re.compile(r"(Parents*|Eltern)", re.IGNORECASE), + "target_understanding_caregivers": re.compile( + r"(Soignants*|Pflegepersonal)", re.IGNORECASE + ), # school_level - 'target_school_harmos1': - re.compile(r'(Harmos1|Kindergarten)', re.IGNORECASE), - 'target_school_harmos2': - re.compile(r'(Harmos2|Kindergarten)', re.IGNORECASE), - 'target_school_harmos3': - re.compile(r'(Harmos3|Primarschule \(1\.-2\. Kl\.\))', re.IGNORECASE), - 'target_school_harmos4': - re.compile(r'(Harmos4|Primarschule \(1\.-2\. Kl\.\))', re.IGNORECASE), - 'target_school_harmos5': - re.compile(r'(Harmos5|Primarschule \(3\.-4\. Kl\.\))', re.IGNORECASE), - 'target_school_harmos6': - re.compile(r'(Harmos6|Primarschule \(3\.-4\. Kl\.\))', re.IGNORECASE), - 'target_school_harmos7': - re.compile(r'(Harmos7|Primarschule \(5\.-6\. Kl\.\))', re.IGNORECASE), - 'target_school_harmos8': - re.compile(r'(Harmos8|Primarschule \(5\.-6\. Kl\.\))', re.IGNORECASE), - 'target_school_harmos9': - re.compile( - r'(Harmos9|Orientierungsschule \(7\.-9\. Kl\.\))', re.IGNORECASE), - 'target_school_harmos10': - re.compile( - r'(Harmos10|Orientierungsschule \(7\.-9\. Kl\.\))', re.IGNORECASE), - 'target_school_harmos11': - re.compile( - r'(Harmos11|Orientierungsschule \(7\.-9\. Kl\.\))', re.IGNORECASE), - 'target_school_upper_secondary': - re.compile( - r'(Degré secondaire 2|Weiterführende Schulen)', re.IGNORECASE), - 'target_school_tertiary': - re.compile(r'^(Tertiaire|Studierende)$', re.IGNORECASE), - 'target_school_bachelor': - re.compile( - r'(Etudiants* niveau Bachelor|Studierende Bachelor)', - re.IGNORECASE), - 'target_school_master': - re.compile( - r'(Etudiants* niveau Master|Studierende Master)', re.IGNORECASE), + "target_school_harmos1": re.compile(r"(Harmos1|Kindergarten)", re.IGNORECASE), + "target_school_harmos2": re.compile(r"(Harmos2|Kindergarten)", re.IGNORECASE), + "target_school_harmos3": re.compile( + r"(Harmos3|Primarschule \(1\.-2\. Kl\.\))", re.IGNORECASE + ), + "target_school_harmos4": re.compile( + r"(Harmos4|Primarschule \(1\.-2\. Kl\.\))", re.IGNORECASE + ), + "target_school_harmos5": re.compile( + r"(Harmos5|Primarschule \(3\.-4\. Kl\.\))", re.IGNORECASE + ), + "target_school_harmos6": re.compile( + r"(Harmos6|Primarschule \(3\.-4\. Kl\.\))", re.IGNORECASE + ), + "target_school_harmos7": re.compile( + r"(Harmos7|Primarschule \(5\.-6\. Kl\.\))", re.IGNORECASE + ), + "target_school_harmos8": re.compile( + r"(Harmos8|Primarschule \(5\.-6\. Kl\.\))", re.IGNORECASE + ), + "target_school_harmos9": re.compile( + r"(Harmos9|Orientierungsschule \(7\.-9\. Kl\.\))", re.IGNORECASE + ), + "target_school_harmos10": re.compile( + r"(Harmos10|Orientierungsschule \(7\.-9\. Kl\.\))", re.IGNORECASE + ), + "target_school_harmos11": re.compile( + r"(Harmos11|Orientierungsschule \(7\.-9\. 
Kl\.\))", re.IGNORECASE + ), + "target_school_upper_secondary": re.compile( + r"(Degré secondaire 2|Weiterführende Schulen)", re.IGNORECASE + ), + "target_school_tertiary": re.compile(r"^(Tertiaire|Studierende)$", re.IGNORECASE), + "target_school_bachelor": re.compile( + r"(Etudiants* niveau Bachelor|Studierende Bachelor)", re.IGNORECASE + ), + "target_school_master": re.compile( + r"(Etudiants* niveau Master|Studierende Master)", re.IGNORECASE + ), # filmage_ch - 'from the age of 18': - re.compile(r'(Dès 18 ans|Ab 18 Jahre)', re.IGNORECASE), - 'from the age of 16': - re.compile(r'(Dès 16 ans|Ab 16 Jahre)', re.IGNORECASE), - 'from the age of 14': - re.compile(r'(Dès 14 ans|Ab 14 Jahre)', re.IGNORECASE), - 'from the age of 12': - re.compile(r'(Dès 12 ans|Ab 12 Jahre)', re.IGNORECASE), - 'from the age of 10': - re.compile(r'(Dès 10 ans|Ab 10 Jahre)', re.IGNORECASE), - 'from the age of 7': - re.compile(r'(Dès 7 ans|Ab 7 Jahre)', re.IGNORECASE), - 'from the age of 5': - re.compile(r'(Dès 5 ans|Ab 5 Jahre)', re.IGNORECASE), - 'from the age of 2': - re.compile(r'(Dès 2 ans|Ab 2 Jahre)', re.IGNORECASE)} + "from the age of 18": re.compile(r"(Dès 18 ans|Ab 18 Jahre)", re.IGNORECASE), + "from the age of 16": re.compile(r"(Dès 16 ans|Ab 16 Jahre)", re.IGNORECASE), + "from the age of 14": re.compile(r"(Dès 14 ans|Ab 14 Jahre)", re.IGNORECASE), + "from the age of 12": re.compile(r"(Dès 12 ans|Ab 12 Jahre)", re.IGNORECASE), + "from the age of 10": re.compile(r"(Dès 10 ans|Ab 10 Jahre)", re.IGNORECASE), + "from the age of 7": re.compile(r"(Dès 7 ans|Ab 7 Jahre)", re.IGNORECASE), + "from the age of 5": re.compile(r"(Dès 5 ans|Ab 5 Jahre)", re.IGNORECASE), + "from the age of 2": re.compile(r"(Dès 2 ans|Ab 2 Jahre)", re.IGNORECASE), +} _INTENDED_AUDIENCE_TYPE_REGEXP = { - 'understanding_level': re.compile(r'^target_understanding'), - 'school_level': re.compile(r'^target_school'), - 'filmage_ch': re.compile(r'^from the age of') + "understanding_level": re.compile(r"^target_understanding"), + "school_level": re.compile(r"^target_school"), + "filmage_ch": re.compile(r"^from the age of"), } -_LONGITUDE = re.compile(r'^[EW0-9.+-]+(\\s[EW0-9.+-]+)?') -_LATITUDE = re.compile(r'^[NS0-9.+-]+(\\s[NS0-9.+-]+)?') +_LONGITUDE = re.compile(r"^[EW0-9.+-]+(\\s[EW0-9.+-]+)?") +_LATITUDE = re.compile(r"^[NS0-9.+-]+(\\s[NS0-9.+-]+)?") -_PERIOD_CODE = re.compile(r'^([a-z][0-9-]){2}$') +_PERIOD_CODE = re.compile(r"^([a-z][0-9-]){2}$") -_SCALE_TYPE = { - 'a': 'Linear scale', - 'b': 'Angular scale', - 'z': 'Other' -} +_SCALE_TYPE = {"a": "Linear scale", "b": "Angular scale", "z": "Other"} -re_reroils = re.compile(r'(^REROILS:)(.*)') -re_electonic_locator = re.compile(r'^(ftps?|https?)://.*$') +re_reroils = re.compile(r"(^REROILS:)(.*)") +re_electonic_locator = re.compile(r"^(ftps?|https?)://.*$") def do_issuance(data, marc21): """Get document content/Media/Carrier type and mode of issuance.""" if marc21.content_media_carrier_type: - data['contentMediaCarrier'] = marc21.content_media_carrier_type + data["contentMediaCarrier"] = marc21.content_media_carrier_type if marc21.langs_from_041_h: - data['originalLanguage'] = marc21.langs_from_041_h + data["originalLanguage"] = marc21.langs_from_041_h if marc21.admin_meta_data: - data['adminMetadata'] = marc21.admin_meta_data - main_type = _ISSUANCE_MAIN_TYPE_PER_BIB_LEVEL.get( - marc21.bib_level, 'rdami:1001') - sub_type = 'NOT_DEFINED' + data["adminMetadata"] = marc21.admin_meta_data + main_type = _ISSUANCE_MAIN_TYPE_PER_BIB_LEVEL.get(marc21.bib_level, "rdami:1001") + sub_type = 
"NOT_DEFINED" error = False - if marc21.bib_level == 'm': + if marc21.bib_level == "m": if marc21.is_top_level_record: - main_type = 'rdami:1002' - sub_type = 'set' + main_type = "rdami:1002" + sub_type = "set" else: - sub_type = 'materialUnit' + sub_type = "materialUnit" elif marc21.bib_level in _ISSUANCE_SUBTYPE_PER_BIB_LEVEL: sub_type = _ISSUANCE_SUBTYPE_PER_BIB_LEVEL[marc21.bib_level] elif marc21.serial_type in _ISSUANCE_SUBTYPE_PER_SERIAL_TYPE: sub_type = _ISSUANCE_SUBTYPE_PER_SERIAL_TYPE[marc21.serial_type] - if main_type == 'rdami:1001': - if sub_type not in [ - 'article', 'materialUnit', 'privateFile', 'privateSubfile' - ]: + if main_type == "rdami:1001": + if sub_type not in ["article", "materialUnit", "privateFile", "privateSubfile"]: error = True - sub_type = 'materialUnit' - elif main_type == 'rdami:1002': - if sub_type not in [ - 'set', 'partIndependentTitle', 'partDependantTitle' - ]: + sub_type = "materialUnit" + elif main_type == "rdami:1002": + if sub_type not in ["set", "partIndependentTitle", "partDependantTitle"]: error = True - sub_type = 'set' - elif main_type == 'rdami:1003': - if sub_type not in [ - 'serialInSerial', 'monographicSeries', 'periodical' - ]: + sub_type = "set" + elif main_type == "rdami:1003": + if sub_type not in ["serialInSerial", "monographicSeries", "periodical"]: error = True - sub_type = 'periodical' - elif main_type == 'rdami:1004': - if sub_type not in ['updatingWebsite', 'updatingLoose-leaf']: + sub_type = "periodical" + elif main_type == "rdami:1004": + if sub_type not in ["updatingWebsite", "updatingLoose-leaf"]: error = True - sub_type = 'updatingWebsite' + sub_type = "updatingWebsite" if error: - error_print('WARNING ISSUANCE:', marc21.bib_id, marc21.rero_id, - main_type, sub_type, marc21.bib_level, marc21.serial_type) - data['issuance'] = {'main_type': main_type, 'subtype': sub_type} + error_print( + "WARNING ISSUANCE:", + marc21.bib_id, + marc21.rero_id, + main_type, + sub_type, + marc21.bib_level, + marc21.serial_type, + ) + data["issuance"] = {"main_type": main_type, "subtype": sub_type} def do_type(data, marc21): """Get document type.""" doc_type = [{"main_type": "docmaintype_other"}] - if marc21.record_type == 'a': - if marc21.bib_level == 'm': - doc_type = [{ - "main_type": "docmaintype_book", - "subtype": "docsubtype_other_book" - }] + if marc21.record_type == "a": + if marc21.bib_level == "m": + doc_type = [ + {"main_type": "docmaintype_book", "subtype": "docsubtype_other_book"} + ] field_008 = None - field_008 = marc21.get_fields('008') + field_008 = marc21.get_fields("008") # if it's an electronic book - if field_008[0]['data'][23] in ('o', 's'): - doc_type = [{ - "main_type": "docmaintype_book", - "subtype": "docsubtype_e-book" - }] - elif marc21.bib_level == 's': - doc_type = [{ - "main_type": "docmaintype_serial" - }] - elif marc21.bib_level == 'a': - doc_type = [{ - "main_type": "docmaintype_article", - }] - elif marc21.record_type in ['c', 'd']: - doc_type = [{ - "main_type": "docmaintype_score", - "subtype": "docsubtype_printed_score" - }] - elif marc21.record_type in ['i', 'j']: - doc_type = [{ - "main_type": "docmaintype_audio", - "subtype": "docsubtype_music" - }] - elif marc21.record_type == 'g': - doc_type = [{ - "main_type": "docmaintype_movie_series", - "subtype": "docsubtype_movie" - }] - data['type'] = doc_type + if field_008[0]["data"][23] in ("o", "s"): + doc_type = [ + {"main_type": "docmaintype_book", "subtype": "docsubtype_e-book"} + ] + elif marc21.bib_level == "s": + doc_type = [{"main_type": 
"docmaintype_serial"}] + elif marc21.bib_level == "a": + doc_type = [ + { + "main_type": "docmaintype_article", + } + ] + elif marc21.record_type in ["c", "d"]: + doc_type = [ + { + "main_type": "docmaintype_score", + "subtype": "docsubtype_printed_score", + } + ] + elif marc21.record_type in ["i", "j"]: + doc_type = [ + {"main_type": "docmaintype_audio", "subtype": "docsubtype_music"} + ] + elif marc21.record_type == "g": + doc_type = [ + {"main_type": "docmaintype_movie_series", "subtype": "docsubtype_movie"} + ] + data["type"] = doc_type def do_language(data, marc21): @@ -344,33 +477,26 @@ def do_language(data, marc21): languages: 008 and 041 [$a, repetitive] """ - language = data.get('language', []) - lang_codes = [v.get('value') for v in language] + language = data.get("language", []) + lang_codes = [v.get("value") for v in language] if marc21.lang_from_008: lang_value = marc21.lang_from_008 - if lang_value != '|||' and lang_value not in lang_codes: - language.append({ - 'value': lang_value, - 'type': 'bf:Language' - }) + if lang_value != "|||" and lang_value not in lang_codes: + language.append({"value": lang_value, "type": "bf:Language"}) lang_codes.append(marc21.lang_from_008) for lang_value in marc21.langs_from_041_a: if lang_value not in lang_codes: - language.append({ - 'value': lang_value.strip(), - 'type': 'bf:Language' - }) + language.append({"value": lang_value.strip(), "type": "bf:Language"}) lang_codes.append(lang_value) # language note - if fields_546 := marc21.get_fields(tag='546'): - subfields_546_a = marc21.get_subfields(fields_546[0], 'a') + if fields_546 := marc21.get_fields(tag="546"): + subfields_546_a = marc21.get_subfields(fields_546[0], "a") if subfields_546_a and language: - language[0]['note'] = subfields_546_a[0] + language[0]["note"] = subfields_546_a[0] if not language: - error_print( - 'ERROR LANGUAGE:', marc21.bib_id, f'f{language} set to "und"') - language = [{'value': 'und', 'type': 'bf:Language'}] + error_print("ERROR LANGUAGE:", marc21.bib_id, f'f{language} set to "und"') + language = [{"value": "und", "type": "bf:Language"}] return language or None @@ -386,30 +512,23 @@ def do_abbreviated_title(data, marc21, key, value): * partNumber = $h * partName = $i """ - title_list = data.get('title', []) + title_list = data.get("title", []) title = {} - if value.get('a'): - main_title = not_repetitive( - marc21.bib_id, marc21.bib_id, key, value, 'a') - title_type = 'bf:KeyTitle' if key[:3] == '222' \ - else 'bf:AbbreviatedTitle' - title = { - 'type': title_type, - 'mainTitle': [{'value': main_title}] - } - if value.get('b'): - subtitle = not_repetitive( - marc21.bib_id, marc21.bib_id, key, value, 'b') - title['subtitle'] = [{'value': subtitle}] - for resp_tag in ['f', 'g']: + if value.get("a"): + main_title = not_repetitive(marc21.bib_id, marc21.bib_id, key, value, "a") + title_type = "bf:KeyTitle" if key[:3] == "222" else "bf:AbbreviatedTitle" + title = {"type": title_type, "mainTitle": [{"value": main_title}]} + if value.get("b"): + subtitle = not_repetitive(marc21.bib_id, marc21.bib_id, key, value, "b") + title["subtitle"] = [{"value": subtitle}] + for resp_tag in ["f", "g"]: if datas := utils.force_list(value.get(resp_tag)): for data in datas: if responsibility := build_responsibility_data(data): - new_responsibility = data.get( - 'responsibilityStatement', []) + new_responsibility = data.get("responsibilityStatement", []) for resp in responsibility: new_responsibility.append(resp) - data['responsibilityStatement'] = new_responsibility + 
data["responsibilityStatement"] = new_responsibility if title: title_list.append(title) return title_list or None @@ -433,245 +552,248 @@ def do_title(data, marc21, value): $6 : non repetitive """ # extraction and initialization of data for further processing - subfield_245_a = '' - subfield_245_b = '' - if fields_245 := marc21.get_fields(tag='245'): - subfields_245_a = marc21.get_subfields(fields_245[0], 'a') - subfields_245_b = marc21.get_subfields(fields_245[0], 'b') + subfield_245_a = "" + subfield_245_b = "" + if fields_245 := marc21.get_fields(tag="245"): + subfields_245_a = marc21.get_subfields(fields_245[0], "a") + subfields_245_b = marc21.get_subfields(fields_245[0], "b") if subfields_245_a: subfield_245_a = subfields_245_a[0] if subfields_245_b: subfield_245_b = subfields_245_b[0] - field_245_a_end_with_equal = re.search(r'\s*=\s*$', subfield_245_a) + field_245_a_end_with_equal = re.search(r"\s*=\s*$", subfield_245_a) - fields_246 = marc21.get_fields('246') - subfield_246_a = '' + fields_246 = marc21.get_fields("246") + subfield_246_a = "" if fields_246: - if subfields_246_a := marc21.get_subfields(fields_246[0], 'a'): + if subfields_246_a := marc21.get_subfields(fields_246[0], "a"): subfield_246_a = subfields_246_a[0] _, link = get_field_link_data(value) items = get_field_items(value) index = 1 - title_list = data.get('title', []) + title_list = data.get("title", []) title_data = {} - part_list = TitlePartList(part_number_code='n', part_name_code='p') + part_list = TitlePartList(part_number_code="n", part_name_code="p") parallel_titles = [] pararalel_title_string_set = set() responsibility = {} # parse field 245 subfields for extracting: # main title, subtitle, parallel titles and the title parts - subfield_selection = {'a', 'b', 'c', 'n', 'p'} + subfield_selection = {"a", "b", "c", "n", "p"} for blob_key, blob_value in items: if blob_key in subfield_selection: value_data = marc21.build_value_with_alternate_graphic( - '245', blob_key, blob_value, index, link, ',.', ':;/-=') - if blob_key in {'a', 'b', 'c'}: + "245", blob_key, blob_value, index, link, ",.", ":;/-=" + ) + if blob_key in {"a", "b", "c"}: subfield_selection.remove(blob_key) - if blob_key == 'a': + if blob_key == "a": if value_data: # if title contains << >>, remove it # Ex: <> beaux dégâts - value_data[0]['value'] = value_data[0]['value']\ - .replace('<<', '').replace('>>', '') - title_data['mainTitle'] = value_data - elif blob_key == 'b': + value_data[0]["value"] = ( + value_data[0]["value"].replace("<<", "").replace(">>", "") + ) + title_data["mainTitle"] = value_data + elif blob_key == "b": if subfield_246_a: - subtitle, parallel_titles, pararalel_title_string_set = \ + subtitle, parallel_titles, pararalel_title_string_set = ( extract_subtitle_and_parallel_titles_from_field_245_b( - value_data, field_245_a_end_with_equal) + value_data, field_245_a_end_with_equal + ) + ) if subtitle: - title_data['subtitle'] = subtitle + title_data["subtitle"] = subtitle elif value_data: - title_data['subtitle'] = value_data - elif blob_key == 'c' and value_data: + title_data["subtitle"] = value_data + elif blob_key == "c" and value_data: responsibility = build_responsibility_data(value_data) - elif blob_key in ['n', 'p']: + elif blob_key in ["n", "p"]: part_list.update_part(value_data, blob_key, blob_value) - if blob_key != '__order__': + if blob_key != "__order__": index += 1 - title_data['type'] = 'bf:Title' + title_data["type"] = "bf:Title" if the_part_list := part_list.get_part_list(): - title_data['part'] = the_part_list + 
title_data["part"] = the_part_list if title_data: title_list.append(title_data) for parallel_title in parallel_titles: title_list.append(parallel_title) # extract variant titles - variant_title_list = \ - marc21.build_variant_title_data(pararalel_title_string_set) + variant_title_list = marc21.build_variant_title_data(pararalel_title_string_set) for variant_title_data in variant_title_list: title_list.append(variant_title_data) # extract responsibilities if responsibility: - new_responsibility = data.get('responsibilityStatement', []) + new_responsibility = data.get("responsibilityStatement", []) for resp in responsibility: new_responsibility.append(resp) - data['responsibilityStatement'] = new_responsibility + data["responsibilityStatement"] = new_responsibility return title_list or None def build_agent(marc21, key, value): """Build agent.""" agent_data = {} - if value.get('a'): - agent_data['preferred_name'] = remove_trailing_punctuation( - not_repetitive(marc21.bib_id, marc21.bib_id, key, value, 'a')) + if value.get("a"): + agent_data["preferred_name"] = remove_trailing_punctuation( + not_repetitive(marc21.bib_id, marc21.bib_id, key, value, "a") + ) # 100|700|240 Person - if key[:3] in ['100', '700']: - agent_data['type'] = EntityType.PERSON - if value.get('b'): - numeration = not_repetitive( - marc21.bib_id, marc21.bib_id, key, value, 'b') - agent_data['numeration'] = remove_trailing_punctuation( - numeration) - if value.get('c'): - qualifier = not_repetitive( - marc21.bib_id, marc21.bib_id, key, value, 'c') - agent_data['qualifier'] = remove_trailing_punctuation( - qualifier) - if value.get('d'): - date = not_repetitive( - marc21.bib_id, marc21.bib_id, key, value, 'd') - date = date.rstrip(',') - dates = remove_trailing_punctuation(date).split('-') + if key[:3] in ["100", "700"]: + agent_data["type"] = EntityType.PERSON + if value.get("b"): + numeration = not_repetitive(marc21.bib_id, marc21.bib_id, key, value, "b") + agent_data["numeration"] = remove_trailing_punctuation(numeration) + if value.get("c"): + qualifier = not_repetitive(marc21.bib_id, marc21.bib_id, key, value, "c") + agent_data["qualifier"] = remove_trailing_punctuation(qualifier) + if value.get("d"): + date = not_repetitive(marc21.bib_id, marc21.bib_id, key, value, "d") + date = date.rstrip(",") + dates = remove_trailing_punctuation(date).split("-") with contextlib.suppress(Exception): if date_of_birth := dates[0].strip(): - agent_data['date_of_birth'] = date_of_birth + agent_data["date_of_birth"] = date_of_birth with contextlib.suppress(Exception): if date_of_death := dates[1].strip(): - agent_data['date_of_death'] = date_of_death - if value.get('q'): + agent_data["date_of_death"] = date_of_death + if value.get("q"): fuller_form_of_name = not_repetitive( - marc21.bib_id, marc21.bib_id, key, value, 'q') - agent_data[ - 'fuller_form_of_name'] = remove_trailing_punctuation( - fuller_form_of_name - ).lstrip('(').rstrip(')') + marc21.bib_id, marc21.bib_id, key, value, "q" + ) + agent_data["fuller_form_of_name"] = ( + remove_trailing_punctuation(fuller_form_of_name).lstrip("(").rstrip(")") + ) # 710|711 Organisation - if key[:3] in ['710', '711']: - agent_data['type'] = EntityType.ORGANISATION - agent_data['conference'] = key[:3] == '711' - if value.get('b'): + if key[:3] in ["710", "711"]: + agent_data["type"] = EntityType.ORGANISATION + agent_data["conference"] = key[:3] == "711" + if value.get("b"): subordinate_units = [ - remove_trailing_punctuation(subordinate_unit, ',.') - for subordinate_unit in 
utils.force_list(value.get('b')) + remove_trailing_punctuation(subordinate_unit, ",.") + for subordinate_unit in utils.force_list(value.get("b")) ] - agent_data['subordinate_unit'] = subordinate_units - if value.get('n'): - numbering = not_repetitive( - marc21.bib_id, marc21.bib_id, key, value, 'n') - - if numbering := remove_trailing_punctuation( - numbering).lstrip('(').rstrip(')'): - agent_data['numbering'] = numbering - if value.get('d'): + agent_data["subordinate_unit"] = subordinate_units + if value.get("n"): + numbering = not_repetitive(marc21.bib_id, marc21.bib_id, key, value, "n") + + if ( + numbering := remove_trailing_punctuation(numbering) + .lstrip("(") + .rstrip(")") + ): + agent_data["numbering"] = numbering + if value.get("d"): conference_date = not_repetitive( - marc21.bib_id, marc21.bib_id, key, value, 'd') - agent_data['conference_date'] = remove_trailing_punctuation( - conference_date - ).lstrip('(').rstrip(')') - if value.get('c'): - place = not_repetitive( - marc21.bib_id, marc21.bib_id, key, value, 'c') - agent_data['place'] = remove_trailing_punctuation( - place - ).lstrip('(').rstrip(')') + marc21.bib_id, marc21.bib_id, key, value, "d" + ) + agent_data["conference_date"] = ( + remove_trailing_punctuation(conference_date).lstrip("(").rstrip(")") + ) + if value.get("c"): + place = not_repetitive(marc21.bib_id, marc21.bib_id, key, value, "c") + agent_data["place"] = ( + remove_trailing_punctuation(place).lstrip("(").rstrip(")") + ) if not agent_data: return return { - 'type': agent_data.get('type'), - 'authorized_access_point': create_authorized_access_point(agent_data) - }, agent_data + "type": agent_data.get("type"), + "authorized_access_point": create_authorized_access_point(agent_data), + }, agent_data def do_contribution(data, marc21, key, value): """Get contribution.""" # exclude work access points - if key[:3] in ['700', '710'] and value.get('t'): + if key[:3] in ["700", "710"] and value.get("t"): if work_access_point := do_work_access_point(marc21, key, value): - data.setdefault('work_access_point', []) - data['work_access_point'].append(work_access_point) + data.setdefault("work_access_point", []) + data["work_access_point"].append(work_access_point) return None agent = {} - if value.get('a'): + if value.get("a"): agent_data = build_agent(marc21=marc21, key=key, value=value)[0] if ref := get_mef_link( bibid=marc21.bib_id, reroid=marc21.rero_id, entity_type=EntityType.PERSON, - ids=utils.force_list(value.get('0')), - key=key + ids=utils.force_list(value.get("0")), + key=key, ): - agent = { - '$ref': ref, - '_text': agent_data['authorized_access_point'] - } + agent = {"$ref": ref, "_text": agent_data["authorized_access_point"]} else: agent = agent_data - if value.get('4'): + if value.get("4"): roles = set() - for role in utils.force_list(value.get('4')): - role = role.split('/')[-1].lower() + for role in utils.force_list(value.get("4")): + role = role.split("/")[-1].lower() if len(role) != 3: - error_print('WARNING CONTRIBUTION ROLE LENGTH:', - marc21.bib_id, marc21.rero_id, role) - if role == 'sce': - error_print('WARNING CONTRIBUTION ROLE SCE:', - marc21.bib_id, marc21.rero_id, - 'sce --> aus') - role = 'aus' + error_print( + "WARNING CONTRIBUTION ROLE LENGTH:", + marc21.bib_id, + marc21.rero_id, + role, + ) + if role == "sce": + error_print( + "WARNING CONTRIBUTION ROLE SCE:", + marc21.bib_id, + marc21.rero_id, + "sce --> aus", + ) + role = "aus" if role not in _CONTRIBUTION_ROLE: - error_print('WARNING CONTRIBUTION ROLE DEFINITION:', - marc21.bib_id, 
marc21.rero_id, role) - role = 'ctb' + error_print( + "WARNING CONTRIBUTION ROLE DEFINITION:", + marc21.bib_id, + marc21.rero_id, + role, + ) + role = "ctb" roles.add(role) - elif key[:3] == '100': - roles = ['cre'] - elif key[:3] == '711': - roles = ['aut'] + elif key[:3] == "100": + roles = ["cre"] + elif key[:3] == "711": + roles = ["aut"] else: - roles = ['ctb'] + roles = ["ctb"] if agent: - return { - 'entity': agent, - 'role': list(roles) - } + return {"entity": agent, "role": list(roles)} def do_specific_document_relation(data, marc21, key, value): """Get document relation.""" tag = key[:3] relation = None - if tag in ['533', '534']: - label = build_string_from_subfields( - value, - _REPRODUCTION_SUBFIELDS_PER_TAG[tag] - ) - relation = {'label': label} + if tag in ["533", "534"]: + label = build_string_from_subfields(value, _REPRODUCTION_SUBFIELDS_PER_TAG[tag]) + relation = {"label": label} elif subfield_w := not_repetitive( - marc21.bib_id, marc21.rero_id, - key, value, 'w', default='').strip(): + marc21.bib_id, marc21.rero_id, key, value, "w", default="" + ).strip(): pid = None if match := re_reroils.match(subfield_w): source = match.group(1) pid = match.group(2) - if pid and source == ('REROILS:'): + if pid and source == ("REROILS:"): # TODO: find a way to use a parameter for ref - ref = f'https://bib.rero.ch/api/documents/{pid}' - relation = {'$ref': ref} + ref = f"https://bib.rero.ch/api/documents/{pid}" + relation = {"$ref": ref} else: - label = build_string_from_subfields(value, 'ctw') - relation = {'label': label} - if relation and (relation.get('label') or relation.get('$ref')): + label = build_string_from_subfields(value, "ctw") + relation = {"label": label} + if relation and (relation.get("label") or relation.get("$ref")): relation_tag = _DOCUMENT_RELATION_PER_TAG[tag] relation_list = data.get(relation_tag, []) relation_list.append(relation) @@ -680,10 +802,10 @@ def do_specific_document_relation(data, marc21, key, value): def do_copyright_date(data, value): """Get Copyright Date.""" - copyright_dates = data.get('copyrightDate', []) - for copyright_date in utils.force_list(value.get('c', [])): - if match := re.search(r'^([©℗])+\s*(\d{4}.*)', copyright_date): - copyright_date = ' '.join((match[1], match[2])) + copyright_dates = data.get("copyrightDate", []) + for copyright_date in utils.force_list(value.get("c", [])): + if match := re.search(r"^([©℗])+\s*(\d{4}.*)", copyright_date): + copyright_date = " ".join((match[1], match[2])) copyright_dates.append(copyright_date) # else: # raise ValueError('Bad format of copyright date') @@ -696,24 +818,22 @@ def do_edition_statement(marc21, value): editionDesignation: 250 [$a non repetitive] (without trailing /) responsibility: 250 [$b non repetitive] """ - key_per_code = { - 'a': 'editionDesignation', - 'b': 'responsibility' - } + key_per_code = {"a": "editionDesignation", "b": "responsibility"} tag_link, link = get_field_link_data(value) items = get_field_items(value) index = 1 edition_data = {} - subfield_selection = {'a', 'b'} + subfield_selection = {"a", "b"} for blob_key, blob_value in items: if blob_key in subfield_selection: subfield_selection.remove(blob_key) edition_designation = marc21.build_value_with_alternate_graphic( - '250', blob_key, blob_value, index, link, ',.', ':;/-=') + "250", blob_key, blob_value, index, link, ",.", ":;/-=" + ) if edition_designation: edition_data[key_per_code[blob_key]] = edition_designation - if blob_key != '__order__': + if blob_key != "__order__": index += 1 return edition_data or None @@ 
-730,47 +850,44 @@ def do_provision_activity(data, marc21, key, value): def correct_b_after_e(field_value): """Corrects wrong $b after $e.""" new_field_values = [] - last_key = '' + last_key = "" for blob_key, blob_value in get_field_items(field_value): - if last_key == 'e' and blob_key == 'b': - blob_key = 'f' + if last_key == "e" and blob_key == "b": + blob_key = "f" new_field_values.append((blob_key, blob_value)) last_key = blob_key return GroupableOrderedDict(new_field_values) - def build_statement(field_value, subtags=('a', 'b')): + def build_statement(field_value, subtags=("a", "b")): def build_agent_data(code, label, index, link): type_per_code = { - 'a': EntityType.PLACE, - 'b': EntityType.AGENT, - 'e': EntityType.PLACE, - 'f': EntityType.AGENT + "a": EntityType.PLACE, + "b": EntityType.AGENT, + "e": EntityType.PLACE, + "f": EntityType.AGENT, } label = remove_trailing_punctuation(label) - if label and code == 'e': - label = label.lstrip('(') - if label and code == 'f': - label = label.rstrip(')') + if label and code == "e": + label = label.lstrip("(") + if label and code == "f": + label = label.rstrip(")") if not label: return None - agent_data = { - 'type': type_per_code[code], - 'label': [{'value': label}] - } + agent_data = {"type": type_per_code[code], "label": [{"value": label}]} with contextlib.suppress(Exception): alt_gr = marc21.alternate_graphic[tag][link] - subfield = \ - marc21.get_subfields(alt_gr['field'])[index] + subfield = marc21.get_subfields(alt_gr["field"])[index] if subfield := remove_trailing_punctuation(subfield): - agent_data['label'].append({ - 'value': subfield, - 'language': marc21.get_language_script( - alt_gr['script']) - }) + agent_data["label"].append( + { + "value": subfield, + "language": marc21.get_language_script(alt_gr["script"]), + } + ) if identifier := build_identifier(value): - agent_data['identifiedBy'] = identifier + agent_data["identifiedBy"] = identifier return agent_data @@ -781,139 +898,127 @@ def build_agent_data(code, label, index, link): index = 1 for blob_key, blob_value in items: if blob_key in subtags: - agent_data = build_agent_data( - blob_key, blob_value, index, link) + agent_data = build_agent_data(blob_key, blob_value, index, link) if agent_data: statement.append(agent_data) - if blob_key != '__order__': + if blob_key != "__order__": index += 1 return statement or None # the function marc21_to_provision_activity start here ind2 = key[4] type_per_ind2 = { - ' ': 'bf:Publication', - '_': 'bf:Publication', - '0': 'bf:Production', - '1': 'bf:Publication', - '2': 'bf:Distribution', - '3': 'bf:Manufacture' + " ": "bf:Publication", + "_": "bf:Publication", + "0": "bf:Production", + "1": "bf:Publication", + "2": "bf:Distribution", + "3": "bf:Manufacture", } - if tag == '260': - ind2 = '1' + if tag == "260": + ind2 = "1" value = correct_b_after_e(value) - publication = {'type': type_per_ind2[ind2]} + publication = {"type": type_per_ind2[ind2]} - if ind2 in ('_', ' ', '1'): - publication['startDate'] = marc21.date['start_date'] - if 'end_date' in marc21.date: - publication['endDate'] = marc21.date['end_date'] - if 'note' in marc21.date: - publication['note'] = marc21.date['note'] + if ind2 in ("_", " ", "1"): + publication["startDate"] = marc21.date["start_date"] + if "end_date" in marc21.date: + publication["endDate"] = marc21.date["end_date"] + if "note" in marc21.date: + publication["note"] = marc21.date["note"] places = [] place = marc21.build_place() - if place and place.get('country') != 'xx': + if place and place.get("country") 
!= "xx": places.append(place) # parce the link skipping the fist (already used by build_place) for i in range(1, len(marc21.links_from_752)): - place = { - 'country': 'xx' - } + place = {"country": "xx"} if marc21.links_from_752: - place['identifiedBy'] = marc21.links_from_752[i] + place["identifiedBy"] = marc21.links_from_752[i] places.append(place) if places: - publication['place'] = places - subfield_3 = not_repetitive( - marc21.bib_id, marc21.rero_id, key, value, '3') + publication["place"] = places + subfield_3 = not_repetitive(marc21.bib_id, marc21.rero_id, key, value, "3") if subfield_3: notes = [] - if pub_notes := publication.get('note'): + if pub_notes := publication.get("note"): notes = [pub_notes] notes.append(subfield_3) - publication['note'] = ', '.join(notes) + publication["note"] = ", ".join(notes) if statement := build_statement(value): - publication['statement'] = statement + publication["statement"] = statement - if subfields_c := utils.force_list(value.get('c', [])): + if subfields_c := utils.force_list(value.get("c", [])): subfield_c = subfields_c[0] - date = { - 'label': [{'value': subfield_c}], - 'type': 'Date' - } + date = {"label": [{"value": subfield_c}], "type": "Date"} _, link = get_field_link_data(value) with contextlib.suppress(Exception): - alt_gr = marc21.alternate_graphic['264'][link] - subfield = \ - marc21.get_subfields(alt_gr['field'], code='c') - date['label'].append({ - 'value': subfield[0], - 'language': marc21.get_language_script( - alt_gr['script']) - }) - publication.setdefault('statement', []) - publication['statement'].append(date) + alt_gr = marc21.alternate_graphic["264"][link] + subfield = marc21.get_subfields(alt_gr["field"], code="c") + date["label"].append( + { + "value": subfield[0], + "language": marc21.get_language_script(alt_gr["script"]), + } + ) + publication.setdefault("statement", []) + publication["statement"].append(date) # make second provision activity for 260 $ e $f $g - if tag == '260': - publication_260 = {'type': 'bf:Manufacture'} - if statement := build_statement(value, ('e', 'f')): - publication_260['statement'] = statement - if subfields_g := utils.force_list(value.get('g', [])): + if tag == "260": + publication_260 = {"type": "bf:Manufacture"} + if statement := build_statement(value, ("e", "f")): + publication_260["statement"] = statement + if subfields_g := utils.force_list(value.get("g", [])): subfield_g = subfields_g[0] - date = { - 'label': [{'value': subfield_g}], - 'type': 'Date' - } - publication_260.setdefault('statement', []) - publication_260['statement'].append(date) + date = {"label": [{"value": subfield_g}], "type": "Date"} + publication_260.setdefault("statement", []) + publication_260["statement"].append(date) if statement or subfields_g: - publications = data.setdefault('provisionActivity', []) + publications = data.setdefault("provisionActivity", []) if publication: publications.append(publication) publication = None publications.append(publication_260) - data['provisionActivity'] = publications + data["provisionActivity"] = publications return publication or None def do_table_of_contents(data, value): """Get tableOfContents from repetitive field 505.""" - if table_of_contents := build_string_from_subfields(value, 'agtr'): - table_of_contents_list = data.get('tableOfContents', []) + if table_of_contents := build_string_from_subfields(value, "agtr"): + table_of_contents_list = data.get("tableOfContents", []) table_of_contents_list.append(table_of_contents) - data['tableOfContents'] = 
table_of_contents_list + data["tableOfContents"] = table_of_contents_list def do_usage_and_access_policy_from_field_506_540(marc21, key, value): """Get usageAndAccessPolicy from fields: 506, 540.""" if subfield_a := not_repetitive( - marc21.bib_id, marc21.rero_id, - key, value, 'a', default='').strip(): - return {'type': 'bf:UsageAndAccessPolicy', 'label': subfield_a} + marc21.bib_id, marc21.rero_id, key, value, "a", default="" + ).strip(): + return {"type": "bf:UsageAndAccessPolicy", "label": subfield_a} def do_frequency_field_310_321(marc21, key, value): """Get frequency from fields: 310, 321.""" subfield_a = not_repetitive( - marc21.bib_id, marc21.rero_id, - key, value, 'a', default='missing_label').strip() + marc21.bib_id, marc21.rero_id, key, value, "a", default="missing_label" + ).strip() subfield_b = not_repetitive( - marc21.bib_id, marc21.rero_id, - key, value, 'b', default='').strip() + marc21.bib_id, marc21.rero_id, key, value, "b", default="" + ).strip() frequency = { - 'label': remove_trailing_punctuation( - data=subfield_a, - punctuation=',', - spaced_punctuation=',' - ) - } + "label": remove_trailing_punctuation( + data=subfield_a, punctuation=",", spaced_punctuation="," + ) + } if subfield_b: - frequency['date'] = subfield_b + frequency["date"] = subfield_b return frequency @@ -924,46 +1029,45 @@ def do_dissertation(marc21, value): items = get_field_items(value) index = 1 dissertation = {} - subfield_selection = {'a'} + subfield_selection = {"a"} for blob_key, blob_value in items: if blob_key in subfield_selection: subfield_selection.remove(blob_key) - if blob_key == 'a': + if blob_key == "a": dissertation_data = marc21.build_value_with_alternate_graphic( - '502', blob_key, blob_value, index, link, ',.', ':;/-=') + "502", blob_key, blob_value, index, link, ",.", ":;/-=" + ) else: dissertation_data = blob_value if dissertation_data: - dissertation['label'] = dissertation_data - if blob_key != '__order__': + dissertation["label"] = dissertation_data + if blob_key != "__order__": index += 1 return dissertation or None def do_summary(marc21, value): """Get summary from repetitive field 520.""" - key_per_code = { - 'a': 'label', - 'c': 'source' - } + key_per_code = {"a": "label", "c": "source"} # parse field 520 subfields for extracting: # summary and source parts tag_link, link = get_field_link_data(value) items = get_field_items(value) index = 1 summary = {} - subfield_selection = {'a', 'c'} + subfield_selection = {"a", "c"} for blob_key, blob_value in items: if blob_key in subfield_selection: subfield_selection.remove(blob_key) - if blob_key == 'a': + if blob_key == "a": summary_data = marc21.build_value_with_alternate_graphic( - '520', blob_key, blob_value, index, link, ',.', ':;/-=') + "520", blob_key, blob_value, index, link, ",.", ":;/-=" + ) else: summary_data = blob_value if summary_data: summary[key_per_code[blob_key]] = summary_data - if blob_key != '__order__': + if blob_key != "__order__": index += 1 return summary or None @@ -971,7 +1075,7 @@ def do_summary(marc21, value): def do_intended_audience(data, value): """Get intendedAudience from field 521.""" intended_audience_set = set() - for subfield_a in utils.force_list(value.get('a')): + for subfield_a in utils.force_list(value.get("a")): audiance_found = False for audiance in _INTENDED_AUDIENCE_REGEXP: regexp = _INTENDED_AUDIENCE_REGEXP[audiance] @@ -981,269 +1085,227 @@ def do_intended_audience(data, value): if not audiance_found: intended_audience_set.add(subfield_a) - intended_audience_list = 
data.get('intendedAudience', []) + intended_audience_list = data.get("intendedAudience", []) for intended_audience_str in intended_audience_set: intended_audience = {} # get the audiance_type for audiance_type in _INTENDED_AUDIENCE_TYPE_REGEXP: regexp = _INTENDED_AUDIENCE_TYPE_REGEXP[audiance_type] if regexp.search(intended_audience_str): - intended_audience['audienceType'] = audiance_type - if 'audienceType' not in intended_audience: - intended_audience['audienceType'] = 'undefined' - intended_audience['value'] = intended_audience_str + intended_audience["audienceType"] = audiance_type + if "audienceType" not in intended_audience: + intended_audience["audienceType"] = "undefined" + intended_audience["value"] = intended_audience_str intended_audience_list.append(intended_audience) - data['intendedAudience'] = intended_audience_list + data["intendedAudience"] = intended_audience_list def do_identified_by_from_field_010(data, marc21, key, value): """Get identifier from field 010.""" + def build_identifier_from(subfield_data, identified_by): subfield_data = subfield_data.strip() - identifier = {'value': subfield_data} - if not_repetitive(marc21.bib_id, marc21.rero_id, - key, value, 'a', default='').strip(): - identifier['type'] = 'bf:Lccn' + identifier = {"value": subfield_data} + if not_repetitive( + marc21.bib_id, marc21.rero_id, key, value, "a", default="" + ).strip(): + identifier["type"] = "bf:Lccn" identified_by.append(identifier) return identified_by - identified_by = data.get('identifiedBy', []) - subfield_a = not_repetitive(marc21.bib_id, marc21.rero_id, key, value, 'a') + identified_by = data.get("identifiedBy", []) + subfield_a = not_repetitive(marc21.bib_id, marc21.rero_id, key, value, "a") if subfield_a: identified_by = build_identifier_from(subfield_a, identified_by) - data['identifiedBy'] = identified_by + data["identifiedBy"] = identified_by def do_identified_by_from_field_020(data, marc21, key, value): """Get identifier from field 020.""" + def build_identifier_from(subfield_data, identified_by, status=None): subfield_data = subfield_data.strip() - identifier = {'value': subfield_data} - if value.get('q'): # $q is repetitive - identifier['qualifier'] = \ - ', '.join(utils.force_list(value.get('q'))) + identifier = {"value": subfield_data} + if value.get("q"): # $q is repetitive + identifier["qualifier"] = ", ".join(utils.force_list(value.get("q"))) - if match := re.search(r'^(.+?)\s*\((.+)\)$', subfield_data): + if match := re.search(r"^(.+?)\s*\((.+)\)$", subfield_data): # match.group(2) : parentheses content - identifier['qualifier'] = ', '.join( - filter( - None, - [match.group(2), identifier.get('qualifier', '')] - ) + identifier["qualifier"] = ", ".join( + filter(None, [match.group(2), identifier.get("qualifier", "")]) ) # value without parenthesis and parentheses content - identifier['value'] = match.group(1) + identifier["value"] = match.group(1) if status: - identifier['status'] = status - identifier['type'] = 'bf:Isbn' + identifier["status"] = status + identifier["type"] = "bf:Isbn" identified_by.append(identifier) return identified_by - identified_by = data.get('identifiedBy', []) - subfield_a = not_repetitive(marc21.bib_id, marc21.rero_id, key, value, 'a') + identified_by = data.get("identifiedBy", []) + subfield_a = not_repetitive(marc21.bib_id, marc21.rero_id, key, value, "a") if subfield_a: build_identifier_from(subfield_a, identified_by) - subfield_c = not_repetitive(marc21.bib_id, marc21.rero_id, - key, value, 'c', default='').strip() + subfield_c = 
not_repetitive( + marc21.bib_id, marc21.rero_id, key, value, "c", default="" + ).strip() if subfield_c: - acquisition_terms = data.get('acquisitionTerms', []) + acquisition_terms = data.get("acquisitionTerms", []) acquisition_terms.append(subfield_c) - data['acquisitionTerms'] = acquisition_terms - for subfield_z in utils.force_list(value.get('z', [])): + data["acquisitionTerms"] = acquisition_terms + for subfield_z in utils.force_list(value.get("z", [])): identified_by = build_identifier_from( - subfield_z, identified_by, status='invalid or cancelled') + subfield_z, identified_by, status="invalid or cancelled" + ) if identified_by: - data['identifiedBy'] = identified_by + data["identifiedBy"] = identified_by def do_identified_by_from_field_022(data, value): """Get identifier from field 022.""" - status_for = { - 'm': 'cancelled', - 'y': 'invalid' - } - type_for = { - 'a': 'bf:Issn', - 'l': 'bf:IssnL', - 'm': 'bf:IssnL', - 'y': 'bf:Issn' - } + status_for = {"m": "cancelled", "y": "invalid"} + type_for = {"a": "bf:Issn", "l": "bf:IssnL", "m": "bf:IssnL", "y": "bf:Issn"} - identified_by = data.get('identifiedBy', []) - for subfield_code in ['a', 'l', 'm', 'y']: + identified_by = data.get("identifiedBy", []) + for subfield_code in ["a", "l", "m", "y"]: if subfields_data := value.get(subfield_code): if isinstance(subfields_data, str): subfields_data = [subfields_data] for subfield_data in subfields_data: subfield_data = subfield_data.strip() - identifier = { - 'type': type_for[subfield_code], 'value': subfield_data} + identifier = {"type": type_for[subfield_code], "value": subfield_data} if subfield_code in status_for: - identifier['status'] = status_for[subfield_code] + identifier["status"] = status_for[subfield_code] identified_by.append(identifier) if identified_by: - data['identifiedBy'] = identified_by + data["identifiedBy"] = identified_by def do_identified_by_from_field_024(data, marc21, key, value): """Get identifier from field 024.""" + def populate_acquisitionTerms_note_qualifier(identifier): if subfield_c := not_repetitive( - marc21.bib_id, - marc21.rero_id, - key, value, 'c', default='').strip(): - acquisition_terms = data.get('acquisitionTerms', []) + marc21.bib_id, marc21.rero_id, key, value, "c", default="" + ).strip(): + acquisition_terms = data.get("acquisitionTerms", []) acquisition_terms.append(subfield_c) - data['acquisitionTerms'] = acquisition_terms + data["acquisitionTerms"] = acquisition_terms if subfield_d := not_repetitive( - marc21.bib_id, marc21.rero_id, - key, value, 'd', default='').strip(): - identifier['note'] = subfield_d - if value.get('q'): # $q is repetitive - identifier['qualifier'] = \ - ', '.join(utils.force_list(value.get('q'))) + marc21.bib_id, marc21.rero_id, key, value, "d", default="" + ).strip(): + identifier["note"] = subfield_d + if value.get("q"): # $q is repetitive + identifier["qualifier"] = ", ".join(utils.force_list(value.get("q"))) subfield_2_regexp = { - 'doi': { - 'type': 'bf:Doi' - }, - 'urn': { - 'type': 'bf:Urn' - }, - 'nipo': { - 'type': 'bf:Local', - 'source': 'NIPO' - }, - 'danacode': { - 'type': 'bf:Local', - 'source': 'danacode' - }, - 'vd18': { - 'type': 'bf:Local', - 'source': 'vd18' - }, - 'gtin-14': { - 'type': 'bf:Gtin14Number' - } + "doi": {"type": "bf:Doi"}, + "urn": {"type": "bf:Urn"}, + "nipo": {"type": "bf:Local", "source": "NIPO"}, + "danacode": {"type": "bf:Local", "source": "danacode"}, + "vd18": {"type": "bf:Local", "source": "vd18"}, + "gtin-14": {"type": "bf:Gtin14Number"}, } type_for_ind1 = { - '0': {'type': 
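# Table-driven sketch of the field 022 handling above: the subfield code by
# itself decides the ISSN identifier type, and the codes listed in
# STATUS_FOR additionally mark the identifier as cancelled or invalid.
# The input dict is a toy stand-in for a parsed 022 field.
STATUS_FOR = {"m": "cancelled", "y": "invalid"}
TYPE_FOR = {"a": "bf:Issn", "l": "bf:IssnL", "m": "bf:IssnL", "y": "bf:Issn"}

def issn_identifiers(field_022):
    identified_by = []
    for code in ("a", "l", "m", "y"):
        values = field_022.get(code) or []
        if isinstance(values, str):
            values = [values]
        for value in values:
            identifier = {"type": TYPE_FOR[code], "value": value.strip()}
            if code in STATUS_FOR:
                identifier["status"] = STATUS_FOR[code]
            identified_by.append(identifier)
    return identified_by

assert issn_identifiers({"a": "1234-5678", "y": "9999-0000"}) == [
    {"type": "bf:Issn", "value": "1234-5678"},
    {"type": "bf:Issn", "value": "9999-0000", "status": "invalid"},
]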
'bf:Isrc'}, - '1': {'type': 'bf:Upc'}, - '2': { - 'pattern': r'^(M|9790|979-0)', - 'matching_type': 'bf:Ismn' - }, - '3': { - 'pattern': r'^97', - 'matching_type': 'bf:Ean' - }, - '8': { + "0": {"type": "bf:Isrc"}, + "1": {"type": "bf:Upc"}, + "2": {"pattern": r"^(M|9790|979-0)", "matching_type": "bf:Ismn"}, + "3": {"pattern": r"^97", "matching_type": "bf:Ean"}, + "8": { # 33 chars example: 0000-0002-A3B1-0000-0-0000-0000-2 - 'pattern': r'^(.{24}|.{26}|(.{4}-){4}.-(.{4}\-){2}.)$', - 'matching_type': 'bf:Isan' - } + "pattern": r"^(.{24}|.{26}|(.{4}-){4}.-(.{4}\-){2}.)$", + "matching_type": "bf:Isan", + }, } identifier = {} - subfield_a = not_repetitive(marc21.bib_id, marc21.rero_id, - key, value, 'a', default='').strip() - subfield_2 = not_repetitive(marc21.bib_id, marc21.rero_id, - key, value, '2', default='').strip() + subfield_a = not_repetitive( + marc21.bib_id, marc21.rero_id, key, value, "a", default="" + ).strip() + subfield_2 = not_repetitive( + marc21.bib_id, marc21.rero_id, key, value, "2", default="" + ).strip() if subfield_a: - if re.search(r'permalink\.snl\.ch', subfield_a, re.IGNORECASE): - identifier.update({ - 'value': subfield_a, - 'type': 'uri', - 'source': 'SNL' - }) - elif re.search(r'bnf\.fr/ark', subfield_a, re.IGNORECASE): - identifier.update({ - 'value': subfield_a, - 'type': 'uri', - 'source': 'BNF' - }) + if re.search(r"permalink\.snl\.ch", subfield_a, re.IGNORECASE): + identifier.update({"value": subfield_a, "type": "uri", "source": "SNL"}) + elif re.search(r"bnf\.fr/ark", subfield_a, re.IGNORECASE): + identifier.update({"value": subfield_a, "type": "uri", "source": "BNF"}) elif subfield_2: - identifier['value'] = subfield_a + identifier["value"] = subfield_a populate_acquisitionTerms_note_qualifier(identifier) for pattern in subfield_2_regexp: if re.search(pattern, subfield_2, re.IGNORECASE): identifier.update(subfield_2_regexp[pattern]) else: # without subfield $2 ind1 = key[3] # indicateur_1 - if ind1 in ('0', '1', '2', '3', '8'): + if ind1 in ("0", "1", "2", "3", "8"): populate_acquisitionTerms_note_qualifier(identifier) - match = re.search(r'^(.+?)\s*\((.*)\)$', subfield_a) + match = re.search(r"^(.+?)\s*\((.*)\)$", subfield_a) if match: # match.group(2) : parentheses content - identifier['qualifier'] = ', '.join(filter( - None, - [match.group(2), identifier.get('qualifier', '')] - )) + identifier["qualifier"] = ", ".join( + filter(None, [match.group(2), identifier.get("qualifier", "")]) + ) # value without parenthesis and parentheses content - identifier['value'] = match.group(1) + identifier["value"] = match.group(1) else: - identifier['value'] = subfield_a - if 'type' in type_for_ind1[ind1]: # ind1 0,1 - identifier['type'] = type_for_ind1[ind1]['type'] + identifier["value"] = subfield_a + if "type" in type_for_ind1[ind1]: # ind1 0,1 + identifier["type"] = type_for_ind1[ind1]["type"] else: # ind1 in (2, 3, 8) tmp = subfield_a - if ind1 == '8': - tmp = identifier['value'] - if re.search(type_for_ind1[ind1]['pattern'], tmp): - identifier['type'] = \ - type_for_ind1[ind1]['matching_type'] + if ind1 == "8": + tmp = identifier["value"] + if re.search(type_for_ind1[ind1]["pattern"], tmp): + identifier["type"] = type_for_ind1[ind1]["matching_type"] else: - identifier['type'] = 'bf:Identifier' + identifier["type"] = "bf:Identifier" else: # ind1 not in (0, 1, 2, 3, 8) - identifier.update({ - 'value': subfield_a, - 'type': 'bf:Identifier' - }) - if not identifier.get('type'): - identifier['type'] = 'bf:Identifier' - identified_by = data.get('identifiedBy', []) + 
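# Sketch of the indicator-driven typing for field 024 above: for indicators
# 2, 3 and 8 the subfield $a value must also match a shape pattern before
# the specific type is assigned; anything else falls back to the generic
# bf:Identifier. Only the ISMN and EAN rules are reproduced here.
import re

TYPE_FOR_IND1 = {
    "2": {"pattern": r"^(M|9790|979-0)", "matching_type": "bf:Ismn"},
    "3": {"pattern": r"^97", "matching_type": "bf:Ean"},
}

def type_for_024(ind1, subfield_a):
    rule = TYPE_FOR_IND1.get(ind1)
    if rule and re.search(rule["pattern"], subfield_a):
        return rule["matching_type"]
    return "bf:Identifier"

assert type_for_024("2", "9790123456785") == "bf:Ismn"   # ISMN prefix 9790
assert type_for_024("3", "9781234567897") == "bf:Ean"    # EAN prefix 97
assert type_for_024("3", "1234") == "bf:Identifier"      # shape test fails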
identifier.update({"value": subfield_a, "type": "bf:Identifier"}) + if not identifier.get("type"): + identifier["type"] = "bf:Identifier" + identified_by = data.get("identifiedBy", []) identified_by.append(identifier) - data['identifiedBy'] = identified_by + data["identifiedBy"] = identified_by def do_identified_by_from_field_028(data, marc21, key, value): """Get identifier from field 028.""" - identified_by = data.get('identifiedBy', []) + identified_by = data.get("identifiedBy", []) if subfield_a := not_repetitive( - marc21.bib_id, marc21.rero_id, - key, value, 'a', default='').strip(): - identifier = {'value': subfield_a} - if value.get('q'): # $q is repetitive - identifier['qualifier'] = \ - ', '.join(utils.force_list(value.get('q'))) + marc21.bib_id, marc21.rero_id, key, value, "a", default="" + ).strip(): + identifier = {"value": subfield_a} + if value.get("q"): # $q is repetitive + identifier["qualifier"] = ", ".join(utils.force_list(value.get("q"))) if subfield_b := not_repetitive( - marc21.bib_id, marc21.rero_id, - key, value, 'b', default='').strip(): - identifier['source'] = subfield_b + marc21.bib_id, marc21.rero_id, key, value, "b", default="" + ).strip(): + identifier["source"] = subfield_b type_for_ind1 = { - '0': 'bf:AudioIssueNumber', - '1': 'bf:MatrixNumber', - '2': 'bf:MusicPlate', - '3': 'bf:MusicPublisherNumber', - '4': 'bf:VideoRecordingNumber', - '5': 'bf:PublisherNumber', - '6': 'bf:MusicDistributorNumber' + "0": "bf:AudioIssueNumber", + "1": "bf:MatrixNumber", + "2": "bf:MusicPlate", + "3": "bf:MusicPublisherNumber", + "4": "bf:VideoRecordingNumber", + "5": "bf:PublisherNumber", + "6": "bf:MusicDistributorNumber", } # key[3] is the indicateur_1 - identifier['type'] = type_for_ind1.get(key[3], 'bf:Identifier') + identifier["type"] = type_for_ind1.get(key[3], "bf:Identifier") identified_by.append(identifier) - data['identifiedBy'] = identified_by + data["identifiedBy"] = identified_by def do_identified_by_from_field_035(data, marc21, key, value, source=None): """Get identifier from field 035.""" - identified_by = data.get('identifiedBy', []) + identified_by = data.get("identifiedBy", []) if subfield_a := not_repetitive( - marc21.bib_id, marc21.rero_id, - key, value, 'a', default='').strip(): + marc21.bib_id, marc21.rero_id, key, value, "a", default="" + ).strip(): value = subfield_a # search source between parenthesis if match := re_identified.match(subfield_a): @@ -1251,214 +1313,211 @@ def do_identified_by_from_field_035(data, marc21, key, value, source=None): value = match.group(2) if source and value: identifier = { - 'value': value, - 'source': source, - 'type': 'bf:Local', + "value": value, + "source": source, + "type": "bf:Local", } identified_by.append(identifier) - data['identifiedBy'] = identified_by + data["identifiedBy"] = identified_by def do_acquisition_terms_from_field_037(data, value): """Get acquisition terms field 037.""" - acquisition_terms = data.get('acquisitionTerms', []) - if subfields_c := utils.force_list(value.get('c')): + acquisition_terms = data.get("acquisitionTerms", []) + if subfields_c := utils.force_list(value.get("c")): for subfield_c in subfields_c: acquisition_terms.append(subfield_c.strip()) - data['acquisitionTerms'] = acquisition_terms + data["acquisitionTerms"] = acquisition_terms def do_electronic_locator_from_field_856(data, marc21, key, value): """Get electronicLocator from field 856.""" - electronic_locators = data.get('electronicLocator', []) - if value.get('u'): + electronic_locators = data.get("electronicLocator", []) + 
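# Sketch of the source extraction for field 035 above. The module-level
# `re_identified` regex is defined elsewhere in this file; the pattern used
# below is an assumed equivalent for "(SOURCE)value" strings, so treat it
# as illustrative rather than authoritative.
import re

RE_IDENTIFIED = re.compile(r"\((.+?)\)\s*(.+)")  # assumption, not the original

def local_identifier(subfield_a, source=None):
    value = subfield_a.strip()
    if match := RE_IDENTIFIED.match(value):
        source, value = match.group(1), match.group(2)
    if source and value:
        return {"type": "bf:Local", "source": source, "value": value}

assert local_identifier("(RERO)R003456789") == {
    "type": "bf:Local", "source": "RERO", "value": "R003456789",
}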
if value.get("u"): electronic_locator_type = { - '0': 'resource', - '1': 'versionOfResource', - '2': 'relatedResource', - '8': 'hiddenUrl' + "0": "resource", + "1": "versionOfResource", + "2": "relatedResource", + "8": "hiddenUrl", } electronic_locator_content = [ - 'poster', - 'audio', - 'postcard', - 'addition', - 'debriefing', - 'exhibitionDocumentation', - 'erratum', - 'bookplate', - 'extract', - 'educationalSheet', - 'illustrations', - 'coverImage', - 'deliveryInformation', - 'biographicalInformation', - 'introductionPreface', - 'classReading', + "poster", + "audio", + "postcard", + "addition", + "debriefing", + "exhibitionDocumentation", + "erratum", + "bookplate", + "extract", + "educationalSheet", + "illustrations", + "coverImage", + "deliveryInformation", + "biographicalInformation", + "introductionPreface", + "classReading", "teachersKit", "publishersNote", - 'noteOnContent', - 'titlePage', - 'photography', - 'summarization' - "summarization", + "noteOnContent", + "titlePage", + "photography", + "summarization" "summarization", "onlineResourceViaRERODOC", "pressReview", "webSite", "tableOfContents", "fullText", - "video" + "video", ] indicator2 = key[4] - content = utils.force_list(value.get('3'))[0] if value.get('3') else \ - None + content = utils.force_list(value.get("3"))[0] if value.get("3") else None public_note = [] if content and content not in electronic_locator_content: public_note.append(content) - if value.get('y'): - public_note.extend(iter(utils.force_list(value.get('y')))) - if value.get('z'): - public_note.extend(iter(utils.force_list(value.get('z')))) - for url in utils.force_list(value.get('u')): + if value.get("y"): + public_note.extend(iter(utils.force_list(value.get("y")))) + if value.get("z"): + public_note.extend(iter(utils.force_list(value.get("z")))) + for url in utils.force_list(value.get("u")): electronic_locator = { - 'url': url, - 'type': electronic_locator_type.get(indicator2, 'noInfo') + "url": url, + "type": electronic_locator_type.get(indicator2, "noInfo"), } if content and content in electronic_locator_content: - electronic_locator['content'] = content + electronic_locator["content"] = content if public_note: - electronic_locator['publicNote'] = public_note + electronic_locator["publicNote"] = public_note if re_electonic_locator.match(url): electronic_locators.append(electronic_locator) else: - error_print('WARNING ELECTRONICLOCATOR:', marc21.bib_id, - marc21.rero_id, url) + error_print( + "WARNING ELECTRONICLOCATOR:", marc21.bib_id, marc21.rero_id, url + ) return electronic_locators or None def do_notes_and_original_title(data, key, value): """Get notes and original title.""" subfield_a = None - if value.get('a'): - subfield_a = utils.force_list(value.get('a'))[0] + if value.get("a"): + subfield_a = utils.force_list(value.get("a"))[0] is_original_title_data = False is_general_note_to_add = False - if key[:3] == '510': + if key[:3] == "510": items = get_field_items(value) - subfield_selection = {'a', 'c', 'x'} - note_str = ''.join(f'{blob_value} ' for blob_key, blob_value - in items if blob_key in subfield_selection) - - add_note( - dict(noteType='cited_by', label=note_str.strip()), - data + subfield_selection = {"a", "c", "x"} + note_str = "".join( + f"{blob_value} " + for blob_key, blob_value in items + if blob_key in subfield_selection ) - elif key[:3] == '500': + + add_note(dict(noteType="cited_by", label=note_str.strip()), data) + elif key[:3] == "500": # extract the original title regexp = re.compile( - 
r'\[?(Trad.+?de|Über.+?von|Trans.+?from|Titre original|' - r'Originaltitel|Original title)\s?\:\]?\s?(.+)', - re.IGNORECASE) + r"\[?(Trad.+?de|Über.+?von|Trans.+?from|Titre original|" + r"Originaltitel|Original title)\s?\:\]?\s?(.+)", + re.IGNORECASE, + ) match = regexp.search(subfield_a) if match and match.group(2): original_title = match.group(2).strip() - original_titles = data.get('originalTitle', []) + original_titles = data.get("originalTitle", []) original_titles.append(original_title) - data['originalTitle'] = original_titles + data["originalTitle"] = original_titles else: is_general_note_to_add = True else: is_general_note_to_add = True if is_general_note_to_add: - add_note( - dict(noteType='general', label=subfield_a), - data - ) + add_note(dict(noteType="general", label=subfield_a), data) def do_credits(key, value): """Get notes and original title.""" - if value.get('a'): - subfield_a = utils.force_list(value.get('a'))[0] - if key[:3] == '511': - subfield_a = f'Participants ou interprètes: {subfield_a}' + if value.get("a"): + subfield_a = utils.force_list(value.get("a"))[0] + if key[:3] == "511": + subfield_a = f"Participants ou interprètes: {subfield_a}" return subfield_a def do_sequence_numbering(data, value): """Get notes and original title.""" - if value.get('a'): - subfield_a = utils.force_list(value.get('a'))[0] - sequence_numbering = data.get('sequence_numbering', '') + if value.get("a"): + subfield_a = utils.force_list(value.get("a"))[0] + sequence_numbering = data.get("sequence_numbering", "") if sequence_numbering: - sequence_numbering += f' ; {subfield_a}' + sequence_numbering += f" ; {subfield_a}" else: sequence_numbering = subfield_a - data['sequence_numbering'] = sequence_numbering + data["sequence_numbering"] = sequence_numbering def do_classification(data, key, value): """Get classification and subject from 980.""" classification_type_per_tag = { - '050': 'bf:ClassificationLcc', - '060': 'bf:ClassificationNlm', - '080': 'bf:ClassificationUdc', - '082': 'bf:ClassificationDdc', + "050": "bf:ClassificationLcc", + "060": "bf:ClassificationNlm", + "080": "bf:ClassificationUdc", + "082": "bf:ClassificationDdc", } tag = key[:3] indicator1 = key[3] indicator2 = key[4] subfield_2 = None - if subfields_2 := utils.force_list(value.get('2')): + if subfields_2 := utils.force_list(value.get("2")): subfield_2 = subfields_2[0] - for subfield_a in utils.force_list(value.get('a', [])): + for subfield_a in utils.force_list(value.get("a", [])): classification = { - 'classificationPortion': subfield_a, - 'type': classification_type_per_tag[tag] + "classificationPortion": subfield_a, + "type": classification_type_per_tag[tag], } # LCC classification - if tag == '050' and indicator2 == '0': - classification['assigner'] = 'LOC' + if tag == "050" and indicator2 == "0": + classification["assigner"] = "LOC" # NLM classification - elif tag == '060' and indicator2 == '0': - classification['assigner'] = 'NLM' + elif tag == "060" and indicator2 == "0": + classification["assigner"] = "NLM" # UDC classification - elif tag == '080': - if subfields_x := utils.force_list(value.get('x')): - classification['subdivision'] = list(subfields_x) + elif tag == "080": + if subfields_x := utils.force_list(value.get("x")): + classification["subdivision"] = list(subfields_x) edition_parts = [] - if indicator1 == '0': - edition_parts.append('Full edition') - elif indicator1 == '1': - edition_parts.append('Abridged edition') + if indicator1 == "0": + edition_parts.append("Full edition") + elif indicator1 == "1": 
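# Sketch of the original-title extraction applied to general note 500 above:
# the regexp captures the text following markers such as "Titre original:"
# or "Original title:". The note strings below are invented examples.
import re

ORIGINAL_TITLE_REGEXP = re.compile(
    r"\[?(Trad.+?de|Über.+?von|Trans.+?from|Titre original|"
    r"Originaltitel|Original title)\s?\:\]?\s?(.+)",
    re.IGNORECASE,
)

def original_title(note):
    if (match := ORIGINAL_TITLE_REGEXP.search(note)) and match.group(2):
        return match.group(2).strip()

assert original_title(
    "Titre original: The old man and the sea"
) == "The old man and the sea"
assert original_title("Includes index") is None  # stays a general note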
+ edition_parts.append("Abridged edition") if subfield_2: edition_parts.append(subfield_2) if edition_parts: - classification['edition'] = ', '.join(edition_parts) + classification["edition"] = ", ".join(edition_parts) # DDC classification - elif tag == '082': - subfields_q = utils.force_list(value.get('q')) + elif tag == "082": + subfields_q = utils.force_list(value.get("q")) subfield_q = None edition_parts = [] if subfields_q: subfield_q = subfields_q[0] - if indicator2 == '0': - classification['assigner'] = 'LOC' + if indicator2 == "0": + classification["assigner"] = "LOC" elif subfield_q: - classification['assigner'] = subfield_q - if indicator1 == '0': - edition_parts.append('Full edition') - elif indicator1 == '1': - edition_parts.append('Abridged edition') + classification["assigner"] = subfield_q + if indicator1 == "0": + edition_parts.append("Full edition") + elif indicator1 == "1": + edition_parts.append("Abridged edition") if subfield_2: edition_parts.append(subfield_2) if edition_parts: - classification['edition'] = ', '.join(edition_parts) + classification["edition"] = ", ".join(edition_parts) if classification: - data.setdefault('classification', []).append(classification) + data.setdefault("classification", []).append(classification) def do_part_of(data, marc21, key, value): @@ -1490,14 +1549,14 @@ class Numbering(object): def __init__(self): """Constructor method.""" self._numbering = {} - self._year_regexp = re.compile(r'^\d{4}') - self._string_regexp = re.compile(r'.*') - self._pages_regexp = re.compile(r'^\d+(-\d+)?$') + self._year_regexp = re.compile(r"^\d{4}") + self._string_regexp = re.compile(r".*") + self._pages_regexp = re.compile(r"^\d+(-\d+)?$") self._pattern_per_key = { - 'year': self._year_regexp, - 'pages': self._pages_regexp, - 'issue': self._string_regexp, - 'volume': self._string_regexp + "year": self._year_regexp, + "pages": self._pages_regexp, + "issue": self._string_regexp, + "volume": self._string_regexp, } def add_numbering_value(self, key, value): @@ -1513,16 +1572,16 @@ def add_numbering_value(self, key, value): """ if self._pattern_per_key[key].search(value): self._numbering[key] = value - elif key != 'year': - self._numbering['discard'] = True + elif key != "year": + self._numbering["discard"] = True def has_year(self): """Check if `year` key is present in `Numbering` data.""" - return 'year' in self._numbering + return "year" in self._numbering def is_valid(self): """Check if `Numbering` data is valid.""" - return self._numbering and 'discard' not in self._numbering + return self._numbering and "discard" not in self._numbering def get(self): """Get the `Numbering` data object.""" @@ -1540,42 +1599,40 @@ def add_author_to_subfield_t(value): author = None pending_g_values = [] pending_v_values = [] - match = re.compile(r'\. -$') # match the trailing '. -' - subfield_selection = {'a', 't', 'g', 'v'} + match = re.compile(r"\. -$") # match the trailing '. -' + subfield_selection = {"a", "t", "g", "v"} for blob_key, blob_value in items: if blob_key in subfield_selection: - if blob_key == 'a': + if blob_key == "a": # remove the trailing '. 
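# Compact sketch of the Numbering helper above: every value must match the
# pattern registered for its key; a non-matching value for any key except
# `year` poisons the whole numbering with a `discard` marker, and only
# numberings without that marker are kept.
import re

NUMBERING_PATTERNS = {
    "year": re.compile(r"^\d{4}"),
    "pages": re.compile(r"^\d+(-\d+)?$"),
    "issue": re.compile(r".*"),
    "volume": re.compile(r".*"),
}

def build_numbering(values):
    numbering = {}
    for key, value in values.items():
        if NUMBERING_PATTERNS[key].search(value):
            numbering[key] = value
        elif key != "year":
            numbering["discard"] = True
    if numbering and "discard" not in numbering:
        return numbering

assert build_numbering({"year": "1984", "pages": "12-34"}) is not None
assert build_numbering({"pages": "xii"}) is None  # invalid pages -> discarded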
-' - author = match.sub('', blob_value) + author = match.sub("", blob_value) # reverse first name and last name - author_parts = author.split(',') - author = ' '.join(reversed(author_parts)).strip() - subfield_selection.remove('a') - elif blob_key == 't': + author_parts = author.split(",") + author = " ".join(reversed(author_parts)).strip() + subfield_selection.remove("a") + elif blob_key == "t": subfield_t = blob_value if author: - subfield_t += f' / {author}' - new_data.append(('t', subfield_t)) - elif blob_key == 'g': + subfield_t += f" / {author}" + new_data.append(("t", subfield_t)) + elif blob_key == "g": pending_g_values.append(blob_value) - elif blob_key == 'v': + elif blob_key == "v": pending_v_values.append(blob_value) - new_data.extend(('g', g_value) for g_value in pending_g_values) - new_data.extend(('v', v_value) for v_value in pending_v_values) + new_data.extend(("g", g_value) for g_value in pending_g_values) + new_data.extend(("v", v_value) for v_value in pending_v_values) return GroupableOrderedDict(tuple(new_data)) - if key[:3] == '773' and marc21.bib_level == 'm': + if key[:3] == "773" and marc21.bib_level == "m": if not marc21.has_field_580: # the author in subfield $a is appended to subfield $t value = add_author_to_subfield_t(value) # create a seriesStatement instead of a partOf - marc21.extract_series_statement_from_marc_field( - key, value, data - ) + marc21.extract_series_statement_from_marc_field(key, value, data) else: # 800, 830 if not marc21.has_field_490: # create a seriesStatement instead of a partOf - if key[:3] == '800': + if key[:3] == "800": # the author in subfield $a is appended to subfield $t value = add_author_to_subfield_t(value) marc21.extract_series_statement_from_marc_field(key, value, data) @@ -1613,89 +1670,103 @@ def do_work_access_point(marc21, key, value): * } """ tag = key[:3] - title_tag = 'a' + title_tag = "a" work_access_point = {} bib_id = marc21.bib_id # work_access_point.creator - if (tag in ['700', '800'] and value.get('t')) or tag == '710': - title_tag = 't' - if (creator_data := _do_work_access_point_creator(marc21, key, value))\ - and creator_data.get('preferred_name'): - work_access_point['creator'] = creator_data + if (tag in ["700", "800"] and value.get("t")) or tag == "710": + title_tag = "t" + if ( + creator_data := _do_work_access_point_creator(marc21, key, value) + ) and creator_data.get("preferred_name"): + work_access_point["creator"] = creator_data # work_access_point.title if value.get(title_tag): - work_access_point['title'] = remove_trailing_punctuation( - not_repetitive(bib_id, bib_id, key, value, title_tag), ',.' - ).replace('\u009c', '') + work_access_point["title"] = remove_trailing_punctuation( + not_repetitive(bib_id, bib_id, key, value, title_tag), ",." 
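# Sketch of the subfield $a handling in add_author_to_subfield_t above:
# the trailing ". -" is stripped, "Last, First" is flipped to "First Last",
# and the result is appended to the title in subfield $t.
import re

def title_with_author(subfield_a, subfield_t):
    author = re.sub(r"\. -$", "", subfield_a)               # drop trailing '. -'
    author = " ".join(reversed(author.split(","))).strip()  # First Last
    return f"{subfield_t} / {author}"

assert title_with_author(
    "Hemingway, Ernest. -", "The old man and the sea"
) == "The old man and the sea / Ernest Hemingway"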
+ ).replace("\u009c", "") - if not work_access_point.get('title'): - error_print('WARNING WORK ACCESS POINT:', bib_id, marc21.rero_id, - 'no title') + if not work_access_point.get("title"): + error_print( + "WARNING WORK ACCESS POINT:", bib_id, marc21.rero_id, "no title" + ) return None # work_access_point.date_of_work - if value.get('f'): - work_access_point['date_of_work'] = \ - not_repetitive(bib_id, bib_id, key, value, 'f') + if value.get("f"): + work_access_point["date_of_work"] = not_repetitive( + bib_id, bib_id, key, value, "f" + ) # work_access_point.miscellaneous_information - if value.get('g'): - work_access_point['miscellaneous_information'] = \ - remove_trailing_punctuation(not_repetitive( - bib_id, bib_id, key, value, 'g'), ',.') + if value.get("g"): + work_access_point["miscellaneous_information"] = remove_trailing_punctuation( + not_repetitive(bib_id, bib_id, key, value, "g"), ",." + ) # work_access_point.language - if value.get('l'): - language = not_repetitive(bib_id, bib_id, key, value, 'l')\ - .lstrip('(')\ - .rstrip('.')\ - .rstrip(')') + if value.get("l"): + language = ( + not_repetitive(bib_id, bib_id, key, value, "l") + .lstrip("(") + .rstrip(".") + .rstrip(")") + ) lang = language if language not in _LANGUAGES: - if len(language.split('-')) > 1 or language == 'mehrsprachig': - lang = 'mul' + if len(language.split("-")) > 1 or language == "mehrsprachig": + lang = "mul" elif iso_language := find(language): - lang = iso_language.get('iso639_2_b') + lang = iso_language.get("iso639_2_b") if lang in _LANGUAGES: - work_access_point['language'] = lang - if lang == 'mul' or lang not in _LANGUAGES: - error_print('WARNING WORK ACCESS POINT LANGUAGE:', bib_id, - marc21.rero_id, language) - if misc_info := work_access_point.get('miscellaneous_information'): - work_access_point['miscellaneous_information'] = \ - f'{misc_info} | language: {language}' + work_access_point["language"] = lang + if lang == "mul" or lang not in _LANGUAGES: + error_print( + "WARNING WORK ACCESS POINT LANGUAGE:", bib_id, marc21.rero_id, language + ) + if misc_info := work_access_point.get("miscellaneous_information"): + work_access_point["miscellaneous_information"] = ( + f"{misc_info} | language: {language}" + ) else: - work_access_point['miscellaneous_information'] = \ - f'language: {language}' + work_access_point["miscellaneous_information"] = f"language: {language}" # work_access_point.part - part_list = TitlePartList(part_number_code='n', part_name_code='p') + part_list = TitlePartList(part_number_code="n", part_name_code="p") items = get_field_items(value) index = 1 for blob_key, blob_value in items: - if blob_key in ['n', 'p']: + if blob_key in ["n", "p"]: part_list.update_part(blob_value, blob_key, blob_value) - if blob_key != '__order__': + if blob_key != "__order__": index += 1 if the_part_list := part_list.get_part_list(): for part in the_part_list: - if part_name := part.get('partName'): - part['partName'] = remove_trailing_punctuation(part_name) - work_access_point['part'] = the_part_list + if part_name := part.get("partName"): + part["partName"] = remove_trailing_punctuation(part_name) + work_access_point["part"] = the_part_list # work_access_point.form_subdivision - if value.get('k'): - work_access_point['form_subdivision'] = list(filter(None, [ - remove_trailing_punctuation(form_subdivision, ',.') - for form_subdivision in list(utils.force_list(value.get('k'))) - ])) + if value.get("k"): + work_access_point["form_subdivision"] = list( + filter( + None, + [ + 
remove_trailing_punctuation(form_subdivision, ",.") + for form_subdivision in list(utils.force_list(value.get("k"))) + ], + ) + ) # work_access_point.medium_of_performance_for_music - if value.get('m'): - work_access_point['medium_of_performance_for_music'] = list( - utils.force_list(value.get('m'))) + if value.get("m"): + work_access_point["medium_of_performance_for_music"] = list( + utils.force_list(value.get("m")) + ) # work_access_point.arranged_statement_for_music - if value.get('o'): - work_access_point['arranged_statement_for_music'] = not_repetitive( - bib_id, bib_id, key, value, 'o') + if value.get("o"): + work_access_point["arranged_statement_for_music"] = not_repetitive( + bib_id, bib_id, key, value, "o" + ) # work_access_point.key_for_music - if value.get('r'): - work_access_point['key_for_music'] = remove_trailing_punctuation( - not_repetitive(bib_id, bib_id, key, value, 'r'), ',.') + if value.get("r"): + work_access_point["key_for_music"] = remove_trailing_punctuation( + not_repetitive(bib_id, bib_id, key, value, "r"), ",." + ) return work_access_point or None @@ -1712,67 +1783,81 @@ def _do_work_access_point_creator(marc21, key, value): tag = key[:3] bib_id = marc21.bib_id data = {} - if tag in ['100', '700', '800']: - data = {'type': EntityType.PERSON} - if value.get('a'): - data['preferred_name'] = remove_trailing_punctuation( - not_repetitive(bib_id, bib_id, key, value, 'a',)).rstrip('.') - if value.get('b'): - data['numeration'] = remove_trailing_punctuation( - not_repetitive(bib_id, bib_id, key, value, 'b')) - if date := not_repetitive(bib_id, bib_id, key, value, 'd'): - date_parts = [d.strip().rstrip('.') for d in date.split('-')] + if tag in ["100", "700", "800"]: + data = {"type": EntityType.PERSON} + if value.get("a"): + data["preferred_name"] = remove_trailing_punctuation( + not_repetitive( + bib_id, + bib_id, + key, + value, + "a", + ) + ).rstrip(".") + if value.get("b"): + data["numeration"] = remove_trailing_punctuation( + not_repetitive(bib_id, bib_id, key, value, "b") + ) + if date := not_repetitive(bib_id, bib_id, key, value, "d"): + date_parts = [d.strip().rstrip(".") for d in date.split("-")] if date_parts and date_parts[0]: - data['date_of_birth'] = date_parts[0] + data["date_of_birth"] = date_parts[0] if len(date_parts) > 1 and date_parts[1]: - data['date_of_death'] = date_parts[1] - if value.get('c'): - data['qualifier'] = remove_trailing_punctuation( - not_repetitive(bib_id, bib_id, key, value, 'c')).rstrip('.') + data["date_of_death"] = date_parts[1] + if value.get("c"): + data["qualifier"] = remove_trailing_punctuation( + not_repetitive(bib_id, bib_id, key, value, "c") + ).rstrip(".") # bf:Organisation - if tag == '710': - data = { - 'type': EntityType.ORGANISATION, - 'conference': False - } - if value.get('a'): - data['name'] = remove_trailing_punctuation( - not_repetitive(bib_id, bib_id, key, value, 'a',)).rstrip('.') - if value.get('b'): - data['subordinate_unit'] = list(filter(None, [ - remove_trailing_punctuation(unit).rstrip('.') - for unit in list(utils.force_list(value.get('b'))) - ])) + if tag == "710": + data = {"type": EntityType.ORGANISATION, "conference": False} + if value.get("a"): + data["name"] = remove_trailing_punctuation( + not_repetitive( + bib_id, + bib_id, + key, + value, + "a", + ) + ).rstrip(".") + if value.get("b"): + data["subordinate_unit"] = list( + filter( + None, + [ + remove_trailing_punctuation(unit).rstrip(".") + for unit in list(utils.force_list(value.get("b"))) + ], + ) + ) if data and (identifier := 
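# Sketch of the $d date handling in _do_work_access_point_creator above:
# a "1899-1961." style date splits on "-" into birth and death years, with
# surrounding spaces and trailing dots stripped from each part.
def split_life_dates(date):
    data = {}
    date_parts = [d.strip().rstrip(".") for d in date.split("-")]
    if date_parts and date_parts[0]:
        data["date_of_birth"] = date_parts[0]
    if len(date_parts) > 1 and date_parts[1]:
        data["date_of_death"] = date_parts[1]
    return data

assert split_life_dates("1899-1961.") == {
    "date_of_birth": "1899", "date_of_death": "1961",
}
assert split_life_dates("1952-") == {"date_of_birth": "1952"}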
build_identifier(value)): - data['identifiedBy'] = identifier + data["identifiedBy"] = identifier return data def do_work_access_point_240(marc21, key, value): """Get work access point from 240.""" work_access_points = {} - part_list = TitlePartList( - part_number_code='n', - part_name_code='p' - ) - part_selection = {'n', 'p'} + part_list = TitlePartList(part_number_code="n", part_name_code="p") + part_selection = {"n", "p"} for blob_key, blob_value in get_field_items(value): - if blob_key in {'a'}: - title = remove_trailing_punctuation( - blob_value.replace('\u009c', '')) - work_access_points['title'] = title + if blob_key in {"a"}: + title = remove_trailing_punctuation(blob_value.replace("\u009c", "")) + work_access_points["title"] = title if blob_key in part_selection: part_list.update_part(blob_value, blob_key, blob_value) - if field_100 := marc21.get_fields('100'): - if agent := build_agent(marc21, '100', field_100[0]['subfields'])[1]: - work_access_points['creator'] = agent + if field_100 := marc21.get_fields("100"): + if agent := build_agent(marc21, "100", field_100[0]["subfields"])[1]: + work_access_points["creator"] = agent if the_part_list := part_list.get_part_list(): - work_access_points['part'] = the_part_list + work_access_points["part"] = the_part_list if work_access_points: return work_access_points @@ -1805,53 +1890,50 @@ def do_scale_and_cartographic(data, marc21, key, value): b > Angular scale z > Other """ - fields_034 = marc21.get_fields(tag='034') + fields_034 = marc21.get_fields(tag="034") index = 0 - if value.get('a'): - scales = data.get('scale', []) + if value.get("a"): + scales = data.get("scale", []) index = len(scales) - scale = { - 'label': remove_trailing_punctuation(value.get('a')) - } + scale = {"label": remove_trailing_punctuation(value.get("a"))} if fields_034 and len(fields_034) > index: - subfields_034_a = marc21.get_subfields(fields_034[index], 'a') - subfields_034_b = marc21.get_subfields(fields_034[index], 'b') - subfields_034_c = marc21.get_subfields(fields_034[index], 'c') + subfields_034_a = marc21.get_subfields(fields_034[index], "a") + subfields_034_b = marc21.get_subfields(fields_034[index], "b") + subfields_034_c = marc21.get_subfields(fields_034[index], "c") if subfields_034_a: - scale['type'] = _SCALE_TYPE.get( - subfields_034_a[0].strip(), _SCALE_TYPE['z']) + scale["type"] = _SCALE_TYPE.get( + subfields_034_a[0].strip(), _SCALE_TYPE["z"] + ) if subfields_034_b: ratio_linear_horizontal = subfields_034_b[0] try: - scale['ratio_linear_horizontal'] = \ - int(ratio_linear_horizontal) + scale["ratio_linear_horizontal"] = int(ratio_linear_horizontal) except Exception: error_print( - f'WARNING ratio_linear_horizontal is not an integer ' - f'for [{marc21.bib_id}]: {ratio_linear_horizontal}' + f"WARNING ratio_linear_horizontal is not an integer " + f"for [{marc21.bib_id}]: {ratio_linear_horizontal}" ) if subfields_034_c: ratio_linear_vertical = subfields_034_c[0] try: - scale['ratio_linear_vertical'] = \ - int(ratio_linear_vertical) + scale["ratio_linear_vertical"] = int(ratio_linear_vertical) except Exception: error_print( - f'WARNING ratio_linear_vertical is not an integer ' - f'for [{marc21.bib_id}]: {ratio_linear_vertical}', + f"WARNING ratio_linear_vertical is not an integer " + f"for [{marc21.bib_id}]: {ratio_linear_vertical}", ) scales.append(scale) - data['scale'] = scales + data["scale"] = scales - subfield_b = value.get('b') - subfield_c = value.get('c') + subfield_b = value.get("b") + subfield_c = value.get("c") cartographic_attribute = 
{} if subfield_b: - cartographic_attribute['projection'] = subfield_b - coordinates = {'label': subfield_c} if subfield_c else {} + cartographic_attribute["projection"] = subfield_b + coordinates = {"label": subfield_c} if subfield_c else {} if fields_034 and len(fields_034) > index: """ @@ -1860,10 +1942,10 @@ def do_scale_and_cartographic(data, marc21, key, value): * "latitude": "034$f 034$g" - concaténer les valeurs et les séparer si nécessaire d'un espace """ - subfields_034_d = marc21.get_subfields(fields_034[index], 'd') - subfields_034_e = marc21.get_subfields(fields_034[index], 'e') - subfields_034_f = marc21.get_subfields(fields_034[index], 'f') - subfields_034_g = marc21.get_subfields(fields_034[index], 'g') + subfields_034_d = marc21.get_subfields(fields_034[index], "d") + subfields_034_e = marc21.get_subfields(fields_034[index], "e") + subfields_034_f = marc21.get_subfields(fields_034[index], "f") + subfields_034_g = marc21.get_subfields(fields_034[index], "g") longitude_parts = [] latitude_parts = [] @@ -1875,18 +1957,18 @@ def do_scale_and_cartographic(data, marc21, key, value): latitude_parts.append(subfields_034_f[0]) if subfields_034_g: latitude_parts.append(subfields_034_g[0]) - longitude = ' '.join(longitude_parts) - latitude = ' '.join(latitude_parts) + longitude = " ".join(longitude_parts) + latitude = " ".join(latitude_parts) if _LONGITUDE.match(longitude): - coordinates['longitude'] = longitude + coordinates["longitude"] = longitude if _LATITUDE.match(latitude): - coordinates['latitude'] = latitude + coordinates["latitude"] = latitude if coordinates: - cartographic_attribute['coordinates'] = coordinates + cartographic_attribute["coordinates"] = coordinates if cartographic_attribute: - cartographic_attributes = data.get('cartographicAttributes', []) + cartographic_attributes = data.get("cartographicAttributes", []) cartographic_attributes.append(cartographic_attribute) - data['cartographicAttributes'] = cartographic_attributes + data["cartographicAttributes"] = cartographic_attributes def do_temporal_coverage(marc21, key, value): @@ -1922,6 +2004,7 @@ def do_temporal_coverage(marc21, key, value): (autant de chiffres que nécessaires). Ces dates sont toutes négatives (à convertir avec un -). 
""" + def test_min_max(data, minimum, maximum): with contextlib.suppress(ValueError): number = int(data) @@ -1929,91 +2012,90 @@ def test_min_max(data, minimum, maximum): return False def format_date_b(date): - date = date.replace(' ', '') - if date[0] == 'c': - date = f'-{date[1:]}' - elif date[0] == 'd': - date = f'+{date[1:]}' + date = date.replace(" ", "") + if date[0] == "c": + date = f"-{date[1:]}" + elif date[0] == "d": + date = f"+{date[1:]}" else: - date = f'+{date}' + date = f"+{date}" date_str = date[0] year = date[1:5] if test_min_max(year, 0, 9999): - date_str = f'{date_str}{year}' + date_str = f"{date_str}{year}" month = date[5:7].zfill(2) if test_min_max(month, 1, 12): - date_str = f'{date_str}-{month}' - day = date[7:9].zfill(2) if test_min_max( - date[7:9], 1, 31) else '01' - date_str = f'{date_str}-{day}' + date_str = f"{date_str}-{month}" + day = date[7:9].zfill(2) if test_min_max(date[7:9], 1, 31) else "01" + date_str = f"{date_str}-{day}" hour = date[9:11] if test_min_max(hour, 0, 23): - minute = date[11:13] if \ - test_min_max(date[11:13], 0, 59) else '00' - second = date[13:15] if \ - test_min_max(date[13:15], 0, 59) else '00' - date_str = f'{date_str}T{hour}:{minute}:{second}' + minute = date[11:13] if test_min_max(date[11:13], 0, 59) else "00" + second = date[13:15] if test_min_max(date[13:15], 0, 59) else "00" + date_str = f"{date_str}T{hour}:{minute}:{second}" if len(date_str) > 1: return date_str def format_date_c(date): if test_min_max(date, 0, sys.maxsize): - return f'-{date}' + return f"-{date}" ind1 = key[3] - coverage_type = 'time' if ind1 in ['0', '1'] else 'period' + coverage_type = "time" if ind1 in ["0", "1"] else "period" temporal_coverage = {} - if subfields_a := utils.force_list(value.get('a')): + if subfields_a := utils.force_list(value.get("a")): correct_subfields_a = [] for subfield_a in subfields_a: subfield_a = subfield_a.lower() # duplicate periode_code for the type time - if coverage_type == 'time' and len(subfield_a) == 2: - subfield_a = f'{subfield_a}{subfield_a}' + if coverage_type == "time" and len(subfield_a) == 2: + subfield_a = f"{subfield_a}{subfield_a}" if _PERIOD_CODE.match(subfield_a): correct_subfields_a.append(subfield_a) else: - error_print('WARNING PERIOD CODE:', marc21.bib_id, - marc21.rero_id, subfield_a) + error_print( + "WARNING PERIOD CODE:", marc21.bib_id, marc21.rero_id, subfield_a + ) if correct_subfields_a: - temporal_coverage['period_code'] = correct_subfields_a - if coverage_type == 'time': - if subfield_b := not_repetitive(marc21.bib_id, marc21.bib_id, key, - value, 'b'): - temporal_coverage['date'] = format_date_b(subfield_b) - elif subfield_c := not_repetitive(marc21.bib_id, marc21.bib_id, key, - value, 'c'): - temporal_coverage['date'] = format_date_c(subfield_c) + temporal_coverage["period_code"] = correct_subfields_a + if coverage_type == "time": + if subfield_b := not_repetitive(marc21.bib_id, marc21.bib_id, key, value, "b"): + temporal_coverage["date"] = format_date_b(subfield_b) + elif subfield_c := not_repetitive( + marc21.bib_id, marc21.bib_id, key, value, "c" + ): + temporal_coverage["date"] = format_date_c(subfield_c) else: - if subfields_b := utils.force_list(value.get('b')): + if subfields_b := utils.force_list(value.get("b")): if start_date := format_date_b(subfields_b[0]): - temporal_coverage['start_date'] = start_date + temporal_coverage["start_date"] = start_date if len(subfields_b) > 1: if end_date := format_date_b(subfields_b[1]): - temporal_coverage['end_date'] = end_date - elif subfields_c := 
utils.force_list(value.get('c')): + temporal_coverage["end_date"] = end_date + elif subfields_c := utils.force_list(value.get("c")): if start_date := format_date_c(subfields_c[0]): - temporal_coverage['start_date'] = start_date + temporal_coverage["start_date"] = start_date if len(subfields_c) > 1: if end_date := format_date_c(subfields_c[1]): - temporal_coverage['end_date'] = end_date + temporal_coverage["end_date"] = end_date if temporal_coverage: - temporal_coverage['type'] = coverage_type + temporal_coverage["type"] = coverage_type return temporal_coverage def perform_subdivisions(field, value): """Perform subject subdivisions from MARC field.""" subdivisions = { - 'v': EntityType.TOPIC, - 'x': EntityType.TOPIC, - 'y': EntityType.TEMPORAL, - 'z': EntityType.PLACE + "v": EntityType.TOPIC, + "x": EntityType.TOPIC, + "y": EntityType.TEMPORAL, + "z": EntityType.PLACE, } for tag, val in value.items(): if tag in subdivisions: for v in utils.force_list(val): - field.setdefault('subdivisions', []).append(dict(entity={ - 'type': subdivisions[tag], - 'authorized_access_point': v - })) + field.setdefault("subdivisions", []).append( + dict( + entity={"type": subdivisions[tag], "authorized_access_point": v} + ) + ) diff --git a/rero_ils/modules/documents/dojson/contrib/unimarctojson/__init__.py b/rero_ils/modules/documents/dojson/contrib/unimarctojson/__init__.py index a4870a64b1..eaad9ca76e 100644 --- a/rero_ils/modules/documents/dojson/contrib/unimarctojson/__init__.py +++ b/rero_ils/modules/documents/dojson/contrib/unimarctojson/__init__.py @@ -19,4 +19,4 @@ from .model import unimarc -__all__ = ('unimarc') +__all__ = "unimarc" diff --git a/rero_ils/modules/documents/dojson/contrib/unimarctojson/model.py b/rero_ils/modules/documents/dojson/contrib/unimarctojson/model.py index 9c61793b26..95cc6681fc 100644 --- a/rero_ils/modules/documents/dojson/contrib/unimarctojson/model.py +++ b/rero_ils/modules/documents/dojson/contrib/unimarctojson/model.py @@ -27,279 +27,285 @@ from isbnlib import EAN13 from pkg_resources import resource_string -from rero_ils.dojson.utils import ReroIlsUnimarcOverdo, TitlePartList, \ - add_note, build_string_from_subfields, get_field_items, \ - get_field_link_data, make_year, not_repetitive, \ - remove_trailing_punctuation +from rero_ils.dojson.utils import ( + ReroIlsUnimarcOverdo, + TitlePartList, + add_note, + build_string_from_subfields, + get_field_items, + get_field_link_data, + make_year, + not_repetitive, + remove_trailing_punctuation, +) from rero_ils.modules.documents.api import Document -from rero_ils.modules.documents.dojson.contrib.marc21tojson.utils import \ - get_mef_link +from rero_ils.modules.documents.dojson.contrib.marc21tojson.utils import get_mef_link from rero_ils.modules.documents.models import DocumentFictionType from rero_ils.modules.documents.utils import create_authorized_access_point from rero_ils.modules.entities.models import EntityType _ISSUANCE_MAIN_TYPE_PER_BIB_LEVEL = { - 'a': 'rdami:1001', - 'c': 'rdami:1001', - 'i': 'rdami:1004', - 'm': 'rdami:1001', - 's': 'rdami:1003' + "a": "rdami:1001", + "c": "rdami:1001", + "i": "rdami:1004", + "m": "rdami:1001", + "s": "rdami:1003", } _ISSUANCE_SUBTYPE_PER_BIB_LEVEL = { - 'a': 'article', - 'c': 'privateFile', - 'm': 'materialUnit' + "a": "article", + "c": "privateFile", + "m": "materialUnit", } _ISSUANCE_SUBTYPE_PER_SERIAL_TYPE = { - 'a': 'periodical', - 'b': 'monographicSeries', - 'e': 'updatingLoose-leaf', - 'f': 'updatingWebsite', - 'g': 'updatingWebsite', - 'h': 'updatingWebsite' + "a": 
"periodical", + "b": "monographicSeries", + "e": "updatingLoose-leaf", + "f": "updatingWebsite", + "g": "updatingWebsite", + "h": "updatingWebsite", } _COUNTRY_UNIMARC_MARC21 = { - 'AD': 'an', - 'AE': 'ts', - 'AF': 'af', - 'AG': 'aq', - 'AI': 'am', - 'AL': 'aa', - 'AM': 'ai', - 'AO': 'ao', - 'AQ': 'ay', - 'AR': 'ag', - 'AS': 'as', - 'AT': 'au', - 'AU': 'at', - 'AUT': 'at', - 'AW': 'aw', - 'AZ': 'aj', - 'BA': 'bn', - 'BB': 'bb', - 'BD': 'bg', - 'BE': 'be', - 'BF': 'uv', - 'BG': 'bu', - 'BH': 'ba', - 'BI': 'bd', - 'BJ': 'dm', - 'BL': 'sc', - 'BM': 'bm', - 'BN': 'bx', - 'BO': 'bo', - 'BR': 'bl', - 'BS': 'bf', - 'BT': 'bt', - 'BV': 'bv', - 'BW': 'bs', - 'BZ': 'bh', - 'CA': 'xxc', - 'CC': 'xb', - 'CD': 'cf', - 'CF': 'cx', - 'CG': 'cg', - 'CH': 'sz', - 'CI': 'iv', - 'CK': 'cw', - 'CL': 'cl', - 'CM': 'cm', - 'CN': 'cc', - 'CO': 'ck', - 'CR': 'cr', - 'CU': 'cu', - 'CW': 'co', - 'CX': 'xa', - 'CY': 'cy', - 'DE': 'gw', - 'DJ': 'ft', - 'DK': 'dk', - 'DM': 'dq', - 'DZ': 'ae', - 'EC': 'ec', - 'EE': 'er', - 'EG': 'ua', - 'EH': 'ss', - 'ER': 'ea', - 'ES': 'sp', - 'ET': 'et', - 'FI': 'fi', - 'FJ': 'fj', - 'FM': 'fm', - 'FO': 'fa', - 'FR': 'fr', - 'GA': 'go', - 'GB': 'xxk', - 'GD': 'gd', - 'GE': 'gau', - 'GF': 'fg', - 'GG': 'gg', - 'GH': 'gh', - 'GI': 'gi', - 'GL': 'gl', - 'GM': 'gm', - 'GN': 'gv', - 'GP': 'gp', - 'GQ': 'eg', - 'GR': 'gr', - 'GS': 'xs', - 'GT': 'gt', - 'GU': 'gu', - 'GW': 'pg', - 'GY': 'gy', - 'HM': 'hm', - 'HN': 'ho', - 'HR': 'ci', - 'HT': 'ht', - 'HU': 'hu', - 'ID': 'io', - 'IE': 'ie', - 'IL': 'is', - 'IM': 'im', - 'IN': 'ii', - 'IQ': 'iq', - 'IR': 'ir', - 'IS': 'ic', - 'IT': 'it', - 'JE': 'je', - 'JM': 'jm', - 'JO': 'jo', - 'JP': 'ja', - 'KE': 'ke', - 'KG': 'kg', - 'KH': 'cb', - 'KI': 'gb', - 'KM': 'cq', - 'KN': 'xd', - 'KP': 'kn', - 'KR': 'ko', - 'KW': 'ku', - 'KY': 'cj', - 'KZ': 'kz', - 'LA': 'ls', - 'LB': 'le', - 'LC': 'xk', - 'LI': 'lh', - 'LK': 'ce', - 'LR': 'lb', - 'LS': 'lo', - 'LT': 'li', - 'LU': 'lu', - 'LV': 'lv', - 'LY': 'ly', - 'MA': 'mr', - 'MC': 'mc', - 'MD': 'mv', - 'ME': 'mo', - 'MG': 'mg', - 'MH': 'xe', - 'MK': 'xn', - 'ML': 'ml', - 'MM': 'br', - 'MN': 'mp', - 'MP': 'nw', - 'MQ': 'mq', - 'MR': 'mu', - 'MS': 'mj', - 'MT': 'mm', - 'MU': 'mf', - 'MV': 'xc', - 'MW': 'mw', - 'MX': 'mx', - 'MY': 'my', - 'MZ': 'mz', - 'NA': 'sx', - 'NC': 'nl', - 'NE': 'ng', - 'NF': 'nx', - 'NG': 'nr', - 'NI': 'nq', - 'NL': 'ne', - 'NO': 'no', - 'NP': 'np', - 'NR': 'nu', - 'NU': 'xh', - 'NZ': 'nz', - 'OM': 'mk', - 'PA': 'pn', - 'PE': 'pe', - 'PF': 'fp', - 'PG': 'pp', - 'PH': 'ph', - 'PK': 'pk', - 'PL': 'pl', - 'PM': 'xl', - 'PN': 'pc', - 'PR': 'pr', - 'PT': 'po', - 'PW': 'pw', - 'PY': 'py', - 'QA': 'qa', - 'RE': 're', - 'RO': 'rm', - 'RU': 'ru', - 'RW': 'rw', - 'SA': 'su', - 'SB': 'bp', - 'SC': 'se', - 'SD': 'sj', - 'SE': 'sw', - 'SG': 'si', - 'SH': 'xj', - 'SI': 'xv', - 'SK': 'xo', - 'SL': 'sl', - 'SM': 'st', - 'SN': 'sg', - 'SO': 'so', - 'SR': 'sr', - 'SS': 'sd', - 'ST': 'sf', - 'SV': 'es', - 'SX': 'sn', - 'SZ': 'sq', - 'TC': 'tc', - 'TD': 'cd', - 'TF': 'fs', - 'TG': 'tg', - 'TH': 'th', - 'TJ': 'ta', - 'TK': 'tl', - 'TL': 'em', - 'TM': 'tk', - 'TN': 'ti', - 'TO': 'to', - 'TR': 'tu', - 'TT': 'tr', - 'TV': 'tv', - 'TZ': 'tz', - 'UA': 'un', - 'UG': 'ug', - 'US': 'xxu', - 'UY': 'uy', - 'UZ': 'uz', - 'VA': 'vc', - 'VC': 'xm', - 'VE': 've', - 'VN': 'vm', - 'VU': 'nn', - 'WF': 'wf', - 'WS': 'ws', - 'XX': 'xx', - 'YE': 'ye', - 'YT': 'ot', - 'ZA': 'sa', - 'ZM': 'za', - 'ZW': 'rh', + "AD": "an", + "AE": "ts", + "AF": "af", + "AG": "aq", + "AI": "am", + "AL": "aa", + "AM": "ai", + "AO": "ao", + "AQ": 
"ay", + "AR": "ag", + "AS": "as", + "AT": "au", + "AU": "at", + "AUT": "at", + "AW": "aw", + "AZ": "aj", + "BA": "bn", + "BB": "bb", + "BD": "bg", + "BE": "be", + "BF": "uv", + "BG": "bu", + "BH": "ba", + "BI": "bd", + "BJ": "dm", + "BL": "sc", + "BM": "bm", + "BN": "bx", + "BO": "bo", + "BR": "bl", + "BS": "bf", + "BT": "bt", + "BV": "bv", + "BW": "bs", + "BZ": "bh", + "CA": "xxc", + "CC": "xb", + "CD": "cf", + "CF": "cx", + "CG": "cg", + "CH": "sz", + "CI": "iv", + "CK": "cw", + "CL": "cl", + "CM": "cm", + "CN": "cc", + "CO": "ck", + "CR": "cr", + "CU": "cu", + "CW": "co", + "CX": "xa", + "CY": "cy", + "DE": "gw", + "DJ": "ft", + "DK": "dk", + "DM": "dq", + "DZ": "ae", + "EC": "ec", + "EE": "er", + "EG": "ua", + "EH": "ss", + "ER": "ea", + "ES": "sp", + "ET": "et", + "FI": "fi", + "FJ": "fj", + "FM": "fm", + "FO": "fa", + "FR": "fr", + "GA": "go", + "GB": "xxk", + "GD": "gd", + "GE": "gau", + "GF": "fg", + "GG": "gg", + "GH": "gh", + "GI": "gi", + "GL": "gl", + "GM": "gm", + "GN": "gv", + "GP": "gp", + "GQ": "eg", + "GR": "gr", + "GS": "xs", + "GT": "gt", + "GU": "gu", + "GW": "pg", + "GY": "gy", + "HM": "hm", + "HN": "ho", + "HR": "ci", + "HT": "ht", + "HU": "hu", + "ID": "io", + "IE": "ie", + "IL": "is", + "IM": "im", + "IN": "ii", + "IQ": "iq", + "IR": "ir", + "IS": "ic", + "IT": "it", + "JE": "je", + "JM": "jm", + "JO": "jo", + "JP": "ja", + "KE": "ke", + "KG": "kg", + "KH": "cb", + "KI": "gb", + "KM": "cq", + "KN": "xd", + "KP": "kn", + "KR": "ko", + "KW": "ku", + "KY": "cj", + "KZ": "kz", + "LA": "ls", + "LB": "le", + "LC": "xk", + "LI": "lh", + "LK": "ce", + "LR": "lb", + "LS": "lo", + "LT": "li", + "LU": "lu", + "LV": "lv", + "LY": "ly", + "MA": "mr", + "MC": "mc", + "MD": "mv", + "ME": "mo", + "MG": "mg", + "MH": "xe", + "MK": "xn", + "ML": "ml", + "MM": "br", + "MN": "mp", + "MP": "nw", + "MQ": "mq", + "MR": "mu", + "MS": "mj", + "MT": "mm", + "MU": "mf", + "MV": "xc", + "MW": "mw", + "MX": "mx", + "MY": "my", + "MZ": "mz", + "NA": "sx", + "NC": "nl", + "NE": "ng", + "NF": "nx", + "NG": "nr", + "NI": "nq", + "NL": "ne", + "NO": "no", + "NP": "np", + "NR": "nu", + "NU": "xh", + "NZ": "nz", + "OM": "mk", + "PA": "pn", + "PE": "pe", + "PF": "fp", + "PG": "pp", + "PH": "ph", + "PK": "pk", + "PL": "pl", + "PM": "xl", + "PN": "pc", + "PR": "pr", + "PT": "po", + "PW": "pw", + "PY": "py", + "QA": "qa", + "RE": "re", + "RO": "rm", + "RU": "ru", + "RW": "rw", + "SA": "su", + "SB": "bp", + "SC": "se", + "SD": "sj", + "SE": "sw", + "SG": "si", + "SH": "xj", + "SI": "xv", + "SK": "xo", + "SL": "sl", + "SM": "st", + "SN": "sg", + "SO": "so", + "SR": "sr", + "SS": "sd", + "ST": "sf", + "SV": "es", + "SX": "sn", + "SZ": "sq", + "TC": "tc", + "TD": "cd", + "TF": "fs", + "TG": "tg", + "TH": "th", + "TJ": "ta", + "TK": "tl", + "TL": "em", + "TM": "tk", + "TN": "ti", + "TO": "to", + "TR": "tu", + "TT": "tr", + "TV": "tv", + "TZ": "tz", + "UA": "un", + "UG": "ug", + "US": "xxu", + "UY": "uy", + "UZ": "uz", + "VA": "vc", + "VC": "xm", + "VE": "ve", + "VN": "vm", + "VU": "nn", + "WF": "wf", + "WS": "ws", + "XX": "xx", + "YE": "ye", + "YT": "ot", + "ZA": "sa", + "ZM": "za", + "ZW": "rh", } unimarc = ReroIlsUnimarcOverdo() -@unimarc.over('type_and_issuance', 'leader') +@unimarc.over("type_and_issuance", "leader") @utils.ignore_value def unimarc_type_and_issuance(self, key, value): """ @@ -316,81 +322,76 @@ def unimarc_type_and_issuance(self, key, value): doc_type = [{"main_type": "docmaintype_other"}] if unimarc.admin_meta_data: - self['adminMetadata'] = unimarc.admin_meta_data - - if 
unimarc.record_type == 'a': - if unimarc.bib_level == 'm': - doc_type = [{ - "main_type": "docmaintype_book", - "subtype": "docsubtype_other_book" - }] - elif unimarc.bib_level == 's': - doc_type = [{ - "main_type": "docmaintype_serial" - }] - elif unimarc.bib_level == 'a': - doc_type = [{ - "main_type": "docmaintype_article", - }] - elif unimarc.record_type in ['c', 'd']: - doc_type = [{ - "main_type": "docmaintype_score", - "subtype": "docsubtype_printed_score" - }] - elif unimarc.record_type in ['i', 'j']: - doc_type = [{ - "main_type": "docmaintype_audio", - "subtype": "docsubtype_music" - }] - elif unimarc.record_type == 'g': - doc_type = [{ - "main_type": "docmaintype_movie_series", - "subtype": "docsubtype_movie" - }] + self["adminMetadata"] = unimarc.admin_meta_data + + if unimarc.record_type == "a": + if unimarc.bib_level == "m": + doc_type = [ + {"main_type": "docmaintype_book", "subtype": "docsubtype_other_book"} + ] + elif unimarc.bib_level == "s": + doc_type = [{"main_type": "docmaintype_serial"}] + elif unimarc.bib_level == "a": + doc_type = [ + { + "main_type": "docmaintype_article", + } + ] + elif unimarc.record_type in ["c", "d"]: + doc_type = [ + {"main_type": "docmaintype_score", "subtype": "docsubtype_printed_score"} + ] + elif unimarc.record_type in ["i", "j"]: + doc_type = [{"main_type": "docmaintype_audio", "subtype": "docsubtype_music"}] + elif unimarc.record_type == "g": + doc_type = [ + {"main_type": "docmaintype_movie_series", "subtype": "docsubtype_movie"} + ] # Todo 007 - self['type'] = doc_type + self["type"] = doc_type # get the mode of issuance - self['issuance'] = {} - main_type = _ISSUANCE_MAIN_TYPE_PER_BIB_LEVEL.get( - unimarc.bib_level, 'rdami:1001') - sub_type = 'NOT_DEFINED' + self["issuance"] = {} + main_type = _ISSUANCE_MAIN_TYPE_PER_BIB_LEVEL.get(unimarc.bib_level, "rdami:1001") + sub_type = "NOT_DEFINED" if unimarc.bib_level in _ISSUANCE_SUBTYPE_PER_BIB_LEVEL: sub_type = _ISSUANCE_SUBTYPE_PER_BIB_LEVEL[unimarc.bib_level] if unimarc.serial_type in _ISSUANCE_SUBTYPE_PER_SERIAL_TYPE: sub_type = _ISSUANCE_SUBTYPE_PER_SERIAL_TYPE[unimarc.serial_type] - self['issuance'] = dict(main_type=main_type, subtype=sub_type) + self["issuance"] = dict(main_type=main_type, subtype=sub_type) # fiction statement - self['fiction_statement'] = DocumentFictionType.Unspecified.value + self["fiction_statement"] = DocumentFictionType.Unspecified.value -@unimarc.over('identifiedBy', '^003') +@unimarc.over("identifiedBy", "^003") @utils.ignore_value def unimarc_bnf_id(self, key, value): """Get ID. 
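# Sketch of the mode-of-issuance derivation above: the UNIMARC bibliographic
# level selects the RDA main type, the bibliographic level proposes a
# subtype, and the serial type can refine it. The tables are trimmed copies
# of the module-level dictionaries defined earlier in this file.
MAIN_TYPE_PER_BIB_LEVEL = {"a": "rdami:1001", "m": "rdami:1001", "s": "rdami:1003"}
SUBTYPE_PER_BIB_LEVEL = {"a": "article", "m": "materialUnit"}
SUBTYPE_PER_SERIAL_TYPE = {"a": "periodical", "b": "monographicSeries"}

def issuance(bib_level, serial_type=""):
    sub_type = SUBTYPE_PER_BIB_LEVEL.get(bib_level, "NOT_DEFINED")
    sub_type = SUBTYPE_PER_SERIAL_TYPE.get(serial_type, sub_type)
    return dict(
        main_type=MAIN_TYPE_PER_BIB_LEVEL.get(bib_level, "rdami:1001"),
        subtype=sub_type,
    )

assert issuance("s", "a") == {"main_type": "rdami:1003", "subtype": "periodical"}
assert issuance("m") == {"main_type": "rdami:1001", "subtype": "materialUnit"}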
identifier bnfID 003 """ - identifiers = self.get('identifiedBy', []) - if value.startswith('http://catalogue.bnf.fr/'): - identifiers.append({ - "type": "bf:Local", - "source": "BNF", - "value": value.replace('http://catalogue.bnf.fr/', '') - }) + identifiers = self.get("identifiedBy", []) + if value.startswith("http://catalogue.bnf.fr/"): + identifiers.append( + { + "type": "bf:Local", + "source": "BNF", + "value": value.replace("http://catalogue.bnf.fr/", ""), + } + ) return identifiers -@unimarc.over('tableOfContents', '^464..') +@unimarc.over("tableOfContents", "^464..") @utils.for_each_value @utils.ignore_value def marc21_to_tableOfContents(self, key, value): """Get tableOfContents from repetitive field 464.""" - if table_of_contents := build_string_from_subfields(value, 't'): - self.setdefault('tableOfContents', []).append(table_of_contents) + if table_of_contents := build_string_from_subfields(value, "t"): + self.setdefault("tableOfContents", []).append(table_of_contents) -@unimarc.over('title', '^200..') +@unimarc.over("title", "^200..") @utils.ignore_value def unimarc_title(self, key, value): """Get title data. @@ -409,98 +410,104 @@ def unimarc_title(self, key, value): $i : repetitive """ title_list = [] - title = self.get('title', []) + title = self.get("title", []) # this function will be called for each fields 200, but as we already # process all of them in the first run and the tittle is already build, # there is nothing to do if the title has already been build. if not title: responsibilites = [] - for tag in ['200', '510', - '512', '514', '515', '516', '517', '518', '519', '532']: + for tag in [ + "200", + "510", + "512", + "514", + "515", + "516", + "517", + "518", + "519", + "532", + ]: for field in unimarc.get_alt_graphic_fields(tag=tag): title_data = {} - part_list = TitlePartList( - part_number_code='h', - part_name_code='i' - ) - subfields_6 = unimarc.get_subfields(field, '6') - title_type = 'bf:VariantTitle' - if tag == '200': - title_type = 'bf:Title' - elif tag == '510': - title_type = 'bf:ParallelTitle' + part_list = TitlePartList(part_number_code="h", part_name_code="i") + subfields_6 = unimarc.get_subfields(field, "6") + title_type = "bf:VariantTitle" + if tag == "200": + title_type = "bf:Title" + elif tag == "510": + title_type = "bf:ParallelTitle" # build title parts index = 1 - link = '' + link = "" if subfields_6: link = subfields_6[0] - items = get_field_items(field['subfields']) + items = get_field_items(field["subfields"]) for blob_key, blob_value in items: - if blob_key == 'a': - value_data = \ - unimarc.build_value_with_alternate_graphic( - tag, blob_key, blob_value, - index, link, ',.', ':;/-=') - title_data['mainTitle'] = value_data - if blob_key == 'e': - value_data = \ - unimarc.build_value_with_alternate_graphic( - tag, blob_key, blob_value, - index, link, ',.', ':;/-=') - title_data['subtitle'] = value_data - if blob_key in ['f', 'g'] and tag == '200': - value_data = \ - unimarc.build_value_with_alternate_graphic( - tag, blob_key, blob_value, - index, link, ',.', ':;/-=') + if blob_key == "a": + value_data = unimarc.build_value_with_alternate_graphic( + tag, blob_key, blob_value, index, link, ",.", ":;/-=" + ) + title_data["mainTitle"] = value_data + if blob_key == "e": + value_data = unimarc.build_value_with_alternate_graphic( + tag, blob_key, blob_value, index, link, ",.", ":;/-=" + ) + title_data["subtitle"] = value_data + if blob_key in ["f", "g"] and tag == "200": + value_data = unimarc.build_value_with_alternate_graphic( + tag, blob_key, 
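# Sketch of the control field 003 handling above: a BnF permalink becomes a
# bf:Local identifier whose value is the catalogue number left after the
# prefix is removed. The ark value below is an invented sample.
def bnf_identifier(value):
    prefix = "http://catalogue.bnf.fr/"
    if value.startswith(prefix):
        return {"type": "bf:Local", "source": "BNF",
                "value": value.replace(prefix, "")}

assert bnf_identifier("http://catalogue.bnf.fr/ark:/12148/cb123456789") == {
    "type": "bf:Local", "source": "BNF", "value": "ark:/12148/cb123456789",
}
assert bnf_identifier("http://example.org/record/1") is None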
blob_value, index, link, ",.", ":;/-=" + ) responsibilites.append(value_data) - if blob_key in ['h', 'i']: + if blob_key in ["h", "i"]: part_list.update_part( - [dict(value=blob_value)], blob_key, blob_value) - if blob_key != '__order__': + [dict(value=blob_value)], blob_key, blob_value + ) + if blob_key != "__order__": index += 1 - title_data['type'] = title_type + title_data["type"] = title_type if the_part_list := part_list.get_part_list(): - title_data['part'] = the_part_list + title_data["part"] = the_part_list if title_data: title_list.append(title_data) # extract responsibilities if responsibilites: - new_responsibility = self.get('responsibilityStatement', []) + new_responsibility = self.get("responsibilityStatement", []) for resp in responsibilites: new_responsibility.append(resp) - self['responsibilityStatement'] = new_responsibility + self["responsibilityStatement"] = new_responsibility return title_list or None -@unimarc.over('part_of', '^(410|46[234])..') +@unimarc.over("part_of", "^(410|46[234])..") @utils.for_each_value @utils.ignore_value def marc21_to_part_of(self, key, value): """Get part_of.""" linked_pid = None if subfield_x := not_repetitive( - unimarc.bib_id, 'unimarc', key, value, 'x', default='').strip(): + unimarc.bib_id, "unimarc", key, value, "x", default="" + ).strip(): for pid in Document.get_document_pids_by_issn(subfield_x): linked_pid = pid break if linked_pid: - part_of = {'document': { - '$ref': f'https://bib.rero.ch/api/documents/{linked_pid}' - }} + part_of = { + "document": {"$ref": f"https://bib.rero.ch/api/documents/{linked_pid}"} + } numbering = [] - if subfield_v := utils.force_list(value.get('v')): + if subfield_v := utils.force_list(value.get("v")): with contextlib.suppress(ValueError): - numbering.append({'volume': str(subfield_v[0])}) - if subfield_d := utils.force_list(value.get('d')): + numbering.append({"volume": str(subfield_v[0])}) + if subfield_d := utils.force_list(value.get("d")): # get a years range - years = subfield_d[0].split('-') + years = subfield_d[0].split("-") with contextlib.suppress(ValueError): if numbering: - numbering[0]['year'] = str(years[0]) + numbering[0]["year"] = str(years[0]) else: - numbering.append({'year': str(years[0])}) + numbering.append({"year": str(years[0])}) if len(years) > 1: if years := range(int(years[0]), int(years[1]) + 1): numbering_years = deepcopy(numbering) @@ -510,39 +517,38 @@ def marc21_to_part_of(self, key, value): for year in years[1:]: if numbering: number_year = deepcopy(numbering[0]) - number_year['year'] = str(year) + number_year["year"] = str(year) numbering_years.append(number_year) numbering = numbering_years if numbering: - part_of['numbering'] = numbering + part_of["numbering"] = numbering - self['partOf'] = self.get('partOf', []) - self['partOf'].append(part_of) + self["partOf"] = self.get("partOf", []) + self["partOf"].append(part_of) -@unimarc.over('language', '^101') +@unimarc.over("language", "^101") @utils.ignore_value def unimarc_languages(self, key, value): """Get languages. 
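# Sketch of the year-range expansion in marc21_to_part_of above: a $d value
# such as "1980-1982" produces one numbering entry per year, each copied
# from the first entry so that volume information is preserved. This assumes
# a well-formed range; the original guards the int() conversions with
# contextlib.suppress(ValueError).
from copy import deepcopy

def expand_years(numbering, subfield_d):
    years = subfield_d.split("-")
    base = numbering[0] if numbering else {}
    base["year"] = years[0]
    expanded = [base]
    if len(years) > 1:
        for year in range(int(years[0]) + 1, int(years[1]) + 1):
            entry = deepcopy(base)
            entry["year"] = str(year)
            expanded.append(entry)
    return expanded

assert expand_years([{"volume": "12"}], "1980-1982") == [
    {"volume": "12", "year": "1980"},
    {"volume": "12", "year": "1981"},
    {"volume": "12", "year": "1982"},
]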
languages: 101 [$a, repetitive] """ - languages = utils.force_list(value.get('a')) + languages = utils.force_list(value.get("a")) schema_in_bytes = resource_string( - 'rero_ils.jsonschemas', - 'common/languages-v0.0.1.json' + "rero_ils.jsonschemas", "common/languages-v0.0.1.json" ) - schema = jsonref.loads(schema_in_bytes.decode('utf8')) - langs = schema['language']['enum'] + schema = jsonref.loads(schema_in_bytes.decode("utf8")) + langs = schema["language"]["enum"] return [ - {'value': language, 'type': 'bf:Language'} + {"value": language, "type": "bf:Language"} for language in languages if language in langs ] -@unimarc.over('contribution', '7[01][0123]..') +@unimarc.over("contribution", "7[01][0123]..") @utils.for_each_value @utils.ignore_value def unimarc_to_contribution(self, key, value): @@ -557,56 +563,56 @@ def unimarc_to_contribution(self, key, value): 712 Nom de collectivité – Responsabilité secondaire """ agent = { - 'preferred_name': ', '.join(utils.force_list(value.get('a', ''))), - 'type': EntityType.PERSON + "preferred_name": ", ".join(utils.force_list(value.get("a", ""))), + "type": EntityType.PERSON, } - if key[:3] in ['700', '701', '702', '703']: - if agent['preferred_name'] and value.get('b'): - agent['preferred_name'] += \ - ', ' + ', '.join(utils.force_list(value.get('b'))) - if value.get('d'): - agent['numeration'] = value.get('d') - - if value.get('c'): - agent['qualifier'] = value.get('c') - - if value.get('f'): - date = utils.force_list(value.get('f'))[0] - date = date.replace('-....', '-') - dates = date.split('-') + if key[:3] in ["700", "701", "702", "703"]: + if agent["preferred_name"] and value.get("b"): + agent["preferred_name"] += ", " + ", ".join( + utils.force_list(value.get("b")) + ) + if value.get("d"): + agent["numeration"] = value.get("d") + + if value.get("c"): + agent["qualifier"] = value.get("c") + + if value.get("f"): + date = utils.force_list(value.get("f"))[0] + date = date.replace("-....", "-") + dates = date.split("-") with contextlib.suppress(Exception): if date_of_birth := dates[0].strip(): - agent['date_of_birth'] = date_of_birth + agent["date_of_birth"] = date_of_birth with contextlib.suppress(Exception): if date_of_death := dates[1].strip(): - agent['date_of_death'] = date_of_death - - if key[:3] in ['710', '711', '712']: - agent['type'] = 'bf:Organisation' - agent['conference'] = key[3] == '1' - if agent['preferred_name'] and value.get('c'): - agent['preferred_name'] += \ - ', ' + ', '.join(utils.force_list(value.get('c'))) - if value.get('b'): - agent['subordinate_unit'] = utils.force_list(value.get('b')) - if value.get('d'): - numbering = utils.force_list(value.get('d'))[0] - agent['numbering'] = remove_trailing_punctuation( - numbering - ).lstrip('(').rstrip(')') - if value.get('e'): - place = utils.force_list(value.get('e'))[0] - agent['place'] = remove_trailing_punctuation( - place - ).lstrip('(').rstrip(')') - if value.get('f'): - conference_date = utils.force_list(value.get('f'))[0] - agent['conference_date'] = remove_trailing_punctuation( - conference_date - ).lstrip('(').rstrip(')') + agent["date_of_death"] = date_of_death + + if key[:3] in ["710", "711", "712"]: + agent["type"] = "bf:Organisation" + agent["conference"] = key[3] == "1" + if agent["preferred_name"] and value.get("c"): + agent["preferred_name"] += ", " + ", ".join( + utils.force_list(value.get("c")) + ) + if value.get("b"): + agent["subordinate_unit"] = utils.force_list(value.get("b")) + if value.get("d"): + numbering = utils.force_list(value.get("d"))[0] + 
agent["numbering"] = ( + remove_trailing_punctuation(numbering).lstrip("(").rstrip(")") + ) + if value.get("e"): + place = utils.force_list(value.get("e"))[0] + agent["place"] = remove_trailing_punctuation(place).lstrip("(").rstrip(")") + if value.get("f"): + conference_date = utils.force_list(value.get("f"))[0] + agent["conference_date"] = ( + remove_trailing_punctuation(conference_date).lstrip("(").rstrip(")") + ) roles = [] - if value.get('4'): + if value.get("4"): IDREF_ROLE_CONV = { "070": "aut", "230": "cmp", @@ -638,43 +644,41 @@ def unimarc_to_contribution(self, key, value): "632": "adi", "005": "act", "390": "fmo", - "545": "mus" + "545": "mus", } - for role in utils.force_list(value.get('4')): + for role in utils.force_list(value.get("4")): if role_conv := IDREF_ROLE_CONV.get(role): roles.append(role_conv) roles = list(set(roles)) if not roles: - roles = ['aut'] - - ids = utils.force_list(value.get('3')) or [] - ids = [f'(idref){id_}' for id_ in ids] - if ids and (ref := get_mef_link( - bibid=unimarc.bib_id, - reroid=unimarc.rero_id, - entity_type=EntityType.PERSON, - ids=ids, - key=key - )): + roles = ["aut"] + + ids = utils.force_list(value.get("3")) or [] + ids = [f"(idref){id_}" for id_ in ids] + if ids and ( + ref := get_mef_link( + bibid=unimarc.bib_id, + reroid=unimarc.rero_id, + entity_type=EntityType.PERSON, + ids=ids, + key=key, + ) + ): return { - 'entity': { - '$ref': ref, - '_text': create_authorized_access_point(agent) - }, - 'role': roles + "entity": {"$ref": ref, "_text": create_authorized_access_point(agent)}, + "role": roles, } else: return { - 'entity': { - 'authorized_access_point': - create_authorized_access_point(agent), - 'type': agent['type'] + "entity": { + "authorized_access_point": create_authorized_access_point(agent), + "type": agent["type"], }, - 'role': roles + "role": roles, } -@unimarc.over('editionStatement', '^205..') +@unimarc.over("editionStatement", "^205..") @utils.for_each_value @utils.ignore_value def unimarc_to_edition_statement(self, key, value): @@ -684,218 +688,212 @@ def unimarc_to_edition_statement(self, key, value): responsibility: 205 [$f non repetitive] """ edition_data = {} - if subfields_a := utils.force_list(value.get('a')): + if subfields_a := utils.force_list(value.get("a")): subfield_a = subfields_a[0] - edition_data['editionDesignation'] = [{'value': subfield_a.strip()}] - if subfields_f := utils.force_list(value.get('f')): + edition_data["editionDesignation"] = [{"value": subfield_a.strip()}] + if subfields_f := utils.force_list(value.get("f")): subfield_f = subfields_f[0] - edition_data['responsibility'] = [{'value': subfield_f}] + edition_data["responsibility"] = [{"value": subfield_f}] return edition_data or None -@unimarc.over('provisionActivity', '^21[04]..') +@unimarc.over("provisionActivity", "^21[04]..") @utils.for_each_value @utils.ignore_value def unimarc_publishers_provision_activity_publication(self, key, value): """Get provision activity dates.""" + def build_place_or_agent_data(code, label, index): type_per_code = { - 'a': EntityType.PLACE, - 'c': EntityType.AGENT, - 'e': EntityType.PLACE, - 'g': EntityType.AGENT + "a": EntityType.PLACE, + "c": EntityType.AGENT, + "e": EntityType.PLACE, + "g": EntityType.AGENT, } place_or_agent_data = { - 'type': type_per_code[code], - 'label': [{'value': remove_trailing_punctuation(label)}] + "type": type_per_code[code], + "label": [{"value": remove_trailing_punctuation(label)}], } return place_or_agent_data def build_place(): # country from 102 place = {} - field_102 = 
unimarc.get_fields('102') + field_102 = unimarc.get_fields("102") if field_102: field_102 = field_102[0] - country_codes = unimarc.get_subfields(field_102, 'a') + country_codes = unimarc.get_subfields(field_102, "a") if country_codes: country = _COUNTRY_UNIMARC_MARC21.get(country_codes[0]) if country: - place['country'] = country + place["country"] = country return place # only take 214 if exists publication = {} ind2 = key[4] type_per_ind2 = { - ' ': 'bf:Publication', - '_': 'bf:Publication', - '0': 'bf:Publication', - '1': 'bf:Production', - '2': 'bf:Distribution', - '3': 'bf:Manufacture' + " ": "bf:Publication", + "_": "bf:Publication", + "0": "bf:Publication", + "1": "bf:Production", + "2": "bf:Distribution", + "3": "bf:Manufacture", } - if ind2 == '4': - field_d = value.get('d') + if ind2 == "4": + field_d = value.get("d") if field_d: field_d = utils.force_list(field_d)[0] - copyright_date = self.get('copyrightDate', []) - if field_d[0] == 'P': - copyright_date.append(f'℗ {field_d[2:]}') + copyright_date = self.get("copyrightDate", []) + if field_d[0] == "P": + copyright_date.append(f"℗ {field_d[2:]}") else: - copyright_date.append(f'© {field_d}') - self['copyrightDate'] = copyright_date + copyright_date.append(f"© {field_d}") + self["copyrightDate"] = copyright_date else: start_date = None end_date = None place = build_place() - field_100 = unimarc.get_fields('100') + field_100 = unimarc.get_fields("100") if field_100: field_100 = field_100[0] - subfield_a = unimarc.get_subfields(field_100, 'a') + subfield_a = unimarc.get_subfields(field_100, "a") if subfield_a: subfield_a = subfield_a[0] start_date = make_year(subfield_a[9:13]) end_date = make_year(subfield_a[14:17]) - if key[:3] == '210': - if not unimarc.get_fields('214'): - publications = self.setdefault('provisionActivity', []) + if key[:3] == "210": + if not unimarc.get_fields("214"): + publications = self.setdefault("provisionActivity", []) items = get_field_items(value) index = 1 - old_type = 'bf:Publication' + old_type = "bf:Publication" publication = {} statement = [] for blob_key, blob_value in items: - if blob_key in ('a', 'c'): - publication_type = 'bf:Publication' + if blob_key in ("a", "c"): + publication_type = "bf:Publication" if index == 1: - old_type = 'bf:Publication' - publication = { - 'type': publication_type, - 'statement': [] - } + old_type = "bf:Publication" + publication = {"type": publication_type, "statement": []} if publication_type != old_type: - subfields_h = utils.force_list(value.get('h')) - publication['statement'] = statement + subfields_h = utils.force_list(value.get("h")) + publication["statement"] = statement if subfields_h: subfields_h = subfields_h[0] - publication['statement'].append({ - 'label': [{'value': subfields_h}], - 'type': 'Date' - }) + publication["statement"].append( + {"label": [{"value": subfields_h}], "type": "Date"} + ) statement = [] publications.append(publication) publication = { - 'type': publication_type, - 'statement': [], + "type": publication_type, + "statement": [], } old_type = publication_type place_or_agent_data = build_place_or_agent_data( - blob_key, blob_value, index) + blob_key, blob_value, index + ) statement.append(place_or_agent_data) - if blob_key in ('e', 'g'): - publication_type = 'bf:Manufacture' + if blob_key in ("e", "g"): + publication_type = "bf:Manufacture" if index == 1: - old_type = 'bf:Manufacture' - publication = { - 'type': publication_type, - 'statement': [] - } + old_type = "bf:Manufacture" + publication = {"type": publication_type, 
"statement": []} if publication_type != old_type: - subfields_d = utils.force_list(value.get('d')) - publication['statement'] = statement + subfields_d = utils.force_list(value.get("d")) + publication["statement"] = statement if subfields_d: subfield_d = subfields_d[0] - publication['statement'].append({ - 'label': [{'value': subfield_d}], - 'type': 'Date' - }) + publication["statement"].append( + {"label": [{"value": subfield_d}], "type": "Date"} + ) if start_date: - publication['startDate'] = start_date + publication["startDate"] = start_date if end_date: - publication['endDate'] = end_date + publication["endDate"] = end_date if place: - publication['place'] = [place] + publication["place"] = [place] statement = [] publications.append(publication) publication = { - 'type': publication_type, - 'statement': [], + "type": publication_type, + "statement": [], } old_type = publication_type place_or_agent_data = build_place_or_agent_data( - blob_key, blob_value, index) + blob_key, blob_value, index + ) statement.append(place_or_agent_data) - if blob_key != '__order__': + if blob_key != "__order__": index += 1 if statement: publication = { - 'type': publication_type, - 'statement': statement, + "type": publication_type, + "statement": statement, } - date_subfield = 'd' - if publication_type == 'bf:Manufacture': - date_subfield = 'h' + date_subfield = "d" + if publication_type == "bf:Manufacture": + date_subfield = "h" subfields = utils.force_list(value.get(date_subfield)) if subfields: subfield = subfields[0] - publication['statement'].append({ - 'label': [{'value': subfield}], - 'type': 'Date' - }) - if publication['type'] == 'bf:Publication': + publication["statement"].append( + {"label": [{"value": subfield}], "type": "Date"} + ) + if publication["type"] == "bf:Publication": if start_date: - publication['startDate'] = start_date + publication["startDate"] = start_date if end_date: - publication['endDate'] = end_date + publication["endDate"] = end_date if place: - publication['place'] = [place] + publication["place"] = [place] publications.append(publication) if publications: - self['provisionActivity'] = publications + self["provisionActivity"] = publications return None else: publication = { - 'type': type_per_ind2[ind2], - 'statement': [], + "type": type_per_ind2[ind2], + "statement": [], } statement = [] items = get_field_items(value) index = 1 for blob_key, blob_value in items: - if blob_key in ('a', 'c'): + if blob_key in ("a", "c"): place_or_agent_data = build_place_or_agent_data( - blob_key, blob_value, index) + blob_key, blob_value, index + ) statement.append(place_or_agent_data) - if blob_key != '__order__': + if blob_key != "__order__": index += 1 if statement: - publication['statement'] = statement - if publication['type'] == 'bf:Publication' and place: - publication['place'] = [place] + publication["statement"] = statement + if publication["type"] == "bf:Publication" and place: + publication["place"] = [place] - subfields_d = utils.force_list(value.get('d')) + subfields_d = utils.force_list(value.get("d")) if subfields_d: subfield_d = subfields_d[0] - publication['statement'].append({ - 'label': [{'value': subfield_d}], - 'type': 'Date' - }) - if publication['type'] == 'bf:Publication': + publication["statement"].append( + {"label": [{"value": subfield_d}], "type": "Date"} + ) + if publication["type"] == "bf:Publication": if start_date: - publication['startDate'] = start_date + publication["startDate"] = start_date if end_date: - publication['endDate'] = end_date + 
publication["endDate"] = end_date - if not publication.get('statement'): + if not publication.get("statement"): publication = None return publication or None -@unimarc.over('extent', '^215..') +@unimarc.over("extent", "^215..") @utils.ignore_value def unimarc_description(self, key, value): """Get physical description. @@ -920,7 +918,7 @@ def unimarc_description(self, key, value): return None -@unimarc.over('series', '^225..') +@unimarc.over("series", "^225..") @utils.for_each_value @utils.ignore_value def unimarc_series_statement(self, key, value): @@ -935,67 +933,64 @@ def unimarc_series_statement(self, key, value): new_data = [] fist_a_value = None pending_v_values = [] - subfield_selection = {'a', 'e', 'i', 'v'} + subfield_selection = {"a", "e", "i", "v"} for blob_key, blob_value in items: if blob_key in subfield_selection: - if blob_key == 'a': + if blob_key == "a": fist_a_value = blob_value with contextlib.suppress(KeyError): - subfield_selection.remove('a') - elif blob_key == 'e': - fist_a_value += f': {blob_value}' - elif blob_key == 'i': + subfield_selection.remove("a") + elif blob_key == "e": + fist_a_value += f": {blob_value}" + elif blob_key == "i": # we keep on the $e associeted to the $a with contextlib.suppress(KeyError): - subfield_selection.remove('e') + subfield_selection.remove("e") if fist_a_value: - new_data.append(('a', fist_a_value)) - new_data.extend( - ('v', v_value) for v_value in pending_v_values) + new_data.append(("a", fist_a_value)) + new_data.extend(("v", v_value) for v_value in pending_v_values) fist_a_value = None pending_v_values = [] - new_data.append(('a', blob_value)) - elif blob_key == 'v': + new_data.append(("a", blob_value)) + elif blob_key == "v": pending_v_values.append(blob_value) if fist_a_value: - new_data.append(('a', fist_a_value)) - new_data.extend(('v', v_value) for v_value in pending_v_values) + new_data.append(("a", fist_a_value)) + new_data.extend(("v", v_value) for v_value in pending_v_values) new_value = GroupableOrderedDict(tuple(new_data)) unimarc.extract_series_statement_from_marc_field(key, new_value, self) -@unimarc.over('summary', '^330..') +@unimarc.over("summary", "^330..") @utils.for_each_value @utils.ignore_value def marc21_to_summary(self, key, value): """Get summary from repetitive field 520.""" - key_per_code = { - 'a': 'label', - 'c': 'source' - } + key_per_code = {"a": "label", "c": "source"} # parse field 520 subfields for extracting: # summary and source parts tag_link, link = get_field_link_data(value) items = get_field_items(value) index = 1 summary = {} - subfield_selection = {'a', 'c'} + subfield_selection = {"a", "c"} for blob_key, blob_value in items: if blob_key in subfield_selection: subfield_selection.remove(blob_key) - if blob_key == 'a': + if blob_key == "a": summary_data = unimarc.build_value_with_alternate_graphic( - '520', blob_key, blob_value, index, link, ',.', ':;/-=') + "520", blob_key, blob_value, index, link, ",.", ":;/-=" + ) else: summary_data = blob_value if summary_data: summary[key_per_code[blob_key]] = summary_data - if blob_key != '__order__': + if blob_key != "__order__": index += 1 return summary or None -@unimarc.over('identifiedBy', '^010..') +@unimarc.over("identifiedBy", "^010..") @utils.ignore_value def unimarc_identifier_isbn(self, key, value): """Get identifier isbn. 
@@ -1008,29 +1003,26 @@ def unimarc_identifier_isbn(self, key, value): * value = 010$z - (repeatable, remove hyphen) * status = 'invalid or cancelled' """ - identifiers = self.get('identifiedBy', []) - if value.get('a'): - isbn = { - "type": "bf:Isbn", - "value": value.get('a').replace('-', '') - } - if qualifiers := utils.force_list(value.get('b')): - isbn['qualifier'] = ', '.join(qualifiers) + identifiers = self.get("identifiedBy", []) + if value.get("a"): + isbn = {"type": "bf:Isbn", "value": value.get("a").replace("-", "")} + if qualifiers := utils.force_list(value.get("b")): + isbn["qualifier"] = ", ".join(qualifiers) identifiers.append(isbn) - if value.get('z'): - for value in utils.force_list(value.get('z')): + if value.get("z"): + for value in utils.force_list(value.get("z")): isbn = { "type": "bf:Isbn", - "value": value.replace('-', ''), - 'status': 'invalid or cancelled' + "value": value.replace("-", ""), + "status": "invalid or cancelled", } identifiers.append(isbn) return identifiers -@unimarc.over('identifiedBy', '^011..') +@unimarc.over("identifiedBy", "^011..") @utils.ignore_value def unimarc_identifier_isbn_tag011(self, key, value): """Get identifier isbn. @@ -1054,54 +1046,39 @@ def unimarc_identifier_isbn_tag011(self, key, value): * value: 011$g" (repeatable) * status: 'cancelled' """ - identifiers = self.get('identifiedBy', []) - if value.get('a'): - issn = { - "type": "bf:Issn", - "value": value.get('a') - } - if value.get('b'): - issn['qualifier'] = value.get('b') + identifiers = self.get("identifiedBy", []) + if value.get("a"): + issn = {"type": "bf:Issn", "value": value.get("a")} + if value.get("b"): + issn["qualifier"] = value.get("b") identifiers.append(issn) - if value.get('z'): - for data in utils.force_list(value.get('z')): - issn = { - "type": "bf:Issn", - "value": data, - 'status': 'invalid' - } + if value.get("z"): + for data in utils.force_list(value.get("z")): + issn = {"type": "bf:Issn", "value": data, "status": "invalid"} identifiers.append(issn) - if value.get('y'): - for data in utils.force_list(value.get('y')): - issn = { - "type": "bf:Issn", - "value": data, - 'status': 'cancelled' - } + if value.get("y"): + for data in utils.force_list(value.get("y")): + issn = {"type": "bf:Issn", "value": data, "status": "cancelled"} identifiers.append(issn) - if value.get('f'): + if value.get("f"): issnl = { "type": "bf:IssnL", - "value": value.get('f'), + "value": value.get("f"), } identifiers.append(issnl) - if value.get('g'): - for data in utils.force_list(value.get('g')): - issnl = { - "type": "bf:IssnL", - "value": data, - 'status': 'cancelled' - } + if value.get("g"): + for data in utils.force_list(value.get("g")): + issnl = {"type": "bf:IssnL", "value": data, "status": "cancelled"} identifiers.append(issnl) return identifiers -@unimarc.over('identifiedBy', '^073..') +@unimarc.over("identifiedBy", "^073..") @utils.ignore_value def unimarc_identifier_isbn_tag073(self, key, value): """Get identifier isbn. @@ -1112,21 +1089,18 @@ def unimarc_identifier_isbn_tag073(self, key, value): * qualifier = 073$b * ""status"":""invalid or cancelled"" = 073$z """ - identifiers = self.get('identifiedBy', []) - if value.get('a'): - ean = { - "type": "bf:Ean", - "value": value.get('a') - } - check_ean = EAN13(value.get('a')) + identifiers = self.get("identifiedBy", []) + if value.get("a"): + ean = {"type": "bf:Ean", "value": value.get("a")} + check_ean = EAN13(value.get("a")) # Do we have to check also cancelled status? 
if not check_ean: - ean['status'] = 'invalid' + ean["status"] = "invalid" identifiers.append(ean) return identifiers -@unimarc.over('note', '^300..') +@unimarc.over("note", "^300..") @utils.for_each_value @utils.ignore_value def unimarc_notes(self, key, value): @@ -1134,17 +1108,12 @@ def unimarc_notes(self, key, value): note: [300$a repetitive] """ - add_note( - dict( - noteType='general', - label=value.get('a', '') - ), - self) + add_note(dict(noteType="general", label=value.get("a", "")), self) return None -@unimarc.over('subjects_imported', '^6((0[0-9])|(1[0-7]))..') +@unimarc.over("subjects_imported", "^6((0[0-9])|(1[0-7]))..") @utils.for_each_value @utils.ignore_value def unimarc_subjects(self, key, value): @@ -1158,44 +1127,41 @@ def unimarc_subjects(self, key, value): # from config.py try: config_field_key = current_app.config.get( - 'RERO_ILS_IMPORT_6XX_TARGET_ATTRIBUTE', - 'subjects_imported' + "RERO_ILS_IMPORT_6XX_TARGET_ATTRIBUTE", "subjects_imported" ) except Exception: - from rero_ils.config import \ - RERO_ILS_IMPORT_6XX_TARGET_ATTRIBUTE as config_field_key - to_return = value.get('a') or '' - if value.get('b'): - to_return += ', ' + ', '.join(utils.force_list(value.get('b'))) - if value.get('d'): - to_return += ' ' + ' '.join(utils.force_list(value.get('d'))) - if value.get('c'): - to_return += ', ' + ', '.join(utils.force_list(value.get('c'))) - if value.get('f'): - to_return += ', ' + ', '.join(utils.force_list(value.get('f'))) - if value.get('y'): - to_return += ' -- ' + ' -- '.join(utils.force_list(value.get('y'))) + from rero_ils.config import ( + RERO_ILS_IMPORT_6XX_TARGET_ATTRIBUTE as config_field_key, + ) + to_return = value.get("a") or "" + if value.get("b"): + to_return += ", " + ", ".join(utils.force_list(value.get("b"))) + if value.get("d"): + to_return += " " + " ".join(utils.force_list(value.get("d"))) + if value.get("c"): + to_return += ", " + ", ".join(utils.force_list(value.get("c"))) + if value.get("f"): + to_return += ", " + ", ".join(utils.force_list(value.get("f"))) + if value.get("y"): + to_return += " -- " + " -- ".join(utils.force_list(value.get("y"))) if to_return: - data = dict(entity={ - 'type': EntityType.TOPIC, - 'authorized_access_point': to_return - }) - if source := value.get('2', None): - data['entity']['source'] = source + data = dict( + entity={"type": EntityType.TOPIC, "authorized_access_point": to_return} + ) + if source := value.get("2", None): + data["entity"]["source"] = source self.setdefault(config_field_key, []).append(data) -@unimarc.over('electronicLocator', '^8564.') +@unimarc.over("electronicLocator", "^8564.") @utils.for_each_value @utils.ignore_value def unimarc_electronicLocator_from_field_856(self, key, value): """Get electronicLocator from field 856.""" - return ( - {'url': value.get('u'), 'type': 'resource'} if value.get('u') else None - ) + return {"url": value.get("u"), "type": "resource"} if value.get("u") else None -@unimarc.over('fiction_statement', '^105..') +@unimarc.over("fiction_statement", "^105..") @utils.ignore_value def unimarc_fiction_statement(self, key, value): """Get fiction from field 105 $a 11. 
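The `^8564.` rule above is a compact example of `@utils.ignore_value`: it emits an `electronicLocator` entry only when subfield $u is present. A hedged sketch with an invented URL (same assumed `unimarc` import as in the earlier sketch):

    # With $u the rule produces a resource locator...
    unimarc.do({"8564_": {"u": "https://example.org/fulltext.pdf"}})
    # -> {"electronicLocator": [{"url": "https://example.org/fulltext.pdf",
    #                            "type": "resource"}]}
    # ...without $u it returns None and @utils.ignore_value drops the field.
    unimarc.do({"8564_": {"z": "note only"}})
    # -> {}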
@@ -1204,9 +1170,9 @@ def unimarc_fiction_statement(self, key, value): codes for fiction=False : c, d, e, h, y """ fiction = DocumentFictionType.Unspecified.value - if subfield_a := value.get('a'): - if subfield_a[11] in ['a', 'b', 'f', 'g', 'i']: + if subfield_a := value.get("a"): + if subfield_a[11] in ["a", "b", "f", "g", "i"]: fiction = DocumentFictionType.Fiction.value - if subfield_a[11] in ['c', 'd', 'e', 'h', 'y']: + if subfield_a[11] in ["c", "d", "e", "h", "y"]: fiction = DocumentFictionType.NonFiction.value return fiction diff --git a/rero_ils/modules/documents/dumpers/__init__.py b/rero_ils/modules/documents/dumpers/__init__.py index 2988e1c880..32249a81f1 100644 --- a/rero_ils/modules/documents/dumpers/__init__.py +++ b/rero_ils/modules/documents/dumpers/__init__.py @@ -23,38 +23,43 @@ from rero_ils.modules.commons.dumpers import MultiDumper, ReplaceRefsDumper from .indexer import IndexerDumper -from .replace_refs import ReplaceRefsContributionsDumper, \ - ReplaceRefsEntitiesDumper +from .replace_refs import ReplaceRefsContributionsDumper, ReplaceRefsEntitiesDumper from .title import TitleDumper __all__ = ( - 'TitleDumper', - 'ReplaceRefsContributionsDumper', - 'ReplaceRefsEntitiesDumper', + "TitleDumper", + "ReplaceRefsContributionsDumper", + "ReplaceRefsEntitiesDumper", ) # replace linked data -document_replace_refs_dumper = MultiDumper(dumpers=[ - # make a fresh copy - Dumper(), - ReplaceRefsContributionsDumper(), - ReplaceRefsEntitiesDumper('subjects', 'genreForm'), - ReplaceRefsDumper() -]) +document_replace_refs_dumper = MultiDumper( + dumpers=[ + # make a fresh copy + Dumper(), + ReplaceRefsContributionsDumper(), + ReplaceRefsEntitiesDumper("subjects", "genreForm"), + ReplaceRefsDumper(), + ] +) # create a string version of the complex title field -document_title_dumper = MultiDumper(dumpers=[ - # make a fresh copy - Dumper(), - TitleDumper() -]) +document_title_dumper = MultiDumper( + dumpers=[ + # make a fresh copy + Dumper(), + TitleDumper(), + ] +) # dumper used for indexing -document_indexer_dumper = MultiDumper(dumpers=[ - # make a fresh copy - Dumper(), - ReplaceRefsContributionsDumper(), - ReplaceRefsEntitiesDumper('subjects', 'genreForm'), - ReplaceRefsDumper(), - IndexerDumper() -]) +document_indexer_dumper = MultiDumper( + dumpers=[ + # make a fresh copy + Dumper(), + ReplaceRefsContributionsDumper(), + ReplaceRefsEntitiesDumper("subjects", "genreForm"), + ReplaceRefsDumper(), + IndexerDumper(), + ] +) diff --git a/rero_ils/modules/documents/dumpers/indexer.py b/rero_ils/modules/documents/dumpers/indexer.py index 9361966bfd..701ba2aae8 100644 --- a/rero_ils/modules/documents/dumpers/indexer.py +++ b/rero_ils/modules/documents/dumpers/indexer.py @@ -37,58 +37,63 @@ def _process_holdings(record, data): from rero_ils.modules.holdings.api import HoldingsSearch from rero_ils.modules.items.api.api import ItemsSearch from rero_ils.modules.items.models import ItemNoteTypes + holdings = [] - es_holdings = HoldingsSearch()\ - .filter('term', document__pid=record['pid'])\ - .source().scan() + es_holdings = ( + HoldingsSearch().filter("term", document__pid=record["pid"]).source().scan() + ) for holding in es_holdings: holding = holding.to_dict() hold_data = { - 'pid': holding['pid'], - 'location': { - 'pid': holding['location']['pid'], + "pid": holding["pid"], + "location": { + "pid": holding["location"]["pid"], }, - 'circulation_category': [{ - 'pid': holding['circulation_category']['pid'], - }], - 'organisation': { - 'organisation_pid': holding['organisation']['pid'], 
- 'library_pid': holding['library']['pid'] + "circulation_category": [ + { + "pid": holding["circulation_category"]["pid"], + } + ], + "organisation": { + "organisation_pid": holding["organisation"]["pid"], + "library_pid": holding["library"]["pid"], }, - 'holdings_type': holding['holdings_type'] + "holdings_type": holding["holdings_type"], } # Index additional holdings fields into the document record holdings_fields = [ - 'call_number', 'second_call_number', 'index', - 'enumerationAndChronology', 'supplementaryContent', - 'local_fields' + "call_number", + "second_call_number", + "index", + "enumerationAndChronology", + "supplementaryContent", + "local_fields", ] for field in holdings_fields: if field in holding: hold_data[field] = holding.get(field) # Index holdings notes - if notes := [n['content'] for n in holding.get('notes', []) if n]: - hold_data['notes'] = notes + if notes := [n["content"] for n in holding.get("notes", []) if n]: + hold_data["notes"] = notes # Index items attached to each holdings record - es_items = ItemsSearch()\ - .filter('term', holding__pid=holding['pid'])\ - .scan() + es_items = ItemsSearch().filter("term", holding__pid=holding["pid"]).scan() for item in es_items: item = item.to_dict() item_data = { - 'pid': item['pid'], - 'barcode': item['barcode'], - 'status': item['status'], - 'local_fields': item.get('local_fields'), - 'call_number': item.get('call_number'), - 'second_call_number': item.get('second_call_number'), - 'temporary_item_type': item.get('temporary_item_type') + "pid": item["pid"], + "barcode": item["barcode"], + "status": item["status"], + "local_fields": item.get("local_fields"), + "call_number": item.get("call_number"), + "second_call_number": item.get("second_call_number"), + "temporary_item_type": item.get("temporary_item_type"), } - if 'temporary_item_type' in item: - hold_data['circulation_category'].append( - {'pid': item['temporary_item_type']['pid']}) + if "temporary_item_type" in item: + hold_data["circulation_category"].append( + {"pid": item["temporary_item_type"]["pid"]} + ) item_data = {k: v for k, v in item_data.items() if v} @@ -97,48 +102,46 @@ def _process_holdings(record, data): # document. As we need to link acquisition date and # org/lib/loc, we need to store these data together in a # 'nested' structure. - if acq_date := item.get('acquisition_date'): - item_data['acquisition'] = { - 'organisation_pid': holding['organisation']['pid'], - 'library_pid': holding['library']['pid'], - 'location_pid': holding['location']['pid'], - 'date': acq_date + if acq_date := item.get("acquisition_date"): + item_data["acquisition"] = { + "organisation_pid": holding["organisation"]["pid"], + "library_pid": holding["library"]["pid"], + "location_pid": holding["location"]["pid"], + "date": acq_date, } - # item notes content. - # index the content of the public notes into the document. 
- public_notes_content = [ - n['content'] - for n in item.get('notes', []) - if n['type'] in ItemNoteTypes.PUBLIC - ] - if public_notes_content: - item_data['notes'] = public_notes_content - hold_data.setdefault('items', []).append(item_data) + if public_notes_content := [ + n["content"] + for n in item.get("notes", []) + if n["type"] in ItemNoteTypes.PUBLIC + ]: + item_data["notes"] = public_notes_content + hold_data.setdefault("items", []).append(item_data) holdings.append(hold_data) if holdings: - data['holdings'] = holdings + data["holdings"] = holdings @staticmethod def _process_identifiers(record, data): """Add identifiers informations for indexing.""" - from rero_ils.modules.commons.identifiers import IdentifierFactory, \ - IdentifierType + from rero_ils.modules.commons.identifiers import ( + IdentifierFactory, + IdentifierType, + ) # Enrich document identifiers with possible alternative # identifiers. For example, if document data provides an ISBN-10 # identifier, the corresponding ISBN-13 identifiers must be # searchable too. - identifiers = set([ + identifiers = { IdentifierFactory.create_identifier(identifier_data) - for identifier_data in data.get('identifiedBy', []) - ]) + for identifier_data in data.get("identifiedBy", []) + } # enrich elasticsearch data with encoded identifier alternatives. The # result identifiers list should contain only distinct identifier ! for identifier in list(identifiers): identifiers.update(identifier.get_alternatives()) - data['identifiedBy'] = \ - [identifier.dump() for identifier in identifiers] + data["identifiedBy"] = [identifier.dump() for identifier in identifiers] # DEV NOTES :: Why copy `identifiedBy` into `nested_identifiers` # We use an alternative `nested_identifiers` to duplicate identifiers # into a nested structure into ES. Doing this we can continue to search @@ -149,14 +152,14 @@ def _process_identifiers(record, data): # "nested" field using the `copy_to` directive ; this will cause an # exception during index creation. # Best solution seems to "script" this copy into the listener - data['nested_identifiers'] = data['identifiedBy'] + data["nested_identifiers"] = data["identifiedBy"] # create specific keys for some common identifier families. It could # be used as a shortcut to search specific identifiers for expert # search mode. identifier_families = { - 'isbn': [IdentifierType.ISBN], - 'issn': [IdentifierType.ISSN, IdentifierType.L_ISSN] + "isbn": [IdentifierType.ISBN], + "issn": [IdentifierType.ISSN, IdentifierType.L_ISSN], } for key, family_types in identifier_families.items(): if filtered_identifiers := list( @@ -172,104 +175,111 @@ def _process_identifiers(record, data): def _process_i18n_entities(record, data): """Process fields containing entities to allow i18n search.""" # Contribution (aka. 
authors of the document) - if contributions := data.pop('contribution', []): - data['contribution'] = process_i18n_literal_fields(contributions) + if contributions := data.pop("contribution", []): + data["contribution"] = process_i18n_literal_fields(contributions) # Subject (could contain subdivisions to perform too) - if subjects := data.pop('subjects', []): - data['subjects'] = process_i18n_literal_fields(subjects) - if genreForms := data.pop('genreForm', []): - data['genreForm'] = process_i18n_literal_fields(genreForms) + if subjects := data.pop("subjects", []): + data["subjects"] = process_i18n_literal_fields(subjects) + if genreForms := data.pop("genreForm", []): + data["genreForm"] = process_i18n_literal_fields(genreForms) @staticmethod def _process_sort_title(record, data): """Compute and store the document title used to sort it.""" from rero_ils.utils import language_mapping - sort_title = TitleExtension.format_text(data.get('title', [])) - language = language_mapping(data.get('language')[0].get('value')) - if current_app.config.get('RERO_ILS_STOP_WORDS_ACTIVATE', False): - sort_title = current_app. \ - extensions['reroils-normalizer-stop-words']. \ - normalize(sort_title, language) - data['sort_title'] = sort_title + + sort_title = TitleExtension.format_text(data.get("title", [])) + language = language_mapping(data.get("language")[0].get("value")) + if current_app.config.get("RERO_ILS_STOP_WORDS_ACTIVATE", False): + sort_title = current_app.extensions[ + "reroils-normalizer-stop-words" + ].normalize(sort_title, language) + data["sort_title"] = sort_title @staticmethod def _process_local_field(record, data): """Add local field data related to this document.""" from rero_ils.modules.local_fields.api import LocalField - data['local_fields'] = [{ - 'organisation_pid': field.organisation_pid, - 'fields': field.get('fields') - } for field in LocalField.get_local_fields_by_id('doc', record['pid'])] - if not data['local_fields']: - del data['local_fields'] + + data["local_fields"] = [ + {"organisation_pid": field.organisation_pid, "fields": field.get("fields")} + for field in LocalField.get_local_fields_by_id("doc", record["pid"]) + ] + if not data["local_fields"]: + del data["local_fields"] @staticmethod def _process_host_document(record, data): """Store host document title in child document (part of).""" from ..api import Document - for part_of in data.get('partOf', []): - doc_pid = part_of.get('document', {}).get('pid') + + for part_of in data.get("partOf", []): + doc_pid = part_of.get("document", {}).get("pid") document = Document.get_record_by_pid(doc_pid).dumps() if titles := [ - v['_text'] - for v in document.get('title', {}) - if v.get('_text') and v.get('type') == 'bf:Title' + v["_text"] + for v in document.get("title", {}) + if v.get("_text") and v.get("type") == "bf:Title" ]: - part_of['document']['title'] = titles.pop() + part_of["document"]["title"] = titles.pop() @staticmethod def _process_provision_activity(record, data): """Search into `provisionActivity` field to found sort dates.""" if pub_provisions := [ provision - for provision in record.get('provisionActivity', []) - if provision['type'] == 'bf:Publication' + for provision in record.get("provisionActivity", []) + if provision["type"] == "bf:Publication" ]: - start_date = pub_provisions[0].get('startDate') - end_date = pub_provisions[0].get('endDate') - data['sort_date_new'] = end_date or start_date - data['sort_date_old'] = start_date + start_date = pub_provisions[0].get("startDate") + end_date = 
pub_provisions[0].get("endDate") + data["sort_date_new"] = end_date or start_date + data["sort_date_old"] = start_date def _process_files(self, record, data): """Add full text from files.""" - ext = current_app.extensions['rero-invenio-files'] + ext = current_app.extensions["rero-invenio-files"] files = [] for record_file in record.get_records_files(): record_files_information = {} - collections = record_file.get('metadata', {}).get('collections') + collections = record_file.get("metadata", {}).get("collections") library_pid = extracted_data_from_ref( - record_file.get('metadata', {}).get('library')) + record_file.get("metadata", {}).get("library") + ) if library_pid: organisation_pid = Library.get_record_by_pid( - library_pid).organisation_pid + library_pid + ).organisation_pid for file_name in record_file.files: file = record_file.files[file_name] - metadata = file.get('metadata', {}) - if metadata.get('type') == 'thumbnail': + metadata = file.get("metadata", {}) + if metadata.get("type") == "thumbnail": # no useful information here continue - if metadata.get('type') == 'fulltext': + if metadata.get("type") == "fulltext": # get the fulltext - stream = file.get_stream('r') - record_files_information.setdefault( - metadata['fulltext_for'], {})['text'] = stream.read() + stream = file.get_stream("r") + record_files_information.setdefault(metadata["fulltext_for"], {})[ + "text" + ] = stream.read() continue # other information from the main file - record_files_information.setdefault( - file_name, {})['file_name'] = file_name - record_files_information[file_name]['rec_id'] = \ - record_file.pid.pid_value + record_files_information.setdefault(file_name, {})[ + "file_name" + ] = file_name + record_files_information[file_name][ + "rec_id" + ] = record_file.pid.pid_value if collections: - record_files_information[file_name]['collections'] = \ - collections + record_files_information[file_name]["collections"] = collections if library_pid: - record_files_information[file_name]['library_pid'] = \ - library_pid - record_files_information[file_name]['organisation_pid'] = \ - organisation_pid + record_files_information[file_name]["library_pid"] = library_pid + record_files_information[file_name][ + "organisation_pid" + ] = organisation_pid files += list(record_files_information.values()) if files: - data['files'] = files + data["files"] = files def dump(self, record, data): """Dump a document instance with basic document information's. 
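A standalone sketch of the `_process_provision_activity` logic just above; the record values are invented for illustration:

    # Only bf:Publication provisions drive the sort dates; the first one wins.
    record = {"provisionActivity": [
        {"type": "bf:Manufacture", "startDate": 2005},
        {"type": "bf:Publication", "startDate": 1998, "endDate": 2003},
    ]}
    pubs = [p for p in record["provisionActivity"]
            if p["type"] == "bf:Publication"]
    data = {}
    if pubs:
        start, end = pubs[0].get("startDate"), pubs[0].get("endDate")
        data["sort_date_new"] = end or start  # fall back to startDate
        data["sort_date_old"] = start
    assert data == {"sort_date_new": 2003, "sort_date_old": 1998}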
diff --git a/rero_ils/modules/documents/dumpers/replace_refs.py b/rero_ils/modules/documents/dumpers/replace_refs.py index 6abc939c96..c944da22e6 100644 --- a/rero_ils/modules/documents/dumpers/replace_refs.py +++ b/rero_ils/modules/documents/dumpers/replace_refs.py @@ -21,8 +21,7 @@ from rero_ils.modules.commons.exceptions import RecordNotFound from rero_ils.modules.entities.dumpers import document_dumper -from rero_ils.modules.entities.remote_entities.utils import \ - extract_data_from_mef_uri +from rero_ils.modules.entities.remote_entities.utils import extract_data_from_mef_uri from rero_ils.modules.utils import extracted_data_from_ref @@ -36,21 +35,18 @@ def _replace_entity(data): from rero_ils.modules.entities.remote_entities.api import RemoteEntity # try to get entity record - entity = extracted_data_from_ref(data['$ref'], 'record') + entity = extracted_data_from_ref(data["$ref"], "record") # check if local entity if entity and isinstance(entity, LocalEntity): # internal resources will be resolved later (see ReplaceRefsDumper) return entity.dumps(document_dumper) - _, _type, _ = extract_data_from_mef_uri(data['$ref']) - if not (entity := RemoteEntity.get_record_by_pid(data['pid'])): - raise RecordNotFound(RemoteEntity, data['pid']) + _, _type, _ = extract_data_from_mef_uri(data["$ref"]) + if not (entity := RemoteEntity.get_record_by_pid(data["pid"])): + raise RecordNotFound(RemoteEntity, data["pid"]) entity = entity.dumps(document_dumper) - entity.update({ - 'primary_source': _type, - 'pid': data['pid'] - }) + entity.update({"primary_source": _type, "pid": data["pid"]}) return entity @@ -65,16 +61,18 @@ def dump(self, record, data): :return a dict with dumped data. """ new_contributions = [] - for contribution in data.get('contribution', []): - if contribution['entity'].get('$ref'): - new_contributions.append({ - 'entity': self._replace_entity(contribution['entity']), - 'role': contribution['role'] - }) + for contribution in data.get("contribution", []): + if contribution["entity"].get("$ref"): + new_contributions.append( + { + "entity": self._replace_entity(contribution["entity"]), + "role": contribution["role"], + } + ) else: new_contributions.append(contribution) if new_contributions: - data['contribution'] = new_contributions + data["contribution"] = new_contributions return data @@ -98,10 +96,10 @@ def dump(self, record, data): """ for field_name in self.field_names: remote_entities = [] - for entity in [d['entity'] for d in data.get(field_name, [])]: - if entity.get('$ref'): + for entity in [d["entity"] for d in data.get(field_name, [])]: + if entity.get("$ref"): entity = self._replace_entity(entity) - remote_entities.append({'entity': entity}) + remote_entities.append({"entity": entity}) if remote_entities: data[field_name] = remote_entities return data diff --git a/rero_ils/modules/documents/dumpers/title.py b/rero_ils/modules/documents/dumpers/title.py index 19994a39e5..d01501eeb1 100644 --- a/rero_ils/modules/documents/dumpers/title.py +++ b/rero_ils/modules/documents/dumpers/title.py @@ -32,11 +32,8 @@ def dump(self, record, data): :param data: The initial dump data passed in by ``record.dumps()``. 
""" title_text = TitleExtension.format_text( - record.get('title', []), - responsabilities=record.get('responsibilityStatement') + record.get("title", []), + responsabilities=record.get("responsibilityStatement"), ) - data.update({ - 'pid': record.get('pid'), - 'title_text': title_text - }) + data.update({"pid": record.get("pid"), "title_text": title_text}) return data diff --git a/rero_ils/modules/documents/extensions/__init__.py b/rero_ils/modules/documents/extensions/__init__.py index 86d917c9e5..64af6649db 100644 --- a/rero_ils/modules/documents/extensions/__init__.py +++ b/rero_ils/modules/documents/extensions/__init__.py @@ -24,9 +24,9 @@ from .title import TitleExtension __all__ = ( - 'AddMEFPidExtension', - 'ProvisionActivitiesExtension', - 'SeriesStatementExtension', - 'EditionStatementExtension', - 'TitleExtension' + "AddMEFPidExtension", + "ProvisionActivitiesExtension", + "SeriesStatementExtension", + "EditionStatementExtension", + "TitleExtension", ) diff --git a/rero_ils/modules/documents/extensions/add_mef_pid.py b/rero_ils/modules/documents/extensions/add_mef_pid.py index b0198b8b5d..ed908db50e 100644 --- a/rero_ils/modules/documents/extensions/add_mef_pid.py +++ b/rero_ils/modules/documents/extensions/add_mef_pid.py @@ -41,6 +41,7 @@ def add_mef_pid(self, record): :params record: dict - a document record. """ from rero_ils.modules.entities.remote_entities.api import RemoteEntity + remote_entities = [] # Search about all entities present in the document through fields @@ -49,17 +50,17 @@ def add_mef_pid(self, record): fields = record.get(field_name, []) if not isinstance(fields, list): fields = [fields] - remote_entities.extend([ - field['entity'] for field in fields if 'entity' in field - ]) + remote_entities.extend( + [field["entity"] for field in fields if "entity" in field] + ) # For each found entity, add its PID into the entity data. for entity_data in remote_entities: - if ref := entity_data.get('$ref'): + if ref := entity_data.get("$ref"): entity, _ = RemoteEntity.get_record_by_ref(ref) if entity: # inject mef pid - entity_data['pid'] = entity['pid'] + entity_data["pid"] = entity["pid"] def post_create(self, record): """Called after a record is initialized. 
diff --git a/rero_ils/modules/documents/extensions/edition_statement.py b/rero_ils/modules/documents/extensions/edition_statement.py index bd6eb17971..acac8441b2 100644 --- a/rero_ils/modules/documents/extensions/edition_statement.py +++ b/rero_ils/modules/documents/extensions/edition_statement.py @@ -32,30 +32,28 @@ class EditionStatementExtension(RecordExtension): @classmethod def format_text(cls, edition): """Format edition for _text.""" - designations = edition.get('editionDesignation', []) - responsibilities = edition.get('responsibility', []) + designations = edition.get("editionDesignation", []) + responsibilities = edition.get("responsibility", []) designation_output = {} for designation in designations: - language = designation.get('language', 'default') - value = designation.get('value', '') + language = designation.get("language", "default") + value = designation.get("value", "") designation_output[language] = value responsibility_output = {} for responsibility in responsibilities: - language = responsibility.get('language', 'default') - value = responsibility.get('value', '') + language = responsibility.get("language", "default") + value = responsibility.get("value", "") responsibility_output[language] = value edition_text = [] for key, value in designation_output.items(): designation = designation_output.get(key) - responsibility = responsibility_output.get(key, '') - value = remove_trailing_punctuation( - f'{designation} / {responsibility}' - ) + responsibility = responsibility_output.get(key, "") + value = remove_trailing_punctuation(f"{designation} / {responsibility}") if display_alternate_graphic_first(key): - edition_text.insert(0, {'value': value, 'language': key}) + edition_text.insert(0, {"value": value, "language": key}) else: - edition_text.append({'value': value, 'language': key}) + edition_text.append({"value": value, "language": key}) return edition_text def post_dump(self, record, data, dumper=None): @@ -65,6 +63,6 @@ def post_dump(self, record, data, dumper=None): :param data: dict - the data. :param dumper: record dumper - dumper helper. 
""" - editions = data.get('editionStatement', []) + editions = data.get("editionStatement", []) for edition in editions: - edition['_text'] = self.format_text(edition) + edition["_text"] = self.format_text(edition) diff --git a/rero_ils/modules/documents/extensions/provision_activities.py b/rero_ils/modules/documents/extensions/provision_activities.py index 598f8cd410..747a85f7c6 100644 --- a/rero_ils/modules/documents/extensions/provision_activities.py +++ b/rero_ils/modules/documents/extensions/provision_activities.py @@ -38,40 +38,36 @@ def format_text(cls, provision_activity): :returns: a text representation of the input dictionary :rtype: string """ - punctuation = { - EntityType.PLACE: ' ; ', - EntityType.AGENT: ' ; ', - 'Date': ', ' - } - statement_with_language = {'default': ''} + punctuation = {EntityType.PLACE: " ; ", EntityType.AGENT: " ; ", "Date": ", "} + statement_with_language = {"default": ""} last_statement_type = None # Perform each statement entries to build the best possible string - for statement in provision_activity.get('statement', []): - for label in statement['label']: - language = label.get('language', 'default') - statement_with_language.setdefault(language, '') + for statement in provision_activity.get("statement", []): + for label in statement["label"]: + language = label.get("language", "default") + statement_with_language.setdefault(language, "") if statement_with_language[language]: - if last_statement_type == statement['type']: + if last_statement_type == statement["type"]: statement_with_language[language] += punctuation[ last_statement_type ] - elif statement['type'] == EntityType.PLACE: - statement_with_language[language] += ' ; ' - elif statement['type'] == 'Date': - statement_with_language[language] += ', ' + elif statement["type"] == EntityType.PLACE: + statement_with_language[language] += " ; " + elif statement["type"] == "Date": + statement_with_language[language] += ", " else: - statement_with_language[language] += ' : ' + statement_with_language[language] += " : " - statement_with_language[language] += label['value'] - last_statement_type = statement['type'] + statement_with_language[language] += label["value"] + last_statement_type = statement["type"] # date field: remove ';' and append statement_text = [] for key, value in statement_with_language.items(): value = remove_trailing_punctuation(value) if display_alternate_graphic_first(key): - statement_text.insert(0, {'value': value, 'language': key}) + statement_text.insert(0, {"value": value, "language": key}) else: - statement_text.append({'value': value, 'language': key}) + statement_text.append({"value": value, "language": key}) return statement_text def post_dump(self, record, data, dumper=None): @@ -81,7 +77,6 @@ def post_dump(self, record, data, dumper=None): :param data: dict - the data. :param dumper: record dumper - dumper helper. 
""" - for provision_activity in data.get('provisionActivity', []): - if pub_state_text := self.format_text( - provision_activity): - provision_activity['_text'] = pub_state_text + for provision_activity in data.get("provisionActivity", []): + if pub_state_text := self.format_text(provision_activity): + provision_activity["_text"] = pub_state_text diff --git a/rero_ils/modules/documents/extensions/series_statement.py b/rero_ils/modules/documents/extensions/series_statement.py index d9293a9ac8..037c4a46cb 100644 --- a/rero_ils/modules/documents/extensions/series_statement.py +++ b/rero_ils/modules/documents/extensions/series_statement.py @@ -30,59 +30,52 @@ class SeriesStatementExtension(RecordExtension): @classmethod def format_text(cls, serie_statement): """Format series statement for template.""" + def get_title_language(data): """Get title and language.""" output = {} for value in data: - language = value.get('language', 'default') - title = value.get('value', '') + language = value.get("language", "default") + title = value.get("value", "") language_title = output.get(language, []) language_title.append(title) output[language] = language_title return output - serie_title = get_title_language( - serie_statement.get('seriesTitle', [])) - serie_enum = get_title_language( - serie_statement.get('seriesEnumeration', []) - ) + serie_title = get_title_language(serie_statement.get("seriesTitle", [])) + serie_enum = get_title_language(serie_statement.get("seriesEnumeration", [])) subserie_data = [] - for subserie in serie_statement.get('subseriesStatement', []): - subserie_title = get_title_language( - subserie.get('subseriesTitle', [])) - subserie_enum = get_title_language( - subserie.get('subseriesEnumeration', []) - ) - subserie_data.append({ - 'title': subserie_title, 'enum': subserie_enum}) + for subserie in serie_statement.get("subseriesStatement", []): + subserie_title = get_title_language(subserie.get("subseriesTitle", [])) + subserie_enum = get_title_language(subserie.get("subseriesEnumeration", [])) + subserie_data.append({"title": subserie_title, "enum": subserie_enum}) intermediate_output = {} for key, value in serie_title.items(): - intermediate_output[key] = ', '.join(value) + intermediate_output[key] = ", ".join(value) for key, value in serie_enum.items(): - value = ', '.join(value) - intermediate_value = intermediate_output.get(key, '') - intermediate_value = f'{intermediate_value}; {value}' + value = ", ".join(value) + intermediate_value = intermediate_output.get(key, "") + intermediate_value = f"{intermediate_value}; {value}" intermediate_output[key] = intermediate_value for intermediate_subserie in subserie_data: - for key, value in intermediate_subserie.get('title', {}).items(): - value = ', '.join(value) - intermediate_value = intermediate_output.get(key, '') - intermediate_value = f'{intermediate_value}. {value}' + for key, value in intermediate_subserie.get("title", {}).items(): + value = ", ".join(value) + intermediate_value = intermediate_output.get(key, "") + intermediate_value = f"{intermediate_value}. 
{value}" intermediate_output[key] = intermediate_value for key, value in subserie_enum.items(): - value = ', '.join(value) - intermediate_value = intermediate_output.get(key, '') - intermediate_value = f'{intermediate_value}; {value}' + value = ", ".join(value) + intermediate_value = intermediate_output.get(key, "") + intermediate_value = f"{intermediate_value}; {value}" intermediate_output[key] = intermediate_value serie_statement_text = [] for key, value in intermediate_output.items(): if display_alternate_graphic_first(key): - serie_statement_text.insert( - 0, {'value': value, 'language': key}) + serie_statement_text.insert(0, {"value": value, "language": key}) else: - serie_statement_text.append({'value': value, 'language': key}) + serie_statement_text.append({"value": value, "language": key}) return serie_statement_text @@ -93,8 +86,6 @@ def post_dump(self, record, data, dumper=None): :param data: dict - the data. :param dumper: record dumper - dumper helper. """ - series = data.get('seriesStatement', []) + series = data.get("seriesStatement", []) for series_element in series: - series_element["_text"] = self.format_text( - series_element - ) + series_element["_text"] = self.format_text(series_element) diff --git a/rero_ils/modules/documents/extensions/title.py b/rero_ils/modules/documents/extensions/title.py index 29abf5ae1e..d5c1e189a2 100644 --- a/rero_ils/modules/documents/extensions/title.py +++ b/rero_ils/modules/documents/extensions/title.py @@ -46,14 +46,14 @@ def format_text(cls, titles, responsabilities=None, with_subtitle=True): # force title to dict because ES gives AttrDict title = title.to_dict() title = dict(title) - if title.get('type') == 'bf:Title': - title_texts = \ - title_format_text(title=title, with_subtitle=with_subtitle) + if title.get("type") == "bf:Title": + title_texts = title_format_text( + title=title, with_subtitle=with_subtitle + ) if len(title_texts) == 1: - head_titles.append(title_texts[0].get('value')) + head_titles.append(title_texts[0].get("value")) else: - languages = [ - title.get('language') for title in title_texts] + languages = [title.get("language") for title in title_texts] def filter_list(value): """Check if a value should be removed from languages. 
@@ -62,56 +62,52 @@ def filter_list(value): vernacular from exits """ # keep simple language such as `default` - if '-' not in value: + if "-" not in value: return True - lang, _ = value.split('-') + lang, _ = value.split("-") # remove the latin form if a vernacular form exists return ( - not value.endswith('-latn') - or sum(v.startswith(f'{lang}-') for v in languages) - <= 1 + not value.endswith("-latn") + or sum(v.startswith(f"{lang}-") for v in languages) <= 1 ) # list of selected language filtered_languages = list(filter(filter_list, languages)) for title_text in title_texts: - language = title_text.get('language') + language = title_text.get("language") if language not in filtered_languages: continue if display_alternate_graphic_first(language): - head_titles.append(title_text.get('value')) + head_titles.append(title_text.get("value")) # If I don't have a title available, # I get the last value of the array if not len(head_titles): - head_titles.append(title_texts[-1].get('value')) - elif title.get('type') == 'bf:ParallelTitle': + head_titles.append(title_texts[-1].get("value")) + elif title.get("type") == "bf:ParallelTitle": parallel_title_texts = title_format_text( - title=title, with_subtitle=with_subtitle) + title=title, with_subtitle=with_subtitle + ) if len(parallel_title_texts) == 1: - parallel_titles.append( - parallel_title_texts[0].get('value')) + parallel_titles.append(parallel_title_texts[0].get("value")) else: for parallel_title_text in parallel_title_texts: - language = parallel_title_text.get('language') + language = parallel_title_text.get("language") if display_alternate_graphic_first(language): - parallel_titles.append( - parallel_title_text.get('value') - ) - output_value = '. '.join(head_titles) + parallel_titles.append(parallel_title_text.get("value")) + output_value = ". ".join(head_titles) for parallel_title in parallel_titles: - output_value += f' = {str(parallel_title)}' + output_value += f" = {str(parallel_title)}" responsabilities = responsabilities or [] for responsibility in responsabilities: if len(responsibility) == 1: - output_value += ' / ' + responsibility[0].get('value') + output_value += " / " + responsibility[0].get("value") else: for responsibility_language in responsibility: - value = responsibility_language.get('value') - language = responsibility_language.get( - 'language', 'default') + value = responsibility_language.get("value") + language = responsibility_language.get("language", "default") if display_alternate_graphic_first(language): - output_value += f' / {value}' + output_value += f" / {value}" return output_value def post_dump(self, record, data, dumper=None): @@ -121,7 +117,7 @@ def post_dump(self, record, data, dumper=None): :param data: dict - the data. :param dumper: record dumper - dumper helper. 
""" - titles = data.get('title', []) - bf_titles = list(filter(lambda t: t['type'] == 'bf:Title', titles)) + titles = data.get("title", []) + bf_titles = list(filter(lambda t: t["type"] == "bf:Title", titles)) for title in bf_titles: - title['_text'] = self.format_text(titles, with_subtitle=True) + title["_text"] = self.format_text(titles, with_subtitle=True) diff --git a/rero_ils/modules/documents/jsonresolver.py b/rero_ils/modules/documents/jsonresolver.py index bbb3dd675c..ca0ff1a43d 100644 --- a/rero_ils/modules/documents/jsonresolver.py +++ b/rero_ils/modules/documents/jsonresolver.py @@ -23,7 +23,7 @@ from ..jsonresolver import resolve_json_refs -@jsonresolver.route('/api/documents/', host='bib.rero.ch') +@jsonresolver.route("/api/documents/", host="bib.rero.ch") def document_resolver(pid): """Document resolver.""" - return resolve_json_refs('doc', pid) + return resolve_json_refs("doc", pid) diff --git a/rero_ils/modules/documents/loaders/marcxml.py b/rero_ils/modules/documents/loaders/marcxml.py index 49b8c67770..075cea93be 100644 --- a/rero_ils/modules/documents/loaders/marcxml.py +++ b/rero_ils/modules/documents/loaders/marcxml.py @@ -31,14 +31,12 @@ def marcxml_marshmallow_loader(): :return: converted marc21 json record. """ marcxml_records = split_stream(BytesIO(request.data)) - number_of_xml_records = 0 json_record = {} - for marcxml_record in marcxml_records: + for number_of_xml_records, marcxml_record in enumerate(marcxml_records): marc21json_record = create_record(marcxml_record) json_record = marc21.do(marc21json_record) # converted records are considered as draft - json_record['_draft'] = True + json_record["_draft"] = True if number_of_xml_records > 0: abort(400) - number_of_xml_records += 1 return json_record diff --git a/rero_ils/modules/documents/models.py b/rero_ils/modules/documents/models.py index 6966bf32ef..26dafc6599 100644 --- a/rero_ils/modules/documents/models.py +++ b/rero_ils/modules/documents/models.py @@ -29,24 +29,25 @@ class DocumentIdentifier(RecordIdentifier): """Sequence generator for Document identifiers.""" - __tablename__ = 'document_id' - __mapper_args__ = {'concrete': True} + __tablename__ = "document_id" + __mapper_args__ = {"concrete": True} recid = db.Column( - db.BigInteger().with_variant(db.Integer, 'sqlite'), - primary_key=True, autoincrement=True, + db.BigInteger().with_variant(db.Integer, "sqlite"), + primary_key=True, + autoincrement=True, ) class DocumentMetadata(db.Model, RecordMetadataBase): """Document record metadata.""" - __tablename__ = 'document_metadata' + __tablename__ = "document_metadata" class DocumentFictionType(Enum): """Document fiction types.""" - Fiction = 'fiction' - NonFiction = 'non_fiction' - Unspecified = 'unspecified' + Fiction = "fiction" + NonFiction = "non_fiction" + Unspecified = "unspecified" diff --git a/rero_ils/modules/documents/permissions.py b/rero_ils/modules/documents/permissions.py index c13464e1cb..cf8c49c642 100644 --- a/rero_ils/modules/documents/permissions.py +++ b/rero_ils/modules/documents/permissions.py @@ -22,16 +22,15 @@ from invenio_access.permissions import any_user from invenio_records_permissions.generators import Generator -from rero_ils.modules.permissions import AllowedByAction, \ - RecordPermissionPolicy +from rero_ils.modules.permissions import AllowedByAction, RecordPermissionPolicy # Actions to control Documents policies for CRUD operations -search_action = action_factory('doc-search') -read_action = action_factory('doc-read') -create_action = action_factory('doc-create') 
-update_action = action_factory('doc-update')
-delete_action = action_factory('doc-delete')
-access_action = action_factory('doc-access')
+search_action = action_factory("doc-search")
+read_action = action_factory("doc-read")
+create_action = action_factory("doc-create")
+update_action = action_factory("doc-update")
+delete_action = action_factory("doc-delete")
+access_action = action_factory("doc-access")
 
 
 class DisallowIfCannotEdit(Generator):
diff --git a/rero_ils/modules/documents/query.py b/rero_ils/modules/documents/query.py
index 8ac66ed800..5b7b16f407 100644
--- a/rero_ils/modules/documents/query.py
+++ b/rero_ils/modules/documents/query.py
@@ -29,6 +29,7 @@ def acquisition_filter():
 
     :return: Function that returns a nested query to retrieve new acquisition
     """
+
    def inner(values):
        # `values` param could contain one or two values. Values must be
@@ -54,39 +55,41 @@ def inner(values):
 
         # build acquisition date range query
         range_values = values.pop()
-        if '--' in range_values:
+        if "--" in range_values:
             # NG-Core's widget for a date range sends timestamps
             # We transform the timestamp into a date
-            values = dict(zip(['from', 'to'], range_values.split('--')))
-            if 'from' in values:
-                values['from'] = datetime.fromtimestamp(
-                    float(values['from'])/1000).strftime('%Y-%m-%d')
-            if 'to' in values:
-                values['to'] = datetime.fromtimestamp(
-                    float(values['to'])/1000).strftime('%Y-%m-%d')
+            values = dict(zip(["from", "to"], range_values.split("--")))
+            if "from" in values:
+                values["from"] = datetime.fromtimestamp(
+                    float(values["from"]) / 1000
+                ).strftime("%Y-%m-%d")
+            if "to" in values:
+                values["to"] = datetime.fromtimestamp(
+                    float(values["to"]) / 1000
+                ).strftime("%Y-%m-%d")
         else:
-            values = dict(zip(['from', 'to'], range_values.split(':')))
-        range_acquisition_dates = {'lte': values.get('to') or 'now/d'}
-        if values.get('from'):
-            range_acquisition_dates['gte'] = values.get('from')
+            values = dict(zip(["from", "to"], range_values.split(":")))
+        range_acquisition_dates = {"lte": values.get("to") or "now/d"}
+        if values.get("from"):
+            range_acquisition_dates["gte"] = values.get("from")
         # build general 'match' query (including acq date range query)
-        must_queries = [Q(
-            'range',
-            holdings__items__acquisition__date=range_acquisition_dates
-        )]
+        must_queries = [
+            Q("range", holdings__items__acquisition__date=range_acquisition_dates)
+        ]
         # Check others filters from command line and add them to the query if
         # needed
-        for level in ['location', 'library', 'organisation']:
+        for level in ["location", "library", "organisation"]:
             if arg := request.args.get(level):
-                field = f'holdings__items__acquisition__{level}_pid'
-                must_queries.append(Q('match', **{field: arg}))
+                field = f"holdings__items__acquisition__{level}_pid"
+                must_queries.append(Q("match", **{field: arg}))
         return Q(
-            'nested',
-            path='holdings.items.acquisition',
-            query=Q('bool', must=must_queries)
+            "nested",
+            path="holdings.items.acquisition",
+            query=Q("bool", must=must_queries),
         )
+
     return inner
@@ -102,18 +105,17 @@ def _build_nested_identifier_query(identifier):
     # * "123456789" --> id_value=123456789
     # * "(bf:Isbn)123456789 --> id_type=bf:Isbn, id_value=123456789
     # * "(bf:Local)kw(2) --> id_type=bf:Local, id_value=kw(2)
-    regexp = re.compile(r'^(\((?P<id_type>[\w\d:]+)\))?(?P<id_value>.*)$')
+    regexp = re.compile(r"^(\((?P<id_type>[\w\d:]+)\))?(?P<id_value>.*)$")
     matches = re.match(regexp, identifier)
-    criteria = Q('wildcard', nested_identifiers__value=matches['id_value'])
-    if matches['id_type']:
-        criteria &= Q('match',
-                      nested_identifiers__type=matches['id_type'])
-    return Q('nested', path='nested_identifiers', query=criteria)
+    criteria = Q("wildcard", nested_identifiers__value=matches["id_value"])
+    if matches["id_type"]:
+        criteria &= Q("match", nested_identifiers__type=matches["id_type"])
+    return Q("nested", path="nested_identifiers", query=criteria)
 
     def inner(identifiers):
         queries = [
-            _build_nested_identifier_query(identifier)
-            for identifier in identifiers
+            _build_nested_identifier_query(identifier) for identifier in identifiers
         ]
-        return Q('bool', should=queries)
+        return Q("bool", should=queries)
 
     return inner
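The nested identifier query above hinges on a single regular expression that splits a search term into an optional type prefix and a value. A minimal standalone sketch of that parsing, assuming the same named-group pattern (the `parse_identifier` helper is illustrative, not part of the module):

    import re

    # same pattern as in _build_nested_identifier_query()
    IDENTIFIER_RE = re.compile(r"^(\((?P<id_type>[\w\d:]+)\))?(?P<id_value>.*)$")

    def parse_identifier(identifier):
        """Split "(bf:Isbn)123456789" into an optional type and a value."""
        matches = IDENTIFIER_RE.match(identifier)
        return matches["id_type"], matches["id_value"]

    # the three comment examples from the source, verified:
    assert parse_identifier("123456789") == (None, "123456789")
    assert parse_identifier("(bf:Isbn)123456789") == ("bf:Isbn", "123456789")
    assert parse_identifier("(bf:Local)kw(2)") == ("bf:Local", "kw(2)")

When the type group is absent it simply does not participate in the match, which is why the query builder only adds the `match` criterion on `nested_identifiers__type` when `matches["id_type"]` is truthy.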
diff --git a/rero_ils/modules/documents/serializers/__init__.py b/rero_ils/modules/documents/serializers/__init__.py
index 2eecccf5b6..161676e62c 100644
--- a/rero_ils/modules/documents/serializers/__init__.py
+++ b/rero_ils/modules/documents/serializers/__init__.py
@@ -21,13 +21,21 @@ from invenio_records_rest.serializers.response import record_responsify
 
 from rero_ils.modules.documents.serializers.dc import DublinCoreSerializer
-from rero_ils.modules.documents.serializers.json import \
-    DocumentExportJSONSerializer, DocumentJSONSerializer
-from rero_ils.modules.documents.serializers.marc import \
-    DocumentMARCXMLSerializer, DocumentMARCXMLSRUSerializer
+from rero_ils.modules.documents.serializers.json import (
+    DocumentExportJSONSerializer,
+    DocumentJSONSerializer,
+)
+from rero_ils.modules.documents.serializers.marc import (
+    DocumentMARCXMLSerializer,
+    DocumentMARCXMLSRUSerializer,
+)
 from rero_ils.modules.documents.serializers.ris import RISSerializer
-from rero_ils.modules.serializers import RecordSchemaJSONV1, \
-    record_responsify_file, search_responsify, search_responsify_file
+from rero_ils.modules.serializers import (
+    RecordSchemaJSONV1,
+    record_responsify_file,
+    search_responsify,
+    search_responsify_file,
+)
 
 # Serializers
 # ===========
@@ -40,33 +48,31 @@
 # Records-REST serializers
 # ========================
-json_doc_search = search_responsify(_json, 'application/rero+json')
-json_doc_response = record_responsify(_json, 'application/rero+json')
+json_doc_search = search_responsify(_json, "application/rero+json")
+json_doc_response = record_responsify(_json, "application/rero+json")
 
 json_export_doc_search = search_responsify_file(
-    _json_export, 'application/export+json',
-    file_extension='json',
-    file_prefix='export'
+    _json_export, "application/export+json", file_extension="json", file_prefix="export"
 )
 json_export_doc_response = record_responsify_file(
-    _json_export, 'application/export+json',
-    file_extension='json',
-    file_prefix='export'
+    _json_export, "application/export+json", file_extension="json", file_prefix="export"
 )
 
 ris_doc_search = search_responsify_file(
-    ris_serializer, 'application/x-research-info-systems',
-    file_extension='ris',
-    file_prefix='export'
+    ris_serializer,
+    "application/x-research-info-systems",
+    file_extension="ris",
+    file_prefix="export",
 )
 ris_doc_response = record_responsify_file(
-    ris_serializer, 'application/x-research-info-systems',
-    file_extension='ris',
-    file_prefix='export'
+    ris_serializer,
+    "application/x-research-info-systems",
+    file_extension="ris",
+    file_prefix="export",
 )
 
-xml_dc_search = search_responsify(_xml_dc, 'application/xml')
-xml_dc_response = record_responsify(_xml_dc, 'application/xml')
+xml_dc_search = search_responsify(_xml_dc, "application/xml")
+xml_dc_response = record_responsify(_xml_dc, "application/xml")
 
-xml_marcxml_search = search_responsify(_xml_marcxml, 'application/xml')
-xml_marcxml_response = record_responsify(_xml_marcxml, 'application/xml')
-xml_marcxmlsru_search = search_responsify(_xml_marcxmlsru, 'application/xml')
+xml_marcxml_search = search_responsify(_xml_marcxml, "application/xml")
+xml_marcxml_response = record_responsify(_xml_marcxml, "application/xml")
+xml_marcxmlsru_search = search_responsify(_xml_marcxmlsru, "application/xml")
diff --git a/rero_ils/modules/documents/serializers/base.py b/rero_ils/modules/documents/serializers/base.py
index 7b510d2963..28dadc771f 100644
--- a/rero_ils/modules/documents/serializers/base.py
+++ b/rero_ils/modules/documents/serializers/base.py
@@ -22,17 +22,44 @@
 
 from flask import current_app
 
-from rero_ils.modules.commons.identifiers import IdentifierFactory, \
-    IdentifierStatus, IdentifierType
+from rero_ils.modules.commons.identifiers import (
+    IdentifierFactory,
+    IdentifierStatus,
+    IdentifierType,
+)
 from rero_ils.modules.entities.models import EntityType
 from rero_ils.modules.utils import get_base_url
 
 from ..api import DocumentsSearch
 
 CREATOR_ROLES = [
-    'aut', 'cmp', 'cre', 'dub', 'pht', 'ape', 'aqt', 'arc', 'art', 'aus',
-    'chr', 'cll', 'com', 'drt', 'dsr', 'enj', 'fmk', 'inv', 'ive', 'ivr',
-    'lbt', 'lsa', 'lyr', 'pra', 'prg', 'rsp', 'scl'
+    "aut",
+    "cmp",
+    "cre",
+    "dub",
+    "pht",
+    "ape",
+    "aqt",
+    "arc",
+    "art",
+    "aus",
+    "chr",
+    "cll",
+    "com",
+    "drt",
+    "dsr",
+    "enj",
+    "fmk",
+    "inv",
+    "ive",
+    "ivr",
+    "lbt",
+    "lsa",
+    "lyr",
+    "pra",
+    "prg",
+    "rsp",
+    "scl",
 ]
@@ -55,58 +82,60 @@ def _get_document_types(self):
         """Return document types."""
         doc_types = []
         for main_type, subtype in [
-            (doc_type.get('main_type'), doc_type.get('subtype'))
-            for doc_type in self.record['type']]:
+            (doc_type.get("main_type"), doc_type.get("subtype"))
+            for doc_type in self.record["type"]
+        ]:
             if subtype:
-                main_type = f'{main_type} / {subtype}'
+                main_type = f"{main_type} / {subtype}"
             doc_types.append(main_type)
         return doc_types
 
     def _get_pid(self):
         """Return reference id."""
-        return self.record['pid']
+        return self.record["pid"]
 
     def _is_masked(self):
         """Return masked information."""
-        return 'Yes' if self.record.get('_masked') else 'No'
+        return "Yes" if self.record.get("_masked") else "No"
 
     def _get_title(self):
         """Return first title."""
         return next(
-            filter(lambda x: x.get('type') == 'bf:Title',
-                   self.record.get('title')), {}
-        ).get('_text')
+            filter(lambda x: x.get("type") == "bf:Title", self.record.get("title")), {}
+        ).get("_text")
 
     def _get_series_statement(self):
         """Return series statement title."""
         return [
-            data['value']
-            for statement in self.record.get('seriesStatement', [])
-            for data in statement.get('_text', [])
+            data["value"]
+            for statement in self.record.get("seriesStatement", [])
+            for data in statement.get("_text", [])
         ]
 
     def _get_secondary_title(self):
         """Return secondary title."""
         # return series title if it exists
-        if 'seriesStatement' in self.record:
+        if "seriesStatement" in self.record:
             return self._get_series_statement()
 
         def _extract_part_of_title_callback(part_of):
             """Extract title for the partOf document."""
-            pid = part_of.get('document', {}).get('pid')
+            pid = part_of.get("document", {}).get("pid")
             if es_doc := DocumentsSearch().get_record_by_pid(pid):
-                title = es_doc.to_dict().get('title', [])
+                title = es_doc.to_dict().get("title", [])
                 return next(
-                    filter(lambda x: x.get('type') == 'bf:Title',
-                           title), {}
-                ).get('_text')
+                    filter(lambda x: x.get("type") == "bf:Title", title), {}
+                ).get("_text")
 
         # get partOf title
-        return [title
-                for title in map(_extract_part_of_title_callback,
-                                 self.record.get('partOf', []))
-                if title]
+        return [
+            title
+            for title in map(
+                _extract_part_of_title_callback, self.record.get("partOf", [])
+            )
+            if title
+        ]
 
     def _get_localized_contribution(self, agent):
         """Return localized contribution.
@@ -114,7 +143,7 @@ def _get_localized_contribution(self, agent):
         :param agent: contribution agent data.
         :returns: Function that return localized agent based on language.
         """
-        key = f'authorized_access_point_{self._language}'
+        key = f"authorized_access_point_{self._language}"
         return agent.get(key)
 
     def _get_authors(self):
@@ -122,89 +151,94 @@ def _get_authors(self):
 
         def _extract_contribution_callback(contribution) -> str:
             """Extract value for the given contribution."""
-            agent = contribution.get('entity', {})
-            role = contribution.get('role', [])
+            agent = contribution.get("entity", {})
+            role = contribution.get("role", [])
             if any(r in role for r in CREATOR_ROLES):
-                return self._get_localized_contribution(agent) \
-                    or agent.get('authorized_access_point')
+                return self._get_localized_contribution(agent) or agent.get(
+                    "authorized_access_point"
+                )
 
-        return [contribution
-                for contribution in map(_extract_contribution_callback,
-                                        self.record.get('contribution', [])
-                                        )
-                if contribution]
+        return [
+            contribution
+            for contribution in map(
+                _extract_contribution_callback, self.record.get("contribution", [])
+            )
+            if contribution
+        ]
 
     def _get_secondary_authors(self):
         """Return other authors."""
 
         def _extract_contribution_callback(contribution) -> str:
             """Extract value for the given contribution."""
-            agent = contribution.get('entity', {})
-            role = contribution.get('role', [])
+            agent = contribution.get("entity", {})
+            role = contribution.get("role", [])
             if all(r not in role for r in CREATOR_ROLES):
-                return self._get_localized_contribution(agent) \
-                    or agent.get('preferred_name')
+                return self._get_localized_contribution(agent) or agent.get(
+                    "preferred_name"
+                )
 
-        return [contribution
-                for contribution in map(_extract_contribution_callback,
-                                        self.record.get('contribution', [])
-                                        )
-                if contribution]
+        return [
+            contribution
+            for contribution in map(
+                _extract_contribution_callback, self.record.get("contribution", [])
+            )
+            if contribution
+        ]
 
     def _get_publication_year(self):
         """Return date."""
         for start_date, end_date in [
-            (provision.get('startDate', ''), provision.get('endDate'))
-            for provision in self.record.get('provisionActivity', [])
-            if provision['type'] == 'bf:Publication'
-            and any(label in provision
-                    for label in ['startDate', 'endDate'])]:
+            (provision.get("startDate", ""), provision.get("endDate"))
+            for provision in self.record.get("provisionActivity", [])
+            if provision["type"] == "bf:Publication"
+            and any(label in provision for label in ["startDate", "endDate"])
+        ]:
             # return only the first date
-            return f'{start_date} - {end_date}' if end_date else start_date
+            return f"{start_date} - {end_date}" if end_date else start_date
 
     def _get_start_pages(self):
         """Return start pages."""
         return [
-            numbering['pages'].split('-')[0]
-            for part_of in self.record.get('partOf', [])
-            for numbering in part_of.get('numbering', [])
-            if 'pages' in numbering
-        ] or ([self.record['extent']] if self.record.get('extent') else [])
+            numbering["pages"].split("-")[0]
+            for part_of in self.record.get("partOf", [])
+            for numbering in part_of.get("numbering", [])
+            if "pages" in numbering
+        ] or ([self.record["extent"]] if self.record.get("extent") else [])
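The creator/secondary-author split above rests on the CREATOR_ROLES list: a contribution is a "creator" when any of its roles appears in the list, and a secondary author when none do. A self-contained sketch of that any()/all() predicate pair, with an abbreviated role list and invented sample data (the real list and record shapes are those shown in the diff):

    CREATOR_ROLES = {"aut", "cmp", "cre"}  # abbreviated; the real list has 27 codes

    contributions = [
        {"entity": {"authorized_access_point": "Doe, Jane"}, "role": ["aut"]},
        {"entity": {"authorized_access_point": "Smith, Bob"}, "role": ["trl"]},
    ]

    # mirrors _get_authors(): keep contributions with at least one creator role
    creators = [
        c["entity"]["authorized_access_point"]
        for c in contributions
        if any(r in c["role"] for r in CREATOR_ROLES)
    ]
    # mirrors _get_secondary_authors(): keep contributions with no creator role
    others = [
        c["entity"]["authorized_access_point"]
        for c in contributions
        if all(r not in c["role"] for r in CREATOR_ROLES)
    ]
    assert creators == ["Doe, Jane"] and others == ["Smith, Bob"]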
 
     def _get_end_pages(self):
         """Return end pages."""
         return [
-            numbering['pages'].split('-')[1]
-            for part_of in self.record.get('partOf', [])
-            for numbering in part_of.get('numbering', [])
-            if 'pages' in numbering
-            and '-' in numbering['pages']
+            numbering["pages"].split("-")[1]
+            for part_of in self.record.get("partOf", [])
+            for numbering in part_of.get("numbering", [])
+            if "pages" in numbering and "-" in numbering["pages"]
         ]
 
     def _get_publication_places(self):
         """Return publication places."""
         return [
-            data['value']
-            for provision in self.record.get('provisionActivity', [])
-            for statement in provision.get('statement', [])
-            for data in statement.get('label', [])
-            if provision['type'] == 'bf:Publication'
-            and statement['type'] == EntityType.PLACE
+            data["value"]
+            for provision in self.record.get("provisionActivity", [])
+            for statement in provision.get("statement", [])
+            for data in statement.get("label", [])
+            if provision["type"] == "bf:Publication"
+            and statement["type"] == EntityType.PLACE
         ]
 
     def _get_languages(self):
         """Return languages."""
-        return [lang.get('value') for lang in self.record.get('language', [])]
+        return [lang.get("value") for lang in self.record.get("language", [])]
 
     def _get_publisher(self):
         """Return publishers."""
         return [
-            data['value']
-            for provision in self.record.get('provisionActivity', [])
-            for statement in provision.get('statement', [])
-            for data in statement.get('label', [])
-            if provision['type'] == 'bf:Publication'
-            and statement['type'] == EntityType.AGENT
+            data["value"]
+            for provision in self.record.get("provisionActivity", [])
+            for statement in provision.get("statement", [])
+            for data in statement.get("label", [])
+            if provision["type"] == "bf:Publication"
+            and statement["type"] == EntityType.AGENT
         ]
 
     def _get_identifiers(self, types, states=None):
@@ -216,8 +250,8 @@ def _get_identifiers(self, types, states=None):
         """
         identifiers = [
             IdentifierFactory.create_identifier(identifier)
-            for identifier in self.record.get('identifiedBy', [])
-            if identifier['type'] in types
+            for identifier in self.record.get("identifiedBy", [])
+            if identifier["type"] in types
         ]
         states = states or [IdentifierStatus.UNDEFINED]
         return [
@@ -239,8 +273,9 @@ def _get_issn(self, states=None):
 
         :param states: list of identifier status.
""" - return self._get_identifiers([IdentifierType.ISSN, - IdentifierType.L_ISSN], states) + return self._get_identifiers( + [IdentifierType.ISSN, IdentifierType.L_ISSN], states + ) def _get_doi(self): """Return DOI identifiers.""" @@ -249,9 +284,9 @@ def _get_doi(self): def _get_electronic_locators(self): """Return electronic locators.""" return [ - locator['url'] - for locator in self.record.get('electronicLocator', []) - if locator['type'] in ['resource', 'versionOfResource'] + locator["url"] + for locator in self.record.get("electronicLocator", []) + if locator["type"] in ["resource", "versionOfResource"] ] def _get_permalink(self): @@ -261,32 +296,32 @@ def _get_permalink(self): def _get_subjects(self): """Return keywords.""" - return self.record.get('subjects', []) + return self.record.get("subjects", []) def _get_editions(self): """Return editions.""" return [ - edition.get('value') - for edition_statement in self.record.get('editionStatement', []) - for edition in edition_statement.get('_text', []) + edition.get("value") + for edition_statement in self.record.get("editionStatement", []) + for edition in edition_statement.get("_text", []) ] def _get_volume_numbers(self): """Return volume numbers.""" return [ - numbering['volume'] - for part_of in self.record.get('partOf', []) - for numbering in part_of.get('numbering', []) - if 'volume' in numbering + numbering["volume"] + for part_of in self.record.get("partOf", []) + for numbering in part_of.get("numbering", []) + if "volume" in numbering ] def _get_issue_numbers(self): """Return issue numbers.""" return [ - numbering['issue'] - for part_of in self.record.get('partOf', []) - for numbering in part_of.get('numbering', []) - if 'issue' in numbering + numbering["issue"] + for part_of in self.record.get("partOf", []) + for numbering in part_of.get("numbering", []) + if "issue" in numbering ] @@ -296,18 +331,33 @@ class DocumentFormatter(BaseDocumentFormatterMixin): def __init__(self, record, language=None, include_fields=None): """Initialize RIS formatter with the specific record.""" super().__init__(record) - self._language = language or current_app \ - .config.get('BABEL_DEFAULT_LANGUAGE', 'en') + self._language = language or current_app.config.get( + "BABEL_DEFAULT_LANGUAGE", "en" + ) self._include_fields = include_fields or [ - 'document_pid', 'document_type', 'document_title', - 'document_secondary_title', 'document_creator', 'document_masked', - 'document_secondary_authors', 'document_publisher', - 'document_publication_year', 'document_publication_place', - 'document_edition_statement', 'document_series_statement', - 'document_start_page', 'document_end_page', 'document_language', - 'document_isbn', 'document_issn', 'document_electronic_locator', - 'document_permalink', 'document_subjects', 'document_doi', - 'document_volume_number', 'document_issue_number', + "document_pid", + "document_type", + "document_title", + "document_secondary_title", + "document_creator", + "document_masked", + "document_secondary_authors", + "document_publisher", + "document_publication_year", + "document_publication_place", + "document_edition_statement", + "document_series_statement", + "document_start_page", + "document_end_page", + "document_language", + "document_isbn", + "document_issn", + "document_electronic_locator", + "document_permalink", + "document_subjects", + "document_doi", + "document_volume_number", + "document_issue_number", ] def format(self): @@ -318,29 +368,29 @@ def format(self): def available_fields(self): """All available 
         return {
-            'document_pid': self._get_pid,
-            'document_type': self._get_document_types,
-            'document_masked': self._is_masked,
-            'document_title': self._get_title,
-            'document_secondary_title': self._get_secondary_title,
-            'document_creator': self._get_authors,
-            'document_secondary_authors': self._get_secondary_authors,
-            'document_publisher': self._get_publisher,
-            'document_publication_year': self._get_publication_year,
-            'document_publication_place': self._get_publication_places,
-            'document_edition_statement': self._get_editions,
-            'document_series_statement': self._get_series_statement,
-            'document_start_page': self._get_start_pages,
-            'document_end_page': self._get_end_pages,
-            'document_language': self._get_languages,
-            'document_isbn': self._get_isbn,
-            'document_issn': self._get_issn,
-            'document_electronic_locator': self._get_electronic_locators,
-            'document_permalink': self._get_permalink,
-            'document_subjects': self._get_subjects,
-            'document_doi': self._get_doi,
-            'document_volume_number': self._get_volume_numbers,
-            'document_issue_number': self._get_issue_numbers,
+            "document_pid": self._get_pid,
+            "document_type": self._get_document_types,
+            "document_masked": self._is_masked,
+            "document_title": self._get_title,
+            "document_secondary_title": self._get_secondary_title,
+            "document_creator": self._get_authors,
+            "document_secondary_authors": self._get_secondary_authors,
+            "document_publisher": self._get_publisher,
+            "document_publication_year": self._get_publication_year,
+            "document_publication_place": self._get_publication_places,
+            "document_edition_statement": self._get_editions,
+            "document_series_statement": self._get_series_statement,
+            "document_start_page": self._get_start_pages,
+            "document_end_page": self._get_end_pages,
+            "document_language": self._get_languages,
+            "document_isbn": self._get_isbn,
+            "document_issn": self._get_issn,
+            "document_electronic_locator": self._get_electronic_locators,
+            "document_permalink": self._get_permalink,
+            "document_subjects": self._get_subjects,
+            "document_doi": self._get_doi,
+            "document_volume_number": self._get_volume_numbers,
+            "document_issue_number": self._get_issue_numbers,
         }
 
     def _fetch_fields(self):
diff --git a/rero_ils/modules/documents/serializers/dc.py b/rero_ils/modules/documents/serializers/dc.py
index 1ee7e01ad5..b4ac72411c 100644
--- a/rero_ils/modules/documents/serializers/dc.py
+++ b/rero_ils/modules/documents/serializers/dc.py
@@ -20,8 +20,9 @@
 
 from dcxml import simpledc
 from flask import current_app, request
-from invenio_records_rest.serializers.dc import \
-    DublinCoreSerializer as _DublinCoreSerializer
+from invenio_records_rest.serializers.dc import (
+    DublinCoreSerializer as _DublinCoreSerializer,
+)
 from lxml import etree
 from lxml.builder import ElementMaker
 from werkzeug.local import LocalProxy
@@ -32,8 +33,7 @@
 from ..dumpers import document_replace_refs_dumper
 from ..utils import process_i18n_literal_fields
 
-DEFAULT_LANGUAGE = LocalProxy(
-    lambda: current_app.config.get('BABEL_DEFAULT_LANGUAGE'))
+DEFAULT_LANGUAGE = LocalProxy(lambda: current_app.config.get("BABEL_DEFAULT_LANGUAGE"))
 
 
 class DublinCoreSerializer(_DublinCoreSerializer):
@@ -45,30 +45,31 @@ class DublinCoreSerializer(_DublinCoreSerializer):
 
     # Default namespace mapping.
     namespace = {
-        'dc': 'http://purl.org/dc/elements/1.1/',
-        'xml': 'xml',
+        "dc": "http://purl.org/dc/elements/1.1/",
+        "xml": "xml",
     }
     # Default container element attributes.
     # TODO: save local dc schema
     container_attribs = {
-        '{http://www.w3.org/2001/XMLSchema-instance}schemaLocation':
-            'https://www.loc.gov/standards/sru '
-            'https://www.loc.gov/standards/sru/recordSchemas/dc-schema.xsd',
+        "{http://www.w3.org/2001/XMLSchema-instance}schemaLocation": "https://www.loc.gov/standards/sru "
+        "https://www.loc.gov/standards/sru/recordSchemas/dc-schema.xsd",
     }
     # Default container element.
-    container_element = 'record'
+    container_element = "record"
 
-    def transform_record(self, pid, record, links_factory=None,
-                         language=DEFAULT_LANGUAGE, **kwargs):
+    def transform_record(
+        self, pid, record, links_factory=None, language=DEFAULT_LANGUAGE, **kwargs
+    ):
         """Transform record into an intermediate representation."""
         record = record.dumps(document_replace_refs_dumper)
-        if contributions := record.pop('contribution', []):
-            record['contribution'] = process_i18n_literal_fields(contributions)
+        if contributions := record.pop("contribution", []):
+            record["contribution"] = process_i18n_literal_fields(contributions)
         record = dublincore.do(record, language=language)
         return record
 
-    def transform_search_hit(self, pid, record, links_factory=None,
-                             language=DEFAULT_LANGUAGE, **kwargs):
+    def transform_search_hit(
+        self, pid, record, links_factory=None, language=DEFAULT_LANGUAGE, **kwargs
+    ):
         """Transform search result hit into an intermediate representation."""
         record = Document.get_record_by_pid(pid)
         return self.transform_record(
@@ -76,11 +77,12 @@ def transform_search_hit(self, pid, record, links_factory=None,
             record=record,
             links_factory=links_factory,
             language=language,
-            **kwargs
+            **kwargs,
         )
 
-    def serialize_search(self, pid_fetcher, search_result, links=None,
-                         item_links_factory=None, **kwargs):
+    def serialize_search(
+        self, pid_fetcher, search_result, links=None, item_links_factory=None, **kwargs
+    ):
         """Serialize a search result.
 
         :param pid_fetcher: Persistent identifier fetcher.
@@ -88,37 +90,37 @@ def serialize_search(self, pid_fetcher, search_result, links=None,
         :param links: Dictionary of links to add to response.
         :param item_links_factory: Factory function for record links.
""" - total = search_result['hits']['total']['value'] - sru = search_result['hits'].get('sru', {}) - start_record = sru.get('start_record', 0) - maximum_records = sru.get('maximum_records', 0) - query = sru.get('query') - query_es = sru.get('query_es') + total = search_result["hits"]["total"]["value"] + sru = search_result["hits"].get("sru", {}) + start_record = sru.get("start_record", 0) + maximum_records = sru.get("maximum_records", 0) + query = sru.get("query") + query_es = sru.get("query_es") next_record = start_record + maximum_records + 1 element = ElementMaker() xml_root = element.searchRetrieveResponse() if sru: - xml_root.append(element.version('1.1')) + xml_root.append(element.version("1.1")) xml_root.append(element.numberOfRecords(str(total))) xml_records = element.records() - language = request.args.get('ln', DEFAULT_LANGUAGE) - for hit in search_result['hits']['hits']: - record = hit['_source'] - pid = record['pid'] + language = request.args.get("ln", DEFAULT_LANGUAGE) + for hit in search_result["hits"]["hits"]: + record = hit["_source"] + pid = record["pid"] record = self.transform_search_hit( pid=pid, record=record, links_factory=item_links_factory, language=language, - **kwargs + **kwargs, ) element_record = simpledc.dump_etree( record, container=self.container_element, nsmap=self.namespace, - attribs=self.container_attribs + attribs=self.container_attribs, ) xml_records.append(element_record) xml_root.append(xml_records) @@ -132,12 +134,11 @@ def serialize_search(self, pid_fetcher, search_result, links=None, if start_record: echoed_search_rr.append(element.startRecord(str(start_record))) if next_record > 1 and next_record < total: - echoed_search_rr.append( - element.nextRecordPosition(str(next_record))) + echoed_search_rr.append(element.nextRecordPosition(str(next_record))) if maximum_records: - echoed_search_rr.append(element.maximumRecords( - str(maximum_records))) - echoed_search_rr.append(element.recordPacking('XML')) + echoed_search_rr.append(element.maximumRecords(str(maximum_records))) + echoed_search_rr.append(element.recordPacking("XML")) xml_root.append(echoed_search_rr) - return etree.tostring(xml_root, encoding='utf-8', method='xml', - pretty_print=True) + return etree.tostring( + xml_root, encoding="utf-8", method="xml", pretty_print=True + ) diff --git a/rero_ils/modules/documents/serializers/json.py b/rero_ils/modules/documents/serializers/json.py index c184c5b06f..7a81663272 100644 --- a/rero_ils/modules/documents/serializers/json.py +++ b/rero_ils/modules/documents/serializers/json.py @@ -24,8 +24,11 @@ from werkzeug.local import LocalProxy from rero_ils.modules.documents.utils import process_i18n_literal_fields -from rero_ils.modules.documents.views import create_title_alternate_graphic, \ - create_title_responsibilites, create_title_variants +from rero_ils.modules.documents.views import ( + create_title_alternate_graphic, + create_title_responsibilites, + create_title_variants, +) from rero_ils.modules.libraries.api import LibrariesSearch from rero_ils.modules.locations.api import LocationsSearch from rero_ils.modules.organisations.api import OrganisationsSearch @@ -35,8 +38,9 @@ from ..dumpers.indexer import IndexerDumper from ..extensions import TitleExtension -GLOBAL_VIEW_CODE = LocalProxy(lambda: current_app.config.get( - 'RERO_ILS_SEARCH_GLOBAL_VIEW_CODE')) +GLOBAL_VIEW_CODE = LocalProxy( + lambda: current_app.config.get("RERO_ILS_SEARCH_GLOBAL_VIEW_CODE") +) class DocumentJSONSerializer(JSONSerializer): @@ -46,10 +50,11 @@ class 
     def _get_view_information():
         """Get the `view_id` and `view_code` to use to build response."""
         view_id = None
-        view_code = request.args.get('view', GLOBAL_VIEW_CODE)
+        view_code = request.args.get("view", GLOBAL_VIEW_CODE)
         if view_code != GLOBAL_VIEW_CODE:
-            view_id = OrganisationsSearch() \
-                .get_record_by_viewcode(view_code, 'pid')['pid']
+            view_id = OrganisationsSearch().get_record_by_viewcode(view_code, "pid")[
+                "pid"
+            ]
         return view_id, view_code
 
     def preprocess_record(self, pid, record, links_factory=None, **kwargs):
@@ -58,30 +63,26 @@ def preprocess_record(self, pid, record, links_factory=None, **kwargs):
 
         # TODO: uses dumpers
         # build responsibility data for display purpose
-        responsibility_statement = rec.get('responsibilityStatement', [])
-        if responsibilities := create_title_responsibilites(
-            responsibility_statement
-        ):
-            rec['ui_responsibilities'] = responsibilities
-        titles = rec.get('title', [])
+        responsibility_statement = rec.get("responsibilityStatement", [])
+        if responsibilities := create_title_responsibilites(responsibility_statement):
+            rec["ui_responsibilities"] = responsibilities
+        titles = rec.get("title", [])
         if altgr_titles := create_title_alternate_graphic(titles):
-            rec['ui_title_altgr'] = altgr_titles
+            rec["ui_title_altgr"] = altgr_titles
         if altgr_titles_responsibilities := create_title_alternate_graphic(
             titles, responsibility_statement
         ):
-            rec['ui_title_altgr_responsibilities'] = \
-                altgr_titles_responsibilities
+            rec["ui_title_altgr_responsibilities"] = altgr_titles_responsibilities
         if variant_titles := create_title_variants(titles):
-            rec['ui_title_variants'] = variant_titles
+            rec["ui_title_variants"] = variant_titles
         data = super().preprocess_record(
-            pid=pid, record=rec, links_factory=links_factory, kwargs=kwargs)
-        metadata = data['metadata']
+            pid=pid, record=rec, links_factory=links_factory, kwargs=kwargs
+        )
+        metadata = data["metadata"]
         resolve = request.args.get(
-            'resolve',
-            default=False,
-            type=lambda v: v.lower() in ['true', '1']
+            "resolve", default=False, type=lambda v: v.lower() in ["true", "1"]
         )
         if request and resolve:
             IndexerDumper()._process_host_document(None, metadata)
@@ -90,25 +91,24 @@ def _postprocess_search_hit(self, hit: dict) -> None:
         """Post-process each hit of a search result."""
         view_id, view_code = DocumentJSONSerializer._get_view_information()
-        metadata = hit.get('metadata', {})
-        pid = metadata.get('pid')
+        metadata = hit.get("metadata", {})
+        pid = metadata.get("pid")
 
-        titles = metadata.get('title', [])
-        if text_title := TitleExtension.format_text(
-            titles, with_subtitle=False
-        ):
-            metadata['ui_title_text'] = text_title
+        titles = metadata.get("title", [])
+        if text_title := TitleExtension.format_text(titles, with_subtitle=False):
+            metadata["ui_title_text"] = text_title
         if text_title := TitleExtension.format_text(
             titles,
-            responsabilities=metadata.get('responsibilityStatement', []),
-            with_subtitle=False
+            responsabilities=metadata.get("responsibilityStatement", []),
+            with_subtitle=False,
         ):
-            metadata['ui_title_text_responsibility'] = text_title
+            metadata["ui_title_text_responsibility"] = text_title
 
         if view_code != GLOBAL_VIEW_CODE:
-            metadata['items'] = [
-                item for item in metadata.get('items', [])
-                if item['organisation'].get('organisation_pid') == view_id
+            metadata["items"] = [
+                item
+                for item in metadata.get("items", [])
+                if item["organisation"].get("organisation_pid") == view_id
             ]
 
         super()._postprocess_search_hit(hit)
@@ -118,7 +118,8 @@ def _postprocess_search_aggregations(self, aggregations: dict) -> None:
 
         # format the results of the facet 'year' to be displayed
         # as range
-        if aggregations.get('year'):
+        if aggregations.get("year"):
+
             def extract_year(key, default):
                 """Extract year from year aggregation.
 
                 :param key: the key of the aggregation.
                 :param default: the default value.
                 :return: the year in yyyy format.
                 """
                 # default could be None
-                if year := aggregations['year'][key].get('value'):
+                if year := aggregations["year"][key].get("value"):
                     return float(year)
                 return default
 
             # transform aggregation to send configuration
             # for ng-core range widget.
             # this allows you to fill in the fields on the frontend.
-            aggregations['year'] = {
-                'type': 'range',
-                'config': {
-                    'min': extract_year('year_min', 0.0),
-                    'max': extract_year('year_max', 9999.9),
-                    'step': 1
-                }
+            aggregations["year"] = {
+                "type": "range",
+                "config": {
+                    "min": extract_year("year_min", 0.0),
+                    "max": extract_year("year_max", 9999.9),
+                    "step": 1,
+                },
             }
 
-        if aggregations.get('acquisition'):
+        if aggregations.get("acquisition"):
             # format the results of facet 'acquisition' to be displayed
             # as date range with min and max date (limit)
             def extract_acquisition_date(key, default):
                 """Extract acquisition date from aggregation.
 
                 :param key: the key of the aggregation.
                 :param default: the default date.
                 :return: the date in yyyy-MM-dd format.
                 """
-                return aggregations['acquisition'][key].get(
-                    'value_as_string', aggregations['acquisition'][key].get(
-                        'value', default))
+                return aggregations["acquisition"][key].get(
+                    "value_as_string",
+                    aggregations["acquisition"][key].get("value", default),
+                )
 
             # transform aggregation to send configuration
             # for ng-core date-range widget.
             # this allows you to fill in the fields on the frontend.
-            aggregations['acquisition'] = {
-                'type': 'date-range',
-                'config': {
-                    'min': extract_acquisition_date('date_min', '1900-01-01'),
-                    'max': extract_acquisition_date(
-                        'date_max', datetime.now().strftime('%Y-%m-%d')
-                    )
-                }
+            aggregations["acquisition"] = {
+                "type": "date-range",
+                "config": {
+                    "min": extract_acquisition_date("date_min", "1900-01-01"),
+                    "max": extract_acquisition_date(
+                        "date_max", datetime.now().strftime("%Y-%m-%d")
+                    ),
+                },
            }
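The year-facet rewrite above converts an Elasticsearch min/max aggregation pair into the configuration dict the ng-core range widget expects. A runnable sketch of that transformation in isolation; the sample aggregation payload is invented for illustration:

    # invented ES response fragment: min/max sub-aggregations of the year facet
    aggregations = {
        "year": {"year_min": {"value": 1950.0}, "year_max": {"value": 2024.0}}
    }

    def extract_year(key, default):
        # `value` can be None when no document carries a year at all
        if year := aggregations["year"][key].get("value"):
            return float(year)
        return default

    # replace the raw aggregation with the widget configuration
    aggregations["year"] = {
        "type": "range",
        "config": {
            "min": extract_year("year_min", 0.0),
            "max": extract_year("year_max", 9999.9),
            "step": 1,
        },
    }
    assert aggregations["year"]["config"]["min"] == 1950.0

The acquisition facet follows the same shape, except that it prefers the aggregation's `value_as_string` (an ISO date) and falls back to a fixed lower bound and today's date.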
 
-        if aggr_org := aggregations.get('organisation', {}).get('buckets', []):
+        if aggr_org := aggregations.get("organisation", {}).get("buckets", []):
             # For a "local view", we only need the facet on the location
             # organisation. We can filter the organisation aggregation to keep
             # only this value
             if view_code != GLOBAL_VIEW_CODE:
-                aggr_org = list(
-                    filter(lambda term: term['key'] == view_id, aggr_org)
-                )
-                aggregations['organisation']['buckets'] = aggr_org
+                aggr_org = list(filter(lambda term: term["key"] == view_id, aggr_org))
+                aggregations["organisation"]["buckets"] = aggr_org
             for org in aggr_org:
                 # filter libraries by organisation
                 # Keep only libraries for the current selected organisation.
-                query = LibrariesSearch() \
-                    .filter('term', organisation__pid=org['key'])\
-                    .source(['pid', 'name'])
+                query = (
+                    LibrariesSearch()
+                    .filter("term", organisation__pid=org["key"])
+                    .source(["pid", "name"])
+                )
                 org_libraries = {hit.pid: hit.name for hit in query.scan()}
-                org['library']['buckets'] = list(filter(
-                    lambda lib: lib['key'] in org_libraries,
-                    org['library']['buckets']
-                ))
-                for term in org['library']['buckets']:
-                    if term['key'] in org_libraries:
-                        term['name'] = org_libraries[term['key']]
+                org["library"]["buckets"] = list(
+                    filter(
+                        lambda lib: lib["key"] in org_libraries,
+                        org["library"]["buckets"],
+                    )
+                )
+                for term in org["library"]["buckets"]:
+                    if term["key"] in org_libraries:
+                        term["name"] = org_libraries[term["key"]]
                 # filter locations by library
-                for library in org['library']['buckets']:
-                    query = LocationsSearch() \
-                        .filter('term', library__pid=library['key'])\
-                        .source(['pid', 'name'])
+                for library in org["library"]["buckets"]:
+                    query = (
+                        LocationsSearch()
+                        .filter("term", library__pid=library["key"])
+                        .source(["pid", "name"])
+                    )
                     lib_locations = {hit.pid: hit.name for hit in query.scan()}
-                    library['location']['buckets'] = list(filter(
-                        lambda lib: lib['key'] in lib_locations,
-                        library['location']['buckets']
-                    ))
-                    for term in library['location']['buckets']:
-                        if term['key'] in lib_locations:
-                            term['name'] = lib_locations[term['key']]
+                    library["location"]["buckets"] = list(
+                        filter(
+                            lambda lib: lib["key"] in lib_locations,
+                            library["location"]["buckets"],
+                        )
+                    )
+                    for term in library["location"]["buckets"]:
+                        if term["key"] in lib_locations:
+                            term["name"] = lib_locations[term["key"]]
 
             # Complete Organisation aggregation information
             # with corresponding resource name
             JSONSerializer.enrich_bucket_with_data(
-                aggr_org,
-                OrganisationsSearch, 'name'
+                aggr_org, OrganisationsSearch, "name"
             )
 
             # For a "local view", we replace the organisation aggregation by
             # a library aggregation containing only for the local organisation
             if view_code != GLOBAL_VIEW_CODE:
-                aggregations['library'] = aggr_org[0].get('library', {})
-                del aggregations['organisation']
+                aggregations["library"] = aggr_org[0].get("library", {})
+                del aggregations["organisation"]
 
         super()._postprocess_search_aggregations(aggregations)
@@ -238,7 +245,7 @@ def _format_args():
         """Get JSON dump indentation and separators."""
         return dict(
             indent=2,
-            separators=(', ', ': '),
+            separators=(", ", ": "),
         )
 
     def serialize(self, pid, record, links_factory=None, **kwargs):
         """Serialize a single record and persistent identifier.
 
         :param pid: Persistent identifier instance.
         :param record: Record instance.
         :param links_factory: Factory function for record links.
         """
         record = record.dumps(document_replace_refs_dumper)
-        if contributions := record.pop('contribution', []):
-            record['contribution'] = process_i18n_literal_fields(contributions)
+        if contributions := record.pop("contribution", []):
+            record["contribution"] = process_i18n_literal_fields(contributions)
         return json.dumps(record, **self._format_args())
 
-    def serialize_search(self, pid_fetcher, search_result, links=None,
-                         item_links_factory=None, **kwargs):
+    def serialize_search(
+        self, pid_fetcher, search_result, links=None, item_links_factory=None, **kwargs
+    ):
         """Serialize a search result.
 
         :param pid_fetcher: Persistent identifier fetcher.
@@ -262,8 +270,5 @@ def serialize_search(self, pid_fetcher, search_result, links=None,
         :param links: Dictionary of links to add to response.
         :param item_links_factory: Factory function for record links.
""" - records = [ - hit['_source'] - for hit in search_result['hits']['hits'] - ] + records = [hit["_source"] for hit in search_result["hits"]["hits"]] return stream_with_context(json.dumps(records, **self._format_args())) diff --git a/rero_ils/modules/documents/serializers/marc.py b/rero_ils/modules/documents/serializers/marc.py index 6be7a77ecb..cb4599fcbe 100644 --- a/rero_ils/modules/documents/serializers/marc.py +++ b/rero_ils/modules/documents/serializers/marc.py @@ -29,14 +29,14 @@ from werkzeug.local import LocalProxy from rero_ils.modules.documents.dojson.contrib.jsontomarc21 import to_marc21 -from rero_ils.modules.documents.dojson.contrib.jsontomarc21.model import \ - replace_contribution_sources +from rero_ils.modules.documents.dojson.contrib.jsontomarc21.model import ( + replace_contribution_sources, +) from rero_ils.modules.entities.remote_entities.api import RemoteEntitiesSearch from rero_ils.modules.serializers import JSONSerializer from rero_ils.modules.utils import strip_chars -DEFAULT_LANGUAGE = LocalProxy( - lambda: current_app.config.get('BABEL_DEFAULT_LANGUAGE')) +DEFAULT_LANGUAGE = LocalProxy(lambda: current_app.config.get("BABEL_DEFAULT_LANGUAGE")) class DocumentMARCXMLSerializer(JSONSerializer): @@ -71,10 +71,18 @@ def __init__(self, xslt_filename=None, schema_class=None): # with_holdings_items=True) # - def transform_search_hit(self, pid, record_hit, language=None, - with_holdings_items=True, organisation_pids=None, - library_pids=None, location_pids=None, - links_factory=None, **kwargs): + def transform_search_hit( + self, + pid, + record_hit, + language=None, + with_holdings_items=True, + organisation_pids=None, + library_pids=None, + location_pids=None, + links_factory=None, + **kwargs, + ): """Transform search result hit into an intermediate representation.""" return to_marc21.do( record_hit, @@ -82,7 +90,7 @@ def transform_search_hit(self, pid, record_hit, language=None, with_holdings_items=with_holdings_items, organisation_pids=organisation_pids, library_pids=library_pids, - location_pids=location_pids + location_pids=location_pids, ) # Needed if we use it for documents serialization ! 
@@ -102,50 +110,57 @@ def transform_search_hit(self, pid, record_hit, language=None,
     #             **self.dumps_kwargs
     #         )
 
-    def transform_records(self, hits, pid_fetcher, language,
-                          with_holdings_items=True, organisation_pids=None,
-                          library_pids=None, location_pids=None,
-                          item_links_factory=None):
+    def transform_records(
+        self,
+        hits,
+        pid_fetcher,
+        language,
+        with_holdings_items=True,
+        organisation_pids=None,
+        library_pids=None,
+        location_pids=None,
+        item_links_factory=None,
+    ):
         """Transform records into an intermediate representation."""
         # get all linked contributions
         contribution_pids = []
         for hit in hits:
-            for contribution in hit['_source'].get('contribution', []):
-                contribution_pid = contribution.get('entity', {}).get('pid')
-                if contribution_pid:
+            for contribution in hit["_source"].get("contribution", []):
+                if contribution_pid := contribution.get("entity", {}).get("pid"):
                     contribution_pids.append(contribution_pid)
-        search = RemoteEntitiesSearch() \
-            .filter('terms', pid=list(set(contribution_pids)))
+        search = RemoteEntitiesSearch().filter(
+            "terms", pid=list(set(contribution_pids))
+        )
         es_contributions = {}
         for hit in search.scan():
             contribution = hit.to_dict()
-            es_contributions[contribution['pid']] = contribution
+            es_contributions[contribution["pid"]] = contribution
 
-        order = current_app.config.get('RERO_ILS_AGENTS_LABEL_ORDER', {})
-        source_order = order.get(language, order.get(order['fallback'], []))
+        order = current_app.config.get("RERO_ILS_AGENTS_LABEL_ORDER", {})
+        source_order = order.get(language, order.get(order["fallback"], []))
 
         records = []
         for hit in hits:
-            document = hit['_source']
-            contributions = document.get('contribution', [])
+            document = hit["_source"]
+            contributions = document.get("contribution", [])
             for contribution in contributions:
-                contribution_pid = contribution.get('entity', {}).get('pid')
+                contribution_pid = contribution.get("entity", {}).get("pid")
                 if contribution_pid in es_contributions:
-                    contribution['entity'] = deepcopy(
-                        es_contributions[contribution_pid])
+                    contribution["entity"] = deepcopy(
+                        es_contributions[contribution_pid]
+                    )
                     replace_contribution_sources(
-                        contribution=contribution,
-                        source_order=source_order
+                        contribution=contribution, source_order=source_order
                     )
 
             record = self.transform_search_hit(
-                pid=pid_fetcher(hit['_id'], document),
+                pid=pid_fetcher(hit["_id"], document),
                 record_hit=document,
                 language=language,
                 with_holdings_items=with_holdings_items,
                 organisation_pids=organisation_pids,
                 library_pids=library_pids,
                 location_pids=location_pids,
-                links_factory=item_links_factory
+                links_factory=item_links_factory,
             )
 
             # complete the contributions from refs
@@ -184,29 +199,24 @@ class DocumentMARCXMLSRUSerializer(DocumentMARCXMLSerializer):
     MARC21_REC = "http://www.loc.gov/MARC21/slim"
     """MARCXML XML Schema"""
 
-    def dumps_etree(self, total, records, sru, xslt_filename=None,
-                    prefix=None):
+    def dumps_etree(self, total, records, sru, xslt_filename=None, prefix=None):
         """Dump records into a etree."""
-        element = ElementMaker(
-            namespace=self.MARC21_ZS,
-            nsmap={'zs': self.MARC21_ZS}
-        )
+        element = ElementMaker(namespace=self.MARC21_ZS, nsmap={"zs": self.MARC21_ZS})
 
         def dump_record(record, idx):
             """Dump a single record."""
             rec_element = ElementMaker(
-                namespace=self.MARC21_REC,
-                nsmap={prefix: self.MARC21_REC}
+                namespace=self.MARC21_REC, nsmap={prefix: self.MARC21_REC}
             )
             data_element = ElementMaker()
             rec = element.record()
-            rec.append(element.recordPacking('xml'))
-            rec.append(element.recordSchema('marcxml'))
+            rec.append(element.recordPacking("xml"))
+            rec.append(element.recordSchema("marcxml"))
             rec_record_data = element.recordData()
             rec_data = rec_element.record()
-            if leader := record.get('leader'):
+            if leader := record.get("leader"):
                 rec_data.append(data_element.leader(leader))
 
             if isinstance(record, GroupableOrderedDict):
@@ -219,52 +229,54 @@ def dump_record(record, idx):
                 if len(df) == 3:
                     if isinstance(subfields, string_types):
                         controlfield = data_element.controlfield(subfields)
-                        controlfield.attrib['tag'] = df[:3]
+                        controlfield.attrib["tag"] = df[:3]
                         rec_data.append(controlfield)
                     elif isinstance(subfields, (list, tuple, set)):
                         for subfield in subfields:
                             controlfield = data_element.controlfield(subfield)
-                            controlfield.attrib['tag'] = df[:3]
+                            controlfield.attrib["tag"] = df[:3]
                             rec_data.append(controlfield)
                 else:
                     # Skip leader.
-                    if df == 'leader':
+                    if df == "leader":
                         continue
                     if not isinstance(subfields, (list, tuple, set)):
                         subfields = (subfields,)
-                    df = df.replace('_', ' ')
+                    df = df.replace("_", " ")
                     for subfield in subfields:
                         if not isinstance(subfield, (list, tuple, set)):
                             subfield = [subfield]
                         for s in subfield:
                             datafield = data_element.datafield()
-                            datafield.attrib['tag'] = df[:3]
-                            datafield.attrib['ind1'] = df[3]
-                            datafield.attrib['ind2'] = df[4]
+                            datafield.attrib["tag"] = df[:3]
+                            datafield.attrib["ind1"] = df[3]
+                            datafield.attrib["ind2"] = df[4]
                             if isinstance(s, GroupableOrderedDict):
-                                items = s.iteritems(
-                                    with_order=False, repeated=True)
+                                items = s.iteritems(with_order=False, repeated=True)
                             elif isinstance(s, dict):
                                 items = iteritems(s)
                             else:
                                 datafield.append(data_element.subfield(s))
-                                items = tuple()
+                                items = ()
                             for code, value in items:
                                 if isinstance(value, string_types):
-                                    datafield.append(data_element.subfield(
-                                        strip_chars(value), code=code)
+                                    datafield.append(
+                                        data_element.subfield(
+                                            strip_chars(value), code=code
+                                        )
                                     )
                                 else:
                                     for v in value:
                                         datafield.append(
                                             data_element.subfield(
-                                                strip_chars(v), code=code)
+                                                strip_chars(v), code=code
+                                            )
                                         )
                             rec_data.append(datafield)
             rec_record_data.append(rec_data)
@@ -275,14 +287,14 @@ def dump_record(record, idx):
         if isinstance(records, dict):
             root = dump_record(records, 1)
         else:
-            number_of_records = total['value']
-            start_record = sru.get('start_record', 0)
-            maximum_records = sru.get('maximum_records', 0)
-            query = sru.get('query')
-            query_es = sru.get('query_es')
+            number_of_records = total["value"]
+            start_record = sru.get("start_record", 0)
+            maximum_records = sru.get("maximum_records", 0)
+            query = sru.get("query")
+            query_es = sru.get("query_es")
             next_record = start_record + maximum_records
             root = element.searchRetrieveResponse()
-            root.append(element.version('1.1'))
+            root.append(element.version("1.1"))
             root.append(element.numberOfRecords(str(number_of_records)))
             if next_record > 1 and next_record < number_of_records:
                 root.append(element.nextRecordPosition(str(next_record)))
@@ -296,16 +308,14 @@ def dump_record(record, idx):
             if query_es:
                 echoed_search_rr.append(element.query_es(query_es))
             if start_record:
-                echoed_search_rr.append(
-                    element.startRecord(str(start_record)))
+                echoed_search_rr.append(element.startRecord(str(start_record)))
             if maximum_records:
-                echoed_search_rr.append(
-                    element.maximumRecords(str(maximum_records)))
-            echoed_search_rr.append(element.recordPacking('XML'))
+                echoed_search_rr.append(element.maximumRecords(str(maximum_records)))
+            echoed_search_rr.append(element.recordPacking("XML"))
             echoed_search_rr.append(
-                element.recordSchema(
-                    'info:sru/schema/1/marcxml-v1.1-light'))
-            echoed_search_rr.append(element.resultSetTTL('0'))
+                element.recordSchema("info:sru/schema/1/marcxml-v1.1-light")
+            )
+            echoed_search_rr.append(element.resultSetTTL("0"))
             root.append(echoed_search_rr)
 
         # Needed if we use display with XSLT file.
@@ -319,21 +329,15 @@ def dump_record(record, idx):
     def dumps(self, total, records, sru, xslt_filename=None, **kwargs):
         """Dump records into a MarcXMLSRU file."""
         root = self.dumps_etree(
-            total=total,
-            records=records,
-            sru=sru,
-            xslt_filename=xslt_filename
+            total=total, records=records, sru=sru, xslt_filename=xslt_filename
         )
         return etree.tostring(
-            root,
-            pretty_print=True,
-            xml_declaration=True,
-            encoding='UTF-8',
-            **kwargs
+            root, pretty_print=True, xml_declaration=True, encoding="UTF-8", **kwargs
        )
 
-    def serialize_search(self, pid_fetcher, search_result,
-                         item_links_factory=None, **kwargs):
+    def serialize_search(
+        self, pid_fetcher, search_result, item_links_factory=None, **kwargs
+    ):
         """Serialize a search result.
 
         :param pid_fetcher: Persistent identifier fetcher.
@@ -342,32 +346,26 @@ def serialize_search(self, pid_fetcher, search_result,
             (Default: ``None``)
         :returns: The objects serialized.
         """
-        language = request.args.get('ln', DEFAULT_LANGUAGE)
-        with_holdings_items = not request.args.get('without_items', False)
-        sru = search_result['hits'].get('sru', {})
-        query_es = sru.get('query_es', '')
-        organisation_pids = re.findall(
-            r'organisation_pid:(\d*)',
-            query_es, re.DOTALL)
-        library_pids = re.findall(
-            r'library_pid:(\d*)',
-            query_es, re.DOTALL)
-        location_pids = re.findall(
-            r'holdings.location.pid:(\d*)',
-            query_es, re.DOTALL)
+        language = request.args.get("ln", DEFAULT_LANGUAGE)
+        with_holdings_items = not request.args.get("without_items", False)
+        sru = search_result["hits"].get("sru", {})
+        query_es = sru.get("query_es", "")
+        organisation_pids = re.findall(r"organisation_pid:(\d*)", query_es, re.DOTALL)
+        library_pids = re.findall(r"library_pid:(\d*)", query_es, re.DOTALL)
+        location_pids = re.findall(r"holdings.location.pid:(\d*)", query_es, re.DOTALL)
         records = self.transform_records(
-            hits=search_result['hits']['hits'],
+            hits=search_result["hits"]["hits"],
             pid_fetcher=pid_fetcher,
             language=language,
             with_holdings_items=with_holdings_items,
             organisation_pids=organisation_pids,
             library_pids=library_pids,
             location_pids=location_pids,
-            item_links_factory=item_links_factory
+            item_links_factory=item_links_factory,
        )
         return self.dumps(
-            total=search_result['hits']['total'],
+            total=search_result["hits"]["total"],
             sru=sru,
             records=records,
-            **self.dumps_kwargs
+            **self.dumps_kwargs,
         )
diff --git a/rero_ils/modules/documents/serializers/ris.py b/rero_ils/modules/documents/serializers/ris.py
index 82f5990a28..2b434d3c49 100644
--- a/rero_ils/modules/documents/serializers/ris.py
+++ b/rero_ils/modules/documents/serializers/ris.py
@@ -22,13 +22,12 @@
 from invenio_i18n.ext import current_i18n
 from invenio_records_rest.serializers.base import SerializerMixinInterface
 
-from rero_ils.modules.commons.identifiers import IdentifierFactory, \
-    IdentifierType
+from rero_ils.modules.commons.identifiers import IdentifierFactory, IdentifierType
 from rero_ils.utils import get_i18n_supported_languages
 
-from .base import BaseDocumentFormatterMixin
 from ..dumpers import document_replace_refs_dumper
 from ..utils import process_i18n_literal_fields
+from .base import BaseDocumentFormatterMixin
 
 
 class RISSerializer(SerializerMixinInterface):
@@ -42,23 +41,23 @@ def serialize(self, pid, record, links_factory=None, **kwargs):
 
         :param links_factory: Factory function for record links.
         """
         record = record.dumps(document_replace_refs_dumper)
-        if contributions := record.pop('contribution', []):
-            record['contribution'] = process_i18n_literal_fields(contributions)
+        if contributions := record.pop("contribution", []):
+            record["contribution"] = process_i18n_literal_fields(contributions)
 
         # enrich record data with encoded identifier alternatives. The
         # record identifiers list should contain only distinct identifier !
-        identifiers = set([
+        identifiers = {
             IdentifierFactory.create_identifier(identifier_data)
-            for identifier_data in record.get('identifiedBy', [])
-        ])
+            for identifier_data in record.get("identifiedBy", [])
+        }
         for identifier in list(identifiers):
             identifiers.update(identifier.get_alternatives())
-        record['identifiedBy'] = \
-            [identifier.dump() for identifier in identifiers]
+        record["identifiedBy"] = [identifier.dump() for identifier in identifiers]
 
         return RISFormatter(record=record).format()
 
-    def serialize_search(self, pid_fetcher, search_result, links=None,
-                         item_links_factory=None, **kwargs):
+    def serialize_search(
+        self, pid_fetcher, search_result, links=None, item_links_factory=None, **kwargs
+    ):
         """Serialize a search result.
 
         :param pid_fetcher: Persistent identifier fetcher.
@@ -66,9 +65,11 @@ def serialize_search(self, pid_fetcher, search_result, links=None,
         :param links: Dictionary of links to add to response.
         :param item_links_factory: Factory function for record links.
         """
+
         def generate_export():
-            for hit in search_result['hits']['hits']:
-                yield RISFormatter(record=hit['_source']).format()
+            for hit in search_result["hits"]["hits"]:
+                yield RISFormatter(record=hit["_source"]).format()
+
         return stream_with_context(generate_export())
 
 
@@ -76,24 +77,22 @@ class RISFormatter(BaseDocumentFormatterMixin):
     """RIS formatter class."""
 
     # RIS separator between key and value
-    separator = ' - '
+    separator = " - "
 
     def __init__(self, record, doctype_mapping=None, export_fields=None):
         """Initialize RIS formatter with the specific record."""
         super().__init__(record)
-        config = current_app.config \
-            .get('RERO_ILS_EXPORT_MAPPER').get('ris', {})
+        config = current_app.config.get("RERO_ILS_EXPORT_MAPPER").get("ris", {})
         language = request.args.get("lang", current_i18n.language)
         if not language or language not in get_i18n_supported_languages():
-            language = current_app.config.get('BABEL_DEFAULT_LANGUAGE', 'en')
+            language = current_app.config.get("BABEL_DEFAULT_LANGUAGE", "en")
         self._language = language
-        self._doctype_mapping = doctype_mapping \
-            or config.get('doctype_mapping')
-        self._export_fields = export_fields or config.get('export_fields')
+        self._doctype_mapping = doctype_mapping or config.get("doctype_mapping")
+        self._export_fields = export_fields or config.get("export_fields")
 
     def format(self):
         """Return RIS export for single record."""
-        return self._fetch_fields() + f'ER{self.separator}\n'
+        return f"{self._fetch_fields()}ER{self.separator}\n"
 
     def _doctype_mapper(self, main_type: str, sub_type: str = None):
         """Document type mapper.
 
         :param main_type: main type of document type.
         :param sub_type: subtype of main document type.
         :return: mapped RIS reference type.
""" - for ris_doc_type, func in self._doctype_mapping.items(): - if func(main_type, sub_type): - return ris_doc_type - return 'GEN' + return next( + ( + ris_doc_type + for ris_doc_type, func in self._doctype_mapping.items() + if func(main_type, sub_type) + ), + "GEN", + ) def _get_document_types(self): """Return document types.""" - if 'type' not in self.record: - return ['GEN'] + if "type" not in self.record: + return ["GEN"] return [ - self._doctype_mapper(doc_type.get('main_type'), - doc_type.get('subtype')) - for doc_type in self.record['type'] + self._doctype_mapper(doc_type.get("main_type"), doc_type.get("subtype")) + for doc_type in self.record["type"] ] def _get_city(self): @@ -132,33 +134,33 @@ def _get_primary_year(self): def _fetch_fields(self): """Return formatted output based on export fields.""" available_fields = { - 'TY': self._get_document_types(), - 'ID': self._get_pid(), - 'TI': self._get_title(), - 'T2': self._get_secondary_title(), - 'AU': self._get_authors(), - 'A2': self._get_secondary_authors(), - 'DA': self._get_publication_year(), - 'ET': self._get_editions(), - 'SP': self._get_start_pages(), - 'EP': self._get_end_pages(), - 'CY': self._get_publication_places(), - 'LA': self._get_languages(), - 'PB': self._get_publisher(), - 'SN': self._get_identifiers([IdentifierType.ISBN, - IdentifierType.ISSN, - IdentifierType.L_ISSN]), - 'UR': self._get_electronic_locators(), - 'UR': self._get_permalink(), - 'KW': self._get_subjects(), - 'DO': self._get_identifiers([IdentifierType.DOI]), - 'VL': self._get_volume_numbers(), - 'IS': self._get_issue_numbers(), - 'PP': self._get_publication_places(), - 'Y1': self._get_publication_year(), - 'PY': self._get_publication_year() + "TY": self._get_document_types(), + "ID": self._get_pid(), + "TI": self._get_title(), + "T2": self._get_secondary_title(), + "AU": self._get_authors(), + "A2": self._get_secondary_authors(), + "DA": self._get_publication_year(), + "ET": self._get_editions(), + "SP": self._get_start_pages(), + "EP": self._get_end_pages(), + "CY": self._get_publication_places(), + "LA": self._get_languages(), + "PB": self._get_publisher(), + "SN": self._get_identifiers( + [IdentifierType.ISBN, IdentifierType.ISSN, IdentifierType.L_ISSN] + ), + "UR": self._get_electronic_locators(), + "UR": self._get_permalink(), + "KW": self._get_subjects(), + "DO": self._get_identifiers([IdentifierType.DOI]), + "VL": self._get_volume_numbers(), + "IS": self._get_issue_numbers(), + "PP": self._get_publication_places(), + "Y1": self._get_publication_year(), + "PY": self._get_publication_year(), } - out = '' + out = "" for field in self._export_fields: if value := available_fields[field]: out += self._format_output_row(field, value) @@ -171,10 +173,10 @@ def _format_output_row(self, field, value): :param value: value for RIS tag :returns formatted row string """ - out = '' + out = "" if isinstance(value, list): for v in value: - out += f'{field}{self.separator}{v}\n' + out += f"{field}{self.separator}{v}\n" else: - out += f'{field}{self.separator}{value}\n' + out += f"{field}{self.separator}{value}\n" return out diff --git a/rero_ils/modules/documents/utils.py b/rero_ils/modules/documents/utils.py index a4b71f353c..895248346c 100644 --- a/rero_ils/modules/documents/utils.py +++ b/rero_ils/modules/documents/utils.py @@ -28,30 +28,25 @@ from invenio_jsonschemas.proxies import current_jsonschemas from werkzeug.local import LocalProxy -from ..utils import get_schema_for_resource, memoize from ...utils import get_i18n_supported_languages +from ..utils 
import get_schema_for_resource, memoize -_records_state = LocalProxy(lambda: current_app.extensions['invenio-records']) +_records_state = LocalProxy(lambda: current_app.extensions["invenio-records"]) @memoize(timeout=3600) -def get_document_types_from_schema(schema='doc'): +def get_document_types_from_schema(schema="doc"): """Create document type definition from schema.""" path = current_jsonschemas.url_to_path(get_schema_for_resource(schema)) schema = current_jsonschemas.get_schema(path=path) schema = _records_state.replace_refs(schema) - schema_types = schema\ - .get('properties', {})\ - .get('type', {})\ - .get('items', {})\ - .get('oneOf', []) + schema_types = ( + schema.get("properties", {}).get("type", {}).get("items", {}).get("oneOf", []) + ) doc_types = {} for schema_type in schema_types: - schema_title = schema_type['title'] - sub_types = schema_type\ - .get('properties', {})\ - .get('subtype', {})\ - .get('enum', []) + schema_title = schema_type["title"] + sub_types = schema_type.get("properties", {}).get("subtype", {}).get("enum", []) doc_types[schema_title] = {sub_type: True for sub_type in sub_types} return doc_types @@ -62,11 +57,11 @@ def filter_document_type_buckets(buckets): if doc_types := get_document_types_from_schema(): if buckets: for term in buckets: - main_type = term['key'] - term['document_subtype']['buckets'] = [ + main_type = term["key"] + term["document_subtype"]["buckets"] = [ subtype_bucket - for subtype_bucket in term['document_subtype']['buckets'] - if doc_types.get(main_type, {}).get(subtype_bucket['key']) + for subtype_bucket in term["document_subtype"]["buckets"] + if doc_types.get(main_type, {}).get(subtype_bucket["key"]) ] @@ -81,7 +76,7 @@ def display_alternate_graphic_first(language): :return: true if the alternate graphic value must be display first :rtype: bool """ - return not re.search(r'(default|^und-|-zyyy$)', language) + return not re.search(r"(default|^und-|-zyyy$)", language) def title_format_text_alternate_graphic(titles, responsabilities=None): @@ -95,27 +90,25 @@ def title_format_text_alternate_graphic(titles, responsabilities=None): altgr_titles = {} parallel_titles = {} for title in titles: - if title.get('type') == 'bf:Title': - title_texts = \ - title_format_text(title=title, with_subtitle=True) + if title.get("type") == "bf:Title": + title_texts = title_format_text(title=title, with_subtitle=True) # the first title is remove because it is already used for the # heading title title_texts.pop(0) for title_text in title_texts: - language = title_text.get('language') + language = title_text.get("language") altgr = altgr_titles.get(language, []) - altgr.append(title_text.get('value')) + altgr.append(title_text.get("value")) altgr_titles[language] = altgr - elif title.get('type') == 'bf:ParallelTitle': - parallel_title_texts = title_format_text( - title=title, with_subtitle=True) + elif title.get("type") == "bf:ParallelTitle": + parallel_title_texts = title_format_text(title=title, with_subtitle=True) parallel_title_texts.pop(0) # the first parallel title is removed because it is already used # for the heading title for parallel_title_text in parallel_title_texts: - language = parallel_title_text.get('language') + language = parallel_title_text.get("language") parallel_title = parallel_titles.get(language, []) - parallel_title.append(parallel_title_text.get('value')) + parallel_title.append(parallel_title_text.get("value")) parallel_titles[language] = parallel_title # if language in parallel_titles: # parallel_titles.get(language, []) @@ 
-126,22 +119,22 @@ def title_format_text_alternate_graphic(titles, responsabilities=None): responsabilities = responsabilities or [] for responsibility in responsabilities: for responsibility_language in responsibility: - language = responsibility_language.get('language', 'default') + language = responsibility_language.get("language", "default") responsibility_text = responsibilities_text.get(language, []) - responsibility_text.append(responsibility_language.get('value')) + responsibility_text.append(responsibility_language.get("value")) responsibilities_text[language] = responsibility_text output = [] for language in altgr_titles: - altgr_text = '. '.join(altgr_titles[language]) + altgr_text = ". ".join(altgr_titles[language]) if language in parallel_titles: - parallel_title_text = ' = '.join(parallel_titles[language]) - altgr_text += ' = ' + str(parallel_title_text) + parallel_title_text = " = ".join(parallel_titles[language]) + altgr_text += " = " + str(parallel_title_text) if language in responsibilities_text: - responsibility_text = ' / '.join(responsibilities_text[language]) - altgr_text += ' / ' + str(responsibility_text) + responsibility_text = " / ".join(responsibilities_text[language]) + altgr_text += " / " + str(responsibility_text) - output.append({'value': altgr_text, 'language': language}) + output.append({"value": altgr_text, "language": language}) return output @@ -155,9 +148,8 @@ def title_variant_format_text(titles, with_subtitle=True): """ variant_title_texts = [] for title in titles: - if title.get('type') == 'bf:VariantTitle': - title_texts = \ - title_format_text(title=title, with_subtitle=with_subtitle) + if title.get("type") == "bf:VariantTitle": + title_texts = title_format_text(title=title, with_subtitle=with_subtitle) variant_title_texts.extend(title_texts) return variant_title_texts @@ -173,51 +165,48 @@ def title_format_text(title, with_subtitle=True): """ # build main_title string per language main_title_output = {} - for main_title in title.get('mainTitle', []): - language = main_title.get('language', 'default') - value = main_title.get('value', '') + for main_title in title.get("mainTitle", []): + language = main_title.get("language", "default") + value = main_title.get("value", "") main_title_output.setdefault(language, []).append(value) # build subtitle string per language subtitle_output = {} if with_subtitle: - subtitles = title.get('subtitle', []) + subtitles = title.get("subtitle", []) for subtitle in subtitles: - language = subtitle.get('language', 'default') - value = subtitle.get('value', '') + language = subtitle.get("language", "default") + value = subtitle.get("value", "") subtitle_output.setdefault(language, []).append(value) # build part strings per language part_output = {} - for part in title.get('part', []): + for part in title.get("part", []): data = {} # part number first - for part_type in ['partNumber', 'partName']: + for part_type in ["partNumber", "partName"]: part_type_values = part.get(part_type, {}) # again repeatable for part_type_value in part_type_values: - language = part_type_value.get('language', 'default') - if value := part_type_value.get('value'): + language = part_type_value.get("language", "default") + if value := part_type_value.get("value"): data.setdefault(language, []).append(value) # each part number and part name are separate by a comma for key, value in data.items(): - part_output.setdefault(key, []).append(', '.join(value)) + part_output.setdefault(key, []).append(", ".join(value)) # each part are separate by a 
point - part_output = { - key: '. '.join(values) - for key, values in part_output.items() - } + part_output = {key: ". ".join(values) for key, values in part_output.items()} # build title text strings lists, # if a vernacular title exists it will be place on top of the title list title_text = [] for language, main_title in main_title_output.items(): - text = '. '.join(main_title) + text = ". ".join(main_title) if language in subtitle_output: - subtitle_text = ' : '.join(subtitle_output[language]) - text = f'{text} : {subtitle_text}' + subtitle_text = " : ".join(subtitle_output[language]) + text = f"{text} : {subtitle_text}" if language in part_output: - text = f'{text}. {part_output[language]}' - data = {'value': text, 'language': language} + text = f"{text}. {part_output[language]}" + data = {"value": text, "language": language} if display_alternate_graphic_first(language): title_text.insert(0, data) else: @@ -233,37 +222,38 @@ def create_authorized_access_point(agent): """ if not agent: return None - authorized_access_point = agent.get('preferred_name') + authorized_access_point = agent.get("preferred_name") from rero_ils.modules.entities.models import EntityType - if agent.get('type') == EntityType.PERSON: - date_parts = [agent.get('date_of_birth'), agent.get('date_of_death')] - date = '-'.join(filter(None, date_parts)) - numeration = agent.get('numeration') - fuller_form_of_name = agent.get('fuller_form_of_name') - qualifier = agent.get('qualifier') + + if agent.get("type") == EntityType.PERSON: + date_parts = [agent.get("date_of_birth"), agent.get("date_of_death")] + date = "-".join(filter(None, date_parts)) + numeration = agent.get("numeration") + fuller_form_of_name = agent.get("fuller_form_of_name") + qualifier = agent.get("qualifier") if numeration: - authorized_access_point += f' {numeration}' + authorized_access_point += f" {numeration}" if qualifier: - authorized_access_point += f', {qualifier}' + authorized_access_point += f", {qualifier}" if date: - authorized_access_point += f', {date}' + authorized_access_point += f", {date}" else: if fuller_form_of_name: - authorized_access_point += f' ({fuller_form_of_name})' + authorized_access_point += f" ({fuller_form_of_name})" if date: - authorized_access_point += f', {date}' + authorized_access_point += f", {date}" if qualifier: - authorized_access_point += f', {qualifier}' - elif agent.get('type') == EntityType.ORGANISATION: - if subordinate_unit := agent.get('subordinate_unit'): - authorized_access_point += f'''. {'. '.join(subordinate_unit)}''' + authorized_access_point += f", {qualifier}" + elif agent.get("type") == EntityType.ORGANISATION: + if subordinate_unit := agent.get("subordinate_unit"): + authorized_access_point += f""". {'. 
'.join(subordinate_unit)}""" conference_data = [] - if numbering := agent.get('numbering'): + if numbering := agent.get("numbering"): conference_data.append(numbering) - if conference_date := agent.get('conference_date'): + if conference_date := agent.get("conference_date"): conference_data.append(conference_date) - if place := agent.get('place'): + if place := agent.get("place"): conference_data.append(place) if conference_data: authorized_access_point += f' ({" : ".join(conference_data)})' @@ -274,11 +264,11 @@ def process_i18n_literal_fields(fields): """Normalize literal fields.""" calculated_fields = [] for field in fields: - if entity := field.get('entity'): + if entity := field.get("entity"): entity = process_i18n_literal_entity(entity) - if subs := entity.pop('subdivisions', []): - entity['subdivisions'] = process_i18n_literal_fields(subs) - field['entity'] = entity + if subs := entity.pop("subdivisions", []): + entity["subdivisions"] = process_i18n_literal_fields(subs) + field["entity"] = entity calculated_fields.append(field) return calculated_fields @@ -298,17 +288,17 @@ def process_i18n_literal_entity(entity): :param entity: the entity to transform. """ - if entity.get('pid'): + if entity.get("pid"): # in such case, it means that's an entity linked to an `Entity` record. # and we don't need to transform it. Just return the current entity # without any modifications. return entity - if access_point := entity.pop('authorized_access_point', None): + if access_point := entity.pop("authorized_access_point", None): # use the encoded access point for all supported languages if the key # doesn't already exists for the entity. for language in get_i18n_supported_languages(): - key = f'authorized_access_point_{language}' + key = f"authorized_access_point_{language}" if key not in entity: entity[key] = access_point return entity @@ -318,26 +308,28 @@ def get_remote_cover(isbn): """Document cover service.""" if not isbn: return None - cover_service = current_app.config.get('RERO_ILS_THUMBNAIL_SERVICE_URL') - url = f'{cover_service}' \ - '?height=244px' \ - '&width=244px' \ - '&jsonpCallbackParam=callback' \ - '&callback=thumb' \ - '&type=isbn' \ - f'&value={isbn}' + cover_service = current_app.config.get("RERO_ILS_THUMBNAIL_SERVICE_URL") + url = ( + f"{cover_service}" + "?height=244px" + "&width=244px" + "&jsonpCallbackParam=callback" + "&callback=thumb" + "&type=isbn" + f"&value={isbn}" + ) try: host_url = flask_request.host_url except Exception: - host_url = current_app.config.get('RERO_ILS_APP_URL', '??') - if host_url[-1] != '/': - host_url = f'{host_url}/' - response = requests.get(url, headers={'referer': host_url}) + host_url = current_app.config.get("RERO_ILS_APP_URL", "??") + if host_url[-1] != "/": + host_url = f"{host_url}/" + response = requests.get(url, headers={"referer": host_url}) if response.status_code != 200: - msg = f'Unable to get cover for isbn: {isbn} {response.status_code}' + msg = f"Unable to get cover for isbn: {isbn} {response.status_code}" current_app.logger.debug(msg) return None - result = json.loads(response.text[len('thumb('):-1]) - if result['success']: + result = json.loads(response.text[len("thumb(") : -1]) + if result["success"]: return result - current_app.logger.debug(f'Unable to get cover for isbn: {isbn}') + current_app.logger.debug(f"Unable to get cover for isbn: {isbn}") diff --git a/rero_ils/modules/documents/views.py b/rero_ils/modules/documents/views.py index 7cb142abd8..52a552c079 100644 --- a/rero_ils/modules/documents/views.py +++ 
b/rero_ils/modules/documents/views.py @@ -41,13 +41,6 @@ from rero_ils.modules.patrons.api import current_patrons from rero_ils.modules.utils import extracted_data_from_ref -from .api import Document, DocumentsSearch -from .dumpers import document_indexer_dumper -from .extensions import EditionStatementExtension, \ - ProvisionActivitiesExtension, SeriesStatementExtension, TitleExtension -from .utils import display_alternate_graphic_first, get_remote_cover, \ - title_format_text, title_format_text_alternate_graphic, \ - title_variant_format_text from ..collections.api import CollectionsSearch from ..entities.api import Entity from ..entities.models import EntityType @@ -58,6 +51,21 @@ from ..organisations.api import Organisation from ..patrons.api import current_patrons from ..utils import extracted_data_from_ref +from .api import Document, DocumentsSearch +from .dumpers import document_indexer_dumper +from .extensions import ( + EditionStatementExtension, + ProvisionActivitiesExtension, + SeriesStatementExtension, + TitleExtension, +) +from .utils import ( + display_alternate_graphic_first, + get_remote_cover, + title_format_text, + title_format_text_alternate_graphic, + title_variant_format_text, +) def doc_item_view_method(pid, record, template=None, **kwargs): @@ -70,12 +78,11 @@ def doc_item_view_method(pid, record, template=None, **kwargs): :param kwargs: Additional view arguments based on URL rule. :return: The rendered template. """ - record_viewed.send( - current_app._get_current_object(), pid=pid, record=record) + record_viewed.send(current_app._get_current_object(), pid=pid, record=record) - viewcode = kwargs['viewcode'] + viewcode = kwargs["viewcode"] organisation = None - if viewcode != current_app.config.get('RERO_ILS_SEARCH_GLOBAL_VIEW_CODE'): + if viewcode != current_app.config.get("RERO_ILS_SEARCH_GLOBAL_VIEW_CODE"): organisation = Organisation.get_record_by_viewcode(viewcode) # build provision activity @@ -83,20 +90,21 @@ def doc_item_view_method(pid, record, template=None, **kwargs): # Counting holdings to display the get button from ..holdings.api import HoldingsSearch - query = HoldingsSearch()\ - .filter('term', document__pid=pid.pid_value) - query = query.filter('bool', must_not=[Q('term', _masked=True)]) + + query = HoldingsSearch().filter("term", document__pid=pid.pid_value) + query = query.filter("bool", must_not=[Q("term", _masked=True)]) if organisation: - query = query.filter('term', organisation__pid=organisation.pid) + query = query.filter("term", organisation__pid=organisation.pid) holdings_count = query.count() # Counting linked documents from .api import DocumentsSearch - query = DocumentsSearch()\ - .filter('term', partOf__document__pid=pid.pid_value) + + query = DocumentsSearch().filter("term", partOf__document__pid=pid.pid_value) if organisation: query = query.filter( - 'term', holdings__organisation__organisation_pid=organisation.pid) + "term", holdings__organisation__organisation_pid=organisation.pid + ) linked_documents_count = query.count() return render_template( template, @@ -105,25 +113,24 @@ def doc_item_view_method(pid, record, template=None, **kwargs): es_record=record.dumps(document_indexer_dumper), holdings_count=holdings_count, viewcode=viewcode, - recordType='documents', + recordType="documents", current_patrons=current_patrons, - linked_documents_count=linked_documents_count + linked_documents_count=linked_documents_count, ) blueprint = Blueprint( - 'documents', + "documents", __name__, - template_folder='templates', - 
static_folder='static', + template_folder="templates", + static_folder="static", ) @blueprint.app_template_filter() def babeltheque_enabled_view(view): """Check if the view is activated for babeltheque.""" - enabled_views = current_app.config.get( - 'RERO_ILS_APP_BABELTHEQUE_ENABLED_VIEWS', []) + enabled_views = current_app.config.get("RERO_ILS_APP_BABELTHEQUE_ENABLED_VIEWS", []) return view in enabled_views @@ -132,9 +139,9 @@ def get_first_isbn(record): """Get first isbn result.""" # ISBN isbns = [ - identified_by.get('value') - for identified_by in record.get('identifiedBy', []) - if identified_by.get('type') == 'bf:Isbn' + identified_by.get("value") + for identified_by in record.get("identifiedBy", []) + if identified_by.get("type") == "bf:Isbn" ] return isbns[0] if isbns else None @@ -142,23 +149,21 @@ def get_first_isbn(record): @blueprint.app_template_filter() def note_general(notes): """Preprocess notes to extract only general type.""" - return sort_by_type( - list(filter(lambda t: t['noteType'] == 'general', notes))) + return sort_by_type(list(filter(lambda t: t["noteType"] == "general", notes))) @blueprint.app_template_filter() def notes_except_general(notes): """Preprocess notes to extract all note except general type.""" - return sort_by_type( - list(filter(lambda t: t['noteType'] != 'general', notes))) + return sort_by_type(list(filter(lambda t: t["noteType"] != "general", notes))) def sort_by_type(notes): """Sort notes by type.""" by_type = {} for note in notes: - by_type.setdefault(note['noteType'], []) - by_type[note['noteType']].append(note['label']) + by_type.setdefault(note["noteType"], []) + by_type[note["noteType"]].append(note["label"]) return by_type @@ -168,8 +173,7 @@ def cartographic_attributes(attributes): return [ attribute for attribute in attributes - if 'projection' in attribute - or attribute.get('coordinates', {}).get('label') + if "projection" in attribute or attribute.get("coordinates", {}).get("label") ] @@ -177,29 +181,26 @@ def cartographic_attributes(attributes): def provision_activity(provisions): """Preprocess provision activity.""" output = {} - provisions = list( - filter(lambda t: '_text' in t and 'statement' in t, provisions)) + provisions = list(filter(lambda t: "_text" in t and "statement" in t, provisions)) for provision in provisions: - if provision['type'] not in output: - output.setdefault(provision['type'], []) - for text in provision['_text']: - output[provision['type']].append(text) + if provision["type"] not in output: + output.setdefault(provision["type"], []) + for text in provision["_text"]: + output[provision["type"]].append(text) return output @blueprint.app_template_filter() def provision_activity_publication(provisions): """Extact only publication of provision activity.""" - return { - 'bf:Publication': provisions.get('bf:Publication', []) - } + return {"bf:Publication": provisions.get("bf:Publication", [])} @blueprint.app_template_filter() def provision_activity_not_publication(provisions): """Extact other than publication of provision activity.""" - if 'bf:Publication' in provisions: - provisions.pop('bf:Publication') + if "bf:Publication" in provisions: + provisions.pop("bf:Publication") return provisions @@ -207,9 +208,9 @@ def provision_activity_not_publication(provisions): def provision_activity_original_date(provisions): """Preprocess provision activity original date.""" return [ - provision['original_date'] + provision["original_date"] for provision in provisions - if 'original_date' in provision + if "original_date" in 
provision ] @@ -217,11 +218,11 @@ def provision_activity_original_date(provisions): def title_variants(titles): """Preprocess title variants.""" variants = {} - bf_titles = list(filter(lambda t: t['type'] != 'bf:Title', titles)) + bf_titles = list(filter(lambda t: t["type"] != "bf:Title", titles)) for title in bf_titles: title_texts = title_format_text(title, with_subtitle=True) - variants.setdefault(title['type'], []) - variants[title['type']].append(title_texts[0].get("value")) + variants.setdefault(title["type"], []) + variants[title["type"]].append(title_texts[0].get("value")) return variants @@ -232,21 +233,23 @@ def identified_by(identifiedby): for identifier in identifiedby: details = [] # Replace bf:Local by source - id_type = identifier.get('type') - if id_type == 'bf:Local': - id_type = identifier.get('source') + id_type = identifier.get("type") + if id_type == "bf:Local": + id_type = identifier.get("source") # Format qualifier, status and note - if identifier.get('qualifier'): - details.append(identifier.get('qualifier')) - if identifier.get('status'): - details.append(identifier.get('status')) - if identifier.get('note'): - details.append(identifier.get('note')) - output.append({ - 'type': id_type, - 'value': identifier.get('value'), - 'details': ', '.join(details) - }) + if identifier.get("qualifier"): + details.append(identifier.get("qualifier")) + if identifier.get("status"): + details.append(identifier.get("status")) + if identifier.get("note"): + details.append(identifier.get("note")) + output.append( + { + "type": id_type, + "value": identifier.get("value"), + "details": ", ".join(details), + } + ) return output @@ -262,7 +265,7 @@ def can_request(item): can, reasons = item.can( ItemCirculationAction.REQUEST, patron=patron, - library=Library.get_record_by_pid(patron.library_pid) + library=Library.get_record_by_pid(patron.library_pid), ) return can, reasons return False, [] @@ -279,41 +282,42 @@ def contribution_format(contributions, language, viewcode, with_roles=False): :return the contribution in formatted form. 
""" output = [] - for contrib in filter(lambda c: c.get('entity'), contributions): - if entity := get_entity_record_from_data(contrib['entity']): + for contrib in filter(lambda c: c.get("entity"), contributions): + if entity := get_entity_record_from_data(contrib["entity"]): text = entity.get_authorized_access_point(language=language) args = { - 'viewcode': viewcode, - 'recordType': 'documents', - 'q': f'contribution.entity.pids.{entity.resource_type}:' - f'{entity.pid}', - 'simple': 0 + "viewcode": viewcode, + "recordType": "documents", + "q": f"contribution.entity.pids.{entity.resource_type}:" + f"{entity.pid}", + "simple": 0, } else: - default_key = 'authorized_access_point' - localized_key = f'authorized_access_point_{language}' - text = contrib['entity'].get(localized_key) or \ - contrib['entity'].get(default_key) + default_key = "authorized_access_point" + localized_key = f"authorized_access_point_{language}" + text = contrib["entity"].get(localized_key) or contrib["entity"].get( + default_key + ) args = { - 'viewcode': viewcode, - 'recordType': 'documents', - 'q': f'contribution.entity.{localized_key}:"{text}"', - 'simple': 0 + "viewcode": viewcode, + "recordType": "documents", + "q": f'contribution.entity.{localized_key}:"{text}"', + "simple": 0, } - url = url_for('rero_ils.search', **args) + url = url_for("rero_ils.search", **args) label = f'{text}' if with_roles: - if roles := [_(role) for role in contrib.get('role', [])]: - roles_str = ', '.join(roles) + if roles := [_(role) for role in contrib.get("role", [])]: + roles_str = ", ".join(roles) label += f' ({roles_str})' output.append(label) - return ' ; '.join(output) + return " ; ".join(output) @blueprint.app_template_filter() -def doc_entity_label(entity, language=None, part_separator=' - ') -> str: +def doc_entity_label(entity, language=None, part_separator=" - ") -> str: """Format an entity according to the available keys. :param entity: the entity to analyze. @@ -322,25 +326,24 @@ def doc_entity_label(entity, language=None, part_separator=' - ') -> str: :returns: the best possible label to display. 
""" parts = [] - if '$ref' in entity: + if "$ref" in entity: # Local or remote entity - if entity := Entity.get_record_by_ref(entity['$ref']): + if entity := Entity.get_record_by_ref(entity["$ref"]): entity_type = entity.resource_type value = entity.pid parts.append(entity.get_authorized_access_point(language=language)) else: # Textual entity - entity_type = 'textual' - default_key = 'authorized_access_point' - localized_key = f'{default_key}_{language}' + entity_type = "textual" + default_key = "authorized_access_point" + localized_key = f"{default_key}_{language}" value = entity.get(localized_key) or entity.get(default_key) parts.append(value) # Subdivisions (only for textual entity) - for subdivision in entity.get('subdivisions', []): - if sub_entity := subdivision.get('entity'): - _, _, label = doc_entity_label( - sub_entity, language, part_separator) + for subdivision in entity.get("subdivisions", []): + if sub_entity := subdivision.get("entity"): + _, _, label = doc_entity_label(sub_entity, language, part_separator) parts.append(label) return entity_type, value, part_separator.join(filter(None, parts)) @@ -352,65 +355,61 @@ def edition_format(editions): output = [] for edition in editions: if languages := EditionStatementExtension.format_text(edition): - output.extend(edition.get('value') for edition in languages) + output.extend(edition.get("value") for edition in languages) return output @blueprint.app_template_filter() def part_of_format(part_of): """Format 'part of' data for template.""" - document_pid = extracted_data_from_ref(part_of.get('document'), data='pid') + document_pid = extracted_data_from_ref(part_of.get("document"), data="pid") document = Document.get_record_by_pid(document_pid) - nums = part_of.get('numbering') + nums = part_of.get("numbering") # Set host document pid - output = {'document_pid': document_pid} + output = {"document_pid": document_pid} # Set label - subtype = document.get('issuance').get('subtype') - if subtype == 'periodical': - output['label'] = _('Journal') - elif subtype == 'monographicSeries': - output['label'] = _('Series') + subtype = document.get("issuance").get("subtype") + if subtype == "periodical": + output["label"] = _("Journal") + elif subtype == "monographicSeries": + output["label"] = _("Series") else: - output['label'] = _('Published in') + output["label"] = _("Published in") # Set title - bf_titles = list( - filter( - lambda t: t['type'] == 'bf:Title', document.get('title') - ) - ) - output['title'] = TitleExtension.format_text(bf_titles) + bf_titles = list(filter(lambda t: t["type"] == "bf:Title", document.get("title"))) + output["title"] = TitleExtension.format_text(bf_titles) # Format and set numbering (example: 2020, vol. 2, nr. 3, p. 302) if nums is not None: for num in nums: numbering = [] - if num.get('year'): - numbering.append(num.get('year')) - if num.get('volume'): - volume = [_('vol'), str(num.get('volume'))] + if num.get("year"): + numbering.append(num.get("year")) + if num.get("volume"): + volume = [_("vol"), str(num.get("volume"))] numbering.append(". ".join(volume)) - if num.get('issue'): - issue = [_('nr'), str(num.get('issue'))] + if num.get("issue"): + issue = [_("nr"), str(num.get("issue"))] numbering.append(". ".join(issue)) - if num.get('pages'): - pages = [_('p'), str(num.get('pages'))] + if num.get("pages"): + pages = [_("p"), str(num.get("pages"))] numbering.append(". 
".join(pages)) - output.setdefault('numbering', []) - output['numbering'].append(", ".join(numbering)) + output.setdefault("numbering", []) + output["numbering"].append(", ".join(numbering)) return output @blueprint.app_template_filter() def record_library_pickup_locations(record): """Get the pickup locations of the library of the given item or holding.""" - location_pid = extracted_data_from_ref(record.get('location')) + location_pid = extracted_data_from_ref(record.get("location")) location = Location.get_record_by_pid(location_pid) # Either the location defines some 'restrict_pickup_to' either not. # * If 'restrict_pickup_to' is defined, then only these locations are # eligible as possible pickup_locations # * Otherwise, get all organisation pickup locations # of the record belongs to - if 'restrict_pickup_to' in location: + if "restrict_pickup_to" in location: # Get all pickup locations as Location objects and append it to the # location record (removing possible None values) pickup_locations = [ @@ -423,13 +422,13 @@ def record_library_pickup_locations(record): # (removing possible None value) pickup_locations = [] for library in org.get_libraries(): - for location_pid in list(library.get_pickup_locations_pids()): - pickup_locations.append( - Location.get_record_by_pid(location_pid)) - + pickup_locations.extend( + Location.get_record_by_pid(location_pid) + for location_pid in list(library.get_pickup_locations_pids()) + ) return sorted( list(filter(None, pickup_locations)), - key=lambda location: location.get('pickup_name', location.get('code')) + key=lambda location: location.get("pickup_name", location.get("code")), ) @@ -438,63 +437,64 @@ def work_access_point(work_access_point): """Process work access point data.""" wap = [] for work in work_access_point: - agent_formatted = '' - if agent := work.get('creator'): - if agent['type'] == EntityType.PERSON: + agent_formatted = "" + if agent := work.get("creator"): + if agent["type"] == EntityType.PERSON: # Person name = [] - if 'preferred_name' in agent: - name.append(agent['preferred_name']) - if 'numeration' in agent: - name.append(agent['numeration']) - elif 'fuller_form_of_name' in agent: + if "preferred_name" in agent: + name.append(agent["preferred_name"]) + if "numeration" in agent: + name.append(agent["numeration"]) + elif "fuller_form_of_name" in agent: name.append(f"({agent['fuller_form_of_name']})") if len(name): agent_formatted += f"{', '.join(name)}, " - if 'numeration' in agent and 'qualifier' in agent: + if "numeration" in agent and "qualifier" in agent: agent_formatted += f"{agent['qualifier']}, " dates = [ agent[key] - for key in ['date_of_birth', 'date_of_death'] + for key in ["date_of_birth", "date_of_death"] if key in agent ] if len(dates): agent_formatted += f"{'-'.join(dates)}. " - if 'numeration' not in agent and 'qualifier' in agent: + if "numeration" not in agent and "qualifier" in agent: agent_formatted += f"{agent['qualifier']}. " else: # Organisation - if 'preferred_name' in agent: - agent_formatted += agent['preferred_name'] + '. ' - if 'subordinate_unit' in agent: - for unit in agent['subordinate_unit']: - agent_formatted += f'{unit}. ' - if 'numbering' in agent or 'conference_date' in agent or \ - 'place' in agent: + if "preferred_name" in agent: + agent_formatted += agent["preferred_name"] + ". " + if "subordinate_unit" in agent: + for unit in agent["subordinate_unit"]: + agent_formatted += f"{unit}. 
" + if ( + "numbering" in agent + or "conference_date" in agent + or "place" in agent + ): conf = [ - agent[key] - for key in ['numbering', 'conference_date', 'place'] + agent[key] for key in ["numbering", "conference_date", "place"] ] if len(conf): agent_formatted += f"({' : '.join(conf)}) " agent_formatted += f"{work['title']}. " - if 'part' in work: - for part in work['part']: - for key in ['partNumber', 'partName']: + if "part" in work: + for part in work["part"]: + for key in ["partNumber", "partName"]: if key in part: agent_formatted += f"{part[key]}. " - if 'miscellaneous_information' in work: + if "miscellaneous_information" in work: agent_formatted += f"{work['miscellaneous_information']}. " - if 'language' in work: + if "language" in work: agent_formatted += f"{_('lang_'+work['language'])}. " - if 'medium_of_performance_for_music' in work: - agent_formatted += \ - f"{'. '.join(work['medium_of_performance_for_music'])}. " - if 'key_for_music' in work: + if "medium_of_performance_for_music" in work: + agent_formatted += f"{'. '.join(work['medium_of_performance_for_music'])}. " + if "key_for_music" in work: agent_formatted += f"{work['key_for_music']}. " - if 'arranged_statement_for_music' in work: + if "arranged_statement_for_music" in work: agent_formatted += f"{work['arranged_statement_for_music']}. " - if 'date_of_work' in work: + if "date_of_work" in work: agent_formatted += f"{work['date_of_work']}. " wap.append(agent_formatted.strip()) return wap @@ -504,14 +504,13 @@ def work_access_point(work_access_point): def create_publication_statement(provision_activity): """Create publication statement from place, agent and date values.""" output = [] - publication_texts = \ - ProvisionActivitiesExtension.format_text(provision_activity) + publication_texts = ProvisionActivitiesExtension.format_text(provision_activity) for publication_text in publication_texts: - language = publication_text.get('language', 'default') + language = publication_text.get("language", "default") if display_alternate_graphic_first(language): - output.insert(0, publication_text.get('value')) + output.insert(0, publication_text.get("value")) else: - output.append(publication_text.get('value')) + output.append(publication_text.get("value")) return output @@ -526,26 +525,26 @@ def get_cover_art(record, save_cover_url=True, verbose=False): :return: url for cover art or None """ # electronicLocator - for electronic_locator in record.get('electronicLocator', []): - e_content = electronic_locator.get('content') - e_type = electronic_locator.get('type') - if e_content == 'coverImage' and e_type == 'relatedResource': - return electronic_locator.get('url') + for electronic_locator in record.get("electronicLocator", []): + e_content = electronic_locator.get("content") + e_type = electronic_locator.get("type") + if e_content == "coverImage" and e_type == "relatedResource": + return electronic_locator.get("url") # ISBN isbns = [ - identified_by.get('value') - for identified_by in record.get('identifiedBy', []) - if identified_by.get('type') == 'bf:Isbn' + identified_by.get("value") + for identified_by in record.get("identifiedBy", []) + if identified_by.get("type") == "bf:Isbn" ] for isbn in sorted(isbns): isbn_cover = get_remote_cover(isbn) - if isbn_cover and isbn_cover.get('success'): - url = isbn_cover.get('image') + if isbn_cover and isbn_cover.get("success"): + url = isbn_cover.get("image") if save_cover_url: - pid = record.get('pid') + pid = record.get("pid") record_db = Document.get_record_by_pid(pid) 
record_db.add_cover_url(url=url, dbcommit=True, reindex=True) - msg = f'Add cover art url: {url} do document: {pid}' + msg = f"Add cover art url: {url} do document: {pid}" if verbose: click.echo(msg) return url @@ -562,25 +561,28 @@ def get_other_accesses(record): def filter_type(electronic_locator): """Filter electronic locator for related resources and no info.""" - return electronic_locator.get('type') in [ - 'noInfo', 'resource', 'relatedResource', 'versionOfResource' - ] and electronic_locator.get('content') != 'coverImage' + return ( + electronic_locator.get("type") + in ["noInfo", "resource", "relatedResource", "versionOfResource"] + and electronic_locator.get("content") != "coverImage" + ) filtered_electronic_locators = filter( - filter_type, - record.get('electronicLocator', []) + filter_type, record.get("electronicLocator", []) ) for electronic_locator in filtered_electronic_locators: - url = electronic_locator.get('url') - content = _(electronic_locator.get('content')) - public_notes = electronic_locator.get('publicNote', []) - public_note = ', '.join(public_notes) - accesses.append({ - 'type': electronic_locator.get('type'), - 'url': url, - 'content': content, - 'public_note': public_note - }) + url = electronic_locator.get("url") + content = _(electronic_locator.get("content")) + public_notes = electronic_locator.get("publicNote", []) + public_note = ", ".join(public_notes) + accesses.append( + { + "type": electronic_locator.get("type"), + "url": url, + "content": content, + "public_note": public_note, + } + ) return accesses @@ -596,8 +598,8 @@ def create_title_text(titles, responsibility_statement=None): :rtype: str """ return TitleExtension.format_text( - titles, responsibility_statement, - with_subtitle=True) + titles, responsibility_statement, with_subtitle=True + ) @blueprint.app_template_filter() @@ -612,10 +614,9 @@ def create_title_alternate_graphic(titles, responsibility_statement=None): :rtype: list """ output = [] - altgr_texts = title_format_text_alternate_graphic(titles, - responsibility_statement) + altgr_texts = title_format_text_alternate_graphic(titles, responsibility_statement) for altgr_text in altgr_texts: - value = altgr_text.get('value') + value = altgr_text.get("value") if value not in output: output.append(value) return output @@ -631,10 +632,9 @@ def create_title_variants(titles): :rtype: list """ output = [] - title_variant_texts = \ - title_variant_format_text(titles=titles, with_subtitle=True) + title_variant_texts = title_variant_format_text(titles=titles, with_subtitle=True) for title_variant_text in title_variant_texts: - value = title_variant_text.get('value') + value = title_variant_text.get("value") if value not in output: output.append(value) return output @@ -653,9 +653,9 @@ def create_title_responsibilites(responsibilityStatement): output = [] for responsibility in responsibilityStatement: for responsibility_language in responsibility: - value = responsibility_language.get('value') + value = responsibility_language.get("value") if value not in output: - language = responsibility_language.get('language', 'default') + language = responsibility_language.get("language", "default") if display_alternate_graphic_first(language): output.insert(0, value) else: @@ -672,8 +672,8 @@ def in_collection(item_pid): """ return list( CollectionsSearch() - .filter('term', items__pid=item_pid) - .filter('term', published=True) + .filter("term", items__pid=item_pid) + .filter("term", published=True) .scan() ) @@ -700,8 +700,8 @@ def document_main_type(record, 
translate: bool = True) -> Optional[str]: :param translate: is the response should be localized :return: the document main type """ - if 'type' in record: - doc_type = record['type'][0]['main_type'] + if "type" in record: + doc_type = record["type"][0]["main_type"] return _(doc_type) if translate else doc_type @@ -711,17 +711,19 @@ def get_articles(record): :return: list of articles with title and pid- """ - search = DocumentsSearch() \ - .filter('term', partOf__document__pid=record.get('pid')) \ - .source(['pid', 'title']) + search = ( + DocumentsSearch() + .filter("term", partOf__document__pid=record.get("pid")) + .source(["pid", "title"]) + ) return [ - {'title': TitleExtension.format_text(hit.title), 'pid': hit.pid} + {"title": TitleExtension.format_text(hit.title), "pid": hit.pid} for hit in search.scan() ] @blueprint.app_template_filter() -def online_holdings(document_pid, viewcode='global'): +def online_holdings(document_pid, viewcode="global"): """Find holdings by document pid and viewcode. :param document_pid: document pid @@ -729,40 +731,42 @@ def online_holdings(document_pid, viewcode='global'): :return: list of holdings """ from ..holdings.api import HoldingsSearch + organisation = None - if viewcode != current_app.config.get('RERO_ILS_SEARCH_GLOBAL_VIEW_CODE'): + if viewcode != current_app.config.get("RERO_ILS_SEARCH_GLOBAL_VIEW_CODE"): organisation = Organisation.get_record_by_viewcode(viewcode) - query = HoldingsSearch()\ - .filter('term', document__pid=document_pid)\ - .filter('bool', must_not=[Q('term', _masked=True)]) + query = ( + HoldingsSearch() + .filter("term", document__pid=document_pid) + .filter("bool", must_not=[Q("term", _masked=True)]) + ) if organisation: - query = query.filter('term', organisation__pid=organisation.pid) - results = query.source(['library', 'electronic_location', - 'enumerationAndChronology', 'notes']).scan() + query = query.filter("term", organisation__pid=organisation.pid) + results = query.source( + ["library", "electronic_location", "enumerationAndChronology", "notes"] + ).scan() holdings = {} for record in results: library = Library.get_record_by_pid(record.library.pid) - library_holdings = holdings.get(library['name'], []) - record.library.name = library['name'] - public_notes_content = [ - n['content'] - for n in record.to_dict().get('notes', []) - if n['type'] in HoldingNoteTypes.PUBLIC - ] - if public_notes_content: + library_holdings = holdings.get(library["name"], []) + record.library.name = library["name"] + if public_notes_content := [ + n["content"] + for n in record.to_dict().get("notes", []) + if n["type"] in HoldingNoteTypes.PUBLIC + ]: record.notes = public_notes_content library_holdings.append(record) - holdings[library['name']] = library_holdings + holdings[library["name"]] = library_holdings return holdings @blueprint.app_template_filter() def series_statement_format(series): """Series statement format.""" - return [SeriesStatementExtension.format_text( - serie) for serie in series] + return [SeriesStatementExtension.format_text(serie) for serie in series] @blueprint.app_template_filter() @@ -771,7 +775,4 @@ def main_title_text(title): :param title: array of the field title. 
""" - return list(filter( - lambda t: t.get('type') == 'bf:Title', - title - )) + return list(filter(lambda t: t.get("type") == "bf:Title", title)) diff --git a/rero_ils/modules/ebooks/cli.py b/rero_ils/modules/ebooks/cli.py index 0afa67372e..881fe918a6 100644 --- a/rero_ils/modules/ebooks/cli.py +++ b/rero_ils/modules/ebooks/cli.py @@ -28,70 +28,66 @@ from .utils import add_oai_source -@oaiharvester.command('addsource') -@click.argument('name') -@click.argument('baseurl') -@click.option('-m', '--metadataprefix', default='marc21', - help='The prefix for the metadata') -@click.option('-s', '--setspecs', default='', - help='The ‘set’ criteria for the harvesting') -@click.option('-c', '--comment', default='', - help='Comment') +@oaiharvester.command("addsource") +@click.argument("name") +@click.argument("baseurl") @click.option( - '-u', '--update', is_flag=True, default=False, help='Update config' + "-m", "--metadataprefix", default="marc21", help="The prefix for the metadata" ) +@click.option( + "-s", "--setspecs", default="", help="The ‘set’ criteria for the harvesting" +) +@click.option("-c", "--comment", default="", help="Comment") +@click.option("-u", "--update", is_flag=True, default=False, help="Update config") @with_appcontext -def add_oai_source_config(name, baseurl, metadataprefix, setspecs, comment, - update): +def add_oai_source_config(name, baseurl, metadataprefix, setspecs, comment, update): """Add OAIHarvestConfig.""" - click.echo(f'Add OAIHarvestConfig: {name} ', nl=False) + click.echo(f"Add OAIHarvestConfig: {name} ", nl=False) msg = add_oai_source( name=name, baseurl=baseurl, metadataprefix=metadataprefix, setspecs=setspecs, comment=comment, - update=update + update=update, ) click.echo(msg) -@oaiharvester.command('initconfig') -@click.argument('configfile', type=click.File('rb')) -@click.option( - '-u', '--update', is_flag=True, default=False, help='Update config' -) +@oaiharvester.command("initconfig") +@click.argument("configfile", type=click.File("rb")) +@click.option("-u", "--update", is_flag=True, default=False, help="Update config") @with_appcontext def init_oai_harvest_config(configfile, update): """Init OAIHarvestConfig.""" configs = yaml.load(configfile, Loader=yaml.FullLoader) for name, values in sorted(configs.items()): - baseurl = values['baseurl'] - metadataprefix = values.get('metadataprefix', 'marc21') - setspecs = values.get('setspecs', '') - comment = values.get('comment', '') - click.echo(f'Add OAIHarvestConfig: {name} {baseurl} ', nl=False) + baseurl = values["baseurl"] + metadataprefix = values.get("metadataprefix", "marc21") + setspecs = values.get("setspecs", "") + comment = values.get("comment", "") + click.echo(f"Add OAIHarvestConfig: {name} {baseurl} ", nl=False) msg = add_oai_source( name=name, baseurl=baseurl, metadataprefix=metadataprefix, setspecs=setspecs, comment=comment, - update=update + update=update, ) click.echo(msg) -@oaiharvester.command('info') +@oaiharvester.command("info") @with_appcontext def info(): """List infos for tasks.""" oais = OAIHarvestConfig.query.all() for oai in oais: click.echo(oai.name) - click.echo('\tlastrun : ', nl=False) + click.echo("\tlastrun : ", nl=False) click.echo(oai.lastrun) - click.echo('\tbaseurl : ' + oai.baseurl) - click.echo('\tmetadataprefix: ' + oai.metadataprefix) - click.echo('\tcomment : ' + oai.comment) - click.echo('\tsetspecs : ' + oai.setspecs) + click.echo("\tbaseurl : " + oai.baseurl) + click.echo("\tmetadataprefix: " + oai.metadataprefix) + click.echo("\tcomment : " + oai.comment) + 
click.echo("\tsetspecs : " + oai.setspecs) diff --git a/rero_ils/modules/ebooks/dojson/contrib/marc21/__init__.py b/rero_ils/modules/ebooks/dojson/contrib/marc21/__init__.py index f45479fa83..9547a74462 100644 --- a/rero_ils/modules/ebooks/dojson/contrib/marc21/__init__.py +++ b/rero_ils/modules/ebooks/dojson/contrib/marc21/__init__.py @@ -19,4 +19,4 @@ from .model import marc21 -__all__ = ('marc21') +__all__ = "marc21" diff --git a/rero_ils/modules/ebooks/dojson/contrib/marc21/model.py b/rero_ils/modules/ebooks/dojson/contrib/marc21/model.py index cbaa3af8fc..2835a7c7a1 100644 --- a/rero_ils/modules/ebooks/dojson/contrib/marc21/model.py +++ b/rero_ils/modules/ebooks/dojson/contrib/marc21/model.py @@ -24,12 +24,17 @@ from dojson import utils from isbnlib import EAN13 -from rero_ils.dojson.utils import ReroIlsMarc21Overdo, TitlePartList, \ - add_note, extract_subtitle_and_parallel_titles_from_field_245_b, \ - get_field_items, get_field_link_data, make_year, \ - remove_trailing_punctuation -from rero_ils.modules.documents.dojson.contrib.marc21tojson.utils import \ - do_language +from rero_ils.dojson.utils import ( + ReroIlsMarc21Overdo, + TitlePartList, + add_note, + extract_subtitle_and_parallel_titles_from_field_245_b, + get_field_items, + get_field_link_data, + make_year, + remove_trailing_punctuation, +) +from rero_ils.modules.documents.dojson.contrib.marc21tojson.utils import do_language from rero_ils.modules.documents.models import DocumentFictionType from rero_ils.modules.documents.utils import create_authorized_access_point from rero_ils.modules.entities.models import EntityType @@ -37,20 +42,17 @@ marc21 = ReroIlsMarc21Overdo() -@marc21.over('issuance', 'leader') +@marc21.over("issuance", "leader") @utils.ignore_value def marc21_to_issuance(self, key, value): """Set the mode of issuance.""" - self['issuance'] = dict( - main_type='rdami:1001', - subtype='materialUnit' - ) + self["issuance"] = dict(main_type="rdami:1001", subtype="materialUnit") if marc21.admin_meta_data: - self['adminMetadata'] = marc21.admin_meta_data - self['fiction_statement'] = DocumentFictionType.Unspecified.value + self["adminMetadata"] = marc21.admin_meta_data + self["fiction_statement"] = DocumentFictionType.Unspecified.value -@marc21.over('language', '^008') +@marc21.over("language", "^008") @utils.ignore_value def marc21_to_language_from_008(self, key, value): """Get languages. @@ -60,154 +62,144 @@ def marc21_to_language_from_008(self, key, value): return do_language(self, marc21) -@marc21.over('identifiedBy', '^020..') +@marc21.over("identifiedBy", "^020..") @utils.ignore_value def marc21_to_identifier_isbn(self, key, value): """Get identifier isbn. 
identifiers_isbn: 020 $a """ - if isbn13 := EAN13(value.get('a')): - identifiers = self.get('identifiedBy', []) - identifier = { - 'type': 'bf:Isbn', - 'value': isbn13 - } + if isbn13 := EAN13(value.get("a")): + identifiers = self.get("identifiedBy", []) + identifier = {"type": "bf:Isbn", "value": isbn13} identifiers.append(identifier) return identifiers return None -@marc21.over('type', '^0248.$') +@marc21.over("type", "^0248.$") def marc21_to_type(self, key, value): """Get document type.""" - if value.get('a').find('cantook') > -1: - return [{ - 'main_type': 'docmaintype_book', - 'subtype': 'docsubtype_e-book' - }] + if value.get("a").find("cantook") > -1: + return [{"main_type": "docmaintype_book", "subtype": "docsubtype_e-book"}] return None -@marc21.over('identifiedBy', '^035..') +@marc21.over("identifiedBy", "^035..") @utils.ignore_value def marc21_to_identifier_rero_id(self, key, value): """Get identifier reroId. identifiers:reroID: 035$a """ - identifiers = self.get('identifiedBy', []) - identifier = { - 'type': 'bf:Local', - 'value': value.get('a') - } + identifiers = self.get("identifiedBy", []) + identifier = {"type": "bf:Local", "value": value.get("a")} identifiers.append(identifier) return identifiers -@marc21.over('language', '^041..') +@marc21.over("language", "^041..") @utils.ignore_value def marc21_to_translated_from(self, key, value): """Get language. languages: 008 and 041 [$a, repetitive] """ - languages = self.get('language', []) + languages = self.get("language", []) unique_lang = [] if languages != []: - unique_lang.extend(language['value'] for language in languages) - if language := value.get('a'): + unique_lang.extend(language["value"] for language in languages) + if language := value.get("a"): for lang in utils.force_list(language): if lang not in unique_lang: unique_lang.append(lang) - languages.append({'type': 'bf:Language', 'value': lang}) + languages.append({"type": "bf:Language", "value": lang}) return languages -@marc21.over('contribution', '(^100|^700|^710|^711)..') +@marc21.over("contribution", "(^100|^700|^710|^711)..") @utils.for_each_value @utils.ignore_value def marc21_to_contribution(self, key, value): """Get contribution.""" - if key[4] == '2' or key[:3] not in ['100', '700', '710', '711']: + if key[4] == "2" or key[:3] not in ["100", "700", "710", "711"]: return None - agent_data = {'type': 'bf:Person'} - if value.get('a'): - name = utils.force_list(value.get('a'))[0] - agent_data['preferred_name'] = remove_trailing_punctuation(name) + agent_data = {"type": "bf:Person"} + if value.get("a"): + name = utils.force_list(value.get("a"))[0] + agent_data["preferred_name"] = remove_trailing_punctuation(name) # 100|700 Person - if key[:3] in ['100', '700']: - if value.get('b'): - numeration = utils.force_list(value.get('b'))[0] - agent_data['numeration'] = remove_trailing_punctuation( - numeration) - if value.get('c'): - qualifier = utils.force_list(value.get('c'))[0] - agent_data['qualifier'] = remove_trailing_punctuation(qualifier) - if value.get('d'): - date = utils.force_list(value.get('d'))[0] - date = date.rstrip(',') - dates = remove_trailing_punctuation(date).split('-') + if key[:3] in ["100", "700"]: + if value.get("b"): + numeration = utils.force_list(value.get("b"))[0] + agent_data["numeration"] = remove_trailing_punctuation(numeration) + if value.get("c"): + qualifier = utils.force_list(value.get("c"))[0] + agent_data["qualifier"] = remove_trailing_punctuation(qualifier) + if value.get("d"): + date = utils.force_list(value.get("d"))[0] + date = 
date.rstrip(",") + dates = remove_trailing_punctuation(date).split("-") with contextlib.suppress(Exception): if date_of_birth := dates[0].strip(): - agent_data['date_of_birth'] = date_of_birth + agent_data["date_of_birth"] = date_of_birth with contextlib.suppress(Exception): if date_of_death := dates[1].strip(): - agent_data['date_of_death'] = date_of_death - if value.get('q'): - fuller_form_of_name = utils.force_list(value.get('q'))[0] - agent_data['fuller_form_of_name'] = remove_trailing_punctuation( - fuller_form_of_name - ).lstrip('(').rstrip(')') - - elif key[:3] in ['710', '711']: - agent_data['type'] = 'bf:Organisation' - agent_data['conference'] = key[:3] == '711' - if value.get('e'): + agent_data["date_of_death"] = date_of_death + if value.get("q"): + fuller_form_of_name = utils.force_list(value.get("q"))[0] + agent_data["fuller_form_of_name"] = ( + remove_trailing_punctuation(fuller_form_of_name).lstrip("(").rstrip(")") + ) + + elif key[:3] in ["710", "711"]: + agent_data["type"] = "bf:Organisation" + agent_data["conference"] = key[:3] == "711" + if value.get("e"): subordinate_units = [ - subordinate_unit.rstrip('.') for subordinate_unit - in utils.force_list(value.get('e'))] - - agent_data['subordinate_unit'] = subordinate_units - if value.get('n'): - numbering = utils.force_list(value.get('n'))[0] - agent_data['numbering'] = remove_trailing_punctuation( - numbering - ).lstrip('(').rstrip(')') - if value.get('d'): - conference_date = utils.force_list(value.get('d'))[0] - if conference_date := remove_trailing_punctuation( - conference_date).lstrip('(').rstrip(')'): - agent_data['conference_date'] = conference_date - if value.get('c'): - place = utils.force_list(value.get('c'))[0] - if place := remove_trailing_punctuation( - place).lstrip('(').rstrip(')'): - agent_data['place'] = place + subordinate_unit.rstrip(".") + for subordinate_unit in utils.force_list(value.get("e")) + ] + + agent_data["subordinate_unit"] = subordinate_units + if value.get("n"): + numbering = utils.force_list(value.get("n"))[0] + agent_data["numbering"] = ( + remove_trailing_punctuation(numbering).lstrip("(").rstrip(")") + ) + if value.get("d"): + conference_date = utils.force_list(value.get("d"))[0] + if ( + conference_date := remove_trailing_punctuation(conference_date) + .lstrip("(") + .rstrip(")") + ): + agent_data["conference_date"] = conference_date + if value.get("c"): + place = utils.force_list(value.get("c"))[0] + if place := remove_trailing_punctuation(place).lstrip("(").rstrip(")"): + agent_data["place"] = place agent = { - 'type': agent_data['type'], - 'authorized_access_point': create_authorized_access_point(agent_data), + "type": agent_data["type"], + "authorized_access_point": create_authorized_access_point(agent_data), } - if agent_data.get('identifiedBy'): - agent['identifiedBy'] = agent_data['identifiedBy'] - roles = ['aut'] - if value.get('4'): - roles = list(utils.force_list(value.get('4'))) - elif key[:3] == '100': - roles = ['cre'] - elif key[:3] == '711': - roles = ['aut'] + if agent_data.get("identifiedBy"): + agent["identifiedBy"] = agent_data["identifiedBy"] + roles = ["aut"] + if value.get("4"): + roles = list(utils.force_list(value.get("4"))) + elif key[:3] == "100": + roles = ["cre"] + elif key[:3] == "711": + roles = ["aut"] else: - roles = ['ctb'] - return { - 'entity': agent, - 'role': roles - } + roles = ["ctb"] + return {"entity": agent, "role": roles} -@marc21.over('title', '^245..') +@marc21.over("title", "^245..") @utils.ignore_value def marc21_to_title(self, key, 
value): """Get title data. @@ -225,24 +217,24 @@ def marc21_to_title(self, key, value): $p : repetitive $6 : non repetitive """ - subfield_245_a = '' - subfield_245_b = '' - if fields_245 := marc21.get_fields('245'): - subfields_245_a = marc21.get_subfields(fields_245[0], 'a') - subfields_245_b = marc21.get_subfields(fields_245[0], 'b') + subfield_245_a = "" + subfield_245_b = "" + if fields_245 := marc21.get_fields("245"): + subfields_245_a = marc21.get_subfields(fields_245[0], "a") + subfields_245_b = marc21.get_subfields(fields_245[0], "b") if subfields_245_a: subfield_245_a = subfields_245_a[0] if subfields_245_b: subfield_245_b = subfields_245_b[0] - field_245_a_end_with_equal = re.search(r'\s*=\s*$', subfield_245_a) - field_245_a_end_with_colon = re.search(r'\s*:\s*$', subfield_245_a) - field_245_a_end_with_semicolon = re.search(r'\s*;\s*$', subfield_245_a) - field_245_b_contains_equal = re.search(r'=', subfield_245_b) + field_245_a_end_with_equal = re.search(r"\s*=\s*$", subfield_245_a) + field_245_a_end_with_colon = re.search(r"\s*:\s*$", subfield_245_a) + field_245_a_end_with_semicolon = re.search(r"\s*;\s*$", subfield_245_a) + field_245_b_contains_equal = re.search(r"=", subfield_245_b) - fields_246 = marc21.get_fields('246') - subfield_246_a = '' + fields_246 = marc21.get_fields("246") + subfield_246_a = "" if fields_246: - if subfields_246_a := marc21.get_subfields(fields_246[0], 'a'): + if subfields_246_a := marc21.get_subfields(fields_246[0], "a"): subfield_246_a = subfields_246_a[0] tag_link, link = get_field_link_data(value) @@ -250,55 +242,54 @@ def marc21_to_title(self, key, value): index = 1 title_list = [] title_data = {} - part_list = TitlePartList( - part_number_code='n', - part_name_code='p' - ) + part_list = TitlePartList(part_number_code="n", part_name_code="p") parallel_titles = [] pararalel_title_data_list = [] pararalel_title_string_set = set() responsibility = {} - subfield_selection = {'a', 'b', 'c', 'n', 'p'} + subfield_selection = {"a", "b", "c", "n", "p"} for blob_key, blob_value in items: if blob_key in subfield_selection: value_data = marc21.build_value_with_alternate_graphic( - '245', blob_key, blob_value, index, link, ',.', ':;/-=') - if blob_key in {'a', 'b', 'c'}: + "245", blob_key, blob_value, index, link, ",.", ":;/-=" + ) + if blob_key in {"a", "b", "c"}: subfield_selection.remove(blob_key) - if blob_key == 'a': - title_data['mainTitle'] = value_data - elif blob_key == 'b': + if blob_key == "a": + title_data["mainTitle"] = value_data + elif blob_key == "b": if subfield_246_a: - subtitle, parallel_titles, pararalel_title_string_set = \ + subtitle, parallel_titles, pararalel_title_string_set = ( extract_subtitle_and_parallel_titles_from_field_245_b( - value_data, field_245_a_end_with_equal) + value_data, field_245_a_end_with_equal + ) + ) if subtitle: - title_data['subtitle'] = subtitle + title_data["subtitle"] = subtitle elif value_data: - title_data['subtitle'] = value_data - elif blob_key == 'c': + title_data["subtitle"] = value_data + elif blob_key == "c": responsibility = marc21.build_responsibility_data(value_data) - elif blob_key in ['n', 'p']: + elif blob_key in ["n", "p"]: part_list.update_part(value_data, blob_key, blob_value) - if blob_key != '__order__': + if blob_key != "__order__": index += 1 - title_data['type'] = 'bf:Title' + title_data["type"] = "bf:Title" if the_part_list := part_list.get_part_list(): - title_data['part'] = the_part_list + title_data["part"] = the_part_list if title_data: title_list.append(title_data) - 
variant_title_list = \ - marc21.build_variant_title_data(pararalel_title_string_set) + variant_title_list = marc21.build_variant_title_data(pararalel_title_string_set) title_list.extend(iter(parallel_titles)) title_list.extend(iter(variant_title_list)) if responsibility: - self['responsibilityStatement'] = responsibility + self["responsibilityStatement"] = responsibility return title_list or None -@marc21.over('editionStatement', '^250..') +@marc21.over("editionStatement", "^250..") @utils.for_each_value @utils.ignore_value def marc21_to_edition_statement(self, key, value): @@ -308,34 +299,31 @@ def marc21_to_edition_statement(self, key, value): responsibility: 250 [$b non repetitive] """ edition_data = {} - if subfields_a := utils.force_list(value.get('a')): + if subfields_a := utils.force_list(value.get("a")): subfield_a = remove_trailing_punctuation(subfields_a[0]) - edition_data['editionDesignation'] = [{'value': subfield_a}] - if subfields_b := utils.force_list(value.get('b')): + edition_data["editionDesignation"] = [{"value": subfield_a}] + if subfields_b := utils.force_list(value.get("b")): subfields_b = subfields_b[0] - edition_data['responsibility'] = [{'value': subfields_b}] + edition_data["responsibility"] = [{"value": subfields_b}] return edition_data or None -@marc21.over('copyrightDate', '^264.4') +@marc21.over("copyrightDate", "^264.4") @utils.ignore_value def marc21_to_copyright_date(self, key, value): """Get Copyright Date.""" - copyright_dates = self.get('copyrightDate', []) - copyright_date = value.get('c') + copyright_dates = self.get("copyrightDate", []) + copyright_date = value.get("c") if copyright_date: - if match := re.search(r'^([©℗])+\s*(\d{4}.*)', copyright_date): - copyright_date = ' '.join(( - match.group(1), - match.group(2) - )) + if match := re.search(r"^([©℗])+\s*(\d{4}.*)", copyright_date): + copyright_date = " ".join((match.group(1), match.group(2))) else: - raise ValueError('Bad format of copyright date') + raise ValueError("Bad format of copyright date") copyright_dates.append(copyright_date) return copyright_dates or None -@marc21.over('provisionActivity', '^(260..|264.[_0-3])') +@marc21.over("provisionActivity", "^(260..|264.[_0-3])") @utils.for_each_value @utils.ignore_value def marc21_to_provision_activity(self, key, value): @@ -345,23 +333,23 @@ def marc21_to_provision_activity(self, key, value): publisher.place: 264 [$a repetitive] publicationDate: 264 [$c repetitive] (but take only the first one) """ + def build_statement(field_value, ind2): def build_place_or_agent_data(code, label): - type_per_code = { - 'a': EntityType.PLACE, - 'b': EntityType.AGENT - } - return {'type': type_per_code[code], 'label': [{'value': value}]} \ - if (value := remove_trailing_punctuation(label)) else None + type_per_code = {"a": EntityType.PLACE, "b": EntityType.AGENT} + return ( + {"type": type_per_code[code], "label": [{"value": value}]} + if (value := remove_trailing_punctuation(label)) + else None + ) # function build_statement start here statement = [] items = get_field_items(field_value) for blob_key, blob_value in items: - if blob_key in ('a', 'b'): - place_or_agent_data = build_place_or_agent_data( - blob_key, blob_value) + if blob_key in ("a", "b"): + place_or_agent_data = build_place_or_agent_data(blob_key, blob_value) if place_or_agent_data: statement.append(place_or_agent_data) return statement or None @@ -369,72 +357,72 @@ def build_place_or_agent_data(code, label): def build_place(marc21): place = {} if marc21.country: - place['country'] = 
marc21.country + place["country"] = marc21.country if place: - place['type'] = EntityType.PLACE + place["type"] = EntityType.PLACE return place # the function marc21_to_provision_activity start here ind2 = key[4] type_per_ind2 = { - ' ': 'bf:Publication', - '_': 'bf:Publication', - '0': 'bf:Production', - '1': 'bf:Publication', - '2': 'bf:Distribution', - '3': 'bf:Manufacture' + " ": "bf:Publication", + "_": "bf:Publication", + "0": "bf:Production", + "1": "bf:Publication", + "2": "bf:Distribution", + "3": "bf:Manufacture", } - if key[:3] == '260': - ind2 = '1' # to force type to bf:Publication for field 260 + if key[:3] == "260": + ind2 = "1" # to force type to bf:Publication for field 260 publication = { - 'type': type_per_ind2[ind2], - 'statement': [], + "type": type_per_ind2[ind2], + "statement": [], } - publication['statement'] = build_statement(value, ind2) + publication["statement"] = build_statement(value, ind2) - subfields_c = utils.force_list(value.get('c')) + subfields_c = utils.force_list(value.get("c")) if subfields_c: subfield_c = subfields_c[0] - publication['statement'].append({ - 'label': [{'value': subfield_c}], - 'type': 'Date' - }) - if ind2 in (' ', '1'): - dates = subfield_c.replace('[', '').replace(']', '').split('-') + publication["statement"].append( + {"label": [{"value": subfield_c}], "type": "Date"} + ) + if ind2 in (" ", "1"): + dates = subfield_c.replace("[", "").replace("]", "").split("-") try: start_date = make_year(dates[0]) if start_date: - publication['startDate'] = start_date + publication["startDate"] = start_date except Exception: pass try: end_date = make_year(dates[1]) if end_date: - publication['endDate'] = end_date + publication["endDate"] = end_date except Exception: pass place = build_place(marc21) - if place and place.get('country') != 'xx': - publication['place'] = [place] + if place and place.get("country") != "xx": + publication["place"] = [place] return publication or None -@marc21.over('extent', '^300..') +@marc21.over("extent", "^300..") @utils.ignore_value def marc21_to_description(self, key, value): """Get extent. 
extent: 300$a (the first one if many) """ - if value.get('a') and not self.get('extent', None): - self['extent'] = remove_trailing_punctuation( - utils.force_list(value.get('a'))[0]) + if value.get("a") and not self.get("extent", None): + self["extent"] = remove_trailing_punctuation( + utils.force_list(value.get("a"))[0] + ) return None -@marc21.over('note', '^500..') +@marc21.over("note", "^500..") @utils.for_each_value @utils.ignore_value def marc21_to_notes(self, key, value): @@ -442,48 +430,41 @@ def marc21_to_notes(self, key, value): note: [500$a repetitive] """ - add_note( - dict( - noteType='general', - label=value.get('a', '') - ), - self) + add_note(dict(noteType="general", label=value.get("a", "")), self) return None -@marc21.over('summary', '^520..') +@marc21.over("summary", "^520..") @utils.for_each_value @utils.ignore_value def marc21_to_summary(self, key, value): """Get summary from repetitive field 520.""" - key_per_code = { - 'a': 'label', - 'c': 'source' - } + key_per_code = {"a": "label", "c": "source"} # parse field 520 subfields for extracting: # summary and source parts tag_link, link = get_field_link_data(value) items = get_field_items(value) index = 1 summary = {} - subfield_selection = {'a', 'c'} + subfield_selection = {"a", "c"} for blob_key, blob_value in items: if blob_key in subfield_selection: subfield_selection.remove(blob_key) - if blob_key == 'a': + if blob_key == "a": summary_data = marc21.build_value_with_alternate_graphic( - '520', blob_key, blob_value, index, link, ',.', ':;/-=') + "520", blob_key, blob_value, index, link, ",.", ":;/-=" + ) else: summary_data = blob_value if summary_data: summary[key_per_code[blob_key]] = summary_data - if blob_key != '__order__': + if blob_key != "__order__": index += 1 return summary or None -@marc21.over('subjects', '^6....') +@marc21.over("subjects", "^6....") @utils.for_each_value @utils.ignore_value @utils.ignore_value @@ -494,47 +475,46 @@ def marc21_to_subjects(self, key, value): if possible deduplicate] """ seen = {} - for subject in utils.force_list(value.get('a')): - subject = { - 'type': EntityType.TOPIC, - 'authorized_access_point': subject - } + for subject in utils.force_list(value.get("a")): + subject = {"type": EntityType.TOPIC, "authorized_access_point": subject} str_subject = str(subject) if str_subject not in seen: seen[str_subject] = 1 - self.setdefault('subjects', []).append(dict(entity=subject)) + self.setdefault("subjects", []).append(dict(entity=subject)) return None -@marc21.over('electronicLocator', '^8564.') +@marc21.over("electronicLocator", "^8564.") @utils.for_each_value @utils.ignore_value def marc21_electronicLocator(self, key, value): """Get electronic locator.""" indicator2 = key[4] electronic_locator = {} - url = utils.force_list(value.get('u'))[0].strip() - subfield_3 = value.get('3') # materials_specified + url = utils.force_list(value.get("u"))[0].strip() + subfield_3 = value.get("3") # materials_specified if subfield_3: subfield_3 = utils.force_list(subfield_3)[0] - if indicator2 == '2': - if subfield_3 and subfield_3 == 'Image de couverture': + if indicator2 == "2": + if subfield_3 and subfield_3 == "Image de couverture": electronic_locator = { - 'url': url, - 'type': 'relatedResource', - 'content': 'coverImage' + "url": url, + "type": "relatedResource", + "content": "coverImage", } - elif indicator2 == '0': - if subfield_x := value.get('x'): # nonpublic_note + elif indicator2 == "0": + if subfield_x := value.get("x"): # nonpublic_note electronic_locator = { - 'url': url, - 
'type': 'resource', - 'source': utils.force_list(subfield_x)[0] + "url": url, + "type": "resource", + "source": utils.force_list(subfield_x)[0], } - if subfield_q := value.get('q'): # electronic_format_type - if subfield_q == 'audio': - self['type'] = [{ - 'main_type': 'docmaintype_audio', - 'subtype': 'docsubtype_audio_book' - }] + if subfield_q := value.get("q"): # electronic_format_type + if subfield_q == "audio": + self["type"] = [ + { + "main_type": "docmaintype_audio", + "subtype": "docsubtype_audio_book", + } + ] return electronic_locator or None diff --git a/rero_ils/modules/ebooks/receivers.py b/rero_ils/modules/ebooks/receivers.py index c5abae3c6e..b8fc9442ba 100644 --- a/rero_ils/modules/ebooks/receivers.py +++ b/rero_ils/modules/ebooks/receivers.py @@ -20,49 +20,44 @@ from dojson.contrib.marc21.utils import create_record from flask import current_app +from ..utils import set_timestamp from .dojson.contrib.marc21 import marc21 from .tasks import create_records, delete_records -from ..utils import set_timestamp -def publish_harvested_records(sender=None, records=None, max_results=None, - *args, **kwargs): +def publish_harvested_records( + sender=None, records=None, max_results=None, *args, **kwargs +): """Create, index the harvested records.""" # name = kwargs['name'] - records = records if records else [] + records = records or [] if max_results: - records = list(records)[:int(max_results)] + records = list(records)[: int(max_results)] converted_records = [] deleted_records = [] for record in records: rec = create_record(record.xml) rec = marc21.do(rec) - rec.setdefault('harvested', True) + rec.setdefault("harvested", True) - identifiers = rec.get('identifiedBy', []) + identifiers = rec.get("identifiedBy", []) identifiers.append( - { - "type": "bf:Local", - "source": "cantook", - "value": record.header.identifier - } + {"type": "bf:Local", "source": "cantook", "value": record.header.identifier} ) - rec['identifiedBy'] = identifiers + rec["identifiedBy"] = identifiers if record.deleted: deleted_records.append(rec) else: converted_records.append(rec) if converted_records: current_app.logger.info( - f'publish_harvester: received {len(converted_records)} ' - 'records to create' + f"publish_harvester: received {len(converted_records)} " "records to create" ) create_records(converted_records) if deleted_records: current_app.logger.info( - f'publish_harvester: received {len(deleted_records)} ' - 'records to delete' + f"publish_harvester: received {len(deleted_records)} " "records to delete" ) delete_records(deleted_records) - msg = f'deleted: {len(deleted_records)}, created: {len(converted_records)}' - set_timestamp('ebooks-harvester', msg=msg) + msg = f"deleted: {len(deleted_records)}, created: {len(converted_records)}" + set_timestamp("ebooks-harvester", msg=msg) diff --git a/rero_ils/modules/ebooks/tasks.py b/rero_ils/modules/ebooks/tasks.py index b52ce78da9..ed62abe807 100644 --- a/rero_ils/modules/ebooks/tasks.py +++ b/rero_ils/modules/ebooks/tasks.py @@ -22,9 +22,9 @@ from celery import shared_task from flask import current_app -from .utils import create_document_holding, update_document_holding from ..documents.api import Document, DocumentsSearch from ..utils import do_bulk_index, get_schema_for_resource, set_timestamp +from .utils import create_document_holding, update_document_holding @shared_task(ignore_result=True) @@ -35,19 +35,20 @@ def create_records(records): uuids = [] for record in records: # add document type - if 'type' not in record: - record['type'] = [{ - 
'main_type': 'docmaintype_book', - 'subtype': 'docsubtype_e-book' - }] + if "type" not in record: + record["type"] = [ + {"main_type": "docmaintype_book", "subtype": "docsubtype_e-book"} + ] # check if already harvested pid = None - for identifier in record.get('identifiedBy'): - if identifier.get('source') == 'cantook': - harvested_id = identifier.get('value') - query = DocumentsSearch()\ - .filter('term', identifiedBy__value__raw=harvested_id)\ - .source(includes=['pid']) + for identifier in record.get("identifiedBy"): + if identifier.get("source") == "cantook": + harvested_id = identifier.get("value") + query = ( + DocumentsSearch() + .filter("term", identifiedBy__value__raw=harvested_id) + .source(includes=["pid"]) + ) try: pid = next(query.scan()).pid except StopIteration: @@ -55,10 +56,10 @@ def create_records(records): try: # add documents schema pid_type = Document.provider.pid_type - record['$schema'] = get_schema_for_resource(pid_type) + record["$schema"] = get_schema_for_resource(pid_type) if pid: # update the record - record['pid'] = pid + record["pid"] = pid existing_record = update_document_holding(record, pid) n_updated += 1 uuids.append(existing_record.id) @@ -66,14 +67,11 @@ def create_records(records): n_created += 1 uuids.append(new_record.id) except Exception as err: - current_app.logger.error(f'EBOOKS CREATE RECORDS: {err} {record}') - do_bulk_index(uuids, doc_type='doc', process=True) + current_app.logger.error(f"EBOOKS CREATE RECORDS: {err} {record}") + do_bulk_index(uuids, doc_type="doc", process=True) - current_app.logger.info( - f'create_records: {n_updated} updated, {n_created} new' - ) - set_timestamp('ebooks_create_records', created=n_created, - updated=n_updated) + current_app.logger.info(f"create_records: {n_updated} updated, {n_created} new") + set_timestamp("ebooks_create_records", created=n_created, updated=n_updated) return n_created, n_updated @@ -84,12 +82,14 @@ def delete_records(records): for record in records: # check if exist pid = None - for identifier in record.get('identifiedBy'): - if identifier.get('source') == 'cantook': - harvested_id = identifier.get('value') - query = DocumentsSearch()\ - .filter('term', identifiedBy__value__raw=harvested_id)\ - .source(includes=['pid']) + for identifier in record.get("identifiedBy"): + if identifier.get("source") == "cantook": + harvested_id = identifier.get("value") + query = ( + DocumentsSearch() + .filter("term", identifiedBy__value__raw=harvested_id) + .source(includes=["pid"]) + ) try: pid = [r.pid for r in query.scan()].pop() except IndexError: @@ -101,7 +101,7 @@ def delete_records(records): # TODO: delete record and linked references count += 1 except Exception as err: - current_app.logger.error(f'EBOOKS DELETE RECORDS: {err} {record}') - current_app.logger.info(f'delete_records: {count}') - set_timestamp('ebooks_delete_records', deleted=count) + current_app.logger.error(f"EBOOKS DELETE RECORDS: {err} {record}") + current_app.logger.info(f"delete_records: {count}") + set_timestamp("ebooks_delete_records", deleted=count) return count diff --git a/rero_ils/modules/ebooks/utils.py b/rero_ils/modules/ebooks/utils.py index b7b8a55d98..e64a7c13aa 100644 --- a/rero_ils/modules/ebooks/utils.py +++ b/rero_ils/modules/ebooks/utils.py @@ -24,13 +24,18 @@ from rero_ils.modules.locations.api import Location from ..documents.api import Document -from ..holdings.api import Holding, HoldingsSearch, create_holding, \ - get_holding_pid_by_doc_location_item_type +from ..holdings.api import ( + Holding, + 
HoldingsSearch, + create_holding, + get_holding_pid_by_doc_location_item_type, +) from ..organisations.api import Organisation -def add_oai_source(name, baseurl, metadataprefix='marc21', - setspecs='', comment='', update=False): +def add_oai_source( + name, baseurl, metadataprefix="marc21", setspecs="", comment="", update=False +): """Add OAIHarvestConfig.""" with current_app.app_context(): source = OAIHarvestConfig.query.filter_by(name=name).first() @@ -40,39 +45,38 @@ def add_oai_source(name, baseurl, metadataprefix='marc21', baseurl=baseurl, metadataprefix=metadataprefix, setspecs=setspecs, - comment=comment + comment=comment, ) source.save() db.session.commit() - return 'Added' + return "Added" elif update: source.name = name source.baseurl = baseurl source.metadataprefix = metadataprefix - if setspecs != '': + if setspecs != "": source.setspecs = setspecs - if comment != '': + if comment != "": source.comment = comment db.session.commit() - return 'Updated' - return 'Not Updated' + return "Updated" + return "Not Updated" def get_harvested_sources(record): """Get the harvested sources from electronicLocator.""" harvested_sources = [] new_electronic_locators = [] - electronic_locators = record.get('electronicLocator', []) + electronic_locators = record.get("electronicLocator", []) for electronic_locator in electronic_locators: - if source := electronic_locator.get('source'): - harvested_sources.append({ - 'source': source, - 'uri': electronic_locator.get('url') - }) + if source := electronic_locator.get("source"): + harvested_sources.append( + {"source": source, "uri": electronic_locator.get("url")} + ) else: new_electronic_locators.append(electronic_locator) if new_electronic_locators: - record['electronicLocator'] = new_electronic_locators + record["electronicLocator"] = new_electronic_locators return harvested_sources @@ -83,13 +87,10 @@ def create_document_holding(record): holdings = [] for harvested_source in harvested_sources: if org := Organisation.get_record_by_online_harvested_source( - source=harvested_source['source']): + source=harvested_source["source"] + ): if not new_record: - new_record = Document.create( - data=record, - dbcommit=False, - reindex=False - ) + new_record = Document.create(data=record, dbcommit=False, reindex=False) if new_record: item_type_pid = org.online_circulation_category() location_pids = org.get_online_locations() @@ -97,16 +98,18 @@ def create_document_holding(record): location = Location.get_record_by_pid(location_pid) library = location.get_library() if url := library.get_online_harvested_source_url( - source=harvested_source['source']): - uri_split = harvested_source['uri'].split('/')[3:] - uri_split.insert(0, url.rstrip('/')) - harvested_source['uri'] = '/'.join(uri_split) + source=harvested_source["source"] + ): + uri_split = harvested_source["uri"].split("/")[3:] + uri_split.insert(0, url.rstrip("/")) + harvested_source["uri"] = "/".join(uri_split) hold = create_holding( document_pid=new_record.pid, location_pid=location_pid, item_type_pid=item_type_pid, electronic_location=harvested_source, - holdings_type='electronic') + holdings_type="electronic", + ) holdings.append(hold) else: current_app.logger.warning( @@ -126,41 +129,39 @@ def update_document_holding(record, pid): harvested_sources = get_harvested_sources(record) new_record = None existing_record = Document.get_record_by_pid(pid) - new_record = existing_record.replace( - data=record, - dbcommit=False, - reindex=False - ) + new_record = existing_record.replace(data=record, 
dbcommit=False, reindex=False) # Save all source uris to find holdings we can delete later source_uris = [] holdings = [] for harvested_source in harvested_sources: if org := Organisation.get_record_by_online_harvested_source( - source=harvested_source['source']): + source=harvested_source["source"] + ): # add the organisation source uri - source_uris.append(harvested_source['uri']) + source_uris.append(harvested_source["uri"]) item_type_pid = org.online_circulation_category() for location_pid in org.get_online_locations(): location = Location.get_record_by_pid(location_pid) library = location.get_library() # replace "https://some.uri" from ebooks with library uri if url := library.get_online_harvested_source_url( - source=harvested_source['source']): - uri_split = harvested_source['uri'].split('/')[3:] - uri_split.insert(0, url.rstrip('/')) - new_uri = '/'.join(uri_split) - harvested_source['uri'] = new_uri + source=harvested_source["source"] + ): + uri_split = harvested_source["uri"].split("/")[3:] + uri_split.insert(0, url.rstrip("/")) + new_uri = "/".join(uri_split) + harvested_source["uri"] = new_uri # add the library source uri source_uris.append(new_uri) if not get_holding_pid_by_doc_location_item_type( - new_record.pid, location_pid, item_type_pid, 'electronic' + new_record.pid, location_pid, item_type_pid, "electronic" ): hold = create_holding( document_pid=new_record.pid, location_pid=location_pid, item_type_pid=item_type_pid, electronic_location=harvested_source, - holdings_type='electronic' + holdings_type="electronic", ) holdings.append(hold) db.session.commit() @@ -175,16 +176,16 @@ def update_document_holding(record, pid): for holding_pid in Holding.get_holdings_pid_by_document_pid(pid): holding = Holding.get_record_by_pid(holding_pid) to_delete = True - for electronic_location in holding.get('electronic_location', []): - uri = electronic_location.get('uri') - if electronic_location.get('source') and uri not in seen_uris: + for electronic_location in holding.get("electronic_location", []): + uri = electronic_location.get("uri") + if electronic_location.get("source") and uri not in seen_uris: seen_uris.append(uri) if uri in source_uris: to_delete = False if to_delete: current_app.logger.info( - 'Delete harvested holding | ' - f'document: {pid} ' + "Delete harvested holding | " + f"document: {pid} " f'holding: {holding.pid} {holding.get("electronic_location")}' ) holding.delete(force=False, dbcommit=True, delindex=True) diff --git a/rero_ils/modules/entities/api.py b/rero_ils/modules/entities/api.py index 3d6cc48720..74d0a60ce6 100644 --- a/rero_ils/modules/entities/api.py +++ b/rero_ils/modules/entities/api.py @@ -25,8 +25,7 @@ from rero_ils.modules.api import IlsRecord, IlsRecordsSearch from rero_ils.modules.documents.api import DocumentsSearch -from rero_ils.modules.entities.remote_entities.utils import \ - extract_data_from_mef_uri +from rero_ils.modules.entities.remote_entities.utils import extract_data_from_mef_uri from rero_ils.modules.utils import extracted_data_from_ref, sorted_pids @@ -36,9 +35,9 @@ class EntitiesSearch(IlsRecordsSearch): class Meta: """Meta class.""" - index = 'entities' + index = "entities" doc_types = None - fields = ('*', ) + fields = ("*",) facets = {} default_filter = None @@ -51,7 +50,8 @@ class Entity(IlsRecord, ABC): def get_record_by_ref(cls, ref): """.""" from .remote_entities.api import RemoteEntity - if entity := extracted_data_from_ref(ref, 'record'): + + if entity := extracted_data_from_ref(ref, "record"): return entity _, _type, _pid 
= extract_data_from_mef_uri(ref) return RemoteEntity.get_entity(_type, _pid) @@ -75,18 +75,15 @@ def get_links_to_me(self, get_pids=False): :rtype: dict. """ document_query = DocumentsSearch().by_entity(self) - documents = sorted_pids(document_query) if get_pids \ - else document_query.count() - links = { - 'documents': documents - } + documents = sorted_pids(document_query) if get_pids else document_query.count() + links = {"documents": documents} return {k: v for k, v in links.items() if v} def reasons_not_to_delete(self): """Get reasons not to delete record.""" cannot_delete = {} if links := self.get_links_to_me(): - cannot_delete['links'] = links + cannot_delete["links"] = links return cannot_delete @property @@ -100,23 +97,21 @@ def organisation_pids(self): """Get organisation pids related with this entity.""" search = DocumentsSearch().by_entity(self)[:0] agg = A( - 'terms', - field='organisation_pid', + "terms", + field="organisation_pid", min_doc_count=1, - size=current_app.config - .get('RERO_ILS_AGGREGATION_SIZE', {}) - .get('organisations', 10) + size=current_app.config.get("RERO_ILS_AGGREGATION_SIZE", {}).get( + "organisations", 10 + ), ) - search.aggs.bucket('organisation', agg) + search.aggs.bucket("organisation", agg) results = search.execute() - return list({ - result.key - for result in results.aggregations.organisation.buckets - }) + return list( + {result.key for result in results.aggregations.organisation.buckets} + ) def documents_pids( - self, with_subjects=True, with_subjects_imported=True, - with_genre_forms=True + self, with_subjects=True, with_subjects_imported=True, with_genre_forms=True ): """Get documents pids related to this entity. @@ -132,13 +127,12 @@ def documents_pids( self, subjects=with_subjects, imported_subjects=with_subjects_imported, - genre_forms=with_genre_forms + genre_forms=with_genre_forms, ) - return [hit.pid for hit in search.source('pid').scan()] + return [hit.pid for hit in search.source("pid").scan()] def documents_ids( - self, with_subjects=True, with_subjects_imported=True, - with_genre_forms=True + self, with_subjects=True, with_subjects_imported=True, with_genre_forms=True ): """Get document ID's/UUID related to this entity. @@ -150,10 +144,14 @@ def documents_ids( :returns: document ID's/UUID related to this entity. 
:rtype: list """ - search = DocumentsSearch().by_entity( - self, - subjects=with_subjects, - imported_subjects=with_subjects_imported, - genre_forms=with_genre_forms - ).source(False) + search = ( + DocumentsSearch() + .by_entity( + self, + subjects=with_subjects, + imported_subjects=with_subjects_imported, + genre_forms=with_genre_forms, + ) + .source(False) + ) return [hit.meta.id for hit in search.scan()] diff --git a/rero_ils/modules/entities/dumpers/__init__.py b/rero_ils/modules/entities/dumpers/__init__.py index ec9ea48ac6..ea61b0b56b 100644 --- a/rero_ils/modules/entities/dumpers/__init__.py +++ b/rero_ils/modules/entities/dumpers/__init__.py @@ -26,23 +26,29 @@ from .indexer import EntityIndexerDumper # replace linked data (seems not necessary at this time) -replace_refs_dumper = MultiDumper(dumpers=[ - # make a fresh copy - Dumper(), - ReplaceRefsDumper() -]) +replace_refs_dumper = MultiDumper( + dumpers=[ + # make a fresh copy + Dumper(), + ReplaceRefsDumper(), + ] +) # dumper used for indexing -indexer_dumper = MultiDumper(dumpers=[ - # make a fresh copy - Dumper(), - ReplaceRefsDumper(), - EntityIndexerDumper(), - LocalizedAuthorizedAccessPointDumper() -]) +indexer_dumper = MultiDumper( + dumpers=[ + # make a fresh copy + Dumper(), + ReplaceRefsDumper(), + EntityIndexerDumper(), + LocalizedAuthorizedAccessPointDumper(), + ] +) -document_dumper = MultiDumper(dumpers=[ - BaseDocumentEntityDumper(), - EntityIndexerDumper(), - LocalizedAuthorizedAccessPointDumper(), -]) +document_dumper = MultiDumper( + dumpers=[ + BaseDocumentEntityDumper(), + EntityIndexerDumper(), + LocalizedAuthorizedAccessPointDumper(), + ] +) diff --git a/rero_ils/modules/entities/dumpers/authorized_acces_point.py b/rero_ils/modules/entities/dumpers/authorized_acces_point.py index c300ffc4cc..489b6c56d2 100644 --- a/rero_ils/modules/entities/dumpers/authorized_acces_point.py +++ b/rero_ils/modules/entities/dumpers/authorized_acces_point.py @@ -32,6 +32,7 @@ def dump(self, record, data): :param data: The initial dump data passed in by ``record.dumps()``. """ for language in get_i18n_supported_languages(): - data[f'authorized_access_point_{language}'] = \ + data[f"authorized_access_point_{language}"] = ( record.get_authorized_access_point(language) + ) return data diff --git a/rero_ils/modules/entities/dumpers/document.py b/rero_ils/modules/entities/dumpers/document.py index b9bd95b7c5..9fa7273dc2 100644 --- a/rero_ils/modules/entities/dumpers/document.py +++ b/rero_ils/modules/entities/dumpers/document.py @@ -35,27 +35,25 @@ def dump(self, record, data): :param data: The initial dump data passed in by ``record.dumps()``. 
""" data = { - 'type': record['type'], - 'pid': record.pid, - 'pids': { - record.resource_type: record.pid - } + "type": record["type"], + "pid": record.pid, + "pids": {record.resource_type: record.pid}, } if record.resource_type == EntityResourceType.REMOTE: - for agency in current_app.config['RERO_ILS_AGENTS_SOURCES']: + for agency in current_app.config["RERO_ILS_AGENTS_SOURCES"]: if field := record.get(agency): - data['type'] = field.get('type', record['type']) - data['pids'][agency] = record[agency]['pid'] + data["type"] = field.get("type", record["type"]) + data["pids"][agency] = record[agency]["pid"] variant_access_points = [] parallel_access_points = [] - for source in record.get('sources'): - variant_access_points += record[source].get( - 'variant_access_point', []) + for source in record.get("sources"): + variant_access_points += record[source].get("variant_access_point", []) parallel_access_points += record[source].get( - 'parallel_access_point', []) + "parallel_access_point", [] + ) if variant_access_points: - data['variant_access_point'] = variant_access_points + data["variant_access_point"] = variant_access_points if parallel_access_points: - data['parallel_access_point'] = parallel_access_points + data["parallel_access_point"] = parallel_access_points return data diff --git a/rero_ils/modules/entities/dumpers/indexer.py b/rero_ils/modules/entities/dumpers/indexer.py index 43a3ca7706..482475b1f5 100644 --- a/rero_ils/modules/entities/dumpers/indexer.py +++ b/rero_ils/modules/entities/dumpers/indexer.py @@ -30,6 +30,6 @@ def dump(self, record, data): :param record: The record to dump. :param data: The initial dump data passed in by ``record.dumps()``. """ - data['resource_type'] = record.resource_type - data['organisations'] = record.organisation_pids + data["resource_type"] = record.resource_type + data["organisations"] = record.organisation_pids return data diff --git a/rero_ils/modules/entities/fetchers.py b/rero_ils/modules/entities/fetchers.py index 457321b950..21425c667e 100644 --- a/rero_ils/modules/entities/fetchers.py +++ b/rero_ils/modules/entities/fetchers.py @@ -25,7 +25,7 @@ from rero_ils.modules.utils import get_pid_type_from_schema -FetchedPID = namedtuple('FetchedPID', ['pid_type', 'pid_value']) +FetchedPID = namedtuple("FetchedPID", ["pid_type", "pid_value"]) """A pid fetcher.""" @@ -36,11 +36,8 @@ def id_fetcher(record_uuid, data): :param data: The record metadata. :return: A :data:`rero_ils.modules.fetchers.FetchedPID` instance. """ - pid_type = 'ent' + pid_type = "ent" # try to extract pid type from schema - if schema := data.get('$schema'): + if schema := data.get("$schema"): pid_type = get_pid_type_from_schema(schema) - return FetchedPID( - pid_type=pid_type, - pid_value=data['pid'] - ) + return FetchedPID(pid_type=pid_type, pid_value=data["pid"]) diff --git a/rero_ils/modules/entities/helpers.py b/rero_ils/modules/entities/helpers.py index e375f4b274..cf621e0082 100644 --- a/rero_ils/modules/entities/helpers.py +++ b/rero_ils/modules/entities/helpers.py @@ -23,7 +23,7 @@ from rero_ils.modules.utils import extracted_data_from_ref -def str_builder(field_values, prefix='', suffix='', delimiter=''): +def str_builder(field_values, prefix="", suffix="", delimiter=""): """String builder method. 
This builder is used to format string depending on given arguments @@ -51,8 +51,8 @@ def str_builder(field_values, prefix='', suffix='', delimiter=''): # any(field_values['','']) == False # bool(field_values['','']) == True if any(field_values): - return f'{prefix}{delimiter.join(field_values)}{suffix}' - return '' + return f"{prefix}{delimiter.join(field_values)}{suffix}" + return "" def get_entity_record_from_data(data): @@ -61,13 +61,13 @@ def get_entity_record_from_data(data): # todo: Add comments """ # try to get entity record - if pid := data.get('pid'): + if pid := data.get("pid"): # remote entities have a pid in data if entity := RemoteEntity.get_record_by_pid(pid): return entity - raise RecordNotFound(RemoteEntity, data.get('pid')) - if ref := data.get('$ref'): - entity = extracted_data_from_ref(ref, 'record') + raise RecordNotFound(RemoteEntity, data.get("pid")) + if ref := data.get("$ref"): + entity = extracted_data_from_ref(ref, "record") # check if local entity if entity and isinstance(entity, LocalEntity): return entity diff --git a/rero_ils/modules/entities/local_entities/api.py b/rero_ils/modules/entities/local_entities/api.py index 6d847d6e64..95cd6169ed 100644 --- a/rero_ils/modules/entities/local_entities/api.py +++ b/rero_ils/modules/entities/local_entities/api.py @@ -23,22 +23,20 @@ from rero_ils.modules.api import IlsRecordsSearch from rero_ils.modules.fetchers import id_fetcher from rero_ils.modules.minters import id_minter -from rero_ils.modules.operation_logs.extensions import \ - OperationLogObserverExtension +from rero_ils.modules.operation_logs.extensions import OperationLogObserverExtension from rero_ils.modules.providers import Provider -from .extensions import AuthorizedAccessPointExtension, \ - LocalEntityFactoryExtension -from .models import LocalEntityIdentifier, LocalEntityMetadata from ..api import Entity from ..dumpers import replace_refs_dumper from ..models import EntityResourceType +from .extensions import AuthorizedAccessPointExtension, LocalEntityFactoryExtension +from .models import LocalEntityIdentifier, LocalEntityMetadata # provider LocalEntityProvider = type( - 'LocalEntityProvider', + "LocalEntityProvider", (Provider,), - dict(identifier=LocalEntityIdentifier, pid_type='locent') + dict(identifier=LocalEntityIdentifier, pid_type="locent"), ) # minter local_entity_id_minter = partial(id_minter, provider=LocalEntityProvider) @@ -52,9 +50,9 @@ class LocalEntitiesSearch(IlsRecordsSearch): class Meta: """Meta class.""" - index = 'local_entities' + index = "local_entities" doc_types = None - fields = ('*', ) + fields = ("*",) facets = {} default_filter = None @@ -73,7 +71,7 @@ class LocalEntity(Entity): _extensions = [ LocalEntityFactoryExtension(), AuthorizedAccessPointExtension(), - OperationLogObserverExtension() + OperationLogObserverExtension(), ] @property @@ -84,7 +82,7 @@ def resource_type(self): @property def type(self): """Shortcut for local entity type.""" - return self.get('type') + return self.get("type") def get_authorized_access_point(self, language): """Get localized authorized_access_point. @@ -95,7 +93,7 @@ def get_authorized_access_point(self, language): :param language: language for authorized access point. :returns: authorized access point in given language. """ - return self.get('authorized_access_point') + return self.get("authorized_access_point") def resolve(self): """Resolve references data. 
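A note on the `str_builder` helper reformatted in the helpers.py hunk above: it joins the given values only when at least one of them is non-empty, so callers can pass optional subfields without guarding each one. A minimal sketch of the expected behaviour, using illustrative values that are not part of this patch:

    from rero_ils.modules.entities.helpers import str_builder

    # At least one non-empty value: prefix, delimiter and suffix are applied.
    str_builder(["1901", "1999"], prefix=" (", suffix=")", delimiter="-")
    # -> ' (1901-1999)'

    # Only empty values: any() is falsy, so the builder returns ''.
    str_builder(["", ""], prefix=" (", suffix=")", delimiter="-")
    # -> ''

This mirrors how `PersonLocalEntity.get_authorized_access_point()` later in this patch builds the life-dates suffix.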
diff --git a/rero_ils/modules/entities/local_entities/extensions/__init__.py b/rero_ils/modules/entities/local_entities/extensions/__init__.py index 9c91dc9a99..880df9f772 100644 --- a/rero_ils/modules/entities/local_entities/extensions/__init__.py +++ b/rero_ils/modules/entities/local_entities/extensions/__init__.py @@ -20,7 +20,4 @@ from .authorized_access_point import AuthorizedAccessPointExtension from .local_entity_factory import LocalEntityFactoryExtension -__all__ = [ - 'AuthorizedAccessPointExtension', - 'LocalEntityFactoryExtension' -] +__all__ = ["AuthorizedAccessPointExtension", "LocalEntityFactoryExtension"] diff --git a/rero_ils/modules/entities/local_entities/extensions/authorized_access_point.py b/rero_ils/modules/entities/local_entities/extensions/authorized_access_point.py index 3025e44b0c..ad4bec9a4b 100644 --- a/rero_ils/modules/entities/local_entities/extensions/authorized_access_point.py +++ b/rero_ils/modules/entities/local_entities/extensions/authorized_access_point.py @@ -35,8 +35,7 @@ def pre_create(self, record): :param record: the record metadata. """ - record['authorized_access_point'] = \ - self._get_authorized_access_point(record) + record["authorized_access_point"] = self._get_authorized_access_point(record) # required for validation if record.model: record.model.data = record @@ -46,5 +45,4 @@ def pre_commit(self, record): :param record: the record metadata. """ - record['authorized_access_point'] = \ - self._get_authorized_access_point(record) + record["authorized_access_point"] = self._get_authorized_access_point(record) diff --git a/rero_ils/modules/entities/local_entities/extensions/local_entity_factory.py b/rero_ils/modules/entities/local_entities/extensions/local_entity_factory.py index d28323fc38..8dd8b91a6d 100644 --- a/rero_ils/modules/entities/local_entities/extensions/local_entity_factory.py +++ b/rero_ils/modules/entities/local_entities/extensions/local_entity_factory.py @@ -33,9 +33,14 @@ class LocalEntityFactoryExtension(RecordExtension): def _get_local_entity_class(record): """Get the Local entity class to use based on record data.""" from ..api import LocalEntity - from ..subclasses import OrganisationLocalEntity, PersonLocalEntity, \ - PlaceLocalEntity, TemporalLocalEntity, TopicLocalEntity, \ - WorkLocalEntity + from ..subclasses import ( + OrganisationLocalEntity, + PersonLocalEntity, + PlaceLocalEntity, + TemporalLocalEntity, + TopicLocalEntity, + WorkLocalEntity, + ) mapping = { EntityType.PERSON: PersonLocalEntity, diff --git a/rero_ils/modules/entities/local_entities/indexer.py b/rero_ils/modules/entities/local_entities/indexer.py index 0bdfc97ca1..c981755d27 100644 --- a/rero_ils/modules/entities/local_entities/indexer.py +++ b/rero_ils/modules/entities/local_entities/indexer.py @@ -23,18 +23,20 @@ from flask import current_app from rero_ils.modules.api import IlsRecordsIndexer, ReferencedRecordsIndexer -from rero_ils.modules.utils import get_indexer_class_by_resource, \ - get_record_class_by_resource +from rero_ils.modules.utils import ( + get_indexer_class_by_resource, + get_record_class_by_resource, +) -from .api import LocalEntity from ..dumpers import indexer_dumper +from .api import LocalEntity @shared_task(ignore_result=True) def index_referenced_records(entity): """Index referenced records.""" indexer = ReferencedRecordsIndexer() - entity = LocalEntity.get_record_by_pid(entity.get('pid')) + entity = LocalEntity.get_record_by_pid(entity.get("pid")) if referenced_resources := entity.get_links_to_me(get_pids=True): for resource, 
pids in referenced_resources.items(): record_cls = get_record_class_by_resource(resource) @@ -43,10 +45,7 @@ def index_referenced_records(entity): referenced = [] for pid in pids: record = record_cls.get_record_by_pid(pid) - referenced.append(dict( - pid_type=pid_type, - record=record - )) + referenced.append(dict(pid_type=pid_type, record=record)) indexer.index(indexer_cls, referenced) @@ -61,7 +60,8 @@ def index(self, entity, arguments=None, **kwargs): """Index a Local entity record.""" super().index(entity) eta = datetime.utcnow() + current_app.config.get( - "RERO_ILS_INDEXER_TASK_DELAY", 0) + "RERO_ILS_INDEXER_TASK_DELAY", 0 + ) index_referenced_records.apply_async((entity,), eta=eta) def bulk_index(self, record_id_iterator): @@ -69,4 +69,4 @@ def bulk_index(self, record_id_iterator): :param record_id_iterator: Iterator yielding record UUIDs. """ - super().bulk_index(record_id_iterator, doc_type='locent') + super().bulk_index(record_id_iterator, doc_type="locent") diff --git a/rero_ils/modules/entities/local_entities/jsonresolver.py b/rero_ils/modules/entities/local_entities/jsonresolver.py index 366cc95d20..24906edb2c 100644 --- a/rero_ils/modules/entities/local_entities/jsonresolver.py +++ b/rero_ils/modules/entities/local_entities/jsonresolver.py @@ -23,7 +23,7 @@ from rero_ils.modules.jsonresolver import resolve_json_refs -@jsonresolver.route('/api/local_entities/<pid>', host='bib.rero.ch') +@jsonresolver.route("/api/local_entities/<pid>", host="bib.rero.ch") def local_entities_resolver(pid): """Resolver for local entity record.""" - return resolve_json_refs('locent', pid) + return resolve_json_refs("locent", pid) diff --git a/rero_ils/modules/entities/local_entities/models.py b/rero_ils/modules/entities/local_entities/models.py index 3ccd081320..65cadac22f 100644 --- a/rero_ils/modules/entities/local_entities/models.py +++ b/rero_ils/modules/entities/local_entities/models.py @@ -26,11 +26,11 @@ class LocalEntityIdentifier(RecordIdentifier): """Sequence generator for `Entity` identifiers.""" - __tablename__ = 'local_entity_id' - __mapper_args__ = {'concrete': True} + __tablename__ = "local_entity_id" + __mapper_args__ = {"concrete": True} recid = db.Column( - db.BigInteger().with_variant(db.Integer, 'sqlite'), + db.BigInteger().with_variant(db.Integer, "sqlite"), primary_key=True, autoincrement=True, ) @@ -39,4 +39,4 @@ class LocalEntityIdentifier(RecordIdentifier): class LocalEntityMetadata(db.Model, RecordMetadataBase): """Entity record metadata.""" - __tablename__ = 'local_entity_metadata' + __tablename__ = "local_entity_metadata" diff --git a/rero_ils/modules/entities/local_entities/permissions.py b/rero_ils/modules/entities/local_entities/permissions.py index 6af2cd6d83..94d164cfa6 100644 --- a/rero_ils/modules/entities/local_entities/permissions.py +++ b/rero_ils/modules/entities/local_entities/permissions.py @@ -19,16 +19,15 @@ """Permissions for `Local Entity` records.""" from invenio_access import action_factory -from rero_ils.modules.permissions import AllowedByAction, \ RecordPermissionPolicy +from rero_ils.modules.permissions import AllowedByAction, RecordPermissionPolicy # Actions to control local entity policies for CRUD operations -search_action = action_factory('locent-search') -read_action = action_factory('locent-read') -create_action = action_factory('locent-create') -update_action = action_factory('locent-update') -delete_action = action_factory('locent-delete') -access_action = action_factory('locent-access') +search_action = action_factory("locent-search")
+read_action = action_factory("locent-read") +create_action = action_factory("locent-create") +update_action = action_factory("locent-update") +delete_action = action_factory("locent-delete") +access_action = action_factory("locent-access") class LocalEntityPermissionPolicy(RecordPermissionPolicy): diff --git a/rero_ils/modules/entities/local_entities/proxy.py b/rero_ils/modules/entities/local_entities/proxy.py index ec93ee5a03..4c5c52ac65 100644 --- a/rero_ils/modules/entities/local_entities/proxy.py +++ b/rero_ils/modules/entities/local_entities/proxy.py @@ -19,16 +19,15 @@ """Local entity proxies.""" from elasticsearch_dsl import Q -from .api import LocalEntitiesSearch from ..models import EntityType +from .api import LocalEntitiesSearch CATEGORY_FILTERS = { - 'agents': Q('terms', type=[EntityType.PERSON, EntityType.ORGANISATION]), - 'person': Q('term', type=EntityType.PERSON), - 'organisation': Q('term', type=EntityType.ORGANISATION), - 'concepts': Q('term', type=EntityType.TOPIC), - 'concepts-genreForm': - Q('term', type=EntityType.TOPIC) & Q('term', genreForm=True) + "agents": Q("terms", type=[EntityType.PERSON, EntityType.ORGANISATION]), + "person": Q("term", type=EntityType.PERSON), + "organisation": Q("term", type=EntityType.ORGANISATION), + "concepts": Q("term", type=EntityType.TOPIC), + "concepts-genreForm": Q("term", type=EntityType.TOPIC) & Q("term", genreForm=True), } @@ -50,8 +49,9 @@ def search(self, search_term, size=10): :return: local entities matching the search term. :rtype: generator. """ - query = self._create_base_query()[:size]\ - .filter('query_string', query=search_term) + query = self._create_base_query()[:size].filter( + "query_string", query=search_term + ) yield from query.execute() def _create_base_query(self): @@ -65,4 +65,4 @@ def _create_base_query(self): if self.category in CATEGORY_FILTERS: return query.filter(CATEGORY_FILTERS[self.category]) else: - return query.filter('term', type=self.category) + return query.filter("term", type=self.category) diff --git a/rero_ils/modules/entities/local_entities/subclasses/__init__.py b/rero_ils/modules/entities/local_entities/subclasses/__init__.py index 1f16eb23dc..4babcdc0dd 100644 --- a/rero_ils/modules/entities/local_entities/subclasses/__init__.py +++ b/rero_ils/modules/entities/local_entities/subclasses/__init__.py @@ -26,10 +26,10 @@ from .work import WorkLocalEntity __all__ = [ - 'OrganisationLocalEntity', - 'PersonLocalEntity', - 'PlaceLocalEntity', - 'TemporalLocalEntity', - 'TopicLocalEntity', - 'WorkLocalEntity' + "OrganisationLocalEntity", + "PersonLocalEntity", + "PlaceLocalEntity", + "TemporalLocalEntity", + "TopicLocalEntity", + "WorkLocalEntity", ] diff --git a/rero_ils/modules/entities/local_entities/subclasses/organisation.py b/rero_ils/modules/entities/local_entities/subclasses/organisation.py index bd6713f6ce..034342cef8 100644 --- a/rero_ils/modules/entities/local_entities/subclasses/organisation.py +++ b/rero_ils/modules/entities/local_entities/subclasses/organisation.py @@ -18,8 +18,8 @@ """API for manipulating "organisation" local entities.""" -from ..api import LocalEntity from ...helpers import str_builder as builder +from ..api import LocalEntity class OrganisationLocalEntity(LocalEntity): @@ -31,14 +31,13 @@ def get_authorized_access_point(self, language=None): :return return the calculated authorized access point to use. 
""" conference = [ - self.get('conference_numbering', ''), - self.get('conference_date', ''), - self.get('conference_place', ''), + self.get("conference_numbering", ""), + self.get("conference_date", ""), + self.get("conference_place", ""), ] field_builders = [ - self.get('name'), - builder(self.get('subordinate_units'), - prefix='. ', delimiter='. '), - builder(conference, delimiter=' ; ', prefix=' (', suffix=')') + self.get("name"), + builder(self.get("subordinate_units"), prefix=". ", delimiter=". "), + builder(conference, delimiter=" ; ", prefix=" (", suffix=")"), ] - return ''.join(field_builders) + return "".join(field_builders) diff --git a/rero_ils/modules/entities/local_entities/subclasses/person.py b/rero_ils/modules/entities/local_entities/subclasses/person.py index e6002008bd..99154b2c94 100644 --- a/rero_ils/modules/entities/local_entities/subclasses/person.py +++ b/rero_ils/modules/entities/local_entities/subclasses/person.py @@ -18,8 +18,8 @@ """API for manipulating "person" local entities.""" -from ..api import LocalEntity from ...helpers import str_builder as builder +from ..api import LocalEntity class PersonLocalEntity(LocalEntity): @@ -30,12 +30,12 @@ def get_authorized_access_point(self, language=None): :return return the calculated authorized access point to use. """ - dates = [self.get('date_of_birth', ''), self.get('date_of_death', '')] + dates = [self.get("date_of_birth", ""), self.get("date_of_death", "")] field_builders = [ - self.get('name'), - builder(self.get('numeration'), prefix=' '), - builder(self.get('qualifier'), prefix=', '), - builder(self.get('fuller_form_of_name'), prefix=' (', suffix=')'), - builder(dates, delimiter='-', prefix=' (', suffix=')') + self.get("name"), + builder(self.get("numeration"), prefix=" "), + builder(self.get("qualifier"), prefix=", "), + builder(self.get("fuller_form_of_name"), prefix=" (", suffix=")"), + builder(dates, delimiter="-", prefix=" (", suffix=")"), ] - return ''.join(field_builders) + return "".join(field_builders) diff --git a/rero_ils/modules/entities/local_entities/subclasses/place.py b/rero_ils/modules/entities/local_entities/subclasses/place.py index ff29dd4fa3..69ca09b55e 100644 --- a/rero_ils/modules/entities/local_entities/subclasses/place.py +++ b/rero_ils/modules/entities/local_entities/subclasses/place.py @@ -29,4 +29,4 @@ def get_authorized_access_point(self, language=None): :return return the calculated authorized access point to use. """ - return self.get('name') + return self.get("name") diff --git a/rero_ils/modules/entities/local_entities/subclasses/temporal.py b/rero_ils/modules/entities/local_entities/subclasses/temporal.py index ec8190e0b7..d38571e019 100644 --- a/rero_ils/modules/entities/local_entities/subclasses/temporal.py +++ b/rero_ils/modules/entities/local_entities/subclasses/temporal.py @@ -29,4 +29,4 @@ def get_authorized_access_point(self, language=None): :return return the calculated authorized access point to use. """ - return self.get('name') + return self.get("name") diff --git a/rero_ils/modules/entities/local_entities/subclasses/topic.py b/rero_ils/modules/entities/local_entities/subclasses/topic.py index b1116a42b9..894fdbb012 100644 --- a/rero_ils/modules/entities/local_entities/subclasses/topic.py +++ b/rero_ils/modules/entities/local_entities/subclasses/topic.py @@ -29,4 +29,4 @@ def get_authorized_access_point(self, language=None): :return return the calculated authorized access point to use. 
""" - return self.get('name') + return self.get("name") diff --git a/rero_ils/modules/entities/local_entities/subclasses/work.py b/rero_ils/modules/entities/local_entities/subclasses/work.py index 1cf65b6632..7546f5ab0e 100644 --- a/rero_ils/modules/entities/local_entities/subclasses/work.py +++ b/rero_ils/modules/entities/local_entities/subclasses/work.py @@ -18,8 +18,8 @@ """API for manipulating "work" local entities.""" -from ..api import LocalEntity from ...helpers import str_builder as builder +from ..api import LocalEntity class WorkLocalEntity(LocalEntity): @@ -31,7 +31,7 @@ def get_authorized_access_point(self, language=None): :return return the calculated authorized access point to use. """ field_builders = [ - builder(self.get('creator'), suffix='. '), - self.get('title'), + builder(self.get("creator"), suffix=". "), + self.get("title"), ] - return ''.join(field_builders) + return "".join(field_builders) diff --git a/rero_ils/modules/entities/local_entities/views.py b/rero_ils/modules/entities/local_entities/views.py index 9bca18935d..66b8cd6bc0 100644 --- a/rero_ils/modules/entities/local_entities/views.py +++ b/rero_ils/modules/entities/local_entities/views.py @@ -26,26 +26,28 @@ from rero_ils.modules.decorators import check_logged_as_librarian from rero_ils.modules.entities.local_entities.proxy import LocalEntityProxy -api_blueprint = Blueprint('api_local_entities', __name__) +api_blueprint = Blueprint("api_local_entities", __name__) def extract_size_parameter(func): """Decorator to extract the size parameter from query string.""" + @wraps(func) def wrapper(*args, **kwargs): - if 'size' not in kwargs: - kwargs['size'] = current_app.config.get( - 'RERO_ILS_DEFAULT_SUGGESTION_LIMIT') + if "size" not in kwargs: + kwargs["size"] = current_app.config.get("RERO_ILS_DEFAULT_SUGGESTION_LIMIT") with suppress(ValueError): - kwargs['size'] = int(request.args.get('size') or '') + kwargs["size"] = int(request.args.get("size") or "") return func(*args, **kwargs) + return wrapper -@api_blueprint.route('/local_entities/search/', - defaults={'entity_type': 'agents'}) -@api_blueprint.route('/local_entities/search//') -@api_blueprint.route('/local_entities/search///') +@api_blueprint.route( + "/local_entities/search/", defaults={"entity_type": "agents"} +) +@api_blueprint.route("/local_entities/search//") +@api_blueprint.route("/local_entities/search///") @check_logged_as_librarian @extract_size_parameter def local_search_proxy(entity_type, term, size): @@ -64,7 +66,6 @@ def local_search_proxy(entity_type, term, size): # apply. # See same behavior for remote entities search proxy. 
- return jsonify([ - hit.to_dict() - for hit in LocalEntityProxy(entity_type).search(term, size) - ]) + return jsonify( + [hit.to_dict() for hit in LocalEntityProxy(entity_type).search(term, size)] + ) diff --git a/rero_ils/modules/entities/logger.py b/rero_ils/modules/entities/logger.py index 035ab99b05..93654684aa 100644 --- a/rero_ils/modules/entities/logger.py +++ b/rero_ils/modules/entities/logger.py @@ -39,43 +39,36 @@ def create_logger(name, file_name, log_dir=None, verbose=False): # default value if not log_dir: log_dir = current_app.config.get( - 'RERO_ILS_MEF_SYNC_LOG_DIR', - os.path.join(current_app.instance_path, 'logs') + "RERO_ILS_MEF_SYNC_LOG_DIR", os.path.join(current_app.instance_path, "logs") ) # create the log directory if does not exists if not os.path.exists(log_dir): os.mkdir(log_dir) - verbose_level = ['ERROR', 'INFO', 'DEBUG'] + verbose_level = ["ERROR", "INFO", "DEBUG"] logging_config = { - 'version': 1, - 'disable_existing_loggers': False, - 'formatters': { - 'standard': { - 'format': '%(asctime)s [%(levelname)s] :: %(message)s' - } + "version": 1, + "disable_existing_loggers": False, + "formatters": { + "standard": {"format": "%(asctime)s [%(levelname)s] :: %(message)s"} }, - 'handlers': { - 'console': { - 'class': 'logging.StreamHandler', - 'formatter': 'standard', - 'level': verbose_level[min(verbose, 2)] + "handlers": { + "console": { + "class": "logging.StreamHandler", + "formatter": "standard", + "level": verbose_level[min(verbose, 2)], }, - 'file': { - 'class': 'logging.handlers.TimedRotatingFileHandler', - 'filename': os.path.join(log_dir, file_name), - 'when': 'D', - 'interval': 7, - 'backupCount': 10, - 'formatter': 'standard' - } + "file": { + "class": "logging.handlers.TimedRotatingFileHandler", + "filename": os.path.join(log_dir, file_name), + "when": "D", + "interval": 7, + "backupCount": 10, + "formatter": "standard", + }, + }, + "loggers": { + name: {"handlers": ["console", "file"], "level": "INFO", "propagate": False} }, - 'loggers': { - name: { - 'handlers': ['console', 'file'], - 'level': 'INFO', - 'propagate': False - } - } } dictConfig(logging_config) return logging.getLogger(name) diff --git a/rero_ils/modules/entities/minters.py b/rero_ils/modules/entities/minters.py index a867883389..3204cf4330 100644 --- a/rero_ils/modules/entities/minters.py +++ b/rero_ils/modules/entities/minters.py @@ -20,18 +20,18 @@ from collections import namedtuple EntityMinter = namedtuple( - 'EntityMinter', - ['pid_type', 'pid_value', 'object_uuid', 'object_type']) + "EntityMinter", ["pid_type", "pid_value", "object_uuid", "object_type"] +) -def id_minter(record_uuid, data, provider, pid_key='pid', object_type='rec'): +def id_minter(record_uuid, data, provider, pid_key="pid", object_type="rec"): """RERO ILS dummy minter.""" # DEV NOTES: # A minter is required for invenio-records-rest. 
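     # e.g. (hypothetical values, illustration only):
     #   id_minter(record_uuid, {"pid": "1"}, LocalEntityProvider)
     #   -> EntityMinter(pid_type="rec", pid_value="1",
     #                   object_uuid=record_uuid, object_type="rec")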
# This return a dummy PersistentIdentifier return EntityMinter( pid_type=object_type, - pid_value=data['pid'], + pid_value=data["pid"], object_uuid=record_uuid, - object_type=object_type + object_type=object_type, ) diff --git a/rero_ils/modules/entities/models.py b/rero_ils/modules/entities/models.py index 63166c5472..4b55e25eb3 100644 --- a/rero_ils/modules/entities/models.py +++ b/rero_ils/modules/entities/models.py @@ -22,25 +22,25 @@ class EntityType: """Class holding all available entity types.""" - AGENT = 'bf:Agent' - ORGANISATION = 'bf:Organisation' - PERSON = 'bf:Person' - PLACE = 'bf:Place' - TEMPORAL = 'bf:Temporal' - TOPIC = 'bf:Topic' - WORK = 'bf:Work' + AGENT = "bf:Agent" + ORGANISATION = "bf:Organisation" + PERSON = "bf:Person" + PLACE = "bf:Place" + TEMPORAL = "bf:Temporal" + TOPIC = "bf:Topic" + WORK = "bf:Work" class EntityResourceType: """Class holding all available resource entity types.""" - REMOTE = 'remote' - LOCAL = 'local' + REMOTE = "remote" + LOCAL = "local" class EntityFieldWithRef: """Class to define field with $ref.""" - CONTRIBUTION = 'contribution' - GENRE_FORM = 'genreForm' - SUBJECTS = 'subjects' + CONTRIBUTION = "contribution" + GENRE_FORM = "genreForm" + SUBJECTS = "subjects" diff --git a/rero_ils/modules/entities/remote_entities/api.py b/rero_ils/modules/entities/remote_entities/api.py index 76fa5d2303..900335e16a 100644 --- a/rero_ils/modules/entities/remote_entities/api.py +++ b/rero_ils/modules/entities/remote_entities/api.py @@ -32,18 +32,17 @@ from rero_ils.modules.minters import id_minter from rero_ils.modules.providers import Provider -from .models import EntityUpdateAction, RemoteEntityIdentifier, \ - RemoteEntityMetadata -from .utils import extract_data_from_mef_uri, get_mef_data_by_type from ..api import Entity from ..dumpers import indexer_dumper, replace_refs_dumper from ..models import EntityResourceType +from .models import EntityUpdateAction, RemoteEntityIdentifier, RemoteEntityMetadata +from .utils import extract_data_from_mef_uri, get_mef_data_by_type # provider RemoteEntityProvider = type( - 'EntityProvider', + "EntityProvider", (Provider,), - dict(identifier=RemoteEntityIdentifier, pid_type='rement') + dict(identifier=RemoteEntityIdentifier, pid_type="rement"), ) # minter remote_entity_id_minter = partial(id_minter, provider=RemoteEntityProvider) @@ -57,9 +56,9 @@ class RemoteEntitiesSearch(IlsRecordsSearch): class Meta: """Meta class.""" - index = 'remote_entities' + index = "remote_entities" doc_types = None - fields = ('*', ) + fields = ("*",) facets = {} default_filter = None @@ -84,18 +83,20 @@ def get_entity(cls, ref_type, ref_pid): :param ref_pid: the identifier to search. :returns: the corresponding `Entity` if exists. 
""" - if ref_type == 'mef': + if ref_type == "mef": return cls.get_record_by_pid(ref_pid) - es_filter = Q('term', **{f'{ref_type}.pid': ref_pid}) - if ref_type == 'viaf': - es_filter = Q('term', viaf_pid=ref_pid) - query = RemoteEntitiesSearch() \ - .params(preserve_order=True) \ - .sort({'_created': {'order': 'desc'}}) \ + es_filter = Q("term", **{f"{ref_type}.pid": ref_pid}) + if ref_type == "viaf": + es_filter = Q("term", viaf_pid=ref_pid) + query = ( + RemoteEntitiesSearch() + .params(preserve_order=True) + .sort({"_created": {"order": "desc"}}) .filter(es_filter) + ) with contextlib.suppress(StopIteration): - pid = next(query.source('pid').scan()).pid + pid = next(query.source("pid").scan()).pid return cls.get_record_by_pid(pid) @classmethod @@ -120,11 +121,12 @@ def get_record_by_ref(cls, ref): nested = db.session.begin_nested() try: data = get_mef_data_by_type( - entity_type=entity_type, pid_type=ref_type, pid=ref_pid) + entity_type=entity_type, pid_type=ref_type, pid=ref_pid + ) if not data: - raise HTTPError('', 404, "Not found") + raise HTTPError("", 404, "Not found") # Try to get the contribution from DB maybe it was not indexed. - if entity := RemoteEntity.get_record_by_pid(data['pid']): + if entity := RemoteEntity.get_record_by_pid(data["pid"]): entity = entity.replace(data) else: entity = cls.create(data) @@ -134,9 +136,7 @@ def get_record_by_ref(cls, ref): entity.reindex() except Exception as err: nested.rollback() - current_app.logger.error( - f'Get MEF record: {ref_type}:{ref_pid} >>{err}<<' - ) + current_app.logger.error(f"Get MEF record: {ref_type}:{ref_pid} >>{err}<<") entity = None return entity, online @@ -148,8 +148,8 @@ def resource_type(self): @property def type(self): """Get entity type.""" - entity_types = current_app.config['RERO_ILS_ENTITY_TYPES'] - return entity_types.get(self['type']) + entity_types = current_app.config["RERO_ILS_ENTITY_TYPES"] + return entity_types.get(self["type"]) def resolve(self): """Resolve references data. @@ -174,12 +174,12 @@ def get_authorized_access_point(self, language): :returns: authorized access point in given language. """ return self._get_mef_localized_value( - key='authorized_access_point', - language=language + key="authorized_access_point", language=language ) - def update_online(self, dbcommit=False, reindex=False, verbose=False, - reindex_doc=True): + def update_online( + self, dbcommit=False, reindex=False, verbose=False, reindex_doc=True + ): """Update record online. 
:param reindex: reindex record by record @@ -189,27 +189,26 @@ def update_online(self, dbcommit=False, reindex=False, verbose=False, :return: updated record status and updated record """ action = EntityUpdateAction.UPTODATE - pid = self.get('pid') + pid = self.get("pid") try: if data := get_mef_data_by_type( - entity_type=self.type, - pid_type='mef', - pid=pid, - verbose=verbose): - data['$schema'] = self['$schema'] - if data.get('deleted'): + entity_type=self.type, pid_type="mef", pid=pid, verbose=verbose + ): + data["$schema"] = self["$schema"] + if data.get("deleted"): current_app.logger.warning( - f'UPDATE ONLINE {self.type} (pid:{pid}): was deleted') + f"UPDATE ONLINE {self.type} (pid:{pid}): was deleted" + ) action = EntityUpdateAction.ERROR - elif not data.get('sources'): + elif not data.get("sources"): current_app.logger.warning( - f'UPDATE ONLINE {self.type} (pid:{pid}): ' - f'has no sources' + f"UPDATE ONLINE {self.type} (pid:{pid}): " f"has no sources" ) action = EntityUpdateAction.ERROR - elif not data.get('type'): + elif not data.get("type"): current_app.logger.warning( - f'UPDATE ONLINE {self.type} (pid:{pid}): has no type') + f"UPDATE ONLINE {self.type} (pid:{pid}): has no type" + ) action = EntityUpdateAction.ERROR elif dict(self) != data: action = EntityUpdateAction.REPLACE @@ -220,22 +219,18 @@ def update_online(self, dbcommit=False, reindex=False, verbose=False, indexer.process_bulk_queue() except Exception as err: action = EntityUpdateAction.ERROR - current_app.logger.warning(f'UPDATE ONLINE {pid}: {err}') + current_app.logger.warning(f"UPDATE ONLINE {pid}: {err}") return action, self def source_pids(self): """Get agents pids.""" - sources = current_app.config.get('RERO_ILS_AGENTS_SOURCES', []) - return { - source: self[source]['pid'] - for source in sources - if source in self - } + sources = current_app.config.get("RERO_ILS_AGENTS_SOURCES", []) + return {source: self[source]["pid"] for source in sources if source in self} def _get_mef_localized_value(self, key, language): """Get the 1st localized value for given key among MEF source list.""" - order = current_app.config.get('RERO_ILS_AGENTS_LABEL_ORDER', []) - source_order = order.get(language, order.get(order['fallback'], [])) + order = current_app.config.get("RERO_ILS_AGENTS_LABEL_ORDER", []) + source_order = order.get(language, order.get(order["fallback"], [])) for source in source_order: if value := self.get(source, {}).get(key, None): return value @@ -253,4 +248,4 @@ def bulk_index(self, record_id_iterator): :param record_id_iterator: Iterator yielding record UUIDs. 
""" - super().bulk_index(record_id_iterator, doc_type='rement') + super().bulk_index(record_id_iterator, doc_type="rement") diff --git a/rero_ils/modules/entities/remote_entities/cli.py b/rero_ils/modules/entities/remote_entities/cli.py index 1e65120c97..f57c0750d3 100644 --- a/rero_ils/modules/entities/remote_entities/cli.py +++ b/rero_ils/modules/entities/remote_entities/cli.py @@ -33,21 +33,21 @@ def entity(): @entity.command() -@click.option('-q', '--query', default='*') -@click.option('-n', '--dry-run', is_flag=True, default=False) -@click.option('-d', '--from-last-date', is_flag=True, default=False) -@click.option('-v', '--verbose', count=True, default=0) -@click.option('-l', '--log-dir', default=None) -@click.option('-f', '--from-date', - type=click.DateTime(formats=["%Y-%m-%d"]), default=None) -@click.option('-m', '--in-memory', is_flag=True, default=False) +@click.option("-q", "--query", default="*") +@click.option("-n", "--dry-run", is_flag=True, default=False) +@click.option("-d", "--from-last-date", is_flag=True, default=False) +@click.option("-v", "--verbose", count=True, default=0) +@click.option("-l", "--log-dir", default=None) +@click.option( + "-f", "--from-date", type=click.DateTime(formats=["%Y-%m-%d"]), default=None +) +@click.option("-m", "--in-memory", is_flag=True, default=False) @with_appcontext -def sync(query, dry_run, from_last_date, verbose, log_dir, from_date, - in_memory): +def sync(query, dry_run, from_last_date, verbose, log_dir, from_date, in_memory): """Updated the MEF records and the linked documents.""" sync_entity = SyncEntity( - dry_run=dry_run, verbose=verbose, log_dir=log_dir, - from_last_date=from_last_date) + dry_run=dry_run, verbose=verbose, log_dir=log_dir, from_last_date=from_last_date + ) if verbose: sync_entity.sync(query, from_date) else: @@ -60,8 +60,7 @@ def sync(query, dry_run, from_last_date, verbose, log_dir, from_date, err_pids = [] with click.progressbar(pids, length=total) as bar: for pid in bar: - current_doc_updated, updated, error = sync_entity.sync_record( - pid) + current_doc_updated, updated, error = sync_entity.sync_record(pid) doc_updated.update(current_doc_updated) if updated: n_updated += 1 @@ -70,14 +69,14 @@ def sync(query, dry_run, from_last_date, verbose, log_dir, from_date, n_doc_updated = len(doc_updated) sync_entity.end_sync(n_doc_updated, n_updated, err_pids) if err_pids: - click.secho(f'ERROR: MEF pids: {err_pids}', fg='red') + click.secho(f"ERROR: MEF pids: {err_pids}", fg="red") @entity.command() -@click.option('-q', '--query', default='*') -@click.option('-n', '--dry-run', is_flag=True, default=False) -@click.option('-v', '--verbose', count=True, default=0) -@click.option('-l', '--log-dir', default=None) +@click.option("-q", "--query", default="*") +@click.option("-n", "--dry-run", is_flag=True, default=False) +@click.option("-v", "--verbose", count=True, default=0) +@click.option("-l", "--log-dir", default=None) @with_appcontext def clean(query, dry_run, verbose, log_dir): """Removes MEF records that are not linked to documents.""" @@ -96,57 +95,52 @@ def clean(query, dry_run, verbose, log_dir): except Exception: err_pids.append(pid) - click.secho(f'{n_removed} removed MEF records', fg='green') + click.secho(f"{n_removed} removed MEF records", fg="green") if err_pids: - click.secho(f'ERROR: MEF pids: {err_pids}', fg='red') + click.secho(f"ERROR: MEF pids: {err_pids}", fg="red") @entity.command() -@click.option('-c', '--clear', is_flag=True, default=False) -@click.option('-v', '--verbose', count=True, default=0) 
+@click.option("-c", "--clear", is_flag=True, default=False) +@click.option("-v", "--verbose", count=True, default=0) @with_appcontext def sync_errors(clear, verbose): """Removes errors in the cache information.""" errors = SyncEntity.get_errors() if verbose: - click.echo(f'Errors MEF pids: {errors}') + click.echo(f"Errors MEF pids: {errors}") if clear: SyncEntity.clear_errors() - click.secho(f'Removed {len(errors)} errors', fg='yellow') + click.secho(f"Removed {len(errors)} errors", fg="yellow") -@entity.command('replace-identified-by') -@click.option('-f', '--field', multiple=True, default=None) -@click.option('-n', '--dry-run', is_flag=True, default=False) -@click.option('-v', '--verbose', count=True, default=0) -@click.option('-l', '--log-dir', default=None) +@entity.command("replace-identified-by") +@click.option("-f", "--field", multiple=True, default=None) +@click.option("-n", "--dry-run", is_flag=True, default=False) +@click.option("-v", "--verbose", count=True, default=0) +@click.option("-l", "--log-dir", default=None) @with_appcontext def replace_identified_by_cli(field, dry_run, verbose, log_dir): """Replace identifiedBy with $ref.""" for parent in field or ReplaceIdentifiedBy.fields: replace_identified_by = ReplaceIdentifiedBy( - field=parent, - verbose=verbose, - dry_run=dry_run, - log_dir=log_dir + field=parent, verbose=verbose, dry_run=dry_run, log_dir=log_dir ) changed, not_found, rero_only = replace_identified_by.run() click.secho( - f'{parent:<12} | Changed: {changed} | ' - f'Not found: {not_found} | ' - f'RERO only: {rero_only}', - fg='green' + f"{parent:<12} | Changed: {changed} | " + f"Not found: {not_found} | " + f"RERO only: {rero_only}", + fg="green", ) if verbose: - if replace_identified_by._error_count( - replace_identified_by.not_found): - click.secho('Not found:', fg='yellow') + if replace_identified_by._error_count(replace_identified_by.not_found): + click.secho("Not found:", fg="yellow") for etype, values in replace_identified_by.not_found.items(): for pid, data in values.items(): - click.echo(f'\t{etype} {pid}: {data}') - if replace_identified_by._error_count( - replace_identified_by.rero_only): - click.secho('RERO only:', fg='yellow') + click.echo(f"\t{etype} {pid}: {data}") + if replace_identified_by._error_count(replace_identified_by.rero_only): + click.secho("RERO only:", fg="yellow") for etype, values in replace_identified_by.rero_only.items(): for pid, data in values.items(): - click.echo(f'\t{pid}: {data}') + click.echo(f"\t{pid}: {data}") diff --git a/rero_ils/modules/entities/remote_entities/models.py b/rero_ils/modules/entities/remote_entities/models.py index 075dc96e29..8797488592 100644 --- a/rero_ils/modules/entities/remote_entities/models.py +++ b/rero_ils/modules/entities/remote_entities/models.py @@ -28,11 +28,11 @@ class RemoteEntityIdentifier(RecordIdentifier): """Sequence generator for `Remote Entity` identifiers.""" - __tablename__ = 'remote_entity_id' - __mapper_args__ = {'concrete': True} + __tablename__ = "remote_entity_id" + __mapper_args__ = {"concrete": True} recid = db.Column( - db.BigInteger().with_variant(db.Integer, 'sqlite'), + db.BigInteger().with_variant(db.Integer, "sqlite"), primary_key=True, autoincrement=True, ) @@ -41,12 +41,12 @@ class RemoteEntityIdentifier(RecordIdentifier): class RemoteEntityMetadata(db.Model, RecordMetadataBase): """Remote Entity record metadata.""" - __tablename__ = 'remote_entity_metadata' + __tablename__ = "remote_entity_metadata" class EntityUpdateAction: """Class holding all available agent 
record creation actions.""" - REPLACE = 'replace' - UPTODATE = 'uptodate' - ERROR = 'error' + REPLACE = "replace" + UPTODATE = "uptodate" + ERROR = "error" diff --git a/rero_ils/modules/entities/remote_entities/proxy.py b/rero_ils/modules/entities/remote_entities/proxy.py index d7d3cf8b00..75e50c46a8 100644 --- a/rero_ils/modules/entities/remote_entities/proxy.py +++ b/rero_ils/modules/entities/remote_entities/proxy.py @@ -56,50 +56,41 @@ def create_proxy(category): # DEV NOTES :: `agents` isn't yet used, but could be ASAP. This is why # it's already configured. proxy_config = { - 'agents': { - 'class': MefAgentsProxy, - 'entities': (EntityType.PERSON, EntityType.ORGANISATION) + "agents": { + "class": MefAgentsProxy, + "entities": (EntityType.PERSON, EntityType.ORGANISATION), }, - 'person': { - 'class': MefAgentsProxy, - 'entities': (EntityType.PERSON,) + "person": {"class": MefAgentsProxy, "entities": (EntityType.PERSON,)}, + "organisation": { + "class": MefAgentsProxy, + "entities": (EntityType.ORGANISATION,), }, - 'organisation': { - 'class': MefAgentsProxy, - 'entities': (EntityType.ORGANISATION,) + "concepts": { + "class": MefConceptsProxy, + "entities": (EntityType.TOPIC, EntityType.TEMPORAL), }, - 'concepts': { - 'class': MefConceptsProxy, - 'entities': (EntityType.TOPIC, EntityType.TEMPORAL) + "topics": {"class": MefConceptsProxy, "entities": (EntityType.TOPIC,)}, + "temporals": { + "class": MefConceptsProxy, + "entities": (EntityType.TEMPORAL,), }, - 'topics': { - 'class': MefConceptsProxy, - 'entities': (EntityType.TOPIC,) - }, - 'temporals': { - 'class': MefConceptsProxy, - 'entities': (EntityType.TEMPORAL, ) - }, - 'concepts-genreForm': { - 'class': MefConceptsGenreFormProxy, - 'entities': (EntityType.TOPIC,) - }, - 'places': { - 'class': MefPlacesProxy, - 'entities': (EntityType.PLACE, ) + "concepts-genreForm": { + "class": MefConceptsGenreFormProxy, + "entities": (EntityType.TOPIC,), }, + "places": {"class": MefPlacesProxy, "entities": (EntityType.PLACE,)}, } # Create proxy configuration aliases - proxy_config[EntityType.PERSON] = proxy_config['person'] - proxy_config[EntityType.ORGANISATION] = proxy_config['organisation'] - proxy_config[EntityType.TOPIC] = proxy_config['topics'] - proxy_config[EntityType.TEMPORAL] = proxy_config['temporals'] - proxy_config[EntityType.PLACE] = proxy_config['places'] + proxy_config[EntityType.PERSON] = proxy_config["person"] + proxy_config[EntityType.ORGANISATION] = proxy_config["organisation"] + proxy_config[EntityType.TOPIC] = proxy_config["topics"] + proxy_config[EntityType.TEMPORAL] = proxy_config["temporals"] + proxy_config[EntityType.PLACE] = proxy_config["places"] # Try to create the proxy, otherwise raise a ValueError if data := proxy_config.get(category): - return data['class'](*(data['entities'])) - raise ValueError(f'Unable to find a MEF factory for {category}') + return data["class"](*(data["entities"])) + raise ValueError(f"Unable to find a MEF factory for {category}") class MEFProxyMixin: @@ -119,24 +110,26 @@ class MEFProxyMixin: # Headers that should be excluded from remote MEF system response. 
excluded_headers = [ - 'Content-Encoding', - 'Content-Length', - 'Transfer-Encoding', - 'Connection' + "Content-Encoding", + "Content-Length", + "Transfer-Encoding", + "Connection", ] mef_entrypoint = None # Must be overridden by subclasses def __init__(self, *args): """Magic initialization method.""" self.entity_types = args - self.sources = current_app.config \ - .get('RERO_ILS_MEF_CONFIG', {}) \ - .get(self.mef_entrypoint, {}) \ - .get('sources', []) - self.filters = current_app.config \ - .get('RERO_ILS_MEF_CONFIG', {}) \ - .get(self.mef_entrypoint, {}) \ - .get('filters', []) + self.sources = ( + current_app.config.get("RERO_ILS_MEF_CONFIG", {}) + .get(self.mef_entrypoint, {}) + .get("sources", []) + ) + self.filters = ( + current_app.config.get("RERO_ILS_MEF_CONFIG", {}) + .get(self.mef_entrypoint, {}) + .get("filters", []) + ) def search(self, term): """Search specific term on MEF authority system. @@ -149,9 +142,7 @@ def search(self, term): # Call the remote MEF server removing the 'Host' headers from initial # request to avoid security problems. request_headers = { - key: value - for key, value in request.headers - if key != 'Host' + key: value for key, value in request.headers if key != "Host" } response = requests.request( method=request.method, @@ -159,7 +150,7 @@ def search(self, term): headers=request_headers, data=request.get_data(), cookies=request.cookies, - allow_redirects=True + allow_redirects=True, ) # If remote server response failed, raise this HTTP error through a @@ -170,7 +161,7 @@ def search(self, term): # Post-process the result hits to get a standard format against all # format possibility depending on entity type searched. content = json.loads(response.content) - for hit in content.get('hits', {}).get('hits', []): + for hit in content.get("hits", {}).get("hits", []): self._post_process_result_hit(hit) # Finally, return a flask `Response` from a `request.Response`. All @@ -198,13 +189,13 @@ def _build_filter_value(value): return f'({" OR ".join(value)})' return f'"{str(value)}"' - query_params = [f'((autocomplete_name:{term})^2 OR {term})'] + query_params = [f"((autocomplete_name:{term})^2 OR {term})"] if self.sources: - query_params.append(f'sources:{_build_filter_value(self.sources)}') + query_params.append(f"sources:{_build_filter_value(self.sources)}") for filter_field in self.filters: for key, value in filter_field.items(): filter_value = _build_filter_value(value) - query_params.append(f'{key}:{filter_value}') + query_params.append(f"{key}:{filter_value}") return query_params def _build_url(self, term): @@ -215,9 +206,9 @@ def _build_url(self, term): :returns: the MEF URL to call to get response hits. :rtype: str """ - query = quote_plus(' AND '.join(self._get_query_params(term))) + query = quote_plus(" AND ".join(self._get_query_params(term))) base_url = get_mef_url(self.mef_entrypoint) - return f'{base_url}/mef?q={query}&page=1&size=10&facets=' + return f"{base_url}/mef?q={query}&page=1&size=10&facets=" def _post_process_result_hit(self, hit): """Modify a MEF hit response to return a standardized hit.""" @@ -225,23 +216,25 @@ def _post_process_result_hit(self, hit): # This URI is the direct access for the source metadata on the remote # MEF authority server. 
# TODO :: this URI should be returned by MEF API - if not (metadata := hit.get('metadata')): + if not (metadata := hit.get("metadata")): return base_url = get_mef_url(self.mef_entrypoint) for source_name in self.sources: if not (src_data := metadata.get(source_name)): continue - src_data.setdefault('identifiedBy', []).append({ - 'source': 'mef', - 'type': 'uri', - 'value': f'{base_url}/{source_name}/{src_data["pid"]}' - }) + src_data.setdefault("identifiedBy", []).append( + { + "source": "mef", + "type": "uri", + "value": f'{base_url}/{source_name}/{src_data["pid"]}', + } + ) class MefAgentsProxy(MEFProxyMixin): """Proxy on RERO-MEF authority system when searching for `agents`.""" - mef_entrypoint = 'agents' + mef_entrypoint = "agents" def _get_query_params(self, term): """Get all parameters to use to build the MEF query. @@ -258,7 +251,7 @@ def _get_query_params(self, term): ent_types = [] for _type in self.entity_types: _type = _type.replace(":", "\\:") - ent_types.append(f'type:{_type}') + ent_types.append(f"type:{_type}") params += [f'({" OR ".join(ent_types)})'] return params @@ -271,24 +264,22 @@ def _post_process_result_hit(self, hit): :param hit: an elasticSearch hit already parsed as a dictionary. """ - if not (metadata := hit.get('metadata', {})): + if not (metadata := hit.get("metadata", {})): return for source_name in self.sources: if not (src_data := metadata.get(source_name)): continue - if identifier := src_data.pop('identifier', None): - src_data.setdefault('identifiedBy', []).append({ - 'source': source_name, - 'type': 'uri', - 'value': identifier - }) + if identifier := src_data.pop("identifier", None): + src_data.setdefault("identifiedBy", []).append( + {"source": source_name, "type": "uri", "value": identifier} + ) super()._post_process_result_hit(hit) class MefConceptsProxy(MEFProxyMixin): """Proxy on RERO-MEF authority system when searching for `concepts`.""" - mef_entrypoint = 'concepts' + mef_entrypoint = "concepts" def _get_query_params(self, term): """Get all parameters to use to build the MEF query. @@ -305,7 +296,7 @@ def _get_query_params(self, term): ent_types = [] for _type in self.entity_types: _type = _type.replace(":", "\\:") - ent_types.append(f'type:{_type}') + ent_types.append(f"type:{_type}") params += [f'({" OR ".join(ent_types)})'] return params @@ -318,7 +309,7 @@ def _post_process_result_hit(self, hit): :param hit: an elasticSearch hit already parsed as a dictionary. 
""" - if not (metadata := hit.get('metadata', {})): + if not (metadata := hit.get("metadata", {})): return super()._post_process_result_hit(hit) @@ -326,10 +317,10 @@ def _post_process_result_hit(self, hit): class MefConceptsGenreFormProxy(MefConceptsProxy): """Proxy on RERO-MEF authority system for specific `genreForm` concepts.""" - mef_entrypoint = 'concepts-genreForm' + mef_entrypoint = "concepts-genreForm" class MefPlacesProxy(MEFProxyMixin): """Proxy on RERO-MEF authority system when searching for `places`.""" - mef_entrypoint = 'places' + mef_entrypoint = "places" diff --git a/rero_ils/modules/entities/remote_entities/replace.py b/rero_ils/modules/entities/remote_entities/replace.py index bd270d10fd..05b1cfe884 100644 --- a/rero_ils/modules/entities/remote_entities/replace.py +++ b/rero_ils/modules/entities/remote_entities/replace.py @@ -27,12 +27,11 @@ from sqlalchemy.orm.exc import NoResultFound from rero_ils.modules.documents.api import Document, DocumentsSearch -from rero_ils.modules.utils import get_mef_url, get_timestamp, \ - requests_retry_session +from rero_ils.modules.utils import get_mef_url, get_timestamp, requests_retry_session from rero_ils.modules.utils import set_timestamp as utils_set_timestamp -from .api import RemoteEntity from ..logger import create_logger +from .api import RemoteEntity class ReplaceIdentifiedBy(object): @@ -51,11 +50,10 @@ class ReplaceIdentifiedBy(object): # (therefore not changed). """ - fields = ('contribution', 'subjects', 'genreForm') - timestamp_name = 'replace_identified_by' + fields = ("contribution", "subjects", "genreForm") + timestamp_name = "replace_identified_by" - def __init__(self, field, dry_run=False, verbose=False, - log_dir=None): + def __init__(self, field, dry_run=False, verbose=False, log_dir=None): """Constructor. :param field: field type [contribution, subjects, genreForm] @@ -65,12 +63,12 @@ def __init__(self, field, dry_run=False, verbose=False, self.field = field self.dry_run = dry_run self.verbose = verbose - self.entity_types = current_app.config['RERO_ILS_ENTITY_TYPES'] + self.entity_types = current_app.config["RERO_ILS_ENTITY_TYPES"] self.logger = create_logger( - name='ReplaceIdentifiedBy', - file_name='replace_identifiedby.log', + name="ReplaceIdentifiedBy", + file_name="replace_identifiedby.log", log_dir=log_dir, - verbose=verbose + verbose=verbose, ) self.changed = 0 self.rero_only = {} @@ -80,7 +78,7 @@ def _get_base_url(self, entity_type): """Get MEF base URL.""" if base_url := get_mef_url(entity_type): return base_url - raise KeyError(f'Unable to find MEF base url for {entity_type}') + raise KeyError(f"Unable to find MEF base url for {entity_type}") def _get_latest(self, entity_type, source, pid): """Query the MEF server to retrieve the last MEF for a given entity id. @@ -90,11 +88,11 @@ def _get_latest(self, entity_type, source, pid): :returns: dictionary representing the MEF record. :rtype: dictionary. 
""" - url = f'{self._get_base_url(entity_type)}/mef/latest/{source}:{pid}' + url = f"{self._get_base_url(entity_type)}/mef/latest/{source}:{pid}" res = requests_retry_session().get(url) if res.status_code == requests.codes.ok: return res.json() - self.logger.warning(f'Problem get {url}: {res.status_code}') + self.logger.warning(f"Problem get {url}: {res.status_code}") return {} def _find_other_source(self, source, mef_data): @@ -105,21 +103,23 @@ def _find_other_source(self, source, mef_data): :params mef_data: mef data to find other source :returns: found source and source pid """ - if source in ('idref', 'gnd'): - return source, mef_data[source]['pid'] - elif source == 'rero': - for new_source in ('idref', 'gnd'): + if source in ("idref", "gnd"): + return source, mef_data[source]["pid"] + elif source == "rero": + for new_source in ("idref", "gnd"): if source_data := mef_data.get(new_source): - return new_source, source_data['pid'] + return new_source, source_data["pid"] return None, None @property def query(self): """ES query for documents with identifiedBy and entity types.""" - entity_types = list(current_app.config['RERO_ILS_ENTITY_TYPES'].keys()) - return DocumentsSearch() \ - .filter('exists', field=f'{self.field}.entity.identifiedBy') \ - .filter({'terms': {f'{self.field}.entity.type': entity_types}}) + entity_types = list(current_app.config["RERO_ILS_ENTITY_TYPES"].keys()) + return ( + DocumentsSearch() + .filter("exists", field=f"{self.field}.entity.identifiedBy") + .filter({"terms": {f"{self.field}.entity.type": entity_types}}) + ) def count(self): """Get count of Documents with identifiedBy.""" @@ -131,22 +131,17 @@ def _create_entity(self, mef_type, mef_data): :param mef_type: MEF type (agent, concept) :param mef_data: MEF data for entity. """ - if not RemoteEntity.get_record_by_pid(mef_data['pid']): + if not RemoteEntity.get_record_by_pid(mef_data["pid"]): if not self.dry_run: new_mef_data = deepcopy(mef_data) - fields_to_remove = ['$schema', '_created', '_updated'] + fields_to_remove = ["$schema", "_created", "_updated"] for field in fields_to_remove: new_mef_data.pop(field, None) # TODO: try to optimize with parent commit and reindex # bulk operation - RemoteEntity.create( - data=new_mef_data, - dbcommit=True, - reindex=True - ) + RemoteEntity.create(data=new_mef_data, dbcommit=True, reindex=True) self.logger.info( - f'Create a new MEF {mef_type} ' - f'record(pid: {mef_data["pid"]})' + f"Create a new MEF {mef_type} " f'record(pid: {mef_data["pid"]})' ) def _do_entity(self, entity, doc_pid): @@ -157,77 +152,79 @@ def _do_entity(self, entity, doc_pid): :returns: changed """ changed = False - doc_entity_type = entity['entity']['type'] + doc_entity_type = entity["entity"]["type"] self.not_found.setdefault(doc_entity_type, {}) self.rero_only.setdefault(doc_entity_type, {}) if mef_type := self.entity_types.get(doc_entity_type): - source_pid = entity['entity']['identifiedBy']['value'] - source = entity['entity']['identifiedBy']['type'].lower() - identifier = f'{source}:{source_pid}' + source_pid = entity["entity"]["identifiedBy"]["value"] + source = entity["entity"]["identifiedBy"]["type"].lower() + identifier = f"{source}:{source_pid}" if ( - identifier in self.not_found[doc_entity_type] or - identifier in self.rero_only[doc_entity_type] + identifier in self.not_found[doc_entity_type] + or identifier in self.rero_only[doc_entity_type] ): # MEF was not found previously. Do not try it again. 
return None if mef_data := self._get_latest(mef_type, source, source_pid): new_source, new_source_pid = self._find_other_source( - source=source, - mef_data=mef_data + source=source, mef_data=mef_data ) if new_source: - mef_entity_type = mef_data.get('type') + mef_entity_type = mef_data.get("type") # verify local and MEF type are the same if mef_entity_type == doc_entity_type: self._create_entity(mef_type, mef_data) - authorized_access_point = entity[ - "entity"]["authorized_access_point"] - mef_authorized_access_point = mef_data[ - new_source]["authorized_access_point"] + authorized_access_point = entity["entity"][ + "authorized_access_point" + ] + mef_authorized_access_point = mef_data[new_source][ + "authorized_access_point" + ] self.logger.info( - f'Replace document:{doc_pid} ' + f"Replace document:{doc_pid} " f'{self.field} "{authorized_access_point}" - ' f'({mef_type}:{mef_data["pid"]}) ' - f'{new_source}:{new_source_pid} ' + f"{new_source}:{new_source_pid} " f'"{mef_authorized_access_point}"' ) - entity['entity'] = { - '$ref': ( - f'{self._get_base_url(mef_type)}' - f'/{new_source}/{new_source_pid}' + entity["entity"] = { + "$ref": ( + f"{self._get_base_url(mef_type)}" + f"/{new_source}/{new_source_pid}" ), - 'pid': mef_data['pid'] + "pid": mef_data["pid"], } changed = True else: - authorized_access_point = mef_data.get( - source, {}).get('authorized_access_point') + authorized_access_point = mef_data.get(source, {}).get( + "authorized_access_point" + ) info = ( - f'{doc_entity_type} != {mef_entity_type} ' + f"{doc_entity_type} != {mef_entity_type} " f': "{authorized_access_point}"' ) self.rero_only[doc_entity_type][identifier] = info self.logger.warning( - f'Type differ:{doc_pid} ' - f'{self.field} - ({mef_type}) {identifier} {info}' + f"Type differ:{doc_pid} " + f"{self.field} - ({mef_type}) {identifier} {info}" ) else: - authorized_access_point = mef_data.get( - source, {}).get('authorized_access_point') - info = f'{authorized_access_point}' + authorized_access_point = mef_data.get(source, {}).get( + "authorized_access_point" + ) + info = f"{authorized_access_point}" self.rero_only[doc_entity_type][identifier] = info self.logger.info( - f'No other source found for document:{doc_pid} ' - f'{self.field} - ({mef_type}|{doc_entity_type}) ' + f"No other source found for document:{doc_pid} " + f"{self.field} - ({mef_type}|{doc_entity_type}) " f'{identifier} "{info}"' ) else: - authorized_access_point = entity[ - 'entity']['authorized_access_point'] - info = f'{authorized_access_point}' + authorized_access_point = entity["entity"]["authorized_access_point"] + info = f"{authorized_access_point}" self.not_found[doc_entity_type][identifier] = info self.logger.info( - f'No MEF found for document:{doc_pid} ' + f"No MEF found for document:{doc_pid} " f' - ({mef_type}) {identifier} "{info}"' ) self.not_found = {k: v for k, v in self.not_found.items() if v} @@ -243,16 +240,14 @@ def _replace_entities_in_document(self, doc_id): with contextlib.suppress(NoResultFound): doc = Document.get_record(doc_id) entities_to_update = filter( - lambda c: c.get('entity', {}).get('identifiedBy'), - doc.get(self.field, {}) + lambda c: c.get("entity", {}).get("identifiedBy"), + doc.get(self.field, {}), ) for entity in entities_to_update: try: changed = self._do_entity(entity, doc.pid) or changed except Exception as err: - self.logger.error( - f'Error document:{doc.pid} {entity} {err}"' - ) + self.logger.error(f'Error document:{doc.pid} {entity} {err}"') if changed: return doc @@ -265,25 +260,28 @@ def run(self): 
self.changed = 0 self.not_found = {} self.rero_only = {} - self.logger.info( - f'Found {self.field} identifiedBy: {self.count()}') - query = self.query \ - .params(preserve_order=True) \ - .sort({'_created': {'order': 'asc'}}) \ - .source(['pid', self.field]) + self.logger.info(f"Found {self.field} identifiedBy: {self.count()}") + query = ( + self.query.params(preserve_order=True) + .sort({"_created": {"order": "asc"}}) + .source(["pid", self.field]) + ) for hit in list(query.scan()): if doc := self._replace_entities_in_document(hit.meta.id): self.changed += 1 if not self.dry_run: doc.update(data=doc, dbcommit=True, reindex=True) self.set_timestamp() - return self.changed, self._error_count(self.not_found), \ - self._error_count(self.rero_only) + return ( + self.changed, + self._error_count(self.not_found), + self._error_count(self.rero_only), + ) def get_timestamp(self): """Get time stamp.""" - if data := get_timestamp('replace_identified_by'): - data.pop('name', None) + if data := get_timestamp("replace_identified_by"): + data.pop("name", None) return data or {} def set_timestamp(self): @@ -299,9 +297,9 @@ def set_timestamp(self): # not found: no entity was found. # rero only: entity was found but has only `rero` as source. data[self.field] = { - 'changed': self.changed, - 'not found': self._error_count(self.not_found), - 'rero only': self._error_count(self.rero_only), - 'time': datetime.now(timezone.utc), + "changed": self.changed, + "not found": self._error_count(self.not_found), + "rero only": self._error_count(self.rero_only), + "time": datetime.now(timezone.utc), } utils_set_timestamp(self.timestamp_name, **data) diff --git a/rero_ils/modules/entities/remote_entities/sync.py b/rero_ils/modules/entities/remote_entities/sync.py index 1a0d14e0d7..09b426f3c0 100644 --- a/rero_ils/modules/entities/remote_entities/sync.py +++ b/rero_ils/modules/entities/remote_entities/sync.py @@ -29,18 +29,23 @@ from rero_ils.modules.commons.exceptions import RecordNotFound from rero_ils.modules.documents.api import Document -from rero_ils.modules.utils import get_mef_url, get_timestamp, \ - requests_retry_session, set_timestamp +from rero_ils.modules.utils import ( + get_mef_url, + get_timestamp, + requests_retry_session, + set_timestamp, +) -from .api import RemoteEntitiesSearch, RemoteEntity from ..logger import create_logger +from .api import RemoteEntitiesSearch, RemoteEntity class SyncEntity: """Entity MEF synchronization.""" - def __init__(self, dry_run=False, verbose=False, log_dir=None, - from_last_date=False): + def __init__( + self, dry_run=False, verbose=False, log_dir=None, from_last_date=False + ): """Constructor. :param dry_run: bool - if true the data are not modified @@ -54,19 +59,19 @@ def __init__(self, dry_run=False, verbose=False, log_dir=None, self.from_date = None self.start_timestamp = None self.logger = create_logger( - name='SyncEntity', - file_name='sync_mef.log', + name="SyncEntity", + file_name="sync_mef.log", log_dir=log_dir, - verbose=verbose + verbose=verbose, ) if from_last_date: self._get_last_date() def _get_last_date(self): """Get the date of the last execution of the synchronization.""" - data = get_timestamp('sync_entities') - if data and data.get('start_timestamp'): - self.from_date = data.get('start_timestamp') + data = get_timestamp("sync_entities") + if data and data.get("start_timestamp"): + self.from_date = data.get("start_timestamp") def _entity_are_different(self, entity1, entity2): """Check if two entities are different. 
@@ -80,14 +85,12 @@ def _entity_are_different(self, entity1, entity2): def remove_fields(entity): """Remove specific fields.""" - fields_to_remove = [ - '$schema', 'organisation', '_created', '_updated' - ] + fields_to_remove = ["$schema", "organisation", "_created", "_updated"] for field in fields_to_remove: entity.pop(field, None) - fields_to_remove = ['$schema', 'md5'] - for source in entity['sources']: + fields_to_remove = ["$schema", "md5"] + for source in entity["sources"]: for field in fields_to_remove: entity[source].pop(field, None) return entity @@ -95,11 +98,12 @@ def remove_fields(entity): diff = DeepDiff( remove_fields(deepcopy(entity1)), remove_fields(deepcopy(entity2)), - ignore_order=True) + ignore_order=True, + ) if diff: self.logger.debug( - f"Entity differs: {entity1['pid']}, {entity2['pid']}", - diff) + f"Entity differs: {entity1['pid']}, {entity2['pid']}", diff + ) return True return False @@ -114,13 +118,13 @@ def _get_latest(self, entity_type, source, pid): :rtype: dictionary. """ if not (base_url := get_mef_url(entity_type)): - msg = f'Unable to find MEF base url for {entity_type}' + msg = f"Unable to find MEF base url for {entity_type}" raise KeyError(msg) - url = f'{base_url}/mef/latest/{source}:{pid}' + url = f"{base_url}/mef/latest/{source}:{pid}" res = requests_retry_session().get(url) if res.status_code == requests.codes.ok: return res.json() - self.logger.debug(f'Problem get {url}: {res.status_code}') + self.logger.debug(f"Problem get {url}: {res.status_code}") return {} def _update_entities_in_document(self, doc_pid, pids_to_replace): @@ -138,33 +142,35 @@ def _update_entities_in_document(self, doc_pid, pids_to_replace): # get all entities from the document over all entity fields: # contribution and subjects remote_entities = [] - for field in ['contribution', 'subjects', 'genreForm']: + for field in ["contribution", "subjects", "genreForm"]: remote_entities += [ - entity['entity'] + entity["entity"] for entity in doc.get(field, []) - if entity.get('entity', {}).get('$ref') + if entity.get("entity", {}).get("$ref") ] if not remote_entities: - self.logger.debug(f'No entity to update for document {doc.pid}') + self.logger.debug(f"No entity to update for document {doc.pid}") # update the $ref entity URL and MEF pid for mef_url, (old_pid, new_pid) in pids_to_replace.items(): - old_entity_url = f'{mef_url}/{old_pid}' - new_entity_url = f'{mef_url}/{new_pid}' + old_entity_url = f"{mef_url}/{old_pid}" + new_entity_url = f"{mef_url}/{new_pid}" entities_to_update = filter( - lambda c: c.get('$ref') == old_entity_url, remote_entities) + lambda c: c.get("$ref") == old_entity_url, remote_entities + ) for entity in entities_to_update: if old_entity_url != new_entity_url: self.logger.info( - f'Entitiy URL changed from {old_entity_url} to ' - f'{new_entity_url} for document {doc.pid}') + f"Entitiy URL changed from {old_entity_url} to " + f"{new_entity_url} for document {doc.pid}" + ) # update the entity URL - entity['$ref'] = new_entity_url + entity["$ref"] = new_entity_url # in any case we update the doc as the mef pid can be changed if not self.dry_run: doc.replace(doc, dbcommit=True, reindex=True) - def get_entities_pids(self, query='*', from_date=None): + def get_entities_pids(self, query="*", from_date=None): """Get contributions identifiers. :param query: (string) a query to select the MEF record to be updated. @@ -173,12 +179,12 @@ def get_entities_pids(self, query='*', from_date=None): :returns: the list of the contribution identifiers. :rtype: list of strings. 
""" - es_query = RemoteEntitiesSearch().filter('query_string', query=query) + es_query = RemoteEntitiesSearch().filter("query_string", query=query) total = es_query.count() if not from_date and self.from_date: from_date = self.from_date if from_date: - self.logger.info(f'Get records updated after: {from_date}') + self.logger.info(f"Get records updated after: {from_date}") def get_mef_pids(es_query, chunk_size=1000): """Get the identifiers from elasticsearch. @@ -191,25 +197,29 @@ def get_mef_pids(es_query, chunk_size=1000): The scroll is done using the slice scroll feature: https://www.elastic.co/guide/en/elasticsearch/reference/8.5/paginate-search-results.html#slice-scroll """ - self.logger.info(f'Processing: {total} MEF records') - if total > 2*chunk_size: - n_part = int(total/chunk_size) + self.logger.info(f"Processing: {total} MEF records") + if total > 2 * chunk_size: + n_part = int(total / chunk_size) for i in range(n_part): # processing the slice should be faster than 30m - for hit in es_query.extra( - slice={"id": i, "max": n_part}).params( - scroll='30m').source('pid').scan(): + for hit in ( + es_query.extra(slice={"id": i, "max": n_part}) + .params(scroll="30m") + .source("pid") + .scan() + ): yield hit.pid # no need to slice as the part is smaller than the number # of results # the results can be in memory as it is small else: - for hit in list(es_query.params(scroll='30m').scan()): + for hit in list(es_query.params(scroll="30m").scan()): yield hit.pid # ask the MEF server to know which MEF pids has been updated # from a given date if from_date: + def get_updated_mef(pids, chunk_size): """Ask the MEF server using chunks. @@ -219,7 +229,7 @@ def get_updated_mef(pids, chunk_size): # MEF urls for updated pids urls = [ f'{get_mef_url("agents")}/mef/updated', - f'{get_mef_url("concepts")}/mef/updated' + f'{get_mef_url("concepts")}/mef/updated', ] # number of provided updated MEF pids n_provided = 0 @@ -231,9 +241,8 @@ def get_updated_mef(pids, chunk_size): res = requests_retry_session().post( url, json=dict( - from_date=from_date.strftime("%Y-%m-%d"), - pids=chunk - ) + from_date=from_date.strftime("%Y-%m-%d"), pids=chunk + ), ) if res.status_code != 200: requests.ConnectionError( @@ -242,14 +251,15 @@ def get_updated_mef(pids, chunk_size): ) for hit in res.json(): n_provided += 1 - yield hit.get('pid') + yield hit.get("pid") finally: - self.logger.info(f'Processed {n_provided} records.') + self.logger.info(f"Processed {n_provided} records.") + if total: - return get_updated_mef( - pids=get_mef_pids(es_query), - chunk_size=5000 - ), total + return ( + get_updated_mef(pids=get_mef_pids(es_query), chunk_size=5000), + total, + ) else: return [], 0 # considers all MEF pids @@ -276,25 +286,24 @@ def sync_record(self, pid): self.logger.debug(f'Processing {entity["type"]} MEF(pid: {pid})') # iterate over all entity sources: rero, gnd, idref pids_to_replace = {} - for source in entity['sources']: + for source in entity["sources"]: mef = self._get_latest( - entity_type=entity.type, - source=source, - pid=entity[source]["pid"] + entity_type=entity.type, source=source, pid=entity[source]["pid"] ) # MEF sever failed to retrieve the latest MEF record # for the given entity - if not (mef_pid := mef.get('pid')): + if not (mef_pid := mef.get("pid")): raise Exception( - f'Error cannot get latest for ' - f'{entity["type"]} {source}:{entity[source]["pid"]}') + f"Error cannot get latest for " + f'{entity["type"]} {source}:{entity[source]["pid"]}' + ) - old_entity_pid = entity[source]['pid'] - 
new_entity_pid = mef[source]['pid'] + old_entity_pid = entity[source]["pid"] + new_entity_pid = mef[source]["pid"] new_mef_pid = mef_pid old_mef_pid = entity.pid if old_entity_pid != new_entity_pid: - mef_url = f'{get_mef_url(entity.type)}/{source}' + mef_url = f"{get_mef_url(entity.type)}/{source}" pids_to_replace[mef_url] = (old_entity_pid, new_entity_pid) # can be mef pid, source pid or metadata @@ -302,23 +311,25 @@ def sync_record(self, pid): # need a copy as we want to keep the MEF record # untouched for the next entity new_mef_data = deepcopy(mef) - fields_to_remove = ['$schema', '_created', '_updated'] + fields_to_remove = ["$schema", "_created", "_updated"] for field in fields_to_remove: new_mef_data.pop(field, None) if old_mef_pid != new_mef_pid: self.logger.info( - f'MEF pid has changed from {entity.type} ' - f'{old_mef_pid} to {new_mef_pid} ' - f'for {source} (pid:{old_entity_pid})' + f"MEF pid has changed from {entity.type} " + f"{old_mef_pid} to {new_mef_pid} " + f"for {source} (pid:{old_entity_pid})" ) if RemoteEntity.get_record_by_pid(new_mef_pid): # update the new MEF - recursion self.logger.info( f'{entity["type"]} MEF(pid: {entity.pid}) ' - f'recursion with (pid:{new_mef_pid})') - new_doc_updated, new_updated, new_error = \ - self.sync_record(new_mef_pid) + f"recursion with (pid:{new_mef_pid})" + ) + new_doc_updated, new_updated, new_error = self.sync_record( + new_mef_pid + ) # TODO: find a better way doc_updated.update(new_doc_updated) updated = updated or new_updated @@ -327,29 +338,31 @@ def sync_record(self, pid): # if the MEF record does not exist create it if not self.dry_run: RemoteEntity.create( - data=new_mef_data, - dbcommit=True, - reindex=True + data=new_mef_data, dbcommit=True, reindex=True ) RemoteEntitiesSearch.flush_and_refresh() self.logger.info( f'Create a new MEF {entity["type"]} ' - f'record(pid: {new_mef_pid})') + f"record(pid: {new_mef_pid})" + ) # something changed, update the content self.logger.info( f'MEF {entity["type"]} record(pid: {entity.pid}) ' - 'content has been updated') + "content has been updated" + ) if not self.dry_run: if old_mef_pid == new_mef_pid: RemoteEntity.get_record(entity.id).replace( - new_mef_data, dbcommit=True, reindex=True) + new_mef_data, dbcommit=True, reindex=True + ) else: # as we have only the last mef but not the old one # we need get it from the MEF server # this is important as it can still be used by # other entities - RemoteEntity.get_record_by_pid(pid)\ - .update_online(dbcommit=True, reindex=True) + RemoteEntity.get_record_by_pid(pid).update_online( + dbcommit=True, reindex=True + ) updated = True if updated: @@ -357,16 +370,16 @@ def sync_record(self, pid): doc_pids = entity.documents_pids() self.logger.info( f'MEF {entity["type"]} record(pid: {entity.pid}) ' - f' try to update documents: {doc_pids}') + f" try to update documents: {doc_pids}" + ) for doc_pid in doc_pids: self._update_entities_in_document( - doc_pid=doc_pid, - pids_to_replace=pids_to_replace + doc_pid=doc_pid, pids_to_replace=pids_to_replace ) doc_updated = set(doc_pids) except Exception as err: - self.logger.error(f'ERROR: MEF record(pid: {pid}) -> {str(err)}') + self.logger.error(f"ERROR: MEF record(pid: {pid}) -> {str(err)}") error = True # uncomment to debug # raise @@ -377,28 +390,32 @@ def start_sync(self): self.start_timestamp = datetime.now() if self.dry_run: self.logger.info( - '--------- Starting synchronization in dry run mode ---------') + "--------- Starting synchronization in dry run mode ---------" + ) else: - 
self.logger.info('--------- Starting synchronization ---------') + self.logger.info("--------- Starting synchronization ---------") def end_sync(self, n_doc_updated, n_mef_updated, mef_errors): """Add logging and cache information about the ending process.""" self.logger.info( - f'DONE: doc updated: {n_doc_updated}, ' - f'mef updated: {n_mef_updated}.') + f"DONE: doc updated: {n_doc_updated}, " f"mef updated: {n_mef_updated}." + ) if self.dry_run: return - if data := get_timestamp('sync_entities'): - errors = data.get('errors', []) + if data := get_timestamp("sync_entities"): + errors = data.get("errors", []) else: errors = [] errors += mef_errors set_timestamp( - 'sync_entities', n_doc_updated=n_doc_updated, - n_mef_updated=n_mef_updated, errors=errors, - start_timestamp=self.start_timestamp) + "sync_entities", + n_doc_updated=n_doc_updated, + n_mef_updated=n_mef_updated, + errors=errors, + start_timestamp=self.start_timestamp, + ) - def sync(self, query='*', from_date=None, in_memory=False): + def sync(self, query="*", from_date=None, in_memory=False): """Updated the MEF records and the linked documents. :param query: (string) a query to select the MEF record to be updated. @@ -443,34 +460,34 @@ def remove_unused_record(self, pid): if not self.dry_run: # remove from the database and the index: no tombstone entity.delete(True, True, True) - self.logger.info(f'MEF {entity["type"]} record(pid: {entity.pid}) ' - 'has been deleted.') + self.logger.info( + f'MEF {entity["type"]} record(pid: {entity.pid}) ' "has been deleted." + ) return True return False @classmethod def get_errors(cls): """Get all the MEF pids that causes an error.""" - return get_timestamp('sync_entities').get('errors', []) + return get_timestamp("sync_entities").get("errors", []) @classmethod def clear_errors(cls): """Removes errors in the cache information.""" - data = get_timestamp('sync_entities') - if data.get('errors'): - data['errors'] = [] - set_timestamp('sync_entities', **data) + data = get_timestamp("sync_entities") + if data.get("errors"): + data["errors"] = [] + set_timestamp("sync_entities", **data) def start_clean(self): """Add logging information about the starting process.""" self.start_timestamp = datetime.now() if self.dry_run: - self.logger.info( - '--------- Starting cleaning in dry run mode ---------') + self.logger.info("--------- Starting cleaning in dry run mode ---------") else: - self.logger.info('--------- Starting cleaning ---------') + self.logger.info("--------- Starting cleaning ---------") - def remove_unused(self, query='*'): + def remove_unused(self, query="*"): """Removes MEF records that are not linked to any documents. :param query: (string) query to limit the record candidates. 
@@ -488,5 +505,5 @@ def remove_unused(self, query='*'): except Exception: error_entities.append(pid) sys.stdout.flush() - self.logger.info(f'DONE: MEF deleted: {removed_entity_counter}') + self.logger.info(f"DONE: MEF deleted: {removed_entity_counter}") return removed_entity_counter, error_entities diff --git a/rero_ils/modules/entities/remote_entities/tasks.py b/rero_ils/modules/entities/remote_entities/tasks.py index 02b23db03b..f1690a581d 100644 --- a/rero_ils/modules/entities/remote_entities/tasks.py +++ b/rero_ils/modules/entities/remote_entities/tasks.py @@ -38,22 +38,16 @@ def delete_records(records, verbose=False): :return: count of records """ for record in records: - status = RemoteEntity.delete( - record, - force=False, - dbcommit=True, - delindex=True - ) - current_app.logger.info(f'record: {record.id} | DELETED {status}') + status = RemoteEntity.delete(record, force=False, dbcommit=True, delindex=True) + current_app.logger.info(f"record: {record.id} | DELETED {status}") # TODO bulk update and reindexing if verbose: - click.echo(f'records deleted: {len(records)}') + click.echo(f"records deleted: {len(records)}") return len(records) @shared_task(ignore_result=True) -def sync_entities( - from_last_date=True, verbose=0, dry_run=False, in_memory=True): +def sync_entities(from_last_date=True, verbose=0, dry_run=False, in_memory=True): """Synchronize the entities within the MEF server. :param from_last_date: (boolean) if True try to consider agent modified @@ -62,45 +56,41 @@ def sync_entities( :param dry_run: (boolean) if true the data are not modified """ sync_entity = SyncEntity( - from_last_date=from_last_date, verbose=verbose, dry_run=dry_run) + from_last_date=from_last_date, verbose=verbose, dry_run=dry_run + ) n_doc_updated, n_mef_updated, sync_mef_errors = sync_entity.sync( - in_memory=in_memory) + in_memory=in_memory + ) n_mef_removed, clean_mef_errors = sync_entity.remove_unused() return { - 'n_doc_updated': n_doc_updated, - 'n_mef_updated': n_mef_updated, - 'clean_mef_errors': clean_mef_errors, - 'sync_mef_errors': sync_mef_errors, - 'n_mef_removed': n_mef_removed + "n_doc_updated": n_doc_updated, + "n_mef_updated": n_mef_updated, + "clean_mef_errors": clean_mef_errors, + "sync_mef_errors": sync_mef_errors, + "n_mef_removed": n_mef_removed, } @shared_task(ignore_result=True) -def replace_identified_by( - fields=None, verbose=0, dry_run=False -): +def replace_identified_by(fields=None, verbose=0, dry_run=False): """Replace identifiedBy with $ref. 
:param fields: Entity type to replace (concepts, subjects, genreForm) :param verbose: (boolean|integer) verbose level :param dry_run: (boolean) if true the data are not modified """ - fields = fields or ['contribution', 'subjects', 'genreForm'] + fields = fields or ["contribution", "subjects", "genreForm"] result = {} for field in fields: try: - replace = ReplaceIdentifiedBy( - field=field, - verbose=verbose, - dry_run=dry_run - ) + replace = ReplaceIdentifiedBy(field=field, verbose=verbose, dry_run=dry_run) changed, not_found, rero_only = replace.run() replace.set_timestamp() result[field] = { - 'changed': changed, - 'not_found': not_found, - 'rero_only': rero_only + "changed": changed, + "not_found": not_found, + "rero_only": rero_only, } except Exception as err: - result[field] = {'error': err} + result[field] = {"error": err} return result diff --git a/rero_ils/modules/entities/remote_entities/utils.py b/rero_ils/modules/entities/remote_entities/utils.py index 6f90e38790..2923944b02 100644 --- a/rero_ils/modules/entities/remote_entities/utils.py +++ b/rero_ils/modules/entities/remote_entities/utils.py @@ -36,8 +36,8 @@ def get_entity_localized_value(entity, key, language): :param language: Language to use. :returns: Value from key in language if found otherwise the value of key. """ - order = current_app.config.get('RERO_ILS_AGENTS_LABEL_ORDER', []) - source_order = order.get(language, order.get(order['fallback'], [])) + order = current_app.config.get("RERO_ILS_AGENTS_LABEL_ORDER", []) + source_order = order.get(language, order.get(order["fallback"], [])) for source in source_order: if value := entity.get(source, {}).get(key): return value @@ -51,7 +51,7 @@ def extract_data_from_mef_uri(mef_uri): :returns: the entity_type, the ref type such as idref, and the pid value. :rtype tuple """ - ref_split = mef_uri.split('/') + ref_split = mef_uri.split("/") # TODO :: check back compatibility return ref_split[-3], ref_split[-2], ref_split[-1] @@ -65,15 +65,22 @@ def remove_schema(data): :returns: the modified data. :rtype: dict. """ - data.pop('$schema', None) - for source in current_app.config.get('RERO_ILS_AGENTS_SOURCES', []): + data.pop("$schema", None) + for source in current_app.config.get("RERO_ILS_AGENTS_SOURCES", []): if source in data: - data[source].pop('$schema', None) + data[source].pop("$schema", None) return data -def get_mef_data_by_type(pid_type, pid, entity_type='agents', verbose=False, - with_deleted=True, resolve=True, sources=True): +def get_mef_data_by_type( + pid_type, + pid, + entity_type="agents", + verbose=False, + with_deleted=True, + resolve=True, + sources=True, +): """Request MEF REST API in JSON format. :param pid_type: the type of entity (idref, gnd, viaf, ...) @@ -89,48 +96,51 @@ def get_mef_data_by_type(pid_type, pid, entity_type='agents', verbose=False, # Depending on the entity type, try to get the correct MEF base URL. 
     # If no base URL could be found, a key error will be raised
     if not (base_url := get_mef_url(entity_type)):
-        msg = f'Unable to find MEF base url for {entity_type}'
+        msg = f"Unable to find MEF base url for {entity_type}"
         if verbose:
             current_app.logger.warning(msg)
         raise KeyError(msg)
-    if pid_type == 'mef':
+    if pid_type == "mef":
         mef_url = f'{base_url}/mef/?q=pid:"{pid}"'
-    elif pid_type == 'viaf':
+    elif pid_type == "viaf":
         mef_url = f'{base_url}/mef/?q=viaf_pid:"{pid}"'
     else:
-        mef_url = f'{base_url}/mef/latest/{pid_type}:{pid}'
-
-    request = requests_retry_session().get(url=mef_url, params={
-        'with_deleted': int(with_deleted),
-        'resolve': int(resolve),
-        'sources': int(sources)
-    })
+        mef_url = f"{base_url}/mef/latest/{pid_type}:{pid}"
+
+    request = requests_retry_session().get(
+        url=mef_url,
+        params={
+            "with_deleted": int(with_deleted),
+            "resolve": int(resolve),
+            "sources": int(sources),
+        },
+    )
     if request.status_code == requests_codes.ok:
         try:
             json_data = request.json()
-            if hits := json_data.get('hits', {}):
+            if hits := json_data.get("hits", {}):
                 # we got an ES response
-                data = hits.get('hits', [None])[0].get('metadata', {})
+                data = hits.get("hits", [None])[0].get("metadata", {})
             else:
                 # we got an DB response
                 data = json_data
-            data.pop('_created', None)
-            data.pop('_updated', None)
+            data.pop("_created", None)
+            data.pop("_updated", None)
             # TODO :: This `if` statement should be removed when MEF will
             #         return the `type` key for concept
-            if entity_type == 'concepts':
-                data.setdefault('type', EntityType.TOPIC)
+            if entity_type == "concepts":
+                data.setdefault("type", EntityType.TOPIC)
             return remove_schema(data)
         except Exception as err:
-            msg = f'MEF resolver no metadata: {mef_url} {err}'
+            msg = f"MEF resolver no metadata: {mef_url} {err}"
             if verbose:
                 current_app.logger.warning(msg)
             raise ValueError(msg) from err
     else:
-        msg = f'Mef http error: {request.status_code} {mef_url}'
+        msg = f"Mef http error: {request.status_code} {mef_url}"
         if verbose:
             current_app.logger.error(msg)
         raise RequestException(msg)
diff --git a/rero_ils/modules/entities/remote_entities/views.py b/rero_ils/modules/entities/remote_entities/views.py
index a2b5229d52..18fe69ccfe 100644
--- a/rero_ils/modules/entities/remote_entities/views.py
+++ b/rero_ils/modules/entities/remote_entities/views.py
@@ -26,16 +26,14 @@
 
 from .proxy import MEFProxyFactory
 
-api_blueprint = Blueprint(
-    'api_remote_entities',
-    __name__
-)
+api_blueprint = Blueprint("api_remote_entities", __name__)
 
 
-@api_blueprint.route('/remote_entities/search/<term>',
-                     defaults={'entity_type': 'agents'})
-@api_blueprint.route('/remote_entities/search/<entity_type>/<term>')
-@api_blueprint.route('/remote_entities/search/<entity_type>/<term>/')
+@api_blueprint.route(
+    "/remote_entities/search/<term>", defaults={"entity_type": "agents"}
+)
+@api_blueprint.route("/remote_entities/search/<entity_type>/<term>")
+@api_blueprint.route("/remote_entities/search/<entity_type>/<term>/")
 @check_logged_as_librarian
 def remote_search_proxy(entity_type, term):
     """Proxy to search entities on remote server.
diff --git a/rero_ils/modules/entities/serializers/__init__.py b/rero_ils/modules/entities/serializers/__init__.py index f74f406517..d7d9a3b6f3 100644 --- a/rero_ils/modules/entities/serializers/__init__.py +++ b/rero_ils/modules/entities/serializers/__init__.py @@ -28,4 +28,4 @@ # Records-REST serializers # ======================== -json_entities_search = search_responsify(_json, 'application/json') +json_entities_search = search_responsify(_json, "application/json") diff --git a/rero_ils/modules/entities/views.py b/rero_ils/modules/entities/views.py index 3061324d88..5220464513 100644 --- a/rero_ils/modules/entities/views.py +++ b/rero_ils/modules/entities/views.py @@ -27,15 +27,15 @@ from .remote_entities.api import RemoteEntity blueprint = Blueprint( - 'entities', + "entities", __name__, - url_prefix='//entities', - template_folder='templates', - static_folder='static', + url_prefix="//entities", + template_folder="templates", + static_folder="static", ) -@blueprint.route('//') +@blueprint.route("//") def entity_detailed_view(viewcode, type, pid): """Display entity view (local or remote). @@ -44,17 +44,17 @@ def entity_detailed_view(viewcode, type, pid): :param: pid: Resource PID. :returns: The html rendering of the resource. """ - if type not in ['local', 'remote']: + if type not in ["local", "remote"]: abort(404) - entity_class = LocalEntity if type == 'local' else RemoteEntity + entity_class = LocalEntity if type == "local" else RemoteEntity if not (record := entity_class.get_record_by_pid(pid)): - abort(404, _('Entity not found.')) + abort(404, _("Entity not found.")) return render_template( - f'rero_ils/entity_{type}.html', + f"rero_ils/entity_{type}.html", record=record, viewcode=viewcode, - search_link=search_link(record) + search_link=search_link(record), ) @@ -66,14 +66,14 @@ def entity_icon(type): :returns: string, The class of the selected icon. """ icons = { - EntityType.ORGANISATION: 'fa-building-o', - EntityType.PERSON: 'fa-user-o', - EntityType.PLACE: 'fa-map-marker', - EntityType.TEMPORAL: 'fa-calendar', - EntityType.TOPIC: 'fa-tag', - EntityType.WORK: 'fa-book' + EntityType.ORGANISATION: "fa-building-o", + EntityType.PERSON: "fa-user-o", + EntityType.PLACE: "fa-map-marker", + EntityType.TEMPORAL: "fa-calendar", + EntityType.TOPIC: "fa-tag", + EntityType.WORK: "fa-book", } - return icons.get(type, 'fa-question-circle-o') + return icons.get(type, "fa-question-circle-o") @blueprint.app_template_filter() @@ -86,9 +86,9 @@ def extract_data_from_remote_entity(record): :returns: source and the dictionary of the resource selected. """ locale = current_i18n.locale.language - agent_order = current_app.config.get('RERO_ILS_AGENTS_LABEL_ORDER') + agent_order = current_app.config.get("RERO_ILS_AGENTS_LABEL_ORDER") if locale not in agent_order: - locale = agent_order.get('fallback', {}) + locale = agent_order.get("fallback", {}) sources = agent_order.get(locale) for source in sources: if data := record.get(source): @@ -103,12 +103,12 @@ def entity_label(data, language): :param language: The current language. :returns: The contribution label. 
""" - order = current_app.config.get('RERO_ILS_AGENTS_LABEL_ORDER', []) - source_order = order.get(language, order.get(order['fallback'], [])) + order = current_app.config.get("RERO_ILS_AGENTS_LABEL_ORDER", []) + source_order = order.get(language, order.get(order["fallback"], [])) for source in source_order: - if label := data.get(source, {}).get('authorized_access_point', None): + if label := data.get(source, {}).get("authorized_access_point", None): return label - return '-' + return "-" @blueprint.app_template_filter() @@ -119,13 +119,16 @@ def sources_link(data): :returns A dict with the source and link. """ links = {} - sources_link = list(filter(lambda source: source not in - current_app.config.get( - 'RERO_ILS_AGENTS_SOURCES_EXCLUDE_LINK', []), - data.get('sources', []))) + sources_link = list( + filter( + lambda source: source + not in current_app.config.get("RERO_ILS_AGENTS_SOURCES_EXCLUDE_LINK", []), + data.get("sources", []), + ) + ) for source in sources_link: - if identifier := data.get(source, {}).get('identifier'): + if identifier := data.get(source, {}).get("identifier"): links[source] = identifier return links @@ -136,22 +139,21 @@ def search_link(metadata): :param metadata: the record metadata. :returns: the search link. """ - fields_config = current_app.config.get( - 'RERO_ILS_APP_ENTITIES_TYPES_FIELDS', {}) - fields_ref = current_app.config.get( - 'RERO_ILS_APP_ENTITIES_FIELDS_REF', []) - entity_type = metadata['type'] - fields = fields_config[entity_type] if (entity_type in fields_config) \ - else fields_ref + fields_config = current_app.config.get("RERO_ILS_APP_ENTITIES_TYPES_FIELDS", {}) + fields_ref = current_app.config.get("RERO_ILS_APP_ENTITIES_FIELDS_REF", []) + entity_type = metadata["type"] + fields = ( + fields_config[entity_type] if (entity_type in fields_config) else fields_ref + ) queries = [] for field in fields: - if 'sources' in metadata: + if "sources" in metadata: # Remote entities source, data = extract_data_from_remote_entity(metadata) - entity_id = data.get('pid') + entity_id = data.get("pid") else: # Local entities - source = 'local' - entity_id = metadata.get('pid') - queries.append(f'{field}.entity.pids.{source}:{entity_id}') + source = "local" + entity_id = metadata.get("pid") + queries.append(f"{field}.entity.pids.{source}:{entity_id}") return " OR ".join(queries) + "&simple=0" diff --git a/rero_ils/modules/ext.py b/rero_ils/modules/ext.py index 840dc5e508..0a2cace48b 100644 --- a/rero_ils/modules/ext.py +++ b/rero_ils/modules/ext.py @@ -23,8 +23,7 @@ import jinja2 from flask import Blueprint from flask_bootstrap import Bootstrap4 -from flask_login.signals import user_loaded_from_cookie, user_logged_in, \ - user_logged_out +from flask_login.signals import user_loaded_from_cookie, user_logged_in, user_logged_out from flask_principal import identity_loaded from flask_wiki import Wiki from invenio_base.signals import app_loaded @@ -32,53 +31,73 @@ from invenio_circulation.signals import loan_state_changed from invenio_indexer.signals import before_record_index from invenio_oaiharvester.signals import oaiharvest_finished -from invenio_records.signals import after_record_insert, after_record_update, \ - before_record_update +from invenio_records.signals import ( + after_record_insert, + after_record_update, + before_record_update, +) from invenio_records_rest.errors import JSONSchemaValidationError from jsonschema.exceptions import ValidationError -from rero_ils.filter import address_block, empty_data, format_date_filter, \ - get_record_by_ref, 
jsondumps, message_filter, node_assets, text_to_id, \ - to_pretty_json, translate -from rero_ils.modules.acquisition.acq_accounts.listener import \ - enrich_acq_account_data -from rero_ils.modules.acquisition.acq_order_lines.listener import \ - enrich_acq_order_line_data -from rero_ils.modules.acquisition.acq_orders.listener import \ - enrich_acq_order_data -from rero_ils.modules.acquisition.acq_receipt_lines.listener import \ - enrich_acq_receipt_line_data -from rero_ils.modules.acquisition.acq_receipts.listener import \ - enrich_acq_receipt_data -from rero_ils.modules.acquisition.budgets.listener import \ - budget_is_active_changed +from rero_ils.filter import ( + address_block, + empty_data, + format_date_filter, + get_record_by_ref, + jsondumps, + message_filter, + node_assets, + text_to_id, + to_pretty_json, + translate, +) +from rero_ils.modules.acquisition.acq_accounts.listener import enrich_acq_account_data +from rero_ils.modules.acquisition.acq_order_lines.listener import ( + enrich_acq_order_line_data, +) +from rero_ils.modules.acquisition.acq_orders.listener import enrich_acq_order_data +from rero_ils.modules.acquisition.acq_receipt_lines.listener import ( + enrich_acq_receipt_line_data, +) +from rero_ils.modules.acquisition.acq_receipts.listener import enrich_acq_receipt_data +from rero_ils.modules.acquisition.budgets.listener import budget_is_active_changed from rero_ils.modules.collections.listener import enrich_collection_data from rero_ils.modules.ebooks.receivers import publish_harvested_records -from rero_ils.modules.holdings.listener import enrich_holding_data, \ - update_items_locations_and_types +from rero_ils.modules.holdings.listener import ( + enrich_holding_data, + update_items_locations_and_types, +) from rero_ils.modules.ill_requests.listener import enrich_ill_request_data -from rero_ils.modules.imports.views import ImportsListResource, \ - ImportsResource, ResultNotFoundOnTheRemoteServer +from rero_ils.modules.imports.views import ( + ImportsListResource, + ImportsResource, + ResultNotFoundOnTheRemoteServer, +) from rero_ils.modules.item_types.listener import negative_availability_changes from rero_ils.modules.items.listener import enrich_item_data from rero_ils.modules.items.views.filters import issue_client_reference -from rero_ils.modules.loans.listener import enrich_loan_data, \ - listener_loan_state_changed +from rero_ils.modules.loans.listener import ( + enrich_loan_data, + listener_loan_state_changed, +) from rero_ils.modules.normalizer_stop_words import NormalizerStopWords from rero_ils.modules.notifications.listener import enrich_notification_data -from rero_ils.modules.patron_transaction_events.listener import \ - enrich_patron_transaction_event_data -from rero_ils.modules.patron_transactions.listener import \ - enrich_patron_transaction_data +from rero_ils.modules.patron_transaction_events.listener import ( + enrich_patron_transaction_event_data, +) +from rero_ils.modules.patron_transactions.listener import enrich_patron_transaction_data from rero_ils.modules.patrons.api import current_librarian, current_patrons -from rero_ils.modules.patrons.listener import \ - create_subscription_patron_transaction, enrich_patron_data -from rero_ils.modules.permissions import LibraryNeed, OrganisationNeed, \ - OwnerNeed +from rero_ils.modules.patrons.listener import ( + create_subscription_patron_transaction, + enrich_patron_data, +) +from rero_ils.modules.permissions import LibraryNeed, OrganisationNeed, OwnerNeed from rero_ils.modules.sru.views import 
SRUDocumentsSearch from rero_ils.modules.templates.listener import prepare_template_data -from rero_ils.modules.users.listener import user_register_forms, \ - user_reset_password_forms +from rero_ils.modules.users.listener import ( + user_register_forms, + user_reset_password_forms, +) from rero_ils.modules.users.views import UsersCreateResource, UsersResource from rero_ils.modules.utils import remove_user_name, set_user_name from rero_ils.version import __version__ @@ -99,10 +118,12 @@ def on_identity_loaded(sender, identity): @param identity: the identity to enrich. """ if current_librarian: - identity.provides.update([ - OwnerNeed(current_librarian.pid), - OrganisationNeed(current_librarian.organisation_pid) - ]) + identity.provides.update( + [ + OwnerNeed(current_librarian.pid), + OrganisationNeed(current_librarian.organisation_pid), + ] + ) # for a `full_permission` user, the manageable libraries are all # libraries from the organisation ; otherwise, this is the libraries # referenced into the ``Patron`` profile. @@ -114,10 +135,9 @@ def on_identity_loaded(sender, identity): # patrons elif current_patrons: for patron in current_patrons: - identity.provides.update([ - OwnerNeed(patron.pid), - OrganisationNeed(patron.organisation_pid) - ]) + identity.provides.update( + [OwnerNeed(patron.pid), OrganisationNeed(patron.organisation_pid)] + ) @app_loaded.connect @@ -141,23 +161,21 @@ def load_actions(sender, app): # We can't use the `current_access` proxy from `invenio-access` because # we need the application context to use it. At this time, the context # isn't define ; get the invenio-access extension directly from app. - access_ext = app.extensions['invenio-access'] - for action in app.config.get('RERO_ILS_PERMISSIONS_ACTIONS', []): + access_ext = app.extensions["invenio-access"] + for action in app.config.get("RERO_ILS_PERMISSIONS_ACTIONS", []): access_ext.register_action(obj_or_import_string(action)) # add jsonschema resolution from local:// and bib.rero.ch data = app.extensions["invenio-jsonschemas"].refresolver_store() cfg = app.config - schema_url = f'{cfg["JSONSCHEMAS_URL_SCHEME"]}://'\ - f'{cfg["JSONSCHEMAS_HOST"]}'\ - f'{cfg["JSONSCHEMAS_ENDPOINT"]}/' - - app.extensions['rero-ils'].jsonschema_store = dict( - **data, - **{ - k.replace('local://', schema_url): v - for k, v in data.items() - } + schema_url = ( + f'{cfg["JSONSCHEMAS_URL_SCHEME"]}://' + f'{cfg["JSONSCHEMAS_HOST"]}' + f'{cfg["JSONSCHEMAS_ENDPOINT"]}/' + ) + + app.extensions["rero-ils"].jsonschema_store = dict( + **data, **{k.replace("local://", schema_url): v for k, v in data.items()} ) @@ -174,26 +192,25 @@ def __init__(self, app=None): # force to load ils template before others # it is required for Flask-Security see: # https://pythonhosted.org/Flask-Security/customizing.html#emails - ils_loader = jinja2.ChoiceLoader([ - jinja2.PackageLoader('rero_ils', 'theme/templates'), - app.jinja_loader - ]) + ils_loader = jinja2.ChoiceLoader( + [jinja2.PackageLoader("rero_ils", "theme/templates"), app.jinja_loader] + ) app.jinja_loader = ils_loader # register filters app.add_template_filter(get_record_by_ref) - app.add_template_filter(format_date_filter, name='format_date') + app.add_template_filter(format_date_filter, name="format_date") app.add_template_global(node_assets) - app.add_template_filter(to_pretty_json, name='tojson_pretty') + app.add_template_filter(to_pretty_json, name="tojson_pretty") app.add_template_filter(text_to_id) app.add_template_filter(jsondumps) app.add_template_filter(empty_data) 
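# Each registration in this block exposes a Python callable to Jinja
# templates; e.g. the ``format_date_filter`` registration makes
# ``{{ some_date | format_date }}`` usable, while ``node_assets`` (a
# template global) is called directly as ``{{ node_assets(...) }}``.
# These template usages are illustrative, not taken from rero-ils templates.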
app.add_template_filter(address_block) - app.add_template_filter(message_filter, name='message') + app.add_template_filter(message_filter, name="message") app.add_template_filter(issue_client_reference) app.add_template_filter(translate) - app.jinja_env.add_extension('jinja2.ext.do') - app.jinja_env.globals['version'] = __version__ + app.jinja_env.add_extension("jinja2.ext.do") + app.jinja_env.globals["version"] = __version__ self.register_signals(app) def init_app(self, app): @@ -202,14 +219,14 @@ def init_app(self, app): Wiki(app) NormalizerStopWords(app) self.init_config(app) - app.extensions['rero-ils'] = self + app.extensions["rero-ils"] = self REROILSAPP.register_import_api_blueprint(app) REROILSAPP.register_users_api_blueprint(app) REROILSAPP.register_sru_api_blueprint(app) - if db_log := app.config.get('RERO_ILS_DB_LOGGING'): - logging.getLogger('sqlalchemy.engine').setLevel(db_log) - if es_log := app.config.get('RERO_ILS_ES_LOGGING'): - es_trace_logger = logging.getLogger('elasticsearch.trace') + if db_log := app.config.get("RERO_ILS_DB_LOGGING"): + logging.getLogger("sqlalchemy.engine").setLevel(db_log) + if es_log := app.config.get("RERO_ILS_ES_LOGGING"): + es_trace_logger = logging.getLogger("elasticsearch.trace") es_trace_logger.setLevel(es_log) handler = logging.StreamHandler() es_trace_logger.addHandler(handler) @@ -220,40 +237,38 @@ def register_import_api_blueprint(app): """Imports blueprints initialization.""" def handle_bad_request(e): - return 'not found', 404 + return "not found", 404 - blueprint = Blueprint('api_imports', __name__) - endpoints = app.config.get('RERO_IMPORT_REST_ENDPOINTS', {}) + blueprint = Blueprint("api_imports", __name__) + endpoints = app.config.get("RERO_IMPORT_REST_ENDPOINTS", {}) for key, config in endpoints.items(): # search view - search_view_name = f'import_{key}' - search_path = f'/import_{key}/' + search_view_name = f"import_{key}" + search_path = f"/import_{key}/" search_view = ImportsListResource.as_view( search_view_name, - import_class=config.get('import_class'), - import_size=config.get('import_size') + import_class=config.get("import_class"), + import_size=config.get("import_size"), ) blueprint.add_url_rule(search_path, view_func=search_view) # record view - record_view_name = f'import_{key}_record' - record_path = f'/import_{key}/' + record_view_name = f"import_{key}_record" + record_path = f"/import_{key}/" record_view = ImportsResource.as_view( - record_view_name, - import_class=config.get('import_class') + record_view_name, import_class=config.get("import_class") ) blueprint.add_url_rule(record_path, view_func=record_view) blueprint.register_error_handler( - ResultNotFoundOnTheRemoteServer, - handle_bad_request + ResultNotFoundOnTheRemoteServer, handle_bad_request ) app.register_blueprint(blueprint) @staticmethod def register_users_api_blueprint(app): """User blueprints initialization.""" - blueprint = Blueprint('api_users', __name__) + blueprint = Blueprint("api_users", __name__) @blueprint.errorhandler(ValidationError) def validation_error(error): @@ -261,37 +276,33 @@ def validation_error(error): return JSONSchemaValidationError(error=error).get_response() blueprint.add_url_rule( - '/users/', - view_func=UsersResource.as_view('users_item') + "/users/", view_func=UsersResource.as_view("users_item") ) blueprint.add_url_rule( - '/users/', - view_func=UsersCreateResource.as_view('users_list') + "/users/", view_func=UsersCreateResource.as_view("users_list") ) app.register_blueprint(blueprint) @staticmethod def 
register_sru_api_blueprint(app): """SRU blueprints initialization.""" - blueprint = Blueprint('api_sru', __name__) - sru_documents_search = SRUDocumentsSearch.as_view('documents') - blueprint.add_url_rule( - '/sru/documents', - view_func=sru_documents_search - ) + blueprint = Blueprint("api_sru", __name__) + sru_documents_search = SRUDocumentsSearch.as_view("documents") + blueprint.add_url_rule("/sru/documents", view_func=sru_documents_search) app.register_blueprint(blueprint) def init_config(self, app): """Initialize configuration.""" # Use theme's base template if theme is installed for k in dir(app.config): - if k.startswith('RERO_ILS_APP_'): + if k.startswith("RERO_ILS_APP_"): app.config.setdefault(k, getattr(app.config, k)) # add keep alive support for angular application # NOTE: this will not work for werkzeug> 2.1.2 # https://werkzeug.palletsprojects.com/en/2.2.x/changes/#version-2-1-2 - if app.config.get('DEBUG'): + if app.config.get("DEBUG"): from werkzeug.serving import WSGIRequestHandler + WSGIRequestHandler.protocol_version = "HTTP/1.1" def register_signals(self, app): @@ -311,8 +322,7 @@ def register_signals(self, app): before_record_index.connect(enrich_patron_data, sender=app) before_record_index.connect(enrich_holding_data, sender=app) before_record_index.connect(enrich_notification_data, sender=app) - before_record_index.connect(enrich_patron_transaction_event_data, - sender=app) + before_record_index.connect(enrich_patron_transaction_event_data, sender=app) before_record_index.connect(enrich_patron_transaction_data, sender=app) before_record_index.connect(enrich_ill_request_data, sender=app) before_record_index.connect(prepare_template_data, sender=app) diff --git a/rero_ils/modules/extensions.py b/rero_ils/modules/extensions.py index ef52403987..ca66688038 100644 --- a/rero_ils/modules/extensions.py +++ b/rero_ils/modules/extensions.py @@ -89,22 +89,25 @@ def _check_fields(self, record): # record from the result. If one hit matches, raise a ValidationError, # otherwise, all should be fine. Enjoy ! terms = [ - Q('term', **{es_field: record[attr]}) + Q("term", **{es_field: record[attr]}) for attr, es_field in self.fields if attr in record ] - es_query = self.search_class()\ - .query('bool', should=terms, minimum_should_match=1)\ - .exclude('term', pid=record.pid)\ - .source().scan() + es_query = ( + self.search_class() + .query("bool", should=terms, minimum_should_match=1) + .exclude("term", pid=record.pid) + .source() + .scan() + ) _exhausted = object() matching_hit = next(es_query, _exhausted) if matching_hit != _exhausted: pid = matching_hit.pid - field_keys = ' and/or '.join(attrs_to_check) - msg = f'{field_keys} value(s) already taken by pid={pid}' + field_keys = " and/or ".join(attrs_to_check) + msg = f"{field_keys} value(s) already taken by pid={pid}" raise ValidationError(msg) pre_commit = _check_fields @@ -153,13 +156,13 @@ def _check_amount(self, record): # NOTE: # an amount of "123,450" is true despite if we configure only 2 # decimals. In same way "123,4" is also a valid value. 
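# A hedged sketch of the resulting pattern with ``self.decimals == 2``
# (note the unescaped ``.`` inside ``(,|.)``, which accepts any single
# character as decimal separator):
#   re.match(r"^-?(\d+(?:(,|.)\d{1,2})?)$", "123.45")   # matches
#   re.match(r"^-?(\d+(?:(,|.)\d{1,2})?)$", "123,4")    # matches
#   re.match(r"^-?(\d+(?:(,|.)\d{1,2})?)$", "123.456")  # None: 3 decimals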
- regexp = r'^-?(\d+(?:(,|.)\d{1,' + str(self.decimals) + r'})?)$' + regexp = r"^-?(\d+(?:(,|.)\d{1," + str(self.decimals) + r"})?)$" regexp = re.compile(regexp) for value in values_to_check: if not regexp.match(str(value)): decimal_string = 1 / pow(10, self.decimals) - msg = f'`{value}` must be multiple of {decimal_string}' + msg = f"`{value}` must be multiple of {decimal_string}" raise ValidationError(msg) pre_commit = _check_amount diff --git a/rero_ils/modules/fetchers.py b/rero_ils/modules/fetchers.py index 720f670ffe..52b62141c7 100644 --- a/rero_ils/modules/fetchers.py +++ b/rero_ils/modules/fetchers.py @@ -22,11 +22,11 @@ from collections import namedtuple -FetchedPID = namedtuple('FetchedPID', ['provider', 'pid_type', 'pid_value']) +FetchedPID = namedtuple("FetchedPID", ["provider", "pid_type", "pid_value"]) """A pid fetcher.""" -def id_fetcher(record_uuid, data, provider, pid_key='pid'): +def id_fetcher(record_uuid, data, provider, pid_key="pid"): """Fetch a record's identifier. :param record_uuid: The record UUID. @@ -34,7 +34,5 @@ def id_fetcher(record_uuid, data, provider, pid_key='pid'): :return: A :data:`rero_ils.modules.fetchers.FetchedPID` instance. """ return FetchedPID( - provider=provider, - pid_type=provider.pid_type, - pid_value=data[pid_key] + provider=provider, pid_type=provider.pid_type, pid_value=data[pid_key] ) diff --git a/rero_ils/modules/files/cli.py b/rero_ils/modules/files/cli.py index c4cea11e75..85d9a3e3df 100644 --- a/rero_ils/modules/files/cli.py +++ b/rero_ils/modules/files/cli.py @@ -17,6 +17,8 @@ """Click command-line interface for item record management.""" + +import contextlib import os from io import BytesIO from random import choice, randint, shuffle @@ -50,18 +52,15 @@ def create_pdf_file(document): data["authors"] = contributors # Some fields are not well converted # TODO: remove this when the dc conversion will be fixed - try: + with contextlib.suppress(Exception): if descriptions := dc.get("descriptions"): data["summary"] = "\n".join(descriptions) - except Exception: - pass generator = PDFGenerator(data) generator.render() return generator.output() -def create_pdf_record_files(document, metadata, flush=False, - number_of_files=1): +def create_pdf_record_files(document, metadata, flush=False, number_of_files=1): """Creates and attach a pdf file to a given document. :param document: Document - the document record. @@ -70,8 +69,7 @@ def create_pdf_record_files(document, metadata, flush=False, :param file_name_suffix: str - a suffix to add to the file name. """ # add document link - metadata.setdefault( - "document", {'$ref': get_ref_for_pid('doc', document.pid)}) + metadata.setdefault("document", {"$ref": get_ref_for_pid("doc", document.pid)}) ext = current_app.extensions["rero-invenio-files"] # get services record_service = ext.records_service @@ -82,10 +80,13 @@ def create_pdf_record_files(document, metadata, flush=False, try: record = next( document.get_records_files( - lib_pids=[extracted_data_from_ref(metadata.get('library'))])) + lib_pids=[extracted_data_from_ref(metadata.get("library"))] + ) + ) except StopIteration: item = record_service.create( - identity=system_identity, data={"metadata": metadata}) + identity=system_identity, data={"metadata": metadata} + ) record = item._record record.commit() # index the file record @@ -101,10 +102,7 @@ def create_pdf_record_files(document, metadata, flush=False, # TODO: find a cleaner approach i.e. 
create a permission to allow # boolean operators file_service.init_files( - identity=system_identity, - id_=recid, - data=[{"key": file_name}], - uow=uow + identity=system_identity, id_=recid, data=[{"key": file_name}], uow=uow ) file_service.set_file_content( identity=system_identity, @@ -114,15 +112,11 @@ def create_pdf_record_files(document, metadata, flush=False, uow=uow, ) file_service.commit_file( - identity=system_identity, - id_=recid, - file_key=file_name, - uow=uow + identity=system_identity, id_=recid, file_key=file_name, uow=uow ) uow.commit() if flush: - current_search.flush_and_refresh( - record_service.record_cls.index._name) + current_search.flush_and_refresh(record_service.record_cls.index._name) return record @@ -133,8 +127,7 @@ def load_files_for_document(document, metadata, files): :param metadata: dict - record metadata. :param files: list of str - file paths. """ - metadata.setdefault( - "document", {"$ref": get_ref_for_pid('doc', document.pid)}) + metadata.setdefault("document", {"$ref": get_ref_for_pid("doc", document.pid)}) ext = current_app.extensions["rero-invenio-files"] # get services record_service = ext.records_service @@ -142,10 +135,13 @@ def load_files_for_document(document, metadata, files): try: record = next( document.get_records_files( - lib_pids=[extracted_data_from_ref(metadata.get('library'))])) + lib_pids=[extracted_data_from_ref(metadata.get("library"))] + ) + ) except StopIteration: item = record_service.create( - identity=system_identity, data={"metadata": metadata}) + identity=system_identity, data={"metadata": metadata} + ) record = item._record record.commit() # index the file record @@ -161,30 +157,24 @@ def load_files_for_document(document, metadata, files): file_name = os.path.basename(file_path) stream = open(file_path, "rb") file_service.init_files( - identity=system_identity, - id_=recid, - data=[{"key": file_name}], - uow=uow + identity=system_identity, id_=recid, data=[{"key": file_name}], uow=uow ) file_service.set_file_content( identity=system_identity, id_=recid, file_key=file_name, stream=stream, - uow=uow + uow=uow, ) file_service.commit_file( - identity=system_identity, - id_=recid, - file_key=file_name, - uow=uow) + identity=system_identity, id_=recid, file_key=file_name, uow=uow + ) uow.commit() @click.command() @click.argument("number", type=int) -@click.option("-c", "--collections", multiple=True, - default=["col1", "col2", "col3"]) +@click.option("-c", "--collections", multiple=True, default=["col1", "col2", "col3"]) @with_appcontext def create_files(number, collections): """Create attached files. 
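A usage sketch for the command above, assuming `create_files` is imported from `rero_ils.modules.files.cli`; a real invocation needs the application context that `@with_appcontext` supplies through the Invenio CLI:

from click.testing import CliRunner

from rero_ils.modules.files.cli import create_files

runner = CliRunner()
# attach generated PDFs to 5 randomly chosen documents, tagged "col1"/"col2"
result = runner.invoke(create_files, ["5", "-c", "col1", "-c", "col2"])
print(result.exit_code)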
@@ -196,7 +186,7 @@ def create_files(number, collections): lib_pids = list(Library.get_all_pids()) # for fixtures we want to add file to a random document shuffle(doc_pids) - doc_pids = doc_pids[0:number] + doc_pids = doc_pids[:number] for pid in doc_pids: doc = Document.get_record_by_pid(pid) @@ -207,10 +197,10 @@ def create_files(number, collections): lib_pid = choice(lib_pids) metadata = dict( collections=[choice(collections)], - library={'$ref': get_ref_for_pid('lib', lib_pid)}) + library={"$ref": get_ref_for_pid("lib", lib_pid)}, + ) create_pdf_record_files( - document=doc, metadata=metadata, flush=True, - number_of_files=number_of_files + document=doc, metadata=metadata, flush=True, number_of_files=number_of_files ) @@ -228,8 +218,9 @@ def load_files(document_pid, library_pid, files, collections): """ doc = Document.get_record_by_pid(document_pid) metadata = dict( - document={"$ref": get_ref_for_pid('doc', document_pid)}, - library={"$ref": get_ref_for_pid('lib', library_pid)}) + document={"$ref": get_ref_for_pid("doc", document_pid)}, + library={"$ref": get_ref_for_pid("lib", library_pid)}, + ) if collections: metadata["collections"] = collections click.secho(f"Loading {len(files)} files...", fg="green") diff --git a/rero_ils/modules/files/components.py b/rero_ils/modules/files/components.py index 7a096b2ef3..9cd8c1a1fe 100644 --- a/rero_ils/modules/files/components.py +++ b/rero_ils/modules/files/components.py @@ -17,10 +17,8 @@ """Files components.""" -from invenio_records_resources.services.files.components import \ - FileServiceComponent -from invenio_records_resources.services.records.components import \ - ServiceComponent +from invenio_records_resources.services.files.components import FileServiceComponent +from invenio_records_resources.services.records.components import ServiceComponent from rero_ils.modules.documents.api import Document from rero_ils.modules.libraries.api import Library @@ -45,12 +43,12 @@ def get_additional_informations(self, record): :return a dict with additional informations. """ data = {} - if doc := record.get('document'): - data.setdefault( - 'file', {})['document'] = \ - SpecificOperationLog._get_document_data(doc) - if recid := record.get('recid'): - data.setdefault('file', {})['recid'] = recid + if doc := record.get("document"): + data.setdefault("file", {})["document"] = ( + SpecificOperationLog._get_document_data(doc) + ) + if recid := record.get("recid"): + data.setdefault("file", {})["recid"] = recid return data @@ -60,24 +58,27 @@ class OperationLogsComponent(ServiceComponent): def _create_operation_logs(self, record, operation): """Create operation logs. - :param record: obj - record instance. + :param record: obj - record instance. 
:param operation: str - CRUD operation """ - # as the invenio record resource record is different than ILSRecord - # a wrapper should be created + class Rec(dict): + # as the invenio record resource record is different than ILSRecord + # a wrapper should be created class provider: - pid_type = 'recid' + pid_type = "recid" rec = Rec() - rec['pid'] = record.pid.pid_value - if library := record.get('metadata', {}).get('library'): + rec["pid"] = record.pid.pid_value + if library := record.get("metadata", {}).get("library"): rec.library_pid = extracted_data_from_ref(library) rec.organisation_pid = Library.get_record_by_pid( - rec.library_pid).organisation_pid - if document := record.get('metadata', {}).get('document'): - rec['document'] = Document.get_record_by_pid( - extracted_data_from_ref(document)) + rec.library_pid + ).organisation_pid + if document := record.get("metadata", {}).get("document"): + rec["document"] = Document.get_record_by_pid( + extracted_data_from_ref(document) + ) OperationLogRecordFactory().create_operation_log(rec, operation) def create(self, identity, data, record, errors=None, **kwargs): @@ -88,7 +89,8 @@ def create(self, identity, data, record, errors=None, **kwargs): :param record: obj - the created record """ self._create_operation_logs( - record=record, operation=OperationLogOperation.CREATE) + record=record, operation=OperationLogOperation.CREATE + ) def update(self, identity, data, record, **kwargs): """Update handler. @@ -98,7 +100,8 @@ def update(self, identity, data, record, **kwargs): :param record: obj - the updated record """ self._create_operation_logs( - record=record, operation=OperationLogOperation.UPDATE) + record=record, operation=OperationLogOperation.UPDATE + ) def delete(self, identity, record, **kwargs): """Delete handler. @@ -107,15 +110,14 @@ def delete(self, identity, record, **kwargs): :param record: obj - the updated record """ self._create_operation_logs( - record=record, operation=OperationLogOperation.DELETE) + record=record, operation=OperationLogOperation.DELETE + ) class OperationLogsFileComponent(FileServiceComponent): """Component to create files CRUD operation logs.""" - def _create_operation_logs( - self, record, file_key, operation, deleted_file=None - ): + def _create_operation_logs(self, record, file_key, operation, deleted_file=None): """Create operation logs. :param record: obj - record instance. @@ -125,32 +127,35 @@ def _create_operation_logs( """ # for deletion the file is not in the record anymore. 
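# The ``Rec`` wrapper used in this component (and in the one above) is a
# plain ``dict`` subclass exposing the minimal ILSRecord surface that
# ``OperationLogRecordFactory`` expects; hedged illustration with
# hypothetical values:
#   rec = Rec(); rec["pid"] = "1"; rec.library_pid = "lib1"
#   rec.provider.pid_type  # -> "file" here, "recid" in OperationLogsComponent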
if deleted_file: - file_metadata = deleted_file.get('metadata', {}) + file_metadata = deleted_file.get("metadata", {}) else: - file_metadata = record.files.get(file_key).get('metadata', {}) + file_metadata = record.files.get(file_key).get("metadata", {}) # only for main files - if file_metadata.get('type') in ['fulltext', 'thumbnail']: + if file_metadata.get("type") in ["fulltext", "thumbnail"]: return # as the invenio record resource record is different than ILSRecord # a wrapper should be created class Rec(dict): class provider: - pid_type = 'file' + pid_type = "file" rec = Rec() - rec['pid'] = file_key - if library := record.get('metadata', {}).get('library'): + rec["pid"] = file_key + if library := record.get("metadata", {}).get("library"): rec.library_pid = extracted_data_from_ref(library) rec.organisation_pid = Library.get_record_by_pid( - rec.library_pid).organisation_pid - if document := record.get('metadata', {}).get('document'): - rec['document'] = Document.get_record_by_pid( - extracted_data_from_ref(document)) - rec['recid'] = record['id'] + rec.library_pid + ).organisation_pid + if document := record.get("metadata", {}).get("document"): + rec["document"] = Document.get_record_by_pid( + extracted_data_from_ref(document) + ) + rec["recid"] = record["id"] OperationLogRecordFactory().create_operation_log( - record=rec, operation=operation) + record=rec, operation=operation + ) def commit_file(self, identity, id_, file_key, record): """Commit file handler. @@ -160,8 +165,7 @@ def commit_file(self, identity, id_, file_key, record): :param file_key: str - file key in the file record. :param record: obj - record instance. """ - self._create_operation_logs( - record, file_key, OperationLogOperation.CREATE) + self._create_operation_logs(record, file_key, OperationLogOperation.CREATE) def delete_file(self, identity, id_, file_key, record, deleted_file): """Delete file handler. @@ -173,8 +177,8 @@ def delete_file(self, identity, id_, file_key, record, deleted_file): :param deleted_file: file instance - the deleted file instance. 
""" self._create_operation_logs( - record, file_key, OperationLogOperation.DELETE, - deleted_file=deleted_file) + record, file_key, OperationLogOperation.DELETE, deleted_file=deleted_file + ) class ReindexFileComponent(FileServiceComponent): diff --git a/rero_ils/modules/files/dumpers.py b/rero_ils/modules/files/dumpers.py index 12ca14847c..909d7b21a8 100644 --- a/rero_ils/modules/files/dumpers.py +++ b/rero_ils/modules/files/dumpers.py @@ -49,5 +49,6 @@ def dump(self, record, data): data["metadata"]["file_size"] = size lib_pid = data["metadata"]["library"]["pid"] from rero_ils.modules.libraries.api import Library + org_pid = Library.get_record_by_pid(lib_pid).organisation_pid data["metadata"]["organisation"] = {"pid": org_pid, "type": "doc"} diff --git a/rero_ils/modules/files/permissions.py b/rero_ils/modules/files/permissions.py index 41ad9035f7..f0c1fc8bfd 100644 --- a/rero_ils/modules/files/permissions.py +++ b/rero_ils/modules/files/permissions.py @@ -20,8 +20,11 @@ from invenio_access import action_factory from invenio_records_permissions.generators import SystemProcess -from rero_ils.modules.permissions import AllowedByAction, \ - AllowedByActionRestrictByManageableLibrary, RecordPermissionPolicy +from rero_ils.modules.permissions import ( + AllowedByAction, + AllowedByActionRestrictByManageableLibrary, + RecordPermissionPolicy, +) from rero_ils.modules.utils import extracted_data_from_ref # Actions to control Record Files policies for CRUD operations @@ -51,37 +54,31 @@ class FilePermissionPolicy(RecordPermissionPolicy): can_read = [AllowedByAction(read_action), SystemProcess()] can_create = [AllowedByAction(create_action), SystemProcess()] can_update = [ - AllowedByActionRestrictByManageableLibrary( - update_action, get_library_pid), + AllowedByActionRestrictByManageableLibrary(update_action, get_library_pid), SystemProcess(), ] can_delete = [ - AllowedByActionRestrictByManageableLibrary( - delete_action, get_library_pid), + AllowedByActionRestrictByManageableLibrary(delete_action, get_library_pid), SystemProcess(), ] # download/upload a file can_get_content_files = [AllowedByAction(read_action), SystemProcess()] can_set_content_files = [ - AllowedByActionRestrictByManageableLibrary( - create_action, get_library_pid), - SystemProcess() + AllowedByActionRestrictByManageableLibrary(create_action, get_library_pid), + SystemProcess(), ] # files container can_read_files = [AllowedByAction(read_action), SystemProcess()] can_create_files = [AllowedByAction(create_action), SystemProcess()] can_commit_files = [ - AllowedByActionRestrictByManageableLibrary( - create_action, get_library_pid), + AllowedByActionRestrictByManageableLibrary(create_action, get_library_pid), SystemProcess(), ] can_update_files = [ - AllowedByActionRestrictByManageableLibrary( - update_action, get_library_pid), + AllowedByActionRestrictByManageableLibrary(update_action, get_library_pid), SystemProcess(), ] can_delete_files = [ - AllowedByActionRestrictByManageableLibrary( - delete_action, get_library_pid), + AllowedByActionRestrictByManageableLibrary(delete_action, get_library_pid), SystemProcess(), ] diff --git a/rero_ils/modules/files/results.py b/rero_ils/modules/files/results.py index a8e2e802e2..bcef723276 100644 --- a/rero_ils/modules/files/results.py +++ b/rero_ils/modules/files/results.py @@ -28,7 +28,7 @@ def entries(self): """Iterator over the hits.""" for entry in self._results: # keep only the main files - if entry.metadata.get('type') in ['fulltext', 'thumbnail']: + if entry.metadata.get("type") in 
["fulltext", "thumbnail"]: continue projection = self._service.file_schema.dump( entry, @@ -37,7 +37,6 @@ def entries(self): ), ) if self._links_item_tpl: - projection["links"] = self._links_item_tpl.expand( - self._identity, entry) + projection["links"] = self._links_item_tpl.expand(self._identity, entry) yield projection diff --git a/rero_ils/modules/files/schemas.py b/rero_ils/modules/files/schemas.py index 157af0aaa9..246e57bdf0 100644 --- a/rero_ils/modules/files/schemas.py +++ b/rero_ils/modules/files/schemas.py @@ -38,8 +38,8 @@ def remove_fields(self, data, **kwargs): :param data: Dict of record data. :returns: Modified data. """ - data.pop('n_files', None) - data.pop('file_size', None) + data.pop("n_files", None) + data.pop("file_size", None) return data diff --git a/rero_ils/modules/files/services.py b/rero_ils/modules/files/services.py index afb52c5ed0..6826b91db8 100644 --- a/rero_ils/modules/files/services.py +++ b/rero_ils/modules/files/services.py @@ -20,12 +20,15 @@ from invenio_records.dumpers import SearchDumper from invenio_records.dumpers.indexedat import IndexedAtDumperExt -from rero_invenio_files.records.services import FileServiceConfig, \ - RecordServiceConfig +from rero_invenio_files.records.services import FileServiceConfig, RecordServiceConfig from .api import RecordWithFile -from .components import OperationLogsComponent, OperationLogsFileComponent, \ - ReindexFileComponent, ReindexRecordComponent +from .components import ( + OperationLogsComponent, + OperationLogsFileComponent, + ReindexFileComponent, + ReindexRecordComponent, +) from .dumpers import FileInformationDumperExt from .permissions import FilePermissionPolicy from .results import MainFileList @@ -42,8 +45,10 @@ class RecordServiceConfig(RecordServiceConfig): permission_policy_cls = FilePermissionPolicy # Service components - components = RecordServiceConfig.components \ - + [OperationLogsComponent, ReindexRecordComponent] + components = RecordServiceConfig.components + [ + OperationLogsComponent, + ReindexRecordComponent, + ] # Dumper for the indexer index_dumper = SearchDumper( diff --git a/rero_ils/modules/holdings/api.py b/rero_ils/modules/holdings/api.py index 8a1ea607a0..f7966c8e22 100644 --- a/rero_ils/modules/holdings/api.py +++ b/rero_ils/modules/holdings/api.py @@ -32,34 +32,40 @@ from jinja2 import Environment from rero_ils.filter import format_date_filter -from rero_ils.modules.api import IlsRecord, IlsRecordError, \ - IlsRecordsIndexer, IlsRecordsSearch +from rero_ils.modules.api import ( + IlsRecord, + IlsRecordError, + IlsRecordsIndexer, + IlsRecordsSearch, +) from rero_ils.modules.documents.api import Document -from rero_ils.modules.errors import MissingRequiredParameterError, \ - RegularReceiveNotAllowed +from rero_ils.modules.errors import ( + MissingRequiredParameterError, + RegularReceiveNotAllowed, +) from rero_ils.modules.fetchers import id_fetcher from rero_ils.modules.items.api import Item, ItemsSearch from rero_ils.modules.items.models import ItemIssueStatus from rero_ils.modules.local_fields.api import LocalFieldsSearch -from rero_ils.modules.local_fields.extensions import \ - DeleteRelatedLocalFieldExtension +from rero_ils.modules.local_fields.extensions import DeleteRelatedLocalFieldExtension from rero_ils.modules.locations.api import Location from rero_ils.modules.minters import id_minter -from rero_ils.modules.operation_logs.extensions import \ - OperationLogObserverExtension +from rero_ils.modules.operation_logs.extensions import OperationLogObserverExtension from 
rero_ils.modules.organisations.api import Organisation from rero_ils.modules.providers import Provider from rero_ils.modules.record_extensions import OrgLibRecordExtension -from rero_ils.modules.utils import extracted_data_from_ref, get_ref_for_pid, \ - get_schema_for_resource, sorted_pids +from rero_ils.modules.utils import ( + extracted_data_from_ref, + get_ref_for_pid, + get_schema_for_resource, + sorted_pids, +) from .models import HoldingIdentifier, HoldingMetadata, HoldingTypes # holing provider HoldingProvider = type( - 'HoldingProvider', - (Provider,), - dict(identifier=HoldingIdentifier, pid_type='hold') + "HoldingProvider", (Provider,), dict(identifier=HoldingIdentifier, pid_type="hold") ) # holing minter holding_id_minter = partial(id_minter, provider=HoldingProvider) @@ -68,7 +74,7 @@ # load jinja Environment JINJA_ENV = Environment() -JINJA_ENV.filters['format_date_filter'] = format_date_filter +JINJA_ENV.filters["format_date_filter"] = format_date_filter class HoldingsSearch(IlsRecordsSearch): @@ -77,9 +83,9 @@ class HoldingsSearch(IlsRecordsSearch): class Meta: """Search only on holdings index.""" - index = 'holdings' + index = "holdings" doc_types = None - fields = ('*', ) + fields = ("*",) facets = {} default_filter = None @@ -90,7 +96,7 @@ def available_query(self): :returns: a filtered Elasticsearch query. """ # should not masked - return self.exclude('term', _masked=True) + return self.exclude("term", _masked=True) class Holding(IlsRecord): @@ -107,31 +113,31 @@ class Holding(IlsRecord): provider = HoldingProvider model_cls = HoldingMetadata pids_exist_check = { - 'required': { - 'doc': 'document', - 'loc': 'location', - 'itty': 'circulation_category' + "required": { + "doc": "document", + "loc": "location", + "itty": "circulation_category", } } # interval definitions for pattern frequencies # the RDA Frequencies are available here: # http://www.rdaregistry.info/termList/frequency/ frequencies = { - 'rdafr:1001': relativedelta(days=1), # Daily - 'rdafr:1002': relativedelta(days=2, hours=8), # Three times a week - 'rdafr:1003': relativedelta(weeks=2), # Biweekly - 'rdafr:1004': relativedelta(weeks=1), # Weekly - 'rdafr:1005': relativedelta(days=3, hours=12), # Semiweekly - 'rdafr:1006': relativedelta(days=10), # Three times a month - 'rdafr:1007': relativedelta(months=2), # Bimonthly - 'rdafr:1008': relativedelta(months=1), # Monthly - 'rdafr:1009': relativedelta(days=15), # Semimonthly - 'rdafr:1010': relativedelta(months=3), # Quarterly - 'rdafr:1011': relativedelta(months=4), # Three times a year - 'rdafr:1012': relativedelta(months=6), # Semiannual - 'rdafr:1013': relativedelta(years=1), # Annual - 'rdafr:1014': relativedelta(years=2), # Biennial - 'rdafr:1015': relativedelta(years=3) # Triennial + "rdafr:1001": relativedelta(days=1), # Daily + "rdafr:1002": relativedelta(days=2, hours=8), # Three times a week + "rdafr:1003": relativedelta(weeks=2), # Biweekly + "rdafr:1004": relativedelta(weeks=1), # Weekly + "rdafr:1005": relativedelta(days=3, hours=12), # Semiweekly + "rdafr:1006": relativedelta(days=10), # Three times a month + "rdafr:1007": relativedelta(months=2), # Bimonthly + "rdafr:1008": relativedelta(months=1), # Monthly + "rdafr:1009": relativedelta(days=15), # Semimonthly + "rdafr:1010": relativedelta(months=3), # Quarterly + "rdafr:1011": relativedelta(months=4), # Three times a year + "rdafr:1012": relativedelta(months=6), # Semiannual + "rdafr:1013": relativedelta(years=1), # Annual + "rdafr:1014": relativedelta(years=2), # Biennial + "rdafr:1015": 
relativedelta(years=3), # Triennial } def extended_validation(self, **kwargs): @@ -154,36 +160,43 @@ def extended_validation(self, **kwargs): - if not a serial holdings contain one of optional serials fields. - if notes array has multiple notes with same type """ - document_pid = extracted_data_from_ref( - self.get('document').get('$ref')) + document_pid = extracted_data_from_ref(self.get("document").get("$ref")) document = Document.get_record_by_pid(document_pid) if not document: - return _(f'Document does not exist {document_pid}.') + return _(f"Document does not exist {document_pid}.") if self.is_serial: - patterns = self.get('patterns', {}) - if patterns and \ - patterns.get('frequency') != 'rdafr:1016' \ - and not patterns.get('next_expected_date'): - return _( - 'Must have next expected date for regular frequencies.') + patterns = self.get("patterns", {}) + if ( + patterns + and patterns.get("frequency") != "rdafr:1016" + and not patterns.get("next_expected_date") + ): + return _("Must have next expected date for regular frequencies.") # the enumeration and chronology optional fields are only allowed for # serial or electronic holdings if not self.is_serial ^ self.is_electronic: fields = [ - 'enumerationAndChronology', 'notes', 'index', 'missing_issues', - 'supplementaryContent', 'acquisition_status', - 'acquisition_method', 'acquisition_expected_end_date', - 'general_retention_policy', 'completeness', - 'composite_copy_report', 'issue_binding' + "enumerationAndChronology", + "notes", + "index", + "missing_issues", + "supplementaryContent", + "acquisition_status", + "acquisition_method", + "acquisition_expected_end_date", + "general_retention_policy", + "completeness", + "composite_copy_report", + "issue_binding", ] for field in fields: if self.get(field): - return _(f'{field} is allowed only for serial holdings') + return _(f"{field} is allowed only for serial holdings") # No multiple notes with same type - note_types = [note.get('type') for note in self.get('notes', [])] + note_types = [note.get("type") for note in self.get("notes", [])] if len(note_types) != len(set(note_types)): - return _('Can not have multiple notes of the same type.') + return _("Can not have multiple notes of the same type.") return True def delete(self, force=False, dbcommit=False, delindex=False): @@ -193,10 +206,8 @@ def delete(self, force=False, dbcommit=False, delindex=False): if self.is_serial: # Delete all attached items for item in self.get_all_items(): - item.delete( - force=force, dbcommit=dbcommit, delindex=False) - return super().delete( - force=force, dbcommit=dbcommit, delindex=delindex) + item.delete(force=force, dbcommit=dbcommit, delindex=False) + return super().delete(force=force, dbcommit=dbcommit, delindex=delindex) else: raise IlsRecordError.NotDeleted() @@ -213,32 +224,32 @@ def is_electronic(self): @property def holdings_type(self): """Shortcut to return the type of the holding.""" - return self.get('holdings_type') + return self.get("holdings_type") @property def document_pid(self): """Shortcut for document pid of the holding.""" - return extracted_data_from_ref(self.get('document')) + return extracted_data_from_ref(self.get("document")) @property def document(self): """Shortcut for document record related to this holding.""" - return extracted_data_from_ref(self.get('document'), data='record') + return extracted_data_from_ref(self.get("document"), data="record") @property def circulation_category_pid(self): """Shortcut for circulation_category pid of the holding.""" - return 
extracted_data_from_ref(self.get('circulation_category')) + return extracted_data_from_ref(self.get("circulation_category")) @property def location_pid(self): """Shortcut for location pid related to the holdings.""" - return extracted_data_from_ref(self.get('location')) + return extracted_data_from_ref(self.get("location")) @property def location(self): """Shortcut for location resource related to the holdings.""" - return extracted_data_from_ref(self.get('location'), data='record') + return extracted_data_from_ref(self.get("location"), data="record") @property def library_pid(self): @@ -258,14 +269,14 @@ def organisation_pid(self): @property def vendor_pid(self): """Shortcut for vendor pid of the holding.""" - if self.get('vendor'): - return extracted_data_from_ref(self.get('vendor')) + if self.get("vendor"): + return extracted_data_from_ref(self.get("vendor")) @property def vendor(self): """Shortcut to return the vendor record.""" - if self.get('vendor'): - return extracted_data_from_ref(self.get('vendor'), data='record') + if self.get("vendor"): + return extracted_data_from_ref(self.get("vendor"), data="record") def get_available_item_pids(self): """Get the list of the available item pids. @@ -273,11 +284,10 @@ def get_available_item_pids(self): :returns: [str] - the list of the available item pids. """ from rero_ils.modules.items.api import ItemsSearch + items_query = ItemsSearch().available_query() - filters = Q('term', holding__pid=self.pid) - return [ - hit.pid for hit in items_query.filter(filters).source('pid').scan() - ] + filters = Q("term", holding__pid=self.pid) + return [hit.pid for hit in items_query.filter(filters).source("pid").scan()] def get_item_pids_with_active_loan(self, item_pids): """Get the list of items pids that have active loans. @@ -290,11 +300,9 @@ def get_item_pids_with_active_loan(self, item_pids): loan_query = LoansSearch().unavailable_query() # the loans corresponding to the given item pids - loan_query = loan_query.filter(Q('terms', item_pid__value=item_pids)) + loan_query = loan_query.filter(Q("terms", item_pid__value=item_pids)) - return [ - hit.item_pid.value for hit in loan_query.source('item_pid').scan() - ] + return [hit.item_pid.value for hit in loan_query.source("item_pid").scan()] def is_available(self): """Get availability for the current holding. 
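The availability rule implemented in the next hunk reduces to set arithmetic over item pids; a minimal sketch with hypothetical pids:

available_item_pids = ["item1", "item2", "item3"]  # unmasked, loanable items
unavailable_item_pids = ["item1", "item3"]         # items with an active loan
# available as soon as one available item carries no active loan
print(bool(set(available_item_pids) - set(unavailable_item_pids)))  # True: "item2"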
@@ -304,7 +312,7 @@ def is_available(self): """ # -------------- Holdings -------------------- # unavailable if masked - if self.get('_masked', False): + if self.get("_masked", False): return False # available if the holding is electronic @@ -321,8 +329,7 @@ def is_available(self): # --------------- Loans ------------------- # get item pids that have active loans - unavailable_item_pids = \ - self.get_item_pids_with_active_loan(available_item_pids) + unavailable_item_pids = self.get_item_pids_with_active_loan(available_item_pids) # available if at least one item don't have active loan return bool(set(available_item_pids) - set(unavailable_item_pids)) @@ -330,17 +337,17 @@ def is_available(self): @property def max_number_of_claims(self): """Shortcut to return the max_number_of_claims.""" - return self.get('patterns', {}).get('max_number_of_claims', 0) + return self.get("patterns", {}).get("max_number_of_claims", 0) @property def days_before_first_claim(self): """Shortcut to return the days_before_first_claim.""" - return self.get('patterns', {}).get('days_before_first_claim', 0) + return self.get("patterns", {}).get("days_before_first_claim", 0) @property def days_before_next_claim(self): """Shortcut to return the days_before_next_claim.""" - return self.get('patterns', {}).get('days_before_next_claim', 0) + return self.get("patterns", {}).get("days_before_next_claim", 0) @property def notes(self): @@ -349,7 +356,7 @@ def notes(self): :return an array of all notes related to the holding. Each note should have two keys : `type` and `content`. """ - return self.get('notes', []) + return self.get("notes", []) def get_note(self, note_type): """Return an holdings note by its type. @@ -357,23 +364,24 @@ def get_note(self, note_type): :param note_type: the type of note (see ``HoldingNoteTypes``) :return the content of the note, None if note type is not found """ - notes = [note.get('content') for note in self.notes - if note.get('type') == note_type] + notes = [ + note.get("content") for note in self.notes if note.get("type") == note_type + ] return next(iter(notes), None) @property def get_items_count_by_holding_pid(self): """Returns items count from holding pid.""" - results = ItemsSearch()\ - .filter('term', holding__pid=self.pid)\ - .source(['pid']).count() + results = ( + ItemsSearch().filter("term", holding__pid=self.pid).source(["pid"]).count() + ) return results @classmethod def get_document_pid_by_holding_pid(cls, holding_pid): """Returns document pid for a holding pid.""" holding = cls.get_record_by_pid(holding_pid) - return extracted_data_from_ref(holding.get('document')) + return extracted_data_from_ref(holding.get("document")) @classmethod def get_holdings_type_by_holding_pid(cls, holding_pid): @@ -385,26 +393,27 @@ def get_holdings_type_by_holding_pid(cls, holding_pid): @classmethod def get_holdings_pid_by_document_pid(cls, document_pid, with_masked=True): """Returns holding pids attached for a given document pid.""" - es_query = HoldingsSearch()\ - .filter('term', document__pid=document_pid)\ - .source(['pid']) + es_query = ( + HoldingsSearch().filter("term", document__pid=document_pid).source(["pid"]) + ) if not with_masked: - es_query = es_query.filter( - 'bool', must_not=[Q('term', _masked=True)]) + es_query = es_query.filter("bool", must_not=[Q("term", _masked=True)]) for holding in es_query.scan(): yield holding.pid @classmethod - def get_holdings_pid_by_document_pid_by_org(cls, document_pid, org_pid, - with_masked=True): + def get_holdings_pid_by_document_pid_by_org( + cls, 
document_pid, org_pid, with_masked=True + ): """Returns holding pids attached for a given document pid.""" - es_query = HoldingsSearch()\ - .filter('term', document__pid=document_pid)\ - .filter('term', organisation__pid=org_pid)\ - .source(['pid']) + es_query = ( + HoldingsSearch() + .filter("term", document__pid=document_pid) + .filter("term", organisation__pid=org_pid) + .source(["pid"]) + ) if not with_masked: - es_query = es_query.filter( - 'bool', must_not=[Q('term', _masked=True)]) + es_query = es_query.filter("bool", must_not=[Q("term", _masked=True)]) for holding in es_query.scan(): yield holding.pid @@ -412,11 +421,10 @@ def get_items_filter_by_viewcode(self, viewcode): """Return items filter by view code.""" items = [ Item.get_record_by_pid(item_pid) - for item_pid in Item.get_items_pid_by_holding_pid(self.get('pid')) + for item_pid in Item.get_items_pid_by_holding_pid(self.get("pid")) ] - if (viewcode != current_app. - config.get('RERO_ILS_SEARCH_GLOBAL_VIEW_CODE')): - org_pid = Organisation.get_record_by_viewcode(viewcode)['pid'] + if viewcode != current_app.config.get("RERO_ILS_SEARCH_GLOBAL_VIEW_CODE"): + org_pid = Organisation.get_record_by_viewcode(viewcode)["pid"] return [item for item in items if item.organisation_pid == org_pid] return items @@ -424,16 +432,16 @@ def get_items(self): """Return standard items and received issues for a holding record.""" for item_pid in Item.get_items_pid_by_holding_pid(self.pid): if item := Item.get_record_by_pid(item_pid): - if not item.issue_status or \ - item.issue_status == ItemIssueStatus.RECEIVED: + if ( + not item.issue_status + or item.issue_status == ItemIssueStatus.RECEIVED + ): # inherit holdings first call# # for issues with no 1st call#. - if first_call_number := \ - item.issue_inherited_first_call_number: - item['call_number'] = first_call_number - if second_call_number := \ - item.issue_inherited_second_call_number: - item['second_call_number'] = second_call_number + if first_call_number := item.issue_inherited_first_call_number: + item["call_number"] = first_call_number + if second_call_number := item.issue_inherited_second_call_number: + item["second_call_number"] = second_call_number yield item def get_all_items(self): @@ -447,19 +455,17 @@ def get_links_to_me(self, get_pids=False): :param get_pids: if True list of linked pids if False count of linked records """ - items_query = ItemsSearch().filter('term', holding__pid=self.pid) + items_query = ItemsSearch().filter("term", holding__pid=self.pid) local_fields_query = LocalFieldsSearch().get_local_fields( - self.provider.pid_type, self.pid) + self.provider.pid_type, self.pid + ) if get_pids: items = sorted_pids(items_query) local_fields = sorted_pids(local_fields_query) else: items = items_query.count() local_fields = local_fields_query.count() - links = { - 'items': items, - 'local_fields': local_fields - } + links = {"items": items, "local_fields": local_fields} return {k: v for k, v in links.items() if v} def reasons_not_to_delete(self): @@ -468,20 +474,19 @@ def reasons_not_to_delete(self): if self.is_serial: # Find out if we can delete all items not_deleteable_items = [ - item for item in self.get_items() - if item.reasons_not_to_delete() + item for item in self.get_items() if item.reasons_not_to_delete() ] if not_deleteable_items: count = len(not_deleteable_items) - cannot_delete['others'] = { - _(f'has {count} items with loan attached'): count + cannot_delete["others"] = { + _(f"has {count} items with loan attached"): count } else: links = self.get_links_to_me() # 
local_fields isn't a reason to block holding suppression - links.pop('local_fields', None) + links.pop("local_fields", None) if links: - cannot_delete['links'] = links + cannot_delete["links"] = links return cannot_delete def get_holding_loan_conditions(self): @@ -501,19 +506,18 @@ def find_patron(organisation_pid): self.organisation_pid, self.library_pid, patron.patron_type_pid, - self.circulation_category_pid + self.circulation_category_pid, ) name = _(cipo.get("name")) checkout_duration = cipo.get("checkout_duration") - return f'{name} {checkout_duration} days' + return f"{name} {checkout_duration} days" else: - return ItemType.get_record_by_pid( - self.circulation_category_pid).get('name') + return ItemType.get_record_by_pid(self.circulation_category_pid).get("name") @property def patterns(self): """Shortcut for holdings patterns.""" - return self.get('patterns') + return self.get("patterns") @property def next_issue_display_text(self): @@ -534,9 +538,8 @@ def can(self, action, **kwargs): a list of reasons to disallow if False. """ can, reasons = True, [] - actions = current_app.config\ - .get('HOLDING_CIRCULATION_ACTIONS_VALIDATION', {}) - for func_name in actions['request']: + actions = current_app.config.get("HOLDING_CIRCULATION_ACTIONS_VALIDATION", {}) + for func_name in actions["request"]: class_name = func_name.__self__.__name__ func_callback = obj_or_import_string(func_name) func_can, func_reasons = func_callback(self, **kwargs) @@ -553,7 +556,7 @@ def can_request(cls, holding, **kwargs): :return a tuple with True|False and reasons to disallow if False. """ if holding and not holding.is_serial: - return False, [_('Only serial holdings can be requested.')] + return False, [_("Only serial holdings can be requested.")] return True, [] @classmethod @@ -564,42 +567,39 @@ def _get_next_issue_display_text(cls, patterns): :return: A display text of the next predicted issue. 
""" issue_data = {} - for pattern in patterns.get('values', []): - pattern_name = pattern.get('name', '') + for pattern in patterns.get("values", []): + pattern_name = pattern.get("name", "") level_data = {} - for level in pattern.get('levels', []): - text_value = level.get('next_value', level.get( - 'starting_value', 1)) - mapping = level.get('mapping_values') + for level in pattern.get("levels", []): + text_value = level.get("next_value", level.get("starting_value", 1)) + mapping = level.get("mapping_values") if mapping: text_value = mapping[text_value - 1] - level_data.update({ - level.get( - 'number_name', level.get('list_name') - ): str(text_value) - }) + level_data.update( + {level.get("number_name", level.get("list_name")): str(text_value)} + ) issue_data[pattern_name] = level_data # TODO: inform the PO about the use of filter format_date_filter # for additional manipulation of the expected date - tmpl = JINJA_ENV.from_string(patterns.get('template')) + tmpl = JINJA_ENV.from_string(patterns.get("template")) - next_expected_date = patterns.get('next_expected_date') + next_expected_date = patterns.get("next_expected_date") # send the expected date info with the issue data - expected_date = datetime.strptime(next_expected_date, '%Y-%m-%d') - issue_data['next_expected_date'] = next_expected_date - issue_data['expected_date'] = { - 'day': expected_date.day, - 'month': expected_date.month, - 'year': expected_date.year - } + expected_date = datetime.strptime(next_expected_date, "%Y-%m-%d") + issue_data["next_expected_date"] = next_expected_date + issue_data["expected_date"] = { + "day": expected_date.day, + "month": expected_date.month, + "year": expected_date.year, + } return tmpl.render(**issue_data), next_expected_date def increment_next_prediction(self): """Increment next prediction.""" - if not self.patterns or not self.patterns.get('values'): + if not self.patterns or not self.patterns.get("values"): return - self['patterns'] = self._increment_next_prediction(self.patterns) + self["patterns"] = self._increment_next_prediction(self.patterns) return self @classmethod @@ -611,27 +611,25 @@ def _increment_next_prediction(cls, patterns): :param patterns: List of a valid holdings patterns. :return: The updated patterns with the next issue. 
""" - for pattern in patterns.get('values', []): - for level in reversed(pattern.get('levels', [])): - max_value = level.get('completion_value') - if level.get('mapping_values'): - max_value = len(level.get('mapping_values')) - next_value = level.get('next_value', level.get( - 'starting_value', 1)) + for pattern in patterns.get("values", []): + for level in reversed(pattern.get("levels", [])): + max_value = level.get("completion_value") + if level.get("mapping_values"): + max_value = len(level.get("mapping_values")) + next_value = level.get("next_value", level.get("starting_value", 1)) if max_value == next_value: - level['next_value'] = level.get( - 'starting_value', 1) + level["next_value"] = level.get("starting_value", 1) else: - level['next_value'] = next_value + 1 + level["next_value"] = next_value + 1 break - frequency = patterns.get('frequency') + frequency = patterns.get("frequency") if frequency: next_expected_date = datetime.strptime( - patterns.get('next_expected_date'), '%Y-%m-%d') + patterns.get("next_expected_date"), "%Y-%m-%d" + ) interval = cls.frequencies[frequency] next_expected_date = next_expected_date + interval - patterns['next_expected_date'] = \ - next_expected_date.strftime('%Y-%m-%d') + patterns["next_expected_date"] = next_expected_date.strftime("%Y-%m-%d") return patterns def prediction_issues_preview(self, predictions=1): @@ -641,11 +639,10 @@ def prediction_issues_preview(self, predictions=1): :return: An array of issues display text. """ text = [] - if self.patterns and self.patterns.get('values'): + if self.patterns and self.patterns.get("values"): patterns = deepcopy(self.patterns) for r in range(predictions): - issue, expected_date = self._get_next_issue_display_text( - patterns) + issue, expected_date = self._get_next_issue_display_text(patterns) issue_data = self._prepare_issue_data(issue, expected_date) text.append(issue_data) patterns = self._increment_next_prediction(patterns) @@ -653,7 +650,10 @@ def prediction_issues_preview(self, predictions=1): @classmethod def prediction_issues_preview_for_pattern( - cls, patterns, number_of_predictions=1, ): + cls, + patterns, + number_of_predictions=1, + ): """Display preview of next predictions for a given pattern. :param predictions: Number of the next issues to predict. @@ -661,10 +661,9 @@ def prediction_issues_preview_for_pattern( :return: An array of issues display text. """ text = [] - if patterns and patterns.get('values'): + if patterns and patterns.get("values"): for r in range(number_of_predictions): - issue, expected_date = cls._get_next_issue_display_text( - patterns) + issue, expected_date = cls._get_next_issue_display_text(patterns) issue_data = cls._prepare_issue_data(issue, expected_date) text.append(issue_data) patterns = Holding._increment_next_prediction(patterns) @@ -678,102 +677,122 @@ def _prepare_issue_data(issue, expected_date): :param expected_date: The issue expected_date to prepare. :return: The prepared issue data. 
""" - return {'issue': issue, 'expected_date': expected_date} + return {"issue": issue, "expected_date": expected_date} def _prepare_issue_record( - self, status, item=None, issue_display=None, expected_date=None): + self, status, item=None, issue_display=None, expected_date=None + ): """Prepare the issue record before creating the item.""" data = { - 'issue': { - 'status': status, - 'status_date': datetime.now(timezone.utc).isoformat(), - 'expected_date': expected_date, - 'regular': True + "issue": { + "status": status, + "status_date": datetime.now(timezone.utc).isoformat(), + "expected_date": expected_date, + "regular": True, }, - 'enumerationAndChronology': issue_display, - 'status': 'on_shelf' + "enumerationAndChronology": issue_display, + "status": "on_shelf", } if status == ItemIssueStatus.RECEIVED: - data['issue'][ - 'received_date'] = datetime.now().strftime('%Y-%m-%d') + data["issue"]["received_date"] = datetime.now().strftime("%Y-%m-%d") if item: - if issue := item.pop('issue', None): - data['issue'].update(issue) + if issue := item.pop("issue", None): + data["issue"].update(issue) data.update(item) # ensure that we have the right item fields such as location, # and item_type and document. forced_data = { - '$schema': get_schema_for_resource(Item), - 'organisation': self.get('organisation'), - 'library': self.get('library'), - 'location': self.get('location'), - 'document': self.get('document'), - 'item_type': self.get('circulation_category'), - 'type': 'issue', - 'holding': {'$ref': get_ref_for_pid('hold', self.pid)} + "$schema": get_schema_for_resource(Item), + "organisation": self.get("organisation"), + "library": self.get("library"), + "location": self.get("location"), + "document": self.get("document"), + "item_type": self.get("circulation_category"), + "type": "issue", + "holding": {"$ref": get_ref_for_pid("hold", self.pid)}, } data.update(forced_data) return data - def create_regular_issue(self, status, item=None, dbcommit=False, - reindex=False): + def create_regular_issue(self, status, item=None, dbcommit=False, reindex=False): """Receive the next expected regular issue for the holdings record.""" # receive is allowed only on holdings of type serials with a regular # frequency - if self.holdings_type != HoldingTypes.SERIAL \ - or self.get('patterns', {}).get('frequency') == 'rdafr:1016': + if ( + self.holdings_type != HoldingTypes.SERIAL + or self.get("patterns", {}).get("frequency") == "rdafr:1016" + ): raise RegularReceiveNotAllowed() issue_display, expected_date = self._get_next_issue_display_text( - self.get('patterns')) + self.get("patterns") + ) data = self._prepare_issue_record( status=status, item=item, issue_display=issue_display, - expected_date=expected_date + expected_date=expected_date, ) return Item.create(data=data, dbcommit=dbcommit, reindex=reindex) def get_holding_pid_by_doc_location_item_type( - document_pid, location_pid, item_type_pid, holdings_type='standard'): + document_pid, location_pid, item_type_pid, holdings_type="standard" +): """Returns standard holding pid for document/location/item type.""" - result = HoldingsSearch() \ - .filter('term', document__pid=document_pid) \ - .filter('term', holdings_type=holdings_type) \ - .filter('term', circulation_category__pid=item_type_pid) \ - .filter('term', location__pid=location_pid) \ - .source('pid') \ + result = ( + HoldingsSearch() + .filter("term", document__pid=document_pid) + .filter("term", holdings_type=holdings_type) + .filter("term", circulation_category__pid=item_type_pid) + .filter("term", 
location__pid=location_pid) + .source("pid") .scan() + ) try: return next(result).pid except StopIteration: return None -def get_holdings_by_document_item_type( - document_pid, item_type_pid): +def get_holdings_by_document_item_type(document_pid, item_type_pid): """Returns holding locations for document/item type.""" - results = HoldingsSearch() \ - .params(preserve_order=True)\ - .filter('term', document__pid=document_pid) \ - .filter('term', circulation_category__pid=item_type_pid) \ - .sort({'pid': {"order": "asc"}}) \ - .source(['pid']) \ + results = ( + HoldingsSearch() + .params(preserve_order=True) + .filter("term", document__pid=document_pid) + .filter("term", circulation_category__pid=item_type_pid) + .sort({"pid": {"order": "asc"}}) + .source(["pid"]) .scan() + ) return [Holding.get_record_by_pid(result.pid) for result in results] def create_holding( - document_pid=None, location_pid=None, item_type_pid=None, - electronic_location=None, holdings_type=HoldingTypes.STANDARD, - patterns=None, enumerationAndChronology=None, - supplementaryContent=None, index=None, missing_issues=None, - call_number=None, second_call_number=None, notes=None, vendor_pid=None, - acquisition_status=None, acquisition_expected_end_date=None, - acquisition_method=None, general_retention_policy=None, - completeness=None, composite_copy_report=None, issue_binding=None, - masked=False): + document_pid=None, + location_pid=None, + item_type_pid=None, + electronic_location=None, + holdings_type=HoldingTypes.STANDARD, + patterns=None, + enumerationAndChronology=None, + supplementaryContent=None, + index=None, + missing_issues=None, + call_number=None, + second_call_number=None, + notes=None, + vendor_pid=None, + acquisition_status=None, + acquisition_expected_end_date=None, + acquisition_method=None, + general_retention_policy=None, + completeness=None, + composite_copy_report=None, + issue_binding=None, + masked=False, +): """Create a new holdings record from a given list of fields. :param document_pid: the document pid. @@ -805,46 +824,35 @@ def create_holding( "'document_pid', 'location_pid' and 'item_type_pid' are required." 
) data = { - '$schema': get_schema_for_resource('hold'), - '_masked': masked, - 'holdings_type': holdings_type, - 'location': { - '$ref': get_ref_for_pid('loc', location_pid) - }, - 'circulation_category': { - '$ref': get_ref_for_pid('itty', item_type_pid) - }, - 'document': { - '$ref': get_ref_for_pid('doc', document_pid) - }, - 'enumerationAndChronology': enumerationAndChronology, - 'supplementaryContent': supplementaryContent, - 'index': index, - 'missing_issues': missing_issues, - 'notes': notes, - 'call_number': call_number, - 'second_call_number': second_call_number, - 'acquisition_status': acquisition_status, - 'acquisition_method': acquisition_method, - 'completeness': completeness, - 'issue_binding': issue_binding, - 'composite_copy_report': composite_copy_report, - 'general_retention_policy': general_retention_policy, - 'acquisition_expected_end_date': acquisition_expected_end_date, + "$schema": get_schema_for_resource("hold"), + "_masked": masked, + "holdings_type": holdings_type, + "location": {"$ref": get_ref_for_pid("loc", location_pid)}, + "circulation_category": {"$ref": get_ref_for_pid("itty", item_type_pid)}, + "document": {"$ref": get_ref_for_pid("doc", document_pid)}, + "enumerationAndChronology": enumerationAndChronology, + "supplementaryContent": supplementaryContent, + "index": index, + "missing_issues": missing_issues, + "notes": notes, + "call_number": call_number, + "second_call_number": second_call_number, + "acquisition_status": acquisition_status, + "acquisition_method": acquisition_method, + "completeness": completeness, + "issue_binding": issue_binding, + "composite_copy_report": composite_copy_report, + "general_retention_policy": general_retention_policy, + "acquisition_expected_end_date": acquisition_expected_end_date, } data = {k: v for k, v in data.items() if v} # clean data from None/empty if electronic_location: - data['electronic_location'] = [electronic_location] + data["electronic_location"] = [electronic_location] if vendor_pid: - data['vendor'] = {'$ref': get_ref_for_pid('vndr', vendor_pid)} + data["vendor"] = {"$ref": get_ref_for_pid("vndr", vendor_pid)} if patterns and holdings_type == HoldingTypes.SERIAL: - data['patterns'] = patterns - return Holding.create( - data, - dbcommit=False, - reindex=False, - delete_pid=False - ) + data["patterns"] = patterns + return Holding.create(data, dbcommit=False, reindex=False, delete_pid=False) class HoldingsIndexer(IlsRecordsIndexer): @@ -869,7 +877,7 @@ def delete(self, record): """ # Delete all attached items if record.is_serial: - query = ItemsSearch().filter('term', holding__pid=record.pid) + query = ItemsSearch().filter("term", holding__pid=record.pid) query.delete() ItemsSearch.flush_and_refresh() document = Document.get_record_by_pid(record.document_pid) @@ -882,4 +890,4 @@ def bulk_index(self, record_id_iterator): :param record_id_iterator: Iterator yielding record UUIDs. 
""" - super().bulk_index(record_id_iterator, doc_type='hold') + super().bulk_index(record_id_iterator, doc_type="hold") diff --git a/rero_ils/modules/holdings/api_views.py b/rero_ils/modules/holdings/api_views.py index 27b82a49d8..edc116ae7d 100644 --- a/rero_ils/modules/holdings/api_views.py +++ b/rero_ils/modules/holdings/api_views.py @@ -26,20 +26,26 @@ from elasticsearch import exceptions from flask import Blueprint, abort, current_app, jsonify from flask import request as flask_request -from invenio_circulation.errors import CirculationException, \ - MissingRequiredParameterError +from invenio_circulation.errors import ( + CirculationException, + MissingRequiredParameterError, +) from invenio_db import db from jinja2.exceptions import TemplateSyntaxError, UndefinedError from werkzeug.exceptions import NotFound, Unauthorized from rero_ils.modules.decorators import check_authentication from rero_ils.modules.documents.views import record_library_pickup_locations -from rero_ils.modules.errors import NoCirculationActionIsPermitted, \ - RegularReceiveNotAllowed +from rero_ils.modules.errors import ( + NoCirculationActionIsPermitted, + RegularReceiveNotAllowed, +) from rero_ils.modules.items.api import Item from rero_ils.modules.items.models import ItemIssueStatus, ItemStatus -from rero_ils.modules.items.views.api_views import \ - check_authentication_for_request, check_logged_user_authentication +from rero_ils.modules.items.views.api_views import ( + check_authentication_for_request, + check_logged_user_authentication, +) from rero_ils.modules.libraries.api import Library from rero_ils.modules.patrons.api import Patron from rero_ils.modules.utils import get_ref_for_pid @@ -48,11 +54,7 @@ from .api import Holding from .models import HoldingCirculationAction -api_blueprint = Blueprint( - 'api_holding', - __name__, - url_prefix='/holding' -) +api_blueprint = Blueprint("api_holding", __name__, url_prefix="/holding") def jsonify_error(func): @@ -60,6 +62,7 @@ def jsonify_error(func): :param func: function that use this decorator """ + @wraps(func) def decorated_view(*args, **kwargs): try: @@ -67,17 +70,16 @@ def decorated_view(*args, **kwargs): except (Unauthorized, NotFound) as error: raise error except (TemplateSyntaxError, UndefinedError) as error: - return jsonify( - {'status': f'error: {error}'}), 400 + return jsonify({"status": f"error: {error}"}), 400 except Exception as error: current_app.logger.error(str(error)) db.session.rollback() - return jsonify( - {'status': f'error: {error}'}), 500 + return jsonify({"status": f"error: {error}"}), 500 + return decorated_view -@api_blueprint.route('//patterns/preview', methods=['GET']) +@api_blueprint.route("//patterns/preview", methods=["GET"]) @check_authentication @jsonify_error def patterns_preview(holding_pid): @@ -87,18 +89,18 @@ def patterns_preview(holding_pid): Optional parameters: size: number of previewed issues - by default 10 """ try: - size = flask_request.args.get('size') + size = flask_request.args.get("size") number_issues = int(size) if size else 10 except ValueError as error: number_issues = 10 holding = Holding.get_record_by_pid(holding_pid) - if holding and holding.get('holdings_type') != 'serial': - return jsonify({'status': 'error: invalid holdings type'}), 400 + if holding and holding.get("holdings_type") != "serial": + return jsonify({"status": "error: invalid holdings type"}), 400 issues = holding.prediction_issues_preview(predictions=number_issues) - return jsonify({'issues': issues}) + return jsonify({"issues": issues}) 
-@api_blueprint.route("/pattern/preview", methods=['POST']) +@api_blueprint.route("/pattern/preview", methods=["POST"]) @check_authentication @jsonify_error def pattern_preview(): @@ -108,16 +110,17 @@ def pattern_preview(): Optional parameters: size: number of previewed issues - by default 10 """ patterns_data = flask_request.get_json() - pattern = patterns_data.get('data', {}) - size = patterns_data.get('size', 10) - if pattern and pattern.get('frequency') == 'rdafr:1016': - return jsonify({'status': 'error: irregular frequency'}), 400 + pattern = patterns_data.get("data", {}) + size = patterns_data.get("size", 10) + if pattern and pattern.get("frequency") == "rdafr:1016": + return jsonify({"status": "error: irregular frequency"}), 400 issues = Holding.prediction_issues_preview_for_pattern( - pattern, number_of_predictions=size) - return jsonify({'issues': issues}) + pattern, number_of_predictions=size + ) + return jsonify({"issues": issues}) -@api_blueprint.route('//issues', methods=['POST']) +@api_blueprint.route("//issues", methods=["POST"]) @jsonify_error @check_authentication def receive_regular_issue(holding_pid): @@ -139,19 +142,16 @@ def receive_regular_issue(holding_pid): # system librarians may receive for all libraries of organisation. if not can_receive_regular_issue(holding): abort(401) - item = data.get('item', {}) + item = data.get("item", {}) try: issue = holding.create_regular_issue( - status=ItemIssueStatus.RECEIVED, - item=item, - dbcommit=True, - reindex=True + status=ItemIssueStatus.RECEIVED, item=item, dbcommit=True, reindex=True ) except RegularReceiveNotAllowed: # receive allowed only on holding of type serials and regular frequency abort(400) # the created item of type issue is returned - return jsonify({'issue': issue}) + return jsonify({"issue": issue}) def do_holding_jsonify_action(func): @@ -160,42 +160,40 @@ def do_holding_jsonify_action(func): This method for the circulation actions that required access to the holding object before executing the invenio-circulation logic. """ + @wraps(func) def decorated_view(*args, **kwargs): try: data = deepcopy(flask_request.get_json()) - description = data.pop('description') + description = data.pop("description") except KeyError: # The description parameter is missing. 
- abort(400, str('missing description parameter.')) + abort(400, "missing description parameter.") try: - holding_pid = data.pop('holding_pid', None) + holding_pid = data.pop("holding_pid", None) holding = Holding.get_record_by_pid(holding_pid) if not holding: - abort(404, 'Holding not found') + abort(404, "Holding not found") # create a provisional item item_metadata = { - 'type': 'provisional', - 'document': { - '$ref': get_ref_for_pid('doc', holding.document_pid)}, - 'location': { - '$ref': get_ref_for_pid('loc', holding.location_pid)}, - 'item_type': {'$ref': get_ref_for_pid( - 'itty', holding.circulation_category_pid)}, - 'enumerationAndChronology': description, - 'status': ItemStatus.ON_SHELF, - 'holding': {'$ref': get_ref_for_pid('hold', holding.pid)} + "type": "provisional", + "document": {"$ref": get_ref_for_pid("doc", holding.document_pid)}, + "location": {"$ref": get_ref_for_pid("loc", holding.location_pid)}, + "item_type": { + "$ref": get_ref_for_pid("itty", holding.circulation_category_pid) + }, + "enumerationAndChronology": description, + "status": ItemStatus.ON_SHELF, + "holding": {"$ref": get_ref_for_pid("hold", holding.pid)}, } item = Item.create(item_metadata, dbcommit=True, reindex=True) _, action_applied = func(holding, item, data, *args, **kwargs) - return jsonify({ - 'action_applied': action_applied - }) + return jsonify({"action_applied": action_applied}) except NoCirculationActionIsPermitted as error: # The circulation specs do not allow updates on some loan states. - return jsonify({'status': f'error: {str(error)}'}), 403 + return jsonify({"status": f"error: {str(error)}"}), 403 except MissingRequiredParameterError as error: # Return error 400 when there is a missing required parameter abort(400, str(error)) @@ -205,16 +203,17 @@ def decorated_view(*args, **kwargs): raise error except exceptions.RequestError as error: # missing required parameters - return jsonify({'status': f'error: {error}'}), 400 + return jsonify({"status": f"error: {error}"}), 400 except Exception as error: # TODO: need to know what type of exception and document there. 
# raise error - current_app.logger.error(f'{func.__name__}: {str(error)}') - return jsonify({'status': f'error: {error}'}), 400 + current_app.logger.error(f"{func.__name__}: {str(error)}") + return jsonify({"status": f"error: {error}"}), 400 + return decorated_view -@api_blueprint.route('/patron_request', methods=['POST']) +@api_blueprint.route("/patron_request", methods=["POST"]) @check_logged_user_authentication @check_authentication_for_request @do_holding_jsonify_action @@ -227,13 +226,13 @@ def patron_request(holding, item, data): description """ patron_pid = Patron.get_current_patron(holding).pid - data['patron_pid'] = patron_pid - data['transaction_user_pid'] = patron_pid - data['transaction_location_pid'] = data['pickup_location_pid'] + data["patron_pid"] = patron_pid + data["transaction_user_pid"] = patron_pid + data["transaction_location_pid"] = data["pickup_location_pid"] return item.request(**data) -@api_blueprint.route('/request', methods=['POST']) +@api_blueprint.route("/request", methods=["POST"]) @check_authentication @do_holding_jsonify_action def librarian_request(holding, item, data): @@ -250,7 +249,7 @@ def librarian_request(holding, item, data): return item.request(**data) -@api_blueprint.route('//can_request', methods=['GET']) +@api_blueprint.route("//can_request", methods=["GET"]) @check_logged_user_authentication @jsonify_error def can_request(holding_pid): @@ -269,35 +268,32 @@ def can_request(holding_pid): holding = Holding.get_record_by_pid(holding_pid) if not holding: - abort(404, 'Holding not found') + abort(404, "Holding not found") - patron_barcode = flask_request.args.get('patron_barcode') - if patron_barcode: - kwargs['patron'] = Patron.get_patron_by_barcode( - barcode=patron_barcode, org_pid=holding.organisation_pid) - if not kwargs['patron']: - abort(404, 'Patron not found') + if patron_barcode := flask_request.args.get("patron_barcode"): + kwargs["patron"] = Patron.get_patron_by_barcode( + barcode=patron_barcode, org_pid=holding.organisation_pid + ) + if not kwargs["patron"]: + abort(404, "Patron not found") - library_pid = flask_request.args.get('library_pid') - if library_pid: - kwargs['library'] = Library.get_record_by_pid(library_pid) - if not kwargs['library']: - abort(404, 'Library not found') + if library_pid := flask_request.args.get("library_pid"): + kwargs["library"] = Library.get_record_by_pid(library_pid) + if not kwargs["library"]: + abort(404, "Library not found") can, reasons = holding.can(HoldingCirculationAction.REQUEST, **kwargs) # check the `reasons_not_request` array. 
If it's empty, the request is # allowed, otherwise the request is not allowed and we need to return the # reasons why - response = {'can': can} + response = {"can": can} if reasons: - response['reasons'] = { - 'others': {reason: True for reason in reasons} - } + response["reasons"] = {"others": {reason: True for reason in reasons}} return jsonify(response) -@api_blueprint.route('//pickup_locations', methods=['GET']) +@api_blueprint.route("//pickup_locations", methods=["GET"]) @check_logged_user_authentication @jsonify_error def get_pickup_locations(holding_pid): @@ -307,18 +303,14 @@ def get_pickup_locations(holding_pid): """ holding = Holding.get_record_by_pid(holding_pid) if not holding: - abort(404, 'Holding not found') + abort(404, "Holding not found") locations = record_library_pickup_locations(holding) - return jsonify({ - 'locations': locations - }) + return jsonify({"locations": locations}) -@api_blueprint.route('//availability', methods=['GET']) +@api_blueprint.route("//availability", methods=["GET"]) def holding_availability(pid): """HTTP GET request for holding availability.""" if holding := Holding.get_record_by_pid(pid): - return jsonify({ - 'available': holding.is_available() - }) + return jsonify({"available": holding.is_available()}) abort(404) diff --git a/rero_ils/modules/holdings/cli.py b/rero_ils/modules/holdings/cli.py index face51d3b7..a1f2b978cd 100644 --- a/rero_ils/modules/holdings/cli.py +++ b/rero_ils/modules/holdings/cli.py @@ -40,28 +40,30 @@ def get_document_pid_by_rero_number(rero_control_number): """Get pid of document by rero control number.""" - es_documents = DocumentsSearch()\ - .filter('term', identifiedBy__value__raw=rero_control_number)\ - .source('pid') + es_documents = ( + DocumentsSearch() + .filter("term", identifiedBy__value__raw=rero_control_number) + .source("pid") + ) documents = [document.pid for document in es_documents.scan()] return documents[0] if documents else None def get_circ_category(org_pid): """Get a random standard circulation category for an organisation pid.""" - results = ItemTypesSearch()\ - .filter('term', organisation__pid=org_pid)\ - .filter('term', type='standard') \ - .source('pid') + results = ( + ItemTypesSearch() + .filter("term", organisation__pid=org_pid) + .filter("term", type="standard") + .source("pid") + ) records = [record.pid for record in results.scan()] return next(iter(records or []), None) def get_random_location_pid(org_pid): """Return random location for an organisation pid.""" - results = LocationsSearch() \ - .filter('term', organisation__pid=org_pid) \ - .source('pid') + results = LocationsSearch().filter("term", organisation__pid=org_pid).source("pid") locations = [location.pid for location in results.scan()] return next(iter(locations or []), None) @@ -83,28 +85,26 @@ def create_issues_from_holding(holding, min=3, max=9): for _ in range(random.randint(min, max)): # prepare some fields for the issue to ensure a variable recv dates. 
issue_display, expected_date = holding._get_next_issue_display_text( - holding.get('patterns')) + holding.get("patterns") + ) item = { - 'issue': { - 'received_date': expected_date, + "issue": { + "received_date": expected_date, }, } holding.create_regular_issue( - status=ItemIssueStatus.RECEIVED, - item=item, - dbcommit=True, - reindex=True + status=ItemIssueStatus.RECEIVED, item=item, dbcommit=True, reindex=True ) holding = Holding.get_record_by_pid(holding.pid) count += 1 return count -@click.command('create_patterns') -@click.option('-v', '--verbose', 'verbose', is_flag=True, default=False) -@click.option('-d', '--debug', 'debug', is_flag=True, default=False) -@click.option('-l', '--lazy', 'lazy', is_flag=True, default=False) -@click.argument('infile', type=click.File('r')) +@click.command("create_patterns") +@click.option("-v", "--verbose", "verbose", is_flag=True, default=False) +@click.option("-d", "--debug", "debug", is_flag=True, default=False) +@click.option("-l", "--lazy", "lazy", is_flag=True, default=False) +@click.argument("infile", type=click.File("r")) @with_appcontext def create_patterns(infile, verbose, debug, lazy): """Create serials patterns for Serial mode of issuance documents. @@ -112,34 +112,33 @@ def create_patterns(infile, verbose, debug, lazy): :param infile: Json patterns file :param lazy: lazy reads file """ - click.secho('Create serials patterns:', fg='green') + click.secho("Create serials patterns:", fg="green") journal_pids = Document.get_all_serial_pids() data = read_json_record(infile) if lazy else json.load(infile) for record_index, record in enumerate(data): - template_name = record.get('template_name') - if rero_control_number := record.get('rero_control_number'): + template_name = record.get("template_name") + if rero_control_number := record.get("rero_control_number"): document_pid = get_document_pid_by_rero_number(rero_control_number) else: try: document_pid = journal_pids[record_index] except IndexError as error: break - patterns = record.get('patterns') - enumerationAndChronology = record.get('enumerationAndChronology') - supplementaryContent = record.get('supplementaryContent') - index = record.get('index') - missing_issues = record.get('missing_issues') - notes = record.get('notes') - call_number = record.get('call_number') - second_call_number = record.get('second_call_number') - acquisition_status = record.get('acquisition_status') - acquisition_method = record.get('acquisition_method') - general_retention_policy = record.get('general_retention_policy') - composite_copy_report = record.get('composite_copy_report') - issue_binding = record.get('issue_binding') - completeness = record.get('completeness') - acquisition_expected_end_date = record.get( - 'acquisition_expected_end_date') + patterns = record.get("patterns") + enumerationAndChronology = record.get("enumerationAndChronology") + supplementaryContent = record.get("supplementaryContent") + index = record.get("index") + missing_issues = record.get("missing_issues") + notes = record.get("notes") + call_number = record.get("call_number") + second_call_number = record.get("second_call_number") + acquisition_status = record.get("acquisition_status") + acquisition_method = record.get("acquisition_method") + general_retention_policy = record.get("general_retention_policy") + composite_copy_report = record.get("composite_copy_report") + issue_binding = record.get("issue_binding") + completeness = record.get("completeness") + acquisition_expected_end_date = 
record.get("acquisition_expected_end_date") for org_pid in Organisation.get_all_pids(): circ_category_pid = get_circ_category(org_pid) location_pid = get_random_location_pid(org_pid) @@ -148,7 +147,7 @@ def create_patterns(infile, verbose, debug, lazy): document_pid=document_pid, location_pid=location_pid, item_type_pid=circ_category_pid, - holdings_type='serial', + holdings_type="serial", enumerationAndChronology=enumerationAndChronology, supplementaryContent=supplementaryContent, index=index, @@ -164,20 +163,21 @@ def create_patterns(infile, verbose, debug, lazy): call_number=call_number, second_call_number=second_call_number, vendor_pid=vendor_pid, - patterns=patterns) + patterns=patterns, + ) # create minimum 3 and max 9 received issues for this holdings - count = create_issues_from_holding(holding=holdings_record, - min=3, max=9) + count = create_issues_from_holding(holding=holdings_record, min=3, max=9) click.echo( - f'Pattern <{template_name}> created {count} ' - f'received issues for holdings: {holdings_record.pid} ' - f'and document: {document_pid}' + f"Pattern <{template_name}> created {count} " + f"received issues for holdings: {holdings_record.pid} " + f"and document: {document_pid}" ) record_index = record_index + 1 # create some late issues. process_late_issues(dbcommit=True, reindex=True) # make late issues ready for a claim for issue in ItemIssue.get_issues_by_status(status=ItemIssueStatus.LATE): - issue['issue']['status_date'] = \ - (datetime.now(timezone.utc) - timedelta(days=8)).isoformat() + issue["issue"]["status_date"] = ( + datetime.now(timezone.utc) - timedelta(days=8) + ).isoformat() issue.update(issue, dbcommit=True, reindex=True) diff --git a/rero_ils/modules/holdings/dumpers.py b/rero_ils/modules/holdings/dumpers.py index 31b8d680bb..e1317558c6 100644 --- a/rero_ils/modules/holdings/dumpers.py +++ b/rero_ils/modules/holdings/dumpers.py @@ -34,8 +34,8 @@ def dump(self, record, data): """ assert record.is_serial, "Holding type must be 'serial'" data = { - 'pid': record.pid, - 'client_id': record.get('client_id'), - 'order_reference': record.get('order_reference') + "pid": record.pid, + "client_id": record.get("client_id"), + "order_reference": record.get("order_reference"), } return {k: v for k, v in data.items() if v} diff --git a/rero_ils/modules/holdings/jsonresolver.py b/rero_ils/modules/holdings/jsonresolver.py index 852f3cfcfd..28eb86a9de 100644 --- a/rero_ils/modules/holdings/jsonresolver.py +++ b/rero_ils/modules/holdings/jsonresolver.py @@ -22,7 +22,7 @@ from ..jsonresolver import resolve_json_refs -@jsonresolver.route('/api/holdings/', host='bib.rero.ch') +@jsonresolver.route("/api/holdings/", host="bib.rero.ch") def holding_resolver(pid): """Resolver for holding record.""" - return resolve_json_refs('hold', pid) + return resolve_json_refs("hold", pid) diff --git a/rero_ils/modules/holdings/listener.py b/rero_ils/modules/holdings/listener.py index c9973ae61a..7ccd4f46eb 100644 --- a/rero_ils/modules/holdings/listener.py +++ b/rero_ils/modules/holdings/listener.py @@ -26,12 +26,19 @@ from rero_ils.modules.tasks import process_bulk_queue from rero_ils.modules.utils import extracted_data_from_ref, get_ref_for_pid -from .api import Holding, HoldingsSearch from ..items.api import Item, ItemsIndexer, ItemsSearch +from .api import Holding, HoldingsSearch -def enrich_holding_data(sender, json=None, record=None, index=None, - doc_type=None, arguments=None, **dummy_kwargs): +def enrich_holding_data( + sender, + json=None, + record=None, + index=None, + 
doc_type=None,
+    arguments=None,
+    **dummy_kwargs,
+):
     """Signal sent before a record is indexed.

     :param json: The dumped record dictionary which can be modified.
@@ -39,35 +46,39 @@ def enrich_holding_data(sender, json=None, record=None, index=None,
     :param index: The index in which the record will be indexed.
     :param doc_type: The doc_type for the record.
     """
-    if index.split('-')[0] == HoldingsSearch.Meta.index:
-        library_pid = None
-        organisation_pid = None
-
-        holding = next(HoldingsSearch()
-                       .filter('term', pid=record.pid)
-                       .source('holdings_type').scan(), None)
-        # get the number of items for ui paging
-        item_search = ItemsSearch()[0:0]\
-            .filter('term', holding__pid=record.pid)
-
-        if holding is not None and holding["holdings_type"] == 'serial':
-            item_search = ItemsSearch()[0:0]\
-                .filter('term', holding__pid=record.pid)\
-                .filter('term', issue__status="received")
-
-        # to compute the number of masked item
-        item_search.aggs.bucket('public_items', 'terms', field='_masked')
-        results = item_search.source(['organisation', 'library']).execute()
-        # number of items
-        json['items_count'] = results.hits.total.value
-        # number of masked items
-        number_of_masked_items = 0
-        for bucket in results.aggregations.public_items.buckets:
-            if bucket.key_as_string == 'true':
-                number_of_masked_items = bucket.doc_count
-                break
-        json['public_items_count'] = \
-            json['items_count'] - number_of_masked_items
+    if index.split("-")[0] != HoldingsSearch.Meta.index:
+        return
+    library_pid = None
+    organisation_pid = None
+
+    holding = next(
+        HoldingsSearch().filter("term", pid=record.pid).source("holdings_type").scan(),
+        None,
+    )
+    # get the number of items for UI paging
+    item_search = ItemsSearch()[:0].filter("term", holding__pid=record.pid)
+
+    if holding is not None and holding["holdings_type"] == "serial":
+        item_search = (
+            ItemsSearch()[:0]
+            .filter("term", holding__pid=record.pid)
+            .filter("term", issue__status="received")
+        )
+
+    # to compute the number of masked items
+    item_search.aggs.bucket("public_items", "terms", field="_masked")
+    results = item_search.source(["organisation", "library"]).execute()
+    # number of items
+    json["items_count"] = results.hits.total.value
+    number_of_masked_items = next(
+        (
+            bucket.doc_count
+            for bucket in results.aggregations.public_items.buckets
+            if bucket.key_as_string == "true"
+        ),
+        0,
+    )
+    json["public_items_count"] = json["items_count"] - number_of_masked_items


 def update_items_locations_and_types(sender, record=None, **kwargs):
@@ -79,68 +90,69 @@

     :param record: the holdings record.
     """
-    if isinstance(record, Holding) and \
-            record.get('holdings_type') == HoldingTypes.SERIAL:
-        # identify all items linked to this holdings that we need to update
-        # by excluding form the search items where all the concerned fields
-        # correspond to the fields in the holdings.
-        hold_circ_pid = record.circulation_category_pid
-        hold_loc_pid = record.location_pid
-
-        search = ItemsSearch().filter('term', holding__pid=record.pid)
-        filters = Q('term', item_type__pid=hold_circ_pid) &\
-            Q('term', location__pid=hold_loc_pid)
-        if hold_call_number := record.get('call_number', None):
-            filters &= Q(
-                'term',
-                issue__inherited_first_call_number=hold_call_number
-            )
-        if hold_second_call_number := record.get('second_call_number', None):
-            filters &= Q(
-                'term',
-                issue__inherited_second_call_number=hold_second_call_number
+    if (
+        not isinstance(record, Holding)
+        or record.get("holdings_type") != HoldingTypes.SERIAL
+    ):
+        return
+    # identify all items linked to this holdings that we need to update
+    # by excluding from the search items where all the concerned fields
+    # correspond to the fields in the holdings.
+    hold_circ_pid = record.circulation_category_pid
+    hold_loc_pid = record.location_pid
+
+    search = ItemsSearch().filter("term", holding__pid=record.pid)
+    filters = Q("term", item_type__pid=hold_circ_pid) & Q(
+        "term", location__pid=hold_loc_pid
+    )
+    if hold_call_number := record.get("call_number", None):
+        filters &= Q("term", issue__inherited_first_call_number=hold_call_number)
+    if hold_second_call_number := record.get("second_call_number", None):
+        filters &= Q(
+            "term", issue__inherited_second_call_number=hold_second_call_number
+        )
+    item_hits = search.exclude(filters).source(["pid"])
+    items = [hit.meta.id for hit in item_hits.scan()]
+    items_to_index = []
+    # update these items so that they inherit the fields location,
+    # item_type and call numbers from the parent holdings record.
+    for id in items:
+        with contextlib.suppress(Exception):
+            item = Item.get_record(id)
+            if not item:
+                continue
+            items_to_index.append(id)
+            item_temp_loc_pid, item_temp_type_pid = None, None
+            # remove the item temporary_location if it is equal to the
+            # new item location.
+            if temporary_location := item.get("temporary_location"):
+                item_temp_loc_pid = extracted_data_from_ref(
+                    temporary_location.get("$ref")
                )
-        item_hits = search.exclude(filters).source(['pid'])
-        items = [hit.meta.id for hit in item_hits.scan()]
-        items_to_index = []
-        # update these items so that they inherit the fields location,
-        # item_type and call numbers from the parent holdings record.
-        for id in items:
-            with contextlib.suppress(Exception):
-                item = Item.get_record(id)
-                if not item:
-                    continue
-                items_to_index.append(id)
-                item_temp_loc_pid, item_temp_type_pid = None, None
-                # remove the item temporary_location if it is equal to the
-                # new item location.
-                if temporary_location := item.get('temporary_location'):
-                    item_temp_loc_pid = extracted_data_from_ref(
-                        temporary_location.get('$ref'))
-                if hold_loc_pid != item.location_pid:
-                    if item_temp_loc_pid == hold_loc_pid:
-                        item.pop('temporary_location', None)
-                    item['location'] = {'$ref': get_ref_for_pid(
-                        'locations', hold_loc_pid)}
-
-                # remove the item temporary_item_type if it is equal to the
-                # new item item_type.
-                if temporary_type := item.get('temporary_item_type'):
-                    item_temp_type_pid = extracted_data_from_ref(
-                        temporary_type.get('$ref'))
-                if hold_circ_pid != item.item_type_pid:
-                    if item_temp_type_pid == hold_circ_pid:
-                        item.pop('temporary_item_type', None)
-                    item['item_type'] = {'$ref': get_ref_for_pid(
-                        'item_types', hold_circ_pid)}
-                # update directly in database.
- db.session.query(item.model_cls).filter_by(id=item.id).update( - {item.model_cls.json: item}) - - if items_to_index: - # commit session - db.session.commit() - # bulk indexing of item records. - indexer = ItemsIndexer() - indexer.bulk_index(items_to_index) - process_bulk_queue.apply_async() + if hold_loc_pid != item.location_pid: + if item_temp_loc_pid == hold_loc_pid: + item.pop("temporary_location", None) + item["location"] = {"$ref": get_ref_for_pid("locations", hold_loc_pid)} + + # remove the item temporary_item_type if it is equal to the + # new item item_type. + if temporary_type := item.get("temporary_item_type"): + item_temp_type_pid = extracted_data_from_ref(temporary_type.get("$ref")) + if hold_circ_pid != item.item_type_pid: + if item_temp_type_pid == hold_circ_pid: + item.pop("temporary_item_type", None) + item["item_type"] = { + "$ref": get_ref_for_pid("item_types", hold_circ_pid) + } + # update directly in database. + db.session.query(item.model_cls).filter_by(id=item.id).update( + {item.model_cls.json: item} + ) + + if items_to_index: + # commit session + db.session.commit() + # bulk indexing of item records. + indexer = ItemsIndexer() + indexer.bulk_index(items_to_index) + process_bulk_queue.apply_async() diff --git a/rero_ils/modules/holdings/models.py b/rero_ils/modules/holdings/models.py index f70d14f73b..b3bdab738a 100644 --- a/rero_ils/modules/holdings/models.py +++ b/rero_ils/modules/holdings/models.py @@ -27,48 +27,48 @@ class HoldingIdentifier(RecordIdentifier): """Sequence generator for holdings identifiers.""" - __tablename__ = 'holding_id' - __mapper_args__ = {'concrete': True} + __tablename__ = "holding_id" + __mapper_args__ = {"concrete": True} recid = db.Column( - db.BigInteger().with_variant(db.Integer, 'sqlite'), - primary_key=True, autoincrement=True, + db.BigInteger().with_variant(db.Integer, "sqlite"), + primary_key=True, + autoincrement=True, ) class HoldingMetadata(db.Model, RecordMetadataBase): """Holding record metadata.""" - __tablename__ = 'holding_metadata' + __tablename__ = "holding_metadata" class HoldingTypes: """Class to list all possible holding types.""" - ELECTRONIC = 'electronic' - SERIAL = 'serial' - STANDARD = 'standard' + ELECTRONIC = "electronic" + SERIAL = "serial" + STANDARD = "standard" class HoldingNoteTypes: """Class to list all holdings possible note types.""" - GENERAL = 'general_note' - STAFF = 'staff_note' - CONSERVATION = 'conservation_note' - RECEPTION = 'reception_note' - CLAIM = 'claim_note' - ROUTING = 'routing_note' - BINDING = 'binding_note' - ACQUISITION = 'acquisition_note' + GENERAL = "general_note" + STAFF = "staff_note" + CONSERVATION = "conservation_note" + RECEPTION = "reception_note" + CLAIM = "claim_note" + ROUTING = "routing_note" + BINDING = "binding_note" + ACQUISITION = "acquisition_note" # TODO: Add any of the above items to the array # to display them on the interface - PUBLIC = [ - ] + PUBLIC = [] class HoldingCirculationAction: """Enum class to list all possible action about an holding.""" - REQUEST = 'request' + REQUEST = "request" diff --git a/rero_ils/modules/holdings/permissions.py b/rero_ils/modules/holdings/permissions.py index 3e3562d6c7..d06b20ca5b 100644 --- a/rero_ils/modules/holdings/permissions.py +++ b/rero_ils/modules/holdings/permissions.py @@ -20,16 +20,19 @@ from invenio_access import action_factory, any_user from invenio_records_permissions.generators import Generator -from rero_ils.modules.permissions import AllowedByAction, \ - AllowedByActionRestrictByManageableLibrary, 
RecordPermissionPolicy +from rero_ils.modules.permissions import ( + AllowedByAction, + AllowedByActionRestrictByManageableLibrary, + RecordPermissionPolicy, +) # Actions to control Holdings policies for CRUD operations -search_action = action_factory('hold-search') -read_action = action_factory('hold-read') -create_action = action_factory('hold-create') -update_action = action_factory('hold-update') -delete_action = action_factory('hold-delete') -access_action = action_factory('hold-access') +search_action = action_factory("hold-search") +read_action = action_factory("hold-read") +create_action = action_factory("hold-create") +update_action = action_factory("hold-update") +delete_action = action_factory("hold-delete") +access_action = action_factory("hold-access") class DisallowIfNotSerialHolding(Generator): @@ -52,13 +55,13 @@ class HoldingsPermissionPolicy(RecordPermissionPolicy): can_read = [AllowedByAction(read_action)] can_create = [ AllowedByActionRestrictByManageableLibrary(create_action), - DisallowIfNotSerialHolding() + DisallowIfNotSerialHolding(), ] can_update = [ AllowedByActionRestrictByManageableLibrary(update_action), - DisallowIfNotSerialHolding() + DisallowIfNotSerialHolding(), ] can_delete = [ AllowedByActionRestrictByManageableLibrary(delete_action), - DisallowIfNotSerialHolding() + DisallowIfNotSerialHolding(), ] diff --git a/rero_ils/modules/holdings/serializers.py b/rero_ils/modules/holdings/serializers.py index 5e6c27eb13..d6cbe13691 100644 --- a/rero_ils/modules/holdings/serializers.py +++ b/rero_ils/modules/holdings/serializers.py @@ -20,8 +20,12 @@ from rero_ils.modules.item_types.api import ItemType from rero_ils.modules.libraries.api import LibrariesSearch from rero_ils.modules.locations.api import LocationsSearch -from rero_ils.modules.serializers import CachedDataSerializerMixin, \ - JSONSerializer, RecordSchemaJSONV1, search_responsify +from rero_ils.modules.serializers import ( + CachedDataSerializerMixin, + JSONSerializer, + RecordSchemaJSONV1, + search_responsify, +) from .api import Holding @@ -38,21 +42,21 @@ def _postprocess_search_hit(self, hit: dict) -> None: * holdings availability * location and library name. 
""" - metadata = hit.get('metadata', {}) - record = Holding.get_record_by_pid(metadata.get('pid')) + metadata = hit.get("metadata", {}) + record = Holding.get_record_by_pid(metadata.get("pid")) # Circulation category - circ_category_pid = metadata['circulation_category']['pid'] + circ_category_pid = metadata["circulation_category"]["pid"] circ_category = self.get_resource(ItemType, circ_category_pid) - metadata['circulation_category'] = circ_category.dumps() + metadata["circulation_category"] = circ_category.dumps() # Library & location - if pid := metadata.get('location', {}).get('pid'): - loc_name = self.get_resource(LocationsSearch(), pid)['name'] - metadata['location']['name'] = loc_name - if pid := metadata.get('library', {}).get('pid'): - lib_name = self.get_resource(LibrariesSearch(), pid)['name'] - metadata['library']['name'] = lib_name + if pid := metadata.get("location", {}).get("pid"): + loc_name = self.get_resource(LocationsSearch(), pid)["name"] + metadata["location"]["name"] = loc_name + if pid := metadata.get("library", {}).get("pid"): + lib_name = self.get_resource(LibrariesSearch(), pid)["name"] + metadata["library"]["name"] = lib_name super()._postprocess_search_hit(hit) _json = HoldingsJSONSerializer(RecordSchemaJSONV1) -json_holdings_search = search_responsify(_json, 'application/rero+json') +json_holdings_search = search_responsify(_json, "application/rero+json") diff --git a/rero_ils/modules/holdings/tasks.py b/rero_ils/modules/holdings/tasks.py index e79c39c652..74f092247e 100644 --- a/rero_ils/modules/holdings/tasks.py +++ b/rero_ils/modules/holdings/tasks.py @@ -22,17 +22,19 @@ from celery import shared_task from flask import current_app -from .api import Holding, HoldingsIndexer, HoldingsSearch from ..utils import set_timestamp +from .api import Holding, HoldingsIndexer, HoldingsSearch @shared_task(ignore_result=True) def delete_standard_holdings_having_no_items(): """Removes standard holdings records with no attached items.""" - es_query = HoldingsSearch() \ - .filter('term', holdings_type='standard') \ - .filter('term', items_count=0) \ - .source('pid') + es_query = ( + HoldingsSearch() + .filter("term", holdings_type="standard") + .filter("term", items_count=0) + .source("pid") + ) search_results = [hit for hit in es_query.scan()] count = len(search_results) @@ -47,14 +49,15 @@ def delete_standard_holdings_having_no_items(): errors += 1 reasons = record.reasons_not_to_delete() current_app.logger.error( - f'Can not delete standard holding: ' - f'{hit.pid} {reasons} {err}') + f"Can not delete standard holding: " f"{hit.pid} {reasons} {err}" + ) else: # delete holding from index HoldingsIndexer().client.delete( - id=hit.meta.id, index='holdings', doc_type='_doc') + id=hit.meta.id, index="holdings", doc_type="_doc" + ) deleted += 1 - counts = {'count': count, 'deleted': deleted, 'errors': errors} - set_timestamp('delete_standard_holdings_having_no_items', **counts) + counts = {"count": count, "deleted": deleted, "errors": errors} + set_timestamp("delete_standard_holdings_having_no_items", **counts) return counts diff --git a/rero_ils/modules/holdings/utils.py b/rero_ils/modules/holdings/utils.py index d94c4420fa..6e3ab3f44f 100644 --- a/rero_ils/modules/holdings/utils.py +++ b/rero_ils/modules/holdings/utils.py @@ -39,13 +39,15 @@ def get_late_serial_holdings(): :return: A `Holding` resource generator """ yesterday = datetime.now(timezone.utc) - timedelta(days=1) - yesterday = yesterday.strftime('%Y-%m-%d') - query = HoldingsSearch() \ - .filter('term', 
holdings_type='serial') \ - .filter('term', acquisition_status='currently_received') \ - .filter('range', patterns__next_expected_date={'lte': yesterday}) \ - .exclude('term', patterns__frequency='rdafr:1016') \ + yesterday = yesterday.strftime("%Y-%m-%d") + query = ( + HoldingsSearch() + .filter("term", holdings_type="serial") + .filter("term", acquisition_status="currently_received") + .filter("range", patterns__next_expected_date={"lte": yesterday}) + .exclude("term", patterns__frequency="rdafr:1016") .source(False) + ) for hit in query.scan(): yield Holding.get_record(hit.meta.id) @@ -61,15 +63,13 @@ def create_next_late_expected_issues(dbcommit=False, reindex=False): for holding in get_late_serial_holdings(): try: issue = holding.create_regular_issue( - status=ItemIssueStatus.LATE, - dbcommit=dbcommit, - reindex=reindex + status=ItemIssueStatus.LATE, dbcommit=dbcommit, reindex=reindex ) issue.issue_status = ItemIssueStatus.LATE issue.update(issue, dbcommit=dbcommit, reindex=reindex) counter += 1 except RegularReceiveNotAllowed as err: pid = holding.pid - msg = f'Cannot receive next expected issue for Holding#{pid}' - current_app.logger.error(f'{msg}::{str(err)}') + msg = f"Cannot receive next expected issue for Holding#{pid}" + current_app.logger.error(f"{msg}::{str(err)}") return counter diff --git a/rero_ils/modules/holdings/views.py b/rero_ils/modules/holdings/views.py index 8cb97b33a7..1d8600f915 100644 --- a/rero_ils/modules/holdings/views.py +++ b/rero_ils/modules/holdings/views.py @@ -23,12 +23,12 @@ from flask import Blueprint, abort, current_app, render_template from invenio_records_ui.signals import record_viewed -from .api import Holding from ..documents.api import Document from ..item_types.api import ItemType from ..libraries.api import Library from ..locations.api import Location from ..utils import extracted_data_from_ref +from .api import Holding def holding_view_method(pid, record, template=None, **kwargs): @@ -41,30 +41,35 @@ def holding_view_method(pid, record, template=None, **kwargs): :param **kwargs: Additional view arguments based on URL rule. :return: The rendered template. 
""" - record_viewed.send( - current_app._get_current_object(), pid=pid, record=record - ) - document_pid = extracted_data_from_ref(record.get('document')) + record_viewed.send(current_app._get_current_object(), pid=pid, record=record) + document_pid = extracted_data_from_ref(record.get("document")) document = Document.get_record_by_pid(document_pid) - location_pid = extracted_data_from_ref(record.get('location')) + location_pid = extracted_data_from_ref(record.get("location")) location = Location.get_record_by_pid(location_pid) - library_pid = extracted_data_from_ref(location.get('library')) + library_pid = extracted_data_from_ref(location.get("library")) library = Library.get_record_by_pid(library_pid) - item_type_pid = extracted_data_from_ref(record.get('circulation_category')) + item_type_pid = extracted_data_from_ref(record.get("circulation_category")) circulation_category = ItemType.get_record_by_pid(item_type_pid) - items = record.get_items_filter_by_viewcode(kwargs['viewcode']) + items = record.get_items_filter_by_viewcode(kwargs["viewcode"]) return render_template( - template, pid=pid, record=record, document=document, - location=location, circulation_category=circulation_category, - library=library, viewcode=kwargs['viewcode'], items=items) + template, + pid=pid, + record=record, + document=document, + location=location, + circulation_category=circulation_category, + library=library, + viewcode=kwargs["viewcode"], + items=items, + ) blueprint = Blueprint( - 'holding', + "holding", __name__, - url_prefix='/holding', - template_folder='templates', - static_folder='static', + url_prefix="/holding", + template_folder="templates", + static_folder="static", ) diff --git a/rero_ils/modules/ill_requests/api.py b/rero_ils/modules/ill_requests/api.py index bdbd3ce35c..bdfd88291a 100644 --- a/rero_ils/modules/ill_requests/api.py +++ b/rero_ils/modules/ill_requests/api.py @@ -34,14 +34,18 @@ from rero_ils.modules.utils import extracted_data_from_ref from .extensions import IllRequestOperationLogObserverExtension -from .models import ILLRequestIdentifier, ILLRequestMetadata, \ - ILLRequestNoteStatus, ILLRequestStatus +from .models import ( + ILLRequestIdentifier, + ILLRequestMetadata, + ILLRequestNoteStatus, + ILLRequestStatus, +) # provider ILLRequestProvider = type( - 'ILLRequestProvider', + "ILLRequestProvider", (Provider,), - dict(identifier=ILLRequestIdentifier, pid_type='illr') + dict(identifier=ILLRequestIdentifier, pid_type="illr"), ) # minter ill_request_id_minter = partial(id_minter, provider=ILLRequestProvider) @@ -55,7 +59,7 @@ class ILLRequestsSearch(IlsRecordsSearch): class Meta: """Search only on ill_request index.""" - index = 'ill_requests' + index = "ill_requests" doc_types = None def get_ill_requests_total_for_patron(self, patron_pid): @@ -66,14 +70,11 @@ def get_ill_requests_total_for_patron(self, patron_pid): :param patron_pid: the patron pid being searched. :return: return total of ill requests. 
""" - months = current_app.config.get('RERO_ILS_ILL_HIDE_MONTHS', 6) + months = current_app.config.get("RERO_ILS_ILL_HIDE_MONTHS", 6) date_delta = datetime.now(timezone.utc) - relativedelta(months=months) - filters = Q( - 'range', - _created={'lte': 'now', 'gte': date_delta} - ) - filters |= Q('term', status=ILLRequestStatus.PENDING) - filters &= Q('term', patron__pid=patron_pid) + filters = Q("range", _created={"lte": "now", "gte": date_delta}) + filters |= Q("term", status=ILLRequestStatus.PENDING) + filters &= Q("term", patron__pid=patron_pid) return self.filter(filters).count() @@ -85,9 +86,7 @@ class ILLRequest(IlsRecord): provider = ILLRequestProvider model_cls = ILLRequestMetadata - _extensions = [ - IllRequestOperationLogObserverExtension() - ] + _extensions = [IllRequestOperationLogObserverExtension()] def extended_validation(self, **kwargs): """Validate record against schema. @@ -96,22 +95,21 @@ def extended_validation(self, **kwargs): required * Ensures that only one note of each type is present. """ - if self.is_copy and self.get('pages') is None: - return 'Required property : `pages`' + if self.is_copy and self.get("pages") is None: + return "Required property : `pages`" - note_types = [note.get('type') for note in self.get('notes', [])] + note_types = [note.get("type") for note in self.get("notes", [])] if len(note_types) != len(set(note_types)): - return _('Can not have multiple notes of the same type.') + return _("Can not have multiple notes of the same type.") return True @classmethod def _build_requests_query(cls, patron_pid, status=None): """Private function to build a request query linked to a patron.""" - query = ILLRequestsSearch() \ - .filter('term', patron__pid=patron_pid) + query = ILLRequestsSearch().filter("term", patron__pid=patron_pid) if status: - query = query.filter('term', status=status) + query = query.filter("term", status=status) return query @classmethod @@ -123,7 +121,7 @@ def get_request_pids_by_patron_pid(cls, patron_pid, status=None): :return a generator of request pid """ query = cls._build_requests_query(patron_pid, status) - results = query.source('pid').scan() + results = query.source("pid").scan() for result in results: yield result.pid @@ -141,12 +139,12 @@ def get_requests_by_patron_pid(cls, patron_pid, status=None): @property def is_copy(self): """Is request is a request copy.""" - return self.get('copy', False) + return self.get("copy", False) @property def patron_pid(self): """Get patron pid for ill_request.""" - return extracted_data_from_ref(self.get('patron')) + return extracted_data_from_ref(self.get("patron")) @property def organisation_pid(self): @@ -156,13 +154,16 @@ def organisation_pid(self): @property def public_note(self): """Get the public note for ill_requests.""" - notes = [note.get('content') for note in self.get('notes', []) - if note.get('type') == ILLRequestNoteStatus.PUBLIC_NOTE] + notes = [ + note.get("content") + for note in self.get("notes", []) + if note.get("type") == ILLRequestNoteStatus.PUBLIC_NOTE + ] return next(iter(notes or []), None) def get_pickup_location(self): """Get the pickup location.""" - location_pid = extracted_data_from_ref(self.get('pickup_location')) + location_pid = extracted_data_from_ref(self.get("pickup_location")) return Location.get_record_by_pid(location_pid) def get_library(self): @@ -180,4 +181,4 @@ def bulk_index(self, record_id_iterator): :param record_id_iterator: Iterator yielding record UUIDs. 
""" - super().bulk_index(record_id_iterator, doc_type='illr') + super().bulk_index(record_id_iterator, doc_type="illr") diff --git a/rero_ils/modules/ill_requests/cli.py b/rero_ils/modules/ill_requests/cli.py index 6aece856af..9f23e70cba 100644 --- a/rero_ils/modules/ill_requests/cli.py +++ b/rero_ils/modules/ill_requests/cli.py @@ -31,41 +31,39 @@ from rero_ils.modules.utils import get_ref_for_pid -@click.command('create_ill_requests') -@click.option('-f', '--requests_file', 'input_file', help='Request input file') +@click.command("create_ill_requests") +@click.option("-f", "--requests_file", "input_file", help="Request input file") @with_appcontext def create_ill_requests(input_file): """Create ILL request for each organisation.""" locations = get_locations() patron_pids = {} - with open(input_file, 'r', encoding='utf-8') as request_file: + with open(input_file, "r", encoding="utf-8") as request_file: requests = json.load(request_file) for request_data in requests: for organisation_pid, location_pid in locations.items(): - if 'pid' in request_data: - del request_data['pid'] + if "pid" in request_data: + del request_data["pid"] if organisation_pid not in patron_pids: patron_pids[organisation_pid] = [ - pid for pid in Patron.get_all_pids_for_organisation( - organisation_pid) + pid + for pid in Patron.get_all_pids_for_organisation( + organisation_pid + ) if Patron.get_record_by_pid(pid).is_patron ] patron_pid = random.choice(patron_pids[organisation_pid]) - request_data['patron'] = { - '$ref': get_ref_for_pid('patrons', patron_pid) + request_data["patron"] = { + "$ref": get_ref_for_pid("patrons", patron_pid) } - request_data['pickup_location'] = { - '$ref': get_ref_for_pid('locations', location_pid) + request_data["pickup_location"] = { + "$ref": get_ref_for_pid("locations", location_pid) } - request = ILLRequest.create( - request_data, - dbcommit=True, - reindex=True - ) + request = ILLRequest.create(request_data, dbcommit=True, reindex=True) click.echo( - f'\tRequest: #{request.pid} \t' - f'for org#{request.organisation_pid}' + f"\tRequest: #{request.pid} \t" + f"for org#{request.organisation_pid}" ) diff --git a/rero_ils/modules/ill_requests/extensions.py b/rero_ils/modules/ill_requests/extensions.py index cb2b015603..fe795c5a01 100644 --- a/rero_ils/modules/ill_requests/extensions.py +++ b/rero_ils/modules/ill_requests/extensions.py @@ -19,8 +19,7 @@ import contextlib -from rero_ils.modules.operation_logs.extensions import \ - OperationLogObserverExtension +from rero_ils.modules.operation_logs.extensions import OperationLogObserverExtension class IllRequestOperationLogObserverExtension(OperationLogObserverExtension): @@ -32,13 +31,11 @@ def get_additional_informations(self, record): :param record: the observed record. :return a dict with additional informations. 
""" - data = {'ill_request': { - 'status': record.get('status') - }} + data = {"ill_request": {"status": record.get("status")}} # if the location or library doesn't exist anymore, # we do not inject the library pid in the operation log with contextlib.suppress(Exception): - data['ill_request']['library_pid'] = record.get_library().pid - if loan_status := record.get('loan_status'): - data['ill_request']['loan_status'] = loan_status + data["ill_request"]["library_pid"] = record.get_library().pid + if loan_status := record.get("loan_status"): + data["ill_request"]["loan_status"] = loan_status return data diff --git a/rero_ils/modules/ill_requests/forms.py b/rero_ils/modules/ill_requests/forms.py index ff8df103e4..ac94323957 100644 --- a/rero_ils/modules/ill_requests/forms.py +++ b/rero_ils/modules/ill_requests/forms.py @@ -20,8 +20,15 @@ from flask_babel import gettext as _ from flask_babel import lazy_gettext from flask_wtf import FlaskForm -from wtforms import FormField, IntegerField, RadioField, SelectField, \ - StringField, TextAreaField, validators +from wtforms import ( + FormField, + IntegerField, + RadioField, + SelectField, + StringField, + TextAreaField, + validators, +) from wtforms.fields.html5 import URLField from rero_ils.modules.utils import get_ref_for_pid @@ -37,19 +44,14 @@ class Meta: csrf = False journal_title = StringField( - validators=[ - validators.Optional(), - validators.Length(min=3) - ], - description=_('Journal or book title') + validators=[validators.Optional(), validators.Length(min=3)], + description=_("Journal or book title"), ) volume = StringField( - description=_('Volume'), - render_kw={'placeholder': '1, 2, ...'} + description=_("Volume"), render_kw={"placeholder": "1, 2, ..."} ) number = StringField( - description=_('Number'), - render_kw={'placeholder': '1, January, ...'} + description=_("Number"), render_kw={"placeholder": "1, January, ..."} ) @@ -62,44 +64,29 @@ class Meta: csrf = False title = StringField( - label=_('Title'), - validators=[ - validators.DataRequired(), - validators.Length(min=3) - ] + label=_("Title"), + validators=[validators.DataRequired(), validators.Length(min=3)], ) authors = StringField( - label=_('Authors'), - validators=[ - validators.Optional(), - validators.Length(min=3) - ], - render_kw={'placeholder': _('author#1; author#2')} + label=_("Authors"), + validators=[validators.Optional(), validators.Length(min=3)], + render_kw={"placeholder": _("author#1; author#2")}, ) year = IntegerField( - label=_('Year'), - validators=[ - validators.Optional() - ], - render_kw={'placeholder': '2020'} + label=_("Year"), + validators=[validators.Optional()], + render_kw={"placeholder": "2020"}, ) publisher = StringField( - label=_('Publisher'), - validators=[ - validators.Optional(), - validators.Length(min=3) - ] + label=_("Publisher"), + validators=[validators.Optional(), validators.Length(min=3)], ) identifier = StringField( - label=_('Identifier'), - description=_('Example: 978-0-901690-54-6 (ISBN), ' - '2049-3630 (ISSN), ...'), - render_kw={'placeholder': _('ISBN, ISSN')} - ) - source = FormField( - ILLRequestDocumentSource, - label=_('Published in') + label=_("Identifier"), + description=_("Example: 978-0-901690-54-6 (ISBN), " "2049-3630 (ISSN), ..."), + render_kw={"placeholder": _("ISBN, ISSN")}, ) + source = FormField(ILLRequestDocumentSource, label=_("Published in")) class ILLRequestSourceForm(FlaskForm): @@ -110,24 +97,19 @@ class Meta: csrf = False - origin = StringField( - description=_('Library catalog name') - ) + origin = 
StringField(description=_("Library catalog name")) url = URLField( - description=_('Link of the document'), - render_kw={'placeholder': 'https://...'} + description=_("Link of the document"), render_kw={"placeholder": "https://..."} ) def validate(self, **kwargs): """Custom validation for this form.""" if self.url.data: - self.origin.validators = [ - validators.DataRequired() - ] + self.origin.validators = [validators.DataRequired()] if self.origin.data: self.url.validators = [ validators.DataRequired(), - validators.URL(require_tld=False) + validators.URL(require_tld=False), ] return super().validate(kwargs) @@ -137,32 +119,21 @@ class ILLRequestForm(FlaskForm): document = FormField(ILLRequestDocumentForm) copy = RadioField( - label=_('Scope'), - choices=[(0, _('Loan')), (1, _('Copy'))], + label=_("Scope"), + choices=[(0, _("Loan")), (1, _("Copy"))], default=0, - description=_('Define if the request is for a copy or full document.') - ) - pages = StringField( - label=_('Pages') - ) - source = FormField( - ILLRequestSourceForm, - label=_('Found in') - ) - note = TextAreaField( - label='Note', - render_kw={'rows': 5} + description=_("Define if the request is for a copy or full document."), ) + pages = StringField(label=_("Pages")) + source = FormField(ILLRequestSourceForm, label=_("Found in")) + note = TextAreaField(label="Note", render_kw={"rows": 5}) pickup_location = SelectField( - label=_('Pickup location'), + label=_("Pickup location"), # Choices will be loaded dynamically because they should # be given inside app_context - choices=[('', lazy_gettext('Select…'))], - description=_('Select the location where this request will be ' - 'operated'), - validators=[ - validators.DataRequired() - ] + choices=[("", lazy_gettext("Select…"))], + description=_("Select the location where this request will be " "operated"), + validators=[validators.DataRequired()], ) def validate(self, **kwargs): @@ -171,56 +142,57 @@ def validate(self, **kwargs): # if 'copy' is set to True, then 'pages' is required field custom_validate = True - if self.copy.data == '1' and len(self.pages.data.strip()) == 0: + if self.copy.data == "1" and len(self.pages.data.strip()) == 0: custom_validate = False self.pages.errors.append( - _('As you request a document part, you need to specify ' - 'requested pages') + _( + "As you request a document part, you need to specify " + "requested pages" + ) ) return form_validate and custom_validate def get_data(self): """Return the form as a valid ILLRequest data structure.""" - data = remove_empties_from_dict({ - 'document': { - 'title': self.document.title.data, - 'authors': self.document.authors.data, - 'publisher': self.document.publisher.data, - 'year': str(self.document.year.data or ''), - 'identifier': self.document.identifier.data, - 'source': { - 'journal_title': self.document.source.journal_title.data, - 'volume': self.document.source.volume.data, - 'number': self.document.source.number.data, - } - }, - # the loan status is required by the jsonschema, it is always - # PENDING on ill request creation - 'loan_status': "PENDING", - 'pickup_location': { - '$ref': get_ref_for_pid('locations', self.pickup_location.data) - }, - 'pages': self.pages.data, - 'found_in': { - 'source': self.source.origin.data, - 'url': self.source.url.data + data = remove_empties_from_dict( + { + "document": { + "title": self.document.title.data, + "authors": self.document.authors.data, + "publisher": self.document.publisher.data, + "year": str(self.document.year.data or ""), + "identifier": 
self.document.identifier.data, + "source": { + "journal_title": self.document.source.journal_title.data, + "volume": self.document.source.volume.data, + "number": self.document.source.number.data, + }, + }, + # the loan status is required by the jsonschema, it is always + # PENDING on ill request creation + "loan_status": "PENDING", + "pickup_location": { + "$ref": get_ref_for_pid("locations", self.pickup_location.data) + }, + "pages": self.pages.data, + "found_in": { + "source": self.source.origin.data, + "url": self.source.url.data, + }, } - }) + ) if self.note.data: - data['notes'] = [{ - 'type': 'public_note', - 'content': self.note.data - }] + data["notes"] = [{"type": "public_note", "content": self.note.data}] # if we put 'copy' in the dict before the dict cleaning and if 'copy' # is set to 'No', then it will be removed by `remove_empties_from_dict` # So we need to add it after the cleaning - data['copy'] = self.copy.data == '1' + data["copy"] = self.copy.data == "1" # if user select 'not specified' into the ILL request form, this value # must be removed from the dict. - if data.get('document', {}).get('year') == 'n/a': - del data['document']['year'] + if data.get("document", {}).get("year") == "n/a": + del data["document"]["year"] return data diff --git a/rero_ils/modules/ill_requests/jsonresolver.py b/rero_ils/modules/ill_requests/jsonresolver.py index afab41b6cb..8bd9895a22 100644 --- a/rero_ils/modules/ill_requests/jsonresolver.py +++ b/rero_ils/modules/ill_requests/jsonresolver.py @@ -22,7 +22,7 @@ from rero_ils.modules.jsonresolver import resolve_json_refs -@jsonresolver.route('/api/ill_requests/', host='bib.rero.ch') +@jsonresolver.route("/api/ill_requests/", host="bib.rero.ch") def ill_request_resolver(pid): """Resolver for ill_request record.""" - return resolve_json_refs('illr', pid) + return resolve_json_refs("illr", pid) diff --git a/rero_ils/modules/ill_requests/listener.py b/rero_ils/modules/ill_requests/listener.py index d0ddeac147..7cad628c17 100644 --- a/rero_ils/modules/ill_requests/listener.py +++ b/rero_ils/modules/ill_requests/listener.py @@ -17,13 +17,14 @@ """Signals connector for Item.""" -from .api import ILLRequest, ILLRequestsSearch from ..locations.api import Location from ..utils import extracted_data_from_ref +from .api import ILLRequest, ILLRequestsSearch -def enrich_ill_request_data(sender, json=None, record=None, index=None, - doc_type=None, **dummy_kwargs): +def enrich_ill_request_data( + sender, json=None, record=None, index=None, doc_type=None, **dummy_kwargs +): """Signal sent before a record is indexed. :param json: The dumped record dictionary which can be modified. @@ -31,20 +32,13 @@ def enrich_ill_request_data(sender, json=None, record=None, index=None, :param index: The index in which the record will be indexed. :param doc_type: The doc_type for the record. 
""" - if index.split('-')[0] == ILLRequestsSearch.Meta.index: + if index.split("-")[0] == ILLRequestsSearch.Meta.index: if not isinstance(record, ILLRequest): - record = ILLRequest.get_record_by_pid(record.get('pid')) - json['organisation'] = { - 'pid': record.organisation_pid - } + record = ILLRequest.get_record_by_pid(record.get("pid")) + json["organisation"] = {"pid": record.organisation_pid} # add patron name to ES index (for faceting) - patron = extracted_data_from_ref( - record.get('patron').get('$ref'), 'record') - json['patron']['name'] = patron.formatted_name - # add library informations to ES index (for faceting) - loc_pid = json.get('pickup_location', {}).get('pid') - if loc_pid: + patron = extracted_data_from_ref(record.get("patron").get("$ref"), "record") + json["patron"]["name"] = patron.formatted_name + if loc_pid := json.get("pickup_location", {}).get("pid"): parent_lib = Location.get_record_by_pid(loc_pid).get_library() - json['library'] = { - 'pid': parent_lib.pid - } + json["library"] = {"pid": parent_lib.pid} diff --git a/rero_ils/modules/ill_requests/models.py b/rero_ils/modules/ill_requests/models.py index 5f9b26badd..0d82e1680a 100644 --- a/rero_ils/modules/ill_requests/models.py +++ b/rero_ils/modules/ill_requests/models.py @@ -27,32 +27,33 @@ class ILLRequestIdentifier(RecordIdentifier): """Sequence generator for ILLRequest identifiers.""" - __tablename__ = 'ill_request_id' - __mapper_args__ = {'concrete': True} + __tablename__ = "ill_request_id" + __mapper_args__ = {"concrete": True} recid = db.Column( - db.BigInteger().with_variant(db.Integer, 'sqlite'), - primary_key=True, autoincrement=True, + db.BigInteger().with_variant(db.Integer, "sqlite"), + primary_key=True, + autoincrement=True, ) class ILLRequestMetadata(db.Model, RecordMetadataBase): """ILLRequest record metadata.""" - __tablename__ = 'ill_request_metadata' + __tablename__ = "ill_request_metadata" class ILLRequestStatus: """Available status for an ILL request.""" - PENDING = 'pending' - VALIDATED = 'validated' - DENIED = 'denied' - CLOSED = 'closed' + PENDING = "pending" + VALIDATED = "validated" + DENIED = "denied" + CLOSED = "closed" class ILLRequestNoteStatus: """Available note status for an ILL request.""" - PUBLIC_NOTE = 'public_note' - STAFF_NOTE = 'staff_note' + PUBLIC_NOTE = "public_note" + STAFF_NOTE = "staff_note" diff --git a/rero_ils/modules/ill_requests/permissions.py b/rero_ils/modules/ill_requests/permissions.py index 99431dbea3..058c2b73c6 100644 --- a/rero_ils/modules/ill_requests/permissions.py +++ b/rero_ils/modules/ill_requests/permissions.py @@ -19,17 +19,20 @@ """Permissions for ILL request.""" from invenio_access import action_factory -from rero_ils.modules.permissions import AllowedByAction, \ - AllowedByActionRestrictByOrganisation, \ - AllowedByActionRestrictByOwnerOrOrganisation, RecordPermissionPolicy +from rero_ils.modules.permissions import ( + AllowedByAction, + AllowedByActionRestrictByOrganisation, + AllowedByActionRestrictByOwnerOrOrganisation, + RecordPermissionPolicy, +) # Actions to control ILL request policy -search_action = action_factory('illr-search') -read_action = action_factory('illr-read') -create_action = action_factory('illr-create') -update_action = action_factory('illr-update') -delete_action = action_factory('illr-delete') -access_action = action_factory('illr-access') +search_action = action_factory("illr-search") +read_action = action_factory("illr-read") +create_action = action_factory("illr-create") +update_action = action_factory("illr-update") 
+delete_action = action_factory("illr-delete") +access_action = action_factory("illr-access") class ILLRequestPermissionPolicy(RecordPermissionPolicy): diff --git a/rero_ils/modules/ill_requests/serializers.py b/rero_ils/modules/ill_requests/serializers.py index e1a739d088..9680b2d4dd 100644 --- a/rero_ils/modules/ill_requests/serializers.py +++ b/rero_ils/modules/ill_requests/serializers.py @@ -21,8 +21,12 @@ from rero_ils.modules.libraries.api import LibrariesSearch from rero_ils.modules.locations.api import LocationsSearch -from rero_ils.modules.serializers import CachedDataSerializerMixin, \ - JSONSerializer, RecordSchemaJSONV1, search_responsify +from rero_ils.modules.serializers import ( + CachedDataSerializerMixin, + JSONSerializer, + RecordSchemaJSONV1, + search_responsify, +) class ILLRequestJSONSerializer(JSONSerializer, CachedDataSerializerMixin): @@ -30,22 +34,21 @@ class ILLRequestJSONSerializer(JSONSerializer, CachedDataSerializerMixin): def _postprocess_search_hit(self, hit: dict) -> None: """Post-process each hit of a search result.""" - metadata = hit.get('metadata', {}) - if pid := metadata.get('pickup_location', {}).get('pid'): + metadata = hit.get("metadata", {}) + if pid := metadata.get("pickup_location", {}).get("pid"): location = self.get_resource(LocationsSearch(), pid) - pickup_name = location.get('ill_pickup_name', location.get('name')) - metadata['pickup_location']['name'] = pickup_name + pickup_name = location.get("ill_pickup_name", location.get("name")) + metadata["pickup_location"]["name"] = pickup_name super()._postprocess_search_hit(hit) def _postprocess_search_aggregations(self, aggregations: dict) -> None: """Post-process aggregations from a search result.""" JSONSerializer.enrich_bucket_with_data( - aggregations.get('library', {}).get('buckets', []), - LibrariesSearch, 'name' + aggregations.get("library", {}).get("buckets", []), LibrariesSearch, "name" ) super()._postprocess_search_aggregations(aggregations) _json = ILLRequestJSONSerializer(RecordSchemaJSONV1) -json_ill_request_search = search_responsify(_json, 'application/rero+json') -json_ill_request_response = record_responsify(_json, 'application/rero+json') +json_ill_request_search = search_responsify(_json, "application/rero+json") +json_ill_request_response = record_responsify(_json, "application/rero+json") diff --git a/rero_ils/modules/ill_requests/utils.py b/rero_ils/modules/ill_requests/utils.py index 72f7ef29eb..999881aab8 100644 --- a/rero_ils/modules/ill_requests/utils.py +++ b/rero_ils/modules/ill_requests/utils.py @@ -25,11 +25,9 @@ def get_pickup_location_options(): """Get all ill pickup location for all patron accounts.""" for ptrn_pid in [ptrn.pid for ptrn in current_patrons]: - for pid in Location.get_pickup_location_pids(ptrn_pid, - is_ill_pickup=True): + for pid in Location.get_pickup_location_pids(ptrn_pid, is_ill_pickup=True): location = Location.get_record_by_pid(pid) - location_name = location.get( - 'ill_pickup_name', location.get('name')) + location_name = location.get("ill_pickup_name", location.get("name")) yield (location.pid, location_name) @@ -41,8 +39,8 @@ def get_production_activity(doc, types=None): :return: generator production activity object """ assert types - for activity in doc.get('provisionActivity', []): - if activity['type'] in types: + for activity in doc.get("provisionActivity", []): + if activity["type"] in types: yield activity @@ -54,8 +52,8 @@ def get_production_activity_statement(production_activity, types=None): :return: generator statement object """ 
assert types - for statement in production_activity.get('statement', []): - if statement['type'] in types: + for statement in production_activity.get("statement", []): + if statement["type"] in types: yield statement @@ -67,6 +65,6 @@ def get_document_identifiers(doc, types=None): :returns: generator of ``rero_ils.commons.Identifier`` object """ assert types # ensure a least one type is asked - for identifier in doc.get('identifiedBy', []): - if identifier['type'] in types: + for identifier in doc.get("identifiedBy", []): + if identifier["type"] in types: yield identifier diff --git a/rero_ils/modules/ill_requests/views.py b/rero_ils/modules/ill_requests/views.py index c3477838b5..044833e1d1 100644 --- a/rero_ils/modules/ill_requests/views.py +++ b/rero_ils/modules/ill_requests/views.py @@ -19,36 +19,47 @@ from __future__ import absolute_import, print_function -from flask import Blueprint, current_app, flash, redirect, render_template, \ - request, url_for +from flask import ( + Blueprint, + current_app, + flash, + redirect, + render_template, + request, + url_for, +) from flask_babel import lazy_gettext as _ from rero_ils.modules.documents.api import Document from rero_ils.modules.documents.views import create_title_text -from .api import ILLRequest -from .forms import ILLRequestForm -from .models import ILLRequestStatus -from .utils import get_document_identifiers, get_pickup_location_options, \ - get_production_activity, get_production_activity_statement +from ...permissions import check_user_is_authenticated from ..commons.identifiers import IdentifierType from ..decorators import check_logged_as_patron from ..locations.api import Location from ..patrons.api import current_patrons from ..utils import extracted_data_from_ref, get_ref_for_pid -from ...permissions import check_user_is_authenticated +from .api import ILLRequest +from .forms import ILLRequestForm +from .models import ILLRequestStatus +from .utils import ( + get_document_identifiers, + get_pickup_location_options, + get_production_activity, + get_production_activity_statement, +) blueprint = Blueprint( - 'ill_requests', + "ill_requests", __name__, - url_prefix='/', - template_folder='templates', - static_folder='static', + url_prefix="/", + template_folder="templates", + static_folder="static", ) -@blueprint.route('/ill_requests/new', methods=['GET', 'POST']) -@check_user_is_authenticated(redirect_to='security.login') +@blueprint.route("/ill_requests/new", methods=["GET", "POST"]) +@check_user_is_authenticated(redirect_to="security.login") @check_logged_as_patron def ill_request_form(viewcode): """Return professional view.""" @@ -56,26 +67,27 @@ def ill_request_form(viewcode): # pickup locations selection are based on app context then the choices # can't be "calculated" on the form creation (context free). 
form.pickup_location.choices = [ - *form.pickup_location.choices, *list(sorted( - get_pickup_location_options(), key=lambda pickup: pickup[1]))] + *form.pickup_location.choices, + *list(sorted(get_pickup_location_options(), key=lambda pickup: pickup[1])), + ] # Extraction of the pids organizations from the connected patron - org_pids = ','.join( - [patron.organisation_pid for patron in current_patrons]) + org_pids = ",".join([patron.organisation_pid for patron in current_patrons]) # Populate data only if we are on the global view # and that the function is allowed in the configuration - if request.method == 'GET' and 'record_pid' in request.args \ - and current_app.config.get('RERO_ILS_ILL_REQUEST_ON_GLOBAL_VIEW') \ - and viewcode == current_app.config.get( - 'RERO_ILS_SEARCH_GLOBAL_VIEW_CODE'): - _populate_document_data_form(request.args['record_pid'], form) - - if request.method == 'POST' and form.validate_on_submit(): + if ( + request.method == "GET" + and "record_pid" in request.args + and current_app.config.get("RERO_ILS_ILL_REQUEST_ON_GLOBAL_VIEW") + and viewcode == current_app.config.get("RERO_ILS_SEARCH_GLOBAL_VIEW_CODE") + ): + _populate_document_data_form(request.args["record_pid"], form) + + if request.method == "POST" and form.validate_on_submit(): ill_request_data = form.get_data() # get the pickup location pid - loc_pid = extracted_data_from_ref( - ill_request_data['pickup_location']) + loc_pid = extracted_data_from_ref(ill_request_data["pickup_location"]) # get the patron account of the same org of the location pid def get_patron(loc_pid): @@ -84,24 +96,22 @@ def get_patron(loc_pid): if ptrn.organisation_pid == loc.organisation_pid: return ptrn - ill_request_data['patron'] = { - '$ref': get_ref_for_pid('patrons', get_patron(loc_pid).pid) + ill_request_data["patron"] = { + "$ref": get_ref_for_pid("patrons", get_patron(loc_pid).pid) } - ill_request_data['status'] = ILLRequestStatus.PENDING + ill_request_data["status"] = ILLRequestStatus.PENDING ILLRequest.create(ill_request_data, dbcommit=True, reindex=True) - flash( - _('The request has been transmitted to your library.'), - 'success' + flash(_("The request has been transmitted to your library."), "success") + return redirect( + url_for("patrons.profile", viewcode=viewcode, org_pids=org_pids) ) - return redirect(url_for( - 'patrons.profile', - viewcode=viewcode, - org_pids=org_pids)) - return render_template('rero_ils/ill_request_form.html', - form=form, - viewcode=viewcode, - org_pids=org_pids) + return render_template( + "rero_ils/ill_request_form.html", + form=form, + viewcode=viewcode, + org_pids=org_pids, + ) def _populate_document_data_form(doc_pid, form): @@ -114,42 +124,44 @@ def _populate_document_data_form(doc_pid, form): if not doc: return # Document title - form.document.title.data = create_title_text(doc.get('title')) + form.document.title.data = create_title_text(doc.get("title")) # Document authors (only first three) - statements = doc.get('responsibilityStatement', []) + statements = doc.get("responsibilityStatement", []) authors = [] for statement in statements: - authors.extend(author.get('value') for author in statement) + authors.extend(author.get("value") for author in statement) if authors: - form.document.authors.data = '; '.join(authors[:3]) + form.document.authors.data = "; ".join(authors[:3]) # Document publisher and year - types = ['bf:Publication'] + types = ["bf:Publication"] if production_activity := next(get_production_activity(doc, types), None): # Document date - if date := 
production_activity.get('startDate'): + if date := production_activity.get("startDate"): form.document.year.data = date - statement_types = ['bf:Agent'] + statement_types = ["bf:Agent"] # Document publisher - if statement := next(get_production_activity_statement( - production_activity, - statement_types - ), None): - if label := statement.get('label'): - form.document.publisher.data = label[0].get('value') + if statement := next( + get_production_activity_statement(production_activity, statement_types), + None, + ): + if label := statement.get("label"): + form.document.publisher.data = label[0].get("value") # Document identifier types = [ - IdentifierType.ISBN, IdentifierType.EAN, IdentifierType.ISSN, - IdentifierType.L_ISSN] + IdentifierType.ISBN, + IdentifierType.EAN, + IdentifierType.ISSN, + IdentifierType.L_ISSN, + ] if identifier := next(get_document_identifiers(doc, types), None): - type = _(identifier.get('type')) - value = identifier.get('value') - form.document.identifier.data = f'{value} ({type})' + identifier_type = _(identifier.get("type")) + value = identifier.get("value") + form.document.identifier.data = f"{value} ({identifier_type})" # Document source - form.source.origin.data = current_app.config.get( - 'RERO_ILS_ILL_DEFAULT_SOURCE') + form.source.origin.data = current_app.config.get("RERO_ILS_ILL_DEFAULT_SOURCE") form.source.url.data = url_for( - 'invenio_records_ui.doc', - viewcode=current_app.config.get( - 'RERO_ILS_SEARCH_GLOBAL_VIEW_CODE'), + "invenio_records_ui.doc", + viewcode=current_app.config.get("RERO_ILS_SEARCH_GLOBAL_VIEW_CODE"), pid_value=doc_pid, - _external=True) + _external=True, + ) diff --git a/rero_ils/modules/imports/api.py b/rero_ils/modules/imports/api.py index ee78f581bd..e0ddaa9169 100644 --- a/rero_ils/modules/imports/api.py +++ b/rero_ils/modules/imports/api.py @@ -33,21 +33,26 @@ from redis import Redis from six import BytesIO -from rero_ils.modules.documents.dojson.contrib.marc21tojson import \ - marc21_dnb, marc21_kul, marc21_loc, marc21_slsp, marc21_ugent +from rero_ils.modules.documents.dojson.contrib.marc21tojson import ( + marc21_dnb, + marc21_kul, + marc21_loc, + marc21_slsp, + marc21_ugent, +) from rero_ils.modules.documents.dojson.contrib.unimarctojson import unimarc class Import(object): """Import class.""" - name = '' - url = '' - url_api = '' + name = "" + url = "" + url_api = "" search = {} to_json_processor = None status_code = 444 - status_msg = '' + status_msg = "" max_results = 50 timeout_connect = 5 timeout_request = 60 @@ -58,29 +63,23 @@ def __init__(self): assert self.url assert self.url_api assert self.search - assert self.search.get('anywhere') + assert self.search.get("anywhere") assert self.to_json_processor self.init_results() - self.cache = Redis.from_url(current_app.config.get( - 'RERO_IMPORT_CACHE' - )) - self.cache_expire = current_app.config.get('RERO_IMPORT_CACHE_EXPIRE') + self.cache = Redis.from_url(current_app.config.get("RERO_IMPORT_CACHE")) + self.cache_expire = current_app.config.get("RERO_IMPORT_CACHE_EXPIRE") def init_results(self): """Init results.""" self.results = { - 'aggregations': {}, - 'hits': { - 'hits': [], - 'total': { - 'value': 0, - 'relation': 'eq' - }, - 'remote_total': 0 + "aggregations": {}, + "hits": { + "hits": [], + "total": {"value": 0, "relation": "eq"}, + "remote_total": 0, }, - 'links': {}, - 'permissions': {} - + "links": {}, + "permissions": {}, } self.data = [] @@ -95,7 +94,7 @@ def get_id(self, json_data): :param id: json document :return: id of the record """ - return 
json_data.get('001') + return json_data.get("001") def get_link(self, id): """Get direct link to record. @@ -107,8 +106,8 @@ def get_link(self, id): url=self.url, max_results=1, what=id, - relation='all', - where=self.search.get('recordid') + relation="all", + where=self.search.get("recordid"), ) return url_api @@ -129,15 +128,11 @@ def calculate_aggregations_add(self, type, data, id): :param id: id for the type """ if data: - ids_indexes = self.aggregations_creation[type].get( - data, - {'ids': set()} - ) - ids_indexes['ids'].add(id) + ids_indexes = self.aggregations_creation[type].get(data, {"ids": set()}) + ids_indexes["ids"].add(id) self.aggregations_creation[type][data] = ids_indexes - def calculate_aggregations_add_sub(self, type, data, sub_type, sub_data, - id): + def calculate_aggregations_add_sub(self, type, data, sub_type, sub_data, id): """Add data to aggregations_creation. :param type: type of the aggregation @@ -148,13 +143,12 @@ def calculate_aggregations_add_sub(self, type, data, sub_type, sub_data, """ if data: ids_indexes = self.aggregations_creation[type].get( - data, - {'ids': set(), 'sub_type': sub_type, 'sub': {}} + data, {"ids": set(), "sub_type": sub_type, "sub": {}} ) - ids_indexes['ids'].add(id) + ids_indexes["ids"].add(id) # check if we have data for subtype if sub_data: - ids_indexes['sub'].setdefault(sub_data, set()).add(id) + ids_indexes["sub"].setdefault(sub_data, set()).add(id) self.aggregations_creation[type][data] = ids_indexes def calculate_aggregations(self, record, id): @@ -164,38 +158,39 @@ def calculate_aggregations(self, record, id): :param id: id for the record :param indexd: index of the record """ - for document_type in record['type']: + for document_type in record["type"]: self.calculate_aggregations_add_sub( - type='document_type', - data=document_type['main_type'], - sub_type='document_subtype', - sub_data=document_type.get('subtype'), - id=id + type="document_type", + data=document_type["main_type"], + sub_type="document_subtype", + sub_data=document_type.get("subtype"), + id=id, ) - provision_activitys = record.get('provisionActivity', []) - year_min = int(datetime.now().strftime('%Y')) + provision_activitys = record.get("provisionActivity", []) + year_min = int(datetime.now().strftime("%Y")) year_max = 1400 for provision_activity in provision_activitys: - if date := provision_activity.get('startDate'): - self.calculate_aggregations_add('year', date, id) + if date := provision_activity.get("startDate"): + self.calculate_aggregations_add("year", date, id) int_date = int(date) year_min = min(int_date, year_min) year_max = max(int_date, year_max) - for agent in record.get('contribution', []): - if authorized_access_point := agent.get( - 'entity', {}).get('authorized_access_point'): + for agent in record.get("contribution", []): + if authorized_access_point := agent.get("entity", {}).get( + "authorized_access_point" + ): name = authorized_access_point - elif text := agent.get('entity', {}).get('_text'): + elif text := agent.get("entity", {}).get("_text"): name = text if name: - self.calculate_aggregations_add('author', name, id) + self.calculate_aggregations_add("author", name, id) - languages = record.get('language', []) + languages = record.get("language", []) for language in languages: - lang = language.get('value') - self.calculate_aggregations_add('language', lang, id) + lang = language.get("value") + self.calculate_aggregations_add("language", lang, id) return year_min, year_max + 1 @@ -206,66 +201,57 @@ def create_aggregations(self, 
results): :return: dictionary with results and added aggregations """ self.aggregations_creation = { - 'document_type': {}, - 'author': {}, - 'year': {}, - 'language': {} + "document_type": {}, + "author": {}, + "year": {}, + "language": {}, } - if year_config := results.get( - 'aggregations', {}).get('year', {}).get('config'): - results['aggregations'] = { - 'year': { - 'config': year_config - } - } + if year_config := results.get("aggregations", {}).get("year", {}).get("config"): + results["aggregations"] = {"year": {"config": year_config}} else: - results['aggregations'] = {} - year_min = int(datetime.now().strftime('%Y')) + results["aggregations"] = {} + year_min = int(datetime.now().strftime("%Y")) year_max = 1400 - for data in results['hits']['hits']: - y_min, y_max = self.calculate_aggregations( - data['metadata'], - data['id'] - ) + for data in results["hits"]["hits"]: + y_min, y_max = self.calculate_aggregations(data["metadata"], data["id"]) year_min = min(y_min, year_min) year_max = max(y_max, year_max) for agg, values in self.aggregations_creation.items(): buckets = [] for key, value in values.items(): - ids = value['ids'] + ids = value["ids"] bucket_data = { - 'ids': list(ids), - 'doc_count': len(ids), - 'key': str(key), - 'doc_count_error_upper_bound': 0, - 'sum_other_doc_count': 0 + "ids": list(ids), + "doc_count": len(ids), + "key": str(key), + "doc_count_error_upper_bound": 0, + "sum_other_doc_count": 0, } - if subs := value.get('sub'): + if subs := value.get("sub"): sub_buckets = [] for sub_key, sub_value in subs.items(): - sub_buckets.append({ - 'ids': list(sub_value), - 'doc_count': len(sub_value), - 'key': sub_key - }) - sub_buckets.sort( - key=lambda e: (-e['doc_count'], e['key'])) - bucket_data[value.get('sub_type')] = { - 'buckets': sub_buckets - } + sub_buckets.append( + { + "ids": list(sub_value), + "doc_count": len(sub_value), + "key": sub_key, + } + ) + sub_buckets.sort(key=lambda e: (-e["doc_count"], e["key"])) + bucket_data[value.get("sub_type")] = {"buckets": sub_buckets} buckets.append(bucket_data) - buckets.sort(key=lambda e: (-e['doc_count'], e['key'])) + buckets.sort(key=lambda e: (-e["doc_count"], e["key"])) if buckets: - results['aggregations'][agg] = {'buckets': buckets} - results['aggregations'].setdefault('year', {}) - if 'config' not in results['aggregations']['year']: - results['aggregations']['year']['config'] = { - 'max': year_max, - 'min': year_min, - 'step': 1 + results["aggregations"][agg] = {"buckets": buckets} + results["aggregations"].setdefault("year", {}) + if "config" not in results["aggregations"]["year"]: + results["aggregations"]["year"]["config"] = { + "max": year_max, + "min": year_min, + "step": 1, } - results['aggregations']['year']['type'] = 'range' - results['hits']['total']['value'] = len(results['hits']['hits']) + results["aggregations"]["year"]["type"] = "range" + results["hits"]["total"]["value"] = len(results["hits"]["hits"]) return results def filter_records(self, results, ids): @@ -276,9 +262,9 @@ def filter_records(self, results, ids): :return: dictionary with results filtered by ids and adapted aggregations """ - hits = results['hits']['hits'] - hits = list(filter(lambda hit: hit['id'] in ids, hits)) - results['hits']['hits'] = hits + hits = results["hits"]["hits"] + hits = list(filter(lambda hit: hit["id"] in ids, hits)) + results["hits"]["hits"] = hits self.create_aggregations(results) return results @@ -291,13 +277,10 @@ def get_ids_for_aggregation(self, results, aggregation, key): :return: list of ids """ ids = [] - 
buckets = results.get('aggregations').get( - aggregation, {} - ).get('buckets', []) - bucket = list( - filter(lambda bucket: bucket['key'] == str(key), buckets)) + buckets = results.get("aggregations").get(aggregation, {}).get("buckets", []) + bucket = list(filter(lambda bucket: bucket["key"] == str(key), buckets)) if bucket: - ids = bucket[0]['ids'] + ids = bucket[0]["ids"] return ids def get_ids_for_aggregation_sub(self, results, agg, key, sub_agg, sub_key): @@ -312,18 +295,15 @@ def get_ids_for_aggregation_sub(self, results, agg, key, sub_agg, sub_key): :return: list of ids """ ids = [] - buckets = results.get('aggregations').get(agg, {}).get('buckets', []) - if bucket := list( - filter(lambda bucket: bucket['key'] == str(key), buckets) - ): - sub_buckets = bucket[0].get(sub_agg, {}).get('buckets', []) + buckets = results.get("aggregations").get(agg, {}).get("buckets", []) + if bucket := list(filter(lambda bucket: bucket["key"] == str(key), buckets)): + sub_buckets = bucket[0].get(sub_agg, {}).get("buckets", []) sub_bucket = list( filter( - lambda sub_bucket: sub_bucket['key'] == str(sub_key), - sub_buckets + lambda sub_bucket: sub_bucket["key"] == str(sub_key), sub_buckets ) ) - ids = sub_bucket[0]['ids'] + ids = sub_bucket[0]["ids"] return ids def _create_sru_url(self, what, relation, where, max_results): @@ -342,7 +322,7 @@ def _create_sru_url(self, what, relation, where, max_results): max_results=max_results, what=what, relation=relation, - where=where_search[0] + where=where_search[0], ) for key in where_search[1:]: url_api = f'{url_api} OR {key} {relation} "{what}"' @@ -352,7 +332,7 @@ def _create_sru_url(self, what, relation, where, max_results): max_results=max_results, what=what, relation=relation, - where=where_search + where=where_search, ) return url_api @@ -360,32 +340,29 @@ def clean_marc(self, json_data): """Clean JSON data from unwanted tags.""" new_json_data = {} new_order = [] - if leader := json_data.get('leader'): - new_json_data = {'leader': leader} - new_order.append('leader') + if leader := json_data.get("leader"): + new_json_data = {"leader": leader} + new_order.append("leader") for key in sorted(json_data.keys()): # Don't use 9XX tag's if ( - key[0] != '9' and - key != 'leader' and - key != '__order__' and - key[:3].isdigit() + key[0] != "9" + and key != "leader" + and key != "__order__" + and key[:3].isdigit() ): new_json_data[key] = json_data[key] new_order.extend( key - for key in list(json_data['__order__']) - if ( - key[0] != '9' and - key != 'leader' and - key[:3].isdigit() - ) + for key in list(json_data["__order__"]) + if (key[0] != "9" and key != "leader" and key[:3].isdigit()) ) - new_json_data['__order__'] = new_order + new_json_data["__order__"] = new_order return GroupableOrderedDict(new_json_data) - def search_records(self, what, relation, where='anywhere', max_results=0, - no_cache=False): + def search_records( + self, what, relation, where="anywhere", max_results=0, no_cache=False + ): """Get the records. 
:param what: what term to search @@ -394,51 +371,45 @@ def search_records(self, what, relation, where='anywhere', max_results=0, :param max_results: maximum records to search :param no_cache: do not use cache if true """ + def _split_stream(stream): """Yield record elements from given XML stream.""" try: for _, element in etree.iterparse( - stream, - tag='{http://www.loc.gov/zing/srw/}' - 'record'): + stream, tag="{http://www.loc.gov/zing/srw/}" "record" + ): yield element except Exception: current_app.logger.error( - f'Import: {self.name} ' - 'error: XML SPLIT ' - f'url: {url_api}' + f"Import: {self.name} " "error: XML SPLIT " f"url: {url_api}" ) return [] if max_results == 0: max_results = self.max_results - if self.name == 'LOC' and relation == "all": + if self.name == "LOC" and relation == "all": relation = "=" self.init_results() - url_api = 'Not yet set' + url_api = "Not yet set" if not what: return self.results, 200 try: - cache_key = f'{self.name}_{what}_{relation}_{where}_{max_results}' + cache_key = f"{self.name}_{what}_{relation}_{where}_{max_results}" cache = self.cache.get(cache_key) if cache and not no_cache: cache_data = pickle.loads(cache) - self.results['hits'] = cache_data['hits'] - self.data = cache_data['data'] + self.results["hits"] = cache_data["hits"] + self.data = cache_data["data"] self.status_code = 200 else: url_api = self._create_sru_url( - what=what, - relation=relation, - where=where, - max_results=max_results + what=what, relation=relation, where=where, max_results=max_results ) response = requests.get( - url_api, - timeout=(self.timeout_connect, self.timeout_request) + url_api, timeout=(self.timeout_connect, self.timeout_request) ) self.status_code = response.status_code - self.status_msg = 'Request error.' + self.status_msg = "Request error." 
response.raise_for_status() for xml_record in _split_stream(BytesIO(response.content)): @@ -454,59 +425,56 @@ def _split_stream(stream): id_ = self.get_id(json_data) if record and id_: data = { - 'id': id_, - 'links': { - 'self': self.get_link(id_), - 'marc21': self.get_marc21_link(id_) + "id": id_, + "links": { + "self": self.get_link(id_), + "marc21": self.get_marc21_link(id_), }, - 'metadata': record, - 'source': self.name + "metadata": record, + "source": self.name, } self.data.append(json_data) - self.results['hits']['hits'].append(data) - self.results['hits']['remote_total'] = int( + self.results["hits"]["hits"].append(data) + self.results["hits"]["remote_total"] = int( etree.parse(BytesIO(response.content)) - .find('{*}numberOfRecords').text + .find("{*}numberOfRecords") + .text ) # save to cache if we have hits - if self.results['hits']['hits']: - cache_data = { - 'hits': self.results['hits'], - 'data': self.data - } + if self.results["hits"]["hits"]: + cache_data = {"hits": self.results["hits"], "data": self.data} self.cache.setex( cache_key, timedelta(minutes=self.cache_expire), - value=pickle.dumps(cache_data) + value=pickle.dumps(cache_data), ) - self.results['hits']['total']['value'] = len( - self.results['hits']['hits']) + self.results["hits"]["total"]["value"] = len(self.results["hits"]["hits"]) self.create_aggregations(self.results) except requests.exceptions.ConnectionError as error: self.status_code = 433 self.status_msg = str(error) except requests.exceptions.HTTPError as error: - current_app.logger.error(f'{type(error)} {error}') + current_app.logger.error(f"{type(error)} {error}") self.status_code = error.response.status_code self.status_msg = str(error) - current_app.logger.error(f'HTTPError: {traceback.format_exc()}') + current_app.logger.error(f"HTTPError: {traceback.format_exc()}") except Exception as error: self.status_code = 500 self.status_msg = str(error) - current_app.logger.error(f'Exception: {traceback.format_exc()}') + current_app.logger.error(f"Exception: {traceback.format_exc()}") if self.status_code > 400: # TODO: enable error logging only for 500 # if self.status_code == 500: current_app.logger.error( - f'Import: {self.name} ' - f'code: {self.status_code} ' - f'error: {self.status_msg} ' - f'url: {url_api}' + f"Import: {self.name} " + f"code: {self.status_code} " + f"error: {self.status_msg} " + f"url: {url_api}" ) - self.results['errors'] = { - 'code': self.status_code, - 'message': self.status_msg, - 'url': url_api + self.results["errors"] = { + "code": self.status_code, + "message": self.status_msg, + "url": url_api, } return self.results, self.status_code @@ -514,24 +482,26 @@ def _split_stream(stream): class BnfImport(Import): """Import class for BNF.""" - name = 'BNF' - url = 'https://catalogue.bnf.fr' - url_api = '{url}/api/SRU?'\ - 'version=1.2&operation=searchRetrieve'\ - '&recordSchema=unimarcxchange-anl&maximumRecords={max_results}'\ - '&startRecord=1&query={where} {relation} "{what}"' + name = "BNF" + url = "https://catalogue.bnf.fr" + url_api = ( + "{url}/api/SRU?" 
+ "version=1.2&operation=searchRetrieve" + "&recordSchema=unimarcxchange-anl&maximumRecords={max_results}" + '&startRecord=1&query={where} {relation} "{what}"' + ) # https://www.bnf.fr/sites/default/files/2019-04/tableau_criteres_sru.pdf search = { - 'ean': 'bib.ean', - 'anywhere': 'bib.anywhere', - 'author': 'bib.author', - 'title': 'bib.title', - 'doctype': 'bib.doctype', - 'recordid': 'bib.recordid', - 'isbn': 'bib.isbn', - 'issn': 'bib.issn', - 'date': 'bib.date' + "ean": "bib.ean", + "anywhere": "bib.anywhere", + "author": "bib.author", + "title": "bib.title", + "doctype": "bib.doctype", + "recordid": "bib.recordid", + "isbn": "bib.isbn", + "issn": "bib.issn", + "date": "bib.date", } to_json_processor = unimarc.do @@ -543,35 +513,36 @@ def get_marc21_link(self, id): :return: url for id """ args = { - 'id': id, - '_external': True, - current_app.config.get( - 'REST_MIMETYPE_QUERY_ARG_NAME', 'format'): 'marc' + "id": id, + "_external": True, + current_app.config.get("REST_MIMETYPE_QUERY_ARG_NAME", "format"): "marc", } - return url_for('api_imports.import_bnf_record', **args) + return url_for("api_imports.import_bnf_record", **args) class LoCImport(Import): """Import class for Library of Congress.""" - name = 'LOC' - url = 'http://lx2.loc.gov:210' - url_api = '{url}/lcdb?'\ - 'version=1.2&operation=searchRetrieve'\ - '&recordSchema=marcxml&maximumRecords={max_results}'\ - '&startRecord=1&query={where} {relation} "{what}"' + name = "LOC" + url = "http://lx2.loc.gov:210" + url_api = ( + "{url}/lcdb?" + "version=1.2&operation=searchRetrieve" + "&recordSchema=marcxml&maximumRecords={max_results}" + '&startRecord=1&query={where} {relation} "{what}"' + ) # http://www.loc.gov/standards/sru/resources/lcServers.html search = { - 'ean': 'dc.identifier', - 'anywhere': 'anywhere', - 'author': 'dc.creator', - 'title': 'dc.title', - 'doctype': 'dc.type', - 'recordid': 'dc.identifier', - 'isbn': 'dc.identifier', - 'issn': 'dc.identifier', - 'date': 'dc.date' + "ean": "dc.identifier", + "anywhere": "anywhere", + "author": "dc.creator", + "title": "dc.title", + "doctype": "dc.type", + "recordid": "dc.identifier", + "isbn": "dc.identifier", + "issn": "dc.identifier", + "date": "dc.date", } to_json_processor = marc21_loc.do @@ -583,8 +554,8 @@ def get_id(self, json_data): :param id: json document :return: id of the record """ - if json_data.get('010__'): - return json_data.get('010__').get('a').strip() + if json_data.get("010__"): + return json_data.get("010__").get("a").strip() def get_marc21_link(self, id): """Get direct link to marc21 record. @@ -593,35 +564,36 @@ def get_marc21_link(self, id): :return: url for id """ args = { - 'id': id, - '_external': True, - current_app.config.get( - 'REST_MIMETYPE_QUERY_ARG_NAME', 'format'): 'marc' + "id": id, + "_external": True, + current_app.config.get("REST_MIMETYPE_QUERY_ARG_NAME", "format"): "marc", } - return url_for('api_imports.import_loc_record', **args) + return url_for("api_imports.import_loc_record", **args) class DNBImport(Import): """Import class for DNB.""" - name = 'DNB' - url = 'https://services.dnb.de' - url_api = '{url}/sru/dnb?'\ - 'version=1.1&operation=searchRetrieve'\ - '&recordSchema=MARC21-xml&maximumRecords={max_results}'\ - '&startRecord=1&query={where} {relation} "{what}"' + name = "DNB" + url = "https://services.dnb.de" + url_api = ( + "{url}/sru/dnb?" 
+ "version=1.1&operation=searchRetrieve" + "&recordSchema=MARC21-xml&maximumRecords={max_results}" + '&startRecord=1&query={where} {relation} "{what}"' + ) # https://www.dnb.de/EN/Professionell/Metadatendienste/Datenbezug/SRU/sru_node.html search = { - 'ean': 'dnb.num', - 'anywhere': 'dnb.woe', - 'author': 'dnb.atr', - 'title': 'dnb.tit', - 'doctype': 'dnb.mat', - 'recordid': 'dnb.num', - 'isbn': 'dnb.num', - 'issn': 'dnb.num', - 'date': 'dnb.jhr' + "ean": "dnb.num", + "anywhere": "dnb.woe", + "author": "dnb.atr", + "title": "dnb.tit", + "doctype": "dnb.mat", + "recordid": "dnb.num", + "isbn": "dnb.num", + "issn": "dnb.num", + "date": "dnb.jhr", } to_json_processor = marc21_dnb.do @@ -633,35 +605,36 @@ def get_marc21_link(self, id): :return: url for id """ args = { - 'id': id, - '_external': True, - current_app.config.get( - 'REST_MIMETYPE_QUERY_ARG_NAME', 'format'): 'marc' + "id": id, + "_external": True, + current_app.config.get("REST_MIMETYPE_QUERY_ARG_NAME", "format"): "marc", } - return url_for('api_imports.import_dnb_record', **args) + return url_for("api_imports.import_dnb_record", **args) class SUDOCImport(Import): """Import class for SUDOC.""" - name = 'SUDOC' - url = 'https://www.sudoc.abes.fr' - url_api = '{url}/cbs/sru/?'\ - 'version=1.1&operation=searchRetrieve'\ - '&recordSchema=unimarc&maximumRecords={max_results}'\ - '&startRecord=1&query={where} {relation} "{what}"' + name = "SUDOC" + url = "https://www.sudoc.abes.fr" + url_api = ( + "{url}/cbs/sru/?" + "version=1.1&operation=searchRetrieve" + "&recordSchema=unimarc&maximumRecords={max_results}" + '&startRecord=1&query={where} {relation} "{what}"' + ) # https://abes.fr/wp-content/uploads/2023/05/guide-utilisation-service-sru-catalogue-sudoc.pdf search = { - 'ean': 'isb', - 'anywhere': ['tou', 'num', 'ppn'], - 'author': 'dc.creator', - 'title': 'dc.title', - 'doctype': 'tdo', - 'recordid': 'ppn', - 'isbn': 'isb', - 'issn': 'isn', - 'date': 'dc.date' + "ean": "isb", + "anywhere": ["tou", "num", "ppn"], + "author": "dc.creator", + "title": "dc.title", + "doctype": "tdo", + "recordid": "ppn", + "isbn": "isb", + "issn": "isn", + "date": "dc.date", } to_json_processor = unimarc.do @@ -673,34 +646,35 @@ def get_marc21_link(self, id): :return: url for id """ args = { - 'id': id, - '_external': True, - current_app.config.get( - 'REST_MIMETYPE_QUERY_ARG_NAME', 'format'): 'marc' + "id": id, + "_external": True, + current_app.config.get("REST_MIMETYPE_QUERY_ARG_NAME", "format"): "marc", } - return url_for('api_imports.import_sudoc_record', **args) + return url_for("api_imports.import_sudoc_record", **args) class SLSPImport(Import): """Import class for SLSP.""" - name = 'SLSP' - url = 'https://swisscovery.slsp.ch' - url_api = '{url}/view/sru/41SLSP_NETWORK?'\ - 'version=1.2&operation=searchRetrieve'\ - '&recordSchema=marcxml&maximumRecords={max_results}'\ - '&startRecord=1&query={where} {relation} "{what}"' + name = "SLSP" + url = "https://swisscovery.slsp.ch" + url_api = ( + "{url}/view/sru/41SLSP_NETWORK?" 
+ "version=1.2&operation=searchRetrieve" + "&recordSchema=marcxml&maximumRecords={max_results}" + '&startRecord=1&query={where} {relation} "{what}"' + ) # https://slsp.ch/fr/metadata # https://developers.exlibrisgroup.com/alma/integrations/sru/ search = { - 'anywhere': 'alma.all_for_ui', - 'author': 'alma.author', - 'title': 'alma.title', - 'recordid': 'alma.mms_id', - 'isbn': 'alma.isbn', - 'issn': 'alma.issn', - 'date': 'alma.date' + "anywhere": "alma.all_for_ui", + "author": "alma.author", + "title": "alma.title", + "recordid": "alma.mms_id", + "isbn": "alma.isbn", + "issn": "alma.issn", + "date": "alma.date", } to_json_processor = marc21_slsp.do @@ -712,35 +686,36 @@ def get_marc21_link(self, id): :return: url for id """ args = { - 'id': id, - '_external': True, - current_app.config.get( - 'REST_MIMETYPE_QUERY_ARG_NAME', 'format'): 'marc' + "id": id, + "_external": True, + current_app.config.get("REST_MIMETYPE_QUERY_ARG_NAME", "format"): "marc", } - return url_for('api_imports.import_slsp_record', **args) + return url_for("api_imports.import_slsp_record", **args) class UGentImport(Import): """Import class for Univ. of Gent (Belgium).""" - name = 'UGent' - url = 'https://lib.ugent.be/sru' - url_api = '{url}?'\ - 'version=1.1&operation=searchRetrieve'\ - '&recordSchema=marcxml&maximumRecords={max_results}'\ - '&startRecord=1&query={where} {relation} "{what}"' + name = "UGent" + url = "https://lib.ugent.be/sru" + url_api = ( + "{url}?" + "version=1.1&operation=searchRetrieve" + "&recordSchema=marcxml&maximumRecords={max_results}" + '&startRecord=1&query={where} {relation} "{what}"' + ) # https://lib.ugent.be/sru search = { - 'ean': 'isbn', - 'anywhere': 'all', - 'author': 'author', - 'title': 'title', - 'doctype': 'dc.type', - 'recordid': 'all', - 'isbn': 'isbn', - 'issn': 'issn', - 'date': 'year' + "ean": "isbn", + "anywhere": "all", + "author": "author", + "title": "title", + "doctype": "dc.type", + "recordid": "all", + "isbn": "isbn", + "issn": "issn", + "date": "year", } to_json_processor = marc21_ugent.do @@ -751,12 +726,12 @@ def get_id(self, json_data): :param id: json document :return: id of the record """ - id = None - if json_data.get('001'): - id = json_data.get('001') - elif json_data.get('090__'): - id = json_data.get('090__').get('a').strip() - return id + id_ = None + if json_data.get("001"): + id_ = json_data.get("001") + elif json_data.get("090__"): + id_ = json_data.get("090__").get("a").strip() + return id_ def get_marc21_link(self, id): """Get direct link to marc21 record. @@ -765,33 +740,34 @@ def get_marc21_link(self, id): :return: url for id """ args = { - 'id': id, - '_external': True, - current_app.config.get( - 'REST_MIMETYPE_QUERY_ARG_NAME', 'format'): 'marc' + "id": id, + "_external": True, + current_app.config.get("REST_MIMETYPE_QUERY_ARG_NAME", "format"): "marc", } - return url_for('api_imports.import_ugent_record', **args) + return url_for("api_imports.import_ugent_record", **args) class KULImport(Import): """Import class for KULeuven.""" - name = 'KUL' - url = 'https://eu.alma.exlibrisgroup.com' - url_api = '{url}/view/sru/32KUL_LIBIS_NETWORK?'\ - 'version=1.2&operation=searchRetrieve'\ - '&recordSchema=marcxml&maximumRecords={max_results}'\ - '&startRecord=1&query={where} {relation} "{what}"' + name = "KUL" + url = "https://eu.alma.exlibrisgroup.com" + url_api = ( + "{url}/view/sru/32KUL_LIBIS_NETWORK?" 
+ "version=1.2&operation=searchRetrieve" + "&recordSchema=marcxml&maximumRecords={max_results}" + '&startRecord=1&query={where} {relation} "{what}"' + ) # https://developers.exlibrisgroup.com/alma/integrations/sru/ search = { - 'anywhere': 'alma.all_for_ui', - 'author': 'alma.creator', - 'title': 'alma.title', - 'recordid': 'alma.mms_id', - 'isbn': 'alma.isbn', - 'issn': 'alma.issn', - 'date': 'alma.date' + "anywhere": "alma.all_for_ui", + "author": "alma.creator", + "title": "alma.title", + "recordid": "alma.mms_id", + "isbn": "alma.isbn", + "issn": "alma.issn", + "date": "alma.date", } to_json_processor = marc21_kul.do @@ -803,33 +779,34 @@ def get_marc21_link(self, id): :return: url for id """ args = { - 'id': id, - '_external': True, - current_app.config.get( - 'REST_MIMETYPE_QUERY_ARG_NAME', 'format'): 'marc' + "id": id, + "_external": True, + current_app.config.get("REST_MIMETYPE_QUERY_ARG_NAME", "format"): "marc", } - return url_for('api_imports.import_kul_record', **args) + return url_for("api_imports.import_kul_record", **args) class RenouvaudImport(Import): """Import class for Renouvaud.""" - name = 'Renouvaud' - url = 'https://renouvaud.primo.exlibrisgroup.com' - url_api = '{url}/view/sru/41BCULAUSA_NETWORK?'\ - 'version=1.2&operation=searchRetrieve'\ - '&recordSchema=marcxml&maximumRecords={max_results}'\ - '&startRecord=1&query={where} {relation} "{what}"' + name = "Renouvaud" + url = "https://renouvaud.primo.exlibrisgroup.com" + url_api = ( + "{url}/view/sru/41BCULAUSA_NETWORK?" + "version=1.2&operation=searchRetrieve" + "&recordSchema=marcxml&maximumRecords={max_results}" + '&startRecord=1&query={where} {relation} "{what}"' + ) # https://slsp.ch/fr/metadata # https://developers.exlibrisgroup.com/alma/integrations/sru/ search = { - 'anywhere': 'alma.all_for_ui', - 'author': 'alma.author', - 'title': 'alma.title', - 'recordid': 'alma.mms_id', - 'isbn': 'alma.isbn', - 'issn': 'alma.issn', - 'date': 'alma.date' + "anywhere": "alma.all_for_ui", + "author": "alma.author", + "title": "alma.title", + "recordid": "alma.mms_id", + "isbn": "alma.isbn", + "issn": "alma.issn", + "date": "alma.date", } to_json_processor = marc21_slsp.do @@ -841,9 +818,8 @@ def get_marc21_link(self, id): :return: url for id """ args = { - 'id': id, - '_external': True, - current_app.config.get( - 'REST_MIMETYPE_QUERY_ARG_NAME', 'format'): 'marc' + "id": id, + "_external": True, + current_app.config.get("REST_MIMETYPE_QUERY_ARG_NAME", "format"): "marc", } - return url_for('api_imports.import_renouvaud_record', **args) + return url_for("api_imports.import_renouvaud_record", **args) diff --git a/rero_ils/modules/imports/exceptions.py b/rero_ils/modules/imports/exceptions.py index 45fe1de1f1..e9e0354d35 100644 --- a/rero_ils/modules/imports/exceptions.py +++ b/rero_ils/modules/imports/exceptions.py @@ -24,4 +24,4 @@ class ResultNotFoundOnTheRemoteServer(RESTException): """Non existent remote record.""" code = 404 - description = 'Record not found on the remote server.' + description = "Record not found on the remote server." 
diff --git a/rero_ils/modules/imports/serializers/__init__.py b/rero_ils/modules/imports/serializers/__init__.py
index 43d4909d81..e6fb0e7b31 100644
--- a/rero_ils/modules/imports/serializers/__init__.py
+++ b/rero_ils/modules/imports/serializers/__init__.py
@@ -21,11 +21,15 @@
 from rero_ils.modules.serializers import search_responsify
 
 from .response import record_responsify
-from .serializers import ImportSchemaJSONV1, ImportsMarcSearchSerializer, \
-    ImportsSearchSerializer, UIImportsSearchSerializer
+from .serializers import (
+    ImportSchemaJSONV1,
+    ImportsMarcSearchSerializer,
+    ImportsSearchSerializer,
+    UIImportsSearchSerializer,
+)
 
 
-def json_record_serializer_factory(import_class, serializer_type='record'):
+def json_record_serializer_factory(import_class, serializer_type="record"):
     """JSON record factory.
 
     Create a JSON serializer for the given import class.
@@ -33,19 +37,19 @@ def json_record_serializer_factory(import_class, serializer_type='record'):
     :param serializer_type: type of serializer (record, uirecord)
     :return: Records-REST response serializer
     """
-    if serializer_type == 'record':
+    if serializer_type == "record":
         return record_responsify(
             ImportsSearchSerializer(
-                ImportSchemaJSONV1,
-                record_processor=import_class.to_json_processor),
-            'application/json'
+                ImportSchemaJSONV1, record_processor=import_class.to_json_processor
+            ),
+            "application/json",
         )
-    if serializer_type == 'uirecord':
+    if serializer_type == "uirecord":
         return record_responsify(
             UIImportsSearchSerializer(
-                ImportSchemaJSONV1,
-                record_processor=import_class.to_json_processor),
-            'application/rero+json'
+                ImportSchemaJSONV1, record_processor=import_class.to_json_processor
+            ),
+            "application/rero+json",
         )
 
 
@@ -53,9 +57,8 @@ def json_record_serializer_factory(import_class, serializer_type='record'):
 json_v1_uisearch = UIImportsSearchSerializer(ImportSchemaJSONV1)
 json_v1_record_marc = ImportsMarcSearchSerializer(ImportSchemaJSONV1)
 
-json_v1_import_search = search_responsify(json_v1_search,
-                                          'application/json')
-json_v1_import_uisearch = search_responsify(json_v1_uisearch,
-                                            'application/rero+json')
-json_v1_import_record_marc = record_responsify(json_v1_record_marc,
-                                               'application/json+marc')
+json_v1_import_search = search_responsify(json_v1_search, "application/json")
+json_v1_import_uisearch = search_responsify(json_v1_uisearch, "application/rero+json")
+json_v1_import_record_marc = record_responsify(
+    json_v1_record_marc, "application/json+marc"
+)
diff --git a/rero_ils/modules/imports/serializers/response.py b/rero_ils/modules/imports/serializers/response.py
index f9ec13d53e..d2551bea36 100644
--- a/rero_ils/modules/imports/serializers/response.py
+++ b/rero_ils/modules/imports/serializers/response.py
@@ -34,9 +34,11 @@ def record_responsify(serializer, mimetype, dojson_class=None):
 
     def view(pid, record, code=200, headers=None, links_factory=None):
         response = current_app.response_class(
-            serializer.serialize(pid, record, links_factory=links_factory,
-                                 dojson_class=dojson_class),
-            mimetype=mimetype)
+            serializer.serialize(
+                pid, record, links_factory=links_factory, dojson_class=dojson_class
+            ),
+            mimetype=mimetype,
+        )
         response.status_code = code
         # TODO: do we have to set an etag?
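         # A possible answer, as an aside and an assumption (not part of this
         # change): Flask responses inherit conditional-request helpers from
         # Werkzeug, e.g.
         #     response.add_etag()                      # hash of the body
         #     response.make_conditional(flask_request) # may answer 304
         # `flask_request` would still need to be imported in this module.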
         # response.set_etag('xxxxx')
diff --git a/rero_ils/modules/imports/serializers/serializers.py b/rero_ils/modules/imports/serializers/serializers.py
index d0968af2a7..fbea571845 100644
--- a/rero_ils/modules/imports/serializers/serializers.py
+++ b/rero_ils/modules/imports/serializers/serializers.py
@@ -46,27 +46,28 @@ def __init__(self, *args, **kwargs):
         self.record_processor = kwargs.pop("record_processor", marc21.do)
         super(JSONSerializer, self).__init__(*args, **kwargs)
 
-    def serialize_search(self, pid_fetcher, search_result, links=None,
-                         item_links_factory=None, **kwargs):
+    def serialize_search(
+        self, pid_fetcher, search_result, links=None, item_links_factory=None, **kwargs
+    ):
         """Serialize a search result.
 
         :param pid_fetcher: Persistent identifier fetcher.
         :param search_result: Elasticsearch search result.
         :param links: Dictionary of links to add to response.
         """
-        for hit in search_result['hits']['hits']:
-            hit['metadata'] = self.post_process(hit['metadata'])
-            hit['metadata']['pid'] = hit['id']
+        for hit in search_result["hits"]["hits"]:
+            hit["metadata"] = self.post_process(hit["metadata"])
+            hit["metadata"]["pid"] = hit["id"]
         results = dict(
             hits=dict(
-                hits=search_result['hits']['hits'],
-                total=search_result['hits']['total']['value'],
-                remote_total=search_result['hits']['remote_total'],
+                hits=search_result["hits"]["hits"],
+                total=search_result["hits"]["total"]["value"],
+                remote_total=search_result["hits"]["remote_total"],
             ),
-            aggregations=search_result.get('aggregations', dict()),
+            aggregations=search_result.get("aggregations", {}),
         )
-        if errors := search_result.get('errors'):
-            results['errors'] = errors
+        if errors := search_result.get("errors"):
+            results["errors"] = errors
         # TODO: If we have multiple types for a document we have to correct
         # the document type buckets here.
         return json.dumps(results, **self._format_args())
@@ -89,18 +90,18 @@ def serialize(self, pid, record, links_factory=None, **kwargs):
         :param links: Dictionary of links to add to response.
         """
         return json.dumps(
-            dict(metadata=self.post_process(
-                self.record_processor(record))),
-            **self._format_args())
+            dict(metadata=self.post_process(self.record_processor(record))),
+            **self._format_args(),
+        )
 
 
 class UIImportsSearchSerializer(ImportsSearchSerializer):
     """Serializing records as JSON with additional data."""
 
     entity_mapping = {
-        'authorized_access_point': 'authorized_access_point',
-        'identifiedBy': 'identifiedBy',
-        'type': 'type'
+        "authorized_access_point": "authorized_access_point",
+        "identifiedBy": "identifiedBy",
+        "type": "type",
     }
 
     def post_process(self, metadata):
@@ -113,36 +114,34 @@ def post_process(self, metadata):
         """
         # TODO: See if this is ok.
from rero_ils.modules.documents.api import Document + metadata = Document(data=metadata).dumps() - titles = metadata.get('title', []) - text_title = TitleExtension.format_text(titles, with_subtitle=False) - if text_title: - metadata['ui_title_text'] = text_title - responsibility = metadata.get('responsibilityStatement', []) - text_title = TitleExtension.format_text( - titles, responsibility, with_subtitle=False) - if text_title: - metadata['ui_title_text_responsibility'] = text_title - for entity_type in ['contribution', 'subjects', 'genreForm']: + titles = metadata.get("title", []) + if text_title := TitleExtension.format_text(titles, with_subtitle=False): + metadata["ui_title_text"] = text_title + responsibility = metadata.get("responsibilityStatement", []) + if text_title := TitleExtension.format_text( + titles, responsibility, with_subtitle=False + ): + metadata["ui_title_text_responsibility"] = text_title + for entity_type in ["contribution", "subjects", "genreForm"]: entities = metadata.get(entity_type, []) new_entities = [] for entity in entities: - ent = entity['entity'] + ent = entity["entity"] # convert a MEF link into a local entity if entity_data := JsonRef.replace_refs(ent, loader=None).get( - 'metadata' + "metadata" ): ent = { local_value: entity_data[local_key] - for local_key, local_value - in self.entity_mapping.items() + for local_key, local_value in self.entity_mapping.items() if entity_data.get(local_key) } - new_entities.append({'entity': ent}) + new_entities.append({"entity": ent}) if new_entities: - metadata[entity_type] = \ - process_i18n_literal_fields(new_entities) + metadata[entity_type] = process_i18n_literal_fields(new_entities) return metadata @@ -156,12 +155,13 @@ def serialize(self, pid, record, links_factory=None, **kwargs): :param search_result: Elasticsearch search result. :param links: Dictionary of links to add to response. 
""" + def sort_ordered_dict(ordered_dict): res = [] for key, value in ordered_dict.items(): - if key != '__order__': + if key != "__order__": if len(key) == 5: - key = f'{key[:3]} {key[3:]}' + key = f"{key[:3]} {key[3:]}" if isinstance(value, dict): res.append([key, sort_ordered_dict(value)]) else: diff --git a/rero_ils/modules/imports/views.py b/rero_ils/modules/imports/views.py index c1d2dbc289..f526d059d0 100644 --- a/rero_ils/modules/imports/views.py +++ b/rero_ils/modules/imports/views.py @@ -28,26 +28,25 @@ from rero_ils.modules.decorators import check_logged_as_librarian from .exceptions import ResultNotFoundOnTheRemoteServer -from .serializers import json_record_serializer_factory, \ - json_v1_import_record_marc, json_v1_import_search, \ - json_v1_import_uisearch - -api_blueprint = Blueprint( - 'api_import', - __name__, - url_prefix='/imports' +from .serializers import ( + json_record_serializer_factory, + json_v1_import_record_marc, + json_v1_import_search, + json_v1_import_uisearch, ) +api_blueprint = Blueprint("api_import", __name__, url_prefix="/imports") -@api_blueprint.route('/config/', methods=['GET']) + +@api_blueprint.route("/config/", methods=["GET"]) @check_logged_as_librarian def get_config(): """Get configuration from config.py.""" - sources = current_app.config.get('RERO_IMPORT_REST_ENDPOINTS', {}).values() + sources = current_app.config.get("RERO_IMPORT_REST_ENDPOINTS", {}).values() for source in sources: - source.pop('import_class', None) - source.pop('import_size', None) - return jsonify(sorted(sources, key=lambda s: s.get('weight', 100))) + source.pop("import_class", None) + source.pop("import_size", None) + return jsonify(sorted(sources, key=lambda s: s.get("weight", 100))) class ImportsListResource(ContentNegotiatedMethodView): @@ -55,93 +54,83 @@ class ImportsListResource(ContentNegotiatedMethodView): def __init__(self, **kwargs): """Init.""" - self.import_class = obj_or_import_string(kwargs.pop('import_class')) - self.import_size = kwargs.pop('import_size', 50) + self.import_class = obj_or_import_string(kwargs.pop("import_class")) + self.import_size = kwargs.pop("import_size", 50) super().__init__( method_serializers={ - 'GET': { - 'application/json': json_v1_import_search, - 'application/rero+json': json_v1_import_uisearch + "GET": { + "application/json": json_v1_import_search, + "application/rero+json": json_v1_import_uisearch, } }, serializers_query_aliases={ - 'json': 'application/json', - 'rerojson': 'application/rero+json' - }, - default_method_media_type={ - 'GET': 'application/json' + "json": "application/json", + "rerojson": "application/rero+json", }, - default_media_type='application/json', - **kwargs + default_method_media_type={"GET": "application/json"}, + default_media_type="application/json", + **kwargs, ) def get(self, **kwargs): """Implement the GET.""" - no_cache = True if flask_request.args.get('no_cache') else False - query = flask_request.args.get('q') + no_cache = True if flask_request.args.get("no_cache") else False + query = flask_request.args.get("q") try: - query_split = query.split(':') + query_split = query.split(":") where = query_split[0] relation = query_split[1] - what = ':'.join(query_split[2:]) + what = ":".join(query_split[2:]) except Exception: - where = 'anywhere' - relation = 'all' + where = "anywhere" + relation = "all" what = query - size = flask_request.args.get('size', self.import_size) + size = flask_request.args.get("size", self.import_size) do_import = self.import_class() results, status_code = 
do_import.search_records( what=what, relation=relation, where=where, max_results=size, - no_cache=no_cache + no_cache=no_cache, ) - if filter_years := flask_request.args.get('year'): - values = dict(zip(['from', 'to'], filter_years.split('--'))) - values.setdefault('from', 1900) - values.setdefault('to', 2555) + if filter_years := flask_request.args.get("year"): + values = dict(zip(["from", "to"], filter_years.split("--"))) + values.setdefault("from", 1900) + values.setdefault("to", 2555) ids = [] - for year in range(int(values['from']), int(values['to'])): + for year in range(int(values["from"]), int(values["to"])): year_ids = do_import.get_ids_for_aggregation( - results=results, - aggregation='year', - key=int(year) + results=results, aggregation="year", key=int(year) ) ids += year_ids results = do_import.filter_records(results, list(set(ids))) - filter_type = flask_request.args.get('document_type') + filter_type = flask_request.args.get("document_type") if filter_type: - sub_filter_type = flask_request.args.get('document_subtype') + sub_filter_type = flask_request.args.get("document_subtype") if sub_filter_type: ids = do_import.get_ids_for_aggregation_sub( results=results, - agg='document_type', + agg="document_type", key=filter_type, - sub_agg='document_subtype', - sub_key=sub_filter_type + sub_agg="document_subtype", + sub_key=sub_filter_type, ) else: ids = do_import.get_ids_for_aggregation( - results=results, - aggregation='document_type', - key=filter_type + results=results, aggregation="document_type", key=filter_type ) results = do_import.filter_records(results, ids) - filter_author = flask_request.args.get('author') + filter_author = flask_request.args.get("author") if filter_author: ids = do_import.get_ids_for_aggregation( - results=results, - aggregation='author', - key=filter_author + results=results, aggregation="author", key=filter_author ) results = do_import.filter_records(results, ids) - filter_language = flask_request.args.get('language') + filter_language = flask_request.args.get("language") if filter_language: ids = do_import.get_ids_for_aggregation( - results=results, - aggregation='language', - key=filter_language + results=results, aggregation="language", key=filter_language ) results = do_import.filter_records(results, ids) # return None, results @@ -155,43 +144,41 @@ class ImportsResource(ContentNegotiatedMethodView): def __init__(self, **kwargs): """Init.""" - self.import_class = obj_or_import_string(kwargs.pop('import_class')) - self.import_size = kwargs.pop('import_size', 50) + self.import_class = obj_or_import_string(kwargs.pop("import_class")) + self.import_size = kwargs.pop("import_size", 50) super().__init__( method_serializers={ - 'GET': { - 'application/json': json_record_serializer_factory( + "GET": { + "application/json": json_record_serializer_factory( self.import_class ), - 'application/rero+json': json_record_serializer_factory( - self.import_class, serializer_type='uirecord' + "application/rero+json": json_record_serializer_factory( + self.import_class, serializer_type="uirecord" ), - 'application/marc+json': json_v1_import_record_marc + "application/marc+json": json_v1_import_record_marc, } }, serializers_query_aliases={ - 'json': 'application/json', - 'rerojson': 'application/rero+json', - 'marc': 'application/marc+json' - }, - default_method_media_type={ - 'GET': 'application/json' + "json": "application/json", + "rerojson": "application/rero+json", + "marc": "application/marc+json", }, - default_media_type='application/json', - **kwargs + 
default_method_media_type={"GET": "application/json"}, + default_media_type="application/json", + **kwargs, ) def get(self, id, **kwargs): """Implement the GET.""" - no_cache = True if flask_request.args.get('no_cache') else False - size = flask_request.args.get('size', self.import_size) + no_cache = True if flask_request.args.get("no_cache") else False + size = flask_request.args.get("size", self.import_size) do_import = self.import_class() do_import.search_records( what=id, - relation='all', - where='recordid', + relation="all", + where="recordid", max_results=size, - no_cache=no_cache + no_cache=no_cache, ) if not do_import.data: raise ResultNotFoundOnTheRemoteServer diff --git a/rero_ils/modules/indexer_utils.py b/rero_ils/modules/indexer_utils.py index b75fd3a3d6..a193a9bd80 100644 --- a/rero_ils/modules/indexer_utils.py +++ b/rero_ils/modules/indexer_utils.py @@ -35,16 +35,16 @@ def record_to_index(record): :return: index. """ index_names = current_search.mappings.keys() - schema = record.get('$schema', '') + schema = record.get("$schema", "") if isinstance(schema, dict): - schema = schema.get('$ref', '') + schema = schema.get("$ref", "") # authorities specific transformation - if re.search(r'/mef/', schema): - schema = re.sub(r'/mef/', '/remote_entities/', schema) - schema = re.sub(r'mef-contribution', 'remote_entity', schema) + if re.search(r"/mef/", schema): + schema = re.sub(r"/mef/", "/remote_entities/", schema) + schema = re.sub(r"mef-contribution", "remote_entity", schema) if index := schema_to_index(schema, index_names=index_names): return index else: - return current_app.config['INDEXER_DEFAULT_INDEX'] + return current_app.config["INDEXER_DEFAULT_INDEX"] diff --git a/rero_ils/modules/item_types/api.py b/rero_ils/modules/item_types/api.py index 25df8d8c3f..7887871142 100644 --- a/rero_ils/modules/item_types/api.py +++ b/rero_ils/modules/item_types/api.py @@ -18,26 +18,28 @@ """API for manipulating item types.""" + from __future__ import absolute_import, print_function +import contextlib from functools import partial from elasticsearch_dsl import Q from flask_babel import gettext as _ -from .models import ItemTypeIdentifier, ItemTypeMetadata from ..api import IlsRecord, IlsRecordsIndexer, IlsRecordsSearch from ..circ_policies.api import CircPoliciesSearch from ..fetchers import id_fetcher from ..minters import id_minter from ..providers import Provider from ..utils import extracted_data_from_ref, sorted_pids +from .models import ItemTypeIdentifier, ItemTypeMetadata # provider ItemTypeProvider = type( - 'ItemTypeProvider', + "ItemTypeProvider", (Provider,), - dict(identifier=ItemTypeIdentifier, pid_type='itty') + dict(identifier=ItemTypeIdentifier, pid_type="itty"), ) # minter item_type_id_minter = partial(id_minter, provider=ItemTypeProvider) @@ -51,9 +53,9 @@ class ItemTypesSearch(IlsRecordsSearch): class Meta: """Search only on item_types index.""" - index = 'item_types' + index = "item_types" doc_types = None - fields = ('*',) + fields = ("*",) facets = {} default_filter = None @@ -74,34 +76,34 @@ def extended_validation(self, **kwargs): per organisation. 
""" online_type_pid = self.get_organisation().online_circulation_category() - if self.get('type') == 'online' and online_type_pid and \ - self.pid != online_type_pid: - return _('Another online item type exists in this organisation') + if ( + self.get("type") == "online" + and online_type_pid + and self.pid != online_type_pid + ): + return _("Another online item type exists in this organisation") return True def get_organisation(self): """Get organisation.""" from ..organisations.api import Organisation - org_pid = extracted_data_from_ref(self.get('organisation')) + + org_pid = extracted_data_from_ref(self.get("organisation")) return Organisation.get_record_by_pid(org_pid) @classmethod def get_pid_by_name(cls, name): """Get pid by name.""" pid = None - try: - pids = [ + with contextlib.suppress(Exception): + if pids := [ n.pid for n in ItemTypesSearch() - .filter('term', item_type_name=name) - .source(includes=['pid']) + .filter("term", item_type_name=name) + .source(includes=["pid"]) .scan() - ] - if len(pids) > 0: + ]: pid = pids[0] - except Exception: - pass - # needs app_context to work, but is called before return pid @classmethod @@ -113,10 +115,13 @@ def exist_name_and_organisation_pid(cls, name, organisation_pid): :return: A ES hit if a circulation category already use thi name in the organisation; otherwise, return None. """ - item_type = ItemTypesSearch() \ - .filter('term', item_type_name=name) \ - .filter('term', organisation__pid=organisation_pid)\ - .source().scan() + item_type = ( + ItemTypesSearch() + .filter("term", item_type_name=name) + .filter("term", organisation__pid=organisation_pid) + .source() + .scan() + ) try: return next(item_type) except StopIteration: @@ -129,21 +134,20 @@ def get_links_to_me(self, get_pids=False): if False count of linked records """ from ..items.api import ItemsSearch + links = {} - items_query = ItemsSearch().filter('bool', should=[ - Q('term', item_type__pid=self.pid), - Q('term', temporary_item_type__pid=self.pid) - ]) - cipo_query = CircPoliciesSearch() \ - .filter( - 'nested', - path='settings', - query=Q( - 'bool', must=[ - Q('match', settings__item_type__pid=self.pid) - ] - ) - ) + items_query = ItemsSearch().filter( + "bool", + should=[ + Q("term", item_type__pid=self.pid), + Q("term", temporary_item_type__pid=self.pid), + ], + ) + cipo_query = CircPoliciesSearch().filter( + "nested", + path="settings", + query=Q("bool", must=[Q("match", settings__item_type__pid=self.pid)]), + ) if get_pids: items = sorted_pids(items_query) circ_policies = sorted_pids(cipo_query) @@ -151,17 +155,16 @@ def get_links_to_me(self, get_pids=False): items = items_query.count() circ_policies = cipo_query.count() if items: - links['items'] = items + links["items"] = items if circ_policies: - links['circ_policies'] = circ_policies + links["circ_policies"] = circ_policies return links def reasons_not_to_delete(self): """Get reasons not to delete record.""" cannot_delete = {} - links = self.get_links_to_me() - if links: - cannot_delete['links'] = links + if links := self.get_links_to_me(): + cannot_delete["links"] = links return cannot_delete def get_label(self, language=None): @@ -172,16 +175,17 @@ def get_label(self, language=None): return the item_type name. 
""" if language: - labels = self.get('displayed_status', []) \ - if self.get('negative_availability', False) \ - else self.get('circulation_information', []) + labels = ( + self.get("displayed_status", []) + if self.get("negative_availability", False) + else self.get("circulation_information", []) + ) label = [ - entry['label'] for entry in labels - if entry['language'] == language + entry["label"] for entry in labels if entry["language"] == language ] if label and label[0]: return label[0] - return self.get('name') + return self.get("name") class ItemTypesIndexer(IlsRecordsIndexer): @@ -194,4 +198,4 @@ def bulk_index(self, record_id_iterator): :param record_id_iterator: Iterator yielding record UUIDs. """ - super().bulk_index(record_id_iterator, doc_type='itty') + super().bulk_index(record_id_iterator, doc_type="itty") diff --git a/rero_ils/modules/item_types/jsonresolver.py b/rero_ils/modules/item_types/jsonresolver.py index d2682f8994..37de8ee1f0 100644 --- a/rero_ils/modules/item_types/jsonresolver.py +++ b/rero_ils/modules/item_types/jsonresolver.py @@ -23,7 +23,7 @@ from ..jsonresolver import resolve_json_refs -@jsonresolver.route('/api/item_types/', host='bib.rero.ch') +@jsonresolver.route("/api/item_types/", host="bib.rero.ch") def item_type_resolver(pid): """Item type resolver.""" - return resolve_json_refs('itty', pid) + return resolve_json_refs("itty", pid) diff --git a/rero_ils/modules/item_types/listener.py b/rero_ils/modules/item_types/listener.py index 32f34b56f9..f725bc9623 100644 --- a/rero_ils/modules/item_types/listener.py +++ b/rero_ils/modules/item_types/listener.py @@ -26,21 +26,24 @@ def negative_availability_changes(sender, record=None, *args, **kwargs): """Reindex related items if negative availability changes.""" - if isinstance(record, ItemType): - ori_record = ItemType.get_record_by_pid(record.pid) - record_availability = record.get('negative_availability', False) - original_availability = ori_record.get('negative_availability', False) - if record_availability != original_availability: - # get all item uuid's related to the item type and mark them for - # reindex into a asynchronous celery queue. 
- item_uuids = [] - search = ItemsSearch()\ - .filter('bool', should=[ - Q('match', item_type__pid=record.pid), - Q('match', temporary_item_type__pid=record.pid) - ]) \ - .source().scan() - for hit in search: - item_uuids.append(hit.meta.id) - ItemTypesIndexer().bulk_index(item_uuids) - process_bulk_queue.apply_async() + if not isinstance(record, ItemType): + return + ori_record = ItemType.get_record_by_pid(record.pid) + record_availability = record.get("negative_availability", False) + original_availability = ori_record.get("negative_availability", False) + if record_availability != original_availability: + search = ( + ItemsSearch() + .filter( + "bool", + should=[ + Q("match", item_type__pid=record.pid), + Q("match", temporary_item_type__pid=record.pid), + ], + ) + .source() + .scan() + ) + item_uuids = [hit.meta.id for hit in search] + ItemTypesIndexer().bulk_index(item_uuids) + process_bulk_queue.apply_async() diff --git a/rero_ils/modules/item_types/models.py b/rero_ils/modules/item_types/models.py index dc96f2f24e..c7b79f2f55 100644 --- a/rero_ils/modules/item_types/models.py +++ b/rero_ils/modules/item_types/models.py @@ -27,16 +27,17 @@ class ItemTypeIdentifier(RecordIdentifier): """Sequence generator for ItemType identifiers.""" - __tablename__ = 'item_type_id' - __mapper_args__ = {'concrete': True} + __tablename__ = "item_type_id" + __mapper_args__ = {"concrete": True} recid = db.Column( - db.BigInteger().with_variant(db.Integer, 'sqlite'), - primary_key=True, autoincrement=True, + db.BigInteger().with_variant(db.Integer, "sqlite"), + primary_key=True, + autoincrement=True, ) class ItemTypeMetadata(db.Model, RecordMetadataBase): """ItemType record metadata.""" - __tablename__ = 'item_type_metadata' + __tablename__ = "item_type_metadata" diff --git a/rero_ils/modules/item_types/permissions.py b/rero_ils/modules/item_types/permissions.py index d465c9b91d..2d65c723cc 100644 --- a/rero_ils/modules/item_types/permissions.py +++ b/rero_ils/modules/item_types/permissions.py @@ -19,15 +19,18 @@ """Permissions for item types.""" from invenio_access import action_factory -from rero_ils.modules.permissions import AllowedByAction, \ - AllowedByActionRestrictByOrganisation, RecordPermissionPolicy +from rero_ils.modules.permissions import ( + AllowedByAction, + AllowedByActionRestrictByOrganisation, + RecordPermissionPolicy, +) -search_action = action_factory('itty-search') -read_action = action_factory('itty-read') -create_action = action_factory('itty-create') -update_action = action_factory('itty-update') -delete_action = action_factory('itty-delete') -access_action = action_factory('itty-access') +search_action = action_factory("itty-search") +read_action = action_factory("itty-read") +create_action = action_factory("itty-create") +update_action = action_factory("itty-update") +delete_action = action_factory("itty-delete") +access_action = action_factory("itty-access") class ItemTypePermissionPolicy(RecordPermissionPolicy): diff --git a/rero_ils/modules/item_types/views.py b/rero_ils/modules/item_types/views.py index 001c3048c7..5adc1a49d1 100644 --- a/rero_ils/modules/item_types/views.py +++ b/rero_ils/modules/item_types/views.py @@ -26,27 +26,21 @@ from ..patrons.api import current_librarian blueprint = Blueprint( - 'item_types', + "item_types", __name__, - template_folder='templates', - static_folder='static', + template_folder="templates", + static_folder="static", ) -@blueprint.route('/item_types/name/validate/', methods=["GET"]) +@blueprint.route("/item_types/name/validate/", 
methods=["GET"]) @check_logged_as_librarian def name_validate(name): """Item type name validation.""" - response = { - 'name': None - } + response = {"name": None} if current_librarian: - patron_type = ItemType.exist_name_and_organisation_pid( - name, - current_librarian.organisation.pid - ) - if patron_type: - response = { - 'name': patron_type.name - } + if patron_type := ItemType.exist_name_and_organisation_pid( + name, current_librarian.organisation.pid + ): + response = {"name": patron_type.name} return jsonify(response) diff --git a/rero_ils/modules/items/api/__init__.py b/rero_ils/modules/items/api/__init__.py index 89691fb66f..6f0d101ae9 100644 --- a/rero_ils/modules/items/api/__init__.py +++ b/rero_ils/modules/items/api/__init__.py @@ -17,13 +17,18 @@ """Item data module.""" -from .api import Item, ItemsIndexer, ItemsSearch, item_id_fetcher, \ - item_id_minter +from .api import Item, ItemsIndexer, ItemsSearch, item_id_fetcher, item_id_minter from .circulation import ItemCirculation from .issue import ItemIssue from .record import ItemRecord __all__ = ( - 'Item', 'ItemRecord', 'ItemCirculation', 'ItemIssue', 'ItemsSearch', - 'ItemsIndexer', 'item_id_fetcher', 'item_id_minter' + "Item", + "ItemRecord", + "ItemCirculation", + "ItemIssue", + "ItemsSearch", + "ItemsIndexer", + "item_id_fetcher", + "item_id_minter", ) diff --git a/rero_ils/modules/items/api/api.py b/rero_ils/modules/items/api/api.py index 39cb445aff..1137c25665 100644 --- a/rero_ils/modules/items/api/api.py +++ b/rero_ils/modules/items/api/api.py @@ -17,6 +17,8 @@ # along with this program. If not, see . """API for manipulating items.""" + +import contextlib from datetime import datetime, timezone from functools import partial @@ -24,8 +26,7 @@ from elasticsearch_dsl import Q from invenio_search import current_search_client -from rero_ils.modules.api import IlsRecordError, IlsRecordsIndexer, \ - IlsRecordsSearch +from rero_ils.modules.api import IlsRecordError, IlsRecordsIndexer, IlsRecordsSearch from rero_ils.modules.documents.api import DocumentsSearch from rero_ils.modules.fetchers import id_fetcher from rero_ils.modules.item_types.api import ItemTypesSearch @@ -35,15 +36,13 @@ from rero_ils.modules.providers import Provider from rero_ils.modules.utils import extracted_data_from_ref +from ..models import ItemIdentifier, ItemMetadata, ItemStatus from .circulation import ItemCirculation from .issue import ItemIssue -from ..models import ItemIdentifier, ItemMetadata, ItemStatus # provider ItemProvider = type( - 'ItemProvider', - (Provider,), - dict(identifier=ItemIdentifier, pid_type='item') + "ItemProvider", (Provider,), dict(identifier=ItemIdentifier, pid_type="item") ) # minter item_id_minter = partial(id_minter, provider=ItemProvider) @@ -57,9 +56,9 @@ class ItemsSearch(IlsRecordsSearch): class Meta: """Search only on item index.""" - index = 'items' + index = "items" doc_types = None - fields = ('*', ) + fields = ("*",) facets = {} default_filter = None @@ -71,30 +70,30 @@ def available_query(self): """ must_not_filters = [ # should not be masked - Q('term', _masked=True), + Q("term", _masked=True), # should not be in_transit (even without loan) - Q('term', status=ItemStatus.IN_TRANSIT), + Q("term", status=ItemStatus.IN_TRANSIT), # if issue the status should be received - Q('exists', field='issue') & ~Q('term', issue__status='received') + Q("exists", field="issue") & ~Q("term", issue__status="received"), ] if not_available_item_types := [ hit.pid for hit in ItemTypesSearch() - .source('pid') - .filter('term', 
negative_availability=True) + .source("pid") + .filter("term", negative_availability=True) .scan() ]: # negative availability item type and not temporary item types - has_items_filters = \ - Q('terms', item_type__pid=not_available_item_types) - has_items_filters &= ~Q('exists', field='temporary_item_type') + has_items_filters = Q("terms", item_type__pid=not_available_item_types) + has_items_filters &= ~Q("exists", field="temporary_item_type") # temporary item types with negative availability has_items_filters |= Q( - 'terms', temporary_item_type__pid=not_available_item_types) + "terms", temporary_item_type__pid=not_available_item_types + ) # add to the must not filters must_not_filters.append(has_items_filters) - return self.filter(Q('bool', must_not=must_not_filters)) + return self.filter(Q("bool", must_not=must_not_filters)) class Item(ItemCirculation, ItemIssue): @@ -105,56 +104,53 @@ class Item(ItemCirculation, ItemIssue): provider = ItemProvider model_cls = ItemMetadata pids_exist_check = { - 'required': { - 'loc': 'location', - 'doc': 'document', - 'itty': 'item_type' - }, - 'not_required': { - 'org': 'organisation', + "required": {"loc": "location", "doc": "document", "itty": "item_type"}, + "not_required": { + "org": "organisation", # We can not make the holding required because it is created later - 'hold': 'holding' - } + "hold": "holding", + }, } def delete_from_index(self): """Delete record from index.""" - try: + with contextlib.suppress(NotFoundError): ItemsIndexer().delete(self) - except NotFoundError: - pass def reasons_not_to_delete(self): """Get reasons not to delete record.""" cannot_delete = {} links = self.get_links_to_me() # local_fields aren't a reason to block suppression - links.pop('local_fields', None) + links.pop("local_fields", None) if links: - cannot_delete['links'] = links + cannot_delete["links"] = links return cannot_delete def in_collection(self, **kwargs): """Get published collection pids for current item.""" from ...collections.api import CollectionsSearch + output = [] - search = CollectionsSearch() \ - .filter('term', items__pid=self.get('pid')) \ - .filter('term', published=True) \ - .sort({'title_sort': {'order': 'asc'}}) \ - .params(preserve_order=True) \ - .source(['pid', 'organisation', 'title', 'description']) + search = ( + CollectionsSearch() + .filter("term", items__pid=self.get("pid")) + .filter("term", published=True) + .sort({"title_sort": {"order": "asc"}}) + .params(preserve_order=True) + .source(["pid", "organisation", "title", "description"]) + ) orgs = {} for hit in search.scan(): hit = hit.to_dict() - org_pid = hit['organisation']['pid'] + org_pid = hit["organisation"]["pid"] if org_pid not in orgs: orgs[org_pid] = Organisation.get_record_by_pid(org_pid) collection_data = { - 'pid': hit['pid'], # required property - 'title': hit['title'], # required property - 'description': hit.get('description'), # optional property - 'viewcode': orgs[org_pid].get('code') + "pid": hit["pid"], # required property + "title": hit["title"], # required property + "description": hit.get("description"), # optional property + "viewcode": orgs[org_pid].get("code"), } collection_data = {k: v for k, v in collection_data.items() if v} output.append(collection_data) @@ -162,13 +158,13 @@ def in_collection(self, **kwargs): def replace_refs(self): """Replace $ref with real data.""" - tmp_itty_end_date = self.get('temporary_item_type', {}).get('end_date') - tmp_loc_end_date = self.get('temporary_location', {}).get('end_date') + tmp_itty_end_date = 
self.get("temporary_item_type", {}).get("end_date") + tmp_loc_end_date = self.get("temporary_location", {}).get("end_date") data = super().replace_refs() if tmp_itty_end_date: - data['temporary_item_type']['end_date'] = tmp_itty_end_date + data["temporary_item_type"]["end_date"] = tmp_itty_end_date if tmp_loc_end_date: - data['temporary_location']['end_date'] = tmp_loc_end_date + data["temporary_location"]["end_date"] = tmp_loc_end_date return data @classmethod @@ -182,15 +178,15 @@ def get_item_record_for_ui(cls, **kwargs): :return: the item record. """ from ...loans.api import Loan + item = None - item_pid = kwargs.get('item_pid') - item_barcode = kwargs.pop('item_barcode', None) - loan_pid = kwargs.get('pid') + item_pid = kwargs.get("item_pid") + item_barcode = kwargs.pop("item_barcode", None) + loan_pid = kwargs.get("pid") if item_pid: item = Item.get_record_by_pid(item_pid) elif item_barcode: - org_pid = kwargs.get( - 'organisation_pid', current_librarian.organisation_pid) + org_pid = kwargs.get("organisation_pid", current_librarian.organisation_pid) item = Item.get_item_by_barcode(item_barcode, org_pid) elif loan_pid: item_pid = Loan.get_record_by_pid(loan_pid).item_pid @@ -203,12 +199,11 @@ def format_end_date(cls, end_date): # (`datetime.now(timezone.utc)` by default) if end_date is None: end_date = datetime.now(timezone.utc) - end_date = end_date.strftime('%Y-%m-%d') + end_date = end_date.strftime("%Y-%m-%d") return end_date @classmethod - def get_items_with_obsolete_temporary_item_type_or_location( - cls, end_date=None): + def get_items_with_obsolete_temporary_item_type_or_location(cls, end_date=None): """Get all items with an obsolete temporary item_type or location. An end_date could be attached to the item temporary item_type or @@ -222,14 +217,14 @@ def get_items_with_obsolete_temporary_item_type_or_location( end_date = cls.format_end_date(end_date) items_query = ItemsSearch() loc_es_quey = items_query.filter( - 'range', temporary_location__end_date={'lte': end_date}) - locs = [ - (hit.meta.id, 'loc') for hit in loc_es_quey.source('pid').scan()] + "range", temporary_location__end_date={"lte": end_date} + ) + locs = [(hit.meta.id, "loc") for hit in loc_es_quey.source("pid").scan()] itty_es_query = items_query.filter( - 'range', temporary_item_type__end_date={'lte': end_date}) - itty = [(hit.meta.id, 'itty') for hit in itty_es_query.source( - 'pid').scan()] + "range", temporary_item_type__end_date={"lte": end_date} + ) + itty = [(hit.meta.id, "itty") for hit in itty_es_query.source("pid").scan()] hits = itty + locs for id, field_type in hits: yield Item.get_record(id), field_type @@ -248,9 +243,8 @@ def _es_item(cls, record): :returns: the elasticsearch document or {} """ try: - es_item = current_search_client.get( - ItemsSearch.Meta.index, record.id) - return es_item['_source'] + es_item = current_search_client.get(ItemsSearch.Meta.index, record.id) + return es_item["_source"] except NotFoundError: return {} @@ -262,19 +256,19 @@ def _update_status_in_doc(cls, record, es_item): :param es_item: a dict of the elasticsearch item """ # retrieve the document in the corresponding es index - document_pid = extracted_data_from_ref(record.get('document')) + document_pid = extracted_data_from_ref(record.get("document")) doc = next( DocumentsSearch() .extra(version=True) - .filter('term', pid=document_pid) + .filter("term", pid=document_pid) .scan() ) # update the item status in the document data = doc.to_dict() - for hold in data.get('holdings', []): - for item in hold.get('items', []): 
-            if item['pid'] == record.pid:
-                item['status'] = record['status']
+        for hold in data.get("holdings", []):
+            for item in hold.get("items", []):
+                if item["pid"] == record.pid:
+                    item["status"] = record["status"]
                     break
             else:
                 continue
@@ -285,7 +279,8 @@
             id=doc.meta.id,
             body=data,
             version=doc.meta.version,
-            version_type='external_gte')
+            version_type="external_gte",
+        )
 
     def index(self, record):
         """Index an item.
@@ -302,19 +297,19 @@
         return_value = super().index(record)
 
         # fast document reindex for circulation operations
-        if es_item and record.get('status') != es_item.get('status'):
+        if es_item and record.get("status") != es_item.get("status"):
             self._update_status_in_doc(record, es_item)
             return return_value
         # reindex the holding / doc for non circulation operations
-        holding_pid = extracted_data_from_ref(record.get('holding'))
+        holding_pid = extracted_data_from_ref(record.get("holding"))
         holding = Holding.get_record_by_pid(holding_pid)
         holding.reindex()
         # reindex the old holding
         old_holding_pid = None
         if es_item:
             # reindex the old holding to update the count
-            old_holding_pid = es_item.get('holding', {}).get('pid')
+            old_holding_pid = es_item.get("holding", {}).get("pid")
             if old_holding_pid != holding_pid:
                 old_holding = Holding.get_record_by_pid(old_holding_pid)
                 old_holding.reindex()
@@ -328,16 +323,14 @@
         from rero_ils.modules.holdings.api import Holding
 
         return_value = super().delete(record)
-        holding_pid = extracted_data_from_ref(record.get('holding'))
+        holding_pid = extracted_data_from_ref(record.get("holding"))
         holding = Holding.get_record_by_pid(holding_pid)
         # delete only if a standard item
         deleted = False
         if not holding.is_serial:
-            try:
+            with contextlib.suppress(IlsRecordError.NotDeleted):
                 holding.delete(force=False, dbcommit=True, delindex=True)
                 deleted = True
-            except IlsRecordError.NotDeleted:
-                pass
         if not deleted:
             # for items count
             holding.reindex()
@@ -348,4 +341,4 @@ def bulk_index(self, record_id_iterator):
        :param record_id_iterator: Iterator yielding record UUIDs.
        """
-        super().bulk_index(record_id_iterator, doc_type='item')
+        super().bulk_index(record_id_iterator, doc_type="item")
diff --git a/rero_ils/modules/items/api/circulation.py b/rero_ils/modules/items/api/circulation.py
index e1feb595ec..6ffa975c49 100644
--- a/rero_ils/modules/items/api/circulation.py
+++ b/rero_ils/modules/items/api/circulation.py
@@ -24,11 +24,15 @@
 from flask import current_app
 from flask_babel import gettext as _
 from invenio_circulation.api import get_loan_for_item
-from invenio_circulation.errors import ItemNotAvailableError, \
-    NoValidTransitionAvailableError
+from invenio_circulation.errors import (
+    ItemNotAvailableError,
+    NoValidTransitionAvailableError,
+)
 from invenio_circulation.proxies import current_circulation
-from invenio_circulation.search.api import search_by_patron_item_or_document, \
-    search_by_pid
+from invenio_circulation.search.api import (
+    search_by_patron_item_or_document,
+    search_by_pid,
+)
 from invenio_pidstore.errors import PersistentIdentifierError
 from invenio_records_rest.utils import obj_or_import_string
 from invenio_search import current_search
@@ -36,33 +40,38 @@
 from rero_ils.modules.locations.api import LocationsSearch
 from rero_ils.modules.patron_transactions.api import PatronTransactionsSearch
 
-from .record import ItemRecord
-from ..decorators import add_action_parameters_and_flush_indexes, \
-    check_operation_allowed
-from ..models import ItemCirculationAction, ItemIssueStatus, ItemStatus
-from ..utils import item_pid_to_object
+from ....filter import format_date_filter
 from ...circ_policies.api import CircPolicy
 from ...errors import NoCirculationAction
 from ...item_types.api import ItemType
 from ...libraries.api import Library
 from ...libraries.exceptions import LibraryNeverOpen
-from ...loans.api import Loan, get_last_transaction_loc_for_item, \
-    get_request_by_item_pid_by_patron_pid
+from ...loans.api import (
+    Loan,
+    get_last_transaction_loc_for_item,
+    get_request_by_item_pid_by_patron_pid,
+)
 from ...loans.models import LoanAction, LoanState
 from ...locations.api import Location
 from ...patrons.api import Patron
 from ...utils import extracted_data_from_ref, sorted_pids
-from ....filter import format_date_filter
+from ..decorators import (
+    add_action_parameters_and_flush_indexes,
+    check_operation_allowed,
+)
+from ..models import ItemCirculationAction, ItemIssueStatus, ItemStatus
+from ..utils import item_pid_to_object
+from .record import ItemRecord
 
 
 class ItemCirculation(ItemRecord):
     """Item circulation class."""
 
     statuses = {
-        LoanState.ITEM_ON_LOAN: 'on_loan',
-        LoanState.ITEM_AT_DESK: 'at_desk',
-        LoanState.ITEM_IN_TRANSIT_FOR_PICKUP: 'in_transit',
-        LoanState.ITEM_IN_TRANSIT_TO_HOUSE: 'in_transit',
+        LoanState.ITEM_ON_LOAN: "on_loan",
+        LoanState.ITEM_AT_DESK: "at_desk",
+        LoanState.ITEM_IN_TRANSIT_FOR_PICKUP: "in_transit",
+        LoanState.ITEM_IN_TRANSIT_TO_HOUSE: "in_transit",
     }
 
     def change_status_commit_and_reindex(self):
@@ -71,30 +80,29 @@
        Commits and reindex the item.
        This method is executed after every successful circulation action.
""" - current_search.flush_and_refresh( - current_circulation.loan_search_cls.Meta.index) + current_search.flush_and_refresh(current_circulation.loan_search_cls.Meta.index) self.status_update(self, dbcommit=True, reindex=True, forceindex=True) def prior_validate_actions(self, **kwargs): """Check if the validate action can be executed or not.""" - if loan_pid := kwargs.get('pid'): + if loan_pid := kwargs.get("pid"): # no item validation is possible when an item has an active loan. states = self.get_loans_states_by_item_pid_exclude_loan_pid( - self.pid, loan_pid) - active_states = current_app.config[ - 'CIRCULATION_STATES_LOAN_ACTIVE'] + self.pid, loan_pid + ) + active_states = current_app.config["CIRCULATION_STATES_LOAN_ACTIVE"] if set(active_states).intersection(states): raise NoValidTransitionAvailableError() else: # no validation is possible when loan is not found/given - current_app.logger.error( - 'NoCirculationAction prior_validate_actions') + current_app.logger.error("NoCirculationAction prior_validate_actions") raise NoCirculationAction( - _('No circulation action performed: validate_actions')) + _("No circulation action performed: validate_actions") + ) def prior_extend_loan_actions(self, **kwargs): """Actions to execute before an extend_loan action.""" - loan_pid = kwargs.get('pid') + loan_pid = kwargs.get("pid") checked_out = True # we consider loan as checked-out if not loan_pid: loan = self.get_first_loan_by_state(LoanState.ITEM_ON_LOAN) @@ -110,7 +118,8 @@ def prior_extend_loan_actions(self, **kwargs): have_request = LoanState.PENDING in self.get_loan_states_for_an_item() if not checked_out or have_request: raise NoCirculationAction( - _('No circulation action performed: extend_loan_actions')) + _("No circulation action performed: extend_loan_actions") + ) return loan, kwargs @@ -121,20 +130,19 @@ def prior_checkin_actions(self, **kwargs): if not states: # CHECKIN_1_1: item on_shelf, no pending loans. self.checkin_item_on_shelf(states, **kwargs) - elif (LoanState.ITEM_AT_DESK not in states and - LoanState.ITEM_ON_LOAN not in states): + elif ( + LoanState.ITEM_AT_DESK not in states + and LoanState.ITEM_ON_LOAN not in states + ): if LoanState.ITEM_IN_TRANSIT_FOR_PICKUP in states: # CHECKIN_4: item in_transit (IN_TRANSIT_FOR_PICKUP) - loan, kwargs = self.checkin_item_in_transit_for_pickup( - **kwargs) + loan, kwargs = self.checkin_item_in_transit_for_pickup(**kwargs) elif LoanState.ITEM_IN_TRANSIT_TO_HOUSE in states: # CHECKIN_5: item in_transit (IN_TRANSIT_TO_HOUSE) - loan, kwargs = self.checkin_item_in_transit_to_house( - states, **kwargs) + loan, kwargs = self.checkin_item_in_transit_to_house(states, **kwargs) elif LoanState.PENDING in states: # CHECKIN_1_2_1: item on_shelf, with pending loans. - loan, kwargs = self.validate_item_first_pending_request( - **kwargs) + loan, kwargs = self.validate_item_first_pending_request(**kwargs) elif LoanState.ITEM_AT_DESK in states: # CHECKIN_2: item at_desk self.checkin_item_at_desk(**kwargs) @@ -143,55 +151,50 @@ def prior_checkin_actions(self, **kwargs): loan = self.get_first_loan_by_state(state=LoanState.ITEM_ON_LOAN) return loan, kwargs - def complete_action_missing_params( - self, item=None, checkin_loan=None, **kwargs): + def complete_action_missing_params(self, item=None, checkin_loan=None, **kwargs): """Add the missing parameters before executing a circulation action.""" # TODO: find a better way to code this part. 
if not checkin_loan: loan = None - loan_pid = kwargs.get('pid') - if loan_pid: + if loan_pid := kwargs.get("pid"): loan = Loan.get_record_by_pid(loan_pid) - patron_pid = kwargs.get('patron_pid') + patron_pid = kwargs.get("patron_pid") if patron_pid and not loan: data = { - 'item_pid': item_pid_to_object(item.pid), - 'patron_pid': patron_pid + "item_pid": item_pid_to_object(item.pid), + "patron_pid": patron_pid, } - data.setdefault( - 'transaction_date', datetime.utcnow().isoformat()) + data.setdefault("transaction_date", datetime.utcnow().isoformat()) loan = Loan.create(data, dbcommit=True, reindex=True) if not patron_pid and loan: - kwargs.setdefault('patron_pid', loan.patron_pid) + kwargs.setdefault("patron_pid", loan.patron_pid) - kwargs.setdefault('pid', loan.pid) - kwargs.setdefault('patron_pid', patron_pid) + kwargs.setdefault("pid", loan.pid) + kwargs.setdefault("patron_pid", patron_pid) else: - kwargs['patron_pid'] = checkin_loan.get('patron_pid') - kwargs['pid'] = checkin_loan.pid + kwargs["patron_pid"] = checkin_loan.get("patron_pid") + kwargs["pid"] = checkin_loan.pid loan = checkin_loan - kwargs['item_pid'] = item_pid_to_object(item.pid) + kwargs["item_pid"] = item_pid_to_object(item.pid) - kwargs['transaction_date'] = datetime.utcnow().isoformat() - document_pid = extracted_data_from_ref(item.get('document')) - kwargs.setdefault('document_pid', document_pid) + kwargs["transaction_date"] = datetime.utcnow().isoformat() + document_pid = extracted_data_from_ref(item.get("document")) + kwargs.setdefault("document_pid", document_pid) # set the transaction location for the circulation transaction - transaction_location_pid = kwargs.get( - 'transaction_location_pid', None) + transaction_location_pid = kwargs.get("transaction_location_pid", None) if not transaction_location_pid: - transaction_library_pid = kwargs.pop( - 'transaction_library_pid', None) + transaction_library_pid = kwargs.pop("transaction_library_pid", None) if transaction_library_pid is not None: lib = Library.get_record_by_pid(transaction_library_pid) - kwargs['transaction_location_pid'] = \ - lib.get_transaction_location_pid() + kwargs["transaction_location_pid"] = lib.get_transaction_location_pid() # set the pickup_location_pid field if not found for loans that are # ready for checkout. - if not kwargs.get('pickup_location_pid') and \ - loan.get('state') in [LoanState.CREATED, LoanState.ITEM_AT_DESK]: - kwargs['pickup_location_pid'] = \ - kwargs.get('transaction_location_pid') + if not kwargs.get("pickup_location_pid") and loan.get("state") in [ + LoanState.CREATED, + LoanState.ITEM_AT_DESK, + ]: + kwargs["pickup_location_pid"] = kwargs.get("transaction_location_pid") return loan, kwargs def checkin_item_on_shelf(self, loans_list, **kwargs): @@ -203,28 +206,30 @@ def checkin_item_on_shelf(self, loans_list, **kwargs): """ # CHECKIN_1_1: item on_shelf, no pending loans. 
        libraries = self.compare_item_pickup_transaction_libraries(**kwargs)
-        transaction_item_libraries = libraries['transaction_item_libraries']
+        transaction_item_libraries = libraries["transaction_item_libraries"]
         if transaction_item_libraries:
             # CHECKIN_1_1_1, item library = transaction library
             # item will be checked in at the home library, no action
             if self.status != ItemStatus.ON_SHELF:
-                self.status_update(
-                    self, dbcommit=True, reindex=True, forceindex=True)
-                raise NoCirculationAction(_(
-                    'No circulation action performed: '
-                    'Item returned at owning library'))
-            raise NoCirculationAction(
-                _('No circulation action performed: on shelf'))
+                self.status_update(self, dbcommit=True, reindex=True, forceindex=True)
+                raise NoCirculationAction(
+                    _(
+                        "No circulation action performed: "
+                        "Item returned at owning library"
+                    )
+                )
+            raise NoCirculationAction(_("No circulation action performed: on shelf"))
         else:
             # CHECKIN_1_1_2: item library != transaction library
             # item will be checked in at an external library, no
             # circulation action performed, add item status in_transit
-            self['status'] = ItemStatus.IN_TRANSIT
+            self["status"] = ItemStatus.IN_TRANSIT
             self.status_update(
-                self, on_shelf=False, dbcommit=True, reindex=True,
-                forceindex=True)
+                self, on_shelf=False, dbcommit=True, reindex=True, forceindex=True
+            )
             raise NoCirculationAction(
-                _('No circulation action performed: in transit added'))
+                _("No circulation action performed: in transit added")
+            )
 
     def checkin_item_at_desk(self, **kwargs):
         """Checkin actions for at_desk item.
@@ -233,26 +238,27 @@
        :param item : the item record
        :param kwargs : all others named arguments
        """
        # CHECKIN_2: item at_desk
-        at_desk_loan = self.get_first_loan_by_state(
-            state=LoanState.ITEM_AT_DESK)
-        kwargs['pickup_location_pid'] = \
-            at_desk_loan['pickup_location_pid']
+        at_desk_loan = self.get_first_loan_by_state(state=LoanState.ITEM_AT_DESK)
+        kwargs["pickup_location_pid"] = at_desk_loan["pickup_location_pid"]
         libraries = self.compare_item_pickup_transaction_libraries(**kwargs)
-        if libraries['transaction_pickup_libraries']:
+        if libraries["transaction_pickup_libraries"]:
             # CHECKIN_2_1: pickup location = transaction library
             # (no action, item is: at_desk (ITEM_AT_DESK))
             raise NoCirculationAction(
-                _('No circulation action performed: item at desk'))
+                _("No circulation action performed: item at desk")
+            )
         # CHECKIN_2_2: pickup location != transaction library
         # item is: in_transit
-        at_desk_loan['state'] = LoanState.ITEM_IN_TRANSIT_FOR_PICKUP
+        at_desk_loan["state"] = LoanState.ITEM_IN_TRANSIT_FOR_PICKUP
         at_desk_loan.update(at_desk_loan, dbcommit=True, reindex=True)
-        self['status'] = ItemStatus.IN_TRANSIT
+        self["status"] = ItemStatus.IN_TRANSIT
         self.status_update(
-            self, on_shelf=False, dbcommit=True, reindex=True, forceindex=True)
+            self, on_shelf=False, dbcommit=True, reindex=True, forceindex=True
+        )
         raise NoCirculationAction(
-            _('No circulation action performed: in transit added'))
+            _("No circulation action performed: in transit added")
+        )
 
     def checkin_item_in_transit_for_pickup(self, **kwargs):
         """Checkin actions for item in_transit for pickup.
@@ -262,21 +268,22 @@ def checkin_item_in_transit_for_pickup(self, **kwargs): """ # CHECKIN_4: item in_transit (IN_TRANSIT_FOR_PICKUP) in_transit_loan = self.get_first_loan_by_state( - state=LoanState.ITEM_IN_TRANSIT_FOR_PICKUP) - kwargs['pickup_location_pid'] = \ - in_transit_loan['pickup_location_pid'] + state=LoanState.ITEM_IN_TRANSIT_FOR_PICKUP + ) + kwargs["pickup_location_pid"] = in_transit_loan["pickup_location_pid"] libraries = self.compare_item_pickup_transaction_libraries(**kwargs) - if libraries['transaction_pickup_libraries']: + if libraries["transaction_pickup_libraries"]: # CHECKIN_4_1: pickup location = transaction library # (delivery_receive current loan, item is: at_desk(ITEM_AT_DESK)) - kwargs['receive_in_transit_request'] = True + kwargs["receive_in_transit_request"] = True loan = in_transit_loan return loan, kwargs else: # CHECKIN_4_2: pickup location != transaction library # (no action, item is: in_transit (IN_TRANSIT_FOR_PICKUP)) raise NoCirculationAction( - _('No circulation action performed: in transit for pickup')) + _("No circulation action performed: in transit for pickup") + ) def checkin_item_in_transit_to_house(self, loans_list, **kwargs): """Checkin actions for an item in IN_TRANSIT_TO_HOUSE with no requests. @@ -287,68 +294,67 @@ def checkin_item_in_transit_to_house(self, loans_list, **kwargs): """ # CHECKIN_5: item in_transit (IN_TRANSIT_TO_HOUSE) libraries = self.compare_item_pickup_transaction_libraries(**kwargs) - transaction_item_libraries = libraries['transaction_item_libraries'] + transaction_item_libraries = libraries["transaction_item_libraries"] in_transit_loan = self.get_first_loan_by_state( - state=LoanState.ITEM_IN_TRANSIT_TO_HOUSE) + state=LoanState.ITEM_IN_TRANSIT_TO_HOUSE + ) if LoanState.PENDING not in loans_list: # CHECKIN_5_1: item has no pending loans if not transaction_item_libraries: # CHECKIN_5_1_2: item location != transaction library # (no action, item is: in_transit (IN_TRANSIT_TO_HOUSE)) raise NoCirculationAction( - _('No circulation action performed: in transit to house')) + _("No circulation action performed: in transit to house") + ) # CHECKIN_5_1_1: item location = transaction library # (house_receive current loan, item is: on_shelf) - kwargs['receive_in_transit_request'] = True + kwargs["receive_in_transit_request"] = True loan = in_transit_loan else: # CHECKIN_5_2: item has pending requests. loan, kwargs = self.checkin_item_in_transit_to_house_with_requests( - in_transit_loan, **kwargs) + in_transit_loan, **kwargs + ) return loan, kwargs - def checkin_item_in_transit_to_house_with_requests( - self, in_transit_loan, **kwargs): + def checkin_item_in_transit_to_house_with_requests(self, in_transit_loan, **kwargs): """Checkin actions for an item in IN_TRANSIT_TO_HOUSE with requests. :param item : the item record :param in_transit_loan: the in_transit loan attached to the item :param kwargs : all others named arguments """ - # CHECKIN_5_2: pending loan exists. 
-        pending = self.get_first_loan_by_state(state=LoanState.PENDING)
-        if pending:
+        if pending := self.get_first_loan_by_state(state=LoanState.PENDING):
             pending_params = kwargs
-            pending_params['pickup_location_pid'] = \
-                pending['pickup_location_pid']
-            libraries = self.compare_item_pickup_transaction_libraries(
-                **kwargs)
-            if libraries['transaction_pickup_libraries']:
+            pending_params["pickup_location_pid"] = pending["pickup_location_pid"]
+            libraries = self.compare_item_pickup_transaction_libraries(**kwargs)
+            if libraries["transaction_pickup_libraries"]:
                 # CHECKIN_5_2_1_1: pickup location of first PENDING loan = item
                 # library (house_receive current loan, item is: at_desk
                 # [automatic validate first PENDING loan]
-                if libraries['item_pickup_libraries']:
-                    kwargs['receive_current_and_validate_first'] = True
+                if libraries["item_pickup_libraries"]:
+                    kwargs["receive_current_and_validate_first"] = True
                     loan = in_transit_loan
                 else:
                     # CHECKIN_5_2_1_2: pickup location of first PENDING loan !=
                     # item library (cancel current loan, item is: at_desk
                     # automatic validate first PENDING loan
-                    kwargs['cancel_current_and_receive_first'] = True
+                    kwargs["cancel_current_and_receive_first"] = True
                     loan = in_transit_loan
             else:
                 # CHECKIN_5_2_2: pickup location of first PENDING loan !=
                 # transaction library
-                if libraries['item_pickup_libraries']:
+                if libraries["item_pickup_libraries"]:
                     # CHECKIN_5_2_2_1: pickup location of first PENDING loan =
                     # item library (no action, item is: IN_TRANSIT)
                     raise NoCirculationAction(
-                        _('No circulation action performed: in transit'))
+                        _("No circulation action performed: in transit")
+                    )
                 else:
                     # CHECKIN_5_2_2_2: pickup location of first PENDING loan !=
                     # item library (checkin current loan, item is: in_transit)
                     # [automatic cancel current, automatic validate first loan]
-                    kwargs['cancel_current_and_receive_first'] = True
+                    kwargs["cancel_current_and_receive_first"] = True
                     loan = in_transit_loan
         return loan, kwargs
@@ -358,11 +364,9 @@ def validate_item_first_pending_request(self, **kwargs):
         :param item : the item record
         :param kwargs : all others named arguments
         """
-        # CHECKIN_1_2_1: item on_shelf, with pending loans.
-        pending = self.get_first_loan_by_state(state=LoanState.PENDING)
-        if pending:
+        if pending := self.get_first_loan_by_state(state=LoanState.PENDING):
             # validate the first pending request.
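The two hunks above swap the assign-then-test idiom for an assignment expression, one of the Sourcery rewrites applied throughout this patch. A standalone sketch of the pattern, where get_first is a hypothetical stand-in for get_first_loan_by_state (requires Python 3.8+):

    def get_first(loans):
        return loans[0] if loans else None

    # before: two statements, then the test
    pending = get_first(["loan-1"])
    if pending:
        print(pending)

    # after: bind and test in a single expression
    if pending := get_first(["loan-1"]):
        print(pending)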
- kwargs['validate_current_loan'] = True + kwargs["validate_current_loan"] = True loan = pending return loan, kwargs @@ -375,26 +379,24 @@ def compare_item_pickup_transaction_libraries(self, **kwargs): `transaction_pickup_libraries`: between transaction and pickup `item_pickup_libraries`: between item and pickup """ - trans_loc_pid = kwargs.pop('transaction_location_pid', None) - trans_lib_pid = kwargs.pop('transaction_library_pid', None) - if not trans_lib_pid: - trans_lib_pid = Location.get_record_by_pid(trans_loc_pid)\ - .library_pid - - pickup_loc_pid = kwargs.pop('pickup_location_pid', None) - pickup_lib_pid = kwargs.pop('pickup_library_pid', None) + trans_loc_pid = kwargs.pop("transaction_location_pid", None) + trans_lib_pid = ( + kwargs.pop("transaction_library_pid", None) + or Location.get_record_by_pid(trans_loc_pid).library_pid + ) + + pickup_loc_pid = kwargs.pop("pickup_location_pid", None) + pickup_lib_pid = kwargs.pop("pickup_library_pid", None) if not pickup_lib_pid: if not pickup_loc_pid: pickup_lib_pid = trans_lib_pid else: - pickup_lib_pid = Location\ - .get_record_by_pid(pickup_loc_pid)\ - .library_pid + pickup_lib_pid = Location.get_record_by_pid(pickup_loc_pid).library_pid return { - 'transaction_item_libraries': self.library_pid == trans_lib_pid, - 'transaction_pickup_libraries': pickup_lib_pid == trans_lib_pid, - 'item_pickup_libraries': self.library_pid == pickup_lib_pid + "transaction_item_libraries": self.library_pid == trans_lib_pid, + "transaction_pickup_libraries": pickup_lib_pid == trans_lib_pid, + "item_pickup_libraries": self.library_pid == pickup_lib_pid, } @check_operation_allowed(ItemCirculationAction.CHECKOUT) @@ -402,43 +404,40 @@ def compare_item_pickup_transaction_libraries(self, **kwargs): def checkout(self, current_loan, **kwargs): """Checkout item to the user.""" action_params, actions = self.prior_checkout_actions(kwargs) - loan = Loan.get_record_by_pid(action_params.get('pid')) - current_loan = loan or Loan.create( - action_params, - dbcommit=True, - reindex=True - ) - old_state = current_loan.get('state') + loan = Loan.get_record_by_pid(action_params.get("pid")) + current_loan = loan or Loan.create(action_params, dbcommit=True, reindex=True) + old_state = current_loan.get("state") # If 'end_date' is specified, we need to check if the selected date is # not a closed date. If it's a closed date, then we need to update the # value to the next open day. - if 'end_date' in action_params: + if "end_date" in action_params: # circulation parameters are to calculate from transaction library. 
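The trans_lib_pid rewrite above folds `if not trans_lib_pid: trans_lib_pid = ...` into a single `or` expression. The two forms agree as long as the popped value is either None or a non-empty PID string; any falsy-but-valid value would silently take the fallback. In isolation:

    def fallback():
        return "lib-2"

    assert (None or fallback()) == "lib-2"     # missing value: fallback runs
    assert ("lib-7" or fallback()) == "lib-7"  # present value: fallback skipped
    assert ("" or fallback()) == "lib-2"       # an empty string also falls back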
- transaction_library_pid = LocationsSearch().get_record_by_pid( - kwargs.get('transaction_location_pid')).library.pid - if not transaction_library_pid: - transaction_library_pid = self.library_pid + transaction_library_pid = ( + LocationsSearch() + .get_record_by_pid(kwargs.get("transaction_location_pid")) + .library.pid + ) or self.library_pid library = Library.get_record_by_pid(transaction_library_pid) - if not library.is_open(action_params['end_date'], True): + if not library.is_open(action_params["end_date"], True): # If library has no open dates, keep the default due date # to avoid circulation errors with suppress(LibraryNeverOpen): - new_end_date = library.next_open(action_params['end_date']) - new_end_date = new_end_date.astimezone()\ - .replace(microsecond=0).isoformat() - action_params['end_date'] = new_end_date + new_end_date = library.next_open(action_params["end_date"]) + new_end_date = ( + new_end_date.astimezone().replace(microsecond=0).isoformat() + ) + action_params["end_date"] = new_end_date # Call invenio_circulation for 'checkout' trigger loan = current_circulation.circulation.trigger( - current_loan, - **dict(action_params, trigger='checkout') + current_loan, **dict(action_params, trigger="checkout") ) - new_state = loan.get('state') + new_state = loan.get("state") if old_state == new_state: current_app.logger.error( - f'Loan state has not changed after CHECKOUT: {loan.pid} ' - f'state: {old_state} ' - f'kwargs: {kwargs}' + f"Loan state has not changed after CHECKOUT: {loan.pid} " + f"state: {old_state} " + f"kwargs: {kwargs}" ) actions.update({LoanAction.CHECKOUT: loan}) return self, actions @@ -447,11 +446,9 @@ def checkout(self, current_loan, **kwargs): def cancel_loan(self, current_loan, **kwargs): """Cancel a given item loan for a patron.""" loan = current_circulation.circulation.trigger( - current_loan, **dict(kwargs, trigger='cancel') + current_loan, **dict(kwargs, trigger="cancel") ) - return self, { - LoanAction.CANCEL: loan - } + return self, {LoanAction.CANCEL: loan} def cancel_item_request(self, pid, **kwargs): """A smart cancel request for an item. Some actions are performed. @@ -463,41 +460,40 @@ def cancel_item_request(self, pid, **kwargs): actions = {} loan = Loan.get_record_by_pid(pid) # decide which actions need to be executed according to loan state. 
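The trigger calls in these hunks pass `**dict(params, trigger=...)`: a shallow copy of the action parameters with one key added, leaving the caller's mapping untouched. The idiom on its own, with illustrative values:

    params = {"patron_pid": "ptrn-1", "item_pid": "item-9"}
    checkout_params = dict(params, trigger="checkout")
    assert checkout_params["trigger"] == "checkout"
    assert "trigger" not in params  # the original mapping is unchanged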
- actions_to_execute = self.checks_before_a_cancel_item_request( - loan, **kwargs) + actions_to_execute = self.checks_before_a_cancel_item_request(loan, **kwargs) # execute the actions - if actions_to_execute.get('cancel_loan'): + if actions_to_execute.get("cancel_loan"): item, actions = self.cancel_loan(pid=loan.pid, **kwargs) - if actions_to_execute.get('loan_update', {}).get('state'): - loan['state'] = actions_to_execute['loan_update']['state'] + if actions_to_execute.get("loan_update", {}).get("state"): + loan["state"] = actions_to_execute["loan_update"]["state"] loan.update(loan, dbcommit=True, reindex=True) - self.status_update( - self, dbcommit=True, reindex=True, forceindex=True) + self.status_update(self, dbcommit=True, reindex=True, forceindex=True) actions.update({LoanAction.UPDATE: loan}) - elif actions_to_execute.get('validate_first_pending'): + elif actions_to_execute.get("validate_first_pending"): pending = self.get_first_loan_by_state(state=LoanState.PENDING) - loan_pickup = loan.get('pickup_location_pid', None) - pending_pickup = pending.get('pickup_location_pid', None) + loan_pickup = loan.get("pickup_location_pid", None) + pending_pickup = pending.get("pickup_location_pid", None) # If the item is at_desk at the same location as the next loan # pickup we can validate the next loan so that it becomes at desk # for the next patron. - if loan.get('state') == LoanState.ITEM_AT_DESK\ - and loan_pickup == pending_pickup: + if ( + loan.get("state") == LoanState.ITEM_AT_DESK + and loan_pickup == pending_pickup + ): item, actions = self.cancel_loan(pid=loan.pid, **kwargs) - kwargs['transaction_location_pid'] = loan_pickup - kwargs.pop('transaction_library_pid', None) + kwargs["transaction_location_pid"] = loan_pickup + kwargs.pop("transaction_library_pid", None) item, validate_actions = self.validate_request( - pid=pending.pid, - **kwargs) + pid=pending.pid, **kwargs + ) actions.update(validate_actions) # Otherwise, we simply change the state of the next loan and it # will be validated at the next checkin at the pickup location. else: - pending['state'] = LoanState.ITEM_IN_TRANSIT_FOR_PICKUP + pending["state"] = LoanState.ITEM_IN_TRANSIT_FOR_PICKUP pending.update(pending, dbcommit=True, reindex=True) item, actions = self.cancel_loan(pid=loan.pid, **kwargs) - self.status_update(self, dbcommit=True, - reindex=True, forceindex=True) + self.status_update(self, dbcommit=True, reindex=True, forceindex=True) actions.update({LoanAction.UPDATE: loan}) item = self return item, actions @@ -510,71 +506,78 @@ def checks_before_a_cancel_item_request(self, loan, **kwargs): :return: the item record and list of actions performed """ actions_to_execute = { - 'cancel_loan': False, - 'loan_update': {}, - 'validate_first_pending': False + "cancel_loan": False, + "loan_update": {}, + "validate_first_pending": False, } libraries = self.compare_item_pickup_transaction_libraries(**kwargs) # List all loan states attached to this item except the loan to cancel. 
# If the list is empty, no pending request/loan are linked to this item - states = self.get_loans_states_by_item_pid_exclude_loan_pid( - self.pid, loan.pid) + states = self.get_loans_states_by_item_pid_exclude_loan_pid(self.pid, loan.pid) if not states: - if loan['state'] in \ - [LoanState.PENDING, LoanState.ITEM_IN_TRANSIT_TO_HOUSE]: + if loan["state"] in [LoanState.PENDING, LoanState.ITEM_IN_TRANSIT_TO_HOUSE]: # CANCEL_REQUEST_1_2, CANCEL_REQUEST_5_1_1: # cancel the current loan is the only action - actions_to_execute['cancel_loan'] = True - elif loan['state'] == LoanState.ITEM_ON_LOAN: + actions_to_execute["cancel_loan"] = True + elif loan["state"] == LoanState.ITEM_ON_LOAN: # CANCEL_REQUEST_3_1: no cancel action is possible on the loan # of a CHECKED_IN item. raise NoCirculationAction( - _('No circulation action is possible: CHECKED_IN')) - elif loan['state'] == LoanState.ITEM_IN_TRANSIT_FOR_PICKUP: + _("No circulation action is possible: CHECKED_IN") + ) + elif loan["state"] == LoanState.ITEM_IN_TRANSIT_FOR_PICKUP: # CANCEL_REQUEST_4_1_1: cancelling a ITEM_IN_TRANSIT_FOR_PICKUP # loan with no pending request puts the item on in_transit # and the loan becomes ITEM_IN_TRANSIT_TO_HOUSE. - actions_to_execute['loan_update']['state'] = \ - LoanState.ITEM_IN_TRANSIT_TO_HOUSE + actions_to_execute["loan_update"][ + "state" + ] = LoanState.ITEM_IN_TRANSIT_TO_HOUSE # Mark the loan to be cancelled to create an # OperationLog about this cancellation. - actions_to_execute['cancel_loan'] = True - elif loan['state'] == LoanState.ITEM_AT_DESK: - if not libraries['item_pickup_libraries']: + actions_to_execute["cancel_loan"] = True + elif loan["state"] == LoanState.ITEM_AT_DESK: + if not libraries["item_pickup_libraries"]: # CANCEL_REQUEST_2_1_1_1: when item library and pickup # pickup library arent equal, update loan to go in_transit. - actions_to_execute['loan_update']['state'] = \ - LoanState.ITEM_IN_TRANSIT_TO_HOUSE + actions_to_execute["loan_update"][ + "state" + ] = LoanState.ITEM_IN_TRANSIT_TO_HOUSE # Always mark the loan to be cancelled to create an # OperationLog about this cancellation. - actions_to_execute['cancel_loan'] = True - elif loan['state'] == LoanState.ITEM_AT_DESK and \ - LoanState.PENDING in states: + actions_to_execute["cancel_loan"] = True + elif loan["state"] == LoanState.ITEM_AT_DESK and LoanState.PENDING in states: # CANCEL_REQUEST_2_1_2: when item at desk with pending loan, cancel # the loan triggers an automatic validation of first pending loan. - actions_to_execute['validate_first_pending'] = True - elif loan['state'] == LoanState.ITEM_IN_TRANSIT_FOR_PICKUP and \ - LoanState.PENDING in states: + actions_to_execute["validate_first_pending"] = True + elif ( + loan["state"] == LoanState.ITEM_IN_TRANSIT_FOR_PICKUP + and LoanState.PENDING in states + ): # CANCEL_REQUEST_4_1_2: when item in_transit with pending loan, # cancel the loan triggers an automatic validation of 1st loan. - actions_to_execute['validate_first_pending'] = True - elif loan['state'] == LoanState.ITEM_IN_TRANSIT_TO_HOUSE and \ - LoanState.PENDING in states: + actions_to_execute["validate_first_pending"] = True + elif ( + loan["state"] == LoanState.ITEM_IN_TRANSIT_TO_HOUSE + and LoanState.PENDING in states + ): # CANCEL_REQUEST_5_1_2: when item in_transit with pending loan, # cancelling the loan triggers an automatic validation of first # pending loan. 
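checks_before_a_cancel_item_request never acts itself: it only fills a flags dict that cancel_item_request executes afterwards. A stripped-down sketch of that check/execute split, with shortened state names and illustrative logic:

    def checks(loan_state, has_pending):
        # phase 1: record intent only
        todo = {"cancel_loan": False, "loan_update": {}, "validate_first_pending": False}
        if loan_state == "ITEM_AT_DESK" and has_pending:
            todo["validate_first_pending"] = True
        elif loan_state == "ITEM_IN_TRANSIT_FOR_PICKUP" and not has_pending:
            todo["loan_update"]["state"] = "ITEM_IN_TRANSIT_TO_HOUSE"
            todo["cancel_loan"] = True
        return todo

    # phase 2, in the caller: act on the recorded flags
    todo = checks("ITEM_AT_DESK", has_pending=True)
    if todo["validate_first_pending"]:
        print("validate the first pending loan")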
- actions_to_execute['validate_first_pending'] = True - elif loan['state'] == LoanState.PENDING and \ - any(state in states for state in [ + actions_to_execute["validate_first_pending"] = True + elif loan["state"] == LoanState.PENDING and any( + state in states + for state in [ LoanState.ITEM_AT_DESK, LoanState.ITEM_ON_LOAN, LoanState.ITEM_IN_TRANSIT_FOR_PICKUP, LoanState.ITEM_IN_TRANSIT_TO_HOUSE, - LoanState.PENDING]): + LoanState.PENDING, + ] + ): # CANCEL_REQUEST_1_2, CANCEL_REQUEST_2_2, CANCEL_REQUEST_3_2, # CANCEL_REQUEST_4_2 CANCEL_REQUEST_5_2: # canceling a pending loan does not affect the other active loans. - actions_to_execute['cancel_loan'] = True + actions_to_execute["cancel_loan"] = True return actions_to_execute @@ -582,51 +585,43 @@ def checks_before_a_cancel_item_request(self, loan, **kwargs): def validate_request(self, current_loan, **kwargs): """Validate item request.""" loan = current_circulation.circulation.trigger( - current_loan, **dict(kwargs, trigger='validate_request') + current_loan, **dict(kwargs, trigger="validate_request") ) - return self, { - LoanAction.VALIDATE: loan - } + return self, {LoanAction.VALIDATE: loan} @add_action_parameters_and_flush_indexes @check_operation_allowed(ItemCirculationAction.EXTEND) def extend_loan(self, current_loan, **kwargs): """Extend checkout duration for this item.""" loan = current_circulation.circulation.trigger( - current_loan, **dict(kwargs, trigger='extend') + current_loan, **dict(kwargs, trigger="extend") ) - return self, { - LoanAction.EXTEND: loan - } + return self, {LoanAction.EXTEND: loan} @check_operation_allowed(ItemCirculationAction.REQUEST) @add_action_parameters_and_flush_indexes def request(self, current_loan, **kwargs): """Request item for the user and create notifications.""" - old_state = current_loan.get('state') + old_state = current_loan.get("state") loan = current_circulation.circulation.trigger( - current_loan, **dict(kwargs, trigger='request') + current_loan, **dict(kwargs, trigger="request") ) - new_state = loan.get('state') + new_state = loan.get("state") if old_state == new_state: current_app.logger.error( - f'Loan state has not changed after REQUEST: {loan.pid} ' - f'state: {old_state} ' - f'kwargs: {kwargs}' + f"Loan state has not changed after REQUEST: {loan.pid} " + f"state: {old_state} " + f"kwargs: {kwargs}" ) - return self, { - LoanAction.REQUEST: loan - } + return self, {LoanAction.REQUEST: loan} @add_action_parameters_and_flush_indexes def receive(self, current_loan, **kwargs): """Receive an item.""" loan = current_circulation.circulation.trigger( - current_loan, **dict(kwargs, trigger='receive') + current_loan, **dict(kwargs, trigger="receive") ) - return self, { - LoanAction.RECEIVE: loan - } + return self, {LoanAction.RECEIVE: loan} def checkin_triggers_validate_current_loan(self, actions, **kwargs): """Validate the current loan. 
@@ -635,11 +630,10 @@ def checkin_triggers_validate_current_loan(self, actions, **kwargs): :param kwargs : all others named arguments :return: the item record and list of actions performed """ - validate_current_loan = kwargs.pop('validate_current_loan', None) - if validate_current_loan: + if validate_current_loan := kwargs.pop("validate_current_loan", None): item, validate_actions = self.validate_request(**kwargs) actions = {LoanAction.VALIDATE: validate_actions} - actions.update(validate_actions) + actions |= validate_actions return item, actions return self, actions @@ -651,99 +645,86 @@ def actions_after_a_checkin(self, checkin_loan, actions, **kwargs): :param kwargs : all others named arguments :return: the item record and list of actions performed """ - # if item is requested we will automatically: - # - cancel the checked-in loan if still active - # - validate the next request - requests = self.number_of_requests() - if requests: - request = next(self.get_requests()) - if checkin_loan.is_active: - params = kwargs - params['pid'] = checkin_loan.pid - item, cancel_actions = self.cancel_loan(**params) - actions.update(cancel_actions) - # pass the correct transaction location - transaction_loc_pid = checkin_loan.get( - 'transaction_location_pid') - request['transaction_location_pid'] = transaction_loc_pid - # validate the request - item, validate_actions = self.validate_request(**request) - actions.update(validate_actions) - validate_loan = validate_actions[LoanAction.VALIDATE] - # receive the request if it is requested at transaction library - if validate_loan['state'] == LoanState.ITEM_IN_TRANSIT_FOR_PICKUP: - trans_loc = Location.get_record_by_pid(transaction_loc_pid) - req_loc = Location.get_record_by_pid( - request.get('pickup_location_pid')) - if req_loc.library_pid == trans_loc.library_pid: - item, receive_action = self.receive(**request) - actions.update(receive_action) - return item, actions - return self, actions + if not (requests := self.number_of_requests()): + return self, actions + request = next(self.get_requests()) + if checkin_loan.is_active: + params = kwargs + params["pid"] = checkin_loan.pid + item, cancel_actions = self.cancel_loan(**params) + actions.update(cancel_actions) + # pass the correct transaction location + transaction_loc_pid = checkin_loan.get("transaction_location_pid") + request["transaction_location_pid"] = transaction_loc_pid + # validate the request + item, validate_actions = self.validate_request(**request) + actions.update(validate_actions) + validate_loan = validate_actions[LoanAction.VALIDATE] + # receive the request if it is requested at transaction library + if validate_loan["state"] == LoanState.ITEM_IN_TRANSIT_FOR_PICKUP: + trans_loc = Location.get_record_by_pid(transaction_loc_pid) + req_loc = Location.get_record_by_pid(request.get("pickup_location_pid")) + if req_loc.library_pid == trans_loc.library_pid: + item, receive_action = self.receive(**request) + actions.update(receive_action) + return item, actions - def checkin_triggers_receive_in_transit_current_loan( - self, actions, **kwargs): + def checkin_triggers_receive_in_transit_current_loan(self, actions, **kwargs): """Receive the item_in_transit_for_pickup loan. 
:param actions : dict the list of actions performed :param kwargs : all others named arguments :return: the item record and list of actions performed """ - receive_in_transit_request = kwargs.pop( - 'receive_in_transit_request', None) - if receive_in_transit_request: + if receive_in_transit_request := kwargs.pop("receive_in_transit_request", None): item, receive_action = self.receive(**kwargs) actions.update(receive_action) # receive_loan = receive_action[LoanAction.RECEIVE] return item, actions return self, actions - def checkin_triggers_receive_and_validate_requests( - self, actions, **kwargs): + def checkin_triggers_receive_and_validate_requests(self, actions, **kwargs): """Receive the item_in_transit_in_house and validate first loan. :param actions : dict the list of actions performed :param kwargs : all others named arguments :return: the item record and list of actions performed """ - receive_current_and_validate_first = kwargs.pop( - 'receive_current_and_validate_first', None) - if receive_current_and_validate_first: - item, receive_action = self.receive(**kwargs) - actions.update(receive_action) - receive_loan = receive_action[LoanAction.RECEIVE] - # validate first request - requests = item.number_of_requests() - if requests: - request = next(item.get_requests()) - if receive_loan.is_active: - params = kwargs - params['pid'] = receive_loan.pid - item, cancel_actions = item.cancel_loan(**params) - actions.update(cancel_actions) - # pass the correct transaction location - transaction_loc_pid = receive_loan.get( - 'transaction_location_pid') - request['transaction_location_pid'] = transaction_loc_pid - # validate the request - item, validate_actions = item.validate_request(**request) - actions.update(validate_actions) - validate_loan = validate_actions[LoanAction.VALIDATE] - # receive request if it is requested at transaction library - if validate_loan['state'] == \ - LoanState.ITEM_IN_TRANSIT_FOR_PICKUP: - trans_loc = Location.get_record_by_pid( - transaction_loc_pid) - req_loc = Location.get_record_by_pid( - request.get('pickup_location_pid')) - if req_loc.library_pid == trans_loc.library_pid: - item, receive_action = item.receive(**request) - actions.update(receive_action) - return item, actions - return self, actions + if not ( + receive_current_and_validate_first := kwargs.pop( + "receive_current_and_validate_first", None + ) + ): + return self, actions + item, receive_action = self.receive(**kwargs) + actions.update(receive_action) + receive_loan = receive_action[LoanAction.RECEIVE] + if requests := item.number_of_requests(): + request = next(item.get_requests()) + if receive_loan.is_active: + params = kwargs + params["pid"] = receive_loan.pid + item, cancel_actions = item.cancel_loan(**params) + actions.update(cancel_actions) + # pass the correct transaction location + transaction_loc_pid = receive_loan.get("transaction_location_pid") + request["transaction_location_pid"] = transaction_loc_pid + # validate the request + item, validate_actions = item.validate_request(**request) + actions.update(validate_actions) + validate_loan = validate_actions[LoanAction.VALIDATE] + # receive request if it is requested at transaction library + if validate_loan["state"] == LoanState.ITEM_IN_TRANSIT_FOR_PICKUP: + trans_loc = Location.get_record_by_pid(transaction_loc_pid) + req_loc = Location.get_record_by_pid(request.get("pickup_location_pid")) + if req_loc.library_pid == trans_loc.library_pid: + item, receive_action = item.receive(**request) + actions.update(receive_action) + return item, 
actions def checkin_triggers_cancel_and_receive_first_loan( - self, current_loan, actions, **kwargs): + self, current_loan, actions, **kwargs + ): """Cancel the current loan and receive the first request. :param current_loan : loan to cancel @@ -751,111 +732,109 @@ def checkin_triggers_cancel_and_receive_first_loan( :param kwargs : all others named arguments :return: the item record and list of actions performed """ - cancel_current_and_receive_first = kwargs.pop( - 'cancel_current_and_receive_first', None) - if cancel_current_and_receive_first: - params = kwargs - params['pid'] = current_loan.pid - item, cancel_actions = self.cancel_loan(**params) - actions.update(cancel_actions) - cancel_loan = cancel_actions[LoanAction.CANCEL] - # receive the first request - requests = item.number_of_requests() - if requests: - request = next(item.get_requests()) - # pass the correct transaction location - transaction_loc_pid = cancel_loan.get( - 'transaction_location_pid') - request['transaction_location_pid'] = transaction_loc_pid - # validate the request - item, validate_actions = item.validate_request(**request) - actions.update(validate_actions) - validate_loan = validate_actions[LoanAction.VALIDATE] - # receive request if it is requested at transaction library - if validate_loan['state'] == \ - LoanState.ITEM_IN_TRANSIT_FOR_PICKUP: - trans_loc = Location.get_record_by_pid( - transaction_loc_pid) - req_loc = Location.get_record_by_pid( - request.get('pickup_location_pid')) - if req_loc.library_pid == trans_loc.library_pid: - item, receive_action = item.receive(**request) - actions.update(receive_action) - return item, actions - return self, actions + if not ( + cancel_current_and_receive_first := kwargs.pop( + "cancel_current_and_receive_first", None + ) + ): + return self, actions + params = kwargs + params["pid"] = current_loan.pid + item, cancel_actions = self.cancel_loan(**params) + actions.update(cancel_actions) + cancel_loan = cancel_actions[LoanAction.CANCEL] + if requests := item.number_of_requests(): + request = next(item.get_requests()) + # pass the correct transaction location + transaction_loc_pid = cancel_loan.get("transaction_location_pid") + request["transaction_location_pid"] = transaction_loc_pid + # validate the request + item, validate_actions = item.validate_request(**request) + actions.update(validate_actions) + validate_loan = validate_actions[LoanAction.VALIDATE] + # receive request if it is requested at transaction library + if validate_loan["state"] == LoanState.ITEM_IN_TRANSIT_FOR_PICKUP: + trans_loc = Location.get_record_by_pid(transaction_loc_pid) + req_loc = Location.get_record_by_pid(request.get("pickup_location_pid")) + if req_loc.library_pid == trans_loc.library_pid: + item, receive_action = item.receive(**request) + actions.update(receive_action) + return item, actions @add_action_parameters_and_flush_indexes def checkin(self, current_loan, **kwargs): """Perform a smart checkin action.""" actions = {} # checkin actions for an item on_shelf - item, actions = self.checkin_triggers_validate_current_loan( - actions, **kwargs) + item, actions = self.checkin_triggers_validate_current_loan(actions, **kwargs) if actions: return item, actions # checkin actions for an item in_transit with no requests item, actions = self.checkin_triggers_receive_in_transit_current_loan( - actions, **kwargs) + actions, **kwargs + ) if actions: return item, actions # checkin actions for an item in_transit_to_house at home library item, actions = 
self.checkin_triggers_receive_and_validate_requests( - actions, **kwargs) + actions, **kwargs + ) if actions: return item, actions # checkin actions for an item in_transit_to_house at external library item, actions = self.checkin_triggers_cancel_and_receive_first_loan( - current_loan, actions, **kwargs) + current_loan, actions, **kwargs + ) if actions: return item, actions # standard checkin actions checkin_loan = current_circulation.circulation.trigger( - current_loan, **dict(kwargs, trigger='checkin') + current_loan, **dict(kwargs, trigger="checkin") ) actions = {LoanAction.CHECKIN: checkin_loan} # validate and receive actions to execute after a standard checkin - item, actions = self.actions_after_a_checkin( - checkin_loan, actions, **kwargs) + item, actions = self.actions_after_a_checkin(checkin_loan, actions, **kwargs) return self, actions def prior_checkout_actions(self, action_params): """Actions executed prior to a checkout.""" actions = {} - if action_params.get('pid'): - loan = Loan.get_record_by_pid(action_params.get('pid')) - if loan['state'] == LoanState.ITEM_IN_TRANSIT_FOR_PICKUP and\ - loan.get('patron_pid') == action_params.get('patron_pid'): + if action_params.get("pid"): + loan = Loan.get_record_by_pid(action_params.get("pid")) + if loan["state"] == LoanState.ITEM_IN_TRANSIT_FOR_PICKUP and loan.get( + "patron_pid" + ) == action_params.get("patron_pid"): item, receive_actions = self.receive(**action_params) - actions.update(receive_actions) - elif loan['state'] == LoanState.ITEM_IN_TRANSIT_TO_HOUSE: + actions |= receive_actions + elif loan["state"] == LoanState.ITEM_IN_TRANSIT_TO_HOUSE: # do not pass the patron_pid when cancelling a loan cancel_params = deepcopy(action_params) - cancel_params.pop('patron_pid') + cancel_params.pop("patron_pid") item, cancel_actions = self.cancel_loan(**cancel_params) actions.update(cancel_actions) - del action_params['pid'] + del action_params["pid"] # TODO: Check what's wrong in this case because Loan is cancel # but loan variable is not updated and after prior_checkout # a checkout is done on the item (it becomes ON_LOAN) else: loan = get_loan_for_item(item_pid_to_object(self.pid)) - if loan and loan['state'] != LoanState.ITEM_AT_DESK: - item, cancel_actions = self.cancel_loan(pid=loan.get('pid')) + if loan and loan["state"] != LoanState.ITEM_AT_DESK: + item, cancel_actions = self.cancel_loan(pid=loan.get("pid")) actions.update(cancel_actions) # CHECKOUT_1_2_2: checkout denied if some pending loan are linked to it # with different patrons # Except while coming from an ITEM_IN_TRANSIT_TO_HOUSE loan because # the loan is cancelled and then came up in ON_SHELF to be checkout # by the second patron. - if self.status == ItemStatus.ON_SHELF and \ - loan['state'] != LoanState.ITEM_IN_TRANSIT_TO_HOUSE: + if ( + self.status == ItemStatus.ON_SHELF + and loan["state"] != LoanState.ITEM_IN_TRANSIT_TO_HOUSE + ): for res in self.get_item_loans_by_state(state=LoanState.PENDING): - if res.patron_pid != loan.get('patron_pid'): + if res.patron_pid != loan.get("patron_pid"): item_pid = item_pid_to_object(self.pid) - msg = "A pending loan exists for patron %s" % \ - res.patron_pid - raise ItemNotAvailableError( - item_pid=item_pid, description=msg) + msg = f"A pending loan exists for patron {res.patron_pid}" + raise ItemNotAvailableError(item_pid=item_pid, description=msg) # exit from loop after evaluation of the first request. 
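The checkin() method above tries one special-case handler after another and returns as soon as one of them produced actions; only when every handler falls through does the standard `checkin` trigger run. The control shape, reduced to stand-in handlers (names are illustrative, not rero-ils API):

    def try_validate(actions):
        return None, actions  # fell through: nothing to do

    def try_receive(actions):
        return "item", {"RECEIVE": "loan-3"}  # handled the checkin

    for handler in (try_validate, try_receive):
        item, actions = handler({})
        if actions:
            break
    else:  # no handler matched: run the standard trigger
        item, actions = "item", {"CHECKIN": "standard"}
    assert actions == {"RECEIVE": "loan-3"}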
                    break
        return action_params, actions

@@ -864,10 +843,13 @@ def prior_checkout_actions(self, action_params):
     def get_loans_by_item_pid(cls, item_pid):
         """Return any loans for the item."""
         item_pid_object = item_pid_to_object(item_pid)
-        results = current_circulation.loan_search_cls()\
-            .filter('term', item_pid__value=item_pid_object['value'])\
-            .filter('term', item_pid__type=item_pid_object['type'])\
-            .source(includes='pid').scan()
+        results = (
+            current_circulation.loan_search_cls()
+            .filter("term", item_pid__value=item_pid_object["value"])
+            .filter("term", item_pid__type=item_pid_object["type"])
+            .source(includes="pid")
+            .scan()
+        )
         for loan in results:
             yield Loan.get_record_by_pid(loan.pid)

@@ -880,29 +862,40 @@ def get_loans_states_by_item_pid_exclude_loan_pid(cls, item_pid, loan_pid):
         :return: the list of loans states attached to the item
         """
         exclude_states = [
-            LoanState.ITEM_RETURNED, LoanState.CANCELLED, LoanState.CREATED]
+            LoanState.ITEM_RETURNED,
+            LoanState.CANCELLED,
+            LoanState.CREATED,
+        ]
         item_pid_object = item_pid_to_object(item_pid)
-        results = current_circulation.loan_search_cls()\
-            .filter('term', item_pid__value=item_pid_object['value'])\
-            .filter('term', item_pid__type=item_pid_object['type'])\
-            .exclude('terms', state=exclude_states)\
-            .source(includes='pid').scan()
-        return [Loan.get_record_by_pid(loan.pid)['state']
-                for loan in results if loan.pid != loan_pid]
+        results = (
+            current_circulation.loan_search_cls()
+            .filter("term", item_pid__value=item_pid_object["value"])
+            .filter("term", item_pid__type=item_pid_object["type"])
+            .exclude("terms", state=exclude_states)
+            .source(includes="pid")
+            .scan()
+        )
+        return [
+            Loan.get_record_by_pid(loan.pid)["state"]
+            for loan in results
+            if loan.pid != loan_pid
+        ]

     @classmethod
     def get_loan_pid_with_item_on_loan(cls, item_pid):
         """Return the loan pid for a checked-out item."""
-        search = search_by_pid(item_pid=item_pid_to_object(
-            item_pid), filter_states=[LoanState.ITEM_ON_LOAN])
-        results = search.source(['pid']).scan()
+        search = search_by_pid(
+            item_pid=item_pid_to_object(item_pid),
+            filter_states=[LoanState.ITEM_ON_LOAN],
+        )
+        results = search.source(["pid"]).scan()
         try:
             return next(results).pid
         except StopIteration:
             return None

     @classmethod
-    def get_pendings_loans(cls, library_pid=None, sort_by='_created'):
+    def get_pendings_loans(cls, library_pid=None, sort_by="_created"):
         """Return list of sorted pending loans for a given library.

         default sort is set to _created
@@ -910,40 +903,45 @@ def get_pendings_loans(cls, library_pid=None, sort_by='_created'):
         # check if library exists
         lib = Library.get_record_by_pid(library_pid)
         if not lib:
-            raise Exception('Invalid Library PID')
+            raise Exception("Invalid Library PID")
         # the '-' prefix means a desc order.
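get_loans_by_item_pid and its siblings above all use the same elasticsearch-dsl shape, which black now renders as one parenthesized chain: stack term filters, restrict the source to `pid`, then scan lazily. A generic sketch; the index and field names are placeholders, and a default Elasticsearch connection is assumed to be configured:

    from elasticsearch_dsl import Search

    def iter_pids(index, **terms):
        search = Search(index=index)
        for field, value in terms.items():
            search = search.filter("term", **{field: value})
        # scan() scrolls through every matching hit, not just one page
        for hit in search.source(["pid"]).scan():
            yield hit.pid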
- sort_by = sort_by or '_created' - order_by = 'asc' - if sort_by.startswith('-'): + sort_by = sort_by or "_created" + order_by = "asc" + if sort_by.startswith("-"): sort_by = sort_by[1:] - order_by = 'desc' - - results = current_circulation.loan_search_cls()\ - .params(preserve_order=True)\ - .filter('term', state=LoanState.PENDING)\ - .filter('term', library_pid=library_pid)\ - .sort({sort_by: {"order": order_by}})\ - .source(includes='pid').scan() + order_by = "desc" + + results = ( + current_circulation.loan_search_cls() + .params(preserve_order=True) + .filter("term", state=LoanState.PENDING) + .filter("term", library_pid=library_pid) + .sort({sort_by: {"order": order_by}}) + .source(includes="pid") + .scan() + ) for loan in results: yield Loan.get_record_by_pid(loan.pid) @classmethod - def get_checked_out_loan_infos(cls, patron_pid, sort_by='_created'): + def get_checked_out_loan_infos(cls, patron_pid, sort_by="_created"): """Returns sorted checked out loans for a given patron.""" # the '-' prefix means a desc order. - sort_by = sort_by or '_created' - order_by = 'asc' - if sort_by.startswith('-'): + sort_by = sort_by or "_created" + order_by = "asc" + if sort_by.startswith("-"): sort_by = sort_by[1:] - order_by = 'desc' - - results = search_by_patron_item_or_document( - patron_pid=patron_pid, - filter_states=[LoanState.ITEM_ON_LOAN] - ).params(preserve_order=True)\ - .sort({sort_by: {"order": order_by}})\ - .source(['pid', 'item_pid.value'])\ - .scan() + order_by = "desc" + + results = ( + search_by_patron_item_or_document( + patron_pid=patron_pid, filter_states=[LoanState.ITEM_ON_LOAN] + ) + .params(preserve_order=True) + .sort({sort_by: {"order": order_by}}) + .source(["pid", "item_pid.value"]) + .scan() + ) for data in results: yield data.pid, data.item_pid.value @@ -974,19 +972,23 @@ def patron_has_an_active_loan_on_item(self, patron): :return: True is requested otherwise False. """ if patron: - search = search_by_patron_item_or_document( - item_pid=item_pid_to_object(self.pid), - patron_pid=patron.pid, - filter_states=[ - LoanState.PENDING, - LoanState.ITEM_IN_TRANSIT_FOR_PICKUP, - LoanState.ITEM_AT_DESK, - LoanState.ITEM_ON_LOAN - ]).params(preserve_order=True).source(['state']) - return len( - list( - dict.fromkeys( - [result.state for result in search.scan()]))) > 0 + search = ( + search_by_patron_item_or_document( + item_pid=item_pid_to_object(self.pid), + patron_pid=patron.pid, + filter_states=[ + LoanState.PENDING, + LoanState.ITEM_IN_TRANSIT_FOR_PICKUP, + LoanState.ITEM_AT_DESK, + LoanState.ITEM_ON_LOAN, + ], + ) + .params(preserve_order=True) + .source(["state"]) + ) + return ( + len(list(dict.fromkeys([result.state for result in search.scan()]))) > 0 + ) # CIRCULATION METHODS ===================================================== def can(self, action, **kwargs): @@ -998,7 +1000,7 @@ def can(self, action, **kwargs): a list of reasons to disallow if False. 
""" can, reasons = True, [] - actions = current_app.config.get('CIRCULATION_ACTIONS_VALIDATION', {}) + actions = current_app.config.get("CIRCULATION_ACTIONS_VALIDATION", {}) for func_name in actions.get(action, []): func_callback = obj_or_import_string(func_name) func_can, func_reasons = func_callback(self, **kwargs) @@ -1016,94 +1018,88 @@ def can_request(cls, item, **kwargs): """ reasons = [] if item.status in [ItemStatus.MISSING, ItemStatus.EXCLUDED]: - reasons.append(_('Item status disallows the operation.')) - if 'patron' in kwargs: - patron = kwargs['patron'] + reasons.append(_("Item status disallows the operation.")) + if "patron" in kwargs: + patron = kwargs["patron"] if patron.organisation_pid != item.organisation_pid: - reasons.append(_('Item and patron are not in the same ' - 'organisation.')) - if patron.patron.get('barcode') and \ - item.patron_has_an_active_loan_on_item( - patron): - reasons.append(_('Item is already checked-out or requested by ' - 'patron.')) + reasons.append( + _("Item and patron are not in the same " "organisation.") + ) + if patron.patron.get("barcode") and item.patron_has_an_active_loan_on_item( + patron + ): + reasons.append( + _("Item is already checked-out or requested by " "patron.") + ) return len(reasons) == 0, reasons - def action_filter(self, action, organisation_pid, library_pid, loan, - patron_pid, patron_type_pid): + def action_filter( + self, action, organisation_pid, library_pid, loan, patron_pid, patron_type_pid + ): """Filter actions.""" circ_policy = CircPolicy.provide_circ_policy( organisation_pid, library_pid, patron_type_pid, - self.item_type_circulation_category_pid + self.item_type_circulation_category_pid, ) - data = { - 'action_validated': True, - 'new_action': None - } - if action == 'extend': + data = {"action_validated": True, "new_action": None} + if action == "extend": can, reasons = self.can(ItemCirculationAction.EXTEND, loan=loan) if not can: - data['action_validated'] = False - if action == 'checkout' and not circ_policy.can_checkout: - data['action_validated'] = False + data["action_validated"] = False + if action == "checkout" and not circ_policy.can_checkout: + data["action_validated"] = False elif ( - action == 'receive' and circ_policy.can_checkout and - loan['state'] == LoanState.ITEM_IN_TRANSIT_FOR_PICKUP and - loan.get('patron_pid') == patron_pid + action == "receive" + and circ_policy.can_checkout + and loan["state"] == LoanState.ITEM_IN_TRANSIT_FOR_PICKUP + and loan.get("patron_pid") == patron_pid ): - data['action_validated'] = False - data['new_action'] = 'checkout' + data["action_validated"] = False + data["new_action"] = "checkout" return data @property def actions(self): """Get all available actions.""" - transitions = current_app.config.get('CIRCULATION_LOAN_TRANSITIONS') + transitions = current_app.config.get("CIRCULATION_LOAN_TRANSITIONS") loan = get_loan_for_item(item_pid_to_object(self.pid)) actions = set() if loan: organisation_pid = self.organisation_pid library_pid = self.library_pid - patron_pid = loan.get('patron_pid') - patron_type_pid = Patron.get_record_by_pid( - patron_pid).patron_type_pid - for transition in transitions.get(loan['state']): - action = transition.get('trigger') + patron_pid = loan.get("patron_pid") + patron_type_pid = Patron.get_record_by_pid(patron_pid).patron_type_pid + for transition in transitions.get(loan["state"]): + action = transition.get("trigger") data = self.action_filter( action=action, organisation_pid=organisation_pid, library_pid=library_pid, loan=loan, 
patron_pid=patron_pid, - patron_type_pid=patron_type_pid + patron_type_pid=patron_type_pid, ) - if data.get('action_validated'): + if data.get("action_validated"): actions.add(action) - if data.get('new_action'): - actions.add(data.get('new_action')) + if data.get("new_action"): + actions.add(data.get("new_action")) # default actions if not loan: for transition in transitions.get(LoanState.CREATED): - action = transition.get('trigger') + action = transition.get("trigger") actions.add(action) # remove unsupported action - for action in ['cancel', 'request']: - try: + for action in ["cancel", "request"]: + with suppress(KeyError): actions.remove(action) - # not yet supported - # actions.add('cancel_loan') - except KeyError: - pass # rename - try: - actions.remove('extend') - actions.add('extend_loan') - except KeyError: - pass + with suppress(KeyError): + actions.remove("extend") + actions.add("extend_loan") # if self['status'] == ItemStatus.MISSING: # actions.add('return_missing') # else: @@ -1111,8 +1107,9 @@ def actions(self): return actions @classmethod - def status_update(cls, item, on_shelf=True, dbcommit=False, - reindex=False, forceindex=False): + def status_update( + cls, item, on_shelf=True, dbcommit=False, reindex=False, forceindex=False + ): """Update item status. The item normally inherits its status from its active loan. In other @@ -1125,19 +1122,19 @@ def status_update(cls, item, on_shelf=True, dbcommit=False, :param reindex: reindex record :param forceindex: force the reindexation """ - loan = get_loan_for_item(item_pid_to_object(item.pid)) - if loan: - item['status'] = cls.statuses[loan['state']] - elif item['status'] != ItemStatus.MISSING and on_shelf: - item['status'] = ItemStatus.ON_SHELF + if loan := get_loan_for_item(item_pid_to_object(item.pid)): + item["status"] = cls.statuses[loan["state"]] + elif item["status"] != ItemStatus.MISSING and on_shelf: + item["status"] = ItemStatus.ON_SHELF item.commit() if dbcommit: item.dbcommit(reindex=True, forceindex=True) def item_has_active_loan_or_request(self): """Return True if active loan or a request found for item.""" - states = [LoanState.PENDING] + \ - current_app.config['CIRCULATION_STATES_LOAN_ACTIVE'] + states = [LoanState.PENDING] + current_app.config[ + "CIRCULATION_STATES_LOAN_ACTIVE" + ] search = search_by_pid( item_pid=item_pid_to_object(self.pid), filter_states=states, @@ -1150,12 +1147,9 @@ def return_missing(self): The item's status will be set to ItemStatus.ON_SHELF. """ # TODO: check transaction location - self['status'] = ItemStatus.ON_SHELF - self.status_update( - self, dbcommit=True, reindex=True, forceindex=True) - return self, { - LoanAction.RETURN_MISSING: None - } + self["status"] = ItemStatus.ON_SHELF + self.status_update(self, dbcommit=True, reindex=True, forceindex=True) + return self, {LoanAction.RETURN_MISSING: None} def get_links_to_me(self, get_pids=False): """Record links. 
@@ -1173,16 +1167,18 @@ def get_links_to_me(self, get_pids=False): LoanState.CREATED, LoanState.CANCELLED, LoanState.ITEM_RETURNED, - ] + ], + ) + query_fees = ( + PatronTransactionsSearch() + .filter("term", item__pid=self.pid) + .filter("term", status="open") + .filter("range", total_amount={"gt": 0}) + ) + query_collections = CollectionsSearch().filter("term", items__pid=self.pid) + query_local_fields = LocalFieldsSearch().get_local_fields( + self.provider.pid_type, self.pid ) - query_fees = PatronTransactionsSearch()\ - .filter('term', item__pid=self.pid)\ - .filter('term', status='open')\ - .filter('range', total_amount={'gt': 0}) - query_collections = CollectionsSearch()\ - .filter('term', items__pid=self.pid) - query_local_fields = LocalFieldsSearch()\ - .get_local_fields(self.provider.pid_type, self.pid) if get_pids: loans = sorted_pids(query_loans) @@ -1195,10 +1191,10 @@ def get_links_to_me(self, get_pids=False): collections = query_collections.count() local_fields = query_local_fields.count() links = { - 'loans': loans, - 'fees': fees, - 'collections': collections, - 'local_fields': local_fields + "loans": loans, + "fees": fees, + "collections": collections, + "local_fields": local_fields, } return {k: v for k, v in links.items() if v} @@ -1214,25 +1210,27 @@ def get_requests(self, sort_by=None, output=None): """ def _list_obj(): - order_by = 'asc' - sort_term = sort_by or '_created' - if sort_term.startswith('-'): - (sort_term, order_by) = (sort_term[1:], 'desc') - es_query = query\ - .params(preserve_order=True)\ - .sort({sort_term: {'order': order_by}}) + order_by = "asc" + sort_term = sort_by or "_created" + if sort_term.startswith("-"): + (sort_term, order_by) = (sort_term[1:], "desc") + es_query = query.params(preserve_order=True).sort( + {sort_term: {"order": order_by}} + ) for result in es_query.scan(): yield Loan.get_record_by_pid(result.pid) query = search_by_pid( - item_pid=item_pid_to_object(self.pid), filter_states=[ + item_pid=item_pid_to_object(self.pid), + filter_states=[ LoanState.PENDING, LoanState.ITEM_AT_DESK, - LoanState.ITEM_IN_TRANSIT_FOR_PICKUP - ]).source(['pid']) - if output == 'pids': + LoanState.ITEM_IN_TRANSIT_FOR_PICKUP, + ], + ).source(["pid"]) + if output == "pids": return [hit.pid for hit in query.scan()] - elif output == 'count': + elif output == "count": return query.count() else: return _list_obj() @@ -1256,16 +1254,17 @@ def get_item_loans_by_state(self, state=None, sort_by=None): :param sort_by : field to use for sorting :return: loans found """ - search = search_by_pid( - item_pid=item_pid_to_object(self.pid), filter_states=[ - state - ]).params(preserve_order=True).source(['pid']) - order_by = 'asc' - sort_by = sort_by or '_created' - if sort_by.startswith('-'): + search = ( + search_by_pid(item_pid=item_pid_to_object(self.pid), filter_states=[state]) + .params(preserve_order=True) + .source(["pid"]) + ) + order_by = "asc" + sort_by = sort_by or "_created" + if sort_by.startswith("-"): sort_by = sort_by[1:] - order_by = 'desc' - search = search.sort({sort_by: {'order': order_by}}) + order_by = "desc" + search = search.sort({sort_by: {"order": order_by}}) for result in search.scan(): yield Loan.get_record_by_pid(result.pid) @@ -1274,14 +1273,20 @@ def get_loan_states_for_an_item(self): :return: list of all loan states attached to the item """ - search = search_by_pid( - item_pid=item_pid_to_object(self.pid), filter_states=[ - LoanState.PENDING, - LoanState.ITEM_IN_TRANSIT_FOR_PICKUP, - LoanState.ITEM_IN_TRANSIT_TO_HOUSE, - 
LoanState.ITEM_AT_DESK, - LoanState.ITEM_ON_LOAN - ]).params(preserve_order=True).source(['state']) + search = ( + search_by_pid( + item_pid=item_pid_to_object(self.pid), + filter_states=[ + LoanState.PENDING, + LoanState.ITEM_IN_TRANSIT_FOR_PICKUP, + LoanState.ITEM_IN_TRANSIT_TO_HOUSE, + LoanState.ITEM_AT_DESK, + LoanState.ITEM_ON_LOAN, + ], + ) + .params(preserve_order=True) + .source(["state"]) + ) return list(dict.fromkeys([result.state for result in search.scan()])) def is_available(self): @@ -1295,7 +1300,7 @@ def is_available(self): items_query = ItemsSearch().available_query() # check item availability - if not items_query.filter('term', pid=self.pid).count(): + if not items_query.filter("term", pid=self.pid).count(): return False # --------------- Loans ------------------- @@ -1306,30 +1311,25 @@ def is_available(self): def availability_text(self): """Availability text to display for an item.""" circ_category = self.circulation_category - if circ_category.get('negative_availability'): - return circ_category.get('displayed_status', []) + [{ - 'language': 'default', - 'label': circ_category.get('name') - }] + if circ_category.get("negative_availability"): + return circ_category.get("displayed_status", []) + [ + {"language": "default", "label": circ_category.get("name")} + ] label = self.status if self.is_issue and self.issue_status != ItemIssueStatus.RECEIVED: label = self.issue_status - return [{ - 'language': 'default', - 'label': label - }] + return [{"language": "default", "label": label}] @property def temp_item_type_negative_availability(self): """Get the temporary item type neg availability.""" - if self.get('temporary_item_type'): - return ItemType.get_record_by_pid(extracted_data_from_ref( - self.get('temporary_item_type')) - ).get('negative_availability', False) + if self.get("temporary_item_type"): + return ItemType.get_record_by_pid( + extracted_data_from_ref(self.get("temporary_item_type")) + ).get("negative_availability", False) return False - def get_item_end_date(self, format='short', time_format='medium', - language=None): + def get_item_end_date(self, format="short", time_format="medium", language=None): """Get item due date for a given item. 
:param format: The date format, ex: 'full', 'medium', 'short' @@ -1338,9 +1338,8 @@ def get_item_end_date(self, format='short', time_format='medium', :param language: The language to fix the language format :return: original date, formatted date or None """ - loan = get_loan_for_item(item_pid_to_object(self.pid)) - if loan: - end_date = loan['end_date'] + if loan := get_loan_for_item(item_pid_to_object(self.pid)): + end_date = loan["end_date"] if format: return format_date_filter( end_date, @@ -1353,48 +1352,46 @@ def get_item_end_date(self, format='short', time_format='medium', def get_extension_count(self): """Get item renewal count.""" - loan = get_loan_for_item(item_pid_to_object(self.pid)) - if loan: - return loan.get('extension_count', 0) + if loan := get_loan_for_item(item_pid_to_object(self.pid)): + return loan.get("extension_count", 0) return 0 def number_of_requests(self): """Get number of requests for a given item.""" - return self.get_requests(output='count') + return self.get_requests(output="count") def patron_request_rank(self, patron): """Get the rank of patron in list of requests on this item.""" if patron: requests = self.get_requests() for rank, request in enumerate(requests, start=1): - if request['patron_pid'] == patron.pid: + if request["patron_pid"] == patron.pid: return rank return 0 def is_requested_by_patron(self, patron_barcode): """Check if the item is requested by a given patron.""" - patron = Patron.get_patron_by_barcode( - barcode=patron_barcode, org_pid=self.organisation_pid) - if patron: - request = get_request_by_item_pid_by_patron_pid( + if patron := Patron.get_patron_by_barcode( + barcode=patron_barcode, org_pid=self.organisation_pid + ): + if request := get_request_by_item_pid_by_patron_pid( item_pid=self.pid, patron_pid=patron.pid - ) - if request: + ): return True return False @classmethod - def get_requests_to_validate( - cls, library_pid=None, sort_by=None): + def get_requests_to_validate(cls, library_pid=None, sort_by=None): """Returns list of requests to validate for a given library.""" - loans = cls.get_pendings_loans( - library_pid=library_pid, sort_by=sort_by) + loans = cls.get_pendings_loans(library_pid=library_pid, sort_by=sort_by) returned_item_pids = [] for loan in loans: - item_pid = loan.get('item_pid', {}).get('value') + item_pid = loan.get("item_pid", {}).get("value") item = cls.get_record_by_pid(item_pid) - if item.status == ItemStatus.ON_SHELF and \ - item_pid not in returned_item_pids: + if ( + item.status == ItemStatus.ON_SHELF + and item_pid not in returned_item_pids + ): returned_item_pids.append(item_pid) yield item, loan @@ -1408,8 +1405,9 @@ def item_exists(item_pid): :rtype: bool """ from .api import Item + try: - Item.get_record_by_pid(item_pid.get('value')) + Item.get_record_by_pid(item_pid.get("value")) except PersistentIdentifierError: return False return True @@ -1418,9 +1416,9 @@ def item_exists(item_pid): def get_checked_out_items(cls, patron_pid=None, sort_by=None): """Return sorted checked out items for a given patron.""" from .api import Item + loan_infos = cls.get_checked_out_loan_infos( - patron_pid=patron_pid, - sort_by=sort_by + patron_pid=patron_pid, sort_by=sort_by ) returned_item_pids = [] for loan_pid, item_pid in loan_infos: diff --git a/rero_ils/modules/items/api/issue.py b/rero_ils/modules/items/api/issue.py index 3a332b5403..3400a9adfe 100644 --- a/rero_ils/modules/items/api/issue.py +++ b/rero_ils/modules/items/api/issue.py @@ -19,14 +19,13 @@ """API for manipulating the item issue.""" from datetime 
import datetime, timezone -from rero_ils.modules.notifications.api import Notification, \ - NotificationsSearch +from rero_ils.modules.notifications.api import Notification, NotificationsSearch from rero_ils.modules.notifications.dispatcher import Dispatcher from rero_ils.modules.notifications.models import NotificationType from rero_ils.modules.utils import get_ref_for_pid -from .record import ItemRecord from ..models import TypeOfItem +from .record import ItemRecord class ItemIssue(ItemRecord): @@ -35,52 +34,52 @@ class ItemIssue(ItemRecord): @property def is_issue(self): """Is this item is an issue or not.""" - return self.get('type') == TypeOfItem.ISSUE + return self.get("type") == TypeOfItem.ISSUE @property def expected_date(self): """Shortcut for issue expected date.""" - return self.get('issue', {}).get('expected_date') + return self.get("issue", {}).get("expected_date") @expected_date.setter def expected_date(self, value): """Setter for the issue expected date.""" - self.setdefault('issue', {})['expected_date'] = value + self.setdefault("issue", {})["expected_date"] = value @property def received_date(self): """Shortcut for issue received date.""" - return self.get('issue', {}).get('received_date') + return self.get("issue", {}).get("received_date") @property def sort_date(self): """Shortcut for issue sort date.""" - return self.get('issue', {}).get('sort_date') + return self.get("issue", {}).get("sort_date") @sort_date.setter def sort_date(self, value): """Setter for the issue sort date.""" - self.setdefault('issue', {})['sort_date'] = value + self.setdefault("issue", {})["sort_date"] = value @property def issue_status(self): """Shortcut for issue status.""" - return self.get('issue', {}).get('status') + return self.get("issue", {}).get("status") @issue_status.setter def issue_status(self, value): """Setter for issue status.""" - self.setdefault('issue', {})['status'] = value + self.setdefault("issue", {})["status"] = value @property def issue_is_regular(self): """Shortcut for issue is regular.""" - return self.get('issue', {}).get('regular', True) + return self.get("issue", {}).get("regular", True) @property def issue_status_date(self): """Shortcut for issue status date.""" - return self.get('issue', {}).get('status_date') + return self.get("issue", {}).get("status_date") @property def vendor(self): @@ -112,9 +111,10 @@ def issue_inherited_first_call_number(self): it returns the parent holdings first call number if exists. """ from rero_ils.modules.holdings.api import Holding - if self.is_issue and not self.get('call_number'): + + if self.is_issue and not self.get("call_number"): holding = Holding.get_record_by_pid(self.holding_pid) - return holding.get('call_number') + return holding.get("call_number") @property def issue_inherited_second_call_number(self): @@ -124,9 +124,10 @@ def issue_inherited_second_call_number(self): it returns the parent holdings second call number if exists. """ from rero_ils.modules.holdings.api import Holding - if self.is_issue and not self.get('second_call_number'): + + if self.is_issue and not self.get("second_call_number"): holding = Holding.get_record_by_pid(self.holding_pid) - return holding.get('second_call_number') + return holding.get("second_call_number") @classmethod def get_issues_pids_by_status(cls, issue_status, holdings_pid=None): @@ -137,15 +138,19 @@ def get_issues_pids_by_status(cls, issue_status, holdings_pid=None): :return a generator of issues pid. 
""" from .api import ItemsSearch - query = ItemsSearch() \ - .filter('term', issue__status=issue_status) \ - .filter('term', type='issue') + + query = ( + ItemsSearch() + .filter("term", issue__status=issue_status) + .filter("term", type="issue") + ) if holdings_pid: - query = query.filter('term', holding__pid=holdings_pid) - query = query\ - .params(preserve_order=True) \ - .sort({'_created': {'order': 'asc'}}) \ - .source(['pid']) + query = query.filter("term", holding__pid=holdings_pid) + query = ( + query.params(preserve_order=True) + .sort({"_created": {"order": "asc"}}) + .source(["pid"]) + ) return [hit.pid for hit in query.scan()] @@ -158,6 +163,7 @@ def get_issues_by_status(cls, status, holdings_pid=None): :return a generator of Item. """ from .api import Item + for pid in cls.get_issues_pids_by_status(status, holdings_pid): yield Item.get_record_by_pid(pid) @@ -174,22 +180,23 @@ def claims(self, recipients): """ # Create the notification and dispatch it synchronously. record = { - 'creation_date': datetime.now(timezone.utc).isoformat(), - 'notification_type': NotificationType.CLAIM_ISSUE, - 'context': { - 'item': {'$ref': get_ref_for_pid('item', self.pid)}, - 'recipients': recipients, - 'number': self.claims_count + 1 - } + "creation_date": datetime.now(timezone.utc).isoformat(), + "notification_type": NotificationType.CLAIM_ISSUE, + "context": { + "item": {"$ref": get_ref_for_pid("item", self.pid)}, + "recipients": recipients, + "number": self.claims_count + 1, + }, } notif = Notification.create(data=record, dbcommit=True, reindex=True) dispatcher_result = Dispatcher.dispatch_notifications( - notification_pids=[notif.get('pid')]) + notification_pids=[notif.get("pid")] + ) # If the dispatcher result is correct, reindex myself to update claims # information into ElasticSearch engine. Reload the notification to # obtain the correct notification metadata (status, process_date, ...) 
- if dispatcher_result.get('sent', 0): + if dispatcher_result.get("sent", 0): self.reindex() notif = Notification.get_record(notif.id) return notif diff --git a/rero_ils/modules/items/api/record.py b/rero_ils/modules/items/api/record.py index 7054fda4b2..4989fc94de 100644 --- a/rero_ils/modules/items/api/record.py +++ b/rero_ils/modules/items/api/record.py @@ -26,15 +26,19 @@ from rero_ils.modules.api import IlsRecord from rero_ils.modules.holdings.models import HoldingTypes from rero_ils.modules.item_types.api import ItemType -from rero_ils.modules.local_fields.extensions import \ - DeleteRelatedLocalFieldExtension -from rero_ils.modules.operation_logs.extensions import \ - UntrackedFieldsOperationLogObserverExtension +from rero_ils.modules.local_fields.extensions import DeleteRelatedLocalFieldExtension +from rero_ils.modules.operation_logs.extensions import ( + UntrackedFieldsOperationLogObserverExtension, +) from rero_ils.modules.organisations.api import Organisation from rero_ils.modules.record_extensions import OrgLibRecordExtension -from rero_ils.modules.utils import date_string_to_utc, \ - extracted_data_from_ref, generate_item_barcode, get_ref_for_pid, \ - trim_item_barcode_for_record +from rero_ils.modules.utils import ( + date_string_to_utc, + extracted_data_from_ref, + generate_item_barcode, + get_ref_for_pid, + trim_item_barcode_for_record, +) from ..extensions import IssueSortDateExtension, IssueStatusExtension from ..models import TypeOfItem @@ -48,8 +52,8 @@ class ItemRecord(IlsRecord): IssueSortDateExtension(), IssueStatusExtension(), OrgLibRecordExtension(), - UntrackedFieldsOperationLogObserverExtension(['status']), - DeleteRelatedLocalFieldExtension() + UntrackedFieldsOperationLogObserverExtension(["status"]), + DeleteRelatedLocalFieldExtension(), ] def extended_validation(self, **kwargs): @@ -78,57 +82,65 @@ def extended_validation(self, **kwargs): """ from . 
import ItemsSearch - if barcode := self.get('barcode'): + + if barcode := self.get("barcode"): if ( ItemsSearch() - .exclude('term', pid=self.pid) - .filter('term', barcode=barcode) - .source('pid') + .exclude("term", pid=self.pid) + .filter("term", barcode=barcode) + .source("pid") .count() ): - return _(f'Barcode {barcode} is already taken.') + return _(f"Barcode {barcode} is already taken.") from ...holdings.api import Holding - holding_pid = extracted_data_from_ref(self.get('holding').get('$ref')) + + holding_pid = extracted_data_from_ref(self.get("holding").get("$ref")) holding = Holding.get_record_by_pid(holding_pid) if not holding: - return _(f'Holding does not exist: {holding_pid}') - - if self.get('issue') and self.get('type') == TypeOfItem.STANDARD: - return _('Standard item can not have an issue field.') - if self.get('type') == TypeOfItem.ISSUE: - if not self.get('issue', {}): - return _('Issue item must have an issue field.') - if not self.get('enumerationAndChronology'): - return _('enumerationAndChronology field is required ' 'for an issue item') - note_types = [note.get('type') for note in self.get('notes', [])] + return _(f"Holding does not exist: {holding_pid}") + + if self.get("issue") and self.get("type") == TypeOfItem.STANDARD: + return _("Standard item can not have an issue field.") + if self.get("type") == TypeOfItem.ISSUE: + if not self.get("issue", {}): + return _("Issue item must have an issue field.") + if not self.get("enumerationAndChronology"): + return _( "enumerationAndChronology field is required " "for an issue item" ) + note_types = [note.get("type") for note in self.get("notes", [])] if len(note_types) != len(set(note_types)): - return _('Can not have multiple notes of the same type.') + return _("Can not have multiple notes of the same type.") # check temporary item type data - if tmp_itty := self.get('temporary_item_type'): - if tmp_itty['$ref'] == self['item_type']['$ref']: - return _('Temporary circulation category cannot be the same ' 'as the default circulation category.') - if tmp_itty.get('end_date'): - end_date = date_string_to_utc(tmp_itty.get('end_date')) + if tmp_itty := self.get("temporary_item_type"): + if tmp_itty["$ref"] == self["item_type"]["$ref"]: + return _( + "Temporary circulation category cannot be the same " + "as the default circulation category." + ) + if tmp_itty.get("end_date"): + end_date = date_string_to_utc(tmp_itty.get("end_date")) if end_date <= pytz.utc.localize(datetime.now()): - return _( + "Temporary circulation category end date must be " + "a date in the future." + ) return True @classmethod - def create(cls, data, id_=None, delete_pid=False, - dbcommit=False, reindex=False, **kwargs): + def create( + cls, data, id_=None, delete_pid=False, dbcommit=False, reindex=False, **kwargs + ): """Create item record.""" - data = cls._prepare_item_record(data=data, mode='create') + data = cls._prepare_item_record(data=data, mode="create") data = cls._set_issue_status_date(data=data) - record = super().create( - data, id_, delete_pid, dbcommit, reindex, **kwargs) + record = super().create(data, id_, delete_pid, dbcommit, reindex, **kwargs) holding = cls._increment_next_prediction_for_holding( - record, dbcommit=dbcommit, reindex=reindex) + record, dbcommit=dbcommit, reindex=reindex + ) # As we potentially update the parent holding when we create an issue # item, we need to commit this holding even if `dbcommit` is set to # false.
Without this commit, only the current (Item) resource will be @@ -156,7 +168,7 @@ def update(self, data, commit=True, dbcommit=False, reindex=False): :return: The updated item record. """ data = self._set_issue_status_date(data) - data = self._prepare_item_record(data=data, mode='update') + data = self._prepare_item_record(data=data, mode="update") super().update(data, commit, dbcommit, reindex) # TODO: some item updates do not require holding re-linking return self @@ -181,37 +193,39 @@ def _set_issue_status_date(cls, data): :param data: The record to update. :return: The updated record. """ - if data.get('type') != TypeOfItem.ISSUE: + if data.get("type") != TypeOfItem.ISSUE: return data - status = data.get('issue', {}).get('status') - item = cls.get_record_by_pid(data.get('pid')) + status = data.get("issue", {}).get("status") + item = cls.get_record_by_pid(data.get("pid")) now = datetime.now(timezone.utc).isoformat() if item: # item already exists if status and status != item.issue_status: - data['issue']['status_date'] = now + data["issue"]["status_date"] = now else: # item creation if status: - data['issue']['status_date'] = now + data["issue"]["status_date"] = now return data @classmethod def _increment_next_prediction_for_holding( - cls, item, dbcommit=False, reindex=False): + cls, item, dbcommit=False, reindex=False + ): """Increment next issue for items with regular frequencies.""" from ...holdings.api import Holding + holding = Holding.get_record_by_pid(item.holding_pid) - if item.get('type') == 'issue' and \ - item.get('issue', {}).get('regular') and \ - holding.holdings_type == 'serial' and \ - holding.get('patterns') and \ - holding.get('patterns', {}).get('frequency') != 'rdafr:1016': + if ( + item.get("type") == "issue" + and item.get("issue", {}).get("regular") + and holding.holdings_type == "serial" + and holding.get("patterns") + and holding.get("patterns", {}).get("frequency") != "rdafr:1016" + ): updated_holding = holding.increment_next_prediction() return holding.update( - data=updated_holding, - dbcommit=dbcommit, - reindex=reindex + data=updated_holding, dbcommit=dbcommit, reindex=reindex ) @classmethod @@ -225,50 +239,48 @@ def link_item_to_holding(cls, record, mode): :param mode: update or create mode. :return: the updated record with matched holdings record """ - from ...holdings.api import Holding, create_holding, \ - get_holding_pid_by_doc_location_item_type + from ...holdings.api import ( + Holding, + create_holding, + get_holding_pid_by_doc_location_item_type, + ) old_holding_pid = None old_holding_type = None - if record.get('holding'): - old_holding_pid = extracted_data_from_ref( - record['holding'], data='pid') - old_holding_type = Holding.get_holdings_type_by_holding_pid( - old_holding_pid) + if record.get("holding"): + old_holding_pid = extracted_data_from_ref(record["holding"], data="pid") + old_holding_type = Holding.get_holdings_type_by_holding_pid(old_holding_pid) - if ( - mode == 'create' and record.get('holding')) or ( + if (mode == "create" and record.get("holding")) or ( old_holding_type in [HoldingTypes.SERIAL, HoldingTypes.ELECTRONIC] ): return record # item type is important for linking to the correct holdings type. 
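# --- editorial example (not part of the patch) ------------------------------
# A self-contained sketch of the status-date stamping performed by
# `_set_issue_status_date` above: the timestamp is only (re)written when the
# issue status actually changes. The helper name and the plain dicts below
# are illustrative stand-ins, not part of the rero-ils API.
from datetime import datetime, timezone


def stamp_status_date(data, previous_status=None):
    """Stamp `issue.status_date` when the issue status changes."""
    status = data.get("issue", {}).get("status")
    if status and status != previous_status:
        data["issue"]["status_date"] = datetime.now(timezone.utc).isoformat()
    return data


record = stamp_status_date(
    {"type": "issue", "issue": {"status": "late"}}, previous_status="expected"
)
assert "status_date" in record["issue"]
# -----------------------------------------------------------------------------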
- item_record_type = record.get('type', 'standard') + item_record_type = record.get("type", "standard") # get pids from $ref - document_pid = extracted_data_from_ref(record['document'], data='pid') - location_pid = extracted_data_from_ref(record['location'], data='pid') - item_type_pid = extracted_data_from_ref( - record['item_type'], data='pid') + document_pid = extracted_data_from_ref(record["document"], data="pid") + location_pid = extracted_data_from_ref(record["location"], data="pid") + item_type_pid = extracted_data_from_ref(record["item_type"], data="pid") holding_pid = get_holding_pid_by_doc_location_item_type( - document_pid, location_pid, item_type_pid, item_record_type) + document_pid, location_pid, item_type_pid, item_record_type + ) # we will NOT create serial holdings for items - if not holding_pid and item_record_type != 'serial': + if not holding_pid and item_record_type != "serial": holdings_record = create_holding( document_pid=document_pid, location_pid=location_pid, - item_type_pid=item_type_pid) + item_type_pid=item_type_pid, + ) holding_pid = holdings_record.pid # update item record with the parent holding record if different # from the old holding pid if not old_holding_pid or holding_pid != old_holding_pid: - record['holding'] = {'$ref': get_ref_for_pid( - 'hold', - holding_pid - )} + record["holding"] = {"$ref": get_ref_for_pid("hold", holding_pid)} return record @classmethod @@ -294,26 +306,28 @@ def _prepare_item_record(cls, data, mode): def get_items_pid_by_holding_pid(cls, holding_pid, with_masked=True): """Returns item pids from holding pid.""" from . import ItemsSearch - es_query = ItemsSearch() \ - .params(preserve_order=True)\ - .filter('term', holding__pid=holding_pid) \ - .sort({'pid': {"order": "asc"}}) \ - .source(['pid']) + + es_query = ( + ItemsSearch() + .params(preserve_order=True) + .filter("term", holding__pid=holding_pid) + .sort({"pid": {"order": "asc"}}) + .source(["pid"]) + ) if not with_masked: - es_query = es_query.filter( - 'bool', must_not=[Q('term', _masked=True)]) + es_query = es_query.filter("bool", must_not=[Q("term", _masked=True)]) for item in es_query.scan(): yield item.pid @property def holding_pid(self): """Shortcut for item holding pid.""" - return extracted_data_from_ref(self.get('holding')) + return extracted_data_from_ref(self.get("holding")) @property def holding(self): """Shortcut for item holding.""" - return extracted_data_from_ref(self.get('holding'), data='record') + return extracted_data_from_ref(self.get("holding"), data="record") @property def holding_location_pid(self): @@ -330,13 +344,13 @@ def holding_library_pid(self): @property def document_pid(self): """Shortcut for item document pid.""" - return extracted_data_from_ref(self['document']) + return extracted_data_from_ref(self["document"]) @classmethod def get_document_pid_by_item_pid(cls, item_pid): """Returns document pid from item pid.""" item = cls.get_record_by_pid(item_pid) - return extracted_data_from_ref(item['document']) + return extracted_data_from_ref(item["document"]) @classmethod def get_document_pid_by_item_pid_object(cls, item_pid): @@ -347,8 +361,8 @@ def get_document_pid_by_item_pid_object(cls, item_pid): :return: the document pid :rtype: str """ - item = cls.get_record_by_pid(item_pid.get('value')) - return extracted_data_from_ref(item['document']) + item = cls.get_record_by_pid(item_pid.get("value")) + return extracted_data_from_ref(item["document"]) @classmethod def get_items_pid_by_document_pid(cls, document_pid): @@ -359,9 +373,13 @@ def 
get_items_pid_by_document_pid(cls, document_pid): :rtype generator """ from . import ItemsSearch - results = ItemsSearch()\ .filter('term', document__pid=document_pid)\ .source(['pid']).scan() + + results = ( + ItemsSearch() + .filter("term", document__pid=document_pid) + .source(["pid"]) + .scan() + ) for item in results: yield item_pid_to_object(item.pid) @@ -377,13 +395,11 @@ def get_item_by_barcode(cls, barcode, organisation_pid=None): :rtype `rero_ils.modules.items.api.api.Item` """ from . import ItemsSearch - filters = Q('term', barcode=barcode) + + filters = Q("term", barcode=barcode) if organisation_pid: - filters &= Q('term', organisation__pid=organisation_pid) - results = ItemsSearch()\ .filter(filters)\ .source(includes='pid')\ .scan() + filters &= Q("term", organisation__pid=organisation_pid) + results = ItemsSearch().filter(filters).source(includes="pid").scan() with suppress(StopIteration): return cls.get_record_by_pid(next(results).pid) @@ -397,49 +413,49 @@ def get_library(self): def get_location(self): """Shortcut to the location of the item.""" - return extracted_data_from_ref(self['location'], data='record') + return extracted_data_from_ref(self["location"], data="record") def get_circulation_location(self): """Get the location to use for circulation operations.""" # By default, the location used for circulation operations is the main # item location except if this item has a `temporary_location` and this # location isn't yet over. - if tmp_location := self.get('temporary_location'): - if end_date := tmp_location.get('end_date'): + if tmp_location := self.get("temporary_location"): + if end_date := tmp_location.get("end_date"): now_date = pytz.utc.localize(datetime.now()) end_date = date_string_to_utc(end_date) if now_date > end_date: return self.get_location() - return extracted_data_from_ref(tmp_location['$ref'], data='record') + return extracted_data_from_ref(tmp_location["$ref"], data="record") return self.get_location() @property def status(self): """Shortcut for item status.""" - return self.get('status', '') + return self.get("status", "") @property def enumerationAndChronology(self): """Shortcut for item enumerationAndChronology.""" - return self.get('enumerationAndChronology', '') + return self.get("enumerationAndChronology", "") @property def item_type_pid(self): """Shortcut for item type pid.""" - if self.get('item_type'): - return extracted_data_from_ref(self.get('item_type')) + if self.get("item_type"): + return extracted_data_from_ref(self.get("item_type")) @property def temporary_item_type_pid(self): """Shortcut for temporary item type pid.""" - if tmp_item_type := self.get('temporary_item_type', {}): + if tmp_item_type := self.get("temporary_item_type", {}): # if the temporary_item_type end_date is over: return None - if end_date := tmp_item_type.get('end_date'): + if end_date := tmp_item_type.get("end_date"): now_date = pytz.utc.localize(datetime.now()) end_date = date_string_to_utc(end_date) if now_date > end_date: return None - return extracted_data_from_ref(tmp_item_type.get('$ref')) + return extracted_data_from_ref(tmp_item_type.get("$ref")) @property def item_type_circulation_category_pid(self): @@ -453,22 +469,20 @@ def circulation_category(self): :return the in-use circulation category for this item.
:rtype rero_ils.modules.item_types.api.ItemType """ - return ItemType.get_record_by_pid( - self.item_type_circulation_category_pid - ) + return ItemType.get_record_by_pid(self.item_type_circulation_category_pid) @property def item_record_type(self): """Shortcut for item type, whether a standard or an issue record.""" - return self.get('type') + return self.get("type") @property def holding_circulation_category_pid(self): """Shortcut for holding circulation category pid of an item.""" from ...holdings.api import Holding + if self.holding_pid: - return Holding.get_record_by_pid( - self.holding_pid).circulation_category_pid + return Holding.get_record_by_pid(self.holding_pid).circulation_category_pid @property def call_numbers(self): @@ -477,15 +491,16 @@ def call_numbers(self): Inherit call numbers where applicable. """ from ...holdings.api import Holding - if self.get('type') == 'standard': - data = [self.get(key) for key in ['call_number', - 'second_call_number']] + + if self.get("type") == "standard": + data = [self.get(key) for key in ["call_number", "second_call_number"]] else: data = [] holding = Holding.get_record_by_pid( - extracted_data_from_ref(self.get('holding'))) + extracted_data_from_ref(self.get("holding")) + ) - for key in ['call_number', 'second_call_number']: + for key in ["call_number", "second_call_number"]: if self.get(key): data.append(self.get(key)) elif holding.get(key): @@ -495,14 +510,14 @@ def call_numbers(self): @property def location_pid(self): """Shortcut for item location pid.""" - if self.get('location'): - return extracted_data_from_ref(self.get('location')) + if self.get("location"): + return extracted_data_from_ref(self.get("location")) @property def location(self): """Shortcut to get item related location resource.""" - if self.get('location'): - return extracted_data_from_ref(self.get('location'), data='record') + if self.get("location"): + return extracted_data_from_ref(self.get("location"), data="record") @property def library_pid(self): @@ -519,15 +534,15 @@ def library(self): @property def organisation_pid(self): """Get organisation pid for item.""" - if self.get('organisation'): - return extracted_data_from_ref(self.get('organisation')) + if self.get("organisation"): + return extracted_data_from_ref(self.get("organisation")) return self.get_library().organisation_pid @property def organisation_view(self): """Get Organisation view for item.""" organisation = Organisation.get_record_by_pid(self.organisation_pid) - return organisation['view_code'] + return organisation["view_code"] def get_owning_pickup_location_pid(self): """Returns the pickup location pid for the item owning location. @@ -547,7 +562,7 @@ def notes(self): :return an array of all notes related to the item. Each note should have two keys : `type` and `content`. """ - return self.get('notes', []) + return self.get("notes", []) def get_note(self, note_type): """Return an item note by its type. 
@@ -555,8 +570,9 @@ def get_note(self, note_type): :param note_type: the type of note (see ``ItemNoteTypes``) :return the content of the note, None if note type is not found """ - notes = [note.get('content') for note in self.notes - if note.get('type') == note_type] + notes = [ + note.get("content") for note in self.notes if note.get("type") == note_type + ] return next(iter(notes), None) @property @@ -565,9 +581,8 @@ def is_new_acquisition(self): :return True if Item is a new acquisition, False otherwise """ - if acquisition_date := self.get('acquisition_date'): - return datetime.strptime( - acquisition_date, '%Y-%m-%d') < datetime.now() + if acquisition_date := self.get("acquisition_date"): + return datetime.strptime(acquisition_date, "%Y-%m-%d") < datetime.now() return False @classmethod @@ -578,6 +593,10 @@ def get_number_masked_items_by_holdings_pid(cls, holding_pid): :return number of un masked items. """ from . import ItemsSearch - query = ItemsSearch().filter('term', holding__pid=holding_pid) - return query.filter('bool', must_not=[Q('term', _masked=True)]) \ - .source(['pid']).count() + + query = ItemsSearch().filter("term", holding__pid=holding_pid) + return ( + query.filter("bool", must_not=[Q("term", _masked=True)]) + .source(["pid"]) + .count() + ) diff --git a/rero_ils/modules/items/cli.py b/rero_ils/modules/items/cli.py index 9d883d84dc..d9032008e5 100644 --- a/rero_ils/modules/items/cli.py +++ b/rero_ils/modules/items/cli.py @@ -28,7 +28,6 @@ import click from flask.cli import with_appcontext -from .models import ItemIdentifier, ItemNoteTypes, ItemStatus from ..documents.api import Document from ..holdings.models import HoldingIdentifier from ..item_types.api import ItemType @@ -36,6 +35,7 @@ from ..locations.api import Location from ..patrons.api import Patron from ..utils import extracted_data_from_ref, get_ref_for_pid +from .models import ItemIdentifier, ItemNoteTypes, ItemStatus class StreamArray(list): @@ -58,7 +58,7 @@ def __len__(self): return self._len -@click.command('reindex_items') +@click.command("reindex_items") @with_appcontext def reindex_items(): """Reindexing of item.""" @@ -68,27 +68,29 @@ def reindex_items(): item.reindex() -@click.command('create_items') -@click.option('-c', '--count', 'count', - type=click.INT, default=-1, help='default=for all records') -@click.option('-i', '--itemscount', 'itemscount', - type=click.INT, default=1, help='default=1') -@click.option('-m', '--missing', 'missing', - type=click.INT, default=5, help='default=5') +@click.command("create_items") +@click.option( + "-c", "--count", "count", type=click.INT, default=-1, help="default=for all records" +) +@click.option( + "-i", "--itemscount", "itemscount", type=click.INT, default=1, help="default=1" +) +@click.option("-m", "--missing", "missing", type=click.INT, default=5, help="default=5") # @click.argument('output', type=click.File('w')) -@click.option('-t', '--items_f', 'items_f', help='Items output file.') -@click.option('-h', '--holdings_f', 'holdings_f', help='Holdings output file.') +@click.option("-t", "--items_f", "items_f", help="Items output file.") +@click.option("-h", "--holdings_f", "holdings_f", help="Holdings output file.") @with_appcontext def create_items(count, itemscount, missing, items_f, holdings_f): """Create circulation items.""" + def generate(count, itemscount, missing): if count == -1: count = Document.count() click.secho( - f'Starting generating {count} items, random {itemscount} ...', - fg='green', + f"Starting generating {count} items, random 
{itemscount} ...", + fg="green", ) locations_pids = get_locations() @@ -102,20 +104,24 @@ def generate(count, itemscount, missing): workshop_item = 1 documents_pids = Document.get_all_pids() with click.progressbar( - reversed(list(documents_pids)[:count]), length=count) as bar: + reversed(list(documents_pids)[:count]), length=count + ) as bar: for document_pid in bar: holdings = [{}] # we will not create holdings for ebook and journal documents - doc_type = Document.get_record_by_pid( - document_pid).get('type')[0] - if doc_type.get('subtype') == 'docsubtype_e-book' \ - or doc_type.get('main_type') == 'docmaintype_serial': + doc_type = Document.get_record_by_pid(document_pid).get("type")[0] + if ( + doc_type.get("subtype") == "docsubtype_e-book" + or doc_type.get("main_type") == "docmaintype_serial" + ): continue - if Document.get_record_by_pid( - document_pid).get('type') in ['ebook', 'journal']: + if Document.get_record_by_pid(document_pid).get("type") in [ + "ebook", + "journal", + ]: continue - for i in range(0, randint(1, itemscount)): + for i in range(randint(1, itemscount)): org = random.choice(list(locations_pids.keys())) location_pid = random.choice(locations_pids[org]) item_type_pid = random.choice(item_types_pids[org]) @@ -123,23 +129,28 @@ def generate(count, itemscount, missing): holding_found = False new_holding = None for hold in holdings: - if hold.get('location_pid') == location_pid and \ - hold.get('item_type_pid') == item_type_pid: - item_holding_pid = hold.get('pid') + if ( + hold.get("location_pid") == location_pid + and hold.get("item_type_pid") == item_type_pid + ): + item_holding_pid = hold.get("pid") holding_found = True if not holding_found: holding_pid += 1 item_holding_pid = holding_pid holdings.append( - {'pid': item_holding_pid, - 'location_pid': location_pid, - 'item_type_pid': item_type_pid}) + { + "pid": item_holding_pid, + "location_pid": location_pid, + "item_type_pid": item_type_pid, + } + ) new_holding = create_holding_record( - item_holding_pid, location_pid, - item_type_pid, document_pid) - if org == '3': + item_holding_pid, location_pid, item_type_pid, document_pid + ) + if org == "3": # set a prefix for items of the workshop organisation - barcode = f'fictive{workshop_item}' + barcode = f"fictive{workshop_item}" if workshop_item < 17: # fix the status of the first 16 items to ON_SHELF status = ItemStatus.ON_SHELF @@ -162,15 +173,15 @@ def generate(count, itemscount, missing): status=status, new_acquisition=new_acquisition, price=price, - legacy_checkout_count=legacy_checkout_count + legacy_checkout_count=legacy_checkout_count, ) item_pid += 1 yield item, new_holding items = [] holdings = [] - with open(holdings_f, 'w', encoding='utf-8') as holdings_file: - with open(items_f, 'w', encoding='utf-8') as items_file: + with open(holdings_f, "w", encoding="utf-8") as holdings_file: + with open(items_f, "w", encoding="utf-8") as items_file: for item, holding in generate(count, itemscount, missing): items.append(item) if holding: @@ -179,8 +190,7 @@ def generate(count, itemscount, missing): json.dump(holdings, indent=2, fp=holdings_file) -def create_holding_record( - holding_pid, location_pid, item_type_pid, document_pid): +def create_holding_record(holding_pid, location_pid, item_type_pid, document_pid): """Prepare holdings record for creation. :param holding_pid: holdings pid. @@ -190,20 +200,13 @@ def create_holding_record( :return holding: unmasked holding record. 
""" - holding = { - 'pid': str(holding_pid), - 'holdings_type': 'standard', - 'location': { - '$ref': get_ref_for_pid('locations', location_pid) - }, - 'circulation_category': { - '$ref': get_ref_for_pid('item_types', item_type_pid) - }, - 'document': { - '$ref': get_ref_for_pid('documents', document_pid) - } + return { + "pid": str(holding_pid), + "holdings_type": "standard", + "location": {"$ref": get_ref_for_pid("locations", location_pid)}, + "circulation_category": {"$ref": get_ref_for_pid("item_types", item_type_pid)}, + "document": {"$ref": get_ref_for_pid("documents", document_pid)}, } - return holding def get_locations(): @@ -214,10 +217,8 @@ def get_locations(): to_return = {} for pid in Location.get_all_pids(): record = Location.get_record_by_pid(pid) - if not record.get('is_online'): - org_pid = extracted_data_from_ref( - record.get_library().get('organisation') - ) + if not record.get("is_online"): + org_pid = extracted_data_from_ref(record.get_library().get("organisation")) to_return.setdefault(org_pid, []).append(pid) return to_return @@ -230,15 +231,25 @@ def get_item_types(): to_return = {} for pid in ItemType.get_all_pids(): record = ItemType.get_record_by_pid(pid) - if record.get('type') != 'online': - org_pid = extracted_data_from_ref(record.get('organisation')) + if record.get("type") != "online": + org_pid = extracted_data_from_ref(record.get("organisation")) to_return.setdefault(org_pid, []).append(pid) return to_return -def create_random_item(item_pid, location_pid, missing, item_type_pid, - document_pid, holding_pid, barcode, status, - new_acquisition, price, legacy_checkout_count): +def create_random_item( + item_pid, + location_pid, + missing, + item_type_pid, + document_pid, + holding_pid, + barcode, + status, + new_acquisition, + price, + legacy_checkout_count, +): """Create items with randomised values.""" if not status: status = ItemStatus.ON_SHELF @@ -246,26 +257,18 @@ def create_random_item(item_pid, location_pid, missing, item_type_pid, status = ItemStatus.MISSING missing -= 1 item = { - 'pid': str(item_pid), - 'barcode': barcode, - 'call_number': str(item_pid).zfill(5), - 'status': status, - 'location': { - '$ref': get_ref_for_pid('locations', location_pid) - }, - 'item_type': { - '$ref': get_ref_for_pid('item_types', item_type_pid) - }, - 'document': { - '$ref': get_ref_for_pid('documents', document_pid) - }, - 'holding': { - '$ref': get_ref_for_pid('holdings', holding_pid) - }, - 'type': 'standard', - 'pac_code': '2_controlled_consumption', - 'price': price, - 'legacy_checkout_count': legacy_checkout_count + "pid": str(item_pid), + "barcode": barcode, + "call_number": str(item_pid).zfill(5), + "status": status, + "location": {"$ref": get_ref_for_pid("locations", location_pid)}, + "item_type": {"$ref": get_ref_for_pid("item_types", item_type_pid)}, + "document": {"$ref": get_ref_for_pid("documents", document_pid)}, + "holding": {"$ref": get_ref_for_pid("holdings", holding_pid)}, + "type": "standard", + "pac_code": "2_controlled_consumption", + "price": price, + "legacy_checkout_count": legacy_checkout_count, } # ACQUISITION DATE # add acquisition date if item is a new acquisition @@ -273,46 +276,56 @@ def create_random_item(item_pid, location_pid, missing, item_type_pid, if new_acquisition: diff = datetime.timedelta(random.randint(-31, 365)) acquisition_date = datetime.date.today() - diff - item['acquisition_date'] = acquisition_date.strftime('%Y-%m-%d') + item["acquisition_date"] = acquisition_date.strftime("%Y-%m-%d") # RANDOMLY ADD NOTES # we will add 
a note to +/- 60% of the items. # if an item has notes, between one and 9 notes will be added if random.random() < 0.6: - item['notes'] = random.sample([{ - 'type': ItemNoteTypes.GENERAL, - 'content': 'Here you can read a general/public note' - }, { - 'type': ItemNoteTypes.STAFF, - 'content': 'This is a staff note only visible by staff members.' - }, { - 'type': ItemNoteTypes.CHECKIN, - 'content': f'Checkin note for {barcode}' - }, { - 'type': ItemNoteTypes.CHECKOUT, - 'content': f'Checkout note for {barcode}' - }, { - 'type': ItemNoteTypes.ACQUISITION, - 'content': 'Acquisition note content' - }, { - 'type': ItemNoteTypes.BINDING, - 'content': 'Link with another item (same subject) : ' - 'dummy_link' - }, { - 'type': ItemNoteTypes.PROVENANCE, - 'content': 'Antique library collection' - }, { - 'type': ItemNoteTypes.CONDITION, - 'content': 'Missing some pages :-(' - }, { - 'type': ItemNoteTypes.PATRIMONIAL, - 'content': 'Part of the UNESCO books collection' - }], k=random.randint(1, 9)) + item["notes"] = random.sample( + [ + { + "type": ItemNoteTypes.GENERAL, + "content": "Here you can read a general/public note", + }, + { + "type": ItemNoteTypes.STAFF, + "content": "This is a staff note only visible by staff members.", + }, + { + "type": ItemNoteTypes.CHECKIN, + "content": f"Checkin note for {barcode}", + }, + { + "type": ItemNoteTypes.CHECKOUT, + "content": f"Checkout note for {barcode}", + }, + { + "type": ItemNoteTypes.ACQUISITION, + "content": "Acquisition note content", + }, + { + "type": ItemNoteTypes.BINDING, + "content": "Link with another item (same subject) : " + "dummy_link", + }, + { + "type": ItemNoteTypes.PROVENANCE, + "content": "Antique library collection", + }, + {"type": ItemNoteTypes.CONDITION, "content": "Missing some pages :-("}, + { + "type": ItemNoteTypes.PATRIMONIAL, + "content": "Part of the UNESCO books collection", + }, + ], + k=random.randint(1, 9), + ) # RANDOMLY ADD SECOND CALL NUMBER # we will add a second call number to +/- 25% of the items. if random.random() < 0.25: - item['second_call_number'] = ''.join( + item["second_call_number"] = "".join( random.choices(string.ascii_uppercase + string.digits, k=5) ) @@ -325,5 +338,5 @@ def get_patrons_barcodes(): barcodes = [] for uuid in patrons_ids: patron = Patron.get_record(uuid) - barcodes = barcodes + patron.patron.get('barcode', []) + barcodes = barcodes + patron.patron.get("barcode", []) return barcodes diff --git a/rero_ils/modules/items/decorators.py b/rero_ils/modules/items/decorators.py index 4ef107c56f..b180d947fa 100644 --- a/rero_ils/modules/items/decorators.py +++ b/rero_ils/modules/items/decorators.py @@ -24,8 +24,7 @@ from invenio_circulation.errors import CirculationException from invenio_records_rest.utils import obj_or_import_string -from rero_ils.modules.loans.api import Loan, \ - get_request_by_item_pid_by_patron_pid +from rero_ils.modules.loans.api import Loan, get_request_by_item_pid_by_patron_pid def add_action_parameters_and_flush_indexes(function): @@ -35,35 +34,39 @@ def add_action_parameters_and_flush_indexes(function): parameters are given. Adds missing parameters if any. Ensures the right loan transition for the given action.
""" + @wraps(function) def wrapper(item, *args, **kwargs): """Executed before loan action.""" checkin_loan = None - if function.__name__ == 'validate_request': + if function.__name__ == "validate_request": # checks if the given loan pid can be validated item.prior_validate_actions(**kwargs) - elif function.__name__ == 'checkin': + elif function.__name__ == "checkin": # the smart checkin requires extra checks/actions before a checkin loan, kwargs = item.prior_checkin_actions(**kwargs) checkin_loan = loan # CHECKOUT: Case where no loan PID - elif function.__name__ == 'checkout' and not kwargs.get('pid'): - patron_pid = kwargs['patron_pid'] + elif function.__name__ == "checkout" and not kwargs.get("pid"): + patron_pid = kwargs["patron_pid"] item_pid = item.pid request = get_request_by_item_pid_by_patron_pid( - item_pid=item_pid, patron_pid=patron_pid) + item_pid=item_pid, patron_pid=patron_pid + ) if request: - kwargs['pid'] = request.pid - elif function.__name__ == 'extend_loan': + kwargs["pid"] = request.pid + elif function.__name__ == "extend_loan": loan, kwargs = item.prior_extend_loan_actions(**kwargs) checkin_loan = loan loan, kwargs = item.complete_action_missing_params( - item=item, checkin_loan=checkin_loan, **kwargs) + item=item, checkin_loan=checkin_loan, **kwargs + ) Loan.check_required_params(function.__name__, **kwargs) item, action_applied = function(item, loan, *args, **kwargs) item.change_status_commit_and_reindex() return item, action_applied + return wrapper @@ -82,19 +85,21 @@ def check_operation_allowed(action): :param action: the action to check as ItemCirculationAction part. :raise CirculationException if a function disallow the operation. """ + def inner_function(func): @wraps(func) def decorated_view(*args, **kwargs): - override_blocking = kwargs.pop('override_blocking', False) + override_blocking = kwargs.pop("override_blocking", False) override_blocking = bool(override_blocking) if not override_blocking: - actions = current_app.config.get( - 'CIRCULATION_ACTIONS_VALIDATION', {}) + actions = current_app.config.get("CIRCULATION_ACTIONS_VALIDATION", {}) for func_name in actions.get(action, []): func_callback = obj_or_import_string(func_name) can, reasons = func_callback(args[0], **kwargs) if not can: raise CirculationException(description=reasons[0]) return func(*args, **kwargs) + return decorated_view + return inner_function diff --git a/rero_ils/modules/items/dumpers.py b/rero_ils/modules/items/dumpers.py index dde7cf2079..fc89850b2b 100644 --- a/rero_ils/modules/items/dumpers.py +++ b/rero_ils/modules/items/dumpers.py @@ -24,15 +24,12 @@ from rero_ils.modules.collections.api import CollectionsSearch from rero_ils.modules.commons.exceptions import MissingDataException from rero_ils.modules.documents.api import Document -from rero_ils.modules.documents.dumpers import \ - TitleDumper as DocumentTitleDumper +from rero_ils.modules.documents.dumpers import TitleDumper as DocumentTitleDumper from rero_ils.modules.holdings.api import Holding from rero_ils.modules.holdings.dumpers import ClaimIssueHoldingDumper from rero_ils.modules.item_types.api import ItemType -from rero_ils.modules.libraries.dumpers import \ - LibrarySerialClaimNotificationDumper -from rero_ils.modules.loans.dumpers import \ - CirculationDumper as LoanCirculationDumper +from rero_ils.modules.libraries.dumpers import LibrarySerialClaimNotificationDumper +from rero_ils.modules.loans.dumpers import CirculationDumper as LoanCirculationDumper from rero_ils.modules.locations.api import Location from 
rero_ils.modules.vendors.dumpers import VendorClaimIssueNotificationDumper @@ -49,20 +46,21 @@ def dump(self, record, data): """ location = record.get_location() data = { - 'pid': record.pid, - 'barcode': record.get('barcode'), - 'call_numbers': record.call_numbers, - 'location_name': location.get('name'), - 'library_name': location.get_library().get('name'), - 'enumerationAndChronology': record.get('enumerationAndChronology') + "pid": record.pid, + "barcode": record.get("barcode"), + "call_numbers": record.call_numbers, + "location_name": location.get("name"), + "library_name": location.get_library().get("name"), + "enumerationAndChronology": record.get("enumerationAndChronology"), } if item_type_pid := record.item_type_pid: if item_type := ItemType.get_record_by_pid(item_type_pid): - data['item_type'] = item_type['name'] + data["item_type"] = item_type["name"] if temporary_item_type_pid := record.temporary_item_type_pid: if temporary_item_type := ItemType.get_record_by_pid( - temporary_item_type_pid): - data['temporary_item_type'] = temporary_item_type['name'] + temporary_item_type_pid + ): + data["temporary_item_type"] = temporary_item_type["name"] data = {k: v for k, v in data.items() if v} return data @@ -83,10 +81,10 @@ def dump(self, record, data): # Add the inherited call numbers from parent holding record if item # call numbers is empty. - if all(k not in data for k in ['call_number', 'second_call_number']): + if all(k not in data for k in ["call_number", "second_call_number"]): holding = Holding.get_record_by_pid(record.holding_pid) - data['call_number'] = holding.get('call_number') - data['second_call_number'] = holding.get('second_call_number') + data["call_number"] = holding.get("call_number") + data["second_call_number"] = holding.get("second_call_number") data = {k: v for k, v in data.items() if v} return data @@ -98,24 +96,24 @@ class ClaimIssueNotificationDumper(InvenioRecordsDumper): def dump(self, record, data): """Dump an item issue for claim notification generation.""" if not record.is_issue: - raise TypeError('record must be an `ItemIssue` resource') + raise TypeError("record must be an `ItemIssue` resource") if not (holding := record.holding): - raise MissingDataException('item.holding') + raise MissingDataException("item.holding") if not (vendor := holding.vendor): - raise MissingDataException('item.holding.vendor') - - data.update({ - 'vendor': vendor.dumps( - dumper=VendorClaimIssueNotificationDumper()), - 'document': holding.document.dumps( - dumper=DocumentTitleDumper()), - 'library': holding.library.dumps( - dumper=LibrarySerialClaimNotificationDumper()), - 'holdings': holding.dumps( - dumper=ClaimIssueHoldingDumper()), - 'enumerationAndChronology': record.enumerationAndChronology, - 'claim_counter': record.claims_count - }) + raise MissingDataException("item.holding.vendor") + + data.update( + { + "vendor": vendor.dumps(dumper=VendorClaimIssueNotificationDumper()), + "document": holding.document.dumps(dumper=DocumentTitleDumper()), + "library": holding.library.dumps( + dumper=LibrarySerialClaimNotificationDumper() + ), + "holdings": holding.dumps(dumper=ClaimIssueHoldingDumper()), + "enumerationAndChronology": record.enumerationAndChronology, + "claim_counter": record.claims_count, + } + ) return {k: v for k, v in data.items() if v is not None} @@ -126,42 +124,41 @@ def dump(self, record, data): """Dump an item for circulation actions.""" item = record.replace_refs() data = deepcopy(dict(item)) - document = Document.get_record_by_pid(item['document']['pid']) + 
document = Document.get_record_by_pid(item["document"]["pid"]) doc_data = document.dumps() - data['document']['title'] = doc_data['title'] + data["document"]["title"] = doc_data["title"] - location = Location.get_record_by_pid(item['location']['pid']) + location = Location.get_record_by_pid(item["location"]["pid"]) loc_data = deepcopy(dict(location)) - data['location']['name'] = loc_data['name'] + data["location"]["name"] = loc_data["name"] # TODO: check if it is required - data['location']['organisation'] = { - 'pid': record.organisation_pid - } + data["location"]["organisation"] = {"pid": record.organisation_pid} # add library and location name on same field (used for sorting) library = location.get_library() - data['library_location_name'] = \ - f'{library["name"]}: {data["location"]["name"]}' + data["library_location_name"] = f'{library["name"]}: {data["location"]["name"]}' - data['actions'] = list(record.actions) + data["actions"] = list(record.actions) # add the current pending requests count - data['current_pending_requests'] = record.get_requests(output='count') + data["current_pending_requests"] = record.get_requests(output="count") # add metadata of the first pending request - requests = record.get_requests(sort_by='_created') + requests = record.get_requests(sort_by="_created") if first_request := next(requests, None): - data['pending_loans'] = [ - first_request.dumps(LoanCirculationDumper()) - ] + data["pending_loans"] = [first_request.dumps(LoanCirculationDumper())] # add temporary location name - if temporary_location_pid := item.get('temporary_location', {}).get( - 'pid' - ): - data['temporary_location']['name'] = Location.get_record_by_pid( - temporary_location_pid).get('name') + if temporary_location_pid := item.get("temporary_location", {}).get("pid"): + data["temporary_location"]["name"] = Location.get_record_by_pid( + temporary_location_pid + ).get("name") # add collections - results = CollectionsSearch().active_by_item_pid(item['pid'])\ - .params(preserve_order=True).source('title').scan() + results = ( + CollectionsSearch() + .active_by_item_pid(item["pid"]) + .params(preserve_order=True) + .source("title") + .scan() + ) if collections := [collection.title for collection in results]: - data['collections'] = collections + data["collections"] = collections return data diff --git a/rero_ils/modules/items/extensions.py b/rero_ils/modules/items/extensions.py index 2d1416443f..a35c988d96 100644 --- a/rero_ils/modules/items/extensions.py +++ b/rero_ils/modules/items/extensions.py @@ -74,11 +74,8 @@ def _control_status(record): # date) BUT this manager could forget to update the issue status to # 'expected' in this case, this extension will automatically change # the issue status. 
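# --- editorial example (not part of the patch) ------------------------------
# A dict-level sketch of the status-control rule handled just below: an issue
# still flagged LATE must not keep a leftover reception date, so the stale
# `received_date` is dropped. The helper name and the literal "late" (which
# mirrors ItemIssueStatus.LATE) are illustrative assumptions only.
def control_status(record):
    """Drop a reception date left over on a late issue."""
    issue = record.get("issue", {})
    if issue.get("status") == "late" and issue.get("received_date"):
        issue.pop("received_date", None)
    return record


item = {"issue": {"status": "late", "received_date": "2024-01-31"}}
assert "received_date" not in control_status(item)["issue"]
# -----------------------------------------------------------------------------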
- if ( - record.issue_status == ItemIssueStatus.LATE and - record.received_date - ): - record['issue'].pop('received_date', None) + if record.issue_status == ItemIssueStatus.LATE and record.received_date: + record["issue"].pop("received_date", None) invalid_statuses = [ItemIssueStatus.LATE] if record.is_issue and record.issue_status in invalid_statuses: expected_date = ciso8601.parse_datetime(record.expected_date) diff --git a/rero_ils/modules/items/jsonresolver.py b/rero_ils/modules/items/jsonresolver.py index 92f671aee5..c68fa07320 100644 --- a/rero_ils/modules/items/jsonresolver.py +++ b/rero_ils/modules/items/jsonresolver.py @@ -23,7 +23,7 @@ from ..jsonresolver import resolve_json_refs -@jsonresolver.route('/api/items/', host='bib.rero.ch') +@jsonresolver.route("/api/items/", host="bib.rero.ch") def item_resolver(pid): """Item resolver.""" - return resolve_json_refs('item', pid) + return resolve_json_refs("item", pid) diff --git a/rero_ils/modules/items/listener.py b/rero_ils/modules/items/listener.py index c14c128a01..92db212905 100644 --- a/rero_ils/modules/items/listener.py +++ b/rero_ils/modules/items/listener.py @@ -20,14 +20,22 @@ from rero_ils.modules.documents.api import Document from rero_ils.modules.local_fields.api import LocalField -from rero_ils.modules.local_fields.dumpers import \ - ElasticSearchDumper as LocalFieldESDumper +from rero_ils.modules.local_fields.dumpers import ( + ElasticSearchDumper as LocalFieldESDumper, +) from .api import Item, ItemsSearch -def enrich_item_data(sender, json=None, record=None, index=None, - doc_type=None, arguments=None, **dummy_kwargs): +def enrich_item_data( + sender, + json=None, + record=None, + index=None, + doc_type=None, + arguments=None, + **dummy_kwargs, +): """Signal sent before a record is indexed. :param json: The dumped record dictionary which can be modified. @@ -35,48 +43,43 @@ def enrich_item_data(sender, json=None, record=None, index=None, :param index: The index in which the record will be indexed. :param doc_type: The doc_type for the record. 
""" - if index.split('-')[0] != ItemsSearch.Meta.index: + if index.split("-")[0] != ItemsSearch.Meta.index: return if not isinstance(record, Item): - record = Item.get_record_by_pid(record.get('pid')) + record = Item.get_record_by_pid(record.get("pid")) # Document type - document = Document.get_record_by_pid(json['document']['pid']) - json['document']['document_type'] = document['type'] + document = Document.get_record_by_pid(json["document"]["pid"]) + json["document"]["document_type"] = document["type"] # Current pending requests - json['current_pending_requests'] = record.get_requests(output='count') + json["current_pending_requests"] = record.get_requests(output="count") - # add related local fields - local_fields = [ + if local_fields := [ field.dumps(dumper=LocalFieldESDumper()) for field in LocalField.get_local_fields(record) - ] - if local_fields: - json['local_fields'] = local_fields + ]: + json["local_fields"] = local_fields if record.is_issue: # Issue `sort_date` is an optional field but value is used to sort # issues from one another ; if this field is empty, use the issue # `expected_date` as value - json['issue']['sort_date'] = record.sort_date or record.expected_date + json["issue"]["sort_date"] = record.sort_date or record.expected_date # inherited_first_call_number to issue if call_number := record.issue_inherited_first_call_number: - json['issue']['inherited_first_call_number'] = call_number + json["issue"]["inherited_first_call_number"] = call_number # inherited_second_call_number to issue if call_number := record.issue_inherited_second_call_number: - json['issue']['inherited_second_call_number'] = call_number + json["issue"]["inherited_second_call_number"] = call_number # inject vendor pid if vendor_pid := record.vendor_pid: - json['vendor'] = {'pid': vendor_pid, 'type': 'vndr'} + json["vendor"] = {"pid": vendor_pid, "type": "vndr"} # inject claims information: counter and dates if notifications := record.claim_notifications: dates = [ - notification['creation_date'] + notification["creation_date"] for notification in notifications - if 'creation_date' in notification + if "creation_date" in notification ] - json['issue']['claims'] = { - 'counter': len(notifications), - 'dates': dates - } + json["issue"]["claims"] = {"counter": len(notifications), "dates": dates} diff --git a/rero_ils/modules/items/models.py b/rero_ils/modules/items/models.py index a8f5268284..143407c57f 100644 --- a/rero_ils/modules/items/models.py +++ b/rero_ils/modules/items/models.py @@ -27,11 +27,11 @@ class ItemIdentifier(RecordIdentifier): """Sequence generator for Item identifiers.""" - __tablename__ = 'item_id' - __mapper_args__ = {'concrete': True} + __tablename__ = "item_id" + __mapper_args__ = {"concrete": True} recid = db.Column( - db.BigInteger().with_variant(db.Integer, 'sqlite'), + db.BigInteger().with_variant(db.Integer, "sqlite"), primary_key=True, autoincrement=True, ) @@ -40,66 +40,60 @@ class ItemIdentifier(RecordIdentifier): class ItemMetadata(db.Model, RecordMetadataBase): """Item record metadata.""" - __tablename__ = 'item_metadata' + __tablename__ = "item_metadata" class TypeOfItem: """Enum class to list all possible item type.""" - STANDARD = 'standard' - ISSUE = 'issue' - PROVISIONAL = 'provisional' + STANDARD = "standard" + ISSUE = "issue" + PROVISIONAL = "provisional" class ItemStatus: """Class holding all available circulation item statuses.""" - ON_SHELF = 'on_shelf' - AT_DESK = 'at_desk' - ON_LOAN = 'on_loan' - IN_TRANSIT = 'in_transit' - EXCLUDED = 'excluded' - MISSING = 
'missing' + ON_SHELF = "on_shelf" + AT_DESK = "at_desk" + ON_LOAN = "on_loan" + IN_TRANSIT = "in_transit" + EXCLUDED = "excluded" + MISSING = "missing" class ItemIssueStatus: """Enum class to list all possible status of an issue item.""" - DELETED = 'deleted' - EXPECTED = 'expected' - LATE = 'late' - RECEIVED = 'received' + DELETED = "deleted" + EXPECTED = "expected" + LATE = "late" + RECEIVED = "received" class ItemCirculationAction: """Enum class to list all possible action about an item.""" - CHECKOUT = 'checkout' - CHECKIN = 'checkin' - REQUEST = 'request' - EXTEND = 'extend' + CHECKOUT = "checkout" + CHECKIN = "checkin" + REQUEST = "request" + EXTEND = "extend" class ItemNoteTypes: """Class to list all possible note types.""" - ACQUISITION = 'acquisition_note' - BINDING = 'binding_note' - CHECKIN = 'checkin_note' - CHECKOUT = 'checkout_note' - CONDITION = 'condition_note' - GENERAL = 'general_note' - PATRIMONIAL = 'patrimonial_note' - PROVENANCE = 'provenance_note' - STAFF = 'staff_note' - - PUBLIC = [ - GENERAL, - BINDING, - PROVENANCE, - CONDITION, - PATRIMONIAL - ] + ACQUISITION = "acquisition_note" + BINDING = "binding_note" + CHECKIN = "checkin_note" + CHECKOUT = "checkout_note" + CONDITION = "condition_note" + GENERAL = "general_note" + PATRIMONIAL = "patrimonial_note" + PROVENANCE = "provenance_note" + STAFF = "staff_note" + + PUBLIC = [GENERAL, BINDING, PROVENANCE, CONDITION, PATRIMONIAL] INVENTORY_LIST_CATEGORY = [ GENERAL, @@ -110,5 +104,5 @@ class ItemNoteTypes: BINDING, CONDITION, PATRIMONIAL, - PROVENANCE + PROVENANCE, ] diff --git a/rero_ils/modules/items/permissions.py b/rero_ils/modules/items/permissions.py index 589d591e2a..6b8f531123 100644 --- a/rero_ils/modules/items/permissions.py +++ b/rero_ils/modules/items/permissions.py @@ -19,19 +19,22 @@ """Permissions for items.""" from invenio_access import action_factory -from rero_ils.modules.permissions import AllowedByAction, \ - AllowedByActionRestrictByManageableLibrary, RecordPermissionPolicy +from rero_ils.modules.permissions import ( + AllowedByAction, + AllowedByActionRestrictByManageableLibrary, + RecordPermissionPolicy, +) # Specific action about items -late_issue_management = action_factory('late-issue-management') +late_issue_management = action_factory("late-issue-management") # Actions to control Items policies for CRUD operations -search_action = action_factory('item-search') -read_action = action_factory('item-read') -create_action = action_factory('item-create') -update_action = action_factory('item-update') -delete_action = action_factory('item-delete') -access_action = action_factory('item-access') +search_action = action_factory("item-search") +read_action = action_factory("item-read") +create_action = action_factory("item-create") +update_action = action_factory("item-update") +delete_action = action_factory("item-delete") +access_action = action_factory("item-access") class ItemPermissionPolicy(RecordPermissionPolicy): diff --git a/rero_ils/modules/items/serializers/__init__.py b/rero_ils/modules/items/serializers/__init__.py index 967bc7165e..a539e5dd81 100644 --- a/rero_ils/modules/items/serializers/__init__.py +++ b/rero_ils/modules/items/serializers/__init__.py @@ -19,8 +19,12 @@ from invenio_records_rest.serializers.response import record_responsify -from rero_ils.modules.serializers import JSONSerializer, RecordSchemaJSONV1, \ - search_responsify, search_responsify_file +from rero_ils.modules.serializers import ( + JSONSerializer, + RecordSchemaJSONV1, + search_responsify, + 
search_responsify_file, +) from .csv import ItemCSVSerializer from .json import ItemsJSONSerializer @@ -28,88 +32,86 @@ _csv = ItemCSVSerializer( JSONSerializer, csv_included_fields=[ - 'document_pid', - 'document_title', - 'document_creator', - 'document_main_type', - 'document_sub_type', - 'document_masked', - 'document_isbn', - 'document_issn', - 'document_series_statement', - 'document_edition_statement', - 'document_publication_year', - 'document_publisher', - 'document_local_field_1', - 'document_local_field_2', - 'document_local_field_3', - 'document_local_field_4', - 'document_local_field_5', - 'document_local_field_6', - 'document_local_field_7', - 'document_local_field_8', - 'document_local_field_9', - 'document_local_field_10', - 'item_acquisition_date', - 'item_pid', - 'item_create_date', - 'item_barcode', - 'item_call_number', - 'item_second_call_number', - 'item_legacy_checkout_count', - 'item_type', - 'item_library_name', - 'item_location_name', - 'item_pac_code', - 'item_holding_pid', - 'item_price', - 'item_status', - 'item_item_type', - 'item_general_note', - 'item_staff_note', - 'item_checkin_note', - 'item_checkout_note', - 'item_acquisition_note', - 'item_binding_note', - 'item_condition_note', - 'item_patrimonial_note', - 'item_provenance_note', - 'temporary_item_type', - 'temporary_item_type_expiry_date', - 'item_masked', - 'item_enumerationAndChronology', - 'item_local_field_1', - 'item_local_field_2', - 'item_local_field_3', - 'item_local_field_4', - 'item_local_field_5', - 'item_local_field_6', - 'item_local_field_7', - 'item_local_field_8', - 'item_local_field_9', - 'item_local_field_10', - 'issue_status', - 'issue_status_date', - 'issue_claims_count', - 'issue_expected_date', - 'issue_regular', - 'item_checkouts_count', - 'item_renewals_count', - 'last_transaction_date', - 'last_checkout_date', - 'current_pending_requests' - ] + "document_pid", + "document_title", + "document_creator", + "document_main_type", + "document_sub_type", + "document_masked", + "document_isbn", + "document_issn", + "document_series_statement", + "document_edition_statement", + "document_publication_year", + "document_publisher", + "document_local_field_1", + "document_local_field_2", + "document_local_field_3", + "document_local_field_4", + "document_local_field_5", + "document_local_field_6", + "document_local_field_7", + "document_local_field_8", + "document_local_field_9", + "document_local_field_10", + "item_acquisition_date", + "item_pid", + "item_create_date", + "item_barcode", + "item_call_number", + "item_second_call_number", + "item_legacy_checkout_count", + "item_type", + "item_library_name", + "item_location_name", + "item_pac_code", + "item_holding_pid", + "item_price", + "item_status", + "item_item_type", + "item_general_note", + "item_staff_note", + "item_checkin_note", + "item_checkout_note", + "item_acquisition_note", + "item_binding_note", + "item_condition_note", + "item_patrimonial_note", + "item_provenance_note", + "temporary_item_type", + "temporary_item_type_expiry_date", + "item_masked", + "item_enumerationAndChronology", + "item_local_field_1", + "item_local_field_2", + "item_local_field_3", + "item_local_field_4", + "item_local_field_5", + "item_local_field_6", + "item_local_field_7", + "item_local_field_8", + "item_local_field_9", + "item_local_field_10", + "issue_status", + "issue_status_date", + "issue_claims_count", + "issue_expected_date", + "issue_regular", + "item_checkouts_count", + "item_renewals_count", + "last_transaction_date", + 
"last_checkout_date", + "current_pending_requests", + ], ) """CSV serializer.""" csv_item_response = record_responsify(_csv, "text/csv") csv_item_search = search_responsify_file( - _csv, 'text/csv', - file_extension='csv', - file_suffix='inventory' + _csv, "text/csv", file_extension="csv", file_suffix="inventory" ) """JSON serializer.""" _json = ItemsJSONSerializer(RecordSchemaJSONV1) -json_item_search = search_responsify(_json, 'application/rero+json') -json_item_response = record_responsify(_json, 'application/rero+json') +json_item_search = search_responsify(_json, "application/rero+json") +json_item_response = record_responsify(_json, "application/rero+json") diff --git a/rero_ils/modules/items/serializers/collector.py b/rero_ils/modules/items/serializers/collector.py index 38772f14c4..da9915b705 100644 --- a/rero_ils/modules/items/serializers/collector.py +++ b/rero_ils/modules/items/serializers/collector.py @@ -28,38 +28,38 @@ from rero_ils.modules.local_fields.api import LocalField from rero_ils.modules.operation_logs.api import OperationLogsSearch -from ..models import ItemCirculationAction, ItemNoteTypes from ...notifications.api import NotificationsSearch +from ..models import ItemCirculationAction, ItemNoteTypes -class Collector(): +class Collector: """collect data for csv.""" # define chunk size chunk_size = 1000 - separator = ' | ' + separator = " | " role_filter = [ - 'rsp', - 'cre', - 'enj', - 'dgs', - 'prg', - 'dsr', - 'ctg', - 'cmp', - 'inv', - 'com', - 'pht', - 'ivr', - 'art', - 'ive', - 'chr', - 'aut', - 'arc', - 'fmk', - 'pra', - 'csl' + "rsp", + "cre", + "enj", + "dgs", + "prg", + "dsr", + "ctg", + "cmp", + "inv", + "com", + "pht", + "ivr", + "art", + "ive", + "chr", + "aut", + "arc", + "fmk", + "pra", + "csl", ] @classmethod @@ -91,88 +91,97 @@ def _build_doc(data): document_data = {} # document title document_titles = filter( - lambda t: t.get('type') == 'bf:Title', - data.get('title', {}) + lambda t: t.get("type") == "bf:Title", data.get("title", {}) ) if title := next(document_titles): - document_data['document_title'] = title.get('_text') + document_data["document_title"] = title.get("_text") # document masked - bool_values = ('No', 'Yes') - is_masked = data.get('masked', False) - document_data['document_masked'] = bool_values[is_masked] + bool_values = ("No", "Yes") + is_masked = data.get("masked", False) + document_data["document_masked"] = bool_values[is_masked] # process contributions creator = [] - for contribution in data.get('contribution', []): - if any(role in contribution.get('role') - for role in cls.role_filter): - authorized_access_point = \ - f'authorized_access_point_{language}' - if authorized_access_point in contribution.get('entity'): - creator.append( - contribution['entity'][ - authorized_access_point] - ) - document_data['document_creator'] = ' ; '.join(creator) + for contribution in data.get("contribution", []): + if any(role in contribution.get("role") for role in cls.role_filter): + authorized_access_point = f"authorized_access_point_{language}" + if authorized_access_point in contribution.get("entity"): + creator.append(contribution["entity"][authorized_access_point]) + document_data["document_creator"] = " ; ".join(creator) # document type/subtypes doc_types = [] doc_subtypes = [] - for document_type in data.get('type'): - doc_types.append(document_type.get('main_type')) - doc_subtypes.append(document_type.get('subtype')) + for document_type in data.get("type"): + doc_types.append(document_type.get("main_type")) + 
doc_subtypes.append(document_type.get("subtype")) if doc_types := filter(None, doc_types): - document_data['document_main_type'] = ', '.join(doc_types) + document_data["document_main_type"] = ", ".join(doc_types) if doc_subtypes := filter(None, doc_subtypes): - document_data['document_sub_type'] = ', '.join(doc_subtypes) + document_data["document_sub_type"] = ", ".join(doc_subtypes) # identifiers document_data |= { - 'document_isbn': cls.separator.join(data.get('isbn', [])), - 'document_issn': cls.separator.join(data.get('issn', [])) + "document_isbn": cls.separator.join(data.get("isbn", [])), + "document_issn": cls.separator.join(data.get("issn", [])), } # document_series_statement - document_data['document_series_statement'] = cls.separator.join( - data['value'] - for serie in data.get('seriesStatement', []) - for data in serie.get('_text', []) + document_data["document_series_statement"] = cls.separator.join( + data["value"] + for serie in data.get("seriesStatement", []) + for data in serie.get("_text", []) ) # document_edition_statement - document_data['document_edition_statement'] = cls.separator.join( - edition.get('value') - for edition_statement in data.get('editionStatement', []) - for edition in edition_statement.get('_text', []) + document_data["document_edition_statement"] = cls.separator.join( + edition.get("value") + for edition_statement in data.get("editionStatement", []) + for edition in edition_statement.get("_text", []) ) # provision activity # we only use the first provision activity of type # `bf:publication` publications = [ - prov for prov in data.get('provisionActivity', []) - if prov.get('type') == 'bf:Publication' + prov + for prov in data.get("provisionActivity", []) + if prov.get("type") == "bf:Publication" ] if provision_activity := next(iter(publications), None): - start_date = provision_activity.get('startDate', '') - end_date = provision_activity.get('endDate') - document_data['document_publication_year'] = \ - f'{start_date} - {end_date}' if end_date else start_date - - document_data['document_publisher'] = cls.separator.join( - data['value'] - for stmt in provision_activity.get('statement', []) - for data in stmt.get('label', []) - if stmt['type'] == 'bf:Agent' + start_date = provision_activity.get("startDate", "") + end_date = provision_activity.get("endDate") + document_data["document_publication_year"] = ( + f"{start_date} - {end_date}" if end_date else start_date + ) + + document_data["document_publisher"] = cls.separator.join( + data["value"] + for stmt in provision_activity.get("statement", []) + for data in stmt.get("label", []) + if stmt["type"] == "bf:Agent" ) return document_data - doc_search = DocumentsSearch() \ - .filter('terms', holdings__items__pid=list(item_pids)) \ - .source(['pid', 'title', 'contribution', 'provisionActivity', - 'type', '_masked', 'isbn', 'issn', 'seriesStatement', - 'editionStatement']) + doc_search = ( + DocumentsSearch() + .filter("terms", holdings__items__pid=list(item_pids)) + .source( + [ + "pid", + "title", + "contribution", + "provisionActivity", + "type", + "_masked", + "isbn", + "issn", + "seriesStatement", + "editionStatement", + ] + ) + ) docs = {} for doc in doc_search.scan(): docs[doc.pid] = _build_doc(doc.to_dict()) @@ -187,52 +196,52 @@ def get_item_data(hit): """ hit = hit.to_dict() csv_data = { - 'item_create_date': ciso8601.parse_datetime( - hit['_created']).date(), - 'item_type_pid': hit.get('item_type', {}).get('pid'), - 'item_library_pid': hit.get('library', {}).get('pid'), - 'item_location_pid': 
hit.get('location', {}).get('pid'), - 'document_pid': hit.get('document', {}).get('pid'), - 'item_holding_pid': hit.get('holding', {}).get('pid'), - 'item_org_pid': hit.get('organisation', {}).get('pid'), - 'temporary_item_type_pid': hit.get( - 'temporary_item_type', {}).get('pid'), - 'item_masked': 'No', - 'item_status': hit.get('status'), - 'issue': hit.get('issue', {}), - 'current_pending_requests': hit.get('current_pending_requests', 0) + "item_create_date": ciso8601.parse_datetime(hit["_created"]).date(), + "item_type_pid": hit.get("item_type", {}).get("pid"), + "item_library_pid": hit.get("library", {}).get("pid"), + "item_location_pid": hit.get("location", {}).get("pid"), + "document_pid": hit.get("document", {}).get("pid"), + "item_holding_pid": hit.get("holding", {}).get("pid"), + "item_org_pid": hit.get("organisation", {}).get("pid"), + "temporary_item_type_pid": hit.get("temporary_item_type", {}).get("pid"), + "item_masked": "No", + "item_status": hit.get("status"), + "issue": hit.get("issue", {}), + "current_pending_requests": hit.get("current_pending_requests", 0), } fields = [ - ('pid', 'item_pid'), - ('barcode', 'item_barcode'), - ('call_number', 'item_call_number'), - ('second_call_number', 'item_second_call_number'), - ('legacy_checkout_count', 'item_legacy_checkout_count'), - ('pac_code', 'item_pac_code'), - ('price', 'item_price'), - ('type', 'item_item_type'), - ('enumerationAndChronology', 'item_enumerationAndChronology') + ("pid", "item_pid"), + ("barcode", "item_barcode"), + ("call_number", "item_call_number"), + ("second_call_number", "item_second_call_number"), + ("legacy_checkout_count", "item_legacy_checkout_count"), + ("pac_code", "item_pac_code"), + ("price", "item_price"), + ("type", "item_item_type"), + ("enumerationAndChronology", "item_enumerationAndChronology"), ] for field_name, new_field_name in fields: csv_data[new_field_name] = hit.get(field_name) # dates fields - if end_date := hit.get('temporary_item_type', {}).get('end_date'): - csv_data['temporary_item_type_expiry_date'] = \ - ciso8601.parse_datetime(end_date).date() - if acquisition_date := hit.get('acquisition_date'): - csv_data['item_acquisition_date'] = \ - ciso8601.parse_datetime(acquisition_date).date() - if item_create_date := hit.get('_created'): - csv_data['item_create_date'] = \ - ciso8601.parse_datetime(item_create_date).date() + if end_date := hit.get("temporary_item_type", {}).get("end_date"): + csv_data["temporary_item_type_expiry_date"] = ciso8601.parse_datetime( + end_date + ).date() + if acquisition_date := hit.get("acquisition_date"): + csv_data["item_acquisition_date"] = ciso8601.parse_datetime( + acquisition_date + ).date() + if item_create_date := hit.get("_created"): + csv_data["item_create_date"] = ciso8601.parse_datetime( + item_create_date + ).date() # process notes - for note in hit.get('notes', []): - if note.get('type') in ItemNoteTypes.INVENTORY_LIST_CATEGORY: - csv_data[f"item_{note.get('type')}"] = note.get( - 'content') + for note in hit.get("notes", []): + if note.get("type") in ItemNoteTypes.INVENTORY_LIST_CATEGORY: + csv_data[f"item_{note.get('type')}"] = note.get("content") # item masking - csv_data['item_masked'] = 'Yes' if hit.get('_masked') else 'No' + csv_data["item_masked"] = "Yes" if hit.get("_masked") else "No" return csv_data @@ -243,23 +252,23 @@ def append_issue_data(hit, csv_data): :param csv_data: data dictionary. 
""" # process item issue - if csv_data['item_item_type'] != 'issue': + if csv_data["item_item_type"] != "issue": return - issue = csv_data.pop('issue', None) - if issue.get('inherited_first_call_number') \ - and not csv_data.get('item_call_number'): - csv_data['item_call_number'] = \ - issue.get('inherited_first_call_number') - csv_data['issue_status'] = issue.get('status') - if issue.get('status_date'): - csv_data['issue_status_date'] = \ - ciso8601.parse_datetime( - issue.get('status_date')).date() - csv_data['issue_claims_count'] = NotificationsSearch()\ - .get_claims_count(csv_data['item_pid']) - csv_data['issue_expected_date'] = \ - issue.get('expected_date') - csv_data['issue_regular'] = issue.get('regular') + issue = csv_data.pop("issue", None) + if issue.get("inherited_first_call_number") and not csv_data.get( + "item_call_number" + ): + csv_data["item_call_number"] = issue.get("inherited_first_call_number") + csv_data["issue_status"] = issue.get("status") + if issue.get("status_date"): + csv_data["issue_status_date"] = ciso8601.parse_datetime( + issue.get("status_date") + ).date() + csv_data["issue_claims_count"] = NotificationsSearch().get_claims_count( + csv_data["item_pid"] + ) + csv_data["issue_expected_date"] = issue.get("expected_date") + csv_data["issue_regular"] = issue.get("regular") @staticmethod def append_document_data(csv_data, documents): @@ -270,10 +279,10 @@ def append_document_data(csv_data, documents): """ try: # update csv data with document - csv_data.update(documents.get(csv_data.get('document_pid'))) + csv_data.update(documents.get(csv_data.get("document_pid"))) except Exception as err: current_app.logger.error( - 'ERROR in csv serializer: ' + "ERROR in csv serializer: " f'{message} on document: {csv_data.get("document_pid")}' ) @@ -283,6 +292,7 @@ def append_local_fields(cls, csv_data): :param csv_data: data dictionary. """ + def _append_res_local_fields(resource_type, resource_pid, csv_data): """Append local fields data. @@ -290,17 +300,18 @@ def _append_res_local_fields(resource_type, resource_pid, csv_data): :param resource_type: resource_type. :param resource_pid: resource_pid. """ - lf_type = 'document' if resource_type == 'doc' else resource_type - org_pid = csv_data['item_org_pid'] + lf_type = "document" if resource_type == "doc" else resource_type + org_pid = csv_data["item_org_pid"] local_fields = LocalField.get_local_fields_by_id( - resource_type, resource_pid, organisation_pid=org_pid) + resource_type, resource_pid, organisation_pid=org_pid + ) for field, num in itertools.product(local_fields, range(1, 11)): - field_name = f'{lf_type}_local_field_{num}' - if field_data := field.get('fields', {}).get(f'field_{num}'): + field_name = f"{lf_type}_local_field_{num}" + if field_data := field.get("fields", {}).get(f"field_{num}"): csv_data[field_name] = cls.separator.join(field_data) - _append_res_local_fields('item', csv_data['item_pid'], csv_data) - _append_res_local_fields('doc', csv_data['document_pid'], csv_data) + _append_res_local_fields("item", csv_data["item_pid"], csv_data) + _append_res_local_fields("doc", csv_data["document_pid"], csv_data) @staticmethod def get_loans_by_item_pids(item_pids=None, chunk_size=200): @@ -309,62 +320,73 @@ def get_loans_by_item_pids(item_pids=None, chunk_size=200): :param item_pids: item pids. :return list of dicts of item statistics. 
""" + def _get_loans_by_item_pids(pids): # initial es query to return all loans for the given item_pids - query = OperationLogsSearch()\ - .filter('terms', loan__item__pid=pids) + query = OperationLogsSearch().filter("terms", loan__item__pid=pids) # adds checkouts aggregation checkout_agg = A( - 'filter', - term={'loan.trigger': ItemCirculationAction.CHECKOUT}, + "filter", + term={"loan.trigger": ItemCirculationAction.CHECKOUT}, aggs=dict( item_pid=A( - 'terms', field='loan.item.pid', + "terms", + field="loan.item.pid", size=chunk_size, - aggs=dict(last_op=A('max', field='date'))))) - query.aggs.bucket('checkout', checkout_agg) + aggs=dict(last_op=A("max", field="date")), + ) + ), + ) + query.aggs.bucket("checkout", checkout_agg) # adds renewal aggregation renewal_agg = A( - 'filter', - term={'loan.trigger': ItemCirculationAction.EXTEND}, - aggs=dict(item_pid=A( - 'terms', size=chunk_size, field='loan.item.pid'))) - query.aggs.bucket('renewal', renewal_agg) + "filter", + term={"loan.trigger": ItemCirculationAction.EXTEND}, + aggs=dict(item_pid=A("terms", size=chunk_size, field="loan.item.pid")), + ) + query.aggs.bucket("renewal", renewal_agg) # adds last transaction aggregation for the fours triggers below. triggers = [ ItemCirculationAction.CHECKOUT, ItemCirculationAction.CHECKIN, - ItemCirculationAction.EXTEND + ItemCirculationAction.EXTEND, ] loans_agg = A( - 'filter', terms={'loan.trigger': triggers}, aggs=dict( + "filter", + terms={"loan.trigger": triggers}, + aggs=dict( item_pid=A( - 'terms', - field='loan.item.pid', + "terms", + field="loan.item.pid", size=chunk_size, - aggs=dict(last_op=A('max', field='date'))))) - query.aggs.bucket('loans', loans_agg) + aggs=dict(last_op=A("max", field="date")), + ) + ), + ) + query.aggs.bucket("loans", loans_agg) # query execution - result = query[0:0].execute() + result = query[:0].execute() # dump output into a dict # checkouts data items_stats = { term.key: { - 'checkout_count': term.doc_count, - 'last_checkout': ciso8601.parse_datetime( - term.last_op.value_as_string).date()} + "checkout_count": term.doc_count, + "last_checkout": ciso8601.parse_datetime( + term.last_op.value_as_string + ).date(), + } for term in result.aggregations.checkout.item_pid } # renewal data for term in result.aggregations.renewal.item_pid: items_stats.setdefault(term.key, {}) - items_stats[term.key]['renewal_count'] = term.doc_count + items_stats[term.key]["renewal_count"] = term.doc_count # last_transaction data for term in result.aggregations.loans.item_pid: items_stats.setdefault(term.key, {}) - items_stats[term.key]['last_transaction'] = \ - ciso8601.parse_datetime( - term.last_op.value_as_string).date() + items_stats[term.key]["last_transaction"] = ciso8601.parse_datetime( + term.last_op.value_as_string + ).date() return items_stats chunk_pids = [] @@ -388,7 +410,7 @@ def append_loan_data(hit, csv_data, items_stats): :param loans: loans data. 
""" stat = next(items_stats) - csv_data['item_checkouts_count'] = stat.get('checkout_count', 0) - csv_data['item_renewals_count'] = stat.get('renewal_count', 0) - csv_data['last_transaction_date'] = stat.get('last_transaction') - csv_data['last_checkout_date'] = stat.get('last_checkout') + csv_data["item_checkouts_count"] = stat.get("checkout_count", 0) + csv_data["item_renewals_count"] = stat.get("renewal_count", 0) + csv_data["last_transaction_date"] = stat.get("last_transaction") + csv_data["last_checkout_date"] = stat.get("last_checkout") diff --git a/rero_ils/modules/items/serializers/csv.py b/rero_ils/modules/items/serializers/csv.py index 041b5a6497..49ea41e9c7 100644 --- a/rero_ils/modules/items/serializers/csv.py +++ b/rero_ils/modules/items/serializers/csv.py @@ -36,8 +36,9 @@ class ItemCSVSerializer(CSVSerializer, CachedDataSerializerMixin): """Serialize item search for csv.""" - def serialize_search(self, pid_fetcher, search_result, links=None, - item_links_factory=None): + def serialize_search( + self, pid_fetcher, search_result, links=None, item_links_factory=None + ): """Serialize a search result. :param pid_fetcher: Persistent identifier fetcher. @@ -48,7 +49,7 @@ def serialize_search(self, pid_fetcher, search_result, links=None, # language language = request.args.get("lang", current_i18n.language) if not language or language not in get_i18n_supported_languages(): - language = current_app.config.get('BABEL_DEFAULT_LANGUAGE', 'en') + language = current_app.config.get("BABEL_DEFAULT_LANGUAGE", "en") def generate_csv(): """Generate CSV records.""" @@ -58,25 +59,25 @@ def _process_item_types_libs_locs(csv_data): :param csv_data: Dictionary of data. """ - itty_pid = csv_data.get('item_type_pid') - csv_data['item_type'] = self\ - .get_resource(ItemTypesSearch(), itty_pid)\ - .get('name') + itty_pid = csv_data.get("item_type_pid") + csv_data["item_type"] = self.get_resource( + ItemTypesSearch(), itty_pid + ).get("name") # temporary item_type - if itty_pid := csv_data.pop('temporary_item_type_pid', None): - csv_data['temporary_item_type'] = self\ - .get_resource(ItemTypesSearch(), itty_pid)\ - .get('name') + if itty_pid := csv_data.pop("temporary_item_type_pid", None): + csv_data["temporary_item_type"] = self.get_resource( + ItemTypesSearch(), itty_pid + ).get("name") # library - lib_pid = csv_data.pop('item_library_pid') - csv_data['item_library_name'] = self\ - .get_resource(LibrariesSearch(), lib_pid)\ - .get('name') + lib_pid = csv_data.pop("item_library_pid") + csv_data["item_library_name"] = self.get_resource( + LibrariesSearch(), lib_pid + ).get("name") # location - loc_pid = csv_data.pop('item_location_pid') - csv_data['item_location_name'] = self\ - .get_resource(LocationsSearch(), loc_pid)\ - .get('name') + loc_pid = csv_data.pop("item_location_pid") + csv_data["item_location_name"] = self.get_resource( + LocationsSearch(), loc_pid + ).get("name") headers = dict.fromkeys(self.csv_included_fields) @@ -89,7 +90,8 @@ def _process_item_types_libs_locs(csv_data): for pids, batch_results in Collector.batch(results=search_result): # get documents documents = Collector.get_documents_by_item_pids( - item_pids=pids, language=language) + item_pids=pids, language=language + ) # get loans items_stats = Collector.get_loans_by_item_pids(item_pids=pids) for hit in batch_results: diff --git a/rero_ils/modules/items/serializers/json.py b/rero_ils/modules/items/serializers/json.py index b752f97ecc..0fcf844f5e 100644 --- a/rero_ils/modules/items/serializers/json.py +++ 
b/rero_ils/modules/items/serializers/json.py @@ -25,8 +25,7 @@ from rero_ils.modules.libraries.api import LibrariesSearch from rero_ils.modules.locations.api import LocationsSearch from rero_ils.modules.organisations.api import OrganisationsSearch -from rero_ils.modules.serializers import CachedDataSerializerMixin, \ - JSONSerializer +from rero_ils.modules.serializers import CachedDataSerializerMixin, JSONSerializer from rero_ils.modules.vendors.api import VendorsSearch @@ -35,6 +34,7 @@ class ItemsJSONSerializer(JSONSerializer, CachedDataSerializerMixin): def _postprocess_search_hit(self, hit: dict) -> None: """Post-process each hit of a search result.""" + def _set_item_type_circulation_information(metadata, pid): """Get Item type circulation information. @@ -42,83 +42,84 @@ def _set_item_type_circulation_information(metadata, pid): :param: pid: the item type pid. """ record = self.get_resource(ItemTypesSearch(), pid) or {} - if circulation := record.get('circulation_information'): - metadata['item_type']['circulation_information'] = circulation + if circulation := record.get("circulation_information"): + metadata["item_type"]["circulation_information"] = circulation - metadata = hit.get('metadata', {}) - doc_pid = metadata.get('document').get('pid') + metadata = hit.get("metadata", {}) + doc_pid = metadata.get("document").get("pid") document = self.get_resource(DocumentsSearch(), doc_pid) - metadata['ui_title_text'] = TitleExtension.format_text( - document['title'], with_subtitle=True) + metadata["ui_title_text"] = TitleExtension.format_text( + document["title"], with_subtitle=True + ) - item = self.get_resource(Item, metadata.get('pid')) + item = self.get_resource(Item, metadata.get("pid")) # Item in collection if collection := item.in_collection(): - metadata['in_collection'] = collection + metadata["in_collection"] = collection # Temporary location - if (temp_loc := metadata.get('temporary_location')) \ - and 'pid' in temp_loc: - temp_loc_pid = temp_loc['pid'] - temp_loc['name'] = self\ - .get_resource(LocationsSearch(), temp_loc_pid).get('name') + if (temp_loc := metadata.get("temporary_location")) and "pid" in temp_loc: + temp_loc_pid = temp_loc["pid"] + temp_loc["name"] = self.get_resource(LocationsSearch(), temp_loc_pid).get( + "name" + ) # Organisation - org_pid = metadata['organisation']['pid'] + org_pid = metadata["organisation"]["pid"] organisation = self.get_resource(OrganisationsSearch(), org_pid) - metadata['organisation']['viewcode'] = organisation.get('code') + metadata["organisation"]["viewcode"] = organisation.get("code") # Library - library_pid = metadata['library']['pid'] + library_pid = metadata["library"]["pid"] library = self.get_resource(LibrariesSearch(), library_pid) - metadata['library']['name'] = library.get('name') + metadata["library"]["name"] = library.get("name") # Location - location_pid = metadata['location']['pid'] + location_pid = metadata["location"]["pid"] location = self.get_resource(LocationsSearch(), location_pid) - metadata['location']['name'] = location.get('name') + metadata["location"]["name"] = location.get("name") # Try to serialize circulation information from best possible # related `ItemType` resource if exists. 
-        if itty_pid := metadata.get('temporary_item_type', {}).get('pid'):
+        if itty_pid := metadata.get("temporary_item_type", {}).get("pid"):
            itty_rec = self.get_resource(ItemTypesSearch(), itty_pid) or {}
-            if circulation := itty_rec.get('circulation_information'):
-                metadata['item_type']['circulation_information'] = circulation
+            if circulation := itty_rec.get("circulation_information"):
+                metadata["item_type"]["circulation_information"] = circulation

        super()._postprocess_search_hit(hit)

    def _postprocess_search_aggregations(self, aggregations: dict) -> None:
        """Post-process aggregations from a search result."""
        JSONSerializer.enrich_bucket_with_data(
-            aggregations.get('library', {}).get('buckets', []),
-            LibrariesSearch, 'name'
+            aggregations.get("library", {}).get("buckets", []), LibrariesSearch, "name"
        )
        JSONSerializer.enrich_bucket_with_data(
-            aggregations.get('location', {}).get('buckets', []),
-            LocationsSearch, 'name'
+            aggregations.get("location", {}).get("buckets", []), LocationsSearch, "name"
        )
        JSONSerializer.enrich_bucket_with_data(
-            aggregations.get('item_type', {}).get('buckets', []),
-            ItemTypesSearch, 'name'
+            aggregations.get("item_type", {}).get("buckets", []),
+            ItemTypesSearch,
+            "name",
        )
        JSONSerializer.enrich_bucket_with_data(
-            aggregations.get('temporary_item_type', {}).get('buckets', []),
-            ItemTypesSearch, 'name'
+            aggregations.get("temporary_item_type", {}).get("buckets", []),
+            ItemTypesSearch,
+            "name",
        )
        JSONSerializer.enrich_bucket_with_data(
-            aggregations.get('temporary_location', {}).get('buckets', []),
-            LocationsSearch, 'name'
+            aggregations.get("temporary_location", {}).get("buckets", []),
+            LocationsSearch,
+            "name",
        )
        JSONSerializer.enrich_bucket_with_data(
-            aggregations.get('vendor', {}).get('buckets', []),
-            VendorsSearch, 'name'
+            aggregations.get("vendor", {}).get("buckets", []), VendorsSearch, "name"
        )
-        if aggregations.get('current_requests'):
-            aggregations['current_requests']['type'] = 'range'
-            aggregations['current_requests']['config'] = {
-                'min': 1,
-                'max': 100,
-                'step': 1
+        if aggregations.get("current_requests"):
+            aggregations["current_requests"]["type"] = "range"
+            aggregations["current_requests"]["config"] = {
+                "min": 1,
+                "max": 100,
+                "step": 1,
            }
-        if aggr := aggregations.get('claims_date'):
+        if aggr := aggregations.get("claims_date"):
            JSONSerializer.add_date_range_configuration(aggr)

        super()._postprocess_search_aggregations(aggregations)
@@ -132,13 +133,14 @@ def preprocess_record(self, pid, record, links_factory=None, **kwargs):
        """
        if record.is_issue and (notifications := record.claim_notifications):
            dates = [
-                notification['creation_date']
+                notification["creation_date"]
                for notification in notifications
-                if 'creation_date' in notification
+                if "creation_date" in notification
            ]
-            record.setdefault('issue', {})['claims'] = {
-                'counter': len(notifications),
-                'dates': dates
+            record.setdefault("issue", {})["claims"] = {
+                "counter": len(notifications),
+                "dates": dates,
            }
        return super().preprocess_record(
-            pid=pid, record=record, links_factory=links_factory, kwargs=kwargs)
+            pid=pid, record=record, links_factory=links_factory, **kwargs
+        )
diff --git a/rero_ils/modules/items/tasks.py b/rero_ils/modules/items/tasks.py
index b2ecd70017..26135369b3 100644
--- a/rero_ils/modules/items/tasks.py
+++ b/rero_ils/modules/items/tasks.py
@@ -29,8 +29,7 @@
 from rero_ils.modules.utils import extracted_data_from_ref, set_timestamp

 from .api import Item
-from .utils import get_provisional_items_candidate_to_delete, \
update_late_expected_issue +from .utils import get_provisional_items_candidate_to_delete, update_late_expected_issue @shared_task() @@ -55,10 +54,10 @@ def delete_provisional_items(): current_app.logger.error(error) msg_dict = { - 'number_of_candidate_items_to_delete': counter, - 'number_of_deleted_items': deleted_items + "number_of_candidate_items_to_delete": counter, + "number_of_deleted_items": deleted_items, } - set_timestamp('claims-creation', **msg_dict) + set_timestamp("claims-creation", **msg_dict) return msg_dict @@ -73,12 +72,11 @@ def process_late_issues(dbcommit=True, reindex=True): :return: number of modified or created issues. """ # Perform serial type holding with passed `next_expected_date` - counter = create_next_late_expected_issues( - dbcommit=dbcommit, reindex=reindex) + counter = create_next_late_expected_issues(dbcommit=dbcommit, reindex=reindex) # Perform already created issue with passed `next_expected_date` counter += update_late_expected_issue(dbcommit=dbcommit, reindex=reindex) - msg = f'expected_issues_to_late: {counter}' - set_timestamp('late-issues-creation', msg=msg) + msg = f"expected_issues_to_late: {counter}" + set_timestamp("late-issues-creation", msg=msg) return msg @@ -91,27 +89,31 @@ def clean_obsolete_temporary_item_types_and_locations(): commit change """ counter = 0 - for item, field_type in \ - Item.get_items_with_obsolete_temporary_item_type_or_location(): + for ( + item, + field_type, + ) in Item.get_items_with_obsolete_temporary_item_type_or_location(): counter += 1 - if field_type == 'itty': - tmp_itty_data = item.pop('temporary_item_type', {}) + if field_type == "itty": + tmp_itty_data = item.pop("temporary_item_type", {}) tmp_itty_name = extracted_data_from_ref( - tmp_itty_data['$ref'], 'record').get('name') - tmp_itty_enddate = tmp_itty_data['end_date'] - msg = f'Removed obsolete temporary_item_type {tmp_itty_name} \ - {tmp_itty_enddate} from item pid {item.pid}' - elif field_type == 'loc': - tmp_loc_data = item.pop('temporary_location', {}) - tmp_loc_name = extracted_data_from_ref( - tmp_loc_data['$ref'], 'record').get('name') - tmp_loc_enddate = tmp_loc_data['end_date'] - msg = f'Removed obsolete temporary_location {tmp_loc_name} \ - {tmp_loc_enddate} from item pid {item.pid}' + tmp_itty_data["$ref"], "record" + ).get("name") + tmp_itty_enddate = tmp_itty_data["end_date"] + msg = f"Removed obsolete temporary_item_type {tmp_itty_name} \ + {tmp_itty_enddate} from item pid {item.pid}" + elif field_type == "loc": + tmp_loc_data = item.pop("temporary_location", {}) + tmp_loc_name = extracted_data_from_ref(tmp_loc_data["$ref"], "record").get( + "name" + ) + tmp_loc_enddate = tmp_loc_data["end_date"] + msg = f"Removed obsolete temporary_location {tmp_loc_name} \ + {tmp_loc_enddate} from item pid {item.pid}" current_app.logger.info(msg) item.update(item, dbcommit=True, reindex=True) - count = {'deleted fields': counter} - set_timestamp('clean_obsolete_temporary_item_types_and_locations', **count) + count = {"deleted fields": counter} + set_timestamp("clean_obsolete_temporary_item_types_and_locations", **count) return count @@ -122,7 +124,6 @@ def delete_holding(holding_pid, force=False, dbcommit=True, delindex=True): holding_rec = Holding.get_record_by_pid(holding_pid) try: # TODO: Need to split DB and elasticsearch deletion. 
- holding_rec.delete(force=force, dbcommit=dbcommit, - delindex=delindex) + holding_rec.delete(force=force, dbcommit=dbcommit, delindex=delindex) except IlsRecordError.NotDeleted: - current_app.logger.warning(f'Holding not deleted: {holding_pid}') + current_app.logger.warning(f"Holding not deleted: {holding_pid}") diff --git a/rero_ils/modules/items/utils.py b/rero_ils/modules/items/utils.py index 2d6474d8b6..fd300f1db3 100644 --- a/rero_ils/modules/items/utils.py +++ b/rero_ils/modules/items/utils.py @@ -18,8 +18,7 @@ """Item utils.""" from datetime import datetime, timedelta, timezone -from rero_ils.modules.items.models import ItemIssueStatus, ItemStatus, \ - TypeOfItem +from rero_ils.modules.items.models import ItemIssueStatus, ItemStatus, TypeOfItem from rero_ils.modules.locations.api import LocationsSearch from rero_ils.modules.notifications.models import RecipientType from rero_ils.modules.patron_transactions.api import PatronTransactionsSearch @@ -33,7 +32,7 @@ def item_pid_to_object(item_pid): :return: the item_pid object :rtype: object """ - return {'value': item_pid, 'type': 'item'} + return {"value": item_pid, "type": "item"} def item_location_retriever(item_pid): @@ -46,10 +45,7 @@ def item_location_retriever(item_pid): """ from .api import Item - # TODO: for requests we probably need the transation_location_pid - # to deal with multiple pickup locations for a library - item = Item.get_record_by_pid(item_pid.get('value')) - if item: + if item := Item.get_record_by_pid(item_pid.get("value")): # TODO: this will be useful for the very specific rero use cases # last_location = item.get_last_location() @@ -67,10 +63,9 @@ def same_location_validator(item_pid, input_location_pid): :rtype: boolean """ from rero_ils.modules.items.api import ItemsSearch - lib_from_loc = LocationsSearch().get_record_by_pid( - input_location_pid).library.pid - lib_from_item = ItemsSearch().get_record_by_pid( - item_pid.get('value')).library.pid + + lib_from_loc = LocationsSearch().get_record_by_pid(input_location_pid).library.pid + lib_from_item = ItemsSearch().get_record_by_pid(item_pid.get("value")).library.pid return lib_from_loc == lib_from_item @@ -83,12 +78,13 @@ def exists_available_item(items=None): :return True if one item is available; false otherwise. 
""" from rero_ils.modules.items.api import Item + items = items or [] for item in items: if isinstance(item, str): # `item` seems to be an item pid item = Item.get_record_by_pid(item) if not isinstance(item, Item): - raise ValueError('All items should be Item resource.') + raise ValueError("All items should be Item resource.") if item.is_available(): return True return False @@ -105,18 +101,22 @@ def get_provisional_items_candidate_to_delete(): from rero_ils.modules.items.api import Item, ItemsSearch # query ES index for open fees - query_fees = PatronTransactionsSearch()\ - .filter('term', status='open')\ - .filter('exists', field='item')\ - .filter('range', total_amount={'gt': 0})\ - .source('item') + query_fees = ( + PatronTransactionsSearch() + .filter("term", status="open") + .filter("exists", field="item") + .filter("range", total_amount={"gt": 0}) + .source("item") + ) # list of item pids with open fees item_pids_with_fees = [hit.item.pid for hit in query_fees.scan()] - query = ItemsSearch()\ - .filter('term', type=TypeOfItem.PROVISIONAL) \ - .filter('terms', status=[ItemStatus.ON_SHELF]) \ - .exclude('terms', pid=item_pids_with_fees)\ + query = ( + ItemsSearch() + .filter("term", type=TypeOfItem.PROVISIONAL) + .filter("terms", status=[ItemStatus.ON_SHELF]) + .exclude("terms", pid=item_pids_with_fees) .source(False) + ) for hit in query.scan(): yield Item.get_record(hit.meta.id) @@ -131,12 +131,14 @@ def update_late_expected_issue(dbcommit=False, reindex=False): from rero_ils.modules.items.api import Item, ItemsSearch yesterday = datetime.now(timezone.utc) - timedelta(days=1) - yesterday = yesterday.strftime('%Y-%m-%d') - query = ItemsSearch() \ - .filter('term', type=TypeOfItem.ISSUE) \ - .filter('term', issue__status=ItemIssueStatus.EXPECTED) \ - .filter('range', issue__expected_date={'lte': yesterday}) \ + yesterday = yesterday.strftime("%Y-%m-%d") + query = ( + ItemsSearch() + .filter("term", type=TypeOfItem.ISSUE) + .filter("term", issue__status=ItemIssueStatus.EXPECTED) + .filter("range", issue__expected_date={"lte": yesterday}) .source(False) + ) counter = 0 for counter, hit in enumerate(query.scan(), 1): item = Item.get_record(hit.meta.id) @@ -159,11 +161,12 @@ def get_recipient_suggestions(issue): suggestions = {} if (vendor := issue.vendor) and (email := vendor.serial_email): suggestions.setdefault(email, set()).update([RecipientType.TO]) - if settings := (issue.library or {}).get('serial_acquisition_settings'): - if email := settings.get('shipping_informations', {}).get('email'): - suggestions.setdefault(email, set())\ - .update([RecipientType.CC, RecipientType.REPLY_TO]) - if email := settings.get('billing_informations', {}).get('email'): + if settings := (issue.library or {}).get("serial_acquisition_settings"): + if email := settings.get("shipping_informations", {}).get("email"): + suggestions.setdefault(email, set()).update( + [RecipientType.CC, RecipientType.REPLY_TO] + ) + if email := settings.get("billing_informations", {}).get("email"): suggestions.setdefault(email, set()) if email := current_librarian.user.email: suggestions.setdefault(email, set()) @@ -173,8 +176,8 @@ def get_recipient_suggestions(issue): # return a recipient suggestion array. 
cleaned_suggestions = [] for recipient_address, recipient_types in suggestions.items(): - suggestion = {'address': recipient_address} + suggestion = {"address": recipient_address} if recipient_types: - suggestion['type'] = list(recipient_types) + suggestion["type"] = list(recipient_types) cleaned_suggestions.append(suggestion) return cleaned_suggestions diff --git a/rero_ils/modules/items/views/__init__.py b/rero_ils/modules/items/views/__init__.py index 261299769c..3ba205d033 100644 --- a/rero_ils/modules/items/views/__init__.py +++ b/rero_ils/modules/items/views/__init__.py @@ -23,16 +23,11 @@ from .api_views import api_blueprint from .rest import InventoryListResource -inventory_list = InventoryListResource.as_view( - 'inventory_search' -) -api_blueprint.add_url_rule( - '/inventory', - view_func=inventory_list -) +inventory_list = InventoryListResource.as_view("inventory_search") +api_blueprint.add_url_rule("/inventory", view_func=inventory_list) blueprints = [ api_blueprint, ] -__all__ = 'api_blueprint' +__all__ = "api_blueprint" diff --git a/rero_ils/modules/items/views/api_views.py b/rero_ils/modules/items/views/api_views.py index 86b0f0dd3e..8189437493 100644 --- a/rero_ils/modules/items/views/api_views.py +++ b/rero_ils/modules/items/views/api_views.py @@ -29,51 +29,46 @@ from flask import request as flask_request from flask_login import current_user from invenio_circulation.api import get_loan_for_item -from invenio_circulation.errors import CirculationException, \ - MissingRequiredParameterError +from invenio_circulation.errors import ( + CirculationException, + MissingRequiredParameterError, +) from jinja2 import TemplateNotFound, UndefinedError from werkzeug.exceptions import NotFound from rero_ils.modules.circ_policies.api import CircPolicy from rero_ils.modules.decorators import check_authentication, check_permission from rero_ils.modules.documents.views import record_library_pickup_locations -from rero_ils.modules.errors import NoCirculationAction, \ - NoCirculationActionIsPermitted +from rero_ils.modules.errors import NoCirculationAction, NoCirculationActionIsPermitted from rero_ils.modules.libraries.api import Library from rero_ils.modules.loans.api import Loan -from rero_ils.modules.loans.dumpers import \ - CirculationDumper as LoanCirculationDumper +from rero_ils.modules.loans.dumpers import CirculationDumper as LoanCirculationDumper from rero_ils.modules.operation_logs.api import OperationLogsSearch -from rero_ils.modules.operation_logs.permissions import \ - search_action as op_log_search_action +from rero_ils.modules.operation_logs.permissions import ( + search_action as op_log_search_action, +) from rero_ils.modules.patrons.api import Patron, current_librarian from rero_ils.permissions import request_item_permission +from ...commons.exceptions import MissingDataException from ..api import Item from ..dumpers import CirculationActionDumper, ClaimIssueNotificationDumper from ..models import ItemCirculationAction, ItemStatus from ..permissions import late_issue_management as late_issue_management_action from ..utils import get_recipient_suggestions, item_pid_to_object -from ...commons.exceptions import MissingDataException -api_blueprint = Blueprint( - 'api_item', - __name__, - url_prefix='/item' -) +api_blueprint = Blueprint("api_item", __name__, url_prefix="/item") -blueprint = Blueprint( - 'items', - __name__ -) +blueprint = Blueprint("items", __name__) def check_logged_user_authentication(func): """Decorator to check authentication for user HTTP API.""" + 
@wraps(func) def decorated_view(*args, **kwargs): if not current_user.is_authenticated: - return jsonify({'status': 'error: Unauthorized'}), 401 + return jsonify({"status": "error: Unauthorized"}), 401 return func(*args, **kwargs) return decorated_view @@ -81,10 +76,11 @@ def decorated_view(*args, **kwargs): def check_authentication_for_request(func): """Decorator to check authentication for item requests HTTP API.""" + @wraps(func) def decorated_view(*args, **kwargs): if not request_item_permission.require().can(): - return jsonify({'status': 'error: Forbidden'}), 403 + return jsonify({"status": "error: Forbidden"}), 403 return func(*args, **kwargs) return decorated_view @@ -92,6 +88,7 @@ def decorated_view(*args, **kwargs): def jsonify_error(func): """Jsonify errors.""" + @wraps(func) def decorated_view(*args, **kwargs): try: @@ -101,7 +98,8 @@ def decorated_view(*args, **kwargs): except Exception as error: # raise error current_app.logger.error(str(error)) - return jsonify({'status': f'error: {error}'}), 500 + return jsonify({"status": f"error: {error}"}), 500 + return decorated_view @@ -111,20 +109,22 @@ def do_loan_jsonify_action(func): This method for the circulation actions that executed directly on the loan object and do not need to have direct access to the item object. """ + @wraps(func) def decorated_view(*args, **kwargs): try: data = deepcopy(flask_request.get_json()) - loan_pid = data.pop('pid', None) - pickup_location_pid = data.get('pickup_location_pid', None) + loan_pid = data.pop("pid", None) + pickup_location_pid = data.get("pickup_location_pid", None) if not loan_pid or not pickup_location_pid: - return jsonify({'status': 'error: Bad request'}), 400 + return jsonify({"status": "error: Bad request"}), 400 loan = Loan.get_record_by_pid(loan_pid) updated_loan = func(loan, data, *args, **kwargs) return jsonify(updated_loan) except NoCirculationActionIsPermitted as error: # The circulation specs do not allow updates on some loan states. abort(403, str(error)) + return decorated_view @@ -134,31 +134,32 @@ def do_item_jsonify_action(func): This method for the circulation actions that required access to the item object before executing the invenio-circulation logic. """ + @wraps(func) def decorated_view(*args, **kwargs): try: data = deepcopy(flask_request.get_json()) item = Item.get_item_record_for_ui(**data) - data.pop('item_barcode', None) + data.pop("item_barcode", None) if not item: abort(404) - item_data, action_applied = \ - func(item, data, *args, **kwargs) + item_data, action_applied = func(item, data, *args, **kwargs) for action, loan in action_applied.items(): if loan: - action_applied[action] = loan.dumps( - LoanCirculationDumper()) - - return jsonify({ - 'metadata': item_data.dumps(CirculationActionDumper()), - 'action_applied': action_applied - }) + action_applied[action] = loan.dumps(LoanCirculationDumper()) + + return jsonify( + { + "metadata": item_data.dumps(CirculationActionDumper()), + "action_applied": action_applied, + } + ) except NoCirculationAction as error: - return jsonify({'status': f'error: {str(error)}'}), 400 + return jsonify({"status": f"error: {str(error)}"}), 400 except NoCirculationActionIsPermitted as error: # The circulation specs do not allow updates on some loan states. 
- return jsonify({'status': f'error: {str(error)}'}), 403 + return jsonify({"status": f"error: {str(error)}"}), 403 except MissingRequiredParameterError as error: # Return error 400 when there is a missing required parameter abort(400, str(error)) @@ -168,16 +169,17 @@ def decorated_view(*args, **kwargs): raise error except exceptions.RequestError as error: # missing required parameters - return jsonify({'status': f'error: {error}'}), 400 + return jsonify({"status": f"error: {error}"}), 400 except Exception as error: # TODO: need to know what type of exception and document there. # raise error - current_app.logger.error(f'{func.__name__}: {str(error)}') - return jsonify({'status': f'error: {error}'}), 400 + current_app.logger.error(f"{func.__name__}: {str(error)}") + return jsonify({"status": f"error: {error}"}), 400 + return decorated_view -@api_blueprint.route('/patron_request', methods=['POST']) +@api_blueprint.route("/patron_request", methods=["POST"]) @check_logged_user_authentication @check_authentication_for_request @do_item_jsonify_action @@ -195,13 +197,13 @@ def patron_request(item, data): """ # get the patron account of the same org of the location pid patron_pid = Patron.get_current_patron(item).pid - data['patron_pid'] = patron_pid - data['transaction_user_pid'] = patron_pid - data['transaction_location_pid'] = data['pickup_location_pid'] + data["patron_pid"] = patron_pid + data["transaction_user_pid"] = patron_pid + data["transaction_location_pid"] = data["pickup_location_pid"] return item.request(**data) -@api_blueprint.route('/request', methods=['POST']) +@api_blueprint.route("/request", methods=["POST"]) @check_authentication @do_item_jsonify_action def librarian_request(item, data): @@ -222,7 +224,7 @@ def librarian_request(item, data): return item.request(**data) -@api_blueprint.route('/cancel_item_request', methods=['POST']) +@api_blueprint.route("/cancel_item_request", methods=["POST"]) @check_logged_user_authentication @do_item_jsonify_action def cancel_item_request(item, data): @@ -241,7 +243,7 @@ def cancel_item_request(item, data): return item.cancel_item_request(**data) -@api_blueprint.route('/checkout', methods=['POST']) +@api_blueprint.route("/checkout", methods=["POST"]) # @profile(sort_by='cumulative', lines_to_print=100) @check_authentication @do_item_jsonify_action @@ -259,12 +261,11 @@ def checkout(item, data): :param item: the item resource on which checkout operation will be done. :param data: additional data used for the circ operation (as a dict). 
""" - data['override_blocking'] = flask_request.args.get( - 'override_blocking', False) + data["override_blocking"] = flask_request.args.get("override_blocking", False) return item.checkout(**data) -@api_blueprint.route("/checkin", methods=['POST']) +@api_blueprint.route("/checkin", methods=["POST"]) # @profile(sort_by='cumulative', lines_to_print=100) @check_authentication @do_item_jsonify_action @@ -284,7 +285,7 @@ def checkin(item, data): return item.checkin(**data) -@api_blueprint.route("/update_loan_pickup_location", methods=['POST']) +@api_blueprint.route("/update_loan_pickup_location", methods=["POST"]) @check_authentication @do_loan_jsonify_action def update_loan_pickup_location(loan, data): @@ -302,7 +303,7 @@ def update_loan_pickup_location(loan, data): return loan.update_pickup_location(**data) -@api_blueprint.route('/validate_request', methods=['POST']) +@api_blueprint.route("/validate_request", methods=["POST"]) @check_authentication @do_item_jsonify_action def validate_request(item, data): @@ -321,7 +322,7 @@ def validate_request(item, data): return item.validate_request(**data) -@api_blueprint.route('/receive', methods=['POST']) +@api_blueprint.route("/receive", methods=["POST"]) @check_authentication @do_item_jsonify_action def receive(item, data): @@ -338,7 +339,7 @@ def receive(item, data): return item.receive(**data) -@api_blueprint.route('/return_missing', methods=['POST']) +@api_blueprint.route("/return_missing", methods=["POST"]) @check_authentication @do_item_jsonify_action def return_missing(item, data): @@ -355,7 +356,7 @@ def return_missing(item, data): return item.return_missing() -@api_blueprint.route('/extend_loan', methods=['POST']) +@api_blueprint.route("/extend_loan", methods=["POST"]) @check_logged_user_authentication @do_item_jsonify_action def extend_loan(item, data): @@ -372,65 +373,51 @@ def extend_loan(item, data): return item.extend_loan(**data) -@api_blueprint.route('/requested_loans/', methods=['GET']) +@api_blueprint.route("/requested_loans/", methods=["GET"]) @check_authentication @jsonify_error def requested_loans(library_pid): """HTTP GET request for sorted requested loans for a library.""" metadata = Loan.requested_loans_to_validate(library_pid) - return jsonify({ - 'hits': { - 'total': { - 'value': len(metadata) - }, - 'hits': metadata - } - }) + return jsonify({"hits": {"total": {"value": len(metadata)}, "hits": metadata}}) -@api_blueprint.route('/loans/', methods=['GET']) +@api_blueprint.route("/loans/", methods=["GET"]) @check_authentication @jsonify_error def loans(patron_pid): """HTTP GET request for sorted loans for a patron pid.""" - sort_by = flask_request.args.get('sort') + sort_by = flask_request.args.get("sort") items = Item.get_checked_out_items(patron_pid=patron_pid, sort_by=sort_by) metadata = [] for item in items: item_data = item.replace_refs() - metadata.append({ - 'item': { - 'pid': item.pid, - 'organisation_pid': item_data.get('organisation').get('pid'), - 'barcode': item.get('barcode') + metadata.append( + { + "item": { + "pid": item.pid, + "organisation_pid": item_data.get("organisation").get("pid"), + "barcode": item.get("barcode"), + } } - }) - return jsonify({ - 'hits': { - 'total': { - 'value': len(metadata) - }, - 'hits': metadata - } - }) + ) + return jsonify({"hits": {"total": {"value": len(metadata)}, "hits": metadata}}) -@api_blueprint.route('/barcode/', methods=['GET']) +@api_blueprint.route("/barcode/", methods=["GET"]) @check_authentication @jsonify_error def item(item_barcode): """HTTP GET request for 
-    item = Item.get_item_by_barcode(
-        item_barcode, current_librarian.organisation_pid)
+    item = Item.get_item_by_barcode(item_barcode, current_librarian.organisation_pid)
    if not item:
        abort(404)
    loan = get_loan_for_item(item_pid_to_object(item.pid))
    if loan:
-        loan = Loan.get_record_by_pid(
-            loan.get('pid')).dumps(LoanCirculationDumper())
+        loan = Loan.get_record_by_pid(loan.get("pid")).dumps(LoanCirculationDumper())
    item_dumps = item.dumps(CirculationActionDumper())
-    if patron_pid := flask_request.args.get('patron_pid'):
+    if patron_pid := flask_request.args.get("patron_pid"):
        patron = Patron.get_record_by_pid(patron_pid)
        organisation_pid = item.organisation_pid
        library_pid = item.library_pid
@@ -440,32 +427,27 @@
            organisation_pid=organisation_pid,
            library_pid=library_pid,
            patron_type_pid=patron_type_pid,
-            item_type_pid=item_type_pid
+            item_type_pid=item_type_pid,
        )
        new_actions = []
        # If circulation policy doesn't allow checkout operation no need to
        # perform special check describe below.
        if circ_policy.can_checkout:
-            for action in item_dumps.get('actions', []):
-                if action == 'checkout':
+            for action in item_dumps.get("actions", []):
+                if action == "checkout":
                    if (
                        item.number_of_requests() > 0
                        and item.patron_request_rank(patron) == 1
                        or item.number_of_requests() <= 0
                    ):
                        new_actions.append(action)
-                elif action == 'receive' and item.number_of_requests() == 0:
-                    new_actions.append('checkout')
-    item_dumps['actions'] = new_actions
-    return jsonify({
-        'metadata': {
-            'item': item_dumps,
-            'loan': loan
-        }
-    })
+                elif action == "receive" and item.number_of_requests() == 0:
+                    new_actions.append("checkout")
+    item_dumps["actions"] = new_actions
+    return jsonify({"metadata": {"item": item_dumps, "loan": loan}})


-@api_blueprint.route('/<pid>/availability', methods=['GET'])
+@api_blueprint.route("/<pid>/availability", methods=["GET"])
 @jsonify_error
 def item_availability(pid):
    """HTTP GET request for item availability."""
@@ -473,19 +455,19 @@
    if not item:
        abort(404)
    data = dict(available=item.is_available())
-    if flask_request.args.get('more_info'):
+    if flask_request.args.get("more_info"):
        extra = {
-            'status': item['status'],
-            'circulation_message': item.availability_text,
-            'number_of_request': item.number_of_requests()
+            "status": item["status"],
+            "circulation_message": item.availability_text,
+            "number_of_request": item.number_of_requests(),
        }
-        if not data['available'] and extra['status'] == ItemStatus.ON_LOAN:
-            extra['due_date'] = item.get_item_end_date(format=None)
+        if not data["available"] and extra["status"] == ItemStatus.ON_LOAN:
+            extra["due_date"] = item.get_item_end_date(format=None)
        data |= extra
    return jsonify(data)


-@api_blueprint.route('/<item_pid>/can_request', methods=['GET'])
+@api_blueprint.route("/<item_pid>/can_request", methods=["GET"])
 @check_logged_user_authentication
 @jsonify_error
 def can_request(item_pid):
@@ -503,16 +485,17 @@
    kwargs = {}
    item = Item.get_record_by_pid(item_pid)
    if not item:
-        abort(404, 'Item not found')
-    if patron_barcode := flask_request.args.get('patron_barcode'):
-        kwargs['patron'] = Patron.get_patron_by_barcode(
-            barcode=patron_barcode, org_pid=item.organisation_pid)
-        if not kwargs['patron']:
-            abort(404, 'Patron not found')
-    if library_pid := flask_request.args.get('library_pid'):
-        kwargs['library'] = Library.get_record_by_pid(library_pid)
-        if not kwargs['library']:
-            abort(404, 'Library not found')
+        abort(404, "Item not found")
+    if patron_barcode := flask_request.args.get("patron_barcode"):
+        kwargs["patron"] = Patron.get_patron_by_barcode(
+            barcode=patron_barcode, org_pid=item.organisation_pid
+        )
+        if not kwargs["patron"]:
+            abort(404, "Patron not found")
+    if library_pid := flask_request.args.get("library_pid"):
+        kwargs["library"] = Library.get_record_by_pid(library_pid)
+        if not kwargs["library"]:
+            abort(404, "Library not found")

    # ask item if the request is possible with these data.
    can, reasons = item.can(ItemCirculationAction.REQUEST, **kwargs)
@@ -520,15 +503,13 @@
    # check the `reasons_not_request` array. If it's empty, the request is
    # allowed ; if not the request is disallowed, and we need to return the
    # reasons why
-    response = {'can': can}
+    response = {"can": can}
    if reasons:
-        response['reasons'] = {
-            'others': {reason: True for reason in reasons}
-        }
+        response["reasons"] = {"others": {reason: True for reason in reasons}}
    return jsonify(response)


-@api_blueprint.route('/<item_pid>/pickup_locations', methods=['GET'])
+@api_blueprint.route("/<item_pid>/pickup_locations", methods=["GET"])
 @check_logged_user_authentication
 @jsonify_error
 def get_pickup_locations(item_pid):
@@ -538,14 +519,12 @@ def get_pickup_locations(item_pid):
    """
    item = Item.get_record_by_pid(item_pid)
    if not item:
-        abort(404, 'Item not found')
+        abort(404, "Item not found")
    locations = record_library_pickup_locations(item)
-    return jsonify({
-        'locations': locations
-    })
+    return jsonify({"locations": locations})


-@api_blueprint.route('/<item_pid>/stats', methods=['GET'])
+@api_blueprint.route("/<item_pid>/stats", methods=["GET"])
 @check_permission([op_log_search_action])
 @jsonify_error
 def stats(item_pid):
@@ -553,40 +532,40 @@ def stats(item_pid):

    :param item_pid: the item pid
    """
-    search = OperationLogsSearch()\
-        .filter('term', loan__item__pid=item_pid)\
-        .filter('term', record__type='loan')
+    search = (
+        OperationLogsSearch()
+        .filter("term", loan__item__pid=item_pid)
+        .filter("term", record__type="loan")
+    )
    trigger = A(
-        'terms',
-        field='loan.trigger',
-        aggs={
-            'year': A('filter', Q('range', date={'gte': 'now-1y'}))
-        }
+        "terms",
+        field="loan.trigger",
+        aggs={"year": A("filter", Q("range", date={"gte": "now-1y"}))},
    )
-    search.aggs.bucket('trigger', trigger)
+    search.aggs.bucket("trigger", trigger)
    search = search[:0]
    results = search.execute()
-    output = {'total': {}, 'total_year': {}}
+    output = {"total": {}, "total_year": {}}
    for result in results.aggregations.trigger.buckets:
-        output['total'][result.key] = result.doc_count
-        output['total_year'][result.key] = result.year.doc_count
+        output["total"][result.key] = result.doc_count
+        output["total_year"][result.key] = result.year.doc_count

    # Add legacy checkout count
    if item := Item.get_record_by_pid(item_pid):
-        legacy_count = item.get('legacy_checkout_count', 0)
-        output['total'].setdefault('checkout', 0)
-        output['total']['checkout'] += legacy_count
+        legacy_count = item.get("legacy_checkout_count", 0)
+        output["total"].setdefault("checkout", 0)
+        output["total"]["checkout"] += legacy_count

    return jsonify(output)


-@api_blueprint.route('/<item_pid>/issue/claims/preview', methods=['GET'])
+@api_blueprint.route("/<item_pid>/issue/claims/preview", methods=["GET"])
 @check_permission([late_issue_management_action])
 def claim_notification_preview(item_pid):
    """Get the preview of a claim issue notification content."""
    record = Item.get_record_by_pid(item_pid)
    if not record:
-        abort(404, 'Item not found')
+        abort(404, "Item not found")
    if not record.is_issue:
-        abort(400, 'Item isn\'t an issue')
+        abort(400, "Item isn't an issue")

    try:
        issue_data = record.dumps(dumper=ClaimIssueNotificationDumper())
@@ -596,29 +575,29 @@
    # update the claims issue counter ::
    # As this is preview for next claim, we need to add 1 to the returned
    # claim counter
-    issue_data['claim_counter'] += 1
-    language = issue_data.get('vendor', {}).get('language')
+    issue_data["claim_counter"] += 1
+    language = issue_data.get("vendor", {}).get("language")

-    response = {'recipient_suggestions': get_recipient_suggestions(record)}
-    template_directory = 'email/claim_issue/'
+    response = {"recipient_suggestions": get_recipient_suggestions(record)}
+    template_directory = "email/claim_issue"
    try:
-        tmpl_file = f'{template_directory}/{language}.tpl.txt'
-        response['preview'] = render_template(tmpl_file, issue=issue_data)
+        tmpl_file = f"{template_directory}/{language}.tpl.txt"
+        response["preview"] = render_template(tmpl_file, issue=issue_data)
    except TemplateNotFound:
        # If the corresponding translated template isn't found, use the english
        # template as default template
-        msg = f'None "claim_issue" template found for "{language}" language'
+        msg = f'No "claim_issue" template found for "{language}" language'
        current_app.logger.error(msg)
-        response['message'] = [{'type': 'error', 'content': msg}]
-        tmpl_file = f'{template_directory}/eng.tpl.txt'
-        response['preview'] = render_template(tmpl_file, issue=issue_data)
+        response["message"] = [{"type": "error", "content": msg}]
+        tmpl_file = f"{template_directory}/eng.tpl.txt"
+        response["preview"] = render_template(tmpl_file, issue=issue_data)
    except UndefinedError as ue:
-        abort(500, f'template generation failed : {str(ue)}')
+        abort(500, f"template generation failed: {str(ue)}")

    return jsonify(response)


-@api_blueprint.route('/<item_pid>/issue/claims', methods=['POST'])
+@api_blueprint.route("/<item_pid>/issue/claims", methods=["POST"])
 @check_permission([late_issue_management_action])
 def claim_issue(item_pid):
    """API to claim an issue.
@@ -632,12 +611,12 @@
    """
    item_issue = Item.get_record_by_pid(item_pid)
    if not item_issue:
-        abort(404, 'Item not found')
+        abort(404, "Item not found")
    if not item_issue.is_issue:
-        abort(400, 'Item isn\'t an issue')
+        abort(400, "Item isn't an issue")
    data = flask_request.get_json()
-    if not (recipients := data.get('recipients')):
+    if not (recipients := data.get("recipients")):
        abort(400, "Missing recipients emails.")

    notification = item_issue.claims(recipients)
-    return jsonify({'data': notification})
+    return jsonify({"data": notification})
diff --git a/rero_ils/modules/items/views/filters.py b/rero_ils/modules/items/views/filters.py
index f75be15ae4..d494a6dcbd 100644
--- a/rero_ils/modules/items/views/filters.py
+++ b/rero_ils/modules/items/views/filters.py
@@ -26,9 +26,11 @@ def issue_client_reference(issue_data):

    :returns: the string representing the client reference.
:rtype: str """ - if holding_data := issue_data.get('holdings'): - parts = list(filter(None, [ - holding_data.get('client_id'), - holding_data.get('order_reference') - ])) - return f'({"/".join(parts)})' if parts else '' + if holding_data := issue_data.get("holdings"): + parts = list( + filter( + None, + [holding_data.get("client_id"), holding_data.get("order_reference")], + ) + ) + return f'({"/".join(parts)})' if parts else "" diff --git a/rero_ils/modules/items/views/rest.py b/rero_ils/modules/items/views/rest.py index a0f84fbffd..63044b3006 100644 --- a/rero_ils/modules/items/views/rest.py +++ b/rero_ils/modules/items/views/rest.py @@ -38,17 +38,15 @@ def __init__(self, **kwargs): """Init.""" super().__init__( method_serializers={ - 'GET': { - 'text/csv': csv_item_search, + "GET": { + "text/csv": csv_item_search, } }, serializers_query_aliases={ - 'csv': 'text/csv', + "csv": "text/csv", }, - default_method_media_type={ - 'GET': 'text/csv' - }, - default_media_type='text/csv', + default_method_media_type={"GET": "text/csv"}, + default_media_type="text/csv", **kwargs ) self.search_factory = partial(items_search_factory, self) @@ -58,7 +56,4 @@ def get(self, **kwargs): search_obj = ItemsSearch() search, qs_kwargs = self.search_factory(search_obj) - return self.make_response( - pid_fetcher=None, - search_result=search.scan() - ) + return self.make_response(pid_fetcher=None, search_result=search.scan()) diff --git a/rero_ils/modules/jsonresolver.py b/rero_ils/modules/jsonresolver.py index 949687393f..d860acb1cb 100644 --- a/rero_ils/modules/jsonresolver.py +++ b/rero_ils/modules/jsonresolver.py @@ -31,19 +31,18 @@ def resolve_json_refs(pid_type, pid, raise_on_error=True): try: persistent_id = PersistentIdentifier.get(pid_type, pid) except Exception: - current_app.logger.error(f'Unable to resolve {pid_type} pid: {pid}') + current_app.logger.error(f"Unable to resolve {pid_type} pid: {pid}") else: if persistent_id.status == PIDStatus.REGISTERED: - return dict( - pid=persistent_id.pid_value, - type=pid_type - ) - base_item_route = current_app.config.get( - 'RECORDS_REST_ENDPOINTS' - ).get(pid_type, {}).get('item_route', '/???') - item_route_parts = ['api'] + base_item_route.split('/')[1:-1] + [pid] - item_route = '/'.join(item_route_parts) - msg = f' Resolve {pid_type}: {item_route} {persistent_id}' + return dict(pid=persistent_id.pid_value, type=pid_type) + base_item_route = ( + current_app.config.get("RECORDS_REST_ENDPOINTS") + .get(pid_type, {}) + .get("item_route", "/???") + ) + item_route_parts = ["api"] + base_item_route.split("/")[1:-1] + [pid] + item_route = "/".join(item_route_parts) + msg = f" Resolve {pid_type}: {item_route} {persistent_id}" current_app.logger.error(msg) if raise_on_error: - raise Exception(f'Unable to resolve {pid_type} pid: {pid}') + raise Exception(f"Unable to resolve {pid_type} pid: {pid}") diff --git a/rero_ils/modules/libraries/api.py b/rero_ils/modules/libraries/api.py index 4c3f894490..bc696deb3f 100644 --- a/rero_ils/modules/libraries/api.py +++ b/rero_ils/modules/libraries/api.py @@ -34,8 +34,12 @@ from rero_ils.modules.providers import Provider from rero_ils.modules.stats_cfg.api import StatsConfigurationSearch from rero_ils.modules.users.models import UserRole -from rero_ils.modules.utils import date_string_to_utc, \ - extracted_data_from_ref, sorted_pids, strtotime +from rero_ils.modules.utils import ( + date_string_to_utc, + extracted_data_from_ref, + sorted_pids, + strtotime, +) from .exceptions import LibraryNeverOpen from .extensions import 
LibraryCalendarChangesExtension @@ -43,9 +47,7 @@ # provider LibraryProvider = type( - 'LibraryProvider', - (Provider,), - dict(identifier=LibraryIdentifier, pid_type='lib') + "LibraryProvider", (Provider,), dict(identifier=LibraryIdentifier, pid_type="lib") ) # minter library_id_minter = partial(id_minter, provider=LibraryProvider) @@ -56,12 +58,12 @@ class LibrariesSearch(IlsRecordsSearch): """Libraries search.""" - class Meta(): + class Meta: """Meta class.""" - index = 'libraries' + index = "libraries" doc_types = None - fields = ('*', ) + fields = ("*",) facets = {} default_filter = None @@ -73,7 +75,7 @@ def by_organisation_pid(self, organisation_pid): :returns: An ElasticSearch query to get hits related to the entity. :rtype: `elasticsearch_dsl.Search` """ - return self.filter('term', organisation__pid=organisation_pid) + return self.filter("term", organisation__pid=organisation_pid) class Library(IlsRecord): @@ -83,14 +85,10 @@ class Library(IlsRecord): fetcher = library_id_fetcher provider = LibraryProvider model_cls = LibraryMetadata - pids_exist_check = { - 'required': { - 'org': 'organisation' - } - } + pids_exist_check = {"required": {"org": "organisation"}} _extensions = [ - LibraryCalendarChangesExtension(['opening_hours', 'exception_dates']) + LibraryCalendarChangesExtension(["opening_hours", "exception_dates"]) ] def extended_validation(self, **kwargs): @@ -98,19 +96,23 @@ def extended_validation(self, **kwargs): :return: reason for validation failure, otherwise True """ - for exception_date in self.get('exception_dates', []): - if exception_date['is_open'] and not exception_date.get('times'): - return _('Opening times must be specified for an open ' - 'exception date.') + for exception_date in self.get("exception_dates", []): + if exception_date["is_open"] and not exception_date.get("times"): + return _( + "Opening times must be specified for an open " "exception date." + ) return True @property def online_location(self): """Get the online location.""" - result = LocationsSearch()\ - .filter('term', is_online=True)\ - .filter('term', library__pid=self.pid)\ - .source(['pid']).scan() + result = ( + LocationsSearch() + .filter("term", is_online=True) + .filter("term", library__pid=self.pid) + .source(["pid"]) + .scan() + ) try: return next(result).pid except StopIteration: @@ -118,7 +120,7 @@ def online_location(self): def get_organisation(self): """Get Organisation.""" - return extracted_data_from_ref(self['organisation'], data='record') + return extracted_data_from_ref(self["organisation"], data="record") def get_address(self, address_type): """Get information about an address type. @@ -127,10 +129,13 @@ def get_address(self, address_type): :return: a dict with all necessary address data. """ if address_type == LibraryAddressType.MAIN_ADDRESS: - return self.get('address') + return self.get("address") else: - return self.get('acquisition_settings', {}) \ - .get(f'{address_type}_informations', {}).get('address') + return ( + self.get("acquisition_settings", {}) + .get(f"{address_type}_informations", {}) + .get("address") + ) def get_email(self, notification_type): """Get the email corresponding to the given notification type. @@ -141,24 +146,28 @@ def get_email(self, notification_type): """ # notification_settings is not a required field. 
if notification_type: - for setting in self.get('notification_settings', []): - if setting['type'] == notification_type: - return setting['email'] + for setting in self.get("notification_settings", []): + if setting["type"] == notification_type: + return setting["email"] def _pickup_location_query(self): """Search the location index for pickup locations.""" - return LocationsSearch() \ - .filter('term', library__pid=self.pid) \ - .filter('term', is_pickup=True) \ - .source(['pid']) \ + return ( + LocationsSearch() + .filter("term", library__pid=self.pid) + .filter("term", is_pickup=True) + .source(["pid"]) .scan() + ) def location_pids(self): """Return a generator of ES Hits of all pids of library locations.""" - return LocationsSearch() \ - .filter('term', library__pid=self.pid) \ - .source(['pid']) \ + return ( + LocationsSearch() + .filter("term", library__pid=self.pid) + .source(["pid"]) .scan() + ) def get_pickup_locations_pids(self): """Return all pickup location pids of the library.""" @@ -183,30 +192,29 @@ def _is_betweentimes(self, time_to_test, times): """Test if time is between times.""" times_open = False for time_given in times: - start_time = strtotime(time_given['start_time']) - end_time = strtotime(time_given['end_time']) + start_time = strtotime(time_given["start_time"]) + end_time = strtotime(time_given["end_time"]) - if time_to_test.hour == time_to_test.minute == \ - time_to_test.second == 0: + if time_to_test.hour == time_to_test.minute == time_to_test.second == 0: # case when the library is open or closed a few hours per day times_open = times_open or end_time > start_time else: - times_open = times_open or ((time_to_test >= start_time) and - (time_to_test <= end_time)) + times_open = times_open or ( + (time_to_test >= start_time) and (time_to_test <= end_time) + ) return times_open def _has_is_open(self): """Test if library has opening days in the future.""" - if opening_hours := self.get('opening_hours'): + if opening_hours := self.get("opening_hours"): for opening_hour in opening_hours: - if opening_hour['is_open']: + if opening_hour["is_open"]: return True current_timestamp = datetime.now(pytz.utc) for exception_date in filter( - lambda d: d['is_open'], - self.get('exception_dates', []) + lambda d: d["is_open"], self.get("exception_dates", []) ): - start_date = date_string_to_utc(exception_date['start_date']) + start_date = date_string_to_utc(exception_date["start_date"]) # avoid next_open infinite loop if an open exception date is # in the past if start_date > current_timestamp: @@ -215,14 +223,14 @@ def _has_is_open(self): def _get_exceptions_matching_date(self, date_to_check, day_only=False): """Get all exceptions matching a given date.""" - for exception in self.get('exception_dates', []): + for exception in self.get("exception_dates", []): # Get the start date and the gap (in days) between start date and # end date. If no end_date is supplied, the gap will be 0. - start_date = date_string_to_utc(exception['start_date']) + start_date = date_string_to_utc(exception["start_date"]) end_date = start_date day_gap = 0 - if exception.get('end_date'): - end_date = date_string_to_utc(exception.get('end_date')) + if exception.get("end_date"): + end_date = date_string_to_utc(exception.get("end_date")) day_gap = (end_date - start_date).days # If the exception is repeatable, then the start_date should be the @@ -230,13 +238,13 @@ def _get_exceptions_matching_date(self, date_to_check, day_only=False): # repeat period/interval definition. 
To know that, we need to know # all exception dates possible (from exception start_date to # date_to_check) and get only the last one. - if exception.get('repeat'): - period = exception['repeat']['period'].upper() + if exception.get("repeat"): + period = exception["repeat"]["period"].upper() exception_dates = rrule( freq=FREQNAMES.index(period), until=date_to_check, - interval=exception['repeat']['interval'], - dtstart=start_date + interval=exception["repeat"]["interval"], + dtstart=start_date, ) for start_date in exception_dates: pass @@ -247,8 +255,8 @@ def _get_exceptions_matching_date(self, date_to_check, day_only=False): # is included in these time intervals (only if `day_only` method # argument is set) if start_date.date() <= date_to_check.date() <= end_date.date(): - if exception.get('times') and not day_only: - times = exception.get('times') + if exception.get("times") and not day_only: + times = exception.get("times") if self._is_betweentimes(date_to_check.time(), times): yield exception else: @@ -274,14 +282,13 @@ def is_open(self, date=None, day_only=False): # If the found rule defines open time periods, check if date_to_check # falls into one of these periods (depending on `day_only` method # argument). - day_name = date.strftime('%A').lower() + day_name = date.strftime("%A").lower() regular_rule = [ - rule for rule in self.get('opening_hours', []) - if rule['day'] == day_name + rule for rule in self.get("opening_hours", []) if rule["day"] == day_name ] if regular_rule: - is_open = regular_rule[0].get('is_open', False) - rule_hours = regular_rule[0].get('times', []) + is_open = regular_rule[0].get("is_open", False) + rule_hours = regular_rule[0].get("times", []) if is_open and not day_only: is_open = self._is_betweentimes(date.time(), rule_hours) @@ -296,7 +303,7 @@ def is_open(self, date=None, day_only=False): # date_to_check falls into one of these periods (depending on `day_only` # method argument) for exception in self._get_exceptions_matching_date(date, day_only): - if is_open != exception['is_open']: + if is_open != exception["is_open"]: is_open = not is_open return is_open @@ -305,19 +312,18 @@ def _get_opening_hour_by_day(self, day_name): """Get the library opening hour for a specific day.""" day_name = day_name.lower() days = [ - day for day in self.get('opening_hours', []) - if day['day'] == day_name and day['is_open'] + day + for day in self.get("opening_hours", []) + if day["day"] == day_name and day["is_open"] ] - if days and days[0]['times']: - return days[0]['times'][0]['start_time'] + if days and days[0]["times"]: + return days[0]["times"][0]["start_time"] def next_open(self, date=None, previous=False, ensure=False): """Get next open day.""" date = date or datetime.now(pytz.utc) if not self._has_is_open(): - raise LibraryNeverOpen( - f'No open days found for library (pid: {self.pid})' - ) + raise LibraryNeverOpen(f"No open days found for library (pid: {self.pid})") if isinstance(date, str): date = parser.parse(date) add_day = -1 if previous else 1 @@ -326,14 +332,9 @@ def next_open(self, date=None, previous=False, ensure=False): date += timedelta(days=add_day) if not ensure: return date - opening_hour = self._get_opening_hour_by_day(date.strftime('%A')) - time = [int(part) for part in opening_hour.split(':')] - return date.replace( - hour=time[0], - minute=time[1], - second=0, - microsecond=0 - ) + opening_hour = self._get_opening_hour_by_day(date.strftime("%A")) + time = [int(part) for part in opening_hour.split(":")] + return date.replace(hour=time[0], 
minute=time[1], second=0, microsecond=0) def get_open_days(self, start_date=None, end_date=None): """Get all open days between date interval.""" @@ -375,22 +376,21 @@ def get_links_to_me(self, get_pids=False): :param get_pids: if True list of linked pids if False count of linked records """ - from rero_ils.modules.acquisition.acq_receipts.api import \ - AcqReceiptsSearch + from rero_ils.modules.acquisition.acq_receipts.api import AcqReceiptsSearch from rero_ils.modules.patrons.api import PatronsSearch + links = {} - stat_cfg_query = StatsConfigurationSearch()\ - .filter( - Q('term', library__pid=self.pid) | - Q('term', filter_by_libraries__pid=self.pid) - ) - location_query = LocationsSearch() \ - .filter('term', library__pid=self.pid) - patron_query = PatronsSearch() \ - .filter('term', libraries__pid=self.pid) \ - .filter('terms', roles=UserRole.PROFESSIONAL_ROLES) - receipt_query = AcqReceiptsSearch() \ - .filter('term', library__pid=self.pid) + stat_cfg_query = StatsConfigurationSearch().filter( + Q("term", library__pid=self.pid) + | Q("term", filter_by_libraries__pid=self.pid) + ) + location_query = LocationsSearch().filter("term", library__pid=self.pid) + patron_query = ( + PatronsSearch() + .filter("term", libraries__pid=self.pid) + .filter("terms", roles=UserRole.PROFESSIONAL_ROLES) + ) + receipt_query = AcqReceiptsSearch().filter("term", library__pid=self.pid) if get_pids: locations = sorted_pids(location_query) librarians = sorted_pids(patron_query) @@ -402,33 +402,33 @@ def get_links_to_me(self, get_pids=False): receipts = receipt_query.count() stats_cfg = stat_cfg_query.count() if locations: - links['locations'] = locations + links["locations"] = locations if librarians: - links['patrons'] = librarians + links["patrons"] = librarians if receipts: - links['acq_receipts'] = receipts + links["acq_receipts"] = receipts if stats_cfg: - links['stats_cfg'] = stats_cfg + links["stats_cfg"] = stats_cfg return links def reasons_not_to_delete(self): """Get reasons not to delete record.""" cannot_delete = {} if links := self.get_links_to_me(): - cannot_delete['links'] = links + cannot_delete["links"] = links return cannot_delete def get_timezone(self): """Get library timezone.""" # TODO: get timezone regarding Library address. # TODO: Use BABEL_DEFAULT_TIMEZONE by default - return pytz.timezone('Europe/Zurich') + return pytz.timezone("Europe/Zurich") def get_online_harvested_source_url(self, source): """Get online harvested source url.""" - for harvested_source in self.get('online_harvested_source', []): - if harvested_source.get('source') == source: - return harvested_source['url'] + for harvested_source in self.get("online_harvested_source", []): + if harvested_source.get("source") == source: + return harvested_source["url"] class LibrariesIndexer(IlsRecordsIndexer): @@ -441,4 +441,4 @@ def bulk_index(self, record_id_iterator): :param record_id_iterator: Iterator yielding record UUIDs. 
""" - super().bulk_index(record_id_iterator, doc_type='lib') + super().bulk_index(record_id_iterator, doc_type="lib") diff --git a/rero_ils/modules/libraries/api_views.py b/rero_ils/modules/libraries/api_views.py index a1ec421b78..689870f4e4 100644 --- a/rero_ils/modules/libraries/api_views.py +++ b/rero_ils/modules/libraries/api_views.py @@ -28,14 +28,10 @@ from rero_ils.modules.libraries.api import Library from rero_ils.modules.utils import add_years, date_string_to_utc -api_blueprint = Blueprint( - 'api_library', - __name__, - url_prefix='/library' -) +api_blueprint = Blueprint("api_library", __name__, url_prefix="/library") -@api_blueprint.route('//closed_dates', methods=['GET']) +@api_blueprint.route("//closed_dates", methods=["GET"]) @check_logged_as_librarian def list_closed_dates(library_pid): """HTTP GET request to get the closed dates for a given library pid. @@ -54,12 +50,12 @@ def list_closed_dates(library_pid): abort(404) # get start date from 'from' parameter from query string request - start_date = request.args.get('from', datetime.now() - timedelta(days=31)) + start_date = request.args.get("from", datetime.now() - timedelta(days=31)) if isinstance(start_date, str): start_date = date_string_to_utc(start_date) start_date = start_date.replace(tzinfo=library.get_timezone()) # get end date from 'until' parameter from query string request - end_date = request.args.get('until', add_years(datetime.now(), 1)) + end_date = request.args.get("until", add_years(datetime.now(), 1)) if isinstance(end_date, str): end_date = date_string_to_utc(end_date) end_date = end_date.replace(tzinfo=library.get_timezone()) @@ -70,12 +66,14 @@ def list_closed_dates(library_pid): for i in range(delta.days + 1): tmp_date = start_date + timedelta(days=i) if not library.is_open(date=tmp_date, day_only=True): - closed_date.append(tmp_date.strftime('%Y-%m-%d')) + closed_date.append(tmp_date.strftime("%Y-%m-%d")) - return jsonify({ - 'params': { - 'from': start_date.strftime('%Y-%m-%d'), - 'until': end_date.strftime('%Y-%m-%d') - }, - 'closed_dates': closed_date - }) + return jsonify( + { + "params": { + "from": start_date.strftime("%Y-%m-%d"), + "until": end_date.strftime("%Y-%m-%d"), + }, + "closed_dates": closed_date, + } + ) diff --git a/rero_ils/modules/libraries/dumpers.py b/rero_ils/modules/libraries/dumpers.py index e21808d02a..40fbdf81a4 100644 --- a/rero_ils/modules/libraries/dumpers.py +++ b/rero_ils/modules/libraries/dumpers.py @@ -33,16 +33,18 @@ def dump(self, record, data): :param record: The record to dump. :param data: The initial dump data passed in by ``record.dumps()``. """ - data.update({ - 'name': record.get('name'), - 'address': record.get_address(LibraryAddressType.MAIN_ADDRESS), - 'shipping_informations': - record.get('acquisition_settings', {}) - .get('shipping_informations', {}), - 'billing_informations': - record.get('acquisition_settings', {}) - .get('billing_informations', {}) - }) + data.update( + { + "name": record.get("name"), + "address": record.get_address(LibraryAddressType.MAIN_ADDRESS), + "shipping_informations": record.get("acquisition_settings", {}).get( + "shipping_informations", {} + ), + "billing_informations": record.get("acquisition_settings", {}).get( + "billing_informations", {} + ), + } + ) data = {k: v for k, v in data.items() if v} return data @@ -56,19 +58,21 @@ def dump(self, record, data): :param record: The record to dump. :param data: The initial dump data passed in by ``record.dumps()``. 
""" - if 'serial_acquisition_settings' not in record: - raise MissingDataException('library.serial_acquisition_settings') - - data.update({ - 'name': record.get('name'), - 'address': record.get_address(LibraryAddressType.MAIN_ADDRESS), - 'shipping_informations': - record.get('serial_acquisition_settings', {}) - .get('shipping_informations', {}), - 'billing_informations': - record.get('serial_acquisition_settings', {}) - .get('billing_informations', {}) - }) + if "serial_acquisition_settings" not in record: + raise MissingDataException("library.serial_acquisition_settings") + + data.update( + { + "name": record.get("name"), + "address": record.get_address(LibraryAddressType.MAIN_ADDRESS), + "shipping_informations": record.get( + "serial_acquisition_settings", {} + ).get("shipping_informations", {}), + "billing_informations": record.get( + "serial_acquisition_settings", {} + ).get("billing_informations", {}), + } + ) data = {k: v for k, v in data.items() if v} return data @@ -83,10 +87,12 @@ def dump(self, record, data): :param data: The initial dump data passed in by ``record.dumps()``. :return a dict with dumped data. """ - data.update({ - 'pid': record.pid, - 'name': record.get('name'), - 'address': record.get('address'), - 'email': record.get('email') - }) + data.update( + { + "pid": record.pid, + "name": record.get("name"), + "address": record.get("address"), + "email": record.get("email"), + } + ) return {k: v for k, v in data.items() if v} diff --git a/rero_ils/modules/libraries/extensions.py b/rero_ils/modules/libraries/extensions.py index 93f82eab92..975f410830 100644 --- a/rero_ils/modules/libraries/extensions.py +++ b/rero_ils/modules/libraries/extensions.py @@ -82,11 +82,11 @@ def _cache_current_task(record, task): :param record: the touched library record. :param task: the task related to the library. """ - content = current_cache.get('library-calendar-changes') or {} + content = current_cache.get("library-calendar-changes") or {} # If a previous task is still present into this cache entry, revoke it. # DEV NOTE : the task MUST clean (remove) this cache entry when task is # finished. 
if task_id := content.pop(record.pid, None): celery_app.control.revoke(task_id, terminate=True) content[record.pid] = task.id - current_cache.set('library-calendar-changes', content) + current_cache.set("library-calendar-changes", content) diff --git a/rero_ils/modules/libraries/jsonresolver.py b/rero_ils/modules/libraries/jsonresolver.py index 0c70d7ae55..08d9aaaa19 100644 --- a/rero_ils/modules/libraries/jsonresolver.py +++ b/rero_ils/modules/libraries/jsonresolver.py @@ -23,7 +23,7 @@ from ..jsonresolver import resolve_json_refs -@jsonresolver.route('/api/libraries/', host='bib.rero.ch') +@jsonresolver.route("/api/libraries/", host="bib.rero.ch") def library_resolver(pid): """Library resolver.""" - return resolve_json_refs('lib', pid) + return resolve_json_refs("lib", pid) diff --git a/rero_ils/modules/libraries/models.py b/rero_ils/modules/libraries/models.py index 17c9798010..628a0e1499 100644 --- a/rero_ils/modules/libraries/models.py +++ b/rero_ils/modules/libraries/models.py @@ -27,31 +27,32 @@ class LibraryIdentifier(RecordIdentifier): """Sequence generator for Library identifiers.""" - __tablename__ = 'library_id' - __mapper_args__ = {'concrete': True} + __tablename__ = "library_id" + __mapper_args__ = {"concrete": True} recid = db.Column( - db.BigInteger().with_variant(db.Integer, 'sqlite'), - primary_key=True, autoincrement=True, + db.BigInteger().with_variant(db.Integer, "sqlite"), + primary_key=True, + autoincrement=True, ) class LibraryMetadata(db.Model, RecordMetadataBase): """Library record metadata.""" - __tablename__ = 'library_metadata' + __tablename__ = "library_metadata" class LibraryAddressType: """Address type for libraries.""" - MAIN_ADDRESS = 'main' - SHIPPING_ADDRESS = 'shipping' - BILLING_ADDRESS = 'billing' + MAIN_ADDRESS = "main" + SHIPPING_ADDRESS = "shipping" + BILLING_ADDRESS = "billing" class AccountTransferOption: """Allowed account transfer option for rollover setting.""" - NO_TRANSFER = 'rollover_no_transfer' - ALLOCATED_AMOUNT = 'rollover_allocated_amount' + NO_TRANSFER = "rollover_no_transfer" + ALLOCATED_AMOUNT = "rollover_allocated_amount" diff --git a/rero_ils/modules/libraries/permissions.py b/rero_ils/modules/libraries/permissions.py index 4228b27cc8..c9a69009f2 100644 --- a/rero_ils/modules/libraries/permissions.py +++ b/rero_ils/modules/libraries/permissions.py @@ -19,17 +19,20 @@ """Permissions for libraries.""" from invenio_access import action_factory -from rero_ils.modules.permissions import AllowedByAction, \ - AllowedByActionRestrictByManageableLibrary, \ - AllowedByActionRestrictByOrganisation, RecordPermissionPolicy +from rero_ils.modules.permissions import ( + AllowedByAction, + AllowedByActionRestrictByManageableLibrary, + AllowedByActionRestrictByOrganisation, + RecordPermissionPolicy, +) # Actions to control library policy -search_action = action_factory('lib-search') -read_action = action_factory('lib-read') -create_action = action_factory('lib-create') -update_action = action_factory('lib-update') -delete_action = action_factory('lib-delete') -access_action = action_factory('lib-access') +search_action = action_factory("lib-search") +read_action = action_factory("lib-read") +create_action = action_factory("lib-create") +update_action = action_factory("lib-update") +delete_action = action_factory("lib-delete") +access_action = action_factory("lib-access") class LibraryPermissionPolicy(RecordPermissionPolicy): @@ -37,9 +40,18 @@ class LibraryPermissionPolicy(RecordPermissionPolicy): can_search = [AllowedByAction(search_action)] 
can_read = [AllowedByActionRestrictByOrganisation(read_action)] - can_create = [AllowedByActionRestrictByManageableLibrary( - create_action, lambda record: record.get('pid'))] - can_update = [AllowedByActionRestrictByManageableLibrary( - update_action, lambda record: record.get('pid'))] - can_delete = [AllowedByActionRestrictByManageableLibrary( - delete_action, lambda record: record.get('pid'))] + can_create = [ + AllowedByActionRestrictByManageableLibrary( + create_action, lambda record: record.get("pid") + ) + ] + can_update = [ + AllowedByActionRestrictByManageableLibrary( + update_action, lambda record: record.get("pid") + ) + ] + can_delete = [ + AllowedByActionRestrictByManageableLibrary( + delete_action, lambda record: record.get("pid") + ) + ] diff --git a/rero_ils/modules/libraries/tasks.py b/rero_ils/modules/libraries/tasks.py index 9a76a54418..17af15e428 100644 --- a/rero_ils/modules/libraries/tasks.py +++ b/rero_ils/modules/libraries/tasks.py @@ -26,7 +26,7 @@ from .exceptions import LibraryNeverOpen -@shared_task(name='library-calendar-changes-update-loans') +@shared_task(name="library-calendar-changes-update-loans") def calendar_changes_update_loans(record_data): """Task to update related loans if library calendar changes. @@ -64,12 +64,11 @@ def _at_finish(): # decorator should take the key to clean as argument. But we didn't # know the key because it's created from `record_data`. This is why # it's easier to create a small specific function. - cache_content = current_cache.get('library-calendar-changes') or {} + cache_content = current_cache.get("library-calendar-changes") or {} cache_content.pop(library.pid, {}) - current_cache.set('library-calendar-changes', cache_content) + current_cache.set("library-calendar-changes", cache_content) - from rero_ils.modules.loans.api import LoansIndexer, \ - get_on_loan_loans_for_library + from rero_ils.modules.loans.api import LoansIndexer, get_on_loan_loans_for_library from .api import Library @@ -80,11 +79,12 @@ def _at_finish(): active_loan_counter += 1 if not library.is_open(loan.end_date): with contextlib.suppress(LibraryNeverOpen): - loan['end_date'] = library \ - .next_open(loan.end_date) \ - .astimezone(library.get_timezone()) \ - .replace(hour=23, minute=59, second=0, microsecond=0)\ + loan["end_date"] = ( + library.next_open(loan.end_date) + .astimezone(library.get_timezone()) + .replace(hour=23, minute=59, second=0, microsecond=0) .isoformat() + ) changed_loan_uuids.append(loan.id) loan.update(loan, dbcommit=True, reindex=False) indexer = LoansIndexer() diff --git a/rero_ils/modules/loans/api.py b/rero_ils/modules/loans/api.py index 776bcfbe2d..5620aa1a9a 100644 --- a/rero_ils/modules/loans/api.py +++ b/rero_ils/modules/loans/api.py @@ -36,19 +36,26 @@ from invenio_jsonschemas import current_jsonschemas from werkzeug.utils import cached_property -from rero_ils.modules.api import IlsRecord, IlsRecordError, \ - IlsRecordsIndexer, IlsRecordsSearch -from rero_ils.modules.circ_policies.api import DUE_SOON_REMINDER_TYPE, \ - OVERDUE_REMINDER_TYPE, CircPolicy +from rero_ils.modules.api import ( + IlsRecord, + IlsRecordError, + IlsRecordsIndexer, + IlsRecordsSearch, +) +from rero_ils.modules.circ_policies.api import ( + DUE_SOON_REMINDER_TYPE, + OVERDUE_REMINDER_TYPE, + CircPolicy, +) from rero_ils.modules.errors import NoCirculationActionIsPermitted from rero_ils.modules.items.models import ItemStatus from rero_ils.modules.items.utils import item_pid_to_object from rero_ils.modules.libraries.api import LibrariesSearch, Library from 
rero_ils.modules.locations.api import Location, LocationsSearch -from rero_ils.modules.notifications.api import Notification, \ - NotificationsSearch -from rero_ils.modules.notifications.dispatcher import \ - Dispatcher as NotificationDispatcher +from rero_ils.modules.notifications.api import Notification, NotificationsSearch +from rero_ils.modules.notifications.dispatcher import ( + Dispatcher as NotificationDispatcher, +) from rero_ils.modules.notifications.models import NotificationType from rero_ils.modules.patron_transactions.api import PatronTransactionsSearch from rero_ils.modules.patron_transactions.models import PatronTransactionStatus @@ -65,9 +72,9 @@ class LoansSearch(IlsRecordsSearch): class Meta: """Meta class.""" - index = 'loans' + index = "loans" doc_types = None - fields = ('*', ) + fields = ("*",) facets = {} default_filter = None @@ -79,9 +86,10 @@ def unavailable_query(self): :returns: an elasticsearch query """ - states = [LoanState.PENDING] + \ - current_app.config['CIRCULATION_STATES_LOAN_ACTIVE'] - return self.filter('terms', state=states) + states = [LoanState.PENDING] + current_app.config[ + "CIRCULATION_STATES_LOAN_ACTIVE" + ] + return self.filter("terms", state=states) class Loan(IlsRecord): @@ -90,14 +98,9 @@ class Loan(IlsRecord): minter = loan_pid_minter fetcher = loan_pid_fetcher provider = CirculationLoanIdProvider - pid_field = 'pid' - _schema = 'loans/loan-ils-v0.0.1.json' - pids_exist_check = { - 'not_required': { - 'org': 'organisation', - 'item': 'item' - } - } + pid_field = "pid" + _schema = "loans/loan-ils-v0.0.1.json" + pids_exist_check = {"not_required": {"org": "organisation", "item": "item"}} DATE_FIELDS = [] DATETIME_FIELDS = [ @@ -106,13 +109,10 @@ class Loan(IlsRecord): "request_expire_date", "request_start_date", "start_date", - "transaction_date" + "transaction_date", ] # Invenio Records extensions - _extensions = [ - CheckoutLocationExtension(), - CirculationDatesExtension() - ] + _extensions = [CheckoutLocationExtension(), CirculationDatesExtension()] def __init__(self, data, model=None): """Loan init.""" @@ -122,37 +122,40 @@ def __init__(self, data, model=None): def can_extend(cls, item, **kwargs): """Loan can extend.""" from rero_ils.modules.loans.utils import extend_loan_data_is_valid - loan = kwargs.get('loan') + + loan = kwargs.get("loan") if loan is None: # try to load the loan from kwargs loan, _unused_data = item.prior_extend_loan_actions(**kwargs) if loan is None: # not relevant method :: return True return True, [] - if loan.get('state') != LoanState.ITEM_ON_LOAN: - return False, [_('The loan cannot be extended')] + if loan.get("state") != LoanState.ITEM_ON_LOAN: + return False, [_("The loan cannot be extended")] # The parameters for the renewal are calculated based on the transaction # library and not the owning library. 
- transaction_library_pid = Location \ - .get_record_by_pid(loan['transaction_location_pid']) \ - .get_library().get('pid') + transaction_library_pid = ( + Location.get_record_by_pid(loan["transaction_location_pid"]) + .get_library() + .get("pid") + ) - patron = Patron.get_record_by_pid(loan.get('patron_pid')) + patron = Patron.get_record_by_pid(loan.get("patron_pid")) cipo = CircPolicy.provide_circ_policy( organisation_pid=item.organisation_pid, library_pid=transaction_library_pid, patron_type_pid=patron.patron_type_pid, - item_type_pid=item.item_type_circulation_category_pid + item_type_pid=item.item_type_circulation_category_pid, ) - extension_count = loan.get('extension_count', 0) - number_renewals = cipo.get('number_renewals', 0) + extension_count = loan.get("extension_count", 0) + number_renewals = cipo.get("number_renewals", 0) loan_data_is_valid = extend_loan_data_is_valid( - end_date=loan.get('end_date'), - renewal_duration=cipo.get('renewal_duration'), - library_pid=transaction_library_pid + end_date=loan.get("end_date"), + renewal_duration=cipo.get("renewal_duration"), + library_pid=transaction_library_pid, ) if not (extension_count < number_renewals > 0 and loan_data_is_valid): - return False, [_('Circulation policies disallows the operation.')] + return False, [_("Circulation policies disallows the operation.")] if item.number_of_requests(): - return False, [_('A pending request exists on this item.')] + return False, [_("A pending request exists on this item.")] return True, [] @staticmethod @@ -161,7 +164,7 @@ def check_required_params(action, **kwargs): # TODO: do we need to check also the parameter exist and its value? required_params = action_required_params(action=action) if missing_params := set(required_params) - set(kwargs): - message = f'Parameters {missing_params} are required' + message = f"Parameters {missing_params} are required" raise MissingRequiredParameterError(description=message) def update_pickup_location(self, pickup_location_pid): @@ -173,17 +176,21 @@ def update_pickup_location(self, pickup_location_pid): :param pickup_location_pid: The new pickup_location_pid. :return: the new updated loan. """ - if self['state'] not in [ - LoanState.PENDING, LoanState.ITEM_IN_TRANSIT_FOR_PICKUP]: + if self["state"] not in [ + LoanState.PENDING, + LoanState.ITEM_IN_TRANSIT_FOR_PICKUP, + ]: raise NoCirculationActionIsPermitted( - _('No circulation action is permitted')) + _("No circulation action is permitted") + ) - self['pickup_location_pid'] = pickup_location_pid + self["pickup_location_pid"] = pickup_location_pid return self.update(self, dbcommit=True, reindex=True) @classmethod - def create(cls, data, id_=None, delete_pid=True, - dbcommit=False, reindex=False, **kwargs): + def create( + cls, data, id_=None, delete_pid=True, dbcommit=False, reindex=False, **kwargs + ): """Create the loan record. :param cls: class object @@ -194,24 +201,27 @@ def create(cls, data, id_=None, delete_pid=True, :param reindex: index the record after the creation. 
:returns: the created record """ - data['$schema'] = current_jsonschemas.path_to_url(cls._schema) + data["$schema"] = current_jsonschemas.path_to_url(cls._schema) # default state assignment - data.setdefault( - 'state', - current_app.config['CIRCULATION_LOAN_INITIAL_STATE'] - ) + data.setdefault("state", current_app.config["CIRCULATION_LOAN_INITIAL_STATE"]) if delete_pid and data.get(cls.pid_field): del data[cls.pid_field] cls._loan_build_org_ref(data) # set the field to_anonymize - data['to_anonymize'] = \ - cls.can_anonymize(loan_data=data) and not data.get('to_anonymize') + data["to_anonymize"] = cls.can_anonymize(loan_data=data) and not data.get( + "to_anonymize" + ) - if not data.get('state'): - data['state'] = LoanState.CREATED + if not data.get("state"): + data["state"] = LoanState.CREATED return super(Loan, cls).create( - data=data, id_=id_, delete_pid=delete_pid, dbcommit=dbcommit, - reindex=reindex, **kwargs) + data=data, + id_=id_, + delete_pid=delete_pid, + dbcommit=dbcommit, + reindex=reindex, + **kwargs, + ) def update(self, data, commit=False, dbcommit=False, reindex=False): """Update loan record. @@ -223,10 +233,9 @@ def update(self, data, commit=False, dbcommit=False, reindex=False): """ self._loan_build_org_ref(data) # set the field to_anonymize - if not self.get('to_anonymize') and Loan.can_anonymize(loan_data=data): - data['to_anonymize'] = True - super().update( - data=data, commit=commit, dbcommit=dbcommit, reindex=reindex) + if not self.get("to_anonymize") and Loan.can_anonymize(loan_data=data): + data["to_anonymize"] = True + super().update(data=data, commit=commit, dbcommit=dbcommit, reindex=reindex) return self def anonymize(self, commit=True, dbcommit=False, reindex=False): @@ -238,14 +247,14 @@ def anonymize(self, commit=True, dbcommit=False, reindex=False): :returns: the modified record """ from rero_ils.modules.loans.logs.api import LoanOperationLog - self['to_anonymize'] = True + + self["to_anonymize"] = True try: super().update(self, commit, dbcommit, reindex) # Anonymize loan operation logs LoanOperationLog.anonymize_logs(self.pid) except Exception as err: - current_app.logger.error( - f'Can not anonymize loan: {self.get("pid")} {err}') + current_app.logger.error(f'Can not anonymize loan: {self.get("pid")} {err}') return self def date_fields2datetime(self): @@ -283,17 +292,28 @@ def requested_loans_to_validate(cls, library_pid): def loans_pending(): """Get pending loans.""" - return LoansSearch()\ - .params(preserve_order=True)\ - .filter('term', state=LoanState.PENDING)\ - .filter('term', library_pid=library_pid)\ - .sort({'_created': {"order": 'asc'}})\ - .source(includes=[ - 'pid', 'transaction_date', 'item_pid', 'patron_pid', - 'document_pid', 'library_pid', 'state', '_created', - 'transaction_location_pid', 'pickup_location_pid' - ])\ + return ( + LoansSearch() + .params(preserve_order=True) + .filter("term", state=LoanState.PENDING) + .filter("term", library_pid=library_pid) + .sort({"_created": {"order": "asc"}}) + .source( + includes=[ + "pid", + "transaction_date", + "item_pid", + "patron_pid", + "document_pid", + "library_pid", + "state", + "_created", + "transaction_location_pid", + "pickup_location_pid", + ] + ) .scan() + ) def patron_by_pid(pid, known_patrons): """Get patron by pid. @@ -302,12 +322,14 @@ def patron_by_pid(pid, known_patrons): :param known_patrons: already known patrons. :return: the corresponding patron. 
""" - fields = ['pid', 'first_name', 'last_name', 'patron.barcode'] + fields = ["pid", "first_name", "last_name", "patron.barcode"] if pid not in known_patrons: - results = PatronsSearch()\ - .filter('term', pid=pid)\ - .source(includes=fields)\ + results = ( + PatronsSearch() + .filter("term", pid=pid) + .source(includes=fields) .execute() + ) if hit := next(iter(results or []), None): known_patrons[pid] = hit.to_dict() return known_patrons.get(pid, {}) @@ -319,12 +341,14 @@ def location_by_pid(pid, known_locations): :param known_locations: already known locations. :return: the corresponding location. """ - fields = ['pid', 'name', 'library', 'pickup_name'] + fields = ["pid", "name", "library", "pickup_name"] if pid not in known_locations: - results = LocationsSearch()\ - .filter('term', pid=pid)\ - .source(includes=fields)\ + results = ( + LocationsSearch() + .filter("term", pid=pid) + .source(includes=fields) .execute() + ) if hit := next(iter(results or []), None): data = hit.to_dict() known_locations[pid] = {k: v for k, v in data.items() if v} @@ -338,10 +362,12 @@ def library_name_by_pid(pid, known_libraries): :return: the corresponding library. """ if pid not in known_libraries: - results = LibrariesSearch()\ - .filter('term', pid=pid)\ - .source(includes='name')\ + results = ( + LibrariesSearch() + .filter("term", pid=pid) + .source(includes="name") .execute() + ) if hit := next(iter(results or []), None): known_libraries[pid] = hit.name return known_libraries.get(pid, {}) @@ -354,11 +380,14 @@ def holding_by_pid(pid, known_holdings): :return: the corresponding holdings. """ from ..holdings.api import HoldingsSearch + if pid not in known_holdings: - results = HoldingsSearch()\ - .filter('term', pid=pid)\ - .source(includes='call_number')\ + results = ( + HoldingsSearch() + .filter("term", pid=pid) + .source(includes="call_number") .execute() + ) if hit := next(iter(results or []), None): known_holdings[pid] = hit.to_dict() return known_holdings.get(pid, {}) @@ -370,16 +399,26 @@ def item_by_pid(pid, known_items): :param known_items: already known items. :return: the corresponding item. 
""" - fields = ['pid', 'barcode', 'call_number', - 'second_call_number', 'library', 'location', - 'temporary_item_type', 'holding', - 'enumerationAndChronology', 'temporary_location'] + fields = [ + "pid", + "barcode", + "call_number", + "second_call_number", + "library", + "location", + "temporary_item_type", + "holding", + "enumerationAndChronology", + "temporary_location", + ] if pid not in known_items: - results = ItemsSearch()\ - .filter('term', pid=pid)\ - .filter('term', status=ItemStatus.ON_SHELF)\ - .source(includes=fields)\ + results = ( + ItemsSearch() + .filter("term", pid=pid) + .filter("term", status=ItemStatus.ON_SHELF) + .source(includes=fields) .execute() + ) known_items[pid] = next(iter(results or []), None) return known_items.get(pid, {}) @@ -391,10 +430,12 @@ def item_type_by_pid(pid, known_ittys): :return: the corresponding item type """ if pid not in known_ittys: - results = ItemTypesSearch()\ - .filter('term', pid=pid)\ - .filter('term', negative_availability=False)\ + results = ( + ItemTypesSearch() + .filter("term", pid=pid) + .filter("term", negative_availability=False) .execute() + ) known_ittys[pid] = next(iter(results or []), None) return known_ittys.get(pid, {}) @@ -403,58 +444,46 @@ def item_type_by_pid(pid, known_ittys): loans = loans_pending() for loan in loans: - item_pid = loan['item_pid']['value'] + item_pid = loan["item_pid"]["value"] item = item_by_pid(item_pid, items) if item: add = True - if 'temporary_item_type' in item: - itty_pid = item['temporary_item_type']['pid'] + if "temporary_item_type" in item: + itty_pid = item["temporary_item_type"]["pid"] add = item_type_by_pid(itty_pid, item_types) is not None if add and item_pid not in item_pids: item_pids.append(item_pid) item_data = item.to_dict() loan_data = loan.to_dict() - loan_data['creation_date'] = loan_data.pop('_created') - if 'call_number' not in item_data: - holding = holding_by_pid( - item['holding']['pid'], - holdings - ) - if 'call_number' in holding: - item_data['call_number'] = holding['call_number'] - item_data['library']['name'] = library_name_by_pid( - item_data['library']['pid'], - libraries + loan_data["creation_date"] = loan_data.pop("_created") + if "call_number" not in item_data: + holding = holding_by_pid(item["holding"]["pid"], holdings) + if "call_number" in holding: + item_data["call_number"] = holding["call_number"] + item_data["library"]["name"] = library_name_by_pid( + item_data["library"]["pid"], libraries ) - item_data['location']['name'] = location_by_pid( - item_data['location']['pid'], - locations - )['name'] - if 'temporary_location' in item_data: + item_data["location"]["name"] = location_by_pid( + item_data["location"]["pid"], locations + )["name"] + if "temporary_location" in item_data: location = location_by_pid( - item_data['temporary_location']['pid'], - locations + item_data["temporary_location"]["pid"], locations ) - item_data['temporary_location']['name'] = \ - location.get('name') - patron_data = patron_by_pid(loan_data['patron_pid'], - patrons) - loan_data['patron'] = { - 'barcode': patron_data['patron']['barcode'][0], - 'name': f'{patron_data["last_name"]}, ' - f'{patron_data["first_name"]}' + item_data["temporary_location"]["name"] = location.get("name") + patron_data = patron_by_pid(loan_data["patron_pid"], patrons) + loan_data["patron"] = { + "barcode": patron_data["patron"]["barcode"][0], + "name": f'{patron_data["last_name"]}, ' + f'{patron_data["first_name"]}', } - loan_data['pickup_location'] = location_by_pid( - loan_data['pickup_location_pid'], 
locations) - loan_data['pickup_location']['library_name'] = \ - library_name_by_pid( - loan_data['pickup_location']['library']['pid'], - libraries - ) - metadata.append({ - 'item': item_data, - 'loan': loan_data - }) + loan_data["pickup_location"] = location_by_pid( + loan_data["pickup_location_pid"], locations + ) + loan_data["pickup_location"]["library_name"] = library_name_by_pid( + loan_data["pickup_location"]["library"]["pid"], libraries + ) + metadata.append({"item": item_data, "loan": loan_data}) return metadata @classmethod @@ -465,12 +494,14 @@ def _loan_build_org_ref(cls, data): :returns: data with organisation information. """ from ..items.api import Item - if not data.get('organisation'): - item_pid = data.get('item_pid', {}).get('value') - data['organisation'] = {'$ref': get_ref_for_pid( - 'org', - Item.get_record_by_pid(item_pid).organisation_pid - )} + + if not data.get("organisation"): + item_pid = data.get("item_pid", {}).get("value") + data["organisation"] = { + "$ref": get_ref_for_pid( + "org", Item.get_record_by_pid(item_pid).organisation_pid + ) + } return data def is_loan_late(self): @@ -481,6 +512,7 @@ def is_loan_late(self): def is_loan_overdue(self): """Check if the loan is overdue.""" from .utils import get_circ_policy + if self.state != LoanState.ITEM_ON_LOAN: return False @@ -488,8 +520,7 @@ def is_loan_overdue(self): now = datetime.now(timezone.utc) due_date = ciso8601.parse_datetime(self.end_date) days_after = circ_policy.initial_overdue_days - return bool(days_after and - now > due_date + timedelta(days=days_after-1)) + return bool(days_after and now > due_date + timedelta(days=days_after - 1)) def is_loan_due_soon(self, tstamp=None): """Check if the loan is due soon. @@ -499,7 +530,7 @@ def is_loan_due_soon(self, tstamp=None): :returns: True if the loan is due soon """ date = tstamp or datetime.now(timezone.utc) - if due_soon_date := self.get('due_soon_date'): + if due_soon_date := self.get("due_soon_date"): return ciso8601.parse_datetime(due_soon_date) <= date return False @@ -509,10 +540,13 @@ def has_pending_transaction(self): :return: True if some open transaction is found, False otherwise """ if pid := self.pid: - return PatronTransactionsSearch() \ - .filter('term', loan__pid=pid) \ - .filter('term', status=PatronTransactionStatus.OPEN) \ - .count() > 0 + return ( + PatronTransactionsSearch() + .filter("term", loan__pid=pid) + .filter("term", status=PatronTransactionStatus.OPEN) + .count() + > 0 + ) return False @property @@ -524,12 +558,12 @@ def request_creation_date(self): @property def pid(self): """Shortcut for pid.""" - return self.get('pid') + return self.get("pid") @property def state(self): """Shortcut for state.""" - return self.get('state') + return self.get("state") @property def rank(self): @@ -537,7 +571,7 @@ def rank(self): Used by the sorted function """ - return self.get('rank') + return self.get("rank") @property def transaction_date(self): @@ -545,7 +579,7 @@ def transaction_date(self): Used by the sorted function """ - return self.get('transaction_date') + return self.get("transaction_date") @property def end_date(self): @@ -553,7 +587,7 @@ def end_date(self): Used by the sorted function """ - return self.get('end_date') + return self.get("end_date") @property def overdue_date(self): @@ -561,31 +595,34 @@ def overdue_date(self): if self.end_date: d_after = date_string_to_utc(self.end_date) + timedelta(days=1) return datetime( - year=d_after.year, month=d_after.month, day=d_after.day, - tzinfo=timezone.utc + year=d_after.year, + 
month=d_after.month, + day=d_after.day, + tzinfo=timezone.utc, ) @property def item_pid(self): """Returns the item pid value.""" - return self.get('item_pid', {}).get('value', None) + return self.get("item_pid", {}).get("value", None) @property def item_pid_object(self): """Returns the loan item_pid object.""" - return self.get('item_pid', {}) + return self.get("item_pid", {}) @property def item(self): """Return the `Item` related to this loan.""" from rero_ils.modules.items.api import Item + if pid := self.item_pid: return Item.get_record_by_pid(pid) @property def patron_pid(self): """Shortcut for patron pid.""" - return self.get('patron_pid') + return self.get("patron_pid") @property def patron(self): @@ -595,13 +632,13 @@ def patron(self): @property def document_pid(self): """Shortcut for document pid.""" - return self.get('document_pid') + return self.get("document_pid") @property def is_active(self): """Shortcut to check if the loan is active.""" - states = current_app.config['CIRCULATION_STATES_LOAN_ACTIVE'] - return self.get('state') in states + states = current_app.config["CIRCULATION_STATES_LOAN_ACTIVE"] + return self.get("state") in states @property def organisation_pid(self): @@ -609,8 +646,7 @@ def organisation_pid(self): if item := self.item: return item.organisation_pid raise IlsRecordError.PidDoesNotExist( - self.provider.pid_type, - 'organisation_pid:item_pid' + self.provider.pid_type, "organisation_pid:item_pid" ) @property @@ -622,7 +658,7 @@ def library_pid(self): def checkout_library_pid(self): """Get the checkout library pid.""" if checkout_location := Location.get_record_by_pid( - self.get('checkout_location_pid') + self.get("checkout_location_pid") ): return checkout_location.library_pid @@ -630,20 +666,18 @@ def checkout_date(self): """Get the checkout date for this loan.""" from .utils import get_loan_checkout_date + return get_loan_checkout_date(self.pid) @property def location_pid(self): """Get loan transaction_location PID or item owning location.""" - location_pid = self.get('transaction_location_pid') + location_pid = self.get("transaction_location_pid") if not location_pid and (item := self.item): return item.holding_location_pid elif location_pid: return location_pid - return IlsRecordError.PidDoesNotExist( - self.provider.pid_type, - 'library_pid' - ) + return IlsRecordError.PidDoesNotExist(self.provider.pid_type, "library_pid") @property def pickup_library(self): @@ -654,20 +688,21 @@ def pickup_library(self): @property def pickup_location_pid(self): """Get loan pickup_location PID.""" - return self.get('pickup_location_pid') + return self.get("pickup_location_pid") @property def transaction_location_pid(self): """Get loan transaction_location PID.""" - return self.get('transaction_location_pid') + return self.get("transaction_location_pid") @cached_property def transaction_library_pid(self): """Get loan transaction_library PID.""" - return Location \ - .get_record_by_pid(self.transaction_location_pid) \ - .get_library() \ - .get('pid') + return ( + Location.get_record_by_pid(self.transaction_location_pid) + .get_library() + .get("pid") + ) @property def get_overdue_fees(self): @@ -688,6 +723,7 @@ def get_overdue_fees(self): ] """ from .utils import get_circ_policy + fees = [] # if the loan isn't "late", no need to continue. # !!! 
there is a difference between "is_late" and "is_overdue" : @@ -705,7 +741,7 @@ def get_overdue_fees(self): # The circulation policy used will be related to the checkout location, # not the transaction location cipo = get_circ_policy(self, checkout_location=True) - overdue_settings = cipo.get('overdue_fees') + overdue_settings = cipo.get("overdue_fees") if overdue_settings is None: return fees @@ -716,9 +752,9 @@ def get_overdue_fees(self): # after the due date end_date = date_string_to_utc(self.end_date) + timedelta(days=1) total = 0 - max_overdue = overdue_settings.get('maximum_total_amount', math.inf) + max_overdue = overdue_settings.get("maximum_total_amount", math.inf) intervals = cipo.get_overdue_intervals() - interval_lower_bounds = [inter['from'] for inter in intervals] + interval_lower_bounds = [inter["from"] for inter in intervals] # For each overdue day, we need to find the correct fee_amount to # charge. In the loop below, `day_idx` is the day number from the @@ -727,21 +763,20 @@ def get_overdue_fees(self): for day_idx, day in enumerate(loan_lib.get_open_days(end_date), 1): # replace the hour with the start of the day :: an overdue starts # at the beginning of the day - day = day.replace( - hour=0, minute=0, second=0, microsecond=0, tzinfo=None) + day = day.replace(hour=0, minute=0, second=0, microsecond=0, tzinfo=None) day = loan_lib.get_timezone().localize(day) # a) find the correct interval. # b) check that the index found exists in intervals # c) check that the upper limit of this interval is greater than or equal # to the day index - interval_idx = bisect_right(interval_lower_bounds, day_idx)-1 + interval_idx = bisect_right(interval_lower_bounds, day_idx) - 1 if interval_idx == -1: continue - if day_idx > intervals[interval_idx]['to']: + if day_idx > intervals[interval_idx]["to"]: continue # d) add the corresponding fee_amount to the fees array. # e) if maximum_overdue is reached, exit the loop - fee_amount = intervals[interval_idx]['fee_amount'] + fee_amount = intervals[interval_idx]["fee_amount"] gap = round(max_overdue - total, 2) fee_amount = min(fee_amount, gap) total = round(math.fsum([total, fee_amount]), 2) @@ -752,12 +787,15 @@ def get_overdue_fees(self): def is_notified(self, notification_type=None, counter=0): """Check if a notification already exists for a loan by type.""" - trans_date = ciso8601.parse_datetime(self.get('transaction_date')) - query_count = NotificationsSearch() \ - .filter('term', context__loan__pid=self.pid) \ - .filter('term', notification_type=notification_type) \ - .filter('range', creation_date={'gt': trans_date}) \ - .source().count() + trans_date = ciso8601.parse_datetime(self.get("transaction_date")) + query_count = ( + NotificationsSearch() + .filter("term", context__loan__pid=self.pid) + .filter("term", notification_type=notification_type) + .filter("range", creation_date={"gt": trans_date}) + .source() + .count() + ) return query_count > counter def get_notification_candidates(self, trigger): @@ -773,11 +811,12 @@ def get_notification_candidates(self, trigger): the loan object, and the related notification type. """ from rero_ils.modules.items.api import Item + candidates = [] item = self.item # Get the list of request pids for the related item and exclude myself # from the result list. 
- requests = item.get_requests(output='pids') + requests = item.get_requests(output="pids") requests = [loan_pid for loan_pid in requests if loan_pid != self.pid] has_request = len(requests) > 0 @@ -786,10 +825,12 @@ def get_notification_candidates(self, trigger): # an AVAILABILITY and AT_DESK notifications. AVAILABILITY is sent to # the patron, AT_DESK is sent to the transaction library. if self.state == LoanState.ITEM_AT_DESK: - candidates.extend(( - (self, NotificationType.AT_DESK), - (self, NotificationType.AVAILABILITY) - )) + candidates.extend( + ( + (self, NotificationType.AT_DESK), + (self, NotificationType.AVAILABILITY), + ) + ) # REQUEST & RECALL NOTIFICATION # When a request is created on an item, the system creates a 'pending' @@ -812,8 +853,11 @@ def get_notification_candidates(self, trigger): # RECALL NOTIFICATION AT CHECKOUT # When an item is checked out and this item has a pending request for # another patron, a RECALL notification could be created. - if trigger == LoanAction.CHECKOUT and has_request \ - and not self.is_notified(NotificationType.RECALL): + if ( + trigger == LoanAction.CHECKOUT + and has_request + and not self.is_notified(NotificationType.RECALL) + ): candidates.append((self, NotificationType.RECALL)) # TRANSIT @@ -821,8 +865,7 @@ def get_notification_candidates(self, trigger): # related request, we could create a TRANSIT_NOTICE notification to # notify the transaction library to return the item to the owning # library. - if self.state == LoanState.ITEM_IN_TRANSIT_TO_HOUSE \ - and not has_request: + if self.state == LoanState.ITEM_IN_TRANSIT_TO_HOUSE and not has_request: candidates.append((self, NotificationType.TRANSIT_NOTICE)) # BOOKING @@ -833,7 +876,7 @@ def get_notification_candidates(self, trigger): candidates.append((self, NotificationType.BOOKING)) # AUTO_RENEWAL - if trigger == 'extend' and self.get('auto_extend'): + if trigger == "extend" and self.get("auto_extend"): candidates.append((self, NotificationType.AUTO_EXTEND)) return candidates @@ -848,6 +891,7 @@ def create_notification(self, trigger=None, _type=None, counter=0): :return: the list of created notifications """ from .utils import get_circ_policy + types = [(self, t) for t in [_type] if t] notifications = [] for loan, n_type in types or self.get_notification_candidates(trigger): @@ -859,11 +903,9 @@ def create_notification(self, trigger=None, _type=None, counter=0): dispatch = n_type in NotificationType.INTERNAL_NOTIFICATIONS record = { - 'creation_date': datetime.now(timezone.utc).isoformat(), - 'notification_type': n_type, - 'context': { - 'loan': {'$ref': get_ref_for_pid('loans', loan.pid)} - } + "creation_date": datetime.now(timezone.utc).isoformat(), + "notification_type": n_type, + "context": {"loan": {"$ref": get_ref_for_pid("loans", loan.pid)}}, } # overdue + due_soon if n_type in NotificationType.REMINDERS_NOTIFICATIONS: @@ -881,14 +923,15 @@ def create_notification(self, trigger=None, _type=None, counter=0): reminder_type = OVERDUE_REMINDER_TYPE # Reminder does not exist on the circulation policy. if cipo.get_reminder(reminder_type, counter): - record['context']['reminder_counter'] = counter + record["context"]["reminder_counter"] = counter else: create = False # create the notification and enqueue it. 
if create: if notification := self._create_notification_resource( - record, dispatch=dispatch): + record, dispatch=dispatch + ): notifications.append(notification) return notifications @@ -900,11 +943,10 @@ def _create_notification_resource(cls, record, dispatch=False): :param dispatch: if True, send the notification to the dispatcher. :return: the created `Notification` resource. """ - notification = Notification.create( - data=record, dbcommit=True, reindex=True) + notification = Notification.create(data=record, dbcommit=True, reindex=True) if dispatch and notification: NotificationDispatcher.dispatch_notifications( - notification_pids=[notification.get('pid')] + notification_pids=[notification.get("pid")] ) return notification @@ -927,21 +969,28 @@ def get_anonymized_candidates(cls): three_month_ago = datetime.now() - relativedelta(months=3) six_month_ago = datetime.now() - relativedelta(months=6) - patron_query = PatronsSearch().filter('bool', must_not=[ - Q('exists', field='keep_history'), - Q('term', keep_history=True) - ]) - anonym_patron_pids = [h.pid for h in patron_query.source('pid').scan()] - - query = LoansSearch() \ - .filter('terms', state=LoanState.CONCLUDED) \ - .filter('term', to_anonymize=False) \ - .filter('bool', should=[ - Q('range', transaction_date={'lt': six_month_ago}), - (Q('terms', patron_pid=anonym_patron_pids) & - Q('range', transaction_date={'lt': three_month_ago})) - ]) \ + patron_query = PatronsSearch().filter( + "bool", + must_not=[Q("exists", field="keep_history"), Q("term", keep_history=True)], + ) + anonym_patron_pids = [h.pid for h in patron_query.source("pid").scan()] + + query = ( + LoansSearch() + .filter("terms", state=LoanState.CONCLUDED) + .filter("term", to_anonymize=False) + .filter( + "bool", + should=[ + Q("range", transaction_date={"lt": six_month_ago}), + ( + Q("terms", patron_pid=anonym_patron_pids) + & Q("range", transaction_date={"lt": three_month_ago}) + ), + ], + ) .source(False) + ) for hit in list(query.scan()): yield Loan.get_record(hit.meta.id) @@ -950,15 +999,17 @@ def is_concluded(self): :return: True if the loan is concluded, False otherwise """ - return self.get('state') in LoanState.CONCLUDED and \ - not self.has_pending_transaction() + return ( + self.get("state") in LoanState.CONCLUDED + and not self.has_pending_transaction() + ) def age(self): """Return the age of a loan in days. :return: the number of days since last transaction date. """ - if value := self.get('transaction_date'): + if value := self.get("transaction_date"): trans_date = ciso8601.parse_datetime(value) loan_age = datetime.utcnow() - trans_date.replace(tzinfo=None) return loan_age.days @@ -994,8 +1045,7 @@ def can_anonymize(cls, loan_data=None, patron=None): # Limit could be configured by the 'RERO_ILS_ANONYMISATION_TIME_LIMIT' # key in `config.py`. max_limit = current_app.config.get( - 'RERO_ILS_ANONYMISATION_MAX_TIME_LIMIT', - math.inf + "RERO_ILS_ANONYMISATION_MAX_TIME_LIMIT", math.inf ) loan_age = loan.age() if loan_age > max_limit: @@ -1005,22 +1055,23 @@ def can_anonymize(cls, loan_data=None, patron=None): # Circulation management and/or library manager needs to keep loan # information for a delay (in days) after the concluded date anyway. 
         min_limit = current_app.config.get(
-            'RERO_ILS_ANONYMISATION_MIN_TIME_LIMIT',
-            -math.inf
+            "RERO_ILS_ANONYMISATION_MIN_TIME_LIMIT", -math.inf
         )
         if loan_age < (min_limit + 1):
             return False
 
         # CHECK #5 : Check about patron preferences
         # A patron can specify whether to keep their transaction history
-        patron_pid = loan_data.get('patron_pid')
+        patron_pid = loan_data.get("patron_pid")
         patron = patron or Patron.get_record_by_pid(patron_pid)
         keep_history = True
         if patron:
-            keep_history = patron.user.user_profile.get('keep_history', True)
+            keep_history = patron.user.user_profile.get("keep_history", True)
         else:
-            msg = f'Can not anonymize loan: {loan_data.get("pid")} ' \
-                  f'no patron: {loan_data.get("patron_pid")}'
+            msg = (
+                f'Can not anonymize loan: {loan_data.get("pid")} '
+                f'no patron: {loan_data.get("patron_pid")}'
+            )
             current_app.logger.warning(msg)
         return not keep_history
@@ -1032,19 +1083,19 @@ def action_required_params(action=None):
     :return: the list of required parameters that the `Loan` must define
         to validate the action.
     """
-    shared_params = ['transaction_location_pid', 'transaction_user_pid']
+    shared_params = ["transaction_location_pid", "transaction_user_pid"]
     params = {
-        'cancel_loan': ['pid'],
-        'validate_request': ['pid'],
-        LoanAction.REQUEST: ['item_pid', 'pickup_location_pid', 'patron_pid'],
-        LoanAction.CHECKIN: ['pid'],
-        LoanAction.EXTEND: ['item_pid'],
+        "cancel_loan": ["pid"],
+        "validate_request": ["pid"],
+        LoanAction.REQUEST: ["item_pid", "pickup_location_pid", "patron_pid"],
+        LoanAction.CHECKIN: ["pid"],
+        LoanAction.EXTEND: ["item_pid"],
         LoanAction.CHECKOUT: [
-            'item_pid',
-            'patron_pid',
-            'transaction_location_pid',
-            'transaction_user_pid',
-        ]
+            "item_pid",
+            "patron_pid",
+            "transaction_location_pid",
+            "transaction_user_pid",
+        ],
     }
     return params.get(action, []) + shared_params
@@ -1060,10 +1111,9 @@ def get_request_by_item_pid_by_patron_pid(item_pid, patron_pid):
         LoanState.PENDING,
         LoanState.ITEM_AT_DESK,
         LoanState.ITEM_IN_TRANSIT_FOR_PICKUP,
-        LoanState.ITEM_IN_TRANSIT_TO_HOUSE
+        LoanState.ITEM_IN_TRANSIT_TO_HOUSE,
     ]
-    return get_loans_by_item_pid_by_patron_pid(
-        item_pid, patron_pid, filter_states)
+    return get_loans_by_item_pid_by_patron_pid(item_pid, patron_pid, filter_states)
 
 
 def get_any_loans_by_item_pid_by_patron_pid(item_pid, patron_pid):
@@ -1077,14 +1127,12 @@ def get_any_loans_by_item_pid_by_patron_pid(item_pid, patron_pid):
         LoanState.PENDING,
         LoanState.ITEM_AT_DESK,
         LoanState.ITEM_IN_TRANSIT_FOR_PICKUP,
-        LoanState.ITEM_ON_LOAN
+        LoanState.ITEM_ON_LOAN,
     ]
-    return get_loans_by_item_pid_by_patron_pid(
-        item_pid, patron_pid, filter_states)
+    return get_loans_by_item_pid_by_patron_pid(item_pid, patron_pid, filter_states)
 
 
-def get_loans_by_item_pid_by_patron_pid(item_pid, patron_pid,
-                                        filter_states=None):
+def get_loans_by_item_pid_by_patron_pid(item_pid, patron_pid, filter_states=None):
     """Get loans for item, patron according to the given filter_states.
 
     :param item_pid: The item pid.
@@ -1098,7 +1146,7 @@ def get_loans_by_item_pid_by_patron_pid(item_pid, patron_pid, filter_states=filter_states, ) search_result = search.execute() - return search_result.hits.hits[0]['_source'] if search_result.hits else {} + return search_result.hits.hits[0]["_source"] if search_result.hits else {} def get_loans_stats_by_patron_pid(patron_pid): @@ -1107,19 +1155,17 @@ def get_loans_stats_by_patron_pid(patron_pid): :param patron_pid: The patron pid :return: a dict with loans state as key, number of loans as value """ - agg = A('terms', field='state') + agg = A("terms", field="state") search = search_by_patron_item_or_document(patron_pid=patron_pid) - search.aggs.bucket('state', agg) + search.aggs.bucket("state", agg) search = search[:0] results = search.execute() return { - result.key: result.doc_count - for result in results.aggregations.state.buckets + result.key: result.doc_count for result in results.aggregations.state.buckets } -def get_loans_by_patron_pid(patron_pid, filter_states=None, - to_anonymize=False): +def get_loans_by_patron_pid(patron_pid, filter_states=None, to_anonymize=False): """Search all loans for patron to the given filter_states. :param to_anonymize: filter by field to_anonymize. @@ -1127,29 +1173,33 @@ def get_loans_by_patron_pid(patron_pid, filter_states=None, :param filter_states: loan states to use as a filter. :return: loans for given patron. """ - search = search_by_patron_item_or_document( - patron_pid=patron_pid, - filter_states=filter_states) \ - .params(preserve_order=True) \ - .sort({'_created': {'order': 'asc'}}) \ + search = ( + search_by_patron_item_or_document( + patron_pid=patron_pid, filter_states=filter_states + ) + .params(preserve_order=True) + .sort({"_created": {"order": "asc"}}) .source(False) - search = search.filter('term', to_anonymize=to_anonymize) + ) + search = search.filter("term", to_anonymize=to_anonymize) for loan in search.scan(): yield Loan.get_record(loan.meta.id) def get_last_transaction_loc_for_item(item_pid): """Return last transaction location for an item.""" - results = current_circulation.loan_search_cls() \ - .filter('term', item_pid=item_pid) \ - .params(preserve_order=True) \ - .exclude('terms', state=[ - LoanState.PENDING, LoanState.CREATED]) \ - .sort({'_created': {'order': 'desc'}}) \ - .source(False).scan() + results = ( + current_circulation.loan_search_cls() + .filter("term", item_pid=item_pid) + .params(preserve_order=True) + .exclude("terms", state=[LoanState.PENDING, LoanState.CREATED]) + .sort({"_created": {"order": "desc"}}) + .source(False) + .scan() + ) try: loan_uuid = next(results).meta.id - return Loan.get_record(loan_uuid).get('transaction_location_pid') + return Loan.get_record(loan_uuid).get("transaction_location_pid") except StopIteration: return None @@ -1162,17 +1212,15 @@ def get_loans_count_by_library_for_patron_pid(patron_pid, filter_states=None): :return: a dict with library_pid as key, number of loans as value """ filter_states = filter_states or [] # prevent mutable argument warning - agg = A('terms', field='library_pid') + agg = A("terms", field="library_pid") search = search_by_patron_item_or_document( - patron_pid=patron_pid, - filter_states=filter_states + patron_pid=patron_pid, filter_states=filter_states ) - search.aggs.bucket('library', agg) + search.aggs.bucket("library", agg) search = search[:0] results = search.execute() return { - result.key: result.doc_count - for result in results.aggregations.library.buckets + result.key: result.doc_count for result in 
results.aggregations.library.buckets
     }
 
 
@@ -1182,10 +1230,12 @@ def get_on_loan_loans_for_library(library_pid):
 
     :param library_pid: the library pid.
     :returns: a generator of `Loan` records.
     """
-    query = current_circulation.loan_search_cls() \
-        .filter('term', library_pid=library_pid) \
-        .filter('term', state=LoanState.ITEM_ON_LOAN) \
+    query = (
+        current_circulation.loan_search_cls()
+        .filter("term", library_pid=library_pid)
+        .filter("term", state=LoanState.ITEM_ON_LOAN)
         .source(False)
+    )
     for id_ in [hit.meta.id for hit in query.scan()]:
         yield Loan.get_record(id_)
 
@@ -1196,13 +1246,16 @@ def get_due_soon_loans(tstamp=None):
 
     :param tstamp: a limit timestamp. Default is `datetime.now()`.
     """
     end_date = tstamp or datetime.now(timezone.utc)
-    end_date = end_date.strftime('%Y-%m-%dT%H:%M:%S.000Z')
-    query = current_circulation.loan_search_cls() \
-        .filter('term', state=LoanState.ITEM_ON_LOAN) \
-        .filter('range', due_soon_date={'lte': end_date}) \
-        .params(preserve_order=True) \
-        .sort({'_created': {'order': 'asc'}}) \
-        .source(False).scan()
+    end_date = end_date.strftime("%Y-%m-%dT%H:%M:%S.000Z")
+    query = (
+        current_circulation.loan_search_cls()
+        .filter("term", state=LoanState.ITEM_ON_LOAN)
+        .filter("range", due_soon_date={"lte": end_date})
+        .params(preserve_order=True)
+        .sort({"_created": {"order": "asc"}})
+        .source(False)
+        .scan()
+    )
     for hit in query:
         yield Loan.get_record(hit.meta.id)
 
@@ -1213,11 +1266,14 @@ def get_expired_request(tstamp=None):
 
     :param tstamp: a limit timestamp. Default is `datetime.now()`.
     """
     end_date = tstamp or datetime.now(timezone.utc)
-    end_date = end_date.strftime('%Y-%m-%dT%H:%M:%S.000Z')
-    query = current_circulation.loan_search_cls() \
-        .filter('term', state=LoanState.ITEM_AT_DESK) \
-        .filter('range', request_expire_date={'lte': end_date}) \
-        .source(False).scan()
+    end_date = end_date.strftime("%Y-%m-%dT%H:%M:%S.000Z")
+    query = (
+        current_circulation.loan_search_cls()
+        .filter("term", state=LoanState.ITEM_AT_DESK)
+        .filter("range", request_expire_date={"lte": end_date})
+        .source(False)
+        .scan()
+    )
     for hit in query:
         yield Loan.get_record(hit.meta.id)
 
@@ -1230,17 +1286,21 @@ def get_overdue_loan_pids(patron_pid=None, tstamp=None):
         Default to `datetime.now()`.
     :return: a list of loan pids
     """
-    until_date = tstamp or datetime.now(timezone.utc)
-    until_date = until_date.strftime('%Y-%m-%dT%H:%M:%S.000Z')
-    query = current_circulation.loan_search_cls() \
-        .filter('term', state=LoanState.ITEM_ON_LOAN) \
-        .filter('range', end_date={'lte': until_date})
+    end_date = tstamp or datetime.now(timezone.utc)
+    end_date = end_date.strftime("%Y-%m-%dT%H:%M:%S.000Z")
+    query = (
+        current_circulation.loan_search_cls()
+        .filter("term", state=LoanState.ITEM_ON_LOAN)
+        .filter("range", end_date={"lte": end_date})
+    )
     if patron_pid:
-        query = query.filter('term', patron_pid=patron_pid)
-    results = query\
-        .params(preserve_order=True) \
-        .sort({'_created': {'order': 'asc'}}) \
-        .source(['pid']).scan()
+        query = query.filter("term", patron_pid=patron_pid)
+    results = (
+        query.params(preserve_order=True)
+        .sort({"_created": {"order": "asc"}})
+        .source(["pid"])
+        .scan()
+    )
     # We will return all pids here to prevent the following error during long
     # operations:
     # elasticsearch.helpers.errors.ScanError:
@@ -1266,14 +1326,16 @@ def get_non_anonymized_loans(patron=None, org_pid=None):
     :param org_pid: optional parameter to filter by organisation.
     :return: loans.
""" - search = current_circulation.loan_search_cls() \ - .filter('term', to_anonymize=False) \ - .filter('terms', state=[LoanState.CANCELLED, LoanState.ITEM_RETURNED])\ + search = ( + current_circulation.loan_search_cls() + .filter("term", to_anonymize=False) + .filter("terms", state=[LoanState.CANCELLED, LoanState.ITEM_RETURNED]) .source(False) + ) if patron: - search = search.filter('term', patron_pid=patron.pid) + search = search.filter("term", patron_pid=patron.pid) if org_pid: - search = search.filter('term', organisation__pid=org_pid) + search = search.filter("term", organisation__pid=org_pid) for record in search.scan(): yield Loan.get_record(record.meta.id) @@ -1305,4 +1367,4 @@ def bulk_index(self, record_id_iterator): :param record_id_iterator: Iterator yielding record UUIDs. """ - super().bulk_index(record_id_iterator, doc_type='loanid') + super().bulk_index(record_id_iterator, doc_type="loanid") diff --git a/rero_ils/modules/loans/api_views.py b/rero_ils/modules/loans/api_views.py index b210a51306..0d0632671b 100644 --- a/rero_ils/modules/loans/api_views.py +++ b/rero_ils/modules/loans/api_views.py @@ -23,19 +23,17 @@ from rero_ils.modules.decorators import check_logged_as_librarian from rero_ils.modules.items.api import Item from rero_ils.modules.items.models import ItemCirculationAction -from rero_ils.modules.items.views.api_views import \ - check_logged_user_authentication, jsonify_error +from rero_ils.modules.items.views.api_views import ( + check_logged_user_authentication, + jsonify_error, +) from rero_ils.modules.loans.api import Loan from rero_ils.modules.loans.utils import get_circ_policy, sum_for_fees -api_blueprint = Blueprint( - 'api_loan', - __name__, - url_prefix='/loan' -) +api_blueprint = Blueprint("api_loan", __name__, url_prefix="/loan") -@api_blueprint.route('//circulation_policy', methods=['GET']) +@api_blueprint.route("//circulation_policy", methods=["GET"]) @check_logged_as_librarian def dump_loan_current_circulation_policy(loan_pid): """Search and dump the current circulation policy related to a loan.""" @@ -44,44 +42,32 @@ def dump_loan_current_circulation_policy(loan_pid): # changed (Patron.patron_type, Item.item_type, ...) 
    loan = Loan.get_record_by_pid(loan_pid)
     if not loan:
-        abort(404, 'Loan not found')
+        abort(404, "Loan not found")
     return jsonify(get_circ_policy(loan))
 
 
-@api_blueprint.route('/<loan_pid>/overdue/preview', methods=['GET'])
+@api_blueprint.route("/<loan_pid>/overdue/preview", methods=["GET"])
 @login_required
 def preview_loan_overdue(loan_pid):
     """HTTP GET request for overdue preview about a loan."""
     loan = Loan.get_record_by_pid(loan_pid)
     if not loan:
-        abort(404, 'Loan not found')
+        abort(404, "Loan not found")
     fees = loan.get_overdue_fees
     fees = [(fee[0], fee[1].isoformat()) for fee in fees]  # format date
-    return jsonify({
-        'total': sum_for_fees(fees),
-        'steps': fees
-    })
+    return jsonify({"total": sum_for_fees(fees), "steps": fees})
 
 
-@api_blueprint.route('/<loan_pid>/can_extend', methods=['GET'])
+@api_blueprint.route("/<loan_pid>/can_extend", methods=["GET"])
 @check_logged_user_authentication
 @jsonify_error
 def can_extend(loan_pid):
     """Check whether the loan can be extended."""
     loan = Loan.get_record_by_pid(loan_pid)
-    item_pid = loan.get('item_pid', {}).get('value')
-    can_extend = {
-        'can': False,
-        'reasons': []
-    }
+    item_pid = loan.get("item_pid", {}).get("value")
+    can_extend = {"can": False, "reasons": []}
     if item_pid:
         item = Item.get_record_by_pid(item_pid)
-        can, reasons = item.can(
-            ItemCirculationAction.EXTEND,
-            loan=loan
-        )
-        can_extend = {
-            'can': can,
-            'reasons': reasons
-        }
+        can, reasons = item.can(ItemCirculationAction.EXTEND, loan=loan)
+        can_extend = {"can": can, "reasons": reasons}
     return jsonify(can_extend)
diff --git a/rero_ils/modules/loans/cli.py b/rero_ils/modules/loans/cli.py
index e050154430..f03924c23e 100644
--- a/rero_ils/modules/loans/cli.py
+++ b/rero_ils/modules/loans/cli.py
@@ -44,8 +44,13 @@
 from ..patron_types.api import PatronType
 from ..patrons.api import Patron, PatronsSearch
 from ..users.models import UserRole
-from ..utils import JsonWriter, extracted_data_from_ref, get_ref_for_pid, \
-    get_schema_for_resource, read_json_record
+from ..utils import (
+    JsonWriter,
+    extracted_data_from_ref,
+    get_ref_for_pid,
+    get_schema_for_resource,
+    read_json_record,
+)
 
 
 def check_missing_fields(transaction, transaction_type):
@@ -54,17 +59,38 @@ def check_missing_fields(transaction, transaction_type):
 
     transaction: the json transaction record.
     transaction_type: type of transaction.
""" - if transaction_type == 'checkout': - fields = ['item_pid', 'patron_pid', 'end_date', 'transaction_date', - 'transaction_location_pid', 'transaction_user_pid', - 'organisation', 'start_date'] - elif transaction_type == 'request': - fields = ['item_pid', 'patron_pid', 'organisation', 'transaction_date', - 'pickup_location_pid', 'transaction_location_pid', - 'transaction_user_pid', 'request_expire_date'] - elif transaction_type == 'fine': - fields = ['note', 'type', 'patron', 'status', 'organisation', - 'total_amount', 'creation_date'] + if transaction_type == "checkout": + fields = [ + "item_pid", + "patron_pid", + "end_date", + "transaction_date", + "transaction_location_pid", + "transaction_user_pid", + "organisation", + "start_date", + ] + elif transaction_type == "request": + fields = [ + "item_pid", + "patron_pid", + "organisation", + "transaction_date", + "pickup_location_pid", + "transaction_location_pid", + "transaction_user_pid", + "request_expire_date", + ] + elif transaction_type == "fine": + fields = [ + "note", + "type", + "patron", + "status", + "organisation", + "total_amount", + "creation_date", + ] return [field for field in fields if field not in transaction] @@ -76,29 +102,30 @@ def build_loan_record(transaction, transaction_type, item): transaction_type: type of transaction. item: the item record. """ - if transaction_type == 'checkout': - transaction.pop('item_pid', None) - transaction.pop('organisation', None) - elif transaction_type == 'request': - transaction['state'] = 'PENDING' - transaction['trigger'] = 'request' - transaction['item_pid'] = {'value': transaction.get('item_pid'), - 'type': 'item'} - transaction['document_pid'] = item.document_pid - transaction['to_anonymize'] = False - - -@click.command('load_virtua_transactions') -@click.option('-l', '--lazy', 'lazy', is_flag=True, default=False) -@click.option('-e', '--save_errors', 'save_errors', type=click.File('w')) -@click.option('-t', '--transaction_type', 'transaction_type', is_flag=False, - default='checkout') -@click.option('-v', '--verbose', 'verbose', is_flag=True, default=False) -@click.option('-d', '--debug', 'debug', is_flag=True, default=False) -@click.argument('infile', type=click.File('r')) + if transaction_type == "checkout": + transaction.pop("item_pid", None) + transaction.pop("organisation", None) + elif transaction_type == "request": + transaction["state"] = "PENDING" + transaction["trigger"] = "request" + transaction["item_pid"] = {"value": transaction.get("item_pid"), "type": "item"} + transaction["document_pid"] = item.document_pid + transaction["to_anonymize"] = False + + +@click.command("load_virtua_transactions") +@click.option("-l", "--lazy", "lazy", is_flag=True, default=False) +@click.option("-e", "--save_errors", "save_errors", type=click.File("w")) +@click.option( + "-t", "--transaction_type", "transaction_type", is_flag=False, default="checkout" +) +@click.option("-v", "--verbose", "verbose", is_flag=True, default=False) +@click.option("-d", "--debug", "debug", is_flag=True, default=False) +@click.argument("infile", type=click.File("r")) @with_appcontext def load_virtua_transactions( - infile, lazy, save_errors, transaction_type, verbose, debug): + infile, lazy, save_errors, transaction_type, verbose, debug +): """Load Virtua circulation transactions. infile: Json Virtua transactions file. @@ -110,123 +137,103 @@ def load_virtua_transactions( # deals with loading data from other ILS into REROILS. 
if save_errors: name, ext = os.path.splitext(infile.name) - err_file_name = f'{name}_errors{ext}' + err_file_name = f"{name}_errors{ext}" error_file = JsonWriter(err_file_name) if lazy: records = read_json_record(infile) else: file_data = json.load(infile) - click.secho( - f'Loading Virtua transactions of type {transaction_type}', - fg='green' - ) + click.secho(f"Loading Virtua transactions of type {transaction_type}", fg="green") for counter, transaction in enumerate(file_data, 1): - missing_fields = check_missing_fields(transaction, transaction_type) - if missing_fields: + if missing_fields := check_missing_fields(transaction, transaction_type): click.secho( - f'\ntransaction # {counter} missing fields: {missing_fields}', - fg='red') + f"\ntransaction # {counter} missing fields: {missing_fields}", fg="red" + ) if save_errors: error_file.write(transaction) continue - if transaction_type == 'fine': - patron_pid = extracted_data_from_ref(transaction.get('patron')) + if transaction_type == "fine": + patron_pid = extracted_data_from_ref(transaction.get("patron")) patron = Patron.get_record_by_pid(patron_pid) if not patron: - click.secho( - f'\ntransaction # {counter} patron not in db', - fg='red' - ) + click.secho(f"\ntransaction # {counter} patron not in db", fg="red") if save_errors: error_file.write(transaction) continue try: - PatronTransaction.create( - transaction, dbcommit=True, reindex=True) - click.secho( - f'\ntransaction # {counter} created', - fg='green' - ) + PatronTransaction.create(transaction, dbcommit=True, reindex=True) + click.secho(f"\ntransaction # {counter} created", fg="green") except Exception as error: - click.secho( - f'transaction# {counter} failed creation {error}', - fg='red' - ) + click.secho(f"transaction# {counter} failed creation {error}", fg="red") if save_errors: error_file.write(transaction) - elif transaction_type in ['checkout', 'request']: - item = Item.get_record_by_pid(transaction.get('item_pid')) - patron = Patron.get_record_by_pid(transaction.get('patron_pid')) + elif transaction_type in ["checkout", "request"]: + item = Item.get_record_by_pid(transaction.get("item_pid")) + patron = Patron.get_record_by_pid(transaction.get("patron_pid")) if not (item and patron): - click.secho( - f'\ntransaction# {counter} item/patron not in db', - fg='red' - ) + click.secho(f"\ntransaction# {counter} item/patron not in db", fg="red") if save_errors: error_file.write(transaction) continue else: build_loan_record(transaction, transaction_type, item) try: - if transaction_type == 'request': + if transaction_type == "request": Loan.create(transaction, dbcommit=True, reindex=True) - elif transaction_type == 'checkout': + elif transaction_type == "checkout": item.checkout(**transaction) - click.secho( - f'\ntransaction # {counter} created', - fg='green' - ) + click.secho(f"\ntransaction # {counter} created", fg="green") except Exception as error: - click.secho( - f'transaction# {counter} failed creation {error}', - fg='red' - ) + click.secho(f"transaction# {counter} failed creation {error}", fg="red") if save_errors: error_file.write(transaction) -@click.command('create_loans') -@click.option('-v', '--verbose', 'verbose', is_flag=True, default=False) -@click.option('-d', '--debug', 'debug', is_flag=True, default=False) -@click.argument('infile', type=click.File('r')) +@click.command("create_loans") +@click.option("-v", "--verbose", "verbose", is_flag=True, default=False) +@click.option("-d", "--debug", "debug", is_flag=True, default=False) +@click.argument("infile", 
type=click.File("r")) @with_appcontext def create_loans(infile, verbose, debug): """Create circulation transactions. infile: Json transactions file """ - click.secho('Create circulation transactions:', fg='green') + click.secho("Create circulation transactions:", fg="green") data = json.load(infile) errors_count = {} to_block = [] for patron_data in data: - barcode = patron_data.get('barcode') + barcode = patron_data.get("barcode") if barcode is None: - click.secho('Patron barcode is missing!', fg='red') + click.secho("Patron barcode is missing!", fg="red") else: - click.echo(f'Patron: {barcode}') - loans = patron_data.get('loans', {}) - requests = patron_data.get('requests', {}) - if patron_data.get('blocked', False): + click.echo(f"Patron: {barcode}") + loans = patron_data.get("loans", {}) + requests = patron_data.get("requests", {}) + if patron_data.get("blocked", False): to_block.append(patron_data) - patron_type_pid = Patron\ - .get_patron_by_barcode(barcode=barcode)\ - .patron_type_pid + patron_type_pid = Patron.get_patron_by_barcode( + barcode=barcode + ).patron_type_pid loanable_items = get_loanable_items(patron_type_pid) if verbose: - loanable_items_count = len( - list(get_loanable_items(patron_type_pid)) - ) - msg = f'\t{patron_data} loanable_items: {loanable_items_count}' + loanable_items_count = len(list(get_loanable_items(patron_type_pid))) + msg = f"\t{patron_data} loanable_items: {loanable_items_count}" click.echo(msg) - loan_types = ['active', 'overdue_active', 'overdue_paid', - 'extended', 'requested_by_others'] + loan_types = [ + "active", + "overdue_active", + "overdue_paid", + "extended", + "requested_by_others", + ] for loan_type in loan_types: for _ in range(loans.get(loan_type, 0)): item_barcode, error = create_loan( @@ -234,52 +241,44 @@ def create_loans(infile, verbose, debug): transaction_type=loan_type, loanable_items=loanable_items, verbose=verbose, - debug=debug + debug=debug, ) if error: errors_count.setdefault(loan_type, 0) errors_count[loan_type] += 1 - click.echo( - f'\titem {item_barcode}: {loan_type}') - for request_type in ['requests', 'rank_1', 'rank_2']: + click.echo(f"\titem {item_barcode}: {loan_type}") + for request_type in ["requests", "rank_1", "rank_2"]: for _ in range(requests.get(request_type, 0)): item_barcode, error = create_loan( barcode=barcode, transaction_type=request_type, loanable_items=loanable_items, verbose=verbose, - debug=debug + debug=debug, ) if error: errors_count.setdefault(request_type, 0) errors_count[request_type] += 1 - click.echo( - f'\titem {item_barcode}: {request_type}') + click.echo(f"\titem {item_barcode}: {request_type}") # create due soon notifications, overdue notifications are auto created. 
# block given patron for patron_data in to_block: - barcode = patron_data.get('barcode') + barcode = patron_data.get("barcode") patron = Patron.get_patron_by_barcode(barcode=barcode) - patron['patron']['blocked'] = True - patron['patron']['blocked_note'] = patron_data.get('blocked', "") - patron.update( - patron, - dbcommit=True, - reindex=True - ) + patron["patron"]["blocked"] = True + patron["patron"]["blocked_note"] = patron_data.get("blocked", "") + patron.update(patron, dbcommit=True, reindex=True) for transaction_type, count in errors_count.items(): - click.secho(f'Errors {transaction_type}: {count}', fg='red') + click.secho(f"Errors {transaction_type}: {count}", fg="red") result = create_notifications( - types=[NotificationType.DUE_SOON, NotificationType.OVERDUE], - verbose=verbose + types=[NotificationType.DUE_SOON, NotificationType.OVERDUE], verbose=verbose ) for notification, count in result.items(): - click.secho(f'Notification {notification}: {count}', fg='green') + click.secho(f"Notification {notification}: {count}", fg="green") -def create_loan(barcode, transaction_type, loanable_items, verbose=False, - debug=False): +def create_loan(barcode, transaction_type, loanable_items, verbose=False, debug=False): """Create loans transactions. :param barcode: patron barcode @@ -295,87 +294,60 @@ def create_loan(barcode, transaction_type, loanable_items, verbose=False, item = next(loanable_items) patron = Patron.get_patron_by_barcode(barcode=barcode) transaction_date = datetime.now(timezone.utc).isoformat() - user_pid, user_location = \ - get_random_librarian_and_transaction_location(patron) + user_pid, user_location = get_random_librarian_and_transaction_location(patron) item.checkout( patron_pid=patron.pid, transaction_user_pid=user_pid, transaction_location_pid=user_location, transaction_date=transaction_date, - document_pid=extracted_data_from_ref(item.get('document')), + document_pid=extracted_data_from_ref(item.get("document")), item_pid=item.pid, ) loan = get_loan_for_item(item_pid_to_object(item.pid)) - loan_pid = loan.get('pid') + loan_pid = loan.get("pid") loan = Loan.get_record_by_pid(loan_pid) - if transaction_type == 'overdue_active': + if transaction_type == "overdue_active": end_date = datetime.now(timezone.utc) - timedelta(days=2) - loan['end_date'] = end_date.isoformat() - loan.update( - data=loan, - dbcommit=True, - reindex=True - ) - notifications = loan.create_notification( - _type=NotificationType.DUE_SOON) - notification_pids.extend(notif['pid'] for notif in notifications) + loan["end_date"] = end_date.isoformat() + loan.update(data=loan, dbcommit=True, reindex=True) + notifications = loan.create_notification(_type=NotificationType.DUE_SOON) + notification_pids.extend(notif["pid"] for notif in notifications) end_date = datetime.now(timezone.utc) - timedelta(days=70) - loan['end_date'] = end_date.isoformat() - loan.update( - data=loan, - dbcommit=True, - reindex=True - ) - notifications = loan.create_notification( - _type=NotificationType.OVERDUE) - notification_pids.extend(notif['pid'] for notif in notifications) - elif transaction_type == 'overdue_paid': + loan["end_date"] = end_date.isoformat() + loan.update(data=loan, dbcommit=True, reindex=True) + notifications = loan.create_notification(_type=NotificationType.OVERDUE) + notification_pids.extend(notif["pid"] for notif in notifications) + elif transaction_type == "overdue_paid": end_date = datetime.now(timezone.utc) - timedelta(days=2) - loan['end_date'] = end_date.isoformat() - loan.update( - data=loan, - 
dbcommit=True, - reindex=True - ) - notifications = loan.create_notification( - _type=NotificationType.DUE_SOON) + loan["end_date"] = end_date.isoformat() + loan.update(data=loan, dbcommit=True, reindex=True) + notifications = loan.create_notification(_type=NotificationType.DUE_SOON) for notif in notifications: - notification_pids.append(notif['pid']) + notification_pids.append(notif["pid"]) end_date = datetime.now(timezone.utc) - timedelta(days=70) - loan['end_date'] = end_date.isoformat() - loan.update( - data=loan, - dbcommit=True, - reindex=True - ) - notifications = loan.create_notification( - _type=NotificationType.OVERDUE) + loan["end_date"] = end_date.isoformat() + loan.update(data=loan, dbcommit=True, reindex=True) + notifications = loan.create_notification(_type=NotificationType.OVERDUE) for notif in notifications: - notification_pids.append(notif['pid']) + notification_pids.append(notif["pid"]) patron_transaction = next(notif.patron_transactions) user = get_random_librarian(patron).replace_refs() payment = create_payment_record( patron_transaction=patron_transaction, user_pid=user_pid, - user_library=random.choice(user['libraries'])['pid'] + user_library=random.choice(user["libraries"])["pid"], ) PatronTransactionEvent.create( - data=payment, - dbcommit=True, - reindex=True, - update_parent=True + data=payment, dbcommit=True, reindex=True, update_parent=True ) - elif transaction_type == 'extended': + elif transaction_type == "extended": end_date = datetime.now(timezone.utc) - timedelta(days=1) - loan['end_date'] = end_date.isoformat() - loan.update( - data=loan, - dbcommit=True, - reindex=True + loan["end_date"] = end_date.isoformat() + loan.update(data=loan, dbcommit=True, reindex=True) + user_pid, user_location = get_random_librarian_and_transaction_location( + patron ) - user_pid, user_location = \ - get_random_librarian_and_transaction_location(patron) item.extend_loan( pid=loan_pid, patron_pid=patron.pid, @@ -384,90 +356,88 @@ def create_loan(barcode, transaction_type, loanable_items, verbose=False, transaction_date=( datetime.now(timezone.utc) - timedelta(days=1) ).isoformat(), - document_pid=extracted_data_from_ref(item.get('document')), + document_pid=extracted_data_from_ref(item.get("document")), item_pid=item.pid, ) - elif transaction_type == 'requested_by_others': + elif transaction_type == "requested_by_others": requested_patron = get_random_patron(barcode) - user_pid, user_location = \ - get_random_librarian_and_transaction_location(patron) + user_pid, user_location = get_random_librarian_and_transaction_location( + patron + ) circ_policy = CircPolicy.provide_circ_policy( organisation_pid=item.organisation_pid, library_pid=item.library_pid, patron_type_pid=requested_patron.patron_type_pid, - item_type_pid=item.item_type_circulation_category_pid + item_type_pid=item.item_type_circulation_category_pid, ) - if circ_policy.get('allow_requests'): + if circ_policy.get("allow_requests"): item.request( patron_pid=requested_patron.pid, transaction_location_pid=user_location, transaction_user_pid=user_pid, transaction_date=transaction_date, pickup_location_pid=get_random_pickup_location( - requested_patron.pid, item), - document_pid=extracted_data_from_ref(item.get('document')), + requested_patron.pid, item + ), + document_pid=extracted_data_from_ref(item.get("document")), ) - notifications = loan.create_notification( - _type=NotificationType.RECALL) - notification_pids.extend( - notif['pid'] for notif in notifications) + notifications = 
loan.create_notification(_type=NotificationType.RECALL) + notification_pids.extend(notif["pid"] for notif in notifications) Dispatcher.dispatch_notifications(notification_pids, verbose=verbose) - return item['barcode'], False + return item["barcode"], False except Exception as err: if verbose: - click.secho(f'\tException loan {transaction_type}:{err}', fg='red') + click.secho(f"\tException loan {transaction_type}:{err}", fg="red") if debug: traceback.print_exc() - return item['barcode'], True + return item["barcode"], True -def create_request(barcode, transaction_type, loanable_items, verbose=False, - debug=False): +def create_request( + barcode, transaction_type, loanable_items, verbose=False, debug=False +): """Create request transactions.""" try: item = next(loanable_items) rank_1_patron = get_random_patron(barcode) patron = Patron.get_patron_by_barcode(barcode=barcode) - if transaction_type == 'rank_2': - transaction_date = \ - (datetime.now(timezone.utc) - timedelta(2)).isoformat() - user_pid, user_location = \ - get_random_librarian_and_transaction_location(patron) + if transaction_type == "rank_2": + transaction_date = (datetime.now(timezone.utc) - timedelta(2)).isoformat() + user_pid, user_location = get_random_librarian_and_transaction_location( + patron + ) circ_policy = CircPolicy.provide_circ_policy( item.organisation_pid, item.holding_library_pid, rank_1_patron.patron_type_pid, - item.holding_circulation_category_pid + item.holding_circulation_category_pid, ) - if circ_policy.get('allow_requests'): + if circ_policy.get("allow_requests"): item.request( patron_pid=rank_1_patron.pid, transaction_location_pid=user_location, transaction_user_pid=user_pid, transaction_date=transaction_date, pickup_location_pid=get_random_pickup_location( - rank_1_patron.pid, item), - document_pid=extracted_data_from_ref(item.get('document')), + rank_1_patron.pid, item + ), + document_pid=extracted_data_from_ref(item.get("document")), ) transaction_date = datetime.now(timezone.utc).isoformat() - user_pid, user_location = \ - get_random_librarian_and_transaction_location(patron) + user_pid, user_location = get_random_librarian_and_transaction_location(patron) item.request( patron_pid=patron.pid, transaction_location_pid=user_location, transaction_user_pid=user_pid, transaction_date=transaction_date, pickup_location_pid=get_random_pickup_location(patron.pid, item), - document_pid=extracted_data_from_ref(item.get('document')), + document_pid=extracted_data_from_ref(item.get("document")), ) - return item['barcode'] + return item["barcode"] except Exception as err: if verbose: - click.secho( - f'\tException request {transaction_type}: {err}', - fg='red' - ) + click.secho(f"\tException request {transaction_type}: {err}", fg="red") if debug: traceback.print_exc() return None @@ -476,70 +446,75 @@ def create_request(barcode, transaction_type, loanable_items, verbose=False, def get_loanable_items(patron_type_pid): """Get the list of loanable items.""" patron_type = PatronType.get_record_by_pid(patron_type_pid) - org_pid = extracted_data_from_ref(patron_type.get('organisation')) - loanable_items = ItemsSearch()\ - .filter('term', organisation__pid=org_pid)\ - .filter('term', status=ItemStatus.ON_SHELF).source(['pid']).scan() + org_pid = extracted_data_from_ref(patron_type.get("organisation")) + loanable_items = ( + ItemsSearch() + .filter("term", organisation__pid=org_pid) + .filter("term", status=ItemStatus.ON_SHELF) + .source(["pid"]) + .scan() + ) for loanable_item in loanable_items: if item := 
Item.get_record_by_pid(loanable_item.pid): circ_policy = CircPolicy.provide_circ_policy( item.organisation_pid, item.holding_library_pid, patron_type_pid, - item.holding_circulation_category_pid + item.holding_circulation_category_pid, ) - if (circ_policy.allow_checkout and - circ_policy.get('allow_requests') and - circ_policy.get('number_renewals', 0) > 0): - if not item.number_of_requests(): - # exclude the first 16 items of the 3rd organisation - barcode = item.get('barcode') - if not ( - barcode.startswith('fictive') and - int(barcode.split('fictive')[1]) < 17 - ): - yield item + if ( + circ_policy.allow_checkout + and circ_policy.get("allow_requests") + and circ_policy.get("number_renewals", 0) > 0 + ) and not item.number_of_requests(): + barcode = item.get("barcode") + if not ( + barcode.startswith("fictive") + and int(barcode.split("fictive")[1]) < 17 + ): + yield item def get_random_pickup_location(patron_pid, item): """Find a qualified pickup location.""" - pickup_locations_pids = list(Location.get_pickup_location_pids( - patron_pid=patron_pid, - item_pid=item.pid - )) + pickup_locations_pids = list( + Location.get_pickup_location_pids(patron_pid=patron_pid, item_pid=item.pid) + ) return random.choice(pickup_locations_pids) def get_random_patron(exclude_this_barcode): """Find a qualified patron other than exclude_this_barcode.""" - ptrn_to_exclude = Patron.get_patron_by_barcode( - barcode=exclude_this_barcode) - ptty_pid = extracted_data_from_ref( - ptrn_to_exclude.get('patron').get('type') - ) + ptrn_to_exclude = Patron.get_patron_by_barcode(barcode=exclude_this_barcode) + ptty_pid = extracted_data_from_ref(ptrn_to_exclude.get("patron").get("type")) org_pid = extracted_data_from_ref( - PatronType.get_record_by_pid(ptty_pid).get('organisation') + PatronType.get_record_by_pid(ptty_pid).get("organisation") + ) + patrons = ( + PatronsSearch() + .filter("term", roles=UserRole.PATRON) + .filter("term", organisation__pid=org_pid) + .source(["patron"]) + .scan() ) - patrons = PatronsSearch()\ - .filter('term', roles=UserRole.PATRON)\ - .filter('term', organisation__pid=org_pid)\ - .source(['patron']).scan() for patron in patrons: if exclude_this_barcode not in patron.patron.barcode: - return Patron.get_patron_by_barcode( - barcode=patron.patron.barcode[0]) + return Patron.get_patron_by_barcode(barcode=patron.patron.barcode[0]) def get_random_librarian(patron): """Find a qualified staff user.""" - ptty_pid = extracted_data_from_ref(patron.get('patron').get('type')) + ptty_pid = extracted_data_from_ref(patron.get("patron").get("type")) org_pid = extracted_data_from_ref( - PatronType.get_record_by_pid(ptty_pid).get('organisation') + PatronType.get_record_by_pid(ptty_pid).get("organisation") + ) + patrons = ( + PatronsSearch() + .filter("terms", roles=UserRole.PROFESSIONAL_ROLES) + .filter("term", organisation__pid=org_pid) + .source(["pid"]) + .scan() ) - patrons = PatronsSearch()\ - .filter('terms', roles=UserRole.PROFESSIONAL_ROLES)\ - .filter('term', organisation__pid=org_pid)\ - .source(['pid']).scan() for patron in patrons: return Patron.get_record_by_pid(patron.pid) return None @@ -548,38 +523,27 @@ def get_random_librarian(patron): def get_random_librarian_and_transaction_location(patron): """Find a qualified user data.""" user = get_random_librarian(patron).replace_refs() - library = Library.get_record_by_pid( - random.choice(user['libraries'])['pid']) + library = Library.get_record_by_pid(random.choice(user["libraries"])["pid"]) return user.pid, library.get_pickup_location_pid() 
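# A hedged composition sketch of the fixture helpers above. The function name
# `_example_checkout` is hypothetical (not part of the module); it mirrors the
# calls `create_loan` makes with the same helpers and assumes an application
# context with existing fixture data.
def _example_checkout(patron, item):
    """Check out ``item`` to ``patron`` using a random staff account."""
    # pick a librarian plus a matching transaction location for this patron
    user_pid, location_pid = get_random_librarian_and_transaction_location(patron)
    # drive the circulation action exactly as `create_loan` does
    return item.checkout(
        patron_pid=patron.pid,
        transaction_user_pid=user_pid,
        transaction_location_pid=location_pid,
        transaction_date=datetime.now(timezone.utc).isoformat(),
        document_pid=extracted_data_from_ref(item.get("document")),
        item_pid=item.pid,
    )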
def create_payment_record(patron_transaction, user_pid, user_library):
     """Create a payment record from patron_transaction."""
-    data = {
-        '$schema': get_schema_for_resource('ptre')
-    }
+    data = {"$schema": get_schema_for_resource("ptre")}
     for record in [
         {
-            'resource': 'parent',
-            'doc_type': 'patron_transactions',
-            'pid': patron_transaction.pid
-        },
-        {
-            'resource': 'operator',
-            'doc_type': 'patrons',
-            'pid': user_pid
-        },
-        {
-            'resource': 'library',
-            'doc_type': 'libraries',
-            'pid': user_library
+            "resource": "parent",
+            "doc_type": "patron_transactions",
+            "pid": patron_transaction.pid,
         },
+        {"resource": "operator", "doc_type": "patrons", "pid": user_pid},
+        {"resource": "library", "doc_type": "libraries", "pid": user_library},
     ]:
-        data[record['resource']] = {
-            '$ref': get_ref_for_pid(record['doc_type'], record['pid'])
+        data[record["resource"]] = {
+            "$ref": get_ref_for_pid(record["doc_type"], record["pid"])
         }
-    data['type'] = 'payment'
-    data['subtype'] = 'cash'
-    data['amount'] = patron_transaction.get('total_amount')
-    data['creation_date'] = datetime.now(timezone.utc).isoformat()
+    data["type"] = "payment"
+    data["subtype"] = "cash"
+    data["amount"] = patron_transaction.get("total_amount")
+    data["creation_date"] = datetime.now(timezone.utc).isoformat()
     return data
diff --git a/rero_ils/modules/loans/dumpers.py b/rero_ils/modules/loans/dumpers.py
index 1c83a4abf3..01c5e83884 100644
--- a/rero_ils/modules/loans/dumpers.py
+++ b/rero_ils/modules/loans/dumpers.py
@@ -36,24 +36,26 @@ def dump(self, record, data):
         """Dump a loan for circulation."""
         data = deepcopy(dict(record))
         # used only for pending
-        data['creation_date'] = record.created
+        data["creation_date"] = record.created
 
-        ptrn_query = PatronsSearch()\
-            .source(['patron', 'first_name', 'last_name'])\
-            .filter('term', pid=record['patron_pid'])
+        ptrn_query = (
+            PatronsSearch()
+            .source(["patron", "first_name", "last_name"])
+            .filter("term", pid=record["patron_pid"])
+        )
         if ptrn_data := next(ptrn_query.scan(), None):
-            data['patron'] = {}
-            data['patron']['barcode'] = ptrn_data.patron.barcode.pop()
-            data['patron']['name'] = ', '.join((
-                ptrn_data.last_name, ptrn_data.first_name))
+            data["patron"] = {}
+            data["patron"]["barcode"] = ptrn_data.patron.barcode.pop()
+            data["patron"]["name"] = ", ".join(
+                (ptrn_data.last_name, ptrn_data.first_name)
+            )
 
-        if record.get('pickup_location_pid'):
-            location = Location.get_record_by_pid(
-                record.get('pickup_location_pid'))
-            data['pickup_location'] = {
-                'name': location.get('name'),
-                'library_name': location.get_library().get('name'),
-                'pickup_name': location.pickup_name
+        if record.get("pickup_location_pid"):
+            location = Location.get_record_by_pid(record.get("pickup_location_pid"))
+            data["pickup_location"] = {
+                "name": location.get("name"),
+                "library_name": location.get_library().get("name"),
+                "pickup_name": location.pickup_name,
             }
 
         # Always add item destination readable information if item state is
@@ -61,20 +63,14 @@ def dump(self, record, data):
         item = record.item
         if item.status == ItemStatus.IN_TRANSIT:
             destination_loc_pid = item.location_pid
-            if record.get('state') == LoanState.ITEM_IN_TRANSIT_FOR_PICKUP:
-                destination_loc_pid = record.get('pickup_location_pid')
+            if record.get("state") == LoanState.ITEM_IN_TRANSIT_FOR_PICKUP:
+                destination_loc_pid = record.get("pickup_location_pid")
             # can be already computed
-            if library_name := data.get(
-                    'pickup_location', {}).get('library_name'):
-                data['item_destination'] = {
-                    'library_name': library_name
-                }
+            if
library_name := data.get("pickup_location", {}).get("library_name"):
+                data["item_destination"] = {"library_name": library_name}
             # do nothing if already done
-            if not data.get('item_destination'):
-                destination_loc = Location.get_record_by_pid(
-                    destination_loc_pid)
+            if not data.get("item_destination"):
+                destination_loc = Location.get_record_by_pid(destination_loc_pid)
                 destination_lib = destination_loc.get_library()
-                data['item_destination'] = {
-                    'library_name': destination_lib.get('name')
-                }
+                data["item_destination"] = {"library_name": destination_lib.get("name")}
 
         return data
diff --git a/rero_ils/modules/loans/extensions.py b/rero_ils/modules/loans/extensions.py
index b4ac5f2eff..8ddfef590f 100644
--- a/rero_ils/modules/loans/extensions.py
+++ b/rero_ils/modules/loans/extensions.py
@@ -45,9 +45,9 @@ def _add_checkout_location(record):
 
         :param record: the record metadata.
         """
-        transaction_pid = record.get('transaction_location_pid')
-        if record.get('trigger') == 'checkout' and transaction_pid:
-            record['checkout_location_pid'] = transaction_pid
+        transaction_pid = record.get("transaction_location_pid")
+        if record.get("trigger") == "checkout" and transaction_pid:
+            record["checkout_location_pid"] = transaction_pid
 
     def pre_commit(self, record):
         """Called before a record is committed."""
@@ -72,36 +72,41 @@ def _add_request_expiration_date(record):
 
         :param record: the record metadata.
         """
         from .utils import get_circ_policy
-        if record.state == LoanState.ITEM_AT_DESK and \
-           'request_expire_date' not in record:
+
+        if (
+            record.state == LoanState.ITEM_AT_DESK
+            and "request_expire_date" not in record
+        ):
             cipo = get_circ_policy(record)
-            duration = cipo.get('pickup_hold_duration')
+            duration = cipo.get("pickup_hold_duration")
             library = record.pickup_library
-            if cipo.get('allow_requests') and duration and library:
+            if cipo.get("allow_requests") and duration and library:
                 # the expiration date should be calculated using the pickup
                 # library calendar
                 trans_date = ciso8601.parse_datetime(record.transaction_date)
                 try:
                     # Use `duration - 1` to get the eve, so `next_open` is safe to use
                     expire_date = trans_date + timedelta(days=duration - 1)
-                    expire_date = library \
-                        .next_open(expire_date) \
-                        .astimezone(library.get_timezone()) \
+                    expire_date = (
+                        library.next_open(expire_date)
+                        .astimezone(library.get_timezone())
                         .replace(hour=23, minute=59, second=0, microsecond=0)
+                    )
                 except LibraryNeverOpen:
                     # 10 days by default ... it's better than placing a random
                     # date value
                     default_duration = current_app.config.get(
-                        'RERO_ILS_DEFAULT_PICKUP_HOLD_DURATION', 10)
+                        "RERO_ILS_DEFAULT_PICKUP_HOLD_DURATION", 10
+                    )
                     expire_date = trans_date + timedelta(days=default_duration)
-                    expire_date = expire_date \
-                        .astimezone(library.get_timezone()) \
-                        .replace(hour=23, minute=59, second=0, microsecond=0)
+                    expire_date = expire_date.astimezone(
+                        library.get_timezone()
+                    ).replace(hour=23, minute=59, second=0, microsecond=0)
 
-                record['request_expire_date'] = expire_date.isoformat()
-                record['request_start_date'] = datetime \
-                    .now(library.get_timezone()) \
-                    .isoformat()
+                record["request_expire_date"] = expire_date.isoformat()
+                record["request_start_date"] = datetime.now(
+                    library.get_timezone()
+                ).isoformat()
 
     @staticmethod
     def _add_due_soon_date(record):
@@ -110,16 +115,17 @@ def _add_due_soon_date(record):
 
         :param record: the record metadata.
         """
         from .utils import get_circ_policy
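        # A worked example under assumed values: with an end_date of
        # 2024-06-20T14:00:00Z and due_soon_interval_days = 5, the computed
        # due_soon_date is 2024-06-15T00:00:00+00:00, i.e. midnight five days
        # before the due date.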
circ_policy = get_circ_policy(record, checkout_location=True) due_date = ciso8601.parse_datetime(record.end_date).replace( - tzinfo=timezone.utc) + tzinfo=timezone.utc + ) if days_before := circ_policy.due_soon_interval_days: due_soon = due_date - timedelta(days=days_before) - due_soon = due_soon.replace( - hour=0, minute=0, second=0, microsecond=0) - record['due_soon_date'] = due_soon.isoformat() + due_soon = due_soon.replace(hour=0, minute=0, second=0, microsecond=0) + record["due_soon_date"] = due_soon.isoformat() @staticmethod def _add_last_end_date(record): @@ -127,8 +133,8 @@ def _add_last_end_date(record): :param record: the record metadata. """ - if record.state == LoanState.ITEM_ON_LOAN and record.get('end_date'): - record['last_end_date'] = record['end_date'] + if record.state == LoanState.ITEM_ON_LOAN and record.get("end_date"): + record["last_end_date"] = record["end_date"] def pre_commit(self, record): """Called before a record is committed.""" diff --git a/rero_ils/modules/loans/jsonresolver.py b/rero_ils/modules/loans/jsonresolver.py index b2a6753a50..0fc1cc53ac 100644 --- a/rero_ils/modules/loans/jsonresolver.py +++ b/rero_ils/modules/loans/jsonresolver.py @@ -23,7 +23,7 @@ from ..jsonresolver import resolve_json_refs -@jsonresolver.route('/api/loans/', host='bib.rero.ch') +@jsonresolver.route("/api/loans/", host="bib.rero.ch") def loan_resolver(pid): """Loan resolver.""" - return resolve_json_refs('loanid', pid) + return resolve_json_refs("loanid", pid) diff --git a/rero_ils/modules/loans/listener.py b/rero_ils/modules/loans/listener.py index cd1bad9562..5728d8f510 100644 --- a/rero_ils/modules/loans/listener.py +++ b/rero_ils/modules/loans/listener.py @@ -22,14 +22,22 @@ from rero_ils.modules.items.api import Item from rero_ils.modules.loans.logs.api import LoanOperationLog -from rero_ils.modules.patron_transactions.utils import \ - create_patron_transaction_from_overdue_loan +from rero_ils.modules.patron_transactions.utils import ( + create_patron_transaction_from_overdue_loan, +) from .models import LoanAction -def enrich_loan_data(sender, json=None, record=None, index=None, - doc_type=None, arguments=None, **dummy_kwargs): +def enrich_loan_data( + sender, + json=None, + record=None, + index=None, + doc_type=None, + arguments=None, + **dummy_kwargs, +): """Signal sent before a record is indexed. :param json: The dumped record dictionary which can be modified. @@ -37,30 +45,29 @@ def enrich_loan_data(sender, json=None, record=None, index=None, :param index: The index in which the record will be indexed. :param doc_type: The doc_type for the record. """ - if index.split('-')[0] != current_circulation.loan_search_cls.Meta.index: + if index.split("-")[0] != current_circulation.loan_search_cls.Meta.index: return # Update the patron type related to this loan only for "alive" loan to # try preserving performance during circulation process. 
    if not record.is_concluded():
         if patron_type_pid := record.patron.patron_type_pid:
-            json['patron_type_pid'] = patron_type_pid
+            json["patron_type_pid"] = patron_type_pid
 
     if record.transaction_location_pid:
-        json['transaction_library_pid'] = record.transaction_library_pid
+        json["transaction_library_pid"] = record.transaction_library_pid
     if record.pickup_location_pid:
-        json['pickup_library_pid'] = record.pickup_library.pid
+        json["pickup_library_pid"] = record.pickup_library.pid
 
-    item_pid = record.get('item_pid', {}).get('value')
+    item_pid = record.get("item_pid", {}).get("value")
     if item := Item.get_record_by_pid(item_pid):
-        json['library_pid'] = item.holding_library_pid
-        json['location_pid'] = item.holding_location_pid
+        json["library_pid"] = item.holding_library_pid
+        json["location_pid"] = item.holding_location_pid
     else:
         msg = f'No item found: {item_pid} for loan: {record.get("pid")}'
         current_app.logger.warning(msg)
 
 
-def listener_loan_state_changed(
-        _, initial_loan, loan, trigger, **transition_kwargs):
+def listener_loan_state_changed(_, initial_loan, loan, trigger, **transition_kwargs):
     """Listener when a loan state changed.
 
     :param initial_loan: The initial loan.
@@ -77,5 +84,5 @@ def listener_loan_state_changed(_, initial_loan, loan, trigger, **transition_kw
     # Create fees for check-in/extend operations
     # It is `create_patron_transaction_from_overdue_loan` that determines
     # whether the loan is overdue and whether fees must be created.
-    if trigger in [LoanAction.CHECKIN, 'extend']:
+    if trigger in [LoanAction.CHECKIN, "extend"]:
         create_patron_transaction_from_overdue_loan(initial_loan)
diff --git a/rero_ils/modules/loans/logs/api.py b/rero_ils/modules/loans/logs/api.py
index ce61730cf0..0c1573b638 100644
--- a/rero_ils/modules/loans/logs/api.py
+++ b/rero_ils/modules/loans/logs/api.py
@@ -18,8 +18,7 @@
 
 """Loans logs API."""
 
-from rero_ils.modules.operation_logs.api import OperationLog, \
-    OperationLogsSearch
+from rero_ils.modules.operation_logs.api import OperationLog, OperationLogsSearch
 from rero_ils.modules.operation_logs.logs.api import SpecificOperationLog
 
 from ...items.api import Item
@@ -35,10 +34,11 @@ def get_logs_by_trigger(self, triggers, date_range=None):
 
     :param triggers: list[str] - loan trigger values to filter
     :return: an elasticsearch dsl search query
     """
-        query = self.filter('term', record__type='loan')\
-            .filter('terms', loan__trigger=triggers)
+        query = self.filter("term", record__type="loan").filter(
+            "terms", loan__trigger=triggers
+        )
         if date_range:
-            query = query.filter('range', date=date_range)
+            query = query.filter("range", date=date_range)
         return query
 
 
@@ -46,7 +46,7 @@ class LoanOperationLog(OperationLog, SpecificOperationLog):
     """Operation log for loans."""
 
     @classmethod
-    def create(cls, data, id_=None, index_refresh='false', **kwargs):
+    def create(cls, data, id_=None, index_refresh="false", **kwargs):
         """Create a new record instance and store it in elasticsearch.
 
         :param data: Dict with the loan metadata.
@@ -60,57 +60,49 @@ def create(cls, data, id_=None, index_refresh="false", **kwargs):
         :returns: A new :class:`Record` instance.
""" log = { - 'record': { - 'value': data.get('pid'), - 'type': 'loan' - }, - 'operation': 'create', - 'date': data['transaction_date'], - 'loan': { - 'pid': data['pid'], - 'trigger': data['trigger'], - 'override_flag': False, - 'auto_extend': data.get('auto_extend', False), - 'transaction_channel': 'system' if not data.get( - 'selfcheck_terminal_id') else 'sip2', - 'transaction_location': { - 'pid': data['transaction_location_pid'], - 'name': cls._get_location_name( - data['transaction_location_pid']) + "record": {"value": data.get("pid"), "type": "loan"}, + "operation": "create", + "date": data["transaction_date"], + "loan": { + "pid": data["pid"], + "trigger": data["trigger"], + "override_flag": False, + "auto_extend": data.get("auto_extend", False), + "transaction_channel": ( + "sip2" if data.get("selfcheck_terminal_id") else "system" + ), + "transaction_location": { + "pid": data["transaction_location_pid"], + "name": cls._get_location_name(data["transaction_location_pid"]), }, - 'pickup_location': { - 'pid': data['pickup_location_pid'], - 'name': cls._get_location_name(data['pickup_location_pid']) + "pickup_location": { + "pid": data["pickup_location_pid"], + "name": cls._get_location_name(data["pickup_location_pid"]), }, - 'patron': cls._get_patron_data( - Patron.get_record_by_pid(data['patron_pid'])), - 'item': cls._get_item_data( - Item.get_record_by_pid(data['item_pid']['value'])) - } + "patron": cls._get_patron_data( + Patron.get_record_by_pid(data["patron_pid"]) + ), + "item": cls._get_item_data( + Item.get_record_by_pid(data["item_pid"]["value"]) + ), + }, } if current_librarian: - log['user'] = { - 'type': 'ptrn', - 'value': current_librarian.pid - } - log['user_name'] = current_librarian.formatted_name - log['organisation'] = { - 'value': current_librarian.organisation_pid, - 'type': 'org' - } - log['library'] = { - 'value': current_librarian.library_pid, - 'type': 'lib' + log["user"] = {"type": "ptrn", "value": current_librarian.pid} + log["user_name"] = current_librarian.formatted_name + log["organisation"] = { + "value": current_librarian.organisation_pid, + "type": "org", } + log["library"] = {"value": current_librarian.library_pid, "type": "lib"} else: - log['user_name'] = 'system' + log["user_name"] = "system" # Store transaction user name if not done by SIP2 - if log['loan']['transaction_channel'] != 'sip2': - transaction_user = Patron.get_record_by_pid( - data['transaction_user_pid']) - log['loan']['transaction_user'] = { - 'pid': data['transaction_user_pid'], - 'name': transaction_user.formatted_name + if log["loan"]["transaction_channel"] != "sip2": + transaction_user = Patron.get_record_by_pid(data["transaction_user_pid"]) + log["loan"]["transaction_user"] = { + "pid": data["transaction_user_pid"], + "name": transaction_user.formatted_name, } return super().create(log, index_refresh=index_refresh) @@ -122,6 +114,6 @@ def anonymize_logs(cls, loan_pid): """ for log in OperationLogsSearch().get_logs_by_record_pid(loan_pid): record = log.to_dict() - record['loan']['patron']['name'] = 'anonymized' - record['loan']['patron']['pid'] = 'anonymized' - cls.update(log.meta.id, log['date'], record) + record["loan"]["patron"]["name"] = "anonymized" + record["loan"]["patron"]["pid"] = "anonymized" + cls.update(log.meta.id, log["date"], record) diff --git a/rero_ils/modules/loans/models.py b/rero_ils/modules/loans/models.py index 16e960698a..160098ed6c 100644 --- a/rero_ils/modules/loans/models.py +++ b/rero_ils/modules/loans/models.py @@ -22,14 +22,14 @@ class LoanState: 
"""Class to handle different loan states.""" - CREATED = 'CREATED' - PENDING = 'PENDING' - ITEM_IN_TRANSIT_FOR_PICKUP = 'ITEM_IN_TRANSIT_FOR_PICKUP' - ITEM_IN_TRANSIT_TO_HOUSE = 'ITEM_IN_TRANSIT_TO_HOUSE' - ITEM_AT_DESK = 'ITEM_AT_DESK' - ITEM_ON_LOAN = 'ITEM_ON_LOAN' - ITEM_RETURNED = 'ITEM_RETURNED' - CANCELLED = 'CANCELLED' + CREATED = "CREATED" + PENDING = "PENDING" + ITEM_IN_TRANSIT_FOR_PICKUP = "ITEM_IN_TRANSIT_FOR_PICKUP" + ITEM_IN_TRANSIT_TO_HOUSE = "ITEM_IN_TRANSIT_TO_HOUSE" + ITEM_AT_DESK = "ITEM_AT_DESK" + ITEM_ON_LOAN = "ITEM_ON_LOAN" + ITEM_RETURNED = "ITEM_RETURNED" + CANCELLED = "CANCELLED" CONCLUDED = [CANCELLED, ITEM_RETURNED] ITEM_IN_TRANSIT = [ITEM_IN_TRANSIT_TO_HOUSE, ITEM_IN_TRANSIT_FOR_PICKUP] @@ -39,13 +39,13 @@ class LoanState: class LoanAction: """Class holding all available circulation loan actions.""" - REQUEST = 'request' - CHECKOUT = 'checkout' - CHECKIN = 'checkin' - VALIDATE = 'validate' - RECEIVE = 'receive' - RETURN_MISSING = 'return_missing' - EXTEND = 'extend_loan' - CANCEL = 'cancel' - NO = 'no' - UPDATE = 'update' + REQUEST = "request" + CHECKOUT = "checkout" + CHECKIN = "checkin" + VALIDATE = "validate" + RECEIVE = "receive" + RETURN_MISSING = "return_missing" + EXTEND = "extend_loan" + CANCEL = "cancel" + NO = "no" + UPDATE = "update" diff --git a/rero_ils/modules/loans/permissions.py b/rero_ils/modules/loans/permissions.py index 00726c96d8..eb5f92641a 100644 --- a/rero_ils/modules/loans/permissions.py +++ b/rero_ils/modules/loans/permissions.py @@ -21,13 +21,15 @@ from invenio_records_permissions.generators import Generator from rero_ils.modules.loans.api import Loan -from rero_ils.modules.permissions import \ - AllowedByActionRestrictByOwnerOrOrganisation, RecordPermissionPolicy +from rero_ils.modules.permissions import ( + AllowedByActionRestrictByOwnerOrOrganisation, + RecordPermissionPolicy, +) # Actions to control Loan policy -search_action = action_factory('loan-search') -read_action = action_factory('loan-read') -access_action = action_factory('loan-access') +search_action = action_factory("loan-search") +read_action = action_factory("loan-read") +access_action = action_factory("loan-access") class DisallowedIfAnonymized(Generator): @@ -40,7 +42,7 @@ def excludes(self, record=None, *args, **kwargs): :param kwargs: extra arguments. :returns: a list of needs to disabled access. """ - return [any_user] if record and record.get('to_anonymize') else [] + return [any_user] if record and record.get("to_anonymize") else [] class LoanPermissionPolicy(RecordPermissionPolicy): @@ -48,14 +50,12 @@ class LoanPermissionPolicy(RecordPermissionPolicy): can_search = [ AllowedByActionRestrictByOwnerOrOrganisation( - search_action, - record_mapper=lambda r: Loan(r) + search_action, record_mapper=lambda r: Loan(r) ) ] can_read = [ DisallowedIfAnonymized(), AllowedByActionRestrictByOwnerOrOrganisation( - read_action, - record_mapper=lambda r: Loan(r) - ) + read_action, record_mapper=lambda r: Loan(r) + ), ] diff --git a/rero_ils/modules/loans/query.py b/rero_ils/modules/loans/query.py index 79c44c73bf..844f492084 100644 --- a/rero_ils/modules/loans/query.py +++ b/rero_ils/modules/loans/query.py @@ -30,17 +30,19 @@ def misc_status_filter(): :return: Function allowing to filter the ElasticSearch query. """ + def inner(values): queries = [] # OVERDUE # Filter query to return only loans with an `end_date` lower than the # current timestamp. 
-        if 'overdue' in values:
-            queries.append(Q('range', end_date={'lt': 'now/d'}))
+        if "overdue" in values:
+            queries.append(Q("range", end_date={"lt": "now/d"}))
         # EXPIRED_REQUEST
         # Filter query to return only loans with a `request_expire_date`
         # lower than the current timestamp
-        if 'expired_request' in values:
-            queries.append(Q('range', request_expire_date={'lt': 'now/d'}))
-        return Q('bool', must=queries)
+        if "expired_request" in values:
+            queries.append(Q("range", request_expire_date={"lt": "now/d"}))
+        return Q("bool", must=queries)
+
     return inner
diff --git a/rero_ils/modules/loans/serializers/__init__.py b/rero_ils/modules/loans/serializers/__init__.py
index 0e5ae6c3e5..09c5622d66 100644
--- a/rero_ils/modules/loans/serializers/__init__.py
+++ b/rero_ils/modules/loans/serializers/__init__.py
@@ -17,43 +17,40 @@
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 """RERO-ILS Loan resource serializers."""
-from rero_ils.modules.serializers import RecordSchemaJSONV1, \
-    search_responsify, search_responsify_file
+from rero_ils.modules.serializers import (
+    RecordSchemaJSONV1,
+    search_responsify,
+    search_responsify_file,
+)
 from .csv import LoanStreamedCSVSerializer
 from .json import LoanJSONSerializer
-__all__ = [
-    'json_loan_search',
-    'csv_stream_search'
-]
+__all__ = ["json_loan_search", "csv_stream_search"]
 _json = LoanJSONSerializer(RecordSchemaJSONV1)
 _streamed_csv = LoanStreamedCSVSerializer(
     csv_included_fields=[
-        'pid',
-        'document_title',
-        'item_barcode',
-        'item_call_numbers',
-        'patron_name',
-        'patron_barcode',
-        'patron_email',
-        'patron_type',
-        'owning_library',
-        'transaction_library',
-        'pickup_library',
-        'state',
-        'checkout_date',
-        'end_date',
-        'request_expire_date',
+        "pid",
+        "document_title",
+        "item_barcode",
+        "item_call_numbers",
+        "patron_name",
+        "patron_barcode",
+        "patron_email",
+        "patron_type",
+        "owning_library",
+        "transaction_library",
+        "pickup_library",
+        "state",
+        "checkout_date",
+        "end_date",
+        "request_expire_date",
     ]
 )
-json_loan_search = search_responsify(_json, 'application/rero+json')
+json_loan_search = search_responsify(_json, "application/rero+json")
 csv_stream_search = search_responsify_file(
-    _streamed_csv,
-    'text/csv',
-    file_extension='csv',
-    file_prefix='export-loans'
+    _streamed_csv, "text/csv", file_extension="csv", file_prefix="export-loans"
 )
diff --git a/rero_ils/modules/loans/serializers/csv.py b/rero_ils/modules/loans/serializers/csv.py
index 7730a6d94f..037e862cd0 100644
--- a/rero_ils/modules/loans/serializers/csv.py
+++ b/rero_ils/modules/loans/serializers/csv.py
@@ -30,68 +30,69 @@
 from rero_ils.modules.libraries.api import LibrariesSearch
 from rero_ils.modules.patron_types.api import PatronTypesSearch
 from rero_ils.modules.patrons.api import Patron
-from rero_ils.modules.serializers import CachedDataSerializerMixin, \
-    StreamSerializerMixin
+from rero_ils.modules.serializers import (
+    CachedDataSerializerMixin,
+    StreamSerializerMixin,
+)
 from ..utils import get_loan_checkout_date
-class LoanStreamedCSVSerializer(CSVSerializer, StreamSerializerMixin,
-                                CachedDataSerializerMixin):
+class LoanStreamedCSVSerializer(
+    CSVSerializer, StreamSerializerMixin, CachedDataSerializerMixin
+):
     """Streamed CSV serializer for `loan` resource."""
-    def transform_search_hit(self, hit_pid, hit, links_factory=None,
-                             **kwargs):
+    def transform_search_hit(self, hit_pid, hit, links_factory=None, **kwargs):
         """Transform search result hit into a desired representation.
 
         :param hit_pid: Pid of the resource.
:param hit: Record metadata retrieved via search. :param links_factory: Factory function for record links. """ - date_format = '%Y-%m-%d' + date_format = "%Y-%m-%d" # Transform ISO dates to human-readable strings - for field in ['end_date', 'request_expire_date']: + for field in ["end_date", "request_expire_date"]: if field in hit: - hit[field] = ciso8601.parse_datetime(hit[field])\ - .strftime(date_format) + hit[field] = ciso8601.parse_datetime(hit[field]).strftime(date_format) if checkout_date := get_loan_checkout_date(hit_pid): - hit['checkout_date'] = checkout_date.strftime(date_format) + hit["checkout_date"] = checkout_date.strftime(date_format) # Convert PID references to human-readable name lib_loader_ref = LibrariesSearch() pid_reference_fields = [ - (lib_loader_ref, 'owning_library', 'library_pid'), - (lib_loader_ref, 'pickup_library', 'pickup_library_pid'), - (lib_loader_ref, 'transaction_library', 'transaction_library_pid'), - (PatronTypesSearch(), 'patron_type', 'patron_type_pid') + (lib_loader_ref, "owning_library", "library_pid"), + (lib_loader_ref, "pickup_library", "pickup_library_pid"), + (lib_loader_ref, "transaction_library", "transaction_library_pid"), + (PatronTypesSearch(), "patron_type", "patron_type_pid"), ] for loader, field, pid_reference in pid_reference_fields: if pid := hit.pop(pid_reference, None): - hit[field] = self.get_resource(loader, pid).get('name') + hit[field] = self.get_resource(loader, pid).get("name") # document information dumping - if doc := self.get_resource(DocumentsSearch(), hit['document_pid']): - hit['document_title'] = \ - TitleExtension.format_text(doc.get('title')) + if doc := self.get_resource(DocumentsSearch(), hit["document_pid"]): + hit["document_title"] = TitleExtension.format_text(doc.get("title")) # Item information dumping - if item := self.get_resource(ItemsSearch(), hit['item_pid']['value']): - hit['item_call_numbers'] = '|'.join(filter(None, [ - item.get('call_number'), item.get('second_call_number') - ])) - hit['item_barcode'] = item.get('barcode') + if item := self.get_resource(ItemsSearch(), hit["item_pid"]["value"]): + hit["item_call_numbers"] = "|".join( + filter(None, [item.get("call_number"), item.get("second_call_number")]) + ) + hit["item_barcode"] = item.get("barcode") # Patron information's dumping - if patron := self.get_resource(Patron, hit['patron_pid']): - hit['patron_name'] = patron.formatted_name - hit['patron_email'] = patron.user.email - hit['patron_barcode'] = '|'.join( - patron.get('patron', {}).get('barcode', []) + if patron := self.get_resource(Patron, hit["patron_pid"]): + hit["patron_name"] = patron.formatted_name + hit["patron_email"] = patron.user.email + hit["patron_barcode"] = "|".join( + patron.get("patron", {}).get("barcode", []) ) return hit - def serialize_search(self, pid_fetcher, search_result, links=None, - item_links_factory=None): + def serialize_search( + self, pid_fetcher, search_result, links=None, item_links_factory=None + ): """Serialize a search result. :param pid_fetcher: Persistent identifier fetcher. 
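(Illustrative sketch, not part of the patch. The `generate_csv` hunk below
batches the ES scan and bulk-loads the referenced documents and items once
per chunk, so the per-hit `transform_search_hit` calls only touch the
serializer cache. The general pattern, with hypothetical `preload` and
`to_row` helpers and an arbitrary chunk size, looks like:

    import itertools

    def iter_rows(hits, chunk_size=100):
        """Yield CSV rows, prefetching referenced records chunk by chunk."""
        while chunk := list(itertools.islice(hits, chunk_size)):
            preload(chunk)  # one bulk lookup per chunk (hypothetical helper)
            yield from map(to_row, chunk)  # per-hit work uses cached data only
)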
@@ -118,16 +119,15 @@ def generate_csv(): # from this chunks item_pids, document_pids = [], [] for record in records: - document_pids.append(record['document_pid']) - item_pids.append(record['item_pid']['value']) + document_pids.append(record["document_pid"]) + item_pids.append(record["item_pid"]["value"]) self.load_resources(DocumentsSearch(), document_pids) self.load_resources(ItemsSearch(), item_pids) for record in records: - row_data = self.process_dict(self.transform_search_hit( - record.pid, - record.to_dict() - )) + row_data = self.process_dict( + self.transform_search_hit(record.pid, record.to_dict()) + ) writer.writerow(row_data) yield line.read() diff --git a/rero_ils/modules/loans/serializers/json.py b/rero_ils/modules/loans/serializers/json.py index c166535d55..1eb702fb3a 100644 --- a/rero_ils/modules/loans/serializers/json.py +++ b/rero_ils/modules/loans/serializers/json.py @@ -26,8 +26,7 @@ from rero_ils.modules.patron_types.api import PatronTypesSearch from rero_ils.modules.patrons.api import Patron from rero_ils.modules.patrons.dumpers import PatronPropertiesDumper -from rero_ils.modules.serializers import CachedDataSerializerMixin, \ - JSONSerializer +from rero_ils.modules.serializers import CachedDataSerializerMixin, JSONSerializer from ..api import Loan @@ -41,70 +40,69 @@ def _postprocess_search_hit(self, hit): def _post_process_search_request_hit(metadata, item): """Adds some information about a request.""" loc = self.get_resource( - LocationsSearch(), metadata.get('pickup_location_pid')) - metadata.update({ - 'pickup_name': loc.get('pickup_name', loc.get('name')), - 'rank': 0 - }) - if metadata['state'] not in LoanState.ITEM_AT_DESK: - patron_pid = metadata.get('patron', {}).get('pid') + LocationsSearch(), metadata.get("pickup_location_pid") + ) + metadata.update( + {"pickup_name": loc.get("pickup_name", loc.get("name")), "rank": 0} + ) + if metadata["state"] not in LoanState.ITEM_AT_DESK: + patron_pid = metadata.get("patron", {}).get("pid") patron = self.get_resource(Patron, patron_pid) - metadata['rank'] = item.patron_request_rank(patron) + metadata["rank"] = item.patron_request_rank(patron) def _post_process_search_concluded_hit(metadata, loan): """Adds some information about a concluded loan.""" - ploc_pid = loan.get('pickup_location_pid') + ploc_pid = loan.get("pickup_location_pid") ploc = self.get_resource(Location, ploc_pid) or {} plib = self.get_resource(Library, ploc.library_pid) or {} - metadata['pickup_library_name'] = plib.get('name') - tloc_pid = loan.get('transaction_location_pid') + metadata["pickup_library_name"] = plib.get("name") + tloc_pid = loan.get("transaction_location_pid") tloc = self.get_resource(Location, tloc_pid) or {} tlib = self.get_resource(Library, tloc.library_pid) or {} - metadata['transaction_library_name'] = tlib.get('name') + metadata["transaction_library_name"] = tlib.get("name") - metadata = hit.get('metadata', {}) + metadata = hit.get("metadata", {}) # UPDATE LIBRARY INFORMATION # create a new `library` dictionary entry containing library name and # library pid. Remove the unnecessary `library_pid` entry. 
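         # (Illustrative note, not part of the patch: with made-up values, a
         # hit carrying {"library_pid": "lib1"} comes out as
         # {"library": {"pid": "lib1", "name": "Main library"}}, the name
         # being resolved through the serializer cache.)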
- if library_pid := metadata.pop('library_pid', None): + if library_pid := metadata.pop("library_pid", None): library = self.get_resource(Library, library_pid) - metadata['library'] = { - 'pid': library_pid, - 'name': library['name'] - } + metadata["library"] = {"pid": library_pid, "name": library["name"]} # DUMP DOCUMENT INFORMATION # Replace the `document_pid` reference by the known ElasticSearch # data related to this document. - if document_pid := metadata.pop('document_pid', None): + if document_pid := metadata.pop("document_pid", None): document = self.get_resource(DocumentsSearch(), document_pid) - metadata['document'] = document + metadata["document"] = document # DUMP PATRON INFORMATION # Replace the `patron_pid` reference by some known ElasticSearch # data related to this patron - if patron_pid := metadata.pop('patron_pid', None): + if patron_pid := metadata.pop("patron_pid", None): patron = self.get_resource(Patron, patron_pid) - metadata['patron'] = patron.dumps( - dumper=PatronPropertiesDumper(['formatted_name'])) + metadata["patron"] = patron.dumps( + dumper=PatronPropertiesDumper(["formatted_name"]) + ) # DUMP ITEM INFORMATION # Replace the `item_pid` reference by some specific item metadata. # Complete these item metadata with some useful information depending # on the current loan state. - if item_pid := metadata.pop('item_pid', {}).get('value'): + if item_pid := metadata.pop("item_pid", {}).get("value"): item = Item.get_record_by_pid(item_pid) - loan = Loan.get_record_by_pid(metadata.get('pid')) + loan = Loan.get_record_by_pid(metadata.get("pid")) if item: - metadata['item'] = item.dumps(dumper=ItemCirculationDumper()) + metadata["item"] = item.dumps(dumper=ItemCirculationDumper()) # Add some specific information depending on loan state - loan_state = metadata.get('state') + loan_state = metadata.get("state") if loan_state == LoanState.ITEM_ON_LOAN: - metadata['overdue'] = loan.is_loan_overdue() - metadata['is_late'] = loan.is_loan_late() + metadata["overdue"] = loan.is_loan_overdue() + metadata["is_late"] = loan.is_loan_late() elif loan_state in LoanState.REQUEST_STATES: _post_process_search_request_hit(metadata, item) elif loan_state in LoanState.CONCLUDED + [ - LoanState.ITEM_IN_TRANSIT_TO_HOUSE]: + LoanState.ITEM_IN_TRANSIT_TO_HOUSE + ]: _post_process_search_concluded_hit(metadata, loan) def _postprocess_search_aggregations(self, aggregations): @@ -115,39 +113,38 @@ def _postprocess_search_aggregations(self, aggregations): """ def _get_buckets(parent, bucket_name: str) -> list: - return parent.get(bucket_name, {}).get('buckets', []) + return parent.get(bucket_name, {}).get("buckets", []) def _enrich_buckets(buckets, search_class, attribute): - JSONSerializer.enrich_bucket_with_data( - buckets, search_class, attribute) + JSONSerializer.enrich_bucket_with_data(buckets, search_class, attribute) - aggr_ptty = _get_buckets(aggregations, 'patron_type') - _enrich_buckets(aggr_ptty, PatronTypesSearch, 'name') + aggr_ptty = _get_buckets(aggregations, "patron_type") + _enrich_buckets(aggr_ptty, PatronTypesSearch, "name") # Add a `name` for all entries of the all library/location structure. 
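         # (Illustrative note, not part of the patch: with made-up values,
         # enrichment turns a raw term bucket such as
         #   {"key": "ptty1", "doc_count": 4}
         # into
         #   {"key": "ptty1", "doc_count": 4, "name": "Standard"}
         # by resolving the bucket key through the given search class.)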
# Owning library & location - aggr_lib = _get_buckets(aggregations, 'owner_library') - _enrich_buckets(aggr_lib, LibrariesSearch, 'name') + aggr_lib = _get_buckets(aggregations, "owner_library") + _enrich_buckets(aggr_lib, LibrariesSearch, "name") for lib_term in aggr_lib: - aggr_loc = _get_buckets(lib_term, 'owner_location') - _enrich_buckets(aggr_loc, LocationsSearch, 'name') + aggr_loc = _get_buckets(lib_term, "owner_location") + _enrich_buckets(aggr_loc, LocationsSearch, "name") # Transaction library & location - aggr_lib = _get_buckets(aggregations, 'transaction_library') - _enrich_buckets(aggr_lib, LibrariesSearch, 'name') + aggr_lib = _get_buckets(aggregations, "transaction_library") + _enrich_buckets(aggr_lib, LibrariesSearch, "name") for lib_term in aggr_lib: - aggr_loc = _get_buckets(lib_term, 'transaction_location') - _enrich_buckets(aggr_loc, LocationsSearch, 'name') + aggr_loc = _get_buckets(lib_term, "transaction_location") + _enrich_buckets(aggr_loc, LocationsSearch, "name") # Pickup library & location - aggr_lib = _get_buckets(aggregations, 'pickup_library') - _enrich_buckets(aggr_lib, LibrariesSearch, 'name') + aggr_lib = _get_buckets(aggregations, "pickup_library") + _enrich_buckets(aggr_lib, LibrariesSearch, "name") for lib_term in aggr_lib: - aggr_loc = _get_buckets(lib_term, 'pickup_location') - _enrich_buckets(aggr_loc, LocationsSearch, 'name') + aggr_loc = _get_buckets(lib_term, "pickup_location") + _enrich_buckets(aggr_loc, LocationsSearch, "name") # Add configuration for `end_date` and `request_expire_date` # ES `date_histogram` facet need some configuration settings to # display the widget. - for aggr_name in ['end_date', 'request_expire_date']: + for aggr_name in ["end_date", "request_expire_date"]: aggr = aggregations.get(aggr_name, {}) JSONSerializer.add_date_range_configuration(aggr) @@ -155,9 +152,9 @@ def _enrich_buckets(buckets, search_class, attribute): # The `misc_status` aggregation buckets are based on ES filters # queries. We need to rebuild this aggregation to display each # filter query hit as a 'classic' term facet hit. 
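         # (Illustrative note, not part of the patch: an ES `filters`
         # aggregation returns named buckets, e.g.
         #   {"buckets": {"overdue": {"doc_count": 3},
         #                "expired_request": {"doc_count": 0}}}
         # and the rebuild below keeps only the non-empty entries, giving
         #   [{"key": "overdue", "doc_count": 3}].)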
- if misc_aggr := aggregations.get('misc_status', {}).get('buckets'): - aggregations['misc_status']['buckets'] = [ - {'key': term, 'doc_count': hit['doc_count']} + if misc_aggr := aggregations.get("misc_status", {}).get("buckets"): + aggregations["misc_status"]["buckets"] = [ + {"key": term, "doc_count": hit["doc_count"]} for term, hit in misc_aggr.items() - if hit.get('doc_count') + if hit.get("doc_count") ] diff --git a/rero_ils/modules/loans/tasks.py b/rero_ils/modules/loans/tasks.py index be15754a4a..63a2328f26 100644 --- a/rero_ils/modules/loans/tasks.py +++ b/rero_ils/modules/loans/tasks.py @@ -48,7 +48,7 @@ def loan_anonymizer(dbcommit=True, reindex=True): loan.anonymize(dbcommit=dbcommit, reindex=reindex) counter += 1 - set_timestamp('anonymize-loans', count=counter) + set_timestamp("anonymize-loans", count=counter) return counter @@ -68,18 +68,18 @@ def automatic_renewal(tstamp=None): until_date = tstamp + timedelta(days=1) for loan in get_overdue_loans(tstamp=until_date): policy = get_circ_policy(loan) - if policy.get('automatic_renewal'): + if policy.get("automatic_renewal"): if item := Item.get_record_by_pid(loan.item_pid): if item.can( action=ItemCirculationAction.EXTEND, patron_pid=loan.patron_pid, - loan=loan + loan=loan, )[0]: item.extend_loan( pid=loan.pid, transaction_location_pid=loan.location_pid, transaction_user_pid=loan.patron_pid, - auto_extend=True + auto_extend=True, ) extended_loans_count += 1 else: @@ -104,12 +104,15 @@ def cancel_expired_request_task(tstamp=None): _, actions = item.cancel_item_request( loan.pid, transaction_location_pid=loan.location_pid, - transaction_user_pid=loan.patron_pid + transaction_user_pid=loan.patron_pid, ) - if actions.get('cancel', {}).get('pid') == loan.pid: + if actions.get("cancel", {}).get("pid") == loan.pid: total_cancelled_loans += 1 - set_timestamp('cancel-expired-request-task', total=total_loans_counter, - cancelled=total_cancelled_loans) + set_timestamp( + "cancel-expired-request-task", + total=total_loans_counter, + cancelled=total_cancelled_loans, + ) return total_loans_counter, total_cancelled_loans @@ -119,18 +122,19 @@ def delete_loans_created(verbose=False, hours=1, dbcommit=True, delindex=True): now = datetime.now(timezone.utc) if hours >= 0: now -= timedelta(hours=hours) - count = LoansSearch().filter('term', state='CREATED').count() - query = LoansSearch() \ - .filter('term', state='CREATED').filter('range', _created={'lt': now}) + count = LoansSearch().filter("term", state="CREATED").count() + query = ( + LoansSearch() + .filter("term", state="CREATED") + .filter("range", _created={"lt": now}) + ) if verbose: - click.echo( - f'TOTAL: {count} DELETE: {query.count()} HOURS: {-query.count()}' - ) + click.echo(f"TOTAL: {count} DELETE: {query.count()} HOURS: {-query.count()}") idx = 0 - for idx, hit in enumerate(query.source('pid').scan(), 1): + for idx, hit in enumerate(query.source("pid").scan(), 1): loan = Loan.get_record_by_pid(hit.pid) - state = loan.get('state') + state = loan.get("state") if verbose: - click.echo(f'{idx:<10} {loan.pid:<10} {state} DELETE') + click.echo(f"{idx:<10} {loan.pid:<10} {state} DELETE") loan.delete(dbcommit=dbcommit, delindex=delindex) return idx diff --git a/rero_ils/modules/loans/utils.py b/rero_ils/modules/loans/utils.py index 985ddecf08..37fcea9728 100644 --- a/rero_ils/modules/loans/utils.py +++ b/rero_ils/modules/loans/utils.py @@ -45,17 +45,16 @@ def get_circ_policy(loan, checkout_location=False): :return the circulation policy related to the loan """ item = 
Item.get_record_by_pid(loan.item_pid) - library_pid = loan.checkout_library_pid if checkout_location else \ - loan.library_pid + library_pid = loan.checkout_library_pid if checkout_location else loan.library_pid - patron = Patron.get_record_by_pid(loan.get('patron_pid')) + patron = Patron.get_record_by_pid(loan.get("patron_pid")) patron_type_pid = patron.patron_type_pid return CircPolicy.provide_circ_policy( loan.organisation_pid, library_pid, patron_type_pid, - item.temporary_item_type_pid or item.holding_circulation_category_pid + item.temporary_item_type_pid or item.holding_circulation_category_pid, ) @@ -81,9 +80,11 @@ def get_default_loan_duration(loan, initial_loan): # method. This was not the place for this ; this function should # only return the loan duration. policy = get_circ_policy(loan) - due_date_eve = now_in_library_timezone \ - + timedelta(days=policy.get('checkout_duration', 0)) \ + due_date_eve = ( + now_in_library_timezone + + timedelta(days=policy.get("checkout_duration", 0)) - timedelta(days=1) + ) try: end_date = library.next_open(date=due_date_eve) except LibraryNeverOpen: @@ -91,13 +92,9 @@ def get_default_loan_duration(loan, initial_loan): end_date = due_date_eve + timedelta(days=1) # all libraries are closed at 23h59 # the next_open returns UTC. - end_date_in_library_timezone = end_date.astimezone( - library.get_timezone()).replace( - hour=23, - minute=59, - second=0, - microsecond=0 - ) + end_date_in_library_timezone = end_date.astimezone(library.get_timezone()).replace( + hour=23, minute=59, second=0, microsecond=0 + ) return end_date_in_library_timezone - now_in_library_timezone @@ -106,19 +103,19 @@ def get_extension_params(loan=None, initial_loan=None, parameter_name=None): # find the correct policy based on the checkout location for the extend # action. policy = get_circ_policy(loan, checkout_location=True) - end_date = ciso8601.parse_datetime(str(loan.get('end_date'))) + end_date = ciso8601.parse_datetime(str(loan.get("end_date"))) params = { - 'max_count': policy.get('number_renewals'), - 'duration_default': policy.get('renewal_duration') + "max_count": policy.get("number_renewals"), + "duration_default": policy.get("renewal_duration"), } # Get settings/records used to compute the duration: # * 'CIRCULATION_POLICIES' from app configuration. # * library (to check opening hours) - config_settings = current_app.config['CIRCULATION_POLICIES']['extension'] + config_settings = current_app.config["CIRCULATION_POLICIES"]["extension"] library = Library.get_record_by_pid(loan.library_pid) - if config_settings['from_end_date']: + if config_settings["from_end_date"]: trans_date_tz = end_date else: now_in_utc = datetime.now(timezone.utc) @@ -136,19 +133,21 @@ def get_extension_params(loan=None, initial_loan=None, parameter_name=None): # This check is now done previously by `CircPolicies.allow_checkout` # method. This was not the place for this ; this function should # only return the loan duration. - due_date_eve = trans_date_tz \ - + timedelta(days=policy.get('renewal_duration')) \ + due_date_eve = ( + trans_date_tz + + timedelta(days=policy.get("renewal_duration")) - timedelta(days=1) + ) next_open_date = library.next_open(date=due_date_eve) if next_open_date.date() < end_date.date(): - params['max_count'] = 0 + params["max_count"] = 0 # all libraries are closed at 23h59 --> the `next_open` returns UTC. 
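     # (Illustrative note, not part of the patch: if `next_open_date` falls
     # on 2024-06-10 and the library timezone is Europe/Zurich, the loan
     # becomes due on 2024-06-10 at 23:59 local time; the conversion below
     # boils down to
     #   next_open_date.astimezone(tz).replace(hour=23, minute=59,
     #                                         second=0, microsecond=0)
     # with `tz` taken from `library.get_timezone()`; the date is made up.)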
-        end_date_in_library_timezone = next_open_date\
-            .astimezone(library.get_timezone())\
-            .replace(hour=23, minute=59, second=0, microsecond=0)
-        params['duration_default'] = end_date_in_library_timezone - trans_date_tz
+        end_date_in_library_timezone = next_open_date.astimezone(
+            library.get_timezone()
+        ).replace(hour=23, minute=59, second=0, microsecond=0)
+        params["duration_default"] = end_date_in_library_timezone - trans_date_tz
 
     return params.get(parameter_name)
@@ -166,14 +165,14 @@ def extend_loan_data_is_valid(end_date, renewal_duration, library_pid):
     # if library has no open dates, use the default renewal duration
     except LibraryNeverOpen:
-        first_open_date = datetime.now(timezone.utc) \
-            + timedelta(days=renewal_duration) \
-            - timedelta(days=1)
+        first_open_date = (
+            datetime.now(timezone.utc)
+            + timedelta(days=renewal_duration)
+            - timedelta(days=1)
+        )
     return first_open_date.date() > end_date.date()
 
 
 def validate_loan_duration(loan):
     """Validate the loan duration."""
-    return loan['end_date'] > loan['start_date']
+    return loan["end_date"] > loan["start_date"]
 
 
 def is_item_available_for_checkout(item_pid):
@@ -194,43 +193,42 @@ def can_be_requested(loan):
     # requestable" and not "Is the item is really requestable".
 
     if not loan.item_pid:
-        raise Exception('Transaction on document is not implemented.')
+        raise Exception("Transaction on document is not implemented.")
 
     # 1) Check patron is not blocked
     patron = Patron.get_record_by_pid(loan.patron_pid)
-    if patron.patron.get('blocked', False):
+    if patron.patron.get("blocked", False):
         return False
 
     # 2) Check if owning location allows request
     location = Item.get_record_by_pid(loan.item_pid).get_circulation_location()
-    if not location or not location.get('allow_request'):
+    if not location or not location.get("allow_request"):
         return False
 
     # 3) Check if there is already a loan for same patron+item
     if get_any_loans_by_item_pid_by_patron_pid(
-        loan.get('item_pid', {}).get('value'),
-        loan.get('patron_pid')
+        loan.get("item_pid", {}).get("value"), loan.get("patron_pid")
     ):
         return False
 
     # 4) Check if circulation_policy allows request
     policy = get_circ_policy(loan)
-    return bool(policy.get('allow_requests'))
+    return bool(policy.get("allow_requests"))
 
 
 def loan_build_item_ref(loan_pid, loan):
     """Build $ref for the Item attached to the Loan."""
-    return get_ref_for_pid('items', loan.item_pid)
+    return get_ref_for_pid("items", loan.item_pid)
 
 
 def loan_build_patron_ref(loan_pid, loan):
     """Build $ref for the Patron attached to the Loan."""
-    return get_ref_for_pid('patrons', loan.patron_pid)
+    return get_ref_for_pid("patrons", loan.patron_pid)
 
 
 def loan_build_document_ref(loan_pid, loan):
     """Build $ref for the Document attached to the Loan."""
-    return get_ref_for_pid('documents', loan.document_pid)
+    return get_ref_for_pid("documents", loan.document_pid)
 
 
 def validate_item_pickup_transaction_locations(loan, destination, **kwargs):
@@ -241,22 +239,22 @@
     :param destination: the destination of loan.
     :param kwargs : all others named arguments
     :return: validation of the loan to next transition, True or False
     """
-    pickup_library_pid = kwargs.get('pickup_library_pid', None)
-    transaction_library_pid = kwargs.get('transaction_library_pid', None)
+    pickup_library_pid = kwargs.get("pickup_library_pid", None)
+    transaction_library_pid = kwargs.get("transaction_library_pid", None)
 
     # validation is made at the library level
     if not pickup_library_pid:
-        pickup_location_pid = loan['pickup_location_pid']
-        pickup_library_pid = Location.get_record_by_pid(
-            pickup_location_pid).library_pid
+
pickup_location_pid = loan["pickup_location_pid"] + pickup_library_pid = Location.get_record_by_pid(pickup_location_pid).library_pid if not transaction_library_pid: - transaction_location_pid = loan['transaction_location_pid'] + transaction_location_pid = loan["transaction_location_pid"] transaction_library_pid = Location.get_record_by_pid( - transaction_location_pid).library_pid + transaction_location_pid + ).library_pid - if destination == 'ITEM_AT_DESK': + if destination == "ITEM_AT_DESK": return pickup_library_pid == transaction_library_pid - elif destination == 'ITEM_IN_TRANSIT_FOR_PICKUP': + elif destination == "ITEM_IN_TRANSIT_FOR_PICKUP": return pickup_library_pid != transaction_library_pid @@ -268,10 +266,7 @@ def sum_for_fees(fee_steps): :return the sum of fee steps rounded with a precision of 2 digits after decimal """ - if fee_steps: - return round(math.fsum([fee[0] for fee in fee_steps]), 2) - else: - return 0 + return round(math.fsum([fee[0] for fee in fee_steps]), 2) if fee_steps else 0 def get_loan_checkout_date(loan_pid): @@ -281,9 +276,12 @@ def get_loan_checkout_date(loan_pid): :return the checkout date (if exists) as ``datetime.data``. """ from rero_ils.modules.operation_logs.api import OperationLogsSearch - query = OperationLogsSearch() \ - .filter('term', loan__pid=loan_pid) \ - .filter('term', loan__trigger=LoanAction.CHECKOUT) \ - .source('date') + + query = ( + OperationLogsSearch() + .filter("term", loan__pid=loan_pid) + .filter("term", loan__trigger=LoanAction.CHECKOUT) + .source("date") + ) if hit := next(query.scan(), None): return ciso8601.parse_datetime(hit.date) diff --git a/rero_ils/modules/local_fields/api.py b/rero_ils/modules/local_fields/api.py index d403078270..8218a72a92 100644 --- a/rero_ils/modules/local_fields/api.py +++ b/rero_ils/modules/local_fields/api.py @@ -23,20 +23,20 @@ from elasticsearch_dsl import Q from flask_babel import gettext as _ -from .models import LocalFieldIdentifier, LocalFieldMetadata +from ...modules.utils import extracted_data_from_ref from ..api import IlsRecord, IlsRecordsIndexer, IlsRecordsSearch from ..documents.api import Document from ..fetchers import id_fetcher from ..items.api import Item from ..minters import id_minter from ..providers import Provider -from ...modules.utils import extracted_data_from_ref +from .models import LocalFieldIdentifier, LocalFieldMetadata # provider LocalFieldProvider = type( - 'LocalFieldProvider', + "LocalFieldProvider", (Provider,), - dict(identifier=LocalFieldIdentifier, pid_type='lofi') + dict(identifier=LocalFieldIdentifier, pid_type="lofi"), ) # minter local_field_id_minter = partial(id_minter, provider=LocalFieldProvider) @@ -50,9 +50,9 @@ class LocalFieldsSearch(IlsRecordsSearch): class Meta: """Search only on local_field index.""" - index = 'local_fields' + index = "local_fields" doc_types = None - fields = ('*', ) + fields = ("*",) facets = {} default_filter = None @@ -64,10 +64,10 @@ def get_local_fields(self, parent_type, parent_pid, organisation_pid=None): :param organisation_pid: organisation pid filter value. :return: a list of ElasticSearch hit. 
""" - filters = Q('term', parent__type=parent_type) - filters &= Q('term', parent__pid=parent_pid) + filters = Q("term", parent__type=parent_type) + filters &= Q("term", parent__pid=parent_pid) if organisation_pid: - filters &= Q('term', organisation__pid=organisation_pid) + filters &= Q("term", organisation__pid=organisation_pid) return self.filter(filters) @@ -82,19 +82,20 @@ class LocalField(IlsRecord): def extended_validation(self, **kwargs): """Extended validation.""" # parent reference must exists - parent = extracted_data_from_ref(self.get('parent'), data='record') + parent = extracted_data_from_ref(self.get("parent"), data="record") if not parent: return _("Parent record doesn't exists.") # check if a local_fields resource exists for this document query = LocalFieldsSearch().get_local_fields( - parent.provider.pid_type, parent.pid, - extracted_data_from_ref(self.get('organisation')) + parent.provider.pid_type, + parent.pid, + extracted_data_from_ref(self.get("organisation")), ) - if query.exclude('term', pid=self['pid']).count(): - return _('Local fields already exist for this resource.') + if query.exclude("term", pid=self["pid"]).count(): + return _("Local fields already exist for this resource.") # check if all fields are empty. - if len(self.get('fields', {}).keys()) == 0: - return _('Missing fields.') + if len(self.get("fields", {}).keys()) == 0: + return _("Missing fields.") return True @staticmethod @@ -106,9 +107,11 @@ def get_local_fields_by_id(parent_type, parent_pid, organisation_pid=None): :param organisation_pid: organisation pid filter value. :returns: a generator of `LocalField` records. """ - search = LocalFieldsSearch()\ - .get_local_fields(parent_type, parent_pid, organisation_pid)\ + search = ( + LocalFieldsSearch() + .get_local_fields(parent_type, parent_pid, organisation_pid) .source(False) + ) for hit in search.scan(): yield LocalField.get_record(hit.meta.id) @@ -121,9 +124,7 @@ def get_local_fields(parent, organisation_pid=None): :returns: a generator of `LocalField` records. """ return LocalField.get_local_fields_by_id( - parent.provider.pid_type, - parent.pid, - organisation_pid + parent.provider.pid_type, parent.pid, organisation_pid ) @@ -138,7 +139,7 @@ def _reindex_parent_resource(record): :param record: the `LocalField` instance. """ - resource = extracted_data_from_ref(record['parent']['$ref'], 'record') + resource = extracted_data_from_ref(record["parent"]["$ref"], "record") if isinstance(resource, (Document, Item)): resource.reindex() @@ -165,4 +166,4 @@ def bulk_index(self, record_id_iterator): :param record_id_iterator: Iterator yielding record UUIDs. """ - super().bulk_index(record_id_iterator, doc_type='lofi') + super().bulk_index(record_id_iterator, doc_type="lofi") diff --git a/rero_ils/modules/local_fields/dumpers.py b/rero_ils/modules/local_fields/dumpers.py index a718d64d83..c52aeccd98 100644 --- a/rero_ils/modules/local_fields/dumpers.py +++ b/rero_ils/modules/local_fields/dumpers.py @@ -31,7 +31,7 @@ def dump(self, record, data): :param data: The initial dump data passed in by ``record.dumps()``. 
""" output = { - 'organisation_pid': record.organisation_pid, - 'fields': record.get('fields', {}) + "organisation_pid": record.organisation_pid, + "fields": record.get("fields", {}), } return data | output diff --git a/rero_ils/modules/local_fields/extensions.py b/rero_ils/modules/local_fields/extensions.py index 120e0bc4df..1dffb1e7b3 100644 --- a/rero_ils/modules/local_fields/extensions.py +++ b/rero_ils/modules/local_fields/extensions.py @@ -35,5 +35,6 @@ def pre_delete(self, record, force=False): :param force: is the suppression must be forced. """ from .api import LocalField + for local_field in LocalField.get_local_fields(record): local_field.delete(force=force, delindex=True) diff --git a/rero_ils/modules/local_fields/jsonresolver.py b/rero_ils/modules/local_fields/jsonresolver.py index 0827e6c5e5..4b2b4cd81b 100644 --- a/rero_ils/modules/local_fields/jsonresolver.py +++ b/rero_ils/modules/local_fields/jsonresolver.py @@ -22,13 +22,13 @@ from invenio_pidstore.models import PersistentIdentifier, PIDStatus -@jsonresolver.route('/api/local_fields/', host='bib.rero.ch') +@jsonresolver.route("/api/local_fields/", host="bib.rero.ch") def local_field_resolver(pid): """Resolver for local_field record.""" - persistent_id = PersistentIdentifier.get('lofi', pid) + persistent_id = PersistentIdentifier.get("lofi", pid) if persistent_id.status == PIDStatus.REGISTERED: return dict(pid=persistent_id.pid_value) current_app.logger.error( - f'Local fields resolver error: /api/local_fields/{pid} {persistent_id}' + f"Local fields resolver error: /api/local_fields/{pid} {persistent_id}" ) - raise Exception('unable to resolve') + raise Exception("unable to resolve") diff --git a/rero_ils/modules/local_fields/models.py b/rero_ils/modules/local_fields/models.py index b3b3f07f5d..b47882d60a 100644 --- a/rero_ils/modules/local_fields/models.py +++ b/rero_ils/modules/local_fields/models.py @@ -27,16 +27,17 @@ class LocalFieldIdentifier(RecordIdentifier): """Sequence generator for LocalField identifiers.""" - __tablename__ = 'local_field_id' - __mapper_args__ = {'concrete': True} + __tablename__ = "local_field_id" + __mapper_args__ = {"concrete": True} recid = db.Column( - db.BigInteger().with_variant(db.Integer, 'sqlite'), - primary_key=True, autoincrement=True, + db.BigInteger().with_variant(db.Integer, "sqlite"), + primary_key=True, + autoincrement=True, ) class LocalFieldMetadata(db.Model, RecordMetadataBase): """Local field record metadata.""" - __tablename__ = 'local_field_metadata' + __tablename__ = "local_field_metadata" diff --git a/rero_ils/modules/local_fields/permissions.py b/rero_ils/modules/local_fields/permissions.py index 4bf6b934dc..ef369b1ec7 100644 --- a/rero_ils/modules/local_fields/permissions.py +++ b/rero_ils/modules/local_fields/permissions.py @@ -18,16 +18,19 @@ """Permissions of Local field.""" from invenio_access import action_factory -from rero_ils.modules.permissions import AllowedByAction, \ - AllowedByActionRestrictByOrganisation, RecordPermissionPolicy +from rero_ils.modules.permissions import ( + AllowedByAction, + AllowedByActionRestrictByOrganisation, + RecordPermissionPolicy, +) # Actions to control "local field" policies for CRUD operations -search_action = action_factory('lofi-search') -read_action = action_factory('lofi-read') -create_action = action_factory('lofi-create') -update_action = action_factory('lofi-update') -delete_action = action_factory('lofi-delete') -access_action = action_factory('lofi-access') +search_action = action_factory("lofi-search") +read_action = 
action_factory("lofi-read") +create_action = action_factory("lofi-create") +update_action = action_factory("lofi-update") +delete_action = action_factory("lofi-delete") +access_action = action_factory("lofi-access") class LocalFieldPermissionPolicy(RecordPermissionPolicy): diff --git a/rero_ils/modules/locations/api.py b/rero_ils/modules/locations/api.py index 9c6b48764f..b819665d26 100644 --- a/rero_ils/modules/locations/api.py +++ b/rero_ils/modules/locations/api.py @@ -35,9 +35,7 @@ # provider LocationProvider = type( - 'LocationProvider', - (Provider,), - dict(identifier=LocationIdentifier, pid_type='loc') + "LocationProvider", (Provider,), dict(identifier=LocationIdentifier, pid_type="loc") ) # minter location_id_minter = partial(id_minter, provider=LocationProvider) @@ -51,22 +49,26 @@ class LocationsSearch(IlsRecordsSearch): class Meta: """Search only on locations index.""" - index = 'locations' + index = "locations" doc_types = None - fields = ('*', ) + fields = ("*",) facets = {} default_filter = None - def location_pids(self, library_pid, source='pid'): + def location_pids(self, library_pid, source="pid"): """Locations pid for given library. :param library_pid: string - the library to filter with :return: list of pid locations :rtype: list """ - return [location.pid for location in self.filter( - 'term', library__pid=library_pid).source(source).scan()] + return [ + location.pid + for location in self.filter("term", library__pid=library_pid) + .source(source) + .scan() + ] def by_organisation_pid(self, organisation_pid): """Build a search to get hits related to an organisation pid. @@ -75,7 +77,7 @@ def by_organisation_pid(self, organisation_pid): :returns: An ElasticSearch query to get hits related the entity. :rtype: `elasticsearch_dsl.Search` """ - return self.filter('term', organisation__pid=organisation_pid) + return self.filter("term", organisation__pid=organisation_pid) class Location(IlsRecord): @@ -86,15 +88,9 @@ class Location(IlsRecord): provider = LocationProvider model_cls = LocationMetadata enable_jsonref = False - pids_exist_check = { - 'required': { - 'lib': 'library' - } - } + pids_exist_check = {"required": {"lib": "library"}} - _extensions = [ - IsPickupToExtension() - ] + _extensions = [IsPickupToExtension()] def extended_validation(self, **kwargs): """Validate record against schema. 
@@ -104,41 +100,45 @@ def extended_validation(self, **kwargs): is present and not empty if location is pickup """ online_location_pid = self.get_library().online_location - if self.get('is_online') and online_location_pid and \ - self.pid != online_location_pid: - return _('Another online location exists in this library') - if self.get('is_pickup', False) and \ - not self.get('pickup_name', '').strip(): - return _('Pickup location name field is required.') + if ( + self.get("is_online") + and online_location_pid + and self.pid != online_location_pid + ): + return _("Another online location exists in this library") + if self.get("is_pickup", False) and not self.get("pickup_name", "").strip(): + return _("Pickup location name field is required.") return True @classmethod - def get_pickup_location_pids(cls, patron_pid=None, item_pid=None, - is_ill_pickup=False): + def get_pickup_location_pids( + cls, patron_pid=None, item_pid=None, is_ill_pickup=False + ): """Return pickup locations.""" from rero_ils.modules.items.api import Item from rero_ils.modules.patrons.api import Patron + search = LocationsSearch() if item_pid: loc = Item.get_record_by_pid(item_pid).get_location() if loc.restrict_pickup_to: - search = search.filter('terms', pid=loc.restrict_pickup_to) + search = search.filter("terms", pid=loc.restrict_pickup_to) - field = 'is_ill_pickup' if is_ill_pickup else 'is_pickup' - search = search.filter('term', **{field: True}) + field = "is_ill_pickup" if is_ill_pickup else "is_pickup" + search = search.filter("term", **{field: True}) if patron_pid: org_pid = Patron.get_record_by_pid(patron_pid).organisation_pid - search = search.filter('term', organisation__pid=org_pid) + search = search.filter("term", organisation__pid=org_pid) - locations = search.source(['pid']).scan() + locations = search.source(["pid"]).scan() for location in locations: yield location.pid def get_library(self): """Get library.""" - return extracted_data_from_ref(self.get('library'), data='record') + return extracted_data_from_ref(self.get("library"), data="record") def get_links_to_me(self, get_pids=False): """Record links. 
@@ -149,21 +149,31 @@ def get_links_to_me(self, get_pids=False): from ..holdings.api import HoldingsSearch from ..items.api import ItemsSearch from ..loans.api import LoansSearch - item_query = ItemsSearch() \ - .filter('bool', should=[ - Q('term', location__pid=self.pid), - Q('term', temporary_location__pid=self.pid) - ]) + + item_query = ItemsSearch().filter( + "bool", + should=[ + Q("term", location__pid=self.pid), + Q("term", temporary_location__pid=self.pid), + ], + ) exclude_states = [ - LoanState.CANCELLED, LoanState.ITEM_RETURNED, LoanState.CREATED] - loan_query = LoansSearch() \ - .filter('bool', should=[ - Q('term', pickup_location_pid=self.pid), - Q('term', transaction_location_pid=self.pid) - ]) \ - .exclude('terms', state=exclude_states) - holdings_query = HoldingsSearch() \ - .filter('term', location__pid=self.pid) + LoanState.CANCELLED, + LoanState.ITEM_RETURNED, + LoanState.CREATED, + ] + loan_query = ( + LoansSearch() + .filter( + "bool", + should=[ + Q("term", pickup_location_pid=self.pid), + Q("term", transaction_location_pid=self.pid), + ], + ) + .exclude("terms", state=exclude_states) + ) + holdings_query = HoldingsSearch().filter("term", location__pid=self.pid) links = {} if get_pids: items = sorted_pids(item_query) @@ -173,11 +183,7 @@ def get_links_to_me(self, get_pids=False): items = item_query.count() loans = loan_query.count() holdings = holdings_query.count() - links = { - 'items': items, - 'loans': loans, - 'holdings': holdings - } + links = {"items": items, "loans": loans, "holdings": holdings} return {k: v for k, v in links.items() if v} def resolve(self): @@ -191,18 +197,18 @@ def reasons_not_to_delete(self): """Get reasons not to delete record.""" cannot_delete = {} if links := self.get_links_to_me(): - cannot_delete['links'] = links + cannot_delete["links"] = links return cannot_delete @property def library_pid(self): """Get library pid for location.""" - return extracted_data_from_ref(self.get('library')) + return extracted_data_from_ref(self.get("library")) @property def library(self): """Get library record related to this location.""" - return extracted_data_from_ref(self.get('library'), data='record') + return extracted_data_from_ref(self.get("library"), data="record") @property def organisation_pid(self): @@ -217,14 +223,17 @@ def restrict_pickup_to(self): """Get restriction pickup location pid of location.""" return [ extracted_data_from_ref(restrict_pickup_to) - for restrict_pickup_to in self.get('restrict_pickup_to', []) + for restrict_pickup_to in self.get("restrict_pickup_to", []) ] @property def pickup_name(self): """Get pickup name for location.""" - return self['pickup_name'] if 'pickup_name' in self \ + return ( + self["pickup_name"] + if "pickup_name" in self else f"{self.library['code']}: {self['name']}" + ) @classmethod def can_request(cls, record, **kwargs): @@ -235,12 +244,12 @@ def can_request(cls, record, **kwargs): :return a tuple with True|False and reasons to disallow if False. 
""" if record: - location_method = 'get_location' - if hasattr(record, 'get_circulation_location'): - location_method = 'get_circulation_location' + location_method = "get_location" + if hasattr(record, "get_circulation_location"): + location_method = "get_circulation_location" location = getattr(record, location_method)() - if not location.get('allow_request', False): - return False, [_('Record location disallows request.')] + if not location.get("allow_request", False): + return False, [_("Record location disallows request.")] return True, [] def transaction_location_validator(self, location_pid): @@ -265,4 +274,4 @@ def bulk_index(self, record_id_iterator): :param record_id_iterator: Iterator yielding record UUIDs. """ - super().bulk_index(record_id_iterator, doc_type='loc') + super().bulk_index(record_id_iterator, doc_type="loc") diff --git a/rero_ils/modules/locations/extensions.py b/rero_ils/modules/locations/extensions.py index 3524bea051..dea905ff39 100644 --- a/rero_ils/modules/locations/extensions.py +++ b/rero_ils/modules/locations/extensions.py @@ -32,8 +32,8 @@ def pre_commit(self, record): """ # Remove the possible `pickup_name` if the location isn't (yet) # defined as a pickup location. - if not record.get('is_pickup', False): - record.pop('pickup_name', None) + if not record.get("is_pickup", False): + record.pop("pickup_name", None) def post_commit(self, record): """Called after a record is committed. diff --git a/rero_ils/modules/locations/indexer.py b/rero_ils/modules/locations/indexer.py index adc1775985..563788658d 100644 --- a/rero_ils/modules/locations/indexer.py +++ b/rero_ils/modules/locations/indexer.py @@ -32,20 +32,12 @@ def dump(self, record, data): :param record: The record to dump. :param data: The initial dump data passed in by ``record.dumps()``. 
""" - data['organisation'] = { - 'pid': record.organisation_pid, - 'type': 'org' - } + data["organisation"] = {"pid": record.organisation_pid, "type": "org"} return data -location_replace_refs_dumper = MultiDumper(dumpers=[ - Dumper(), - ReplaceRefsDumper() -]) +location_replace_refs_dumper = MultiDumper(dumpers=[Dumper(), ReplaceRefsDumper()]) -location_indexer_dumper = MultiDumper(dumpers=[ - Dumper(), - ReplaceRefsDumper(), - LocationIndexerDumper() -]) +location_indexer_dumper = MultiDumper( + dumpers=[Dumper(), ReplaceRefsDumper(), LocationIndexerDumper()] +) diff --git a/rero_ils/modules/locations/jsonresolver.py b/rero_ils/modules/locations/jsonresolver.py index bd9ca4757a..4f48da1428 100644 --- a/rero_ils/modules/locations/jsonresolver.py +++ b/rero_ils/modules/locations/jsonresolver.py @@ -24,7 +24,7 @@ from ..jsonresolver import resolve_json_refs -@jsonresolver.route('/api/locations/', host='bib.rero.ch') +@jsonresolver.route("/api/locations/", host="bib.rero.ch") def location_resolver(pid): """Location resolver.""" - return resolve_json_refs('loc', pid) + return resolve_json_refs("loc", pid) diff --git a/rero_ils/modules/locations/models.py b/rero_ils/modules/locations/models.py index b8e215d845..23698fcf20 100644 --- a/rero_ils/modules/locations/models.py +++ b/rero_ils/modules/locations/models.py @@ -28,16 +28,17 @@ class LocationIdentifier(RecordIdentifier): """Sequence generator for Location identifiers.""" - __tablename__ = 'location_id' - __mapper_args__ = {'concrete': True} + __tablename__ = "location_id" + __mapper_args__ = {"concrete": True} recid = db.Column( - db.BigInteger().with_variant(db.Integer, 'sqlite'), - primary_key=True, autoincrement=True, + db.BigInteger().with_variant(db.Integer, "sqlite"), + primary_key=True, + autoincrement=True, ) class LocationMetadata(db.Model, RecordMetadataBase): """Location record metadata.""" - __tablename__ = 'location_metadata' + __tablename__ = "location_metadata" diff --git a/rero_ils/modules/locations/permissions.py b/rero_ils/modules/locations/permissions.py index b693acd760..7aaedadeee 100644 --- a/rero_ils/modules/locations/permissions.py +++ b/rero_ils/modules/locations/permissions.py @@ -19,17 +19,20 @@ """Permissions for locations.""" from invenio_access import action_factory -from rero_ils.modules.permissions import AllowedByAction, \ - AllowedByActionRestrictByManageableLibrary, \ - AllowedByActionRestrictByOrganisation, RecordPermissionPolicy +from rero_ils.modules.permissions import ( + AllowedByAction, + AllowedByActionRestrictByManageableLibrary, + AllowedByActionRestrictByOrganisation, + RecordPermissionPolicy, +) # Actions to control location policy -search_action = action_factory('loc-search') -read_action = action_factory('loc-read') -create_action = action_factory('loc-create') -update_action = action_factory('loc-update') -delete_action = action_factory('loc-delete') -access_action = action_factory('loc-access') +search_action = action_factory("loc-search") +read_action = action_factory("loc-read") +create_action = action_factory("loc-create") +update_action = action_factory("loc-update") +delete_action = action_factory("loc-delete") +access_action = action_factory("loc-access") class LocationPermissionPolicy(RecordPermissionPolicy): diff --git a/rero_ils/modules/locations/serializers/__init__.py b/rero_ils/modules/locations/serializers/__init__.py index a83abb9d0a..e83bde87d9 100644 --- a/rero_ils/modules/locations/serializers/__init__.py +++ b/rero_ils/modules/locations/serializers/__init__.py @@ -23,10 
+23,10 @@ from .json import LocationJSONSerializer __all__ = [ - 'json_loc_search', + "json_loc_search", ] """JSON serializer.""" _json = LocationJSONSerializer(RecordSchemaJSONV1) -json_loc_search = search_responsify(_json, 'application/rero+json') +json_loc_search = search_responsify(_json, "application/rero+json") diff --git a/rero_ils/modules/locations/serializers/json.py b/rero_ils/modules/locations/serializers/json.py index 5bc5695875..7426048682 100644 --- a/rero_ils/modules/locations/serializers/json.py +++ b/rero_ils/modules/locations/serializers/json.py @@ -19,8 +19,7 @@ """Location serialization.""" from rero_ils.modules.libraries.api import LibrariesSearch -from rero_ils.modules.serializers import CachedDataSerializerMixin, \ - JSONSerializer +from rero_ils.modules.serializers import CachedDataSerializerMixin, JSONSerializer class LocationJSONSerializer(JSONSerializer, CachedDataSerializerMixin): @@ -28,11 +27,11 @@ class LocationJSONSerializer(JSONSerializer, CachedDataSerializerMixin): def _postprocess_search_hit(self, hit): """Post-process each hit of a search result.""" - metadata = hit.get('metadata', {}) + metadata = hit.get("metadata", {}) # Add label for some $ref fields. - pid = metadata.get('library', {}).get('pid') + pid = metadata.get("library", {}).get("pid") if pid and (resource := self.get_resource(LibrariesSearch(), pid)): - metadata['library']['code'] = resource.get('code') - metadata['library']['name'] = resource.get('name') + metadata["library"]["code"] = resource.get("code") + metadata["library"]["name"] = resource.get("name") super()._postprocess_search_hit(hit) diff --git a/rero_ils/modules/locations/tasks.py b/rero_ils/modules/locations/tasks.py index b8ca8f958c..85cedf5686 100644 --- a/rero_ils/modules/locations/tasks.py +++ b/rero_ils/modules/locations/tasks.py @@ -34,24 +34,26 @@ def remove_location_from_restriction(restricted_location): # If the location is defined as a pickup location, no need to remove it # from restriction; just stop the process. - if restricted_location.get('is_pickup', False): + if restricted_location.get("is_pickup", False): return # Search for locations that uses the restricted location into # `restrict_pickup_to` field. For each of these locations, remove the # restricted location from this field and reindex the record. 
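     # (Illustrative note, not part of the patch: `restrict_pickup_to`
     # holds {"$ref": <url>} entries; with made-up pids the comprehension
     # below behaves like
     #   refs = [{"$ref": ".../locations/loc1"}, {"$ref": ".../locations/loc2"}]
     #   kept = [r for r in refs if extracted_data_from_ref(r) != "loc2"]
     # keeping only the entries that do not point at the deleted location.)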
- restricted_pid = restricted_location['pid'] - query = LocationsSearch() \ - .filter('term', restrict_pickup_to__pid=restricted_pid) \ + restricted_pid = restricted_location["pid"] + query = ( + LocationsSearch() + .filter("term", restrict_pickup_to__pid=restricted_pid) .source(False) + ) for hit in query.scan(): location = Location.get_record(hit.meta.id) restricted_location = [ location_ref - for location_ref in location['restrict_pickup_to'] + for location_ref in location["restrict_pickup_to"] if extracted_data_from_ref(location_ref) != restricted_pid ] - del location['restrict_pickup_to'] + del location["restrict_pickup_to"] if restricted_location: - location['restrict_pickup_to'] = restricted_location + location["restrict_pickup_to"] = restricted_location location.update(location, dbcommit=True, reindex=True) diff --git a/rero_ils/modules/message.py b/rero_ils/modules/message.py index 72684b21ae..1f3fe88e30 100644 --- a/rero_ils/modules/message.py +++ b/rero_ils/modules/message.py @@ -21,10 +21,10 @@ from markupsafe import Markup -class Message(): +class Message: """Message for the user.""" - prefix = 'message_' + prefix = "message_" @classmethod def set(cls, key, type, value): @@ -35,8 +35,8 @@ def set(cls, key, type, value): :param value: the value of message. :return: True if the insertion went well. """ - data = {'type': type or 'primary', 'message': Markup(value)} - return current_cache.set(f'{cls.prefix}{key}', data) + data = {"type": type or "primary", "message": Markup(value)} + return current_cache.set(f"{cls.prefix}{key}", data) @classmethod def get(cls, key): @@ -45,7 +45,7 @@ def get(cls, key): :param key: the cache key. :return: empty or the json. """ - return current_cache.get(f'{cls.prefix}{key}') + return current_cache.get(f"{cls.prefix}{key}") @classmethod def delete(cls, key): @@ -54,4 +54,4 @@ def delete(cls, key): :param key: the cache key. :return: True if the removal went well. """ - return current_cache.delete(f'{cls.prefix}{key}') + return current_cache.delete(f"{cls.prefix}{key}") diff --git a/rero_ils/modules/minters.py b/rero_ils/modules/minters.py index 3a96cb427a..3b99589e24 100644 --- a/rero_ils/modules/minters.py +++ b/rero_ils/modules/minters.py @@ -20,12 +20,10 @@ from __future__ import absolute_import, print_function, unicode_literals -def id_minter(record_uuid, data, provider, pid_key='pid', object_type='rec'): +def id_minter(record_uuid, data, provider, pid_key="pid", object_type="rec"): """RERO ILS minter.""" provider = provider.create( - object_type=object_type, - object_uuid=record_uuid, - pid_value=data.get(pid_key) + object_type=object_type, object_uuid=record_uuid, pid_value=data.get(pid_key) ) persistent_identifier = provider.pid diff --git a/rero_ils/modules/monitoring/api.py b/rero_ils/modules/monitoring/api.py index f8ebbc7cb6..08124ab822 100644 --- a/rero_ils/modules/monitoring/api.py +++ b/rero_ils/modules/monitoring/api.py @@ -26,7 +26,7 @@ from invenio_search import RecordsSearch from sqlalchemy import text -db_connection_counts_query = """ +DB_CONNECTION_COUNTS_QUERY = """ select max_conn, used, res_for_super, max_conn-used-res_for_super res_for_normal @@ -48,7 +48,7 @@ """ -db_connections_query = """ +DB_CONNECTIONS_QUERY = """ SELECT pid, application_name, client_addr, client_port, backend_start, xact_start, query_start, wait_event, state, left(query, 64) @@ -68,7 +68,7 @@ class Monitoring(object): times of selected functions. 
""" - has_no_db = ['oplg', 'ent'] + has_no_db = ["oplg", "ent"] def __init__(self, time_delta=1): """Constructor. @@ -88,16 +88,16 @@ def __str__(self): 4. elasticsearch index 5. elasticsearch count """ - result = '' + result = "" msg_head = f'DB - ES{"type":>8}{"count":>11}{"index":>27}{"count":>11}' msg_head += f'\n{"":-^64s}\n' for doc_type, info in sorted(self.info().items()): - db_es = info.get('db-es', '') - count_db = info.get('db', '') - msg = f'{db_es:>7} {doc_type:>6} {count_db:>10}' - if index := info.get('index', ''): + db_es = info.get("db-es", "") + count_db = info.get("db", "") + msg = f"{db_es:>7} {doc_type:>6} {count_db:>10}" + if index := info.get("index", ""): msg += f' {index:>25} {info.get("es", ""):>10}' - result += msg + '\n' + result += msg + "\n" return msg_head + result @classmethod @@ -110,9 +110,8 @@ def get_db_count(cls, doc_type, with_deleted=False, date=None): :param with_deleted: count also deleted items. :return: item count. """ - if not current_app.config.get( - 'RECORDS_REST_ENDPOINTS').get(doc_type): - return f'No >>{doc_type}<< in DB' + if not current_app.config.get("RECORDS_REST_ENDPOINTS").get(doc_type): + return f"No >>{doc_type}<< in DB" query = PersistentIdentifier.query.filter_by(pid_type=doc_type) if not with_deleted: query = query.filter_by(status=PIDStatus.REGISTERED) @@ -132,15 +131,14 @@ def get_es_count(cls, index, date=None): try: query = RecordsSearch(index=index).query() if date: - query = query.filter('range', _created={'lte': date}) + query = query.filter("range", _created={"lte": date}) result = query.count() except NotFoundError: - result = f'No >>{index}<< in ES' + result = f"No >>{index}<< in ES" return result @classmethod - def get_all_pids(cls, doc_type, with_deleted=False, limit=100000, - date=None): + def get_all_pids(cls, doc_type, with_deleted=False, limit=100000, date=None): """Get all doc_type pids. Return a generator iterator. :param with_deleted: get also deleted pids. 
@@ -156,7 +154,7 @@ def get_all_pids(cls, doc_type, with_deleted=False, limit=100000, if limit: count = query.count() # slower, less memory - query = query.order_by(text('pid_value')).limit(limit) + query = query.order_by(text("pid_value")).limit(limit) offset = 0 while offset < count: for identifier in query.offset(offset): @@ -169,27 +167,24 @@ def get_all_pids(cls, doc_type, with_deleted=False, limit=100000, def get_es_db_missing_pids(self, doc_type, with_deleted=False): """Get ES and DB counts.""" - endpoint = current_app.config.get( - 'RECORDS_REST_ENDPOINTS' - ).get(doc_type, {}) - index = endpoint.get('search_index') + endpoint = current_app.config.get("RECORDS_REST_ENDPOINTS").get(doc_type, {}) + index = endpoint.get("search_index") pids_es_double = [] pids_es = [] pids_db = [] if index and doc_type not in self.has_no_db: date = datetime.utcnow() - timedelta(minutes=self.time_delta) pids_es = {} - es_query = RecordsSearch(index=index) \ - .filter('range', _created={'lte': date}) - for hit in es_query.source('pid').scan(): + es_query = RecordsSearch(index=index).filter( + "range", _created={"lte": date} + ) + for hit in es_query.source("pid").scan(): if pids_es.get(hit.pid): pids_es_double.append(hit.pid) pids_es[hit.pid] = 1 pids_db = [] for pid in self.get_all_pids( - doc_type, - with_deleted=with_deleted, - date=date + doc_type, with_deleted=with_deleted, date=date ): if pids_es.get(pid): pids_es.pop(pid) @@ -208,37 +203,38 @@ def info(self, with_deleted=False, difference_db_es=False): """ info = {} for doc_type, endpoint in current_app.config.get( - 'RECORDS_REST_ENDPOINTS' + "RECORDS_REST_ENDPOINTS" ).items(): info[doc_type] = {} date = datetime.utcnow() - timedelta(minutes=self.time_delta) if doc_type not in self.has_no_db: count_db = self.get_db_count( - doc_type, with_deleted=with_deleted, date=date) + doc_type, with_deleted=with_deleted, date=date + ) count_db = count_db if isinstance(count_db, int) else 0 - info[doc_type]['db'] = count_db - if index := endpoint.get('search_index', ''): + info[doc_type]["db"] = count_db + if index := endpoint.get("search_index", ""): count_es = self.get_es_count(index, date=date) count_es = count_es if isinstance(count_es, int) else 0 db_es = count_db - count_es - info[doc_type]['index'] = index - info[doc_type]['es'] = count_es + info[doc_type]["index"] = index + info[doc_type]["es"] = count_es if doc_type not in self.has_no_db: - info[doc_type]['db-es'] = db_es + info[doc_type]["db-es"] = db_es if db_es == 0 and difference_db_es: - missing_in_db, missing_in_es, pids_es_double, index = \ + missing_in_db, missing_in_es, pids_es_double, index = ( self.get_es_db_missing_pids( - doc_type=doc_type, - with_deleted=with_deleted + doc_type=doc_type, with_deleted=with_deleted ) + ) if index: if missing_in_db: - info[doc_type]['db-'] = list(missing_in_db) + info[doc_type]["db-"] = list(missing_in_db) if missing_in_es: - info[doc_type]['es-'] = list(missing_in_es) + info[doc_type]["es-"] = list(missing_in_es) else: - info[doc_type]['db'] = 0 - info[doc_type]['db-es'] = 0 + info[doc_type]["db"] = 0 + info[doc_type]["db-es"] = 0 return info def check(self, with_deleted=False, difference_db_es=False): @@ -250,19 +246,18 @@ def check(self, with_deleted=False, difference_db_es=False): """ checks = {} for info, data in self.info( - with_deleted=with_deleted, - difference_db_es=difference_db_es + with_deleted=with_deleted, difference_db_es=difference_db_es ).items(): - db_es = data.get('db-es', '') - if db_es and db_es not in [0, '']: + db_es = 
data.get("db-es", "") + if db_es and db_es not in [0, ""]: checks.setdefault(info, {}) - checks[info]['db_es'] = db_es - if data.get('db-'): + checks[info]["db_es"] = db_es + if data.get("db-"): checks.setdefault(info, {}) - checks[info]['db-'] = len(data.get('db-')) - if data.get('es-'): + checks[info]["db-"] = len(data.get("db-")) + if data.get("es-"): checks.setdefault(info, {}) - checks[info]['es-'] = len(data.get('es-')) + checks[info]["es-"] = len(data.get("es-")) return checks def missing(self, doc_type, with_deleted=False): @@ -274,19 +269,17 @@ def missing(self, doc_type, with_deleted=False): :param doc_type: doc type to get missing pids. :return: dictionary with all missing pids. """ - missing_in_db, missing_in_es, pids_es_double, index =\ - self.get_es_db_missing_pids( - doc_type=doc_type, - with_deleted=with_deleted - ) + missing_in_db, missing_in_es, pids_es_double, index = ( + self.get_es_db_missing_pids(doc_type=doc_type, with_deleted=with_deleted) + ) if index: return { - 'DB': list(missing_in_db), - 'ES': list(missing_in_es), - 'ES duplicate': pids_es_double + "DB": list(missing_in_db), + "ES": list(missing_in_es), + "ES duplicate": pids_es_double, } else: - return {'ERROR': f'Document type not found: {doc_type}'} + return {"ERROR": f"Document type not found: {doc_type}"} def print_missing(self, doc_type): """Print missing pids for the given document type. @@ -294,22 +287,20 @@ def print_missing(self, doc_type): :param doc_type: doc type to print. """ missing = self.missing(doc_type=doc_type) - if 'ERROR' in missing: - click.secho(f'Error: {missing["ERROR"]}', fg='yellow') + if "ERROR" in missing: + click.secho(f'Error: {missing["ERROR"]}', fg="yellow") else: - if 'ES duplicate' in missing and missing["ES duplicate"]: + if "ES duplicate" in missing and missing["ES duplicate"]: click.secho( - f'ES duplicate {doc_type}:' + f"ES duplicate {doc_type}:" f' {", ".join(missing["ES duplicate"])}', - fg='red' + fg="red", ) - if 'ES' in missing and missing["ES"]: + if "ES" in missing and missing["ES"]: click.secho( - f'ES missing {doc_type}: {", ".join(missing["ES"])}', - fg='red' + f'ES missing {doc_type}: {", ".join(missing["ES"])}', fg="red" ) - if 'DB' in missing and missing["DB"]: + if "DB" in missing and missing["DB"]: click.secho( - f'DB missing {doc_type}: {", ".join(missing["DB"])}', - fg='red' + f'DB missing {doc_type}: {", ".join(missing["DB"])}', fg="red" ) diff --git a/rero_ils/modules/monitoring/cli.py b/rero_ils/modules/monitoring/cli.py index 76dcd379c1..4110c02932 100644 --- a/rero_ils/modules/monitoring/cli.py +++ b/rero_ils/modules/monitoring/cli.py @@ -25,7 +25,7 @@ from invenio_search import current_search_client from redis import Redis -from .api import Monitoring, db_connection_counts_query, db_connections_query +from .api import DB_CONNECTION_COUNTS_QUERY, DB_CONNECTIONS_QUERY, Monitoring @click.group() @@ -33,11 +33,22 @@ def monitoring(): """Monitoring commands.""" -@monitoring.command('es_db_counts') -@click.option('-m', '--missing', 'missing', is_flag=True, default=False, - help='Display missing pids.') -@click.option('-d', '--delay', 'delay', default=1, - help='Get ES and DB counts from delay min minutes in the past.') +@monitoring.command("es_db_counts") +@click.option( + "-m", + "--missing", + "missing", + is_flag=True, + default=False, + help="Display missing pids.", +) +@click.option( + "-d", + "--delay", + "delay", + default=1, + help="Get ES and DB counts from delay min minutes in the past.", +) @with_appcontext def es_db_counts_cli(missing, 
delay): """Print ES and DB counts. @@ -57,13 +68,13 @@ def es_db_counts_cli(missing, delay): click.echo(msg_head) info = mon.info(with_deleted=False, difference_db_es=False) for doc_type in sorted(info): - db_es = info[doc_type].get('db-es', '') + db_es = info[doc_type].get("db-es", "") msg = f'{db_es:>7}{doc_type:>8}{info[doc_type].get("db", ""):>11}' - index = info[doc_type].get('index', '') + index = info[doc_type].get("index", "") if index: msg += f'{index:>27}{info[doc_type].get("es", ""):>11}' - if db_es not in [0, '']: - click.secho(msg, fg='red') + if db_es not in [0, ""]: + click.secho(msg, fg="red") else: click.echo(msg) if missing and index: @@ -72,10 +83,15 @@ def es_db_counts_cli(missing, delay): mon.print_missing(missing_doc_type) -@monitoring.command('es_db_missing') -@click.argument('doc_type') -@click.option('-d', '--delay', 'delay', default=1, - help='Get ES and DB counts from delay minutes in the past.') +@monitoring.command("es_db_missing") +@click.argument("doc_type") +@click.option( + "-d", + "--delay", + "delay", + default=1, + help="Get ES and DB counts from delay minutes in the past.", +) @with_appcontext def es_db_missing_cli(doc_type, delay): """Print missing pids informations.""" @@ -83,74 +99,85 @@ def es_db_missing_cli(doc_type, delay): mon.print_missing(doc_type) -@monitoring.command('time_stamps') +@monitoring.command("time_stamps") @with_appcontext def time_stamps_cli(): """Print time_stamps information.""" - if cache := current_cache.get('timestamps'): + if cache := current_cache.get("timestamps"): for key, value in cache.items(): - time = value.pop('time') - args = [f'{k}={v}' for k, v in value.items()] + time = value.pop("time") + args = [f"{k}={v}" for k, v in value.items()] click.echo(f'{time}: {key} {" | ".join(args)}') -@monitoring.command('es') +@monitoring.command("es") @with_appcontext def es(): """Displays Elasticsearch cluster info.""" for key, value in current_search_client.cluster.health().items(): - click.echo(f'{key:<33}: {value}') + click.echo(f"{key:<33}: {value}") -@monitoring.command('es_indices') +@monitoring.command("es_indices") @with_appcontext def es_indices(): """Displays Elasticsearch indices info.""" - click.echo(current_search_client.cat.indices(s='index')) + click.echo(current_search_client.cat.indices(s="index")) @monitoring.command() @with_appcontext def redis(): """Displays redis info.""" - url = current_app.config.get('ACCOUNTS_SESSION_REDIS_URL', - 'redis://localhost:6379') + url = current_app.config.get("ACCOUNTS_SESSION_REDIS_URL", "redis://localhost:6379") redis = Redis.from_url(url) for key, value in redis.info().items(): - click.echo(f'{key:<33}: {value}') + click.echo(f"{key:<33}: {value}") -@monitoring.command('db_connection_counts') +@monitoring.command("db_connection_counts") @with_appcontext def db_connection_counts(): """Display DB connection counts.""" try: max_conn, used, res_for_super, free = db.session.execute( - db_connection_counts_query).first() + DB_CONNECTION_COUNTS_QUERY + ).first() except Exception as error: - click.secho(f'ERROR: {error}', fg='red') - return click.secho(f'max: {max_conn}, used: {used}, ' - f'res_super: {res_for_super}, free: {free}') + click.secho(f"ERROR: {error}", fg="red") + return click.secho( + f"max: {max_conn}, used: {used}, " f"res_super: {res_for_super}, free: {free}" + ) -@monitoring.command('db_connections') +@monitoring.command("db_connections") @with_appcontext def db_connections(): """Display DB connections.""" try: - results = 
db.session.execute(db_connections_query).fetchall() + results = db.session.execute(DB_CONNECTIONS_QUERY).fetchall() except Exception as error: - click.secho(f'ERROR: {error}', fg='red') - for pid, application_name, client_addr, client_port, backend_start, \ - xact_start, query_start, wait_event, state, left in results: + click.secho(f"ERROR: {error}", fg="red") + for ( + pid, + application_name, + client_addr, + client_port, + backend_start, + xact_start, + query_start, + wait_event, + state, + left, + ) in results: click.secho( - f'application_name: {application_name}\n' - f'client_addr: {client_addr}\n' - f'client_port: {client_port}\n' - f'backend_start: {backend_start}\n' - f'xact_start: {xact_start}\n' - f'query_start: {query_start}\n' - f'wait_event: {wait_event}\n' - f'state: {state}\n' - f'left: {left}\n' + f"application_name: {application_name}\n" + f"client_addr: {client_addr}\n" + f"client_port: {client_port}\n" + f"backend_start: {backend_start}\n" + f"xact_start: {xact_start}\n" + f"query_start: {query_start}\n" + f"wait_event: {wait_event}\n" + f"state: {state}\n" + f"left: {left}\n" ) diff --git a/rero_ils/modules/monitoring/views.py b/rero_ils/modules/monitoring/views.py index c5b1f9d75e..7102dde23a 100644 --- a/rero_ils/modules/monitoring/views.py +++ b/rero_ils/modules/monitoring/views.py @@ -27,29 +27,27 @@ from invenio_search import current_search_client from redis import Redis -from .api import Monitoring, db_connection_counts_query, db_connections_query from ...permissions import monitoring_permission +from .api import DB_CONNECTION_COUNTS_QUERY, DB_CONNECTIONS_QUERY, Monitoring -api_blueprint = Blueprint( - 'api_monitoring', - __name__, - url_prefix='/monitoring' -) +api_blueprint = Blueprint("api_monitoring", __name__, url_prefix="/monitoring") def check_authentication(func): """Decorator to check authentication for items HTTP API.""" + @wraps(func) def decorated_view(*args, **kwargs): if not current_user.is_authenticated: - return jsonify({'status': 'error: Unauthorized'}), 401 + return jsonify({"status": "error: Unauthorized"}), 401 if not monitoring_permission.require().can(): - return jsonify({'status': 'error: Forbidden'}), 403 + return jsonify({"status": "error: Forbidden"}), 403 return func(*args, **kwargs) + return decorated_view -@api_blueprint.route('/db_connection_counts') +@api_blueprint.route("/db_connection_counts") @check_authentication def db_connection_counts(): """Display DB connection counts. @@ -58,18 +56,23 @@ def db_connection_counts(): """ try: max_conn, used, res_for_super, free = db.session.execute( - db_connection_counts_query).first() + DB_CONNECTION_COUNTS_QUERY + ).first() except Exception as error: - return jsonify({'ERROR': error}) - return jsonify({'data': { - 'max': max_conn, - 'used': used, - 'res_super': res_for_super, - 'free': free - }}) + return jsonify({"ERROR": error}) + return jsonify( + { + "data": { + "max": max_conn, + "used": used, + "res_super": res_for_super, + "free": free, + } + } + ) -@api_blueprint.route('/db_connections') +@api_blueprint.route("/db_connections") @check_authentication def db_connections(): """Display DB connections. 
@@ -77,27 +80,27 @@ def db_connections(): :return: jsonified connections for db """ try: - results = db.session.execute(db_connections_query).fetchall() + results = db.session.execute(DB_CONNECTIONS_QUERY).fetchall() except Exception as error: - return jsonify({'ERROR': error}) - data = {} - for pid, application_name, client_addr, client_port, backend_start, \ - xact_start, query_start, wait_event, state, left in results: - data[pid] = { - 'application_name': application_name, - 'client_addr': client_addr, - 'client_port': client_port, - 'backend_start': backend_start, - 'xact_start': xact_start, - 'query_start': query_start, - 'wait_event': wait_event, - 'state': state, - 'left': left + return jsonify({"ERROR": error}) + data = { + pid: { + "application_name": application_name, + "client_addr": client_addr, + "client_port": client_port, + "backend_start": backend_start, + "xact_start": xact_start, + "query_start": query_start, + "wait_event": wait_event, + "state": state, + "left": left, } - return jsonify({'data': data}) + for pid, application_name, client_addr, client_port, backend_start, xact_start, query_start, wait_event, state, left in results + } + return jsonify({"data": data}) -@api_blueprint.route('/es_db_counts') +@api_blueprint.route("/es_db_counts") def es_db_counts(): """Display count for elasticsearch and documents. @@ -108,17 +111,16 @@ def es_db_counts(): - difference between the count in elasticsearch and database :return: jsonified count for elasticsearch and documents """ - difference_db_es = request.args.get('diff', False) - with_deleted = request.args.get('deleted', False) - time_delta = request.args.get('delay', 1) + difference_db_es = request.args.get("diff", False) + with_deleted = request.args.get("deleted", False) + time_delta = request.args.get("delay", 1) mon = Monitoring(time_delta=time_delta) - return jsonify({'data': mon.info( - with_deleted=with_deleted, - difference_db_es=difference_db_es - )}) + return jsonify( + {"data": mon.info(with_deleted=with_deleted, difference_db_es=difference_db_es)} + ) -@api_blueprint.route('/check_es_db_counts') +@api_blueprint.route("/check_es_db_counts") def check_es_db_counts(): """Displays health status for elasticsearch and database counts. @@ -127,72 +129,67 @@ def check_es_db_counts(): links will be provided with more detailed information. :return: jsonified health status for elasticsearch and database counts """ - result = {'data': {'status': 'green'}} - difference_db_es = request.args.get('diff', False) - with_deleted = request.args.get('deleted', False) - time_delta = request.args.get('delay', 1) + result = {"data": {"status": "green"}} + difference_db_es = request.args.get("diff", False) + with_deleted = request.args.get("deleted", False) + time_delta = request.args.get("delay", 1) mon = Monitoring(time_delta=time_delta) - checks = mon.check( - with_deleted=with_deleted, - difference_db_es=difference_db_es - ) + checks = mon.check(with_deleted=with_deleted, difference_db_es=difference_db_es) if checks: - result = {'data': {'status': 'red'}} + result = {"data": {"status": "red"}} errors = [] for doc_type, doc_type_data in checks.items(): - links = {'about': url_for( - 'api_monitoring.check_es_db_counts', _external=True)} + links = { + "about": url_for("api_monitoring.check_es_db_counts", _external=True) + } for info, count in doc_type_data.items(): - if info == 'db_es': - msg = f'There are {count} items ' \ - f'from {doc_type} missing in ES.' 
+ if info == "db_es": + msg = f"There are {count} items " f"from {doc_type} missing in ES." links[doc_type] = url_for( - 'api_monitoring.missing_pids', - doc_type=doc_type, - _external=True + "api_monitoring.missing_pids", doc_type=doc_type, _external=True + ) + errors.append( + { + "id": "DB_ES_COUNTER_MISMATCH", + "links": links, + "code": "DB_ES_COUNTER_MISMATCH", + "title": "DB items counts don't match ES items count.", + "details": msg, + } ) - errors.append({ - 'id': 'DB_ES_COUNTER_MISMATCH', - 'links': links, - 'code': 'DB_ES_COUNTER_MISMATCH', - 'title': "DB items counts don't match ES items count.", - 'details': msg - }) - elif info == 'db-': - msg = f'There are {count} items ' \ - f'from {doc_type} missing in DB.' + elif info == "db-": + msg = f"There are {count} items " f"from {doc_type} missing in DB." links[doc_type] = url_for( - 'api_monitoring.missing_pids', - doc_type=doc_type, - _external=True + "api_monitoring.missing_pids", doc_type=doc_type, _external=True ) - errors.append({ - 'id': 'DB_ES_UNEQUAL', - 'links': links, - 'code': 'DB_ES_UNEQUAL', - 'title': "DB items unequal ES items.", - 'details': msg - }) - elif info == 'es-': - msg = f'There are {count} items ' \ - f'from {doc_type} missing in ES.' + errors.append( + { + "id": "DB_ES_UNEQUAL", + "links": links, + "code": "DB_ES_UNEQUAL", + "title": "DB items unequal ES items.", + "details": msg, + } + ) + elif info == "es-": + msg = f"There are {count} items " f"from {doc_type} missing in ES." links[doc_type] = url_for( - 'api_monitoring.missing_pids', - doc_type=doc_type, - _external=True + "api_monitoring.missing_pids", doc_type=doc_type, _external=True + ) + errors.append( + { + "id": "DB_ES_UNEQUAL", + "links": links, + "code": "DB_ES_UNEQUAL", + "title": "DB items unequal ES items.", + "details": msg, + } ) - errors.append({ - 'id': 'DB_ES_UNEQUAL', - 'links': links, - 'code': 'DB_ES_UNEQUAL', - 'title': "DB items unequal ES items.", - 'details': msg - }) - result['errors'] = errors + result["errors"] = errors return jsonify(result) -@api_blueprint.route('/missing_pids/') +@api_blueprint.route("/missing_pids/") @check_authentication def missing_pids(doc_type): """Displays details of counts for document type. 
@@ -208,58 +205,54 @@ def missing_pids(doc_type): :return: jsonified details of counts for document type """ try: - api_url = url_for( - f'invenio_records_rest.{doc_type}_list', - _external=True - ) + api_url = url_for(f"invenio_records_rest.{doc_type}_list", _external=True) except Exception: api_url = None - time_delta = request.args.get('delay', 1) + time_delta = request.args.get("delay", 1) mon = Monitoring(time_delta=time_delta) res = mon.missing(doc_type) - if res.get('ERROR'): + if res.get("ERROR"): return { - 'error': { - 'id': 'DOCUMENT_TYPE_NOT_FOUND', - 'code': 'DOCUMENT_TYPE_NOT_FOUND', - 'title': "Document type not found.", - 'details': res.get('ERROR') + "error": { + "id": "DOCUMENT_TYPE_NOT_FOUND", + "code": "DOCUMENT_TYPE_NOT_FOUND", + "title": "Document type not found.", + "details": res.get("ERROR"), } } - data = {'DB': [], 'ES': [], 'ES duplicate': []} - for pid in res.get('DB'): + data = {"DB": [], "ES": [], "ES duplicate": []} + for pid in res.get("DB"): if api_url: - data['DB'].append(f'{api_url}?q=pid:"{pid}"') + data["DB"].append(f'{api_url}?q=pid:"{pid}"') else: - data['DB'].append(pid) - for pid in res.get('ES'): + data["DB"].append(pid) + for pid in res.get("ES"): if api_url: - data['ES'].append(f'{api_url}{pid}') + data["ES"].append(f"{api_url}{pid}") else: - data['ES'].append(pid) - for pid in res.get('ES duplicate'): + data["ES"].append(pid) + for pid in res.get("ES duplicate"): if api_url: - data['ES duplicate'].append(f'{api_url}?q=pid:"{pid}"') + data["ES duplicate"].append(f'{api_url}?q=pid:"{pid}"') else: - data['ES duplicate'].append(pid) - return jsonify({'data': data}) + data["ES duplicate"].append(pid) + return jsonify({"data": data}) -@api_blueprint.route('/redis') +@api_blueprint.route("/redis") @check_authentication def redis(): """Displays redis info. :return: jsonified redis info. """ - url = current_app.config.get('ACCOUNTS_SESSION_REDIS_URL', - 'redis://localhost:6379') + url = current_app.config.get("ACCOUNTS_SESSION_REDIS_URL", "redis://localhost:6379") redis = Redis.from_url(url) info = redis.info() - return jsonify({'data': info}) + return jsonify({"data": info}) -@api_blueprint.route('/es') +@api_blueprint.route("/es") @check_authentication def elastic_search(): """Displays Elasticsearch cluster info. @@ -267,23 +260,22 @@ def elastic_search(): :return: jsonified Elasticsearch cluster info. """ info = current_search_client.cluster.health() - return jsonify({'data': info}) + return jsonify({"data": info}) -@api_blueprint.route('/es_indices') +@api_blueprint.route("/es_indices") @check_authentication def elastic_search_indices(): """Displays Elasticsearch indices info. :return: jsonified Elasticsearch indices info. """ - info = current_search_client.cat.indices( - bytes='b', format='json', s='index') - info = {data['index']: data for data in info} - return jsonify({'data': info}) + info = current_search_client.cat.indices(bytes="b", format="json", s="index") + info = {data["index"]: data for data in info} + return jsonify({"data": info}) -@api_blueprint.route('/timestamps') +@api_blueprint.route("/timestamps") @check_authentication def timestamps(): """Get time stamps from current cache. @@ -293,18 +285,16 @@ def timestamps(): :return: jsonified timestamps. 
""" data = {} - if time_stamps := current_cache.get('timestamps'): + if time_stamps := current_cache.get("timestamps"): for name, values in time_stamps.items(): # make the name safe for JSON export - name = name.replace('-', '_') + name = name.replace("-", "_") data[name] = {} for key, value in values.items(): - if key == 'time': - data[name]['utctime'] = value.strftime( - "%Y-%m-%d %H:%M:%S" - ) - data[name]['unixtime'] = time.mktime(value.timetuple()) + if key == "time": + data[name]["utctime"] = value.strftime("%Y-%m-%d %H:%M:%S") + data[name]["unixtime"] = time.mktime(value.timetuple()) else: data[name][key] = value - return jsonify({'data': data}) + return jsonify({"data": data}) diff --git a/rero_ils/modules/normalizer_stop_words.py b/rero_ils/modules/normalizer_stop_words.py index 26089ff0df..f1fa4ace63 100644 --- a/rero_ils/modules/normalizer_stop_words.py +++ b/rero_ils/modules/normalizer_stop_words.py @@ -20,7 +20,7 @@ import re -class NormalizerStopWords(): +class NormalizerStopWords: """Normalizer Stop words.""" stop_words_punctuation = [] @@ -34,20 +34,19 @@ def __init__(self, app=None): def init_app(self, app): """Flask application initialization.""" - if app.config.get('RERO_ILS_STOP_WORDS_ACTIVATE', False): + if app.config.get("RERO_ILS_STOP_WORDS_ACTIVATE", False): self.init_config(app) - app.extensions['reroils-normalizer-stop-words'] = self + app.extensions["reroils-normalizer-stop-words"] = self def init_config(self, app): """Initialize configuration.""" - punc = app.config.get('RERO_ILS_STOP_WORDS_PUNCTUATION', []) - self.stop_words_punctuation = '|'.join(punc) - stop_words = app.config.get('RERO_ILS_STOP_WORDS', {}) + punc = app.config.get("RERO_ILS_STOP_WORDS_PUNCTUATION", []) + self.stop_words_punctuation = "|".join(punc) + stop_words = app.config.get("RERO_ILS_STOP_WORDS", {}) if stop_words: # Generating a regex per language for lang, words in stop_words.items(): - self.stop_words_regex[lang] = \ - r'\b(' + r'|'.join(words) + r')\b\s*' + self.stop_words_regex[lang] = r"\b(" + r"|".join(words) + r")\b\s*" def normalize(self, text, language=None): """Normalize. 
@@ -57,14 +56,12 @@ def normalize(self, text, language=None): :returns: Normalized text """ word_regex = self.stop_words_regex.get( - language, - self.stop_words_regex.get('default') + language, self.stop_words_regex.get("default") ) if word_regex: - compiled = re.compile(fr'{word_regex}', re.IGNORECASE) - text = compiled.sub('', text) + compiled = re.compile(rf"{word_regex}", re.IGNORECASE) + text = compiled.sub("", text) if self.stop_words_punctuation: - compiled = re.compile( - fr'{self.stop_words_punctuation}', re.IGNORECASE) - text = compiled.sub('', text) - return re.sub(r'\s+', ' ', text).strip() + compiled = re.compile(rf"{self.stop_words_punctuation}", re.IGNORECASE) + text = compiled.sub("", text) + return re.sub(r"\s+", " ", text).strip() diff --git a/rero_ils/modules/notifications/api.py b/rero_ils/modules/notifications/api.py index 9657de4c47..4aad27ee4d 100644 --- a/rero_ils/modules/notifications/api.py +++ b/rero_ils/modules/notifications/api.py @@ -26,24 +26,26 @@ from flask import current_app -from .extensions import NotificationSubclassExtension -from .logs.api import NotificationOperationLog -from .models import NotificationIdentifier, NotificationMetadata, \ - NotificationStatus, NotificationType from ..api import IlsRecord, IlsRecordsIndexer, IlsRecordsSearch from ..fetchers import id_fetcher from ..minters import id_minter -from ..patron_transactions.api import PatronTransaction, \ - PatronTransactionsSearch -from ..patron_transactions.utils import \ - create_patron_transaction_from_notification +from ..patron_transactions.api import PatronTransaction, PatronTransactionsSearch +from ..patron_transactions.utils import create_patron_transaction_from_notification from ..providers import Provider +from .extensions import NotificationSubclassExtension +from .logs.api import NotificationOperationLog +from .models import ( + NotificationIdentifier, + NotificationMetadata, + NotificationStatus, + NotificationType, +) # notification provider NotificationProvider = type( - 'NotificationProvider', + "NotificationProvider", (Provider,), - dict(identifier=NotificationIdentifier, pid_type='notif') + dict(identifier=NotificationIdentifier, pid_type="notif"), ) # notification minter @@ -58,9 +60,9 @@ class NotificationsSearch(IlsRecordsSearch): class Meta: """Search only on Notifications index.""" - index = 'notifications' + index = "notifications" doc_types = None - fields = ('*', ) + fields = ("*",) facets = {} default_filter = None @@ -71,9 +73,9 @@ def _get_claims_query(self, item_pid): :param item_pid: the item pid related to the claim notification. :returns: a ElasticSearch query object. """ - return self \ - .filter('term', context__item__pid=item_pid) \ - .filter('term', notification_type=NotificationType.CLAIM_ISSUE) + return self.filter("term", context__item__pid=item_pid).filter( + "term", notification_type=NotificationType.CLAIM_ISSUE + ) def get_claims(self, item_pid): """Get the claims notifications about an issue item. 
@@ -123,27 +125,28 @@ class Notification(IlsRecord, ABC): provider = NotificationProvider model_cls = NotificationMetadata - _extensions = [ - NotificationSubclassExtension() - ] + _extensions = [NotificationSubclassExtension()] # INVENIO API METHODS ===================================================== # Override some invenio ``RecordBase`` method @classmethod - def create(cls, data, id_=None, delete_pid=False, dbcommit=False, - reindex=False, **kwargs): + def create( + cls, data, id_=None, delete_pid=False, dbcommit=False, reindex=False, **kwargs + ): """Create notification record.""" # Check if the notification_type is disabled by app configuration - if data.get('notification_type') in current_app.config.get( - 'RERO_ILS_DISABLED_NOTIFICATION_TYPE', []): + if data.get("notification_type") in current_app.config.get( + "RERO_ILS_DISABLED_NOTIFICATION_TYPE", [] + ): return - data.setdefault('status', NotificationStatus.CREATED) - record = super().create(data, id_, delete_pid, dbcommit, reindex, - **kwargs) + data.setdefault("status", NotificationStatus.CREATED) + record = super().create(data, id_, delete_pid, dbcommit, reindex, **kwargs) create_patron_transaction_from_notification( - notification=record, dbcommit=dbcommit, reindex=reindex, - delete_pid=delete_pid + notification=record, + dbcommit=dbcommit, + reindex=reindex, + delete_pid=delete_pid, ) NotificationOperationLog.create(record) return record @@ -245,19 +248,22 @@ def get_notification_context(cls, notifications=None): @property def type(self): """Shortcut for notification type.""" - return self.get('notification_type') + return self.get("notification_type") @property def status(self): """Shortcut for notification status.""" - return self.get('status') + return self.get("status") @property def patron_transactions(self): """Returns patron transactions attached to a notification.""" - results = PatronTransactionsSearch()\ - .filter('term', notification__pid=self.pid)\ - .source(False).scan() + results = ( + PatronTransactionsSearch() + .filter("term", notification__pid=self.pid) + .source(False) + .scan() + ) for result in results: yield PatronTransaction.get_record(result.meta.id) @@ -270,12 +276,10 @@ def update_effective_recipients(self, recipients): """ recipients = recipients or [] for type_, address in recipients: - self.setdefault('effective_recipients', []).append({ - 'type': type_, - 'address': address - }) + self.setdefault("effective_recipients", []).append( + {"type": type_, "address": address} + ) - return self.update( - data=self.dumps(), commit=True, dbcommit=True, reindex=True) + return self.update(data=self.dumps(), commit=True, dbcommit=True, reindex=True) def update_process_date(self, sent=False, status=NotificationStatus.DONE): """Update the notification to set process date. :param sent: is the notification sent. :param status: the new notification status. :return the updated notification.
""" - self['process_date'] = datetime.now(timezone.utc).isoformat() - self['notification_sent'] = sent - self['status'] = status - return self.update( - data=self.dumps(), commit=True, dbcommit=True, reindex=True) + self["process_date"] = datetime.now(timezone.utc).isoformat() + self["notification_sent"] = sent + self["status"] = status + return self.update(data=self.dumps(), commit=True, dbcommit=True, reindex=True) class NotificationsIndexer(IlsRecordsIndexer): @@ -301,4 +304,4 @@ def bulk_index(self, record_id_iterator): :param record_id_iterator: Iterator yielding record UUIDs. """ - super().bulk_index(record_id_iterator, doc_type='notif') + super().bulk_index(record_id_iterator, doc_type="notif") diff --git a/rero_ils/modules/notifications/cli.py b/rero_ils/modules/notifications/cli.py index 8ea467fb24..b09b66baa0 100644 --- a/rero_ils/modules/notifications/cli.py +++ b/rero_ils/modules/notifications/cli.py @@ -31,13 +31,24 @@ def notifications(): """Notification management commands.""" -@notifications.command('process') -@click.option('-t', '--type', 'notification_type', help="Notification Type.", - multiple=True, default=NotificationType.ALL_NOTIFICATIONS) -@click.option('-k', '--enqueue', 'enqueue', is_flag=True, default=False, - help="Enqueue record creation.") -@click.option('-v', '--verbose', 'verbose', is_flag=True, default=False, - help='verbose') +@notifications.command("process") +@click.option( + "-t", + "--type", + "notification_type", + help="Notification Type.", + multiple=True, + default=NotificationType.ALL_NOTIFICATIONS, +) +@click.option( + "-k", + "--enqueue", + "enqueue", + is_flag=True, + default=False, + help="Enqueue record creation.", +) +@click.option("-v", "--verbose", "verbose", is_flag=True, default=False, help="verbose") @with_appcontext def process(notification_type, enqueue, verbose): """Process notifications.""" @@ -45,17 +56,17 @@ def process(notification_type, enqueue, verbose): enqueue_results = {} for n_type in notification_type: if n_type not in NotificationType.ALL_NOTIFICATIONS: - click.secho( - f'Notification type does not exist: {n_type}', fg='red') + click.secho(f"Notification type does not exist: {n_type}", fg="red") break - click.secho( - f'Process notification: {n_type}', fg='green') + click.secho(f"Process notification: {n_type}", fg="green") if enqueue: enqueue_results[n_type] = process_notifications.delay( - notification_type=n_type, verbose=verbose) + notification_type=n_type, verbose=verbose + ) else: results[n_type] = process_notifications( - notification_type=n_type, verbose=verbose) + notification_type=n_type, verbose=verbose + ) if verbose: if enqueue_results: @@ -63,5 +74,5 @@ def process(notification_type, enqueue, verbose): results[key] = value.get() for key, value in results.items(): - result_values = ' '.join([f'{k}={v}' for k, v in value.items()]) - click.secho(f'Notification {key:12}: {result_values}') + result_values = " ".join([f"{k}={v}" for k, v in value.items()]) + click.secho(f"Notification {key:12}: {result_values}") diff --git a/rero_ils/modules/notifications/dispatcher.py b/rero_ils/modules/notifications/dispatcher.py index f1cb4f725c..de1de9302f 100644 --- a/rero_ils/modules/notifications/dispatcher.py +++ b/rero_ils/modules/notifications/dispatcher.py @@ -32,8 +32,9 @@ class Dispatcher: """Dispatcher notifications class.""" @classmethod - def dispatch_notifications(cls, notification_pids=None, resend=False, - verbose=False): + def dispatch_notifications( + cls, notification_pids=None, resend=False, verbose=False + 
): """Dispatch the notification. :param notification_pids: Notification pids to send. @@ -41,16 +42,18 @@ def dispatch_notifications(cls, notification_pids=None, resend=False, :param verbose: Verbose output. :returns: dictionary with processed and sent counts """ + def get_dispatcher_function(channel): """Find the dispatcher function to use by communication channel.""" try: communication_switcher = current_app.config.get( - 'RERO_ILS_COMMUNICATION_DISPATCHER_FUNCTIONS', []) + "RERO_ILS_COMMUNICATION_DISPATCHER_FUNCTIONS", [] + ) return communication_switcher[channel] except KeyError: current_app.logger.warning( - f'The communication channel: {channel}' - ' is not yet implemented') + f"The communication channel: {channel}" " is not yet implemented" + ) return Dispatcher.not_yet_implemented sent = not_sent = errors = 0 @@ -65,14 +68,16 @@ def get_dispatcher_function(channel): # notification to the aggregation dict for notification in notifications: try: - cls._process_notification( - notification, resend, aggregated) + cls._process_notification(notification, resend, aggregated) except Exception as error: errors += 1 current_app.logger.error( - f'Notification has not be sent (pid: {notification.pid},' + f"Notification has not been sent (pid: {notification.pid}," f' type: {notification["notification_type"]}): ' - f'{error}', exc_info=True, stack_info=True) + f"{error}", + exc_info=True, + stack_info=True, + ) # SEND AGGREGATED NOTIFICATIONS # The aggregation key we build ensures that aggregated notifications @@ -85,12 +90,12 @@ def get_dispatcher_function(channel): dispatcher_function = get_dispatcher_function(comm_channel) counter = len(aggr_notifications) if verbose: - msg = f'Dispatch notifications: {notification.type} ' - if hasattr(notification, 'library'): - msg += f'library: {notification.library.pid} ' - if hasattr(notification, 'patron'): - msg += f'patron: {notification.patron.pid} ' - msg += f'documents: {counter}' + msg = f"Dispatch notifications: {notification.type} " + if hasattr(notification, "library"): + msg += f"library: {notification.library.pid} " + if hasattr(notification, "patron"): + msg += f"patron: {notification.patron.pid} " + msg += f"documents: {counter}" current_app.logger.info(msg) result, recipients = dispatcher_function(aggr_notifications) for notification in aggr_notifications: @@ -102,10 +107,10 @@ def get_dispatcher_function(channel): else: not_sent += counter return { - 'processed': len(notifications), - 'sent': sent, - 'not_sent': not_sent, - 'errors': errors + "processed": len(notifications), + "sent": sent, + "not_sent": not_sent, + "errors": errors, } @classmethod @@ -117,13 +122,10 @@ def _process_notification(cls, notification, resend, aggregated): if already sent. :param aggregated: ``dict`` to store notification results. """ - # 1. Check if notification has already been processed and if we - # need to resend it. If not, skip this notification and continue - process_date = notification.get('process_date') - if process_date: + if process_date := notification.get("process_date"): current_app.logger.warning( - f'Notification: {notification.pid} already processed ' - f'on: {process_date}' + f"Notification: {notification.pid} already processed " + f"on: {process_date}" ) if not resend: return @@ -133,9 +135,9 @@ def _process_notification(cls, notification, resend, aggregated): # notification 'status' and stop the notification processing.
can_cancel, reason = notification.can_be_cancelled() if can_cancel: - msg = f'Notification #{notification.pid} cancelled: {reason}' + msg = f"Notification #{notification.pid} cancelled: {reason}" current_app.logger.info(msg) - notification.update_process_date(sent=False, status='cancelled') + notification.update_process_date(sent=False, status="cancelled") return # 3. Aggregate notifications @@ -144,8 +146,7 @@ def _process_notification(cls, notification, resend, aggregated): aggregated[aggr_key].append(notification) @staticmethod - def _create_email(recipients, reply_to, ctx_data, template, - cc=None, bcc=None): + def _create_email(recipients, reply_to, ctx_data, template, cc=None, bcc=None): """Create email message from template. :param recipients: Main recipient emails list @@ -158,18 +159,17 @@ def _create_email(recipients, reply_to, ctx_data, template, """ msg = TemplatedMessage( template_body=template, - sender=current_app.config.get('DEFAULT_SENDER_EMAIL', - 'noreply@rero.ch'), + sender=current_app.config.get("DEFAULT_SENDER_EMAIL", "noreply@rero.ch"), - reply_to=','.join(reply_to), # the client is unable to manage list + reply_to=",".join(reply_to), # the client is unable to manage list recipients=recipients, cc=cc, bcc=bcc, - ctx=ctx_data + ctx=ctx_data, ) # subject is the first line, body is the rest - text = msg.body.split('\n') + text = msg.body.split("\n") msg.subject = text[0] - msg.body = '\n'.join(text[1:]) + msg.body = "\n".join(text[1:]) return msg @staticmethod @@ -207,7 +207,7 @@ def send_mail_for_printing(notifications=None): library = notification.library if notification.type in [ NotificationType.BOOKING, - NotificationType.TRANSIT_NOTICE + NotificationType.TRANSIT_NOTICE, ]: library = notification.transaction_library elif notification.type == NotificationType.AVAILABILITY: @@ -218,19 +218,19 @@ def send_mail_for_printing(notifications=None): # on the location. If the location email isn't defined, then use the # library email by default. if notification.type == NotificationType.REQUEST: - recipient = notification.location.get( - 'notification_email', recipient) + recipient = notification.location.get("notification_email", recipient) error_reasons = [] - reply_to = notification.library.get('email') + reply_to = notification.library.get("email") if not recipient: - error_reasons.append('Missing notification email') + error_reasons.append("Missing notification email") if not reply_to: - error_reasons.append('Missing notification reply_to email') + error_reasons.append("Missing notification reply_to email") if error_reasons: current_app.logger.warning( - f'Notification#{notification.pid} for printing is lost :: ' - f'({")(".join(error_reasons)})') + f"Notification#{notification.pid} for printing is lost :: " + f'({")(".join(error_reasons)})' + ) return False, None # 2. Build the context to render the template @@ -241,14 +241,14 @@ def send_mail_for_printing(notifications=None): # the patron asked to receive them by email (cipo reminders # notifications with a communication channel to 'mail' value). # Ensure that the ``include_patron_address`` flag is set to True. - context['include_patron_address'] = True + context["include_patron_address"] = True # 3.
Send the message msg = Dispatcher._create_email( recipients=[recipient], reply_to=[reply_to], ctx_data=context, - template=notification.get_template_path() + template=notification.get_template_path(), ) task_send_email.apply_async((msg.__dict__,)) return True, [(RecipientType.TO, recipient)] @@ -275,13 +275,14 @@ def send_notification_by_email(notifications): error_reasons = [] if not recipients: - error_reasons.append('Missing notification recipients') + error_reasons.append("Missing notification recipients") if not reply_to: - error_reasons.append('Missing reply_to email') + error_reasons.append("Missing reply_to email") if error_reasons: current_app.logger.warning( - f'Notification#{notification.pid} is lost :: ' - f'({")(".join(error_reasons)})') + f"Notification#{notification.pid} is lost :: " + f'({")(".join(error_reasons)})' + ) return False, None # build the context for this notification set @@ -294,8 +295,8 @@ def send_notification_by_email(notifications): bcc=bcc, reply_to=reply_to, ctx_data=context, - template=notification.get_template_path() + template=notification.get_template_path(), ) - delay = context.get('delay', 0) + delay = context.get("delay", 0) task_send_email.apply_async((msg.__dict__,), countdown=delay) return True, [(RecipientType.TO, addr) for addr in recipients] diff --git a/rero_ils/modules/notifications/extensions.py b/rero_ils/modules/notifications/extensions.py index 03adb46690..fb03ea1515 100644 --- a/rero_ils/modules/notifications/extensions.py +++ b/rero_ils/modules/notifications/extensions.py @@ -37,14 +37,14 @@ def _get_circulation_subclass(record): from .api import Notification from .subclasses.acq_order import AcquisitionOrderNotification from .subclasses.at_desk import AtDeskCirculationNotification - from .subclasses.availability import \ - AvailabilityCirculationNotification + from .subclasses.availability import AvailabilityCirculationNotification from .subclasses.booking import BookingCirculationNotification from .subclasses.claim_issue import ClaimSerialIssueNotification from .subclasses.recall import RecallCirculationNotification from .subclasses.reminder import ReminderCirculationNotification from .subclasses.request import RequestCirculationNotification from .subclasses.transit import TransitCirculationNotification + mapping = { NotificationType.AVAILABILITY: AvailabilityCirculationNotification, NotificationType.AT_DESK: AtDeskCirculationNotification, diff --git a/rero_ils/modules/notifications/jsonresolver.py b/rero_ils/modules/notifications/jsonresolver.py index 494b419142..ab40158eab 100644 --- a/rero_ils/modules/notifications/jsonresolver.py +++ b/rero_ils/modules/notifications/jsonresolver.py @@ -22,7 +22,7 @@ from ..jsonresolver import resolve_json_refs -@jsonresolver.route('/api/notifications/', host='bib.rero.ch') +@jsonresolver.route("/api/notifications/", host="bib.rero.ch") def notification_resolver(pid): """Resolver for notifications record.""" - return resolve_json_refs('notif', pid) + return resolve_json_refs("notif", pid) diff --git a/rero_ils/modules/notifications/listener.py b/rero_ils/modules/notifications/listener.py index dd02d8c893..b6dfaaa6a0 100644 --- a/rero_ils/modules/notifications/listener.py +++ b/rero_ils/modules/notifications/listener.py @@ -20,8 +20,15 @@ from .api import Notification, NotificationsSearch -def enrich_notification_data(sender, json=None, record=None, index=None, - doc_type=None, arguments=None, **dummy_kwargs): +def enrich_notification_data( + sender, + json=None, + record=None, + index=None, + 
doc_type=None, + arguments=None, + **dummy_kwargs ): """Signal sent before a record is indexed. :param json: The dumped record dictionary which can be modified. @@ -29,10 +36,7 @@ def enrich_notification_data(sender, json=None, record=None, index=None, :param index: The index in which the record will be indexed. :param doc_type: The doc_type for the record. """ - if index.split('-')[0] == NotificationsSearch.Meta.index: + if index.split("-")[0] == NotificationsSearch.Meta.index: if not isinstance(record, Notification): - record = Notification.get_record_by_pid(record.get('pid')) - json['organisation'] = { - 'pid': record.organisation_pid, - 'type': 'org' - } + record = Notification.get_record_by_pid(record.get("pid")) + json["organisation"] = {"pid": record.organisation_pid, "type": "org"} diff --git a/rero_ils/modules/notifications/logs/api.py b/rero_ils/modules/notifications/logs/api.py index 2978b921d3..c6736d06d2 100644 --- a/rero_ils/modules/notifications/logs/api.py +++ b/rero_ils/modules/notifications/logs/api.py @@ -28,7 +28,7 @@ class NotificationOperationLog(OperationLog, SpecificOperationLog): """Operation log for notification.""" @classmethod - def create(cls, data, id_=None, index_refresh='false', **kwargs): + def create(cls, data, id_=None, index_refresh="false", **kwargs): """Create a new record instance and store it in elasticsearch. :param data: Dict with the notification metadata. @@ -41,45 +41,41 @@ def create(cls, data, id_=None, index_refresh='false', **kwargs): Valid choices: true, false, wait_for :returns: A new :class:`Record` instance. """ - if not (loan := getattr(data, 'loan', None)): + if not (loan := getattr(data, "loan", None)): return # If we have no recipients, assign a default value # because the "recipients" json schema requires at least one item.
if not (recipients := data.get_recipients(RecipientType.TO)): - recipients = ['no-recipient-email'] + recipients = ["no-recipient-email"] log = { - 'record': { - 'value': data.get('pid'), - 'type': 'notif' - }, - 'operation': 'create', - 'date': data['creation_date'], - 'loan': { - 'pid': loan.pid, - 'trigger': loan['trigger'], - 'override_flag': False, - 'transaction_channel': 'system', - 'transaction_location': { - 'pid': loan.transaction_location_pid, - 'name': cls._get_location_name( - loan.transaction_location_pid) + "record": {"value": data.get("pid"), "type": "notif"}, + "operation": "create", + "date": data["creation_date"], + "loan": { + "pid": loan.pid, + "trigger": loan["trigger"], + "override_flag": False, + "transaction_channel": "system", + "transaction_location": { + "pid": loan.transaction_location_pid, + "name": cls._get_location_name(loan.transaction_location_pid), }, - 'pickup_location': { - 'pid': loan.pickup_location_pid, - 'name': cls._get_location_name(loan.pickup_location_pid) + "pickup_location": { + "pid": loan.pickup_location_pid, + "name": cls._get_location_name(loan.pickup_location_pid), }, - 'patron': cls._get_patron_data(loan.patron), - 'item': cls._get_item_data(loan.item) + "patron": cls._get_patron_data(loan.patron), + "item": cls._get_item_data(loan.item), + }, + "user_name": "system", + "notification": { + "pid": data.pid, + "type": data["notification_type"], + "date": data["creation_date"], + "sender_library_pid": data.library_pid, + "recipients": recipients, }, - 'user_name': 'system', - 'notification': { - 'pid': data.pid, - 'type': data['notification_type'], - 'date': data['creation_date'], - 'sender_library_pid': data.library_pid, - 'recipients': recipients - } } return super().create(log, index_refresh=index_refresh) diff --git a/rero_ils/modules/notifications/models.py b/rero_ils/modules/notifications/models.py index 835efac725..35dd4cba7d 100644 --- a/rero_ils/modules/notifications/models.py +++ b/rero_ils/modules/notifications/models.py @@ -27,19 +27,20 @@ class NotificationIdentifier(RecordIdentifier): """Sequence generator for Notifications identifiers.""" - __tablename__ = 'notification_id' - __mapper_args__ = {'concrete': True} + __tablename__ = "notification_id" + __mapper_args__ = {"concrete": True} recid = db.Column( - db.BigInteger().with_variant(db.Integer, 'sqlite'), - primary_key=True, autoincrement=True, + db.BigInteger().with_variant(db.Integer, "sqlite"), + primary_key=True, + autoincrement=True, ) class NotificationMetadata(db.Model, RecordMetadataBase): """Notification record metadata.""" - __tablename__ = 'notifications_metadata' + __tablename__ = "notifications_metadata" class NotificationType: @@ -62,17 +63,17 @@ class NotificationType: system. """ - ACQUISITION_ORDER = 'acquisition_order' - AT_DESK = 'at_desk' - AUTO_EXTEND = 'auto_extend' - AVAILABILITY = 'availability' - BOOKING = 'booking' - CLAIM_ISSUE = 'claim_issue' - DUE_SOON = 'due_soon' - OVERDUE = 'overdue' - RECALL = 'recall' - REQUEST = 'request' - TRANSIT_NOTICE = 'transit_notice' + ACQUISITION_ORDER = "acquisition_order" + AT_DESK = "at_desk" + AUTO_EXTEND = "auto_extend" + AVAILABILITY = "availability" + BOOKING = "booking" + CLAIM_ISSUE = "claim_issue" + DUE_SOON = "due_soon" + OVERDUE = "overdue" + RECALL = "recall" + REQUEST = "request" + TRANSIT_NOTICE = "transit_notice" # All notification types ALL_NOTIFICATIONS = [ @@ -83,20 +84,12 @@ class NotificationType: RECALL, TRANSIT_NOTICE, REQUEST, - BOOKING + BOOKING, ] # Notification related to cipo reminders. 
- REMINDERS_NOTIFICATIONS = [ - DUE_SOON, - OVERDUE - ] + REMINDERS_NOTIFICATIONS = [DUE_SOON, OVERDUE] # Notification to send to a library (not to a patron) - INTERNAL_NOTIFICATIONS = [ - AT_DESK, - BOOKING, - REQUEST, - TRANSIT_NOTICE - ] + INTERNAL_NOTIFICATIONS = [AT_DESK, BOOKING, REQUEST, TRANSIT_NOTICE] # Notification related to circulation modules CIRCULATION_NOTIFICATIONS = [ @@ -108,30 +101,30 @@ class NotificationType: RECALL, TRANSIT_NOTICE, REQUEST, - BOOKING + BOOKING, ] class NotificationStatus: """Notification status.""" - DONE = 'done' - CREATED = 'created' - CANCELLED = 'cancelled' + DONE = "done" + CREATED = "created" + CANCELLED = "cancelled" class NotificationChannel: """Notification channels.""" - MAIL = 'mail' - EMAIL = 'email' - PATRON_SETTING = 'patron_setting' + MAIL = "mail" + EMAIL = "email" + PATRON_SETTING = "patron_setting" class RecipientType: """Notification recipient type.""" - TO = 'to' - CC = 'cc' - BCC = 'bcc' - REPLY_TO = 'reply_to' + TO = "to" + CC = "cc" + BCC = "bcc" + REPLY_TO = "reply_to" diff --git a/rero_ils/modules/notifications/permissions.py b/rero_ils/modules/notifications/permissions.py index 5f80b38378..87cc1243c9 100644 --- a/rero_ils/modules/notifications/permissions.py +++ b/rero_ils/modules/notifications/permissions.py @@ -19,17 +19,20 @@ """Permissions for notifications.""" from invenio_access import action_factory -from rero_ils.modules.permissions import AllowedByAction, \ - AllowedByActionRestrictByManageableLibrary, \ - AllowedByActionRestrictByOwnerOrOrganisation, RecordPermissionPolicy +from rero_ils.modules.permissions import ( + AllowedByAction, + AllowedByActionRestrictByManageableLibrary, + AllowedByActionRestrictByOwnerOrOrganisation, + RecordPermissionPolicy, +) # Actions to control Items policies for CRUD operations -search_action = action_factory('notif-search') -read_action = action_factory('notif-read') -create_action = action_factory('notif-create') -update_action = action_factory('notif-update') -delete_action = action_factory('notif-delete') -access_action = action_factory('notif-access') +search_action = action_factory("notif-search") +read_action = action_factory("notif-read") +create_action = action_factory("notif-create") +update_action = action_factory("notif-update") +delete_action = action_factory("notif-delete") +access_action = action_factory("notif-access") class NotificationPermissionPolicy(RecordPermissionPolicy): @@ -43,15 +46,21 @@ class NotificationPermissionPolicy(RecordPermissionPolicy): can_search = [AllowedByAction(search_action)] can_read = [AllowedByActionRestrictByOwnerOrOrganisation(read_action)] - can_create = [AllowedByActionRestrictByManageableLibrary( - create_action, - callback=lambda rec: getattr(rec, 'library_pid', 'unavailable_data') - )] - can_update = [AllowedByActionRestrictByManageableLibrary( - update_action, - callback=lambda rec: getattr(rec, 'library_pid', 'unavailable_data') - )] - can_delete = [AllowedByActionRestrictByManageableLibrary( - delete_action, - callback=lambda rec: getattr(rec, 'library_pid', 'unavailable_data') - )] + can_create = [ + AllowedByActionRestrictByManageableLibrary( + create_action, + callback=lambda rec: getattr(rec, "library_pid", "unavailable_data"), + ) + ] + can_update = [ + AllowedByActionRestrictByManageableLibrary( + update_action, + callback=lambda rec: getattr(rec, "library_pid", "unavailable_data"), + ) + ] + can_delete = [ + AllowedByActionRestrictByManageableLibrary( + delete_action, + callback=lambda rec: getattr(rec, "library_pid", 
"unavailable_data"), + ) + ] diff --git a/rero_ils/modules/notifications/subclasses/acq_order.py b/rero_ils/modules/notifications/subclasses/acq_order.py index c23213c46c..326e40dc9f 100644 --- a/rero_ils/modules/notifications/subclasses/acq_order.py +++ b/rero_ils/modules/notifications/subclasses/acq_order.py @@ -23,12 +23,14 @@ from flask import current_app from werkzeug.utils import cached_property -from rero_ils.modules.acquisition.acq_orders.dumpers import \ - AcqOrderNotificationDumper +from rero_ils.modules.acquisition.acq_orders.dumpers import AcqOrderNotificationDumper from rero_ils.modules.libraries.api import Library from rero_ils.modules.notifications.api import Notification -from rero_ils.modules.notifications.models import NotificationChannel, \ - NotificationType, RecipientType +from rero_ils.modules.notifications.models import ( + NotificationChannel, + NotificationType, + RecipientType, +) from rero_ils.modules.utils import extracted_data_from_ref @@ -53,18 +55,22 @@ def extended_validation(self, **kwargs): if self.type != NotificationType.ACQUISITION_ORDER: return f"'{self.type} isn't an AcquisitionNotification" if not self.acq_order_pid: - return '`order` field must be specified into `context` for ' \ - 'AcquisitionNotification' + return ( + "`order` field must be specified into `context` for " + "AcquisitionNotification" + ) # validate that at least one email of type `to` exist and one email of # type `reply_to` is given in the ist of emails. recipient_types = { - recipient.get('type') - for recipient in self.get('context', {}).get('recipients', []) + recipient.get("type") + for recipient in self.get("context", {}).get("recipients", []) } - if RecipientType.TO not in recipient_types \ - or RecipientType.REPLY_TO not in recipient_types: - return 'Recipient type `to` and `reply_to` are required' + if ( + RecipientType.TO not in recipient_types + or RecipientType.REPLY_TO not in recipient_types + ): + return "Recipient type `to` and `reply_to` are required" return True # PARENT ABSTRACT IMPLEMENTATION METHODS ================================== @@ -98,9 +104,7 @@ def can_be_cancelled(self): can be cancelled; the reason why the notification can be cancelled (only present if tuple first value is True). """ - if not self.order: - return True, "Order doesn't exists anymore" - return False, None + return (False, None) if self.order else (True, "Order doesn't exists anymore") def get_communication_channel(self): """Get the communication channel to use for this notification.""" @@ -113,16 +117,15 @@ def get_language_to_use(self): # By default, the language to use to build the notification is defined # in the vendor setting. Override this method if needed in the future. return self.order.vendor.get( - 'communication_language', - current_app.config.get('RERO_ILS_APP_DEFAULT_LANGUAGE', 'eng') + "communication_language", + current_app.config.get("RERO_ILS_APP_DEFAULT_LANGUAGE", "eng"), ) def get_template_path(self): """Get the template to use to render the notification.""" # By default, the template path to use reflects the notification type. # Override this method if necessary - return \ - f'rero_ils/vendor_order_mail/{self.get_language_to_use()}.tpl.txt' + return f"rero_ils/vendor_order_mail/{self.get_language_to_use()}.tpl.txt" def get_recipients(self, address_type): """Get the notification recipients email address. @@ -135,9 +138,9 @@ def get_recipients(self, address_type): :returns: email addresses list where send the notification to. 
""" return [ - recipient.get('address') - for recipient in self.get('context', {}).get('recipients', []) - if recipient.get('type') == address_type + recipient.get("address") + for recipient in self.get("context", {}).get("recipients", []) + if recipient.get("type") == address_type ] @classmethod @@ -155,7 +158,7 @@ def get_notification_context(cls, notifications=None): notification = notifications[0] order = notification.order - return {'order': order.dumps(dumper=AcqOrderNotificationDumper())} + return {"order": order.dumps(dumper=AcqOrderNotificationDumper())} # GETTER & SETTER METHODS ================================================= # Shortcuts to easy access notification attributes. @@ -163,12 +166,12 @@ def get_notification_context(cls, notifications=None): @property def acq_order_pid(self): """Shortcut for acq order pid of the notification.""" - return extracted_data_from_ref(self['context']['order']) + return extracted_data_from_ref(self["context"]["order"]) @cached_property def order(self): """Shortcut for acquisition order related to the notification.""" - return extracted_data_from_ref(self['context']['order'], data='record') + return extracted_data_from_ref(self["context"]["order"], data="record") @property def library_pid(self): diff --git a/rero_ils/modules/notifications/subclasses/at_desk.py b/rero_ils/modules/notifications/subclasses/at_desk.py index 5b222ec3bb..83dda7b5e9 100644 --- a/rero_ils/modules/notifications/subclasses/at_desk.py +++ b/rero_ils/modules/notifications/subclasses/at_desk.py @@ -65,19 +65,19 @@ def can_be_cancelled(self): request_loan = self.request_loan msg = None if not request_loan: - msg = 'No previous request found, none AT_DESK should be sent.' + msg = "No previous request found, none AT_DESK should be sent." # we need to use `!=` comparator because strings was built differently # The `!=` operator compares the value or equality of two objects, # `is not` operator checks whether two variables point to the same # object in memory : `id(str_a) is not `id(str_b)`. - elif request_loan.get('state') != LoanState.ITEM_AT_DESK: - msg = "The first found request isn\'t AT_DESK" + elif request_loan.get("state") != LoanState.ITEM_AT_DESK: + msg = "The first found request isn't AT_DESK" # we don't find any reasons to cancel this notification return msg is not None, msg def get_template_path(self): """Get the template to use to render the notification.""" - return f'email/at_desk/{self.get_language_to_use()}.txt' + return f"email/at_desk/{self.get_language_to_use()}.txt" def get_recipients_to(self): """Get notification recipient email addresses.""" @@ -90,51 +90,50 @@ def get_notification_context(cls, notifications=None): """Get the context to render the notification template.""" # Use a delay to be sure the notification is sent AFTER the loan has # been indexed (avoid problem due to server load). 
- context = { - 'delay': 30 - } + context = {"delay": 30} notifications = notifications or [] if not notifications: return context - context['loans'] = [] + context["loans"] = [] item_dumper = ItemNotificationDumper() patron_dumper = PatronNotificationDumper() for notification in notifications: loan = notification.loan creation_date = format_date_filter( - notification.get('creation_date'), date_format='medium', - locale=language_iso639_2to1(notification.get_language_to_use()) + notification.get("creation_date"), + date_format="medium", + locale=language_iso639_2to1(notification.get_language_to_use()), ) request_expire_date = format_date_filter( - loan.get('request_expire_date'), date_format='medium', - locale=language_iso639_2to1(notification.get_language_to_use()) + loan.get("request_expire_date"), + date_format="medium", + locale=language_iso639_2to1(notification.get_language_to_use()), ) # merge doc and item metadata preserving document key item_data = notification.item.dumps(dumper=item_dumper) - doc_data = notification.document.dumps( - dumper=document_title_dumper) + doc_data = notification.document.dumps(dumper=document_title_dumper) doc_data = {**item_data, **doc_data} # pickup location name --> !! pickup is on notif.request_loan, not # on notif.loan request_loan = notification.request_loan pickup_location = Location.get_record_by_pid( - request_loan.get('pickup_location_pid')) - if not pickup_location: - pickup_location = Location.get_record_by_pid( - request_loan.get('transaction_location_pid')) + request_loan.get("pickup_location_pid") + ) or Location.get_record_by_pid( + request_loan.get("transaction_location_pid") + ) # request_patron - request_patron = Patron.get_record_by_pid( - request_loan.get('patron_pid')) + request_patron = Patron.get_record_by_pid(request_loan.get("patron_pid")) loan_context = { - 'creation_date': creation_date, - 'document': doc_data, - 'pickup_name': pickup_location.get( - 'pickup_name', pickup_location.get('name')), - 'request_expire_date': request_expire_date, - 'patron': request_patron.dumps(dumper=patron_dumper) + "creation_date": creation_date, + "document": doc_data, + "pickup_name": pickup_location.get( + "pickup_name", pickup_location.get("name") + ), + "request_expire_date": request_expire_date, + "patron": request_patron.dumps(dumper=patron_dumper), } - context['loans'].append(loan_context) + context["loans"].append(loan_context) return context diff --git a/rero_ils/modules/notifications/subclasses/availability.py b/rero_ils/modules/notifications/subclasses/availability.py index 73c386c58f..43ba304f69 100644 --- a/rero_ils/modules/notifications/subclasses/availability.py +++ b/rero_ils/modules/notifications/subclasses/availability.py @@ -24,12 +24,11 @@ from rero_ils.modules.documents.dumpers import document_title_dumper from rero_ils.modules.items.dumpers import ItemNotificationDumper -from rero_ils.modules.libraries.dumpers import \ - LibraryCirculationNotificationDumper +from rero_ils.modules.libraries.dumpers import LibraryCirculationNotificationDumper from rero_ils.modules.patrons.dumpers import PatronNotificationDumper -from .circulation import CirculationNotification from ..models import NotificationChannel, NotificationType +from .circulation import CirculationNotification class AvailabilityCirculationNotification(CirculationNotification): @@ -54,8 +53,7 @@ def can_be_cancelled(self): # Check loan notification candidate (by unpacking tuple's notification # candidate) candidates_types = [ - n[1] for n in - 
self.loan.get_notification_candidates(trigger=None) + n[1] for n in self.loan.get_notification_candidates(trigger=None) ] if self.type not in candidates_types: msg = "Notification type isn't into notification candidate" @@ -80,28 +78,28 @@ def get_notification_context(cls, notifications=None): patron = notifications[0].patron library = notifications[0].pickup_library - include_address = notifications[0].get_communication_channel() == \ - NotificationChannel.MAIL + include_address = ( + notifications[0].get_communication_channel() == NotificationChannel.MAIL + ) # Dump basic informations context |= { - 'include_patron_address': include_address, - 'patron': patron.dumps(dumper=PatronNotificationDumper()), - 'library': library.dumps( - dumper=LibraryCirculationNotificationDumper()), - 'loans': [], - 'delay': 0 + "include_patron_address": include_address, + "patron": patron.dumps(dumper=PatronNotificationDumper()), + "library": library.dumps(dumper=LibraryCirculationNotificationDumper()), + "loans": [], + "delay": 0, } # Availability notification could be sent with a delay. We need to find # this delay into the library notifications settings and convert it # from minutes to seconds. - for setting in library.get('notification_settings', []): - if setting['type'] == NotificationType.AVAILABILITY: - context['delay'] = setting.get('delay', 0)*60 + for setting in library.get("notification_settings", []): + if setting["type"] == NotificationType.AVAILABILITY: + context["delay"] = setting.get("delay", 0) * 60 # Add metadata for any ``notification.loan`` of the notifications list item_dumper = ItemNotificationDumper() for notification in notifications: loc = lib = None - keep_until = notification.loan.get('request_expire_date') + keep_until = notification.loan.get("request_expire_date") if keep_until: keep_until = ciso8601.parse_datetime(keep_until) @@ -113,13 +111,14 @@ def get_notification_context(cls, notifications=None): lib = notification.transaction_library # merge doc and item metadata preserving document key item_data = notification.item.dumps(dumper=item_dumper) - doc_data = notification.document.dumps( - dumper=document_title_dumper) + doc_data = notification.document.dumps(dumper=document_title_dumper) doc_data = {**item_data, **doc_data} if loc and lib: - context['loans'].append({ - 'document': doc_data, - 'pickup_name': loc.get('pickup_name', lib.get('name')), - 'pickup_until': keep_until - }) + context["loans"].append( + { + "document": doc_data, + "pickup_name": loc.get("pickup_name", lib.get("name")), + "pickup_until": keep_until, + } + ) return context diff --git a/rero_ils/modules/notifications/subclasses/booking.py b/rero_ils/modules/notifications/subclasses/booking.py index 38006b44b7..84ee4498c3 100644 --- a/rero_ils/modules/notifications/subclasses/booking.py +++ b/rero_ils/modules/notifications/subclasses/booking.py @@ -31,8 +31,8 @@ from rero_ils.modules.patrons.dumpers import PatronNotificationDumper from rero_ils.utils import language_iso639_2to1 -from .circulation import CirculationNotification from ..models import NotificationChannel +from .circulation import CirculationNotification class BookingCirculationNotification(CirculationNotification): @@ -67,7 +67,7 @@ def get_communication_channel(self): def get_language_to_use(self): """Get the language to use when dispatching the notification.""" lib = self.pickup_library or self.transaction_library - return lib.get('communication_language') + return lib.get("communication_language") def get_recipients_to(self): """Get 
notification email addresses for 'TO' recipient type.""" @@ -78,7 +78,7 @@ def get_recipients_to(self): @classmethod def get_notification_context(cls, notifications=None): """Get the context to render the notification template.""" - context = {'loans': []} + context = {"loans": []} notifications = notifications or [] item_dumper = ItemNotificationDumper() @@ -86,33 +86,34 @@ def get_notification_context(cls, notifications=None): for notification in notifications: loan = notification.loan creation_date = format_date_filter( - notification.get('creation_date'), date_format='medium', - locale=language_iso639_2to1(notification.get_language_to_use()) + notification.get("creation_date"), + date_format="medium", + locale=language_iso639_2to1(notification.get_language_to_use()), ) # merge doc and item metadata preserving document key item_data = notification.item.dumps(dumper=item_dumper) - doc_data = notification.document.dumps( - dumper=document_title_dumper) + doc_data = notification.document.dumps(dumper=document_title_dumper) doc_data = {**item_data, **doc_data} # pickup location name --> !! pickup is on notif.request_loan, not # on notif.loan request_loan = notification.request_loan pickup_location = Location.get_record_by_pid( - request_loan.get('pickup_location_pid')) or \ - Location.get_record_by_pid( - request_loan.get('transaction_location_pid')) + request_loan.get("pickup_location_pid") + ) or Location.get_record_by_pid( + request_loan.get("transaction_location_pid") + ) # request_patron - request_patron = Patron.get_record_by_pid( - request_loan.get('patron_pid')) + request_patron = Patron.get_record_by_pid(request_loan.get("patron_pid")) loan_context = { - 'creation_date': creation_date, - 'in_transit': loan.state in LoanState.ITEM_IN_TRANSIT, - 'document': doc_data, - 'pickup_name': pickup_location.get( - 'pickup_name', pickup_location.get('name')), - 'patron': request_patron.dumps(dumper=patron_dumper) + "creation_date": creation_date, + "in_transit": loan.state in LoanState.ITEM_IN_TRANSIT, + "document": doc_data, + "pickup_name": pickup_location.get( + "pickup_name", pickup_location.get("name") + ), + "patron": request_patron.dumps(dumper=patron_dumper), } - context['loans'].append(loan_context) + context["loans"].append(loan_context) return context diff --git a/rero_ils/modules/notifications/subclasses/circulation.py b/rero_ils/modules/notifications/subclasses/circulation.py index ee1ef49144..d5aa3caffb 100644 --- a/rero_ils/modules/notifications/subclasses/circulation.py +++ b/rero_ils/modules/notifications/subclasses/circulation.py @@ -49,8 +49,10 @@ def extended_validation(self, **kwargs): if self.type not in NotificationType.CIRCULATION_NOTIFICATIONS: return f"'{self.type} isn't a CirculationNotification" if not self.loan_pid: - return '`loan` field must be specified into `context` for ' \ - 'CirculationNotification' + return ( + "`loan` field must be specified into `context` for " + "CirculationNotification" + ) return True # PARENT ABSTRACT IMPLEMENTATION METHODS ================================== @@ -73,7 +75,7 @@ def aggregation_key(self): self.get_template_path(), self.get_communication_channel(), self.library_pid, - self.patron_pid + self.patron_pid, ] return hashlib.md5(str(parts).encode()).hexdigest() @@ -89,49 +91,49 @@ def can_be_cancelled(self): can be cancelled; the reason why the notification can be cancelled (only present if tuple first value is True). 
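# A minimal, self-contained sketch (not part of the patch) of how the md5
# `aggregation_key` above lets the dispatcher merge notifications: candidates
# sharing the same key end up in one outgoing message. The dict keys here are
# illustrative only, not the real Notification API.
import hashlib
from collections import defaultdict

def group_by_aggregation_key(notifications):
    """Group notification-like dicts sharing the same md5 aggregation key."""
    groups = defaultdict(list)
    for notification in notifications:
        parts = [
            notification["template"],
            notification["channel"],
            notification["library_pid"],
            notification["patron_pid"],
        ]
        groups[hashlib.md5(str(parts).encode()).hexdigest()].append(notification)
    return groups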
""" - if not self.item: - return True, "Item doesn't exists anymore" - return False, None + return (False, None) if self.item else (True, "Item doesn't exists anymore") def get_communication_channel(self): """Get the communication channel to use for this notification.""" # By default the circulation notification should be send depending of # the patron setting. Override this method if necessary - return self.patron.get('patron', {}).get('communication_channel') + return self.patron.get("patron", {}).get("communication_channel") def get_language_to_use(self): """Get the language to use for dispatching the notification.""" # By default, the language to use to build the notification is defined # in the patron setting. Override this method if the patron isn't the # recipient of this notification. - return self.patron.get('patron', {}).get('communication_language') + return self.patron.get("patron", {}).get("communication_language") def get_template_path(self): """Get the template to use to render the notification.""" # By default, the template path to use reflects the notification type. # Override this method if necessary - return f'email/{self.type}/{self.get_language_to_use()}.txt' + return f"email/{self.type}/{self.get_language_to_use()}.txt" def get_recipients(self, address_type): """Get the notification recipient email addresses.""" mapping = { RecipientType.TO: self.get_recipients_to, - RecipientType.REPLY_TO: self.get_recipients_reply_to + RecipientType.REPLY_TO: self.get_recipients_reply_to, } return mapping[address_type]() if address_type in mapping else [] def get_recipients_reply_to(self): """Get the notification email address for 'REPLY_TO' recipient type.""" - return [self.library.get('email')] + return [self.library.get("email")] def get_recipients_to(self): """Get the notification email address for 'TO' recipient type.""" addresses = [] - if self.get_communication_channel() == NotificationChannel.EMAIL \ - and self.patron: + if ( + self.get_communication_channel() == NotificationChannel.EMAIL + and self.patron + ): addresses = [ self.patron.user.email, - self.patron['patron'].get('additional_communication_email') + self.patron["patron"].get("additional_communication_email"), ] addresses = [address for address in addresses if address] return addresses @@ -141,7 +143,7 @@ def get_recipients_to(self): @property def loan_pid(self): """Shortcut for loan pid of the notification.""" - return extracted_data_from_ref(self['context']['loan']) + return extracted_data_from_ref(self["context"]["loan"]) @cached_property def loan(self): @@ -151,7 +153,7 @@ def loan(self): @property def item_pid(self): """Shortcut for item pid of the notification.""" - return self.loan.get('item_pid', {}).get('value') + return self.loan.get("item_pid", {}).get("value") @cached_property def item(self): @@ -181,7 +183,7 @@ def library(self): @property def patron_pid(self): """Shortcut for patron pid of the notification.""" - return self.loan.get('patron_pid') + return self.loan.get("patron_pid") @cached_property def patron(self): @@ -191,7 +193,7 @@ def patron(self): @property def transaction_user_pid(self): """Shortcut for transaction user pid of the notification.""" - return self.loan.get('transaction_user_pid') + return self.loan.get("transaction_user_pid") @cached_property def transaction_user(self): @@ -211,7 +213,7 @@ def transaction_library(self): @property def transaction_location_pid(self): """Shortcut for transaction location pid of the notification.""" - return self.loan.get('transaction_location_pid') 
+ return self.loan.get("transaction_location_pid") @cached_property def transaction_location(self): @@ -221,7 +223,7 @@ def transaction_location(self): @property def pickup_location_pid(self): """Shortcut for pickup location pid of the notification.""" - return self.loan.get('pickup_location_pid') + return self.loan.get("pickup_location_pid") @cached_property def pickup_location(self): @@ -236,7 +238,7 @@ def pickup_library(self): @property def document_pid(self): """Shortcut for document pid of the notification.""" - return self.loan.get('document_pid') + return self.loan.get("document_pid") @cached_property def document(self): @@ -246,7 +248,8 @@ def document(self): @cached_property def request_loan(self): """Get the request loan related to this notification.""" - return self.item.get_first_loan_by_state(LoanState.ITEM_AT_DESK) \ - or self.item.get_first_loan_by_state( - LoanState.ITEM_IN_TRANSIT_FOR_PICKUP) \ + return ( + self.item.get_first_loan_by_state(LoanState.ITEM_AT_DESK) + or self.item.get_first_loan_by_state(LoanState.ITEM_IN_TRANSIT_FOR_PICKUP) or self.item.get_first_loan_by_state(LoanState.PENDING) + ) diff --git a/rero_ils/modules/notifications/subclasses/claim_issue.py b/rero_ils/modules/notifications/subclasses/claim_issue.py index 8a04657503..9cee108262 100644 --- a/rero_ils/modules/notifications/subclasses/claim_issue.py +++ b/rero_ils/modules/notifications/subclasses/claim_issue.py @@ -26,8 +26,11 @@ from rero_ils.modules.items.dumpers import ClaimIssueNotificationDumper from rero_ils.modules.notifications.api import Notification -from rero_ils.modules.notifications.models import NotificationChannel, \ - NotificationType, RecipientType +from rero_ils.modules.notifications.models import ( + NotificationChannel, + NotificationType, + RecipientType, +) from rero_ils.modules.utils import extracted_data_from_ref @@ -55,20 +58,24 @@ def extended_validation(self, **kwargs): if self.type != NotificationType.CLAIM_ISSUE: return f"'{self.type} isn't an ClaimSerialIssueNotification" if not self.item: - return '`item` field must be specified into `context` for ' \ - 'ClaimSerialIssueNotification' + return ( + "`item` field must be specified into `context` for " + "ClaimSerialIssueNotification" + ) if not self.item.is_issue: - return '`item` field must reference an serial issue item.' + return "`item` field must reference an serial issue item." # validate that at least one email of type `to` exist and one email of # type `reply_to` is given in the ist of emails. recipient_types = { - recipient.get('type') - for recipient in self.get('context', {}).get('recipients', []) + recipient.get("type") + for recipient in self.get("context", {}).get("recipients", []) } - if RecipientType.TO not in recipient_types \ - or RecipientType.REPLY_TO not in recipient_types: - return 'Recipient type `to` and `reply_to` are required' + if ( + RecipientType.TO not in recipient_types + or RecipientType.REPLY_TO not in recipient_types + ): + return "Recipient type `to` and `reply_to` are required" return True # PARENT ABSTRACT IMPLEMENTATION METHODS ================================== @@ -98,9 +105,7 @@ def can_be_cancelled(self): notification can be cancelled; the reason why the notification can be cancelled (only present if tuple first value is True). 
""" - if not self.item: - return True, "Item doesn't exists anymore" - return False, None + return (False, None) if self.item else (True, "Item doesn't exists anymore") def get_communication_channel(self): """Get the communication channel to use for this notification.""" @@ -112,13 +117,13 @@ def get_language_to_use(self): """Get the language to use for dispatching the notification.""" # By default, the language to use to build the notification is defined # in the vendor setting. Override this method if needed in the future. - return self.vendor.get('communication_language') + return self.vendor.get("communication_language") def get_template_path(self): """Get the template to use to render the notification.""" # By default, the template path to use reflects the notification type. # Override this method if necessary - return f'email/{self.type}/{self.get_language_to_use()}.tpl.txt' + return f"email/{self.type}/{self.get_language_to_use()}.tpl.txt" def get_recipients(self, address_type): """Get the notification recipients email address. @@ -132,9 +137,9 @@ def get_recipients(self, address_type): :rtype: list<{type: str, address: str}> """ return [ - recipient.get('address') - for recipient in self.get('context', {}).get('recipients', []) - if recipient.get('type') == address_type + recipient.get("address") + for recipient in self.get("context", {}).get("recipients", []) + if recipient.get("type") == address_type ] @classmethod @@ -153,19 +158,19 @@ def get_notification_context(cls, notifications=None): notification = notifications[0] item = notification.item - return {'issue': item.dumps(dumper=ClaimIssueNotificationDumper())} + return {"issue": item.dumps(dumper=ClaimIssueNotificationDumper())} # GETTER & SETTER METHODS ================================================= # Shortcuts to easy access notification attributes. 
@property def item_pid(self): """Shortcut for item pid related to the notification.""" - return extracted_data_from_ref(self['context']['item']) + return extracted_data_from_ref(self["context"]["item"]) @property def item(self): """Shortcut for item related to the notification.""" - return extracted_data_from_ref(self['context']['item'], data='record') + return extracted_data_from_ref(self["context"]["item"], data="record") @cached_property def vendor(self): diff --git a/rero_ils/modules/notifications/subclasses/internal.py b/rero_ils/modules/notifications/subclasses/internal.py index 8a0958c0fb..3ee0e6f565 100644 --- a/rero_ils/modules/notifications/subclasses/internal.py +++ b/rero_ils/modules/notifications/subclasses/internal.py @@ -23,8 +23,8 @@ import hashlib from abc import ABC -from .circulation import CirculationNotification from ..models import NotificationChannel +from .circulation import CirculationNotification class InternalCirculationNotification(CirculationNotification, ABC): @@ -57,8 +57,7 @@ def can_be_cancelled(self): # Check loan notification candidate (by unpacking tuple's notification # candidate) candidates_types = [ - n[1] for n in - self.loan.get_notification_candidates(trigger=None) + n[1] for n in self.loan.get_notification_candidates(trigger=None) ] if self.type not in candidates_types: msg = "Notification type isn't into notification candidate" @@ -84,7 +83,7 @@ def get_communication_channel(self): def get_language_to_use(self): """Get the language to use when dispatching the notification.""" - return self.library.get('communication_language') + return self.library.get("communication_language") def get_recipients_to(self): """Get notification recipient email addresses.""" diff --git a/rero_ils/modules/notifications/subclasses/recall.py b/rero_ils/modules/notifications/subclasses/recall.py index 101b185033..e7189ffca6 100644 --- a/rero_ils/modules/notifications/subclasses/recall.py +++ b/rero_ils/modules/notifications/subclasses/recall.py @@ -24,12 +24,11 @@ from rero_ils.modules.documents.dumpers import document_title_dumper from rero_ils.modules.items.models import ItemStatus -from rero_ils.modules.libraries.dumpers import \ - LibraryCirculationNotificationDumper +from rero_ils.modules.libraries.dumpers import LibraryCirculationNotificationDumper from rero_ils.modules.patrons.dumpers import PatronNotificationDumper -from .circulation import CirculationNotification from ..models import NotificationChannel +from .circulation import CirculationNotification class RecallCirculationNotification(CirculationNotification): @@ -74,25 +73,28 @@ def get_notification_context(cls, notifications=None): patron = notifications[0].patron library = notifications[0].library - include_address = notifications[0].get_communication_channel == \ - NotificationChannel.MAIL + include_address = ( + notifications[0].get_communication_channel == NotificationChannel.MAIL + ) # Dump basic informations context |= { - 'include_patron_address': include_address, - 'patron': patron.dumps(dumper=PatronNotificationDumper()), - 'library': library.dumps( - dumper=LibraryCirculationNotificationDumper()), - 'loans': [] + "include_patron_address": include_address, + "patron": patron.dumps(dumper=PatronNotificationDumper()), + "library": library.dumps(dumper=LibraryCirculationNotificationDumper()), + "loans": [], } # Add metadata for any ``notification.loan`` of the notifications list for notification in notifications: - end_date = notification.loan.get('end_date') + end_date = 
notification.loan.get("end_date") if end_date: end_date = ciso8601.parse_datetime(end_date) end_date = end_date.strftime("%d.%m.%Y") - context['loans'].append({ - 'document': notification.document.dumps( - dumper=document_title_dumper), - 'end_date': end_date - }) + context["loans"].append( + { + "document": notification.document.dumps( + dumper=document_title_dumper + ), + "end_date": end_date, + } + ) return context diff --git a/rero_ils/modules/notifications/subclasses/reminder.py b/rero_ils/modules/notifications/subclasses/reminder.py index a19eb22fde..f4f12436c2 100644 --- a/rero_ils/modules/notifications/subclasses/reminder.py +++ b/rero_ils/modules/notifications/subclasses/reminder.py @@ -23,19 +23,20 @@ import ciso8601 from num2words import num2words -from rero_ils.modules.circ_policies.api import DUE_SOON_REMINDER_TYPE, \ - OVERDUE_REMINDER_TYPE +from rero_ils.modules.circ_policies.api import ( + DUE_SOON_REMINDER_TYPE, + OVERDUE_REMINDER_TYPE, +) from rero_ils.modules.documents.dumpers import document_title_dumper -from rero_ils.modules.libraries.dumpers import \ - LibraryCirculationNotificationDumper +from rero_ils.modules.libraries.dumpers import LibraryCirculationNotificationDumper from rero_ils.modules.loans.utils import get_circ_policy from rero_ils.modules.patrons.dumpers import PatronNotificationDumper from rero_ils.utils import language_iso639_2to1 -from .circulation import CirculationNotification +from ...items.dumpers import ItemNotificationDumper from ..api import NotificationsSearch from ..models import NotificationChannel, NotificationType -from ...items.dumpers import ItemNotificationDumper +from .circulation import CirculationNotification class ReminderCirculationNotification(CirculationNotification): @@ -60,14 +61,14 @@ def _cipo_reminder(self): :return the cipo reminder related to this notifications. """ - if not hasattr(self, '_reminder'): + if not hasattr(self, "_reminder"): cipo = get_circ_policy(self.loan) reminder_type = DUE_SOON_REMINDER_TYPE if self.type != NotificationType.DUE_SOON: reminder_type = OVERDUE_REMINDER_TYPE self._reminder = cipo.get_reminder( reminder_type=reminder_type, - idx=self.get('context', {}).get('reminder_counter', 0) + idx=self.get("context", {}).get("reminder_counter", 0), ) return self._reminder @@ -89,10 +90,10 @@ def can_be_cancelled(self): return can, reason # Check if cipo reminder exists if not self._cipo_reminder: - return True, 'No corresponding CIPO reminder found' + return True, "No corresponding CIPO reminder found" # Check if a similar notification exists and has already been sent. if self._exists_similar_notification(): - return True, 'Similar notification already proceed' + return True, "Similar notification already proceed" # we don't find any reasons to cancel this notification return False, None @@ -106,16 +107,18 @@ def _exists_similar_notification(self): :return True if a similar notification is found, False otherwise. 
""" - reminder_counter = self.get('context', {}).get('reminder_counter', 0) - trans_date = self.loan.get('transaction_date') + reminder_counter = self.get("context", {}).get("reminder_counter", 0) + trans_date = self.loan.get("transaction_date") trans_date = ciso8601.parse_datetime(trans_date) - query = NotificationsSearch() \ - .filter('term', context__loan__pid=self.loan_pid) \ - .filter('term', notification_type=self.type) \ - .filter('range', creation_date={'gt': trans_date}) \ - .filter('term', context__reminder_counter=reminder_counter) + query = ( + NotificationsSearch() + .filter("term", context__loan__pid=self.loan_pid) + .filter("term", notification_type=self.type) + .filter("range", creation_date={"gt": trans_date}) + .filter("term", context__reminder_counter=reminder_counter) + ) if self.pid: - query = query.exclude('term', pid=self.pid) + query = query.exclude("term", pid=self.pid) return query.source().count() > 0 def get_communication_channel(self): @@ -123,7 +126,7 @@ def get_communication_channel(self): # For REMINDERS notification, the communication channel to use is # define into the corresponding circulation policy if self._cipo_reminder: - channel = self._cipo_reminder.get('communication_channel') + channel = self._cipo_reminder.get("communication_channel") # If CIPO communication channel is patron setting, the parent # class will return the correct value if channel != NotificationChannel.PATRON_SETTING: @@ -133,8 +136,8 @@ def get_communication_channel(self): def get_template_path(self): """Get the template to use to render the notification.""" if self._cipo_reminder: - tmpl = self._cipo_reminder.get('template') - return f'{tmpl}/{self.get_language_to_use()}.txt' + tmpl = self._cipo_reminder.get("template") + return f"{tmpl}/{self.get_language_to_use()}.txt" return super().get_template_path() @classmethod @@ -151,36 +154,36 @@ def get_notification_context(cls, notifications=None): patron = notifications[0].patron library = notifications[0].library - include_address = notifications[0].get_communication_channel == \ - NotificationChannel.MAIL + include_address = ( + notifications[0].get_communication_channel == NotificationChannel.MAIL + ) # Dump basic informations context |= { - 'include_patron_address': include_address, - 'patron': patron.dumps(dumper=PatronNotificationDumper()), - 'library': library.dumps( - dumper=LibraryCirculationNotificationDumper()), - 'loans': [] + "include_patron_address": include_address, + "patron": patron.dumps(dumper=PatronNotificationDumper()), + "library": library.dumps(dumper=LibraryCirculationNotificationDumper()), + "loans": [], } # Add metadata for any ``notification.loan`` of the notifications list item_dumper = ItemNotificationDumper() language = language_iso639_2to1(notifications[0].get_language_to_use()) for notification in notifications: - end_date = notification.loan.get('end_date') - counter = notification.get('context', {})\ - .get('reminder_counter', 0) + end_date = notification.loan.get("end_date") + counter = notification.get("context", {}).get("reminder_counter", 0) counter += 1 - literal_counter = num2words(counter, to='ordinal', lang=language) + literal_counter = num2words(counter, to="ordinal", lang=language) if end_date: end_date = ciso8601.parse_datetime(end_date) end_date = end_date.strftime("%d.%m.%Y") # merge doc and item metadata preserving document key item_data = notification.item.dumps(dumper=item_dumper) - doc_data = notification.document.dumps( - dumper=document_title_dumper) + doc_data = 
notification.document.dumps(dumper=document_title_dumper) doc_data = {**item_data, **doc_data} - context['loans'].append({ - 'document': doc_data, - 'end_date': end_date, - 'reminder_counter': literal_counter - }) + context["loans"].append( + { + "document": doc_data, + "end_date": end_date, + "reminder_counter": literal_counter, + } + ) return context diff --git a/rero_ils/modules/notifications/subclasses/request.py b/rero_ils/modules/notifications/subclasses/request.py index 7fc8153a5b..da7b3c6c39 100644 --- a/rero_ils/modules/notifications/subclasses/request.py +++ b/rero_ils/modules/notifications/subclasses/request.py @@ -47,14 +47,14 @@ def get_recipients_to(self): # Request notification will be sent to the item location if a location # ``notification_email`` attribute is defined, otherwise to the library # address. - if loc_email := self.location.get('notification_email'): + if loc_email := self.location.get("notification_email"): return [loc_email] return super().get_recipients_to() @classmethod def get_notification_context(cls, notifications=None): """Get the context to render the notification template.""" - context = {'loans': []} + context = {"loans": []} notifications = notifications or [] item_dumper = ItemNotificationDumper() @@ -62,26 +62,28 @@ def get_notification_context(cls, notifications=None): for notification in notifications: loan = notification.loan creation_date = format_date_filter( - notification.get('creation_date'), date_format='medium', - locale=language_iso639_2to1(notification.get_language_to_use()) + notification.get("creation_date"), + date_format="medium", + locale=language_iso639_2to1(notification.get_language_to_use()), ) # merge doc and item metadata preserving document key item_data = notification.item.dumps(dumper=item_dumper) - doc_data = notification.document.dumps( - dumper=document_title_dumper) + doc_data = notification.document.dumps(dumper=document_title_dumper) doc_data = {**item_data, **doc_data} # pickup location name - pickup_location = notification.pickup_location or \ - notification.transaction_location + pickup_location = ( + notification.pickup_location or notification.transaction_location + ) loan_context = { - 'creation_date': creation_date, - 'in_transit': loan.state in LoanState.ITEM_IN_TRANSIT, - 'document': doc_data, - 'pickup_name': pickup_location.get( - 'pickup_name', pickup_location.get('name')), - 'patron': notification.patron.dumps(dumper=patron_dumper) + "creation_date": creation_date, + "in_transit": loan.state in LoanState.ITEM_IN_TRANSIT, + "document": doc_data, + "pickup_name": pickup_location.get( + "pickup_name", pickup_location.get("name") + ), + "patron": notification.patron.dumps(dumper=patron_dumper), } - context['loans'].append(loan_context) + context["loans"].append(loan_context) return context diff --git a/rero_ils/modules/notifications/subclasses/transit.py b/rero_ils/modules/notifications/subclasses/transit.py index ef5415b4a1..777e985114 100644 --- a/rero_ils/modules/notifications/subclasses/transit.py +++ b/rero_ils/modules/notifications/subclasses/transit.py @@ -23,8 +23,7 @@ from rero_ils.filter import format_date_filter from rero_ils.modules.documents.dumpers import document_title_dumper from rero_ils.modules.items.dumpers import ItemNotificationDumper -from rero_ils.modules.libraries.dumpers import \ - LibraryCirculationNotificationDumper +from rero_ils.modules.libraries.dumpers import LibraryCirculationNotificationDumper from rero_ils.utils import language_iso639_2to1 from .internal import 
InternalCirculationNotification @@ -45,7 +44,7 @@ class TransitCirculationNotification(InternalCirculationNotification): def get_template_path(self): """Get the template to use to render the notification.""" - return f'email/transit_notice/{self.get_language_to_use()}.txt' + return f"email/transit_notice/{self.get_language_to_use()}.txt" def get_recipients_to(self): """Get notification recipient email addresses.""" @@ -56,7 +55,7 @@ def get_recipients_to(self): @classmethod def get_notification_context(cls, notifications=None): """Get the context to render the notification template.""" - context = {'loans': []} + context = {"loans": []} notifications = notifications or [] item_dumper = ItemNotificationDumper() @@ -64,19 +63,19 @@ def get_notification_context(cls, notifications=None): for notification in notifications: trans_lib = notification.transaction_library creation_date = format_date_filter( - notification.get('creation_date'), date_format='medium', - locale=language_iso639_2to1(notification.get_language_to_use()) + notification.get("creation_date"), + date_format="medium", + locale=language_iso639_2to1(notification.get_language_to_use()), ) # merge doc and item metadata preserving document key item_data = notification.item.dumps(dumper=item_dumper) - doc_data = notification.document.dumps( - dumper=document_title_dumper) + doc_data = notification.document.dumps(dumper=document_title_dumper) doc_data = {**item_data, **doc_data} loan_context = { - 'creation_date': creation_date, - 'document': doc_data, - 'transaction_library': trans_lib.dumps(dumper=lib_dumper) + "creation_date": creation_date, + "document": doc_data, + "transaction_library": trans_lib.dumps(dumper=lib_dumper), } - context['loans'].append(loan_context) + context["loans"].append(loan_context) return context diff --git a/rero_ils/modules/notifications/tasks.py b/rero_ils/modules/notifications/tasks.py index 5064a2bf18..4a23cd500f 100644 --- a/rero_ils/modules/notifications/tasks.py +++ b/rero_ils/modules/notifications/tasks.py @@ -43,10 +43,9 @@ def process_notifications(notification_type, verbose=True): """ notification_pids = get_notifications(notification_type=notification_type) result = Dispatcher.dispatch_notifications( - notification_pids=notification_pids, - verbose=verbose + notification_pids=notification_pids, verbose=verbose ) - set_timestamp(f'notification-dispatch-{notification_type}', **result) + set_timestamp(f"notification-dispatch-{notification_type}", **result) return result @@ -60,6 +59,7 @@ def create_notifications(types=None, tstamp=None, verbose=True): :param verbose: is the task should be verbose. 
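# Hypothetical synchronous usage of `process_notifications` above, assuming an
# application context is active (Celery scheduling is outside this hunk):
from rero_ils.modules.notifications.models import NotificationType
from rero_ils.modules.notifications.tasks import process_notifications

result = process_notifications(NotificationType.DUE_SOON, verbose=False)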
""" from ..loans.utils import get_circ_policy + types = types or [] tstamp = tstamp or datetime.now(timezone.utc) logger = current_app.logger @@ -73,13 +73,14 @@ def create_notifications(types=None, tstamp=None, verbose=True): try: logger.debug(f"* Loan#{loan.pid} is considered as 'due_soon'") notifications = loan.create_notification( - _type=NotificationType.DUE_SOON) - notification_counter[NotificationType.DUE_SOON] += len( - notifications) + _type=NotificationType.DUE_SOON + ) + notification_counter[NotificationType.DUE_SOON] += len(notifications) except Exception as error: logger.error( - f'Unable to create DUE_SOON notification :: {error}', - exc_info=True, stack_info=True + f"Unable to create DUE_SOON notification :: {error}", + exc_info=True, + stack_info=True, ) process_notifications(NotificationType.DUE_SOON) # OVERDUE NOTIFICATIONS @@ -92,15 +93,13 @@ def create_notifications(types=None, tstamp=None, verbose=True): # to should be sent from the due_date and the current used date. loan_library = Library.get_record_by_pid(loan.library_pid) open_days = loan_library.count_open( - start_date=loan.overdue_date, - end_date=tstamp + start_date=loan.overdue_date, end_date=tstamp ) circ_policy = get_circ_policy(loan) - logger.debug(f' - this loan use the cipo#{circ_policy.pid}') - logger.debug(f' - open days from loans due_date :: {open_days}') + logger.debug(f" - this loan use the cipo#{circ_policy.pid}") + logger.debug(f" - open days from loans due_date :: {open_days}") reminders = circ_policy.get_reminders( - reminder_type=OVERDUE_REMINDER_TYPE, - limit=open_days + reminder_type=OVERDUE_REMINDER_TYPE, limit=open_days ) # For each reminder, try to create it. # the `create_notification` method will check if the notification @@ -109,22 +108,25 @@ def create_notifications(types=None, tstamp=None, verbose=True): for idx, _ in enumerate(reminders): try: if notifications := loan.create_notification( - _type=NotificationType.OVERDUE, - counter=idx + _type=NotificationType.OVERDUE, counter=idx ): - msg = f' --> Overdue notification#{idx+1} created' + msg = f" --> Overdue notification#{idx+1} created" logger.debug(msg) notification_counter[NotificationType.OVERDUE] += len( - notifications) + notifications + ) else: - msg = f' --> Overdue notification#{idx+1} skipped ' \ - ':: already sent' + msg = ( + f" --> Overdue notification#{idx+1} skipped " + ":: already sent" + ) logger.debug(msg) except Exception as error: logger.error( - f'Unable to create OVERDUE notification :: {error}', - exc_info=True, stack_info=True + f"Unable to create OVERDUE notification :: {error}", + exc_info=True, + stack_info=True, ) process_notifications(NotificationType.OVERDUE) notification_sum = sum(notification_counter.values()) @@ -133,8 +135,8 @@ def create_notifications(types=None, tstamp=None, verbose=True): if verbose: logger = current_app.logger logger.info("NOTIFICATIONS CREATION TASK") - logger.info(f' * total of {notification_sum} notification(s) created') + logger.info(f" * total of {notification_sum} notification(s) created") for notif_type, cpt in counters.items(): - logger.info(f' +--> {cpt} `{notif_type}` notification(s) created') + logger.info(f" +--> {cpt} `{notif_type}` notification(s) created") return counters diff --git a/rero_ils/modules/notifications/utils.py b/rero_ils/modules/notifications/utils.py index ea59455442..452c51edcb 100644 --- a/rero_ils/modules/notifications/utils.py +++ b/rero_ils/modules/notifications/utils.py @@ -20,8 +20,11 @@ import ciso8601 from elasticsearch_dsl import Q -from 
rero_ils.modules.circ_policies.api import DUE_SOON_REMINDER_TYPE, \ - OVERDUE_REMINDER_TYPE, CircPolicy +from rero_ils.modules.circ_policies.api import ( + DUE_SOON_REMINDER_TYPE, + OVERDUE_REMINDER_TYPE, + CircPolicy, +) from rero_ils.modules.locations.api import Location from rero_ils.modules.notifications.api import NotificationsSearch from rero_ils.modules.notifications.models import NotificationType @@ -34,12 +37,16 @@ def get_notification(loan, notification_type): :param notification_type: the type of notification sent. """ from .api import Notification - results = NotificationsSearch()\ - .filter('term', context__loan__pid=loan.pid)\ - .filter('term', notification_type=notification_type) \ - .params(preserve_order=True) \ - .sort({'creation_date': {"order": "desc"}}) \ - .source().scan() + + results = ( + NotificationsSearch() + .filter("term", context__loan__pid=loan.pid) + .filter("term", notification_type=notification_type) + .params(preserve_order=True) + .sort({"creation_date": {"order": "desc"}}) + .source() + .scan() + ) try: pid = next(results).pid return Notification.get_record_by_pid(pid) @@ -55,41 +62,45 @@ def get_notifications(notification_type, processed=False, not_sent=False): :param not_sent: filter on not yet send notifications. :return a notification pid generator. """ - query = NotificationsSearch()\ - .filter('term', notification_type=notification_type) \ - .source('pid') + query = ( + NotificationsSearch() + .filter("term", notification_type=notification_type) + .source("pid") + ) if not not_sent: query = query.filter( - 'bool', must_not=[ - Q('exists', field='notification_sent'), - Q('term', notification_sent=False) - ] + "bool", + must_not=[ + Q("exists", field="notification_sent"), + Q("term", notification_sent=False), + ], ) if processed: - query = query.filter('exists', field='process_date') + query = query.filter("exists", field="process_date") else: - query = query.filter( - 'bool', must_not=[Q('exists', field='process_date')]) + query = query.filter("bool", must_not=[Q("exists", field="process_date")]) for hit in query.scan(): yield hit.pid -def number_of_notifications_sent(loan, - notification_type=NotificationType.OVERDUE): +def number_of_notifications_sent(loan, notification_type=NotificationType.OVERDUE): """Get the number of notifications sent for the given loan. :param loan: the parent loan. :param notification_type: the type of notification to find. :return notification counter. """ - trans_date = ciso8601.parse_datetime(loan.get('transaction_date')) - return NotificationsSearch()\ - .filter('term', context__loan__pid=loan.pid)\ - .filter('term', notification_type=notification_type) \ - .filter('term', notification_sent=True) \ - .filter('range', creation_date={'gt': trans_date}) \ - .source().count() + trans_date = ciso8601.parse_datetime(loan.get("transaction_date")) + return ( + NotificationsSearch() + .filter("term", context__loan__pid=loan.pid) + .filter("term", notification_type=notification_type) + .filter("term", notification_sent=True) + .filter("range", creation_date={"gt": trans_date}) + .source() + .count() + ) def calculate_notification_amount(notification): @@ -104,10 +115,10 @@ def calculate_notification_amount(notification): # Find the reminder type to use based on the notification that we would # sent. 
If no reminder type is found, then no amount could be calculated # and we can't return '0' - notif_type = notification.get('notification_type') + notif_type = notification.get("notification_type") reminder_type_mapping = { NotificationType.DUE_SOON: DUE_SOON_REMINDER_TYPE, - NotificationType.OVERDUE: OVERDUE_REMINDER_TYPE + NotificationType.OVERDUE: OVERDUE_REMINDER_TYPE, } reminder_type = reminder_type_mapping.get(notif_type) if not notif_type or not reminder_type: @@ -121,13 +132,13 @@ def calculate_notification_amount(notification): location.organisation_pid, location.library_pid, notification.patron.patron_type_pid, - notification.item.holding_circulation_category_pid + notification.item.holding_circulation_category_pid, ) # now we get the circulation policy, search the correct reminder depending # of the reminder_counter from the notification context. reminder = cipo.get_reminder( reminder_type=reminder_type, - idx=notification.get('context', {}).get('reminder_counter', 0) + idx=notification.get("context", {}).get("reminder_counter", 0), ) - return reminder.get('fee_amount', 0) if reminder else 0 + return reminder.get("fee_amount", 0) if reminder else 0 diff --git a/rero_ils/modules/notifications/views.py b/rero_ils/modules/notifications/views.py index 143de53f3f..bc28083774 100644 --- a/rero_ils/modules/notifications/views.py +++ b/rero_ils/modules/notifications/views.py @@ -27,14 +27,14 @@ from rero_ils.modules.decorators import check_logged_as_librarian blueprint = Blueprint( - 'notifications', + "notifications", __name__, - template_folder='templates', - static_folder='static', + template_folder="templates", + static_folder="static", ) -@blueprint.route('/notifications/templates/list', methods=['GET']) +@blueprint.route("/notifications/templates/list", methods=["GET"]) @check_logged_as_librarian def list_available_template(): """List all templates to build a notification content.""" @@ -43,9 +43,10 @@ def list_available_template(): template_directories = set() for glob_pattern in current_app.config.get( - 'RERO_ILS_NOTIFICATIONS_ALLOWED_TEMPLATE_FILES'): + "RERO_ILS_NOTIFICATIONS_ALLOWED_TEMPLATE_FILES" + ): for path in pathlib.Path(template_path).rglob(glob_pattern): parent_path = str(path.parent.absolute()) - parent_path = parent_path.replace(template_path, '').lstrip('/') + parent_path = parent_path.replace(template_path, "").lstrip("/") template_directories.add(parent_path) - return jsonify({'templates': list(template_directories)}) + return jsonify({"templates": list(template_directories)}) diff --git a/rero_ils/modules/operation_logs/api.py b/rero_ils/modules/operation_logs/api.py index 1bb2955dd5..14ab06e519 100644 --- a/rero_ils/modules/operation_logs/api.py +++ b/rero_ils/modules/operation_logs/api.py @@ -25,9 +25,9 @@ from invenio_records.api import RecordBase from invenio_search import RecordsSearch, current_search_client -from .extensions import DatesExtension, IDExtension, ResolveRefsExtension from ..api import IlsRecordsSearch from ..fetchers import FetchedPID +from .extensions import DatesExtension, IDExtension, ResolveRefsExtension class OperationLogsSearch(IlsRecordsSearch): @@ -36,9 +36,9 @@ class OperationLogsSearch(IlsRecordsSearch): class Meta: """Search only on Notifications index.""" - index = 'operation_logs' + index = "operation_logs" doc_types = None - fields = ('*', ) + fields = ("*",) facets = {} default_filter = None @@ -50,7 +50,7 @@ def get_logs_by_notification_pid(self, notif_pid): :returns a generator of ElasticSearch hit. :rtype generator. 
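# `list_available_template` above reduces to collecting the parent directory
# of every matching template file; a stripped-down sketch (glob patterns are
# made up, and `template_path` is assumed absolute, as in the view):
import pathlib

def template_directories(template_path, glob_patterns=("*.txt", "*.tpl.txt")):
    """Return template sub-directories relative to `template_path`."""
    directories = set()
    for glob_pattern in glob_patterns:
        for path in pathlib.Path(template_path).rglob(glob_pattern):
            parent_path = str(path.parent.absolute())
            directories.add(parent_path.replace(template_path, "").lstrip("/"))
    return sorted(directories)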
""" - query = self.filter('term', notification__pid=notif_pid) + query = self.filter("term", notification__pid=notif_pid) for hit in query.scan(): yield hit.to_dict() @@ -61,12 +61,10 @@ def get_logs_by_record_pid(self, pid): :returns: List of logs. """ return list( - self.filter( - 'bool', must={ - 'exists': { - 'field': 'loan' - } - }).filter('term', record__value=pid).scan()) + self.filter("bool", must={"exists": {"field": "loan"}}) + .filter("term", record__value=pid) + .scan() + ) def operation_log_id_fetcher(record_uuid, data): @@ -76,24 +74,20 @@ def operation_log_id_fetcher(record_uuid, data): :param data: The record metadata. :return: A :data:`rero_ils.modules.fetchers.FetchedPID` instance. """ - return FetchedPID(provider=None, pid_type='oplg', pid_value=record_uuid) + return FetchedPID(provider=None, pid_type="oplg", pid_value=record_uuid) class OperationLog(RecordBase): """OperationLog class.""" - index_name = 'operation_logs' + index_name = "operation_logs" - _schema = 'operation_logs/operation_log-v0.0.1.json' + _schema = "operation_logs/operation_log-v0.0.1.json" - _extensions = [ - ResolveRefsExtension(), - DatesExtension(), - IDExtension() - ] + _extensions = [ResolveRefsExtension(), DatesExtension(), IDExtension()] @classmethod - def create(cls, data, id_=None, index_refresh='false', **kwargs): + def create(cls, data, id_=None, index_refresh="false", **kwargs): """Create a new record instance and store it in elasticsearch. :param data: Dict with the record metadata. @@ -107,7 +101,7 @@ def create(cls, data, id_=None, index_refresh='false', **kwargs): :returns: A new :class:`Record` instance. """ if id_: - data['pid'] = id_ + data["pid"] = id_ record = cls(data, model=None, **kwargs) @@ -115,25 +109,22 @@ def create(cls, data, id_=None, index_refresh='false', **kwargs): for e in cls._extensions: e.pre_create(record) - if current_app.config.get('RERO_ILS_ENABLE_OPERATION_LOG_VALIDATION'): + if current_app.config.get("RERO_ILS_ENABLE_OPERATION_LOG_VALIDATION"): # Validate also encodes the data # For backward compatibility we pop them here. - format_checker = kwargs.pop('format_checker', None) - validator = kwargs.pop('validator', None) - if '$schema' not in record: - record['$schema'] = current_jsonschemas.path_to_url( - cls._schema) + format_checker = kwargs.pop("format_checker", None) + validator = kwargs.pop("validator", None) + if "$schema" not in record: + record["$schema"] = current_jsonschemas.path_to_url(cls._schema) record._validate( - format_checker=format_checker, - validator=validator, - use_model=False + format_checker=format_checker, validator=validator, use_model=False ) current_search_client.index( index=cls.get_index(record), body=record.dumps(), - id=record['pid'], - refresh=index_refresh + id=record["pid"], + refresh=index_refresh, ) # Run post create extensions @@ -150,8 +141,8 @@ def get_index(cls, data): :param data: Dict with the record metadata. :returns: str, the corresponding index name. 
""" - suffix = '-'.join(data.get('date', '').split('-')[0:1]) - return f'{cls.index_name}-{suffix}' + suffix = "-".join(data.get("date", "").split("-")[:1]) + return f"{cls.index_name}-{suffix}" @classmethod def bulk_index(cls, data): @@ -163,22 +154,22 @@ def bulk_index(cls, data): for d in data: d = OperationLog(d) oplg = d.dumps() - if oplg.get('record', {}).get('pid'): - oplg['record']['value'] = oplg['record'].pop('pid', None) + if oplg.get("record", {}).get("pid"): + oplg["record"]["value"] = oplg["record"].pop("pid", None) # Run pre create extensions for e in cls._extensions: e.pre_create(oplg) action = { - '_op_type': 'index', - '_index': cls.get_index(oplg), - '_source': oplg, - '_id': oplg['pid'] + "_op_type": "index", + "_index": cls.get_index(oplg), + "_source": oplg, + "_id": oplg["pid"], } actions.append(action) n_succeed, errors = bulk(current_search_client, actions) if n_succeed != len(data): - raise Exception(f'Elasticsearch Indexing Errors: {errors}') + raise Exception(f"Elasticsearch Indexing Errors: {errors}") @classmethod def get_record(cls, _id): @@ -191,21 +182,24 @@ def get_record(cls, _id): # here the elasticsearch get API cannot be used with an index alias return cls( next( - RecordsSearch(index=cls.index_name).filter( - 'term', _id=_id).scan()).to_dict()) + RecordsSearch(index=cls.index_name).filter("term", _id=_id).scan() + ).to_dict() + ) @classmethod def get_indices(cls): """Get all index names present in the elasticsearch server.""" - return set([ - v['index'] for v in current_search_client.cat.indices( - index=f'{cls.index_name}*', format='json') - ]) + return { + v["index"] + for v in current_search_client.cat.indices( + index=f"{cls.index_name}*", format="json" + ) + } @classmethod def delete_indices(cls): """Remove all index names present in the elasticsearch server.""" - current_search_client.indices.delete(f'{cls.index_name}*') + current_search_client.indices.delete(f"{cls.index_name}*") return True @classmethod @@ -216,7 +210,7 @@ def update(cls, _id, date, data): :param str date: Log date, useful for getting the right index. :param dict data: New record data. 
""" - index = cls.get_index({'date': date}) + index = cls.get_index({"date": date}) document = Document.get(_id, index=index, using=current_search_client) @@ -230,20 +224,20 @@ def update(cls, _id, date, data): refresh=True, ) - if result != 'updated': - raise Exception('Operation log cannot be updated.') + if result != "updated": + raise Exception("Operation log cannot be updated.") @property def id(self): """Get model identifier.""" - return self.get('pid') + return self.get("pid") @classmethod def count(cls, with_deleted=False): """Get record count.""" count = 0 try: - count = OperationLogsSearch().filter('match_all').count() + count = OperationLogsSearch().filter("match_all").count() except NotFoundError: - current_app.logger.warning('Operation logs index not found.') + current_app.logger.warning("Operation logs index not found.") return count diff --git a/rero_ils/modules/operation_logs/cli.py b/rero_ils/modules/operation_logs/cli.py index 315a624b45..7644f8e5ad 100644 --- a/rero_ils/modules/operation_logs/cli.py +++ b/rero_ils/modules/operation_logs/cli.py @@ -36,10 +36,10 @@ def abort_if_false(ctx, param, value): ctx.abort() -@click.command('create_operation_logs') -@click.option('-l', '--lazy', 'lazy', is_flag=True, default=False) -@click.option('-s', '--batch-size', 'size', type=int, default=10000) -@click.argument('infile', type=click.File('r')) +@click.command("create_operation_logs") +@click.option("-l", "--lazy", "lazy", is_flag=True, default=False) +@click.option("-s", "--batch-size", "size", type=int, default=10000) +@click.argument("infile", type=click.File("r")) @with_appcontext def create_operation_logs(infile, lazy, size): """Load operation log records in reroils. @@ -47,13 +47,8 @@ def create_operation_logs(infile, lazy, size): :param infile: Json operation log file. :param lazy: lazy reads file """ - click.secho('Load operation log records:', fg='green') - if lazy: - # try to lazy read json file (slower, better memory management) - data = read_json_record(infile) - else: - # load everything in memory (faster, bad memory management) - data = json.load(infile) + click.secho("Load operation log records:", fg="green") + data = read_json_record(infile) if lazy else json.load(infile) index_count = 0 with click.progressbar(data) as bar: records = [] @@ -67,22 +62,22 @@ def create_operation_logs(infile, lazy, size): if records: OperationLog.bulk_index(records) index_count += len(records) - click.echo(f'created {index_count} operation logs.') + click.echo(f"created {index_count} operation logs.") -@click.command('dump_operation_logs') -@click.argument('outfile_name') -@click.option('-y', '--year', 'year', type=int) +@click.command("dump_operation_logs") +@click.argument("outfile_name") +@click.option("-y", "--year", "year", type=int) @with_appcontext def dump_operation_logs(outfile_name, year): """Dumps operation log records in a given file. :param outfile: JSON operation log output file. 
""" - click.secho('Dumps operation log records:', fg='green') + click.secho("Dumps operation log records:", fg="green") index_name = OperationLog.index_name if year is not None: - index_name = f'{index_name}-{year}' + index_name = f"{index_name}-{year}" search = RecordsSearch(index=index_name) index_count = 0 @@ -91,15 +86,19 @@ def dump_operation_logs(outfile_name, year): for oplg in bar: outfile.write(str(oplg.to_dict())) index_count += 1 - click.echo(f'created {index_count} operation logs.') + click.echo(f"created {index_count} operation logs.") -@click.command('destroy_operation_logs') -@click.option('--yes-i-know', is_flag=True, callback=abort_if_false, - expose_value=False, - prompt='Do you really want to remove all the operation logs?') +@click.command("destroy_operation_logs") +@click.option( + "--yes-i-know", + is_flag=True, + callback=abort_if_false, + expose_value=False, + prompt="Do you really want to remove all the operation logs?", +) @with_appcontext def destroy_operation_logs(): """Removes all the operation logs data.""" OperationLog.delete_indices() - click.secho('All operations logs have been removed', fg='green') + click.secho("All operations logs have been removed", fg="green") diff --git a/rero_ils/modules/operation_logs/es_templates/__init__.py b/rero_ils/modules/operation_logs/es_templates/__init__.py index aeb3311569..6d7a3f2bef 100644 --- a/rero_ils/modules/operation_logs/es_templates/__init__.py +++ b/rero_ils/modules/operation_logs/es_templates/__init__.py @@ -20,6 +20,4 @@ def list_es_templates(): """Elasticsearch templates path.""" - return [ - 'rero_ils.modules.operation_logs.es_templates' - ] + return ["rero_ils.modules.operation_logs.es_templates"] diff --git a/rero_ils/modules/operation_logs/extensions.py b/rero_ils/modules/operation_logs/extensions.py index 8b8be4cded..6e5148359a 100644 --- a/rero_ils/modules/operation_logs/extensions.py +++ b/rero_ils/modules/operation_logs/extensions.py @@ -27,8 +27,8 @@ from rero_ils.modules.patrons.api import current_librarian -from .models import OperationLogOperation from ..utils import extracted_data_from_ref +from .models import OperationLogOperation class OperationLogFactory: @@ -53,59 +53,46 @@ def _build_operation_log(self, record, operation): :return a dict representing the operation log to register. 
""" oplg = { - 'date': datetime.now(timezone.utc).isoformat(), - 'record': { - 'value': record.get('pid'), - 'type': record.provider.pid_type - }, - 'operation': operation, - 'user_name': 'system' # default value, could be override + "date": datetime.now(timezone.utc).isoformat(), + "record": {"value": record.get("pid"), "type": record.provider.pid_type}, + "operation": operation, + "user_name": "system", # default value, could be override } - if ( - hasattr(record, 'organisation_pid') - and (org_pid := record.organisation_pid) - ): - oplg['record']['organisation_pid'] = org_pid - if hasattr(record, 'library_pid') and (org_pid := record.library_pid): - oplg['record']['library_pid'] = org_pid + if hasattr(record, "organisation_pid") and (org_pid := record.organisation_pid): + oplg["record"]["organisation_pid"] = org_pid + if hasattr(record, "library_pid") and (org_pid := record.library_pid): + oplg["record"]["library_pid"] = org_pid if current_librarian: oplg |= { - 'user_name': current_librarian.formatted_name, - 'user': { - 'type': 'ptrn', - 'value': current_librarian.pid - }, - 'organisation': { - 'type': 'org', - 'value': current_librarian.organisation_pid - }, - 'library': { - 'type': 'lib', - 'value': current_librarian.library_pid + "user_name": current_librarian.formatted_name, + "user": {"type": "ptrn", "value": current_librarian.pid}, + "organisation": { + "type": "org", + "value": current_librarian.organisation_pid, }, + "library": {"type": "lib", "value": current_librarian.library_pid}, } - if (lib_pid := flask_request.args.get('current_library')) \ - and lib_pid in current_librarian.manageable_library_pids: + if ( + lib_pid := flask_request.args.get("current_library") + ) and lib_pid in current_librarian.manageable_library_pids: oplg |= { - 'organisation': { - 'type': 'org', - 'value': current_librarian.organisation_pid + "organisation": { + "type": "org", + "value": current_librarian.organisation_pid, }, - 'library': { - 'type': 'lib', - 'value': lib_pid - } + "library": {"type": "lib", "value": lib_pid}, } # Allow additional informations for the operation log. 
# Subclasses can override the ``additional_informations()`` method # to add some data into the operation log dict - oplg |= (self.get_additional_informations(record) or {}) + oplg |= self.get_additional_informations(record) or {} return oplg def create_operation_log(self, record, operation, **kwargs): """Build and register an operation log.""" from .api import OperationLog + data = self._build_operation_log(record, operation) OperationLog.create(data) @@ -114,26 +101,22 @@ class OperationLogObserverExtension(RecordExtension, OperationLogFactory): """Observe a resource and build operation log when it changes.""" post_create = partialmethod( - OperationLogFactory.create_operation_log, - operation=OperationLogOperation.CREATE + OperationLogFactory.create_operation_log, operation=OperationLogOperation.CREATE ) """Called after a record is created.""" pre_commit = partialmethod( - OperationLogFactory.create_operation_log, - operation=OperationLogOperation.UPDATE + OperationLogFactory.create_operation_log, operation=OperationLogOperation.UPDATE ) """Called before a record is committed.""" post_delete = partialmethod( - OperationLogFactory.create_operation_log, - operation=OperationLogOperation.DELETE + OperationLogFactory.create_operation_log, operation=OperationLogOperation.DELETE ) """Called after a record is deleted.""" -class UntrackedFieldsOperationLogObserverExtension\ - (OperationLogObserverExtension): +class UntrackedFieldsOperationLogObserverExtension(OperationLogObserverExtension): """Extension to skip Operation log if only some field changed. If you need to observe a resource but skip changes on some resource @@ -163,12 +146,12 @@ def __init__(self, fields=None): def pre_commit(self, record): """Called before a record is committed.""" original_record = record.__class__.get_record_by_pid(record.pid) - diff = DeepDiff( - original_record, record, + if diff := DeepDiff( + original_record, + record, verbose_level=2, - exclude_paths=self.exclude_path - ) - if diff: + exclude_paths=self.exclude_path, + ): super().pre_commit(record) @@ -176,14 +159,14 @@ class ResolveRefsExtension(RecordExtension): """Replace all $ref values by a dict of pid, type.""" mod_type = { - 'documents': 'doc', - 'items': 'item', - 'holdings': 'hold', - 'loans': 'loan', - 'ill_requests': 'illr', - 'patrons': 'ptrn', - 'organisations': 'org', - 'libraries': 'lib' + "documents": "doc", + "items": "item", + "holdings": "hold", + "loans": "loan", + "ill_requests": "illr", + "patrons": "ptrn", + "organisations": "org", + "libraries": "lib", } def pre_dump(self, record, dumper=None): @@ -203,14 +186,11 @@ def _resolve_refs(self, record): """ for k, v in record.items(): if isinstance(v, dict): - if v.get('$ref'): - _type = self.mod_type.get( - extracted_data_from_ref(v, data='resource')) - if _type: - resolved = dict( - pid=extracted_data_from_ref(v), - type=_type - ) + if v.get("$ref"): + if _type := self.mod_type.get( + extracted_data_from_ref(v, data="resource") + ): + resolved = dict(pid=extracted_data_from_ref(v), type=_type) record[k] = resolved else: self._resolve_refs(v) @@ -224,8 +204,8 @@ def pre_create(self, record): :param record: the record metadata. """ - if not record.get('pid'): - record['pid'] = str(uuid.uuid1()) + if not record.get("pid"): + record["pid"] = str(uuid.uuid1()) class DatesExtension(RecordExtension): @@ -237,6 +217,6 @@ def pre_create(self, record): :param record: the record metadata. 
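# A self-contained toy version (fabricated data and mapping) of the `$ref`
# rewriting performed by `_resolve_refs` above:
MOD_TYPE = {"documents": "doc", "items": "item"}

def resolve_refs(record):
    """Replace {"$ref": <url>} dicts by {"pid": ..., "type": ...} in place."""
    for key, value in record.items():
        if isinstance(value, dict):
            if ref := value.get("$ref"):
                resource, pid = ref.rstrip("/").split("/")[-2:]
                if _type := MOD_TYPE.get(resource):
                    record[key] = {"pid": pid, "type": _type}
            else:
                resolve_refs(value)

record = {"document": {"$ref": "https://bib.rero.ch/api/documents/42"}}
resolve_refs(record)
assert record == {"document": {"pid": "42", "type": "doc"}}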
""" iso_now = pytz.utc.localize(datetime.utcnow()).isoformat() - for date_field in ['_created', '_updated']: + for date_field in ["_created", "_updated"]: if not record.get(date_field): record[date_field] = iso_now diff --git a/rero_ils/modules/operation_logs/logs/api.py b/rero_ils/modules/operation_logs/logs/api.py index d147b37c56..9381aa2db7 100644 --- a/rero_ils/modules/operation_logs/logs/api.py +++ b/rero_ils/modules/operation_logs/logs/api.py @@ -24,7 +24,7 @@ from rero_ils.modules.utils import extracted_data_from_ref -class SpecificOperationLog(): +class SpecificOperationLog: """Specific Operation log.""" @classmethod @@ -36,24 +36,26 @@ def _get_patron_data(cls, patron): """ patron_type = None - if patron.get('patron'): + if patron.get("patron"): patron_type = extracted_data_from_ref( - patron['patron']['type'], data='record') + patron["patron"]["type"], data="record" + ) hashed_pid = hashlib.md5(patron.pid.encode()).hexdigest() data = { - 'name': patron.formatted_name, - 'type': patron_type['name'] if patron_type else None, - 'age': patron.age, - 'postal_code': patron.user.user_profile.get( - 'postal_code', 'no_information'), - 'gender': patron.user.user_profile.get('gender', 'no_information'), - 'pid': patron.pid, - 'hashed_pid': hashed_pid + "name": patron.formatted_name, + "type": patron_type["name"] if patron_type else None, + "age": patron.age, + "postal_code": patron.user.user_profile.get( + "postal_code", "no_information" + ), + "gender": patron.user.user_profile.get("gender", "no_information"), + "pid": patron.pid, + "hashed_pid": hashed_pid, } - if patron.get('local_codes'): - data['local_codes'] = patron['local_codes'] + if patron.get("local_codes"): + data["local_codes"] = patron["local_codes"] return data @@ -65,19 +67,20 @@ def _get_item_data(cls, item): :returns: Item formatted data """ data = { - 'pid': item.pid, - 'library_pid': item.library_pid, - 'category': item['type'], - 'document': cls._get_document_data( - extracted_data_from_ref(item['document'], data='record')), - 'holding': cls._get_holding_data( - extracted_data_from_ref(item['holding'], data='record')) + "pid": item.pid, + "library_pid": item.library_pid, + "category": item["type"], + "document": cls._get_document_data( + extracted_data_from_ref(item["document"], data="record") + ), + "holding": cls._get_holding_data( + extracted_data_from_ref(item["holding"], data="record") + ), } - if item.get('call_number'): - data['call_number'] = item.get('call_number') - if item.get('enumerationAndChronology'): - data['enumerationAndChronology'] =\ - item.get('enumerationAndChronology') + if item.get("call_number"): + data["call_number"] = item.get("call_number") + if item.get("enumerationAndChronology"): + data["enumerationAndChronology"] = item.get("enumerationAndChronology") return data @classmethod @@ -89,13 +92,13 @@ def _get_document_data(cls, document): """ document = document.dumps() return { - 'pid': document['pid'], - 'title': next(filter(lambda x: x.get('type') == 'bf:Title', - document.get('title')) - ).get('_text'), - 'type': - document['type'][0].get('subtype', - document['type'][0]['main_type']) + "pid": document["pid"], + "title": next( + filter(lambda x: x.get("type") == "bf:Title", document.get("title")) + ).get("_text"), + "type": document["type"][0].get( + "subtype", document["type"][0]["main_type"] + ), } @classmethod @@ -106,8 +109,8 @@ def _get_holding_data(cls, holding): :returns: Holding formatted data """ return { - 'pid': holding.pid, - 'location_name': 
diff --git a/rero_ils/modules/operation_logs/logs/api.py b/rero_ils/modules/operation_logs/logs/api.py
index d147b37c56..9381aa2db7 100644
--- a/rero_ils/modules/operation_logs/logs/api.py
+++ b/rero_ils/modules/operation_logs/logs/api.py
@@ -24,7 +24,7 @@
 from rero_ils.modules.utils import extracted_data_from_ref


-class SpecificOperationLog():
+class SpecificOperationLog:
     """Specific Operation log."""

     @classmethod
@@ -36,24 +36,26 @@ def _get_patron_data(cls, patron):
         """
         patron_type = None

-        if patron.get('patron'):
+        if patron.get("patron"):
             patron_type = extracted_data_from_ref(
-                patron['patron']['type'], data='record')
+                patron["patron"]["type"], data="record"
+            )

         hashed_pid = hashlib.md5(patron.pid.encode()).hexdigest()

         data = {
-            'name': patron.formatted_name,
-            'type': patron_type['name'] if patron_type else None,
-            'age': patron.age,
-            'postal_code': patron.user.user_profile.get(
-                'postal_code', 'no_information'),
-            'gender': patron.user.user_profile.get('gender', 'no_information'),
-            'pid': patron.pid,
-            'hashed_pid': hashed_pid
+            "name": patron.formatted_name,
+            "type": patron_type["name"] if patron_type else None,
+            "age": patron.age,
+            "postal_code": patron.user.user_profile.get(
+                "postal_code", "no_information"
+            ),
+            "gender": patron.user.user_profile.get("gender", "no_information"),
+            "pid": patron.pid,
+            "hashed_pid": hashed_pid,
         }

-        if patron.get('local_codes'):
-            data['local_codes'] = patron['local_codes']
+        if patron.get("local_codes"):
+            data["local_codes"] = patron["local_codes"]

         return data

@@ -65,19 +67,20 @@ def _get_item_data(cls, item):
         :returns: Item formatted data
         """
         data = {
-            'pid': item.pid,
-            'library_pid': item.library_pid,
-            'category': item['type'],
-            'document': cls._get_document_data(
-                extracted_data_from_ref(item['document'], data='record')),
-            'holding': cls._get_holding_data(
-                extracted_data_from_ref(item['holding'], data='record'))
+            "pid": item.pid,
+            "library_pid": item.library_pid,
+            "category": item["type"],
+            "document": cls._get_document_data(
+                extracted_data_from_ref(item["document"], data="record")
+            ),
+            "holding": cls._get_holding_data(
+                extracted_data_from_ref(item["holding"], data="record")
+            ),
         }
-        if item.get('call_number'):
-            data['call_number'] = item.get('call_number')
-        if item.get('enumerationAndChronology'):
-            data['enumerationAndChronology'] =\
-                item.get('enumerationAndChronology')
+        if item.get("call_number"):
+            data["call_number"] = item.get("call_number")
+        if item.get("enumerationAndChronology"):
+            data["enumerationAndChronology"] = item.get("enumerationAndChronology")
         return data

     @classmethod
@@ -89,13 +92,13 @@ def _get_document_data(cls, document):
         """
         document = document.dumps()
         return {
-            'pid': document['pid'],
-            'title': next(filter(lambda x: x.get('type') == 'bf:Title',
-                                 document.get('title'))
-                          ).get('_text'),
-            'type':
-                document['type'][0].get('subtype',
-                                        document['type'][0]['main_type'])
+            "pid": document["pid"],
+            "title": next(
+                filter(lambda x: x.get("type") == "bf:Title", document.get("title"))
+            ).get("_text"),
+            "type": document["type"][0].get(
+                "subtype", document["type"][0]["main_type"]
+            ),
         }

     @classmethod
@@ -106,8 +109,8 @@ def _get_holding_data(cls, holding):
         :returns: Holding formatted data
         """
         return {
-            'pid': holding.pid,
-            'location_name':
-                cls._get_location_name(holding.location_pid)
+            "pid": holding.pid,
+            "location_name": cls._get_location_name(holding.location_pid),
         }

     @classmethod
@@ -118,4 +121,4 @@ def _get_location_name(cls, location_pid):
         :returns: Location name
         """
         location = Location.get_record_by_pid(location_pid)
-        return location['name']
+        return location["name"]
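Note: `_get_patron_data` above stores an MD5 digest of the patron pid alongside the other fields, so log entries can be correlated per patron without exposing the raw pid once it is removed. A quick illustration using only the standard library (the pid value is made up):

    import hashlib

    hashed_pid = hashlib.md5("42".encode()).hexdigest()
    # -> a stable 32-character hex string; the same pid always yields the same digest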
diff --git a/rero_ils/modules/operation_logs/models.py b/rero_ils/modules/operation_logs/models.py
index e06c8d4a85..d04ccc16cd 100644
--- a/rero_ils/modules/operation_logs/models.py
+++ b/rero_ils/modules/operation_logs/models.py
@@ -22,6 +22,6 @@ class OperationLogOperation:
     """Allowed operation for operation logs."""

-    CREATE = 'create'
-    UPDATE = 'update'
-    DELETE = 'delete'
+    CREATE = "create"
+    UPDATE = "update"
+    DELETE = "delete"
diff --git a/rero_ils/modules/operation_logs/permissions.py b/rero_ils/modules/operation_logs/permissions.py
index 2ba4966ec4..8b5b03f082 100644
--- a/rero_ils/modules/operation_logs/permissions.py
+++ b/rero_ils/modules/operation_logs/permissions.py
@@ -18,13 +18,12 @@
 """Permissions of Operation log."""
 from invenio_access import action_factory

-from rero_ils.modules.permissions import AllowedByAction, \
-    RecordPermissionPolicy
+from rero_ils.modules.permissions import AllowedByAction, RecordPermissionPolicy

 # Actions to control operation logs policies for CRUD operations
-search_action = action_factory('oplg-search')
-read_action = action_factory('oplg-read')
-access_action = action_factory('oplg-access')
+search_action = action_factory("oplg-search")
+read_action = action_factory("oplg-read")
+access_action = action_factory("oplg-access")


 class OperationLogPermissionPolicy(RecordPermissionPolicy):
diff --git a/rero_ils/modules/operation_logs/serializers.py b/rero_ils/modules/operation_logs/serializers.py
index fdfeb49707..16a02a203c 100644
--- a/rero_ils/modules/operation_logs/serializers.py
+++ b/rero_ils/modules/operation_logs/serializers.py
@@ -18,8 +18,12 @@
 """Operation Logs serialization."""
 from rero_ils.modules.libraries.api import LibrariesSearch
 from rero_ils.modules.locations.api import LocationsSearch
-from rero_ils.modules.serializers import CachedDataSerializerMixin, \
-    JSONSerializer, RecordSchemaJSONV1, search_responsify
+from rero_ils.modules.serializers import (
+    CachedDataSerializerMixin,
+    JSONSerializer,
+    RecordSchemaJSONV1,
+    search_responsify,
+)


 class OperationLogsJSONSerializer(JSONSerializer, CachedDataSerializerMixin):
@@ -28,15 +32,16 @@ class OperationLogsJSONSerializer(JSONSerializer, CachedDataSerializerMixin):
     def _postprocess_search_hit(self, hit: dict) -> None:
         """Post-process each hit of a search result."""
         # add library name if the library entry exists.
-        if library := hit.get('metadata', {}).get('library'):
-            library['name'] = self.get_resource(
-                LibrariesSearch(), library.get('value'))['name']
-        if 'loan' in (metadata := hit.get('metadata', {})):
+        if library := hit.get("metadata", {}).get("library"):
+            library["name"] = self.get_resource(
+                LibrariesSearch(), library.get("value")
+            )["name"]
+        if "loan" in (metadata := hit.get("metadata", {})):
             # enrich `transaction_location` and `pickup_location` fields with
             # related library information
-            trans_loc_field = metadata['loan'].get('transaction_location', {})
+            trans_loc_field = metadata["loan"].get("transaction_location", {})
             self._enrich_with_library_info(trans_loc_field)
-            pickup_loc_field = metadata['loan'].get('pickup_location', {})
+            pickup_loc_field = metadata["loan"].get("pickup_location", {})
             self._enrich_with_library_info(pickup_loc_field)
         super()._postprocess_search_hit(hit)

@@ -46,14 +51,11 @@ def _enrich_with_library_info(self, field):
         :param field: the dictionary field to enrich. This dictionary should
             contain the location pid.
         """
-        if location := self.get_resource(LocationsSearch(), field.get('pid')):
-            lib_pid = location.get('library', {}).get('pid')
+        if location := self.get_resource(LocationsSearch(), field.get("pid")):
+            lib_pid = location.get("library", {}).get("pid")
             if library := self.get_resource(LibrariesSearch(), lib_pid):
-                field['library'] = {
-                    'pid': library['pid'],
-                    'name': library['name']
-                }
+                field["library"] = {"pid": library["pid"], "name": library["name"]}


 _json = OperationLogsJSONSerializer(RecordSchemaJSONV1)
-json_oplogs_search = search_responsify(_json, 'application/rero+json')
+json_oplogs_search = search_responsify(_json, "application/rero+json")
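Note: `_postprocess_search_hit` above mutates the raw search hit in place so the client gets display names without extra lookups. Roughly, with a hand-made hit and a faked lookup result (in the real code the library record comes from `self.get_resource(LibrariesSearch(), ...)`):

    hit = {"metadata": {"library": {"type": "lib", "value": "3"}}}

    # Stand-in for the indexed library returned by the cached lookup.
    library_record = {"pid": "3", "name": "Main library"}
    hit["metadata"]["library"]["name"] = library_record["name"]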
diff --git a/rero_ils/modules/organisations/api.py b/rero_ils/modules/organisations/api.py
index 7e5c4a537a..1a2db73fbe 100644
--- a/rero_ils/modules/organisations/api.py
+++ b/rero_ils/modules/organisations/api.py
@@ -35,9 +35,9 @@
 # provider
 OrganisationProvider = type(
-    'OrganisationProvider',
+    "OrganisationProvider",
     (Provider,),
-    dict(identifier=OrganisationIdentifier, pid_type='org')
+    dict(identifier=OrganisationIdentifier, pid_type="org"),
 )
 # minter
 organisation_id_minter = partial(id_minter, provider=OrganisationProvider)
@@ -51,22 +51,21 @@ class OrganisationsSearch(IlsRecordsSearch):
     class Meta:
         """Meta class."""

-        index = 'organisations'
+        index = "organisations"
         doc_types = None
-        fields = ('*', )
+        fields = ("*",)
         facets = {}

         default_filter = None

     def get_record_by_viewcode(self, viewcode, fields=None):
         """Search by viewcode."""
-        query = self.filter('term', code=viewcode).extra(size=1)
+        query = self.filter("term", code=viewcode).extra(size=1)
         if fields:
             query = query.source(includes=fields)
         response = query.execute()
         if response.hits.total.value != 1:
-            raise NotFoundError(
-                f'Organisation viewcode {viewcode}: Result not found.')
+            raise NotFoundError(f"Organisation viewcode {viewcode}: Result not found.")
         return response.hits.hits[0]._source


@@ -81,27 +80,24 @@ class Organisation(IlsRecord):
     @classmethod
     def get_all(cls):
         """Get all organisations."""
-        return sorted([
-            Organisation.get_record(_id)
-            for _id in Organisation.get_all_ids()
-        ], key=lambda org: org.get('name'))
+        return sorted(
+            [Organisation.get_record(_id) for _id in Organisation.get_all_ids()],
+            key=lambda org: org.get("name"),
+        )

     @classmethod
     def all_code(cls):
         """Get all code."""
-        return [org.get('code') for org in cls.get_all()]
+        return [org.get("code") for org in cls.get_all()]

     @classmethod
     def get_record_by_viewcode(cls, viewcode):
         """Get record by view code."""
-        result = OrganisationsSearch()\
-            .filter('term', code=viewcode)\
-            .execute()
-        if result['hits']['total']['value'] != 1:
-            raise Exception(
-                'Organisation (get_record_by_viewcode): Result not found.')
+        result = OrganisationsSearch().filter("term", code=viewcode).execute()
+        if result["hits"]["total"]["value"] != 1:
+            raise Exception("Organisation (get_record_by_viewcode): Result not found.")

-        return result['hits']['hits'][0]['_source']
+        return result["hits"]["hits"][0]["_source"]

     @classmethod
     def get_record_by_online_harvested_source(cls, source):
@@ -110,8 +106,9 @@ def get_record_by_online_harvested_source(cls, source):
         :param source: the record source
         :return: Organisation record or None.
         """
-        results = OrganisationsSearch().filter(
-            'term', online_harvested_source=source).scan()
+        results = (
+            OrganisationsSearch().filter("term", online_harvested_source=source).scan()
+        )
         try:
             return Organisation.get_record_by_pid(next(results).pid)
         except StopIteration:
@@ -124,9 +121,13 @@ def organisation_pid(self):

     def online_circulation_category(self):
         """Get the default circulation category for online resources."""
-        results = ItemTypesSearch().filter(
-            'term', organisation__pid=self.pid).filter(
-                'term', type='online').source(['pid']).scan()
+        results = (
+            ItemTypesSearch()
+            .filter("term", organisation__pid=self.pid)
+            .filter("term", type="online")
+            .source(["pid"])
+            .scan()
+        )
         try:
             return next(results).pid
         except StopIteration:
@@ -142,9 +143,12 @@ def get_online_locations(self):

     def get_libraries_pids(self):
         """Get all libraries pids related to the organisation."""
-        query = LibrariesSearch().source(['pid'])\
-            .filter('term', organisation__pid=self.pid)\
-            .source('pid')
+        query = (
+            LibrariesSearch()
+            .source(["pid"])
+            .filter("term", organisation__pid=self.pid)
+            .source("pid")
+        )
         return [hit.pid for hit in query.scan()]

     def get_libraries(self):
@@ -154,9 +158,12 @@ def get_vendor_pids(self):
         """Get all vendor pids related to the organisation."""
-        query = VendorsSearch().source(['pid'])\
-            .filter('term', organisation__pid=self.pid)\
-            .source('pid')
+        query = (
+            VendorsSearch()
+            .source(["pid"])
+            .filter("term", organisation__pid=self.pid)
+            .source("pid")
+        )
         return [hit.pid for hit in query.scan()]

     def get_vendors(self):
@@ -170,12 +177,10 @@ def get_links_to_me(self, get_pids=False):
         :param get_pids: if True list of linked pids
                          if False count of linked records
         """
-        from rero_ils.modules.acquisition.acq_receipts.api import \
-            AcqReceiptsSearch
-        library_query = LibrariesSearch()\
-            .filter('term', organisation__pid=self.pid)
-        receipt_query = AcqReceiptsSearch() \
-            .filter('term', organisation__pid=self.pid)
+        from rero_ils.modules.acquisition.acq_receipts.api import AcqReceiptsSearch
+
+        library_query = LibrariesSearch().filter("term", organisation__pid=self.pid)
+        receipt_query = AcqReceiptsSearch().filter("term", organisation__pid=self.pid)
         links = {}
         if get_pids:
             libraries = sorted_pids(library_query)
@@ -184,21 +189,21 @@ def get_links_to_me(self, get_pids=False):
             libraries = library_query.count()
             receipts = receipt_query.count()
         if libraries:
-            links['libraries'] = libraries
+            links["libraries"] = libraries
         if receipts:
-            links['acq_receipts'] = receipts
+            links["acq_receipts"] = receipts
         return links
     def reasons_not_to_delete(self):
         """Get reasons not to delete record."""
         cannot_delete = {}
         if links := self.get_links_to_me():
-            cannot_delete['links'] = links
+            cannot_delete["links"] = links
         return cannot_delete

     def is_test_organisation(self):
         """Check if this is a test organisation."""
-        return self.get('code') == 'cypress'
+        return self.get("code") == "cypress"


 class OrganisationsIndexer(IlsRecordsIndexer):
@@ -211,4 +216,4 @@ def bulk_index(self, record_id_iterator):

         :param record_id_iterator: Iterator yielding record UUIDs.
         """
-        super().bulk_index(record_id_iterator, doc_type='org')
+        super().bulk_index(record_id_iterator, doc_type="org")
diff --git a/rero_ils/modules/organisations/dumpers.py b/rero_ils/modules/organisations/dumpers.py
index 9ad3949876..4b6cbc261c 100644
--- a/rero_ils/modules/organisations/dumpers.py
+++ b/rero_ils/modules/organisations/dumpers.py
@@ -30,11 +30,13 @@ def dump(self, record, data):
         :param record: The record to dump.
         :param data: The initial dump data passed in by ``record.dumps()``.
         """
-        data.update({
-            'pid': record.pid,
-            'name': record.get('name'),
-            'code': record.get('code'),
-            'currency': record.get('default_currency'),
-            'budget': {'pid': record.get('current_budget_pid')}
-        })
+        data.update(
+            {
+                "pid": record.pid,
+                "name": record.get("name"),
+                "code": record.get("code"),
+                "currency": record.get("default_currency"),
+                "budget": {"pid": record.get("current_budget_pid")},
+            }
+        )
         return {k: v for k, v in data.items() if v}
diff --git a/rero_ils/modules/organisations/jsonresolver.py b/rero_ils/modules/organisations/jsonresolver.py
index cedd589c9b..d83f595488 100644
--- a/rero_ils/modules/organisations/jsonresolver.py
+++ b/rero_ils/modules/organisations/jsonresolver.py
@@ -23,7 +23,7 @@
 from ..jsonresolver import resolve_json_refs


-@jsonresolver.route('/api/organisations/<pid>', host='bib.rero.ch')
+@jsonresolver.route("/api/organisations/<pid>", host="bib.rero.ch")
 def organisation_resolver(pid):
     """Organisation resolver."""
-    return resolve_json_refs('org', pid)
+    return resolve_json_refs("org", pid)
diff --git a/rero_ils/modules/organisations/models.py b/rero_ils/modules/organisations/models.py
index f43c3c04f6..b8eac4a2d7 100644
--- a/rero_ils/modules/organisations/models.py
+++ b/rero_ils/modules/organisations/models.py
@@ -27,11 +27,11 @@ class OrganisationIdentifier(RecordIdentifier):
     """Sequence generator for Organisation identifiers."""

-    __tablename__ = 'organisation_id'
-    __mapper_args__ = {'concrete': True}
+    __tablename__ = "organisation_id"
+    __mapper_args__ = {"concrete": True}

     recid = db.Column(
-        db.BigInteger().with_variant(db.Integer, 'sqlite'),
+        db.BigInteger().with_variant(db.Integer, "sqlite"),
         primary_key=True,
         autoincrement=True,
     )
@@ -40,4 +40,4 @@ class OrganisationMetadata(db.Model, RecordMetadataBase):
     """Organisation record metadata."""

-    __tablename__ = 'organisation_metadata'
+    __tablename__ = "organisation_metadata"
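Note: the organisation dumper above ends with `{k: v for k, v in data.items() if v}`, which silently drops empty fields (for instance a missing `default_currency`). A self-contained sketch of that idiom:

    data = {"pid": "1", "name": "My organisation", "currency": None}
    cleaned = {k: v for k, v in data.items() if v}
    assert cleaned == {"pid": "1", "name": "My organisation"}
    # `if v` removes *every* falsy value, including 0 and "" -- acceptable here
    # because pids, names and currency codes are non-empty strings when present.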
diff --git a/rero_ils/modules/organisations/permissions.py b/rero_ils/modules/organisations/permissions.py
index 9ac05b7d3a..20347ccb66 100644
--- a/rero_ils/modules/organisations/permissions.py
+++ b/rero_ils/modules/organisations/permissions.py
@@ -19,16 +19,19 @@
 """Permissions for organisations."""
 from invenio_access import action_factory

-from rero_ils.modules.permissions import AllowedByAction, \
-    AllowedByActionRestrictByOrganisation, RecordPermissionPolicy
+from rero_ils.modules.permissions import (
+    AllowedByAction,
+    AllowedByActionRestrictByOrganisation,
+    RecordPermissionPolicy,
+)

 # Actions to control Organisation policies
-search_action = action_factory('org-search')
-read_action = action_factory('org-read')
-create_action = action_factory('org-create')
-update_action = action_factory('org-update')
-delete_action = action_factory('org-delete')
-access_action = action_factory('org-access')
+search_action = action_factory("org-search")
+read_action = action_factory("org-read")
+create_action = action_factory("org-create")
+update_action = action_factory("org-update")
+delete_action = action_factory("org-delete")
+access_action = action_factory("org-access")


 class OrganisationPermissionPolicy(RecordPermissionPolicy):
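Note: each `action_factory("org-*")` call above creates a named action need; the permission policy classes then map CRUD operations onto those needs. Under standard invenio-access usage (a sketch of the generic mechanism, not code copied from RERO-ILS), such an action can be checked against the current identity like this:

    from invenio_access import Permission, action_factory

    search_action = action_factory("org-search")

    # True only if the current identity has been granted the `org-search` action.
    can_search = Permission(search_action).can()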
diff --git a/rero_ils/modules/patron_transaction_events/api.py b/rero_ils/modules/patron_transaction_events/api.py
index c256e5b13f..069a6fad7c 100644
--- a/rero_ils/modules/patron_transaction_events/api.py
+++ b/rero_ils/modules/patron_transaction_events/api.py
@@ -29,24 +29,25 @@
 from rero_ils.modules.providers import Provider
 from rero_ils.modules.utils import extracted_data_from_ref

-from .models import PatronTransactionEventIdentifier, \
-    PatronTransactionEventMetadata, PatronTransactionEventType
+from .models import (
+    PatronTransactionEventIdentifier,
+    PatronTransactionEventMetadata,
+    PatronTransactionEventType,
+)

 # provider
 PatronTransactionEventProvider = type(
-    'PatronTransactionEventProvider',
+    "PatronTransactionEventProvider",
     (Provider,),
-    dict(identifier=PatronTransactionEventIdentifier, pid_type='ptre')
+    dict(identifier=PatronTransactionEventIdentifier, pid_type="ptre"),
 )
 # minter
 patron_transaction_event_id_minter = partial(
-    id_minter,
-    provider=PatronTransactionEventProvider
+    id_minter, provider=PatronTransactionEventProvider
 )
 # fetcher
 patron_transaction_event_id_fetcher = partial(
-    id_fetcher,
-    provider=PatronTransactionEventProvider
+    id_fetcher, provider=PatronTransactionEventProvider
 )

@@ -56,9 +57,9 @@ class PatronTransactionEventsSearch(IlsRecordsSearch):
     class Meta:
         """Search only on patron_transaction_event index."""

-        index = 'patron_transaction_events'
+        index = "patron_transaction_events"
         doc_types = None
-        fields = ('*', )
+        fields = ("*",)
         facets = {}

         default_filter = None

@@ -72,27 +73,27 @@ class PatronTransactionEvent(IlsRecord):
     provider = PatronTransactionEventProvider
     model_cls = PatronTransactionEventMetadata
     pids_exist_check = {
-        'required': {
-            'pttr': 'parent'
-        },
-        'not_required': {
-            'lib': 'library',
-            'ptrn': 'operator'
-        }
+        "required": {"pttr": "parent"},
+        "not_required": {"lib": "library", "ptrn": "operator"},
     }
-    _extensions = [
-        DecimalAmountExtension('amount')
-    ]
+    _extensions = [DecimalAmountExtension("amount")]

     @classmethod
-    def create(cls, data, id_=None, delete_pid=False,
-               dbcommit=False, reindex=False, update_parent=True, **kwargs):
+    def create(
+        cls,
+        data,
+        id_=None,
+        delete_pid=False,
+        dbcommit=False,
+        reindex=False,
+        update_parent=True,
+        **kwargs,
+    ):
         """Create patron transaction event record."""
-        if 'creation_date' not in data:
-            data['creation_date'] = datetime.now(timezone.utc).isoformat()
-        record = super().create(
-            data, id_, delete_pid, dbcommit, reindex, **kwargs)
+        if "creation_date" not in data:
+            data["creation_date"] = datetime.now(timezone.utc).isoformat()
+        record = super().create(data, id_, delete_pid, dbcommit, reindex, **kwargs)
         if update_parent and record:
             record.update_parent_patron_transaction()
         return record
@@ -103,10 +104,7 @@ def create(cls, data, id_=None, delete_pid=False,
     def update(self, data, commit=True, dbcommit=True, reindex=True):
         """Update data for record."""
         return super().update(
-            data=data,
-            commit=commit,
-            dbcommit=dbcommit,
-            reindex=reindex
+            data=data, commit=commit, dbcommit=dbcommit, reindex=reindex
         )

     def update_parent_patron_transaction(self):
@@ -121,20 +119,19 @@ def update_parent_patron_transaction(self):
         # digits, we can multiply amounts by 100, cast result as integer,
         # do operation with these values, and (at the end) divide the result
         # by 100.
-        if not self.amount or \
-           self.event_type == PatronTransactionEventType.DISPUTE:
+        if not self.amount or self.event_type == PatronTransactionEventType.DISPUTE:
             return
         pttr = self.patron_transaction
-        total_amount = int(pttr.get('total_amount') * 100)
+        total_amount = int(pttr.get("total_amount") * 100)
         amount = int(self.amount * 100)
         if self.event_type == PatronTransactionEventType.FEE:
             total_amount += amount
         else:
             total_amount -= amount
-        pttr['total_amount'] = total_amount / 100
+        pttr["total_amount"] = total_amount / 100
         if total_amount == 0:
-            pttr['status'] = PatronTransactionStatus.CLOSED
+            pttr["status"] = PatronTransactionStatus.CLOSED
         pttr.update(pttr, dbcommit=True, reindex=True)

     @classmethod
@@ -144,11 +141,13 @@ def get_events_by_transaction_id(cls, transaction_pid):
         :param transaction_pid: The transaction PID
         :return: Array of events selected by transaction PID
         """
-        return PatronTransactionEventsSearch()\
-            .params(preserve_order=True)\
-            .filter('term', parent__pid=transaction_pid)\
-            .sort({'creation_date': {'order': 'desc'}})\
+        return (
+            PatronTransactionEventsSearch()
+            .params(preserve_order=True)
+            .filter("term", parent__pid=transaction_pid)
+            .sort({"creation_date": {"order": "desc"}})
             .scan()
+        )

     @classmethod
     def get_initial_amount_transaction_event(cls, transaction_pid):
@@ -157,33 +156,35 @@ def get_initial_amount_transaction_event(cls, transaction_pid):
         :param transaction_pid: The transaction PID
         :return: The initial amount for selected transaction
         """
-        result = PatronTransactionEventsSearch()\
-            .params(preserve_order=True)\
-            .filter('term', parent__pid=transaction_pid)\
-            .sort({'creation_date': {'order': 'asc'}})\
-            .source(['amount'])\
+        result = (
+            PatronTransactionEventsSearch()
+            .params(preserve_order=True)
+            .filter("term", parent__pid=transaction_pid)
+            .sort({"creation_date": {"order": "asc"}})
+            .source(["amount"])
             .scan()
+        )
         return next(result).amount

     @property
     def parent_pid(self):
         """Return the parent pid of the patron transaction event."""
-        return extracted_data_from_ref(self.get('parent'))
+        return extracted_data_from_ref(self.get("parent"))

     @property
     def patron_transaction(self):
         """Return the parent patron transaction of the event."""
-        return extracted_data_from_ref(self.get('parent'), data='record')
+        return extracted_data_from_ref(self.get("parent"), data="record")

     @property
     def event_type(self):
         """Return the type of the patron transaction event."""
-        return self.get('type')
+        return self.get("type")

     @property
     def amount(self):
         """Return the amount of the patron transaction event."""
-        return self.get('amount')
+        return self.get("amount")

     @property
     def patron_pid(self):
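Note: `update_parent_patron_transaction` above does its bookkeeping in integer cents precisely because amounts are constrained to two decimals: float arithmetic drifts, integer arithmetic does not. A small illustration of the pitfall and the idiom (the sketch uses `round()` to make the float-to-cents conversion explicit, where the code above casts with `int()`):

    # Floats drift: 0.1 + 0.2 == 0.30000000000000004
    assert 0.1 + 0.2 != 0.3

    # The same bookkeeping in integer cents stays exact:
    total = round(12.40 * 100)   # 1240 cents
    total -= round(2.40 * 100)   # a 2.40 payment
    assert total / 100 == 10.0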
""" - super().bulk_index(record_id_iterator, doc_type='ptre') + super().bulk_index(record_id_iterator, doc_type="ptre") diff --git a/rero_ils/modules/patron_transaction_events/jsonresolver.py b/rero_ils/modules/patron_transaction_events/jsonresolver.py index 6c5b94f856..b71f30f799 100644 --- a/rero_ils/modules/patron_transaction_events/jsonresolver.py +++ b/rero_ils/modules/patron_transaction_events/jsonresolver.py @@ -23,7 +23,7 @@ from rero_ils.modules.jsonresolver import resolve_json_refs -@jsonresolver.route('/api/patron_transaction_events/', host='bib.rero.ch') +@jsonresolver.route("/api/patron_transaction_events/", host="bib.rero.ch") def patron_transaction_event_resolver(pid): """Resolver for patron_transaction_event record.""" - return resolve_json_refs('ptre', pid) + return resolve_json_refs("ptre", pid) diff --git a/rero_ils/modules/patron_transaction_events/listener.py b/rero_ils/modules/patron_transaction_events/listener.py index fcb10454ed..2f98448d49 100644 --- a/rero_ils/modules/patron_transaction_events/listener.py +++ b/rero_ils/modules/patron_transaction_events/listener.py @@ -21,9 +21,15 @@ from .api import PatronTransactionEvent, PatronTransactionEventsSearch -def enrich_patron_transaction_event_data(sender, json=None, record=None, - index=None, doc_type=None, - arguments=None, **dummy_kwargs): +def enrich_patron_transaction_event_data( + sender, + json=None, + record=None, + index=None, + doc_type=None, + arguments=None, + **dummy_kwargs, +): """Signal sent before a record is indexed. :param json: The dumped record dictionary which can be modified. @@ -31,30 +37,30 @@ def enrich_patron_transaction_event_data(sender, json=None, record=None, :param index: The index in which the record will be indexed. :param doc_type: The doc_type for the record. 
""" - if index.split('-')[0] != PatronTransactionEventsSearch.Meta.index: + if index.split("-")[0] != PatronTransactionEventsSearch.Meta.index: return if not isinstance(record, PatronTransactionEvent): - pid = record.get('pid') + pid = record.get("pid") record = PatronTransactionEvent.get_record_by_pid(pid) parent = record.patron_transaction # Add information about the patron related to this event if patron := parent.patron: - json['patron'] = {'pid': patron.pid, 'type': 'ptrn'} - if barcode := patron.patron.get('barcode'): - json['patron']['barcode'] = barcode[0] + json["patron"] = {"pid": patron.pid, "type": "ptrn"} + if barcode := patron.patron.get("barcode"): + json["patron"]["barcode"] = barcode[0] if ptty_pid := patron.patron_type_pid: - json['patron_type'] = {'pid': ptty_pid, 'type': 'ptty'} + json["patron_type"] = {"pid": ptty_pid, "type": "ptty"} # Add information about the owning library related to the parent loan # (if exists) :: useful for faceting filter if (loan := parent.loan) and (item := loan.item): - json['owning_library'] = {'pid': item.library_pid, 'type': 'lib'} - json['owning_location'] = {'pid': item.location_pid, 'type': 'loc'} - json['item'] = {'pid': parent.item_pid, 'type': 'item'} - if barcode := item.get('barcode'): - json['item']['barcode'] = barcode + json["owning_library"] = {"pid": item.library_pid, "type": "lib"} + json["owning_location"] = {"pid": item.location_pid, "type": "loc"} + json["item"] = {"pid": parent.item_pid, "type": "item"} + if barcode := item.get("barcode"): + json["item"]["barcode"] = barcode # Add additional information - json['organisation'] = {'pid': parent.organisation_pid, 'type': 'org'} - json['category'] = parent['type'] - json['document'] = {'pid': parent.document_pid, 'type': 'doc'} + json["organisation"] = {"pid": parent.organisation_pid, "type": "org"} + json["category"] = parent["type"] + json["document"] = {"pid": parent.document_pid, "type": "doc"} diff --git a/rero_ils/modules/patron_transaction_events/models.py b/rero_ils/modules/patron_transaction_events/models.py index 50e9faef86..2e57549b98 100644 --- a/rero_ils/modules/patron_transaction_events/models.py +++ b/rero_ils/modules/patron_transaction_events/models.py @@ -28,25 +28,26 @@ class PatronTransactionEventIdentifier(RecordIdentifier): """Sequence generator for PatronTransactionEvent identifiers.""" - __tablename__ = 'patron_transaction_event_id' - __mapper_args__ = {'concrete': True} + __tablename__ = "patron_transaction_event_id" + __mapper_args__ = {"concrete": True} recid = db.Column( - db.BigInteger().with_variant(db.Integer, 'sqlite'), - primary_key=True, autoincrement=True, + db.BigInteger().with_variant(db.Integer, "sqlite"), + primary_key=True, + autoincrement=True, ) class PatronTransactionEventMetadata(db.Model, RecordMetadataBase): """PatronTransactionEvent record metadata.""" - __tablename__ = 'patron_transaction_event_metadata' + __tablename__ = "patron_transaction_event_metadata" class PatronTransactionEventType: """Type of PatronTransactionEvent.""" - FEE = 'fee' - PAYMENT = 'payment' - DISPUTE = 'dispute' - CANCEL = 'cancel' + FEE = "fee" + PAYMENT = "payment" + DISPUTE = "dispute" + CANCEL = "cancel" diff --git a/rero_ils/modules/patron_transaction_events/permissions.py b/rero_ils/modules/patron_transaction_events/permissions.py index 52beae97e2..14c43ecec0 100644 --- a/rero_ils/modules/patron_transaction_events/permissions.py +++ b/rero_ils/modules/patron_transaction_events/permissions.py @@ -19,17 +19,20 @@ """Permissions for Patron transaction 
event.""" from invenio_access import action_factory -from rero_ils.modules.permissions import AllowedByAction, \ - AllowedByActionRestrictByOrganisation, \ - AllowedByActionRestrictByOwnerOrOrganisation, RecordPermissionPolicy +from rero_ils.modules.permissions import ( + AllowedByAction, + AllowedByActionRestrictByOrganisation, + AllowedByActionRestrictByOwnerOrOrganisation, + RecordPermissionPolicy, +) # Actions to control patron transaction event policies for CRUD operations -search_action = action_factory('ptre-search') -read_action = action_factory('ptre-read') -create_action = action_factory('ptre-create') -update_action = action_factory('ptre-update') -delete_action = action_factory('ptre-delete') -access_action = action_factory('ptre-access') +search_action = action_factory("ptre-search") +read_action = action_factory("ptre-read") +create_action = action_factory("ptre-create") +update_action = action_factory("ptre-update") +delete_action = action_factory("ptre-delete") +access_action = action_factory("ptre-access") class PatronTransactionEventPermissionPolicy(RecordPermissionPolicy): diff --git a/rero_ils/modules/patron_transaction_events/serializers/__init__.py b/rero_ils/modules/patron_transaction_events/serializers/__init__.py index ea5f6109d6..15f4f67f0d 100644 --- a/rero_ils/modules/patron_transaction_events/serializers/__init__.py +++ b/rero_ils/modules/patron_transaction_events/serializers/__init__.py @@ -17,46 +17,43 @@ """Patron transaction event serializers.""" -from rero_ils.modules.serializers import RecordSchemaJSONV1, \ - search_responsify, search_responsify_file +from rero_ils.modules.serializers import ( + RecordSchemaJSONV1, + search_responsify, + search_responsify_file, +) from .csv import PatronTransactionEventCSVSerializer from .json import PatronTransactionEventsJSONSerializer -__all__ = [ - 'json_ptre_search', - 'csv_ptre_search' -] +__all__ = ["json_ptre_search", "csv_ptre_search"] """JSON serializer.""" _json = PatronTransactionEventsJSONSerializer(RecordSchemaJSONV1) -json_ptre_search = search_responsify(_json, 'application/rero+json') +json_ptre_search = search_responsify(_json, "application/rero+json") """CSV serializer.""" _csv = PatronTransactionEventCSVSerializer( csv_included_fields=[ - 'category', - 'type', - 'subtype', - 'transaction_date', - 'amount', - 'patron_name', - 'patron_barcode', - 'patron_email', - 'patron_type', - 'document_pid', - 'document_title', - 'item_barcode', - 'item_owning_library', - 'transaction_library', - 'operator_name' + "category", + "type", + "subtype", + "transaction_date", + "amount", + "patron_name", + "patron_barcode", + "patron_email", + "patron_type", + "document_pid", + "document_title", + "item_barcode", + "item_owning_library", + "transaction_library", + "operator_name", ] ) csv_ptre_search = search_responsify_file( - _csv, - 'text/csv', - file_extension='csv', - file_prefix='export-fees' + _csv, "text/csv", file_extension="csv", file_prefix="export-fees" ) diff --git a/rero_ils/modules/patron_transaction_events/serializers/csv.py b/rero_ils/modules/patron_transaction_events/serializers/csv.py index bd3012f3eb..284e1cf5ab 100644 --- a/rero_ils/modules/patron_transaction_events/serializers/csv.py +++ b/rero_ils/modules/patron_transaction_events/serializers/csv.py @@ -37,12 +37,12 @@ from rero_ils.modules.serializers import CachedDataSerializerMixin -class PatronTransactionEventCSVSerializer(CSVSerializer, - CachedDataSerializerMixin): +class PatronTransactionEventCSVSerializer(CSVSerializer, 
diff --git a/rero_ils/modules/patron_transaction_events/serializers/csv.py b/rero_ils/modules/patron_transaction_events/serializers/csv.py
index bd3012f3eb..284e1cf5ab 100644
--- a/rero_ils/modules/patron_transaction_events/serializers/csv.py
+++ b/rero_ils/modules/patron_transaction_events/serializers/csv.py
@@ -37,12 +37,12 @@
 from rero_ils.modules.serializers import CachedDataSerializerMixin


-class PatronTransactionEventCSVSerializer(CSVSerializer,
-                                          CachedDataSerializerMixin):
+class PatronTransactionEventCSVSerializer(CSVSerializer, CachedDataSerializerMixin):
     """Serialize patron transaction event search for csv."""

-    def serialize_search(self, pid_fetcher, search_result, links=None,
-                         item_links_factory=None):
+    def serialize_search(
+        self, pid_fetcher, search_result, links=None, item_links_factory=None
+    ):
         """Serialize a search result.

         :param pid_fetcher: Persistent identifier fetcher.
@@ -56,47 +56,45 @@ def generate_csv():
             # write the CSV output in memory
             line = Line()
-            writer = csv.DictWriter(
-                line, quoting=csv.QUOTE_ALL, fieldnames=headers)
+            writer = csv.DictWriter(line, quoting=csv.QUOTE_ALL, fieldnames=headers)
             writer.writeheader()
             yield line.read()

             for hit in search_result:
                 event = hit.to_dict()
                 parent = self.get_resource(
-                    PatronTransaction,
-                    event.get('parent', {}).get('pid')
+                    PatronTransaction, event.get("parent", {}).get("pid")
                 )
-                transaction_date = parse_datetime(event.get('creation_date'))
+                transaction_date = parse_datetime(event.get("creation_date"))
                 # Load related resources used to fill the file.
                 # !! 'dispute' doesn't have any amount --> can't be rounded
-                if amount := event.get('amount'):
+                if amount := event.get("amount"):
                     amount = format_decimal(amount, locale=current_i18n.locale)
                 csv_data = {
-                    'category': event.get('category'),
-                    'type': event.get('type'),
-                    'subtype': event.get('subtype'),
-                    'amount': amount,
-                    'transaction_date': transaction_date.isoformat()
+                    "category": event.get("category"),
+                    "type": event.get("type"),
+                    "subtype": event.get("subtype"),
+                    "amount": amount,
+                    "transaction_date": transaction_date.isoformat(),
                 }
-                if pid := event.get('patron', {}).get('pid'):
+                if pid := event.get("patron", {}).get("pid"):
                     record = self.get_resource(Patron, pid)
                     csv_data |= _extract_patron_data(record)
-                if pid := event.get('patron_type', {}).get('pid'):
+                if pid := event.get("patron_type", {}).get("pid"):
                     record = self.get_resource(PatronTypesSearch(), pid)
                     csv_data |= _extract_patron_type_data(record)
-                if pid := event.get('operator', {}).get('pid'):
+                if pid := event.get("operator", {}).get("pid"):
                     record = self.get_resource(Patron, pid)
                     csv_data |= _extract_operator_data(record)
-                if pid := event.get('library', {}).get('pid'):
+                if pid := event.get("library", {}).get("pid"):
                     record = self.get_resource(Library, pid)
                     csv_data |= _extract_transaction_library_data(record)
-                    csv_data['transaction_date'] = transaction_date\
-                        .astimezone(tz=record.get_timezone())\
-                        .isoformat()
+                    csv_data["transaction_date"] = transaction_date.astimezone(
+                        tz=record.get_timezone()
+                    ).isoformat()
                 if pid := parent.loan_pid:
                     loan = self.get_resource(Loan, pid)
                     document = self.get_resource(Document, loan.document_pid)
@@ -117,9 +115,11 @@ def generate_csv():

 def _skip_if_no_record(func):
     """Decorator used to skip extract function if record doesn't exist."""
+
     @wraps(func)
     def decorated_view(record, *args, **kwargs):
         return func(record, *args, **kwargs) if record else {}
+
     return decorated_view
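Note: `generate_csv` above never builds the whole export in memory: `csv.DictWriter` writes each row into a one-line buffer (`Line`) that is read back and yielded immediately, so the HTTP response streams row by row. A self-contained sketch of the pattern, with a minimal stand-in for the real `Line` helper:

    import csv


    class Line:
        """One-line buffer: csv writes into it, the generator reads it back."""

        def __init__(self):
            self._line = ""

        def write(self, line):
            self._line = line

        def read(self):
            return self._line


    def generate_csv(rows, headers):
        line = Line()
        writer = csv.DictWriter(line, quoting=csv.QUOTE_ALL, fieldnames=headers)
        writer.writeheader()
        yield line.read()
        for row in rows:
            writer.writerow(row)
            yield line.read()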
""" return { - 'patron_name': - record.formatted_name, - 'patron_barcode': - ', '.join(record.get('patron', {}).get('barcode', [])), - 'patron_email': - record.user.email or - record.get('patron', {}).get('additional_communication_email') + "patron_name": record.formatted_name, + "patron_barcode": ", ".join(record.get("patron", {}).get("barcode", [])), + "patron_email": record.user.email + or record.get("patron", {}).get("additional_communication_email"), } @@ -149,9 +146,8 @@ def _extract_document_data(record): :returns a dictionary containing desired document data. """ return { - 'document_pid': record.pid, - 'document_title': - TitleExtension.format_text(record.get('title', [])) + "document_pid": record.pid, + "document_title": TitleExtension.format_text(record.get("title", [])), } @@ -165,9 +161,9 @@ def _extract_item_data(record, collector): """ library = collector.get_resource(Library, record.library_pid) return { - 'item_pid': record.pid, - 'item_barcode': record.get('barcode'), - 'item_owning_library': library.get('name') + "item_pid": record.pid, + "item_barcode": record.get("barcode"), + "item_owning_library": library.get("name"), } @@ -178,9 +174,7 @@ def _extract_operator_data(record): :param record: the `Patron` representing the operator to analyze. :returns a dictionary containing desired operator data. """ - return { - 'operator_name': record.formatted_name - } + return {"operator_name": record.formatted_name} @_skip_if_no_record @@ -190,9 +184,7 @@ def _extract_patron_type_data(record): :param record: a dictionary with indexed ES data about the patron type. :returns a dictionary containing desired patron type data. """ - return { - 'patron_type': record.get('name') - } + return {"patron_type": record.get("name")} @_skip_if_no_record @@ -202,6 +194,4 @@ def _extract_transaction_library_data(record): :param record: the `Library` representing the transaction lib to analyze. :returns a dictionary containing desired library data. """ - return { - 'transaction_library': record.get('name') - } + return {"transaction_library": record.get("name")} diff --git a/rero_ils/modules/patron_transaction_events/serializers/json.py b/rero_ils/modules/patron_transaction_events/serializers/json.py index e41f15f158..b0d07fa21d 100644 --- a/rero_ils/modules/patron_transaction_events/serializers/json.py +++ b/rero_ils/modules/patron_transaction_events/serializers/json.py @@ -22,30 +22,33 @@ from rero_ils.modules.locations.api import LocationsSearch from rero_ils.modules.patron_types.api import PatronTypesSearch from rero_ils.modules.patrons.api import Patron -from rero_ils.modules.serializers import CachedDataSerializerMixin, \ - JSONSerializer, RecordSchemaJSONV1, search_responsify +from rero_ils.modules.serializers import ( + CachedDataSerializerMixin, + JSONSerializer, + RecordSchemaJSONV1, + search_responsify, +) -class PatronTransactionEventsJSONSerializer(JSONSerializer, - CachedDataSerializerMixin): +class PatronTransactionEventsJSONSerializer(JSONSerializer, CachedDataSerializerMixin): """Serializer for RERO-ILS `PatronTransactionEvent` records as JSON.""" def _postprocess_search_hit(self, hit): """Post-process each hit of a search result.""" - metadata = hit.get('metadata', {}) + metadata = hit.get("metadata", {}) # Add label for some $ref fields. 
diff --git a/rero_ils/modules/patron_transaction_events/serializers/json.py b/rero_ils/modules/patron_transaction_events/serializers/json.py
index e41f15f158..b0d07fa21d 100644
--- a/rero_ils/modules/patron_transaction_events/serializers/json.py
+++ b/rero_ils/modules/patron_transaction_events/serializers/json.py
@@ -22,30 +22,33 @@
 from rero_ils.modules.locations.api import LocationsSearch
 from rero_ils.modules.patron_types.api import PatronTypesSearch
 from rero_ils.modules.patrons.api import Patron
-from rero_ils.modules.serializers import CachedDataSerializerMixin, \
-    JSONSerializer, RecordSchemaJSONV1, search_responsify
+from rero_ils.modules.serializers import (
+    CachedDataSerializerMixin,
+    JSONSerializer,
+    RecordSchemaJSONV1,
+    search_responsify,
+)


-class PatronTransactionEventsJSONSerializer(JSONSerializer,
-                                            CachedDataSerializerMixin):
+class PatronTransactionEventsJSONSerializer(JSONSerializer, CachedDataSerializerMixin):
     """Serializer for RERO-ILS `PatronTransactionEvent` records as JSON."""

     def _postprocess_search_hit(self, hit):
         """Post-process each hit of a search result."""
-        metadata = hit.get('metadata', {})
+        metadata = hit.get("metadata", {})

         # Add label for some $ref fields.
-        pid = metadata.get('library', {}).get('pid')
+        pid = metadata.get("library", {}).get("pid")
         if pid and (resource := self.get_resource(LibrariesSearch(), pid)):
-            metadata['library']['name'] = resource.get('name')
+            metadata["library"]["name"] = resource.get("name")

-        pid = metadata.get('patron_type', {}).get('pid')
+        pid = metadata.get("patron_type", {}).get("pid")
         if pid and (resource := self.get_resource(PatronTypesSearch(), pid)):
-            metadata['patron_type']['name'] = resource.get('name')
+            metadata["patron_type"]["name"] = resource.get("name")

-        pid = metadata.get('operator', {}).get('pid')
+        pid = metadata.get("operator", {}).get("pid")
         if pid and (resource := self.get_resource(Patron, pid)):
-            metadata['operator']['name'] = resource.formatted_name
+            metadata["operator"]["name"] = resource.formatted_name

         super()._postprocess_search_hit(hit)

@@ -54,29 +57,29 @@ def _postprocess_search_aggregations(self, aggregations):
         # enrich aggregation hit with some key
         aggrs = aggregations
         JSONSerializer.enrich_bucket_with_data(
-            aggrs.get('transaction_library', {}).get('buckets', []),
-            LibrariesSearch, 'name'
+            aggrs.get("transaction_library", {}).get("buckets", []),
+            LibrariesSearch,
+            "name",
         )
         JSONSerializer.enrich_bucket_with_data(
-            aggrs.get('patron_type', {}).get('buckets', []),
-            PatronTypesSearch, 'name'
+            aggrs.get("patron_type", {}).get("buckets", []), PatronTypesSearch, "name"
         )
         JSONSerializer.enrich_bucket_with_data(
-            aggrs.get('owning_library', {}).get('buckets', []),
-            LibrariesSearch, 'name'
+            aggrs.get("owning_library", {}).get("buckets", []), LibrariesSearch, "name"
         )
-        for loc_bucket in aggrs.get('owning_library', {}).get('buckets', []):
+        for loc_bucket in aggrs.get("owning_library", {}).get("buckets", []):
             JSONSerializer.enrich_bucket_with_data(
-                loc_bucket.get('owning_location', {}).get('buckets', []),
-                LocationsSearch, 'name'
+                loc_bucket.get("owning_location", {}).get("buckets", []),
+                LocationsSearch,
+                "name",
             )

         # add configuration for date-range facets
-        aggr = aggregations.get('transaction_date', {})
+        aggr = aggregations.get("transaction_date", {})
         JSONSerializer.add_date_range_configuration(aggr)

         super()._postprocess_search_aggregations(aggregations)


 _json = PatronTransactionEventsJSONSerializer(RecordSchemaJSONV1)
-json_ptre_search = search_responsify(_json, 'application/rero+json')
+json_ptre_search = search_responsify(_json, "application/rero+json")
""" - facet_filter = Q('term', type=PatronTransactionEventType.PAYMENT) - searched_subtypes = make_comma_list_a_list(request.args.getlist('subtype')) + facet_filter = Q("term", type=PatronTransactionEventType.PAYMENT) + searched_subtypes = make_comma_list_a_list(request.args.getlist("subtype")) if searched_subtypes := [sub.strip() for sub in searched_subtypes]: - subtypes_query = Q('match_none') # Initial OR query condition + subtypes_query = Q("match_none") # Initial OR query condition for subtype in searched_subtypes: - subtypes_query |= Q('term', subtype=subtype) + subtypes_query |= Q("term", subtype=subtype) facet_filter &= subtypes_query return facet_filter.to_dict() diff --git a/rero_ils/modules/patron_transactions/api.py b/rero_ils/modules/patron_transactions/api.py index 7e0ef1a98f..29152429e7 100644 --- a/rero_ils/modules/patron_transactions/api.py +++ b/rero_ils/modules/patron_transactions/api.py @@ -26,8 +26,10 @@ from rero_ils.modules.fetchers import id_fetcher from rero_ils.modules.minters import id_minter from rero_ils.modules.organisations.api import Organisation -from rero_ils.modules.patron_transaction_events.api import \ - PatronTransactionEvent, PatronTransactionEventsSearch +from rero_ils.modules.patron_transaction_events.api import ( + PatronTransactionEvent, + PatronTransactionEventsSearch, +) from rero_ils.modules.providers import Provider from rero_ils.modules.utils import extracted_data_from_ref, sorted_pids @@ -36,20 +38,14 @@ # provider PatronTransactionProvider = type( - 'PatronTransactionProvider', + "PatronTransactionProvider", (Provider,), - dict(identifier=PatronTransactionIdentifier, pid_type='pttr') + dict(identifier=PatronTransactionIdentifier, pid_type="pttr"), ) # minter -patron_transaction_id_minter = partial( - id_minter, - provider=PatronTransactionProvider -) +patron_transaction_id_minter = partial(id_minter, provider=PatronTransactionProvider) # fetcher -patron_transaction_id_fetcher = partial( - id_fetcher, - provider=PatronTransactionProvider -) +patron_transaction_id_fetcher = partial(id_fetcher, provider=PatronTransactionProvider) class PatronTransactionsSearch(IlsRecordsSearch): @@ -58,9 +54,9 @@ class PatronTransactionsSearch(IlsRecordsSearch): class Meta: """Search only on patron transaction index.""" - index = 'patron_transactions' + index = "patron_transactions" doc_types = None - fields = ('*', ) + fields = ("*",) facets = {} default_filter = None @@ -69,22 +65,15 @@ class Meta: class PatronTransaction(IlsRecord): """Patron Transaction class.""" - _extensions = [ - PatronTransactionExtension() - ] + _extensions = [PatronTransactionExtension()] minter = patron_transaction_id_minter fetcher = patron_transaction_id_fetcher provider = PatronTransactionProvider model_cls = PatronTransactionMetadata pids_exist_check = { - 'required': { - 'ptrn': 'patron' - }, - 'not_required': { - 'org': 'organisation', - 'notif': 'notification' - } + "required": {"ptrn": "patron"}, + "not_required": {"org": "organisation", "notif": "notification"}, } def __init__(self, data, model=None, **kwargs): @@ -102,10 +91,7 @@ def __init__(self, data, model=None, **kwargs): def update(self, data, commit=True, dbcommit=True, reindex=True): """Update data for record.""" return super().update( - data=data, - commit=commit, - dbcommit=dbcommit, - reindex=reindex + data=data, commit=commit, dbcommit=dbcommit, reindex=reindex ) # GETTER & SETTER ========================================================= @@ -115,14 +101,14 @@ def update(self, data, commit=True, dbcommit=True, 
diff --git a/rero_ils/modules/patron_transactions/api.py b/rero_ils/modules/patron_transactions/api.py
index 7e0ef1a98f..29152429e7 100644
--- a/rero_ils/modules/patron_transactions/api.py
+++ b/rero_ils/modules/patron_transactions/api.py
@@ -26,8 +26,10 @@
 from rero_ils.modules.fetchers import id_fetcher
 from rero_ils.modules.minters import id_minter
 from rero_ils.modules.organisations.api import Organisation
-from rero_ils.modules.patron_transaction_events.api import \
-    PatronTransactionEvent, PatronTransactionEventsSearch
+from rero_ils.modules.patron_transaction_events.api import (
+    PatronTransactionEvent,
+    PatronTransactionEventsSearch,
+)
 from rero_ils.modules.providers import Provider
 from rero_ils.modules.utils import extracted_data_from_ref, sorted_pids

@@ -36,20 +38,14 @@
 # provider
 PatronTransactionProvider = type(
-    'PatronTransactionProvider',
+    "PatronTransactionProvider",
     (Provider,),
-    dict(identifier=PatronTransactionIdentifier, pid_type='pttr')
+    dict(identifier=PatronTransactionIdentifier, pid_type="pttr"),
 )
 # minter
-patron_transaction_id_minter = partial(
-    id_minter,
-    provider=PatronTransactionProvider
-)
+patron_transaction_id_minter = partial(id_minter, provider=PatronTransactionProvider)
 # fetcher
-patron_transaction_id_fetcher = partial(
-    id_fetcher,
-    provider=PatronTransactionProvider
-)
+patron_transaction_id_fetcher = partial(id_fetcher, provider=PatronTransactionProvider)


 class PatronTransactionsSearch(IlsRecordsSearch):
@@ -58,9 +54,9 @@ class Meta:
     class Meta:
         """Search only on patron transaction index."""

-        index = 'patron_transactions'
+        index = "patron_transactions"
         doc_types = None
-        fields = ('*', )
+        fields = ("*",)
         facets = {}

         default_filter = None

@@ -69,22 +65,15 @@ class Meta:
 class PatronTransaction(IlsRecord):
     """Patron Transaction class."""

-    _extensions = [
-        PatronTransactionExtension()
-    ]
+    _extensions = [PatronTransactionExtension()]

     minter = patron_transaction_id_minter
     fetcher = patron_transaction_id_fetcher
     provider = PatronTransactionProvider
     model_cls = PatronTransactionMetadata
     pids_exist_check = {
-        'required': {
-            'ptrn': 'patron'
-        },
-        'not_required': {
-            'org': 'organisation',
-            'notif': 'notification'
-        }
+        "required": {"ptrn": "patron"},
+        "not_required": {"org": "organisation", "notif": "notification"},
     }

     def __init__(self, data, model=None, **kwargs):
@@ -102,10 +91,7 @@ def __init__(self, data, model=None, **kwargs):
     def update(self, data, commit=True, dbcommit=True, reindex=True):
         """Update data for record."""
         return super().update(
-            data=data,
-            commit=commit,
-            dbcommit=dbcommit,
-            reindex=reindex
+            data=data, commit=commit, dbcommit=dbcommit, reindex=reindex
         )

     # GETTER & SETTER =========================================================
@@ -115,14 +101,14 @@ def update(self, data, commit=True, dbcommit=True, reindex=True):
     @property
     def loan_pid(self):
         """Get the `Loan` pid related to this transaction."""
-        if self.get('loan'):
-            return extracted_data_from_ref(self['loan'])
+        if self.get("loan"):
+            return extracted_data_from_ref(self["loan"])

     @cached_property
     def loan(self):
         """Get the `Loan` record related to this transaction."""
-        if self.get('loan'):
-            return extracted_data_from_ref(self['loan'], data='record')
+        if self.get("loan"):
+            return extracted_data_from_ref(self["loan"], data="record")

     @property
     def document_pid(self):
@@ -145,30 +131,29 @@ def library_pid(self):
     @property
     def patron_pid(self):
         """Get the `Patron` pid related to this transaction."""
-        return extracted_data_from_ref(self.get('patron'))
+        return extracted_data_from_ref(self.get("patron"))

     @property
     def patron(self):
         """Get the `Patron` record related to this transaction."""
-        return extracted_data_from_ref(self.get('patron'), data='record')
+        return extracted_data_from_ref(self.get("patron"), data="record")

     @property
     def total_amount(self):
         """Shortcut to get the total_amount of the transaction."""
-        return self.get('total_amount')
+        return self.get("total_amount")

     @property
     def notification_pid(self):
         """Get the `Notification` pid related to this transaction."""
-        if self.get('notification'):
-            return extracted_data_from_ref(self.get('notification'))
+        if self.get("notification"):
+            return extracted_data_from_ref(self.get("notification"))

     @cached_property
     def notification(self):
         """Get the `Notification` record related to this transaction."""
-        if self.get('notification'):
-            return extracted_data_from_ref(
-                self.get('notification'), data='record')
+        if self.get("notification"):
+            return extracted_data_from_ref(self.get("notification"), data="record")

     @property
     def notification_transaction_library_pid(self):
@@ -186,29 +171,31 @@ def notification_transaction_user_pid(self):
     @property
     def status(self):
         """Return the status of the patron transaction."""
-        return self.get('status')
+        return self.get("status")

     @property
     def currency(self):
         """Return patron transaction currency."""
-        return Organisation\
-            .get_record_by_pid(self.organisation_pid)\
-            .get('default_currency')
+        return Organisation.get_record_by_pid(self.organisation_pid).get(
+            "default_currency"
+        )

     @property
     def events(self):
         """Shortcut for events of the patron transaction."""
-        query = PatronTransactionEventsSearch()\
-            .filter('term', parent__pid=self.pid)\
+        query = (
+            PatronTransactionEventsSearch()
+            .filter("term", parent__pid=self.pid)
             .source(False)
+        )
         for hit in query.scan():
             yield PatronTransactionEvent.get_record(hit.meta.id)

     def get_number_of_patron_transaction_events(self):
         """Get number of patron transaction events."""
-        return PatronTransactionEventsSearch()\
-            .filter('term', parent__pid=self.pid)\
-            .count()
+        return (
+            PatronTransactionEventsSearch().filter("term", parent__pid=self.pid).count()
+        )

     def get_links_to_me(self, get_pids=False):
         """Get the links between this record and other records.
@@ -217,18 +204,17 @@ def get_links_to_me(self, get_pids=False):
         :param get_pids: if True list of linked pids
                          if False count of linked records
         """
         links = {}
-        query = PatronTransactionEventsSearch() \
-            .filter('term', parent__pid=self.pid)
+        query = PatronTransactionEventsSearch().filter("term", parent__pid=self.pid)
         events = sorted_pids(query) if get_pids else query.count()
         if events:
-            links['events'] = events
+            links["events"] = events
         return links

     def reasons_not_to_delete(self):
         """Get reasons not to delete record."""
         reasons = {}
         if links := self.get_links_to_me():
-            reasons['links'] = links
+            reasons["links"] = links
         return reasons

@@ -250,4 +236,4 @@ def bulk_index(self, record_id_iterator):

         :param record_id_iterator: Iterator yielding record UUIDs.
         """
-        super().bulk_index(record_id_iterator, doc_type='pttr')
+        super().bulk_index(record_id_iterator, doc_type="pttr")
diff --git a/rero_ils/modules/patron_transactions/extensions.py b/rero_ils/modules/patron_transactions/extensions.py
index d2fd7ed8da..90ea6b46b3 100644
--- a/rero_ils/modules/patron_transactions/extensions.py
+++ b/rero_ils/modules/patron_transactions/extensions.py
@@ -21,8 +21,7 @@
 from flask_babel import gettext as _
 from invenio_records.extensions import RecordExtension

-from rero_ils.modules.patron_transaction_events.api import \
-    PatronTransactionEvent
+from rero_ils.modules.patron_transaction_events.api import PatronTransactionEvent
 from rero_ils.modules.utils import get_ref_for_pid


@@ -33,21 +32,17 @@ class PatronTransactionExtension(RecordExtension):
     def _base_data_patron_event(record, steps=None):
         """Create initial data for Patron Transaction Event."""
         data = {
-            'creation_date': record.get('creation_date'),
-            'type': 'fee',
-            'subtype': 'other',
-            'amount': record.get('total_amount'),
-            'parent': {
-                '$ref': get_ref_for_pid('pttr', record.pid)
-            },
-            'note': _('Initial charge')
+            "creation_date": record.get("creation_date"),
+            "type": "fee",
+            "subtype": "other",
+            "amount": record.get("total_amount"),
+            "parent": {"$ref": get_ref_for_pid("pttr", record.pid)},
+            "note": _("Initial charge"),
         }
-        if library := record.get('library'):
-            data['library'] = {
-                '$ref': library.get('$ref')
-            }
+        if library := record.get("library"):
+            data["library"] = {"$ref": library.get("$ref")}
         if steps:
-            data['steps'] = steps
+            data["steps"] = steps
         return data

     @staticmethod
@@ -58,39 +53,35 @@ def _data_operator_event(data, event=None):
         it overrides the library of the patron transaction.
""" if event: - if operator := event.get('operator'): - data['operator'] = { - '$ref': operator.get('$ref') - } - if library := event.get('library'): - data['library'] = { - '$ref': library.get('$ref') - } + if operator := event.get("operator"): + data["operator"] = {"$ref": operator.get("$ref")} + if library := event.get("library"): + data["library"] = {"$ref": library.get("$ref")} return data @staticmethod def _data_overdue(data, record): """Add overdue informations on Patron Transaction Event.""" - if record.get('type') == 'overdue': - data['subtype'] = 'overdue' - library_pid = record.loan.library_pid if record.loan_pid else \ - record.notification_transaction_library_pid + if record.get("type") == "overdue": + data["subtype"] = "overdue" + library_pid = ( + record.loan.library_pid + if record.loan_pid + else record.notification_transaction_library_pid + ) if library_pid: - data['library'] = { - '$ref': get_ref_for_pid('lib', library_pid) - } + data["library"] = {"$ref": get_ref_for_pid("lib", library_pid)} return data def pre_create(self, record): """Called before a patron transaction event record is created.""" # Extract steps and event if exists. - steps = record.pop('steps', None) - event = record.pop('event', None) + steps = record.pop("steps", None) + event = record.pop("event", None) # Update the model with the new data of the record. record.model.data = dict(record) # Creation of data for the event - data = PatronTransactionExtension._base_data_patron_event( - record, steps) + data = PatronTransactionExtension._base_data_patron_event(record, steps) data = PatronTransactionExtension._data_operator_event(data, event) data = PatronTransactionExtension._data_overdue(data, record) rec = PatronTransactionEvent.create(data, update_parent=False) diff --git a/rero_ils/modules/patron_transactions/jsonresolver.py b/rero_ils/modules/patron_transactions/jsonresolver.py index c769efeff3..9739ad1eac 100644 --- a/rero_ils/modules/patron_transactions/jsonresolver.py +++ b/rero_ils/modules/patron_transactions/jsonresolver.py @@ -23,7 +23,7 @@ from ..jsonresolver import resolve_json_refs -@jsonresolver.route('/api/patron_transactions/', host='bib.rero.ch') +@jsonresolver.route("/api/patron_transactions/", host="bib.rero.ch") def patron_transaction_resolver(pid): """Resolver for patron transaction record.""" - return resolve_json_refs('pttr', pid) + return resolve_json_refs("pttr", pid) diff --git a/rero_ils/modules/patron_transactions/listener.py b/rero_ils/modules/patron_transactions/listener.py index ce78699133..dc9698c4c2 100644 --- a/rero_ils/modules/patron_transactions/listener.py +++ b/rero_ils/modules/patron_transactions/listener.py @@ -21,9 +21,15 @@ from .api import PatronTransaction, PatronTransactionsSearch -def enrich_patron_transaction_data(sender, json=None, record=None, index=None, - doc_type=None, arguments=None, - **dummy_kwargs): +def enrich_patron_transaction_data( + sender, + json=None, + record=None, + index=None, + doc_type=None, + arguments=None, + **dummy_kwargs, +): """Signal sent before a record is indexed. :param json: The dumped record dictionary which can be modified. @@ -31,16 +37,16 @@ def enrich_patron_transaction_data(sender, json=None, record=None, index=None, :param index: The index in which the record will be indexed. :param doc_type: The doc_type for the record. 
""" - if index.split('-')[0] != PatronTransactionsSearch.Meta.index: + if index.split("-")[0] != PatronTransactionsSearch.Meta.index: return if not isinstance(record, PatronTransaction): - record = PatronTransaction.get_record_by_pid(record.get('pid')) + record = PatronTransaction.get_record_by_pid(record.get("pid")) - if barcode := record.patron.patron.get('barcode'): - json['patron']['barcode'] = barcode[0] + if barcode := record.patron.patron.get("barcode"): + json["patron"]["barcode"] = barcode[0] if loan := record.loan: - json['document'] = {'pid': record.document_pid, 'type': 'doc'} - json['library'] = {'pid': record.library_pid, 'type': 'lib'} - json['item'] = {'pid': loan.item_pid, 'type': 'item'} + json["document"] = {"pid": record.document_pid, "type": "doc"} + json["library"] = {"pid": record.library_pid, "type": "lib"} + json["item"] = {"pid": loan.item_pid, "type": "item"} diff --git a/rero_ils/modules/patron_transactions/models.py b/rero_ils/modules/patron_transactions/models.py index f5634967b1..752fdb5b5d 100644 --- a/rero_ils/modules/patron_transactions/models.py +++ b/rero_ils/modules/patron_transactions/models.py @@ -27,35 +27,36 @@ class PatronTransactionIdentifier(RecordIdentifier): """Sequence generator for Patron Transaction identifiers.""" - __tablename__ = 'patron_transaction_id' - __mapper_args__ = {'concrete': True} + __tablename__ = "patron_transaction_id" + __mapper_args__ = {"concrete": True} recid = db.Column( - db.BigInteger().with_variant(db.Integer, 'sqlite'), - primary_key=True, autoincrement=True, + db.BigInteger().with_variant(db.Integer, "sqlite"), + primary_key=True, + autoincrement=True, ) class PatronTransactionMetadata(db.Model, RecordMetadataBase): """PatronTransaction record metadata.""" - __tablename__ = 'patron_transaction_metadata' + __tablename__ = "patron_transaction_metadata" class PatronTransactionStatus: """PatronTransaction status.""" - OPEN = 'open' - CLOSED = 'closed' + OPEN = "open" + CLOSED = "closed" class PatronTransactionType: """PatronTransaction type.""" - DAMAGED = 'damaged' - ILL = 'interlibrary_loan' - LOST = 'lost' - OTHER = 'other' - OVERDUE = 'overdue' - PHOTOCOPY = 'photocopy' - SUBSCRIPTION = 'subscription' + DAMAGED = "damaged" + ILL = "interlibrary_loan" + LOST = "lost" + OTHER = "other" + OVERDUE = "overdue" + PHOTOCOPY = "photocopy" + SUBSCRIPTION = "subscription" diff --git a/rero_ils/modules/patron_transactions/permissions.py b/rero_ils/modules/patron_transactions/permissions.py index 7e008fdfa5..95501a0044 100644 --- a/rero_ils/modules/patron_transactions/permissions.py +++ b/rero_ils/modules/patron_transactions/permissions.py @@ -19,17 +19,20 @@ """Permissions for Patron transaction.""" from invenio_access import action_factory -from rero_ils.modules.permissions import AllowedByAction, \ - AllowedByActionRestrictByOrganisation, \ - AllowedByActionRestrictByOwnerOrOrganisation, RecordPermissionPolicy +from rero_ils.modules.permissions import ( + AllowedByAction, + AllowedByActionRestrictByOrganisation, + AllowedByActionRestrictByOwnerOrOrganisation, + RecordPermissionPolicy, +) # Actions to control patron transaction policies for CRUD operations -search_action = action_factory('pttr-search') -read_action = action_factory('pttr-read') -create_action = action_factory('pttr-create') -update_action = action_factory('pttr-update') -delete_action = action_factory('pttr-delete') -access_action = action_factory('pttr-access') +search_action = action_factory("pttr-search") +read_action = action_factory("pttr-read") 
+create_action = action_factory("pttr-create") +update_action = action_factory("pttr-update") +delete_action = action_factory("pttr-delete") +access_action = action_factory("pttr-access") class PatronTransactionPermissionPolicy(RecordPermissionPolicy): diff --git a/rero_ils/modules/patron_transactions/serializers.py b/rero_ils/modules/patron_transactions/serializers.py index 40c63284bd..10a5dcc95e 100644 --- a/rero_ils/modules/patron_transactions/serializers.py +++ b/rero_ils/modules/patron_transactions/serializers.py @@ -21,12 +21,15 @@ from rero_ils.modules.documents.api import DocumentsSearch from rero_ils.modules.items.api.api import Item from rero_ils.modules.loans.api import Loan -from rero_ils.modules.serializers import CachedDataSerializerMixin, \ - JSONSerializer, RecordSchemaJSONV1, search_responsify +from rero_ils.modules.serializers import ( + CachedDataSerializerMixin, + JSONSerializer, + RecordSchemaJSONV1, + search_responsify, +) -class PatronTransactionsJSONSerializer(JSONSerializer, - CachedDataSerializerMixin): +class PatronTransactionsJSONSerializer(JSONSerializer, CachedDataSerializerMixin): """Serializer for RERO-ILS `PatronTransaction` records as JSON.""" def _postprocess_search_hit(self, hit): @@ -34,21 +37,22 @@ def _postprocess_search_hit(self, hit): :param hit: the dictionary representing an ElasticSearch search hit. """ - metadata = hit.get('metadata', {}) + metadata = hit.get("metadata", {}) # Serialize document (if exists) - document_pid = metadata.get('document', {}).get('pid') - if document_pid and \ - (document := self.get_resource(DocumentsSearch(), document_pid)): - metadata['document'] = document + document_pid = metadata.get("document", {}).get("pid") + if document_pid and ( + document := self.get_resource(DocumentsSearch(), document_pid) + ): + metadata["document"] = document # Serialize loan & item - loan_pid = metadata.get('loan', {}).get('pid') + loan_pid = metadata.get("loan", {}).get("pid") if loan_pid and (loan := self.get_resource(Loan, loan_pid)): - metadata['loan'] = loan - item_pid = loan.get('item_pid', {}).get('value') + metadata["loan"] = loan + item_pid = loan.get("item_pid", {}).get("value") if item := self.get_resource(Item, item_pid): - metadata['loan']['item'] = item + metadata["loan"]["item"] = item super()._postprocess_search_hit(hit) _json = PatronTransactionsJSONSerializer(RecordSchemaJSONV1) -json_pttr_search = search_responsify(_json, 'application/rero+json') +json_pttr_search = search_responsify(_json, "application/rero+json") diff --git a/rero_ils/modules/patron_transactions/utils.py b/rero_ils/modules/patron_transactions/utils.py index 0e5297b155..d7067db81b 100644 --- a/rero_ils/modules/patron_transactions/utils.py +++ b/rero_ils/modules/patron_transactions/utils.py @@ -21,8 +21,8 @@ from flask_babel import gettext as _ -from .api import PatronTransaction, PatronTransactionsSearch from ..utils import get_ref_for_pid +from .api import PatronTransaction, PatronTransactionsSearch def _build_transaction_query(patron_pid, status=None, types=None): @@ -33,12 +33,11 @@ def _build_transaction_query(patron_pid, status=None, types=None): :param types: (optional) array of transaction types filter, :return: return prepared query. 
""" - query = PatronTransactionsSearch() \ - .filter('term', patron__pid=patron_pid) + query = PatronTransactionsSearch().filter("term", patron__pid=patron_pid) if status: - query = query.filter('term', status=status) + query = query.filter("term", status=status) if types: - query = query.filter('terms', type=types) + query = query.filter("terms", type=types) return query @@ -49,7 +48,7 @@ def get_transactions_pids_for_patron(patron_pid, status=None): :param status: (optional) transaction status filter, """ query = _build_transaction_query(patron_pid, status) - for result in query.source('pid').scan(): + for result in query.source("pid").scan(): yield result.pid @@ -64,7 +63,8 @@ def get_transactions_count_for_patron(patron_pid, status=None): def get_transactions_total_amount_for_patron( - patron_pid, status=None, types=None, with_subscription=True): + patron_pid, status=None, types=None, with_subscription=True +): """Get total amount transactions linked to a patron. :param patron_pid: the patron pid being searched @@ -76,9 +76,9 @@ def get_transactions_total_amount_for_patron( """ search = _build_transaction_query(patron_pid, status, types) if not with_subscription: - search = search.exclude('terms', type=['subscription']) - search.aggs.metric('pttr_total_amount', 'sum', field='total_amount') - search = search[0:0] # set the from/size to 0 ; no need es hits + search = search.exclude("terms", type=["subscription"]) + search.aggs.metric("pttr_total_amount", "sum", field="total_amount") + search = search[:0] results = search.execute() return results.aggregations.pttr_total_amount.value @@ -90,13 +90,10 @@ def get_last_transaction_by_loan_pid(loan_pid, status=None): :param status: (optional) the status of transaction. :return: return last transaction transaction matching criteria. 
""" - query = PatronTransactionsSearch() \ - .filter('term', loan__pid=loan_pid) + query = PatronTransactionsSearch().filter("term", loan__pid=loan_pid) if status: - query = query.filter('term', status=status) - results = query \ - .sort({'creation_date': {'order': 'desc'}}) \ - .source('pid').scan() + query = query.filter("term", status=status) + results = query.sort({"creation_date": {"order": "desc"}}).source("pid").scan() try: pid = next(results).pid return PatronTransaction.get_record_by_pid(pid) @@ -105,79 +102,66 @@ def get_last_transaction_by_loan_pid(loan_pid, status=None): def create_patron_transaction_from_overdue_loan( - loan, dbcommit=True, reindex=True, delete_pid=False): + loan, dbcommit=True, reindex=True, delete_pid=False +): """Create a patron transaction for an overdue loan.""" from ..loans.utils import sum_for_fees + fees = loan.get_overdue_fees total_amount = sum_for_fees(fees) if total_amount > 0: data = { - 'loan': { - '$ref': get_ref_for_pid('loans', loan.pid) - }, - 'patron': { - '$ref': get_ref_for_pid('ptrn', loan.patron_pid) - }, - 'organisation': { - '$ref': get_ref_for_pid('org', loan.organisation_pid) - }, - 'type': 'overdue', - 'status': 'open', - 'note': _('incremental overdue fees'), - 'total_amount': total_amount, - 'creation_date': datetime.now(timezone.utc).isoformat(), - 'steps': [ - {'timestamp': fee[1].isoformat(), 'amount': fee[0]} - for fee in fees - ] + "loan": {"$ref": get_ref_for_pid("loans", loan.pid)}, + "patron": {"$ref": get_ref_for_pid("ptrn", loan.patron_pid)}, + "organisation": {"$ref": get_ref_for_pid("org", loan.organisation_pid)}, + "type": "overdue", + "status": "open", + "note": _("incremental overdue fees"), + "total_amount": total_amount, + "creation_date": datetime.now(timezone.utc).isoformat(), + "steps": [ + {"timestamp": fee[1].isoformat(), "amount": fee[0]} for fee in fees + ], } return PatronTransaction.create( - data, - dbcommit=dbcommit, - reindex=reindex, - delete_pid=delete_pid + data, dbcommit=dbcommit, reindex=reindex, delete_pid=delete_pid ) def create_patron_transaction_from_notification( - notification=None, dbcommit=None, reindex=None, - delete_pid=None): + notification=None, dbcommit=None, reindex=None, delete_pid=None +): """Create a patron transaction from notification.""" from ..notifications.utils import calculate_notification_amount + total_amount = calculate_notification_amount(notification) if total_amount > 0: # no need to create transaction if amount <= 0 ! 
        data = {
-            'notification': {
-                '$ref': get_ref_for_pid('notif', notification.pid)
-            },
-            'loan': {
-                '$ref': get_ref_for_pid('loans', notification.loan_pid)
-            },
-            'patron': {
-                '$ref': get_ref_for_pid('ptrn', notification.patron_pid)
+            "notification": {"$ref": get_ref_for_pid("notif", notification.pid)},
+            "loan": {"$ref": get_ref_for_pid("loans", notification.loan_pid)},
+            "patron": {"$ref": get_ref_for_pid("ptrn", notification.patron_pid)},
+            "organisation": {
+                "$ref": get_ref_for_pid("org", notification.organisation_pid)
             },
-            'organisation': {
-                '$ref': get_ref_for_pid(
-                    'org',
-                    notification.organisation_pid
-                )
-            },
-            'total_amount': total_amount,
-            'creation_date': datetime.now(timezone.utc).isoformat(),
-            'type': 'overdue',
-            'status': 'open'
+            "total_amount": total_amount,
+            "creation_date": datetime.now(timezone.utc).isoformat(),
+            "type": "overdue",
+            "status": "open",
         }
         return PatronTransaction.create(
-            data,
-            dbcommit=dbcommit,
-            reindex=reindex,
-            delete_pid=delete_pid
+            data, dbcommit=dbcommit, reindex=reindex, delete_pid=delete_pid
         )
 
 
 def create_subscription_for_patron(
-        patron, patron_type, start_date, end_date, dbcommit=None, reindex=None,
-        delete_pid=None):
+    patron,
+    patron_type,
+    start_date,
+    end_date,
+    dbcommit=None,
+    reindex=None,
+    delete_pid=None,
+):
     """Create a subscription patron transaction for a patron.
 
     :param patron: the patron linked to the subscription
@@ -191,26 +175,19 @@ def create_subscription_for_patron(
     """
     record = {}
     if patron_type.is_subscription_required:
-        name = patron_type.get('name'),
-        start = start_date.strftime('%Y-%m-%d'),
-        end = end_date.strftime('%Y-%m-%d')
+        name = patron_type.get("name")
+        start = start_date.strftime("%Y-%m-%d")
+        end = end_date.strftime("%Y-%m-%d")
         data = {
-            'patron': {
-                '$ref': get_ref_for_pid('ptrn', patron.pid)
-            },
-            'organisation': {
-                '$ref': get_ref_for_pid('org', patron.organisation_pid)
-            },
-            'total_amount': patron_type.get('subscription_amount'),
-            'creation_date': datetime.now(timezone.utc).isoformat(),
-            'type': 'subscription',
-            'status': 'open',
-            'note': _(f"Subscription for '{name}' from {start} to {end}")
+            "patron": {"$ref": get_ref_for_pid("ptrn", patron.pid)},
+            "organisation": {"$ref": get_ref_for_pid("org", patron.organisation_pid)},
+            "total_amount": patron_type.get("subscription_amount"),
+            "creation_date": datetime.now(timezone.utc).isoformat(),
+            "type": "subscription",
+            "status": "open",
+            "note": _(f"Subscription for '{name}' from {start} to {end}"),
         }
         record = PatronTransaction.create(
-            data,
-            dbcommit=dbcommit,
-            reindex=reindex,
-            delete_pid=delete_pid
+            data, dbcommit=dbcommit, reindex=reindex, delete_pid=delete_pid
         )
     return record
diff --git a/rero_ils/modules/patron_types/api.py b/rero_ils/modules/patron_types/api.py
index 5844dd8f38..d4bb830f2d 100644
--- a/rero_ils/modules/patron_types/api.py
+++ b/rero_ils/modules/patron_types/api.py
@@ -25,25 +25,23 @@
 from elasticsearch_dsl import Q
 from flask_babel import gettext as _
 
-from .models import PatronTypeIdentifier, PatronTypeMetadata
 from ..api import IlsRecord, IlsRecordsIndexer, IlsRecordsSearch
 from ..circ_policies.api import CircPoliciesSearch
 from ..fetchers import id_fetcher
-from ..loans.api import get_loans_count_by_library_for_patron_pid, \
-    get_overdue_loan_pids
+from ..loans.api import get_loans_count_by_library_for_patron_pid, get_overdue_loan_pids
 from ..loans.models import LoanState
 from ..minters import id_minter
-from ..patron_transactions.utils import \
-    
get_transactions_total_amount_for_patron +from ..patron_transactions.utils import get_transactions_total_amount_for_patron from ..patrons.api import Patron, PatronsSearch from ..providers import Provider from ..utils import get_patron_from_arguments, sorted_pids +from .models import PatronTypeIdentifier, PatronTypeMetadata # provider PatronTypeProvider = type( - 'PatronTypeProvider', + "PatronTypeProvider", (Provider,), - dict(identifier=PatronTypeIdentifier, pid_type='ptty') + dict(identifier=PatronTypeIdentifier, pid_type="ptty"), ) # minter patron_type_id_minter = partial(id_minter, provider=PatronTypeProvider) @@ -57,9 +55,9 @@ class PatronTypesSearch(IlsRecordsSearch): class Meta: """Search only on patrons index.""" - index = 'patron_types' + index = "patron_types" doc_types = None - fields = ('*', ) + fields = ("*",) facets = {} default_filter = None @@ -71,7 +69,7 @@ def by_organisation_pid(self, organisation_pid): :returns: An ElasticSearch query to get hits related the entity. :rtype: `elasticsearch_dsl.Search` """ - return self.filter('term', organisation__pid=organisation_pid) + return self.filter("term", organisation__pid=organisation_pid) class PatronType(IlsRecord): @@ -82,8 +80,8 @@ class PatronType(IlsRecord): provider = PatronTypeProvider model_cls = PatronTypeMetadata pids_exist_check = { - 'required': { - 'org': 'organisation', + "required": { + "org": "organisation", } } @@ -94,37 +92,38 @@ def extended_validation(self, **kwargs): Ensure than library limit exceptions are coherent. """ - # validate checkout limits - checkout_limits_data = self.get('limits', {}).get('checkout_limits') - if checkout_limits_data: - global_limit = checkout_limits_data.get('global_limit') - library_limit = checkout_limits_data.get('library_limit') - if library_limit: + if checkout_limits_data := self.get("limits", {}).get("checkout_limits"): + global_limit = checkout_limits_data.get("global_limit") + if library_limit := checkout_limits_data.get("library_limit"): # Library limit cannot be higher than global limit if library_limit > global_limit: - return _('Library limit cannot be higher than global ' - 'limit.') + return _("Library limit cannot be higher than global " "limit.") # Exception limit cannot have same value than library limit # Only one exception per library exceptions_lib = [] - exceptions = checkout_limits_data.get('library_exceptions', []) + exceptions = checkout_limits_data.get("library_exceptions", []) for exception in exceptions: - if exception.get('value') == library_limit: - return _('Exception limit cannot have same value than ' - 'library limit') - ref = exception.get('library').get('$ref') + if exception.get("value") == library_limit: + return _( + "Exception limit cannot have same value than " + "library limit" + ) + ref = exception.get("library").get("$ref") if ref in exceptions_lib: - return _('Only one specific limit by library if ' - 'allowed.') + return _("Only one specific limit by library if " "allowed.") exceptions_lib.append(ref) return True @classmethod def exist_name_and_organisation_pid(cls, name, organisation_pid): """Check if the name is unique within organisation.""" - patron_type = PatronTypesSearch()\ - .filter('term', patron_type_name=name)\ - .filter('term', organisation__pid=organisation_pid).source().scan() + patron_type = ( + PatronTypesSearch() + .filter("term", patron_type_name=name) + .filter("term", organisation__pid=organisation_pid) + .source() + .scan() + ) try: return next(patron_type) except StopIteration: @@ -133,10 +132,12 @@ def 
exist_name_and_organisation_pid(cls, name, organisation_pid): @classmethod def get_yearly_subscription_patron_types(cls): """Get PatronType with an active yearly subscription.""" - results = PatronTypesSearch()\ - .filter('range', subscription_amount={'gt': 0})\ - .source('pid')\ + results = ( + PatronTypesSearch() + .filter("range", subscription_amount={"gt": 0}) + .source("pid") .scan() + ) for result in results: yield cls.get_record_by_pid(result.pid) @@ -158,20 +159,21 @@ def can_checkout(cls, item, **kwargs): # check overdue items limits patron_type = PatronType.get_record_by_pid(patron.patron_type_pid) if not patron_type.check_overdue_items_limit(patron): - return False, [_('Checkout denied: the maximal number of overdue ' - 'items is reached')] + return False, [ + _("Checkout denied: the maximal number of overdue " "items is reached") + ] # check checkout count limit valid, message = patron_type.check_checkout_count_limit(patron, item) if not valid: return False, [message] # check fee amount limit if not patron_type.check_fee_amount_limit(patron): - return False, [_('Checkout denied: the maximal overdue fee amount ' - 'is reached')] + return False, [ + _("Checkout denied: the maximal overdue fee amount " "is reached") + ] # check unpaid subscription if not patron_type.check_unpaid_subscription(patron): - return False, \ - [_('Checkout denied: patron has unpaid subscription')] + return False, [_("Checkout denied: patron has unpaid subscription")] return True, [] @classmethod @@ -191,15 +193,17 @@ def can_request(cls, item, **kwargs): # check overdue items limits patron_type = PatronType.get_record_by_pid(patron.patron_type_pid) if not patron_type.check_overdue_items_limit(patron): - return False, [_('Request denied: the maximal number of overdue ' - 'items is reached')] + return False, [ + _("Request denied: the maximal number of overdue " "items is reached") + ] # check fee amount limit if not patron_type.check_fee_amount_limit(patron): - return False, [_('Request denied: the maximal overdue fee amount ' - 'is reached')] + return False, [ + _("Request denied: the maximal overdue fee amount " "is reached") + ] # check unpaid subscription if not patron_type.check_unpaid_subscription(patron): - return False, [_('Request denied: patron has unpaid subscription')] + return False, [_("Request denied: patron has unpaid subscription")] return True, [] @@ -220,28 +224,34 @@ def can_extend(cls, item, **kwargs): # check overdue items limit patron_type = PatronType.get_record_by_pid(patron.patron_type_pid) if not patron_type.check_overdue_items_limit(patron): - return False, [_('Renewal denied: the maximal number of overdue ' - 'items is reached')] + return False, [ + _("Renewal denied: the maximal number of overdue " "items is reached") + ] # check fee amount limit if not patron_type.check_fee_amount_limit(patron): - return False, [_('Renewal denied: the maximal overdue fee amount ' - 'is reached')] + return False, [ + _("Renewal denied: the maximal overdue fee amount " "is reached") + ] # check unpaid subscription if not patron_type.check_unpaid_subscription(patron): - return False, [_('Renewal denied: patron has unpaid subscription')] + return False, [_("Renewal denied: patron has unpaid subscription")] return True, [] def get_linked_patron(self): """Get patron linked to this patron type.""" - results = PatronsSearch()\ - .filter('term', patron__type__pid=self.pid).source('pid').scan() + results = ( + PatronsSearch() + .filter("term", patron__type__pid=self.pid) + .source("pid") + .scan() + ) 
for result in results: yield Patron.get_record_by_pid(result.pid) @property def is_subscription_required(self): """Check if a subscription is required for this patron type.""" - return self.get('subscription_amount', 0) > 0 + return self.get("subscription_amount", 0) > 0 def get_links_to_me(self, get_pids=False): """Record links. @@ -249,17 +259,12 @@ def get_links_to_me(self, get_pids=False): :param get_pids: if True list of linked pids if False count of linked records """ - ptrn_query = PatronsSearch()\ - .filter('term', patron__type__pid=self.pid) - cipo_query = CircPoliciesSearch()\ - .filter( - 'nested', - path='settings', - query=Q( - 'bool', - must=[Q('match', settings__patron_type__pid=self.pid)] - ) - ) + ptrn_query = PatronsSearch().filter("term", patron__type__pid=self.pid) + cipo_query = CircPoliciesSearch().filter( + "nested", + path="settings", + query=Q("bool", must=[Q("match", settings__patron_type__pid=self.pid)]), + ) links = {} if get_pids: patrons = sorted_pids(ptrn_query) @@ -268,17 +273,16 @@ def get_links_to_me(self, get_pids=False): patrons = ptrn_query.count() circ_policies = cipo_query.count() if patrons: - links['patrons'] = patrons + links["patrons"] = patrons if circ_policies: - links['circ_policies'] = circ_policies + links["circ_policies"] = circ_policies return links def reasons_not_to_delete(self): """Get reasons not to delete record.""" cannot_delete = {} - links = self.get_links_to_me() - if links: - cannot_delete['links'] = links + if links := self.get_links_to_me(): + cannot_delete["links"] = links return cannot_delete # CHECK LIMITS METHODS ==================================================== @@ -289,9 +293,9 @@ def check_overdue_items_limit(self, patron): :return False if patron has more overdue items than defined limit. True in all other cases. """ - limit = self.get('limits', {}).get('overdue_items_limits', {})\ - .get('default_value') - if limit: + if limit := ( + self.get("limits", {}).get("overdue_items_limits", {}).get("default_value") + ): overdue_items = list(get_overdue_loan_pids(patron.pid)) return limit > len(overdue_items) return True @@ -308,39 +312,46 @@ def check_checkout_count_limit(self, patron, item=None): - True|False : to know if the check is success or not. - message(string) : the reason why the check fails. """ - checkout_limits = self.replace_refs().get('limits', {})\ - .get('checkout_limits', {}) - global_limit = checkout_limits.get('global_limit') + checkout_limits = ( + self.replace_refs().get("limits", {}).get("checkout_limits", {}) + ) + global_limit = checkout_limits.get("global_limit") if not global_limit: return True, None # [0] get the stats for this patron by library patron_library_stats = get_loans_count_by_library_for_patron_pid( - patron.pid, [LoanState.ITEM_ON_LOAN]) + patron.pid, [LoanState.ITEM_ON_LOAN] + ) # [1] check the general limit patron_total_count = sum(patron_library_stats.values()) or 0 if patron_total_count >= global_limit: - return False, _('Checkout denied: the maximal checkout number ' - 'is reached.') + return False, _( + "Checkout denied: the maximal checkout number " "is reached." 
+            )
         # [2] check the library limit if item is not None
         if item:
             item_lib_pid = item.library_pid
-            library_limit_value = checkout_limits.get('library_limit')
-            # try to find an exception rule for this library
-            for exception in checkout_limits.get('library_exceptions', []):
-                if exception['library']['pid'] == item_lib_pid:
-                    library_limit_value = exception['value']
-                    break
+            # use the library-specific exception if one exists,
+            # otherwise fall back to the default library limit
+            library_limit_value = next(
+                (
+                    exception["value"]
+                    for exception in checkout_limits.get("library_exceptions", [])
+                    if exception["library"]["pid"] == item_lib_pid
+                ),
+                checkout_limits.get("library_limit"),
+            )
             if (
                 library_limit_value
                 and item_lib_pid in patron_library_stats
                 and patron_library_stats[item_lib_pid] >= library_limit_value
             ):
-                return False, _('Checkout denied: the maximal checkout '
-                                'number of items for this library is '
-                                'reached.')
+                return False, _(
+                    "Checkout denied: the maximal checkout "
+                    "number of items for this library is "
+                    "reached."
+                )
         # [3] no problem detected, checkout is allowed
         return True, None
 
@@ -353,15 +364,15 @@ def check_fee_amount_limit(self, patron):
 
         :return boolean to know if the check is success or not.
         """
         # get fee amount limit
-        fee_amount_limits = self.replace_refs().get('limits', {}) \
-            .get('fee_amount_limits', {})
-        default_limit = fee_amount_limits.get('default_value')
-        if default_limit:
+        fee_amount_limits = (
+            self.replace_refs().get("limits", {}).get("fee_amount_limits", {})
+        )
+        if default_limit := fee_amount_limits.get("default_value"):
             # get total amount for open transactions on overdue and without
             # subscription fee
             patron_total_amount = get_transactions_total_amount_for_patron(
-                patron.pid, status='open', types=['overdue'],
-                with_subscription=False)
+                patron.pid, status="open", types=["overdue"], with_subscription=False
+            )
             return patron_total_amount < default_limit
         return True
 
@@ -373,13 +384,14 @@ def check_unpaid_subscription(self, patron):
 
         :param patron: the patron who tried to execute a circulation
             operation.
         :return boolean to know if the check is success or not.
         """
-        unpaid_subscription_limit = self.get('limits', {})\
-            .get('unpaid_subscription', True)
+        unpaid_subscription_limit = self.get("limits", {}).get(
+            "unpaid_subscription", True
+        )
         if not unpaid_subscription_limit:
-            return True, None
+            return True
         unpaid_amount = get_transactions_total_amount_for_patron(
-            patron.pid, status='open', types=['subscription'],
-            with_subscription=True)
+            patron.pid, status="open", types=["subscription"], with_subscription=True
+        )
         return unpaid_amount == 0
 
 
@@ -393,4 +405,4 @@ def bulk_index(self, record_id_iterator):
 
         :param record_id_iterator: Iterator yielding record UUIDs.
""" - super().bulk_index(record_id_iterator, doc_type='ptty') + super().bulk_index(record_id_iterator, doc_type="ptty") diff --git a/rero_ils/modules/patron_types/jsonresolver.py b/rero_ils/modules/patron_types/jsonresolver.py index 61d3a7665b..cb6847ab49 100644 --- a/rero_ils/modules/patron_types/jsonresolver.py +++ b/rero_ils/modules/patron_types/jsonresolver.py @@ -23,7 +23,7 @@ from ..jsonresolver import resolve_json_refs -@jsonresolver.route('/api/patron_types/', host='bib.rero.ch') +@jsonresolver.route("/api/patron_types/", host="bib.rero.ch") def patron_type_resolver(pid): """Patron type resolver.""" - return resolve_json_refs('ptty', pid) + return resolve_json_refs("ptty", pid) diff --git a/rero_ils/modules/patron_types/models.py b/rero_ils/modules/patron_types/models.py index 136b45076e..8b50ecd83f 100644 --- a/rero_ils/modules/patron_types/models.py +++ b/rero_ils/modules/patron_types/models.py @@ -27,11 +27,11 @@ class PatronTypeIdentifier(RecordIdentifier): """Sequence generator for PatronType identifiers.""" - __tablename__ = 'patron_type_id' - __mapper_args__ = {'concrete': True} + __tablename__ = "patron_type_id" + __mapper_args__ = {"concrete": True} recid = db.Column( - db.BigInteger().with_variant(db.Integer, 'sqlite'), + db.BigInteger().with_variant(db.Integer, "sqlite"), primary_key=True, autoincrement=True, ) @@ -40,4 +40,4 @@ class PatronTypeIdentifier(RecordIdentifier): class PatronTypeMetadata(db.Model, RecordMetadataBase): """PatronType record metadata.""" - __tablename__ = 'patron_type_metadata' + __tablename__ = "patron_type_metadata" diff --git a/rero_ils/modules/patron_types/permissions.py b/rero_ils/modules/patron_types/permissions.py index f59d3b4a86..885b980e99 100644 --- a/rero_ils/modules/patron_types/permissions.py +++ b/rero_ils/modules/patron_types/permissions.py @@ -19,15 +19,18 @@ """Permissions for patron types.""" from invenio_access import action_factory -from rero_ils.modules.permissions import AllowedByAction, \ - AllowedByActionRestrictByOrganisation, RecordPermissionPolicy +from rero_ils.modules.permissions import ( + AllowedByAction, + AllowedByActionRestrictByOrganisation, + RecordPermissionPolicy, +) -search_action = action_factory('ptty-search') -read_action = action_factory('ptty-read') -create_action = action_factory('ptty-create') -update_action = action_factory('ptty-update') -delete_action = action_factory('ptty-delete') -access_action = action_factory('ptty-access') +search_action = action_factory("ptty-search") +read_action = action_factory("ptty-read") +create_action = action_factory("ptty-create") +update_action = action_factory("ptty-update") +delete_action = action_factory("ptty-delete") +access_action = action_factory("ptty-access") class PatronTypePermissionPolicy(RecordPermissionPolicy): diff --git a/rero_ils/modules/patron_types/views.py b/rero_ils/modules/patron_types/views.py index f2f7d3e7c0..097e71a0b2 100644 --- a/rero_ils/modules/patron_types/views.py +++ b/rero_ils/modules/patron_types/views.py @@ -26,27 +26,21 @@ from ..patrons.api import current_librarian blueprint = Blueprint( - 'patron_types', + "patron_types", __name__, - template_folder='templates', - static_folder='static', + template_folder="templates", + static_folder="static", ) -@blueprint.route('/patron_types/name/validate/', methods=["GET"]) +@blueprint.route("/patron_types/name/validate/", methods=["GET"]) @check_logged_as_librarian def name_validate(name): """Patron type name validation.""" - response = { - 'name': None - } + response = {"name": None} if 
current_librarian: - patron_type = PatronType.exist_name_and_organisation_pid( - name, - current_librarian.organisation.pid - ) - if patron_type: - response = { - 'name': patron_type.name - } + if patron_type := PatronType.exist_name_and_organisation_pid( + name, current_librarian.organisation.pid + ): + response = {"name": patron_type.name} return jsonify(response) diff --git a/rero_ils/modules/patrons/api.py b/rero_ils/modules/patrons/api.py index 3934226220..6f61fa18cb 100644 --- a/rero_ils/modules/patrons/api.py +++ b/rero_ils/modules/patrons/api.py @@ -38,38 +38,43 @@ from rero_ils.modules.minters import id_minter from rero_ils.modules.organisations.api import Organisation from rero_ils.modules.patron_transactions.api import PatronTransaction -from rero_ils.modules.patron_transactions.utils import \ - create_subscription_for_patron, get_transactions_count_for_patron, \ - get_transactions_pids_for_patron +from rero_ils.modules.patron_transactions.utils import ( + create_subscription_for_patron, + get_transactions_count_for_patron, + get_transactions_pids_for_patron, +) from rero_ils.modules.providers import Provider from rero_ils.modules.tasks import process_bulk_queue from rero_ils.modules.templates.api import TemplatesSearch from rero_ils.modules.users.api import User from rero_ils.modules.users.models import UserRole -from rero_ils.modules.utils import extracted_data_from_ref, \ - get_patron_from_arguments, get_ref_for_pid, sorted_pids +from rero_ils.modules.utils import ( + extracted_data_from_ref, + get_patron_from_arguments, + get_ref_for_pid, + sorted_pids, +) from .extensions import UserDataExtension from .models import CommunicationChannel, PatronIdentifier, PatronMetadata from .utils import get_patron_pid_by_email -_datastore = LocalProxy(lambda: current_app.extensions['security'].datastore) +_datastore = LocalProxy(lambda: current_app.extensions["security"].datastore) # current logged professional -current_librarian = LocalProxy( - lambda: Patron.get_librarian_by_user(current_user)) +current_librarian = LocalProxy(lambda: Patron.get_librarian_by_user(current_user)) # all patron role accounts related to the current user -current_patrons = LocalProxy(lambda: [ - patron - for patron in Patron.get_patrons_by_user(current_user) - if UserRole.PATRON in patron.get('roles', []) -]) +current_patrons = LocalProxy( + lambda: [ + patron + for patron in Patron.get_patrons_by_user(current_user) + if UserRole.PATRON in patron.get("roles", []) + ] +) # provider PatronProvider = type( - 'PatronProvider', - (Provider,), - dict(identifier=PatronIdentifier, pid_type='ptrn') + "PatronProvider", (Provider,), dict(identifier=PatronIdentifier, pid_type="ptrn") ) # minter patron_id_minter = partial(id_minter, provider=PatronProvider) @@ -83,9 +88,9 @@ class PatronsSearch(IlsRecordsSearch): class Meta: """Search only on patrons index.""" - index = 'patrons' + index = "patrons" doc_types = None - fields = ('*', ) + fields = ("*",) facets = {} default_filter = None @@ -98,11 +103,9 @@ class Patron(IlsRecord): fetcher = patron_id_fetcher provider = PatronProvider model_cls = PatronMetadata - schema = 'patrons/patron-v0.0.1.json' + schema = "patrons/patron-v0.0.1.json" - _extensions = [ - UserDataExtension() - ] + _extensions = [UserDataExtension()] def extended_validation(self, **kwargs): """Returns reasons for validation failures, otherwise True. 
@@ -113,12 +116,15 @@ def extended_validation(self, **kwargs): - barcode already exists in organisation """ org_pid = self.organisation_pid - if patron_barcodes := self.get('patron', {}).get('barcode', []): - results = PatronsSearch()\ - .filter('terms', patron__barcode=patron_barcodes)\ - .filter('term', organisation__pid=org_pid)\ - .exclude('term', pid=self.pid)\ - .source(['pid', 'patron.barcode']).scan() + if patron_barcodes := self.get("patron", {}).get("barcode", []): + results = ( + PatronsSearch() + .filter("terms", patron__barcode=patron_barcodes) + .filter("term", organisation__pid=org_pid) + .exclude("term", pid=self.pid) + .source(["pid", "patron.barcode"]) + .scan() + ) taken_barcodes = [] for hit in results: taken_barcodes.extend( @@ -139,9 +145,9 @@ def extended_validation(self, **kwargs): # CRUD METHODS # ========================================================================= @classmethod - def create(cls, data, id_=None, delete_pid=False, - dbcommit=False, reindex=False, - **kwargs): + def create( + cls, data, id_=None, delete_pid=False, dbcommit=False, reindex=False, **kwargs + ): """Patron record creation. :param cls - class object @@ -151,8 +157,9 @@ def create(cls, data, id_=None, delete_pid=False, :param dbcommit - commit the changes in the db after the creation :param reindex - index the record after the creation """ - record = super().create(cls._clean_data(data), - id_, delete_pid, dbcommit, reindex, **kwargs) + record = super().create( + cls._clean_data(data), id_, delete_pid, dbcommit, reindex, **kwargs + ) record._update_roles() return record @@ -183,41 +190,44 @@ def _validate(self, **kwargs): validation_message = True if self.pid_check: from ..utils import pids_exists_in_data + if self.is_patron: - validation_message = pids_exists_in_data( - info=f'{self.provider.pid_type} ({self.pid})', - data=self.get('patron'), - required={'ptty': 'type'}, - not_required={} - ) or True + validation_message = ( + pids_exists_in_data( + info=f"{self.provider.pid_type} ({self.pid})", + data=self.get("patron"), + required={"ptty": "type"}, + not_required={}, + ) + or True + ) if self.is_professional_user: - libraries = self.get('libraries') + libraries = self.get("libraries") if not libraries: - validation_message = ['Missing libraries'] + validation_message = ["Missing libraries"] for library_pid in self.library_pids: library = Library.get_record_by_pid(library_pid) if library is None: - validation_message =\ - f'Library {library_pid} doesn\'t exist.' + validation_message = f"Library {library_pid} doesn't exist." 
break - subscriptions = self.get('patron', {}).get('subscriptions') + subscriptions = self.get("patron", {}).get("subscriptions") if subscriptions and validation_message: for subscription in subscriptions: - subscription_validation_message = pids_exists_in_data( - info=f'{self.provider.pid_type} ({self.pid})', - data=subscription, - required={ - 'ptty': 'patron_type', - 'pttr': 'patron_transaction' - }, - not_required={} - ) or True + subscription_validation_message = ( + pids_exists_in_data( + info=f"{self.provider.pid_type} ({self.pid})", + data=subscription, + required={"ptty": "patron_type", "pttr": "patron_transaction"}, + not_required={}, + ) + or True + ) if subscription_validation_message is not True: validation_message = subscription_validation_message break self._validate_emails() if validation_message is not True: - raise ValidationError(';'.join(validation_message)) + raise ValidationError(";".join(validation_message)) return json def _validate_emails(self): @@ -226,13 +236,17 @@ def _validate_emails(self): Check if the user has at least one email if the communication channel is email. """ - patron = self.get('patron') - if patron and patron.get('communication_channel') == \ - CommunicationChannel.EMAIL \ - and self.user.email is None \ - and patron.get('additional_communication_email') is None: - raise ValidationError('At least one email should be defined ' - 'for an email communication channel.') + patron = self.get("patron") + if ( + patron + and patron.get("communication_channel") == CommunicationChannel.EMAIL + and self.user.email is None + and patron.get("additional_communication_email") is None + ): + raise ValidationError( + "At least one email should be defined " + "for an email communication channel." + ) @classmethod def _clean_data(cls, data): @@ -242,11 +256,10 @@ def _clean_data(cls, data): :return: the cleaned data. """ if data: - if barcodes := data.get('barcode'): - data['barcode'] = [b.strip() for b in barcodes] - if barcodes := data.get('patron', {}).get('barcode'): - data.setdefault('patron', {})['barcode'] = [ - b.strip() for b in barcodes] + if barcodes := data.get("barcode"): + data["barcode"] = [b.strip() for b in barcodes] + if barcodes := data.get("patron", {}).get("barcode"): + data.setdefault("patron", {})["barcode"] = [b.strip() for b in barcodes] return data @classmethod @@ -283,8 +296,7 @@ def _remove_roles(self): external_patron_roles = self.get_patrons_roles(exclude_self=True) # roles only on the current patron account filtered_roles = filter( - lambda r: r not in external_patron_roles, - self.get('roles') + lambda r: r not in external_patron_roles, self.get("roles") ) for role in [r for r in filtered_roles if r in db_roles]: self.remove_role(role) @@ -299,10 +311,10 @@ def get_patrons_roles(self, exclude_self=False): :return: the list of roles related to this patron. 
""" patrons = self.get_patrons_by_user(self.user) - roles = set() if exclude_self else set(self.get('roles')) + roles = set() if exclude_self else set(self.get("roles")) for patron in patrons: if patron != self: - roles.update(patron.get('roles', [])) + roles.update(patron.get("roles", [])) return list(roles) def add_role(self, role_name): @@ -336,29 +348,28 @@ def get_links_to_me(self, get_pids=False): exclude_states = [ LoanState.CANCELLED, LoanState.ITEM_RETURNED, - LoanState.CREATED + LoanState.CREATED, ] - loan_query = current_circulation.loan_search_cls()\ - .filter('term', patron_pid=self.pid)\ - .exclude('terms', state=exclude_states) - template_query = TemplatesSearch()\ - .filter('term', creator__pid=self.pid) + loan_query = ( + current_circulation.loan_search_cls() + .filter("term", patron_pid=self.pid) + .exclude("terms", state=exclude_states) + ) + template_query = TemplatesSearch().filter("term", creator__pid=self.pid) if get_pids: loans = sorted_pids(loan_query) - transactions = get_transactions_pids_for_patron( - self.pid, status='open') + transactions = get_transactions_pids_for_patron(self.pid, status="open") templates = sorted_pids(template_query) else: loans = loan_query.count() - transactions = get_transactions_count_for_patron( - self.pid, status='open') + transactions = get_transactions_count_for_patron(self.pid, status="open") templates = template_query.count() if loans: - links['loans'] = loans + links["loans"] = loans if transactions: - links['transactions'] = transactions + links["transactions"] = transactions if templates: - links['templates'] = templates + links["templates"] = templates return links def reasons_to_keep(self): @@ -372,7 +383,7 @@ def reasons_to_keep(self): and not current_librarian.has_full_permissions and self.has_full_permissions ): - others['permission denied'] = True + others["permission denied"] = True return others def reasons_not_to_delete(self): @@ -381,9 +392,9 @@ def reasons_not_to_delete(self): others = self.reasons_to_keep() links = self.get_links_to_me() if others: - cannot_delete['others'] = others + cannot_delete["others"] = others if links: - cannot_delete['links'] = links + cannot_delete["links"] = links return cannot_delete def get_blocked_message(self, public=False): @@ -391,35 +402,50 @@ def get_blocked_message(self, public=False): :param public: Is the message is for public interface ? """ - main = _('Your account is currently blocked.') if public \ - else _('This patron is currently blocked.') + main = ( + _("Your account is currently blocked.") + if public + else _("This patron is currently blocked.") + ) if self.is_blocked: return f'{main} {_("Reason")}: {self.patron.get("blocked_note")}' - def add_subscription(self, patron_type, start_date, end_date, - dbcommit=True, reindex=True, delete_pids=False): + def add_subscription( + self, + patron_type, + start_date, + end_date, + dbcommit=True, + reindex=True, + delete_pids=False, + ): """Add a subscription to a patron type. 
:param patron_type: the patron_type linked to the subscription :param start_date: As `datetime`, the subscription start date :param end_date: As `datetime`, the subscription end date (excluded) """ - transaction = create_subscription_for_patron( - self, patron_type, start_date, end_date, - dbcommit=dbcommit, reindex=reindex, delete_pid=delete_pids) - if transaction: - subscriptions = self.get('patron', {}).get('subscriptions', []) - subscriptions.append({ - 'patron_type': { - '$ref': get_ref_for_pid('ptty', patron_type.pid) - }, - 'patron_transaction': { - '$ref': get_ref_for_pid('pttr', transaction.pid) - }, - 'start_date': start_date.strftime('%Y-%m-%d'), - 'end_date': end_date.strftime('%Y-%m-%d'), - }) - self['patron']['subscriptions'] = subscriptions + if transaction := create_subscription_for_patron( + self, + patron_type, + start_date, + end_date, + dbcommit=dbcommit, + reindex=reindex, + delete_pid=delete_pids, + ): + subscriptions = self.get("patron", {}).get("subscriptions", []) + subscriptions.append( + { + "patron_type": {"$ref": get_ref_for_pid("ptty", patron_type.pid)}, + "patron_transaction": { + "$ref": get_ref_for_pid("pttr", transaction.pid) + }, + "start_date": start_date.strftime("%Y-%m-%d"), + "end_date": end_date.strftime("%Y-%m-%d"), + } + ) + self["patron"]["subscriptions"] = subscriptions self.update(self, dbcommit=dbcommit, reindex=reindex) def transaction_user_validator(self, user_pid): @@ -450,18 +476,12 @@ def get_circulation_messages(self, public=False): # if patron is blocked, no need to return any other circulation # messages ! if not public and self.is_blocked: - return [{ - 'type': 'error', - 'content': self.get_blocked_message(public) - }] + return [{"type": "error", "content": self.get_blocked_message(public)}] messages = [] # if patron expiration_date has reached - error type message if not public and self.is_expired: - messages.append({ - 'type': 'error', - 'content': _('Patron rights expired.') - }) + messages.append({"type": "error", "content": _("Patron rights expired.")}) # other messages must be only rendered for the professional interface if not public: @@ -469,25 +489,29 @@ def get_circulation_messages(self, public=False): patron_type = PatronType.get_record_by_pid(self.patron_type_pid) valid, message = patron_type.check_checkout_count_limit(self) if not valid: - messages.append({ - 'type': 'error', - 'content': message - }) + messages.append({"type": "error", "content": message}) # check fee amount limit if not patron_type.check_fee_amount_limit(self): - messages.append({ - 'type': 'error', - 'content': _( - 'Transactions denied: the maximal overdue fee amount ' - 'is reached.') - }) + messages.append( + { + "type": "error", + "content": _( + "Transactions denied: the maximal overdue fee amount " + "is reached." 
+ ), + } + ) # check the patron type overdue limit if not patron_type.check_overdue_items_limit(self): - messages.append({ - 'type': 'error', - 'content': _('Checkout denied: the maximal number of ' - 'overdue items is reached') - }) + messages.append( + { + "type": "error", + "content": _( + "Checkout denied: the maximal number of " + "overdue items is reached" + ), + } + ) return messages def set_keep_history(self, keep_history, dbcommit=True, reindex=True): @@ -497,9 +521,9 @@ def set_keep_history(self, keep_history, dbcommit=True, reindex=True): :param dbcommit - commit the changes :param reindex - index the changes """ - user = self._get_user_by_user_id(self.get('user_id')) + user = self._get_user_by_user_id(self.get("user_id")) profile = user.user_profile - profile['keep_history'] = keep_history + profile["keep_history"] = keep_history user.user_profile = profile db.session.merge(user) if dbcommit: @@ -515,8 +539,7 @@ def set_keep_history(self, keep_history, dbcommit=True, reindex=True): def remove_user_data(cls, data): """Remove the user data.""" data = deepcopy(data) - profile_fields = \ - User.profile_fields + ['username', 'email', 'password'] + profile_fields = User.profile_fields + ["username", "email", "password"] for field in profile_fields: data.pop(field, None) return data @@ -524,12 +547,14 @@ def remove_user_data(cls, data): @classmethod def get_all_pids_for_organisation(cls, organisation_pid): """Get all patron pids for a specific organisation.""" - query = PatronsSearch() \ - .filter('term', organisation__pid=organisation_pid) \ - .source(includes='pid') \ + query = ( + PatronsSearch() + .filter("term", organisation__pid=organisation_pid) + .source(includes="pid") .scan() + ) for hit in query: - yield hit['pid'] + yield hit["pid"] @classmethod def get_librarian_by_user(cls, user): @@ -537,20 +562,20 @@ def get_librarian_by_user(cls, user): patrons = cls.get_patrons_by_user(user) librarians = list(filter(lambda p: p.is_professional_user, patrons)) if len(librarians) > 1: - raise Exception(f'more than one librarian account for {user}') - if not librarians: - return None - return librarians[0] + raise Exception(f"more than one librarian account for {user}") + return librarians[0] if librarians else None @classmethod def get_patrons_by_user(cls, user): """Get all patrons by user.""" patrons = [] - if hasattr(user, 'id'): - result = PatronsSearch() \ - .filter('term', user_id=user.id) \ - .source(includes='pid') \ + if hasattr(user, "id"): + result = ( + PatronsSearch() + .filter("term", user_id=user.id) + .source(includes="pid") .scan() + ) patrons = [cls.get_record_by_pid(hit.pid) for hit in result] return patrons @@ -568,10 +593,10 @@ def get_patron_by_barcode(cls, barcode, org_pid=None): :param org_pid: filter patron belongs to this organisation pid. :return: The patron corresponding to this barcode. 
""" - filters = Q('term', patron__barcode=barcode) + filters = Q("term", patron__barcode=barcode) if org_pid: - filters &= Q('term', organisation__pid=org_pid) - query = PatronsSearch().filter('bool', must=[filters]).source(['pid']) + filters &= Q("term", organisation__pid=org_pid) + query = PatronsSearch().filter("bool", must=[filters]).source(["pid"]) if hit := next(query.scan(), None): return cls.get_record_by_pid(hit.pid) @@ -596,7 +621,7 @@ def can_request(cls, item, **kwargs): if patron.is_blocked: messages.append(patron.get_blocked_message()) if patron.is_expired: - messages.append(_('Patron account expired.')) + messages.append(_("Patron account expired.")) return not messages, messages @@ -617,21 +642,25 @@ def patrons_with_obsolete_subscription_pids(cls, end_date=None): """Search about patrons with obsolete subscription.""" if end_date is None: end_date = datetime.now() - end_date = end_date.strftime('%Y-%m-%d') - results = PatronsSearch() \ - .filter('range', patron__subscriptions__end_date={'lt': end_date})\ - .source('pid') \ + end_date = end_date.strftime("%Y-%m-%d") + results = ( + PatronsSearch() + .filter("range", patron__subscriptions__end_date={"lt": end_date}) + .source("pid") .scan() + ) for result in results: yield Patron.get_record_by_pid(result.pid) @classmethod def get_patrons_without_subscription(cls, patron_type_pid): """Get patrons linked to patron_type that haven't any subscription.""" - query = PatronsSearch() \ - .filter('term', patron__type__pid=patron_type_pid) \ - .exclude('exists', field='patron.subscriptions') - for res in query.source('pid').scan(): + query = ( + PatronsSearch() + .filter("term", patron__type__pid=patron_type_pid) + .exclude("exists", field="patron.subscriptions") + ) + for res in query.source("pid").scan(): yield Patron.get_record_by_pid(res.pid) @classmethod @@ -645,33 +674,44 @@ def set_communication_channel(cls, user=None, dbcommit=True, reindex=True): def basic_query(channel): """Returns basic ES query.""" - return PatronsSearch() \ - .filter('term', user_id=user.id) \ - .filter('term', patron__communication_channel=channel) \ - .source(includes='pid') - - mail_query = basic_query(CommunicationChannel.EMAIL) \ - .filter('bool', must_not=[ - Q('exists', field='patron.additional_communication_email'), - Q('exists', field='email') - ]) - to_mail_pids = [[hit['pid'], CommunicationChannel.MAIL, hit.meta.id] - for hit in mail_query.scan()] - email_query = basic_query(CommunicationChannel.MAIL) \ - .filter('bool', should=[ - Q('exists', field='patron.additional_communication_email'), - Q('exists', field='email') - ]) - to_email_pids = [[hit['pid'], CommunicationChannel.EMAIL, hit.meta.id] - for hit in email_query.scan()] + return ( + PatronsSearch() + .filter("term", user_id=user.id) + .filter("term", patron__communication_channel=channel) + .source(includes="pid") + ) + + mail_query = basic_query(CommunicationChannel.EMAIL).filter( + "bool", + must_not=[ + Q("exists", field="patron.additional_communication_email"), + Q("exists", field="email"), + ], + ) + to_mail_pids = [ + [hit["pid"], CommunicationChannel.MAIL, hit.meta.id] + for hit in mail_query.scan() + ] + email_query = basic_query(CommunicationChannel.MAIL).filter( + "bool", + should=[ + Q("exists", field="patron.additional_communication_email"), + Q("exists", field="email"), + ], + ) + to_email_pids = [ + [hit["pid"], CommunicationChannel.EMAIL, hit.meta.id] + for hit in email_query.scan() + ] pids = to_mail_pids + to_email_pids ids = [] for pid, channel, id in pids: 
ids.append(id) if patron := Patron.get_record_by_pid(pid): - patron['patron']['communication_channel'] = channel - db.session.query(patron.model_cls).filter_by( - id=patron.id).update({patron.model_cls.json: patron}) + patron["patron"]["communication_channel"] = channel + db.session.query(patron.model_cls).filter_by(id=patron.id).update( + {patron.model_cls.json: patron} + ) if ids: # commit session db.session.commit() @@ -699,25 +739,25 @@ def get_current_patron(cls, record): @property def user(self): """Invenio user of a patron.""" - return self._get_user_by_user_id(self.get('user_id')) + return self._get_user_by_user_id(self.get("user_id")) @property def profile_url(self): """Get the link to the RERO_ILS patron profile URL.""" - view_code = self.organisation.get('code') - base_url = current_app.config.get('RERO_ILS_APP_URL') - return f'{base_url}/{view_code}/patrons/profile' + view_code = self.organisation.get("code") + base_url = current_app.config.get("RERO_ILS_APP_URL") + return f"{base_url}/{view_code}/patrons/profile" @property def patron(self): """Patron property shortcut.""" - return self.get('patron', {}) + return self.get("patron", {}) @property def expiration_date(self): """Shortcut to find the patron expiration date.""" - if date_string := self.patron.get('expiration_date'): - return datetime.strptime(date_string, '%Y-%m-%d') + if date_string := self.patron.get("expiration_date"): + return datetime.strptime(date_string, "%Y-%m-%d") @property def is_expired(self): @@ -730,39 +770,38 @@ def formatted_name(self): """Return the best possible human-readable patron name.""" profile = self.user.user_profile name_parts = [ - profile.get('last_name', '').strip(), - profile.get('first_name', '').strip() + profile.get("last_name", "").strip(), + profile.get("first_name", "").strip(), ] - return ', '.join(filter(None, name_parts)) + return ", ".join(filter(None, name_parts)) @property def patron_type_pid(self): """Shortcut for patron type pid.""" - if patron_type := self.get('patron', {}).get('type'): + if patron_type := self.get("patron", {}).get("type"): return extracted_data_from_ref(patron_type) @property def is_patron(self): """Shortcut to check if user has patron role.""" - return UserRole.PATRON in self.get('roles', []) + return UserRole.PATRON in self.get("roles", []) @property def is_professional_user(self): """Shortcut to check if user has librarian role.""" return any( - role in UserRole.PROFESSIONAL_ROLES - for role in self.get('roles', []) + role in UserRole.PROFESSIONAL_ROLES for role in self.get("roles", []) ) @property def has_full_permissions(self): """Shortcut to check if user has system_librarian role.""" - return UserRole.FULL_PERMISSIONS in self.get('roles', []) + return UserRole.FULL_PERMISSIONS in self.get("roles", []) @property def is_blocked(self): """Shortcut to know if user is blocked.""" - return self.patron.get('blocked', False) + return self.patron.get("blocked", False) @property def organisation_pid(self): @@ -772,6 +811,7 @@ def organisation_pid(self): return library.organisation_pid if patron_type_pid := self.patron_type_pid: from ..patron_types.api import PatronType + patron_type = PatronType.get_record_by_pid(patron_type_pid) return patron_type.organisation_pid @@ -792,13 +832,13 @@ def library_pids(self): if self.is_professional_user: return [ extracted_data_from_ref(library) - for library in self.get('libraries', []) + for library in self.get("libraries", []) ] @property def manageable_library_pids(self): """Get list of manageable library pids for 
this patron.""" - if UserRole.FULL_PERMISSIONS in self.get('roles', []): + if UserRole.FULL_PERMISSIONS in self.get("roles", []): return self.organisation.get_libraries_pids() return self.library_pids or [] @@ -812,16 +852,17 @@ def has_valid_subscription(self): subscription in a valid interval of time. """ from ..patron_types.api import PatronType + if self.patron_type_pid: patron_type = PatronType.get_record_by_pid(self.patron_type_pid) if patron_type.is_subscription_required: - for sub in self.get('patron', {}).get('subscriptions', []): + for sub in self.get("patron", {}).get("subscriptions", []): # not need to check if the subscription is for the # current patron.patron_type. If patron.patron_type # change while a subscription is still pending, this # subscription is still valid - start = datetime.strptime(sub['start_date'], '%Y-%m-%d') - end = datetime.strptime(sub['end_date'], '%Y-%m-%d') + start = datetime.strptime(sub["start_date"], "%Y-%m-%d") + end = datetime.strptime(sub["end_date"], "%Y-%m-%d") if start < datetime.now() < end: return True return False @@ -830,13 +871,15 @@ def has_valid_subscription(self): @property def valid_subscriptions(self): """Get valid subscriptions for a patron.""" + def is_subscription_valid(subscription): - start = datetime.strptime(subscription['start_date'], '%Y-%m-%d') - end = datetime.strptime(subscription['end_date'], '%Y-%m-%d') + start = datetime.strptime(subscription["start_date"], "%Y-%m-%d") + end = datetime.strptime(subscription["end_date"], "%Y-%m-%d") return start < datetime.now() < end + subs = filter( - is_subscription_valid, - self.get('patron', {}).get('subscriptions', [])) + is_subscription_valid, self.get("patron", {}).get("subscriptions", []) + ) return list(subs) @property @@ -846,11 +889,10 @@ def pending_subscriptions(self): # In a normal process, the maximum number of subscriptions for a patron # is two : current subscription and possibly next one. pending_subs = [] - for sub in self.get('patron', {}).get('subscriptions', []): - trans_pid = extracted_data_from_ref( - sub['patron_transaction'], data='pid') + for sub in self.get("patron", {}).get("subscriptions", []): + trans_pid = extracted_data_from_ref(sub["patron_transaction"], data="pid") transaction = PatronTransaction.get_record_by_pid(trans_pid) - if transaction.status == 'open': + if transaction.status == "open": pending_subs.append(sub) return pending_subs @@ -860,11 +902,14 @@ def age(self): :returns: Age of the patron as ``int`` """ - birth_date = self.user.user_profile['birth_date'] - birth_date = datetime.strptime(birth_date, '%Y-%m-%d') + birth_date = self.user.user_profile["birth_date"] + birth_date = datetime.strptime(birth_date, "%Y-%m-%d") today = date.today() - return today.year - birth_date.year - ( - (today.month, today.day) < (birth_date.month, birth_date.day)) + return ( + today.year + - birth_date.year + - ((today.month, today.day) < (birth_date.month, birth_date.day)) + ) class PatronsIndexer(IlsRecordsIndexer): @@ -877,4 +922,4 @@ def bulk_index(self, record_id_iterator): :param record_id_iterator: Iterator yielding record UUIDs. 
""" - super().bulk_index(record_id_iterator, doc_type='ptrn') + super().bulk_index(record_id_iterator, doc_type="ptrn") diff --git a/rero_ils/modules/patrons/cli.py b/rero_ils/modules/patrons/cli.py index 924d75c0d7..6c5ee92e5f 100644 --- a/rero_ils/modules/patrons/cli.py +++ b/rero_ils/modules/patrons/cli.py @@ -37,27 +37,25 @@ from rero_ils.modules.patrons.models import CommunicationChannel from rero_ils.modules.users.api import User -from .utils import create_patron_from_data from ..patrons.api import Patron, PatronProvider from ..providers import append_fixtures_new_identifiers from ..utils import JsonWriter, get_schema_for_resource, read_json_record +from .utils import create_patron_from_data -datastore = LocalProxy(lambda: current_app.extensions['security'].datastore) -records_state = LocalProxy(lambda: current_app.extensions['invenio-records']) +datastore = LocalProxy(lambda: current_app.extensions["security"].datastore) +records_state = LocalProxy(lambda: current_app.extensions["invenio-records"]) -@click.command('import_users') -@click.option('-a', '--append', 'append', is_flag=True, default=False) -@click.option('-v', '--verbose', 'verbose', is_flag=True, default=False) -@click.option('-p', '--password', 'password', default='123456') -@click.option('-l', '--lazy', 'lazy', is_flag=True, default=False) -@click.option('-o', '--dont-stop', 'dont_stop_on_error', - is_flag=True, default=False) -@click.option('-d', '--debug', 'debug', is_flag=True, default=False) -@click.argument('infile', type=click.File('r'), default=sys.stdin) +@click.command("import_users") +@click.option("-a", "--append", "append", is_flag=True, default=False) +@click.option("-v", "--verbose", "verbose", is_flag=True, default=False) +@click.option("-p", "--password", "password", default="123456") +@click.option("-l", "--lazy", "lazy", is_flag=True, default=False) +@click.option("-o", "--dont-stop", "dont_stop_on_error", is_flag=True, default=False) +@click.option("-d", "--debug", "debug", is_flag=True, default=False) +@click.argument("infile", type=click.File("r"), default=sys.stdin) @with_appcontext -def import_users(infile, append, verbose, password, lazy, dont_stop_on_error, - debug): +def import_users(infile, append, verbose, password, lazy, dont_stop_on_error, debug): """Import users. :param verbose: this function will be verbose. @@ -66,11 +64,23 @@ def import_users(infile, append, verbose, password, lazy, dont_stop_on_error, :param dont_stop_on_error: don't stop on error :param infile: Json user file. 
""" - click.secho('Import users:', fg='green') + click.secho("Import users:", fg="green") profile_fields = [ - 'first_name', 'last_name', 'street', 'postal_code', 'gender', - 'city', 'birth_date', 'username', 'home_phone', 'business_phone', - 'mobile_phone', 'other_phone', 'keep_history', 'country', 'email' + "first_name", + "last_name", + "street", + "postal_code", + "gender", + "city", + "birth_date", + "username", + "home_phone", + "business_phone", + "mobile_phone", + "other_phone", + "keep_history", + "country", + "email", ] if lazy: # try to lazy read json file (slower, better memory management) @@ -85,41 +95,29 @@ def import_users(infile, append, verbose, password, lazy, dont_stop_on_error, try: # patron creation patron = None - patron_pid = patron_data.get('pid') + patron_pid = patron_data.get("pid") if patron_pid: patron = Patron.get_record_by_pid(patron_pid) if not patron: patron = create_patron_from_data( - data=patron_data, - dbcommit=True, - reindex=True + data=patron_data, dbcommit=True, reindex=True ) pids.append(patron.pid) else: # remove profile fields from patron record patron_data = User.remove_fields(patron_data) - patron.update( - data=patron_data, - dbcommit=True, - reindex=True - ) + patron.update(data=patron_data, dbcommit=True, reindex=True) if verbose: profile = patron.user.user_profile name_parts = [ - profile.get('last_name', '').strip(), - profile.get('first_name', '').strip() + profile.get("last_name", "").strip(), + profile.get("first_name", "").strip(), ] - user_name = ', '.join(filter(None, name_parts)) - click.secho( - f'{count:<8} Patron updated: {user_name}', - fg='yellow' - ) + user_name = ", ".join(filter(None, name_parts)) + click.secho(f"{count:<8} Patron updated: {user_name}", fg="yellow") except Exception as err: error_records.append(patron_data) - click.secho( - f'{count:<8} User create error: {err}', - fg='red' - ) + click.secho(f"{count:<8} User create error: {err}", fg="red") if debug: traceback.print_exc() if not dont_stop_on_error: @@ -129,56 +127,48 @@ def import_users(infile, append, verbose, password, lazy, dont_stop_on_error, db.session.rollback() if append: - click.secho(f'Append fixtures new identifiers: {len(pids)}') + click.secho(f"Append fixtures new identifiers: {len(pids)}") identifier = Patron.provider.identifier try: append_fixtures_new_identifiers( - identifier, - sorted(pids, key=lambda x: int(x)), - PatronProvider.pid_type + identifier, sorted(pids, key=lambda x: int(x)), PatronProvider.pid_type ) except Exception as err: - click.secho( - f'ERROR append fixtures new identifiers: {err}', - fg='red' - ) + click.secho(f"ERROR append fixtures new identifiers: {err}", fg="red") if error_records: name, ext = os.path.splitext(infile.name) - err_file_name = f'{name}_errors{ext}' - click.secho(f'Write error file: {err_file_name}') + err_file_name = f"{name}_errors{ext}" + click.secho(f"Write error file: {err_file_name}") error_file = JsonWriter(err_file_name) for error_record in error_records: error_file.write(error_record) -@click.command('users_validate') -@click.argument('jsonfile', type=click.File('r')) -@click.option('-v', '--verbose', 'verbose', is_flag=True, default=False) -@click.option('-d', '--debug', 'debug', is_flag=True, default=False) +@click.command("users_validate") +@click.argument("jsonfile", type=click.File("r")) +@click.option("-v", "--verbose", "verbose", is_flag=True, default=False) +@click.option("-d", "--debug", "debug", is_flag=True, default=False) @with_appcontext def users_validate(jsonfile, verbose, debug): 
"""Check users validation.""" - click.secho('Validate user file: ', fg='green', nl=False) - click.echo(f'{jsonfile.name}') + click.secho("Validate user file: ", fg="green", nl=False) + click.echo(f"{jsonfile.name}") - path = current_jsonschemas.url_to_path(get_schema_for_resource('ptrn')) + path = current_jsonschemas.url_to_path(get_schema_for_resource("ptrn")) ptrn_schema = current_jsonschemas.get_schema(path=path) ptrn_schema = records_state.replace_refs(ptrn_schema) # TODO: get user schema path programaticly # path = current_jsonschemas.url_to_path(get_schema_for_resource('user')) - path = 'users/user-v0.0.1.json' + path = "users/user-v0.0.1.json" user_schema = current_jsonschemas.get_schema(path=path) user_schema = records_state.replace_refs(user_schema) - merger_schema = { - "properties": { - "required": {"mergeStrategy": "append"} - } - } + merger_schema = {"properties": {"required": {"mergeStrategy": "append"}}} merger = Merger(merger_schema) schema = merger.merge(user_schema, ptrn_schema) - schema['required'] = [ - s for s in schema['required'] if s not in ['$schema', 'user_id']] + schema["required"] = [ + s for s in schema["required"] if s not in ["$schema", "user_id"] + ] datas = read_json_record(jsonfile) librarien_roles_users = {} @@ -187,37 +177,41 @@ def users_validate(jsonfile, verbose, debug): click.echo(f'\tTest record: {idx} pid: {data.get("pid")}') try: validate(data, schema) - patron = data.get('patron', {}) - if patron and patron.get('communication_channel') == \ - CommunicationChannel.EMAIL and data.get('email') is None \ - and patron.get('additional_communication_email') is None: - raise ValidationError('At least one email should be defined ' - 'for an email communication channel.') - librarian_roles = [ - Patron.ROLE_SYSTEM_LIBRARIAN, Patron.ROLE_LIBRARIAN] - roles = data.get('roles', []) + patron = data.get("patron", {}) + if ( + patron + and patron.get("communication_channel") == CommunicationChannel.EMAIL + and data.get("email") is None + and patron.get("additional_communication_email") is None + ): + raise ValidationError( + "At least one email should be defined " + "for an email communication channel." 
+ ) + librarian_roles = [Patron.ROLE_SYSTEM_LIBRARIAN, Patron.ROLE_LIBRARIAN] + roles = data.get("roles", []) if any(role in librarian_roles for role in roles): - if not data.get('libraries'): - raise ValidationError('Missing libraries') + if not data.get("libraries"): + raise ValidationError("Missing libraries") # test multiple librarien, roles for same user - username = data.get('username') + username = data.get("username") if username in librarien_roles_users: - raise ValidationError('Multiple librarian roles') + raise ValidationError("Multiple librarian roles") else: librarien_roles_users[username] = 1 - birth_date = data.get('birth_date') - if birth_date[0] == '0': - raise ValidationError(f'Wrong birth date: {birth_date}') + birth_date = data.get("birth_date") + if birth_date[0] == "0": + raise ValidationError(f"Wrong birth date: {birth_date}") except ValidationError as err: click.secho( f'Error validate in record: {idx} pid: {data.get("pid")} ' f'username: {data.get("username")}', - fg='red' + fg="red", ) if debug: click.secho(str(err)) else: - trace_lines = traceback.format_exc(1).split('\n') + trace_lines = traceback.format_exc(1).split("\n") click.secho(trace_lines[3].strip()) diff --git a/rero_ils/modules/patrons/dumpers.py b/rero_ils/modules/patrons/dumpers.py index 8c1d2c9883..4e90c120ab 100644 --- a/rero_ils/modules/patrons/dumpers.py +++ b/rero_ils/modules/patrons/dumpers.py @@ -17,6 +17,9 @@ # along with this program. If not, see . """Patron dumpers.""" + +import contextlib + from invenio_records.dumpers import Dumper as InvenioRecordsDumper @@ -39,10 +42,8 @@ def dump(self, record, data, **kwargs): """ data = record.dumps() # use the default dumps() to get basic data. for property_name in self._properties: - try: + with contextlib.suppress(AttributeError): data[property_name] = getattr(record, property_name) - except AttributeError: - pass return {k: v for k, v in data.items() if v} @@ -57,18 +58,18 @@ def dump(self, record, data, **kwargs): :return a dict with dumped data. """ data = { - 'pid': record.pid, - 'last_name': data.get('last_name'), - 'first_name': data.get('first_name'), - 'profile_url': record.profile_url, - 'address': { - 'street': data.get('street'), - 'postal_code': data.get('postal_code'), - 'city': data.get('city'), - 'country': data.get('country') + "pid": record.pid, + "last_name": data.get("last_name"), + "first_name": data.get("first_name"), + "profile_url": record.profile_url, + "address": { + "street": data.get("street"), + "postal_code": data.get("postal_code"), + "city": data.get("city"), + "country": data.get("country"), }, - 'barcode': record.get('patron', {}).get('barcode') + "barcode": record.get("patron", {}).get("barcode"), } - data['address'] = {k: v for k, v in data['address'].items() if v} + data["address"] = {k: v for k, v in data["address"].items() if v} data = {k: v for k, v in data.items() if v} return data diff --git a/rero_ils/modules/patrons/extensions.py b/rero_ils/modules/patrons/extensions.py index 0e1f029150..09c0582a6f 100644 --- a/rero_ils/modules/patrons/extensions.py +++ b/rero_ils/modules/patrons/extensions.py @@ -33,6 +33,6 @@ def pre_dump(self, record, data, dumper=None): :param dumper: Dumper to use when dumping the record. :return the future dumped data. 
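# --- editor's aside (illustrative sketch, not part of the patch) ---------
# In the users_validate hunk above (cli.py), `merger_schema` tells jsonmerge
# to append the two `required` arrays instead of letting the head document
# replace the base one; all other keys keep the default strategy. Toy data,
# assuming the jsonmerge package:
from jsonmerge import Merger

merger = Merger({"properties": {"required": {"mergeStrategy": "append"}}})
user_schema = {"required": ["username"], "title": "user"}
ptrn_schema = {"required": ["pid"], "title": "patron"}
merged = merger.merge(user_schema, ptrn_schema)
print(merged["required"])  # ['username', 'pid'] -- appended, not replaced
print(merged["title"])     # 'patron' -- default strategy: head wins
# --- end aside ------------------------------------------------------------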
""" - user = User.get_record(record.get('user_id')) + user = User.get_record(record.get("user_id")) user_info = user.dumps_metadata() return data.update(user_info) diff --git a/rero_ils/modules/patrons/jsonresolver.py b/rero_ils/modules/patrons/jsonresolver.py index 01f1f2a5ec..837ad58a43 100644 --- a/rero_ils/modules/patrons/jsonresolver.py +++ b/rero_ils/modules/patrons/jsonresolver.py @@ -23,7 +23,7 @@ from ..jsonresolver import resolve_json_refs -@jsonresolver.route('/api/patrons/', host='bib.rero.ch') +@jsonresolver.route("/api/patrons/", host="bib.rero.ch") def patron_resolver(pid): """Patron resolver.""" - return resolve_json_refs('ptrn', pid) + return resolve_json_refs("ptrn", pid) diff --git a/rero_ils/modules/patrons/listener.py b/rero_ils/modules/patrons/listener.py index 4506334dba..8dd7c59128 100644 --- a/rero_ils/modules/patrons/listener.py +++ b/rero_ils/modules/patrons/listener.py @@ -19,13 +19,20 @@ from datetime import datetime -from .api import Patron, PatronsSearch from ..patron_types.api import PatronType from ..utils import add_years +from .api import Patron, PatronsSearch -def enrich_patron_data(sender, json=None, record=None, index=None, - doc_type=None, arguments=None, **dummy_kwargs): +def enrich_patron_data( + sender, + json=None, + record=None, + index=None, + doc_type=None, + arguments=None, + **dummy_kwargs, +): """Signal sent before a record is indexed. :param json: The dumped record dictionary which can be modified. @@ -33,12 +40,12 @@ def enrich_patron_data(sender, json=None, record=None, index=None, :param index: The index in which the record will be indexed. :param doc_type: The doc_type for the record. """ - if index.split('-')[0] == PatronsSearch.Meta.index: + if index.split("-")[0] == PatronsSearch.Meta.index: patron = record if not isinstance(record, Patron): - patron = Patron.get_record_by_pid(record.get('pid')) - if org_pid := patron.organisation['pid']: - json['organisation'] = {'pid': org_pid} + patron = Patron.get_record_by_pid(record.get("pid")) + if org_pid := patron.organisation["pid"]: + json["organisation"] = {"pid": org_pid} def create_subscription_patron_transaction(sender, record=None, **kwargs): @@ -74,5 +81,6 @@ def update_from_profile(sender, user, **kwargs): patron.reindex() if patron.is_patron: from ..loans.api import anonymize_loans - if not user.user_profile.get('keep_history', True): + + if not user.user_profile.get("keep_history", True): anonymize_loans(patron=patron, dbcommit=True, reindex=True) diff --git a/rero_ils/modules/patrons/loaders/__init__.py b/rero_ils/modules/patrons/loaders/__init__.py index 73918bf91a..f1112f71bc 100644 --- a/rero_ils/modules/patrons/loaders/__init__.py +++ b/rero_ils/modules/patrons/loaders/__init__.py @@ -24,6 +24,4 @@ json_v1 = marshmallow_loader(PatronMetadataSchemaV1) -__all__ = ( - 'json_v1', -) +__all__ = ("json_v1",) diff --git a/rero_ils/modules/patrons/models.py b/rero_ils/modules/patrons/models.py index 61c57930c7..50cb3581ac 100644 --- a/rero_ils/modules/patrons/models.py +++ b/rero_ils/modules/patrons/models.py @@ -27,11 +27,11 @@ class PatronIdentifier(RecordIdentifier): """Sequence generator for Patrons identifiers.""" - __tablename__ = 'patron_id' - __mapper_args__ = {'concrete': True} + __tablename__ = "patron_id" + __mapper_args__ = {"concrete": True} recid = db.Column( - db.BigInteger().with_variant(db.Integer, 'sqlite'), + db.BigInteger().with_variant(db.Integer, "sqlite"), primary_key=True, autoincrement=True, ) @@ -40,11 +40,11 @@ class PatronIdentifier(RecordIdentifier): 
class PatronMetadata(db.Model, RecordMetadataBase): """Patron record metadata.""" - __tablename__ = 'patron_metadata' + __tablename__ = "patron_metadata" class CommunicationChannel: """Enum class to list all possible patron communication channels.""" - EMAIL = 'email' - MAIL = 'mail' + EMAIL = "email" + MAIL = "mail" diff --git a/rero_ils/modules/patrons/permissions.py b/rero_ils/modules/patrons/permissions.py index 2ce3c1cb74..960222d643 100644 --- a/rero_ils/modules/patrons/permissions.py +++ b/rero_ils/modules/patrons/permissions.py @@ -22,26 +22,30 @@ from invenio_access import action_factory, any_user from invenio_records_permissions.generators import Generator -from rero_ils.modules.permissions import AllowedByAction, \ - AllowedByActionRestrictByOrganisation, \ - AllowedByActionRestrictByOwnerOrOrganisation, LibraryNeed, \ - RecordPermissionPolicy +from rero_ils.modules.permissions import ( + AllowedByAction, + AllowedByActionRestrictByOrganisation, + AllowedByActionRestrictByOwnerOrOrganisation, + LibraryNeed, + RecordPermissionPolicy, +) from rero_ils.modules.users.models import UserRole from .api import Patron, current_librarian from .utils import validate_role_changes # Actions to control patron permission policy -search_action = action_factory('ptrn-search') -read_action = action_factory('ptrn-read') -create_action = action_factory('ptrn-create') -update_action = action_factory('ptrn-update') -delete_action = action_factory('ptrn-delete') -access_action = action_factory('ptrn-access') +search_action = action_factory("ptrn-search") +read_action = action_factory("ptrn-read") +create_action = action_factory("ptrn-create") +update_action = action_factory("ptrn-update") +delete_action = action_factory("ptrn-delete") +access_action = action_factory("ptrn-access") class AllowedByActionRestrictStaffByManageableLibrary( - AllowedByActionRestrictByOrganisation): + AllowedByActionRestrictByOrganisation +): """Restrict action on staff users by staff users of the same library. If the updated record represents a staff `Patron` user, then only staff @@ -59,7 +63,7 @@ def needs(self, record=None, *args, **kwargs): if not isinstance(record, Patron): record = Patron(record) - record_roles = record.get('roles', []) + record_roles = record.get("roles", []) # If updated user is a staff member, only user related to the same # library (so only staff members because simple patron are not # related to any library) can perform operation on this user. @@ -85,7 +89,7 @@ def excludes(self, record=None, **kwargs): :param kwargs: extra named arguments. :returns: a list of Needs to disable access. 
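# --- editor's aside (illustrative sketch, not part of the patch) ---------
# `LibraryNeed`, imported above, is defined in rero_ils/modules/permissions.py
# (see the hunk further down) as `partial(Need, "library")`: it curries
# flask_principal's Need tuple so the generators only have to supply a pid.
from functools import partial
from flask_principal import Need

LibraryNeed = partial(Need, "library")
print(LibraryNeed("lib1"))  # Need(method='library', value='lib1')
# access is granted when the current identity provides a matching need
# --- end aside ------------------------------------------------------------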
""" - roles = set(record.get('roles', [])) + roles = set(record.get("roles", [])) if not validate_role_changes(current_user, roles, raise_exc=False): return [any_user] return [] @@ -95,19 +99,16 @@ class PatronPermissionPolicy(RecordPermissionPolicy): """Patron Permission Policy used by the CRUD operations.""" can_search = [AllowedByAction(search_action)] - can_read = [AllowedByActionRestrictByOwnerOrOrganisation( - read_action, - patron_callback=lambda record: record.pid - )] - can_create = [ - AllowedByActionRestrictStaffByManageableLibrary(create_action) - ] - can_update = [ - AllowedByActionRestrictStaffByManageableLibrary(update_action) + can_read = [ + AllowedByActionRestrictByOwnerOrOrganisation( + read_action, patron_callback=lambda record: record.pid + ) ] + can_create = [AllowedByActionRestrictStaffByManageableLibrary(create_action)] + can_update = [AllowedByActionRestrictStaffByManageableLibrary(update_action)] can_delete = [ AllowedByActionRestrictStaffByManageableLibrary(delete_action), - RestrictDeleteDependOnPatronRolesManagement() + RestrictDeleteDependOnPatronRolesManagement(), ] diff --git a/rero_ils/modules/patrons/query.py b/rero_ils/modules/patrons/query.py index efa25f2520..ab3742549e 100644 --- a/rero_ils/modules/patrons/query.py +++ b/rero_ils/modules/patrons/query.py @@ -26,7 +26,12 @@ def patron_expired(): """Create a filter for the patron account is expired.""" + def inner(values): - return Q('range', patron__expiration_date={'lte': datetime.now()}) \ - if 'true' == values[0] else Q() + return ( + Q("range", patron__expiration_date={"lte": datetime.now()}) + if "true" == values[0] + else Q() + ) + return inner diff --git a/rero_ils/modules/patrons/schemas/json.py b/rero_ils/modules/patrons/schemas/json.py index 9f5535cada..c0532b6a17 100644 --- a/rero_ils/modules/patrons/schemas/json.py +++ b/rero_ils/modules/patrons/schemas/json.py @@ -27,8 +27,11 @@ from marshmallow.validate import OneOf from rero_ils.modules.commons.models import NoteTypes -from rero_ils.modules.commons.schemas import NoteSchema, RefSchema, \ - http_applicable_method +from rero_ils.modules.commons.schemas import ( + NoteSchema, + RefSchema, + http_applicable_method, +) from rero_ils.modules.serializers.base import schema_from_context from rero_ils.modules.users.api import User from rero_ils.modules.users.models import UserRole @@ -62,9 +65,9 @@ class PatronMetadataSchemaV1(StrictKeysMixin): pid = SanitizedUnicode() schema = GenFunction( load_only=True, - attribute='$schema', - data_key='$schema', - deserialize=schema_from_template + attribute="$schema", + data_key="$schema", + deserialize=schema_from_template, ) source = SanitizedUnicode() local_codes = fields.List(SanitizedUnicode()) @@ -85,15 +88,14 @@ def remove_user_data(self, data, many, **kwargs): :return Data cleared from user profile information. """ data = data if many else [data] - profile_fields = set( - User.profile_fields + ['username', 'email', 'password']) + profile_fields = set(User.profile_fields + ["username", "email", "password"]) for record in data: for field in profile_fields: record.pop(field, None) return data if many else data[0] - @validates('roles') - @http_applicable_method('POST') + @validates("roles") + @http_applicable_method("POST") def validate_role(self, data, **kwargs): """Validate `roles` attribute through API request. 
@@ -107,7 +109,7 @@ def validate_role(self, data, **kwargs):
validate_role_changes(current_user, set(data))
@validates_schema
- @http_applicable_method('PUT')
+ @http_applicable_method("PUT")
def validate_roles_changes(self, data, **kwargs):
"""Validate `roles` changes through REST API request.
@@ -121,17 +123,16 @@ def validate_roles_changes(self, data, **kwargs):
:raises ValidationError: if an error is detected on the role field
"""
# Load DB record
- db_record = Patron.get_record_by_pid(data.get('pid'))
+ db_record = Patron.get_record_by_pid(data.get("pid"))
if not db_record:
abort(404)
# Check if `roles` of the patron changed. If not, we can stop
# the validation process.
- original_roles = set(db_record.get('roles', []))
- data_roles = set(data.get('roles', []))
- role_changes = original_roles.symmetric_difference(data_roles)
- if not role_changes:
+ original_roles = set(db_record.get("roles", []))
+ data_roles = set(data.get("roles", []))
+ if role_changes := original_roles.symmetric_difference(data_roles):
+ # `roles` field changes, we need to validate this change.
+ validate_role_changes(current_user, role_changes)
+ else:
return
-
- # `roles` field changes, we need to validate this change.
- validate_role_changes(current_user, role_changes)
diff --git a/rero_ils/modules/patrons/serializers.py b/rero_ils/modules/patrons/serializers.py
index 5f1170706e..573b2e678d 100644
--- a/rero_ils/modules/patrons/serializers.py
+++ b/rero_ils/modules/patrons/serializers.py
@@ -18,8 +18,11 @@
"""Patrons serialization."""
from rero_ils.modules.patron_types.api import PatronTypesSearch
-from rero_ils.modules.serializers import JSONSerializer, RecordSchemaJSONV1, \
- search_responsify
+from rero_ils.modules.serializers import (
+ JSONSerializer,
+ RecordSchemaJSONV1,
+ search_responsify,
+)
class PatronJSONSerializer(JSONSerializer):
@@ -28,11 +31,12 @@ class PatronJSONSerializer(JSONSerializer):
def _postprocess_search_aggregations(self, aggregations: dict) -> None:
"""Post-process aggregations from a search result."""
JSONSerializer.enrich_bucket_with_data(
- aggregations.get('patron_type', {}).get('buckets', []),
- PatronTypesSearch, 'name'
+ aggregations.get("patron_type", {}).get("buckets", []),
+ PatronTypesSearch,
+ "name",
)
super()._postprocess_search_aggregations(aggregations)
_json = PatronJSONSerializer(RecordSchemaJSONV1)
-json_patron_search = search_responsify(_json, 'application/rero+json')
+json_patron_search = search_responsify(_json, "application/rero+json")
diff --git a/rero_ils/modules/patrons/tasks.py b/rero_ils/modules/patrons/tasks.py
index 6d2c26f5d9..e17528ad4c 100644
--- a/rero_ils/modules/patrons/tasks.py
+++ b/rero_ils/modules/patrons/tasks.py
@@ -26,9 +26,9 @@
from rero_ils.modules.users.api import User
-from .api import Patron
from ..patron_types.api import PatronType
from ..utils import add_years, set_timestamp
+from .api import Patron
def clean_obsolete_subscriptions():
@@ -38,30 +38,27 @@ def clean_obsolete_subscriptions():
clean the subscription array, keeping only subscriptions with an end
time greater than now().
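# --- editor's aside (illustrative sketch, not part of the patch) ---------
# Back in the validate_roles_changes hunk (schemas/json.py above):
# symmetric_difference returns the roles present in exactly one of the two
# sets, i.e. everything that was added or removed. Role names invented:
original_roles = {"patron", "pro_librarian"}
data_roles = {"patron", "pro_full_permissions"}
if role_changes := original_roles.symmetric_difference(data_roles):
    print(sorted(role_changes))  # ['pro_full_permissions', 'pro_librarian']
# --- end aside ------------------------------------------------------------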
Update patron to commit change """ + def is_obsolete(subscription, end_date=None): """Check if a subscription is obsolete by checking end date.""" if end_date is None: end_date = datetime.now() - sub_end_date = subscription.get('end_date', '1970-01-01') - sub_end_date = datetime.strptime(sub_end_date, '%Y-%m-%d') + sub_end_date = subscription.get("end_date", "1970-01-01") + sub_end_date = datetime.strptime(sub_end_date, "%Y-%m-%d") return sub_end_date < end_date for patron in Patron.patrons_with_obsolete_subscription_pids(): - subscriptions = patron.patron.get('subscriptions', []) + subscriptions = patron.patron.get("subscriptions", []) subscriptions = [sub for sub in subscriptions if not is_obsolete(sub)] - if not subscriptions and 'subscriptions' in patron.patron: - del patron['patron']['subscriptions'] + if not subscriptions and "subscriptions" in patron.patron: + del patron["patron"]["subscriptions"] else: - patron['patron']['subscriptions'] = subscriptions + patron["patron"]["subscriptions"] = subscriptions # DEV NOTE : this update will trigger the listener # `create_subscription_patron_transaction`. This listener will # create a new subscription if needed - patron.update( - User.remove_fields(patron.dumps()), - dbcommit=True, - reindex=True - ) + patron.update(User.remove_fields(patron.dumps()), dbcommit=True, reindex=True) def check_patron_types_and_add_subscriptions(): @@ -81,8 +78,10 @@ def check_patron_types_and_add_subscriptions(): for ptty in PatronType.get_yearly_subscription_patron_types(): patron_no_subsc = Patron.get_patrons_without_subscription(ptty.pid) for patron in patron_no_subsc: - msg = f'Add a subscription for patron#{patron.pid} ... ' \ - 'it shouldn\'t happen !!' + msg = ( + f"Add a subscription for patron#{patron.pid} ... " + "it shouldn't happen !!" + ) current_app.logger.error(msg) start_date = datetime.now() end_date = add_years(start_date, 1) @@ -94,4 +93,4 @@ def task_clear_and_renew_subscriptions(): """Clean obsolete subscriptions and renew subscription if needed.""" clean_obsolete_subscriptions() check_patron_types_and_add_subscriptions() - set_timestamp('clear_and_renew_subscriptions') + set_timestamp("clear_and_renew_subscriptions") diff --git a/rero_ils/modules/patrons/utils.py b/rero_ils/modules/patrons/utils.py index ba5d3f570b..54f07261fe 100644 --- a/rero_ils/modules/patrons/utils.py +++ b/rero_ils/modules/patrons/utils.py @@ -26,6 +26,7 @@ def user_has_patron(user=current_user): """Test if user has a patron.""" from .api import Patron + patrons = Patron.get_patrons_by_user(user=user) return bool(patrons) # true if `patrons` list isn't empty; false otherwise @@ -37,7 +38,8 @@ def get_patron_pid_by_email(email): :return: the first patron pid found corresponding to this email. """ from .api import PatronsSearch - query = PatronsSearch().filter('term', email=email).source(['pid']) + + query = PatronsSearch().filter("term", email=email).source(["pid"]) if hit := next(query.scan(), None): return hit.pid @@ -55,17 +57,16 @@ def validate_role_changes(user, changes, raise_exc=True): # this user can manage reading the configuration setting. If any role # from `role_changes` are not present in manageable role, an error # should be raised. 
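# --- editor's aside (illustrative sketch, not part of the patch) ---------
# `get_patron_pid_by_email` above grabs the first hit of a scan without
# materialising the result set: `next(iterator, None)` yields the first
# item, or None when the iterator is empty (no StopIteration leaks out).
hits = iter(["42"])          # stand-in for query.scan()
if hit := next(hits, None):
    print(hit)               # '42'
print(next(iter([]), None))  # None -- empty result set
# --- end aside ------------------------------------------------------------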
- key_config = 'RERO_ILS_PATRON_ROLES_MANAGEMENT_RESTRICTIONS' + key_config = "RERO_ILS_PATRON_ROLES_MANAGEMENT_RESTRICTIONS" config_roles = current_app.config.get(key_config, {}) manageable_roles = set() for role in user.roles: - manageable_roles = manageable_roles.union( - config_roles.get(role.name, {})) + manageable_roles = manageable_roles.union(config_roles.get(role.name, {})) # If any difference are found between both sets, disallow the operation if role_diffs := changes.difference(manageable_roles): if raise_exc: - error_roles = ', '.join(role_diffs) - raise ValidationError(f'Unable to manage role(s): {error_roles}') + error_roles = ", ".join(role_diffs) + raise ValidationError(f"Unable to manage role(s): {error_roles}") else: return False # No problems were detected @@ -79,20 +80,18 @@ def create_user_from_data(data, send_email=False): :param send_email - send the reset password email to the user :returns: The modified dict. """ - user = User.get_by_username(data.get('username')) + user = User.get_by_username(data.get("username")) if not user: user = User.create(data, send_email) user_id = user.id else: user_id = user.user.id - data['user_id'] = user_id + data["user_id"] = user_id return User.remove_fields(data) -def create_patron_from_data( - data, dbcommit=True, reindex=True, send_email=False -): +def create_patron_from_data(data, dbcommit=True, reindex=True, send_email=False): """Create a patron and a user from a data dict. :param data - dictionary representing a library user @@ -100,9 +99,8 @@ def create_patron_from_data( :returns: - A `Patron` instance """ from .api import Patron + data = create_user_from_data(data, send_email) return Patron.create( - data=data, - delete_pid=False, - dbcommit=dbcommit, - reindex=reindex) + data=data, delete_pid=False, dbcommit=dbcommit, reindex=reindex + ) diff --git a/rero_ils/modules/patrons/views.py b/rero_ils/modules/patrons/views.py index fc84bf1c3d..54967e5dad 100644 --- a/rero_ils/modules/patrons/views.py +++ b/rero_ils/modules/patrons/views.py @@ -33,20 +33,27 @@ from invenio_i18n.ext import current_i18n from invenio_oauth2server.decorators import require_api_auth -from rero_ils.modules.decorators import check_logged_as_librarian, \ - check_logged_as_patron, check_logged_user_authentication +from rero_ils.modules.decorators import ( + check_logged_as_librarian, + check_logged_as_patron, + check_logged_user_authentication, +) from rero_ils.modules.ill_requests.api import ILLRequestsSearch from rero_ils.modules.items.utils import item_pid_to_object -from rero_ils.modules.loans.api import get_loans_stats_by_patron_pid, \ - get_overdue_loans +from rero_ils.modules.loans.api import get_loans_stats_by_patron_pid, get_overdue_loans from rero_ils.modules.loans.utils import sum_for_fees from rero_ils.modules.locations.api import Location from rero_ils.modules.organisations.dumpers import OrganisationLoggedUserDumper -from rero_ils.modules.patron_transactions.utils import \ - get_transactions_total_amount_for_patron +from rero_ils.modules.patron_transactions.utils import ( + get_transactions_total_amount_for_patron, +) from rero_ils.modules.patron_types.api import PatronType, PatronTypesSearch -from rero_ils.modules.patrons.api import Patron, PatronsSearch, \ - current_librarian, current_patrons +from rero_ils.modules.patrons.api import ( + Patron, + PatronsSearch, + current_librarian, + current_patrons, +) from rero_ils.modules.patrons.permissions import get_allowed_roles_management from rero_ils.modules.patrons.utils import user_has_patron 
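# --- editor's aside (illustrative sketch, not part of the patch) ---------
# Toy run of the set logic in validate_role_changes above; the mapping is
# shaped like the RERO_ILS_PATRON_ROLES_MANAGEMENT_RESTRICTIONS setting,
# but these role names are invented for the example:
config_roles = {"pro_librarian": {"patron", "pro_read_only"}}
manageable_roles = set()
for role_name in ["pro_librarian"]:  # roles of the current user
    manageable_roles = manageable_roles.union(config_roles.get(role_name, {}))

changes = {"patron", "pro_full_permissions"}
if role_diffs := changes.difference(manageable_roles):
    print(f"Unable to manage role(s): {', '.join(role_diffs)}")
    # -> Unable to manage role(s): pro_full_permissions
# --- end aside ------------------------------------------------------------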
from rero_ils.modules.permissions import expose_actions_need_for_user @@ -55,46 +62,44 @@ from rero_ils.utils import remove_empties_from_dict api_blueprint = Blueprint( - 'api_patrons', + "api_patrons", __name__, - url_prefix='/patrons', - template_folder='templates', - static_folder='static', + url_prefix="/patrons", + template_folder="templates", + static_folder="static", ) -_PID_REGEX = re.compile(r'NOT\s+pid:\s*(\w+)\s*') +_PID_REGEX = re.compile(r"NOT\s+pid:\s*(\w+)\s*") _EMAIL_REGEX = re.compile(r'email:"\s*(.*?)\s*"') _USERNAME_REGEX = re.compile(r'username:"\s*(.*?)\s*"') -@api_blueprint.route('//circulation_informations', methods=['GET']) +@api_blueprint.route("//circulation_informations", methods=["GET"]) @check_logged_as_librarian def patron_circulation_informations(patron_pid): """Get the circulation statistics and info messages about a patron.""" patron = Patron.get_record_by_pid(patron_pid) if not patron: - abort(404, 'Patron not found') + abort(404, "Patron not found") preview_amount = sum( - sum_for_fees(loan.get_overdue_fees) - for loan in get_overdue_loans(patron.pid) + sum_for_fees(loan.get_overdue_fees) for loan in get_overdue_loans(patron.pid) ) - engaged_amount = get_transactions_total_amount_for_patron( - patron.pid, status='open') + engaged_amount = get_transactions_total_amount_for_patron(patron.pid, status="open") statistics = get_loans_stats_by_patron_pid(patron_pid) - statistics['ill_requests'] = ILLRequestsSearch() \ - .get_ill_requests_total_for_patron(patron_pid) - return jsonify({ - 'fees': { - 'engaged': engaged_amount, - 'preview': preview_amount - }, - 'statistics': statistics, - 'messages': patron.get_circulation_messages() - }) + statistics["ill_requests"] = ILLRequestsSearch().get_ill_requests_total_for_patron( + patron_pid + ) + return jsonify( + { + "fees": {"engaged": engaged_amount, "preview": preview_amount}, + "statistics": statistics, + "messages": patron.get_circulation_messages(), + } + ) -@api_blueprint.route('//overdues/preview', methods=['GET']) +@api_blueprint.route("//overdues/preview", methods=["GET"]) @login_required def patron_overdue_preview_api(patron_pid): """Get all overdue preview linked to a patron.""" @@ -104,150 +109,142 @@ def patron_overdue_preview_api(patron_pid): fees = [(fee[0], fee[1].isoformat()) for fee in fees] total_amount = sum_for_fees(fees) if total_amount > 0: - data.append({ - 'loan': loan.dumps(), - 'fees': {'total': total_amount, 'steps': fees} - }) + data.append( + {"loan": loan.dumps(), "fees": {"total": total_amount, "steps": fees}} + ) return jsonify(data) blueprint = Blueprint( - 'patrons', + "patrons", __name__, - template_folder='templates', - static_folder='static', + template_folder="templates", + static_folder="static", ) -@blueprint.route('/patrons/logged_user', methods=['GET']) +@blueprint.route("/patrons/logged_user", methods=["GET"]) def logged_user(): """Current logged user information in JSON.""" config = current_app.config data = { - 'permissions': expose_actions_need_for_user(), - 'settings': { - 'language': current_i18n.locale.language, - 'globalView': config.get('RERO_ILS_SEARCH_GLOBAL_VIEW_CODE'), - 'baseUrl': get_base_url(), - 'agentLabelOrder': config.get('RERO_ILS_AGENTS_LABEL_ORDER', {}), - 'agentSources': config.get('RERO_ILS_AGENTS_SOURCES', []), - 'operationLogs': config.get('RERO_ILS_ENABLE_OPERATION_LOG', []), - 'documentAdvancedSearch': config.get( - 'RERO_ILS_APP_DOCUMENT_ADVANCED_SEARCH', False), - 'userProfile': { - 'readOnly': config.get( - 'RERO_PUBLIC_USERPROFILES_READONLY', 
False), - 'readOnlyFields': config.get( - 'RERO_PUBLIC_USERPROFILES_READONLY_FIELDS', []), - } - } + "permissions": expose_actions_need_for_user(), + "settings": { + "language": current_i18n.locale.language, + "globalView": config.get("RERO_ILS_SEARCH_GLOBAL_VIEW_CODE"), + "baseUrl": get_base_url(), + "agentLabelOrder": config.get("RERO_ILS_AGENTS_LABEL_ORDER", {}), + "agentSources": config.get("RERO_ILS_AGENTS_SOURCES", []), + "operationLogs": config.get("RERO_ILS_ENABLE_OPERATION_LOG", []), + "documentAdvancedSearch": config.get( + "RERO_ILS_APP_DOCUMENT_ADVANCED_SEARCH", False + ), + "userProfile": { + "readOnly": config.get("RERO_PUBLIC_USERPROFILES_READONLY", False), + "readOnlyFields": config.get( + "RERO_PUBLIC_USERPROFILES_READONLY_FIELDS", [] + ), + }, + }, } if not current_user.is_authenticated: return jsonify(data) user = User.get_record(current_user.id).dumps_metadata() - user['id'] = current_user.id - data = {**data, **user, 'patrons': []} + user["id"] = current_user.id + data = {**data, **user, "patrons": []} for patron in Patron.get_patrons_by_user(current_user): - patron.pop('$schema', None) - patron.pop('user_id', None) - patron.pop('notes', None) - patron['organisation'] = patron.organisation.dumps( - dumper=OrganisationLoggedUserDumper()) - patron['libraries'] = [ - {'pid': pid} - for pid in patron.manageable_library_pids - ] - data['patrons'].append(patron) + patron.pop("$schema", None) + patron.pop("user_id", None) + patron.pop("notes", None) + patron["organisation"] = patron.organisation.dumps( + dumper=OrganisationLoggedUserDumper() + ) + patron["libraries"] = [{"pid": pid} for pid in patron.manageable_library_pids] + data["patrons"].append(patron) return jsonify(data) -@blueprint.route('//patrons/profile', methods=['GET', 'POST']) +@blueprint.route("//patrons/profile", methods=["GET", "POST"]) @check_logged_as_patron @register_menu( blueprint, - 'settings.patron_profile', - _('%(icon)s My loans', icon=''), + "settings.patron_profile", + _("%(icon)s My loans", icon=''), visible_when=user_has_patron, id="my-profile-menu", - order=-1 + order=-1, ) def profile(viewcode): """Patron Profile Page.""" - return render_template('rero_ils/patron_profile.html', viewcode=viewcode) + return render_template("rero_ils/patron_profile.html", viewcode=viewcode) -@blueprint.app_template_filter('format_currency') +@blueprint.app_template_filter("format_currency") def format_currency_filter(value, currency): """Format currency with current locale.""" if value: return format_currency(value, currency) -@api_blueprint.route('/roles_management_permissions', methods=['GET']) +@api_blueprint.route("/roles_management_permissions", methods=["GET"]) @check_logged_as_librarian def get_roles_management_permissions(): """Get the roles that current logged user could manage.""" - return jsonify({ - 'allowed_roles': get_allowed_roles_management() - }) + return jsonify({"allowed_roles": get_allowed_roles_management()}) -@blueprint.app_template_filter('get_patron_from_checkout_item_pid') +@blueprint.app_template_filter("get_patron_from_checkout_item_pid") def get_patron_from_checkout_item_pid(item_pid): """Get patron from a checked out item pid.""" from invenio_circulation.api import get_loan_for_item - patron_pid = get_loan_for_item(item_pid_to_object(item_pid))['patron_pid'] + + patron_pid = get_loan_for_item(item_pid_to_object(item_pid))["patron_pid"] return Patron.get_record_by_pid(patron_pid) -@blueprint.app_template_filter('get_checkout_loan_for_item') 
+@blueprint.app_template_filter("get_checkout_loan_for_item") def get_checkout_loan_for_item(item_pid): """Get patron from a checkout item pid.""" from invenio_circulation.api import get_loan_for_item + return get_loan_for_item(item_pid_to_object(item_pid)) -@blueprint.app_template_filter('get_patron_from_pid') +@blueprint.app_template_filter("get_patron_from_pid") def get_patron_from_pid(patron_pid): """Get patron from pid.""" return Patron.get_record_by_pid(patron_pid) -@blueprint.app_template_filter('get_location_name_from_pid') +@blueprint.app_template_filter("get_location_name_from_pid") def get_location_name_from_pid(location_pid): """Get location from pid.""" - return Location.get_record_by_pid(location_pid)['name'] + return Location.get_record_by_pid(location_pid)["name"] -@api_blueprint.route('//messages', methods=['GET']) +@api_blueprint.route("//messages", methods=["GET"]) @check_logged_user_authentication def get_messages(patron_pid): """Get messages for the current user.""" patron = Patron.get_record_by_pid(patron_pid) messages = patron.get_circulation_messages(True) if patron.pending_subscriptions: - messages.append({ - 'type': 'warning', - 'content': _('You have a pending subscription fee.') - }) - for note in patron.get('notes', []): - if note.get('type') == 'public_note': - messages.append({ - 'type': 'warning', - 'content': note.get('content') - }) - bootstrap_alert_mapping = { - 'error': 'danger' - } + messages.append( + {"type": "warning", "content": _("You have a pending subscription fee.")} + ) + for note in patron.get("notes", []): + if note.get("type") == "public_note": + messages.append({"type": "warning", "content": note.get("content")}) + bootstrap_alert_mapping = {"error": "danger"} for message in messages: - msg_type = message['type'] - message['type'] = bootstrap_alert_mapping.get(msg_type, msg_type) + msg_type = message["type"] + message["type"] = bootstrap_alert_mapping.get(msg_type, msg_type) return jsonify(messages) -@api_blueprint.route('/authenticate', methods=['POST']) +@api_blueprint.route("/authenticate", methods=["POST"]) @check_logged_as_librarian def patron_authenticate(): """Patron authenticate. @@ -257,58 +254,68 @@ def patron_authenticate(): :returns: The patron's information. 
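# --- editor's aside (illustrative sketch, not part of the patch) ---------
# `get_messages` above renames message levels to Bootstrap alert classes;
# dict.get(key, default) leaves any level without an explicit mapping
# untouched:
bootstrap_alert_mapping = {"error": "danger"}
messages = [{"type": "error"}, {"type": "warning"}]
for message in messages:
    msg_type = message["type"]
    message["type"] = bootstrap_alert_mapping.get(msg_type, msg_type)
print([m["type"] for m in messages])  # ['danger', 'warning']
# --- end aside ------------------------------------------------------------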
""" json = flask_request.get_json() - if not json or 'username' not in json or 'password' not in json: + if not json or "username" not in json or "password" not in json: abort(400) - username = json['username'] - password = json['password'] + username = json["username"] + password = json["password"] # load user user = User.get_by_username_or_email(username) if not user: - abort(404, 'User not found.') + abort(404, "User not found.") # load patron organisation_pid = current_librarian.organisation_pid - result = PatronsSearch()\ - .filter('term', user_id=user.user.id)\ - .filter('term', organisation__pid=organisation_pid)\ + result = ( + PatronsSearch() + .filter("term", user_id=user.user.id) + .filter("term", organisation__pid=organisation_pid) .scan() + ) try: patron = next(result).to_dict() except StopIteration: - abort(404, 'User not found.') + abort(404, "User not found.") # Validate password if not security_utils.verify_password(password, user.user.password): - abort(401, 'Identification error.') - patron_data = patron.get('patron', {}) + abort(401, "Identification error.") + patron_data = patron.get("patron", {}) if not patron_data: - abort(404, 'User not found.') - patron_type_result = PatronTypesSearch()\ - .filter('term', pid=patron_data.get('type', {}).get('pid'))\ - .source(includes=['code'])\ + abort(404, "User not found.") + patron_type_result = ( + PatronTypesSearch() + .filter("term", pid=patron_data.get("type", {}).get("pid")) + .source(includes=["code"]) .scan() + ) try: patron_type = next(patron_type_result).to_dict() except StopIteration: abort(404) - return jsonify(remove_empties_from_dict({ - 'fullname': patron.get('first_name') + ' ' + patron.get('last_name'), - 'street': patron.get('street'), - 'postal_code': patron.get('postal_code'), - 'city': patron.get('city'), - 'phone': patron.get('home_phone'), - 'email': patron.get('email'), - 'birth_date': patron.get('birth_date'), - 'patron_type': patron_type.get('code'), - 'expiration_date': patron_data.get('expiration_date'), - 'blocked': patron_data.get('blocked', False), - 'blocked_note': patron_data.get('blocked_note'), - 'notes': list(filter( - lambda note: note.get('type') == 'staff_note', - patron.get('notes', []) - )) - })) - - -@api_blueprint.route('/info', methods=['GET']) + return jsonify( + remove_empties_from_dict( + { + "fullname": patron.get("first_name") + " " + patron.get("last_name"), + "street": patron.get("street"), + "postal_code": patron.get("postal_code"), + "city": patron.get("city"), + "phone": patron.get("home_phone"), + "email": patron.get("email"), + "birth_date": patron.get("birth_date"), + "patron_type": patron_type.get("code"), + "expiration_date": patron_data.get("expiration_date"), + "blocked": patron_data.get("blocked", False), + "blocked_note": patron_data.get("blocked_note"), + "notes": list( + filter( + lambda note: note.get("type") == "staff_note", + patron.get("notes", []), + ) + ), + } + ) + ) + + +@api_blueprint.route("/info", methods=["GET"]) @require_api_auth() def info(): """Get patron info.""" @@ -332,30 +339,31 @@ def get_institution_code(institution): :returns: Code for the institution. 
""" # TODO: make this non rero specific using a configuration - return institution['code'] if institution['code'] != 'nj' else 'rbnj' + return institution["code"] if institution["code"] != "nj" else "rbnj" user = User.get_record(current_user.id).dumps_metadata() # Process for all patrons patrons = copy.deepcopy(current_patrons) for patron in patrons: - patron['institution'] = patron.organisation - patron['patron']['type'] = PatronType.get_record_by_pid( - extracted_data_from_ref(patron['patron']['type']['$ref'])) + patron["institution"] = patron.organisation + patron["patron"]["type"] = PatronType.get_record_by_pid( + extracted_data_from_ref(patron["patron"]["type"]["$ref"]) + ) # Birthdate data = {} - birthdate = current_user.user_profile.get('birth_date') - if 'birthdate' in token_scopes and birthdate: - data['birthdate'] = birthdate + birthdate = current_user.user_profile.get("birth_date") + if "birthdate" in token_scopes and birthdate: + data["birthdate"] = birthdate # Full name name_parts = [ - current_user.user_profile.get('last_name', '').strip(), - current_user.user_profile.get('first_name', '').strip() + current_user.user_profile.get("last_name", "").strip(), + current_user.user_profile.get("first_name", "").strip(), ] - fullname = ', '.join(filter(None, name_parts)) - if 'fullname' in token_scopes and fullname: - data['fullname'] = fullname + fullname = ", ".join(filter(None, name_parts)) + if "fullname" in token_scopes and fullname: + data["fullname"] = fullname # No patrons found for user if not patrons: @@ -364,28 +372,26 @@ def get_institution_code(institution): # Get the main patron patron = get_main_patron(patrons) # Barcode - if patron.get('patron', {}).get('barcode'): - data['barcode'] = patron['patron']['barcode'][0] + if patron.get("patron", {}).get("barcode"): + data["barcode"] = patron["patron"]["barcode"][0] # Patron types - if 'patron_types' in token_scopes: + if "patron_types" in token_scopes: patron_types = [] for patron in patrons: info = {} - patron_type_code = patron.get( - 'patron', {}).get('type', {}).get('code') + patron_type_code = patron.get("patron", {}).get("type", {}).get("code") if patron_type_code: - info['patron_type'] = patron_type_code - if patron.get('institution'): - info['institution'] = get_institution_code( - patron['institution']) - if patron.get('patron', {}).get('expiration_date'): - info['expiration_date'] = datetime.datetime.strptime( - patron['patron']['expiration_date'], - '%Y-%m-%d').isoformat() + info["patron_type"] = patron_type_code + if patron.get("institution"): + info["institution"] = get_institution_code(patron["institution"]) + if patron.get("patron", {}).get("expiration_date"): + info["expiration_date"] = datetime.datetime.strptime( + patron["patron"]["expiration_date"], "%Y-%m-%d" + ).isoformat() if info: patron_types.append(info) if patron_types: - data['patron_types'] = patron_types + data["patron_types"] = patron_types return jsonify(data) @@ -395,20 +401,17 @@ def patron_message(): """Get patron message.""" if not current_patrons: return - data = { - 'show_info': False, - 'data': {} - } + data = {"show_info": False, "data": {}} for patron in current_patrons: - if (patron.is_blocked or patron.is_expired): - data['show_info'] = True + if patron.is_blocked or patron.is_expired: + data["show_info"] = True organisation = patron.organisation - data['data'][organisation['code']] = { - 'name': organisation['name'], - 'blocked': { - 'is_blocked': patron.is_blocked, - 'message': patron.get_blocked_message(public=True) + 
data["data"][organisation["code"]] = { + "name": organisation["name"], + "blocked": { + "is_blocked": patron.is_blocked, + "message": patron.get_blocked_message(public=True), }, - 'is_expired': patron.is_expired + "is_expired": patron.is_expired, } return data diff --git a/rero_ils/modules/permissions.py b/rero_ils/modules/permissions.py index 722a7c55e8..c2e189ca4e 100644 --- a/rero_ils/modules/permissions.py +++ b/rero_ils/modules/permissions.py @@ -23,12 +23,20 @@ from flask import current_app, g, jsonify from flask_principal import ActionNeed, Need -from invenio_access import ActionRoles, ActionSystemRoles, ActionUsers, \ - Permission, action_factory, any_user, current_access +from invenio_access import ( + ActionRoles, + ActionSystemRoles, + ActionUsers, + Permission, + action_factory, + any_user, + current_access, +) from invenio_accounts.models import Role from invenio_db import db -from invenio_records_permissions import \ - RecordPermissionPolicy as _RecordPermissionPolicy +from invenio_records_permissions import ( + RecordPermissionPolicy as _RecordPermissionPolicy, +) from invenio_records_permissions.generators import Disable, Generator from rero_ils.modules.patrons.api import current_librarian, current_patrons @@ -39,19 +47,19 @@ # can create a specific action in this root permission module. # Is the user can manage permissions -permission_management = ActionNeed('permission-management') +permission_management = ActionNeed("permission-management") # Is the user can access to the professional interface -access_ui_admin = action_factory('admin-ui-access') +access_ui_admin = action_factory("admin-ui-access") # Is the user can access to circulation module. This is use for granting access # to the `checkin/checkout` component -access_circulation = action_factory('access-circulation') +access_circulation = action_factory("access-circulation") # Is the user can view the debug button into the admin interface -can_use_debug_mode = action_factory('can-use-debug-mode') +can_use_debug_mode = action_factory("can-use-debug-mode") # Basics access without permission check -allow_access = type('Allow', (), {'can': lambda self: True})() -deny_access = type('Deny', (), {'can': lambda self: False})() +allow_access = type("Allow", (), {"can": lambda self: True})() +deny_access = type("Deny", (), {"can": lambda self: False})() LibraryNeed = partial(Need, "library") @@ -62,9 +70,9 @@ class PermissionContext: """List of permission context.""" - BY_ROLE = 'role' - BY_SYSTEM_ROLE = 'system_role' - BY_USER = 'user' + BY_ROLE = "role" + BY_SYSTEM_ROLE = "system_role" + BY_USER = "user" def manage_role_permissions(method, action_name, role_name): @@ -77,17 +85,16 @@ def manage_role_permissions(method, action_name, role_name): role = Role.query.filter(Role.name == role_name).first() action = current_access.actions.get(action_name) if not role: - raise NameError(f'{role_name} not found') + raise NameError(f"{role_name} not found") if not action: - raise NameError(f'{action_name} not found') + raise NameError(f"{action_name} not found") current_access.delete_action_cache(Permission._cache_key(action)) with db.session.begin_nested(): - ActionRoles\ - .query_by_action(action)\ - .filter(ActionRoles.role == role)\ - .delete(synchronize_session=False) - if method == 'allow': + ActionRoles.query_by_action(action).filter(ActionRoles.role == role).delete( + synchronize_session=False + ) + if method == "allow": db.session.add(ActionRoles.allow(action, role=role)) db.session.commit() @@ -102,72 +109,77 @@ def 
record_permissions(record_pid=None, route_name=None): :return: a JSON object containing permissions for the requested resource. """ try: - rec_class, record_permissions_factory = \ + rec_class, record_permissions_factory = ( get_record_class_and_permissions_from_route(route_name) + ) - permissions = { - 'list': {'can': True}, - 'create': {'can': True} - } + permissions = {"list": {"can": True}, "create": {"can": True}} # To check create and list permissions, we don't need to check if the # record_pid exists. Just call the create permission (if exists) with # `None` value as record. - for action in ['list', 'create']: + for action in ["list", "create"]: if record_permissions_factory[action]: - permissions[action]['can'] = \ - record_permissions_factory[action](record=None).can() + permissions[action]["can"] = record_permissions_factory[action]( + record=None + ).can() # If record_pid is not None, we can check about others permissions # (read, update, delete) if record_pid: - permissions.update({ - 'read': {'can': True}, - 'update': {'can': True}, - 'delete': {'can': True} - }) - - if hasattr(rec_class, 'get_record_by_pid'): + permissions.update( + { + "read": {"can": True}, + "update": {"can": True}, + "delete": {"can": True}, + } + ) + + if hasattr(rec_class, "get_record_by_pid"): # standard ILS case record = rec_class.get_record_by_pid(record_pid) else: # invenio records resources case record = rec_class.pid.resolve(record_pid) if not record: - return jsonify({'status': 'error: Record not found.'}), 404 + return jsonify({"status": "error: Record not found."}), 404 # To check if the record could be update, just call the update # permission factory to get the answer - permissions['read']['can'] = \ - record_permissions_factory['read'](record=record).can() - permissions['update']['can'] = \ - record_permissions_factory['update'](record=record).can() + permissions["read"]["can"] = record_permissions_factory["read"]( + record=record + ).can() + permissions["update"]["can"] = record_permissions_factory["update"]( + record=record + ).can() # We have two behaviors for 'can_delete'. Either the record has # linked resources and so children resources should be deleted # before ; either the `delete_permissions_factory` for this record # should be called. 
If this call returns 'False' then the
# reason_not_to_delete should be "permission denied"
- if hasattr(record, 'can_delete'):
+ if hasattr(record, "can_delete"):
# standard ILS case
can_delete, reasons = record.can_delete
- permissions['delete']['can'] = \
- can_delete and \
- record_permissions_factory['delete'](record=record).can()
- if not permissions['delete']['can'] and not reasons:
+ permissions["delete"]["can"] = (
+ can_delete
+ and record_permissions_factory["delete"](record=record).can()
+ )
+ if not permissions["delete"]["can"] and not reasons:
# in this case, it's because the config delete factory returns
# `False`, so the reason is 'Permission denied'
- reasons = {'others': {'permission': 'permission denied'}}
- permissions['delete']['reasons'] = reasons
+ reasons = {"others": {"permission": "permission denied"}}
+ permissions["delete"]["reasons"] = reasons
else:
# invenio records resource case
- permissions['delete']['can'] = \
- record_permissions_factory['delete'](record=record).can()
+ permissions["delete"]["can"] = record_permissions_factory["delete"](
+ record=record
+ ).can()
return jsonify(permissions)
except Exception as error:
# uncomment this line when you have trouble with the permissions API
# raise error
- return jsonify({'status': 'error: Bad request'}), 400
+ return jsonify({"status": "error: Bad request"}), 400
def has_superuser_access():
@@ -175,7 +187,7 @@ def has_superuser_access():
This function is used in app context and can be called in all templates.
"""
- if current_app.config.get('RERO_ILS_APP_DISABLE_PERMISSION_CHECKS'):
+ if current_app.config.get("RERO_ILS_APP_DISABLE_PERMISSION_CHECKS"):
return True
# TODO: create a super_user role
# ... superuser_access_permission = Permission(ActionNeed('superuser'))
@@ -197,13 +209,10 @@ def expose_actions_need_for_user():
"""
actions = current_access.actions
# filter needs to keep only the relevant ones
- config = current_app.config.get('RERO_ILS_EXPOSED_NEED_FILTER')
- if regexp := config.get('regexp'):
+ config = current_app.config.get("RERO_ILS_EXPOSED_NEED_FILTER")
+ if regexp := config.get("regexp"):
regexp = re.compile(regexp)
- actions = {
- key: need for key, need in actions.items()
- if regexp.match(key)
- }
+ actions = {key: need for key, need in actions.items() if regexp.match(key)}
# check each need against the current logged user profile.
actions = [key for key, need in actions.items() if Permission(need).can()] return actions @@ -220,36 +229,33 @@ def expose_action_needs_by_role(roles=None): def _perform_system_role(role_names): if not role_names: return - query = ActionSystemRoles.query \ - .filter(ActionSystemRoles.role_name.in_(role_names)) \ - .all() + query = ActionSystemRoles.query.filter( + ActionSystemRoles.role_name.in_(role_names) + ).all() for row in query: - matrix.setdefault(row.role_name, { - 'type': 'system_role', - 'actions': deepcopy(actions_list) - })['actions'][row.action] = not row.exclude + matrix.setdefault( + row.role_name, + {"type": "system_role", "actions": deepcopy(actions_list)}, + )["actions"][row.action] = not row.exclude def _perform_account_roles(role_names): if not role_names: return roles_query = Role.query.filter(Role.name.in_(role_names)) role_ids = [r.id for r in roles_query.all()] - query = ActionRoles.query \ - .filter(ActionRoles.role_id.in_(role_ids)) \ - .all() + query = ActionRoles.query.filter(ActionRoles.role_id.in_(role_ids)).all() for row in query: - matrix.setdefault(row.role.name, { - 'type': 'role', - 'actions': deepcopy(actions_list) - })['actions'][row.action] = not row.exclude + matrix.setdefault( + row.role.name, {"type": "role", "actions": deepcopy(actions_list)} + )["actions"][row.action] = not row.exclude actions_list = {action: None for action in current_access.actions} matrix = {} roles_types = {} for role in roles: roles_types.setdefault(role[1], []).append(role[0]) - _perform_system_role(roles_types.get('system_role')) - _perform_account_roles(roles_types.get('role')) + _perform_system_role(roles_types.get("system_role")) + _perform_account_roles(roles_types.get("role")) return matrix @@ -264,49 +270,38 @@ def expose_action_needs_by_patron(patron): # - each user role # - 'any_user' and 'authenticated_user' roles (system_role) # - special entry for specific user permissions. - base_reasons = {role: None for role in patron.get('roles', [])} - base_reasons.update({ - 'user': None, - 'any_user': None, - 'authenticated_user': None - }) + base_reasons = {role: None for role in patron.get("roles", [])} + base_reasons.update({"user": None, "any_user": None, "authenticated_user": None}) permissions_matrix = { - action: { - 'name': action, - 'can': False, - 'reasons': deepcopy(base_reasons) - } + action: {"name": action, "can": False, "reasons": deepcopy(base_reasons)} for action in current_access.actions } # Load specific ActionRoles permissions from Invenio-access and store them # into the permission_matrix. - roles_query = Role.query.filter(Role.name.in_(patron.get('roles', []))) + roles_query = Role.query.filter(Role.name.in_(patron.get("roles", []))) role_ids = [r.id for r in roles_query.all()] - query = ActionRoles.query \ - .filter(ActionRoles.role_id.in_(role_ids)) + query = ActionRoles.query.filter(ActionRoles.role_id.in_(role_ids)) for row in query.all(): with contextlib.suppress(KeyError): - permissions_matrix[row.action]['reasons'][row.role.name] = \ - not row.exclude + permissions_matrix[row.action]["reasons"][row.role.name] = not row.exclude # Load specific ActionUsers permission from Invenio-access and store them # into the permission_matrix. 
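# --- editor's aside (illustrative sketch, not part of the patch) ---------
# In expose_action_needs_by_patron (just below), each action's `reasons`
# dict holds True (a role grants), False (a role explicitly excludes) or
# None (no opinion); the final loop reduces that to a single boolean:
# allowed only if at least one True and no False.
for reasons in (
    {"role_a": True, "any_user": None},  # -> True
    {"role_a": True, "user": False},     # -> False, exclusion wins
    {"role_a": None, "any_user": None},  # -> False, nobody grants
):
    values = set(reasons.values())
    print(True in values and False not in values)
# --- end aside ------------------------------------------------------------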
- query = ActionUsers.query \ - .filter(ActionUsers.user_id == patron.user.id) + query = ActionUsers.query.filter(ActionUsers.user_id == patron.user.id) for row in query.all(): - permissions_matrix[row.action]['reasons']['user'] = not row.exclude + permissions_matrix[row.action]["reasons"]["user"] = not row.exclude # Load specific ActionSystemRoles permissions form Invenio-access and store # them into the permission_matrix. - system_roles_to_check = ['any_user', 'authenticated_user'] - query = ActionSystemRoles.query\ - .filter(ActionSystemRoles.role_name.in_(system_roles_to_check)) + system_roles_to_check = ["any_user", "authenticated_user"] + query = ActionSystemRoles.query.filter( + ActionSystemRoles.role_name.in_(system_roles_to_check) + ) for row in query.all(): with contextlib.suppress(KeyError): - permissions_matrix[row.action]['reasons'][row.role_name] = \ - not row.exclude + permissions_matrix[row.action]["reasons"][row.role_name] = not row.exclude # Compute general permissions # Now we load each permission data, search into the reasons list to @@ -316,8 +311,8 @@ def expose_action_needs_by_patron(patron): # 3) Otherwise (all is null - no roles give specific access) --> global # is false (this is already the default value of 'can') for permission in permissions_matrix.values(): - values = set(permission['reasons'].values()) - permission['can'] = True in values and False not in values + values = set(permission["reasons"].values()) + permission["can"] = True in values and False not in values return [v for k, v in permissions_matrix.items()] @@ -393,7 +388,7 @@ def __init__(self, action, callback=None): to the record that we need to check. By default, the ``library_pid`` record attribute will be used. """ - self.callback = callback or (lambda r: getattr(r, 'library_pid', None)) + self.callback = callback or (lambda r: getattr(r, "library_pid", None)) super().__init__(action) def needs(self, record=None, *args, **kwargs): @@ -428,8 +423,9 @@ def __init__(self, action, patron_callback=None, record_mapper=None): ``patron_pid`` record attribute will be used. :param record_mapper: A function used to transform the record. """ - self.patron_callback = patron_callback or \ - (lambda r: getattr(r, 'patron_pid', None)) + self.patron_callback = patron_callback or ( + lambda r: getattr(r, "patron_pid", None) + ) self.record_mapper = record_mapper super().__init__(action) @@ -448,8 +444,7 @@ def needs(self, record=None, *args, **kwargs): required_need.add(OwnerNeed(self.patron_callback(record))) if current_librarian: required_need.add(OrganisationNeed(record.organisation_pid)) - if required_need and not required_need.intersection( - g.identity.provides): + if required_need and not required_need.intersection(g.identity.provides): return [] return super().needs(record, **kwargs) @@ -469,8 +464,9 @@ def __init__(self, record_cls, callback=None): otherwise True. """ self.record_cls = record_cls - self.is_rollovered = callback \ - or (lambda record: not getattr(record, 'is_active', True)) + self.is_rollovered = callback or ( + lambda record: not getattr(record, "is_active", True) + ) def excludes(self, record=None, **kwargs): """Disallow operation check. 
diff --git a/rero_ils/modules/providers.py b/rero_ils/modules/providers.py index 2091caab46..4b69aee74d 100644 --- a/rero_ils/modules/providers.py +++ b/rero_ils/modules/providers.py @@ -32,11 +32,11 @@ def append_fixtures_new_identifiers(identifier, pids, pid_type, limit=100000): for idx, pid in enumerate(pids, 1): db.session.add(identifier(recid=pid)) if idx % limit == 0: - click.echo(f'DB commit append: {idx}') + click.echo(f"DB commit append: {idx}") db.session.commit() db.session.commit() identifier._set_sequence(identifier.max()) - click.echo(f'DB commit append: {idx}') + click.echo(f"DB commit append: {idx}") class Provider(BaseProvider): @@ -64,17 +64,17 @@ class Provider(BaseProvider): @classmethod def create(cls, object_type=None, object_uuid=None, **kwargs): """Create a new identifier.""" - pid_value = kwargs.get('pid_value') + pid_value = kwargs.get("pid_value") if not pid_value: - kwargs['pid_value'] = str(cls.identifier.next()) + kwargs["pid_value"] = str(cls.identifier.next()) # TODO: to insert pid to the identifier table, enable if needed try: - return cls.get(kwargs['pid_value'], cls.pid_type) + return cls.get(kwargs["pid_value"], cls.pid_type) except PIDDoesNotExistError: - kwargs.setdefault('status', cls.default_status) + kwargs.setdefault("status", cls.default_status) if object_type and object_uuid: - kwargs['status'] = PIDStatus.REGISTERED + kwargs["status"] = PIDStatus.REGISTERED return super().create( object_type=object_type, object_uuid=object_uuid, **kwargs ) diff --git a/rero_ils/modules/receivers.py b/rero_ils/modules/receivers.py index 2c33b504e4..ab01d568f4 100644 --- a/rero_ils/modules/receivers.py +++ b/rero_ils/modules/receivers.py @@ -32,30 +32,30 @@ def process_boosting(index_name, config): """ config = config.copy() try: - config.remove('*') + config.remove("*") except ValueError: # nothing to replace return config # list of existing fields without the boosting factor - existing_fields = [re.sub(r'\^\d+$', '', field) for field in config] + existing_fields = [re.sub(r"\^\d+$", "", field) for field in config] doc_mappings = list(current_search.aliases[index_name].values()) assert len(doc_mappings) == 1 mapping_path = doc_mappings.pop() with open(mapping_path, "r") as body: mapping = json.load(body) fields = [] - for prop, conf in mapping['mappings']['properties'].items(): + for prop, conf in mapping["mappings"]["properties"].items(): field = prop # fields for multiple mapping configurations - if conf.get('fields'): - tmp_fields = [field, f'{field}.*'] + if conf.get("fields"): + tmp_fields = [field, f"{field}.*"] for tmp_f in tmp_fields: if tmp_f not in existing_fields: fields.append(tmp_f) continue # add .* for field with children - if conf.get('properties'): - field = f'{field}.*' + if conf.get("properties"): + field = f"{field}.*" # do nothing for existing fields if field in existing_fields: continue @@ -71,6 +71,5 @@ def set_boosting_query_fields(sender, app=None, **kwargs): """ # required to access the flask extension with app.app_context(): - for key, value in app.config['RERO_ILS_QUERY_BOOSTING'].items(): - app.config['RERO_ILS_QUERY_BOOSTING'][key] = \ - process_boosting(key, value) + for key, value in app.config["RERO_ILS_QUERY_BOOSTING"].items(): + app.config["RERO_ILS_QUERY_BOOSTING"][key] = process_boosting(key, value) diff --git a/rero_ils/modules/record_extensions.py b/rero_ils/modules/record_extensions.py index 49be5956f7..b829e223dc 100644 --- a/rero_ils/modules/record_extensions.py +++ b/rero_ils/modules/record_extensions.py @@ -30,13
+30,13 @@ def _add_org_and_lib(record): :param record: the record metadata. """ - location_pid = extracted_data_from_ref(record.get('location')) + location_pid = extracted_data_from_ref(record.get("location")) # try on the elasticsearch location index try: es_loc = next( LocationsSearch() - .filter('term', pid=location_pid) - .source(['organisation', 'library']) + .filter("term", pid=location_pid) + .source(["organisation", "library"]) .scan() ) organisation_pid = es_loc.organisation.pid @@ -47,12 +47,10 @@ def _add_org_and_lib(record): library_pid = library.pid organisation_pid = library.organisation_pid base_url = get_base_url() - record['organisation'] = { - '$ref': f'{base_url}/api/organisations/{organisation_pid}' - } - record['library'] = { - '$ref': f'{base_url}/api/libraries/{library_pid}' + record["organisation"] = { + "$ref": f"{base_url}/api/organisations/{organisation_pid}" } + record["library"] = {"$ref": f"{base_url}/api/libraries/{library_pid}"} class OrgLibRecordExtension(RecordExtension): @@ -64,7 +62,7 @@ def pre_create(self, record): :param record: the record metadata. """ # do nothing if already exists - if record.get('organisation') and record.get('library'): + if record.get("organisation") and record.get("library"): return _add_org_and_lib(record) # required for validation diff --git a/rero_ils/modules/selfcheck/admin.py b/rero_ils/modules/selfcheck/admin.py index 93082466be..08559f2b5f 100644 --- a/rero_ils/modules/selfcheck/admin.py +++ b/rero_ils/modules/selfcheck/admin.py @@ -39,40 +39,51 @@ class SelfcheckTerminalView(ModelView): can_delete = True list_all = ( - 'id', 'name', 'access_token', 'organisation_pid', 'library_pid', - 'location_pid', 'active', 'last_login_at', 'last_login_ip', + "id", + "name", + "access_token", + "organisation_pid", + "library_pid", + "location_pid", + "active", + "last_login_at", + "last_login_ip", ) - column_list = \ - column_searchable_list = \ - column_sortable_list = \ - column_details_list = \ - list_all + column_list = column_searchable_list = column_sortable_list = ( + column_details_list + ) = list_all - form_columns = ('name', 'access_token', 'location_pid', 'active') + form_columns = ("name", "access_token", "location_pid", "active") form_args = dict( - name=dict(label='Name', validators=[DataRequired()]), - access_token=dict(label='Access token', validators=[DataRequired()]), + name=dict(label="Name", validators=[DataRequired()]), + access_token=dict(label="Access token", validators=[DataRequired()]), location_pid=dict( - label='Location', + label="Location", validators=[DataRequired()], - choices=LocalProxy(lambda: [ - (opts.get('location_pid'), opts.get('location_name')) for opts - in locations_form_options() - ]), + choices=LocalProxy( + lambda: [ + (opts.get("location_pid"), opts.get("location_name")) + for opts in locations_form_options() + ] + ), ), ) - column_filters = ('id', 'name', 'active', 'organisation_pid', - 'library_pid', 'location_pid', 'last_login_at') + column_filters = ( + "id", + "name", + "active", + "organisation_pid", + "library_pid", + "location_pid", + "last_login_at", + ) - column_default_sort = ('last_login_at', True) + column_default_sort = ("last_login_at", True) - form_overrides = { - 'location_pid': SelectField, - 'last_login_at': DateTimeField - } + form_overrides = {"location_pid": SelectField, "last_login_at": DateTimeField} def on_model_change(self, form, model, is_created): """Fill organisation_pid when saving. 
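A note on the choices=LocalProxy(...) construction reformatted above: wrapping the option list in a LocalProxy defers the location lookup until the form is actually rendered, so the options reflect the current data on every request instead of being frozen at import time, when no application context exists yet. A small standalone sketch of that behaviour (illustrative data; assumes only werkzeug.local.LocalProxy):

from werkzeug.local import LocalProxy

calls = []

def load_choices():
    calls.append(1)  # count how often the loader runs
    return [("loc1", "Main library")]  # illustrative option list

choices = LocalProxy(load_choices)
list(choices)
list(choices)
assert len(calls) == 2  # re-evaluated on every access, never cached
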
@@ -86,37 +97,38 @@ def on_model_change(self, form, model, is_created): """ location_pid = form.location_pid.data location = LocationsSearch().get_record_by_pid(location_pid) - model.organisation_pid = location.organisation['pid'] - model.library_pid = location.library['pid'] + model.organisation_pid = location.organisation["pid"] + model.library_pid = location.library["pid"] def locations_form_options(): """Get locations form options.""" location_opts = [] for org in Organisation.get_all(): - query = LocationsSearch() \ - .filter('term', organisation__pid=org.pid) \ - .exclude('term', is_online=True) \ - .sort({'code': {'order': 'asc'}}) \ + query = ( + LocationsSearch() + .filter("term", organisation__pid=org.pid) + .exclude("term", is_online=True) + .sort({"code": {"order": "asc"}}) .params(preserve_order=True) + ) for location in query.scan(): - org_name = org.get('name'), - loc_code = location.code, + org_name = org.get("name") + loc_code = location.code loc_name = location.name - location_opts.append({ - 'location_pid': location.pid, - 'location_name': f'{org_name} - {loc_code} ({loc_name})' - }) + location_opts.append( + { + "location_pid": location.pid, + "location_name": f"{org_name} - {loc_code} ({loc_name})", + } + ) return location_opts selfcheck_terminal_adminview = { - 'model': SelfcheckTerminal, - 'modelview': SelfcheckTerminalView, - 'category': _('Selfcheck Terminal Management'), + "model": SelfcheckTerminal, + "modelview": SelfcheckTerminalView, + "category": _("Selfcheck Terminal Management"), } -__all__ = ( - 'selfcheck_terminal_adminview', - 'SelfcheckTerminalView' -) +__all__ = ("selfcheck_terminal_adminview", "SelfcheckTerminalView") diff --git a/rero_ils/modules/selfcheck/api.py b/rero_ils/modules/selfcheck/api.py index 7a1bcc12e3..d26dab6749 100644 --- a/rero_ils/modules/selfcheck/api.py +++ b/rero_ils/modules/selfcheck/api.py @@ -23,29 +23,42 @@ from flask import current_app from flask_babel import force_locale from flask_babel import gettext as _ -from invenio_circulation.errors import CirculationException, \ - ItemNotAvailableError +from invenio_circulation.errors import CirculationException, ItemNotAvailableError from rero_ils.modules.documents.api import Document from rero_ils.modules.documents.extensions import TitleExtension -from rero_ils.modules.errors import ItemBarcodeNotFound, NoCirculationAction, \ - PatronBarcodeNotFound +from rero_ils.modules.errors import ( + ItemBarcodeNotFound, + NoCirculationAction, + PatronBarcodeNotFound, +) from rero_ils.modules.items.api import Item from rero_ils.modules.items.models import ItemNoteTypes from rero_ils.modules.libraries.api import Library -from rero_ils.modules.loans.api import Loan, \ - get_loans_by_item_pid_by_patron_pid, get_loans_by_patron_pid +from rero_ils.modules.loans.api import ( + Loan, + get_loans_by_item_pid_by_patron_pid, + get_loans_by_patron_pid, +) from rero_ils.modules.loans.models import LoanAction, LoanState from rero_ils.modules.patron_transactions.api import PatronTransaction -from rero_ils.modules.patron_transactions.utils import \ - get_last_transaction_by_loan_pid, get_transactions_pids_for_patron, \ - get_transactions_total_amount_for_patron +from rero_ils.modules.patron_transactions.utils import ( + get_last_transaction_by_loan_pid, + get_transactions_pids_for_patron, + get_transactions_total_amount_for_patron, +) from rero_ils.modules.patrons.api import Patron from .models import SelfcheckTerminal -from .utils import authorize_selfckeck_terminal, authorize_selfckeck_user, \ -
check_sip2_module, format_patron_address, get_patron_status, \ - map_item_circulation_status, map_media_type +from .utils import ( + authorize_selfckeck_terminal, + authorize_selfckeck_user, + check_sip2_module, + format_patron_address, + get_patron_status, + map_item_circulation_status, + map_media_type, +) def selfcheck_login(name, access_token, **kwargs): @@ -63,12 +76,12 @@ def selfcheck_login(name, access_token, **kwargs): if staffer: library = Library.get_record_by_pid(terminal.library_pid) return { - 'authenticated': terminal.active, - 'terminal': terminal.name, - 'transaction_user_id': staffer.pid, - 'institution_id': terminal.organisation_pid, - 'library_name': library.get('name'), - 'library_language': library.get('communication_language') + "authenticated": terminal.active, + "terminal": terminal.name, + "transaction_user_id": staffer.pid, + "institution_id": terminal.organisation_pid, + "library_name": library.get("name"), + "library_language": library.get("communication_language"), } @@ -80,7 +93,8 @@ def validate_patron_account(barcode=None, **kwargs): :return: ``True`` if patron exists or ``False``. """ patron = Patron.get_patron_by_barcode( - barcode=barcode, org_pid=kwargs.get('institution_id')) + barcode=barcode, org_pid=kwargs.get("institution_id") + ) return patron and patron.is_patron @@ -93,7 +107,8 @@ def authorize_patron(barcode, password, **kwargs): :return: ``True`` if patron is authorized or ``False``. """ patron = Patron.get_patron_by_barcode( - barcode=barcode, org_pid=kwargs.get('institution_id')) + barcode=barcode, org_pid=kwargs.get("institution_id") + ) if patron and patron.is_patron: # User email is an optional field. When the user has no email address, # we use the username as login. @@ -111,9 +126,9 @@ def system_status(terminal, **kwargs): """ terminal = SelfcheckTerminal().find_terminal(name=terminal) return { - 'authenticated': terminal.active, - 'terminal': terminal.name, - 'institution_id': terminal.library_pid + "authenticated": terminal.active, + "terminal": terminal.name, + "institution_id": terminal.library_pid, } @@ -127,22 +142,22 @@ def enable_patron(barcode, **kwargs): # check if invenio_sip2 module is present if check_sip2_module(): from invenio_sip2.models import SelfcheckEnablePatron - institution_id = kwargs.get('institution_id') - patron = Patron.get_patron_by_barcode( - barcode=barcode, org_pid=institution_id) + + institution_id = kwargs.get("institution_id") + patron = Patron.get_patron_by_barcode(barcode=barcode, org_pid=institution_id) if patron: return SelfcheckEnablePatron( patron_status=get_patron_status(patron), - language=patron.patron.get('communication_language', 'und'), + language=patron.patron.get("communication_language", "und"), institution_id=patron.organisation_pid, - patron_id=patron.patron.get('barcode'), - patron_name=patron.formatted_name + patron_id=patron.patron.get("barcode"), + patron_name=patron.formatted_name, ) else: return SelfcheckEnablePatron( patron_id=barcode, institution_id=institution_id, - screen_messages=[_('Error encountered: patron not found')] + screen_messages=[_("Error encountered: patron not found")], ) @@ -156,34 +171,38 @@ def patron_status(barcode, **kwargs): # check if invenio_sip2 module is present if check_sip2_module(): from invenio_sip2.models import SelfcheckPatronStatus - institution_id = kwargs.get('institution_id') - language = kwargs.get('language', current_app.config - .get('BABEL_DEFAULT_LANGUAGE')) + + institution_id = kwargs.get("institution_id") + language = kwargs.get(
"language", current_app.config.get("BABEL_DEFAULT_LANGUAGE") + ) # Temporarily overrides the currently selected locale. # force_locale is allowed to work outside of application context with force_locale(language): patron = Patron.get_patron_by_barcode( - barcode=barcode, org_pid=institution_id) + barcode=barcode, org_pid=institution_id + ) if patron: patron_status_response = SelfcheckPatronStatus( patron_status=get_patron_status(patron), - language=patron.get('communication_language', 'und'), + language=patron.get("communication_language", "und"), patron_id=barcode, patron_name=patron.formatted_name, institution_id=patron.organisation_pid, - currency_type=patron.organisation.get('default_currency'), - valid_patron=patron.is_patron + currency_type=patron.organisation.get("default_currency"), + valid_patron=patron.is_patron, ) fee_amount = get_transactions_total_amount_for_patron( - patron.pid, status='open', with_subscription=False) - patron_status_response['fee_amount'] = '%.2f' % fee_amount + patron.pid, status="open", with_subscription=False + ) + patron_status_response["fee_amount"] = "%.2f" % fee_amount return patron_status_response else: return SelfcheckPatronStatus( patron_id=barcode, institution_id=institution_id, - screen_messages=[_('Error encountered: patron not found')] + screen_messages=[_("Error encountered: patron not found")], ) @@ -197,14 +216,17 @@ def patron_information(barcode, **kwargs): # check if invenio_sip2 module is present if check_sip2_module(): from invenio_sip2.models import SelfcheckPatronInformation - institution_id = kwargs.get('institution_id') - language = kwargs.get('language', current_app.config - .get('BABEL_DEFAULT_LANGUAGE')) + + institution_id = kwargs.get("institution_id") + language = kwargs.get( + "language", current_app.config.get("BABEL_DEFAULT_LANGUAGE") + ) # Temporarily overrides the currently selected locale. 
# force_locale is allowed to work outside of application context with force_locale(language): patron = Patron.get_patron_by_barcode( - barcode=barcode, org_pid=institution_id) + barcode=barcode, org_pid=institution_id + ) if patron: patron_dumps = patron.dumps() patron_account_information = SelfcheckPatronInformation( @@ -212,61 +234,67 @@ def patron_information(barcode, **kwargs): patron_name=patron.formatted_name, patron_status=get_patron_status(patron), institution_id=patron.organisation_pid, - language=patron.get( - 'patron', {}).get('communication_language', 'und'), - email=patron.get('patron', {}).get( - 'additional_communication_email', - patron_dumps.get('email')), - home_phone=patron_dumps.get('home_phone'), + language=patron.get("patron", {}).get( + "communication_language", "und" + ), + email=patron.get("patron", {}).get( + "additional_communication_email", patron_dumps.get("email") + ), + home_phone=patron_dumps.get("home_phone"), home_address=format_patron_address(patron), - currency_type=patron.organisation.get('default_currency'), - valid_patron=patron.is_patron + currency_type=patron.organisation.get("default_currency"), + valid_patron=patron.is_patron, ) filter_states = [ LoanState.PENDING, LoanState.ITEM_AT_DESK, LoanState.ITEM_IN_TRANSIT_FOR_PICKUP, - LoanState.ITEM_ON_LOAN + LoanState.ITEM_ON_LOAN, ] - sip2_summary_fields = current_app.config \ - .get('SIP2_SUMMARY_FIELDS') + sip2_summary_fields = current_app.config.get("SIP2_SUMMARY_FIELDS") for loan in get_loans_by_patron_pid(patron.pid, filter_states): item = Item.get_record_by_pid(loan.item_pid) - if field := sip2_summary_fields.get(loan['state']): - patron_account_information.setdefault(field, []) \ - .append(item.get('barcode')) - if loan['state'] == LoanState.ITEM_ON_LOAN \ - and loan.is_loan_overdue(): - patron_account_information\ - .setdefault('overdue_items', [])\ - .append(item.get('barcode')) + if field := sip2_summary_fields.get(loan["state"]): + patron_account_information.setdefault(field, []).append( + item.get("barcode") + ) + if ( + loan["state"] == LoanState.ITEM_ON_LOAN + and loan.is_loan_overdue() + ): + patron_account_information.setdefault( + "overdue_items", [] + ).append(item.get("barcode")) fee_amount = get_transactions_total_amount_for_patron( - patron.pid, status='open', with_subscription=False) - patron_account_information['fee_amount'] = '%.2f' % fee_amount + patron.pid, status="open", with_subscription=False + ) + patron_account_information["fee_amount"] = "%.2f" % fee_amount # check for fine items if fee_amount > 0: # Check if fine items exist transaction_pids = get_transactions_pids_for_patron( - patron.pid, status='open') + patron.pid, status="open" + ) for transaction_pid in transaction_pids: # TODO: return screen message to notify patron if there # are other open transactions - transaction = PatronTransaction \ - .get_record_by_pid(transaction_pid) + transaction = PatronTransaction.get_record_by_pid( + transaction_pid + ) if transaction.loan_pid: loan = Loan.get_record_by_pid(transaction.loan_pid) item = Item.get_record_by_pid(loan.item_pid) - patron_account_information\ - .setdefault('fine_items', []) \ - .append(item.get('barcode')) + patron_account_information.setdefault( + "fine_items", [] + ).append(item.get("barcode")) return patron_account_information else: return SelfcheckPatronInformation( patron_id=barcode, institution_id=institution_id, - screen_messages=[_('Error encountered: patron not found')] + screen_messages=[_("Error encountered: patron not found")], ) @@ -280,12 
+308,17 @@ def item_information(item_barcode, **kwargs): """ # check if invenio_sip2 module is present if check_sip2_module(): - from invenio_sip2.models import SelfcheckFeeType, \ - SelfcheckItemInformation, SelfcheckSecurityMarkerType - org_pid = kwargs.get('institution_id') + from invenio_sip2.models import ( + SelfcheckFeeType, + SelfcheckItemInformation, + SelfcheckSecurityMarkerType, + ) + + org_pid = kwargs.get("institution_id") item = Item.get_item_by_barcode(item_barcode, org_pid) - language = kwargs.get('language', current_app.config - .get('BABEL_DEFAULT_LANGUAGE')) + language = kwargs.get( + "language", current_app.config.get("BABEL_DEFAULT_LANGUAGE") + ) # Temporarily overrides the currently selected locale. # force_locale is allowed to work outside of application context with force_locale(language): @@ -294,55 +327,54 @@ def item_information(item_barcode, **kwargs): location = item.get_location() item_information = SelfcheckItemInformation( - item_id=item.get('barcode'), - title_id=TitleExtension.format_text(document.get('title')), - circulation_status=map_item_circulation_status( - item.status), + item_id=item.get("barcode"), + title_id=TitleExtension.format_text(document.get("title")), + circulation_status=map_item_circulation_status(item.status), fee_type=SelfcheckFeeType.OTHER, - security_marker=SelfcheckSecurityMarkerType.OTHER - ) - item_information['media_type'] = map_media_type( - document['type'][0]['main_type'] - ) - item_information['hold_queue_length'] = \ - item.number_of_requests() - item_information['owner'] = location.get_library().get('name') - item_information['permanent_location'] = location.get('name') - item_information['current_location'] = \ - item.get_last_location().get('name') - item_information['fee_type'] = SelfcheckFeeType.OVERDUE + security_marker=SelfcheckSecurityMarkerType.OTHER, + ) + item_information["media_type"] = map_media_type( + document["type"][0]["main_type"] + ) + item_information["hold_queue_length"] = item.number_of_requests() + item_information["owner"] = location.get_library().get("name") + item_information["permanent_location"] = location.get("name") + item_information["current_location"] = item.get_last_location().get( + "name" + ) + item_information["fee_type"] = SelfcheckFeeType.OVERDUE # get loan for item loan_pid = Item.get_loan_pid_with_item_on_loan(item.pid) if loan_pid: loan = Loan.get_record_by_pid(loan_pid) if loan: # format the end date according to the selfcheck language - item_information['due_date'] = loan['end_date'] + item_information["due_date"] = loan["end_date"] transaction = get_last_transaction_by_loan_pid( - loan_pid=loan.pid, status='open') + loan_pid=loan.pid, status="open" + ) if transaction: - item_information['fee_amount'] = \ - '%.2f' % transaction.total_amount - item_information['currency_type'] = \ - transaction.currency - item_information.get('screen_messages', []) \ - .append(_('overdue')) + item_information["fee_amount"] = ( + "%.2f" % transaction.total_amount + ) + item_information["currency_type"] = transaction.currency + item_information.get("screen_messages", []).append( + _("overdue") + ) # public note public_note = item.get_note(ItemNoteTypes.PUBLIC) if public_note: - item_information.get('screen_messages', []) \ - .append(public_note) + item_information.get("screen_messages", []).append(public_note) return item_information else: return SelfcheckItemInformation( item_id=item_barcode, - screen_messages=[_('Error encountered: item not found')] + screen_messages=[_("Error encountered: item not
found")], ) -def selfcheck_checkout(transaction_user_pid, item_barcode, patron_barcode, - **kwargs): +def selfcheck_checkout(transaction_user_pid, item_barcode, patron_barcode, **kwargs): """SIP2 Handler to perform checkout. perform checkout action received from the selfcheck. @@ -355,30 +387,29 @@ def selfcheck_checkout(transaction_user_pid, item_barcode, patron_barcode, from invenio_sip2.errors import SelfcheckCirculationError from invenio_sip2.models import SelfcheckCheckout - language = kwargs.get('language', current_app.config - .get('BABEL_DEFAULT_LANGUAGE')) + language = kwargs.get( + "language", current_app.config.get("BABEL_DEFAULT_LANGUAGE") + ) # Temporarily overrides the currently selected locale. # force_locale is allowed to work outside of application context with force_locale(language): try: - terminal = SelfcheckTerminal.find_terminal( - name=kwargs.get('terminal')) + terminal = SelfcheckTerminal.find_terminal(name=kwargs.get("terminal")) item = Item.get_item_by_barcode( - barcode=item_barcode, - organisation_pid=terminal.organisation_pid + barcode=item_barcode, organisation_pid=terminal.organisation_pid ) if not item: raise ItemBarcodeNotFound document = Document.get_record_by_pid(item.document_pid) checkout = SelfcheckCheckout( - title_id=TitleExtension.format_text(document.get('title')), + title_id=TitleExtension.format_text(document.get("title")), ) staffer = Patron.get_record_by_pid(transaction_user_pid) if staffer.is_professional_user: patron = Patron.get_patron_by_barcode( - barcode=patron_barcode, - org_pid=terminal.organisation_pid) + barcode=patron_barcode, org_pid=terminal.organisation_pid + ) if not patron: raise PatronBarcodeNotFound # do checkout @@ -391,63 +422,65 @@ def selfcheck_checkout(transaction_user_pid, item_barcode, patron_barcode, ) if LoanAction.CHECKOUT in data: loan = data[LoanAction.CHECKOUT] - checkout['checkout'] = True - checkout['desensitize'] = True - checkout['due_date'] = loan['end_date'] + checkout["checkout"] = True + checkout["desensitize"] = True + checkout["due_date"] = loan["end_date"] # checkout note - checkout_note = item.get_note( - ItemNoteTypes.CHECKOUT) + checkout_note = item.get_note(ItemNoteTypes.CHECKOUT) if checkout_note: - checkout.get('screen_messages', []) \ - .append(checkout_note) + checkout.get("screen_messages", []).append(checkout_note) # TODO: When is possible, try to return fields: # magnetic_media except ItemBarcodeNotFound: checkout = SelfcheckCheckout( - title_id='', - due_date=datetime.now(timezone.utc) + title_id="", due_date=datetime.now(timezone.utc) + ) + checkout.get("screen_messages", []).append( + _("Error encountered: item not found") + ) + checkout.get("screen_messages", []).append( + _("Error encountered: please contact a librarian") ) - checkout.get('screen_messages', []).append( - _('Error encountered: item not found')) - checkout.get('screen_messages', []).append( - _('Error encountered: please contact a librarian')) except ItemNotAvailableError: # the due date is a required field from sip2 - checkout['due_date'] = datetime.now(timezone.utc) + checkout["due_date"] = datetime.now(timezone.utc) # check if item is already checked out by the current # patron loan = get_loans_by_item_pid_by_patron_pid( - item_pid=item.pid, patron_pid=patron.pid, - filter_states=[LoanState.ITEM_ON_LOAN]) + item_pid=item.pid, + patron_pid=patron.pid, + filter_states=[LoanState.ITEM_ON_LOAN], + ) if loan: - checkout['renewal'] = True - checkout['desensitize'] = True - checkout['due_date'] = loan['end_date'] + 
checkout["renewal"] = True + checkout["desensitize"] = True + checkout["due_date"] = loan["end_date"] else: - checkout.get('screen_messages', []).append( - _('Item is already checked-out or ' - 'requested by patron.')) + checkout.get("screen_messages", []).append( + _("Item is already checked-out or " "requested by patron.") + ) except PatronBarcodeNotFound: checkout = SelfcheckCheckout( - title_id='', - due_date=datetime.now(timezone.utc) + title_id="", due_date=datetime.now(timezone.utc) + ) + checkout.get("screen_messages", []).append( + _("Error encountered: patron not found") + ) + checkout.get("screen_messages", []).append( + _("Error encountered: please contact a librarian") ) - checkout.get('screen_messages', []).append( - _('Error encountered: patron not found')) - checkout.get('screen_messages', []).append( - _('Error encountered: please contact a librarian')) except NoCirculationAction: - checkout.get('screen_messages', []).append( - _('No circulation action is possible')) + checkout.get("screen_messages", []).append( + _("No circulation action is possible") + ) except CirculationException as circ_err: - checkout.get('screen_messages', []).append( - _(circ_err.description)) + checkout.get("screen_messages", []).append(_(circ_err.description)) except Exception: - checkout.get('screen_messages', []).append( - _('Error encountered: please contact a librarian')) - raise SelfcheckCirculationError('self checkout failed', - checkout) + checkout.get("screen_messages", []).append( + _("Error encountered: please contact a librarian") + ) + raise SelfcheckCirculationError("self checkout failed", checkout) return checkout @@ -462,29 +495,24 @@ def selfcheck_checkin(transaction_user_pid, item_barcode, **kwargs): if check_sip2_module(): from invenio_sip2.models import SelfcheckCheckin - language = kwargs.get('language', current_app.config - .get('BABEL_DEFAULT_LANGUAGE')) + language = kwargs.get( + "language", current_app.config.get("BABEL_DEFAULT_LANGUAGE") + ) # Temporarily overrides the currently selected locale. 
# force_locale is allowed to work outside of application context with force_locale(language): try: - terminal = SelfcheckTerminal.find_terminal( - name=kwargs.get('terminal')) + terminal = SelfcheckTerminal.find_terminal(name=kwargs.get("terminal")) library = Library.get_record_by_pid(terminal.library_pid) - checkin = SelfcheckCheckin( - permanent_location=library.get('name') - ) + checkin = SelfcheckCheckin(permanent_location=library.get("name")) item = Item.get_item_by_barcode( - barcode=item_barcode, - organisation_pid=terminal.organisation_pid + barcode=item_barcode, organisation_pid=terminal.organisation_pid ) if not item: raise ItemBarcodeNotFound document = Document.get_record_by_pid(item.document_pid) - checkin['title_id'] = TitleExtension.format_text( - document.get('title') - ) + checkin["title_id"] = TitleExtension.format_text(document.get("title")) staffer = Patron.get_record_by_pid(transaction_user_pid) if staffer.is_professional_user: # do checkin @@ -495,33 +523,35 @@ def selfcheck_checkin(transaction_user_pid, item_barcode, **kwargs): selfcheck_terminal_id=str(terminal.id), ) if LoanAction.CHECKIN in data: - checkin['checkin'] = True - checkin['resensitize'] = True - if item.get_requests(output='count') > 0: - checkin['alert'] = True + checkin["checkin"] = True + checkin["resensitize"] = True + if item.get_requests(output="count") > 0: + checkin["alert"] = True # checkin note checkin_note = item.get_note(ItemNoteTypes.CHECKIN) if checkin_note: - checkin.get('screen_messages', []) \ - .append(checkin_note) + checkin.get("screen_messages", []).append(checkin_note) # TODO: When it is possible, try to return fields: # magnetic_media # TODO: implement `print_line` except ItemBarcodeNotFound: - checkin.get('screen_messages', []).append( - _('Error encountered: item not found')) - checkin.get('screen_messages', []).append( - _('Error encountered: please contact a librarian')) + checkin.get("screen_messages", []).append( + _("Error encountered: item not found") + ) + checkin.get("screen_messages", []).append( + _("Error encountered: please contact a librarian") + ) except NoCirculationAction: - checkin.get('screen_messages', []).append( - _('No circulation action is possible')) + checkin.get("screen_messages", []).append( + _("No circulation action is possible") + ) except CirculationException as circ_err: - checkin.get('screen_messages', []).append( - _(circ_err.description)) + checkin.get("screen_messages", []).append(_(circ_err.description)) except Exception: - current_app.logger.error('self checkin failed') - checkin.get('screen_messages', []).append( - _('Error encountered: please contact a librarian')) + current_app.logger.error("self checkin failed") + checkin.get("screen_messages", []).append( + _("Error encountered: please contact a librarian") + ) return checkin @@ -536,24 +566,24 @@ def selfcheck_renew(transaction_user_pid, item_barcode, **kwargs): if check_sip2_module(): from invenio_sip2.errors import SelfcheckCirculationError from invenio_sip2.models import SelfcheckFeeType, SelfcheckRenew - language = kwargs.get('language', current_app.config - .get('BABEL_DEFAULT_LANGUAGE')) + + language = kwargs.get( + "language", current_app.config.get("BABEL_DEFAULT_LANGUAGE") + ) # Temporarily overrides the currently selected locale.
# force_locale is allowed to work outside of application context with force_locale(language): try: - terminal = SelfcheckTerminal.find_terminal( - name=kwargs.get('terminal')) + terminal = SelfcheckTerminal.find_terminal(name=kwargs.get("terminal")) item = Item.get_item_by_barcode( - barcode=item_barcode, - organisation_pid=terminal.organisation_pid + barcode=item_barcode, organisation_pid=terminal.organisation_pid ) if not item: raise ItemBarcodeNotFound document = Document.get_record_by_pid(item.document_pid) renew = SelfcheckRenew( - title_id=TitleExtension.format_text(document.get('title')) + title_id=TitleExtension.format_text(document.get("title")) ) staffer = Patron.get_record_by_pid(transaction_user_pid) @@ -567,38 +597,38 @@ def selfcheck_renew(transaction_user_pid, item_barcode, **kwargs): ) if LoanAction.EXTEND in data: loan = data[LoanAction.EXTEND] - renew['success'] = True - renew['renewal'] = True - renew['desensitize'] = True - renew['due_date'] = loan['end_date'] + renew["success"] = True + renew["renewal"] = True + renew["desensitize"] = True + renew["due_date"] = loan["end_date"] transaction = get_last_transaction_by_loan_pid( - loan_pid=loan.pid, status='open') + loan_pid=loan.pid, status="open" + ) if transaction: # TODO: map transaction type - renew['fee_type'] = SelfcheckFeeType.OVERDUE - renew['fee_amount'] = \ - '%.2f' % transaction.total_amount - renew['currency_type'] = transaction.currency + renew["fee_type"] = SelfcheckFeeType.OVERDUE + renew["fee_amount"] = "%.2f" % transaction.total_amount + renew["currency_type"] = transaction.currency # TODO: When it is possible, try to return fields: # magnetic_media except ItemBarcodeNotFound: - renew = SelfcheckRenew(title_id='') - renew.get('screen_messages', []).append( - _('Error encountered: item not found')) - renew.get('screen_messages', []).append( - _('Error encountered: please contact a librarian')) + renew = SelfcheckRenew(title_id="") + renew.get("screen_messages", []).append( + _("Error encountered: item not found") + ) + renew.get("screen_messages", []).append( + _("Error encountered: please contact a librarian") + ) except NoCirculationAction: - renew.get('screen_messages', []).append( - _('No circulation action is possible')) + renew.get("screen_messages", []).append( + _("No circulation action is possible") + ) except CirculationException as circ_err: - renew.get('screen_messages', []).append( - _(circ_err.description)) + renew.get("screen_messages", []).append(_(circ_err.description)) except Exception: - renew.get('screen_messages', []).append( - _('Error encountered: please contact a librarian')) - raise SelfcheckCirculationError( - 'self renewal failed', - renew + renew.get("screen_messages", []).append( + _("Error encountered: please contact a librarian") ) + raise SelfcheckCirculationError("self renewal failed", renew) return renew diff --git a/rero_ils/modules/selfcheck/cli.py b/rero_ils/modules/selfcheck/cli.py index d697302392..517134710e 100644 --- a/rero_ils/modules/selfcheck/cli.py +++ b/rero_ils/modules/selfcheck/cli.py @@ -31,89 +31,94 @@ from .models import SelfcheckTerminal -@click.command('create_terminal') -@click.option('-n', '--name', required=True) +@click.command("create_terminal") +@click.option("-n", "--name", required=True) @click.option( - '-u', '--user', required=True, callback=process_user, - help='User ID or email.') + "-u", "--user", required=True, callback=process_user, help="User ID or email."
+) +@click.option("-l", "--location-pid", required=True) +@click.option("-s", "--scope", "scopes", multiple=True, callback=process_scopes) +@click.option("-i", "--internal", is_flag=True) @click.option( - '-l', '--location-pid', required=True) + "-t", + "--access_token", + "access_token", + required=False, + help="personalized access_token.", +) @click.option( - '-s', '--scope', 'scopes', multiple=True, callback=process_scopes) -@click.option('-i', '--internal', is_flag=True) -@click.option( - '-t', '--access_token', 'access_token', required=False, - help='personalized access_token.') -@click.option( - '-c', '--comments', 'comments', required=False, - help='comments for selfcheck terminal.') + "-c", + "--comments", + "comments", + required=False, + help="comments for selfcheck terminal.", +) @with_appcontext -def create_terminal(name, user, location_pid, scopes, internal, - access_token, comments): +def create_terminal(name, user, location_pid, scopes, internal, access_token, comments): """Create a personal OAuth token.""" # avoid circular import from rero_ils.modules.cli.utils import create_personal # check if user exists: if not user: - click.secho('ERROR user does not exist', fg='red') + click.secho("ERROR user does not exist", fg="red") sys.exit(1) # check if name already exists if SelfcheckTerminal.find_terminal(name=name): - click.secho( - f'ERROR terminal name already exist: {name}', - fg='red' - ) + click.secho(f"ERROR terminal name already exists: {name}", fg="red") sys.exit(1) if location := LocationsSearch().get_record_by_pid(location_pid): if not (token := get_token(access_token=access_token)): - click.secho(f'create token for: {user}', fg='blue') + click.secho(f"create token for: {user}", fg="blue") token = create_personal( - name, user.id, scopes=scopes, is_internal=internal, - access_token=access_token) + name, + user.id, + scopes=scopes, + is_internal=internal, + access_token=access_token, + ) access_token = token.access_token if access_token: selfcheck_terminal = SelfcheckTerminal( name=name, access_token=access_token, - organisation_pid=location.organisation['pid'], - library_pid=location.library['pid'], + organisation_pid=location.organisation["pid"], + library_pid=location.library["pid"], location_pid=location_pid, - comments=comments + comments=comments, ) db.session.add(selfcheck_terminal) db.session.commit() - click.secho(f'login: {name}', fg='green') - click.secho(access_token, fg='green') + click.secho(f"login: {name}", fg="green") + click.secho(access_token, fg="green") -@click.command('list_terminal') +@click.command("list_terminal") @with_appcontext def list_terminal(): """List all configured terminals.""" - for terminal in SelfcheckTerminal.query.order_by('id').all(): + for terminal in SelfcheckTerminal.query.order_by("id").all(): click.echo(terminal.name) - click.echo(f'\ttoken : {terminal.access_token}') - click.echo(f'\torganisation_pid : {terminal.organisation_pid}') - click.echo(f'\tlibrary_pid : {terminal.library_pid}') - click.echo(f'\tlocation_pid : {terminal.location_pid}') - click.echo(f'\tactive : {terminal.active}') - click.echo(f'\tlast login : {terminal.last_login_at}') - click.echo(f'\tcomments : {terminal.comments}') + click.echo(f"\ttoken : {terminal.access_token}") + click.echo(f"\torganisation_pid : {terminal.organisation_pid}") + click.echo(f"\tlibrary_pid : {terminal.library_pid}") + click.echo(f"\tlocation_pid : {terminal.location_pid}") + click.echo(f"\tactive : {terminal.active}") + click.echo(f"\tlast login : {terminal.last_login_at}") +
click.echo(f"\tcomments : {terminal.comments}") -@click.command('update_terminal') -@click.argument('name') -@click.option('-e', '--enable', 'enable', is_flag=True, default=False) -@click.option('-d', '--disable', 'disable', is_flag=True, default=False) -@click.option('-l', '--loc-pid', 'location_pid') -@click.option('-t', '--access-token', 'access_token') -@click.option('-c', '--comments', 'comments') +@click.command("update_terminal") +@click.argument("name") +@click.option("-e", "--enable", "enable", is_flag=True, default=False) +@click.option("-d", "--disable", "disable", is_flag=True, default=False) +@click.option("-l", "--loc-pid", "location_pid") +@click.option("-t", "--access-token", "access_token") +@click.option("-c", "--comments", "comments") @with_appcontext -def update_terminal(name, enable, disable, location_pid, access_token, - comments): +def update_terminal(name, enable, disable, location_pid, access_token, comments): """Update the given terminal.""" if not (terminal := SelfcheckTerminal.find_terminal(name=name)): return @@ -123,19 +128,18 @@ def update_terminal(name, enable, disable, location_pid, access_token, terminal.active = True if location_pid: if location := LocationsSearch().get_record_by_pid(location_pid): - terminal.organisation_pid = location.organisation['pid'], - terminal.library_pid = location.library['pid'], + terminal.organisation_pid = (location.organisation["pid"],) + terminal.library_pid = (location.library["pid"],) terminal.location_pid = location_pid if access_token: if token := get_token(access_token): terminal.access_token = token.access_token else: click.secho( - f'WARNING token is not valid or does not exist : ' - f'{access_token}', - fg='yellow' + f"WARNING token is not valid or does not exist : " f"{access_token}", + fg="yellow", ) if comments: terminal.comments = comments db.session.merge(terminal) - click.secho(f'{name} updated', fg='green') + click.secho(f"{name} updated", fg="green") diff --git a/rero_ils/modules/selfcheck/models.py b/rero_ils/modules/selfcheck/models.py index 96ef32ad98..3f85845de7 100644 --- a/rero_ils/modules/selfcheck/models.py +++ b/rero_ils/modules/selfcheck/models.py @@ -27,19 +27,16 @@ class SelfcheckTerminal(db.Model): """Selfcheck terminal model.""" - __tablename__ = 'selfcheck_terminals' + __tablename__ = "selfcheck_terminals" - id = db.Column( - db.Integer, - primary_key=True - ) + id = db.Column(db.Integer, primary_key=True) name = db.Column(db.String(255), unique=True) access_token = db.Column(db.String(255), nullable=False) organisation_pid = db.Column(db.String(255), nullable=False) library_pid = db.Column(db.String(255), nullable=False) location_pid = db.Column(db.String(255), nullable=False) - active = db.Column(db.Boolean(name='active'), default=True) + active = db.Column(db.Boolean(name="active"), default=True) last_login_at = db.Column(db.DateTime) last_login_ip = db.Column(IPAddressType, nullable=True) comments = db.Column(db.Text, nullable=True) diff --git a/rero_ils/modules/selfcheck/permissions.py b/rero_ils/modules/selfcheck/permissions.py index ba2df898b9..e950a1606c 100644 --- a/rero_ils/modules/selfcheck/permissions.py +++ b/rero_ils/modules/selfcheck/permissions.py @@ -18,13 +18,10 @@ """Selfcheck permissions.""" -from ..permissions import deny_access from ...permissions import monitoring_permission +from ..permissions import deny_access def seflcheck_permission_factory(action): """Default api permission factory.""" - if action in ['api-monitoring']: - return monitoring_permission - - return 
deny_access + return monitoring_permission if action in ["api-monitoring"] else deny_access diff --git a/rero_ils/modules/selfcheck/utils.py b/rero_ils/modules/selfcheck/utils.py index ea89e94794..5ded4b27c5 100644 --- a/rero_ils/modules/selfcheck/utils.py +++ b/rero_ils/modules/selfcheck/utils.py @@ -51,10 +51,9 @@ def authorize_selfckeck_terminal(terminal, access_token, **kwargs): :return: The granted user instance or ``None``. """ if terminal and terminal.access_token == access_token: - token = get_token(access_token=access_token) - if token: + if token := get_token(access_token=access_token): terminal.last_login_at = datetime.utcnow() - terminal.last_login_ip = kwargs.get('terminal_ip', None) + terminal.last_login_ip = kwargs.get("terminal_ip", None) db.session.merge(terminal) return token.user @@ -78,19 +77,23 @@ def format_patron_address(patron): :param patron: patron instance. :return: Formatted address like 'street postal code city' for patron. """ - if address := patron.get('second_address'): - street = address.get('street'), - postal_code = address.get('postal_code') - city = address.get('city') + if address := patron.get("second_address"): + street = address.get("street") + postal_code = address.get("postal_code") + city = address.get("city") else: profile = patron.user.user_profile - street = profile['street'].strip() - postal_code = profile['postal_code'].strip() - city = profile['city'].strip() - formated_address = f'{street}, {postal_code} {city}' + street = profile["street"].strip() + postal_code = profile["postal_code"].strip() + city = profile["city"].strip() + formated_address = f"{street}, {postal_code} {city}" # Should never happen, but data can be imported from an old system - return formated_address.replace(r'\n', ' ').replace(r'\r', ' ')\ - .replace('\n', ' ').replace('\r', ' ') + return ( + formated_address.replace(r"\n", " ") + .replace(r"\r", " ") + .replace("\n", " ") + .replace("\r", " ") + ) def get_patron_status(patron): @@ -106,58 +109,54 @@ def get_patron_status(patron): :return: SelfcheckPatronStatus object or None.
""" - if check_sip2_module(): - from invenio_sip2.models import PatronStatus, PatronStatusTypes - - patron_status = PatronStatus() - # check if patron is blocked - if patron.is_blocked: - patron_status.add_patron_status_type( - PatronStatusTypes.CHARGE_PRIVILEGES_DENIED) - patron_status.add_patron_status_type( - PatronStatusTypes.RENEWAL_PRIVILEGES_DENIED) - patron_status.add_patron_status_type( - PatronStatusTypes.RECALL_PRIVILEGES_DENIED) - patron_status.add_patron_status_type( - PatronStatusTypes.HOLD_PRIVILEGES_DENIED) - - patron_type = PatronType.get_record_by_pid(patron.patron_type_pid) - # check the patron type checkout limit - if not patron_type.check_checkout_count_limit(patron): - patron_status.add_patron_status_type( - PatronStatusTypes.CHARGE_PRIVILEGES_DENIED) - patron_status.add_patron_status_type( - PatronStatusTypes.RENEWAL_PRIVILEGES_DENIED) - patron_status.add_patron_status_type( - PatronStatusTypes.HOLD_PRIVILEGES_DENIED) - patron_status.add_patron_status_type( - PatronStatusTypes.TOO_MANY_ITEMS_CHARGED) - - # check the patron type fee amount limit - if not patron_type.check_fee_amount_limit(patron): - patron_status.add_patron_status_type( - PatronStatusTypes.CHARGE_PRIVILEGES_DENIED) - patron_status.add_patron_status_type( - PatronStatusTypes.RENEWAL_PRIVILEGES_DENIED) - patron_status.add_patron_status_type( - PatronStatusTypes.HOLD_PRIVILEGES_DENIED) - patron_status.add_patron_status_type( - PatronStatusTypes.EXCESSIVE_OUTSTANDING_FINES) - patron_status.add_patron_status_type( - PatronStatusTypes.EXCESSIVE_OUTSTANDING_FEES) - - # check the patron type overdue limit - if not patron_type.check_overdue_items_limit(patron): - patron_status.add_patron_status_type( - PatronStatusTypes.CHARGE_PRIVILEGES_DENIED) - patron_status.add_patron_status_type( - PatronStatusTypes.RENEWAL_PRIVILEGES_DENIED) - patron_status.add_patron_status_type( - PatronStatusTypes.HOLD_PRIVILEGES_DENIED) - patron_status.add_patron_status_type( - PatronStatusTypes.TOO_MANY_ITEMS_OVERDUE) - - return patron_status + if not check_sip2_module(): + return + from invenio_sip2.models import PatronStatus, PatronStatusTypes + + patron_status = PatronStatus() + # check if patron is blocked + if patron.is_blocked: + patron_status.add_patron_status_type(PatronStatusTypes.CHARGE_PRIVILEGES_DENIED) + patron_status.add_patron_status_type( + PatronStatusTypes.RENEWAL_PRIVILEGES_DENIED + ) + patron_status.add_patron_status_type(PatronStatusTypes.RECALL_PRIVILEGES_DENIED) + patron_status.add_patron_status_type(PatronStatusTypes.HOLD_PRIVILEGES_DENIED) + + patron_type = PatronType.get_record_by_pid(patron.patron_type_pid) + # check the patron type checkout limit + if not patron_type.check_checkout_count_limit(patron): + patron_status.add_patron_status_type(PatronStatusTypes.CHARGE_PRIVILEGES_DENIED) + patron_status.add_patron_status_type( + PatronStatusTypes.RENEWAL_PRIVILEGES_DENIED + ) + patron_status.add_patron_status_type(PatronStatusTypes.HOLD_PRIVILEGES_DENIED) + patron_status.add_patron_status_type(PatronStatusTypes.TOO_MANY_ITEMS_CHARGED) + + # check the patron type fee amount limit + if not patron_type.check_fee_amount_limit(patron): + patron_status.add_patron_status_type(PatronStatusTypes.CHARGE_PRIVILEGES_DENIED) + patron_status.add_patron_status_type( + PatronStatusTypes.RENEWAL_PRIVILEGES_DENIED + ) + patron_status.add_patron_status_type(PatronStatusTypes.HOLD_PRIVILEGES_DENIED) + patron_status.add_patron_status_type( + PatronStatusTypes.EXCESSIVE_OUTSTANDING_FINES + ) + patron_status.add_patron_status_type( 
+ PatronStatusTypes.EXCESSIVE_OUTSTANDING_FEES + ) + + # check the patron type overdue limit + if not patron_type.check_overdue_items_limit(patron): + patron_status.add_patron_status_type(PatronStatusTypes.CHARGE_PRIVILEGES_DENIED) + patron_status.add_patron_status_type( + PatronStatusTypes.RENEWAL_PRIVILEGES_DENIED + ) + patron_status.add_patron_status_type(PatronStatusTypes.HOLD_PRIVILEGES_DENIED) + patron_status.add_patron_status_type(PatronStatusTypes.TOO_MANY_ITEMS_OVERDUE) + + return patron_status def map_media_type(media_type): @@ -166,8 +165,8 @@ def map_media_type(media_type): :param media_type: Document type :return: sip2 media type (see invenio_sip2.models.SelfcheckMediaType) """ - return current_app.config.get('SIP2_MEDIA_TYPES').get( - media_type, 'docmaintype_other' + return current_app.config.get("SIP2_MEDIA_TYPES").get( + media_type, "docmaintype_other" ) @@ -179,12 +178,12 @@ def map_item_circulation_status(item_status): (see invenio_sip2.models.SelfcheckCirculationStatus) """ circulation_status = { - ItemStatus.ON_SHELF: 'AVAILABLE', - ItemStatus.AT_DESK: 'WAITING_ON_HOLD_SHELF', - ItemStatus.ON_LOAN: 'CHARGED', - ItemStatus.IN_TRANSIT: 'IN_TRANSIT', - ItemStatus.EXCLUDED: 'OTHER', - ItemStatus.MISSING: 'MISSING', + ItemStatus.ON_SHELF: "AVAILABLE", + ItemStatus.AT_DESK: "WAITING_ON_HOLD_SHELF", + ItemStatus.ON_LOAN: "CHARGED", + ItemStatus.IN_TRANSIT: "IN_TRANSIT", + ItemStatus.EXCLUDED: "OTHER", + ItemStatus.MISSING: "MISSING", } - return circulation_status.get(item_status, 'OTHER') + return circulation_status.get(item_status, "OTHER") diff --git a/rero_ils/modules/serializers/__init__.py b/rero_ils/modules/serializers/__init__.py index a432b67781..810cb9f3c3 100644 --- a/rero_ils/modules/serializers/__init__.py +++ b/rero_ils/modules/serializers/__init__.py @@ -19,30 +19,30 @@ """RERO ILS Record serialization.""" from invenio_records_rest.serializers.response import record_responsify -from invenio_records_rest.serializers.response import \ - search_responsify as _search_responsify +from invenio_records_rest.serializers.response import ( + search_responsify as _search_responsify, +) from .base import ACQJSONSerializer, JSONSerializer from .mixins import CachedDataSerializerMixin, StreamSerializerMixin -from .response import record_responsify_file, search_responsify, \ - search_responsify_file +from .response import record_responsify_file, search_responsify, search_responsify_file from .schema import RecordSchemaJSONV1 __all__ = [ - 'CachedDataSerializerMixin', - 'StreamSerializerMixin', - 'JSONSerializer', - 'ACQJSONSerializer', - 'RecordSchemaJSONV1', - 'json_v1', - 'json_v1_search', - 'json_v1_response', - 'search_responsify', - 'search_responsify_file', - 'record_responsify_file' + "CachedDataSerializerMixin", + "StreamSerializerMixin", + "JSONSerializer", + "ACQJSONSerializer", + "RecordSchemaJSONV1", + "json_v1", + "json_v1_search", + "json_v1_response", + "search_responsify", + "search_responsify_file", + "record_responsify_file", ] json_v1 = JSONSerializer(RecordSchemaJSONV1) -json_v1_search = _search_responsify(json_v1, 'application/json') -json_v1_response = record_responsify(json_v1, 'application/json') +json_v1_search = _search_responsify(json_v1, "application/json") +json_v1_response = record_responsify(json_v1, "application/json") diff --git a/rero_ils/modules/serializers/base.py b/rero_ils/modules/serializers/base.py index d72b8d0fb4..a61c3ba32b 100644 --- a/rero_ils/modules/serializers/base.py +++ b/rero_ils/modules/serializers/base.py @@ -24,16 +24,15 @@ 
from flask import json, request from flask_babel import gettext as _ from invenio_jsonschemas import current_jsonschemas -from invenio_records_rest.serializers.json import \ - JSONSerializer as _JSONSerializer +from invenio_records_rest.serializers.json import JSONSerializer as _JSONSerializer from .mixins import PostprocessorMixin def schema_from_context(_, context, data, schema): """Get the record's schema from context.""" - record = (context or {}).get('record', {}) - return record.get('$schema', current_jsonschemas.path_to_url(schema)) + record = (context or {}).get("record", {}) + return record.get("$schema", current_jsonschemas.path_to_url(schema)) class JSONSerializer(_JSONSerializer, PostprocessorMixin): @@ -41,43 +40,48 @@ class JSONSerializer(_JSONSerializer, PostprocessorMixin): def preprocess_record(self, pid, record, links_factory=None, **kwargs): """Prepare a record and persistent identifier for serialization.""" - links_factory = links_factory or (lambda x, record=None, **k: dict()) - if request and request.args.get('resolve') == '1': + links_factory = links_factory or (lambda x, record=None, **k: {}) + if request and request.args.get("resolve") == "1": metadata = record.resolve() # if jsonref is not enabled, the dumps is already done in the resolve # method - if getattr(record, 'enable_jsonref', False): + if getattr(record, "enable_jsonref", False): metadata = metadata.dumps() else: - metadata = deepcopy(record.replace_refs()) if self.replace_refs \ - else record.dumps() + metadata = ( + deepcopy(record.replace_refs()) if self.replace_refs else record.dumps() + ) return dict( pid=pid, metadata=metadata, links=links_factory(pid, record=record, **kwargs), revision=record.revision_id, created=( pytz.utc.localize(record.created).isoformat() if record.created else None ), updated=( pytz.utc.localize(record.updated).isoformat() if record.updated else None ), ) @staticmethod def preprocess_search_hit(pid, record_hit, links_factory=None, **kwargs): """Prepare a record hit from Elasticsearch for serialization.""" record = _JSONSerializer.preprocess_search_hit( - pid=pid, - record_hit=record_hit, - links_factory=links_factory, - kwargs=kwargs + pid=pid, record_hit=record_hit, links_factory=links_factory, kwargs=kwargs ) - if record.get('_explanation'): - record[1:] = record.get('_explanation') - del record['_explanation'] + if record.get("_explanation"): + record[1:] = record.get("_explanation") + del record["_explanation"] return record - def serialize_search(self, pid_fetcher, search_result, links=None, - item_links_factory=None, **kwargs): + def serialize_search( + self, pid_fetcher, search_result, links=None, item_links_factory=None, **kwargs + ): """Serialize a search result. :param pid_fetcher: Persistent identifier fetcher.
@@ -89,18 +93,21 @@ def serialize_search(self, pid_fetcher, search_result, links=None, hits=dict( hits=[ self.transform_search_hit( - pid_fetcher(hit['_id'], hit['_source']), - hit, links_factory=item_links_factory, **kwargs) - for hit in search_result['hits']['hits'] + pid_fetcher(hit["_id"], hit["_source"]), + hit, + links_factory=item_links_factory, + **kwargs, + ) + for hit in search_result["hits"]["hits"] ], - total=search_result['hits']['total'], + total=search_result["hits"]["total"], ), links=links or {}, - aggregations=search_result.get('aggregations', dict()), + aggregations=search_result.get("aggregations", {}), ) return json.dumps( self.postprocess_serialize_search(results, pid_fetcher), - **self._format_args() + **self._format_args(), ) @staticmethod @@ -115,14 +122,16 @@ def enrich_bucket_with_data(buckets, search_cls, attributes_name): if not isinstance(attributes_name, list): attributes_name = [attributes_name] # search all requested values using search class - query = search_cls() \ - .filter('terms', pid=[term['key'] for term in buckets]) \ - .source(['pid'] + attributes_name) + query = ( + search_cls() + .filter("terms", pid=[term["key"] for term in buckets]) + .source(["pid"] + attributes_name) + ) data = {result.pid: result.to_dict() for result in query.scan()} # complete buckets with data for term in buckets: for attr in attributes_name: - if info := data.get(term['key']): + if info := data.get(term["key"]): if attr_term := info.get(attr): term[attr] = attr_term else: @@ -135,12 +144,12 @@ def add_date_range_configuration(aggregation, step=86400000): :param aggregation: the aggregation to process. :param step: the number of millis for each step. By default : 1 day. """ - if values := [term['key'] for term in aggregation.get('buckets', [])]: - aggregation['type'] = 'date-range' - aggregation['config'] = { - 'min': min(values), - 'max': max(values), - 'step': step # 1 day in millis + if values := [term["key"] for term in aggregation.get("buckets", [])]: + aggregation["type"] = "date-range" + aggregation["config"] = { + "min": min(values), + "max": max(values), + "step": step, # 1 day in millis } @@ -150,6 +159,7 @@ class ACQJSONSerializer(JSONSerializer, PostprocessorMixin): def preprocess_record(self, pid, record, links_factory=None, **kwargs): """Prepare a record and persistent identifier for serialization.""" # add some dynamic key related to the record. - record['is_current_budget'] = record.is_active + record["is_current_budget"] = record.is_active return super().preprocess_record( - pid=pid, record=record, links_factory=links_factory, kwargs=kwargs) + pid=pid, record=record, links_factory=links_factory, kwargs=kwargs + ) diff --git a/rero_ils/modules/serializers/mixins.py b/rero_ils/modules/serializers/mixins.py index 107834304d..49616b71d6 100644 --- a/rero_ils/modules/serializers/mixins.py +++ b/rero_ils/modules/serializers/mixins.py @@ -52,17 +52,17 @@ def postprocess_serialize_search(self, results, pid_fetcher): :param results: Search result. :param pid_fetcher: Persistent identifier fetcher. """ - for hit in results.get('hits', {}).get('hits', []): + for hit in results.get("hits", {}).get("hits", []): self._postprocess_search_hit(hit) - if aggregations := results.get('aggregations'): + if aggregations := results.get("aggregations"): # special process for nested aggregations # to display the results of nested aggregations # (nested aggregations are created to apply facet filters), # put the value of the nested 'aggs_facet' aggregation # one level up. 
for key, agg in aggregations.items(): - if agg and ('aggs_facet' in agg): - aggregations[key] = aggregations[key]['aggs_facet'] + if agg and ("aggs_facet" in agg): + aggregations[key] = aggregations[key]["aggs_facet"] self._postprocess_search_aggregations(aggregations) self._postprocess_search_links(results, pid_fetcher) return results @@ -75,9 +75,9 @@ def _postprocess_search_links(self, search_results, pid_fetcher) -> None: into the search result. """ # add REST API to create a record related to the search result. - pid_type = pid_fetcher(None, {'pid': '1'}).pid_type - url = url_for(f'invenio_records_rest.{pid_type}_list', _external=True) - search_results['links'].update({'create': url}) + pid_type = pid_fetcher(None, {"pid": "1"}).pid_type + url = url_for(f"invenio_records_rest.{pid_type}_list", _external=True) + search_results["links"].update({"create": url}) def _postprocess_search_hit(self, hit: dict) -> None: """Post-process a specific search hit. @@ -94,8 +94,8 @@ def _postprocess_search_aggregations(self, aggregations: dict) -> None: :param aggregations: the dictionary representing ElasticSearch aggregations section. """ - if 'document_type' in aggregations: - aggr = aggregations['document_type'].get('buckets') + if "document_type" in aggregations: + aggr = aggregations["document_type"].get("buckets") filter_document_type_buckets(aggr) @@ -156,7 +156,7 @@ def load_all(self, *args): for loader in args: assert issubclass(loader.__class__, RecordsSearch) for hit in loader.scan(): - pid = hit['pid'] + pid = hit["pid"] key = CachedDataSerializerMixin._get_key(loader, pid) self.append(key, hit.to_dict()) @@ -171,7 +171,7 @@ def load_resources(self, loader, pids): for resource in loader.get_records_by_pids(pids): if not inspect.isclass(loader): # AttrDict conversion resource = resource.to_dict() - if pid := resource.get('pid'): + if pid := resource.get("pid"): key = CachedDataSerializerMixin._get_key(loader, pid) self.append(key, resource) resources.append(resource) diff --git a/rero_ils/modules/serializers/response.py b/rero_ils/modules/serializers/response.py index 8d3cd0aada..7b8e8b0b6a 100644 --- a/rero_ils/modules/serializers/response.py +++ b/rero_ils/modules/serializers/response.py @@ -48,28 +48,41 @@ def search_responsify(serializer, mimetype): :param mimetype: MIME type of response. :returns: Function that generates a record HTTP response. """ - def view(pid_fetcher, search_result, code=200, headers=None, links=None, - item_links_factory=None): + + def view( + pid_fetcher, + search_result, + code=200, + headers=None, + links=None, + item_links_factory=None, + ): # Check if the serializer implement a 'reset' function. If yes, then # call this function before perform serialization. 
- if (reset := getattr(serializer, 'reset', None)) and callable(reset): + if (reset := getattr(serializer, "reset", None)) and callable(reset): reset() response = current_app.response_class( serializer.serialize_search( - pid_fetcher, search_result, - links=links, item_links_factory=item_links_factory), - mimetype=mimetype) + pid_fetcher, + search_result, + links=links, + item_links_factory=item_links_factory, + ), + mimetype=mimetype, + ) response.status_code = code if headers is not None: response.headers.extend(headers) if links is not None: add_link_header(response, links) return response + return view -def search_responsify_file(serializer, mimetype, file_extension, - file_prefix=None, file_suffix=None): +def search_responsify_file( + serializer, mimetype, file_extension, file_prefix=None, file_suffix=None +): """Create a Records-REST search result response serializer. :param serializer: Serializer instance. @@ -78,26 +91,34 @@ def search_responsify_file(serializer, mimetype, file_extension, :returns: Function that generates a record HTTP response. """ - def view(pid_fetcher, search_result, code=200, headers=None, links=None, - item_links_factory=None): + def view( + pid_fetcher, + search_result, + code=200, + headers=None, + links=None, + item_links_factory=None, + ): response = current_app.response_class( serializer.serialize_search( - pid_fetcher, search_result, + pid_fetcher, + search_result, links=links, - item_links_factory=item_links_factory + item_links_factory=item_links_factory, ), - mimetype=mimetype + mimetype=mimetype, ) response.status_code = code if headers is not None: response.headers.extend(headers) - tstamp = datetime.today().strftime('%Y%m%d') + tstamp = datetime.now().strftime("%Y%m%d") parts = filter(None, [file_prefix, tstamp, file_suffix]) - filename = '-'.join(parts) + '.' + file_extension - if not response.headers.get('Content-Disposition'): - response.headers['Content-Disposition'] = \ + filename = "-".join(parts) + "." + file_extension + if not response.headers.get("Content-Disposition"): + response.headers["Content-Disposition"] = ( f'attachment; filename="{filename}"' + ) if links is not None: add_link_header(response, links) @@ -107,8 +128,9 @@ def view(pid_fetcher, search_result, code=200, headers=None, links=None, return view -def record_responsify_file(serializer, mimetype, file_extension, - file_prefix=None, file_suffix=None): +def record_responsify_file( + serializer, mimetype, file_extension, file_prefix=None, file_suffix=None +): """Create a Records-REST search result response serializer. :param serializer: Serializer instance. @@ -116,10 +138,12 @@ def record_responsify_file(serializer, mimetype, file_extension, :param file_extension: File extension. :returns: Function that generates a record HTTP response. """ + def view(pid, record, code=200, headers=None, links_factory=None): response = current_app.response_class( serializer.serialize(pid, record, links_factory=links_factory), - mimetype=mimetype) + mimetype=mimetype, + ) response.status_code = code response.cache_control.no_cache = True response.set_etag(str(record.revision_id)) @@ -127,12 +151,13 @@ def view(pid, record, code=200, headers=None, links_factory=None): if headers is not None: response.headers.extend(headers) - tstamp = datetime.today().strftime('%Y%m%d') + tstamp = datetime.now().strftime("%Y%m%d") parts = filter(None, [file_prefix, tstamp, file_suffix]) - filename = '-'.join(parts) + '.' 
+ file_extension - if not response.headers.get('Content-Disposition'): - response.headers['Content-Disposition'] = \ + filename = "-".join(parts) + "." + file_extension + if not response.headers.get("Content-Disposition"): + response.headers["Content-Disposition"] = ( f'attachment; filename="{filename}"' + ) if links_factory is not None: add_link_header(response, links_factory(pid)) diff --git a/rero_ils/modules/serializers/schema.py b/rero_ils/modules/serializers/schema.py index 5aafeb6c96..9076ac6cd9 100644 --- a/rero_ils/modules/serializers/schema.py +++ b/rero_ils/modules/serializers/schema.py @@ -16,8 +16,7 @@ # along with this program. If not, see . """RERO ILS Record schema for serialization.""" -from invenio_records_rest.schemas import \ - RecordSchemaJSONV1 as _RecordSchemaJSONV1 +from invenio_records_rest.schemas import RecordSchemaJSONV1 as _RecordSchemaJSONV1 from marshmallow import fields diff --git a/rero_ils/modules/sru/cql_parser.py b/rero_ils/modules/sru/cql_parser.py index 25e9375a86..3bcc624364 100644 --- a/rero_ils/modules/sru/cql_parser.py +++ b/rero_ils/modules/sru/cql_parser.py @@ -33,64 +33,64 @@ from ..utils import strip_chars -SERVER_CHOISE_RELATION = '=' -SERVER_CHOISE_INDEX = 'cql.serverchoice' +SERVER_CHOISE_RELATION = "=" +SERVER_CHOISE_INDEX = "cql.serverchoice" -ORDER = ['=', '>', '>=', '<', '<=', '<>'] -MODIFIER_SEPERATOR = '/' -BOOLEANS = ['and', 'or', 'not', 'prox'] -SORT_WORD = 'sortby' +ORDER = ["=", ">", ">=", "<", "<=", "<>"] +MODIFIER_SEPERATOR = "/" +BOOLEANS = ["and", "or", "not", "prox"] +SORT_WORD = "sortby" RESERVED_PREFIXES = { - 'srw': 'http://www.loc.gov/zing/cql/srw-indexes/v1.0/', - 'cql': 'info:srw/cql-context-set/1/cql-v1.1', - 'dc': 'http://purl.org/dc/elements/1.1/' + "srw": "http://www.loc.gov/zing/cql/srw-indexes/v1.0/", + "cql": "info:srw/cql-context-set/1/cql-v1.1", + "dc": "http://purl.org/dc/elements/1.1/", } -XCQL_NAMESPACE = 'http://www.loc.gov/zing/cql/xcql/' +XCQL_NAMESPACE = "http://www.loc.gov/zing/cql/xcql/" -ERROR_ON_EMPTY_TERM = False # index = '' -ERROR_ON_QUOTED_IDENTIFIER = False # '/foo/bar' = '' -ERROR_ON_DUPLICATE_PREFIX = False # >a=b >a=c '' -FULL_RESULT_SET_NAME_CHECK = True # cql.rsn=a and cql.rsn=a (mutant!) +ERROR_ON_EMPTY_TERM = False # index = '' +ERROR_ON_QUOTED_IDENTIFIER = False # '/foo/bar' = '' +ERROR_ON_DUPLICATE_PREFIX = False # >a=b >a=c '' +FULL_RESULT_SET_NAME_CHECK = True # cql.rsn=a and cql.rsn=a (mutant!) 
ES_INDEX_MAPPINGS = { - 'cql.anywhere': SERVER_CHOISE_INDEX, - 'dc.anywhere': SERVER_CHOISE_INDEX, - 'dc.contributor': 'contribution.role:(' - 'ape OR aqt OR arc OR art OR aus OR aut OR chr OR cll OR cmp OR com OR ' - 'drt OR dsr OR enj OR fmk OR inv OR ive OR ivr OR lbt OR lsa OR lyr OR ' - 'pht OR pra OR prg OR rsp OR scl) AND ' - 'authorized_access_point', - 'dc.creator': 'contribution.role:(' - 'abr OR act OR adi OR adp OR aft OR anm OR ann OR apl OR arr OR ato OR ' - 'auc OR aui OR bkd OR bnd OR brd OR brl OR bsl OR cas OR clr OR clt OR ' - 'cmm OR cnd OR cng OR cns OR col OR cor OR cou OR cre OR crt OR csl OR ' - 'cst OR ctb OR ctg OR ctr OR cur OR cwt OR dfd OR dgg OR dgs OR dnc OR ' - 'dnr OR dpt OR drm OR dst OR dte OR dto OR dub OR edm OR edt OR egr OR ' - 'etr OR exp OR fac OR fds OR fmd OR fmo OR fmp OR his OR hnr OR hst OR ' - 'ill OR ilu OR ins OR isb OR itr OR jud OR jug OR lgd OR ltg OR med OR ' - 'mfr OR mod OR msd OR mtk OR mus OR nrt OR orm OR osp OR oth OR own OR ' - 'pan OR pat OR pbd OR pbl OR plt OR ppm OR ppt OR pre OR prf OR prm OR ' - 'prn OR pro OR prs OR prt OR ptf OR rcd OR rce OR rcp OR rdd OR res OR ' - 'rpc OR rsr OR sds OR sgd OR sll OR sng OR spk OR spn OR srv OR stl OR ' - 'tch OR tld OR tlp OR trc OR trl OR vac OR vdg OR wac OR wal OR wat OR ' - 'win OR wpr OR wst) AND ' - 'authorized_access_point', - 'dc.date': 'provisionActivity.type:"bf:Publication" ' - 'AND provisionActivity.startDate', - 'dc.title': 'title.\\*', + "cql.anywhere": SERVER_CHOISE_INDEX, + "dc.anywhere": SERVER_CHOISE_INDEX, + "dc.contributor": "contribution.role:(" + "ape OR aqt OR arc OR art OR aus OR aut OR chr OR cll OR cmp OR com OR " + "drt OR dsr OR enj OR fmk OR inv OR ive OR ivr OR lbt OR lsa OR lyr OR " + "pht OR pra OR prg OR rsp OR scl) AND " + "authorized_access_point", + "dc.creator": "contribution.role:(" + "abr OR act OR adi OR adp OR aft OR anm OR ann OR apl OR arr OR ato OR " + "auc OR aui OR bkd OR bnd OR brd OR brl OR bsl OR cas OR clr OR clt OR " + "cmm OR cnd OR cng OR cns OR col OR cor OR cou OR cre OR crt OR csl OR " + "cst OR ctb OR ctg OR ctr OR cur OR cwt OR dfd OR dgg OR dgs OR dnc OR " + "dnr OR dpt OR drm OR dst OR dte OR dto OR dub OR edm OR edt OR egr OR " + "etr OR exp OR fac OR fds OR fmd OR fmo OR fmp OR his OR hnr OR hst OR " + "ill OR ilu OR ins OR isb OR itr OR jud OR jug OR lgd OR ltg OR med OR " + "mfr OR mod OR msd OR mtk OR mus OR nrt OR orm OR osp OR oth OR own OR " + "pan OR pat OR pbd OR pbl OR plt OR ppm OR ppt OR pre OR prf OR prm OR " + "prn OR pro OR prs OR prt OR ptf OR rcd OR rce OR rcp OR rdd OR res OR " + "rpc OR rsr OR sds OR sgd OR sll OR sng OR spk OR spn OR srv OR stl OR " + "tch OR tld OR tlp OR trc OR trl OR vac OR vdg OR wac OR wal OR wat OR " + "win OR wpr OR wst) AND " + "authorized_access_point", + "dc.date": 'provisionActivity.type:"bf:Publication" ' + "AND provisionActivity.startDate", + "dc.title": "title.\\*", # TOTO: description search also in: note.label, dissertation.label and # supplementaryContent.discription - 'dc.description': 'summary.label', - 'dc.language': 'language.value', - 'dc.publisher': 'provisionActivity.type:"bf:Publication" AND ' - 'provisionActivity.statement.type:"bf:Agent" AND ' - 'provisionActivity.statement.label.value', - 'dc.type': 'type.main_type', - 'dc.subtype': 'type.subtype', - 'dc.identifier': 'identified_by.value', + "dc.description": "summary.label", + "dc.language": "language.value", + "dc.publisher": 'provisionActivity.type:"bf:Publication" AND ' + 
'provisionActivity.statement.type:"bf:Agent" AND ' + "provisionActivity.statement.label.value", + "dc.type": "type.main_type", + "dc.subtype": "type.subtype", + "dc.identifier": "identified_by.value", # TODO: relation search in: issuedWith, otherEdition, otherPhysicalFormat, # precededBy, relatedTo, succeededBy, supplement and supplementTo # 'dc.relation': '', @@ -98,10 +98,10 @@ # 'dc.format': '', # 'dc.rights': '', # 'dc.source': '', - 'dc.subject': 'subject.entity.authorized_access_point', - 'dc.organisation': 'organisation_pid', - 'dc.library': 'library_pid', - 'dc.location': 'holdings.location.pid' + "dc.subject": "subject.entity.authorized_access_point", + "dc.organisation": "organisation_pid", + "dc.library": "library_pid", + "dc.location": "holdings.location.pid", } # End of 'configurable' stuff @@ -110,18 +110,17 @@ class Diagnostic(Exception): """Diagnostic Exceptions.""" - code = 10 # default to generic broken query diagnostic - uri = 'info:srw/diagnostic/1/' - message = '' - details = '' + code = 10 # default to generic broken query diagnostic + uri = "info:srw/diagnostic/1/" + message = "" + details = "" xml_root = None def __str__(self): """String representation of the object.""" - return f'{self.uri}{self.code} [{self.message}]: {self.details}' + return f"{self.uri}{self.code} [{self.message}]: {self.details}" - def __init__(self, code=10, message='Malformed Query', details='', - query='???'): + def __init__(self, code=10, message="Malformed Query", details="", query="???"): """Constructor.""" self.code = code self.message = message @@ -136,28 +135,24 @@ def xml_str(self, pretty_print=True): def init_xml(self): """Init XML.""" - srw_ns = 'http://www.loc.gov/zing/srw/' - element_srw = ElementMaker( - namespace=srw_ns, - nsmap={'srw': srw_ns} - ) - srw_diag_ns = 'http://www.loc.gov/zing/srw/diagnostic/' + srw_ns = "http://www.loc.gov/zing/srw/" + element_srw = ElementMaker(namespace=srw_ns, nsmap={"srw": srw_ns}) + srw_diag_ns = "http://www.loc.gov/zing/srw/diagnostic/" element_srw_diag = ElementMaker( - namespace=srw_diag_ns, - nsmap={'diag': srw_diag_ns} + namespace=srw_diag_ns, nsmap={"diag": srw_diag_ns} ) self.xml_root = element_srw.searchRetrieveResponse() - self.xml_root.append(element_srw.version('1.1')) + self.xml_root.append(element_srw.version("1.1")) echoed_search_rr = element_srw.echoedSearchRetrieveRequest() - echoed_search_rr.append(element_srw.version('1.1')) + echoed_search_rr.append(element_srw.version("1.1")) echoed_search_rr.append(element_srw.query(self.query)) - echoed_search_rr.append(element_srw.recordPacking('xml')) + echoed_search_rr.append(element_srw.recordPacking("xml")) self.xml_root.append(echoed_search_rr) diagnostics = element_srw_diag.diagnostics() - diagnostics.append(element_srw_diag.uri(f'{self.uri}{self.code}')) + diagnostics.append(element_srw_diag.uri(f"{self.uri}{self.code}")) diagnostics.append(element_srw_diag.details(self.details)) - diagnostics.append(element_srw_diag.message(f'{self.message}')) + diagnostics.append(element_srw_diag.message(f"{self.message}")) self.xml_root.append(diagnostics) @@ -178,8 +173,9 @@ def __init__(self, query): def add_prefix(self, name, identifier): """Add prefix.""" - if self.error_on_duplicate_prefix and \ - (name in self.prefixes or name in RESERVED_PREFIXES): + if self.error_on_duplicate_prefix and ( + name in self.prefixes or name in RESERVED_PREFIXES + ): # Maybe error diag = Diagnostic() diag.code = 45 @@ -197,28 +193,20 @@ def resolve_prefix(self, name): return self.prefixes[name] if self.parent 
is not None: return self.parent.resolve_prefix(name) - if self.config is not None: - # Config is some sort of server config which specifies defaults - return self.config.resolve_prefix(name) - # Top of tree, no config, no resolution->Unknown indexset - # For client we need to allow no prefix? - # diag = Diagnostic15() - # diag.details = name - # diag.query = self.query - # raise diag - return None + return self.config.resolve_prefix(name) if self.config is not None else None class PrefixedObject: """Root object for index, relation, relationModifier.""" - prefix = '' - prefix_uri = '' - value = '' + prefix = "" + prefix_uri = "" + value = "" parent = None - def __init__(self, val, query, - error_on_quoted_identifier=ERROR_ON_QUOTED_IDENTIFIER): + def __init__( + self, val, query, error_on_quoted_identifier=ERROR_ON_QUOTED_IDENTIFIER + ): """Constructor.""" # All prefixed things are case insensitive self.error_on_quoted_identifier = error_on_quoted_identifier @@ -238,14 +226,12 @@ def __init__(self, val, query, def __str__(self): """String representation of the object.""" - if self.prefix: - return f'{self.prefix}.{self.value}' - return f'{self.value}' + return f"{self.prefix}.{self.value}" if self.prefix else f"{self.value}" def split_value(self): """Split value.""" find_point = self.value.find(".") - if self.value.count('.') > 1: + if self.value.count(".") > 1: diag = Diagnostic() diag.code = 15 diag.details = f'Multiple "." characters: {self.value}' @@ -254,12 +240,12 @@ def split_value(self): if find_point == 0: diag = Diagnostic() diag.code = 15 - diag.details = 'Null indexset' + diag.details = "Null indexset" diag.query = self.query raise diag if find_point >= 0: self.prefix = self.value[:find_point].lower() - self.value = self.value[find_point + 1:].lower() + self.value = self.value[find_point + 1 :].lower() def resolve_prefix(self): """Resolve prefix.""" @@ -271,7 +257,7 @@ def resolve_prefix(self): return self.prefix_uri -class ModifiableObject(): +class ModifiableObject: """Mofifiable object.""" # Treat modifiers as keys on boolean/relation? 
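The `resolve_prefix` rewrite in the hunk above is representative of the Sourcery refactorings throughout this patch: a tail of early returns collapses into a single conditional expression with identical semantics, including returning `None` when no fallback is available. A toy sketch of the before/after shape, using hypothetical names rather than this module's classes:

```python
# Toy illustration of the refactoring pattern; `table` and `fallback`
# are hypothetical stand-ins, not names from this module.

def resolve_before(table, name, fallback=None):
    # Original shape: explicit branch plus a bare return at the end.
    if name in table:
        return table[name]
    if fallback is not None:
        return fallback.get(name)
    return None


def resolve_after(table, name, fallback=None):
    # Refactored shape: the trailing if/return pair becomes one
    # conditional expression; both functions behave identically.
    if name in table:
        return table[name]
    return fallback.get(name) if fallback is not None else None


assert resolve_before({"a": 1}, "a") == resolve_after({"a": 1}, "a") == 1
assert resolve_before({}, "x") is resolve_after({}, "x") is None
```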
@@ -284,10 +270,14 @@ def __getitem__(self, key): return self.modifiers[key] except Exception: return None - for modifier in self.modifiers: - if (str(modifier.type) == key or modifier.type.value == key): - return modifier - return None + return next( + ( + modifier + for modifier in self.modifiers + if str(modifier.type) == key or modifier.type.value == key + ), + None, + ) class Triple(PrefixableObject): @@ -300,19 +290,17 @@ class Triple(PrefixableObject): def to_es(self): """Create the ES representation of the object.""" - txt = [] boolean = self.boolean.to_es() - if boolean == 'prox': + if boolean == "prox": diag = Diagnostic() diag.code = 37 - diag.message = 'Unsupported boolean operator' - diag.details = 'prox' + diag.message = "Unsupported boolean operator" + diag.details = "prox" diag.query = self.query raise diag - - txt.append(self.left_operand.to_es()) - if boolean == 'not': - txt.append('AND') + txt = [self.left_operand.to_es()] + if boolean == "not": + txt.append("AND") else: txt.append(boolean.upper()) txt.append(self.right_operand.to_es()) @@ -320,20 +308,19 @@ def to_es(self): if self.sort_keys: diag = Diagnostic() diag.code = 80 - diag.message = 'Sort not supported' + diag.message = "Sort not supported" diag.query = self.query raise diag # txt.append('sortBy') # for sort_key in self.sort_keys: # txt.append(sort_key.to_es()) - pre = 'NOT' if boolean == 'not' else '' + pre = "NOT" if boolean == "not" else "" return f'{pre}({" ".join(txt)})' def get_result_set_id(self, top=None): """Get result set id.""" - if FULL_RESULT_SET_NAME_CHECK == 0 or \ - self.boolean.value in ['not', 'prox']: - return '' + if FULL_RESULT_SET_NAME_CHECK == 0 or self.boolean.value in ["not", "prox"]: + return "" if top is None: top_level = 1 @@ -353,11 +340,7 @@ def get_result_set_id(self, top=None): rs_list.append(self.right_operand.get_result_set_id(top)) if top_level == 1: - # Check all elements are the same - # if so we're a fubar form of present - if len(rs_list) == rs_list.count(rs_list[0]): - return rs_list[0] - return '' + return rs_list[0] if len(rs_list) == rs_list.count(rs_list[0]) else "" return rs_list @@ -381,6 +364,7 @@ def __init__(self, ind, rel, term, query): def to_es(self): """Create the ES representation of the object.""" + def index_term(index, relation, term): """Clean term.""" from .explaine import Explain @@ -388,36 +372,36 @@ def index_term(index, relation, term): # try to map dc mappings index = ES_INDEX_MAPPINGS.get(index.lower(), index) # try to map es mappings - index = Explain('tmp').es_mappings.get(index, index) + index = Explain("tmp").es_mappings.get(index, index) # if relation in ORDER: # term = f'"{term}"' - if relation in ['=', 'all', 'any']: - relation = '' + if relation in ["=", "all", "any"]: + relation = "" if str(index) == SERVER_CHOISE_INDEX: - return f'{relation}{term}' - return f'{index}:{relation}{term}' + return f"{relation}{term}" + return f"{index}:{relation}{term}" index = self.index.to_es() relation = self.relation.to_es() - if relation == '<>': - text = index_term(index, '-', f'"{self.term.to_es()}"') + if relation == "<>": + text = index_term(index, "-", f'"{self.term.to_es()}"') elif relation in ORDER: text = index_term(index, relation, self.term.to_es()) else: texts = [] - for term in self.term.to_es().split(' '): + for term in self.term.to_es().split(" "): texts.append(index_term(index, relation, term)) if texts: - texts[0] = texts[0].replace('"', '') + texts[0] = texts[0].replace('"', "") texts[-1] = texts[-1].rstrip('"') - if relation == 
'any': + if relation == "any": text = f'({" OR ".join(texts)})' - elif relation == 'all': + elif relation == "all": text = f'({" AND ".join(texts)})' else: diag = Diagnostic() diag.code = 19 - diag.message = 'Unsupported relation' + diag.message = "Unsupported relation" diag.details = relation diag.query = self.query raise diag @@ -425,7 +409,7 @@ def index_term(index, relation, term): if self.sort_keys: diag = Diagnostic() diag.code = 80 - diag.message = 'Sort not supported' + diag.message = "Sort not supported" diag.query = self.query raise diag # text.append('sortBy') @@ -437,10 +421,12 @@ def get_result_set_id(self, top=None): """Get result set id.""" idx = self.index idx.resolve_prefix() - if idx.prefix_uri == RESERVED_PREFIXES['cql'] and \ - idx.value.lower() == 'resultsetid': + if ( + idx.prefix_uri == RESERVED_PREFIXES["cql"] + and idx.value.lower() == "resultsetid" + ): return self.term.value - return '' + return "" class Index(PrefixedObject, ModifiableObject): @@ -449,9 +435,9 @@ class Index(PrefixedObject, ModifiableObject): def __init__(self, val, query): """Constructor.""" PrefixedObject.__init__(self, val, query) - if self.value in ['(', ')'] + ORDER: + if self.value in ["(", ")"] + ORDER: diag = Diagnostic() - diag.message = 'Invalid characters in index name' + diag.message = "Invalid characters in index name" diag.details = self.value diag.query = self.query raise diag @@ -461,7 +447,7 @@ def to_es(self): if self.modifiers: diag = Diagnostic() diag.code = 21 - diag.message = 'Unsupported combination of relation modifers' + diag.message = "Unsupported combination of relation modifers" diag.details = self.modifiers diag.query = self.query raise diag @@ -473,7 +459,7 @@ class Relation(PrefixedObject, ModifiableObject): def __init__(self, rel, query, mods=[]): """Constructor.""" - self.prefix = 'cql' + self.prefix = "cql" PrefixedObject.__init__(self, rel, query) self.modifiers = mods for mod in mods: @@ -484,7 +470,7 @@ def to_es(self): if self.modifiers: diag = Diagnostic() diag.code = 21 - diag.message = 'Unsupported combination of relation modifers' + diag.message = "Unsupported combination of relation modifers" diag.details = self.modifiers diag.query = self.query raise diag @@ -494,29 +480,24 @@ def to_es(self): class Term: """Term.""" - value = '' + value = "" def __init__(self, value, query, error_on_empty_term=ERROR_ON_EMPTY_TERM): """Constructor.""" - if value != '': + if value != "": # Unquoted literal - if value in ['>=', '<=', '>', '<', '<>', '/', '=']: + if value in [">=", "<=", ">", "<", "<>", "/", "="]: diag = Diagnostic() diag.code = 25 diag.details = value diag.query = query raise diag - # Check existence of meaningful term - nonanchar = 0 - for char in value: - if char != '^': - nonanchar = 1 - break + nonanchar = next((1 for char in value if char != "^"), 0) if not nonanchar: diag = Diagnostic() diag.code = 32 - diag.details = 'Only anchoring charater(s) in term: ' + value + diag.details = f"Only anchoring charater(s) in term: {value}" diag.query = query raise diag @@ -527,20 +508,21 @@ def __init__(self, value, query, error_on_empty_term=ERROR_ON_EMPTY_TERM): # Check for badly placed \s startidx = 0 - idx = value.find('\\', startidx) + idx = value.find("\\", startidx) while idx > -1: - if len(value) < idx + 2 or \ - not value[idx + 1] in ['?', '\\', '*', '^']: + if len(value) < idx + 2 or value[idx + 1] not in [ + "?", + "\\", + "*", + "^", + ]: diag = Diagnostic() diag.code = 26 diag.details = value diag.query = query raise diag - if value[idx + 1] == 
'\\': - startidx = idx + 2 - else: - startidx = idx + 1 - idx = value.find('\\', startidx) + startidx = idx + 2 if value[idx + 1] == "\\" else idx + 1 + idx = value.find("\\", startidx) elif error_on_empty_term: diag = Diagnostic() diag.code = 27 @@ -550,7 +532,7 @@ def __init__(self, value, query, error_on_empty_term=ERROR_ON_EMPTY_TERM): def __str__(self): """String representation of the object.""" - return f'{self.value}' + return f"{self.value}" def to_es(self): """Create the ES representation of the object.""" @@ -560,7 +542,7 @@ def to_es(self): class Boolean(ModifiableObject): """Object to represent a CQL boolean.""" - value = '' + value = "" parent = None def __init__(self, bool_value, query, mods=[]): @@ -575,11 +557,11 @@ def to_es(self): if self.modifiers: diag = Diagnostic() diag.code = 21 - diag.message = 'Unsupported combination of relation modifers' + diag.message = "Unsupported combination of relation modifers" diag.details = self.modifiers diag.query = self.query raise diag - return f'{self.value}' + return f"{self.value}" def resolve_prefix(self, name): """Resolve prefix.""" @@ -591,7 +573,7 @@ class ModifierTypeType(PrefixedObject): # Same as index, but we'll XCQLify in ModifierClause parent = None - prefix = 'cql' + prefix = "cql" class ModifierClause: @@ -599,10 +581,10 @@ class ModifierClause: parent = None type = None - comparison = '' - value = '' + comparison = "" + value = "" - def __init__(self, modifier_type, comp='', val='', query=''): + def __init__(self, modifier_type, comp="", val="", query=""): """Constructor.""" self.type = ModifierType(modifier_type, query) self.type.parent = self @@ -612,8 +594,8 @@ def __init__(self, modifier_type, comp='', val='', query=''): def __str__(self): """String representation of the object.""" if self.value: - return f'{self.type}{self.comparison}{self.value}' - return f'{self.type}' + return f"{self.type}{self.comparison}{self.value}" + return f"{self.type}" def to_es(self): """Create the ES representation of the object.""" @@ -632,136 +614,138 @@ class CQLshlex(shlex): """Shlex with additions for CQL parsing.""" quotes = '"' - commenters = '' - next_token = '' + commenters = "" + next_token = "" def __init__(self, thing, query): """Constructor.""" shlex.__init__(self, thing) - self.wordchars += '!@#$%^&*-+{}[];,.?|~`:\\' + self.wordchars += "!@#$%^&*-+{}[];,.?|~`:\\" self.query = query def read_token(self): """Read a token from the input stream (no pushback or inclusions).""" while 1: - if self.next_token != '': + if self.next_token != "": self.token = self.next_token - self.next_token = '' + self.next_token = "" # Bah. 
SUPER ugly non portable - if self.token == '/': - self.state = ' ' + if self.token == "/": + self.state = " " break nextchar = self.instream.read(1) - if nextchar == '\n': + if nextchar == "\n": self.lineno = self.lineno + 1 if self.state is None: - self.token = '' # past end of file + self.token = "" # past end of file break - if self.state == ' ': + if self.state == " ": if not nextchar: self.state = None # end of file break if nextchar in self.whitespace: if self.token: - break # emit current token + break # emit current token continue if nextchar in self.commenters: self.instream.readline() self.lineno = self.lineno + 1 elif nextchar in self.wordchars: self.token = nextchar - self.state = 'a' + self.state = "a" elif nextchar in self.quotes: self.token = nextchar self.state = nextchar - elif nextchar in ['<', '>']: + elif nextchar in ["<", ">"]: self.token = nextchar - self.state = '<' + self.state = "<" else: self.token = nextchar if self.token: - break # emit current token + break # emit current token continue - elif self.state == '<': + elif self.state == "<": # Only accumulate <=, >= or <> - if self.token == '>' and nextchar == '=': + if self.token == ">" and nextchar == "=": self.token = self.token + nextchar - self.state = ' ' + self.state = " " break - if self.token == '<' and nextchar in ['>', '=']: + if self.token == "<" and nextchar in [">", "="]: self.token = self.token + nextchar - self.state = ' ' + self.state = " " break if not nextchar: self.state = None break - if nextchar == '/': - self.state = '/' - self.next_token = '/' + if nextchar == "/": + self.state = "/" + self.next_token = "/" break if nextchar in self.wordchars: - self.state = 'a' + self.state = "a" self.next_token = nextchar break if nextchar in self.quotes: self.state = nextchar self.next_token = nextchar break - self.state = ' ' + self.state = " " break elif self.state in self.quotes: self.token = self.token + nextchar # Allow escaped quotes - if nextchar == self.state and self.token[-2] != '\\': - self.state = ' ' + if nextchar == self.state and self.token[-2] != "\\": + self.state = " " break - if not nextchar: # end of file + if not nextchar: # end of file # Override SHLEX's ValueError to throw diagnostic diag = Diagnostic() diag.details = self.token[:-1] diag.query = self.query raise diag - elif self.state == 'a': + elif self.state == "a": if not nextchar: - self.state = None # end of file + self.state = None # end of file break if nextchar in self.whitespace: - self.state = ' ' + self.state = " " if self.token: - break # emit current token + break # emit current token continue if nextchar in self.commenters: self.instream.readline() self.lineno = self.lineno + 1 - elif ord(nextchar) > 126 or \ - nextchar in self.wordchars or \ - nextchar in self.quotes: + elif ( + ord(nextchar) > 126 + or nextchar in self.wordchars + or nextchar in self.quotes + ): self.token = self.token + nextchar - elif nextchar in ['>', '<']: + elif nextchar in [">", "<"]: self.next_token = nextchar - self.state = '<' + self.state = "<" break else: self.push_token(nextchar) # self.pushback = [nextchar] + self.pushback - self.state = ' ' + self.state = " " if self.token: - break # emit current token + break # emit current token continue result = self.token - self.token = '' + self.token = "" return result class CQLParser: """Token parser to create object structure for CQL.""" - parser = '' - current_token = '' - next_token = '' + parser = "" + current_token = "" + next_token = "" def __init__(self, parser): """Initialise with shlex 
parser.""" @@ -788,28 +772,24 @@ def fetch_token(self): def prefixes(self): """Create prefixes dictionary.""" prefs = {} - while self.current_token == '>': + while self.current_token == ">": # Strip off maps self.fetch_token() - identifier = [] - if self.next_token == '=': + if self.next_token == "=": # Named map name = self.current_token self.fetch_token() # = is current self.fetch_token() # id is current - identifier.append(self.current_token) else: - name = '' - identifier.append(self.current_token) + name = "" + identifier = [self.current_token] self.fetch_token() # URIs can have slashes, and may be unquoted (standard BNF checked) - while self.current_token == '/' or identifier[-1] == '/': + while self.current_token == "/" or identifier[-1] == "/": identifier.append(self.current_token) self.fetch_token() - identifier = ''.join(identifier) - if len(identifier) > 1 and \ - identifier[0] == '"' and \ - identifier[-1] == '"': + identifier = "".join(identifier) + if len(identifier) > 1 and identifier[0] == '"' and identifier[-1] == '"': identifier = identifier[1:-1] prefs[name.lower()] = identifier return prefs @@ -818,9 +798,7 @@ def query(self): """Parse query.""" prefs = self.prefixes() left = self.sub_query() - while 1: - if not self.current_token: - break + while 1 and self.current_token: if self.is_boolean(self.current_token): boolobject = self.boolean() right = self.sub_query() @@ -850,13 +828,10 @@ def sort_query(self): if not self.current_token: # trailing sort with no keys diag = Diagnostic() - diag.message = 'No sort keys supplied' + diag.message = "No sort keys supplied" diag.query = self.parser.query raise diag - while self.current_token: - # current is index name - if self.current_token == ')': - break + while self.current_token and self.current_token != ")": index = IndexerType(self.current_token, self.parser.query) self.fetch_token() index.modifiers = self.modifiers() @@ -875,47 +850,32 @@ def sub_query(self): diag.details = self.current_token diag.query = self.parser.query raise diag + elif prefs := self.prefixes(): + query = self.query() + for key, value in prefs.items(): + query.add_prefix(key, value) else: - prefs = self.prefixes() - if prefs: - query = self.query() - for key, value in prefs.items(): - query.add_prefix(key, value) - else: - query = self.clause() + query = self.clause() return query def clause(self): """Find searchClause.""" is_boolean = self.is_boolean(self.next_token) sort = self.is_sort(self.next_token) - if not sort and \ - not is_boolean and \ - not (self.next_token in [')', '(', '']): - index = IndexerType(self.current_token, self.parser.query) - self.fetch_token() # Skip Index - relation = self.relation() - if self.current_token == '': - diag = Diagnostic() - diag.details = 'Expected Term, got end of query.' 
- diag.query = self.parser.query - raise diag - term = TermType(self.current_token, self.parser.query) - self.fetch_token() # Skip Term - irt = RelatioSearchClauseType(index, relation, term, - self.parser.query) - - elif self.current_token and \ - (is_boolean or sort or self.next_token in [')', '']): + if not sort and not is_boolean and self.next_token not in [")", "(", ""]: + irt = self._extracted_from_clause_6() + elif self.current_token and ( + is_boolean or sort or self.next_token in [")", ""] + ): irt = RelatioSearchClauseType( IndexerType(SERVER_CHOISE_INDEX, self.parser.query), RelationType(SERVER_CHOISE_RELATION, self.parser.query), TermType(self.current_token, self.parser.query), - self.parser.query + self.parser.query, ) self.fetch_token() - elif self.current_token == '>': + elif self.current_token == ">": prefs = self.prefixes() clause = self.clause() for key, value in prefs.items(): @@ -925,11 +885,25 @@ def clause(self): else: diag = Diagnostic() token = self.current_token - diag.details = f'Expected Boolean or Relation but got: {token}' + diag.details = f"Expected Boolean or Relation but got: {token}" diag.query = self.parser.query raise diag return irt + # TODO Rename this here and in `clause` + def _extracted_from_clause_6(self): + index = IndexerType(self.current_token, self.parser.query) + self.fetch_token() # Skip Index + relation = self.relation() + if self.current_token == "": + diag = Diagnostic() + diag.details = "Expected Term, got end of query." + diag.query = self.parser.query + raise diag + term = TermType(self.current_token, self.parser.query) + self.fetch_token() # Skip Term + return RelatioSearchClauseType(index, relation, term, self.parser.query) + def modifiers(self): """Modifiers.""" mods = [] @@ -939,7 +913,7 @@ def modifiers(self): mod = mod.lower() if mod == MODIFIER_SEPERATOR: diag = Diagnostic() - diag.details = 'Null modifier' + diag.details = "Null modifier" diag.query = self.parser.query raise diag self.fetch_token() @@ -949,8 +923,8 @@ def modifiers(self): value = self.current_token self.fetch_token() else: - comp = '' - value = '' + comp = "" + value = "" mods.append(ModifierClause(mod, comp, value, self.parser.query)) return mods @@ -989,11 +963,11 @@ def parse(query): lexer = CQLshlex(query_io_string, query) parser = CQLParser(lexer) query = parser.query() - if parser.current_token != '': + if parser.current_token != "": diag = Diagnostic() diag.code = 10 current_token = repr(parser.current_token) - diag.details = f'Unprocessed tokens remain: {current_token}' + diag.details = f"Unprocessed tokens remain: {current_token}" diag.query = query_orig raise diag del lexer diff --git a/rero_ils/modules/sru/explaine.py b/rero_ils/modules/sru/explaine.py index d7a62564ee..b7da453a38 100644 --- a/rero_ils/modules/sru/explaine.py +++ b/rero_ils/modules/sru/explaine.py @@ -22,33 +22,38 @@ http://www.loc.gov/standards/sru/explain/ """ + +import contextlib + import jsonref from flask import current_app from invenio_search import current_search from lxml import etree from lxml.builder import ElementMaker -from .cql_parser import ES_INDEX_MAPPINGS from ..utils import get_schema_for_resource +from .cql_parser import ES_INDEX_MAPPINGS -class Explain(): +class Explain: """SRU explain class.""" - def __init__(self, database, doc_type='doc'): + def __init__(self, database, doc_type="doc"): """Constructor.""" self.database = database self.number_of_records = current_app.config.get( - 'RERO_SRU_NUMBER_OF_RECORDS', 100) - self.maximum_records = current_app.config.get( - 
'RERO_SRU_MAXIMUM_RECORDS', 1000) + "RERO_SRU_NUMBER_OF_RECORDS", 100 + ) + self.maximum_records = current_app.config.get("RERO_SRU_MAXIMUM_RECORDS", 1000) self.doc_type = doc_type - self.index = current_app.config.get( - 'RECORDS_REST_ENDPOINTS', {} - ).get(doc_type, {}).get('search_index') + self.index = ( + current_app.config.get("RECORDS_REST_ENDPOINTS", {}) + .get(doc_type, {}) + .get("search_index") + ) self.es_mappings = {} for index in self.get_es_mappings(self.index): - self.es_mappings[index.lower().replace('.', '__')] = index + self.es_mappings[index.lower().replace(".", "__")] = index self.init_xml() def __str__(self): @@ -60,61 +65,51 @@ def get_properties(self, data): keys = [] for key, value in data.items(): if isinstance(value, dict): - properties = value.get('properties') - if properties: + if properties := value.get("properties"): sub_keys = self.get_properties(properties) for sub_key in sub_keys: - if '.' in sub_key and sub_key[0] != '$': - keys.append('.'.join([key, sub_key])) - elif properties[sub_key].get('index', True): - keys.append('.'.join([key, sub_key])) - elif key[0] != '$': + if "." in sub_key and sub_key[0] != "$": + keys.append(".".join([key, sub_key])) + elif properties[sub_key].get("index", True): + keys.append(".".join([key, sub_key])) + elif key[0] != "$": keys.append(key) return keys def get_es_mappings(self, index): """Get mappings from ES.""" mappings = {} - try: + with contextlib.suppress(Exception): index_alias = current_search.aliases.get(index) index_file_name = list(index_alias.values())[0] data = jsonref.load(open(index_file_name)) - mappings = self.get_properties( - data.get('mappings').get('properties')) - except Exception: - pass + mappings = self.get_properties(data.get("mappings").get("properties")) return mappings def init_xml(self): """Init XML.""" - sru_ns = 'http://www.loc.gov/standards/sru/' - element_sru = ElementMaker( - namespace=sru_ns, - nsmap={'sru': sru_ns} - ) + sru_ns = "http://www.loc.gov/standards/sru/" + element_sru = ElementMaker(namespace=sru_ns, nsmap={"sru": sru_ns}) zr_ns = "http://explain.z3950.org/dtd/2.1/" - element_zr = ElementMaker( - namespace=zr_ns, - nsmap={'zr': zr_ns} - ) + element_zr = ElementMaker(namespace=zr_ns, nsmap={"zr": zr_ns}) self.xml_root = element_sru.explainResponse() - self.xml_root.append(element_sru.version('1.1')) + self.xml_root.append(element_sru.version("1.1")) record = element_sru.record() - record.append(element_sru.recordPacking('xml')) - record.append(element_sru.recordSchema( - 'http://explain.z3950.org/dtd/2.1/')) + record.append(element_sru.recordPacking("xml")) + record.append(element_sru.recordSchema("http://explain.z3950.org/dtd/2.1/")) record_data = element_sru.recordData() explain = element_zr.explain() - server_info = element_zr.serverInfo({ - 'protocol': 'SRU', - 'version': '1.1', - 'transport': current_app.config.get('RERO_ILS_APP_URL_SCHEME'), - 'method': 'GET' - }) - server_info.append(element_zr.host( - current_app.config.get('RERO_ILS_APP_HOST'))) + server_info = element_zr.serverInfo( + { + "protocol": "SRU", + "version": "1.1", + "transport": current_app.config.get("RERO_ILS_APP_URL_SCHEME"), + "method": "GET", + } + ) + server_info.append(element_zr.host(current_app.config.get("RERO_ILS_APP_HOST"))) # server_info.append(element_zr.port('5000')) server_info.append(element_zr.database(self.database)) explain.append(server_info) @@ -138,24 +133,20 @@ def init_xml(self): def init_index_info_dc(self): """Init index info for DC.""" - dc_ns = 
'info:srw/cql-context-set/1/dc-v1.1' - element_dc = ElementMaker( - namespace=dc_ns, - nsmap={'dc': dc_ns} - ) + dc_ns = "info:srw/cql-context-set/1/dc-v1.1" + element_dc = ElementMaker(namespace=dc_ns, nsmap={"dc": dc_ns}) index = element_dc.index() dc_map = element_dc.map() for dc_index in ES_INDEX_MAPPINGS: - dc_map.append(element_dc.name(dc_index.replace('dc.', ''))) + dc_map.append(element_dc.name(dc_index.replace("dc.", ""))) index.append(dc_map) return index def init_index_info(self): """Init index info.""" - rero_ils_ns = get_schema_for_resource('doc') + rero_ils_ns = get_schema_for_resource("doc") element_rero_ils = ElementMaker( - namespace=rero_ils_ns, - nsmap={'rero-ils': rero_ils_ns} + namespace=rero_ils_ns, nsmap={"rero-ils": rero_ils_ns} ) index = element_rero_ils.index() es_map = element_rero_ils.map() @@ -168,23 +159,18 @@ def init_schema_info(self, element): """Init schema info.""" schema = element.schemaInfo() # Todo: documents -> doc - schema.append(element.set( - { - 'name': 'json', - 'identifier': get_schema_for_resource('doc') - } - )) + schema.append( + element.set({"name": "json", "identifier": get_schema_for_resource("doc")}) + ) return schema def init_config_info(self, element): """Init config info.""" config = element.configInfo() - config.append(element.default( - str(self.number_of_records), - {'type': 'numberOfRecords'} - )) - config.append(element.setting( - str(self.maximum_records), - {'type': 'maximumRecords'} - )) + config.append( + element.default(str(self.number_of_records), {"type": "numberOfRecords"}) + ) + config.append( + element.setting(str(self.maximum_records), {"type": "maximumRecords"}) + ) return config diff --git a/rero_ils/modules/sru/views.py b/rero_ils/modules/sru/views.py index a42bc9a385..5461e3822e 100644 --- a/rero_ils/modules/sru/views.py +++ b/rero_ils/modules/sru/views.py @@ -25,12 +25,15 @@ from werkzeug.exceptions import HTTPException from werkzeug.wrappers import Response -from .cql_parser import Diagnostic, parse -from .explaine import Explain from ..documents.api import DocumentsSearch, document_id_fetcher -from ..documents.serializers import json_doc_search, xml_dc_search, \ - xml_marcxmlsru_search +from ..documents.serializers import ( + json_doc_search, + xml_dc_search, + xml_marcxmlsru_search, +) from ..utils import strip_chars +from .cql_parser import Diagnostic, parse +from .explaine import Explain class SRUDocumentsSearch(ContentNegotiatedMethodView): @@ -42,80 +45,77 @@ def __init__(self, **kwargs): """Init.""" super().__init__( method_serializers={ - 'GET': { - 'application/xml': xml_marcxmlsru_search, - 'application/xml+dc': xml_dc_search, - 'application/rero+json': json_doc_search, + "GET": { + "application/xml": xml_marcxmlsru_search, + "application/xml+dc": xml_dc_search, + "application/rero+json": json_doc_search, } }, serializers_query_aliases={ - 'marcxmlsru': 'application/xml', - 'dc': 'application/xml+dc', - 'json': 'application/rero+json' + "marcxmlsru": "application/xml", + "dc": "application/xml+dc", + "json": "application/rero+json", }, - default_method_media_type={ - 'GET': 'application/xml' - }, - default_media_type='application/xml', + default_method_media_type={"GET": "application/xml"}, + default_media_type="application/xml", **kwargs ) def get(self, **kwargs): """Implement the GET /sru/documents.""" - operation = flask_request.args.get('operation', None) - query = flask_request.args.get('query', None) - version = flask_request.args.get('version', '1.1') - start_record = 
max(int(flask_request.args.get('startRecord', 1)), 1) + operation = flask_request.args.get("operation", None) + query = flask_request.args.get("query", None) + version = flask_request.args.get("version", "1.1") + start_record = max(int(flask_request.args.get("startRecord", 1)), 1) maximum_records = min( - int(flask_request.args.get( - 'maximumRecords', - current_app.config.get('RERO_SRU_NUMBER_OF_RECORDS', 100) - )), - current_app.config.get('RERO_SRU_MAXIMUM_RECORDS', 1000) + int( + flask_request.args.get( + "maximumRecords", + current_app.config.get("RERO_SRU_NUMBER_OF_RECORDS", 100), + ) + ), + current_app.config.get("RERO_SRU_MAXIMUM_RECORDS", 1000), ) # TODO: enable es query string # query_string = flask_request.args.get('q', None) - if operation == 'searchRetrieve' and query: # or query_string: + if operation == "searchRetrieve" and query: # or query_string: try: query_string = parse(query).to_es() except Diagnostic as err: response = Response(err.xml_str()) - response.headers['content-type'] = 'application/xml' + response.headers["content-type"] = "application/xml" raise HTTPException(response=response) - search = DocumentsSearch().query( - 'query_string', query=query_string) + search = DocumentsSearch().query("query_string", query=query_string) records = [] - search = search[(start_record-1):maximum_records+(start_record-1)] + search = search[(start_record - 1) : maximum_records + (start_record - 1)] for hit in search.execute(): - records.append({ - '_id': hit.meta.id, - '_index': hit.meta.index, - '_source': hit.to_dict(), - '_version': 0 - }) + records.append( + { + "_id": hit.meta.id, + "_index": hit.meta.index, + "_source": hit.to_dict(), + "_version": 0, + } + ) result = { - 'hits': { - 'hits': records, - 'total': { - 'value': search.count(), - 'relation': 'eq' + "hits": { + "hits": records, + "total": {"value": search.count(), "relation": "eq"}, + "sru": { + "query": strip_chars(query), + "query_es": query_string, + "start_record": start_record, + "maximum_records": maximum_records, }, - 'sru': { - 'query': strip_chars(query), - 'query_es': query_string, - 'start_record': start_record, - 'maximum_records': maximum_records - } } } return self.make_response( - pid_fetcher=document_id_fetcher, - search_result=result + pid_fetcher=document_id_fetcher, search_result=result ) - explain = Explain('api/sru/documents') + explain = Explain("api/sru/documents") response = Response(str(explain)) - response.headers['content-type'] = 'application/xml' + response.headers["content-type"] = "application/xml" raise HTTPException(response=response) diff --git a/rero_ils/modules/stats/api/api.py b/rero_ils/modules/stats/api/api.py index 29f8c8b2ee..2f66a12feb 100644 --- a/rero_ils/modules/stats/api/api.py +++ b/rero_ils/modules/stats/api/api.py @@ -31,9 +31,7 @@ # provider StatProvider = type( - 'StatProvider', - (Provider,), - dict(identifier=StatIdentifier, pid_type='stat') + "StatProvider", (Provider,), dict(identifier=StatIdentifier, pid_type="stat") ) # minter stat_id_minter = partial(id_minter, provider=StatProvider) @@ -47,9 +45,9 @@ class StatsSearch(IlsRecordsSearch): class Meta: """Search only on stats index.""" - index = 'stats' + index = "stats" doc_types = None - fields = ('*',) + fields = ("*",) facets = {} default_filter = None @@ -63,9 +61,7 @@ class Stat(IlsRecord): provider = StatProvider model_cls = StatMetadata - _extensions = [ - StatisticsDumperExtension() - ] + _extensions = [StatisticsDumperExtension()] def update(self, data, commit=True, dbcommit=False, reindex=False): 
"""Update data for record.""" @@ -75,7 +71,7 @@ def update(self, data, commit=True, dbcommit=False, reindex=False): @property def organisation_pid(self): """Get organisation pid from the config for report.""" - if ref := self.get('config', {}).get('organisation', {}).get('$ref'): + if ref := self.get("config", {}).get("organisation", {}).get("$ref"): return extracted_data_from_ref(ref) @@ -89,4 +85,4 @@ def bulk_index(self, record_id_iterator): :param record_id_iterator: Iterator yielding record UUIDs. """ - super().bulk_index(record_id_iterator, doc_type='stat') + super().bulk_index(record_id_iterator, doc_type="stat") diff --git a/rero_ils/modules/stats/api/indicators/base.py b/rero_ils/modules/stats/api/indicators/base.py index e65d931b4d..9f5669b9d2 100644 --- a/rero_ils/modules/stats/api/indicators/base.py +++ b/rero_ils/modules/stats/api/indicators/base.py @@ -30,7 +30,7 @@ def __init__(self, report_cfg): :param report_cfg: StatsReport - the given report configuration """ self.cfg = report_cfg - self.label_na_msg = 'not available' + self.label_na_msg = "not available" @property @abstractmethod diff --git a/rero_ils/modules/stats/api/indicators/circulation.py b/rero_ils/modules/stats/api/indicators/circulation.py index 48e9c3f1b1..ffadbc1aef 100644 --- a/rero_ils/modules/stats/api/indicators/circulation.py +++ b/rero_ils/modules/stats/api/indicators/circulation.py @@ -45,19 +45,25 @@ def query(self): :returns: an elasticsearch query object """ - es_query = LoanOperationLogsSearch()[:0]\ - .filter('terms', loan__item__library_pid=self.cfg.lib_pids)\ - .filter('term', record__type='loan')\ - .filter('term', loan__trigger=self.trigger) + es_query = ( + LoanOperationLogsSearch()[:0] + .filter("terms", loan__item__library_pid=self.cfg.lib_pids) + .filter("term", record__type="loan") + .filter("term", loan__trigger=self.trigger) + ) if period := self.cfg.period: - es_query = es_query.filter( - 'range', date=self.cfg.get_range_period(period)) + es_query = es_query.filter("range", date=self.cfg.get_range_period(period)) if lib_pids := self.cfg.filter_by_libraries: loc_pids = [ - hit.pid for hit in LocationsSearch().filter( - "terms", library__pid=lib_pids).source('pid').scan()] + hit.pid + for hit in LocationsSearch() + .filter("terms", library__pid=lib_pids) + .source("pid") + .scan() + ] es_query = es_query.filter( - 'terms', loan__transaction_location__pid=loc_pids) + "terms", loan__transaction_location__pid=loc_pids + ) return es_query def aggregation(self, distribution): @@ -67,63 +73,42 @@ def aggregation(self, distribution): :returns: an elasticsearch aggregation object """ cfg = { - 'transaction_location': A( - 'terms', - field='loan.transaction_location.pid', - size=self.cfg.aggs_size + "transaction_location": A( + "terms", field="loan.transaction_location.pid", size=self.cfg.aggs_size ), - 'transaction_month': A( - 'date_histogram', - field='date', - calendar_interval='month', - format='yyyy-MM' + "transaction_month": A( + "date_histogram", + field="date", + calendar_interval="month", + format="yyyy-MM", ), - 'transaction_year': A( - 'date_histogram', - field='date', - calendar_interval='year', - format='yyyy' + "transaction_year": A( + "date_histogram", field="date", calendar_interval="year", format="yyyy" ), - 'patron_type': A( - 'terms', - field='loan.patron.type', - size=self.cfg.aggs_size + "patron_type": A( + "terms", field="loan.patron.type", size=self.cfg.aggs_size ), - 'patron_age': A( - 'terms', - field='loan.patron.age', - size=self.cfg.aggs_size + "patron_age": 
A("terms", field="loan.patron.age", size=self.cfg.aggs_size), + "patron_type": A( + "terms", field="loan.patron.type", size=self.cfg.aggs_size ), - 'patron_type': A( - 'terms', - field='loan.patron.type', - size=self.cfg.aggs_size + "patron_postal_code": A( + "terms", field="loan.patron.postal_code", size=self.cfg.aggs_size ), - 'patron_postal_code': A( - 'terms', - field='loan.patron.postal_code', - size=self.cfg.aggs_size + "document_type": A( + "terms", field="loan.item.document.type", size=self.cfg.aggs_size ), - 'document_type': A( - 'terms', - field='loan.item.document.type', - size=self.cfg.aggs_size + "transaction_channel": A( + "terms", field="loan.transaction_channel", size=self.cfg.aggs_size ), - 'transaction_channel': A( - 'terms', - field='loan.transaction_channel', - size=self.cfg.aggs_size + "owning_library": A( + "terms", field="loan.item.library_pid", size=self.cfg.aggs_size ), - 'owning_library': A( - 'terms', - field='loan.item.library_pid', - size=self.cfg.aggs_size + "owning_location": A( + "terms", + field="loan.item.holding.location_name.raw", + size=self.cfg.aggs_size, ), - 'owning_location': A( - 'terms', - field='loan.item.holding.location_name.raw', - size=self.cfg.aggs_size - ) } return cfg[distribution] @@ -136,19 +121,17 @@ def label(self, distribution, bucket): :rtype: str """ cfg = { - 'transaction_location': lambda: - f'{self.cfg.locations.get(bucket.key, self.label_na_msg)} ' - f'({bucket.key})', - 'transaction_month': lambda: bucket.key_as_string, - 'transaction_year': lambda: bucket.key_as_string, - 'patron_type': lambda: bucket.key, - 'patron_age': lambda: bucket.key, - 'document_type': lambda: bucket.key, - 'patron_postal_code': lambda: bucket.key, - 'transaction_channel': lambda: bucket.key, - 'owning_library': lambda: - f'{self.cfg.libraries.get(bucket.key, self.label_na_msg)} ' - f'({bucket.key})', - 'owning_location': lambda: bucket.key, + "transaction_location": lambda: f"{self.cfg.locations.get(bucket.key, self.label_na_msg)} " + f"({bucket.key})", + "transaction_month": lambda: bucket.key_as_string, + "transaction_year": lambda: bucket.key_as_string, + "patron_type": lambda: bucket.key, + "patron_age": lambda: bucket.key, + "document_type": lambda: bucket.key, + "patron_postal_code": lambda: bucket.key, + "transaction_channel": lambda: bucket.key, + "owning_library": lambda: f"{self.cfg.libraries.get(bucket.key, self.label_na_msg)} " + f"({bucket.key})", + "owning_location": lambda: bucket.key, } return cfg[distribution]() diff --git a/rero_ils/modules/stats/api/indicators/others.py b/rero_ils/modules/stats/api/indicators/others.py index 87d03b0946..73048acd6f 100644 --- a/rero_ils/modules/stats/api/indicators/others.py +++ b/rero_ils/modules/stats/api/indicators/others.py @@ -41,12 +41,10 @@ def query(self): :returns: an elasticsearch query object """ es_query = DocumentsSearch()[:0].filter( - 'term', - organisation_pid=self.cfg.org_pid + "term", organisation_pid=self.cfg.org_pid ) if pids := self.cfg.filter_by_libraries: - es_query = es_query.filter( - 'terms', library_pid=pids) + es_query = es_query.filter("terms", library_pid=pids) return es_query def aggregation(self, distribution): @@ -56,30 +54,29 @@ def aggregation(self, distribution): :returns: an elasticsearch aggregation object """ cfg = { - 'owning_library': A( - 'terms', - field='holdings.organisation.library_pid', + "owning_library": A( + "terms", + field="holdings.organisation.library_pid", size=self.cfg.aggs_size, - include=self.cfg.lib_pids + include=self.cfg.lib_pids, + ), + 
"created_month": A( + "date_histogram", + field="_created", + calendar_interval="month", + format="yyyy-MM", ), - 'created_month': A( - 'date_histogram', - field='_created', - calendar_interval='month', - format='yyyy-MM' + "created_year": A( + "date_histogram", + field="_created", + calendar_interval="year", + format="yyyy", ), - 'created_year': A( - 'date_histogram', field='_created', - calendar_interval='year', format='yyyy' + "imported": A( + "filters", + other_bucket_key="not imported", + filters={"imported": {"exists": {"field": "adminMetadata.source"}}}, ), - 'imported': A( - 'filters', other_bucket_key="not imported", - filters={ - 'imported': { - 'exists': {'field': 'adminMetadata.source'} - } - } - ) } return cfg[distribution] @@ -92,12 +89,11 @@ def label(self, distribution, bucket): :rtype: str """ cfg = { - 'owning_library': lambda: - f'{self.cfg.libraries.get(bucket.key, self.label_na_msg)} ' - f'({bucket.key})', - 'created_month': lambda: bucket.key_as_string, - 'created_year': lambda: bucket.key_as_string, - 'imported': lambda: bucket + "owning_library": lambda: f"{self.cfg.libraries.get(bucket.key, self.label_na_msg)} " + f"({bucket.key})", + "created_month": lambda: bucket.key_as_string, + "created_year": lambda: bucket.key_as_string, + "imported": lambda: bucket, } return cfg[distribution]() @@ -111,12 +107,13 @@ def query(self): :returns: an elasticsearch query object """ - es_query = HoldingsSearch()[:0]\ - .filter('term', holdings_type='serial')\ - .filter('term', organisation__pid=self.cfg.org_pid) + es_query = ( + HoldingsSearch()[:0] + .filter("term", holdings_type="serial") + .filter("term", organisation__pid=self.cfg.org_pid) + ) if pids := self.cfg.filter_by_libraries: - es_query = es_query.filter( - 'terms', library__pid=pids) + es_query = es_query.filter("terms", library__pid=pids) return es_query def aggregation(self, distribution): @@ -126,24 +123,24 @@ def aggregation(self, distribution): :returns: an elasticsearch aggregation object """ cfg = { - 'owning_library': A( - 'terms', - field='library.pid', + "owning_library": A( + "terms", + field="library.pid", size=self.cfg.aggs_size, - include=self.cfg.lib_pids + include=self.cfg.lib_pids, ), - 'created_month': A( - 'date_histogram', - field='_created', - calendar_interval='month', - format='yyyy-MM' + "created_month": A( + "date_histogram", + field="_created", + calendar_interval="month", + format="yyyy-MM", + ), + "created_year": A( + "date_histogram", + field="_created", + calendar_interval="year", + format="yyyy", ), - 'created_year': A( - 'date_histogram', - field='_created', - calendar_interval='year', - format='yyyy' - ) } return cfg[distribution] @@ -156,11 +153,10 @@ def label(self, distribution, bucket): :rtype: str """ cfg = { - 'owning_library': lambda: - f'{self.cfg.libraries.get(bucket.key, self.label_na_msg)} ' - f'({bucket.key})', - 'created_month': lambda: bucket.key_as_string, - 'created_year': lambda: bucket.key_as_string + "owning_library": lambda: f"{self.cfg.libraries.get(bucket.key, self.label_na_msg)} " + f"({bucket.key})", + "created_month": lambda: bucket.key_as_string, + "created_year": lambda: bucket.key_as_string, } return cfg[distribution]() @@ -174,11 +170,9 @@ def query(self): :returns: an elasticsearch query object """ - es_query = ItemsSearch()[:0].filter( - 'term', organisation__pid=self.cfg.org_pid) + es_query = ItemsSearch()[:0].filter("term", organisation__pid=self.cfg.org_pid) if pids := self.cfg.filter_by_libraries: - es_query = es_query.filter( - 'terms', 
library__pid=pids) + es_query = es_query.filter("terms", library__pid=pids) return es_query def aggregation(self, distribution): @@ -188,45 +182,39 @@ def aggregation(self, distribution): :returns: an elasticsearch aggregation object """ cfg = { - 'owning_library': A( - 'terms', - field='library.pid', + "owning_library": A( + "terms", + field="library.pid", size=self.cfg.aggs_size, - include=self.cfg.lib_pids + include=self.cfg.lib_pids, ), - 'owning_location': A( - 'terms', - field='location.pid', + "owning_location": A( + "terms", + field="location.pid", size=self.cfg.aggs_size, - include=self.cfg.loc_pids + include=self.cfg.loc_pids, ), - 'type': A( - 'terms', - field='type', - size=self.cfg.aggs_size + "type": A("terms", field="type", size=self.cfg.aggs_size), + "document_type": A( + "terms", + field="document.document_type.main_type", + size=self.cfg.aggs_size, ), - 'document_type': A( - 'terms', - field='document.document_type.main_type', - size=self.cfg.aggs_size + "document_subtype": A( + "terms", field="document.document_type.subtype", size=self.cfg.aggs_size ), - 'document_subtype': A( - 'terms', - field='document.document_type.subtype', - size=self.cfg.aggs_size + "created_month": A( + "date_histogram", + field="_created", + calendar_interval="month", + format="yyyy-MM", ), - 'created_month': A( - 'date_histogram', - field='_created', - calendar_interval='month', - format='yyyy-MM' + "created_year": A( + "date_histogram", + field="_created", + calendar_interval="year", + format="yyyy", ), - 'created_year': A( - 'date_histogram', - field='_created', - calendar_interval='year', - format='yyyy' - ) } return cfg[distribution] @@ -239,17 +227,15 @@ def label(self, distribution, bucket): :rtype: str """ cfg = { - 'owning_library': lambda: - f'{self.cfg.libraries.get(bucket.key, self.label_na_msg)} ' - f'({bucket.key})', - 'owning_location': lambda: - f'{self.cfg.locations.get(bucket.key, self.label_na_msg)} ' - f'({bucket.key})', - 'type': lambda: bucket.key, - 'document_type': lambda: bucket.key, - 'document_subtype': lambda: bucket.key, - 'created_month': lambda: bucket.key_as_string, - 'created_year': lambda: bucket.key_as_string + "owning_library": lambda: f"{self.cfg.libraries.get(bucket.key, self.label_na_msg)} " + f"({bucket.key})", + "owning_location": lambda: f"{self.cfg.locations.get(bucket.key, self.label_na_msg)} " + f"({bucket.key})", + "type": lambda: bucket.key, + "document_type": lambda: bucket.key, + "document_subtype": lambda: bucket.key, + "created_month": lambda: bucket.key_as_string, + "created_year": lambda: bucket.key_as_string, } return cfg[distribution]() @@ -273,12 +259,10 @@ def query(self): .filter("term", operation="delete") ) if period := self.cfg.period: - es_query = es_query.filter( - "range", date=self.cfg.get_range_period(period)) + es_query = es_query.filter("range", date=self.cfg.get_range_period(period)) if pids := self.cfg.filter_by_libraries: es_query = es_query.filter( - Q("terms", record__library_pid=pids) - | Q("terms", library__value=pids) + Q("terms", record__library_pid=pids) | Q("terms", library__value=pids) ) return es_query @@ -289,15 +273,11 @@ def aggregation(self, distribution): :returns: an elasticsearch aggregation object """ cfg = { - 'owning_library': A( - 'terms', - field='record.library_pid', - size=self.cfg.aggs_size + "owning_library": A( + "terms", field="record.library_pid", size=self.cfg.aggs_size ), "operator_library": A( - "terms", - field="library.value", - size=self.cfg.aggs_size + "terms", field="library.value", 
size=self.cfg.aggs_size ), "action_month": A( "date_histogram", @@ -306,10 +286,7 @@ def aggregation(self, distribution): format="yyyy-MM", ), "action_year": A( - "date_histogram", - field="date", - calendar_interval="year", - format="yyyy" + "date_histogram", field="date", calendar_interval="year", format="yyyy" ), } return cfg[distribution] @@ -323,12 +300,10 @@ def label(self, distribution, bucket): :rtype: str """ cfg = { - "owning_library": lambda: - f"{self.cfg.libraries.get(bucket.key, self.label_na_msg)} " - f"({bucket.key})", - "operator_library": lambda: - f"{self.cfg.libraries.get(bucket.key, self.label_na_msg)} " - f"({bucket.key})", + "owning_library": lambda: f"{self.cfg.libraries.get(bucket.key, self.label_na_msg)} " + f"({bucket.key})", + "operator_library": lambda: f"{self.cfg.libraries.get(bucket.key, self.label_na_msg)} " + f"({bucket.key})", "action_month": lambda: bucket.key_as_string, "action_year": lambda: bucket.key_as_string, } @@ -345,13 +320,14 @@ def query(self): :returns: an elasticsearch query object """ es_query = ILLRequestsSearch()[:0].filter( - 'term', organisation__pid=self.cfg.org_pid) + "term", organisation__pid=self.cfg.org_pid + ) if period := self.cfg.period: es_query = es_query.filter( - 'range', _created=self.cfg.get_range_period(period)) + "range", _created=self.cfg.get_range_period(period) + ) if pids := self.cfg.filter_by_libraries: - es_query = es_query.filter( - 'terms', library__pid=pids) + es_query = es_query.filter("terms", library__pid=pids) return es_query def aggregation(self, distribution): @@ -361,28 +337,22 @@ def aggregation(self, distribution): :returns: an elasticsearch aggregation object """ cfg = { - 'pickup_location': A( - 'terms', - field='pickup_location.pid', - size=self.cfg.aggs_size + "pickup_location": A( + "terms", field="pickup_location.pid", size=self.cfg.aggs_size ), - 'status': A( - 'terms', - field='status', - size=self.cfg.aggs_size + "status": A("terms", field="status", size=self.cfg.aggs_size), + "created_month": A( + "date_histogram", + field="_created", + calendar_interval="month", + format="yyyy-MM", ), - 'created_month': A( - 'date_histogram', - field='_created', - calendar_interval='month', - format='yyyy-MM' + "created_year": A( + "date_histogram", + field="_created", + calendar_interval="year", + format="yyyy", ), - 'created_year': A( - 'date_histogram', - field='_created', - calendar_interval='year', - format='yyyy' - ) } return cfg[distribution] @@ -395,11 +365,10 @@ def label(self, distribution, bucket): :rtype: str """ cfg = { - 'pickup_location': lambda: - f'{self.cfg.locations.get(bucket.key, self.label_na_msg)} ' - f'({bucket.key})', - 'status': lambda: bucket.key, - 'created_month': lambda: bucket.key_as_string, - 'created_year': lambda: bucket.key_as_string + "pickup_location": lambda: f"{self.cfg.locations.get(bucket.key, self.label_na_msg)} " + f"({bucket.key})", + "status": lambda: bucket.key, + "created_month": lambda: bucket.key_as_string, + "created_year": lambda: bucket.key_as_string, } return cfg[distribution]() diff --git a/rero_ils/modules/stats/api/indicators/patron.py b/rero_ils/modules/stats/api/indicators/patron.py index 434d3dd3f9..69d9afcadf 100644 --- a/rero_ils/modules/stats/api/indicators/patron.py +++ b/rero_ils/modules/stats/api/indicators/patron.py @@ -41,9 +41,7 @@ def query(self): :returns: an elasticsearch query object """ - es_query = PatronsSearch()[:0].filter( - 'term', organisation__pid=self.cfg.org_pid) - return es_query + return PatronsSearch()[:0].filter("term", 
organisation__pid=self.cfg.org_pid) def aggregation(self, distribution): """Elasticsearch Aggregation configuration to compute distributions. @@ -52,44 +50,28 @@ def aggregation(self, distribution): :returns: an elasticsearch aggregation object """ cfg = { - 'created_month': A( - 'date_histogram', - field='_created', - calendar_interval='month', - format='yyyy-MM' + "created_month": A( + "date_histogram", + field="_created", + calendar_interval="month", + format="yyyy-MM", ), - 'created_year': A( - 'date_histogram', - field='_created', - calendar_interval='year', - format='yyyy' + "created_year": A( + "date_histogram", + field="_created", + calendar_interval="year", + format="yyyy", ), - 'birth_year': A( - 'date_histogram', - field='birth_date', - calendar_interval='year', - format='yyyy' + "birth_year": A( + "date_histogram", + field="birth_date", + calendar_interval="year", + format="yyyy", ), - 'postal_code': A( - 'terms', - field='postal_code', - size=self.cfg.aggs_size - ), - 'gender': A( - 'terms', - field='gender', - size=self.cfg.aggs_size - ), - 'role': A( - 'terms', - field='roles', - size=self.cfg.aggs_size - ), - 'type': A( - 'terms', - field='patron.type.pid', - size=self.cfg.aggs_size - ) + "postal_code": A("terms", field="postal_code", size=self.cfg.aggs_size), + "gender": A("terms", field="gender", size=self.cfg.aggs_size), + "role": A("terms", field="roles", size=self.cfg.aggs_size), + "type": A("terms", field="patron.type.pid", size=self.cfg.aggs_size), } return cfg[distribution] @@ -102,15 +84,14 @@ def label(self, distribution, bucket): :rtype: str """ cfg = { - 'created_month': lambda: bucket.key_as_string, - 'created_year': lambda: bucket.key_as_string, - 'type': lambda: - f'{self.cfg.patron_types.get(bucket.key, self.label_na_msg)} ' - f'({bucket.key})', - 'birth_year': lambda: bucket.key_as_string, - 'gender': lambda: bucket.key, - 'postal_code': lambda: bucket.key, - 'role': lambda: bucket.key, + "created_month": lambda: bucket.key_as_string, + "created_year": lambda: bucket.key_as_string, + "type": lambda: f"{self.cfg.patron_types.get(bucket.key, self.label_na_msg)} " + f"({bucket.key})", + "birth_year": lambda: bucket.key_as_string, + "gender": lambda: bucket.key, + "postal_code": lambda: bucket.key, + "role": lambda: bucket.key, } return cfg[distribution]() @@ -126,35 +107,40 @@ def query(self): """ es_query = super().query range_period = self.cfg.get_range_period(self.cfg.period) - op_query = LoanOperationLogsSearch()[:0].source()\ + op_query = ( + LoanOperationLogsSearch()[:0] + .source() .get_logs_by_trigger( triggers=[ ItemCirculationAction.EXTEND, ItemCirculationAction.REQUEST, ItemCirculationAction.CHECKIN, - ItemCirculationAction.CHECKOUT + ItemCirculationAction.CHECKOUT, ], - date_range=range_period)\ - .filter( - 'terms', loan__item__library_pid=self.cfg.lib_pids) + date_range=range_period, + ) + .filter("terms", loan__item__library_pid=self.cfg.lib_pids) + ) if lib_pids := self.cfg.filter_by_libraries: loc_pids = [ - hit.pid for hit in LocationsSearch().filter( - "terms", library__pid=lib_pids).source('pid').scan()] + hit.pid + for hit in LocationsSearch() + .filter("terms", library__pid=lib_pids) + .source("pid") + .scan() + ] op_query = op_query.filter( - 'terms', loan__transaction_location__pid=loc_pids) - op_query.aggs.bucket('hashed_pid', A( - 'terms', - field='loan.patron.hashed_pid', - size=100000 - )) + "terms", loan__transaction_location__pid=loc_pids + ) + op_query.aggs.bucket( + "hashed_pid", A("terms", field="loan.patron.hashed_pid", 
size=100000) + ) results = op_query.execute() convert = { - hashlib.md5(f'{i}'.encode()).hexdigest(): i + hashlib.md5(f"{i}".encode()).hexdigest(): i for i in range(1, PatronIdentifier.max() + 1) } active_patron_pids = [ - convert[v.key] for v in - results.aggregations.hashed_pid.buckets + convert[v.key] for v in results.aggregations.hashed_pid.buckets ] - return es_query.filter('terms', pid=active_patron_pids) + return es_query.filter("terms", pid=active_patron_pids) diff --git a/rero_ils/modules/stats/api/indicators/requests.py b/rero_ils/modules/stats/api/indicators/requests.py index 1716ef6bca..32e201c47c 100644 --- a/rero_ils/modules/stats/api/indicators/requests.py +++ b/rero_ils/modules/stats/api/indicators/requests.py @@ -34,10 +34,8 @@ def aggregation(self, distribution): :returns: an elasticsearch aggregation object """ cfg = { - 'pickup_location': A( - 'terms', - field='loan.pickup_location.pid', - size=self.cfg.aggs_size + "pickup_location": A( + "terms", field="loan.pickup_location.pid", size=self.cfg.aggs_size ) } if agg := cfg.get(distribution): @@ -53,9 +51,8 @@ def label(self, distribution, bucket): :rtype: str """ cfg = { - 'pickup_location': lambda: - f'{self.cfg.locations.get(bucket.key, self.label_na_msg)} ' - f'({bucket.key})' + "pickup_location": lambda: f"{self.cfg.locations.get(bucket.key, self.label_na_msg)} " + f"({bucket.key})" } if label_fn := cfg.get(distribution): return label_fn() diff --git a/rero_ils/modules/stats/api/librarian.py b/rero_ils/modules/stats/api/librarian.py index fdb7cd064b..53a875b1ad 100644 --- a/rero_ils/modules/stats/api/librarian.py +++ b/rero_ils/modules/stats/api/librarian.py @@ -53,9 +53,9 @@ def __init__(self, to_date=None): to_date = arrow.Arrow.fromdatetime(to_date) to_date = to_date or arrow.utcnow() - relativedelta(days=1) # Get statistics per month - _from = f'{to_date.year}-{to_date.month:02d}-01T00:00:00' - _to = to_date.format(fmt='YYYY-MM-DDT23:59:59') - self.date_range = {'gte': _from, 'lte': _to} + _from = f"{to_date.year}-{to_date.month:02d}-01T00:00:00" + _to = to_date.format(fmt="YYYY-MM-DDT23:59:59") + self.date_range = {"gte": _from, "lte": _to} def _get_locations_code_name(self, location_pids): """Location code and name. @@ -64,13 +64,12 @@ def _get_locations_code_name(self, location_pids): :return: concatenated code and name of location :rtype: string """ - location_search = LocationsSearch()\ - .filter('terms', pid=location_pids)\ - .source(['code', 'name', 'pid']) - res = {} - for hit in location_search.scan(): - res[hit.pid] = f'{hit.code} - {hit.name}' - return res + location_search = ( + LocationsSearch() + .filter("terms", pid=location_pids) + .source(["code", "name", "pid"]) + ) + return {hit.pid: f"{hit.code} - {hit.name}" for hit in location_search.scan()} def process(self, library): """Process statistics for a give library. @@ -79,29 +78,29 @@ def process(self, library): :return: a dict containing all the processed values. 
""" return { - 'checkouts_for_transaction_library': - self.checkouts_for_transaction_library(library.pid), - 'checkouts_for_owning_library': - self.checkouts_for_owning_library(library.pid), - 'active_patrons_by_postal_code': - self.active_patrons_by_postal_code(library.pid), - 'new_active_patrons_by_postal_code': - self.active_patrons_by_postal_code( - library.pid, new_patrons=True), - 'new_documents': - self.new_documents(library.pid), - 'new_items': - self.number_of_new_items(library.pid), - 'renewals': - self.renewals(library.pid), - 'validated_requests': - self.validated_requests(library.pid), - 'items_by_document_type_and_subtype': - self.items_by_document_type_and_subtype(library.pid), - 'new_items_by_location': - self.new_items_by_location(library.pid), - 'loans_of_transaction_library_by_item_location': - self.loans_of_transaction_library_by_item_location(library.pid) + "checkouts_for_transaction_library": self.checkouts_for_transaction_library( + library.pid + ), + "checkouts_for_owning_library": self.checkouts_for_owning_library( + library.pid + ), + "active_patrons_by_postal_code": self.active_patrons_by_postal_code( + library.pid + ), + "new_active_patrons_by_postal_code": self.active_patrons_by_postal_code( + library.pid, new_patrons=True + ), + "new_documents": self.new_documents(library.pid), + "new_items": self.number_of_new_items(library.pid), + "renewals": self.renewals(library.pid), + "validated_requests": self.validated_requests(library.pid), + "items_by_document_type_and_subtype": self.items_by_document_type_and_subtype( + library.pid + ), + "new_items_by_location": self.new_items_by_location(library.pid), + "loans_of_transaction_library_by_item_location": self.loans_of_transaction_library_by_item_location( + library.pid + ), } def checkouts_for_transaction_library(self, library_pid): @@ -114,11 +113,14 @@ def checkouts_for_transaction_library(self, library_pid): :rtype: integer """ location_pids = LocationsSearch().location_pids(library_pid) - return LoanOperationLogsSearch().get_logs_by_trigger( - triggers=[ItemCirculationAction.CHECKOUT], - date_range=self.date_range - ).filter('terms', loan__transaction_location__pid=location_pids)\ + return ( + LoanOperationLogsSearch() + .get_logs_by_trigger( + triggers=[ItemCirculationAction.CHECKOUT], date_range=self.date_range + ) + .filter("terms", loan__transaction_location__pid=location_pids) .count() + ) def checkouts_for_owning_library(self, library_pid): """Number of circulation operation during the specified timeframe. @@ -129,11 +131,14 @@ def checkouts_for_owning_library(self, library_pid): :return: the number of matched circulation operation :rtype: integer """ - return LoanOperationLogsSearch().get_logs_by_trigger( - triggers=[ItemCirculationAction.CHECKOUT], - date_range=self.date_range - ).filter('term', loan__item__library_pid=library_pid)\ + return ( + LoanOperationLogsSearch() + .get_logs_by_trigger( + triggers=[ItemCirculationAction.CHECKOUT], date_range=self.date_range + ) + .filter("term", loan__item__library_pid=library_pid) .count() + ) def active_patrons_by_postal_code(self, library_pid, new_patrons=False): """Number of circulation operation during the specified timeframe. 
@@ -147,31 +152,40 @@ def active_patrons_by_postal_code(self, library_pid, new_patrons=False): """ location_pids = LocationsSearch().location_pids(library_pid) - search = LoanOperationLogsSearch().get_logs_by_trigger( + search = ( + LoanOperationLogsSearch() + .get_logs_by_trigger( triggers=[ ItemCirculationAction.REQUEST, ItemCirculationAction.CHECKIN, - ItemCirculationAction.CHECKOUT - ], date_range=self.date_range - ).filter('terms', loan__transaction_location__pid=location_pids) + ItemCirculationAction.CHECKOUT, + ], + date_range=self.date_range, + ) + .filter("terms", loan__transaction_location__pid=location_pids) + ) if new_patrons: # Get new patrons in date range and hash the pids - search_patron = PatronsSearch()\ - .filter("range", _created=self.date_range)\ - .source('pid').scan() + search_patron = ( + PatronsSearch() + .filter("range", _created=self.date_range) + .source("pid") + .scan() + ) new_patron_hashed_pids = set() for p in search_patron: hashed_pid = hashlib.md5(p.pid.encode()).hexdigest() new_patron_hashed_pids.add(hashed_pid) search = search.filter( - 'terms', loan__patron__hashed_pid=list(new_patron_hashed_pids)) + "terms", loan__patron__hashed_pid=list(new_patron_hashed_pids) + ) stats = {} patron_pids = set() # Main postal code from user profile for s in search.scan(): patron_pid = s.loan.patron.hashed_pid - postal_code = 'unknown' - if 'postal_code' in s.loan.patron: + postal_code = "unknown" + if "postal_code" in s.loan.patron: postal_code = s.loan.patron.postal_code stats.setdefault(postal_code, 0) @@ -187,12 +201,14 @@ def new_documents(self, library_pid): :return: the number of matched documents :rtype: integer """ - return RecordsSearch(index=OperationLog.index_name)\ - .filter('range', date=self.date_range)\ - .filter('term', record__type='doc')\ - .filter('term', operation='create')\ - .filter('term', library__value=library_pid)\ + return ( + RecordsSearch(index=OperationLog.index_name) + .filter("range", date=self.date_range) + .filter("term", record__type="doc") + .filter("term", operation="create") + .filter("term", library__value=library_pid) .count() + ) def renewals(self, library_pid): """Number of items with loan extended. @@ -202,10 +218,14 @@ def renewals(self, library_pid): :return: the number of matched documents :rtype: integer """ - return LoanOperationLogsSearch().get_logs_by_trigger( - triggers=[ItemCirculationAction.EXTEND], - date_range=self.date_range - ).filter('term', loan__item__library_pid=library_pid).count() + return ( + LoanOperationLogsSearch() + .get_logs_by_trigger( + triggers=[ItemCirculationAction.EXTEND], date_range=self.date_range + ) + .filter("term", loan__item__library_pid=library_pid) + .count() + ) def validated_requests(self, library_pid): """Number of validated requests. @@ -216,10 +236,14 @@ def validated_requests(self, library_pid): :return: the number of matched documents :rtype: integer """ - return LoanOperationLogsSearch().get_logs_by_trigger( - triggers=['validate_request'], - date_range=self.date_range - ).filter('term', library__value=library_pid).count() + return ( + LoanOperationLogsSearch() + .get_logs_by_trigger( + triggers=["validate_request"], date_range=self.date_range + ) + .filter("term", library__value=library_pid) + .count() + ) def new_items_by_location(self, library_pid): """Number of new items per library by location. 
@@ -232,20 +256,20 @@ def new_items_by_location(self, library_pid): """ location_pids = LocationsSearch().location_pids(library_pid) - search = ItemsSearch()[:0]\ - .filter('range', _created=self.date_range)\ - .filter('term', library__pid=library_pid)\ - .source('location.pid') - search.aggs.bucket('location_pid', 'terms', field='location.pid', - size=10000) + search = ( + ItemsSearch()[:0] + .filter("range", _created=self.date_range) + .filter("term", library__pid=library_pid) + .source("location.pid") + ) + search.aggs.bucket("location_pid", "terms", field="location.pid", size=10000) res = search.execute() - stats = {} - location_pids = [ - bucket.key for bucket in res.aggregations.location_pid.buckets] + location_pids = [bucket.key for bucket in res.aggregations.location_pid.buckets] location_names = self._get_locations_code_name(location_pids) - for bucket in res.aggregations.location_pid.buckets: - stats[location_names[bucket.key]] = bucket.doc_count - return stats + return { + location_names[bucket.key]: bucket.doc_count + for bucket in res.aggregations.location_pid.buckets + } def items_by_document_type_and_subtype(self, library_pid): """Number of items per library by document type and sub-type. @@ -256,16 +280,18 @@ def items_by_document_type_and_subtype(self, library_pid): :return: the number of matched documents :rtype: dict """ - search = ItemsSearch()[:0]\ - .filter('range', _created={'lte': self.date_range['lte']})\ - .filter('term', library__pid=library_pid)\ - .source('document.document_type') - search.aggs\ - .bucket('main_type', 'terms', - field='document.document_type.main_type', size=10000) - search.aggs\ - .bucket('subtype', 'terms', - field='document.document_type.subtype', size=10000) + search = ( + ItemsSearch()[:0] + .filter("range", _created={"lte": self.date_range["lte"]}) + .filter("term", library__pid=library_pid) + .source("document.document_type") + ) + search.aggs.bucket( + "main_type", "terms", field="document.document_type.main_type", size=10000 + ) + search.aggs.bucket( + "subtype", "terms", field="document.document_type.subtype", size=10000 + ) res = search.execute() stats = { bucket.key: bucket.doc_count @@ -286,27 +312,39 @@ def loans_of_transaction_library_by_item_location(self, library_pid): """ location_pids = LocationsSearch().location_pids(library_pid) - search = LoanOperationLogsSearch().get_logs_by_trigger( + search = ( + LoanOperationLogsSearch() + .get_logs_by_trigger( triggers=[ ItemCirculationAction.CHECKIN, - ItemCirculationAction.CHECKOUT - ], date_range=self.date_range - ).filter('terms', loan__transaction_location__pid=location_pids)\ - .source('loan').scan() + ItemCirculationAction.CHECKOUT, + ], + date_range=self.date_range, + ) + .filter("terms", loan__transaction_location__pid=location_pids) + .source("loan") + .scan() + ) stats = {} - libraries_map = {lib.pid: lib.name for lib in LibrariesSearch().source( - ['pid', 'name', 'organisation']).scan()} + libraries_map = { + lib.pid: lib.name + for lib in LibrariesSearch().source(["pid", "name", "organisation"]).scan() + } for s in search: item_library_pid = s.loan.item.library_pid item_library_name = libraries_map[item_library_pid] location_name = s.loan.item.holding.location_name - key = f'{item_library_pid}: {item_library_name} - {location_name}' - stats.setdefault(key, { - # TODO: to be removed as it is already in the key - 'location_name': location_name, - ItemCirculationAction.CHECKIN: 0, - ItemCirculationAction.CHECKOUT: 0}) + key = f"{item_library_pid}: {item_library_name} - 
{location_name}" + stats.setdefault( + key, + { + # TODO: to be removed as it is already in the key + "location_name": location_name, + ItemCirculationAction.CHECKIN: 0, + ItemCirculationAction.CHECKOUT: 0, + }, + ) stats[key][s.loan.trigger] += 1 return stats diff --git a/rero_ils/modules/stats/api/pricing.py b/rero_ils/modules/stats/api/pricing.py index cf7bf3acbc..ae6d297a0d 100644 --- a/rero_ils/modules/stats/api/pricing.py +++ b/rero_ils/modules/stats/api/pricing.py @@ -26,8 +26,7 @@ from invenio_access.permissions import system_identity from invenio_search.api import RecordsSearch -from rero_ils.modules.acquisition.acq_order_lines.api import \ - AcqOrderLinesSearch +from rero_ils.modules.acquisition.acq_order_lines.api import AcqOrderLinesSearch from rero_ils.modules.documents.api import DocumentsSearch from rero_ils.modules.ill_requests.api import ILLRequestsSearch from rero_ils.modules.ill_requests.models import ILLRequestStatus @@ -53,12 +52,13 @@ def __init__(self, to_date=None): to_date = arrow.Arrow.fromdatetime(to_date) to_date = to_date or arrow.utcnow() - relativedelta(days=1) self.months_delta = current_app.config.get( - 'RERO_ILS_STATS_BILLING_TIMEFRAME_IN_MONTHS' + "RERO_ILS_STATS_BILLING_TIMEFRAME_IN_MONTHS" ) - _from = (to_date - relativedelta( - months=self.months_delta)).format(fmt='YYYY-MM-DDT00:00:00') - _to = to_date.format(fmt='YYYY-MM-DDT23:59:59') - self.date_range = {'gte': _from, 'lte': _to} + _from = (to_date - relativedelta(months=self.months_delta)).format( + fmt="YYYY-MM-DDT00:00:00" + ) + _to = to_date.format(fmt="YYYY-MM-DDT23:59:59") + self.date_range = {"gte": _from, "lte": _to} @classmethod def get_stat_pid(cls, type, date_range): @@ -67,19 +67,18 @@ def get_stat_pid(cls, type, date_range): :param type: type of statistics :param date_range: statistics time interval """ - _from = date_range['from'] - _to = date_range['to'] - search = StatsSearch()\ - .filter("term", type=type)\ - .scan() - - stat_pid = list() - for s in search: - if 'date_range' in s and\ - 'from' in s.date_range and 'to' in s.date_range: - if s.date_range['from'] == _from and s.date_range['to'] == _to: - stat_pid.append(s.pid) - if stat_pid: + _from = date_range["from"] + _to = date_range["to"] + search = StatsSearch().filter("term", type=type).scan() + + if stat_pid := [ + s.pid + for s in search + if "date_range" in s + and "from" in s.date_range + and "to" in s.date_range + and (s.date_range["from"] == _from and s.date_range["to"] == _to) + ]: assert len(stat_pid) == 1 return stat_pid[0] return @@ -87,14 +86,8 @@ def get_stat_pid(cls, type, date_range): def collect(self): """Collect all the statistics.""" stats = [] - for lib in LibrariesSearch().source( - ['pid', 'name', 'organisation']).scan(): - data = { - 'library': { - 'pid': lib.pid, - 'name': lib.name - } - } + for lib in LibrariesSearch().source(["pid", "name", "organisation"]).scan(): + data = {"library": {"pid": lib.pid, "name": lib.name}} data |= self.process(lib) stats.append(data) return stats @@ -106,40 +99,34 @@ def process(self, library): :return: a dict containing all the processed values. 
""" return { - 'number_of_documents': self.number_of_documents(library.pid), - 'number_of_libraries': self.number_of_libraries( - library.organisation.pid), - 'number_of_librarians': self.number_of_librarians(library.pid), - 'number_of_active_patrons': self.number_of_active_patrons( - library.pid), - 'number_of_order_lines': self.number_of_order_lines(library.pid), - 'number_of_checkouts': - self.number_of_circ_operations( - library.pid, ItemCirculationAction.CHECKOUT), - 'number_of_renewals': - self.number_of_circ_operations( - library.pid, ItemCirculationAction.EXTEND), - 'number_of_ill_requests': - self.number_of_ill_requests( - library.pid, [ILLRequestStatus.DENIED]), - 'number_of_items': self.number_of_items(library.pid), - 'number_of_new_items': self.number_of_new_items(library.pid), - 'number_of_deleted_items': self.number_of_deleted_items( - library.pid), - 'number_of_patrons': self.number_of_patrons( - library.organisation.pid), - 'number_of_new_patrons': self.number_of_patrons( - library.organisation.pid), - 'number_of_checkins': - self.number_of_circ_operations( - library.pid, ItemCirculationAction.CHECKIN), - 'number_of_requests': - self.number_of_circ_operations( - library.pid, ItemCirculationAction.REQUEST), - 'number_of_docs_with_files': self.number_of_docs_with_files( - library.pid), - 'number_of_files': self.number_of_files(library.pid), - 'files_volume': self.files_volume(library.pid) + "number_of_documents": self.number_of_documents(library.pid), + "number_of_libraries": self.number_of_libraries(library.organisation.pid), + "number_of_librarians": self.number_of_librarians(library.pid), + "number_of_active_patrons": self.number_of_active_patrons(library.pid), + "number_of_order_lines": self.number_of_order_lines(library.pid), + "number_of_checkouts": self.number_of_circ_operations( + library.pid, ItemCirculationAction.CHECKOUT + ), + "number_of_renewals": self.number_of_circ_operations( + library.pid, ItemCirculationAction.EXTEND + ), + "number_of_ill_requests": self.number_of_ill_requests( + library.pid, [ILLRequestStatus.DENIED] + ), + "number_of_items": self.number_of_items(library.pid), + "number_of_new_items": self.number_of_new_items(library.pid), + "number_of_deleted_items": self.number_of_deleted_items(library.pid), + "number_of_patrons": self.number_of_patrons(library.organisation.pid), + "number_of_new_patrons": self.number_of_patrons(library.organisation.pid), + "number_of_checkins": self.number_of_circ_operations( + library.pid, ItemCirculationAction.CHECKIN + ), + "number_of_requests": self.number_of_circ_operations( + library.pid, ItemCirculationAction.REQUEST + ), + "number_of_docs_with_files": self.number_of_docs_with_files(library.pid), + "number_of_files": self.number_of_files(library.pid), + "files_volume": self.files_volume(library.pid), } def number_of_documents(self, library_pid): @@ -170,10 +157,12 @@ def number_of_librarians(self, library_pid): :return: the number of matched librarians :rtype: integer """ - return PatronsSearch()\ - .filter('terms', roles=UserRole.PROFESSIONAL_ROLES)\ - .filter('term', libraries__pid=library_pid)\ + return ( + PatronsSearch() + .filter("terms", roles=UserRole.PROFESSIONAL_ROLES) + .filter("term", libraries__pid=library_pid) .count() + ) def number_of_active_patrons(self, library_pid): """Number of patrons who did a transaction in the past 365 days. 
@@ -182,15 +171,17 @@ def number_of_active_patrons(self, library_pid): :return: the number of matched active patrons :rtype: integer """ - patrons = set() - op_logs_query = LoanOperationLogsSearch().get_logs_by_trigger( - triggers=[ - ItemCirculationAction.CHECKOUT, - ItemCirculationAction.EXTEND - ], date_range=self.date_range - ).filter('term', loan__item__library_pid=library_pid) - for res in op_logs_query.source(['loan']).scan(): - patrons.add(res.loan.patron.hashed_pid) + op_logs_query = ( + LoanOperationLogsSearch() + .get_logs_by_trigger( + triggers=[ItemCirculationAction.CHECKOUT, ItemCirculationAction.EXTEND], + date_range=self.date_range, + ) + .filter("term", loan__item__library_pid=library_pid) + ) + patrons = { + res.loan.patron.hashed_pid for res in op_logs_query.source(["loan"]).scan() + } return len(patrons) def number_of_order_lines(self, library_pid): @@ -200,9 +191,12 @@ def number_of_order_lines(self, library_pid): :return: the number of matched order lines :rtype: integer """ - return AcqOrderLinesSearch()\ - .filter('range', _created=self.date_range)\ - .filter('term', library__pid=library_pid).count() + return ( + AcqOrderLinesSearch() + .filter("range", _created=self.date_range) + .filter("term", library__pid=library_pid) + .count() + ) def number_of_circ_operations(self, library_pid, trigger): """Number of circulation operation during the specified timeframe. @@ -212,11 +206,12 @@ def number_of_circ_operations(self, library_pid, trigger): :return: the number of matched circulation operation :rtype: integer """ - return LoanOperationLogsSearch().get_logs_by_trigger( - triggers=[trigger], - date_range=self.date_range - ).filter('term', loan__item__library_pid=library_pid)\ + return ( + LoanOperationLogsSearch() + .get_logs_by_trigger(triggers=[trigger], date_range=self.date_range) + .filter("term", loan__item__library_pid=library_pid) .count() + ) def number_of_ill_requests(self, library_pid, exclude_status): """Number of existing ILL requests for a time range and a library. @@ -226,10 +221,12 @@ def number_of_ill_requests(self, library_pid, exclude_status): :return: the number of matched inter library loan requests :rtype: integer """ - query = ILLRequestsSearch()\ - .filter('range', _created=self.date_range)\ - .filter('term', library__pid=library_pid)\ - .exclude('terms', status=exclude_status) + query = ( + ILLRequestsSearch() + .filter("range", _created=self.date_range) + .filter("term", library__pid=library_pid) + .exclude("terms", status=exclude_status) + ) return query.count() # -------- optional ----------- @@ -242,8 +239,7 @@ def number_of_items(self, library_pid): :rtype: integer """ # can be done using the facet - return ItemsSearch().filter( - 'term', library__pid=library_pid).count() + return ItemsSearch().filter("term", library__pid=library_pid).count() def number_of_deleted_items(self, library_pid): """Number of deleted items during the specified timeframe. 
@@ -252,12 +248,14 @@
         :return: the number of matched deleted items
         :rtype: integer
         """
-        return RecordsSearch(index=OperationLog.index_name)\
-            .filter('range', date=self.date_range)\
-            .filter('term', operation='delete')\
-            .filter('term', record__type='item')\
-            .filter('term', library__value=library_pid)\
+        return (
+            RecordsSearch(index=OperationLog.index_name)
+            .filter("range", date=self.date_range)
+            .filter("term", operation="delete")
+            .filter("term", record__type="item")
+            .filter("term", library__value=library_pid)
             .count()
+        )
 
     def number_of_new_items(self, library_pid):
         """Number of newly created items during the specified timeframe.
@@ -267,9 +265,12 @@
         :rtype: integer
         """
         # can be done using the facet or operation logs
-        return ItemsSearch()\
-            .filter('range', _created=self.date_range)\
-            .filter('term', library__pid=library_pid).count()
+        return (
+            ItemsSearch()
+            .filter("range", _created=self.date_range)
+            .filter("term", library__pid=library_pid)
+            .count()
+        )
 
     def number_of_new_patrons(self, organisation_pid):
         """New patrons for an organisation during the specified timeframe.
 
         :param organisation_pid: pid of the organisation
         :return: the number of matched newly created patrons
         :rtype: integer
         """
-        return PatronsSearch()\
-            .filter('range', _created=self.date_range)\
-            .filter('term', organisation__pid=organisation_pid)\
+        return (
+            PatronsSearch()
+            .filter("range", _created=self.date_range)
+            .filter("term", organisation__pid=organisation_pid)
             .count()
+        )
 
     def number_of_patrons(self, organisation_pid):
-        """Number of users with a patron role.
+        """Number of users with a patron role.
 
         :param organisation_pid: pid of the organisation
         :return: the number of matched patrons
         :rtype: integer
         """
-        return PatronsSearch()\
-            .filter('term', roles='patron')\
-            .filter('term', organisation__pid=organisation_pid)\
+        return (
+            PatronsSearch()
+            .filter("term", roles="patron")
+            .filter("term", organisation__pid=organisation_pid)
             .count()
+        )
 
     def number_of_docs_with_files(self, library_pid):
         """Number of documents containing files belonging to a given library.
@@ -305,16 +310,18 @@
         :return: the number of matched documents
         :rtype: integer
         """
-        return DocumentsSearch().filter(
-            'term', files__library_pid=library_pid).count()
+        return DocumentsSearch().filter("term", files__library_pid=library_pid).count()
 
     def _get_record_file_query(self):
         """Get a record file query on the related index."""
         ext = current_app.extensions["rero-invenio-files"]
         record_service = ext.records_service
         return record_service.search_request(
-            system_identity, dict(size=1), record_service.record_cls,
-            record_service.config.search)
+            system_identity,
+            dict(size=1),
+            record_service.record_cls,
+            record_service.config.search,
+        )
 
     def number_of_files(self, library_pid):
         """Number of files linked to my library.
@@ -325,10 +332,8 @@ def number_of_files(self, library_pid): :rtype: integer """ es_query = self._get_record_file_query() - es_query = es_query.filter( - 'term', metadata__library__pid=library_pid) - es_query.aggs.metric( - 'number_of_files', 'sum', field="metadata.n_files") + es_query = es_query.filter("term", metadata__library__pid=library_pid) + es_query.aggs.metric("number_of_files", "sum", field="metadata.n_files") return int(es_query.execute().aggs.number_of_files.value) def files_volume(self, library_pid): @@ -340,8 +345,6 @@ def files_volume(self, library_pid): :rtype: str """ es_query = self._get_record_file_query() - es_query = es_query.filter( - 'term', metadata__library__pid=library_pid) - es_query.aggs.metric( - 'files_size', 'sum', field="metadata.file_size") - return "%.3f" % (es_query.execute().aggs.files_size.value/(1024*1024)) + es_query = es_query.filter("term", metadata__library__pid=library_pid) + es_query.aggs.metric("files_size", "sum", field="metadata.file_size") + return "%.3f" % (es_query.execute().aggs.files_size.value / (1024 * 1024)) diff --git a/rero_ils/modules/stats/api/report.py b/rero_ils/modules/stats/api/report.py index a85ad58327..e68643b700 100644 --- a/rero_ils/modules/stats/api/report.py +++ b/rero_ils/modules/stats/api/report.py @@ -28,12 +28,19 @@ from rero_ils.modules.stats_cfg.api import StatConfiguration from rero_ils.modules.utils import extracted_data_from_ref -from .indicators import NumberOfActivePatronsCfg, NumberOfCirculationCfg, \ - NumberOfDeletedItemsCfg, NumberOfDocumentsCfg, NumberOfILLRequests, \ - NumberOfItemsCfg, NumberOfPatronsCfg, NumberOfRequestsCfg, \ - NumberOfSerialHoldingsCfg from ..api.api import Stat from ..models import StatType +from .indicators import ( + NumberOfActivePatronsCfg, + NumberOfCirculationCfg, + NumberOfDeletedItemsCfg, + NumberOfDocumentsCfg, + NumberOfILLRequests, + NumberOfItemsCfg, + NumberOfPatronsCfg, + NumberOfRequestsCfg, + NumberOfSerialHoldingsCfg, +) class StatsReport: @@ -47,30 +54,40 @@ def __init__(self, config): if not isinstance(config, StatConfiguration): config = StatConfiguration(data=config) self.config = config - self.is_active = config.get('is_active', False) - self.indicator = config['category']['indicator']['type'] - self.period = config['category']['indicator'].get('period') - self.distributions = config[ - 'category']['indicator'].get('distributions', []) + self.is_active = config.get("is_active", False) + self.indicator = config["category"]["indicator"]["type"] + self.period = config["category"]["indicator"].get("period") + self.distributions = config["category"]["indicator"].get("distributions", []) self.org_pid = config.organisation_pid self.filter_by_libraries = [] - for library in config.get('filter_by_libraries', []): - self.filter_by_libraries.append(extracted_data_from_ref(library)) + self.filter_by_libraries.extend( + extracted_data_from_ref(library) + for library in config.get("filter_by_libraries", []) + ) self.libraries = { - hit.pid: hit.name for hit in LibrariesSearch().by_organisation_pid( - self.org_pid).source(['pid', 'name']).scan() + hit.pid: hit.name + for hit in LibrariesSearch() + .by_organisation_pid(self.org_pid) + .source(["pid", "name"]) + .scan() } self.lib_pids = list(self.libraries.keys()) - es_locations = LocationsSearch().by_organisation_pid( - self.org_pid).source(['pid', 'name', 'library']).scan() + es_locations = ( + LocationsSearch() + .by_organisation_pid(self.org_pid) + .source(["pid", "name", "library"]) + .scan() + ) self.locations = { - 
hit.pid: f'{self.libraries[hit.library.pid]} / {hit.name}' + hit.pid: f"{self.libraries[hit.library.pid]} / {hit.name}" for hit in es_locations } self.patron_types = { - hit.pid: hit.name for hit in - PatronTypesSearch().by_organisation_pid( - self.org_pid).source(['pid', 'name']).scan() + hit.pid: hit.name + for hit in PatronTypesSearch() + .by_organisation_pid(self.org_pid) + .source(["pid", "name"]) + .scan() } self.loc_pids = list(self.locations.keys()) @@ -84,19 +101,20 @@ def indicator_cfg(self): :rtype: IndicatorCfg instance. """ cfg = { - 'number_of_documents': NumberOfDocumentsCfg(self), - 'number_of_serial_holdings': NumberOfSerialHoldingsCfg(self), - 'number_of_items': NumberOfItemsCfg(self), - 'number_of_deleted_items': NumberOfDeletedItemsCfg(self), - 'number_of_ill_requests': NumberOfILLRequests(self), - 'number_of_checkins': NumberOfCirculationCfg(self, 'checkin'), - 'number_of_checkouts': NumberOfCirculationCfg(self, 'checkout'), - 'number_of_extends': NumberOfCirculationCfg(self, 'extend'), - 'number_of_requests': NumberOfRequestsCfg(self, 'request'), - 'number_of_validate_requests': NumberOfRequestsCfg( - self, 'validate_request'), - 'number_of_patrons': NumberOfPatronsCfg(self), - 'number_of_active_patrons': NumberOfActivePatronsCfg(self) + "number_of_documents": NumberOfDocumentsCfg(self), + "number_of_serial_holdings": NumberOfSerialHoldingsCfg(self), + "number_of_items": NumberOfItemsCfg(self), + "number_of_deleted_items": NumberOfDeletedItemsCfg(self), + "number_of_ill_requests": NumberOfILLRequests(self), + "number_of_checkins": NumberOfCirculationCfg(self, "checkin"), + "number_of_checkouts": NumberOfCirculationCfg(self, "checkout"), + "number_of_extends": NumberOfCirculationCfg(self, "extend"), + "number_of_requests": NumberOfRequestsCfg(self, "request"), + "number_of_validate_requests": NumberOfRequestsCfg( + self, "validate_request" + ), + "number_of_patrons": NumberOfPatronsCfg(self), + "number_of_active_patrons": NumberOfActivePatronsCfg(self), } return cfg[self.indicator] @@ -111,8 +129,7 @@ def _process_aggregations(self, es_results): if isinstance(dist1, str): key1 = dist1 parent_dist = es_results.aggs[distrib1].buckets[key1] - doc_count = \ - es_results.aggs[distrib1].buckets[key1].doc_count + doc_count = es_results.aggs[distrib1].buckets[key1].doc_count else: parent_dist = dist1 key1 = self.indicator_cfg.label(distrib1, dist1) @@ -132,7 +149,7 @@ def _process_aggregations(self, es_results): values[key2] = doc_count y_keys.add(key2) if values: - results[key1]['values'] = values + results[key1]["values"] = values y_keys = sorted(y_keys) x_keys = sorted(results.keys()) return self._process_distributions(x_keys, y_keys, results) @@ -141,16 +158,16 @@ def _process_distributions(self, x_keys, y_keys, results): """Process the elasticsearch aggregations results.""" data = [] if y_keys: - data.append([''] + y_keys) + data.append([""] + y_keys) for x_key in x_keys: values = [x_key] if y_keys: - for y_key in y_keys: - values.append( - results[x_key].get('values', {}).get(y_key, 0)) + values.extend( + results[x_key].get("values", {}).get(y_key, 0) for y_key in y_keys + ) else: - values.append(results[x_key].get('count', 0)) + values.append(results[x_key].get("count", 0)) data.append(values) return data @@ -182,22 +199,22 @@ def collect(self, force=False): def get_range_period(self, period): """Get the range period for elasticsearch date range aggs.""" - if period == 'month': + if period == "month": # now - 1 month previous_month = datetime.now() - 
relativedelta(months=1) # get last day of previous month using relativedelta with # day=31: this will add a max value of 31 days but stays # in the same month previous_month = previous_month + relativedelta(day=31) - month = '%02d' % previous_month.month - _from = f'{previous_month.year}-{month}-01T00:00:00' - _to = f'{previous_month.year}-{month}-{previous_month.day}' - _to = f'{_to}T23:59:59' + month = "%02d" % previous_month.month + _from = f"{previous_month.year}-{month}-01T00:00:00" + _to = f"{previous_month.year}-{month}-{previous_month.day}" + _to = f"{_to}T23:59:59" return dict(gte=_from, lte=_to) - elif period == 'year': + elif period == "year": previous_year = datetime.now().year - 1 - _from = f'{previous_year}-01-01T00:00:00' - _to = f'{previous_year}-12-31T23:59:59' + _from = f"{previous_year}-01-01T00:00:00" + _to = f"{previous_year}-12-31T23:59:59" return dict(gte=_from, lte=_to) def create_stat(self, values, dbcommit=True, reindex=True): @@ -209,14 +226,11 @@ def create_stat(self, values, dbcommit=True, reindex=True): :returns: the create report. """ data = dict( - type=StatType.REPORT, - config=self.config.dumps(), - values=[dict(results=values)] - ) + type=StatType.REPORT, + config=self.config.dumps(), + values=[dict(results=values)], + ) if self.period: - range = self.get_range_period(self.period) - data['date_range'] = { - 'from': range['gte'], - 'to': range['lte'] - } + date_range = self.get_range_period(self.period) + data["date_range"] = {"from": date_range["gte"], "to": date_range["lte"]} return Stat.create(data, dbcommit=dbcommit, reindex=reindex) diff --git a/rero_ils/modules/stats/cli.py b/rero_ils/modules/stats/cli.py index 60d3550d4f..4d834f18c0 100644 --- a/rero_ils/modules/stats/cli.py +++ b/rero_ils/modules/stats/cli.py @@ -43,7 +43,7 @@ def report(): @stats.command() -@click.argument('type') +@click.argument("type") @with_appcontext def dumps(type): """Dumps the current stats value. @@ -57,7 +57,7 @@ def dumps(type): @stats.command() -@click.argument('type') +@click.argument("type") @with_appcontext def collect(type): """Extract the stats values and store it. 
@@ -69,9 +69,9 @@ def collect(type):
     if type == StatType.BILLING:
         _stats = StatsForPricing(to_date=to_date)
     elif type == StatType.LIBRARIAN:
-        _from = f'{to_date.year}-{to_date.month:02d}-01T00:00:00'
-        _to = to_date.format(fmt='YYYY-MM-DDT23:59:59')
-        date_range = {'from': _from, 'to': _to}
+        _from = f"{to_date.year}-{to_date.month:02d}-01T00:00:00"
+        _to = to_date.format(fmt="YYYY-MM-DDT23:59:59")
+        date_range = {"from": _from, "to": _to}
         _stats = StatsForLibrarian(to_date=to_date)
     else:
         return
@@ -79,20 +79,23 @@ def collect(type):
     stats_values = _stats.collect()
     with current_app.app_context():
         stat = Stat.create(
-            dict(type=type, date_range=date_range,
-                 values=stats_values),
-            dbcommit=True, reindex=True)
+            dict(type=type, date_range=date_range, values=stats_values),
+            dbcommit=True,
+            reindex=True,
+        )
         click.secho(
             f'Statistics of type {stat["type"]}\
             have been collected and created.\
-            New pid: {stat.pid}', fg='green')
+            New pid: {stat.pid}',
+            fg="green",
+        )
 
 
 @stats.command()
-@click.argument('year', type=int)
-@click.argument('timespan', default='yearly')
-@click.option('--n_months', default=12)
-@click.option('-f', '--force', is_flag=True, default=False)
+@click.argument("year", type=int)
+@click.argument("timespan", default="yearly")
+@click.option("--n_months", default=12)
+@click.option("-f", "--force", is_flag=True, default=False)
 @with_appcontext
 def collect_year(year, timespan, n_months, force):
     """Extract the stats librarian for one year and store them in db.
@@ -105,22 +108,22 @@
     stat_pid = None
     type = StatType.LIBRARIAN
    if year:
-        if timespan == 'montly':
+        if timespan == "monthly":
             if n_months not in range(1, 13):
-                click.secho(f'ERROR: not a valid month', fg='red')
+                click.secho("ERROR: not a valid month", fg="red")
                 raise click.Abort()
             n_months += 1
             for month in range(1, n_months):
-                first_day = f'{year}-{month:02d}-01T23:59:59'\
-                    .format(fmt='YYYY-MM-DDT23:59:59')
-                first_day = arrow.get(first_day, 'YYYY-MM-DDTHH:mm:ss')
-                to_date = first_day + relativedelta(months=1)\
-                    - relativedelta(days=1)
-                _from = f'{to_date.year}-{to_date.month:02d}-01T00:00:00'
-                _to = to_date.format(fmt='YYYY-MM-DDT23:59:59')
+                first_day = f"{year}-{month:02d}-01T23:59:59".format(
+                    fmt="YYYY-MM-DDT23:59:59"
+                )
+                first_day = arrow.get(first_day, "YYYY-MM-DDTHH:mm:ss")
+                to_date = first_day + relativedelta(months=1) - relativedelta(days=1)
+                _from = f"{to_date.year}-{to_date.month:02d}-01T00:00:00"
+                _to = to_date.format(fmt="YYYY-MM-DDT23:59:59")
 
-                date_range = {'from': _from, 'to': _to}
+                date_range = {"from": _from, "to": _to}
 
                 _stats = StatsForLibrarian(to_date=to_date)
 
@@ -128,103 +131,121 @@
 
                 if stat_pid and not force:
                     click.secho(
-                        f'ERROR: statistics of type {type}\
+                        f"ERROR: statistics of type {type}\
                         for time interval {_from} - {_to}\
-                        already exist. Pid: {stat_pid}', fg='red')
+                        already exist.
Pid: {stat_pid}", + fg="red", + ) return - stat_data = dict(type=type, date_range=date_range, - values=_stats.collect()) + stat_data = dict( + type=type, date_range=date_range, values=_stats.collect() + ) with current_app.app_context(): if stat_pid: rec_stat = Stat.get_record_by_pid(stat_pid) - stat = rec_stat.update(data=stat_data, commit=True, - dbcommit=True, reindex=True) + stat = rec_stat.update( + data=stat_data, commit=True, dbcommit=True, reindex=True + ) click.secho( - f'WARNING: statistics of type {type}\ + f"WARNING: statistics of type {type}\ have been collected and updated\ for {year}-{month}.\ - Pid: {stat.pid}', fg='yellow') + Pid: {stat.pid}", + fg="yellow", + ) else: - stat = Stat.create(stat_data, dbcommit=True, - reindex=True) + stat = Stat.create(stat_data, dbcommit=True, reindex=True) click.secho( - f'Statistics of type {type} have been collected\ + f"Statistics of type {type} have been collected\ and created for {year}-{month}.\ - New pid: {stat.pid}', fg='green') + New pid: {stat.pid}", + fg="green", + ) else: - _from = arrow.get(f'{year}-01-01', 'YYYY-MM-DD')\ - .format(fmt='YYYY-MM-DDT00:00:00') - _to = arrow.get(f'{year}-12-31', 'YYYY-MM-DD')\ - .format(fmt='YYYY-MM-DDT23:59:59') - date_range = {'from': _from, 'to': _to} + _from = arrow.get(f"{year}-01-01", "YYYY-MM-DD").format( + fmt="YYYY-MM-DDT00:00:00" + ) + _to = arrow.get(f"{year}-12-31", "YYYY-MM-DD").format( + fmt="YYYY-MM-DDT23:59:59" + ) + date_range = {"from": _from, "to": _to} _stats = StatsForLibrarian() - _stats.date_range = {'gte': _from, 'lte': _to} + _stats.date_range = {"gte": _from, "lte": _to} stat_pid = _stats.get_stat_pid(type, date_range) if stat_pid and not force: click.secho( - f'ERROR: statistics of type {type}\ + f"ERROR: statistics of type {type}\ for time interval {_from} - {_to}\ - already exist. Pid: {stat_pid}', fg='red') + already exist. Pid: {stat_pid}", + fg="red", + ) return - stat_data = dict(type=type, date_range=date_range, - values=_stats.collect()) + stat_data = dict(type=type, date_range=date_range, values=_stats.collect()) with current_app.app_context(): if stat_pid: rec_stat = Stat.get_record_by_pid(stat_pid) - stat = rec_stat.update(data=stat_data, commit=True, - dbcommit=True, reindex=True) + stat = rec_stat.update( + data=stat_data, commit=True, dbcommit=True, reindex=True + ) click.secho( - f'WARNING: statistics of type {type}\ + f"WARNING: statistics of type {type}\ have been collected and updated for {year}.\ - Pid: {stat.pid}', fg='yellow') + Pid: {stat.pid}", + fg="yellow", + ) else: stat = Stat.create(stat_data, dbcommit=True, reindex=True) click.secho( - f'Statistics of type {type} have been collected and\ + f"Statistics of type {type} have been collected and\ created for {year}.\ - New pid: {stat.pid}', fg='green') + New pid: {stat.pid}", + fg="green", + ) return @report.command() -@click.argument('pid') +@click.argument("pid") @with_appcontext def dumps(pid): """Extract the stats value for preview. :param pid: pid value of the configuration to use. 
""" - from .api.report import StatsReport from ..stats_cfg.api import StatConfiguration + from .api.report import StatsReport + cfg = StatConfiguration.get_record_by_pid(pid) if not cfg: - click.secho(f'Configuration does not exists.', fg='red') + click.secho(f"Configuration does not exists.", fg="red") else: from pprint import pprint + pprint(StatsReport(cfg).collect()) @report.command() -@click.argument('pid') +@click.argument("pid") @with_appcontext def collect(pid): """Extract the stats report values and store it. :param pid: pid value of the configuration to use. """ - from .api.report import StatsReport from ..stats_cfg.api import StatConfiguration + from .api.report import StatsReport + cfg = StatConfiguration.get_record_by_pid(pid) if not cfg: - click.secho(f'Configuration does not exists.', fg='red') + click.secho(f"Configuration does not exists.", fg="red") else: stat_report = StatsReport(cfg) values = stat_report.collect() @@ -232,9 +253,8 @@ def collect(pid): @report.command() -@click.argument('frequency', type=click.Choice(['month', 'year'])) -@click.option('--delayed', '-d', is_flag=True, - help='Run indexing in background.') +@click.argument("frequency", type=click.Choice(["month", "year"])) +@click.option("--delayed", "-d", is_flag=True, help="Run indexing in background.") @with_appcontext def collect_all(frequency, delayed): """Extract the stats report values and store it. @@ -242,9 +262,10 @@ def collect_all(frequency, delayed): :param pid: pid value of the configuration to use. """ from .tasks import collect_stats_reports + if delayed: res = collect_stats_reports.delay(frequency) - click.secho(f'Generated reports delayed, task id: {res}', fg='green') + click.secho(f"Generated reports delayed, task id: {res}", fg="green") else: res = collect_stats_reports(frequency) - click.secho(f'Generated {len(res)} reports.', fg='green') + click.secho(f"Generated {len(res)} reports.", fg="green") diff --git a/rero_ils/modules/stats/exceptions.py b/rero_ils/modules/stats/exceptions.py index b5a1d4a255..66332e8e7b 100644 --- a/rero_ils/modules/stats/exceptions.py +++ b/rero_ils/modules/stats/exceptions.py @@ -33,5 +33,4 @@ def __init__(self, pid, *args, **kwargs): def __str__(self): """Exception as string.""" - return f'Statistics configuration pid: {self.pid} '\ - 'is not active.' + return f"Statistics configuration pid: {self.pid} " "is not active." diff --git a/rero_ils/modules/stats/extensions.py b/rero_ils/modules/stats/extensions.py index b8237e406b..ecbace26eb 100644 --- a/rero_ils/modules/stats/extensions.py +++ b/rero_ils/modules/stats/extensions.py @@ -43,21 +43,18 @@ def pre_dump(self, record, data, dumper=None): :param dumper: the dumper class used to dump the record. 
""" # to filter the search list results - if lib := record.get('config', {}).get('library'): - lib_pid = ( - lib.get('pid') - or extracted_data_from_ref(lib.get('$ref'))) + if lib := record.get("config", {}).get("library"): + lib_pid = lib.get("pid") or extracted_data_from_ref(lib.get("$ref")) org_pid = Library.get_record_by_pid(lib_pid).organisation_pid - record['organisation'] = { - 'pid': org_pid - } + record["organisation"] = {"pid": org_pid} if not current_librarian: return - if record['type'] == StatType.LIBRARIAN: + if record["type"] == StatType.LIBRARIAN: library_pids = current_librarian.manageable_library_pids - record['values'] = list(filter( - lambda lib: lib['library']['pid'] in library_pids, - record['values'] - )) + record["values"] = list( + filter( + lambda lib: lib["library"]["pid"] in library_pids, record["values"] + ) + ) diff --git a/rero_ils/modules/stats/models.py b/rero_ils/modules/stats/models.py index 155dd54791..8dd921ecd2 100644 --- a/rero_ils/modules/stats/models.py +++ b/rero_ils/modules/stats/models.py @@ -26,24 +26,25 @@ class StatIdentifier(RecordIdentifier): """Sequence generator for Stat identifiers.""" - __tablename__ = 'stat_id' - __mapper_args__ = {'concrete': True} + __tablename__ = "stat_id" + __mapper_args__ = {"concrete": True} recid = db.Column( - db.BigInteger().with_variant(db.Integer, 'sqlite'), - primary_key=True, autoincrement=True, + db.BigInteger().with_variant(db.Integer, "sqlite"), + primary_key=True, + autoincrement=True, ) class StatMetadata(db.Model, RecordMetadataBase): """Stat record metadata.""" - __tablename__ = 'stat_metadata' + __tablename__ = "stat_metadata" class StatType: """Type of statistics record.""" - BILLING = 'billing' - LIBRARIAN = 'librarian' - REPORT = 'report' + BILLING = "billing" + LIBRARIAN = "librarian" + REPORT = "report" diff --git a/rero_ils/modules/stats/permissions.py b/rero_ils/modules/stats/permissions.py index 1118b0e06f..2a7d038253 100644 --- a/rero_ils/modules/stats/permissions.py +++ b/rero_ils/modules/stats/permissions.py @@ -25,16 +25,19 @@ from invenio_access import action_factory, any_user from invenio_records_permissions.generators import Disable -from rero_ils.modules.permissions import AllowedByAction, OrganisationNeed, \ - RecordPermissionPolicy +from rero_ils.modules.permissions import ( + AllowedByAction, + OrganisationNeed, + RecordPermissionPolicy, +) from rero_ils.permissions import admin_permission from .models import StatType # Actions to control statistics policies for CRUD operations -search_action = action_factory('stat-search') -read_action = action_factory('stat-read') -access_action = action_factory('stat-access') +search_action = action_factory("stat-search") +read_action = action_factory("stat-read") +access_action = action_factory("stat-access") class RestrictStatistics(AllowedByAction): @@ -49,7 +52,7 @@ def excludes(self, record=None, **kwargs): """ if ( record - and record.get('type') == StatType.BILLING + and record.get("type") == StatType.BILLING and not admin_permission.require().can() ): return [any_user] @@ -63,7 +66,7 @@ def needs(self, record=None, *args, **kwargs): :param kwargs: extra named arguments. :returns: a list of Needs to validate access. 
""" - if record and record.get('type') == StatType.REPORT: + if record and record.get("type") == StatType.REPORT: # Check if the record organisation match an ``OrganisationNeed`` required_need = OrganisationNeed(record.organisation_pid) if required_need not in g.identity.provides: @@ -74,13 +77,8 @@ def needs(self, record=None, *args, **kwargs): class StatisticsPermissionPolicy(RecordPermissionPolicy): """Statistics permission policy used by the CRUD operations.""" - can_search = [ - AllowedByAction(search_action) - ] - can_read = [ - RestrictStatistics(read_action), - AllowedByAction(search_action) - ] + can_search = [AllowedByAction(search_action)] + can_read = [RestrictStatistics(read_action), AllowedByAction(search_action)] can_create = [Disable()] can_update = [Disable()] can_delete = [Disable()] @@ -89,26 +87,25 @@ class StatisticsPermissionPolicy(RecordPermissionPolicy): class StatisticsUIPermissionPolicy(RecordPermissionPolicy): """Statistics permission policy used by the CRUD operations.""" - can_read = [ - RestrictStatistics(read_action), - AllowedByAction(read_action) - ] + can_read = [RestrictStatistics(read_action), AllowedByAction(read_action)] def stats_ui_permission_factory(record, *args, **kwargs): """Permission for stats detailed view.""" - return StatisticsUIPermissionPolicy('read', record=record) + return StatisticsUIPermissionPolicy("read", record=record) # DECORATORS ================================================================== # Decorators used to protect access to some API blueprints + def check_logged_as_admin(fn): """Decorator to check if the current logged user is logged as an admin. If no user is connected: return 401 (unauthorized) If current logged user has not the `admin` role: return 403 (forbidden) """ + @wraps(fn) def wrapper(*args, **kwargs): if not current_user.is_authenticated: @@ -116,6 +113,7 @@ def wrapper(*args, **kwargs): if not admin_permission.require().can(): abort(403) return fn(*args, **kwargs) + return wrapper @@ -125,11 +123,13 @@ def check_logged_as_librarian(fn): If no user is connected: return 401 (unauthorized) If current logged user has not the `librarian` role: return 403 (forbidden) """ + @wraps(fn) def wrapper(*args, **kwargs): if not current_user.is_authenticated: abort(401) - if not StatisticsUIPermissionPolicy('read').require().can(): + if not StatisticsUIPermissionPolicy("read").require().can(): abort(403) return fn(*args, **kwargs) + return wrapper diff --git a/rero_ils/modules/stats/serializers.py b/rero_ils/modules/stats/serializers.py index 33db170720..ae793db8fe 100644 --- a/rero_ils/modules/stats/serializers.py +++ b/rero_ils/modules/stats/serializers.py @@ -29,19 +29,19 @@ class StatCSVSerializer(CSVSerializer): """Process data to write in csv file.""" ordered_keys = [ - 'library id', - 'library name', - 'checkouts_for_transaction_library', - 'checkouts_for_owning_library', - 'renewals', - 'validated_requests', - 'active_patrons_by_postal_code', - 'new_active_patrons_by_postal_code', - 'items_by_document_type_and_subtype', - 'new_items', - 'new_items_by_location', - 'new_documents', - 'loans_of_transaction_library_by_item_location' + "library id", + "library name", + "checkouts_for_transaction_library", + "checkouts_for_owning_library", + "renewals", + "validated_requests", + "active_patrons_by_postal_code", + "new_active_patrons_by_postal_code", + "items_by_document_type_and_subtype", + "new_items", + "new_items_by_location", + "new_documents", + "loans_of_transaction_library_by_item_location", ] def 
_format_csv(self, records): @@ -50,38 +50,36 @@ def _format_csv(self, records): assert len(records) == 1 record = records[0] - if record['metadata'].get('type') == StatType.LIBRARIAN: + if record["metadata"].get("type") == StatType.LIBRARIAN: # statistics of type librarian - headers = [key.capitalize().replace('_', ' ') - for key in self.ordered_keys] + headers = [key.capitalize().replace("_", " ") for key in self.ordered_keys] line = Line() writer = csv.writer(line) writer.writerow(headers) yield line.read() values = sorted( - record['metadata']['values'], - key=lambda v: v['library']['name'] + record["metadata"]["values"], key=lambda v: v["library"]["name"] ) for value in values: - library = value['library'] - value['library name'] = library['name'] - value['library id'] = library['pid'] - del value['library'] + library = value["library"] + value["library name"] = library["name"] + value["library id"] = library["pid"] + del value["library"] for v in value: if isinstance(value[v], dict): - dict_to_text = '' - for k, m in value[v].items(): - dict_to_text += f'{k} :{m}\r\n' + dict_to_text = "".join( + f"{k} :{m}\r\n" for k, m in value[v].items() + ) value[v] = dict_to_text value = StatCSVSerializer.sort_dict_by_key(value)[1] writer.writerow(value) yield line.read() - elif record['metadata'].get('type') == StatType.BILLING: + elif record["metadata"].get("type") == StatType.BILLING: # statistics of type billing - headers = set(('library name', 'library id')) - for value in record['metadata']['values']: - headers.update([v for v in value.keys() if v != 'library']) + headers = {"library name", "library id"} + for value in record["metadata"]["values"]: + headers.update([v for v in value.keys() if v != "library"]) # write the CSV output in memory line = Line() @@ -90,25 +88,24 @@ def _format_csv(self, records): yield line.read() # sort by library name values = sorted( - record['metadata']['values'], - key=lambda v: v['library']['name'] + record["metadata"]["values"], key=lambda v: v["library"]["name"] ) for value in values: - library = value['library'] - value['library name'] = library['name'] - value['library id'] = library['pid'] - del value['library'] + library = value["library"] + value["library name"] = library["name"] + value["library id"] = library["pid"] + del value["library"] for v in value: if isinstance(value[v], dict): - dict_to_text = '' - for k, m in value[v].items(): - dict_to_text += f'{k} :{m}\r\n' + dict_to_text = "".join( + f"{k} :{m}\r\n" for k, m in value[v].items() + ) value[v] = dict_to_text writer.writerow(value) yield line.read() - elif record['metadata'].get('type') == StatType.REPORT: - values = record['metadata']['values'][0]['results'] + elif record["metadata"].get("type") == StatType.REPORT: + values = record["metadata"]["values"][0]["results"] for value in values: line = Line() writer = csv.writer(line) @@ -124,8 +121,9 @@ def sort_dict_by_key(cls, dictionary): :returns: a list of tuples :rtype: list """ - tuple_list = sorted(dictionary.items(), - key=lambda x: cls.ordered_keys.index(x[0])) + tuple_list = sorted( + dictionary.items(), key=lambda x: cls.ordered_keys.index(x[0]) + ) return list(zip(*tuple_list)) def process_dict(self, dictionary): @@ -152,10 +150,12 @@ def record_responsify(serializer, mimetype): :param mimetype: MIME type of response. :returns: Function that generates a record HTTP response. 
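
The `Line` helper used by `_format_csv` is defined elsewhere in the
package; its contract is small enough to sketch here together with the
streaming pattern the serializer relies on (names and data are
illustrative):

    import csv

    class Line:
        """One-line buffer: csv.writer writes into it, the caller reads it."""

        def __init__(self):
            self._line = ""

        def write(self, line):
            self._line = line

        def read(self):
            return self._line

    def stream_rows(rows):
        # Yield one CSV-encoded line per row, as `_format_csv` does.
        line = Line()
        writer = csv.writer(line)
        for row in rows:
            writer.writerow(row)
            yield line.read()

    print("".join(stream_rows([["library id", "renewals"], ["lib1", 4]])))
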
""" + def view(pid, record, code=200, headers=None, links_factory=None): response = current_app.response_class( serializer.serialize(pid, record, links_factory=links_factory), - mimetype=mimetype) + mimetype=mimetype, + ) response.status_code = code response.cache_control.no_cache = True response.set_etag(str(record.revision_id)) @@ -165,10 +165,11 @@ def view(pid, record, code=200, headers=None, links_factory=None): # set the output filename date = record.created.isoformat() - filename = f'stats-{date}.csv' - if not response.headers.get('Content-Disposition'): - response.headers['Content-Disposition'] = \ + filename = f"stats-{date}.csv" + if not response.headers.get("Content-Disposition"): + response.headers["Content-Disposition"] = ( f'attachment; filename="{filename}"' + ) if links_factory is not None: add_link_header(response, links_factory(pid)) @@ -178,4 +179,4 @@ def view(pid, record, code=200, headers=None, links_factory=None): return view -csv_v1_response = record_responsify(csv_v1, 'text/csv') +csv_v1_response = record_responsify(csv_v1, "text/csv") diff --git a/rero_ils/modules/stats/tasks.py b/rero_ils/modules/stats/tasks.py index e2931a4c17..c85e090cce 100644 --- a/rero_ils/modules/stats/tasks.py +++ b/rero_ils/modules/stats/tasks.py @@ -20,8 +20,7 @@ from celery import shared_task from flask import current_app -from rero_ils.modules.stats_cfg.api import StatConfiguration, \ - StatsConfigurationSearch +from rero_ils.modules.stats_cfg.api import StatConfiguration, StatsConfigurationSearch from .api.api import Stat from .api.librarian import StatsForLibrarian @@ -37,7 +36,9 @@ def collect_stats_billing(): with current_app.app_context(): stat = Stat.create( dict(type=StatType.BILLING, values=stats_pricing), - dbcommit=True, reindex=True) + dbcommit=True, + reindex=True, + ) return f'New statistics of type {stat["type"]} has\ been created with a pid of: {stat.pid}' @@ -46,25 +47,30 @@ def collect_stats_billing(): def collect_stats_librarian(): """Collect and store the monthly statistics for librarian.""" stats_librarian = StatsForLibrarian() - date_range = {'from': stats_librarian.date_range['gte'], - 'to': stats_librarian.date_range['lte']} + date_range = { + "from": stats_librarian.date_range["gte"], + "to": stats_librarian.date_range["lte"], + } stats_values = stats_librarian.collect() with current_app.app_context(): stat = Stat.create( - dict(type=StatType.LIBRARIAN, date_range=date_range, - values=stats_values), - dbcommit=True, reindex=True) + dict(type=StatType.LIBRARIAN, date_range=date_range, values=stats_values), + dbcommit=True, + reindex=True, + ) return f'New statistics of type {stat["type"]} has\ been created with a pid of: {stat.pid}' @shared_task() -def collect_stats_reports(frequency='month'): +def collect_stats_reports(frequency="month"): """Collect and store the montly statistics for librarian.""" pids = [ - hit.pid for hit in - StatsConfigurationSearch().filter( - 'term', frequency=frequency).source('pid').scan() + hit.pid + for hit in StatsConfigurationSearch() + .filter("term", frequency=frequency) + .source("pid") + .scan() ] to_return = [] logger = current_app.logger @@ -77,7 +83,8 @@ def collect_stats_reports(frequency='month'): to_return.append(report.pid) except Exception as error: logger.error( - f'Unable to generate report from config({pid}) :: {error}', - exc_info=True, stack_info=True + f"Unable to generate report from config({pid}) :: {error}", + exc_info=True, + stack_info=True, ) return to_return diff --git a/rero_ils/modules/stats/views.py 
index 3d2ef23132..4b96f88d02 100644
--- a/rero_ils/modules/stats/views.py
+++ b/rero_ils/modules/stats/views.py
@@ -48,62 +48,66 @@ def stats_view_method(pid, record, template=None, **kwargs):
     # We make a `dumps` to trigger the extension on the statistical record
     # that allows to filter the libraries.
     record = record.dumps()
-    return render_template(
-        template,
-        record=record
-    )
+    return render_template(template, record=record)


 blueprint = Blueprint(
-    'stats',
+    "stats",
     __name__,
-    url_prefix='/stats',
-    template_folder='templates',
-    static_folder='static',
+    url_prefix="/stats",
+    template_folder="templates",
+    static_folder="static",
 )


-@blueprint.route('/', methods=['GET'])
+@blueprint.route("/", methods=["GET"])
 @check_logged_as_admin
 def stats_billing():
     """Show the list of the first 100 items on the billing stats list.

     Note: includes old statistics where the field type was absent.
     """
-    f = ~Q('exists', field='type') | Q('term', type=StatType.BILLING)
-    search = StatsSearch().filter('bool', must=[f]).sort('-_created')\
-        .source(['pid', '_created'])
-    hits = search[0:100].execute().to_dict()
+    f = ~Q("exists", field="type") | Q("term", type=StatType.BILLING)
+    search = (
+        StatsSearch()
+        .filter("bool", must=[f])
+        .sort("-_created")
+        .source(["pid", "_created"])
+    )
+    hits = search[:100].execute().to_dict()
     return render_template(
-        'rero_ils/stats_list.html', records=hits['hits']['hits'],
-        type=StatType.BILLING)
+        "rero_ils/stats_list.html", records=hits["hits"]["hits"], type=StatType.BILLING
+    )


-@blueprint.route('/live', methods=['GET'])
+@blueprint.route("/live", methods=["GET"])
 @check_logged_as_admin
 def live_stats_billing():
     """Show the current billing stats values."""
     now = arrow.utcnow()
     stats = StatsForPricing(to_date=now).collect()
     return render_template(
-        'rero_ils/detailed_view_stats.html',
-        record=dict(created=now, values=stats))
+        "rero_ils/detailed_view_stats.html", record=dict(created=now, values=stats)
+    )


-@blueprint.route('/librarian', methods=['GET'])
+@blueprint.route("/librarian", methods=["GET"])
 @check_logged_as_librarian
 def stats_librarian():
     """Show the list of the first 100 items on the librarian stats list."""
-    search = StatsSearch()\
-        .filter('term', type=StatType.LIBRARIAN).sort('-_created')\
-        .source(['pid', '_created', 'date_range'])
-    hits = search[0:100].execute().to_dict()
+    search = (
+        StatsSearch()
+        .filter("term", type=StatType.LIBRARIAN)
+        .sort("-_created")
+        .source(["pid", "_created", "date_range"])
+    )
+    hits = search[:100].execute().to_dict()
     return render_template(
-        'rero_ils/stats_list.html', records=hits['hits']['hits'],
-        type='librarian')
+        "rero_ils/stats_list.html", records=hits["hits"]["hits"], type="librarian"
+    )


-@blueprint.route('/librarian/<record_pid>/csv')
+@blueprint.route("/librarian/<record_pid>/csv")
 @check_logged_as_librarian
 def stats_librarian_queries(record_pid):
     """Download specific statistic query into csv file.
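
The billing filter above combines two elasticsearch-dsl queries; the
expression can be inspected in isolation (runnable with elasticsearch-dsl
installed):

    from elasticsearch_dsl import Q

    # Legacy records with no `type` field OR records explicitly typed
    # as billing statistics.
    legacy_or_billing = ~Q("exists", field="type") | Q("term", type="billing")
    print(legacy_or_billing.to_dict())
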
@@ -111,8 +115,8 @@ def stats_librarian_queries(record_pid): :param record_pid: statistics pid :return: response object, the csv file """ - queries = ['loans_of_transaction_library_by_item_location'] - query_id = request.args.get('query_id', None) + queries = ["loans_of_transaction_library_by_item_location"] + query_id = request.args.get("query_id", None) if query_id not in queries: abort(404) @@ -123,44 +127,53 @@ def stats_librarian_queries(record_pid): # note : This is done by the `pre_dump` extension from `Stats` record, record = record.dumps() - _from = record['date_range']['from'].split('T')[0] - _to = record['date_range']['to'].split('T')[0] - filename = f'{query_id}_{_from}_{_to}.csv' + _from = record["date_range"]["from"].split("T")[0] + _to = record["date_range"]["to"].split("T")[0] + filename = f"{query_id}_{_from}_{_to}.csv" data = StringIO() w = csv.writer(data) - if query_id == 'loans_of_transaction_library_by_item_location': - fieldnames = ['Transaction library', 'Item library', - 'Item location', 'Checkins', 'Checkouts'] + if query_id == "loans_of_transaction_library_by_item_location": + fieldnames = [ + "Transaction library", + "Item library", + "Item location", + "Checkins", + "Checkouts", + ] w.writerow(fieldnames) - for result in record['values']: - transaction_library = \ + for result in record["values"]: + transaction_library = ( f"{result['library']['pid']}: {result['library']['name']}" + ) if not result[query_id]: - w.writerow((transaction_library, '-', '-', 0, 0)) + w.writerow((transaction_library, "-", "-", 0, 0)) else: for location in result[query_id]: result_loc = result[query_id][location] - location_name = result_loc['location_name'] - item_library =\ - location.replace(f' - {location_name}', '') - w.writerow(( - transaction_library, - item_library, - location_name, - result_loc['checkin'], - result_loc['checkout'])) + location_name = result_loc["location_name"] + item_library = location.replace(f" - {location_name}", "") + w.writerow( + ( + transaction_library, + item_library, + location_name, + result_loc["checkin"], + result_loc["checkout"], + ) + ) output = make_response(data.getvalue()) - output.headers["Content-Disposition"] = f'attachment; filename={filename}' + output.headers["Content-Disposition"] = f"attachment; filename={filename}" output.headers["Content-type"] = "text/csv" return output # JINJA FILTERS =============================================================== + @jinja2.pass_context @blueprint.app_template_filter() def yearmonthfilter(context, value, format="%Y-%m-%dT%H:%M:%S"): @@ -169,7 +182,7 @@ def yearmonthfilter(context, value, format="%Y-%m-%dT%H:%M:%S"): value: datetime returns: year and month of datetime """ - utc = pytz.timezone('UTC') + utc = pytz.timezone("UTC") value = datetime.datetime.strptime(value, format) value = utc.localize(value, is_dst=None).astimezone(pytz.utc) datetime_object = datetime.datetime.strptime(str(value.month), "%m") @@ -209,7 +222,7 @@ def sort_dict_by_library(context, dictionary): returns: sorted dict :rtype: dict """ - return sorted(dictionary, key=lambda v: v['library']['name']) + return sorted(dictionary, key=lambda v: v["library"]["name"]) @jinja2.pass_context @@ -222,9 +235,11 @@ def process_data(context, value): returns: processed dict :rtype: dict """ - if 'library' in value: - updated_dict = {'library id': value['library']['pid'], - 'library name': value['library']['name']} - updated_dict.update(value) - updated_dict.pop('library') + if "library" in value: + updated_dict = { + "library id": 
value["library"]["pid"], + "library name": value["library"]["name"], + } + updated_dict |= value + updated_dict.pop("library") return updated_dict diff --git a/rero_ils/modules/stats_cfg/api.py b/rero_ils/modules/stats_cfg/api.py index 869fd51d97..5708d7bfeb 100644 --- a/rero_ils/modules/stats_cfg/api.py +++ b/rero_ils/modules/stats_cfg/api.py @@ -29,9 +29,9 @@ # provider StatCfgProvider = type( - 'StatCfgProvider', + "StatCfgProvider", (Provider,), - dict(identifier=StatCfgIdentifier, pid_type='stacfg') + dict(identifier=StatCfgIdentifier, pid_type="stacfg"), ) # minter stat_cfg_id_minter = partial(id_minter, provider=StatCfgProvider) @@ -45,9 +45,9 @@ class StatsConfigurationSearch(IlsRecordsSearch): class Meta: """Search only on stats_cfg index.""" - index = 'stats_cfg' + index = "stats_cfg" doc_types = None - fields = ('*',) + fields = ("*",) facets = {} default_filter = None @@ -73,19 +73,21 @@ def get_links_to_me(self, get_pids=False): links = {} - query = StatsSearch()\ - .filter('term', type='report')\ - .filter('term', config__pid=self.pid) + query = ( + StatsSearch() + .filter("term", type="report") + .filter("term", config__pid=self.pid) + ) if get_pids: - query = query.source(['pid']).scan() + query = query.source(["pid"]).scan() reports = [s.pid for s in query] else: reports = query.count() # get number of reports or list of reports pids for configuration if reports: - links['reports'] = reports + links["reports"] = reports return links def reasons_not_to_delete(self): @@ -94,22 +96,20 @@ def reasons_not_to_delete(self): :return: dict with number of reports or reports pids """ cannot_delete = {} - # It is not possible to delete configuration if there are reports. - links = self.get_links_to_me() - if links: - cannot_delete['links'] = links + if links := self.get_links_to_me(): + cannot_delete["links"] = links return cannot_delete @property def organisation_pid(self): """Shortcut for organisation pid.""" - library = extracted_data_from_ref(self.get('library'), data='record') + library = extracted_data_from_ref(self.get("library"), data="record") return library.organisation_pid @property def library_pid(self): """Shortcut for library pid.""" - return extracted_data_from_ref(self.get('library')) + return extracted_data_from_ref(self.get("library")) class StatsConfigurationIndexer(IlsRecordsIndexer): @@ -123,4 +123,4 @@ def bulk_index(self, record_id_iterator): :param record_id_iterator: Iterator yielding record UUIDs. """ - super().bulk_index(record_id_iterator, doc_type='stacfg') + super().bulk_index(record_id_iterator, doc_type="stacfg") diff --git a/rero_ils/modules/stats_cfg/dumpers.py b/rero_ils/modules/stats_cfg/dumpers.py index b0fe883085..fcaf5f5d5b 100644 --- a/rero_ils/modules/stats_cfg/dumpers.py +++ b/rero_ils/modules/stats_cfg/dumpers.py @@ -34,14 +34,16 @@ def dump(self, record, data): :param record: The record to dump. :param data: The initial dump data passed in by ``record.dumps()``. 
""" - data['organisation'] = dict(pid=record.organisation_pid) + data["organisation"] = dict(pid=record.organisation_pid) return data # dumper used for indexing -indexer_dumper = MultiDumper(dumpers=[ - # make a fresh copy - Dumper(), - ReplaceRefsDumper(), - IndexerDumper() -]) +indexer_dumper = MultiDumper( + dumpers=[ + # make a fresh copy + Dumper(), + ReplaceRefsDumper(), + IndexerDumper(), + ] +) diff --git a/rero_ils/modules/stats_cfg/jsonresolver.py b/rero_ils/modules/stats_cfg/jsonresolver.py index 8f4c7d2a52..a52d103fc3 100644 --- a/rero_ils/modules/stats_cfg/jsonresolver.py +++ b/rero_ils/modules/stats_cfg/jsonresolver.py @@ -23,7 +23,7 @@ from ..jsonresolver import resolve_json_refs -@jsonresolver.route('/api/stats_cfg/', host='bib.rero.ch') +@jsonresolver.route("/api/stats_cfg/", host="bib.rero.ch") def stats_cfg_resolver(pid): """Statistics configuration resolver.""" - return resolve_json_refs('stacfg', pid) + return resolve_json_refs("stacfg", pid) diff --git a/rero_ils/modules/stats_cfg/models.py b/rero_ils/modules/stats_cfg/models.py index 88269b3b16..7ac11bea93 100644 --- a/rero_ils/modules/stats_cfg/models.py +++ b/rero_ils/modules/stats_cfg/models.py @@ -26,16 +26,17 @@ class StatCfgIdentifier(RecordIdentifier): """Sequence generator for the statistics configuration identifiers.""" - __tablename__ = 'stat_cfg_id' - __mapper_args__ = {'concrete': True} + __tablename__ = "stat_cfg_id" + __mapper_args__ = {"concrete": True} recid = db.Column( - db.BigInteger().with_variant(db.Integer, 'sqlite'), - primary_key=True, autoincrement=True, + db.BigInteger().with_variant(db.Integer, "sqlite"), + primary_key=True, + autoincrement=True, ) class StatCfgMetadata(db.Model, RecordMetadataBase): """Statistics configuration record metadata.""" - __tablename__ = 'stat_cfg_metadata' + __tablename__ = "stat_cfg_metadata" diff --git a/rero_ils/modules/stats_cfg/permissions.py b/rero_ils/modules/stats_cfg/permissions.py index 523fbd08c0..6298619f22 100644 --- a/rero_ils/modules/stats_cfg/permissions.py +++ b/rero_ils/modules/stats_cfg/permissions.py @@ -19,17 +19,20 @@ from invenio_access import action_factory -from rero_ils.modules.permissions import AllowedByAction, \ - AllowedByActionRestrictByManageableLibrary, \ - AllowedByActionRestrictByOrganisation, RecordPermissionPolicy +from rero_ils.modules.permissions import ( + AllowedByAction, + AllowedByActionRestrictByManageableLibrary, + AllowedByActionRestrictByOrganisation, + RecordPermissionPolicy, +) # Actions to control statistics configuration policies for CRUD operations -search_action = action_factory('stat_cfg-search') -read_action = action_factory('stat_cfg-read') -create_action = action_factory('stat_cfg-create') -update_action = action_factory('stat_cfg-update') -delete_action = action_factory('stat_cfg-delete') -access_action = action_factory('stat_cfg-access') +search_action = action_factory("stat_cfg-search") +read_action = action_factory("stat_cfg-read") +create_action = action_factory("stat_cfg-create") +update_action = action_factory("stat_cfg-update") +delete_action = action_factory("stat_cfg-delete") +access_action = action_factory("stat_cfg-access") class StatisticsConfigurationPermissionPolicy(RecordPermissionPolicy): diff --git a/rero_ils/modules/stats_cfg/serializers/__init__.py b/rero_ils/modules/stats_cfg/serializers/__init__.py index 72f6791bca..d799acfbab 100644 --- a/rero_ils/modules/stats_cfg/serializers/__init__.py +++ b/rero_ils/modules/stats_cfg/serializers/__init__.py @@ -23,10 +23,8 @@ from .json import 

-__all__ = [
-    'json_search'
-]
+__all__ = ["json_search"]

 """JSON serializer."""
 _json = StatsCfgJSONSerializer(RecordSchemaJSONV1)
-json_search = search_responsify(_json, 'application/rero+json')
+json_search = search_responsify(_json, "application/rero+json")
diff --git a/rero_ils/modules/stats_cfg/serializers/json.py b/rero_ils/modules/stats_cfg/serializers/json.py
index 4ae4221cf8..f005826da5 100644
--- a/rero_ils/modules/stats_cfg/serializers/json.py
+++ b/rero_ils/modules/stats_cfg/serializers/json.py
@@ -32,8 +32,7 @@ def _postprocess_search_aggregations(self, aggregations: dict) -> None:
         aggregations section.
         """
         JSONSerializer.enrich_bucket_with_data(
-            aggregations.get('library', {}).get('buckets', []),
-            LibrariesSearch, 'name'
+            aggregations.get("library", {}).get("buckets", []), LibrariesSearch, "name"
         )
         super()._postprocess_search_aggregations(aggregations)
diff --git a/rero_ils/modules/stats_cfg/views.py b/rero_ils/modules/stats_cfg/views.py
index fc107dd949..73fcd74e72 100644
--- a/rero_ils/modules/stats_cfg/views.py
+++ b/rero_ils/modules/stats_cfg/views.py
@@ -27,15 +27,15 @@
 from .api import StatConfiguration

 api_blueprint = Blueprint(
-    'stats_cfg',
+    "stats_cfg",
     __name__,
-    url_prefix='/stats_cfg',
-    template_folder='templates',
-    static_folder='static',
+    url_prefix="/stats_cfg",
+    template_folder="templates",
+    static_folder="static",
 )


-@api_blueprint.route('/live/<pid>', methods=['GET'])
+@api_blueprint.route("/live/<pid>", methods=["GET"])
 @check_logged_as_librarian
 def live_stats_reports(pid):
     """Preview of the stats report values.
@@ -44,6 +44,6 @@
     """
     cfg = StatConfiguration.get_record_by_pid(pid)
     if not cfg:
-        abort(404, f'Configuration not found for pid {pid}.')
+        abort(404, f"Configuration not found for pid {pid}.")
     res = StatsReport(cfg).collect(force=True)
     return jsonify(res)
diff --git a/rero_ils/modules/tasks.py b/rero_ils/modules/tasks.py
index 17e9a109c4..898221fc60 100644
--- a/rero_ils/modules/tasks.py
+++ b/rero_ils/modules/tasks.py
@@ -19,15 +19,16 @@
 """Celery tasks to index records."""

 from celery import shared_task
-from celery.messaging import establish_connection
+from flask import current_app

 from .api import IlsRecordsIndexer
 from .utils import set_timestamp


 @shared_task(ignore_result=True)
-def process_bulk_queue(version_type=None, queue=None, search_bulk_kwargs=None,
-                       stats_only=True):
+def process_bulk_queue(
+    version_type=None, queue=None, search_bulk_kwargs=None, stats_only=True
+):
     """Process bulk indexing queue.

     :param str version_type: Elasticsearch version type.
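
The `/live/<pid>` preview endpoint above can be exercised with Flask's test
client; a sketch assuming `app` is a configured application with the
blueprint registered and a made-up pid:

    with app.test_client() as client:
        response = client.get("/stats_cfg/live/1")
        print(response.status_code, response.get_json())
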
@@ -44,19 +45,18 @@ def process_bulk_queue(version_type=None, queue=None, search_bulk_kwargs=None, connected_queue = None if queue: - connection = establish_connection() + connection = current_app.extensions["invenio-celery"].celery.connection() connected_queue = connect_queue(connection, queue) indexer = IlsRecordsIndexer( - version_type=version_type, - queue=connected_queue, - routing_key=queue + version_type=version_type, queue=connected_queue, routing_key=queue ) return indexer.process_bulk_queue( - search_bulk_kwargs=search_bulk_kwargs, stats_only=stats_only) + search_bulk_kwargs=search_bulk_kwargs, stats_only=stats_only + ) @shared_task(ignore_result=True) def scheduler_timestamp(): """Writes a time stamp to current cache.""" - time = set_timestamp('scheduler') - return {'scheduler': time} + time = set_timestamp("scheduler") + return {"scheduler": time} diff --git a/rero_ils/modules/templates/api.py b/rero_ils/modules/templates/api.py index 3fe5ed9355..c81e3c30c3 100644 --- a/rero_ils/modules/templates/api.py +++ b/rero_ils/modules/templates/api.py @@ -30,9 +30,9 @@ # provider TemplateProvider = type( - 'TemplateProvider', + "TemplateProvider", (Provider,), - dict(identifier=TemplateIdentifier, pid_type='tmpl') + dict(identifier=TemplateIdentifier, pid_type="tmpl"), ) # minter template_id_minter = partial(id_minter, provider=TemplateProvider) @@ -46,9 +46,9 @@ class TemplatesSearch(IlsRecordsSearch): class Meta: """Search only on Templates index.""" - index = 'templates' + index = "templates" doc_types = None - fields = ('*', ) + fields = ("*",) facets = {} default_filter = None @@ -57,47 +57,40 @@ class Meta: class Template(IlsRecord): """Templates class.""" - _extensions = [ - CleanDataDictExtension() - ] + _extensions = [CleanDataDictExtension()] minter = template_id_minter fetcher = template_id_fetcher provider = TemplateProvider model_cls = TemplateMetadata - schema = 'templates/template-v0.0.1.json' - pids_exist_check = { - 'required': { - 'org': 'organisation', - 'ptrn': 'creator' - } - } + schema = "templates/template-v0.0.1.json" + pids_exist_check = {"required": {"org": "organisation", "ptrn": "creator"}} def replace_refs(self): """Replace the ``$ref`` keys within the JSON.""" # For template, we don't need to resolve $ref inside the ``data`` # attribute. Other $ref should be resolved. - data = self.pop('data', {}) + data = self.pop("data", {}) dumped = super().replace_refs() - dumped['data'] = data - self['data'] = data + dumped["data"] = data + self["data"] = data return dumped @property def creator_pid(self): """Shortcut for template creator pid.""" - if self.get('creator'): - return extracted_data_from_ref(self.get('creator')) + if self.get("creator"): + return extracted_data_from_ref(self.get("creator")) @property def is_public(self): """Shortcut for template public visibility.""" - return self.get('visibility') == TemplateVisibility.PUBLIC + return self.get("visibility") == TemplateVisibility.PUBLIC @property def is_private(self): """Shortcut for template private visibility.""" - return self.get('visibility') == TemplateVisibility.PRIVATE + return self.get("visibility") == TemplateVisibility.PRIVATE class TemplatesIndexer(IlsRecordsIndexer): @@ -110,4 +103,4 @@ def bulk_index(self, record_id_iterator): :param record_id_iterator: Iterator yielding record UUIDs. 
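
The `replace_refs` override above deliberately shields the template body
from `$ref` resolution; a sketch assuming an application context and a
made-up pid:

    from rero_ils.modules.templates.api import Template

    template = Template.get_record_by_pid("1")  # pid is made up
    dumped = template.replace_refs()
    # `data` is detached before resolution and re-attached as-is, so any
    # $ref inside the template body stays untouched.
    assert dumped["data"] == template["data"]
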
""" - super().bulk_index(record_id_iterator, doc_type='tmpl') + super().bulk_index(record_id_iterator, doc_type="tmpl") diff --git a/rero_ils/modules/templates/extensions.py b/rero_ils/modules/templates/extensions.py index 7ba7b1b7f6..101cf045c8 100644 --- a/rero_ils/modules/templates/extensions.py +++ b/rero_ils/modules/templates/extensions.py @@ -23,30 +23,18 @@ class CleanDataDictExtension(RecordExtension): """Defines the methods needed by an extension.""" fields_to_clean = { - 'documents': [ - 'pid' + "documents": ["pid"], + "items": [ + "pid", + "barcode", + "status", + "document", + "holding", + "organisation", + "library", ], - 'items': [ - 'pid', - 'barcode', - 'status', - 'document', - 'holding', - 'organisation', - 'library' - ], - 'holdings': [ - 'pid', - 'organisation', - 'library', - 'document' - ], - 'patrons': [ - 'pid', - 'user_id', - 'patron.subscriptions', - 'patron.barcode' - ] + "holdings": ["pid", "organisation", "library", "document"], + "patrons": ["pid", "user_id", "patron.subscriptions", "patron.barcode"], } def _clean_record(self, record): @@ -67,16 +55,16 @@ def _clean(data, keys): if not data: return for key in keys: - if '.' in key: - root_path, child_path = key.split('.', 1) + if "." in key: + root_path, child_path = key.split(".", 1) _clean(data.get(root_path, {}), [child_path]) else: data.pop(key, None) - if not record.get('data'): + if not record.get("data"): return - if fields := self.fields_to_clean.get(record.get('template_type')): - _clean(record['data'], fields) + if fields := self.fields_to_clean.get(record.get("template_type")): + _clean(record["data"], fields) def pre_commit(self, record): """Called before a record is committed.""" diff --git a/rero_ils/modules/templates/jsonresolver.py b/rero_ils/modules/templates/jsonresolver.py index d53d21e06a..f5ff2cdf7d 100644 --- a/rero_ils/modules/templates/jsonresolver.py +++ b/rero_ils/modules/templates/jsonresolver.py @@ -22,7 +22,7 @@ from rero_ils.modules.jsonresolver import resolve_json_refs -@jsonresolver.route('/api/templates/', host='bib.rero.ch') +@jsonresolver.route("/api/templates/", host="bib.rero.ch") def ill_request_resolver(pid): """Resolver for templates record.""" - return resolve_json_refs('tmpl', pid) + return resolve_json_refs("tmpl", pid) diff --git a/rero_ils/modules/templates/listener.py b/rero_ils/modules/templates/listener.py index d0f0c7bc4b..4fa16f0379 100644 --- a/rero_ils/modules/templates/listener.py +++ b/rero_ils/modules/templates/listener.py @@ -21,8 +21,15 @@ from .api import TemplatesSearch -def prepare_template_data(sender, json=None, record=None, index=None, - doc_type=None, arguments=None, **dummy_kwargs): +def prepare_template_data( + sender, + json=None, + record=None, + index=None, + doc_type=None, + arguments=None, + **dummy_kwargs +): """Signal sent before a record is indexed. :param json: The dumped record dictionary which can be modified. @@ -30,8 +37,8 @@ def prepare_template_data(sender, json=None, record=None, index=None, :param index: The index in which the record will be indexed. :param doc_type: The document type of the record. """ - if index.split('-')[0] == TemplatesSearch.Meta.index: + if index.split("-")[0] == TemplatesSearch.Meta.index: # remove `data` fields from ES. 
# This metadata isn't required for indexing process and cause some # troubles with $ref resolution - json.pop('data', None) + json.pop("data", None) diff --git a/rero_ils/modules/templates/loaders/__init__.py b/rero_ils/modules/templates/loaders/__init__.py index 974ba575cc..7c996263ea 100644 --- a/rero_ils/modules/templates/loaders/__init__.py +++ b/rero_ils/modules/templates/loaders/__init__.py @@ -24,6 +24,4 @@ json_v1 = marshmallow_loader(TemplateMetadataSchemaV1) -__all__ = ( - 'json_v1', -) +__all__ = ("json_v1",) diff --git a/rero_ils/modules/templates/models.py b/rero_ils/modules/templates/models.py index e45568f954..545e9cca1e 100644 --- a/rero_ils/modules/templates/models.py +++ b/rero_ils/modules/templates/models.py @@ -27,23 +27,24 @@ class TemplateIdentifier(RecordIdentifier): """Sequence generator for templates identifiers.""" - __tablename__ = 'template_id' - __mapper_args__ = {'concrete': True} + __tablename__ = "template_id" + __mapper_args__ = {"concrete": True} recid = db.Column( - db.BigInteger().with_variant(db.Integer, 'sqlite'), - primary_key=True, autoincrement=True, + db.BigInteger().with_variant(db.Integer, "sqlite"), + primary_key=True, + autoincrement=True, ) class TemplateMetadata(db.Model, RecordMetadataBase): """Template record metadata.""" - __tablename__ = 'template_metadata' + __tablename__ = "template_metadata" class TemplateVisibility: """Class to handle different template visibilities.""" - PUBLIC = 'public' - PRIVATE = 'private' + PUBLIC = "public" + PRIVATE = "private" diff --git a/rero_ils/modules/templates/permissions.py b/rero_ils/modules/templates/permissions.py index d75eb3ec18..67f1c52cd3 100644 --- a/rero_ils/modules/templates/permissions.py +++ b/rero_ils/modules/templates/permissions.py @@ -23,18 +23,23 @@ from invenio_access import action_factory from rero_ils.modules.patrons.api import current_librarian -from rero_ils.modules.permissions import AllowedByAction, LibraryNeed, \ - OrganisationNeed, OwnerNeed, RecordPermissionPolicy +from rero_ils.modules.permissions import ( + AllowedByAction, + LibraryNeed, + OrganisationNeed, + OwnerNeed, + RecordPermissionPolicy, +) from rero_ils.modules.templates.api import Template from rero_ils.modules.users.models import UserRole # Actions to control Templates policies for CRUD operations -search_action = action_factory('tmpl-search') -read_action = action_factory('tmpl-read') -create_action = action_factory('tmpl-create') -update_action = action_factory('tmpl-update') -delete_action = action_factory('tmpl-delete') -access_action = action_factory('tmpl-access') +search_action = action_factory("tmpl-search") +read_action = action_factory("tmpl-read") +create_action = action_factory("tmpl-create") +update_action = action_factory("tmpl-update") +delete_action = action_factory("tmpl-delete") +access_action = action_factory("tmpl-access") class AllowedByActionTemplateReadRestriction(AllowedByAction): @@ -76,12 +81,14 @@ def needs(self, record=None, *args, **kwargs): # - library_administration : only templates for user belonging to # its library # - other : only own templates. 
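
The dotted-path handling in `_clean` above (part of
`CleanDataDictExtension`) is easy to verify standalone; here is a
self-contained replica with made-up data:

    def clean(data, keys):
        # Same logic as `_clean`: a dotted key recurses into the sub-dict,
        # a plain key is popped from the current level.
        if not data:
            return
        for key in keys:
            if "." in key:
                root_path, child_path = key.split(".", 1)
                clean(data.get(root_path, {}), [child_path])
            else:
                data.pop(key, None)

    record = {"pid": "1", "patron": {"barcode": "B1", "type": "adult"}}
    clean(record, ["pid", "patron.barcode"])
    assert record == {"patron": {"type": "adult"}}
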
- if (roles := current_librarian.get('roles')) and record.is_private: + if (roles := current_librarian.get("roles")) and record.is_private: if UserRole.FULL_PERMISSIONS in roles: pass elif UserRole.LIBRARY_ADMINISTRATOR in roles: - if all(LibraryNeed(lib_pid) not in provided_needs - for lib_pid in current_librarian.library_pids): + if all( + LibraryNeed(lib_pid) not in provided_needs + for lib_pid in current_librarian.library_pids + ): return [] # empty array == disable operation elif OwnerNeed(record.creator_pid) not in provided_needs: return [] # empty array == disable operation @@ -111,7 +118,7 @@ def needs(self, record, **kwargs): # any template (private or public) in their own organisation. needs = { OrganisationNeed(record.organisation_pid), - RoleNeed(UserRole.FULL_PERMISSIONS) + RoleNeed(UserRole.FULL_PERMISSIONS), } if needs.issubset(g.identity.provides): return required_needs diff --git a/rero_ils/modules/templates/schemas/json.py b/rero_ils/modules/templates/schemas/json.py index 66d7bf54ca..c6aec39917 100644 --- a/rero_ils/modules/templates/schemas/json.py +++ b/rero_ils/modules/templates/schemas/json.py @@ -45,7 +45,7 @@ class TemplateMetadataSchemaV1(StrictKeysMixin): description = SanitizedUnicode() visibility = SanitizedUnicode( required=True, - validate=OneOf([TemplateVisibility.PUBLIC, TemplateVisibility.PRIVATE]) + validate=OneOf([TemplateVisibility.PUBLIC, TemplateVisibility.PRIVATE]), ) data = fields.Dict() creator = fields.Nested(RefSchema) @@ -54,7 +54,7 @@ class TemplateMetadataSchemaV1(StrictKeysMixin): load_only=True, attribute="$schema", data_key="$schema", - deserialize=schema_from_template + deserialize=schema_from_template, ) # DEV NOTES : Why using marshmallow validation process @@ -64,8 +64,8 @@ class TemplateMetadataSchemaV1(StrictKeysMixin): # procedure only send an HTTP 403 status, without any message, this isn't # enough relevant for end user. - @validates('visibility') - @http_applicable_method('POST') + @validates("visibility") + @http_applicable_method("POST") def validate_visibility(self, data, **kwargs): """Validate the visibility field through REST API request. @@ -79,11 +79,11 @@ def validate_visibility(self, data, **kwargs): """ if data == TemplateVisibility.PUBLIC: raise ValidationError( - _('Template can be created only with `private` visibility') + _("Template can be created only with `private` visibility") ) @validates_schema() - @http_applicable_method('PUT') + @http_applicable_method("PUT") def validate_visibility_changes(self, data, **kwargs): """Validate `visibility` changes through REST API request. @@ -96,24 +96,21 @@ def validate_visibility_changes(self, data, **kwargs): :raises ValidationError: if error has detected on visibility attribute """ # Load DB record - db_record = Template.get_record_by_pid(data.get('pid')) + db_record = Template.get_record_by_pid(data.get("pid")) if not db_record: raise ValidationError(f'Unable to load Template#{data.get("pid")}') # Check if visibility of the template changed. If not, we can stop # the validation process. 
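
The library-administrator branch above rejects the operation only when none
of the librarian's libraries appears among the identity's provided needs; a
sketch with made-up pids (it assumes `LibraryNeed` instances compare and
hash by value, as invenio-access needs do):

    from rero_ils.modules.permissions import LibraryNeed

    provided_needs = {LibraryNeed("lib1")}  # normally g.identity.provides
    library_pids = ["lib1", "lib3"]         # current_librarian.library_pids
    rejected = all(
        LibraryNeed(pid) not in provided_needs for pid in library_pids
    )
    assert not rejected
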
- if db_record.get('visibility') == data.get('visibility'): + if db_record.get("visibility") == data.get("visibility"): return # Only lib_admin and full_permission roles can change visibility field - allowed_roles = [ - UserRole.FULL_PERMISSIONS, - UserRole.LIBRARY_ADMINISTRATOR - ] + allowed_roles = [UserRole.FULL_PERMISSIONS, UserRole.LIBRARY_ADMINISTRATOR] user_roles = set() if current_librarian: - user_roles = set(current_librarian.get('roles', [])) + user_roles = set(current_librarian.get("roles", [])) if not user_roles.intersection(allowed_roles): raise ValidationError( - _('You are not allowed to change template visibility') + _("You are not allowed to change template visibility") ) diff --git a/rero_ils/modules/templates/views.py b/rero_ils/modules/templates/views.py index f672adc104..cc79b9fd37 100644 --- a/rero_ils/modules/templates/views.py +++ b/rero_ils/modules/templates/views.py @@ -22,9 +22,9 @@ from flask import Blueprint blueprint = Blueprint( - 'templates', + "templates", __name__, - url_prefix='/template', - template_folder='templates', - static_folder='static', + url_prefix="/template", + template_folder="templates", + static_folder="static", ) diff --git a/rero_ils/modules/users/api.py b/rero_ils/modules/users/api.py index 2c28227290..1f3d68bc26 100644 --- a/rero_ils/modules/users/api.py +++ b/rero_ils/modules/users/api.py @@ -34,47 +34,43 @@ from sqlalchemy import func from werkzeug.local import LocalProxy +from ...utils import remove_empties_from_dict from ..api import ils_record_format_checker from ..utils import PasswordValidatorException, get_schema_for_resource -from ...utils import remove_empties_from_dict -_records_state = LocalProxy(lambda: current_app.extensions['invenio-records']) +_records_state = LocalProxy(lambda: current_app.extensions["invenio-records"]) def get_profile_countries(): """Get country list from the jsonschema.""" - schema = current_jsonschemas.get_schema('common/countries-v0.0.1.json') - options = schema['country']['form']['options'] - return [ - (option.get('value'), _((option.get('label')))) for option in options - ] + schema = current_jsonschemas.get_schema("common/countries-v0.0.1.json") + options = schema["country"]["form"]["options"] + return [(option.get("value"), _((option.get("label")))) for option in options] def get_readonly_profile_fields() -> list[str]: """Disallow to edit some fields for patrons.""" - if current_user.has_role('patron'): - return ['first_name', 'last_name', 'birth_date'] - return ['keep_history'] + if current_user.has_role("patron"): + return ["first_name", "last_name", "birth_date"] + return ["keep_history"] def password_generator(): """Password generator.""" - generator = obj_or_import_string( - current_app.config['RERO_ILS_PASSWORD_GENERATOR']) + generator = obj_or_import_string(current_app.config["RERO_ILS_PASSWORD_GENERATOR"]) return generator( - current_app.config['RERO_ILS_PASSWORD_MIN_LENGTH'], - current_app.config['RERO_ILS_PASSWORD_SPECIAL_CHAR'] + current_app.config["RERO_ILS_PASSWORD_MIN_LENGTH"], + current_app.config["RERO_ILS_PASSWORD_SPECIAL_CHAR"], ) def password_validator(password): """Password validator.""" - validator = obj_or_import_string( - current_app.config['RERO_ILS_PASSWORD_VALIDATOR']) + validator = obj_or_import_string(current_app.config["RERO_ILS_PASSWORD_VALIDATOR"]) return validator( password, - current_app.config['RERO_ILS_PASSWORD_MIN_LENGTH'], - current_app.config['RERO_ILS_PASSWORD_SPECIAL_CHAR'] + current_app.config["RERO_ILS_PASSWORD_MIN_LENGTH"], + 
current_app.config["RERO_ILS_PASSWORD_SPECIAL_CHAR"], ) @@ -82,13 +78,21 @@ class User(object): """User API.""" profile_fields = [ - 'first_name', 'last_name', 'street', 'postal_code', 'gender', - 'city', 'birth_date', 'home_phone', 'business_phone', - 'mobile_phone', 'other_phone', 'keep_history', 'country' - ] - user_fields = [ - 'email', 'username', 'password' + "first_name", + "last_name", + "street", + "postal_code", + "gender", + "city", + "birth_date", + "home_phone", + "business_phone", + "mobile_phone", + "other_phone", + "keep_history", + "country", ] + user_fields = ["email", "username", "password"] def __init__(self, user): """User class initializer.""" @@ -109,28 +113,27 @@ def create(cls, data, send_email=True, **kwargs): """ with db.session.begin_nested(): # Generate password if not present - profile = { - k: v for k, v in data.items() - if k in cls.profile_fields - } + profile = {k: v for k, v in data.items() if k in cls.profile_fields} if profile: cls._validate_profile(profile) cls._validate_data(data=data) - password = data.get('password', password_generator()) + password = data.get("password", password_generator()) cls._validate_password(password=password) user = BaseUser( - username=data.get('username'), + username=data.get("username"), password=hash_password(password), - user_profile=profile, active=True) + user_profile=profile, + active=True, + ) db.session.add(user) # send the reset password notification for new users - if email := data.get('email'): + if email := data.get("email"): user.email = email else: - user.domain = 'unknown' + user.domain = "unknown" db.session.merge(user) db.session.commit() - if data.get('email') and send_email: + if data.get("email") and send_email: send_reset_password_instructions(user) confirm_user(user) return cls(user) @@ -141,10 +144,11 @@ def update(self, data): :param data - dictionary representing a user record to update """ from ..patrons.listener import update_from_profile + profile = {k: v for k, v in data.items() if k in self.profile_fields} if profile: self._validate_profile(profile) - if password := data.get('password'): + if password := data.get("password"): self._validate_password(password=password) self._validate_data(data) @@ -152,8 +156,8 @@ def update(self, data): with db.session.begin_nested(): if password: user.password = hash_password(password) - user.username = data.get('username') - if email := data.get('email'): + user.username = data.get("username") + if email := data.get("email"): user.email = email else: user._email = None @@ -161,29 +165,27 @@ def update(self, data): db.session.merge(user) db.session.commit() confirm_user(user) - update_from_profile('user', self.user) + update_from_profile("user", self.user) return self @classmethod def _validate_data(cls, data): """Additional user record validations.""" - if not data.get('email') and not data.get('username'): - raise ValidationError( - _('A username or email is required.') - ) + if not data.get("email") and not data.get("username"): + raise ValidationError(_("A username or email is required.")) @classmethod def _validate_profile(cls, profile, **kwargs): """Validate user record against schema.""" - schema = get_schema_for_resource('user') - profile['$schema'] = schema + schema = get_schema_for_resource("user") + profile["$schema"] = schema _records_state.validate( profile, schema, format_checker=ils_record_format_checker, - cls=Draft4Validator + cls=Draft4Validator, ) - profile.pop('$schema') + profile.pop("$schema") @classmethod def _validate_password(cls, 
password): @@ -212,17 +214,15 @@ def get_record(cls, user_id): :return: the user record """ user = BaseUser.query.filter_by(id=user_id).first() - if not user: - return None - return cls(user) + return cls(user) if user else None def dumps(self): """Return pure Python dictionary with record metadata.""" - url = url_for('api_users.users_item', _external=True, id=self.user.id) + url = url_for("api_users.users_item", _external=True, id=self.user.id) return { - 'id': self.user.id, - 'links': {'self': url}, - 'metadata': self.dumps_metadata(True) + "id": self.user.id, + "links": {"self": url}, + "metadata": self.dumps_metadata(True), } def dumps_metadata(self, dump_patron: bool = False) -> dict: @@ -232,24 +232,23 @@ def dumps_metadata(self, dump_patron: bool = False) -> dict: :return a dictionary with all dump user metadata. """ from ..patrons.api import Patron - metadata = { - 'roles': [r.name for r in self.user.roles] - } + + metadata = {"roles": [r.name for r in self.user.roles]} if user_profile := self.user.user_profile: - metadata.update(user_profile) + metadata |= user_profile if self.user.email: - metadata['email'] = self.user.email + metadata["email"] = self.user.email if self.user.username: - metadata['username'] = self.user.username + metadata["username"] = self.user.username if dump_patron: for patron in Patron.get_patrons_by_user(self.user): - metadata.setdefault('patrons', []).append({ - 'pid': patron.pid, - 'roles': patron.get('roles'), - 'organisation': { - 'pid': patron.organisation_pid + metadata.setdefault("patrons", []).append( + { + "pid": patron.pid, + "roles": patron.get("roles"), + "organisation": {"pid": patron.organisation_pid}, } - }) + ) return remove_empties_from_dict(metadata) @classmethod @@ -270,10 +269,9 @@ def get_by_email(cls, email): :return: the user record """ user = BaseUser.query.filter( - func.lower(BaseUser.email) == func.lower(email)).first() - if not user: - return None - return cls(user) + func.lower(BaseUser.email) == func.lower(email) + ).first() + return cls(user) if user else None @classmethod def get_by_username_or_email(cls, username_or_email): diff --git a/rero_ils/modules/users/api_views.py b/rero_ils/modules/users/api_views.py index 85162b6c95..93ef175cd8 100644 --- a/rero_ils/modules/users/api_views.py +++ b/rero_ils/modules/users/api_views.py @@ -25,39 +25,35 @@ from .api import password_validator -api_blueprint = Blueprint( - 'api_user', - __name__, - url_prefix='/user' -) +api_blueprint = Blueprint("api_user", __name__, url_prefix="/user") -@api_blueprint.route('/password/generate', methods=['GET']) +@api_blueprint.route("/password/generate", methods=["GET"]) @check_logged_as_librarian def password_generate(): """Generation of a password.""" - min_length = current_app.config.get('RERO_ILS_PASSWORD_MIN_LENGTH', 8) - special_char = current_app.config.get('RERO_ILS_PASSWORD_SPECIAL_CHAR') - length = int(request.args.get('length', min_length)) + min_length = current_app.config.get("RERO_ILS_PASSWORD_MIN_LENGTH", 8) + special_char = current_app.config.get("RERO_ILS_PASSWORD_SPECIAL_CHAR") + length = int(request.args.get("length", min_length)) if length < min_length: - abort(400, - f'The password must be at least {min_length} characters long.') + abort(400, f"The password must be at least {min_length} characters long.") generator = obj_or_import_string( - current_app.config.get('RERO_ILS_PASSWORD_GENERATOR')) + current_app.config.get("RERO_ILS_PASSWORD_GENERATOR") + ) try: return generator(length=length, special_char=special_char) except 
Exception: - abort(400, 'Password generator error.') + abort(400, "Password generator error.") -@api_blueprint.route('/password/validate', methods=['POST']) +@api_blueprint.route("/password/validate", methods=["POST"]) def password_validate(): """Validation of a password.""" - password = request.get_json().get('password') + password = request.get_json().get("password") if not password: - abort(400, 'The password must be filled in.') + abort(400, "The password must be filled in.") try: password_validator(password) except PasswordValidatorException as pve: abort(400, str(pve)) - return jsonify({'message': 'Valid password'}) + return jsonify({"message": "Valid password"}) diff --git a/rero_ils/modules/users/forms.py b/rero_ils/modules/users/forms.py index e24fd8a36b..cde10fc1e6 100644 --- a/rero_ils/modules/users/forms.py +++ b/rero_ils/modules/users/forms.py @@ -34,9 +34,9 @@ def __init__(self, *args, **kwargs): self.password.validators = [ password_required, PasswordValidator( - length=current_app.config['RERO_ILS_PASSWORD_MIN_LENGTH'], - special_char=current_app.config[ - 'RERO_ILS_PASSWORD_SPECIAL_CHAR']) + length=current_app.config["RERO_ILS_PASSWORD_MIN_LENGTH"], + special_char=current_app.config["RERO_ILS_PASSWORD_SPECIAL_CHAR"], + ), ] @@ -49,7 +49,7 @@ def __init__(self, *args, **kwargs): self.password.validators = [ password_required, PasswordValidator( - length=current_app.config['RERO_ILS_PASSWORD_MIN_LENGTH'], - special_char=current_app.config[ - 'RERO_ILS_PASSWORD_SPECIAL_CHAR']) + length=current_app.config["RERO_ILS_PASSWORD_MIN_LENGTH"], + special_char=current_app.config["RERO_ILS_PASSWORD_SPECIAL_CHAR"], + ), ] diff --git a/rero_ils/modules/users/listener.py b/rero_ils/modules/users/listener.py index 59409ec083..3c609d068e 100644 --- a/rero_ils/modules/users/listener.py +++ b/rero_ils/modules/users/listener.py @@ -27,7 +27,7 @@ def user_register_forms(sender, app=None, **kwargs): :param app: the Flask application instance. :param kwargs: additional arguments. """ - if security := app.extensions.get('security'): + if security := app.extensions.get("security"): # Override Register form security.register_form = RegisterForm @@ -39,6 +39,6 @@ def user_reset_password_forms(sender, app=None, **kwargs): :param app: the Flask application instance. :param kwargs: additional arguments. 
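
The validation endpoint above can be checked with Flask's test client; a
sketch assuming `app` is a configured application and that the REST API
mounts the blueprint under an `/api` prefix (the prefix is an assumption):

    with app.test_client() as client:
        response = client.post(
            "/api/user/password/validate", json={"password": "S3cret!pwd"}
        )
        print(response.status_code, response.get_json())
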
""" - if security := app.extensions.get('security'): + if security := app.extensions.get("security"): # Override Reset password form security.reset_password_form = ResetPasswordForm diff --git a/rero_ils/modules/users/models.py b/rero_ils/modules/users/models.py index 67bf8d0fcc..1163393aa9 100644 --- a/rero_ils/modules/users/models.py +++ b/rero_ils/modules/users/models.py @@ -22,22 +22,26 @@ class UserRole: """Roles available for RERO-ILS users.""" - PATRON = 'patron' - PROFESSIONAL_READ_ONLY = 'pro_read_only' - ACQUISITION_MANAGER = 'pro_acquisition_manager' - FULL_PERMISSIONS = 'pro_full_permissions' - CATALOG_MANAGER = 'pro_catalog_manager' - CIRCULATION_MANAGER = 'pro_circulation_manager' - LIBRARY_ADMINISTRATOR = 'pro_library_administrator' - USER_MANAGER = 'pro_user_manager' - PRO_ENTITY_MANAGER = 'pro_entity_manager' - STATISTICS_MANAGER = 'pro_statistic_manager' + PATRON = "patron" + PROFESSIONAL_READ_ONLY = "pro_read_only" + ACQUISITION_MANAGER = "pro_acquisition_manager" + FULL_PERMISSIONS = "pro_full_permissions" + CATALOG_MANAGER = "pro_catalog_manager" + CIRCULATION_MANAGER = "pro_circulation_manager" + LIBRARY_ADMINISTRATOR = "pro_library_administrator" + USER_MANAGER = "pro_user_manager" + PRO_ENTITY_MANAGER = "pro_entity_manager" + STATISTICS_MANAGER = "pro_statistic_manager" LIBRARIAN_ROLES = [ - PROFESSIONAL_READ_ONLY, ACQUISITION_MANAGER, - CATALOG_MANAGER, CIRCULATION_MANAGER, - LIBRARY_ADMINISTRATOR, USER_MANAGER, PRO_ENTITY_MANAGER, - STATISTICS_MANAGER + PROFESSIONAL_READ_ONLY, + ACQUISITION_MANAGER, + CATALOG_MANAGER, + CIRCULATION_MANAGER, + LIBRARY_ADMINISTRATOR, + USER_MANAGER, + PRO_ENTITY_MANAGER, + STATISTICS_MANAGER, ] PROFESSIONAL_ROLES = [FULL_PERMISSIONS] + LIBRARIAN_ROLES diff --git a/rero_ils/modules/users/validators.py b/rero_ils/modules/users/validators.py index 3f95b77311..f67305445c 100644 --- a/rero_ils/modules/users/validators.py +++ b/rero_ils/modules/users/validators.py @@ -19,8 +19,7 @@ from wtforms import ValidationError -from rero_ils.modules.utils import PasswordValidatorException, \ - password_validator +from rero_ils.modules.utils import PasswordValidatorException, password_validator class PasswordValidator: @@ -39,7 +38,8 @@ def __call__(self, form, field): :raise ValidationError: If the password is invalid. """ try: - password_validator(field.data, length=self.length, - special_char=self.special_char) + password_validator( + field.data, length=self.length, special_char=self.special_char + ) except PasswordValidatorException as e: raise ValidationError(str(e)) from e diff --git a/rero_ils/modules/users/views.py b/rero_ils/modules/users/views.py index a4d31f9a5a..21d882f0c1 100644 --- a/rero_ils/modules/users/views.py +++ b/rero_ils/modules/users/views.py @@ -26,10 +26,10 @@ from flask_login import current_user from invenio_rest import ContentNegotiatedMethodView -from .api import User -from .models import UserRole from ...modules.patrons.api import Patron, current_librarian from ...permissions import login_and_librarian +from .api import User +from .models import UserRole def check_permission(fn): @@ -37,11 +37,13 @@ def check_permission(fn): The access is allow when the connected user is a librarian. """ + @wraps(fn) def is_logged_librarian(*args, **kwargs): """Decorated view.""" login_and_librarian() return fn(*args, **kwargs) + return is_logged_librarian @@ -51,15 +53,16 @@ def check_user_permission(fn): The access is allow when the connected user is a librarian or the user id is the same of the id argument. 
""" + @wraps(fn) def is_logged(*args, **kwargs): """Decorated view.""" if not current_user.is_authenticated: abort(401) - if not current_librarian and \ - str(current_user.id) != kwargs.get('id', None): + if not current_librarian and str(current_user.id) != kwargs.get("id", None): abort(403) return fn(*args, **kwargs) + return is_logged @@ -69,6 +72,7 @@ def check_user_list_permission(fn): The access is allow when the connected user is a librarian or the user id is the same of the id argument. """ + @wraps(fn) def is_logged(*args, **kwargs): """Decorated view.""" @@ -77,6 +81,7 @@ def is_logged(*args, **kwargs): if not current_librarian and not current_user: abort(403) return fn(*args, **kwargs) + return is_logged @@ -86,13 +91,17 @@ def check_user_readonly_permission(fn): The access is allow when the connected user and the profile is not in readonly. """ + @wraps(fn) def is_user_readonly(*args, **kwargs): """Decorated view.""" - if current_app.config.get('RERO_PUBLIC_USERPROFILES_READONLY', False) \ - or not current_user.is_authenticated: + if ( + current_app.config.get("RERO_PUBLIC_USERPROFILES_READONLY", False) + or not current_user.is_authenticated + ): abort(401) return fn(*args, **kwargs) + return is_user_readonly @@ -103,21 +112,15 @@ def __init__(self, **kwargs): """Init.""" super().__init__( method_serializers={ - 'GET': { - 'application/json': json.dumps - }, - 'PUT': { - 'application/json': json.dumps - } - }, - serializers_query_aliases={ - 'json': json.dumps + "GET": {"application/json": json.dumps}, + "PUT": {"application/json": json.dumps}, }, + serializers_query_aliases={"json": json.dumps}, default_method_media_type={ - 'GET': 'application/json', - 'PUT': 'application/json' + "GET": "application/json", + "PUT": "application/json", }, - default_media_type='application/json', + default_media_type="application/json", **kwargs ) @@ -132,10 +135,9 @@ def put(self, id): """Implement the PUT.""" user = User.get_record(id) user = user.update(request.get_json()) - editing_own_public_profile = str(current_user.id) == id and \ - not ( - current_user.has_role(UserRole.FULL_PERMISSIONS) and - current_user.has_role(UserRole.USER_MANAGER) + editing_own_public_profile = str(current_user.id) == id and not ( + current_user.has_role(UserRole.FULL_PERMISSIONS) + and current_user.has_role(UserRole.USER_MANAGER) ) if editing_own_public_profile: Patron.set_communication_channel(user) @@ -149,64 +151,47 @@ def __init__(self, **kwargs): """Init.""" super().__init__( method_serializers={ - 'GET': { - 'application/json': json.dumps - }, - 'POST': { - 'application/json': json.dumps - } - }, - serializers_query_aliases={ - 'json': json.dumps + "GET": {"application/json": json.dumps}, + "POST": {"application/json": json.dumps}, }, + serializers_query_aliases={"json": json.dumps}, default_method_media_type={ - 'GET': 'application/json', - 'POST': 'application/json' + "GET": "application/json", + "POST": "application/json", }, - default_media_type='application/json', + default_media_type="application/json", **kwargs ) @check_user_list_permission def get(self): """Get user info for the professionnal view.""" - email_or_username = request.args.get('q', '').strip() - hits = { - 'hits': { - 'hits': [], - 'total': { - 'relation': 'eq', - 'value': 0 - } - } - } + email_or_username = request.args.get("q", "").strip() + hits = {"hits": {"hits": [], "total": {"relation": "eq", "value": 0}}} if not email_or_username: return hits - if email_or_username.startswith('email:'): - user = User.get_by_email( - 
-                email_or_username[len('email:'):])
-        elif email_or_username.startswith('username:'):
-            user = User.get_by_username(
-                email_or_username[len('username:'):])
+        if email_or_username.startswith("email:"):
+            user = User.get_by_email(email_or_username[len("email:") :])
+        elif email_or_username.startswith("username:"):
+            user = User.get_by_username(email_or_username[len("username:") :])
         else:
             user = User.get_by_username_or_email(email_or_username)
         if not user:
             return hits
         # if librarian: send all user data
         # if patron: send only the user id
-        data = user.dumps() if current_librarian else {'id': user.id}
-        hits['hits']['hits'].append(data)
-        hits['hits']['total']['value'] = 1
+        data = user.dumps() if current_librarian else {"id": user.id}
+        hits["hits"]["hits"].append(data)
+        hits["hits"]["total"]["value"] = 1
         return hits
 
     @check_permission
     def post(self):
         """Implement the POST."""
         user = User.create(request.get_json())
-        editing_own_public_profile = str(current_user.id) == user.id and \
-            not (
-                current_user.has_role(UserRole.FULL_PERMISSIONS) and
-                current_user.has_role(UserRole.USER_MANAGER)
+        editing_own_public_profile = str(current_user.id) == user.id and not (
+            current_user.has_role(UserRole.FULL_PERMISSIONS)
+            and current_user.has_role(UserRole.USER_MANAGER)
        )
        if editing_own_public_profile:
            Patron.set_communication_channel(user)
@@ -214,25 +199,24 @@ def post(self):
 
 
 blueprint = Blueprint(
-    'users',
+    "users",
     __name__,
-    template_folder='templates',
-    static_folder='static',
+    template_folder="templates",
+    static_folder="static",
 )
 
 
-@blueprint.route('/<string:viewcode>/user/profile')
+@blueprint.route("/<string:viewcode>/user/profile")
 @check_user_readonly_permission
 def profile(viewcode):
     """User Profile editor Page."""
-    return render_template('rero_ils/user_profile.html',
-                           viewcode=viewcode)
+    return render_template("rero_ils/user_profile.html", viewcode=viewcode)
 
 
-@blueprint.route('/<string:viewcode>/user/password')
+@blueprint.route("/<string:viewcode>/user/password")
 @check_user_readonly_permission
 def password(viewcode):
     """User change password Page."""
-    return render_template('rero_ils/user_password.html',
-                           viewcode=viewcode,
-                           current_user=current_user)
+    return render_template(
+        "rero_ils/user_password.html", viewcode=viewcode, current_user=current_user
+    )
diff --git a/rero_ils/modules/utils.py b/rero_ils/modules/utils.py
index e53471732c..75e0db8c27 100644
--- a/rero_ils/modules/utils.py
+++ b/rero_ils/modules/utils.py
@@ -18,6 +18,8 @@
 
 """Utilities for rero-ils editor."""
 
+
+import contextlib
 import cProfile
 import os
 import pstats
@@ -25,7 +27,7 @@
 import re
 import string
 import unicodedata
-from datetime import date, datetime, time
+from datetime import date, datetime, time, timezone
 from functools import wraps
 from io import StringIO
 from json import JSONDecodeError, JSONDecoder, dumps
@@ -49,13 +51,13 @@
 from lxml import etree
 from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT
 from requests.adapters import HTTPAdapter
-from requests.packages.urllib3.util.retry import Retry
+from urllib3.util import Retry
 from werkzeug.local import LocalProxy
 
 # jsonschema resolver
 # SEE: RECORDS_REFRESOLVER_STORE for more details
 refresolver_store = LocalProxy(
-    lambda: current_app.extensions['rero-ils'].jsonschema_store
+    lambda: current_app.extensions["rero-ils"].jsonschema_store
 )
 
 
@@ -66,13 +68,14 @@ def get_mef_url(entity_type):
 
     :return the base url to use to contact MEF.
     :rtype str
     """
-    return current_app.config\
-        .get('RERO_ILS_MEF_CONFIG', {})\
-        .get(entity_type, {})\
-        .get('base_url')
+    return (
+        current_app.config.get("RERO_ILS_MEF_CONFIG", {})
+        .get(entity_type, {})
+        .get("base_url")
+    )
 
 
-def cached(timeout=50, key_prefix='default', query_string=False):
+def cached(timeout=50, key_prefix="default", query_string=False):
     """Cache traffic.
 
     Decorator. Use this to cache a function. By default, the cache key is
@@ -91,16 +94,17 @@ def cached(timeout=50, key_prefix="default", query_string=False):
     avoids creating different caches for the same query just because the
     parameters were passed in a different order.
     """
+
     def caching(f):
         @wraps(f)
         def wrapper(*args, **kwargs):
             cache_fun = current_cache.cached(
-                timeout=timeout,
-                key_prefix=key_prefix,
-                query_string=query_string
+                timeout=timeout, key_prefix=key_prefix, query_string=query_string
             )
             return cache_fun(f)(*args, **kwargs)
+
         return wrapper
+
     return caching
 
 
@@ -113,29 +117,30 @@ def memoize(timeout=50):
 
     :param timeout: Default 50. If set to an integer, will cache for that
         amount of time. Unit of time is in seconds.
     """
+
     def memoize(f):
         @wraps(f)
         def wrapper(*args, **kwargs):
             memoize_fun = current_cache.memoize(timeout=timeout)
             return memoize_fun(f)(*args, **kwargs)
+
         return wrapper
+
     return memoize
 
 
 def strtotime(strtime):
     """String to time object."""
-    splittime = strtime.split(':')
-    return time(
-        hour=int(splittime[0]),
-        minute=int(splittime[1])
-    )
+    splittime = strtime.split(":")
+    return time(hour=int(splittime[0]), minute=int(splittime[1]))
 
 
-def do_bulk_index(uuids, doc_type='rec', process=False, verbose=False):
+def do_bulk_index(uuids, doc_type="rec", process=False, verbose=False):
     """Bulk index records."""
     if verbose:
-        click.echo(f' add to index: {len(uuids)}')
+        click.echo(f" add to index: {len(uuids)}")
     from .api import IlsRecordsIndexer
+
     indexer = IlsRecordsIndexer()
     retry = True
     minutes = 1
@@ -144,10 +149,10 @@ def do_bulk_index(uuids, doc_type='rec', process=False, verbose=False):
             indexer.bulk_index(uuids, doc_type=doc_type)
             retry = False
         except Exception as exc:
-            msg = f'Bulk Index Error: retry in {minutes} min {exc}'
+            msg = f"Bulk Index Error: retry in {minutes} min {exc}"
             current_app.logger.error(msg)
             if verbose:
-                click.secho(msg, fg='red')
+                click.secho(msg, fg="red")
             sleep(minutes * 60)
             retry = True
             minutes *= 2
 
 
 def date_string_to_utc(date):
     """Convert a date string to a timezone-aware UTC datetime."""
     parsed_date = parser.parse(date)
-    if parsed_date.tzinfo:
-        return parsed_date
-    return pytz.utc.localize(parsed_date)
+    return parsed_date if parsed_date.tzinfo else pytz.utc.localize(parsed_date)
 
 
 def read_json_record(json_file, buf_size=1024, decoder=JSONDecoder()):
     """Read lazy json records from file.
 
     :param json_file: json file handle
     :param buf_size: buffer size for file read
     :param decoder: decoder to use for decoding
     :return: record Generator
     """
-    buffer = json_file.read(2).replace('\n', '')
+    buffer = json_file.read(2).replace("\n", "")
     # we have to delete the first [ for a list of records
-    if buffer.startswith('['):
+    if buffer.startswith("["):
         buffer = buffer[1:].lstrip()
     while True:
         block = json_file.read(buf_size)
         if not block:
             break
-        buffer += block.replace('\n', '')
+        buffer += block.replace("\n", "")
         pos = 0
         while True:
             try:
@@ -194,7 +197,7 @@ def read_json_record(json_file, buf_size=1024, decoder=JSONDecoder()):
                 if len(buffer) <= 0:
                     # buffer is empty read more data
                     buffer = json_file.read(buf_size)
-                if buffer.startswith(','):
+                if buffer.startswith(","):
                     # delete records delimiters
                     buffer = buffer[1:].lstrip()
 
@@ -233,50 +236,45 @@ def read_xml_record(xml_file):
 
     :return: record Generator
     """
-    yield from lazyxml(
-        file=xml_file,
-        opening_tag='<record>',
-        closing_tag='</record>'
-    )
+    yield from lazyxml(file=xml_file, opening_tag="<record>", closing_tag="</record>")
 
 
 def get_record_class_and_permissions_from_route(route_name):
     """Get record class and permission factories for a record route name."""
-    endpoints = current_app.config.get('RECORDS_REST_ENDPOINTS')
-    endpoints.update(current_app.config.get('CIRCULATION_REST_ENDPOINTS', {}))
+    endpoints = current_app.config.get("RECORDS_REST_ENDPOINTS")
+    endpoints.update(current_app.config.get("CIRCULATION_REST_ENDPOINTS", {}))
     registry = current_app.extensions["invenio-records-resources"].registry
     # invenio records resources case
-    try:
+    with contextlib.suppress(KeyError):
         service = registry.get(route_name)
         record_class = service.record_cls
         permission_cls = service.permission_policy
         permissions = dict(
-            read=lambda record: permission_cls('read', record=record),
-            list=lambda record: permission_cls('search', record=record),
-            create=lambda record: permission_cls('create', record=record),
-            update=lambda record: permission_cls('update', record=record),
-            delete=lambda record: permission_cls('delete', record=record)
+            read=lambda record: permission_cls("read", record=record),
+            list=lambda record: permission_cls("search", record=record),
+            create=lambda record: permission_cls("create", record=record),
+            update=lambda record: permission_cls("update", record=record),
+            delete=lambda record: permission_cls("delete", record=record),
         )
         return record_class, permissions
-    except KeyError:
-        pass
     # legacy invenio records rest case
     for endpoint in endpoints.items():
         record = endpoint[1]
-        list_route = record.get('list_route').replace('/', '')
+        list_route = record.get("list_route").replace("/", "")
         if list_route == route_name:
-            record_class = obj_or_import_string(record.get('record_class'))
+            record_class = obj_or_import_string(record.get("record_class"))
             permissions = dict(
-                read=obj_or_import_string(
-                    record.get('read_permission_factory_imp')),
-                list=obj_or_import_string(
-                    record.get('list_permission_factory_imp')),
+                read=obj_or_import_string(record.get("read_permission_factory_imp")),
+                list=obj_or_import_string(record.get("list_permission_factory_imp")),
                 create=obj_or_import_string(
-                    record.get('create_permission_factory_imp')),
+                    record.get("create_permission_factory_imp")
+                ),
                 update=obj_or_import_string(
-                    record.get('update_permission_factory_imp')),
+                    record.get("update_permission_factory_imp")
+                ),
                 delete=obj_or_import_string(
-                    record.get('delete_permission_factory_imp'))
+                    record.get("delete_permission_factory_imp")
+                ),
             )
             return record_class, permissions
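# Illustrative usage sketch for the streaming `read_json_record` helper
# above (the file name is hypothetical): it yields records one by one from
# a large JSON array without loading the whole file into memory.
with open("documents.json", encoding="utf-8") as json_file:
    for record in read_json_record(json_file):
        print(record.get("pid"))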
""" if not isinstance(module, str): # Get the pid_type for the class module = module.provider.pid_type - endpoints = current_app.config.get('RECORDS_REST_ENDPOINTS', {}) | \ - current_app.config.get('CIRCULATION_REST_ENDPOINTS', {}) + endpoints = current_app.config.get( + "RECORDS_REST_ENDPOINTS", {} + ) | current_app.config.get("CIRCULATION_REST_ENDPOINTS", {}) for idx, endpoint in endpoints.items(): - search_index = endpoint.get('search_index') + search_index = endpoint.get("search_index") if search_index == module or idx == module: return endpoint -def extracted_data_from_ref(input, data='pid'): +def extracted_data_from_ref(input, data="pid"): """Extract a data from a `$ref` string. - :param input: string where to search data, or a dict containing '$ref' key + :param input: string where to search data, or a dict containing "$ref" key :param data: the data to found. Allowed values are : - * 'pid': the pid from the input - * 'resource': the resource search_index from input - * 'record_class': the record class to used to manage the input - * 'record': the record represented by the input + * "pid": the pid from the input + * "resource": the resource search_index from input + * "record_class": the record class to used to manage the input + * "record": the record represented by the input USAGE : - * extracted_data_from_ref('http://localhost/[resource]/[pid]', data='pid') - * extracted_data_from_ref({'$ref': 'http://localhost/[resource]/[pid]'}) + * extracted_data_from_ref("http://localhost/[resource]/[pid]", data="pid") + * extracted_data_from_ref({"$ref": "http://localhost/[resource]/[pid]"}) """ def extract_part(input_string, idx=0): """Extract part of a $ref string.""" - parts = input_string.split('/') + parts = input_string.split("/") if len(parts) > abs(idx): - return input_string.split('/')[idx] + return input_string.split("/")[idx] def get_acronym(): """Get resource acronym for a $ref URI.""" - resource_list = extracted_data_from_ref(input, data='resource') + resource_list = extracted_data_from_ref(input, data="resource") endpoints = { - endpoint.get('search_index'): acronym - for acronym, endpoint - in current_app.config.get('RECORDS_REST_ENDPOINTS', {}).items() + endpoint.get("search_index"): acronym + for acronym, endpoint in current_app.config.get( + "RECORDS_REST_ENDPOINTS", {} + ).items() } return endpoints.get(resource_list) def get_record_class(): """Search about a record_class name for a $ref URI.""" - resource_list = extracted_data_from_ref(input, data='resource') + resource_list = extracted_data_from_ref(input, data="resource") if resource_list is None: return None configuration = get_endpoint_configuration(resource_list) - if configuration and configuration.get('record_class'): - return obj_or_import_string(configuration.get('record_class')) + if configuration and configuration.get("record_class"): + return obj_or_import_string(configuration.get("record_class")) def get_record(): """Try to load a resource corresponding to a $ref URI.""" - pid = extracted_data_from_ref(input, data='pid') - record_class = extracted_data_from_ref(input, data='record_class') + pid = extracted_data_from_ref(input, data="pid") + record_class = extracted_data_from_ref(input, data="record_class") if record_class and pid: return record_class.get_record_by_pid(pid) def get_data_from_es(): """Try to load a resource from elasticsearch.""" - pid = extracted_data_from_ref(input, data='pid') - resource_list = extracted_data_from_ref(input, data='resource') + pid = extracted_data_from_ref(input, 
data="pid") + resource_list = extracted_data_from_ref(input, data="resource") if resource_list is None: return None configuration = get_endpoint_configuration(resource_list) - if pid and configuration and configuration.get('search_class'): - search_class = obj_or_import_string( - configuration.get('search_class')) - result = search_class().filter('term', pid=pid).execute() + if pid and configuration and configuration.get("search_class"): + search_class = obj_or_import_string(configuration.get("search_class")) + result = search_class().filter("term", pid=pid).execute() if len(result) == 1: return result[0].to_dict() if isinstance(input, str): - input = {'$ref': input} + input = {"$ref": input} switcher = { - 'es_record': get_data_from_es, - 'pid': lambda: extract_part(input.get('$ref'), -1), - 'resource': lambda: extract_part(input.get('$ref'), -2), - 'acronym': get_acronym, - 'record_class': get_record_class, - 'record': get_record + "es_record": get_data_from_es, + "pid": lambda: extract_part(input.get("$ref"), -1), + "resource": lambda: extract_part(input.get("$ref"), -2), + "acronym": get_acronym, + "record_class": get_record_class, + "record": get_record, } if data in switcher: return switcher.get(data)() def add_years(initial_date, years): - """Return a date that's `years` years after the date (or datetime) object. + """Return a date that"s `years` years after the date (or datetime) object. Return the same calendar date (month and day) in the destination year, if it exists, otherwise use the following day (thus changing February 29 @@ -386,8 +385,9 @@ def add_years(initial_date, years): try: return initial_date.replace(year=initial_date.year + years) except ValueError: - return initial_date + (date(initial_date.year + years, 1, 1) - - date(initial_date.year, 1, 1)) + return initial_date + ( + date(initial_date.year + years, 1, 1) - date(initial_date.year, 1, 1) + ) def trim_item_barcode_for_record(data=None): @@ -396,10 +396,10 @@ def trim_item_barcode_for_record(data=None): :param data: the patron or item record :return: data with trimmed barcode """ - if data and data.get('barcode'): - data['barcode'] = data.get('barcode').strip() - if data and data.get('patron', {}).get('barcode'): - data['patron']['barcode'][0] = data['patron']['barcode'][0].strip() + if data and data.get("barcode"): + data["barcode"] = data.get("barcode").strip() + if data and data.get("patron", {}).get("barcode"): + data["patron"]["barcode"][0] = data["patron"]["barcode"][0].strip() return data @@ -411,12 +411,10 @@ def generate_item_barcode(data=None): :param data: the item record :return: data with a generated barcode """ - if not data.get('barcode'): - data['barcode'] = 'f-' + datetime.now().strftime('%Y%m%d%I%M%S%f') - if data.get('patron') and not data.get('patron', {}).get('barcode'): - data['patron']['barcode'] = [ - 'f-' + datetime.now().strftime('%Y%m%d%I%M%S%f') - ] + if not data.get("barcode"): + data["barcode"] = "f-" + datetime.now().strftime("%Y%m%d%I%M%S%f") + if data.get("patron") and not data.get("patron", {}).get("barcode"): + data["patron"]["barcode"] = ["f-" + datetime.now().strftime("%Y%m%d%I%M%S%f")] return data @@ -427,18 +425,18 @@ def get_schema_for_resource(resource): Either a resource class (subclass of IlsRecord USAGE: - schema = get_schema_for_resource('ptrn') + schema = get_schema_for_resource("ptrn") schema = get_schema_for_resource(Patron) """ if not isinstance(resource, str): resource = resource.provider.pid_type - schemas = current_app.config.get('RERO_ILS_DEFAULT_JSON_SCHEMA') 
 
 
@@ -427,18 +425,18 @@ def get_schema_for_resource(resource):
 
     Either a resource class (subclass of IlsRecord
 
     USAGE:
-        schema = get_schema_for_resource('ptrn')
+        schema = get_schema_for_resource("ptrn")
         schema = get_schema_for_resource(Patron)
     """
     if not isinstance(resource, str):
         resource = resource.provider.pid_type
-    schemas = current_app.config.get('RERO_ILS_DEFAULT_JSON_SCHEMA')
+    schemas = current_app.config.get("RERO_ILS_DEFAULT_JSON_SCHEMA")
     if resource in schemas:
         return (
-            f'{current_app.config.get("JSONSCHEMAS_URL_SCHEME")}://'
-            f'{current_app.config.get("JSONSCHEMAS_HOST")}'
-            f'{current_app.config.get("JSONSCHEMAS_ENDPOINT")}'
-            f'{schemas[resource]}'
+            f"{current_app.config.get('JSONSCHEMAS_URL_SCHEME')}://"
+            f"{current_app.config.get('JSONSCHEMAS_HOST')}"
+            f"{current_app.config.get('JSONSCHEMAS_ENDPOINT')}"
+            f"{schemas[resource]}"
         )
 
 
@@ -450,12 +448,16 @@ def pid_exists(info, pid_type, pid, raise_on_error=False):
     :param raise_on_error: Raise PidDoesNotExist exception if enabled.
     :return: True if pid was found. Otherwise False.
     """
-    if PersistentIdentifier.query\
-            .filter_by(pid_type=str(pid_type), pid_value=str(pid))\
-            .count() == 1:
+    if (
+        PersistentIdentifier.query.filter_by(
+            pid_type=str(pid_type), pid_value=str(pid)
+        ).count()
+        == 1
+    ):
         return True
     if raise_on_error:
         from .api import IlsRecordError
+
         raise IlsRecordError.PidDoesNotExist(info, pid_type, pid)
 
 
@@ -465,15 +467,16 @@ def pids_exists_in_data(info, data, required=None, not_required=None):
     :param info: Info to add to errors description.
     :param data: data with information to test.
     :param required: dictionary with required pid types and key in data to
-        test. example {'doc', 'document'}
+        test. example {"doc", "document"}
     :param not_required: dictionary with not required pid types and keys
-        in data to test. example {'item', 'item'}
+        in data to test. example {"item", "item"}
     :return: True if all requirements are met, otherwise False.
     """
+
     def pids_exists_in_data_test(info, data, tests, is_required):
         """Test that the pids exist."""
         return_value = []
-        endpoints = current_app.config['RECORDS_REST_ENDPOINTS']
+        endpoints = current_app.config["RECORDS_REST_ENDPOINTS"]
         for pid_type, keys in tests.items():
             # make a list of keys
             if isinstance(keys, str):
@@ -484,43 +487,36 @@ def pids_exists_in_data_test(info, data, tests, is_required):
                 data_to_test_list = [data_to_test_list]
             for data_to_test in data_to_test_list:
                 try:
-                    list_route = endpoints[pid_type]['list_route']
-                    data_pid = data_to_test.get('pid') or \
-                        data_to_test.get('$ref').split(list_route)[1]
+                    list_route = endpoints[pid_type]["list_route"]
+                    data_pid = (
+                        data_to_test.get("pid")
+                        or data_to_test.get("$ref").split(list_route)[1]
+                    )
                 except Exception:
                     data_pid = None
                 if not data_pid and is_required:
-                    return_value.append(f'{info}: No pid found: '
-                                        f'{pid_type} {data_to_test}')
+                    return_value.append(
+                        f"{info}: No pid found: {pid_type} {data_to_test}"
+                    )
                 else:
                     if data_pid and not pid_exists(
-                        info=info,
-                        pid_type=pid_type,
-                        pid=data_pid
+                        info=info, pid_type=pid_type, pid=data_pid
                     ):
                         return_value.append(
-                            f'{info}: Pid does not exist: '
-                            f'{pid_type} {data_pid}'
+                            f"{info}: Pid does not exist: {pid_type} {data_pid}"
                         )
             if is_required and not data_to_test_list:
-                return_value.append(
-                    f'{info}: No data found: {key}')
+                return_value.append(f"{info}: No data found: {key}")
         return return_value
 
     required = required or {}
     not_required = not_required or {}
     return_value_required = pids_exists_in_data_test(
-        info=info,
-        data=data,
-        tests=required,
-        is_required=True
+        info=info, data=data, tests=required, is_required=True
     )
     return_value_not_required = pids_exists_in_data_test(
-        info=info,
-        data=data,
-        tests=not_required,
-        is_required=False
+        info=info, data=data, tests=not_required, is_required=False
     )
     return return_value_required + return_value_not_required
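# Illustrative sketch of `pid_exists` / `pids_exists_in_data` above (the pid
# values are hypothetical and this must run inside an application context
# with a populated pidstore):
if pid_exists(info="document test", pid_type="doc", pid="1"):
    print("document pid 1 is registered")
errors = pids_exists_in_data(
    info="holding", data={"document": {"pid": "1"}}, required={"doc": "document"}
)
print(errors or "all referenced pids exist")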
 
 
@@ -528,8 +524,8 @@ def pids_exists_in_data_test(info, data, tests, is_required):
 def get_base_url():
     """Get base url."""
     return (
-        f'{current_app.config.get("RERO_ILS_APP_URL_SCHEME")}://'
-        f'{current_app.config.get("RERO_ILS_APP_HOST")}'
+        f"{current_app.config.get('RERO_ILS_APP_URL_SCHEME')}://"
+        f"{current_app.config.get('RERO_ILS_APP_HOST')}"
     )
 
 
@@ -541,10 +537,10 @@ def get_ref_for_pid(module, pid):
 
     :return: url for record
     """
     configuration = get_endpoint_configuration(module)
-    if module == 'loans':
-        configuration = {'list_route': '/loans/'}
-    if configuration and configuration.get('list_route'):
-        return f'{get_base_url()}/api{configuration.get("list_route")}{pid}'
+    if module == "loans":
+        configuration = {"list_route": "/loans/"}
+    if configuration and configuration.get("list_route"):
+        return f"{get_base_url()}/api{configuration.get('list_route')}{pid}"
 
 
 def get_record_class_from_schema_or_pid_type(schema=None, pid_type=None):
@@ -560,9 +556,10 @@ def get_record_class_from_schema_or_pid_type(schema=None, pid_type=None):
     if schema:
         pid_type = get_pid_type_from_schema(schema)
     return obj_or_import_string(
-        current_app.config
-        .get('RECORDS_REST_ENDPOINTS')
-        .get(pid_type, {}).get('record_class'))
+        current_app.config.get("RECORDS_REST_ENDPOINTS")
+        .get(pid_type, {})
+        .get("record_class")
+    )
 
 
@@ -572,7 +569,7 @@ def get_indexer_class_by_resource(resource):
 
     :param resource: resource name
     :return: indexer class for resource name
     """
     endpoint = get_endpoint_configuration(resource)
-    if indexer_cls := endpoint.get('indexer_class'):
+    if indexer_cls := endpoint.get("indexer_class"):
         return obj_or_import_string(indexer_cls)
 
 
@@ -595,27 +592,29 @@ def get_pid_type_from_schema(schema):
     :param schema: record schema.
     :return: the pid type.
     """
-    try:
-        pid_type_schema_value = schema.split('schemas')[1]
-        schemas = current_app.config.get('RERO_ILS_DEFAULT_JSON_SCHEMA')
-        return [key for key, value in schemas.items()
-                if value == pid_type_schema_value][0]
-    except IndexError:
-        pass
+    with contextlib.suppress(IndexError):
+        pid_type_schema_value = schema.split("schemas")[1]
+        schemas = current_app.config.get("RERO_ILS_DEFAULT_JSON_SCHEMA")
+        return [
+            key for key, value in schemas.items() if value == pid_type_schema_value
+        ][0]
 
 
 def get_patron_from_arguments(**kwargs):
     """Try to load a patron from potential arguments."""
     from .patrons.api import Patron
-    required_arguments = {'patron', 'patron_barcode', 'patron_pid', 'loan'}
+
+    required_arguments = {"patron", "patron_barcode", "patron_pid", "loan"}
     # if the kwargs does not contain at least one of the required arguments
     # return None
     if not required_arguments.intersection(kwargs):
         return None
-    return kwargs.get('patron') \
-        or Patron.get_record_by_pid(kwargs.get('patron_pid')) \
-        or Patron.get_record_by_pid(kwargs.get('loan').get('patron_pid'))\
-        or Patron.get_patron_by_barcode(kwargs.get('patron_barcode'))
+    return (
+        kwargs.get("patron")
+        or Patron.get_record_by_pid(kwargs.get("patron_pid"))
+        or Patron.get_record_by_pid(kwargs.get("loan").get("patron_pid"))
+        or Patron.get_patron_by_barcode(kwargs.get("patron_barcode"))
+    )
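# Illustrative sketch of `get_base_url` / `get_ref_for_pid` defined above
# (the resulting host and the pid values are hypothetical; both depend on
# the RERO_ILS_APP_* configuration):
ref = get_ref_for_pid("loans", "42")
# -> e.g. "https://bib.rero.ch/api/loans/42"
document_ref = {"$ref": get_ref_for_pid("doc", "123")}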
 
 
 def set_timestamp(name, **kwargs):
@@ -628,35 +627,38 @@ def set_timestamp(name, **kwargs):
     :param kwargs: any additional arguments
     :returns: time of time stamp
     """
-    time_stamps = current_cache.get('timestamps') or {}
-    utc_now = datetime.utcnow()
-    time_stamps[name] = kwargs | {'time': utc_now, 'name': name}
-    if not current_cache.set(key='timestamps', value=time_stamps, timeout=0):
-        current_app.logger.warning(f'Can not set time stamp for: {name}')
+    time_stamps = current_cache.get("timestamps") or {}
+    utc_now = datetime.now(timezone.utc)
+    time_stamps[name] = kwargs | {"time": utc_now, "name": name}
+    if not current_cache.set(key="timestamps", value=time_stamps, timeout=0):
+        current_app.logger.warning(f"Can not set time stamp for: {name}")
     return utc_now
 
 
 def settimestamp(func):
     """Set timestamp function wrapper."""
+
     @wraps(func)
     def wrapped(*args, **kwargs):
         result = func(*args, **kwargs)
         set_timestamp(func.__name__, result=result)
         return result
+
     return wrapped
 
 
-def profile(output_file=None, sort_by='cumulative', lines_to_print=None,
-            strip_dirs=False):
+def profile(
+    output_file=None, sort_by="cumulative", lines_to_print=None, strip_dirs=False
+):
     """A time profiler decorator.
 
     Inspired by and modified the profile decorator of Giampaolo Rodola:
     http://code.activestate.com/recipes/577817-profile-decorator/
     Args:
     output_file: str or None. Default is None
         Path of the output file. If only name of the file is given, it's
         saved in the current directory.
        If it's None, the name of the decorated function is used.
    sort_by: str or SortKey enum or tuple/list of str/SortKey enum
        Sorting criteria for the Stats object.
        For a list of valid string and SortKey refer to:
@@ -664,7 +666,7 @@ def profile(output_file=None, sort_by="cumulative", lines_to_print=None,
     lines_to_print: int or None
         Number of lines to print. Default (None) is for all the lines.
         This is useful in reducing the size of the printout, especially
-        that sorting by 'cumulative', the time consuming operations
+        that sorting by "cumulative", the time consuming operations
         are printed toward the top of the file.
     strip_dirs: bool
         Whether to remove the leading path info from file names.
@@ -676,14 +678,14 @@ def inner(func):
     def inner(func):
         @wraps(func)
         def wrapper(*args, **kwargs):
-            _output_file = output_file or f'{func.__name__}.prof'
+            _output_file = output_file or f"{func.__name__}.prof"
             pr = cProfile.Profile()
             pr.enable()
             retval = func(*args, **kwargs)
             pr.disable()
             pr.dump_stats(_output_file)
 
-            with open(_output_file, 'w') as f:
+            with open(_output_file, "w") as f:
                 ps = pstats.Stats(pr, stream=f)
                 if strip_dirs:
                     ps.strip_dirs()
 
@@ -701,15 +703,16 @@ def timeit(func):
     """Output how long a function took to execute."""
+
     @wraps(func)
     def wrapped(*args, **kwargs):
         start_time = datetime.now()
         result = func(*args, **kwargs)
         click.echo(
-            f'\t>> timeit: {datetime.now() - start_time} '
-            f'{func} {type(args[0])}'
+            f"\t>> timeit: {datetime.now() - start_time} {func} {type(args[0])}"
         )
         return result
+
     return wrapped
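# Illustrative sketch of the `timeit` decorator defined above (the decorated
# function is hypothetical):
@timeit
def count_records(records):
    return len(records)

count_records(["a", "b"])  # echoes e.g.: >> timeit: 0:00:00.000012 <function ...> <class 'list'>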
 
 
@@ -719,49 +722,49 @@ def get_timestamp(name):
     :param name: name of time stamp.
     :returns: data for time stamp
     """
-    if time_stamps := current_cache.get('timestamps'):
+    if time_stamps := current_cache.get("timestamps"):
         data = time_stamps.get(name, {})
-        data.pop('name', None)
-        return data if data else None
+        data.pop("name", None)
+        return data or None
 
 
 def csv_metadata_line(record, uuid, date):
     """Build CSV metadata table line."""
     created_date = updated_date = date
-    sep = '\t'
-    data = unicodedata.normalize('NFC', dumps(record, ensure_ascii=False))
+    sep = "\t"
+    data = unicodedata.normalize("NFC", dumps(record, ensure_ascii=False))
     metadata = (
         created_date,
         updated_date,
         uuid,
         data,
-        '1',
+        "1",
     )
     metadata_line = sep.join(metadata)
-    return metadata_line + '\n'
+    return metadata_line + "\n"
 
 
 def csv_pidstore_line(pid_type, pid, uuid, date):
     """Build CSV pidstore table line."""
     created_date = updated_date = date
-    sep = '\t'
+    sep = "\t"
     pidstore_data = [
         created_date,
         updated_date,
         pid_type,
         pid,
-        'R',
-        'rec',
+        "R",
+        "rec",
         uuid,
     ]
     pidstore_line = sep.join(pidstore_data)
-    return pidstore_line + '\n'
+    return pidstore_line + "\n"
 
 
 def raw_connection():
     """Return a raw connection to the database."""
     with current_app.app_context():
-        URI = current_app.config.get('SQLALCHEMY_DATABASE_URI')
+        URI = current_app.config.get("SQLALCHEMY_DATABASE_URI")
         engine = sqlalchemy.create_engine(URI)
         connection = engine.raw_connection()
         connection.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
 
@@ -773,18 +776,13 @@ def db_copy_from(buffer, table, columns, raise_exception=True):
     connection = raw_connection()
     cursor = connection.cursor()
     try:
-        cursor.copy_from(
-            file=buffer,
-            table=table,
-            columns=columns,
-            sep='\t'
-        )
+        cursor.copy_from(file=buffer, table=table, columns=columns, sep="\t")
         connection.commit()
     except psycopg2.DataError as error:
         if raise_exception:
             raise psycopg2.DataError(error)
         else:
-            current_app.logger.error('data load error: {error}')
+            current_app.logger.error(f"data load error: {error}")
     connection.close()
 
 
@@ -793,39 +791,35 @@ def db_copy_to(filehandle, table, columns, raise_exception=True):
     connection = raw_connection()
     cursor = connection.cursor()
     try:
-        cursor.copy_to(
-            file=filehandle,
-            table=table,
-            columns=columns,
-            sep='\t'
-        )
+        cursor.copy_to(file=filehandle, table=table, columns=columns, sep="\t")
         cursor.connection.commit()
     except psycopg2.DataError as error:
         if raise_exception:
             raise psycopg2.DataError(error)
         else:
-            current_app.logger.error(f'data load error: {error}')
-    cursor.execute(f'VACUUM ANALYSE {table}')
+            current_app.logger.error(f"data load error: {error}")
+    cursor.execute(f"VACUUM ANALYSE {table}")
     cursor.close()
     connection.close()
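# Illustrative sketch of the CSV line builders above, which produce the
# tab-separated rows consumed by `bulk_load` (uuid and date values are
# hypothetical):
line = csv_metadata_line({"pid": "1", "title": "test"}, "abc-123", "2024-06-03")
# -> "2024-06-03\t2024-06-03\tabc-123\t{...json...}\t1\n"
pid_line = csv_pidstore_line("doc", "1", "abc-123", "2024-06-03")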
 
 
-def bulk_load(pid_type, data, table, columns, bulk_count=0, verbose=False,
-              reindex=False):
+def bulk_load(
+    pid_type, data, table, columns, bulk_count=0, verbose=False, reindex=False
+):
     """Bulk load pid_type data to table."""
     if bulk_count <= 0:
-        bulk_count = current_app.config.get('BULK_CHUNK_COUNT', 100000)
+        bulk_count = current_app.config.get("BULK_CHUNK_COUNT", 100000)
     count = 0
     buffer = StringIO()
     buffer_uuid = []
-    index = columns.index('id') if 'id' in columns else -1
+    index = columns.index("id") if "id" in columns else -1
     start_time = datetime.now()
-    with open(data, 'r', encoding='utf-8', buffering=1) as input_file:
+    with open(data, "r", encoding="utf-8", buffering=1) as input_file:
         for line in input_file:
             count += 1
-            buffer.write(line.replace('\\', '\\\\'))
+            buffer.write(line.replace("\\", "\\\\"))
             if index >= 0 and reindex:
-                buffer_uuid.append(line.split('\t')[index])
+                buffer_uuid.append(line.split("\t")[index])
             if count % bulk_count == 0:
                 buffer.flush()
                 buffer.seek(0)
@@ -834,16 +828,14 @@ def bulk_load(pid_type, data, table, columns, bulk_count=0, verbose=False,
                 end_time = datetime.now()
                 diff_time = end_time - start_time
                 start_time = end_time
                 click.echo(
-                    f'{pid_type} copy from file: {count} '
-                    f'{diff_time.seconds}s',
-                    nl=False
+                    f"{pid_type} copy from file: {count} {diff_time.seconds}s",
+                    nl=False,
                 )
                 db_copy_from(buffer=buffer, table=table, columns=columns)
                 buffer.close()
 
                 if index >= 0 and reindex:
-                    do_bulk_index(uuids=buffer_uuid, doc_type=pid_type,
-                                  verbose=verbose)
+                    do_bulk_index(uuids=buffer_uuid, doc_type=pid_type, verbose=verbose)
                     buffer_uuid.clear()
                 elif verbose:
                     click.echo()
@@ -853,33 +845,24 @@ def bulk_load(pid_type, data, table, columns, bulk_count=0, verbose=False,
     end_time = datetime.now()
     diff_time = end_time - start_time
     click.echo(
-        f'{pid_type} copy from file: {count} {diff_time.seconds}s',
-        nl=False
+        f"{pid_type} copy from file: {count} {diff_time.seconds}s", nl=False
     )
     buffer.flush()
     buffer.seek(0)
     db_copy_from(buffer=buffer, table=table, columns=columns)
     buffer.close()
     if index >= 0 and reindex:
-        do_bulk_index(uuids=buffer_uuid, doc_type=pid_type,
-                      verbose=verbose)
+        do_bulk_index(uuids=buffer_uuid, doc_type=pid_type, verbose=verbose)
         buffer_uuid.clear()
     elif verbose:
         click.echo()
 
 
-def bulk_load_metadata(pid_type, metadata, bulk_count=0, verbose=True,
-                       reindex=False):
+def bulk_load_metadata(pid_type, metadata, bulk_count=0, verbose=True, reindex=False):
     """Bulk load pid_type data to metadata table."""
     record_class = get_record_class_from_schema_or_pid_type(pid_type=pid_type)
     table, identifier = record_class.get_metadata_identifier_names()
-    columns = (
-        'created',
-        'updated',
-        'id',
-        'json',
-        'version_id'
-    )
+    columns = ("created", "updated", "id", "json", "version_id")
     bulk_load(
         pid_type=pid_type,
         data=metadata,
@@ -887,22 +870,21 @@ def bulk_load_metadata(pid_type, metadata, bulk_count=0, verbose=True,
         columns=columns,
         bulk_count=bulk_count,
         verbose=verbose,
-        reindex=reindex
+        reindex=reindex,
     )
 
 
-def bulk_load_pidstore(pid_type, pidstore, bulk_count=0, verbose=True,
-                       reindex=False):
+def bulk_load_pidstore(pid_type, pidstore, bulk_count=0, verbose=True, reindex=False):
     """Bulk load pid_type data to pidstore table."""
-    table = 'pidstore_pid'
+    table = "pidstore_pid"
     columns = (
-        'created',
-        'updated',
-        'pid_type',
-        'pid_value',
-        'status',
-        'object_type',
-        'object_uuid',
+        "created",
+        "updated",
+        "pid_type",
+        "pid_value",
+        "status",
+        "object_type",
+        "object_uuid",
     )
     bulk_load(
         pid_type=pid_type,
         data=pidstore,
@@ -911,7 +893,7 @@ def bulk_load_pidstore(pid_type, pidstore, bulk_count=0, verbose=True,
         columns=columns,
         bulk_count=bulk_count,
         verbose=verbose,
-        reindex=reindex
+        reindex=reindex,
     )
 
 
 def bulk_load_pids(pid_type, ids, bulk_count=0, verbose=True, reindex=False):
     """Bulk load pid_type data to id table."""
     record_class = get_record_class_from_schema_or_pid_type(pid_type=pid_type)
     metadata, identifier = record_class.get_metadata_identifier_names()
-    columns = ('recid', )
+    columns = ("recid",)
     bulk_load(
         pid_type=pid_type,
         data=ids,
@@ -927,7 +909,7 @@ def bulk_load_pids(pid_type, ids, bulk_count=0, verbose=True, reindex=False):
         columns=columns,
         bulk_count=bulk_count,
         verbose=verbose,
-        reindex=reindex
+        reindex=reindex,
     )
     max_pid = 0
     with open(ids) as file:
@@ -940,60 +922,50 @@ def bulk_save(pid_type, file_name, table, columns, verbose=False):
     """Bulk save pid_type data to file."""
-    with open(file_name, 'w', encoding='utf-8') as output_file:
-        db_copy_to(
-            filehandle=output_file,
-            table=table,
-            columns=columns
-        )
+    with open(file_name, "w", encoding="utf-8") as output_file:
+        db_copy_to(filehandle=output_file, table=table, columns=columns)
 
 
 def bulk_save_metadata(pid_type, file_name, verbose=False):
     """Bulk save pid_type data from metadata table."""
     if verbose:
-        click.echo(f'Save {pid_type} metadata to file: {file_name}')
+        click.echo(f"Save {pid_type} metadata to file: {file_name}")
     record_class = get_record_class_from_schema_or_pid_type(pid_type=pid_type)
     metadata, identifier = record_class.get_metadata_identifier_names()
-    columns = (
-        'created',
-        'updated',
-        'id',
-        'json',
-        'version_id'
-    )
+    columns = ("created", "updated", "id", "json", "version_id")
     bulk_save(
         pid_type=pid_type,
         file_name=file_name,
         table=metadata,
         columns=columns,
-        verbose=verbose
+        verbose=verbose,
     )
 
 
 def bulk_save_pidstore(pid_type, file_name, file_name_tmp, verbose=False):
     """Bulk save pid_type data from pids table."""
     if verbose:
-        click.echo(f'Save {pid_type} pidstore to file: {file_name}')
+        click.echo(f"Save {pid_type} pidstore to file: {file_name}")
     if not os.path.isfile(file_name_tmp):
-        table = 'pidstore_pid'
+        table = "pidstore_pid"
         columns = (
-            'created',
-            'updated',
-            'pid_type',
-            'pid_value',
-            'status',
-            'object_type',
-            'object_uuid',
+            "created",
+            "updated",
+            "pid_type",
+            "pid_value",
+            "status",
+            "object_type",
+            "object_uuid",
        )
        bulk_save(
            pid_type=pid_type,
            file_name=file_name_tmp,
            table=table,
            columns=columns,
-            verbose=verbose
+            verbose=verbose,
        )
    # clean pid file
-    with open(file_name_tmp, 'r') as file_in:
+    with open(file_name_tmp, "r") as file_in:
         with open(file_name, "w") as file_out:
             count = 0
             for line in file_in:
 
@@ -1006,31 +978,32 @@
 def bulk_save_pids(pid_type, file_name, verbose=False):
     """Bulk save pid_type data from id table."""
     if verbose:
-        click.echo(f'Save {pid_type} ids to file: {file_name}')
+        click.echo(f"Save {pid_type} ids to file: {file_name}")
     record_class = get_record_class_from_schema_or_pid_type(pid_type=pid_type)
     metadata, identifier = record_class.get_metadata_identifier_names()
-    columns = ('recid', )
+    columns = ("recid",)
     bulk_save(
         pid_type=pid_type,
         file_name=file_name,
         table=identifier.__tablename__,
         columns=columns,
-        verbose=verbose
+        verbose=verbose,
     )
 
 
 def number_records_in_file(json_file, type):
     """Get number of records per file."""
     count = 0
-    with open(json_file, 'r', buffering=1) as file:
+    with open(json_file, "r", buffering=1) as file:
         for line in file:
-            if (type == 'json' and '"pid"' in line) or type == 'csv':
+            if (type == "json" and '"pid"' in line) or type == "csv":
                 count += 1
     return count
 
 
-def requests_retry_session(retries=5, backoff_factor=0.5,
-                           status_forcelist=(500, 502, 504), session=None):
+def requests_retry_session(
+    retries=5, backoff_factor=0.5, status_forcelist=(500, 502, 504), session=None
+):
     """Request retry session.
 
     :param retries: The total number of retry attempts to make.
@@ -1049,8 +1022,8 @@ def requests_retry_session(retries=5, backoff_factor=0.5,
         status_forcelist=status_forcelist,
     )
     adapter = HTTPAdapter(max_retries=retry)
-    session.mount('http://', adapter)
-    session.mount('https://', adapter)
+    session.mount("http://", adapter)
+    session.mount("https://", adapter)
     return session
 
 
@@ -1066,13 +1039,13 @@ def __init__(self, filename, indent=2):
 
         :param indent: indentation.
         """
         self.indent = indent
-        self.file_handle = open(filename, 'w')
-        self.file_handle.write('[')
+        self.file_handle = open(filename, "w")
+        self.file_handle.write("[")
 
     def __del__(self):
         """Destructor."""
         if self.file_handle:
-            self.file_handle.write('\n]')
+            self.file_handle.write("\n]")
             self.file_handle.close()
             self.file_handle = None
 
@@ -1098,13 +1071,13 @@ def write(self, data):
 
         :param data: JSON data to write into the file.
         """
         if self.count > 0:
-            self.file_handle.write(',')
+            self.file_handle.write(",")
         if self.indent:
-            for line in dumps(data, indent=self.indent).split('\n'):
-                self.file_handle.write(f'\n{" ".ljust(self.indent)}')
+            for line in dumps(data, indent=self.indent).split("\n"):
+                self.file_handle.write(f"\n{' '.ljust(self.indent)}")
                 self.file_handle.write(line)
         else:
-            self.file_handle.write(dumps(data), separators=(',', ':'))
+            self.file_handle.write(dumps(data, separators=(",", ":")))
         self.count += 1
 
     def close(self):
 
@@ -1115,6 +1088,7 @@ def close(self):
 def set_user_name(sender, user):
     """Set the username in the current flask session."""
     from .patrons.api import current_librarian, current_patrons
+
     user_name = None
     remove_user_name(sender, user)
 
@@ -1123,28 +1097,25 @@ def set_user_name(sender, user):
     elif current_patrons:
         user_name = current_patrons[0].formatted_name
     else:
-        try:
+        with contextlib.suppress(AttributeError):
             user_name = current_user.email
-        # AnonymousUser
-        except AttributeError:
-            pass
     if user_name:
-        session['user_name'] = user_name
+        session["user_name"] = user_name
 
 
 def remove_user_name(sender, user):
     """Remove the username in the current flask session."""
-    if session.get('user_name'):
-        del session['user_name']
+    if session.get("user_name"):
+        del session["user_name"]
 
 
 def sorted_pids(query):
     """Get sorted pids from a ES query."""
-    pids = [hit.pid for hit in query.source('pid').scan()]
+    pids = [hit.pid for hit in query.source("pid").scan()]
     try:
         return sorted(pids, key=int)
     except Exception as err:
-        current_app.logger.info(f'Can not sort pids from query: {err}')
+        current_app.logger.info(f"Can not sort pids from query: {err}")
         return pids
 
 
@@ -1159,21 +1130,21 @@ def get_objects(record_class, query):
         yield record_class.get_record(hit.meta.id)
 
 
-def strip_chars(string, extra=u''):
+def strip_chars(string, extra=""):
     """Remove control characters from string."""
-    remove_re = re.compile(u'[\x00-\x08\x0B-\x0C\x0E-\x1F\x7F%s]' % extra)
-    new_string, _ = remove_re.subn('', string)
+    remove_re = re.compile("[\x00-\x08\x0B-\x0C\x0E-\x1F\x7F%s]" % extra)
+    new_string, _ = remove_re.subn("", string)
     return new_string
 
 
-def truncate_string(str_input, max_length, ellipsis='...'):
+def truncate_string(str_input, max_length, ellipsis="..."):
     """Truncate a string if too long and add an ellipsis."""
     if len(str_input) > max_length:
-        return str_input[:max_length - len(ellipsis)] + ellipsis
+        return str_input[: max_length - len(ellipsis)] + ellipsis
     return str_input
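# Illustrative usage sketch of `requests_retry_session` above (the URL is
# hypothetical): failed requests on HTTP 500/502/504 are retried with
# exponential backoff before finally raising.
session = requests_retry_session(retries=3, backoff_factor=1.0)
response = session.get("https://bib.rero.ch/api/documents/1")
response.raise_for_status()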
 
 
-def draw_data_table(columns, rows=[], padding=''):
+def draw_data_table(columns, rows=[], padding=""):
     """Draw data as a table using ASCII characters.
 
     :param columns: the column headers. Each column is a tuple that must
@@ -1187,44 +1158,49 @@ def draw_data_table(columns, rows=[], padding=''):
 
     def table_header():
         column_lengths = [column[1] for column in columns]
 
-        def draw_line(col_lengths, sep='┼'):
-            return sep.join(['─' * length for length in col_lengths])
+        def draw_line(col_lengths, sep="┼"):
+            return sep.join(["─" * length for length in col_lengths])
 
-        def draw_column_name(cols, sep='│', pad=' '):
-            return sep.join([
-                f'{pad}{truncate_string(col[0], col[1] - len(pad * 2))}{pad}'
-                .ljust(col[1])
-                for col in cols
-            ])
+        def draw_column_name(cols, sep="│", pad=" "):
+            return sep.join(
+                [
+                    f"{pad}{truncate_string(col[0], col[1] - len(pad * 2))}{pad}".ljust(
+                        col[1]
+                    )
+                    for col in cols
+                ]
+            )
 
-        return f"{padding}┌{draw_line(column_lengths, sep='┬')}┐\n" + \
-               f"{padding}│{draw_column_name(columns)}│\n" + \
-               f"{padding}├{draw_line(column_lengths)}┤\n"
+        return (
+            f"{padding}┌{draw_line(column_lengths, sep='┬')}┐\n"
+            + f"{padding}│{draw_column_name(columns)}│\n"
+            + f"{padding}├{draw_line(column_lengths)}┤\n"
+        )
 
     def table_footer():
         return f"{padding}└{'┴'.join(['─' * col[1] for col in columns])}┘\n"
 
-    def table_rows(sep='│'):
-        def draw_row_content(row, pad=' '):
+    def table_rows(sep="│"):
+        def draw_row_content(row, pad=" "):
             parts = []
             for idx, col_content in enumerate(row, 0):
                 column = columns[idx]
                 col_length = columns[idx][1]
-                align = column[2] if len(column) >= 3 else 'left'
+                align = column[2] if len(column) >= 3 else "left"
                 data = truncate_string(col_content, col_length - len(pad * 2))
-                if align == 'right':
-                    data = f'{pad}{data}{pad}'.rjust(col_length)
-                elif align == 'center':
-                    data = f'{pad}{data}{pad}'.center(col_length)
+                if align == "right":
+                    data = f"{pad}{data}{pad}".rjust(col_length)
+                elif align == "center":
+                    data = f"{pad}{data}{pad}".center(col_length)
                 else:
-                    data = f'{pad}{data}{pad}'.ljust(col_length)
+                    data = f"{pad}{data}{pad}".ljust(col_length)
                 parts.append(data)
             return sep.join(parts)
 
-        return "\n".join([
-            f"{padding}{sep}{draw_row_content(row)}{sep}"
-            for row in rows
-        ])+"\n"
+        return (
+            "\n".join([f"{padding}{sep}{draw_row_content(row)}{sep}" for row in rows])
+            + "\n"
+        )
 
     return table_header() + table_rows() + table_footer()
 
 
@@ -1236,14 +1212,14 @@ def get_all_roles():
     """
     roles = []
     # Load system roles registered into invenio-access
-    if access_ext := current_app.extensions['invenio-access']:
+    if access_ext := current_app.extensions["invenio-access"]:
         roles = [
-            (role_name, 'system_role')
+            (role_name, "system_role")
             for role_name in access_ext.system_roles.keys()
-            if role_name != 'system_process'
+            if role_name != "system_process"
         ]
     # Complete with existing roles from `invenio-accounts`
-    roles.extend([(role.name, 'role') for role in Role.query.all()])
+    roles.extend([(role.name, "role") for role in Role.query.all()])
     return roles
 
 
@@ -1260,23 +1236,27 @@ def password_validator(pw, length=8, special_char=False):
     :return True or raise PasswordValidatorException
     """
     if len(pw) < length:
-        raise PasswordValidatorException(ngettext(
-            'Field must be at least %(num)d character long.',
-            'Field must be at least %(num)d characters long.',
-            num=length
-        ))
+        raise PasswordValidatorException(
+            ngettext(
+                "Field must be at least %(num)d character long.",
+                "Field must be at least %(num)d characters long.",
+                num=length,
+            )
+        )
     if not set(string.ascii_lowercase).intersection(pw):
         raise PasswordValidatorException(
-            _('The password must contain a lower case character.'))
+            _("The password must contain a lower case character.")
+        )
     if not set(string.ascii_uppercase).intersection(pw):
         raise PasswordValidatorException(
-            _('The password must contain an upper case character.'))
+            _("The password must contain an upper case character.")
+        )
     if not set(string.digits).intersection(pw):
-        raise PasswordValidatorException(
-            _('The password must contain a number.'))
+        raise PasswordValidatorException(_("The password must contain a number."))
     if special_char and not set(string.punctuation).intersection(pw):
         raise PasswordValidatorException(
-            _('The password must contain a special character.'))
+            _("The password must contain a special character.")
+        )
     return True
 
 
@@ -1289,7 +1269,7 @@ def password_generator(length=8, special_char=False):
     """
     min_length = 4 if special_char else 3
     if length < min_length:
-        raise ValueError(f'Minimal size {min_length}')
+        raise ValueError(f"Minimal size {min_length}")
 
     password = [random.choice(string.ascii_lowercase)]
     password.append(random.choice(string.ascii_uppercase))
@@ -1301,4 +1281,4 @@ def password_generator(length=8, special_char=False):
         password.append(random.choice(string.ascii_letters + string.digits))
 
     random.shuffle(password)
-    return ''.join(password)
+    return "".join(password)
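# Illustrative sketch of the password helpers above (policy: minimum
# length, lower/upper case, a digit, and optionally a special character):
pw = password_generator(length=10, special_char=True)
assert password_validator(pw, length=10, special_char=True) is True
try:
    password_validator("short", length=8)
except PasswordValidatorException as err:
    print(f"rejected: {err}")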
""" - for contact in self.get('contacts', []): - if contact['type'] == contact_type: + for contact in self.get("contacts", []): + if contact["type"] == contact_type: return contact @property @@ -108,11 +101,12 @@ def order_email(self): order contact information does not exist, the default contact information will be used. """ - contact = \ - self.get_contact(VendorContactType.ORDER) or \ - self.get_contact(VendorContactType.DEFAULT) or \ - {} - return contact.get('email') + contact = ( + self.get_contact(VendorContactType.ORDER) + or self.get_contact(VendorContactType.DEFAULT) + or {} + ) + return contact.get("email") @property def serial_email(self): @@ -122,11 +116,12 @@ def serial_email(self): serial contact information does not exist, the default contact information will be used. """ - contact = \ - self.get_contact(VendorContactType.SERIAL) or \ - self.get_contact(VendorContactType.DEFAULT) or \ - {} - return contact.get('email') + contact = ( + self.get_contact(VendorContactType.SERIAL) + or self.get_contact(VendorContactType.DEFAULT) + or {} + ) + return contact.get("email") def get_note(self, note_type): """Get a specific type of note. @@ -136,8 +131,9 @@ def get_note(self, note_type): :return the note content if exists, otherwise returns None. """ note = [ - note.get('content') for note in self.get('notes', []) - if note.get('type') == note_type + note.get("content") + for note in self.get("notes", []) + if note.get("type") == note_type ] return next(iter(note), None) @@ -150,12 +146,11 @@ def get_links_to_me(self, get_pids=False): from rero_ils.modules.acquisition.acq_orders.api import AcqOrdersSearch from rero_ils.modules.holdings.api import HoldingsSearch - acq_orders_query = AcqOrdersSearch()\ - .filter('term', vendor__pid=self.pid) - acq_invoices_query = AcquisitionInvoicesSearch()\ - .filter('term', vendor__pid=self.pid) - hold_query = HoldingsSearch()\ - .filter('term', vendor__pid=self.pid) + acq_orders_query = AcqOrdersSearch().filter("term", vendor__pid=self.pid) + acq_invoices_query = AcquisitionInvoicesSearch().filter( + "term", vendor__pid=self.pid + ) + hold_query = HoldingsSearch().filter("term", vendor__pid=self.pid) links = {} if get_pids: acq_orders = sorted_pids(acq_orders_query) @@ -166,18 +161,18 @@ def get_links_to_me(self, get_pids=False): acq_invoices = acq_invoices_query.count() holdings = hold_query.count() if acq_orders: - links['acq_orders'] = acq_orders + links["acq_orders"] = acq_orders if acq_invoices: - links['acq_invoices'] = acq_invoices + links["acq_invoices"] = acq_invoices if holdings: - links['holdings'] = holdings + links["holdings"] = holdings return links def reasons_not_to_delete(self): """Get reasons not to delete record.""" cannot_delete = {} if links := self.get_links_to_me(): - cannot_delete['links'] = links + cannot_delete["links"] = links return cannot_delete @@ -191,4 +186,4 @@ def bulk_index(self, record_id_iterator): :param record_id_iterator: Iterator yielding record UUIDs. """ - super().bulk_index(record_id_iterator, doc_type='vndr') + super().bulk_index(record_id_iterator, doc_type="vndr") diff --git a/rero_ils/modules/vendors/dumpers.py b/rero_ils/modules/vendors/dumpers.py index 7a05db47fc..4e274278dd 100644 --- a/rero_ils/modules/vendors/dumpers.py +++ b/rero_ils/modules/vendors/dumpers.py @@ -30,11 +30,13 @@ def dump(self, record, data): :param record: The record to dump. :param data: The initial dump data passed in by ``record.dumps()``. 
""" - data.update({ - 'name': record.get('name'), - 'language': record.get('communication_language', 'eng'), - 'email': record.order_email - }) + data.update( + { + "name": record.get("name"), + "language": record.get("communication_language", "eng"), + "email": record.order_email, + } + ) data = {k: v for k, v in data.items() if v} return data @@ -48,10 +50,12 @@ def dump(self, record, data): :param record: The record to dump. :param data: The initial dump data passed in by ``record.dumps()``. """ - data.update({ - 'name': record.get('name'), - 'language': record.get('communication_language', 'eng'), - 'email': record.serial_email - }) + data.update( + { + "name": record.get("name"), + "language": record.get("communication_language", "eng"), + "email": record.serial_email, + } + ) data = {k: v for k, v in data.items() if v} return data diff --git a/rero_ils/modules/vendors/jsonresolver.py b/rero_ils/modules/vendors/jsonresolver.py index f51cd76f8f..d30d132c9b 100644 --- a/rero_ils/modules/vendors/jsonresolver.py +++ b/rero_ils/modules/vendors/jsonresolver.py @@ -22,13 +22,11 @@ from invenio_pidstore.models import PersistentIdentifier, PIDStatus -@jsonresolver.route('/api/vendors/', host='bib.rero.ch') +@jsonresolver.route("/api/vendors/", host="bib.rero.ch") def vendor_resolver(pid): """Resolver for vendor record.""" - persistent_id = PersistentIdentifier.get('vndr', pid) + persistent_id = PersistentIdentifier.get("vndr", pid) if persistent_id.status == PIDStatus.REGISTERED: return dict(pid=persistent_id.pid_value) - current_app.logger.error( - f'Doc resolver error: /api/vendors/{pid} {persistent_id}' - ) - raise Exception('unable to resolve') + current_app.logger.error(f"Doc resolver error: /api/vendors/{pid} {persistent_id}") + raise Exception("unable to resolve") diff --git a/rero_ils/modules/vendors/models.py b/rero_ils/modules/vendors/models.py index ec39cd29fe..7128e5f243 100644 --- a/rero_ils/modules/vendors/models.py +++ b/rero_ils/modules/vendors/models.py @@ -27,38 +27,39 @@ class VendorIdentifier(RecordIdentifier): """Sequence generator for Vendor identifiers.""" - __tablename__ = 'vendor_id' - __mapper_args__ = {'concrete': True} + __tablename__ = "vendor_id" + __mapper_args__ = {"concrete": True} recid = db.Column( - db.BigInteger().with_variant(db.Integer, 'sqlite'), - primary_key=True, autoincrement=True, + db.BigInteger().with_variant(db.Integer, "sqlite"), + primary_key=True, + autoincrement=True, ) class VendorMetadata(db.Model, RecordMetadataBase): """Vendor record metadata.""" - __tablename__ = 'vendor_metadata' + __tablename__ = "vendor_metadata" class VendorContactType: """Type of vendor contact.""" - DEFAULT = 'default' - ORDER = 'order' - SERIAL = 'serial' + DEFAULT = "default" + ORDER = "order" + SERIAL = "serial" class VendorNoteType: """Type of vendor note.""" - ORDER = 'order_note' - CLAIM = 'claim_note' - RETURN = 'return_note' - INVOICE = 'invoice_note' - PAYMENT = 'payment_note' - RECEIPT = 'receipt_note' - CREDIT = 'credit_note' - STAFF = 'staff_note' - GENERAL = 'general_note' + ORDER = "order_note" + CLAIM = "claim_note" + RETURN = "return_note" + INVOICE = "invoice_note" + PAYMENT = "payment_note" + RECEIPT = "receipt_note" + CREDIT = "credit_note" + STAFF = "staff_note" + GENERAL = "general_note" diff --git a/rero_ils/modules/vendors/permissions.py b/rero_ils/modules/vendors/permissions.py index 0e9aa5b925..cbe21eff1e 100644 --- a/rero_ils/modules/vendors/permissions.py +++ b/rero_ils/modules/vendors/permissions.py @@ -19,15 +19,18 @@ """Permissions for 
vendors.""" from invenio_access import action_factory -from rero_ils.modules.permissions import AllowedByAction, \ - AllowedByActionRestrictByOrganisation, RecordPermissionPolicy +from rero_ils.modules.permissions import ( + AllowedByAction, + AllowedByActionRestrictByOrganisation, + RecordPermissionPolicy, +) -search_action = action_factory('vndr-search') -read_action = action_factory('vndr-read') -create_action = action_factory('vndr-create') -update_action = action_factory('vndr-update') -delete_action = action_factory('vndr-delete') -access_action = action_factory('vndr-access') +search_action = action_factory("vndr-search") +read_action = action_factory("vndr-read") +create_action = action_factory("vndr-create") +update_action = action_factory("vndr-update") +delete_action = action_factory("vndr-delete") +access_action = action_factory("vndr-access") class VendorPermissionPolicy(RecordPermissionPolicy): diff --git a/rero_ils/modules/views.py b/rero_ils/modules/views.py index 3eae3e3671..d23a51a044 100644 --- a/rero_ils/modules/views.py +++ b/rero_ils/modules/views.py @@ -23,30 +23,34 @@ import polib import requests -from flask import Blueprint, abort, current_app, jsonify, make_response, \ - request +from flask import Blueprint, abort, current_app, jsonify, make_response, request from rero_ils.modules.utils import cached, get_all_roles -from .decorators import check_authentication, check_logged_as_librarian, \ - check_permission, parse_permission_payload +from .decorators import ( + check_authentication, + check_logged_as_librarian, + check_permission, + parse_permission_payload, +) from .patrons.api import Patron -from .permissions import PermissionContext, expose_action_needs_by_patron, \ - expose_action_needs_by_role, manage_role_permissions +from .permissions import ( + PermissionContext, + expose_action_needs_by_patron, + expose_action_needs_by_role, + manage_role_permissions, +) from .permissions import permission_management as permission_management_action from .permissions import record_permissions -api_blueprint = Blueprint( - 'api_blueprint', - __name__, - url_prefix='' -) +api_blueprint = Blueprint("api_blueprint", __name__, url_prefix="") # PERMISSIONS APIS' =========================================================== -@api_blueprint.route('/permissions/', methods=['GET']) -@api_blueprint.route('/permissions//', methods=['GET']) + +@api_blueprint.route("/permissions/", methods=["GET"]) +@api_blueprint.route("/permissions//", methods=["GET"]) @cached(timeout=10, query_string=True) @check_authentication def permissions(route_name, record_pid=None): @@ -60,10 +64,10 @@ def permissions(route_name, record_pid=None): return record_permissions(record_pid=record_pid, route_name=route_name) -@api_blueprint.route('/permission/management', methods=['POST', 'DELETE']) +@api_blueprint.route("/permission/management", methods=["POST", "DELETE"]) @check_permission([permission_management_action]) @parse_permission_payload -def permission_management(context, permission, method='allow', **kwargs): +def permission_management(context, permission, method="allow", **kwargs): """Manage permissions. 
This API allows managing RERO-ILS permissions to allow/disallow any action @@ -80,25 +84,23 @@ """ # TODO :: implements other SYSTEM_ROLE and USER context if context != PermissionContext.BY_ROLE: - abort(501, 'This permission context management isn\'t yet implemented') + abort(501, "This permission context management isn't yet implemented") try: if context == PermissionContext.BY_ROLE: - role_name = kwargs.get('role_name') + role_name = kwargs.get("role_name") manage_role_permissions(method, permission, role_name) except NameError as ne: abort(400, str(ne)) except Exception as e: abort(500, str(e)) - return jsonify({ - 'context': context, - 'permission': permission, - 'method': method - } | kwargs), 204 if method == 'deny' else 200 + return jsonify( + {"context": context, "permission": permission, "method": method} | kwargs + ), (204 if method == "deny" else 200) -@api_blueprint.route('/permissions/by_role', methods=['GET']) +@api_blueprint.route("/permissions/by_role", methods=["GET"]) @check_permission([permission_management_action]) def permissions_by_role(): """Expose permissions by roles. @@ -113,14 +115,14 @@ --> all permissions for "admin" and "pro_read_only" roles. """ filtered_roles = get_all_roles() - if role_names := request.args.getlist('role'): - if 'all' not in role_names: + if role_names := request.args.getlist("role"): + if "all" not in role_names: filtered_roles = [r for r in filtered_roles if r[0] in role_names] return jsonify(expose_action_needs_by_role(filtered_roles)) -@api_blueprint.route('/permissions/by_patron/<patron_pid>', methods=['GET']) +@api_blueprint.route("/permissions/by_patron/<patron_pid>", methods=["GET"]) @check_permission([permission_management_action]) def permissions_by_patron(patron_pid): """Expose permissions for a specific user. @@ -129,16 +131,16 @@ """ patron = Patron.get_record_by_pid(patron_pid) if not patron: - abort(404, 'Patron not found') + abort(404, "Patron not found") return jsonify(expose_action_needs_by_patron(patron)) # PROXY APIS' ================================================================= -@api_blueprint.route('/proxy') +@api_blueprint.route("/proxy") @check_logged_as_librarian def proxy(): """Proxy to get record metadata from MEF server.""" - if not (url := request.args.get('url')): + if not (url := request.args.get("url")): abort(400, "Missing `url` parameter") response = requests.get(url) return make_response(response.content, response.status_code) @@ -146,27 +148,28 @@ def proxy(): # TRANSLATIONS APIS' ========================================================== -@api_blueprint.route('/translations/<ln>.json') + +@api_blueprint.route("/translations/<ln>.json") def translations(ln): """Exposes translations in JSON format. :param ln: language ISO 639-1 Code (two chars). 
""" - babel = current_app.extensions['babel'] + babel = current_app.extensions["babel"] paths = babel.default_directories try: - path = next(p for p in paths if p.find('rero_ils') > -1) + path = next(p for p in paths if p.find("rero_ils") > -1) except StopIteration: - current_app.logger.error(f'translations for {ln} does not exist') + current_app.logger.error(f"translations for {ln} does not exist") abort(404) - po_file_name = f'{path}/{ln}/LC_MESSAGES/{babel.default_domain}.po' + po_file_name = f"{path}/{ln}/LC_MESSAGES/{babel.default_domain}.po" if not os.path.isfile(po_file_name): abort(404) try: po = polib.pofile(po_file_name) except Exception: - current_app.logger.error(f'unable to open po file: {po_file_name}') + current_app.logger.error(f"unable to open po file: {po_file_name}") abort(404) data = {entry.msgid: entry.msgstr or entry.msgid for entry in po} return jsonify(data) diff --git a/rero_ils/oauth/scopes.py b/rero_ils/oauth/scopes.py index 0757ffd7c3..93922c449c 100644 --- a/rero_ils/oauth/scopes.py +++ b/rero_ils/oauth/scopes.py @@ -19,11 +19,9 @@ from invenio_oauth2server.models import Scope -fullname = Scope('fullname', help_text='Full name', group='User') -birthdate = Scope('birthdate', help_text='Birthdate', group='User') -institution = Scope('institution', help_text='Institution', group='User') -expiration_date = Scope('expiration_date', - help_text='Expiration date', - group='User') -patron_type = Scope('patron_type', help_text='Patron type', group='User') -patron_types = Scope('patron_types', help_text='Patron types', group='User') +fullname = Scope("fullname", help_text="Full name", group="User") +birthdate = Scope("birthdate", help_text="Birthdate", group="User") +institution = Scope("institution", help_text="Institution", group="User") +expiration_date = Scope("expiration_date", help_text="Expiration date", group="User") +patron_type = Scope("patron_type", help_text="Patron type", group="User") +patron_types = Scope("patron_types", help_text="Patron types", group="User") diff --git a/rero_ils/permissions.py b/rero_ils/permissions.py index 4b6cec6579..08d01ca8fa 100644 --- a/rero_ils/permissions.py +++ b/rero_ils/permissions.py @@ -38,11 +38,11 @@ RoleNeed(UserRole.STATISTICS_MANAGER), RoleNeed(UserRole.LIBRARY_ADMINISTRATOR), RoleNeed(UserRole.ACQUISITION_MANAGER), - RoleNeed(UserRole.FULL_PERMISSIONS) + RoleNeed(UserRole.FULL_PERMISSIONS), ) -admin_permission = Permission(RoleNeed('admin')) -editor_permission = Permission(RoleNeed('editor'), RoleNeed('admin')) -monitoring_permission = Permission(RoleNeed('monitoring')) +admin_permission = Permission(RoleNeed("admin")) +editor_permission = Permission(RoleNeed("editor"), RoleNeed("admin")) +monitoring_permission = Permission(RoleNeed("monitoring")) def admin_permission_factory(record, *args, **kwargs): @@ -59,15 +59,16 @@ def librarian_update_permission_factory(record, *args, **kwargs): """User has editor role and the record is editable.""" if record.can_edit: return librarian_permission - return type('Check', (), {'can': lambda x: False})() + return type("Check", (), {"can": lambda x: False})() def librarian_delete_permission_factory( - record, credentials_only=False, *args, **kwargs): + record, credentials_only=False, *args, **kwargs +): """User can delete record.""" if credentials_only or record.can_delete[0]: return librarian_permission - return type('Check', (), {'can': lambda x: False})() + return type("Check", (), {"can": lambda x: False})() def login_and_librarian(): @@ -87,11 +88,9 @@ def login_and_patron(): 
* string: the redirect url to use (optional). """ if current_user and not current_user.is_authenticated: - redirect_url = url_for('security.login', next=request.path) + redirect_url = url_for("security.login", next=request.path) return False, 401, redirect_url - if len(current_patrons) == 0: - return False, 403, None - return True, 200, None + return (False, 403, None) if len(current_patrons) == 0 else (True, 200, None) def can_access_professional_view(func): @@ -99,6 +98,7 @@ def can_access_professional_view(func): and give access to professional view. """ + @wraps(func) def decorated_view(*args, **kwargs): if not current_user.is_authenticated: @@ -106,6 +106,7 @@ def decorated_view(*args, **kwargs): if not current_librarian: abort(403) return func(*args, **kwargs) + return decorated_view @@ -118,6 +119,7 @@ def check_user_is_authenticated(redirect_to=None, code=302): :param redirect_to: the URL to redirect the user if it's not authenticated. :param code: the HTTP code to use for redirect (default=302) """ + def inner_function(func): @wraps(func) def decorated_view(*args, **kwargs): @@ -127,7 +129,9 @@ def decorated_view(*args, **kwargs): return redirect(url_for(redirect_to), code) else: abort(403) + return decorated_view + return inner_function @@ -136,10 +140,12 @@ def wiki_edit_view_permission(): :return: true if the logged user has the editor role """ + @login_required - @roles_required('editor') + @roles_required("editor") def foo(): return True + return foo() @@ -163,7 +169,5 @@ def can_receive_regular_issue(holding): if current_librarian.organisation_pid == holding.organisation_pid: if current_librarian.has_full_permissions: return True - if holding.library_pid not in current_librarian.library_pids: - return False - return True + return holding.library_pid in current_librarian.library_pids return False diff --git a/rero_ils/query.py b/rero_ils/query.py index ed7b60bbdc..beeda6584e 100644 --- a/rero_ils/query.py +++ b/rero_ils/query.py @@ -41,7 +41,7 @@ from .modules.templates.models import TemplateVisibility from .utils import get_i18n_supported_languages -_PUNCTUATION_REGEX = re.compile(r'[:,\?,\,,\.,;,!,=,-]+(\s+|$)') +_PUNCTUATION_REGEX = re.compile(r"[:,\?,\,,\.,;,!,=,-]+(\s+|$)") def and_term_filter(field, **kwargs): @@ -52,16 +52,15 @@ def and_term_filter(field, **kwargs): or 'must_not') :return: Function that returns a boolean AND query between term values. """ + def inner(values): - _filter = Q( - 'bool', - must=[Q('term', **{field: value}) for value in values] - ) - for value in kwargs.get('must', []): + _filter = Q("bool", must=[Q("term", **{field: value}) for value in values]) + for value in kwargs.get("must", []): _filter &= Q(**value) - for value in kwargs.get('must_not', []): + for value in kwargs.get("must_not", []): _filter &= ~Q(**value) return _filter + return inner @@ -73,19 +72,21 @@ def and_i18n_term_filter(field, **kwargs): or 'must_not') :return: Function that returns a boolean AND query between term values. 
""" + def inner(values): - language = request.args.get('lang', current_i18n.language) + language = request.args.get("lang", current_i18n.language) if not language or language not in get_i18n_supported_languages(): - language = current_app.config.get('BABEL_DEFAULT_LANGUAGE', 'en') - i18n_field = f'{field}_{language}' - must = [Q('term', **{i18n_field: value}) for value in values] - _filter = Q('bool', must=must) + language = current_app.config.get("BABEL_DEFAULT_LANGUAGE", "en") + i18n_field = f"{field}_{language}" + must = [Q("term", **{i18n_field: value}) for value in values] + _filter = Q("bool", must=must) - for value in kwargs.get('must', []): + for value in kwargs.get("must", []): _filter &= Q(**value) - for value in kwargs.get('must_not', []): + for value in kwargs.get("must_not", []): _filter &= ~Q(**value) return _filter + return inner @@ -95,12 +96,14 @@ def i18n_terms_filter(field): :param field: Field name. :returns: Function that returns the Terms query. """ + def inner(values): - language = request.args.get('lang', current_i18n.language) + language = request.args.get("lang", current_i18n.language) if not language or language not in get_i18n_supported_languages(): - language = current_app.config.get('BABEL_DEFAULT_LANGUAGE', 'en') - i18n_field = f'{field}_{language}' - return Q('terms', **{i18n_field: values}) + language = current_app.config.get("BABEL_DEFAULT_LANGUAGE", "en") + i18n_field = f"{field}_{language}" + return Q("terms", **{i18n_field: values}) + return inner @@ -110,8 +113,10 @@ def exclude_terms_filter(field): :param field: Field name. :returns: Function that returns the Terms query. """ + def inner(values): - return ~Q('terms', **{field: values}) + return ~Q("terms", **{field: values}) + return inner @@ -121,15 +126,17 @@ def or_terms_filter_by_criteria(criteria): :param criteria: filter criteria. :return: Function that returns a boolean OR query between term values. """ + def inner(values): should = [] - if values and values[0] == 'true': + if values and values[0] == "true": for field, value in criteria.items(): - if field == '_exists_': - should.append(Q('exists', field=value)) + if field == "_exists_": + should.append(Q("exists", field=value)) else: - should.append(Q('terms', **{field: value})) - return Q('bool', should=should) + should.append(Q("terms", **{field: value})) + return Q("bool", should=should) + return inner @@ -141,40 +148,40 @@ def bool_filter(field, **kwargs): or 'must_not') :return: Function that returns a boolean query. 
""" + def inner(values): - _filter = Q('term', **{field: bool(int(values[0]))}) - for value in kwargs.get('must', []): + _filter = Q("term", **{field: bool(int(values[0]))}) + for value in kwargs.get("must", []): _filter &= Q(**value) - for value in kwargs.get('must_not', []): + for value in kwargs.get("must_not", []): _filter &= ~Q(**value) return _filter + return inner def documents_search_factory(self, search, query_parser=None): """Search factory with view code parameter.""" - view = request.args.get('view') - facets = request.args.get('facets', []) + view = request.args.get("view") + facets = request.args.get("facets", []) if facets: - facets = facets.split(',') + facets = facets.split(",") # force to have organisation facet if library is set - if 'library' in facets and 'organisation' not in facets: + if "library" in facets and "organisation" not in facets: args = MultiDict(request.args) - args.add('facets', 'organisation') + args.add("facets", "organisation") request.args = ImmutableMultiDict(args) search, urlkwargs = search_factory(self, search) if view: # organisation public view - if view != current_app.config.get('RERO_ILS_SEARCH_GLOBAL_VIEW_CODE'): + if view != current_app.config.get("RERO_ILS_SEARCH_GLOBAL_VIEW_CODE"): org = Organisation.get_record_by_viewcode(view) - filters = Q( - 'term', organisation_pid=org['pid'] - ) + filters = Q("term", organisation_pid=org["pid"]) search = search.filter(filters) # exclude masked documents - search = search.exclude(Q('term', _masked=True)) + search = search.exclude(Q("term", _masked=True)) # exclude draft documents - search = search.exclude(Q('term', _draft=True)) + search = search.exclude(Q("term", _draft=True)) return search, urlkwargs @@ -182,24 +189,24 @@ def viewcode_patron_search_factory(self, search, query_parser=None): """Search factory with viewcode or current patron.""" search, urlkwargs = search_factory(self, search) # Public interface - if view := request.args.get('view'): - if view != current_app.config.get('RERO_ILS_SEARCH_GLOBAL_VIEW_CODE'): + if view := request.args.get("view"): + if view != current_app.config.get("RERO_ILS_SEARCH_GLOBAL_VIEW_CODE"): org = Organisation.get_record_by_viewcode(view) - search = search.filter('term', organisation__pid=org['pid']) + search = search.filter("term", organisation__pid=org["pid"]) # Admin interface elif current_librarian: search = search.filter( - 'term', organisation__pid=current_librarian.organisation_pid + "term", organisation__pid=current_librarian.organisation_pid ) # exclude draft records - search = search.filter('bool', must_not=[Q('term', _draft=True)]) + search = search.filter("bool", must_not=[Q("term", _draft=True)]) return search, urlkwargs def holdings_search_factory(self, search, query_parser=None): """Search factory for holdings records.""" search, urlkwargs = search_factory(self, search) - view = request.args.get('view') + view = request.args.get("view") search = search_factory_for_holdings_and_items(view, search) return search, urlkwargs @@ -207,9 +214,9 @@ def holdings_search_factory(self, search, query_parser=None): def items_search_factory(self, search, query_parser=None): """Search factory for item records.""" search, urlkwargs = search_factory(self, search) - view = request.args.get('view') - if org_pid := request.args.get('organisation'): - search = search.filter('term', organisation__pid=org_pid) + view = request.args.get("view") + if org_pid := request.args.get("organisation"): + search = search.filter("term", organisation__pid=org_pid) search = 
search_factory_for_holdings_and_items(view, search) return search, urlkwargs @@ -219,25 +226,27 @@ def search_factory_for_holdings_and_items(view, search): # Logic for public interface if view: # logic for public organisational interface - if view != current_app.config.get('RERO_ILS_SEARCH_GLOBAL_VIEW_CODE'): + if view != current_app.config.get("RERO_ILS_SEARCH_GLOBAL_VIEW_CODE"): org = Organisation.get_record_by_viewcode(view) - search = search.filter('term', organisation__pid=org['pid']) + search = search.filter("term", organisation__pid=org["pid"]) # masked records are hidden for all public interfaces - search = search.filter('bool', must_not=[Q('term', _masked=True)]) + search = search.filter("bool", must_not=[Q("term", _masked=True)]) # PROVISIONAL records are hidden for all public interfaces search = search.filter( - 'bool', must_not=[Q('term', type=TypeOfItem.PROVISIONAL)]) + "bool", must_not=[Q("term", type=TypeOfItem.PROVISIONAL)] + ) return search def remote_entity_view_search_factory(self, search, query_parser=None): """Search factory with view code parameter.""" view = request.args.get( - 'view', current_app.config.get('RERO_ILS_SEARCH_GLOBAL_VIEW_CODE')) + "view", current_app.config.get("RERO_ILS_SEARCH_GLOBAL_VIEW_CODE") + ) search, urlkwargs = search_factory(self, search) - if view != current_app.config.get('RERO_ILS_SEARCH_GLOBAL_VIEW_CODE'): + if view != current_app.config.get("RERO_ILS_SEARCH_GLOBAL_VIEW_CODE"): org = Organisation.get_record_by_viewcode(view) - search = search.filter('term', organisations=org['pid']) + search = search.filter("term", organisations=org["pid"]) return search, urlkwargs @@ -245,7 +254,7 @@ def organisation_organisation_search_factory(self, search, query_parser=None): """Organisation Search factory.""" search, urlkwargs = search_factory(self, search) if current_librarian: - search = search.filter('term', pid=current_librarian.organisation_pid) + search = search.filter("term", pid=current_librarian.organisation_pid) return search, urlkwargs @@ -260,13 +269,14 @@ def organisation_search_factory(self, search, query_parser=None): # US1906: Complete item model if current_librarian: search = search.filter( - 'term', organisation__pid=current_librarian.organisation_pid + "term", organisation__pid=current_librarian.organisation_pid ) view = request.args.get( - 'view', current_app.config.get('RERO_ILS_SEARCH_GLOBAL_VIEW_CODE')) + "view", current_app.config.get("RERO_ILS_SEARCH_GLOBAL_VIEW_CODE") + ) search, urlkwargs = search_factory(self, search) - if view != current_app.config.get('RERO_ILS_SEARCH_GLOBAL_VIEW_CODE'): - search = search.filter('bool', must_not=[Q('term', _masked=True)]) + if view != current_app.config.get("RERO_ILS_SEARCH_GLOBAL_VIEW_CODE"): + search = search.filter("bool", must_not=[Q("term", _masked=True)]) return search, urlkwargs @@ -274,17 +284,14 @@ def organisation_search_factory(self, search, query_parser=None): def view_search_collection_factory(self, search, query_parser=None): """Search factory with view code parameter.""" view = request.args.get( - 'view', current_app.config.get('RERO_ILS_SEARCH_GLOBAL_VIEW_CODE')) + "view", current_app.config.get("RERO_ILS_SEARCH_GLOBAL_VIEW_CODE") + ) search, urlkwargs = search_factory(self, search) - if view != current_app.config.get('RERO_ILS_SEARCH_GLOBAL_VIEW_CODE'): + if view != current_app.config.get("RERO_ILS_SEARCH_GLOBAL_VIEW_CODE"): org = Organisation.get_record_by_viewcode(view) - search = search.filter( - 'term', organisation__pid=org['pid'] - ) - if published := 
request.args.get('published'): - search = search.filter( - 'term', published=bool(int(published)) - ) + search = search.filter("term", organisation__pid=org["pid"]) + if published := request.args.get("published"): + search = search.filter("term", published=bool(int(published))) return search, urlkwargs @@ -300,25 +307,22 @@ def ill_request_search_factory(self, search, query_parser=None): if current_librarian: search = search.filter( - 'term', organisation__pid=current_librarian.organisation_pid + "term", organisation__pid=current_librarian.organisation_pid ) elif current_patrons: search = search.filter( - 'terms', - patron__pid=[ptrn.pid for ptrn in current_patrons]) + "terms", patron__pid=[ptrn.pid for ptrn in current_patrons] + ) - search = search.exclude(Q('term', to_anonymize=True)) + search = search.exclude(Q("term", to_anonymize=True)) - if not request.args.get('remove_archived'): + if not request.args.get("remove_archived"): return search, urlkwargs - months = current_app.config.get('RERO_ILS_ILL_HIDE_MONTHS', 6) + months = current_app.config.get("RERO_ILS_ILL_HIDE_MONTHS", 6) date_delta = datetime.now(timezone.utc) - relativedelta(months=months) - filters = Q( - 'range', - _created={'lte': 'now', 'gte': date_delta} - ) - filters |= Q('term', status=ILLRequestStatus.PENDING) + filters = Q("range", _created={"lte": "now", "gte": date_delta}) + filters |= Q("term", status=ILLRequestStatus.PENDING) return search.filter(filters), urlkwargs @@ -335,21 +339,16 @@ def circulation_search_factory(self, search, query_parser=None): # the loans of his professional organisation # initial filter for OR condition - filters = Q('match_none') + filters = Q("match_none") if current_librarian: - filters |= Q( - 'term', organisation__pid=current_librarian.organisation_pid - ) + filters |= Q("term", organisation__pid=current_librarian.organisation_pid) if current_patrons: - filters |= Q( - 'terms', - patron_pid=[ptrn.pid for ptrn in current_patrons] - ) + filters |= Q("terms", patron_pid=[ptrn.pid for ptrn in current_patrons]) - if filters is not Q('match_none'): - search = search.filter('bool', must=[filters]) + if filters is not Q("match_none"): + search = search.filter("bool", must=[filters]) # exclude to_anonymize records - search = search.filter('bool', must_not=[Q('term', to_anonymize=True)]) + search = search.filter("bool", must_not=[Q("term", to_anonymize=True)]) return search, urlkwargs @@ -364,15 +363,25 @@ def templates_search_factory(self, search, query_parser=None): if current_librarian: if current_librarian.has_full_permissions: search = search.filter( - 'term', organisation__pid=current_librarian.organisation_pid) + "term", organisation__pid=current_librarian.organisation_pid + ) else: search = search.filter( - 'term', organisation__pid=current_librarian.organisation_pid) - search = search.filter('bool', should=[ - Q('bool', must=[ - Q('match', creator__pid=current_librarian.pid), - Q('match', visibility=TemplateVisibility.PRIVATE)]), - Q('match', visibility=TemplateVisibility.PUBLIC)]) + "term", organisation__pid=current_librarian.organisation_pid + ) + search = search.filter( + "bool", + should=[ + Q( + "bool", + must=[ + Q("match", creator__pid=current_librarian.pid), + Q("match", visibility=TemplateVisibility.PRIVATE), + ], + ), + Q("match", visibility=TemplateVisibility.PUBLIC), + ], + ) return search, urlkwargs @@ -386,12 +395,12 @@ def patron_transactions_search_factory(self, search, query_parser=None): search, urlkwargs = search_factory(self, search) if current_librarian: search = 
search.filter( - 'term', organisation__pid=current_librarian.organisation_pid + "term", organisation__pid=current_librarian.organisation_pid ) elif current_patrons: search = search.filter( - 'terms', - patron__pid=[ptrn.pid for ptrn in current_patrons]) + "terms", patron__pid=[ptrn.pid for ptrn in current_patrons] + ) return search, urlkwargs @@ -405,7 +414,7 @@ def acq_accounts_search_factory(self, search, query_parser=None): if current_librarian: search = search.filter( - 'term', organisation__pid=current_librarian.organisation_pid + "term", organisation__pid=current_librarian.organisation_pid ) return search, urlkwargs @@ -418,9 +427,7 @@ def operation_logs_search_factory(self, search, query_parser=None): search, urlkwargs = search_factory(self, search) if not current_librarian and len(current_patrons): patron_pids = [ptrn.pid for ptrn in current_patrons] - search = search.filter( - 'terms', loan__patron__pid=patron_pids - ) + search = search.filter("terms", loan__patron__pid=patron_pids) return search, urlkwargs @@ -435,54 +442,54 @@ def search_factory(self, search, query_parser=None): :param query_parser: a specific query parser :return: Tuple with search instance and URL arguments. """ + def _default_parser(qstr=None, query_boosting=None): """Default parser that uses the Q() from elasticsearch_dsl.""" - query_type = 'query_string' + query_type = "query_string" # avoid elasticsearch errors when it can't convert a boolean or # numerical values during the query lenient = True - default_operator = 'OR' - if request.args.get('simple') == '1': - query_type = 'simple_query_string' - default_operator = 'AND' + default_operator = "OR" + if request.args.get("simple") == "1": + query_type = "simple_query_string" + default_operator = "AND" if qstr: # TODO: remove this bad hack - qstr = _PUNCTUATION_REGEX.sub(' ', qstr) - qstr = re.sub(r'\s+', ' ', qstr).rstrip() + qstr = _PUNCTUATION_REGEX.sub(" ", qstr) + qstr = re.sub(r"\s+", " ", qstr).rstrip() return Q( - query_type, - lenient=lenient, - query=qstr, - fields=query_boosting if query_boosting else ["*"], - default_operator=default_operator, - ) + query_type, + lenient=lenient, + query=qstr, + fields=query_boosting or ["*"], + default_operator=default_operator, + ) return Q() from invenio_records_rest.sorter import default_sorter_factory - query_string = request.values.get('q') - display_score = request.values.get('display_score') + query_string = request.values.get("q") + display_score = request.values.get("display_score") if display_score: search = search.extra(explain=True) query_parser = query_parser or _default_parser search_index = search._index[0] - query_boosting = \ - current_app.config.get('RERO_ILS_QUERY_BOOSTING', {}).get(search_index) + query_boosting = current_app.config.get("RERO_ILS_QUERY_BOOSTING", {}).get( + search_index + ) if ( - flask_request.args.get('fulltext', None) - in [None, '0', 'false', 0, False] + flask_request.args.get("fulltext", None) in [None, "0", "false", 0, False] and query_boosting ): - query_boosting = \ - [v for v in query_boosting if not v.startswith('fulltext')] + query_boosting = [v for v in query_boosting if not v.startswith("fulltext")] try: search = search.query(query_parser(query_string, query_boosting)) except SyntaxError as err: - query = request.values.get('q', '') + query = request.values.get("q", "") current_app.logger.debug( - f'Failed parsing query: {query}', + f"Failed parsing query: {query}", exc_info=True, ) raise InvalidQueryRESTError() from err @@ -491,5 +498,5 @@ def 
_default_parser(qstr=None, query_boosting=None): search, sortkwargs = default_sorter_factory(search, search_index) for key, value in sortkwargs.items(): urlkwargs.add(key, value) - urlkwargs.add('q', query_string) + urlkwargs.add("q", query_string) return search, urlkwargs diff --git a/rero_ils/schedulers.py b/rero_ils/schedulers.py index db2ac682d9..2cedb92244 100644 --- a/rero_ils/schedulers.py +++ b/rero_ils/schedulers.py @@ -29,13 +29,10 @@ from redisbeat.scheduler import RedisScheduler as OriginalRedisScheduler from werkzeug.local import LocalProxy -current_scheduler = LocalProxy(lambda: RedisScheduler( - app=current_celery, - lazy=True -)) +current_scheduler = LocalProxy(lambda: RedisScheduler(app=current_celery, lazy=True)) logger = get_logger(__name__) -schedstate = namedtuple('schedstate', ('is_due', 'next')) +schedstate = namedtuple("schedstate", ("is_due", "next")) class RedisScheduler(OriginalRedisScheduler): @@ -65,12 +62,11 @@ def __init__(self, app, *args, **kwargs): :param args: see base class definitions :param kwargs: see base class definitions """ - lazy = kwargs.get('lazy', False) - url = app.conf.get("CELERY_REDIS_SCHEDULER_URL", - "redis://localhost:6379") - logger.info(f'Connect: {url} lazy:{lazy}') - kwargs['app'] = app - kwargs['lazy'] = lazy + lazy = kwargs.get("lazy", False) + url = app.conf.get("CELERY_REDIS_SCHEDULER_URL", "redis://localhost:6379") + logger.info(f"Connect: {url} lazy:{lazy}") + kwargs["app"] = app + kwargs["lazy"] = lazy super().__init__(*args, **kwargs) def get(self, name): @@ -91,7 +87,7 @@ def enabled_name(self, name): :param name: name of entry in task scheduler :return: name of the enable key in REDIS DB """ - return f'{self.key}:{name}' + return f"{self.key}:{name}" def merge_inplace(self, tasks): """Merge entries from CELERY_BEAT_SCHEDULE. 
@@ -99,18 +95,18 @@ def merge_inplace(self, tasks): :param tasks: dictionary with CELERY_BEAT_SCHEDULE tasks """ for name in tasks: - enabled = tasks[name].pop('enabled', True) + enabled = tasks[name].pop("enabled", True) if not self.rdb.get(self.enabled_name(name)): self.rdb[self.enabled_name(name)] = int(enabled) super().merge_inplace(tasks) def setup_schedule(self): """Init entries from CELERY_BEAT_SCHEDULE.""" - beat_schedule = FLASK_TO_CELERY_MAPPING['CELERY_BEAT_SCHEDULE'] + beat_schedule = FLASK_TO_CELERY_MAPPING["CELERY_BEAT_SCHEDULE"] config = deepcopy(self.app.conf.get(beat_schedule)) self.merge_inplace(config) current_schedule = "\n".join(self.display_all(prefix="- Tasks: ")) - msg = f'Current schedule:\n {current_schedule}' + msg = f"Current schedule:\n {current_schedule}" logger.info(msg) def is_due(self, entry): @@ -131,8 +127,10 @@ def is_due(self, entry): """ if self.get_entry_enabled(entry.name): return entry.is_due() - msg = f'Not enabled: {entry.name} = {entry.task} ' \ - f'{repr(entry.schedule)} {entry.kwargs}' + msg = ( + f"Not enabled: {entry.name} = {entry.task} " + f"{repr(entry.schedule)} {entry.kwargs}" + ) logger.info(msg) return schedstate(is_due=False, next=entry.is_due().next) @@ -175,36 +173,38 @@ def add_entry(self, entry, enable=True): :param enable: enable or disable scheduling :return: True if successful """ - result = self.add(**{ - 'name': entry.name, - 'task': entry.task, - 'schedule': entry.schedule, - 'args': entry.args, - 'kwargs': entry.kwargs, - 'options': entry.options - }) + result = self.add( + **{ + "name": entry.name, + "task": entry.task, + "schedule": entry.schedule, + "args": entry.args, + "kwargs": entry.kwargs, + "options": entry.options, + } + ) if result: self.set_entry_enabled(name=entry.name, enable=enable) return result - def display_entry(self, name, prefix='- '): + def display_entry(self, name, prefix="- "): """Display an entry. :param name: name of entry in task scheduler :param prefix: prefix to add to returned info :return: entry as string representative """ - entry_as_text = f'Not found entry: {name}' + entry_as_text = f"Not found entry: {name}" if entry := self.get(name): entry_as_text = ( - f'{prefix}{entry.name} = {entry.task} {repr(entry.schedule)} ' - f'kwargs:{entry.kwargs} ' - f'options:{entry.options} ' - f'enabled:{self.get_entry_enabled(name)}' + f"{prefix}{entry.name} = {entry.task} {repr(entry.schedule)} " + f"kwargs:{entry.kwargs} " + f"options:{entry.options} " + f"enabled:{self.get_entry_enabled(name)}" ) return entry_as_text - def display_all(self, prefix='- '): + def display_all(self, prefix="- "): """Display all entries. :param prefix: prefix to add to returned info @@ -225,9 +225,7 @@ def get_entry_enabled(self, name): :return: enabled status """ value = self.rdb.get(self.enabled_name(name)) - if value is None or value == b'1': - return True - return False + return value is None or value == b"1" def set_entry_enabled(self, name, enable=True): """Set enabled of an entry. 
@@ -242,7 +240,7 @@ def set_entry_enabled(self, name, enable=True): :param enable: enable or disable scheduling """ if self.get(name): - enabled_name = f'{self.key}:{name}' + enabled_name = f"{self.key}:{name}" self.rdb[enabled_name] = int(enable) def set_enable_all(self, enable=True): @@ -259,17 +257,17 @@ def scheduler(): """Scheduler management commands.""" -@scheduler.command('info') +@scheduler.command("info") @with_appcontext def info(): """Displays info about all periodic tasks.""" - click.secho('Scheduled tasks:', fg='green') - click.echo('\n'.join(current_scheduler.display_all())) + click.secho("Scheduled tasks:", fg="green") + click.echo("\n".join(current_scheduler.display_all())) -@scheduler.command('init') -@click.option('-r', '--reset', 'reset', is_flag=True, default=False) -@click.option('-v', '--verbose', 'verbose', is_flag=True, default=False) +@scheduler.command("init") +@click.option("-r", "--reset", "reset", is_flag=True, default=False) +@click.option("-v", "--verbose", "verbose", is_flag=True, default=False) @with_appcontext def init(reset, verbose): """Initialize scheduler. @@ -278,20 +276,20 @@ def init(reset, verbose): :param verbose: verbose output """ if reset: - click.secho('Reset REDIS scheduler!', fg='red', bold=True) + click.secho("Reset REDIS scheduler!", fg="red", bold=True) current_scheduler.reset() else: - click.secho('Initalize REDIS scheduler!', fg='yellow') + click.secho("Initialize REDIS scheduler!", fg="yellow") current_scheduler.setup_schedule() if verbose: - click.echo('\n'.join(current_scheduler.display_all())) + click.echo("\n".join(current_scheduler.display_all())) -@scheduler.command('enable_tasks') -@click.option('-a', '--all', 'all', is_flag=True, default=False) -@click.option('-n', '--name', 'names', multiple=True, default=None) -@click.option('-d', '--disable', 'disable', is_flag=True, default=False) -@click.option('-v', '--verbose', 'verbose', is_flag=True, default=False) +@scheduler.command("enable_tasks") +@click.option("-a", "--all", "all", is_flag=True, default=False) +@click.option("-n", "--name", "names", multiple=True, default=None) +@click.option("-d", "--disable", "disable", is_flag=True, default=False) +@click.option("-v", "--verbose", "verbose", is_flag=True, default=False) @with_appcontext def enable_tasks(all, names, disable, verbose): """Enable or disable periodic tasks. 
@@ -302,11 +300,11 @@ def enable_tasks(all, names, disable, verbose): :param verbose: verbose output """ if verbose: - click.secho('Scheduler tasks enabled:', fg='green') + click.secho("Scheduler tasks enabled:", fg="green") if all: current_scheduler.set_enable_all(not disable) if verbose: - click.echo('\n'.join(current_scheduler.display_all())) + click.echo("\n".join(current_scheduler.display_all())) else: names = names or [] for name in names: diff --git a/rero_ils/theme/menus.py b/rero_ils/theme/menus.py index 4ae8a976ad..494b2f29b4 100644 --- a/rero_ils/theme/menus.py +++ b/rero_ils/theme/menus.py @@ -31,7 +31,7 @@ # -------------- Utilities ----------------- -class TextWithIcon(): +class TextWithIcon: """Create a dynamic text menu item property to support translations.""" def __init__(self, icon, text): @@ -41,33 +41,35 @@ def __init__(self, icon, text): def __html__(self): """Jinja calls this method during the rendering.""" - return f'{self.icon} {_(self.text)}' + return f"{self.icon} {_(self.text)}" -class UserName(): +class UserName: """Create a dynamic menu text user name property.""" def __html__(self): """Jinja calls this method during the rendering.""" - account = session.get( - 'user_name', _('My Account') - ) if current_user.is_authenticated else _('My Account') + account = ( + session.get("user_name", _("My Account")) + if current_user.is_authenticated + else _("My Account") + ) if len(account) > 30: - account = f'{account[0:30]}...' + account = f"{account[0:30]}..." # TODO: fix the unclosed span tag - return f''' + return f""" {account} -''' +""" -class CurrentLanguage(): +class CurrentLanguage: """Create a dynamic menu property with the current language.""" def __html__(self): """Jinja calls this method during the rendering.""" - ui_language = f'ui_language_{current_i18n.language}' + ui_language = f"ui_language_{current_i18n.language}" return f' {_(ui_language)}' @@ -82,7 +84,8 @@ def rero_register( active_when=None, visible_when=None, expected_args=None, - **kwargs): + **kwargs, +): """Take care each element in kwargs doesn't already exist in item.""" # Check which option in kwargs already exists in `item`. 
to_delete = [] @@ -103,117 +106,103 @@ def rero_register( active_when, visible_when, expected_args, - **kwargs) + **kwargs, + ) # ---------- Menu definitions --------------- def init_menu_tools(): """Create the header tool menu.""" - item = current_menu.submenu('main.tool') + item = current_menu.submenu("main.tool") rero_register( item, endpoint=None, - text=TextWithIcon( - icon='', - text='Tools' - ), + text=TextWithIcon(icon='', text="Tools"), order=0, - id='tools-menu' + id="tools-menu", ) - item = current_menu.submenu('main.tool.ill_request') + item = current_menu.submenu("main.tool.ill_request") rero_register( item, - endpoint='ill_requests.ill_request_form', + endpoint="ill_requests.ill_request_form", endpoint_arguments_constructor=lambda: dict( viewcode=request.view_args.get( - 'viewcode', current_app.config.get( - 'RERO_ILS_SEARCH_GLOBAL_VIEW_CODE'))), + "viewcode", current_app.config.get("RERO_ILS_SEARCH_GLOBAL_VIEW_CODE") + ) + ), visible_when=lambda: bool(current_patrons), text=TextWithIcon( icon='', - text='Interlibrary loan request' + text="Interlibrary loan request", ), order=10, - id='ill-request-menu' + id="ill-request-menu", ) - item = current_menu.submenu('main.tool.stats_billing') + item = current_menu.submenu("main.tool.stats_billing") rero_register( item, - endpoint='stats.stats_billing', + endpoint="stats.stats_billing", visible_when=lambda: admin_permission.require().can(), text=TextWithIcon( - icon='', - text='Statistics billing' + icon='', text="Statistics billing" ), order=20, - id='stats-menu-billing' + id="stats-menu-billing", ) - item = current_menu.submenu('main.tool.stats_librarian') + item = current_menu.submenu("main.tool.stats_librarian") rero_register( item, - endpoint='stats.stats_librarian', + endpoint="stats.stats_librarian", visible_when=lambda: librarian_permission.require().can(), - text=TextWithIcon( - icon='', - text='Statistics' - ), + text=TextWithIcon(icon='', text="Statistics"), order=20, - id='stats-menu-librarian' + id="stats-menu-librarian", ) - item = current_menu.submenu('main.tool.collections') + item = current_menu.submenu("main.tool.collections") rero_register( item, - endpoint='rero_ils.search', + endpoint="rero_ils.search", endpoint_arguments_constructor=lambda: dict( viewcode=request.view_args.get( - 'viewcode', current_app.config.get( - 'RERO_ILS_SEARCH_GLOBAL_VIEW_CODE')), - recordType='collections' + "viewcode", current_app.config.get("RERO_ILS_SEARCH_GLOBAL_VIEW_CODE") + ), + recordType="collections", + ), + visible_when=lambda: current_app.config.get("RERO_ILS_SEARCH_GLOBAL_VIEW_CODE") + != request.view_args.get( + "viewcode", current_app.config.get("RERO_ILS_SEARCH_GLOBAL_VIEW_CODE") ), - visible_when=lambda: current_app.config.get( - 'RERO_ILS_SEARCH_GLOBAL_VIEW_CODE' - ) != request.view_args.get( - 'viewcode', current_app.config.get( - 'RERO_ILS_SEARCH_GLOBAL_VIEW_CODE')), text=TextWithIcon( - icon='', - text='Exhibition/course' + icon='', text="Exhibition/course" ), order=2, - id='collections-menu' + id="collections-menu", ) - item = current_menu.submenu('main.tool.help') + item = current_menu.submenu("main.tool.help") rero_register( item, - endpoint='wiki.page', - endpoint_arguments_constructor=lambda: {'url': 'public'}, - text=TextWithIcon( - icon='', - text='Help' - ), + endpoint="wiki.page", + endpoint_arguments_constructor=lambda: {"url": "public"}, + text=TextWithIcon(icon='', text="Help"), order=100, - id='help-menu' + id="help-menu", ) def init_menu_lang(): """Create the header language menu.""" - item = 
current_menu.submenu('main.menu') + item = current_menu.submenu("main.menu") # Bug: when you reload the page with register(**kwargs), it fails. # So we check that the 'id' already exists. If yes, do not create # the item again. rero_register( - item, - endpoint=None, - text=CurrentLanguage(), - order=0, - id='language-menu' + item, endpoint=None, text=CurrentLanguage(), order=0, id="language-menu" ) order = 10 @@ -225,20 +214,18 @@ def hide_language(lang): return current_i18n.language != lang for language_item in current_i18n.get_locales(): - item = current_menu.submenu(f'main.menu.lang_{language_item.language}') - ui_language = f'ui_language_{language_item.language}' + item = current_menu.submenu(f"main.menu.lang_{language_item.language}") + ui_language = f"ui_language_{language_item.language}" rero_register( item, - endpoint='invenio_i18n.set_lang', + endpoint="invenio_i18n.set_lang", endpoint_arguments_constructor=partial( - return_language, language_item.language), - text=TextWithIcon( - icon='', - text=ui_language + return_language, language_item.language ), + text=TextWithIcon(icon='', text=ui_language), visible_when=partial(hide_language, language_item.language), order=order, - id=f'language-menu-{language_item.language}' + id=f"language-menu-{language_item.language}", ) order += 1 @@ -248,133 +235,116 @@ def init_menu_profile(): def is_not_read_only(): """Hide element menu if the flag is read only.""" - return not current_app.config.get( - 'RERO_PUBLIC_USERPROFILES_READONLY', False) and \ - current_user.is_authenticated + return ( + not current_app.config.get("RERO_PUBLIC_USERPROFILES_READONLY", False) + and current_user.is_authenticated + ) - item = current_menu.submenu('main.profile') + item = current_menu.submenu("main.profile") rero_register( item, endpoint=None, text=UserName(), order=1, - id='my-account-menu', - cssClass='py-1' + id="my-account-menu", + cssClass="py-1", ) - item = current_menu.submenu('main.profile.login') + item = current_menu.submenu("main.profile.login") rero_register( item, - endpoint='security.login', - endpoint_arguments_constructor=lambda: dict( - next=request.full_path - ), + endpoint="security.login", + endpoint_arguments_constructor=lambda: dict(next=request.full_path), visible_when=lambda: not current_user.is_authenticated, - text=TextWithIcon( - icon='', - text='Login' - ), + text=TextWithIcon(icon='', text="Login"), order=1, - id='login-menu', + id="login-menu", ) - item = current_menu.submenu('main.profile.professional') + item = current_menu.submenu("main.profile.professional") rero_register( item, - endpoint='rero_ils.professional', + endpoint="rero_ils.professional", visible_when=lambda: current_librarian, text=TextWithIcon( - icon='', - text='Professional interface' + icon='', text="Professional interface" ), order=1, - id='professional-interface-menu', + id="professional-interface-menu", ) - item = current_menu.submenu('main.profile.logout') + item = current_menu.submenu("main.profile.logout") viewcode = request.view_args.get( - 'viewcode', - current_app.config.get('RERO_ILS_SEARCH_GLOBAL_VIEW_CODE') + "viewcode", current_app.config.get("RERO_ILS_SEARCH_GLOBAL_VIEW_CODE") ) rero_register( item, - endpoint='security.logout', - endpoint_arguments_constructor=lambda: dict(next=f'/{viewcode}'), + endpoint="security.logout", + endpoint_arguments_constructor=lambda: dict(next=f"/{viewcode}"), visible_when=lambda: current_user.is_authenticated, - text=TextWithIcon( - icon='', - text='Logout' - ), + text=TextWithIcon(icon='', text="Logout"), order=2, 
- id='logout-menu', + id="logout-menu", ) - item = current_menu.submenu('main.profile.profile') - profile_endpoint = 'patrons.profile' + item = current_menu.submenu("main.profile.profile") + profile_endpoint = "patrons.profile" rero_register( item, endpoint=profile_endpoint, endpoint_arguments_constructor=lambda: dict( viewcode=request.view_args.get( - 'viewcode', current_app.config.get( - 'RERO_ILS_SEARCH_GLOBAL_VIEW_CODE'))), - visible_when=lambda: len(current_patrons) > 0, - text=TextWithIcon( - icon='', - text='My Account' + "viewcode", current_app.config.get("RERO_ILS_SEARCH_GLOBAL_VIEW_CODE") + ) ), + visible_when=lambda: len(current_patrons) > 0, + text=TextWithIcon(icon='', text="My Account"), order=1, - id='profile-menu', + id="profile-menu", ) - item = current_menu.submenu('main.profile.edit_profile') + item = current_menu.submenu("main.profile.edit_profile") rero_register( item, - endpoint='users.profile', + endpoint="users.profile", endpoint_arguments_constructor=lambda: dict( viewcode=request.view_args.get( - 'viewcode', current_app.config.get( - 'RERO_ILS_SEARCH_GLOBAL_VIEW_CODE'))), - visible_when=lambda: is_not_read_only(), - text=TextWithIcon( - icon='', - text='Edit my profile' + "viewcode", current_app.config.get("RERO_ILS_SEARCH_GLOBAL_VIEW_CODE") + ) ), + visible_when=lambda: is_not_read_only(), + text=TextWithIcon(icon='', text="Edit my profile"), order=1, - id='profile-menu', + id="profile-menu", ) - item = current_menu.submenu('main.profile.change_password') + item = current_menu.submenu("main.profile.change_password") rero_register( item, - endpoint='users.password', + endpoint="users.password", endpoint_arguments_constructor=lambda: dict( viewcode=request.view_args.get( - 'viewcode', current_app.config.get( - 'RERO_ILS_SEARCH_GLOBAL_VIEW_CODE'))), - visible_when=lambda: is_not_read_only(), - text=TextWithIcon( - icon='', - text='Change password' + "viewcode", current_app.config.get("RERO_ILS_SEARCH_GLOBAL_VIEW_CODE") + ) ), + visible_when=lambda: is_not_read_only(), + text=TextWithIcon(icon='', text="Change password"), order=1, - id='profile-menu', + id="profile-menu", ) # Endpoint for: # Application: invenio_oauth2server_settings.index # Security: invenio_accounts.security - item = current_menu.submenu('main.profile.signup') + item = current_menu.submenu("main.profile.signup") rero_register( item, - endpoint='security.register', + endpoint="security.register", visible_when=lambda: not current_app.config.get( - 'RERO_PUBLIC_USERPROFILES_READONLY', False) and - not current_user.is_authenticated, - text=TextWithIcon( - icon='', - text='Sign Up' - ), + "RERO_PUBLIC_USERPROFILES_READONLY", False + ) + and not current_user.is_authenticated, + text=TextWithIcon(icon='', text="Sign Up"), order=2, - id='signup-menu', + id="signup-menu", ) diff --git a/rero_ils/theme/views.py b/rero_ils/theme/views.py index be797b1e46..a53a75efaf 100644 --- a/rero_ils/theme/views.py +++ b/rero_ils/theme/views.py @@ -25,8 +25,18 @@ from functools import wraps from urllib.parse import urlparse -from flask import Blueprint, Response, abort, current_app, jsonify, redirect, \ - render_template, request, session, url_for +from flask import ( + Blueprint, + Response, + abort, + current_app, + jsonify, + redirect, + render_template, + request, + session, + url_for, +) from invenio_jsonschemas import current_jsonschemas from invenio_jsonschemas.errors import JSONSchemaNotFound from invenio_jsonschemas.proxies import current_refresolver_store @@ -38,10 +48,10 @@ from .menus import init_menu_lang, 
init_menu_profile, init_menu_tools blueprint = Blueprint( - 'rero_ils', + "rero_ils", __name__, - template_folder='templates', - static_folder='static', + template_folder="templates", + static_folder="static", ) @@ -55,64 +65,61 @@ def init_menu(): def check_organisation_viewcode(fn): """Check if viewcode parameter is defined.""" + @wraps(fn) def decorated_view(*args, **kwargs): viewCodes = Organisation.all_code() # Add default view code - viewCodes.append(current_app.config.get( - 'RERO_ILS_SEARCH_GLOBAL_VIEW_CODE')) - if kwargs['viewcode'] not in viewCodes: + viewCodes.append(current_app.config.get("RERO_ILS_SEARCH_GLOBAL_VIEW_CODE")) + if kwargs["viewcode"] not in viewCodes: abort(404) return fn(*args, **kwargs) return decorated_view -@blueprint.route('/error') +@blueprint.route("/error") def error(): """Error to generate exception for test purposes.""" - raise Exception('this is an error for test purposes') + raise Exception("this is an error for test purposes") -@blueprint.route('/robots.txt') +@blueprint.route("/robots.txt") @cached() -def robots(timeout=60*60): # 1 hour timeout +def robots(timeout=60 * 60): # 1 hour timeout """Robots.txt generate response.""" - response = current_app.config['RERO_ILS_ROBOTS'] - response = Response( - response=response, - status=200, mimetype="text/plain") + response = current_app.config["RERO_ILS_ROBOTS"] + response = Response(response=response, status=200, mimetype="text/plain") response.headers["Content-Type"] = "text/plain; charset=utf-8" return response -@blueprint.route('/') +@blueprint.route("/") def index(): """Home Page.""" - return render_template('rero_ils/frontpage.html', - organisations=Organisation.get_all(), - viewcode=current_app.config.get( - 'RERO_ILS_SEARCH_GLOBAL_VIEW_CODE')) + return render_template( + "rero_ils/frontpage.html", + organisations=Organisation.get_all(), + viewcode=current_app.config.get("RERO_ILS_SEARCH_GLOBAL_VIEW_CODE"), + ) -@blueprint.route('/<viewcode>') -@blueprint.route('/<viewcode>/') +@blueprint.route("/<viewcode>") +@blueprint.route("/<viewcode>/") @check_organisation_viewcode def index_with_view_code(viewcode): """Home Page.""" - if viewcode == current_app.config.get('RERO_ILS_SEARCH_GLOBAL_VIEW_CODE'): - return redirect(url_for( - 'rero_ils.index' - )) + if viewcode == current_app.config.get("RERO_ILS_SEARCH_GLOBAL_VIEW_CODE"): + return redirect(url_for("rero_ils.index")) else: return render_template( - 'rero_ils/frontpage.html', + "rero_ils/frontpage.html", organisations=Organisation.get_all(), - viewcode=viewcode + viewcode=viewcode, ) -@blueprint.route('/language', methods=['POST', 'PUT']) +@blueprint.route("/language", methods=["POST", "PUT"]) def set_language(): """Set language in session. 
@@ -125,24 +132,29 @@ def set_language(): } """ data = request.get_json() - if not data or not data.get('lang'): - return jsonify( - {'errors': [{'code': 400, 'title': 'missing lang property'}]}), 400 - lang_code = data.get('lang') - languages = dict(current_app.extensions['invenio-i18n'].get_languages()) + if not data or not data.get("lang"): + return ( + jsonify({"errors": [{"code": 400, "title": "missing lang property"}]}), + 400, + ) + lang_code = data.get("lang") + languages = dict(current_app.extensions["invenio-i18n"].get_languages()) if lang_code not in languages: - return jsonify( - {'errors': [{'code': 400, 'title': 'unsupported language'}]}), 400 - session[current_app.config['I18N_SESSION_KEY']] = lang_code.lower() - return jsonify({'lang': lang_code}) + return ( + jsonify({"errors": [{"code": 400, "title": "unsupported language"}]}), + 400, + ) + session[current_app.config["I18N_SESSION_KEY"]] = lang_code.lower() + return jsonify({"lang": lang_code}) -@blueprint.route('/<viewcode>/search/<recordType>') +@blueprint.route("/<viewcode>/search/<recordType>") @check_organisation_viewcode def search(viewcode, recordType): """Search page ui.""" - return render_template(current_app.config.get('RERO_ILS_SEARCH_TEMPLATE'), - viewcode=viewcode) + return render_template( + current_app.config.get("RERO_ILS_SEARCH_TEMPLATE"), viewcode=viewcode + ) @blueprint.app_template_filter() @@ -151,62 +163,65 @@ def nl2br(string): return string.replace("\n", "<br>")
") -@blueprint.app_template_filter('urlActive') +@blueprint.app_template_filter("urlActive") def url_active(string, target): """Add link url on text if http detected.""" - result = re.findall('(https?://[\\w|.|\\-|\\/]+)', string) + result = re.findall("(https?://[\\w|.|\\-|\\/]+)", string) for link in result: if link: string = string.replace( - link, - f'{link}' + link, f'{link}' ) return string -@blueprint.app_template_filter('viewOrganisationName') +@blueprint.app_template_filter("viewOrganisationName") def view_organisation_name(viewcode): """Get view name.""" - if viewcode != current_app.config.get('RERO_ILS_SEARCH_GLOBAL_VIEW_CODE'): + if viewcode != current_app.config.get("RERO_ILS_SEARCH_GLOBAL_VIEW_CODE"): if org := Organisation.get_record_by_viewcode(viewcode): - return org['name'] - return current_app.config.get('RERO_ILS_SEARCH_GLOBAL_NAME', '') + return org["name"] + return current_app.config.get("RERO_ILS_SEARCH_GLOBAL_NAME", "") def prepare_jsonschema(schema): """Json schema prep.""" schema = copy.deepcopy(schema) - schema.pop('$schema', None) - if 'pid' in schema.get('required', []): - schema['required'].remove('pid') - default_country = current_app.config.get('USERPROFILES_DEFAULT_COUNTRY') + schema.pop("$schema", None) + if "pid" in schema.get("required", []): + schema["required"].remove("pid") + default_country = current_app.config.get("USERPROFILES_DEFAULT_COUNTRY") # users - field = schema.get('properties', {}).get('country') + field = schema.get("properties", {}).get("country") # patrons: allOf does not works to remove property if not field: - field = schema.get('properties', {}).get('second_address', {}).get( - 'properties', {}).get('country') + field = ( + schema.get("properties", {}) + .get("second_address", {}) + .get("properties", {}) + .get("country") + ) if field and default_country: - field['default'] = default_country + field["default"] = default_country return schema -@blueprint.route('/schemas/') +@blueprint.route("/schemas/") def schemaform(document_type): """Return schema and form options for the editor.""" doc_type = document_type - doc_type = re.sub('ies$', 'y', doc_type) - doc_type = re.sub('s$', '', doc_type) - doc_type = re.sub('s_cfg$', '_cfg', doc_type) + doc_type = re.sub("ies$", "y", doc_type) + doc_type = re.sub("s$", "", doc_type) + doc_type = re.sub("s_cfg$", "_cfg", doc_type) data = {} schema = None schema_name = None try: if current_app.debug: current_jsonschemas.get_schema.cache_clear() - schema_name = f'{document_type}/{doc_type}-v0.0.1.json' + schema_name = f"{document_type}/{doc_type}-v0.0.1.json" schema = current_jsonschemas.get_schema(schema_name, with_refs=True) - data['schema'] = prepare_jsonschema(schema) + data["schema"] = prepare_jsonschema(schema) except JSONSchemaNotFound: abort(404) @@ -220,54 +235,45 @@ def replace_ref_url(schema, new_host): :param: new_host: The host to replace the $ref with. :returns: modified schema. 
""" - jsonschema_host = current_app.config.get('JSONSCHEMAS_HOST') + jsonschema_host = current_app.config.get("JSONSCHEMAS_HOST") for k, v in schema.items(): if isinstance(v, dict): - schema[k] = replace_ref_url( - schema=schema[k], - new_host=new_host - ) - if '$ref' in schema and isinstance(schema['$ref'], str): - schema['$ref'] = schema['$ref'] \ - .replace(jsonschema_host, new_host) + schema[k] = replace_ref_url(schema=schema[k], new_host=new_host) + if "$ref" in schema and isinstance(schema["$ref"], str): + schema["$ref"] = schema["$ref"].replace(jsonschema_host, new_host) # Todo: local:// return schema -@blueprint.route('/schemas//') +@blueprint.route("/schemas//") def get_schema(path, schema): """Retrieve a schema.""" - schema_path = f'{path}/{schema}' + schema_path = f"{path}/{schema}" try: - schema = deepcopy(current_refresolver_store[f'local://{schema_path}']) + schema = deepcopy(current_refresolver_store[f"local://{schema_path}"]) except KeyError: abort(404) resolved = request.args.get( - "resolved", current_app.config.get("JSONSCHEMAS_RESOLVE_SCHEMA"), - type=int + "resolved", current_app.config.get("JSONSCHEMAS_RESOLVE_SCHEMA"), type=int ) new_host = urlparse(request.base_url).netloc # Change schema['properties']['$schema']['default'] URL - if default := schema.get( - 'properties', {}).get('$schema', {}).get('default'): - schema['properties']['$schema']['default'] = default.replace( - current_app.config.get('JSONSCHEMAS_HOST'), new_host) + if default := schema.get("properties", {}).get("$schema", {}).get("default"): + schema["properties"]["$schema"]["default"] = default.replace( + current_app.config.get("JSONSCHEMAS_HOST"), new_host + ) # Change $refs - schema = replace_ref_url( - schema=schema, - new_host=new_host - ) + schema = replace_ref_url(schema=schema, new_host=new_host) if resolved: if current_app.debug: current_jsonschemas.get_schema.cache_clear() - schema = deepcopy( - current_jsonschemas.get_schema(schema_path, with_refs=True)) + schema = deepcopy(current_jsonschemas.get_schema(schema_path, with_refs=True)) return jsonify(schema) -@blueprint.route('/professional/', defaults={'path': ''}) -@blueprint.route('/professional/') +@blueprint.route("/professional/", defaults={"path": ""}) +@blueprint.route("/professional/") @can_access_professional_view def professional(path): """Return professional view.""" - return render_template('rero_ils/professional.html') + return render_template("rero_ils/professional.html") diff --git a/rero_ils/theme/webpack.py b/rero_ils/theme/webpack.py index 74582537f0..0cf9f0f1a2 100644 --- a/rero_ils/theme/webpack.py +++ b/rero_ils/theme/webpack.py @@ -35,16 +35,16 @@ theme = WebpackBundle( __name__, - 'assets', + "assets", entry={ - 'global': './scss/rero_ils/styles.scss', - 'reroils_public': './js/reroils/public.js', - 'babeltheque': './scss/rero_ils/babeltheque.scss' + "global": "./scss/rero_ils/styles.scss", + "reroils_public": "./js/reroils/public.js", + "babeltheque": "./scss/rero_ils/babeltheque.scss", }, dependencies={ - 'popper.js': '1.16.1', - 'jquery': '~3.2.1', - 'bootstrap': '~4.5.3', - 'font-awesome': '~4.7.0' - } + "popper.js": "1.16.1", + "jquery": "~3.2.1", + "bootstrap": "~4.5.3", + "font-awesome": "~4.7.0", + }, ) diff --git a/rero_ils/utils.py b/rero_ils/utils.py index d6d1c7124f..6cdd023b21 100644 --- a/rero_ils/utils.py +++ b/rero_ils/utils.py @@ -39,8 +39,8 @@ def get_i18n_supported_languages(): :returns: defined languages from config. 
""" - languages = [current_app.config.get('BABEL_DEFAULT_LANGUAGE')] - i18n_languages = current_app.config.get('I18N_LANGUAGES') + languages = [current_app.config.get("BABEL_DEFAULT_LANGUAGE")] + i18n_languages = current_app.config.get("I18N_LANGUAGES") return languages + [ln[0] for ln in i18n_languages] @@ -81,8 +81,7 @@ def language_mapping(lang): :param lang: bibliographic language code :returns: language mapping """ - return current_app.config.get('RERO_ILS_LANGUAGE_MAPPING', {})\ - .get(lang, lang) + return current_app.config.get("RERO_ILS_LANGUAGE_MAPPING", {}).get(lang, lang) class TranslatedList(list): diff --git a/scripts/test b/scripts/test index 782e5ea53b..f816609cf8 100755 --- a/scripts/test +++ b/scripts/test @@ -63,30 +63,22 @@ msg "PROGRAM: ${PROGRAM}" # Poetry is a mandatory condition to launch this program! if [[ -z "${VIRTUAL_ENV}" ]]; then - error_msg+exit "Error - Launch this script via poetry command:\n\tpoetry run ${PROGRAM}" + error_msg+exit "Error - Launch this script via poetry command:\n\t${PROGRAM}" fi function pretests () { - # -> Vulnerability found in flask-caching version 2.3.0 - # Vulnerability ID: 40459 + # -> Vulnerability found in flask-caching version 2.0.1 + # Vulnerability ID: 40459 + # -> Vulnerability found in flask-cors version 4.0.1 + # Vulnerability ID: 70624 + # -> Vulnerability found in sqlalchemy version 1.4.53 + # Vulnerability ID: 51668 # -> Vulnerability found in sqlalchemy-utils version 0.38.3 - # Vulnerability ID: 42194 + # Vulnerability ID: 42194 # -> Vulnerability found in wtforms version 2.3.3 - # Vulnerability ID: 42852 - # -> Vulnerability found in py version 1.11.0 - # Vulnerability ID: 51457 - # -> Vulnerability found in sqlalchemy version 1.4.52 - # Vulnerability ID: 51668 - # -> Vulnerability found in werkzeug version 2.2.3 - # Vulnerability ID: 62019 - # -> Vulnerability found in black version 22.12.0 - # Vulnerability ID: 66742 - # -> Vulnerability found in jinja2 version 3.1.4 - # Vulnerability ID: 70612 - # -> Vulnerability found in flask-cors version 4.0.1 - # Vulnerability ID: 70624 + # Vulnerability ID: 42852 # -> Vulnerability found in werkzeug version 2.2.3 - # Vulnerability ID: 71594 + # Vulnerability ID: 62019 # -> Vulnerability found in werkzeug version 2.2.3 # Vulnerability ID: 71595 # -> Vulnerability found in sentry-sdk version 1.45.1 @@ -106,8 +98,10 @@ function pretests () { pydocstyle rero_ils tests docs info_msg "Test isort:" isort --check-only --diff tests rero_ils + info_msg "Test black:" + black tests rero_ils tests --check info_msg "Test useless imports:" - poetry run autoflake -r --remove-all-unused-imports --ignore-init-module-imports --quiet . + autoflake --recursive --remove-all-unused-imports --ignore-init-module-imports --check-diff --quiet . 
# info_msg "Check-manifest:" # TODO: check if this is required when rero-ils will be published # check-manifest --ignore ".travis-*,docs/_build*" @@ -117,36 +111,36 @@ function pretests () { function tests () { info_msg "Tests All:" - poetry run pytest + pytest } function tests_api () { info_msg "Tests API:" - poetry run pytest ./tests/api + pytest ./tests/api } function tests_e2e () { info_msg "Tests E2E:" - poetry run pytest ./tests/e2e + pytest ./tests/e2e } function tests_scheduler () { info_msg "Tests Scheduler:" - poetry run pytest ./tests/scheduler + pytest ./tests/scheduler } function tests_ui () { info_msg "Tests UI:" - poetry run pytest ./tests/ui + pytest ./tests/ui } function tests_unit () { info_msg "Tests Unit:" - poetry run pytest ./tests/unit + pytest ./tests/unit } function tests_external () { info_msg "Tests External:" - poetry run pytest tests/api/test_external_services.py + pytest tests/api/test_external_services.py } function tests_other () { info_msg "Tests Other:" - poetry run pytest ./tests/conftest.py ./tests/test_version.py ./tests/utils.py + pytest ./tests/conftest.py ./tests/test_version.py ./tests/utils.py } if [ $# -eq 0 ] diff --git a/tests/api/acq_accounts/test_acq_accounts_permissions.py b/tests/api/acq_accounts/test_acq_accounts_permissions.py index 4d57db5432..d6ff484ba7 100644 --- a/tests/api/acq_accounts/test_acq_accounts_permissions.py +++ b/tests/api/acq_accounts/test_acq_accounts_permissions.py @@ -22,101 +22,126 @@ from mock import mock from utils import check_permission -from rero_ils.modules.acquisition.acq_accounts.permissions import \ - AcqAccountPermissionPolicy +from rero_ils.modules.acquisition.acq_accounts.permissions import ( + AcqAccountPermissionPolicy, +) -def test_acq_accounts_permissions(patron_martigny, - librarian_martigny, librarian2_martigny, - system_librarian_martigny, - org_martigny, org_sion, lib_sion, - acq_account_fiction_martigny, - acq_account_books_saxon, - acq_account_fiction_sion): +def test_acq_accounts_permissions( + patron_martigny, + librarian_martigny, + librarian2_martigny, + system_librarian_martigny, + org_martigny, + org_sion, + lib_sion, + acq_account_fiction_martigny, + acq_account_books_saxon, + acq_account_fiction_sion, +): """Test acq_account permissions class.""" # Anonymous user & Patron :: None action allowed identity_changed.send( current_app._get_current_object(), identity=AnonymousIdentity() ) - check_permission(AcqAccountPermissionPolicy, { - 'search': False, - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, {}) + check_permission( + AcqAccountPermissionPolicy, + { + "search": False, + "read": False, + "create": False, + "update": False, + "delete": False, + }, + {}, + ) login_user(patron_martigny.user) - check_permission(AcqAccountPermissionPolicy, { - 'search': False, - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, acq_account_fiction_martigny) + check_permission( + AcqAccountPermissionPolicy, + { + "search": False, + "read": False, + "create": False, + "update": False, + "delete": False, + }, + acq_account_fiction_martigny, + ) # As staff member without any specific access : # - None action allowed # - except read record of its own library (pro_read_only) login_user(librarian2_martigny.user) - check_permission(AcqAccountPermissionPolicy, { - 'search': True, - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, acq_account_fiction_martigny) - check_permission(AcqAccountPermissionPolicy, { - 'search': True, - 'read': 
False, - 'create': False, - 'update': False, - 'delete': False - }, acq_account_fiction_sion) + check_permission( + AcqAccountPermissionPolicy, + { + "search": True, + "read": True, + "create": False, + "update": False, + "delete": False, + }, + acq_account_fiction_martigny, + ) + check_permission( + AcqAccountPermissionPolicy, + { + "search": True, + "read": False, + "create": False, + "update": False, + "delete": False, + }, + acq_account_fiction_sion, + ) # As staff member with "library-administration" role : # - Search :: everything # - Read :: record of its own library # - Create/Update/Delete :: record of its own library login_user(librarian_martigny.user) - check_permission(AcqAccountPermissionPolicy, { - 'search': True, - 'read': True, - 'create': True, - 'update': True, - 'delete': True - }, acq_account_fiction_martigny) - check_permission(AcqAccountPermissionPolicy, { - 'search': True, - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, acq_account_books_saxon) + check_permission( + AcqAccountPermissionPolicy, + {"search": True, "read": True, "create": True, "update": True, "delete": True}, + acq_account_fiction_martigny, + ) + check_permission( + AcqAccountPermissionPolicy, + { + "search": True, + "read": False, + "create": False, + "update": False, + "delete": False, + }, + acq_account_books_saxon, + ) # As staff member with "full_permissions" role : # - Search :: everything # - Read :: record of its own organisation # - Create/Update/Delete :: record of its own organisation login_user(system_librarian_martigny.user) - check_permission(AcqAccountPermissionPolicy, { - 'search': True, - 'read': True, - 'create': True, - 'update': True, - 'delete': True - }, acq_account_books_saxon) + check_permission( + AcqAccountPermissionPolicy, + {"search": True, "read": True, "create": True, "update": True, "delete": True}, + acq_account_books_saxon, + ) # Special case !!! An acquisition account linked to a closed budget # should be considered as rolled over and can't be updated.
with mock.patch( - 'rero_ils.modules.acquisition.acq_accounts.api.AcqAccount.is_active', - False + "rero_ils.modules.acquisition.acq_accounts.api.AcqAccount.is_active", False ): - check_permission(AcqAccountPermissionPolicy, { - 'search': True, - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, acq_account_fiction_martigny) + check_permission( + AcqAccountPermissionPolicy, + { + "search": True, + "read": True, + "create": False, + "update": False, + "delete": False, + }, + acq_account_fiction_martigny, + ) diff --git a/tests/api/acq_accounts/test_acq_accounts_rest.py b/tests/api/acq_accounts/test_acq_accounts_rest.py index 06f8956709..421fbb0d81 100644 --- a/tests/api/acq_accounts/test_acq_accounts_rest.py +++ b/tests/api/acq_accounts/test_acq_accounts_rest.py @@ -23,127 +23,125 @@ import mock from flask import url_for from invenio_accounts.testutils import login_user_via_session -from utils import VerifyRecordPermissionPatch, get_json, postdata, \ - to_relative_url +from utils import VerifyRecordPermissionPatch, get_json, postdata, to_relative_url -@mock.patch('invenio_records_rest.views.verify_record_permission', - mock.MagicMock(return_value=VerifyRecordPermissionPatch)) +@mock.patch( + "invenio_records_rest.views.verify_record_permission", + mock.MagicMock(return_value=VerifyRecordPermissionPatch), +) def test_acq_accounts_library_facets( client, org_martigny, acq_account_fiction_martigny, rero_json_header ): """Test record retrieval.""" - url = url_for('invenio_records_rest.acac_list', view='org1') + url = url_for("invenio_records_rest.acac_list", view="org1") res = client.get(url, headers=rero_json_header) data = get_json(res) - aggs = data['aggregations'] - assert 'library' in aggs - assert all('name' in term for term in aggs['library']['buckets']) + aggs = data["aggregations"] + assert "library" in aggs + assert all("name" in term for term in aggs["library"]["buckets"]) -def test_acq_accounts_permissions(client, acq_account_fiction_martigny, - json_header): +def test_acq_accounts_permissions(client, acq_account_fiction_martigny, json_header): """Test record retrieval.""" - item_url = url_for('invenio_records_rest.acac_item', pid_value='acac1') + item_url = url_for("invenio_records_rest.acac_item", pid_value="acac1") res = client.get(item_url) assert res.status_code == 401 - res, _ = postdata(client, 'invenio_records_rest.acac_list', {}) + res, _ = postdata(client, "invenio_records_rest.acac_list", {}) assert res.status_code == 401 client.put( - url_for('invenio_records_rest.acac_item', pid_value='acac1'), + url_for("invenio_records_rest.acac_item", pid_value="acac1"), data={}, - headers=json_header + headers=json_header, ) res = client.delete(item_url) assert res.status_code == 401 -@mock.patch('invenio_records_rest.views.verify_record_permission', - mock.MagicMock(return_value=VerifyRecordPermissionPatch)) +@mock.patch( + "invenio_records_rest.views.verify_record_permission", + mock.MagicMock(return_value=VerifyRecordPermissionPatch), +) def test_acq_accounts_get(client, acq_account_fiction_martigny): """Test record retrieval.""" - item_url = url_for('invenio_records_rest.acac_item', pid_value='acac1') + item_url = url_for("invenio_records_rest.acac_item", pid_value="acac1") acq_account = acq_account_fiction_martigny res = client.get(item_url) assert res.status_code == 200 - assert res.headers['ETag'] == f'"{acq_account.revision_id}"' + assert res.headers["ETag"] == f'"{acq_account.revision_id}"' data = get_json(res) - assert acq_account.dumps() == 
data['metadata'] + assert acq_account.dumps() == data["metadata"] # Check metadata - for k in ['created', 'updated', 'metadata', 'links']: + for k in ["created", "updated", "metadata", "links"]: assert k in data # Check self links - res = client.get(to_relative_url(data['links']['self'])) + res = client.get(to_relative_url(data["links"]["self"])) assert res.status_code == 200 assert data == get_json(res) - assert acq_account.dumps() == data['metadata'] + assert acq_account.dumps() == data["metadata"] - list_url = url_for('invenio_records_rest.acac_list', pid='acac1') + list_url = url_for("invenio_records_rest.acac_list", pid="acac1") res = client.get(list_url) assert res.status_code == 200 data = get_json(res) - assert data['hits']['hits'][0]['metadata']['pid'] == 'acac1' + assert data["hits"]["hits"][0]["metadata"]["pid"] == "acac1" -@mock.patch('invenio_records_rest.views.verify_record_permission', - mock.MagicMock(return_value=VerifyRecordPermissionPatch)) -def test_acq_accounts_post_put_delete(client, - lib_saxon, - acq_account_books_saxon_data, - budget_2020_martigny, - json_header): +@mock.patch( + "invenio_records_rest.views.verify_record_permission", + mock.MagicMock(return_value=VerifyRecordPermissionPatch), +) +def test_acq_accounts_post_put_delete( + client, lib_saxon, acq_account_books_saxon_data, budget_2020_martigny, json_header +): """Test record retrieval.""" # TEST 1 :: Create record using POST API # and check that the returned record matches the given data acc_data = deepcopy(acq_account_books_saxon_data) - del acc_data['pid'] - res, data = postdata(client, 'invenio_records_rest.acac_list', acc_data) + del acc_data["pid"] + res, data = postdata(client, "invenio_records_rest.acac_list", acc_data) assert res.status_code == 201 - acc_pid = data['metadata'].pop('pid') + acc_pid = data["metadata"].pop("pid") assert acc_pid is not None - assert data['metadata'] == acc_data - acc_data['pid'] = acc_pid + assert data["metadata"] == acc_data + acc_data["pid"] = acc_pid # TEST 2 :: Get the record using GET API # and check that the returned record matches the given data - item_url = url_for('invenio_records_rest.acac_item', pid_value=acc_pid) - list_url = url_for('invenio_records_rest.acac_list', q=f'pid:{acc_pid}') + item_url = url_for("invenio_records_rest.acac_item", pid_value=acc_pid) + list_url = url_for("invenio_records_rest.acac_list", q=f"pid:{acc_pid}") res = client.get(item_url) data = get_json(res) assert res.status_code == 200 - assert acc_data == data['metadata'] + assert acc_data == data["metadata"] # TEST 3 :: Update record using PUT API # and check that the returned record matches the given data - acc_data['name'] = 'Test Name' - res = client.put( - item_url, - data=json.dumps(acc_data), - headers=json_header - ) + acc_data["name"] = "Test Name" + res = client.put(item_url, data=json.dumps(acc_data), headers=json_header) data = get_json(res) assert res.status_code == 200 - assert data['metadata']['name'] == acc_data['name'] + assert data["metadata"]["name"] == acc_data["name"] res = client.get(item_url) data = get_json(res) assert res.status_code == 200 - assert data['metadata']['name'] == acc_data['name'] + assert data["metadata"]["name"] == acc_data["name"] res = client.get(list_url) - data = get_json(res)['hits']['hits'][0] + data = get_json(res)["hits"]["hits"][0] assert res.status_code == 200 - assert data['metadata']['name'] == acc_data['name'] + assert data["metadata"]["name"] == acc_data["name"] # TEST 4 :: Delete record using DELETE API res =
client.delete(item_url) @@ -153,90 +151,102 @@ def test_acq_accounts_post_put_delete(client, def test_acq_accounts_can_delete( - client, document, acq_account_fiction_martigny, - acq_order_line_fiction_martigny, acq_order_fiction_martigny, - acq_receipt_fiction_martigny): + client, + document, + acq_account_fiction_martigny, + acq_order_line_fiction_martigny, + acq_order_fiction_martigny, + acq_receipt_fiction_martigny, +): """Test can delete an acq account.""" can, reasons = acq_account_fiction_martigny.can_delete assert not can - assert reasons['links']['acq_order_lines'] - assert reasons['links']['acq_receipts'] + assert reasons["links"]["acq_order_lines"] + assert reasons["links"]["acq_receipts"] def test_filtered_acq_accounts_get( - client, librarian_martigny, acq_account_fiction_martigny, - librarian_sion, acq_account_fiction_sion): + client, + librarian_martigny, + acq_account_fiction_martigny, + librarian_sion, + acq_account_fiction_sion, +): """Test acq accounts filter by organisation.""" - list_url = url_for('invenio_records_rest.acac_list') + list_url = url_for("invenio_records_rest.acac_list") res = client.get(list_url) assert res.status_code == 401 # Martigny login_user_via_session(client, librarian_martigny.user) - list_url = url_for('invenio_records_rest.acac_list') + list_url = url_for("invenio_records_rest.acac_list") res = client.get(list_url) assert res.status_code == 200 data = get_json(res) - assert data['hits']['total']['value'] == 1 + assert data["hits"]["total"]["value"] == 1 # Sion login_user_via_session(client, librarian_sion.user) - list_url = url_for('invenio_records_rest.acac_list') + list_url = url_for("invenio_records_rest.acac_list") res = client.get(list_url) assert res.status_code == 200 data = get_json(res) - assert data['hits']['total']['value'] == 1 + assert data["hits"]["total"]["value"] == 1 -def test_acq_account_secure_api(client, json_header, - acq_account_fiction_martigny, - librarian_martigny, - librarian_sion): +def test_acq_account_secure_api( + client, + json_header, + acq_account_fiction_martigny, + librarian_martigny, + librarian_sion, +): """Test acq account secure api access.""" # Martigny login_user_via_session(client, librarian_martigny.user) - record_url = url_for('invenio_records_rest.acac_item', - pid_value=acq_account_fiction_martigny.pid) + record_url = url_for( + "invenio_records_rest.acac_item", pid_value=acq_account_fiction_martigny.pid + ) res = client.get(record_url) assert res.status_code == 200 # Sion login_user_via_session(client, librarian_sion.user) - record_url = url_for('invenio_records_rest.acac_item', - pid_value=acq_account_fiction_martigny.pid) + record_url = url_for( + "invenio_records_rest.acac_item", pid_value=acq_account_fiction_martigny.pid + ) res = client.get(record_url) assert res.status_code == 403 -def test_acq_account_secure_api_create(client, json_header, - acq_account_fiction_martigny_data, - librarian_martigny, - librarian_sion, - acq_account_books_saxon_data, - system_librarian_martigny): +def test_acq_account_secure_api_create( + client, + json_header, + acq_account_fiction_martigny_data, + librarian_martigny, + librarian_sion, + acq_account_books_saxon_data, + system_librarian_martigny, +): """Test acq account secure api create.""" # Martigny login_user_via_session(client, librarian_martigny.user) - post_entrypoint = 'invenio_records_rest.acac_list' + post_entrypoint = "invenio_records_rest.acac_list" - acq_account_books_saxon_data.pop('pid') - res, _ = postdata( - client, - post_entrypoint, - 
acq_account_books_saxon_data - ) + acq_account_books_saxon_data.pop("pid") + res, _ = postdata(client, post_entrypoint, acq_account_books_saxon_data) assert res.status_code == 403 acc_data = deepcopy(acq_account_fiction_martigny_data) - acc_data.pop('pid') + acc_data.pop("pid") # we need to change name/number because it should be unique - acc_data['name'] = 'dummy_name' - acc_data['number'] = 'dummy_number' + acc_data["name"] = "dummy_name" + acc_data["number"] = "dummy_number" res, _ = postdata(client, post_entrypoint, acc_data) assert res.status_code == 201 @@ -246,55 +256,54 @@ def test_acq_account_secure_api_create(client, json_header, assert res.status_code == 403 -def test_acq_account_secure_api_update(client, - acq_account_books_martigny, - librarian_martigny, - librarian_sion, - acq_account_books_martigny_data, - json_header): +def test_acq_account_secure_api_update( + client, + acq_account_books_martigny, + librarian_martigny, + librarian_sion, + acq_account_books_martigny_data, + json_header, +): """Test acq account secure api update.""" # Martigny login_user_via_session(client, librarian_martigny.user) - record_url = url_for('invenio_records_rest.acac_item', - pid_value=acq_account_books_martigny.pid) + record_url = url_for( + "invenio_records_rest.acac_item", pid_value=acq_account_books_martigny.pid + ) data = acq_account_books_martigny - data['name'] = 'Test Name' - res = client.put( - record_url, - data=json.dumps(data), - headers=json_header - ) + data["name"] = "Test Name" + res = client.put(record_url, data=json.dumps(data), headers=json_header) assert res.status_code == 200 # Sion login_user_via_session(client, librarian_sion.user) - res = client.put( - record_url, - data=json.dumps(data), - headers=json_header - ) + res = client.put(record_url, data=json.dumps(data), headers=json_header) assert res.status_code == 403 -def test_acq_account_secure_api_delete(client, - acq_account_books_martigny, - librarian_martigny, - librarian_sion, - acq_account_general_fully, - json_header): +def test_acq_account_secure_api_delete( + client, + acq_account_books_martigny, + librarian_martigny, + librarian_sion, + acq_account_general_fully, + json_header, +): """Test acq account secure api delete.""" # Martigny login_user_via_session(client, librarian_martigny.user) - record_url = url_for('invenio_records_rest.acac_item', - pid_value=acq_account_books_martigny.pid) + record_url = url_for( + "invenio_records_rest.acac_item", pid_value=acq_account_books_martigny.pid + ) res = client.delete(record_url) assert res.status_code == 204 - record_url = url_for('invenio_records_rest.acac_item', - pid_value=acq_account_general_fully.pid) + record_url = url_for( + "invenio_records_rest.acac_item", pid_value=acq_account_general_fully.pid + ) res = client.delete(record_url) assert res.status_code == 403 diff --git a/tests/api/acq_accounts/test_acq_accounts_serializers.py b/tests/api/acq_accounts/test_acq_accounts_serializers.py index 8e4c0da002..737142c589 100644 --- a/tests/api/acq_accounts/test_acq_accounts_serializers.py +++ b/tests/api/acq_accounts/test_acq_accounts_serializers.py @@ -23,24 +23,31 @@ from utils import get_csv -def test_csv_serializer(client, csv_header, librarian_martigny, - acq_account_fiction_martigny, vendor_martigny, - acq_order_fiction_martigny, - acq_order_line_fiction_martigny, - acq_order_line2_fiction_martigny, - acq_receipt_fiction_martigny, - acq_receipt_line_1_fiction_martigny, - acq_receipt_line_2_fiction_martigny): +def test_csv_serializer( + client, + csv_header, + 
librarian_martigny, + acq_account_fiction_martigny, + vendor_martigny, + acq_order_fiction_martigny, + acq_order_line_fiction_martigny, + acq_order_line2_fiction_martigny, + acq_receipt_fiction_martigny, + acq_receipt_line_1_fiction_martigny, + acq_receipt_line_2_fiction_martigny, +): """Test CSV formatter.""" login_user_via_session(client, librarian_martigny.user) - list_url = url_for('api_exports.acq_account_export', - q=f'pid:{acq_account_fiction_martigny.pid}') + list_url = url_for( + "api_exports.acq_account_export", q=f"pid:{acq_account_fiction_martigny.pid}" + ) response = client.get(list_url, headers=csv_header) assert response.status_code == 200 data = get_csv(response) assert data - assert '"account_pid","account_name","account_number",' \ '"account_allocated_amount","account_available_amount",' \ '"account_current_encumbrance","account_current_expenditure",' \ '"account_available_balance"' \ in data + assert ( + '"account_pid","account_name","account_number",' + '"account_allocated_amount","account_available_amount",' + '"account_current_encumbrance","account_current_expenditure",' + '"account_available_balance"' in data + ) diff --git a/tests/api/acq_invoices/test_acq_invoices_permissions.py b/tests/api/acq_invoices/test_acq_invoices_permissions.py index 633e0e572a..f2cb66bd52 100644 --- a/tests/api/acq_invoices/test_acq_invoices_permissions.py +++ b/tests/api/acq_invoices/test_acq_invoices_permissions.py @@ -22,15 +22,21 @@ from flask_security import login_user from utils import check_permission -from rero_ils.modules.acquisition.acq_invoices.permissions import \ AcqInvoicePermissionPolicy +from rero_ils.modules.acquisition.acq_invoices.permissions import ( + AcqInvoicePermissionPolicy, +) def test_invoice_permissions( - patron_martigny, librarian_martigny, librarian2_martigny, - system_librarian_martigny, document, org_martigny, - acq_invoice_fiction_sion, acq_invoice_fiction_saxon, - acq_invoice_fiction_martigny + patron_martigny, + librarian_martigny, + librarian2_martigny, + system_librarian_martigny, + document, + org_martigny, + acq_invoice_fiction_sion, + acq_invoice_fiction_saxon, + acq_invoice_fiction_martigny, ): """Test invoices permissions class.""" @@ -38,92 +44,115 @@ def test_invoice_permissions( identity_changed.send( current_app._get_current_object(), identity=AnonymousIdentity() ) - check_permission(AcqInvoicePermissionPolicy, { - 'search': False, - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, {}) + check_permission( + AcqInvoicePermissionPolicy, + { + "search": False, + "read": False, + "create": False, + "update": False, + "delete": False, + }, + {}, + ) login_user(patron_martigny.user) - check_permission(AcqInvoicePermissionPolicy, { - 'search': False, - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, acq_invoice_fiction_martigny) + check_permission( + AcqInvoicePermissionPolicy, + { + "search": False, + "read": False, + "create": False, + "update": False, + "delete": False, + }, + acq_invoice_fiction_martigny, + ) # As staff member without any specific access : # - None action allowed # - except read record of its own library (pro_read_only) login_user(librarian2_martigny.user) - check_permission(AcqInvoicePermissionPolicy, { - 'search': True, - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, acq_invoice_fiction_martigny) - check_permission(AcqInvoicePermissionPolicy, { - 'search': True, - 'read': False, - 'create': False, - 'update': False, - 'delete': False -
}, acq_invoice_fiction_sion) + check_permission( + AcqInvoicePermissionPolicy, + { + "search": True, + "read": True, + "create": False, + "update": False, + "delete": False, + }, + acq_invoice_fiction_martigny, + ) + check_permission( + AcqInvoicePermissionPolicy, + { + "search": True, + "read": False, + "create": False, + "update": False, + "delete": False, + }, + acq_invoice_fiction_sion, + ) # As staff member with "library-administration" role : # - Search :: everything # - Read :: record of its own library # - Create/Update/Delete :: record of its own library login_user(librarian_martigny.user) - check_permission(AcqInvoicePermissionPolicy, { - 'search': True, - 'read': True, - 'create': True, - 'update': True, - 'delete': True - }, acq_invoice_fiction_martigny) - check_permission(AcqInvoicePermissionPolicy, { - 'search': True, - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, acq_invoice_fiction_saxon) + check_permission( + AcqInvoicePermissionPolicy, + {"search": True, "read": True, "create": True, "update": True, "delete": True}, + acq_invoice_fiction_martigny, + ) + check_permission( + AcqInvoicePermissionPolicy, + { + "search": True, + "read": False, + "create": False, + "update": False, + "delete": False, + }, + acq_invoice_fiction_saxon, + ) # As staff member with "full_permissions" role : # - Search :: everything # - Read :: record of its own organisation # - Create/Update/Delete :: record of its own organisation login_user(system_librarian_martigny.user) - check_permission(AcqInvoicePermissionPolicy, { - 'search': True, - 'read': True, - 'create': True, - 'update': True, - 'delete': True - }, acq_invoice_fiction_saxon) - check_permission(AcqInvoicePermissionPolicy, { - 'search': True, - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, acq_invoice_fiction_sion) + check_permission( + AcqInvoicePermissionPolicy, + {"search": True, "read": True, "create": True, "update": True, "delete": True}, + acq_invoice_fiction_saxon, + ) + check_permission( + AcqInvoicePermissionPolicy, + { + "search": True, + "read": False, + "create": False, + "update": False, + "delete": False, + }, + acq_invoice_fiction_sion, + ) # Special case !!! An acquisition invoice linked to a closed budget # should be considered as rolled over and can't be updated. with mock.patch( - 'rero_ils.modules.acquisition.acq_invoices.api.AcquisitionInvoice.' - 'is_active', - False + "rero_ils.modules.acquisition.acq_invoices.api.AcquisitionInvoice."
"is_active", + False, ): - check_permission(AcqInvoicePermissionPolicy, { - 'search': True, - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, acq_invoice_fiction_martigny) + check_permission( + AcqInvoicePermissionPolicy, + { + "search": True, + "read": True, + "create": False, + "update": False, + "delete": False, + }, + acq_invoice_fiction_martigny, + ) diff --git a/tests/api/acq_invoices/test_acq_invoices_rest.py b/tests/api/acq_invoices/test_acq_invoices_rest.py index 34115e08ad..a2219d74c1 100644 --- a/tests/api/acq_invoices/test_acq_invoices_rest.py +++ b/tests/api/acq_invoices/test_acq_invoices_rest.py @@ -22,120 +22,121 @@ import mock from flask import url_for from invenio_accounts.testutils import login_user_via_session -from utils import VerifyRecordPermissionPatch, get_json, postdata, \ - to_relative_url +from utils import VerifyRecordPermissionPatch, get_json, postdata, to_relative_url -@mock.patch('invenio_records_rest.views.verify_record_permission', - mock.MagicMock(return_value=VerifyRecordPermissionPatch)) +@mock.patch( + "invenio_records_rest.views.verify_record_permission", + mock.MagicMock(return_value=VerifyRecordPermissionPatch), +) def test_acquisition_invoices_library_facets( - client, org_martigny, acq_invoice_fiction_martigny, rero_json_header): + client, org_martigny, acq_invoice_fiction_martigny, rero_json_header +): """Test record retrieval.""" - list_url = url_for('invenio_records_rest.acin_list', view='org1') + list_url = url_for("invenio_records_rest.acin_list", view="org1") res = client.get(list_url, headers=rero_json_header) data = get_json(res) - aggs = data['aggregations'] - assert 'library' in aggs + aggs = data["aggregations"] + assert "library" in aggs -@mock.patch('invenio_records_rest.views.verify_record_permission', - mock.MagicMock(return_value=VerifyRecordPermissionPatch)) -def test_acquisition_invoice_total_amount( - client, acq_invoice_fiction_martigny): +@mock.patch( + "invenio_records_rest.views.verify_record_permission", + mock.MagicMock(return_value=VerifyRecordPermissionPatch), +) +def test_acquisition_invoice_total_amount(client, acq_invoice_fiction_martigny): """Test calculate total amonut of invoice.""" - item_url = url_for('invenio_records_rest.acin_item', pid_value='acin1') + item_url = url_for("invenio_records_rest.acin_item", pid_value="acin1") acq_invoice = acq_invoice_fiction_martigny res = client.get(item_url) assert res.status_code == 200 data = get_json(res) - assert data['metadata']['invoice_price'] == 1500 + assert data["metadata"]["invoice_price"] == 1500 -@mock.patch('invenio_records_rest.views.verify_record_permission', - mock.MagicMock(return_value=VerifyRecordPermissionPatch)) +@mock.patch( + "invenio_records_rest.views.verify_record_permission", + mock.MagicMock(return_value=VerifyRecordPermissionPatch), +) def test_acquisition_invoice_get(client, acq_invoice_fiction_martigny): """Test record retrieval.""" - item_url = url_for('invenio_records_rest.acin_item', pid_value='acin1') + item_url = url_for("invenio_records_rest.acin_item", pid_value="acin1") acq_invoice = acq_invoice_fiction_martigny res = client.get(item_url) assert res.status_code == 200 - assert res.headers['ETag'] == f'"{acq_invoice.revision_id}"' + assert res.headers["ETag"] == f'"{acq_invoice.revision_id}"' data = get_json(res) - assert acq_invoice.dumps() == data['metadata'] + assert acq_invoice.dumps() == data["metadata"] # Check metadata - for k in ['created', 'updated', 'metadata', 'links']: + for k in ["created", 
"updated", "metadata", "links"]: assert k in data # Check self links - res = client.get(to_relative_url(data['links']['self'])) + res = client.get(to_relative_url(data["links"]["self"])) assert res.status_code == 200 assert data == get_json(res) - assert acq_invoice.dumps() == data['metadata'] + assert acq_invoice.dumps() == data["metadata"] - list_url = url_for('invenio_records_rest.acin_list', pid='acin1') + list_url = url_for("invenio_records_rest.acin_list", pid="acin1") res = client.get(list_url) assert res.status_code == 200 data = get_json(res) - assert data['hits']['hits'][0]['metadata'] == acq_invoice.replace_refs() + assert data["hits"]["hits"][0]["metadata"] == acq_invoice.replace_refs() -@mock.patch('invenio_records_rest.views.verify_record_permission', - mock.MagicMock(return_value=VerifyRecordPermissionPatch)) +@mock.patch( + "invenio_records_rest.views.verify_record_permission", + mock.MagicMock(return_value=VerifyRecordPermissionPatch), +) def test_acquisition_invoices_post_put_delete( - client, org_martigny, vendor2_martigny, acq_invoice_fiction_saxon, - json_header): + client, org_martigny, vendor2_martigny, acq_invoice_fiction_saxon, json_header +): """Test record retrieval.""" # Create record / POST - item_url = url_for('invenio_records_rest.acin_item', pid_value='1') - list_url = url_for('invenio_records_rest.acin_list', q='pid:1') + item_url = url_for("invenio_records_rest.acin_item", pid_value="1") + list_url = url_for("invenio_records_rest.acin_list", q="pid:1") - acq_invoice_fiction_saxon['pid'] = '1' + acq_invoice_fiction_saxon["pid"] = "1" res, data = postdata( - client, - 'invenio_records_rest.acin_list', - acq_invoice_fiction_saxon + client, "invenio_records_rest.acin_list", acq_invoice_fiction_saxon ) assert res.status_code == 201 # Check that the returned record matches the given data - assert data['metadata'] == acq_invoice_fiction_saxon + assert data["metadata"] == acq_invoice_fiction_saxon res = client.get(item_url) assert res.status_code == 200 data = get_json(res) - assert acq_invoice_fiction_saxon == data['metadata'] + assert acq_invoice_fiction_saxon == data["metadata"] # Update record/PUT data = acq_invoice_fiction_saxon - data['invoice_number'] = 'IN-TEST-2' - res = client.put( - item_url, - data=json.dumps(data), - headers=json_header - ) + data["invoice_number"] = "IN-TEST-2" + res = client.put(item_url, data=json.dumps(data), headers=json_header) assert res.status_code == 200 # Check that the returned record matches the given data data = get_json(res) - assert data['metadata']['invoice_number'] == 'IN-TEST-2' + assert data["metadata"]["invoice_number"] == "IN-TEST-2" res = client.get(item_url) assert res.status_code == 200 data = get_json(res) - assert data['metadata']['invoice_number'] == 'IN-TEST-2' + assert data["metadata"]["invoice_number"] == "IN-TEST-2" res = client.get(list_url) assert res.status_code == 200 - data = get_json(res)['hits']['hits'][0] - assert data['metadata']['invoice_number'] == 'IN-TEST-2' + data = get_json(res)["hits"]["hits"][0] + assert data["metadata"]["invoice_number"] == "IN-TEST-2" # Delete record/DELETE res = client.delete(item_url) @@ -153,36 +154,41 @@ def test_acquisition_invoices_can_delete(client, acq_invoice_fiction_martigny): def test_filtered_acquisition_invoices_get( - client, librarian_martigny, acq_invoice_fiction_martigny, - acq_invoice_fiction_saxon, librarian_sion, - acq_invoice_fiction_sion): + client, + librarian_martigny, + acq_invoice_fiction_martigny, + acq_invoice_fiction_saxon, + 
librarian_sion, + acq_invoice_fiction_sion, +): """Test acquisition invoices filter by organisation.""" - list_url = url_for('invenio_records_rest.acin_list') + list_url = url_for("invenio_records_rest.acin_list") res = client.get(list_url) assert res.status_code == 401 # Martigny login_user_via_session(client, librarian_martigny.user) - list_url = url_for('invenio_records_rest.acin_list') + list_url = url_for("invenio_records_rest.acin_list") res = client.get(list_url) assert res.status_code == 200 data = get_json(res) - assert data['hits']['total']['value'] == 2 + assert data["hits"]["total"]["value"] == 2 # Sion login_user_via_session(client, librarian_sion.user) - list_url = url_for('invenio_records_rest.acin_list') + list_url = url_for("invenio_records_rest.acin_list") res = client.get(list_url) assert res.status_code == 200 data = get_json(res) - assert data['hits']['total']['value'] == 1 + assert data["hits"]["total"]["value"] == 1 def test_acquisition_invoice_properties( - org_sion, vendor_sion, document, lib_sion, acq_invoice_fiction_sion): + org_sion, vendor_sion, document, lib_sion, acq_invoice_fiction_sion +): """Test acquisition invoice properties.""" assert acq_invoice_fiction_sion.vendor_pid == vendor_sion.pid assert acq_invoice_fiction_sion.library_pid == lib_sion.pid diff --git a/tests/api/acq_order_lines/test_acq_order_lines_permissions.py b/tests/api/acq_order_lines/test_acq_order_lines_permissions.py index 3490c05a8f..de02e03098 100644 --- a/tests/api/acq_order_lines/test_acq_order_lines_permissions.py +++ b/tests/api/acq_order_lines/test_acq_order_lines_permissions.py @@ -22,110 +22,139 @@ from flask_security import login_user from utils import check_permission -from rero_ils.modules.acquisition.acq_order_lines.permissions import \ - AcqOrderLinePermissionPolicy +from rero_ils.modules.acquisition.acq_order_lines.permissions import ( + AcqOrderLinePermissionPolicy, +) -def test_order_lines_permissions(patron_martigny, - librarian_martigny, librarian2_martigny, - system_librarian_martigny, - document, org_martigny, lib_sion, - vendor2_martigny, - acq_order_line_fiction_sion, - acq_order_line_fiction_saxon, - acq_order_line_fiction_martigny): +def test_order_lines_permissions( + patron_martigny, + librarian_martigny, + librarian2_martigny, + system_librarian_martigny, + document, + org_martigny, + lib_sion, + vendor2_martigny, + acq_order_line_fiction_sion, + acq_order_line_fiction_saxon, + acq_order_line_fiction_martigny, +): """Test order lines permissions class.""" # Anonymous user & Patron :: None action allowed identity_changed.send( current_app._get_current_object(), identity=AnonymousIdentity() ) - check_permission(AcqOrderLinePermissionPolicy, { - 'search': False, - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, {}) + check_permission( + AcqOrderLinePermissionPolicy, + { + "search": False, + "read": False, + "create": False, + "update": False, + "delete": False, + }, + {}, + ) login_user(patron_martigny.user) - check_permission(AcqOrderLinePermissionPolicy, { - 'search': False, - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, acq_order_line_fiction_martigny) + check_permission( + AcqOrderLinePermissionPolicy, + { + "search": False, + "read": False, + "create": False, + "update": False, + "delete": False, + }, + acq_order_line_fiction_martigny, + ) # As staff member without any specific access : # - None action allowed # - except read record of its own library (pro_read_only) 
login_user(librarian2_martigny.user) - check_permission(AcqOrderLinePermissionPolicy, { - 'search': True, - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, acq_order_line_fiction_martigny) - check_permission(AcqOrderLinePermissionPolicy, { - 'search': True, - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, acq_order_line_fiction_sion) + check_permission( + AcqOrderLinePermissionPolicy, + { + "search": True, + "read": True, + "create": False, + "update": False, + "delete": False, + }, + acq_order_line_fiction_martigny, + ) + check_permission( + AcqOrderLinePermissionPolicy, + { + "search": True, + "read": False, + "create": False, + "update": False, + "delete": False, + }, + acq_order_line_fiction_sion, + ) # As staff member with "library-administration" role : # - Search :: everything # - Read :: record of its own library # - Create/Update/Delete :: record of its own library login_user(librarian_martigny.user) - check_permission(AcqOrderLinePermissionPolicy, { - 'search': True, - 'read': True, - 'create': True, - 'update': True, - 'delete': True - }, acq_order_line_fiction_martigny) - check_permission(AcqOrderLinePermissionPolicy, { - 'search': True, - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, acq_order_line_fiction_saxon) + check_permission( + AcqOrderLinePermissionPolicy, + {"search": True, "read": True, "create": True, "update": True, "delete": True}, + acq_order_line_fiction_martigny, + ) + check_permission( + AcqOrderLinePermissionPolicy, + { + "search": True, + "read": False, + "create": False, + "update": False, + "delete": False, + }, + acq_order_line_fiction_saxon, + ) # As staff member with "full_permissions" role : # - Search :: everything # - Read :: record of its own organisation # - Create/Update/Delete :: record of its own organisation login_user(system_librarian_martigny.user) - check_permission(AcqOrderLinePermissionPolicy, { - 'search': True, - 'read': True, - 'create': True, - 'update': True, - 'delete': True - }, acq_order_line_fiction_saxon) - check_permission(AcqOrderLinePermissionPolicy, { - 'search': True, - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, acq_order_line_fiction_sion) + check_permission( + AcqOrderLinePermissionPolicy, + {"search": True, "read": True, "create": True, "update": True, "delete": True}, + acq_order_line_fiction_saxon, + ) + check_permission( + AcqOrderLinePermissionPolicy, + { + "search": True, + "read": False, + "create": False, + "update": False, + "delete": False, + }, + acq_order_line_fiction_sion, + ) # Special case !!! An acquisition order line linked to a closed budget # should be considered as rolled over and can't be updated. with mock.patch( - 'rero_ils.modules.acquisition.acq_order_lines.api.AcqOrderLine.' - 'is_active', - False + "rero_ils.modules.acquisition.acq_order_lines.api.AcqOrderLine."
"is_active", + False, ): - check_permission(AcqOrderLinePermissionPolicy, { - 'search': True, - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, acq_order_line_fiction_martigny) + check_permission( + AcqOrderLinePermissionPolicy, + { + "search": True, + "read": True, + "create": False, + "update": False, + "delete": False, + }, + acq_order_line_fiction_martigny, + ) diff --git a/tests/api/acq_order_lines/test_acq_order_lines_rest.py b/tests/api/acq_order_lines/test_acq_order_lines_rest.py index 3d89dce23c..ce914dfbfb 100644 --- a/tests/api/acq_order_lines/test_acq_order_lines_rest.py +++ b/tests/api/acq_order_lines/test_acq_order_lines_rest.py @@ -22,104 +22,102 @@ import mock from flask import url_for from invenio_accounts.testutils import login_user_via_session -from utils import VerifyRecordPermissionPatch, get_json, postdata, \ - to_relative_url +from utils import VerifyRecordPermissionPatch, get_json, postdata, to_relative_url -from rero_ils.modules.acquisition.acq_order_lines.models import \ - AcqOrderLineNoteType +from rero_ils.modules.acquisition.acq_order_lines.models import AcqOrderLineNoteType -@mock.patch('invenio_records_rest.views.verify_record_permission', - mock.MagicMock(return_value=VerifyRecordPermissionPatch)) +@mock.patch( + "invenio_records_rest.views.verify_record_permission", + mock.MagicMock(return_value=VerifyRecordPermissionPatch), +) def test_acq_order_lines_get(client, acq_order_line_fiction_martigny): """Test record retrieval.""" - item_url = url_for('invenio_records_rest.acol_item', pid_value='acol1') + item_url = url_for("invenio_records_rest.acol_item", pid_value="acol1") acol = acq_order_line_fiction_martigny res = client.get(item_url) data = get_json(res) assert res.status_code == 200 - assert res.headers['ETag'] == f'"{acol.revision_id}"' - assert acol.dumps() == data['metadata'] + assert res.headers["ETag"] == f'"{acol.revision_id}"' + assert acol.dumps() == data["metadata"] # Check metadata - for k in ['created', 'updated', 'metadata', 'links']: + for k in ["created", "updated", "metadata", "links"]: assert k in data # Check self links - res = client.get(to_relative_url(data['links']['self'])) + res = client.get(to_relative_url(data["links"]["self"])) assert res.status_code == 200 assert data == get_json(res) - assert acol.dumps() == data['metadata'] - assert data['metadata']['total_amount'] == \ - acol.quantity * acol.get('amount') + assert acol.dumps() == data["metadata"] + assert data["metadata"]["total_amount"] == acol.quantity * acol.get("amount") - list_url = url_for('invenio_records_rest.acol_list', pid='acol1') + list_url = url_for("invenio_records_rest.acol_list", pid="acol1") res = client.get(list_url) assert res.status_code == 200 data = get_json(res) - metadata = data['hits']['hits'][0]['metadata'] - del metadata['total_unreceived_amount'] # dynamically added key - del metadata['status'] # dynamically added key - del metadata['received_quantity'] # dynamically added key + metadata = data["hits"]["hits"][0]["metadata"] + del metadata["total_unreceived_amount"] # dynamically added key + del metadata["status"] # dynamically added key + del metadata["received_quantity"] # dynamically added key assert metadata == acol.replace_refs() -@mock.patch('invenio_records_rest.views.verify_record_permission', - mock.MagicMock(return_value=VerifyRecordPermissionPatch)) +@mock.patch( + "invenio_records_rest.views.verify_record_permission", + mock.MagicMock(return_value=VerifyRecordPermissionPatch), +) def 
test_acq_order_lines_post_put_delete( - client, org_martigny, vendor2_martigny, - acq_order_fiction_saxon, acq_order_line_fiction_saxon, - json_header): + client, + org_martigny, + vendor2_martigny, + acq_order_fiction_saxon, + acq_order_line_fiction_saxon, + json_header, +): """Test CRUD on record.""" # Create record / POST - item_url = url_for('invenio_records_rest.acol_item', pid_value='1') - list_url = url_for('invenio_records_rest.acol_list', q='pid:1') + item_url = url_for("invenio_records_rest.acol_item", pid_value="1") + list_url = url_for("invenio_records_rest.acol_list", q="pid:1") - acq_order_line_fiction_saxon['pid'] = '1' + acq_order_line_fiction_saxon["pid"] = "1" res, data = postdata( - client, - 'invenio_records_rest.acol_list', - acq_order_line_fiction_saxon + client, "invenio_records_rest.acol_list", acq_order_line_fiction_saxon ) assert res.status_code == 201 # Check that the returned record matches the given data - assert data['metadata'] == acq_order_line_fiction_saxon + assert data["metadata"] == acq_order_line_fiction_saxon res = client.get(item_url) assert res.status_code == 200 data = get_json(res) - assert acq_order_line_fiction_saxon == data['metadata'] + assert acq_order_line_fiction_saxon == data["metadata"] # Update record/PUT data = acq_order_line_fiction_saxon - data['notes'] = [{ - 'type': AcqOrderLineNoteType.STAFF, - 'content': 'Test update note' - }] - res = client.put( - item_url, - data=json.dumps(data), - headers=json_header - ) + data["notes"] = [ + {"type": AcqOrderLineNoteType.STAFF, "content": "Test update note"} + ] + res = client.put(item_url, data=json.dumps(data), headers=json_header) assert res.status_code == 200 # Check that the returned record matches the given data data = get_json(res) - assert data['metadata']['notes'][0]['content'] == 'Test update note' + assert data["metadata"]["notes"][0]["content"] == "Test update note" res = client.get(item_url) assert res.status_code == 200 data = get_json(res) - assert data['metadata']['notes'][0]['content'] == 'Test update note' + assert data["metadata"]["notes"][0]["content"] == "Test update note" res = client.get(list_url) assert res.status_code == 200 - data = get_json(res)['hits']['hits'][0] - assert data['metadata']['notes'][0]['content'] == 'Test update note' + data = get_json(res)["hits"]["hits"][0] + assert data["metadata"]["notes"][0]["content"] == "Test update note" # Delete record/DELETE res = client.delete(item_url) @@ -137,34 +135,33 @@ def test_acq_order_lines_can_delete(client, acq_order_line_fiction_martigny): def test_acq_order_lines_document_can_delete( - client, document, acq_order_line_fiction_martigny): + client, document, acq_order_line_fiction_martigny +): """Test can delete a document with a linked acquisition order line.""" can, reasons = document.can_delete assert not can - assert reasons['links']['acq_order_lines'] - - -def test_acq_order_line_secure_api_update(client, - org_sion, - vendor_sion, - acq_order_fiction_sion, - acq_order_line_fiction_sion, - librarian_martigny, - librarian_sion, - json_header): + assert reasons["links"]["acq_order_lines"] + + +def test_acq_order_line_secure_api_update( + client, + org_sion, + vendor_sion, + acq_order_fiction_sion, + acq_order_line_fiction_sion, + librarian_martigny, + librarian_sion, + json_header, +): """Test acq order line secure api update.""" # Sion login_user_via_session(client, librarian_sion.user) - record_url = url_for('invenio_records_rest.acol_item', - pid_value=acq_order_line_fiction_sion.pid) - data = 
acq_order_line_fiction_sion - data['notes'] = [{ - 'type': AcqOrderLineNoteType.STAFF, - 'content': 'Test update note' - }] - res = client.put( - record_url, - data=json.dumps(data), - headers=json_header + record_url = url_for( + "invenio_records_rest.acol_item", pid_value=acq_order_line_fiction_sion.pid ) + data = acq_order_line_fiction_sion + data["notes"] = [ + {"type": AcqOrderLineNoteType.STAFF, "content": "Test update note"} + ] + res = client.put(record_url, data=json.dumps(data), headers=json_header) assert res.status_code == 200 diff --git a/tests/api/acq_orders/test_acq_orders_permissions.py b/tests/api/acq_orders/test_acq_orders_permissions.py index 9ae7f742f1..fca5995e8b 100644 --- a/tests/api/acq_orders/test_acq_orders_permissions.py +++ b/tests/api/acq_orders/test_acq_orders_permissions.py @@ -22,15 +22,19 @@ from flask_security import login_user from utils import check_permission -from rero_ils.modules.acquisition.acq_orders.permissions import \ - AcqOrderPermissionPolicy +from rero_ils.modules.acquisition.acq_orders.permissions import AcqOrderPermissionPolicy def test_orders_permissions( - patron_martigny, librarian_martigny, librarian2_martigny, - system_librarian_martigny, org_martigny, vendor2_martigny, - acq_order_fiction_sion, acq_order_fiction_saxon, - acq_order_fiction_martigny + patron_martigny, + librarian_martigny, + librarian2_martigny, + system_librarian_martigny, + org_martigny, + vendor2_martigny, + acq_order_fiction_sion, + acq_order_fiction_saxon, + acq_order_fiction_martigny, ): """Test orders permissions class.""" @@ -38,91 +42,114 @@ def test_orders_permissions( identity_changed.send( current_app._get_current_object(), identity=AnonymousIdentity() ) - check_permission(AcqOrderPermissionPolicy, { - 'search': False, - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, {}) + check_permission( + AcqOrderPermissionPolicy, + { + "search": False, + "read": False, + "create": False, + "update": False, + "delete": False, + }, + {}, + ) login_user(patron_martigny.user) - check_permission(AcqOrderPermissionPolicy, { - 'search': False, - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, acq_order_fiction_martigny) + check_permission( + AcqOrderPermissionPolicy, + { + "search": False, + "read": False, + "create": False, + "update": False, + "delete": False, + }, + acq_order_fiction_martigny, + ) # As staff member without any specific access : # - None action allowed # - except read record of its own library (pro_read_only) login_user(librarian2_martigny.user) - check_permission(AcqOrderPermissionPolicy, { - 'search': True, - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, acq_order_fiction_martigny) - check_permission(AcqOrderPermissionPolicy, { - 'search': True, - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, acq_order_fiction_sion) + check_permission( + AcqOrderPermissionPolicy, + { + "search": True, + "read": True, + "create": False, + "update": False, + "delete": False, + }, + acq_order_fiction_martigny, + ) + check_permission( + AcqOrderPermissionPolicy, + { + "search": True, + "read": False, + "create": False, + "update": False, + "delete": False, + }, + acq_order_fiction_sion, + ) # As staff member with "library-administration" role : # - Search :: everything # - Read :: record of its own library # - Create/Update/Delete :: record of its own library login_user(librarian_martigny.user) - check_permission(AcqOrderPermissionPolicy, { - 'search': True, - 
'read': True, - 'create': True, - 'update': True, - 'delete': True - }, acq_order_fiction_martigny) - check_permission(AcqOrderPermissionPolicy, { - 'search': True, - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, acq_order_fiction_saxon) + check_permission( + AcqOrderPermissionPolicy, + {"search": True, "read": True, "create": True, "update": True, "delete": True}, + acq_order_fiction_martigny, + ) + check_permission( + AcqOrderPermissionPolicy, + { + "search": True, + "read": False, + "create": False, + "update": False, + "delete": False, + }, + acq_order_fiction_saxon, + ) # As staff member with "full_permissions" role : # - Search :: everything # - Read :: record of its own organisation # - Create/Update/Delete :: record of its own organisation login_user(system_librarian_martigny.user) - check_permission(AcqOrderPermissionPolicy, { - 'search': True, - 'read': True, - 'create': True, - 'update': True, - 'delete': True - }, acq_order_fiction_saxon) - check_permission(AcqOrderPermissionPolicy, { - 'search': True, - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, acq_order_fiction_sion) + check_permission( + AcqOrderPermissionPolicy, + {"search": True, "read": True, "create": True, "update": True, "delete": True}, + acq_order_fiction_saxon, + ) + check_permission( + AcqOrderPermissionPolicy, + { + "search": True, + "read": False, + "create": False, + "update": False, + "delete": False, + }, + acq_order_fiction_sion, + ) # Special case !!! An acquisition order linked to a closed budget # should be considered as rolled over and can't be updated. with mock.patch( - 'rero_ils.modules.acquisition.acq_orders.api.AcqOrder.is_active', - False + "rero_ils.modules.acquisition.acq_orders.api.AcqOrder.is_active", False ): - check_permission(AcqOrderPermissionPolicy, { - 'search': True, - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, acq_order_fiction_martigny) + check_permission( + AcqOrderPermissionPolicy, + { + "search": True, + "read": True, + "create": False, + "update": False, + "delete": False, + }, + acq_order_fiction_martigny, + ) diff --git a/tests/api/acq_orders/test_acq_orders_rest.py b/tests/api/acq_orders/test_acq_orders_rest.py index 3a980a1035..ec2b84e1cd 100644 --- a/tests/api/acq_orders/test_acq_orders_rest.py +++ b/tests/api/acq_orders/test_acq_orders_rest.py @@ -23,145 +23,135 @@ from api.acquisition.acq_utils import _del_resource, _make_resource from flask import url_for from invenio_accounts.testutils import login_user_via_session -from utils import VerifyRecordPermissionPatch, get_json, postdata, \ to_relative_url +from utils import VerifyRecordPermissionPatch, get_json, postdata, to_relative_url from rero_ils.modules.acquisition.acq_orders.models import AcqOrderStatus from rero_ils.modules.utils import get_ref_for_pid -@mock.patch('invenio_records_rest.views.verify_record_permission', - mock.MagicMock(return_value=VerifyRecordPermissionPatch)) +@mock.patch( + "invenio_records_rest.views.verify_record_permission", + mock.MagicMock(return_value=VerifyRecordPermissionPatch), +) def test_acq_orders_library_facets( client, org_martigny, acq_order_fiction_martigny, rero_json_header ): """Test record retrieval.""" - url = url_for('invenio_records_rest.acor_list', view='org1') + url = url_for("invenio_records_rest.acor_list", view="org1") res = client.get(url, headers=rero_json_header) data = get_json(res) - facets = ['library', 'vendor', 'type', 'status', 'account', 'order_date'] - assert
all(facet_name in data['aggregations'] for facet_name in facets) + facets = ["library", "vendor", "type", "status", "account", "order_date"] + assert all(facet_name in data["aggregations"] for facet_name in facets) -@mock.patch('invenio_records_rest.views.verify_record_permission', - mock.MagicMock(return_value=VerifyRecordPermissionPatch)) +@mock.patch( + "invenio_records_rest.views.verify_record_permission", + mock.MagicMock(return_value=VerifyRecordPermissionPatch), +) def test_acq_order_get(client, acq_order_fiction_martigny): """Test record retrieval.""" - item_url = url_for('invenio_records_rest.acor_item', pid_value='acor1') + item_url = url_for("invenio_records_rest.acor_item", pid_value="acor1") acq_order = deepcopy(acq_order_fiction_martigny) res = client.get(item_url) assert res.status_code == 200 - assert res.headers['ETag'] == f'"{acq_order.revision_id}"' + assert res.headers["ETag"] == f'"{acq_order.revision_id}"' data = get_json(res) - assert acq_order.dumps() == data['metadata'] + assert acq_order.dumps() == data["metadata"] # Check metadata - for k in ['created', 'updated', 'metadata', 'links']: + for k in ["created", "updated", "metadata", "links"]: assert k in data # Check self links - res = client.get(to_relative_url(data['links']['self'])) + res = client.get(to_relative_url(data["links"]["self"])) assert res.status_code == 200 assert data == get_json(res) - assert acq_order.dumps() == data['metadata'] + assert acq_order.dumps() == data["metadata"] - assert acq_order.get_account_statement() == \ - data['metadata']['account_statement'] + assert acq_order.get_account_statement() == data["metadata"]["account_statement"] - list_url = url_for('invenio_records_rest.acor_list', pid='acor1') + list_url = url_for("invenio_records_rest.acor_list", pid="acor1") res = client.get(list_url) assert res.status_code == 200 data = get_json(res) - metadata = data['hits']['hits'][0]['metadata'] + metadata = data["hits"]["hits"][0]["metadata"] # remove dynamically added fields - del metadata['organisation'] - del metadata['order_lines'] - del metadata['receipts'] - metadata.pop('budget', None) - assert data['hits']['hits'][0]['metadata'] == acq_order.replace_refs() + del metadata["organisation"] + del metadata["order_lines"] + del metadata["receipts"] + metadata.pop("budget", None) + assert data["hits"]["hits"][0]["metadata"] == acq_order.replace_refs() -@mock.patch('invenio_records_rest.views.verify_record_permission', - mock.MagicMock(return_value=VerifyRecordPermissionPatch)) +@mock.patch( + "invenio_records_rest.views.verify_record_permission", + mock.MagicMock(return_value=VerifyRecordPermissionPatch), +) def test_acq_orders_post_put_delete( - client, org_martigny, vendor2_martigny, acq_order_fiction_saxon, - acq_order_fiction_saxon_data, json_header + client, + org_martigny, + vendor2_martigny, + acq_order_fiction_saxon, + acq_order_fiction_saxon_data, + json_header, ): """Test record retrieval.""" # Create record / POST - acq_order_fiction_saxon_data.pop('pid', None) + acq_order_fiction_saxon_data.pop("pid", None) res, data = postdata( - client, - 'invenio_records_rest.acor_list', - acq_order_fiction_saxon_data + client, "invenio_records_rest.acor_list", acq_order_fiction_saxon_data ) assert res.status_code == 201 # Check that the returned record matches the given data - assert data['metadata'].pop('account_statement') == { - 'provisional': { - 'total_amount': 0, - 'quantity': 0 - }, - 'expenditure': { - 'total_amount': 0, - 'quantity': 0 - } + assert 
data["metadata"].pop("account_statement") == { + "provisional": {"total_amount": 0, "quantity": 0}, + "expenditure": {"total_amount": 0, "quantity": 0}, } - assert data['metadata'].pop('status') == AcqOrderStatus.PENDING - assert not data['metadata'].pop('order_date', None) - acq_order_fiction_saxon_data['pid'] = data['metadata']['pid'] - assert data['metadata'] == acq_order_fiction_saxon_data + assert data["metadata"].pop("status") == AcqOrderStatus.PENDING + assert not data["metadata"].pop("order_date", None) + acq_order_fiction_saxon_data["pid"] = data["metadata"]["pid"] + assert data["metadata"] == acq_order_fiction_saxon_data - pid = data['metadata']['pid'] - item_url = url_for('invenio_records_rest.acor_item', pid_value=pid) - list_url = url_for('invenio_records_rest.acor_list', q=f'pid:{pid}') + pid = data["metadata"]["pid"] + item_url = url_for("invenio_records_rest.acor_item", pid_value=pid) + list_url = url_for("invenio_records_rest.acor_list", q=f"pid:{pid}") res = client.get(item_url) assert res.status_code == 200 data = get_json(res) - assert data['metadata'].pop('account_statement') == { - 'provisional': { - 'total_amount': 0, - 'quantity': 0 - }, - 'expenditure': { - 'total_amount': 0, - 'quantity': 0 - } + assert data["metadata"].pop("account_statement") == { + "provisional": {"total_amount": 0, "quantity": 0}, + "expenditure": {"total_amount": 0, "quantity": 0}, } - assert data['metadata'].pop('status') == AcqOrderStatus.PENDING - assert not data['metadata'].pop('order_date', None) - assert acq_order_fiction_saxon_data == data['metadata'] + assert data["metadata"].pop("status") == AcqOrderStatus.PENDING + assert not data["metadata"].pop("order_date", None) + assert acq_order_fiction_saxon_data == data["metadata"] # Update record/PUT api_data = acq_order_fiction_saxon_data - api_data['reference'] = 'Test reference' - res = client.put( - item_url, - data=json.dumps(api_data), - headers=json_header - ) + api_data["reference"] = "Test reference" + res = client.put(item_url, data=json.dumps(api_data), headers=json_header) assert res.status_code == 200 # Check that the returned record matches the given data data = get_json(res) - assert data['metadata']['reference'] == 'Test reference' + assert data["metadata"]["reference"] == "Test reference" res = client.get(item_url) assert res.status_code == 200 data = get_json(res) - assert data['metadata']['reference'] == 'Test reference' + assert data["metadata"]["reference"] == "Test reference" res = client.get(list_url) assert res.status_code == 200 - data = get_json(res)['hits']['hits'][0] - assert data['metadata']['reference'] == 'Test reference' + data = get_json(res)["hits"]["hits"][0] + assert data["metadata"]["reference"] == "Test reference" # Delete record/DELETE res = client.delete(item_url) @@ -171,95 +161,109 @@ def test_acq_orders_post_put_delete( def test_acq_orders_can_delete( - client, document, acq_order_fiction_martigny, - acq_order_line_fiction_martigny, acq_receipt_fiction_martigny): + client, + document, + acq_order_fiction_martigny, + acq_order_line_fiction_martigny, + acq_receipt_fiction_martigny, +): """Test can delete an acq order.""" can, reasons = acq_order_fiction_martigny.can_delete assert not can - assert reasons['links']['receipts'] + assert reasons["links"]["receipts"] def test_filtered_acq_orders_get( - client, librarian_martigny, acq_order_fiction_martigny, - librarian_sion, acq_order_fiction_sion): + client, + librarian_martigny, + acq_order_fiction_martigny, + librarian_sion, + acq_order_fiction_sion, 
+): """Test acq accounts filter by organisation.""" - list_url = url_for('invenio_records_rest.acor_list') + list_url = url_for("invenio_records_rest.acor_list") res = client.get(list_url) assert res.status_code == 401 # Martigny login_user_via_session(client, librarian_martigny.user) - list_url = url_for('invenio_records_rest.acor_list') + list_url = url_for("invenio_records_rest.acor_list") res = client.get(list_url) assert res.status_code == 200 data = get_json(res) - assert data['hits']['total']['value'] == 2 + assert data["hits"]["total"]["value"] == 2 # Sion login_user_via_session(client, librarian_sion.user) - list_url = url_for('invenio_records_rest.acor_list') + list_url = url_for("invenio_records_rest.acor_list") res = client.get(list_url) assert res.status_code == 200 data = get_json(res) - assert data['hits']['total']['value'] == 1 + assert data["hits"]["total"]["value"] == 1 def test_acq_order_history_api( - client, vendor_martigny, lib_martigny, rero_json_header, librarian_martigny, - acq_account_fiction_martigny, document, budget_2020_martigny + client, + vendor_martigny, + lib_martigny, + rero_json_header, + librarian_martigny, + acq_account_fiction_martigny, + document, + budget_2020_martigny, ): """Test acquisition order history API.""" login_user_via_session(client, librarian_martigny.user) # STEP#0 :: create order related to each other. data = { - 'vendor': {'$ref': get_ref_for_pid('vndr', vendor_martigny.pid)}, - 'library': {'$ref': get_ref_for_pid('lib', lib_martigny.pid)}, - 'type': 'monograph', + "vendor": {"$ref": get_ref_for_pid("vndr", vendor_martigny.pid)}, + "library": {"$ref": get_ref_for_pid("lib", lib_martigny.pid)}, + "type": "monograph", } - acor1 = _make_resource(client, 'acor', data) - data['previousVersion'] = {'$ref': get_ref_for_pid('acor', acor1.pid)} - acor2 = _make_resource(client, 'acor', data) - data['previousVersion'] = {'$ref': get_ref_for_pid('acor', acor2.pid)} - acor3 = _make_resource(client, 'acor', data) + acor1 = _make_resource(client, "acor", data) + data["previousVersion"] = {"$ref": get_ref_for_pid("acor", acor1.pid)} + acor2 = _make_resource(client, "acor", data) + data["previousVersion"] = {"$ref": get_ref_for_pid("acor", acor2.pid)} + acor3 = _make_resource(client, "acor", data) # add an order line to any order. 
This will change the order history item # label; the label should be set to the name of the budget related to the order line acac = acq_account_fiction_martigny data = { - 'acq_account': {'$ref': get_ref_for_pid('acac', acac.pid)}, - 'acq_order': {'$ref': get_ref_for_pid('acor', acor2.pid)}, - 'document': {'$ref': get_ref_for_pid('doc', document.pid)}, - 'quantity': 2, - 'amount': 50 + "acq_account": {"$ref": get_ref_for_pid("acac", acac.pid)}, + "acq_order": {"$ref": get_ref_for_pid("acor", acor2.pid)}, + "document": {"$ref": get_ref_for_pid("doc", document.pid)}, + "quantity": 2, + "amount": 50, } - acol1 = _make_resource(client, 'acol', data) + acol1 = _make_resource(client, "acol", data) # STEP#1 :: Call API and analyze response # * with unknown acquisition order --> 404 # * with valid acquisition order --> valid response - url = url_for('api_order.order_history', order_pid='dummy') + url = url_for("api_order.order_history", order_pid="dummy") res = client.get(url, headers=rero_json_header) assert res.status_code == 404 - url = url_for('api_order.order_history', order_pid=acor2.pid) + url = url_for("api_order.order_history", order_pid=acor2.pid) res = client.get(url, headers=rero_json_header) data = get_json(res) assert len(data) == 3 - assert data[0]['$ref'] == get_ref_for_pid('acor', acor1.pid) - assert data[1]['$ref'] == get_ref_for_pid('acor', acor2.pid) - assert data[1]['label'] == budget_2020_martigny.name - assert data[2]['$ref'] == get_ref_for_pid('acor', acor3.pid) + assert data[0]["$ref"] == get_ref_for_pid("acor", acor1.pid) + assert data[1]["$ref"] == get_ref_for_pid("acor", acor2.pid) + assert data[1]["label"] == budget_2020_martigny.name + assert data[2]["$ref"] == get_ref_for_pid("acor", acor3.pid) # STEP#2 :: Ensure a linked order cannot be deleted reasons = acor2.reasons_not_to_delete() - assert reasons['links']['orders'] == 1 + assert reasons["links"]["orders"] == 1 # STEP#X :: delete created resources - _del_resource(client, 'acol', acol1.pid) - _del_resource(client, 'acor', acor3.pid) - _del_resource(client, 'acor', acor2.pid) - _del_resource(client, 'acor', acor1.pid) + _del_resource(client, "acol", acol1.pid) + _del_resource(client, "acor", acor3.pid) + _del_resource(client, "acor", acor2.pid) + _del_resource(client, "acor", acor1.pid) diff --git a/tests/api/acq_orders/test_acq_orders_serializers.py b/tests/api/acq_orders/test_acq_orders_serializers.py index cb5238605c..3a56cd3a85 100644 --- a/tests/api/acq_orders/test_acq_orders_serializers.py +++ b/tests/api/acq_orders/test_acq_orders_serializers.py @@ -23,29 +23,37 @@ from utils import get_csv -def test_csv_serializer(client, csv_header, librarian_martigny, - acq_account_fiction_martigny, vendor_martigny, - acq_order_fiction_martigny, - acq_order_line_fiction_martigny, - acq_order_line2_fiction_martigny, - acq_receipt_fiction_martigny, - acq_receipt_line_1_fiction_martigny, - acq_receipt_line_2_fiction_martigny): +def test_csv_serializer( + client, + csv_header, + librarian_martigny, + acq_account_fiction_martigny, + vendor_martigny, + acq_order_fiction_martigny, + acq_order_line_fiction_martigny, + acq_order_line2_fiction_martigny, + acq_receipt_fiction_martigny, + acq_receipt_line_1_fiction_martigny, + acq_receipt_line_2_fiction_martigny, +): """Test CSV formatter.""" login_user_via_session(client, librarian_martigny.user) - list_url = url_for('api_exports.acq_order_export', - q=f'pid:{acq_order_fiction_martigny.pid}') + list_url = url_for( + "api_exports.acq_order_export", q=f"pid:{acq_order_fiction_martigny.pid}" + )
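A note on the hunk just below: the long CSV header assertion is the clearest example in this test suite of the rewrite black applies to multi-line strings, replacing backslash continuations with adjacent string literals inside a single pair of parentheses. A minimal standalone sketch (not code from this patch) of why that form needs no `+` operators:

    # Adjacent string literals are concatenated by the parser at compile
    # time, so the parenthesized form below is one string object at runtime.
    expected = (
        '"order_pid","order_reference",'
        '"order_date"'
    )
    assert expected == '"order_pid","order_reference","order_date"'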
response = client.get(list_url, headers=csv_header) assert response.status_code == 200 data = get_csv(response) assert data - assert '"order_pid","order_reference","order_date","order_staff_note",' \ - '"order_vendor_note","order_type","order_status","vendor_name",' \ - '"document_pid","document_creator","document_title",' \ - '"document_publisher","document_publication_year",' \ - '"document_edition_statement","document_series_statement",' \ - '"document_isbn","account_name","account_number",' \ - '"order_lines_priority","order_lines_notes","order_lines_status",' \ - '"ordered_quantity","ordered_unit_price","ordered_amount",' \ - '"receipt_reference","received_quantity","received_amount",' \ - '"receipt_date"' in data + assert ( + '"order_pid","order_reference","order_date","order_staff_note",' + '"order_vendor_note","order_type","order_status","vendor_name",' + '"document_pid","document_creator","document_title",' + '"document_publisher","document_publication_year",' + '"document_edition_statement","document_series_statement",' + '"document_isbn","account_name","account_number",' + '"order_lines_priority","order_lines_notes","order_lines_status",' + '"ordered_quantity","ordered_unit_price","ordered_amount",' + '"receipt_reference","received_quantity","received_amount",' + '"receipt_date"' in data + ) diff --git a/tests/api/acq_orders/test_acq_orders_views.py b/tests/api/acq_orders/test_acq_orders_views.py index cedeba3319..48484b1ee5 100644 --- a/tests/api/acq_orders/test_acq_orders_views.py +++ b/tests/api/acq_orders/test_acq_orders_views.py @@ -23,112 +23,123 @@ from utils import get_json, postdata from rero_ils.modules.acquisition.acq_order_lines.api import AcqOrderLine -from rero_ils.modules.acquisition.acq_order_lines.models import \ - AcqOrderLineStatus +from rero_ils.modules.acquisition.acq_order_lines.models import AcqOrderLineStatus from rero_ils.modules.acquisition.acq_orders.api import AcqOrder from rero_ils.modules.acquisition.acq_orders.models import AcqOrderStatus from rero_ils.modules.notifications.api import Notification -from rero_ils.modules.notifications.models import NotificationChannel, \ - NotificationStatus, NotificationType, RecipientType -from rero_ils.modules.vendors.dumpers import \ - VendorAcquisitionNotificationDumper +from rero_ils.modules.notifications.models import ( + NotificationChannel, + NotificationStatus, + NotificationType, + RecipientType, +) +from rero_ils.modules.vendors.dumpers import VendorAcquisitionNotificationDumper from rero_ils.modules.vendors.models import VendorContactType def test_order_notification_preview( - app, client, librarian_martigny, + app, + client, + librarian_martigny, acq_order_fiction_martigny, acq_order_line_fiction_martigny, - acq_order_line2_fiction_martigny + acq_order_line2_fiction_martigny, ): """Test order notification preview API.""" login_user_via_session(client, librarian_martigny.user) acor = acq_order_fiction_martigny - url = url_for('api_order.order_notification_preview', order_pid=acor.pid) + url = url_for("api_order.order_notification_preview", order_pid=acor.pid) res = client.get(url) assert res.status_code == 200 data = res.json - assert 'recipient_suggestions' in data and 'preview' in data - assert 'message' not in data + assert "recipient_suggestions" in data and "preview" in data + assert "message" not in data # update the vendor communication_language to force it to an unknown # related template and retry. 
- with mock.patch.object(VendorAcquisitionNotificationDumper, 'dump', - mock.MagicMock(return_value={ - 'name': 'test vendor name', - 'email': 'test@vendor.com', - 'language': 'dummy' - })): + with mock.patch.object( + VendorAcquisitionNotificationDumper, + "dump", + mock.MagicMock( + return_value={ + "name": "test vendor name", + "email": "test@vendor.com", + "language": "dummy", + } + ), + ): response = client.get(url) assert response.status_code == 200 - assert all(field in response.json - for field in ['recipient_suggestions', 'preview']) + assert all( + field in response.json for field in ["recipient_suggestions", "preview"] + ) def test_send_order( - app, client, librarian_martigny, lib_martigny, + app, + client, + librarian_martigny, + lib_martigny, acq_order_fiction_martigny, - acq_order_line_fiction_martigny, vendor_martigny, - acq_order_line2_fiction_martigny, acq_order_line3_fiction_martigny, - mailbox + acq_order_line_fiction_martigny, + vendor_martigny, + acq_order_line2_fiction_martigny, + acq_order_line3_fiction_martigny, + mailbox, ): """Test send order notification API.""" login_user_via_session(client, librarian_martigny.user) acor = acq_order_fiction_martigny - address = vendor_martigny\ - .get_contact(VendorContactType.DEFAULT)\ - .get('email') - emails = [{'type': 'cc', 'address': address}] + address = vendor_martigny.get_contact(VendorContactType.DEFAULT).get("email") + emails = [{"type": "cc", "address": address}] mailbox.clear() # test when parent order is not in database res, data = postdata( client, - 'api_order.send_order', + "api_order.send_order", data=dict(emails=emails), - url_data=dict(order_pid='toto') + url_data=dict(order_pid="toto"), ) assert res.status_code == 404 # test when email data is not provided res, data = postdata( - client, - 'api_order.send_order', - url_data=dict(order_pid=acor.pid) + client, "api_order.send_order", url_data=dict(order_pid=acor.pid) ) assert res.status_code == 400 # test when email data provided but empty res, data = postdata( client, - 'api_order.send_order', + "api_order.send_order", data=dict(emails=[]), - url_data=dict(order_pid=acor.pid) + url_data=dict(order_pid=acor.pid), ) assert res.status_code == 400 # test when email data provided and has no "to" email address res, data = postdata( client, - 'api_order.send_order', + "api_order.send_order", data=dict(emails=emails), - url_data=dict(order_pid=acor.pid) + url_data=dict(order_pid=acor.pid), ) assert res.status_code == 400 - assert 'required' in data['message'] and '`to`' in data['message'] + assert "required" in data["message"] and "`to`" in data["message"] # have an order line with a status different than approved and ensure it # will not be ordered l3 = AcqOrderLine.get_record_by_pid(acq_order_line3_fiction_martigny.pid) - l3['is_cancelled'] = True + l3["is_cancelled"] = True l3.update(l3, dbcommit=True, reindex=True) # test send order with correct input parameters emails = [ - {'type': 'to', 'address': address}, - {'type': 'reply_to', 'address': lib_martigny.get('email')} + {"type": "to", "address": address}, + {"type": "reply_to", "address": lib_martigny.get("email")}, ] res, data = postdata( client, - 'api_order.send_order', + "api_order.send_order", data=dict(emails=emails), - url_data=dict(order_pid=acor.pid) + url_data=dict(order_pid=acor.pid), ) data = get_json(res) assert res.status_code == 200 @@ -150,7 +161,7 @@ def test_send_order( # ensure that created notification is well constructed from the associated # order and vendor - notification_pid = 
data.get('data').get('pid') + notification_pid = data.get("data").get("pid") notif = Notification.get_record_by_pid(notification_pid) assert notif.organisation_pid == acor.organisation_pid assert notif.aggregation_key == str(notif.id) @@ -160,11 +171,10 @@ def test_send_order( assert notif.library_pid == acor.library_pid assert notif.can_be_cancelled() == (False, None) assert notif.get_communication_channel() == NotificationChannel.EMAIL - assert notif.get_language_to_use() == \ - vendor_martigny.get('communication_language') + assert notif.get_language_to_use() == vendor_martigny.get("communication_language") assert address in notif.get_recipients(RecipientType.TO) # Check mail content message = mailbox[-1] - shipping = lib_martigny['acquisition_settings']['shipping_informations'] - assert shipping.get('extra') and shipping.get('extra') in message.body + shipping = lib_martigny["acquisition_settings"]["shipping_informations"] + assert shipping.get("extra") and shipping.get("extra") in message.body diff --git a/tests/api/acq_receipt_lines/test_acq_receipt_lines_permissions.py b/tests/api/acq_receipt_lines/test_acq_receipt_lines_permissions.py index dcc85889ca..9b72207bbd 100644 --- a/tests/api/acq_receipt_lines/test_acq_receipt_lines_permissions.py +++ b/tests/api/acq_receipt_lines/test_acq_receipt_lines_permissions.py @@ -22,107 +22,137 @@ from flask_security import login_user from utils import check_permission -from rero_ils.modules.acquisition.acq_receipt_lines.permissions import \ - AcqReceiptLinePermissionPolicy +from rero_ils.modules.acquisition.acq_receipt_lines.permissions import ( + AcqReceiptLinePermissionPolicy, +) def test_receipt_lines_permissions( - org_martigny, vendor2_martigny, patron_martigny, - system_librarian_martigny, librarian_martigny, librarian2_martigny, + org_martigny, + vendor2_martigny, + patron_martigny, + system_librarian_martigny, + librarian_martigny, + librarian2_martigny, acq_receipt_line_1_fiction_martigny, - acq_receipt_line_fiction_saxon, acq_receipt_line_fiction_sion + acq_receipt_line_fiction_saxon, + acq_receipt_line_fiction_sion, ): """Test receipt line permissions class.""" # Anonymous user & Patron :: None action allowed identity_changed.send( current_app._get_current_object(), identity=AnonymousIdentity() ) - check_permission(AcqReceiptLinePermissionPolicy, { - 'search': False, - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, {}) + check_permission( + AcqReceiptLinePermissionPolicy, + { + "search": False, + "read": False, + "create": False, + "update": False, + "delete": False, + }, + {}, + ) login_user(patron_martigny.user) - check_permission(AcqReceiptLinePermissionPolicy, { - 'search': False, - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, acq_receipt_line_1_fiction_martigny) + check_permission( + AcqReceiptLinePermissionPolicy, + { + "search": False, + "read": False, + "create": False, + "update": False, + "delete": False, + }, + acq_receipt_line_1_fiction_martigny, + ) # As staff member without any specific access : # - None action allowed # - except read record of its own library (pro_read_only) login_user(librarian2_martigny.user) - check_permission(AcqReceiptLinePermissionPolicy, { - 'search': True, - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, acq_receipt_line_1_fiction_martigny) - check_permission(AcqReceiptLinePermissionPolicy, { - 'search': True, - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, 
acq_receipt_line_fiction_sion) + check_permission( + AcqReceiptLinePermissionPolicy, + { + "search": True, + "read": True, + "create": False, + "update": False, + "delete": False, + }, + acq_receipt_line_1_fiction_martigny, + ) + check_permission( + AcqReceiptLinePermissionPolicy, + { + "search": True, + "read": False, + "create": False, + "update": False, + "delete": False, + }, + acq_receipt_line_fiction_sion, + ) # As staff member with "library-administration" role : # - Search :: everything # - Read :: record of its own library # - Create/Update/Delete :: record of its own library login_user(librarian_martigny.user) - check_permission(AcqReceiptLinePermissionPolicy, { - 'search': True, - 'read': True, - 'create': True, - 'update': True, - 'delete': True - }, acq_receipt_line_1_fiction_martigny) - check_permission(AcqReceiptLinePermissionPolicy, { - 'search': True, - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, acq_receipt_line_fiction_saxon) + check_permission( + AcqReceiptLinePermissionPolicy, + {"search": True, "read": True, "create": True, "update": True, "delete": True}, + acq_receipt_line_1_fiction_martigny, + ) + check_permission( + AcqReceiptLinePermissionPolicy, + { + "search": True, + "read": False, + "create": False, + "update": False, + "delete": False, + }, + acq_receipt_line_fiction_saxon, + ) # As staff member with "full_permissions" role : # - Search :: everything # - Read :: record of its own organisation # - Create/Update/Delete :: record of its own organisation login_user(system_librarian_martigny.user) - check_permission(AcqReceiptLinePermissionPolicy, { - 'search': True, - 'read': True, - 'create': True, - 'update': True, - 'delete': True - }, acq_receipt_line_fiction_saxon) - check_permission(AcqReceiptLinePermissionPolicy, { - 'search': True, - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, acq_receipt_line_fiction_sion) + check_permission( + AcqReceiptLinePermissionPolicy, + {"search": True, "read": True, "create": True, "update": True, "delete": True}, + acq_receipt_line_fiction_saxon, + ) + check_permission( + AcqReceiptLinePermissionPolicy, + { + "search": True, + "read": False, + "create": False, + "update": False, + "delete": False, + }, + acq_receipt_line_fiction_sion, + ) # Special case !!! An acquisition receipt line linked to a closed budget # should be considered as rolled over and can't be updated. with mock.patch( - 'rero_ils.modules.acquisition.acq_receipt_lines.api.AcqReceiptLine.' - 'is_active', - False + "rero_ils.modules.acquisition.acq_receipt_lines.api.AcqReceiptLine."
+ "is_active", + False, ): - check_permission(AcqReceiptLinePermissionPolicy, { - 'search': True, - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, acq_receipt_line_1_fiction_martigny) + check_permission( + AcqReceiptLinePermissionPolicy, + { + "search": True, + "read": True, + "create": False, + "update": False, + "delete": False, + }, + acq_receipt_line_1_fiction_martigny, + ) diff --git a/tests/api/acq_receipt_lines/test_acq_receipt_lines_rest.py b/tests/api/acq_receipt_lines/test_acq_receipt_lines_rest.py index c262645614..804b195f78 100644 --- a/tests/api/acq_receipt_lines/test_acq_receipt_lines_rest.py +++ b/tests/api/acq_receipt_lines/test_acq_receipt_lines_rest.py @@ -24,110 +24,108 @@ import mock from flask import url_for from invenio_accounts.testutils import login_user_via_session -from utils import VerifyRecordPermissionPatch, get_json, postdata, \ - to_relative_url +from utils import VerifyRecordPermissionPatch, get_json, postdata, to_relative_url -from rero_ils.modules.acquisition.acq_receipt_lines.models import \ - AcqReceiptLineNoteType +from rero_ils.modules.acquisition.acq_receipt_lines.models import AcqReceiptLineNoteType -@mock.patch('invenio_records_rest.views.verify_record_permission', - mock.MagicMock(return_value=VerifyRecordPermissionPatch)) +@mock.patch( + "invenio_records_rest.views.verify_record_permission", + mock.MagicMock(return_value=VerifyRecordPermissionPatch), +) def test_acq_receipt_lines_get(client, acq_receipt_line_1_fiction_martigny): """Test record retrieval.""" - item_url = url_for('invenio_records_rest.acrl_item', pid_value='acrl1') + item_url = url_for("invenio_records_rest.acrl_item", pid_value="acrl1") acq_receipt_line = deepcopy(acq_receipt_line_1_fiction_martigny) res = client.get(item_url) assert res.status_code == 200 - assert res.headers['ETag'] == f'"{acq_receipt_line.revision_id}"' + assert res.headers["ETag"] == f'"{acq_receipt_line.revision_id}"' data = get_json(res) - assert acq_receipt_line.dumps() == data['metadata'] + assert acq_receipt_line.dumps() == data["metadata"] # Check metadata - for k in ['created', 'updated', 'metadata', 'links']: + for k in ["created", "updated", "metadata", "links"]: assert k in data # Check self links - res = client.get(to_relative_url(data['links']['self'])) + res = client.get(to_relative_url(data["links"]["self"])) assert res.status_code == 200 assert data == get_json(res) - assert acq_receipt_line.dumps() == data['metadata'] + assert acq_receipt_line.dumps() == data["metadata"] - list_url = url_for('invenio_records_rest.acrl_list', pid='acrl1') + list_url = url_for("invenio_records_rest.acrl_list", pid="acrl1") res = client.get(list_url) assert res.status_code == 200 data = get_json(res) - metadata = data['hits']['hits'][0]['metadata'] + metadata = data["hits"]["hits"][0]["metadata"] - total_amount = metadata['total_amount'] + total_amount = metadata["total_amount"] assert total_amount == 1000.0 # remove dynamically added fields - del metadata['acq_account'] - del metadata['total_amount'] - del metadata['document'] - assert data['hits']['hits'][0]['metadata'] == \ - acq_receipt_line.replace_refs() - - -@mock.patch('invenio_records_rest.views.verify_record_permission', - mock.MagicMock(return_value=VerifyRecordPermissionPatch)) -def test_acq_receipt_lines_post_put_delete(client, org_martigny, - vendor2_martigny, - acq_order_line_fiction_saxon, - acq_receipt_fiction_sion, - acq_receipt_line_fiction_saxon, - json_header): + del metadata["acq_account"] + del 
metadata["total_amount"] + del metadata["document"] + assert data["hits"]["hits"][0]["metadata"] == acq_receipt_line.replace_refs() + + +@mock.patch( + "invenio_records_rest.views.verify_record_permission", + mock.MagicMock(return_value=VerifyRecordPermissionPatch), +) +def test_acq_receipt_lines_post_put_delete( + client, + org_martigny, + vendor2_martigny, + acq_order_line_fiction_saxon, + acq_receipt_fiction_sion, + acq_receipt_line_fiction_saxon, + json_header, +): """Test record retrieval.""" # Create record / POST - item_url = url_for('invenio_records_rest.acrl_item', pid_value='1') - list_url = url_for('invenio_records_rest.acrl_list', q='pid:1') + item_url = url_for("invenio_records_rest.acrl_item", pid_value="1") + list_url = url_for("invenio_records_rest.acrl_list", q="pid:1") - acq_receipt_line_fiction_saxon['pid'] = '1' + acq_receipt_line_fiction_saxon["pid"] = "1" res, data = postdata( - client, - 'invenio_records_rest.acrl_list', - acq_receipt_line_fiction_saxon + client, "invenio_records_rest.acrl_list", acq_receipt_line_fiction_saxon ) assert res.status_code == 201 # Check that the returned record matches the given data - assert data['metadata'] == acq_receipt_line_fiction_saxon + assert data["metadata"] == acq_receipt_line_fiction_saxon res = client.get(item_url) assert res.status_code == 200 data = get_json(res) - assert acq_receipt_line_fiction_saxon == data['metadata'] + assert acq_receipt_line_fiction_saxon == data["metadata"] # Update record/PUT data = acq_receipt_line_fiction_saxon - notes = [{'content': 'test', 'type': AcqReceiptLineNoteType.STAFF}] - data['notes'] = notes - res = client.put( - item_url, - data=json.dumps(data), - headers=json_header - ) + notes = [{"content": "test", "type": AcqReceiptLineNoteType.STAFF}] + data["notes"] = notes + res = client.put(item_url, data=json.dumps(data), headers=json_header) assert res.status_code == 200 # Check that the returned record matches the given data data = get_json(res) - assert data['metadata']['notes'] == notes + assert data["metadata"]["notes"] == notes res = client.get(item_url) assert res.status_code == 200 data = get_json(res) - assert data['metadata']['notes'] == notes + assert data["metadata"]["notes"] == notes res = client.get(list_url) assert res.status_code == 200 - data = get_json(res)['hits']['hits'][0] - assert data['metadata']['notes'] == notes + data = get_json(res)["hits"]["hits"][0] + assert data["metadata"]["notes"] == notes # Delete record/DELETE res = client.delete(item_url) @@ -138,37 +136,42 @@ def test_acq_receipt_lines_post_put_delete(client, org_martigny, def test_acq_receipt_lines_can_delete( - client, document, acq_receipt_line_1_fiction_martigny): + client, document, acq_receipt_line_1_fiction_martigny +): """Test can delete an acq receipt line.""" can, reasons = acq_receipt_line_1_fiction_martigny.can_delete assert can - assert 'links' not in reasons + assert "links" not in reasons def test_filtered_acq_receipt_lines_get( - client, librarian_martigny, acq_receipt_line_1_fiction_martigny, - acq_receipt_line_2_fiction_martigny, - librarian_sion, acq_receipt_line_fiction_sion): + client, + librarian_martigny, + acq_receipt_line_1_fiction_martigny, + acq_receipt_line_2_fiction_martigny, + librarian_sion, + acq_receipt_line_fiction_sion, +): """Test acq receipt lines filter by organisation.""" - list_url = url_for('invenio_records_rest.acrl_list') + list_url = url_for("invenio_records_rest.acrl_list") res = client.get(list_url) assert res.status_code == 401 # Martigny 
login_user_via_session(client, librarian_martigny.user) - list_url = url_for('invenio_records_rest.acrl_list') + list_url = url_for("invenio_records_rest.acrl_list") res = client.get(list_url) assert res.status_code == 200 data = get_json(res) - assert data['hits']['total']['value'] == 3 + assert data["hits"]["total"]["value"] == 3 # Sion login_user_via_session(client, librarian_sion.user) - list_url = url_for('invenio_records_rest.acrl_list') + list_url = url_for("invenio_records_rest.acrl_list") res = client.get(list_url) assert res.status_code == 200 data = get_json(res) - assert data['hits']['total']['value'] == 1 + assert data["hits"]["total"]["value"] == 1 diff --git a/tests/api/acq_receipts/test_acq_receipts_permissions.py b/tests/api/acq_receipts/test_acq_receipts_permissions.py index dad9ba6b4c..d1831fc316 100644 --- a/tests/api/acq_receipts/test_acq_receipts_permissions.py +++ b/tests/api/acq_receipts/test_acq_receipts_permissions.py @@ -22,15 +22,21 @@ from flask_security import login_user from utils import check_permission -from rero_ils.modules.acquisition.acq_receipts.permissions import \ - AcqReceiptPermissionPolicy +from rero_ils.modules.acquisition.acq_receipts.permissions import ( + AcqReceiptPermissionPolicy, +) def test_receipts_permissions( - patron_martigny, librarian_martigny, librarian2_martigny, - system_librarian_martigny, org_martigny, vendor2_martigny, - acq_receipt_fiction_sion, acq_receipt_fiction_saxon, - acq_receipt_fiction_martigny + patron_martigny, + librarian_martigny, + librarian2_martigny, + system_librarian_martigny, + org_martigny, + vendor2_martigny, + acq_receipt_fiction_sion, + acq_receipt_fiction_saxon, + acq_receipt_fiction_martigny, ): """Test receipt permissions class.""" @@ -38,91 +44,114 @@ def test_receipts_permissions( identity_changed.send( current_app._get_current_object(), identity=AnonymousIdentity() ) - check_permission(AcqReceiptPermissionPolicy, { - 'search': False, - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, {}) + check_permission( + AcqReceiptPermissionPolicy, + { + "search": False, + "read": False, + "create": False, + "update": False, + "delete": False, + }, + {}, + ) login_user(patron_martigny.user) - check_permission(AcqReceiptPermissionPolicy, { - 'search': False, - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, acq_receipt_fiction_martigny) + check_permission( + AcqReceiptPermissionPolicy, + { + "search": False, + "read": False, + "create": False, + "update": False, + "delete": False, + }, + acq_receipt_fiction_martigny, + ) # As staff member without any specific access : # - None action allowed # - except read record of its own library (pro_read_only) login_user(librarian2_martigny.user) - check_permission(AcqReceiptPermissionPolicy, { - 'search': True, - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, acq_receipt_fiction_martigny) - check_permission(AcqReceiptPermissionPolicy, { - 'search': True, - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, acq_receipt_fiction_sion) + check_permission( + AcqReceiptPermissionPolicy, + { + "search": True, + "read": True, + "create": False, + "update": False, + "delete": False, + }, + acq_receipt_fiction_martigny, + ) + check_permission( + AcqReceiptPermissionPolicy, + { + "search": True, + "read": False, + "create": False, + "update": False, + "delete": False, + }, + acq_receipt_fiction_sion, + ) # As staff member with "library-administration" role : # - Search :: 
everything # - Read :: record of its own library # - Create/Update/Delete :: record of its own library login_user(librarian_martigny.user) - check_permission(AcqReceiptPermissionPolicy, { - 'search': True, - 'read': True, - 'create': True, - 'update': True, - 'delete': True - }, acq_receipt_fiction_martigny) - check_permission(AcqReceiptPermissionPolicy, { - 'search': True, - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, acq_receipt_fiction_saxon) + check_permission( + AcqReceiptPermissionPolicy, + {"search": True, "read": True, "create": True, "update": True, "delete": True}, + acq_receipt_fiction_martigny, + ) + check_permission( + AcqReceiptPermissionPolicy, + { + "search": True, + "read": False, + "create": False, + "update": False, + "delete": False, + }, + acq_receipt_fiction_saxon, + ) # As staff member with "full_permissions" role : # - Search :: everything # - Read :: record of its own organisation # - Create/Update/Delete :: record of its own organisation login_user(system_librarian_martigny.user) - check_permission(AcqReceiptPermissionPolicy, { - 'search': True, - 'read': True, - 'create': True, - 'update': True, - 'delete': True - }, acq_receipt_fiction_saxon) - check_permission(AcqReceiptPermissionPolicy, { - 'search': True, - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, acq_receipt_fiction_sion) + check_permission( + AcqReceiptPermissionPolicy, + {"search": True, "read": True, "create": True, "update": True, "delete": True}, + acq_receipt_fiction_saxon, + ) + check_permission( + AcqReceiptPermissionPolicy, + { + "search": True, + "read": False, + "create": False, + "update": False, + "delete": False, + }, + acq_receipt_fiction_sion, + ) # Special case !!! An acquisition receipt linked to a closed budget # should be considered as rolled over and can't be updated.
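The `with mock.patch(...)` block that follows leans on a behaviour worth spelling out: when the replacement argument passed to `mock.patch` is a plain value rather than a mock object, that value is swapped in for the named attribute, here a property, for the duration of the block only. A self-contained sketch with toy names (`Receipt` below is illustrative, not the real rero_ils class):

    from unittest import mock

    class Receipt:
        @property
        def is_active(self):
            return True  # pretend the linked budget is still open

    receipt = Receipt()
    assert receipt.is_active
    with mock.patch.object(Receipt, "is_active", False):
        assert not receipt.is_active  # rolled over: write actions are denied
    assert receipt.is_active  # the original property is restored on exit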
with mock.patch( - 'rero_ils.modules.acquisition.acq_receipts.api.AcqReceipt.is_active', - False + "rero_ils.modules.acquisition.acq_receipts.api.AcqReceipt.is_active", False ): - check_permission(AcqReceiptPermissionPolicy, { - 'search': True, - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, acq_receipt_fiction_martigny) + check_permission( + AcqReceiptPermissionPolicy, + { + "search": True, + "read": True, + "create": False, + "update": False, + "delete": False, + }, + acq_receipt_fiction_martigny, + ) diff --git a/tests/api/acq_receipts/test_acq_receipts_rest.py b/tests/api/acq_receipts/test_acq_receipts_rest.py index 41deb03b35..ada66126a8 100644 --- a/tests/api/acq_receipts/test_acq_receipts_rest.py +++ b/tests/api/acq_receipts/test_acq_receipts_rest.py @@ -24,102 +24,97 @@ import mock from flask import url_for from invenio_accounts.testutils import login_user_via_session -from utils import VerifyRecordPermissionPatch, get_json, postdata, \ - to_relative_url +from utils import VerifyRecordPermissionPatch, get_json, postdata, to_relative_url -@mock.patch('invenio_records_rest.views.verify_record_permission', - mock.MagicMock(return_value=VerifyRecordPermissionPatch)) +@mock.patch( + "invenio_records_rest.views.verify_record_permission", + mock.MagicMock(return_value=VerifyRecordPermissionPatch), +) def test_acq_receipt_get( - client, - org_martigny, vendor2_martigny, - acq_receipt_fiction_saxon + client, org_martigny, vendor2_martigny, acq_receipt_fiction_saxon ): """Test record retrieval.""" acre = acq_receipt_fiction_saxon acq_receipt = deepcopy(acre) - item_url = url_for('invenio_records_rest.acre_item', pid_value=acre.pid) - list_url = url_for('invenio_records_rest.acre_list', q=f'pid:{acre.pid}') + item_url = url_for("invenio_records_rest.acre_item", pid_value=acre.pid) + list_url = url_for("invenio_records_rest.acre_list", q=f"pid:{acre.pid}") res = client.get(item_url) assert res.status_code == 200 - assert res.headers['ETag'] == f'"{acq_receipt.revision_id}"' + assert res.headers["ETag"] == f'"{acq_receipt.revision_id}"' data = get_json(res) - assert acq_receipt.dumps() == data['metadata'] + assert acq_receipt.dumps() == data["metadata"] # Check metadata - for k in ['created', 'updated', 'metadata', 'links']: + for k in ["created", "updated", "metadata", "links"]: assert k in data # Check self links - res = client.get(to_relative_url(data['links']['self'])) + res = client.get(to_relative_url(data["links"]["self"])) assert res.status_code == 200 assert data == get_json(res) - assert acq_receipt.dumps() == data['metadata'] + assert acq_receipt.dumps() == data["metadata"] res = client.get(list_url) assert res.status_code == 200 data = get_json(res) - metadata = data['hits']['hits'][0]['metadata'] + metadata = data["hits"]["hits"][0]["metadata"] # remove dynamically added fields - del metadata['quantity'] - del metadata['receipt_lines'] - del metadata['total_amount'] + del metadata["quantity"] + del metadata["receipt_lines"] + del metadata["total_amount"] assert metadata == acq_receipt.replace_refs() -@mock.patch('invenio_records_rest.views.verify_record_permission', - mock.MagicMock(return_value=VerifyRecordPermissionPatch)) -def test_acq_receipts_post_put_delete(client, org_martigny, vendor2_martigny, - acq_receipt_fiction_saxon, - json_header): +@mock.patch( + "invenio_records_rest.views.verify_record_permission", + mock.MagicMock(return_value=VerifyRecordPermissionPatch), +) +def test_acq_receipts_post_put_delete( + client, org_martigny, 
vendor2_martigny, acq_receipt_fiction_saxon, json_header +): """Test record retrieval.""" # Create record / POST - item_url = url_for('invenio_records_rest.acre_item', pid_value='1') - list_url = url_for('invenio_records_rest.acre_list', q='pid:1') + item_url = url_for("invenio_records_rest.acre_item", pid_value="1") + list_url = url_for("invenio_records_rest.acre_list", q="pid:1") - acq_receipt_fiction_saxon['pid'] = '1' + acq_receipt_fiction_saxon["pid"] = "1" res, data = postdata( - client, - 'invenio_records_rest.acre_list', - acq_receipt_fiction_saxon + client, "invenio_records_rest.acre_list", acq_receipt_fiction_saxon ) assert res.status_code == 201 # Check that the returned record matches the given data - assert data['metadata'] == acq_receipt_fiction_saxon + assert data["metadata"] == acq_receipt_fiction_saxon res = client.get(item_url) assert res.status_code == 200 data = get_json(res) - assert acq_receipt_fiction_saxon == data['metadata'] + assert acq_receipt_fiction_saxon == data["metadata"] # Update record/PUT data = acq_receipt_fiction_saxon - data['exchange_rate'] = 1.01 - res = client.put( - item_url, - data=json.dumps(data), - headers=json_header - ) + data["exchange_rate"] = 1.01 + res = client.put(item_url, data=json.dumps(data), headers=json_header) assert res.status_code == 200 # Check that the returned record matches the given data data = get_json(res) - assert data['metadata']['exchange_rate'] == 1.01 + assert data["metadata"]["exchange_rate"] == 1.01 res = client.get(item_url) assert res.status_code == 200 data = get_json(res) - assert data['metadata']['exchange_rate'] == 1.01 + assert data["metadata"]["exchange_rate"] == 1.01 res = client.get(list_url) assert res.status_code == 200 - data = get_json(res)['hits']['hits'][0] - assert data['metadata']['exchange_rate'] == 1.01 + data = get_json(res)["hits"]["hits"][0] + assert data["metadata"]["exchange_rate"] == 1.01 # Delete record/DELETE res = client.delete(item_url) @@ -130,39 +125,43 @@ def test_acq_receipts_post_put_delete(client, org_martigny, vendor2_martigny, def test_acq_receipts_can_delete( - client, document, acq_receipt_fiction_martigny, - acq_receipt_line_1_fiction_martigny): + client, document, acq_receipt_fiction_martigny, acq_receipt_line_1_fiction_martigny +): """Test can delete an acq receipt.""" # We can delete an AcqReceipt even if some children AcqReceiptLines exists # because they will be cascading deleted if we delete the parent AcqReceipt can, reasons = acq_receipt_fiction_martigny.can_delete assert can - assert 'acq_receipt_lines' not in reasons.get('links', {}) + assert "acq_receipt_lines" not in reasons.get("links", {}) def test_filtered_acq_receipts_get( - client, librarian_martigny, acq_receipt_fiction_martigny, - librarian_sion, acq_receipt_fiction_sion): + client, + librarian_martigny, + acq_receipt_fiction_martigny, + librarian_sion, + acq_receipt_fiction_sion, +): """Test acq receipts filter by organisation.""" - list_url = url_for('invenio_records_rest.acre_list') + list_url = url_for("invenio_records_rest.acre_list") res = client.get(list_url) assert res.status_code == 401 # Martigny login_user_via_session(client, librarian_martigny.user) - list_url = url_for('invenio_records_rest.acre_list') + list_url = url_for("invenio_records_rest.acre_list") res = client.get(list_url) assert res.status_code == 200 data = get_json(res) - assert data['hits']['total']['value'] == 2 + assert data["hits"]["total"]["value"] == 2 # Sion login_user_via_session(client, librarian_sion.user) - list_url = 
url_for('invenio_records_rest.acre_list') + list_url = url_for("invenio_records_rest.acre_list") res = client.get(list_url) assert res.status_code == 200 data = get_json(res) - assert data['hits']['total']['value'] == 1 + assert data["hits"]["total"]["value"] == 1 diff --git a/tests/api/acq_receipts/test_acq_receipts_views.py b/tests/api/acq_receipts/test_acq_receipts_views.py index 0ef80ad502..b022d8d738 100644 --- a/tests/api/acq_receipts/test_acq_receipts_views.py +++ b/tests/api/acq_receipts/test_acq_receipts_views.py @@ -19,14 +19,22 @@ from invenio_accounts.testutils import login_user_via_session from utils import get_json, postdata -from rero_ils.modules.acquisition.acq_receipts.models import \ - AcqReceiptLineCreationStatus +from rero_ils.modules.acquisition.acq_receipts.models import ( + AcqReceiptLineCreationStatus, +) -def test_create_lines(app, client, librarian_martigny, lib_martigny, - librarian_sion, acq_order_line_fiction_martigny, - acq_order_line2_fiction_martigny, - acq_receipt_fiction_martigny, json_header): +def test_create_lines( + app, + client, + librarian_martigny, + lib_martigny, + librarian_sion, + acq_order_line_fiction_martigny, + acq_order_line2_fiction_martigny, + acq_receipt_fiction_martigny, + json_header, +): """Test create_lines API.""" login_user_via_session(client, librarian_martigny.user) receipt = acq_receipt_fiction_martigny @@ -34,60 +42,50 @@ def test_create_lines(app, client, librarian_martigny, lib_martigny, # test when parent order is not in database res, data = postdata( client, - 'api_receipt.lines', + "api_receipt.lines", data=receipt_lines, - url_data=dict(receipt_pid='toto') + url_data=dict(receipt_pid="toto"), ) assert res.status_code == 404 # test when receipt_lines data is not provided res, data = postdata( - client, - 'api_receipt.lines', - url_data=dict(receipt_pid=receipt.pid) + client, "api_receipt.lines", url_data=dict(receipt_pid=receipt.pid) ) assert res.status_code == 400 # test when receipt_lines data provided but empty res, data = postdata( client, - 'api_receipt.lines', + "api_receipt.lines", data=receipt_lines, - url_data=dict(receipt_pid=receipt.pid) + url_data=dict(receipt_pid=receipt.pid), ) assert res.status_code == 400 # test when receipt_lines data provided receipt_lines = [ { - "acq_order_line": { - "$ref": "https://bib.rero.ch/api/acq_order_lines/acol1" - }, + "acq_order_line": {"$ref": "https://bib.rero.ch/api/acq_order_lines/acol1"}, "amount": 1000, "quantity": 1, - "receipt_date": "2021-11-01" + "receipt_date": "2021-11-01", }, { - "acq_order_line": { - "$ref": "https://bib.rero.ch/api/acq_order_lines/acol2" - }, + "acq_order_line": {"$ref": "https://bib.rero.ch/api/acq_order_lines/acol2"}, "amount": 500, "quantity": 1, - "receipt_date": "2021-11-03" + "receipt_date": "2021-11-03", }, - { - "acq_order_line": { - "$ref": "https://bib.rero.ch/api/acq_order_lines/acol2" - } - } + {"acq_order_line": {"$ref": "https://bib.rero.ch/api/acq_order_lines/acol2"}}, ] res, data = postdata( client, - 'api_receipt.lines', + "api_receipt.lines", data=receipt_lines, - url_data=dict(receipt_pid=receipt.pid) + url_data=dict(receipt_pid=receipt.pid), ) assert res.status_code == 200 - response = get_json(res).get('response') - assert response[0]['status'] == AcqReceiptLineCreationStatus.SUCCESS - assert response[1]['status'] == AcqReceiptLineCreationStatus.SUCCESS - assert response[2]['status'] == AcqReceiptLineCreationStatus.FAILURE - assert response[2]['error_message'] + response = get_json(res).get("response") + assert 
response[0]["status"] == AcqReceiptLineCreationStatus.SUCCESS + assert response[1]["status"] == AcqReceiptLineCreationStatus.SUCCESS + assert response[2]["status"] == AcqReceiptLineCreationStatus.FAILURE + assert response[2]["error_message"] diff --git a/tests/api/acquisition/acq_utils.py b/tests/api/acquisition/acq_utils.py index 5bf6201704..a662881b53 100644 --- a/tests/api/acquisition/acq_utils.py +++ b/tests/api/acquisition/acq_utils.py @@ -24,8 +24,10 @@ from rero_ils.modules.utils import get_record_class_from_schema_or_pid_type -@mock.patch('invenio_records_rest.views.verify_record_permission', - mock.MagicMock(return_value=VerifyRecordPermissionPatch)) +@mock.patch( + "invenio_records_rest.views.verify_record_permission", + mock.MagicMock(return_value=VerifyRecordPermissionPatch), +) def _make_resource(client, pid_type, input_data): """Dynamic creation of resource using REST_API. @@ -34,16 +36,18 @@ def _make_resource(client, pid_type, input_data): :param input_data: the resource data. """ record_class = get_record_class_from_schema_or_pid_type(pid_type=pid_type) - url_alias = f'invenio_records_rest.{pid_type}_list' + url_alias = f"invenio_records_rest.{pid_type}_list" res, data = postdata(client, url_alias, input_data) if res.status_code == 201: - return record_class.get_record_by_pid(data['metadata']['pid']) + return record_class.get_record_by_pid(data["metadata"]["pid"]) else: - raise Exception(data['message']) + raise Exception(data["message"]) -@mock.patch('invenio_records_rest.views.verify_record_permission', - mock.MagicMock(return_value=VerifyRecordPermissionPatch)) +@mock.patch( + "invenio_records_rest.views.verify_record_permission", + mock.MagicMock(return_value=VerifyRecordPermissionPatch), +) def _del_resource(client, pid_type, pid): """Delete a resource using the REST API. @@ -51,6 +55,6 @@ def _del_resource(client, pid_type, pid): :param pid_type: the type of resource to create. :param pid: resource pid to delete. """ - item_url = url_for(f'invenio_records_rest.{pid_type}_item', pid_value=pid) + item_url = url_for(f"invenio_records_rest.{pid_type}_item", pid_value=pid) res = client.delete(item_url) assert res.status_code == 204 diff --git a/tests/api/acquisition/test_acquisition_dumpers.py b/tests/api/acquisition/test_acquisition_dumpers.py index 83dd4b47d2..aa7f5e7372 100644 --- a/tests/api/acquisition/test_acquisition_dumpers.py +++ b/tests/api/acquisition/test_acquisition_dumpers.py @@ -17,14 +17,13 @@ # along with this program. If not, see . 
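The two `acq_utils.py` helpers reformatted above drive most of the resource fixtures in the acquisition tests. A hedged usage sketch; the `"vndr"` pid type matches the vendor endpoints used elsewhere in these tests, but the payload is illustrative and would have to satisfy the real vendor JSON schema:

    from api.acquisition.acq_utils import _del_resource, _make_resource

    def test_make_then_delete(client):
        # _make_resource POSTs to invenio_records_rest.vndr_list and, on a
        # 201 response, returns the freshly created record loaded by pid.
        record = _make_resource(client, "vndr", {"name": "Example vendor"})
        assert record.pid
        # _del_resource DELETEs invenio_records_rest.vndr_item, asserting 204.
        _del_resource(client, "vndr", record.pid)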
"""Test library dumpers.""" -from rero_ils.modules.acquisition.acq_orders.dumpers import \ - AcqOrderNotificationDumper +from rero_ils.modules.acquisition.acq_orders.dumpers import AcqOrderNotificationDumper def test_acquisition_dumpers( acq_order_fiction_martigny, acq_order_line_fiction_martigny, - acq_order_line2_fiction_martigny + acq_order_line2_fiction_martigny, ): """Test acquisition dumpers.""" @@ -33,7 +32,7 @@ def test_acquisition_dumpers( # * LibraryAcquisitionNotificationDumper acor = acq_order_fiction_martigny dump_data = acor.dumps(dumper=AcqOrderNotificationDumper()) - assert len(dump_data['order_lines']) == 2 - assert dump_data['library']['shipping_informations'] - assert dump_data['library']['billing_informations'] - assert dump_data['vendor'] + assert len(dump_data["order_lines"]) == 2 + assert dump_data["library"]["shipping_informations"] + assert dump_data["library"]["billing_informations"] + assert dump_data["vendor"] diff --git a/tests/api/acquisition/test_acquisition_reception_workflow.py b/tests/api/acquisition/test_acquisition_reception_workflow.py index 1ba92e928e..ee032a910c 100644 --- a/tests/api/acquisition/test_acquisition_reception_workflow.py +++ b/tests/api/acquisition/test_acquisition_reception_workflow.py @@ -24,29 +24,43 @@ from utils import VerifyRecordPermissionPatch, postdata from rero_ils.modules.acquisition.acq_order_lines.api import AcqOrderLine -from rero_ils.modules.acquisition.acq_order_lines.models import \ - AcqOrderLineStatus +from rero_ils.modules.acquisition.acq_order_lines.models import AcqOrderLineStatus from rero_ils.modules.acquisition.acq_orders.api import AcqOrder from rero_ils.modules.acquisition.acq_orders.models import AcqOrderStatus -from rero_ils.modules.acquisition.acq_receipt_lines.api import \ - AcqReceiptLinesSearch -from rero_ils.modules.acquisition.acq_receipts.api import AcqReceipt, \ - AcqReceiptLine, AcqReceiptsSearch -from rero_ils.modules.acquisition.acq_receipts.models import \ - AcqReceiptLineCreationStatus +from rero_ils.modules.acquisition.acq_receipt_lines.api import AcqReceiptLinesSearch +from rero_ils.modules.acquisition.acq_receipts.api import ( + AcqReceipt, + AcqReceiptLine, + AcqReceiptsSearch, +) +from rero_ils.modules.acquisition.acq_receipts.models import ( + AcqReceiptLineCreationStatus, +) from rero_ils.modules.notifications.api import Notification -from rero_ils.modules.notifications.models import NotificationChannel, \ - NotificationStatus, NotificationType, RecipientType +from rero_ils.modules.notifications.models import ( + NotificationChannel, + NotificationStatus, + NotificationType, + RecipientType, +) from rero_ils.modules.utils import get_ref_for_pid from rero_ils.modules.vendors.models import VendorContactType -@mock.patch('invenio_records_rest.views.verify_record_permission', - mock.MagicMock(return_value=VerifyRecordPermissionPatch)) +@mock.patch( + "invenio_records_rest.views.verify_record_permission", + mock.MagicMock(return_value=VerifyRecordPermissionPatch), +) def test_acquisition_reception_workflow( - client, rero_json_header, org_martigny, - lib_martigny, lib_saxon, budget_2020_martigny, - vendor_martigny, librarian_martigny, document + client, + rero_json_header, + org_martigny, + lib_martigny, + lib_saxon, + budget_2020_martigny, + vendor_martigny, + librarian_martigny, + document, ): """Test complete acquisition workflow.""" @@ -67,57 +81,57 @@ def assert_account_data(accounts): # --> SXN.000b.00 # --> SXN.000s.00 data = { - 'name': 'Martigny root account', - 'number': 
'MTY.0000.00', - 'allocated_amount': 10000, - 'budget': {'$ref': get_ref_for_pid('budg', budget_2020_martigny.pid)}, - 'library': {'$ref': get_ref_for_pid('lib', lib_martigny.pid)} + "name": "Martigny root account", + "number": "MTY.0000.00", + "allocated_amount": 10000, + "budget": {"$ref": get_ref_for_pid("budg", budget_2020_martigny.pid)}, + "library": {"$ref": get_ref_for_pid("lib", lib_martigny.pid)}, } - m_root_acc = _make_resource(client, 'acac', data) + m_root_acc = _make_resource(client, "acac", data) data = { - 'name': 'Martigny Books child account', - 'number': 'MTY.000b.00', - 'allocated_amount': 2000, - 'budget': {'$ref': get_ref_for_pid('budg', budget_2020_martigny.pid)}, - 'library': {'$ref': get_ref_for_pid('lib', lib_martigny.pid)}, - 'parent': {'$ref': get_ref_for_pid('acac', m_root_acc.pid)}, + "name": "Martigny Books child account", + "number": "MTY.000b.00", + "allocated_amount": 2000, + "budget": {"$ref": get_ref_for_pid("budg", budget_2020_martigny.pid)}, + "library": {"$ref": get_ref_for_pid("lib", lib_martigny.pid)}, + "parent": {"$ref": get_ref_for_pid("acac", m_root_acc.pid)}, } - m_books_acc = _make_resource(client, 'acac', data) + m_books_acc = _make_resource(client, "acac", data) data = { - 'name': 'Martigny Serials child account', - 'number': 'MTY.000s.00', - 'allocated_amount': 3000, - 'budget': {'$ref': get_ref_for_pid('budg', budget_2020_martigny.pid)}, - 'library': {'$ref': get_ref_for_pid('lib', lib_martigny.pid)}, - 'parent': {'$ref': get_ref_for_pid('acac', m_root_acc.pid)}, + "name": "Martigny Serials child account", + "number": "MTY.000s.00", + "allocated_amount": 3000, + "budget": {"$ref": get_ref_for_pid("budg", budget_2020_martigny.pid)}, + "library": {"$ref": get_ref_for_pid("lib", lib_martigny.pid)}, + "parent": {"$ref": get_ref_for_pid("acac", m_root_acc.pid)}, } - m_serials_acc = _make_resource(client, 'acac', data) + m_serials_acc = _make_resource(client, "acac", data) data = { - 'name': 'Saxon root account', - 'number': 'SXN.0000.00', - 'allocated_amount': 20000, - 'budget': {'$ref': get_ref_for_pid('budg', budget_2020_martigny.pid)}, - 'library': {'$ref': get_ref_for_pid('lib', lib_saxon.pid)} + "name": "Saxon root account", + "number": "SXN.0000.00", + "allocated_amount": 20000, + "budget": {"$ref": get_ref_for_pid("budg", budget_2020_martigny.pid)}, + "library": {"$ref": get_ref_for_pid("lib", lib_saxon.pid)}, } - s_root_acc = _make_resource(client, 'acac', data) + s_root_acc = _make_resource(client, "acac", data) data = { - 'name': 'Saxon Books chid account', - 'number': 'SXN.000b.00', - 'allocated_amount': 2500, - 'budget': {'$ref': get_ref_for_pid('budg', budget_2020_martigny.pid)}, - 'library': {'$ref': get_ref_for_pid('lib', lib_saxon.pid)}, - 'parent': {'$ref': get_ref_for_pid('acac', s_root_acc.pid)}, + "name": "Saxon Books chid account", + "number": "SXN.000b.00", + "allocated_amount": 2500, + "budget": {"$ref": get_ref_for_pid("budg", budget_2020_martigny.pid)}, + "library": {"$ref": get_ref_for_pid("lib", lib_saxon.pid)}, + "parent": {"$ref": get_ref_for_pid("acac", s_root_acc.pid)}, } - s_books_acc = _make_resource(client, 'acac', data) + s_books_acc = _make_resource(client, "acac", data) data = { - 'name': 'Saxon Serials chid account', - 'number': 'SXN.000s.00', - 'allocated_amount': 4000, - 'budget': {'$ref': get_ref_for_pid('budg', budget_2020_martigny.pid)}, - 'library': {'$ref': get_ref_for_pid('lib', lib_saxon.pid)}, - 'parent': {'$ref': get_ref_for_pid('acac', s_root_acc.pid)}, + "name": "Saxon Serials chid account", + 
"number": "SXN.000s.00", + "allocated_amount": 4000, + "budget": {"$ref": get_ref_for_pid("budg", budget_2020_martigny.pid)}, + "library": {"$ref": get_ref_for_pid("lib", lib_saxon.pid)}, + "parent": {"$ref": get_ref_for_pid("acac", s_root_acc.pid)}, } - s_serials_acc = _make_resource(client, 'acac', data) + s_serials_acc = _make_resource(client, "acac", data) # For each account check data. the dict values are tuples. Each tuple # define `balance`, `expenditure`, `encumbrance` @@ -127,7 +141,7 @@ def assert_account_data(accounts): m_serials_acc: ((3000, 3000), (0, 0), (0, 0)), s_root_acc: ((13500, 20000), (0, 0), (0, 0)), s_books_acc: ((2500, 2500), (0, 0), (0, 0)), - s_serials_acc: ((4000, 4000), (0, 0), (0, 0)) + s_serials_acc: ((4000, 4000), (0, 0), (0, 0)), } assert_account_data(manual_controls) @@ -147,26 +161,26 @@ def assert_account_data(accounts): # Items quantity = 24: order total amount = 715 login_user_via_session(client, librarian_martigny.user) data = { - 'vendor': {'$ref': get_ref_for_pid('vndr', vendor_martigny.pid)}, - 'library': {'$ref': get_ref_for_pid('lib', lib_martigny.pid)}, - 'type': 'monograph', + "vendor": {"$ref": get_ref_for_pid("vndr", vendor_martigny.pid)}, + "library": {"$ref": get_ref_for_pid("lib", lib_martigny.pid)}, + "type": "monograph", } - order = _make_resource(client, 'acor', data) - assert order['reference'] == f'ORDER-{order.pid}' + order = _make_resource(client, "acor", data) + assert order["reference"] == f"ORDER-{order.pid}" assert order.get_order_provisional_total_amount() == 0 assert order.status == AcqOrderStatus.PENDING assert order.can_delete data = { - 'acq_account': {'$ref': get_ref_for_pid('acac', m_books_acc.pid)}, - 'acq_order': {'$ref': get_ref_for_pid('acor', order.pid)}, - 'document': {'$ref': get_ref_for_pid('doc', document.pid)}, - 'quantity': 5, - 'amount': 10 + "acq_account": {"$ref": get_ref_for_pid("acac", m_books_acc.pid)}, + "acq_order": {"$ref": get_ref_for_pid("acor", order.pid)}, + "document": {"$ref": get_ref_for_pid("doc", document.pid)}, + "quantity": 5, + "amount": 10, } - order_line_1 = _make_resource(client, 'acol', data) - order_line_1_ref = get_ref_for_pid('acol', order_line_1.pid) - assert order_line_1.get('total_amount') == 50 + order_line_1 = _make_resource(client, "acol", data) + order_line_1_ref = get_ref_for_pid("acol", order_line_1.pid) + assert order_line_1.get("total_amount") == 50 assert order_line_1.quantity == 5 assert order_line_1.received_quantity == 0 assert order_line_1.unreceived_quantity == 5 @@ -174,15 +188,15 @@ def assert_account_data(accounts): assert order_line_1.status == AcqOrderLineStatus.APPROVED data = { - 'acq_account': {'$ref': get_ref_for_pid('acac', m_books_acc.pid)}, - 'acq_order': {'$ref': get_ref_for_pid('acor', order.pid)}, - 'document': {'$ref': get_ref_for_pid('doc', document.pid)}, - 'quantity': 2, - 'amount': 50 + "acq_account": {"$ref": get_ref_for_pid("acac", m_books_acc.pid)}, + "acq_order": {"$ref": get_ref_for_pid("acor", order.pid)}, + "document": {"$ref": get_ref_for_pid("doc", document.pid)}, + "quantity": 2, + "amount": 50, } - order_line_2 = _make_resource(client, 'acol', data) - order_line_2_ref = get_ref_for_pid('acol', order_line_2.pid) - assert order_line_2.get('total_amount') == 100 + order_line_2 = _make_resource(client, "acol", data) + order_line_2_ref = get_ref_for_pid("acol", order_line_2.pid) + assert order_line_2.get("total_amount") == 100 assert order_line_2.quantity == 2 assert order_line_2.received_quantity == 0 assert order_line_2.unreceived_quantity 
== 2 @@ -190,15 +204,15 @@ def assert_account_data(accounts): assert order_line_2.status == AcqOrderLineStatus.APPROVED data = { - 'acq_account': {'$ref': get_ref_for_pid('acac', m_books_acc.pid)}, - 'acq_order': {'$ref': get_ref_for_pid('acor', order.pid)}, - 'document': {'$ref': get_ref_for_pid('doc', document.pid)}, - 'quantity': 3, - 'amount': 100 + "acq_account": {"$ref": get_ref_for_pid("acac", m_books_acc.pid)}, + "acq_order": {"$ref": get_ref_for_pid("acor", order.pid)}, + "document": {"$ref": get_ref_for_pid("doc", document.pid)}, + "quantity": 3, + "amount": 100, } - order_line_3 = _make_resource(client, 'acol', data) - order_line_3_ref = get_ref_for_pid('acol', order_line_3.pid) - assert order_line_3.get('total_amount') == 300 + order_line_3 = _make_resource(client, "acol", data) + order_line_3_ref = get_ref_for_pid("acol", order_line_3.pid) + assert order_line_3.get("total_amount") == 300 assert order_line_3.quantity == 3 assert order_line_3.received_quantity == 0 assert order_line_3.unreceived_quantity == 3 @@ -206,15 +220,15 @@ def assert_account_data(accounts): assert order_line_3.status == AcqOrderLineStatus.APPROVED data = { - 'acq_account': {'$ref': get_ref_for_pid('acac', m_serials_acc.pid)}, - 'acq_order': {'$ref': get_ref_for_pid('acor', order.pid)}, - 'document': {'$ref': get_ref_for_pid('doc', document.pid)}, - 'quantity': 3, - 'amount': 15 + "acq_account": {"$ref": get_ref_for_pid("acac", m_serials_acc.pid)}, + "acq_order": {"$ref": get_ref_for_pid("acor", order.pid)}, + "document": {"$ref": get_ref_for_pid("doc", document.pid)}, + "quantity": 3, + "amount": 15, } - order_line_4 = _make_resource(client, 'acol', data) - order_line_4_ref = get_ref_for_pid('acol', order_line_4.pid) - assert order_line_4.get('total_amount') == 45 + order_line_4 = _make_resource(client, "acol", data) + order_line_4_ref = get_ref_for_pid("acol", order_line_4.pid) + assert order_line_4.get("total_amount") == 45 assert order_line_4.quantity == 3 assert order_line_4.received_quantity == 0 assert order_line_4.unreceived_quantity == 3 @@ -222,15 +236,15 @@ def assert_account_data(accounts): assert order_line_4.status == AcqOrderLineStatus.APPROVED data = { - 'acq_account': {'$ref': get_ref_for_pid('acac', m_serials_acc.pid)}, - 'acq_order': {'$ref': get_ref_for_pid('acor', order.pid)}, - 'document': {'$ref': get_ref_for_pid('doc', document.pid)}, - 'quantity': 1, - 'amount': 150 + "acq_account": {"$ref": get_ref_for_pid("acac", m_serials_acc.pid)}, + "acq_order": {"$ref": get_ref_for_pid("acor", order.pid)}, + "document": {"$ref": get_ref_for_pid("doc", document.pid)}, + "quantity": 1, + "amount": 150, } - order_line_5 = _make_resource(client, 'acol', data) - order_line_5_ref = get_ref_for_pid('acol', order_line_5.pid) - assert order_line_5.get('total_amount') == 150 + order_line_5 = _make_resource(client, "acol", data) + order_line_5_ref = get_ref_for_pid("acol", order_line_5.pid) + assert order_line_5.get("total_amount") == 150 assert order_line_5.quantity == 1 assert order_line_5.received_quantity == 0 assert order_line_5.unreceived_quantity == 1 @@ -238,15 +252,15 @@ def assert_account_data(accounts): assert order_line_5.status == AcqOrderLineStatus.APPROVED data = { - 'acq_account': {'$ref': get_ref_for_pid('acac', m_serials_acc.pid)}, - 'acq_order': {'$ref': get_ref_for_pid('acor', order.pid)}, - 'document': {'$ref': get_ref_for_pid('doc', document.pid)}, - 'quantity': 10, - 'amount': 7 + "acq_account": {"$ref": get_ref_for_pid("acac", m_serials_acc.pid)}, + "acq_order": {"$ref": 
get_ref_for_pid("acor", order.pid)}, + "document": {"$ref": get_ref_for_pid("doc", document.pid)}, + "quantity": 10, + "amount": 7, } - order_line_6 = _make_resource(client, 'acol', data) - order_line_6_ref = get_ref_for_pid('acol', order_line_6.pid) - assert order_line_6.get('total_amount') == 70 + order_line_6 = _make_resource(client, "acol", data) + order_line_6_ref = get_ref_for_pid("acol", order_line_6.pid) + assert order_line_6.get("total_amount") == 70 assert order_line_6.quantity == 10 assert order_line_6.received_quantity == 0 assert order_line_6.unreceived_quantity == 10 @@ -268,7 +282,7 @@ def assert_account_data(accounts): m_serials_acc: ((2735, 2735), (0, 0), (265, 0)), s_root_acc: ((13500, 20000), (0, 0), (0, 0)), s_books_acc: ((2500, 2500), (0, 0), (0, 0)), - s_serials_acc: ((4000, 4000), (0, 0), (0, 0)) + s_serials_acc: ((4000, 4000), (0, 0), (0, 0)), } assert_account_data(manual_controls) @@ -287,13 +301,13 @@ def assert_account_data(accounts): # Total = 345 # Items quantity = 16: order total amount = 695 - order_line_2['quantity'] = 6 + order_line_2["quantity"] = 6 order_line_2.update(order_line_2, dbcommit=True, reindex=True) - order_line_3['is_cancelled'] = True + order_line_3["is_cancelled"] = True order_line_3.update(order_line_3, dbcommit=True, reindex=True) - order_line_5['quantity'] = 2 + order_line_5["quantity"] = 2 order_line_5.update(order_line_5, dbcommit=True, reindex=True) - order_line_6['is_cancelled'] = True + order_line_6["is_cancelled"] = True order_line_6.update(order_line_6, dbcommit=True, reindex=True) # ensure correct calculations and status again @@ -303,46 +317,46 @@ def assert_account_data(accounts): m_serials_acc: ((2655, 2655), (0, 0), (345, 0)), s_root_acc: ((13500, 20000), (0, 0), (0, 0)), s_books_acc: ((2500, 2500), (0, 0), (0, 0)), - s_serials_acc: ((4000, 4000), (0, 0), (0, 0)) + s_serials_acc: ((4000, 4000), (0, 0), (0, 0)), } assert_account_data(manual_controls) - assert order_line_6.get('total_amount') == 70 + assert order_line_6.get("total_amount") == 70 assert order_line_6.is_cancelled assert order_line_6.quantity == 10 assert order_line_6.received_quantity == 0 assert order_line_6.unreceived_quantity == 10 assert order_line_6.status == AcqOrderLineStatus.CANCELLED - assert order_line_5.get('total_amount') == 300 + assert order_line_5.get("total_amount") == 300 assert not order_line_5.is_cancelled assert order_line_5.quantity == 2 assert order_line_5.received_quantity == 0 assert order_line_5.unreceived_quantity == 2 assert order_line_5.status == AcqOrderLineStatus.APPROVED - assert order_line_4.get('total_amount') == 45 + assert order_line_4.get("total_amount") == 45 assert not order_line_4.is_cancelled assert order_line_4.quantity == 3 assert order_line_4.received_quantity == 0 assert order_line_4.unreceived_quantity == 3 assert order_line_4.status == AcqOrderLineStatus.APPROVED - assert order_line_3.get('total_amount') == 300 + assert order_line_3.get("total_amount") == 300 assert order_line_3.is_cancelled assert order_line_3.quantity == 3 assert order_line_3.received_quantity == 0 assert order_line_3.unreceived_quantity == 3 assert order_line_3.status == AcqOrderLineStatus.CANCELLED - assert order_line_2.get('total_amount') == 300 + assert order_line_2.get("total_amount") == 300 assert not order_line_2.is_cancelled assert order_line_2.quantity == 6 assert order_line_2.received_quantity == 0 assert order_line_2.unreceived_quantity == 6 assert order_line_2.status == AcqOrderLineStatus.APPROVED - assert order_line_1.get('total_amount') 
== 50 + assert order_line_1.get("total_amount") == 50 assert not order_line_1.is_cancelled assert order_line_1.quantity == 5 assert order_line_1.received_quantity == 0 @@ -354,31 +368,29 @@ def assert_account_data(accounts): # - check order lines (status, order-date) # - check order (status) # - check notification - address = vendor_martigny\ - .get_contact(VendorContactType.DEFAULT)\ - .get('email') + address = vendor_martigny.get_contact(VendorContactType.DEFAULT).get("email") emails = [ - {'type': 'to', 'address': address}, - {'type': 'reply_to', 'address': lib_martigny.get('email')} + {"type": "to", "address": address}, + {"type": "reply_to", "address": lib_martigny.get("email")}, ] res, data = postdata( client, - 'api_order.send_order', + "api_order.send_order", data=dict(emails=emails), - url_data=dict(order_pid=order.pid) + url_data=dict(order_pid=order.pid), ) assert res.status_code == 200 for order_line in [ - {'line': order_line_1, 'status': AcqOrderLineStatus.ORDERED}, - {'line': order_line_2, 'status': AcqOrderLineStatus.ORDERED}, - {'line': order_line_3, 'status': AcqOrderLineStatus.CANCELLED}, - {'line': order_line_4, 'status': AcqOrderLineStatus.ORDERED}, - {'line': order_line_5, 'status': AcqOrderLineStatus.ORDERED}, - {'line': order_line_6, 'status': AcqOrderLineStatus.CANCELLED}, + {"line": order_line_1, "status": AcqOrderLineStatus.ORDERED}, + {"line": order_line_2, "status": AcqOrderLineStatus.ORDERED}, + {"line": order_line_3, "status": AcqOrderLineStatus.CANCELLED}, + {"line": order_line_4, "status": AcqOrderLineStatus.ORDERED}, + {"line": order_line_5, "status": AcqOrderLineStatus.ORDERED}, + {"line": order_line_6, "status": AcqOrderLineStatus.CANCELLED}, ]: - line = AcqOrderLine.get_record_by_pid(order_line.get('line').pid) - assert line.status == order_line.get('status') + line = AcqOrderLine.get_record_by_pid(order_line.get("line").pid) + assert line.status == order_line.get("status") if line.status == AcqOrderLineStatus.CANCELLED: assert not line.order_date else: @@ -387,7 +399,7 @@ def assert_account_data(accounts): order = AcqOrder.get_record_by_pid(order.pid) assert order.status == AcqOrderStatus.ORDERED # notification testing - notification_pid = data.get('data').get('pid') + notification_pid = data.get("data").get("pid") notif = Notification.get_record_by_pid(notification_pid) assert notif.organisation_pid == order.organisation_pid assert notif.aggregation_key == str(notif.id) @@ -397,39 +409,38 @@ def assert_account_data(accounts): assert notif.library_pid == order.library_pid assert notif.can_be_cancelled() == (False, None) assert notif.get_communication_channel() == NotificationChannel.EMAIL - assert notif.get_language_to_use() == \ - vendor_martigny.get('communication_language') + assert notif.get_language_to_use() == vendor_martigny.get("communication_language") assert address in notif.get_recipients(RecipientType.TO) # STEP 5 :: CREATE A RECEIPT # * create a receipt without any order lines yet # * but with some adjustments - ref_acc_book = get_ref_for_pid('acac', m_books_acc.pid) - ref_acc_serial = get_ref_for_pid('acac', m_serials_acc.pid) + ref_acc_book = get_ref_for_pid("acac", m_books_acc.pid) + ref_acc_serial = get_ref_for_pid("acac", m_serials_acc.pid) data = { - 'acq_order': {'$ref': get_ref_for_pid('acor', order.pid)}, - 'exchange_rate': 1, - 'amount_adjustments': [ + "acq_order": {"$ref": get_ref_for_pid("acor", order.pid)}, + "exchange_rate": 1, + "amount_adjustments": [ { - 'label': 'handling fees', - 'amount': 2.0, - 'acq_account': {'$ref': 
ref_acc_book} + "label": "handling fees", + "amount": 2.0, + "acq_account": {"$ref": ref_acc_book}, }, { - 'label': 'discount', - 'amount': -1.0, - 'acq_account': {'$ref': ref_acc_book} + "label": "discount", + "amount": -1.0, + "acq_account": {"$ref": ref_acc_book}, }, { - 'label': 'handling fees', - 'amount': 10, - 'acq_account': {'$ref': ref_acc_serial} - } + "label": "handling fees", + "amount": 10, + "acq_account": {"$ref": ref_acc_serial}, + }, ], - 'library': {'$ref': get_ref_for_pid('lib', lib_martigny.pid)}, - 'organisation': {'$ref': get_ref_for_pid('org', org_martigny.pid)} + "library": {"$ref": get_ref_for_pid("lib", lib_martigny.pid)}, + "organisation": {"$ref": get_ref_for_pid("org", org_martigny.pid)}, } - receipt_1 = _make_resource(client, 'acre', data) + receipt_1 = _make_resource(client, "acre", data) assert receipt_1.total_amount == 11 # 2 - 1 + 10 assert receipt_1.can_delete @@ -439,7 +450,7 @@ def assert_account_data(accounts): m_serials_acc: ((2645, 2645), (10, 0), (345, 0)), s_root_acc: ((13500, 20000), (0, 0), (0, 0)), s_books_acc: ((2500, 2500), (0, 0), (0, 0)), - s_serials_acc: ((4000, 4000), (0, 0), (0, 0)) + s_serials_acc: ((4000, 4000), (0, 0), (0, 0)), } assert_account_data(manual_controls) @@ -452,65 +463,56 @@ def assert_account_data(accounts): # CHECK :: Not possible to receive quantity more than what you ordered res, data = postdata( client, - 'api_receipt.lines', - data=[{ - 'acq_order_line': {'$ref': order_line_1_ref}, - 'amount': 10, - 'quantity': 12, - 'receipt_date': '2021-11-01' - }], - url_data=dict(receipt_pid=receipt_1.pid) + "api_receipt.lines", + data=[ + { + "acq_order_line": {"$ref": order_line_1_ref}, + "amount": 10, + "quantity": 12, + "receipt_date": "2021-11-01", + } + ], + url_data=dict(receipt_pid=receipt_1.pid), ) assert res.status_code == 200 - response = data.get('response') - assert response[0]['status'] == AcqReceiptLineCreationStatus.FAILURE + response = data.get("response") + assert response[0]["status"] == AcqReceiptLineCreationStatus.FAILURE # partially receive one order with few quantities in receipt_1 res, data = postdata( client, - 'api_receipt.lines', - data=[{ - 'acq_order_line': {'$ref': order_line_1_ref}, - 'amount': 10, - 'quantity': 2, - 'vat_rate': 6, - 'receipt_date': '2021-11-01' - }], - url_data=dict(receipt_pid=receipt_1.pid) + "api_receipt.lines", + data=[ + { + "acq_order_line": {"$ref": order_line_1_ref}, + "amount": 10, + "quantity": 2, + "vat_rate": 6, + "receipt_date": "2021-11-01", + } + ], + url_data=dict(receipt_pid=receipt_1.pid), ) assert res.status_code == 200 - response = data.get('response') - assert response[0]['status'] == AcqReceiptLineCreationStatus.SUCCESS + response = data.get("response") + assert response[0]["status"] == AcqReceiptLineCreationStatus.SUCCESS # Test order and order lines - for order_line in [{ - 'line': order_line_1, - 'status': AcqOrderLineStatus.PARTIALLY_RECEIVED, - 'received': 2 - }, { - 'line': order_line_2, - 'status': AcqOrderLineStatus.ORDERED, - 'received': 0 - }, { - 'line': order_line_3, - 'status': AcqOrderLineStatus.CANCELLED, - 'received': 0 - }, { - 'line': order_line_4, - 'status': AcqOrderLineStatus.ORDERED, - 'received': 0 - }, { - 'line': order_line_5, - 'status': AcqOrderLineStatus.ORDERED, - 'received': 0 - }, { - 'line': order_line_6, - 'status': AcqOrderLineStatus.CANCELLED, - 'received': 0 - }]: - line = AcqOrderLine.get_record_by_pid(order_line.get('line').pid) - assert line.status == order_line.get('status') - assert line.received_quantity == 
order_line.get('received') + for order_line in [ + { + "line": order_line_1, + "status": AcqOrderLineStatus.PARTIALLY_RECEIVED, + "received": 2, + }, + {"line": order_line_2, "status": AcqOrderLineStatus.ORDERED, "received": 0}, + {"line": order_line_3, "status": AcqOrderLineStatus.CANCELLED, "received": 0}, + {"line": order_line_4, "status": AcqOrderLineStatus.ORDERED, "received": 0}, + {"line": order_line_5, "status": AcqOrderLineStatus.ORDERED, "received": 0}, + {"line": order_line_6, "status": AcqOrderLineStatus.CANCELLED, "received": 0}, + ]: + line = AcqOrderLine.get_record_by_pid(order_line.get("line").pid) + assert line.status == order_line.get("status") + assert line.received_quantity == order_line.get("received") order = AcqOrder.get_record_by_pid(order.pid) assert order.status == AcqOrderStatus.PARTIALLY_RECEIVED @@ -520,7 +522,7 @@ def assert_account_data(accounts): m_serials_acc: ((2645, 2645), (10, 0), (345, 0)), s_root_acc: ((13500, 20000), (0, 0), (0, 0)), s_books_acc: ((2500, 2500), (0, 0), (0, 0)), - s_serials_acc: ((4000, 4000), (0, 0), (0, 0)) + s_serials_acc: ((4000, 4000), (0, 0), (0, 0)), } assert_account_data(manual_controls) @@ -531,76 +533,77 @@ def assert_account_data(accounts): # except `order_line_5` should have the RECEIVED STATUS # * complete the order reception to receive the `order_line_5` data = { - 'exchange_rate': 1, - 'acq_order': {'$ref': get_ref_for_pid('acor', order.pid)}, - 'library': {'$ref': get_ref_for_pid('lib', lib_martigny.pid)}, - 'organisation': {'$ref': get_ref_for_pid('org', org_martigny.pid)} + "exchange_rate": 1, + "acq_order": {"$ref": get_ref_for_pid("acor", order.pid)}, + "library": {"$ref": get_ref_for_pid("lib", lib_martigny.pid)}, + "organisation": {"$ref": get_ref_for_pid("org", org_martigny.pid)}, } - receipt_2 = _make_resource(client, 'acre', data) - - data = [{ - 'acq_order_line': {'$ref': order_line_1_ref}, - 'amount': 10, - 'quantity': 3, - 'receipt_date': '2021-11-01' - }, { - 'acq_order_line': {'$ref': order_line_2_ref}, - 'amount': 50, - 'quantity': 6, - 'receipt_date': '2021-11-01' - }, { - 'acq_order_line': {'$ref': order_line_4_ref}, - 'amount': 15, - 'quantity': 3, - 'receipt_date': '2021-11-01' - }, { - 'acq_order_line': {'$ref': order_line_5_ref}, - 'amount': 150, - 'quantity': 12, # too many items ! Max quantity should be 2 - 'receipt_date': '2021-11-01' - }] + receipt_2 = _make_resource(client, "acre", data) + + data = [ + { + "acq_order_line": {"$ref": order_line_1_ref}, + "amount": 10, + "quantity": 3, + "receipt_date": "2021-11-01", + }, + { + "acq_order_line": {"$ref": order_line_2_ref}, + "amount": 50, + "quantity": 6, + "receipt_date": "2021-11-01", + }, + { + "acq_order_line": {"$ref": order_line_4_ref}, + "amount": 15, + "quantity": 3, + "receipt_date": "2021-11-01", + }, + { + "acq_order_line": {"$ref": order_line_5_ref}, + "amount": 150, + "quantity": 12, # too many items ! 
Max quantity should be 2 + "receipt_date": "2021-11-01", + }, + ] res, data = postdata( - client, - 'api_receipt.lines', - data=data, - url_data=dict(receipt_pid=receipt_2.pid) + client, "api_receipt.lines", data=data, url_data=dict(receipt_pid=receipt_2.pid) ) assert res.status_code == 200 - response = data.get('response') - assert response[0]['status'] == AcqReceiptLineCreationStatus.SUCCESS - assert response[1]['status'] == AcqReceiptLineCreationStatus.SUCCESS - assert response[2]['status'] == AcqReceiptLineCreationStatus.SUCCESS - assert response[3]['status'] == AcqReceiptLineCreationStatus.FAILURE + response = data.get("response") + assert response[0]["status"] == AcqReceiptLineCreationStatus.SUCCESS + assert response[1]["status"] == AcqReceiptLineCreationStatus.SUCCESS + assert response[2]["status"] == AcqReceiptLineCreationStatus.SUCCESS + assert response[3]["status"] == AcqReceiptLineCreationStatus.FAILURE # Test order and order lines for order_line in [ - {'line': order_line_1, 'status': AcqOrderLineStatus.RECEIVED}, - {'line': order_line_2, 'status': AcqOrderLineStatus.RECEIVED}, - {'line': order_line_3, 'status': AcqOrderLineStatus.CANCELLED}, - {'line': order_line_4, 'status': AcqOrderLineStatus.RECEIVED}, - {'line': order_line_5, 'status': AcqOrderLineStatus.ORDERED}, - {'line': order_line_6, 'status': AcqOrderLineStatus.CANCELLED} + {"line": order_line_1, "status": AcqOrderLineStatus.RECEIVED}, + {"line": order_line_2, "status": AcqOrderLineStatus.RECEIVED}, + {"line": order_line_3, "status": AcqOrderLineStatus.CANCELLED}, + {"line": order_line_4, "status": AcqOrderLineStatus.RECEIVED}, + {"line": order_line_5, "status": AcqOrderLineStatus.ORDERED}, + {"line": order_line_6, "status": AcqOrderLineStatus.CANCELLED}, ]: - line = AcqOrderLine.get_record_by_pid(order_line.get('line').pid) - assert line.status == order_line.get('status') + line = AcqOrderLine.get_record_by_pid(order_line.get("line").pid) + assert line.status == order_line.get("status") # Receive the last pending order_line - data = [{ - 'acq_order_line': {'$ref': order_line_5_ref}, - 'amount': 150, - 'quantity': 2, - 'receipt_date': '2021-11-01' - }] + data = [ + { + "acq_order_line": {"$ref": order_line_5_ref}, + "amount": 150, + "quantity": 2, + "receipt_date": "2021-11-01", + } + ] res, data = postdata( - client, - 'api_receipt.lines', - data=data, - url_data=dict(receipt_pid=receipt_2.pid) + client, "api_receipt.lines", data=data, url_data=dict(receipt_pid=receipt_2.pid) ) assert res.status_code == 200 - response = data.get('response') - assert response[0]['status'] == AcqReceiptLineCreationStatus.SUCCESS + response = data.get("response") + assert response[0]["status"] == AcqReceiptLineCreationStatus.SUCCESS order_line = AcqOrderLine.get_record_by_pid(order_line_5.pid) assert order_line.status == AcqOrderLineStatus.RECEIVED @@ -614,7 +617,7 @@ def assert_account_data(accounts): m_serials_acc: ((2645, 2645), (355, 0), (0, 0)), s_root_acc: ((13500, 20000), (0, 0), (0, 0)), s_books_acc: ((2500, 2500), (0, 0), (0, 0)), - s_serials_acc: ((4000, 4000), (0, 0), (0, 0)) + s_serials_acc: ((4000, 4000), (0, 0), (0, 0)), } assert_account_data(manual_controls) @@ -629,13 +632,13 @@ def assert_account_data(accounts): # * check receipt links links = order.get_links_to_me(get_pids=True) for pid in [order_line_3.pid, order_line_4.pid, order_line_5.pid]: - assert pid in links['order_lines'] + assert pid in links["order_lines"] for pid in [receipt_1.pid, receipt_2.pid]: - assert pid in links['receipts'] + assert pid in 
links["receipts"] # DELETE `RECEIPT_2` ---------- - receipt_line_pids = receipt_2.get_receipt_lines(output='pids') - url = url_for('invenio_records_rest.acre_item', pid_value=receipt_2.pid) + receipt_line_pids = receipt_2.get_receipt_lines(output="pids") + url = url_for("invenio_records_rest.acre_item", pid_value=receipt_2.pid) client.delete(url) # Check all resources related to `receipt_2` is deleted for pid in receipt_line_pids: @@ -643,19 +646,17 @@ def assert_account_data(accounts): assert line is None assert AcqReceipt.get_record_by_pid(receipt_2.pid) is None # Check ES is up-to-date - response = AcqReceiptLinesSearch()\ - .filter('terms', pid=receipt_line_pids).execute() + response = AcqReceiptLinesSearch().filter("terms", pid=receipt_line_pids).execute() assert response.hits.total.value == 0 - response = AcqReceiptsSearch() \ - .filter('term', pid=receipt_2.pid).execute() + response = AcqReceiptsSearch().filter("term", pid=receipt_2.pid).execute() assert response.hits.total.value == 0 # Check order status order = AcqOrder.get_record_by_pid(order.pid) assert order.status == AcqOrderStatus.PARTIALLY_RECEIVED # DELETE `RECEIPT_1` ---------- - receipt_line_pids = receipt_1.get_receipt_lines(output='pids') - url = url_for('invenio_records_rest.acre_item', pid_value=receipt_1.pid) + receipt_line_pids = receipt_1.get_receipt_lines(output="pids") + url = url_for("invenio_records_rest.acre_item", pid_value=receipt_1.pid) client.delete(url) # Check all resources related to `receipt_1` is deleted for pid in receipt_line_pids: @@ -663,11 +664,9 @@ def assert_account_data(accounts): assert line is None assert AcqReceipt.get_record_by_pid(receipt_1.pid) is None # Check ES is up-to-date - response = AcqReceiptLinesSearch() \ - .filter('terms', pid=receipt_line_pids).execute() + response = AcqReceiptLinesSearch().filter("terms", pid=receipt_line_pids).execute() assert response.hits.total.value == 0 - response = AcqReceiptsSearch() \ - .filter('term', pid=receipt_1.pid).execute() + response = AcqReceiptsSearch().filter("term", pid=receipt_1.pid).execute() assert response.hits.total.value == 0 # Check order status order = AcqOrder.get_record_by_pid(order.pid) @@ -680,6 +679,6 @@ def assert_account_data(accounts): m_serials_acc: ((2655, 2655), (0, 0), (345, 0)), s_root_acc: ((13500, 20000), (0, 0), (0, 0)), s_books_acc: ((2500, 2500), (0, 0), (0, 0)), - s_serials_acc: ((4000, 4000), (0, 0), (0, 0)) + s_serials_acc: ((4000, 4000), (0, 0), (0, 0)), } assert_account_data(manual_controls) diff --git a/tests/api/acquisition/test_acquisition_rollover.py b/tests/api/acquisition/test_acquisition_rollover.py index 837601720f..60752ee19c 100644 --- a/tests/api/acquisition/test_acquisition_rollover.py +++ b/tests/api/acquisition/test_acquisition_rollover.py @@ -28,35 +28,43 @@ from rero_ils.config import ROLLOVER_LOGGING_CONFIG from rero_ils.modules.acquisition.budgets.api import Budget from rero_ils.modules.acquisition.cli import rollover -from rero_ils.modules.acquisition.exceptions import BudgetDoesNotExist, \ - BudgetNotEmptyError, InactiveBudgetError, IncompatibleBudgetError +from rero_ils.modules.acquisition.exceptions import ( + BudgetDoesNotExist, + BudgetNotEmptyError, + InactiveBudgetError, + IncompatibleBudgetError, +) from rero_ils.modules.acquisition.rollover import AcqRollover from rero_ils.modules.utils import get_ref_for_pid -def test_rollover_cli(client, acq_full_structure_a, org_martigny, script_info): +def test_rollover_cli(client, acq_full_structure_a, org_martigny): """Test rollover script 
using the CLI command.""" origin_budget = acq_full_structure_a runner = CliRunner() with runner.isolated_filesystem(): - os.mkdir('logs') + os.mkdir("logs") # Missing destination budget argument res = runner.invoke(rollover, [origin_budget.pid]) assert res != 0 # Missing parameters to create destination budget - res = runner.invoke(rollover, [origin_budget.pid, '-n']) + res = runner.invoke(rollover, [origin_budget.pid, "-n"]) assert res != 0 result = runner.invoke( - rollover, [ + rollover, + [ origin_budget.pid, - '--new-budget', - '--budget-name', 'Budget destination', - '--budget-start-date', '2022-01-01', - '--budget-end-date', '2022-12-31' - ], obj=script_info + "--new-budget", + "--budget-name", + "Budget destination", + "--budget-start-date", + "2022-01-01", + "--budget-end-date", + "2022-12-31", + ], ) assert result.exit_code == 0 # all works fine ! @@ -67,21 +75,21 @@ def test_rollover_exceptions( """Test rollover process exceptions.""" origin_budget = acq_full_structure_a # budget data - ref_org_sion = get_ref_for_pid('org', org_sion.pid) - ref_org_martigny = get_ref_for_pid('org', org_martigny.pid) + ref_org_sion = get_ref_for_pid("org", org_sion.pid) + ref_org_martigny = get_ref_for_pid("org", org_martigny.pid) destination_budget = { - 'name': 'Budget destination', - 'start_date': '2022-01-01', - 'end_date': '2022-12-31', - 'is_active': False, - 'organisation': {'$ref': ref_org_martigny} + "name": "Budget destination", + "start_date": "2022-01-01", + "end_date": "2022-12-31", + "is_active": False, + "organisation": {"$ref": ref_org_martigny}, } # Use special logging configuration to disable any logs logging_config = deepcopy(ROLLOVER_LOGGING_CONFIG) - logging_config['handlers']['console'] = {'class': 'logging.NullHandler'} - logging_config['handlers']['file'] = {'class': 'logging.NullHandler'} + logging_config["handlers"]["console"] = {"class": "logging.NullHandler"} + logging_config["handlers"]["file"] = {"class": "logging.NullHandler"} # TEST#1 :: Run rollover process without destination budget # In this case, the rollover script will try to create a new budget @@ -89,7 +97,7 @@ def test_rollover_exceptions( # AssertionError with pytest.raises(AssertionError) as err: AcqRollover(origin_budget, logging_config=logging_config) - assert 'param required' in str(err) + assert "param required" in str(err) # TEST#2 :: Rollover arguments validation # * testing budget record existence. 
@@ -97,35 +105,52 @@ def test_rollover_exceptions( # * testing original budget is active # * testing destination budget is empty with pytest.raises(BudgetDoesNotExist): - AcqRollover(origin_budget, {}, logging_config=logging_config, - propagate_errors=True) + AcqRollover( + origin_budget, {}, logging_config=logging_config, propagate_errors=True + ) - destination_budget['organisation']['$ref'] = ref_org_sion - sion_budget = _make_resource(client, 'budg', destination_budget) + destination_budget["organisation"]["$ref"] = ref_org_sion + sion_budget = _make_resource(client, "budg", destination_budget) with pytest.raises(IncompatibleBudgetError): - AcqRollover(origin_budget, sion_budget, logging_config=logging_config, - propagate_errors=True) + AcqRollover( + origin_budget, + sion_budget, + logging_config=logging_config, + propagate_errors=True, + ) - destination_budget['organisation']['$ref'] = ref_org_martigny - dest_budget = _make_resource(client, 'budg', destination_budget) - origin_budget['is_active'] = False + destination_budget["organisation"]["$ref"] = ref_org_martigny + dest_budget = _make_resource(client, "budg", destination_budget) + origin_budget["is_active"] = False origin_budget.update(origin_budget, dbcommit=True, reindex=True) with pytest.raises(InactiveBudgetError): - AcqRollover(origin_budget, dest_budget, logging_config=logging_config, - propagate_errors=True) + AcqRollover( + origin_budget, + dest_budget, + logging_config=logging_config, + propagate_errors=True, + ) - origin_budget['is_active'] = True + origin_budget["is_active"] = True origin_budget.update(origin_budget, dbcommit=True, reindex=True) - _make_resource(client, 'acac', { - 'name': 'account_1', - 'number': '000.0000.01', - 'allocated_amount': 1000, - 'budget': {'$ref': get_ref_for_pid('budg', dest_budget.pid)}, - 'library': {'$ref': get_ref_for_pid('lib', lib_martigny.pid)} - }) + _make_resource( + client, + "acac", + { + "name": "account_1", + "number": "000.0000.01", + "allocated_amount": 1000, + "budget": {"$ref": get_ref_for_pid("budg", dest_budget.pid)}, + "library": {"$ref": get_ref_for_pid("lib", lib_martigny.pid)}, + }, + ) with pytest.raises(BudgetNotEmptyError): - AcqRollover(origin_budget, dest_budget, logging_config=logging_config, - propagate_errors=True) + AcqRollover( + origin_budget, + dest_budget, + logging_config=logging_config, + propagate_errors=True, + ) def test_rollover_misc_functions(client, acq_full_structure_a, org_martigny): @@ -136,48 +161,48 @@ def test_rollover_misc_functions(client, acq_full_structure_a, org_martigny): expected. 
""" original_budget = acq_full_structure_a - destination_budget = _make_resource(client, 'budg', { - 'name': 'Budget destination', - 'start_date': '2022-01-01', - 'end_date': '2022-12-31', - 'is_active': False, - 'organisation': {'$ref': get_ref_for_pid('org', org_martigny.pid)} - }) + destination_budget = _make_resource( + client, + "budg", + { + "name": "Budget destination", + "start_date": "2022-01-01", + "end_date": "2022-12-31", + "is_active": False, + "organisation": {"$ref": get_ref_for_pid("org", org_martigny.pid)}, + }, + ) # Use special logging configuration to disable any logs logging_config = deepcopy(ROLLOVER_LOGGING_CONFIG) - logging_config['handlers']['console'] = {'class': 'logging.NullHandler'} - logging_config['handlers']['file'] = {'class': 'logging.NullHandler'} + logging_config["handlers"]["console"] = {"class": "logging.NullHandler"} + logging_config["handlers"]["file"] = {"class": "logging.NullHandler"} process = AcqRollover( - original_budget, - destination_budget, - logging_config=logging_config + original_budget, destination_budget, logging_config=logging_config ) # TEST#1 :: budget creation by rollover process new_budget = process._create_new_budget( - name='test_budget', - start_date='2000-01-01', - end_date='2000-12-31' + name="test_budget", start_date="2000-01-01", end_date="2000-12-31" ) assert new_budget - assert new_budget.name == 'test_budget' + assert new_budget.name == "test_budget" # TEST#2 :: confirmation message - with mock.patch('builtins.input', side_effect=['Y', 'No', 'inv', 'y', '']): + with mock.patch("builtins.input", side_effect=["Y", "No", "inv", "y", ""]): confirmation = process._confirm('user will enter "Y"', default=None) assert confirmation confirmation = process._confirm('user will enter "n"') assert not confirmation confirmation = process._confirm('user will enter "inv", next "y"') assert confirmation - confirmation = process._confirm('user just push ENTER', default='no') + confirmation = process._confirm("user just push ENTER", default="no") assert not confirmation with pytest.raises(ValueError): - process._confirm('invalid default', default='dummy') + process._confirm("invalid default", default="dummy") # TEST#3 :: aborting ; this will delete the new created budget assert len(process._stack) == 1 - process._abort_rollover('dummy errors') + process._abort_rollover("dummy errors") control_budget = Budget.get_record_by_pid(new_budget.pid) assert not control_budget diff --git a/tests/api/acquisition/test_acquisition_scenarios.py b/tests/api/acquisition/test_acquisition_scenarios.py index c3d7879817..9af71e7847 100644 --- a/tests/api/acquisition/test_acquisition_scenarios.py +++ b/tests/api/acquisition/test_acquisition_scenarios.py @@ -23,10 +23,9 @@ from flask import url_for from invenio_accounts.testutils import login_user_via_session from jsonschema.exceptions import ValidationError -from utils import VerifyRecordPermissionPatch, flush_index, get_json +from utils import VerifyRecordPermissionPatch, get_json -from rero_ils.modules.acquisition.acq_accounts.api import AcqAccount, \ - AcqAccountsSearch +from rero_ils.modules.acquisition.acq_accounts.api import AcqAccount, AcqAccountsSearch from rero_ils.modules.acquisition.acq_order_lines.api import AcqOrderLine from rero_ils.modules.acquisition.acq_orders.api import AcqOrder from rero_ils.modules.acquisition.acq_orders.models import AcqOrderStatus @@ -34,20 +33,23 @@ from rero_ils.modules.utils import get_ref_for_pid -@mock.patch('invenio_records_rest.views.verify_record_permission', - 
mock.MagicMock(return_value=VerifyRecordPermissionPatch)) -def test_create_accounts(client, rero_json_header, org_martigny, lib_martigny, - budget_2020_martigny): +@mock.patch( + "invenio_records_rest.views.verify_record_permission", + mock.MagicMock(return_value=VerifyRecordPermissionPatch), +) +def test_create_accounts( + client, rero_json_header, org_martigny, lib_martigny, budget_2020_martigny +): """Basic scenario to test account creation.""" # STEP 1 :: Create a root account root_account_data = { - 'name': 'Root account', - 'number': '000.0000.00', - 'allocated_amount': 1000, - 'budget': {'$ref': get_ref_for_pid('budg', budget_2020_martigny.pid)}, - 'library': {'$ref': get_ref_for_pid('lib', lib_martigny.pid)} + "name": "Root account", + "number": "000.0000.00", + "allocated_amount": 1000, + "budget": {"$ref": get_ref_for_pid("budg", budget_2020_martigny.pid)}, + "library": {"$ref": get_ref_for_pid("lib", lib_martigny.pid)}, } - root_account = _make_resource(client, 'acac', root_account_data) + root_account = _make_resource(client, "acac", root_account_data) # STEP 2 :: Create a child account # * Try to create a child account with too much amount regarding root @@ -56,21 +58,21 @@ def test_create_accounts(client, rero_json_header, org_martigny, lib_martigny, # account. It should fail # * Create a child account with 70% of the available amount of root # account child_account_data = { - 'name': 'Chid account', - 'number': '000.0001.00', - 'allocated_amount': root_account['allocated_amount'] + 1, - 'budget': {'$ref': get_ref_for_pid('budg', budget_2020_martigny.pid)}, - 'library': {'$ref': get_ref_for_pid('lib', lib_martigny.pid)}, - 'parent': {'$ref': get_ref_for_pid('acac', root_account.pid)}, + "name": "Child account", + "number": "000.0001.00", + "allocated_amount": root_account["allocated_amount"] + 1, + "budget": {"$ref": get_ref_for_pid("budg", budget_2020_martigny.pid)}, + "library": {"$ref": get_ref_for_pid("lib", lib_martigny.pid)}, + "parent": {"$ref": get_ref_for_pid("acac", root_account.pid)}, } with pytest.raises(Exception) as excinfo: - _make_resource(client, 'acac', child_account_data) - assert 'Parent account available amount too low.' in str(excinfo.value) + _make_resource(client, "acac", child_account_data) + assert "Parent account available amount too low."
in str(excinfo.value) - amount_70 = round(root_account['allocated_amount'] * 0.7) - amount_30 = root_account['allocated_amount'] - amount_70 - child_account_data['allocated_amount'] = amount_70 - child_account = _make_resource(client, 'acac', child_account_data) + amount_70 = round(root_account["allocated_amount"] * 0.7) + amount_30 = root_account["allocated_amount"] - amount_70 + child_account_data["allocated_amount"] = amount_70 + child_account = _make_resource(client, "acac", child_account_data) # STEP 3 :: Check accounts distribution # * Parent account should have 30% as available amount @@ -79,29 +81,36 @@ def test_create_accounts(client, rero_json_header, org_martigny, lib_martigny, assert root_account.distribution == amount_70 # STEP 4 :: Decrease the allocated amount of parent account too much - root_account_data['allocated_amount'] = amount_30 + 1 + root_account_data["allocated_amount"] = amount_30 + 1 with pytest.raises(Exception) as excinfo: root_account.update(root_account_data, dbcommit=True) - assert 'Remaining balance too low' in str(excinfo.value) + assert "Remaining balance too low" in str(excinfo.value) # RESET DATA - _del_resource(client, 'acac', child_account.pid) - _del_resource(client, 'acac', root_account.pid) + _del_resource(client, "acac", child_account.pid) + _del_resource(client, "acac", root_account.pid) # DEV NOTES : This mock prevents any translation problems from occurring. # When a translation is requested, the input string is returned # without any changes. -@mock.patch('rero_ils.modules.acquisition.acq_accounts.api._', - mock.MagicMock(side_effect=lambda v: v)) -def test_transfer_funds_api(client, rero_json_header, org_martigny, - lib_martigny, budget_2020_martigny, - librarian_martigny): +@mock.patch( + "rero_ils.modules.acquisition.acq_accounts.api._", + mock.MagicMock(side_effect=lambda v: v), +) +def test_transfer_funds_api( + client, + rero_json_header, + org_martigny, + lib_martigny, + budget_2020_martigny, + librarian_martigny, +): """Scenario to test fund transfer between two accounts.""" def _check_account(account): """Check the available amount of an account.""" - return account['allocated_amount'], account.remaining_balance[0] + return account["allocated_amount"], account.remaining_balance[0] login_user_via_session(client, librarian_martigny.user) @@ -123,95 +132,118 @@ def _check_account(account): # | +-- C22{20, 20} # +-- C3{100, 100} basic_data = { - 'allocated_amount': 1000, - 'budget': {'$ref': get_ref_for_pid('budg', budget_2020_martigny.pid)}, - 'library': {'$ref': get_ref_for_pid('lib', lib_martigny.pid)} + "allocated_amount": 1000, + "budget": {"$ref": get_ref_for_pid("budg", budget_2020_martigny.pid)}, + "library": {"$ref": get_ref_for_pid("lib", lib_martigny.pid)}, } - account_a = dict(name='A', allocated_amount=2000) + account_a = dict(name="A", allocated_amount=2000) account_a = {**basic_data, **account_a} - account_a = _make_resource(client, 'acac', account_a) - a_ref = {'$ref': get_ref_for_pid('acac', account_a.pid)} + account_a = _make_resource(client, "acac", account_a) + a_ref = {"$ref": get_ref_for_pid("acac", account_a.pid)} - account_b = dict(name='B', allocated_amount=500, parent=a_ref) + account_b = dict(name="B", allocated_amount=500, parent=a_ref) account_b = {**basic_data, **account_b} - account_b = _make_resource(client, 'acac', account_b) - b_ref = {'$ref': get_ref_for_pid('acac', account_b.pid)} + account_b = _make_resource(client, "acac", account_b) + b_ref = {"$ref": get_ref_for_pid("acac", account_b.pid)} -
account_c = dict(name='C', allocated_amount=1000, parent=a_ref) + account_c = dict(name="C", allocated_amount=1000, parent=a_ref) account_c = {**basic_data, **account_c} - account_c = _make_resource(client, 'acac', account_c) - c_ref = {'$ref': get_ref_for_pid('acac', account_c.pid)} + account_c = _make_resource(client, "acac", account_c) + c_ref = {"$ref": get_ref_for_pid("acac", account_c.pid)} - account_b1 = dict(name='B1', allocated_amount=300, parent=b_ref) + account_b1 = dict(name="B1", allocated_amount=300, parent=b_ref) account_b1 = {**basic_data, **account_b1} - account_b1 = _make_resource(client, 'acac', account_b1) - account_b2 = dict(name='B2', allocated_amount=50, parent=b_ref) + account_b1 = _make_resource(client, "acac", account_b1) + account_b2 = dict(name="B2", allocated_amount=50, parent=b_ref) account_b2 = {**basic_data, **account_b2} - account_b2 = _make_resource(client, 'acac', account_b2) + account_b2 = _make_resource(client, "acac", account_b2) - account_c1 = dict(name='C1', allocated_amount=100, parent=c_ref) + account_c1 = dict(name="C1", allocated_amount=100, parent=c_ref) account_c1 = {**basic_data, **account_c1} - account_c1 = _make_resource(client, 'acac', account_c1) - account_c2 = dict(name='C2', allocated_amount=100, parent=c_ref) + account_c1 = _make_resource(client, "acac", account_c1) + account_c2 = dict(name="C2", allocated_amount=100, parent=c_ref) account_c2 = {**basic_data, **account_c2} - account_c2 = _make_resource(client, 'acac', account_c2) - account_c3 = dict(name='C3', allocated_amount=100, parent=c_ref) + account_c2 = _make_resource(client, "acac", account_c2) + account_c3 = dict(name="C3", allocated_amount=100, parent=c_ref) account_c3 = {**basic_data, **account_c3} - account_c3 = _make_resource(client, 'acac', account_c3) - c2_ref = {'$ref': get_ref_for_pid('acac', account_c2.pid)} + account_c3 = _make_resource(client, "acac", account_c3) + c2_ref = {"$ref": get_ref_for_pid("acac", account_c2.pid)} - account_c21 = dict(name='C21', allocated_amount=50, parent=c2_ref) + account_c21 = dict(name="C21", allocated_amount=50, parent=c2_ref) account_c21 = {**basic_data, **account_c21} - account_c21 = _make_resource(client, 'acac', account_c21) - account_c22 = dict(name='C22', allocated_amount=20, parent=c2_ref) + account_c21 = _make_resource(client, "acac", account_c21) + account_c22 = dict(name="C22", allocated_amount=20, parent=c2_ref) account_c22 = {**basic_data, **account_c22} - account_c22 = _make_resource(client, 'acac', account_c22) + account_c22 = _make_resource(client, "acac", account_c22) - account_e = dict(name='E', allocated_amount=300) + account_e = dict(name="E", allocated_amount=300) account_e = {**basic_data, **account_e} - account_e = _make_resource(client, 'acac', account_e) - e_ref = {'$ref': get_ref_for_pid('acac', account_e.pid)} + account_e = _make_resource(client, "acac", account_e) + e_ref = {"$ref": get_ref_for_pid("acac", account_e.pid)} - account_f = dict(name='F', allocated_amount=200, parent=e_ref) + account_f = dict(name="F", allocated_amount=200, parent=e_ref) account_f = {**basic_data, **account_f} - account_f = _make_resource(client, 'acac', account_f) - f_ref = {'$ref': get_ref_for_pid('acac', account_f.pid)} + account_f = _make_resource(client, "acac", account_f) + f_ref = {"$ref": get_ref_for_pid("acac", account_f.pid)} - account_g = dict(name='G', allocated_amount=100, parent=f_ref) + account_g = dict(name="G", allocated_amount=100, parent=f_ref) account_g = {**basic_data, **account_g} - account_g = 
_make_resource(client, 'acac', account_g) + account_g = _make_resource(client, "acac", account_g) # TEST 0 :: Try the API with invalid arguments. - res = client.get(url_for('api_acq_account.transfer_funds')) + res = client.get(url_for("api_acq_account.transfer_funds")) assert res.status_code == 400 - assert 'argument is required' in res.get_data(as_text=True) - cases_to_test = [{ - 'source': 'dummy', 'target': 'dummy', 'amount': 'dummy', - 'error': 'Unable to load source account' - }, { - 'source': account_a.pid, 'target': 'dummy', 'amount': 'dummy', - 'error': 'Unable to load target account' - }, { - 'source': account_a.pid, 'target': account_b.pid, 'amount': 'dummy', - 'error': "could not convert" - }, { - 'source': account_a.pid, 'target': account_b.pid, 'amount': -1.52, - 'error': "'amount' should be a positive number" - }, { - 'source': account_a.pid, 'target': account_a.pid, 'amount': 1, - 'error': "Cannot transfer fund to myself" - }, { - 'source': account_a.pid, 'target': account_e.pid, 'amount': 100000, - 'error': "Not enough available money from source account" - }] + assert "argument is required" in res.get_data(as_text=True) + cases_to_test = [ + { + "source": "dummy", + "target": "dummy", + "amount": "dummy", + "error": "Unable to load source account", + }, + { + "source": account_a.pid, + "target": "dummy", + "amount": "dummy", + "error": "Unable to load target account", + }, + { + "source": account_a.pid, + "target": account_b.pid, + "amount": "dummy", + "error": "could not convert", + }, + { + "source": account_a.pid, + "target": account_b.pid, + "amount": -1.52, + "error": "'amount' should be a positive number", + }, + { + "source": account_a.pid, + "target": account_a.pid, + "amount": 1, + "error": "Cannot transfer fund to myself", + }, + { + "source": account_a.pid, + "target": account_e.pid, + "amount": 100000, + "error": "Not enough available money from source account", + }, + ] for case in cases_to_test: - res = client.get(url_for( - 'api_acq_account.transfer_funds', - source=case['source'], target=case['target'], amount=case['amount'] - )) + res = client.get( + url_for( + "api_acq_account.transfer_funds", + source=case["source"], + target=case["target"], + amount=case["amount"], + ) + ) assert res.status_code == 400 data = get_json(res) - assert case['error'] in data['message'] + assert case["error"] in data["message"] # STATUS BEFORE NEXT TEST # A{2000, 500} E{300, 100} @@ -229,10 +261,14 @@ def _check_account(account): # Transfer 25 from C21 account to C account. After this transfer, the # C20 remaining balance should be equal to 25 ; the remaining balance for # C account should be 725 - res = client.get(url_for( - 'api_acq_account.transfer_funds', - source=account_c21.pid, target=account_c.pid, amount=25 - )) + res = client.get( + url_for( + "api_acq_account.transfer_funds", + source=account_c21.pid, + target=account_c.pid, + amount=25, + ) + ) assert res.status_code == 200 account_c21 = AcqAccount.get_record_by_pid(account_c21.pid) account_c2 = AcqAccount.get_record_by_pid(account_c2.pid) @@ -259,10 +295,14 @@ def _check_account(account): # for A account should be 400. 
The remaining balance for intermediate # accounts (C, C2) should be the same, but allocated amount should be # increased by 100 (1100, 175) - res = client.get(url_for( - 'api_acq_account.transfer_funds', - source=account_a.pid, target=account_c22.pid, amount=100 - )) + res = client.get( + url_for( + "api_acq_account.transfer_funds", + source=account_a.pid, + target=account_c22.pid, + amount=100, + ) + ) assert res.status_code == 200 account_a = AcqAccount.get_record_by_pid(account_a.pid) account_c = AcqAccount.get_record_by_pid(account_c.pid) @@ -288,10 +328,14 @@ def _check_account(account): # TEST 3 :: Transfer 300 from B1 account to C21 account. # Same behavior as the previous test, but the source account isn't the common # ancestor. - res = client.get(url_for( - 'api_acq_account.transfer_funds', - source=account_b1.pid, target=account_c21.pid, amount=300 - )) + res = client.get( + url_for( + "api_acq_account.transfer_funds", + source=account_b1.pid, + target=account_c21.pid, + amount=300, + ) + ) assert res.status_code == 200 account_b1 = AcqAccount.get_record_by_pid(account_b1.pid) account_b = AcqAccount.get_record_by_pid(account_b.pid) @@ -322,10 +366,14 @@ def _check_account(account): # We transfer 100 from F account to C3 account. As both accounts aren't # in the same tree, there is no common ancestor. Each tree root # should be updated (E will decrease, A will increase) - res = client.get(url_for( - 'api_acq_account.transfer_funds', - source=account_f.pid, target=account_c3.pid, amount=100 - )) + res = client.get( + url_for( + "api_acq_account.transfer_funds", + source=account_f.pid, + target=account_c3.pid, + amount=100, + ) + ) assert res.status_code == 200 account_f = AcqAccount.get_record_by_pid(account_f.pid) account_e = AcqAccount.get_record_by_pid(account_e.pid) @@ -351,25 +399,31 @@ def _check_account(account): # +-- C3{200, 200} # delete accounts - _del_resource(client, 'acac', account_g.pid) - _del_resource(client, 'acac', account_f.pid) - _del_resource(client, 'acac', account_e.pid) - - _del_resource(client, 'acac', account_c22.pid) - _del_resource(client, 'acac', account_c21.pid) - _del_resource(client, 'acac', account_c3.pid) - _del_resource(client, 'acac', account_c2.pid) - _del_resource(client, 'acac', account_c1.pid) - _del_resource(client, 'acac', account_c.pid) - _del_resource(client, 'acac', account_b2.pid) - _del_resource(client, 'acac', account_b1.pid) - _del_resource(client, 'acac', account_b.pid) - _del_resource(client, 'acac', account_a.pid) + _del_resource(client, "acac", account_g.pid) + _del_resource(client, "acac", account_f.pid) + _del_resource(client, "acac", account_e.pid) + + _del_resource(client, "acac", account_c22.pid) + _del_resource(client, "acac", account_c21.pid) + _del_resource(client, "acac", account_c3.pid) + _del_resource(client, "acac", account_c2.pid) + _del_resource(client, "acac", account_c1.pid) + _del_resource(client, "acac", account_c.pid) + _del_resource(client, "acac", account_b2.pid) + _del_resource(client, "acac", account_b1.pid) + _del_resource(client, "acac", account_b.pid) + _del_resource(client, "acac", account_a.pid) def test_acquisition_order( - client, rero_json_header, org_martigny, lib_martigny, budget_2020_martigny, - vendor_martigny, librarian_martigny, document + client, + rero_json_header, + org_martigny, + lib_martigny, + budget_2020_martigny, + vendor_martigny, + librarian_martigny, + document, ): """Scenario to test order creation.""" @@ -377,44 +431,44 @@ def test_acquisition_order( # STEP 0 :: Create the
account tree basic_data = { - 'allocated_amount': 1000, - 'budget': {'$ref': get_ref_for_pid('budg', budget_2020_martigny.pid)}, - 'library': {'$ref': get_ref_for_pid('lib', lib_martigny.pid)} + "allocated_amount": 1000, + "budget": {"$ref": get_ref_for_pid("budg", budget_2020_martigny.pid)}, + "library": {"$ref": get_ref_for_pid("lib", lib_martigny.pid)}, } - account_a = dict(name='A', allocated_amount=2000) + account_a = dict(name="A", allocated_amount=2000) account_a = {**basic_data, **account_a} - account_a = _make_resource(client, 'acac', account_a) - account_a_ref = {'$ref': get_ref_for_pid('acac', account_a.pid)} + account_a = _make_resource(client, "acac", account_a) + account_a_ref = {"$ref": get_ref_for_pid("acac", account_a.pid)} - account_b = dict(name='B', allocated_amount=500, parent=account_a_ref) + account_b = dict(name="B", allocated_amount=500, parent=account_a_ref) account_b = {**basic_data, **account_b} - account_b = _make_resource(client, 'acac', account_b) - account_b_ref = {'$ref': get_ref_for_pid('acac', account_b.pid)} + account_b = _make_resource(client, "acac", account_b) + account_b_ref = {"$ref": get_ref_for_pid("acac", account_b.pid)} # TEST 1 :: Create an order and add some order lines on it. # * The creation of the order will be successful # * We create first order line linked to account B. After this creation, # we can check the encumbrance of this account and its parent account. order_data = { - 'vendor': {'$ref': get_ref_for_pid('vndr', vendor_martigny.pid)}, - 'library': {'$ref': get_ref_for_pid('lib', lib_martigny.pid)}, - 'type': 'monograph', + "vendor": {"$ref": get_ref_for_pid("vndr", vendor_martigny.pid)}, + "library": {"$ref": get_ref_for_pid("lib", lib_martigny.pid)}, + "type": "monograph", } - order = _make_resource(client, 'acor', order_data) - assert order['reference'] == f'ORDER-{order.pid}' + order = _make_resource(client, "acor", order_data) + assert order["reference"] == f"ORDER-{order.pid}" assert order.get_order_provisional_total_amount() == 0 assert order.status == AcqOrderStatus.PENDING assert order.can_delete basic_data = { - 'acq_account': account_b_ref, - 'acq_order': {'$ref': get_ref_for_pid('acor', order.pid)}, - 'document': {'$ref': get_ref_for_pid('doc', document.pid)}, - 'quantity': 4, - 'amount': 25 + "acq_account": account_b_ref, + "acq_order": {"$ref": get_ref_for_pid("acor", order.pid)}, + "document": {"$ref": get_ref_for_pid("doc", document.pid)}, + "quantity": 4, + "amount": 25, } - order_line_1 = _make_resource(client, 'acol', basic_data) - assert order_line_1.get('total_amount') == 100 + order_line_1 = _make_resource(client, "acol", basic_data) + assert order_line_1.get("total_amount") == 100 assert account_b.encumbrance_amount[0] == 100 assert account_b.remaining_balance[0] == 400 # 500 - 100 @@ -433,15 +487,15 @@ def test_acquisition_order( # * As this new order line has CANCELLED status, its amount is not # calculated into the encumbrance_amount basic_data = { - 'acq_account': account_b_ref, - 'acq_order': {'$ref': get_ref_for_pid('acor', order.pid)}, - 'document': {'$ref': get_ref_for_pid('doc', document.pid)}, - 'quantity': 2, - 'amount': 10, - 'is_cancelled': True + "acq_account": account_b_ref, + "acq_order": {"$ref": get_ref_for_pid("acor", order.pid)}, + "document": {"$ref": get_ref_for_pid("doc", document.pid)}, + "quantity": 2, + "amount": 10, + "is_cancelled": True, } - order_line_1_1 = _make_resource(client, 'acol', basic_data) - assert order_line_1_1.get('total_amount') == 20 + order_line_1_1 = 
_make_resource(client, "acol", basic_data) + assert order_line_1_1.get("total_amount") == 20 assert account_b.encumbrance_amount[0] == 100 assert account_b.remaining_balance[0] == 400 # 500 - 100 @@ -459,17 +513,16 @@ def test_acquisition_order( order_line_2 = dict(quantity=50) order_line_2 = {**basic_data, **order_line_2} with pytest.raises(Exception) as excinfo: - _make_resource(client, 'acol', order_line_2) - assert 'Parent account available amount too low' in str(excinfo.value) + _make_resource(client, "acol", order_line_2) + assert "Parent account available amount too low" in str(excinfo.value) - order_line_1['quantity'] = 50 + order_line_1["quantity"] = 50 with pytest.raises(Exception) as excinfo: order_line_1.update(order_line_1, dbcommit=True, reindex=True) - assert 'Parent account available amount too low' in str(excinfo.value) + assert "Parent account available amount too low" in str(excinfo.value) - order_line_1['quantity'] = 20 - order_line_1 = order_line_1.update(order_line_1, dbcommit=True, - reindex=True) + order_line_1["quantity"] = 20 + order_line_1 = order_line_1.update(order_line_1, dbcommit=True, reindex=True) assert account_b.encumbrance_amount[0] == 500 assert account_b.remaining_balance[0] == 0 assert account_a.encumbrance_amount == (0, 500) @@ -481,15 +534,14 @@ def test_acquisition_order( # line will raise a ValidationError # * Update the account 'encumbrance_exceedance' setting to allow more # encumbrance and try to add an item to order_line. It will be OK - order_line_1['quantity'] += 1 + order_line_1["quantity"] += 1 with pytest.raises(Exception) as excinfo: order_line_1.update(order_line_1, dbcommit=True, reindex=True) - assert 'Parent account available amount too low' in str(excinfo.value) + assert "Parent account available amount too low" in str(excinfo.value) - account_b['encumbrance_exceedance'] = 5 # 5% of 500 = 25 + account_b["encumbrance_exceedance"] = 5 # 5% of 500 = 25 account_b = account_b.update(account_b, dbcommit=True, reindex=True) - order_line_1 = order_line_1.update(order_line_1, dbcommit=True, - reindex=True) + order_line_1 = order_line_1.update(order_line_1, dbcommit=True, reindex=True) assert account_b.encumbrance_amount[0] == 525 assert account_b.remaining_balance[0] == -25 assert account_a.encumbrance_amount == (0, 525) @@ -498,7 +550,7 @@ def test_acquisition_order( # Test cascade deleting of order lines when attempting to delete a # PENDING order. 
order_line_1 = AcqOrderLine.get_record_by_pid(order_line_1.pid) - order_line_1['is_cancelled'] = True + order_line_1["is_cancelled"] = True order_line_1.update(order_line_1, dbcommit=True, reindex=True) order = AcqOrder.get_record_by_pid(order.pid) @@ -506,29 +558,35 @@ def test_acquisition_order( # Deleting a CANCELLED order is not permitted with pytest.raises(IlsRecordError.NotDeleted): - _del_resource(client, 'acor', order.pid) + _del_resource(client, "acor", order.pid) - order_line_1['is_cancelled'] = False + order_line_1["is_cancelled"] = False order_line_1.update(order_line_1, dbcommit=True, reindex=True) order = AcqOrder.get_record_by_pid(order.pid) assert order.status == AcqOrderStatus.PENDING # DELETE created resources - _del_resource(client, 'acor', order.pid) + _del_resource(client, "acor", order.pid) # Deleting the parent PENDING order does delete all of its order lines order_line_1 = AcqOrderLine.get_record_by_pid(order_line_1.pid) order_line_1_1 = AcqOrderLine.get_record_by_pid(order_line_1_1.pid) assert not order_line_1 assert not order_line_1_1 - _del_resource(client, 'acac', account_b.pid) - _del_resource(client, 'acac', account_a.pid) + _del_resource(client, "acac", account_b.pid) + _del_resource(client, "acac", account_a.pid) def test_acquisition_order_line_account_changes( - client, rero_json_header, org_martigny, lib_martigny, budget_2020_martigny, - vendor_martigny, librarian_martigny, document + client, + rero_json_header, + org_martigny, + lib_martigny, + budget_2020_martigny, + vendor_martigny, + librarian_martigny, + document, ): """Test validation behavior when the related account of an order line changes.""" @@ -546,30 +604,38 @@ def test_acquisition_order_line_account_changes( # 3) add an order line related to the acc#A # 4) check if balance/encumbrance are correct in the ES indexes.
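# NOTE :: the two-element tuples asserted below (e.g. `encumbrance_amount ==
# (200, 0)`) appear to report an account's own figure first and the figure
# accumulated by its child accounts second, so `[0]` is always the account's
# own share. A toy model of that convention (hypothetical, not the AcqAccount
# API):
from dataclasses import dataclass, field


@dataclass
class ToyAccount:
    allocated: float
    own_encumbrance: float = 0.0
    children: list = field(default_factory=list)

    @property
    def encumbrance_amount(self):
        # (own encumbrance, encumbrance accumulated by all descendants)
        children_total = sum(sum(c.encumbrance_amount) for c in self.children)
        return (self.own_encumbrance, children_total)


child = ToyAccount(allocated=500, own_encumbrance=200)
parent = ToyAccount(allocated=1000, children=[child])
assert child.encumbrance_amount == (200, 0)
assert parent.encumbrance_amount == (0, 200)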
basic_data = { - 'allocated_amount': 0, - 'budget': {'$ref': get_ref_for_pid('budg', budget_2020_martigny.pid)}, - 'library': {'$ref': get_ref_for_pid('lib', lib_martigny.pid)} + "allocated_amount": 0, + "budget": {"$ref": get_ref_for_pid("budg", budget_2020_martigny.pid)}, + "library": {"$ref": get_ref_for_pid("lib", lib_martigny.pid)}, } - account_a = dict(name='A', allocated_amount=1000) - account_a = _make_resource(client, 'acac', {**basic_data, **account_a}) - account_a_ref = {'$ref': get_ref_for_pid('acac', account_a.pid)} - - account_b = dict(name='B') - account_b = _make_resource(client, 'acac', {**basic_data, **account_b}) - account_b_ref = {'$ref': get_ref_for_pid('acac', account_b.pid)} - - order = _make_resource(client, 'acor', { - 'vendor': {'$ref': get_ref_for_pid('vndr', vendor_martigny.pid)}, - 'library': {'$ref': get_ref_for_pid('lib', lib_martigny.pid)}, - 'type': 'monograph', - }) - order_line = _make_resource(client, 'acol', { - 'acq_account': account_a_ref, - 'acq_order': {'$ref': get_ref_for_pid('acor', order.pid)}, - 'document': {'$ref': get_ref_for_pid('doc', document.pid)}, - 'quantity': 2, - 'amount': 100 - }) + account_a = dict(name="A", allocated_amount=1000) + account_a = _make_resource(client, "acac", {**basic_data, **account_a}) + account_a_ref = {"$ref": get_ref_for_pid("acac", account_a.pid)} + + account_b = dict(name="B") + account_b = _make_resource(client, "acac", {**basic_data, **account_b}) + account_b_ref = {"$ref": get_ref_for_pid("acac", account_b.pid)} + + order = _make_resource( + client, + "acor", + { + "vendor": {"$ref": get_ref_for_pid("vndr", vendor_martigny.pid)}, + "library": {"$ref": get_ref_for_pid("lib", lib_martigny.pid)}, + "type": "monograph", + }, + ) + order_line = _make_resource( + client, + "acol", + { + "acq_account": account_a_ref, + "acq_order": {"$ref": get_ref_for_pid("acor", order.pid)}, + "document": {"$ref": get_ref_for_pid("doc", document.pid)}, + "quantity": 2, + "amount": 100, + }, + ) assert account_a.encumbrance_amount == (200, 0) assert account_a.remaining_balance == (800, 800) @@ -581,10 +647,10 @@ def test_acquisition_order_line_account_changes( # to the acc#B (not the acc#A). It will try to change that. But a # validation problem should occur because the remaining balance for this # account is too low to accept this order_line. - order_line['acq_account'] = account_b_ref + order_line["acq_account"] = account_b_ref with pytest.raises(ValidationError) as err: order_line.update(order_line, dbcommit=True, reindex=True) - assert 'Parent account available amount too low' in str(err) + assert "Parent account available amount too low" in str(err) order_line = AcqOrderLine.get_record(order_line.id) assert order_line.account_pid == account_a.pid # A staff member adds some money to the destination account to accept this # order line. We update the order line again and check that the balances of # the original and destination accounts are correct.
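# NOTE :: the fix below only becomes visible to search-based assertions once
# the Elasticsearch index has been refreshed, which is why the test calls
# `AcqAccountsSearch.flush_and_refresh()` right after the update. The same
# idea with the low-level client; the index name and document are
# illustrative assumptions, and the keyword arguments follow the 8.x
# elasticsearch-py client:
from elasticsearch import Elasticsearch

es = Elasticsearch("http://localhost:9200")
es.index(index="toy-accounts", id="1", document={"balance": 800})
es.indices.refresh(index="toy-accounts")  # make the write searchable now
result = es.search(index="toy-accounts", query={"match_all": {}})
assert result["hits"]["total"]["value"] == 1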
- account_b['allocated_amount'] = 1000 + account_b["allocated_amount"] = 1000 account_b = account_b.update(account_b, dbcommit=True, reindex=True) - order_line['acq_account'] = account_b_ref + order_line["acq_account"] = account_b_ref order_line = order_line.update(order_line, dbcommit=True, reindex=True) - flush_index(AcqAccountsSearch.Meta.index) + AcqAccountsSearch.flush_and_refresh() assert order_line.account_pid == account_b.pid assert account_a.encumbrance_amount == (0, 0) assert account_a.remaining_balance == (1000, 1000) @@ -604,7 +670,7 @@ def test_acquisition_order_line_account_changes( assert account_b.remaining_balance == (800, 800) # RESET FIXTURES - _del_resource(client, 'acol', order_line.pid) - _del_resource(client, 'acor', order.pid) - _del_resource(client, 'acac', account_b.pid) - _del_resource(client, 'acac', account_a.pid) + _del_resource(client, "acol", order_line.pid) + _del_resource(client, "acor", order.pid) + _del_resource(client, "acac", account_b.pid) + _del_resource(client, "acac", account_a.pid) diff --git a/tests/api/acquisition/test_acquisition_serializers.py b/tests/api/acquisition/test_acquisition_serializers.py index ff94df49a5..eda0d049b3 100644 --- a/tests/api/acquisition/test_acquisition_serializers.py +++ b/tests/api/acquisition/test_acquisition_serializers.py @@ -27,53 +27,64 @@ def test_acquisition_orders_serializers( - client, librarian_martigny, budget_2020_martigny, lib_martigny, - vendor_martigny, document, rero_json_header + client, + librarian_martigny, + budget_2020_martigny, + lib_martigny, + vendor_martigny, + document, + rero_json_header, ): """Test orders serializer.""" login_user_via_session(client, librarian_martigny.user) # STEP 0 :: Create the account with multiple order lines account_data = { - 'name': 'Account A', - 'allocated_amount': 1000, - 'budget': {'$ref': get_ref_for_pid('budg', budget_2020_martigny.pid)}, - 'library': {'$ref': get_ref_for_pid('lib', lib_martigny.pid)} + "name": "Account A", + "allocated_amount": 1000, + "budget": {"$ref": get_ref_for_pid("budg", budget_2020_martigny.pid)}, + "library": {"$ref": get_ref_for_pid("lib", lib_martigny.pid)}, } - account_a = _make_resource(client, 'acac', account_data) - account_a_ref = {'$ref': get_ref_for_pid('acac', account_a.pid)} + account_a = _make_resource(client, "acac", account_data) + account_a_ref = {"$ref": get_ref_for_pid("acac", account_a.pid)} order_data = { - 'vendor': {'$ref': get_ref_for_pid('vndr', vendor_martigny.pid)}, - 'library': {'$ref': get_ref_for_pid('lib', lib_martigny.pid)}, - 'reference': 'ORDER#1', - 'type': 'monograph', + "vendor": {"$ref": get_ref_for_pid("vndr", vendor_martigny.pid)}, + "library": {"$ref": get_ref_for_pid("lib", lib_martigny.pid)}, + "reference": "ORDER#1", + "type": "monograph", } - order = _make_resource(client, 'acor', order_data) + order = _make_resource(client, "acor", order_data) order.reindex() line_data = { - 'acq_account': account_a_ref, - 'acq_order': {'$ref': get_ref_for_pid('acor', order.pid)}, - 'document': {'$ref': get_ref_for_pid('doc', document.pid)}, - 'quantity': 4, - 'amount': 25 + "acq_account": account_a_ref, + "acq_order": {"$ref": get_ref_for_pid("acor", order.pid)}, + "document": {"$ref": get_ref_for_pid("doc", document.pid)}, + "quantity": 4, + "amount": 25, } - order_line_1 = _make_resource(client, 'acol', line_data) + order_line_1 = _make_resource(client, "acol", line_data) # TEST ORDER SERIALIZER - list_url = url_for('invenio_records_rest.acor_list') + list_url = url_for("invenio_records_rest.acor_list") 
response = client.get(list_url, headers=rero_json_header) assert response.status_code == 200 data = get_json(response) - record = data.get('hits', {}).get('hits', [])[0] - assert record.get('metadata', {})\ - .get('order_lines', [])[0] \ - .get('account', {})\ - .get('name') == account_a['name'] - assert record.get('metadata', {}) \ - .get('order_lines', [])[0] \ - .get('document', {}) \ - .get('pid') == document.pid + record = data.get("hits", {}).get("hits", [])[0] + assert ( + record.get("metadata", {}) + .get("order_lines", [])[0] + .get("account", {}) + .get("name") + == account_a["name"] + ) + assert ( + record.get("metadata", {}) + .get("order_lines", [])[0] + .get("document", {}) + .get("pid") + == document.pid + ) # RESET RESOURCES - _del_resource(client, 'acol', order_line_1.pid) - _del_resource(client, 'acor', order.pid) - _del_resource(client, 'acac', account_a.pid) + _del_resource(client, "acol", order_line_1.pid) + _del_resource(client, "acor", order.pid) + _del_resource(client, "acac", account_a.pid) diff --git a/tests/api/budgets/test_budgets_permissions.py b/tests/api/budgets/test_budgets_permissions.py index f6febff37b..450fa57a60 100644 --- a/tests/api/budgets/test_budgets_permissions.py +++ b/tests/api/budgets/test_budgets_permissions.py @@ -21,13 +21,11 @@ from flask_security import login_user from utils import check_permission -from rero_ils.modules.acquisition.budgets.permissions import \ - BudgetPermissionPolicy +from rero_ils.modules.acquisition.budgets.permissions import BudgetPermissionPolicy def test_budget_permissions( - patron_martigny, librarian_martigny, - budget_2018_martigny, budget_2020_sion + patron_martigny, librarian_martigny, budget_2018_martigny, budget_2020_sion ): """Test budget permissions class.""" @@ -35,37 +33,53 @@ def test_budget_permissions( identity_changed.send( current_app._get_current_object(), identity=AnonymousIdentity() ) - check_permission(BudgetPermissionPolicy, { - 'search': False, - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, {}) + check_permission( + BudgetPermissionPolicy, + { + "search": False, + "read": False, + "create": False, + "update": False, + "delete": False, + }, + {}, + ) # Patron :: can't operate any operation about Budget login_user(patron_martigny.user) - check_permission(BudgetPermissionPolicy, { - 'search': False, - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, budget_2018_martigny) + check_permission( + BudgetPermissionPolicy, + { + "search": False, + "read": False, + "create": False, + "update": False, + "delete": False, + }, + budget_2018_martigny, + ) # Staff members :: can only search and read (only org record) login_user(librarian_martigny.user) - check_permission(BudgetPermissionPolicy, { - 'search': True, - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, budget_2018_martigny) - check_permission(BudgetPermissionPolicy, { - 'search': True, - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, budget_2020_sion) + check_permission( + BudgetPermissionPolicy, + { + "search": True, + "read": True, + "create": False, + "update": False, + "delete": False, + }, + budget_2018_martigny, + ) + check_permission( + BudgetPermissionPolicy, + { + "search": True, + "read": False, + "create": False, + "update": False, + "delete": False, + }, + budget_2020_sion, + ) diff --git a/tests/api/budgets/test_budgets_rest.py b/tests/api/budgets/test_budgets_rest.py index ac6c791b18..c54b43fe65 100644 --- 
a/tests/api/budgets/test_budgets_rest.py +++ b/tests/api/budgets/test_budgets_rest.py @@ -22,119 +22,107 @@ import mock from flask import url_for from invenio_accounts.testutils import login_user_via_session -from utils import VerifyRecordPermissionPatch, get_json, postdata, \ - to_relative_url +from utils import VerifyRecordPermissionPatch, get_json, postdata, to_relative_url -def test_budgets_permissions(client, budget_2020_martigny, - json_header): +def test_budgets_permissions(client, budget_2020_martigny, json_header): """Test record permissions.""" - item_url = url_for('invenio_records_rest.budg_item', pid_value='budg1') + item_url = url_for("invenio_records_rest.budg_item", pid_value="budg1") res = client.get(item_url) assert res.status_code == 401 - res, _ = postdata( - client, - 'invenio_records_rest.budg_list', - {} - ) + res, _ = postdata(client, "invenio_records_rest.budg_list", {}) assert res.status_code == 401 res = client.put( - url_for('invenio_records_rest.budg_item', pid_value='budg1'), + url_for("invenio_records_rest.budg_item", pid_value="budg1"), data={}, - headers=json_header + headers=json_header, ) res = client.delete(item_url) assert res.status_code == 401 -@mock.patch('invenio_records_rest.views.verify_record_permission', - mock.MagicMock(return_value=VerifyRecordPermissionPatch)) +@mock.patch( + "invenio_records_rest.views.verify_record_permission", + mock.MagicMock(return_value=VerifyRecordPermissionPatch), +) def test_budgets_get(client, budget_2020_martigny): """Test record retrieval.""" - item_url = url_for('invenio_records_rest.budg_item', pid_value='budg1') + item_url = url_for("invenio_records_rest.budg_item", pid_value="budg1") budget = budget_2020_martigny res = client.get(item_url) assert res.status_code == 200 - assert res.headers['ETag'] == f'"{budget.revision_id}"' + assert res.headers["ETag"] == f'"{budget.revision_id}"' data = get_json(res) - assert budget.dumps() == data['metadata'] + assert budget.dumps() == data["metadata"] # Check metadata - for k in ['created', 'updated', 'metadata', 'links']: + for k in ["created", "updated", "metadata", "links"]: assert k in data # Check self links - res = client.get(to_relative_url(data['links']['self'])) + res = client.get(to_relative_url(data["links"]["self"])) assert res.status_code == 200 assert data == get_json(res) - assert budget.dumps() == data['metadata'] + assert budget.dumps() == data["metadata"] - list_url = url_for('invenio_records_rest.budg_list', pid='budg1') + list_url = url_for("invenio_records_rest.budg_list", pid="budg1") res = client.get(list_url) assert res.status_code == 200 data = get_json(res) - assert data['hits']['hits'][0]['metadata'] == budget.replace_refs() + assert data["hits"]["hits"][0]["metadata"] == budget.replace_refs() -@mock.patch('invenio_records_rest.views.verify_record_permission', - mock.MagicMock(return_value=VerifyRecordPermissionPatch)) -def test_budgets_post_put_delete(client, - budget_2018_martigny, - json_header): +@mock.patch( + "invenio_records_rest.views.verify_record_permission", + mock.MagicMock(return_value=VerifyRecordPermissionPatch), +) +def test_budgets_post_put_delete(client, budget_2018_martigny, json_header): """Test record creation, update and deletion.""" # Create record / POST - item_url = url_for('invenio_records_rest.budg_item', pid_value='1') - list_url = url_for('invenio_records_rest.budg_list', q='pid:1') - - budget_2018_martigny['pid'] = '1' - res, data = postdata( - client, - 'invenio_records_rest.budg_list', - budget_2018_martigny - ) + item_url =
url_for("invenio_records_rest.budg_item", pid_value="1") + list_url = url_for("invenio_records_rest.budg_list", q="pid:1") + + budget_2018_martigny["pid"] = "1" + res, data = postdata(client, "invenio_records_rest.budg_list", budget_2018_martigny) assert res.status_code == 201 # Check that the returned record matches the given data - assert data['metadata'] == budget_2018_martigny + assert data["metadata"] == budget_2018_martigny res = client.get(item_url) assert res.status_code == 200 data = get_json(res) - assert budget_2018_martigny == data['metadata'] + assert budget_2018_martigny == data["metadata"] # Update record/PUT data = budget_2018_martigny - data['name'] = 'Test Name' - res = client.put( - item_url, - data=json.dumps(data), - headers=json_header - ) + data["name"] = "Test Name" + res = client.put(item_url, data=json.dumps(data), headers=json_header) assert res.status_code == 200 # assert res.headers['ETag'] != f'"librarie.revision_id}"' # Check that the returned record matches the given data data = get_json(res) - assert data['metadata']['name'] == 'Test Name' + assert data["metadata"]["name"] == "Test Name" res = client.get(item_url) assert res.status_code == 200 data = get_json(res) - assert data['metadata']['name'] == 'Test Name' + assert data["metadata"]["name"] == "Test Name" res = client.get(list_url) assert res.status_code == 200 - data = get_json(res)['hits']['hits'][0] - assert data['metadata']['name'] == 'Test Name' + data = get_json(res)["hits"]["hits"][0] + assert data["metadata"]["name"] == "Test Name" # Delete record/DELETE res = client.delete(item_url) @@ -144,155 +132,140 @@ def test_budgets_post_put_delete(client, assert res.status_code == 410 -def test_budgets_can_delete( - client, budget_2020_martigny, acq_account_fiction_martigny): +def test_budgets_can_delete(client, budget_2020_martigny, acq_account_fiction_martigny): """Test can delete an acq account.""" can, reasons = budget_2020_martigny.can_delete assert not can - assert reasons['links']['acq_accounts'] - assert reasons['others']['is_default'] + assert reasons["links"]["acq_accounts"] + assert reasons["others"]["is_default"] def test_filtered_budgets_get( - client, librarian_martigny, budget_2020_martigny, - librarian_sion, budget_2020_sion): + client, librarian_martigny, budget_2020_martigny, librarian_sion, budget_2020_sion +): """Test acq accounts filter by organisation.""" - list_url = url_for('invenio_records_rest.budg_list') + list_url = url_for("invenio_records_rest.budg_list") res = client.get(list_url) assert res.status_code == 401 # Martigny login_user_via_session(client, librarian_martigny.user) - list_url = url_for('invenio_records_rest.budg_list') + list_url = url_for("invenio_records_rest.budg_list") res = client.get(list_url) assert res.status_code == 200 data = get_json(res) - assert data['hits']['total']['value'] == 2 + assert data["hits"]["total"]["value"] == 2 # Sion login_user_via_session(client, librarian_sion.user) - list_url = url_for('invenio_records_rest.budg_list') + list_url = url_for("invenio_records_rest.budg_list") res = client.get(list_url) assert res.status_code == 200 data = get_json(res) - assert data['hits']['total']['value'] == 1 + assert data["hits"]["total"]["value"] == 1 -def test_budget_secure_api(client, json_header, - budget_2020_martigny, - librarian_martigny, - librarian_sion): +def test_budget_secure_api( + client, json_header, budget_2020_martigny, librarian_martigny, librarian_sion +): """Test acq account secure api access.""" # Martigny 
login_user_via_session(client, librarian_martigny.user) - record_url = url_for('invenio_records_rest.budg_item', - pid_value=budget_2020_martigny.pid) + record_url = url_for( + "invenio_records_rest.budg_item", pid_value=budget_2020_martigny.pid + ) res = client.get(record_url) assert res.status_code == 200 # Sion login_user_via_session(client, librarian_sion.user) - record_url = url_for('invenio_records_rest.budg_item', - pid_value=budget_2020_martigny.pid) + record_url = url_for( + "invenio_records_rest.budg_item", pid_value=budget_2020_martigny.pid + ) res = client.get(record_url) assert res.status_code == 403 -def test_budget_secure_api_create(client, json_header, - budget_2020_martigny, - librarian_martigny, - librarian_sion, - budget_2019_martigny, - system_librarian_martigny): +def test_budget_secure_api_create( + client, + json_header, + budget_2020_martigny, + librarian_martigny, + librarian_sion, + budget_2019_martigny, + system_librarian_martigny, +): """Test budget secure api create.""" # Martigny login_user_via_session(client, librarian_martigny.user) - post_entrypoint = 'invenio_records_rest.budg_list' + post_entrypoint = "invenio_records_rest.budg_list" - del budget_2019_martigny['pid'] - res, _ = postdata( - client, - post_entrypoint, - budget_2019_martigny - ) + del budget_2019_martigny["pid"] + res, _ = postdata(client, post_entrypoint, budget_2019_martigny) assert res.status_code == 403 - del budget_2020_martigny['pid'] - res, _ = postdata( - client, - post_entrypoint, - budget_2020_martigny - ) + del budget_2020_martigny["pid"] + res, _ = postdata(client, post_entrypoint, budget_2020_martigny) assert res.status_code == 403 login_user_via_session(client, system_librarian_martigny.user) - res, _ = postdata( - client, - post_entrypoint, - budget_2020_martigny - ) + res, _ = postdata(client, post_entrypoint, budget_2020_martigny) assert res.status_code == 403 # Sion login_user_via_session(client, librarian_sion.user) - res, _ = postdata( - client, - post_entrypoint, - budget_2019_martigny - ) + res, _ = postdata(client, post_entrypoint, budget_2019_martigny) assert res.status_code == 403 -def test_budget_secure_api_update(client, - budget_2017_martigny, - librarian_martigny, - system_librarian_martigny, - system_librarian_sion, - librarian_sion, - json_header): +def test_budget_secure_api_update( + client, + budget_2017_martigny, + librarian_martigny, + system_librarian_martigny, + system_librarian_sion, + librarian_sion, + json_header, +): """Test budget secure api update.""" # Martigny login_user_via_session(client, system_librarian_martigny.user) - record_url = url_for('invenio_records_rest.budg_item', - pid_value=budget_2017_martigny.pid) + record_url = url_for( + "invenio_records_rest.budg_item", pid_value=budget_2017_martigny.pid + ) data = budget_2017_martigny - data['name'] = 'Test Name' - res = client.put( - record_url, - data=json.dumps(data), - headers=json_header - ) + data["name"] = "Test Name" + res = client.put(record_url, data=json.dumps(data), headers=json_header) assert res.status_code == 403 # Sion login_user_via_session(client, system_librarian_sion.user) - res = client.put( - record_url, - data=json.dumps(data), - headers=json_header - ) + res = client.put(record_url, data=json.dumps(data), headers=json_header) assert res.status_code == 403 -def test_budget_secure_api_delete(client, - budget_2017_martigny, - librarian_martigny, - librarian_sion, - system_librarian_martigny, - json_header): +def test_budget_secure_api_delete( + client, +
budget_2017_martigny, + librarian_martigny, + librarian_sion, + system_librarian_martigny, + json_header, +): """Test budget secure api delete.""" # Martigny login_user_via_session(client, librarian_martigny.user) - record_url = url_for('invenio_records_rest.budg_item', - pid_value=budget_2017_martigny.pid) + record_url = url_for( + "invenio_records_rest.budg_item", pid_value=budget_2017_martigny.pid + ) res = client.delete(record_url) assert res.status_code == 403 diff --git a/tests/api/circ_policies/test_circ_policies_permissions.py b/tests/api/circ_policies/test_circ_policies_permissions.py index f75800955e..9a5f26c12c 100644 --- a/tests/api/circ_policies/test_circ_policies_permissions.py +++ b/tests/api/circ_policies/test_circ_policies_permissions.py @@ -22,28 +22,31 @@ from invenio_accounts.testutils import login_user_via_session from utils import check_permission, get_json -from rero_ils.modules.circ_policies.permissions import \ - CirculationPolicyPermissionPolicy as CiPoPermissionPolicy +from rero_ils.modules.circ_policies.permissions import ( + CirculationPolicyPermissionPolicy as CiPoPermissionPolicy, +) -def test_circ_policies_permissions_api(client, librarian_martigny, - system_librarian_martigny, - circ_policy_short_martigny, - circ_policy_default_sion): +def test_circ_policies_permissions_api( + client, + librarian_martigny, + system_librarian_martigny, + circ_policy_short_martigny, + circ_policy_default_sion, +): """Test circulation policies permissions api.""" cipo_permissions_url = url_for( - 'api_blueprint.permissions', - route_name='circ_policies' + "api_blueprint.permissions", route_name="circ_policies" ) cipo_martigny_permissions_url = url_for( - 'api_blueprint.permissions', - route_name='circ_policies', - record_pid=circ_policy_short_martigny.pid + "api_blueprint.permissions", + route_name="circ_policies", + record_pid=circ_policy_short_martigny.pid, ) cipo_sion_permissions_url = url_for( - 'api_blueprint.permissions', - route_name='circ_policies', - record_pid=circ_policy_default_sion.pid + "api_blueprint.permissions", + route_name="circ_policies", + record_pid=circ_policy_default_sion.pid, ) # Not logged @@ -59,13 +62,13 @@ def test_circ_policies_permissions_api(client, librarian_martigny, res = client.get(cipo_martigny_permissions_url) assert res.status_code == 200 data = get_json(res) - for action in ['list', 'read']: - assert data[action]['can'] - for action in ['create', 'update', 'delete']: - assert not data[action]['can'] + for action in ["list", "read"]: + assert data[action]["can"] + for action in ["create", "update", "delete"]: + assert not data[action]["can"] res = client.get(cipo_sion_permissions_url) data = get_json(res) - assert not data['read']['can'] + assert not data["read"]["can"] # Logged as system librarian # * sys_lib can do anything about circulation policies for its own organisation @@ -74,20 +77,22 @@ res = client.get(cipo_martigny_permissions_url) assert res.status_code == 200 data = get_json(res) - for action in ['list', 'read', 'create', 'update', 'delete']: - assert data[action]['can'] + for action in ["list", "read", "create", "update", "delete"]: + assert data[action]["can"] res = client.get(cipo_sion_permissions_url) assert res.status_code == 200 data = get_json(res) - for action in ['update', 'delete']: - assert not data[action]['can'] + for action in ["update", "delete"]: + assert not data[action]["can"] -def test_circ_policies_permissions(patron_martigny, -
librarian_martigny, - system_librarian_martigny, - circ_policy_short_martigny, - circ_policy_default_sion): +def test_circ_policies_permissions( + patron_martigny, + librarian_martigny, + system_librarian_martigny, + circ_policy_short_martigny, + circ_policy_default_sion, +): """Test circulation policies permission class.""" # Anonymous user # An anonymous user can't operate any operation about circulation @@ -95,63 +100,57 @@ def test_circ_policies_permissions(patron_martigny, identity_changed.send( current_app._get_current_object(), identity=AnonymousIdentity() ) - check_permission(CiPoPermissionPolicy, {'search': False}, None) - check_permission(CiPoPermissionPolicy, {'create': False}, {}) - check_permission(CiPoPermissionPolicy, { - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, circ_policy_short_martigny) + check_permission(CiPoPermissionPolicy, {"search": False}, None) + check_permission(CiPoPermissionPolicy, {"create": False}, {}) + check_permission( + CiPoPermissionPolicy, + {"read": False, "create": False, "update": False, "delete": False}, + circ_policy_short_martigny, + ) # Patron # A simple patron can't operate any operation about circulation policies login_user(patron_martigny.user) - check_permission(CiPoPermissionPolicy, {'search': False}, None) - check_permission(CiPoPermissionPolicy, {'create': False}, {}) - check_permission(CiPoPermissionPolicy, { - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, circ_policy_short_martigny) + check_permission(CiPoPermissionPolicy, {"search": False}, None) + check_permission(CiPoPermissionPolicy, {"create": False}, {}) + check_permission( + CiPoPermissionPolicy, + {"read": False, "create": False, "update": False, "delete": False}, + circ_policy_short_martigny, + ) # Librarian # - search : any circulation policies regardless of the owning organisation # - read : only circulation policies for its own organisation # - create/update/delete: disallowed login_user(librarian_martigny.user) - check_permission(CiPoPermissionPolicy, {'search': True}, None) - check_permission(CiPoPermissionPolicy, {'create': False}, {}) - check_permission(CiPoPermissionPolicy, { - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, circ_policy_short_martigny) - check_permission(CiPoPermissionPolicy, { - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, circ_policy_default_sion) + check_permission(CiPoPermissionPolicy, {"search": True}, None) + check_permission(CiPoPermissionPolicy, {"create": False}, {}) + check_permission( + CiPoPermissionPolicy, + {"read": True, "create": False, "update": False, "delete": False}, + circ_policy_short_martigny, + ) + check_permission( + CiPoPermissionPolicy, + {"read": False, "create": False, "update": False, "delete": False}, + circ_policy_default_sion, + ) # SystemLibrarian # - search : any circulation policies regardless of the owning organisation # - read/create/update/delete : only circulation policies for its own # organisation login_user(system_librarian_martigny.user) - check_permission(CiPoPermissionPolicy, {'search': True}, None) - check_permission(CiPoPermissionPolicy, {'create': True}, {}) - check_permission(CiPoPermissionPolicy, { - 'read': True, - 'create': True, - 'update': True, - 'delete': True - }, circ_policy_short_martigny) - check_permission(CiPoPermissionPolicy, { - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, circ_policy_default_sion) + check_permission(CiPoPermissionPolicy, {"search": True}, None)
+ check_permission(CiPoPermissionPolicy, {"create": True}, {}) + check_permission( + CiPoPermissionPolicy, + {"read": True, "create": True, "update": True, "delete": True}, + circ_policy_short_martigny, + ) + check_permission( + CiPoPermissionPolicy, + {"read": False, "create": False, "update": False, "delete": False}, + circ_policy_default_sion, + ) diff --git a/tests/api/circ_policies/test_circ_policies_rest.py b/tests/api/circ_policies/test_circ_policies_rest.py index 332e3abea7..119cad587d 100644 --- a/tests/api/circ_policies/test_circ_policies_rest.py +++ b/tests/api/circ_policies/test_circ_policies_rest.py @@ -22,149 +22,150 @@ import mock from flask import url_for from invenio_accounts.testutils import login_user_via_session -from utils import VerifyRecordPermissionPatch, get_json, postdata, \ - to_relative_url +from utils import VerifyRecordPermissionPatch, get_json, postdata, to_relative_url -@mock.patch('invenio_records_rest.views.verify_record_permission', - mock.MagicMock(return_value=VerifyRecordPermissionPatch)) +@mock.patch( + "invenio_records_rest.views.verify_record_permission", + mock.MagicMock(return_value=VerifyRecordPermissionPatch), +) def test_circ_policies_get(client, circ_policy_default_martigny): """Test policy retrieval.""" circ_policy = circ_policy_default_martigny - item_url = url_for('invenio_records_rest.cipo_item', pid_value='cipo1') + item_url = url_for("invenio_records_rest.cipo_item", pid_value="cipo1") res = client.get(item_url) assert res.status_code == 200 - assert res.headers['ETag'] == f'"{circ_policy.revision_id}"' + assert res.headers["ETag"] == f'"{circ_policy.revision_id}"' data = get_json(res) - assert circ_policy.dumps() == data['metadata'] + assert circ_policy.dumps() == data["metadata"] # Check metadata - for k in ['created', 'updated', 'metadata', 'links']: + for k in ["created", "updated", "metadata", "links"]: assert k in data # Check self links - res = client.get(to_relative_url(data['links']['self'])) + res = client.get(to_relative_url(data["links"]["self"])) assert res.status_code == 200 json = get_json(res) assert data == json - assert circ_policy.dumps() == data['metadata'] + assert circ_policy.dumps() == data["metadata"] - list_url = url_for('invenio_records_rest.cipo_list', pid='cipo1') + list_url = url_for("invenio_records_rest.cipo_list", pid="cipo1") res = client.get(list_url) assert res.status_code == 200 data = get_json(res) - assert data['hits']['hits'][0]['metadata'] == circ_policy.replace_refs() + assert data["hits"]["hits"][0]["metadata"] == circ_policy.replace_refs() def test_filtered_circ_policies_get( - client, librarian_martigny, circ_policy_default_martigny, - circ_policy_short_martigny, circ_policy_temp_martigny, - librarian_sion, circ_policy_default_sion): + client, + librarian_martigny, + circ_policy_default_martigny, + circ_policy_short_martigny, + circ_policy_temp_martigny, + librarian_sion, + circ_policy_default_sion, +): """Test circulation policies filter by organisation.""" # Martigny login_user_via_session(client, librarian_martigny.user) - list_url = url_for('invenio_records_rest.cipo_list') + list_url = url_for("invenio_records_rest.cipo_list") res = client.get(list_url) assert res.status_code == 200 data = get_json(res) - assert data['hits']['total']['value'] == 3 + assert data["hits"]["total"]["value"] == 3 # Sion login_user_via_session(client, librarian_sion.user) - list_url = url_for('invenio_records_rest.cipo_list') + list_url = url_for("invenio_records_rest.cipo_list") res = client.get(list_url) 
assert res.status_code == 200 data = get_json(res) - assert data['hits']['total']['value'] == 1 + assert data["hits"]["total"]["value"] == 1 -@mock.patch('invenio_records_rest.views.verify_record_permission', - mock.MagicMock(return_value=VerifyRecordPermissionPatch)) -def test_circ_policies_post_put_delete(client, org_martigny, - circ_policy_short_martigny_data, - json_header): +@mock.patch( + "invenio_records_rest.views.verify_record_permission", + mock.MagicMock(return_value=VerifyRecordPermissionPatch), +) +def test_circ_policies_post_put_delete( + client, org_martigny, circ_policy_short_martigny_data, json_header +): """Test policy creation, update and deletion.""" # Create policy / POST - item_url = url_for('invenio_records_rest.cipo_item', pid_value='1') - list_url = url_for('invenio_records_rest.cipo_list', q='pid:1') - del circ_policy_short_martigny_data['pid'] + item_url = url_for("invenio_records_rest.cipo_item", pid_value="1") + list_url = url_for("invenio_records_rest.cipo_list", q="pid:1") + del circ_policy_short_martigny_data["pid"] res, data = postdata( - client, - 'invenio_records_rest.cipo_list', - circ_policy_short_martigny_data + client, "invenio_records_rest.cipo_list", circ_policy_short_martigny_data ) assert res.status_code == 201 # Check that the returned policy matches the given data - circ_policy_short_martigny_data['pid'] = '1' - assert data['metadata'] == circ_policy_short_martigny_data + circ_policy_short_martigny_data["pid"] = "1" + assert data["metadata"] == circ_policy_short_martigny_data res = client.get(item_url) assert res.status_code == 200 data = get_json(res) - assert circ_policy_short_martigny_data == data['metadata'] + assert circ_policy_short_martigny_data == data["metadata"] # Update policy/PUT data = circ_policy_short_martigny_data - data['name'] = 'Test Name' - res = client.put( - item_url, - data=json.dumps(data), - headers=json_header - ) + data["name"] = "Test Name" + res = client.put(item_url, data=json.dumps(data), headers=json_header) assert res.status_code == 200 # assert res.headers['ETag'] != f'"{librarie.revision_id}"' # Check that the returned policy matches the given data data = get_json(res) - assert data['metadata']['name'] == 'Test Name' + assert data["metadata"]["name"] == "Test Name" res = client.get(item_url) assert res.status_code == 200 data = get_json(res) - assert data['metadata']['name'] == 'Test Name' + assert data["metadata"]["name"] == "Test Name" res = client.get(list_url) assert res.status_code == 200 - data = get_json(res)['hits']['hits'][0] - assert data['metadata']['name'] == 'Test Name' + data = get_json(res)["hits"]["hits"][0] + assert data["metadata"]["name"] == "Test Name" # Delete policy/DELETE res = client.delete(item_url) assert res.status_code == 204 -@mock.patch('rero_ils.modules.decorators.login_and_librarian', - mock.MagicMock()) +@mock.patch("rero_ils.modules.decorators.login_and_librarian", mock.MagicMock()) def test_circ_policies_name_validate(client): """Test policy name validation.""" - url = url_for('circ_policies.name_validate', name='Default') + url = url_for("circ_policies.name_validate", name="Default") class current_librarian: class organisation: - pid = 'org1' + pid = "org1" + with mock.patch( - 'rero_ils.modules.circ_policies.views.current_librarian', - current_librarian + "rero_ils.modules.circ_policies.views.current_librarian", current_librarian ): res = client.get(url) assert res.status_code == 200 - assert get_json(res) == {'name': 'Default'} + assert get_json(res) == {"name": "Default"} class current_librarian:
class organisation: - pid = 'does not exists' + pid = "does not exists" + with mock.patch( - 'rero_ils.modules.circ_policies.views.current_librarian', - current_librarian + "rero_ils.modules.circ_policies.views.current_librarian", current_librarian ): res = client.get(url) assert res.status_code == 200 - assert get_json(res) == {'name': None} + assert get_json(res) == {"name": None} diff --git a/tests/api/circulation/scenarios/test_scenario_a.py b/tests/api/circulation/scenarios/test_scenario_a.py index 99b8fdf0e6..2522731210 100644 --- a/tests/api/circulation/scenarios/test_scenario_a.py +++ b/tests/api/circulation/scenarios/test_scenario_a.py @@ -19,7 +19,7 @@ from invenio_accounts.testutils import login_user_via_session -from utils import flush_index, get_json, postdata +from utils import get_json, postdata from rero_ils.modules.items.models import ItemStatus from rero_ils.modules.loans.api import Loan @@ -27,9 +27,14 @@ def test_circ_scenario_a( - client, librarian_martigny, lib_martigny, - patron_martigny, loc_public_martigny, item_lib_martigny, - circulation_policies): + client, + librarian_martigny, + lib_martigny, + patron_martigny, + loc_public_martigny, + item_lib_martigny, + circulation_policies, +): """Test the first circulation scenario.""" # https://github.com/rero/rero-ils/blob/dev/doc/circulation/scenarios.md # A request is made on an on-shelf item that has no requests, to be picked # up at the owning library. and returned on-time at the owning library login_user_via_session(client, librarian_martigny.user) circ_params = { - 'item_pid': item_lib_martigny.pid, - 'patron_pid': patron_martigny.pid, - 'pickup_location_pid': loc_public_martigny.pid, - 'transaction_library_pid': lib_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid + "item_pid": item_lib_martigny.pid, + "patron_pid": patron_martigny.pid, + "pickup_location_pid": loc_public_martigny.pid, + "transaction_library_pid": lib_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, } # ADD_REQUEST_1.1 - res, data = postdata( client, 'api_item.librarian_request', dict(circ_params)) + res, data = postdata(client, "api_item.librarian_request", dict(circ_params)) assert res.status_code == 200 - request_loan_pid = get_json(res)['action_applied']['request']['pid'] + request_loan_pid = get_json(res)["action_applied"]["request"]["pid"] # VALIDATE_1.2 - circ_params['pid'] = request_loan_pid - res, data = postdata( - client, 'api_item.validate_request', dict(circ_params)) + circ_params["pid"] = request_loan_pid + res, data = postdata(client, "api_item.validate_request", dict(circ_params)) assert res.status_code == 200 - loan = Loan(get_json(res)['action_applied']['validate']) + loan = Loan(get_json(res)["action_applied"]["validate"]) assert loan.checkout_date is None # CHECKOUT_2.1 - res, data = postdata( - client, 'api_item.checkout', dict(circ_params)) + res, data = postdata(client, "api_item.checkout", dict(circ_params)) assert res.status_code == 200 - loan = Loan(get_json(res)['action_applied']['checkout']) - flush_index(OperationLogsSearch.Meta.index) + loan = Loan(get_json(res)["action_applied"]["checkout"]) + OperationLogsSearch.flush_and_refresh() assert loan.checkout_date # CHECKIN_3.1.1 - res, data = postdata( - client, 'api_item.checkin', dict(circ_params)) + res, data = postdata(client, "api_item.checkin", dict(circ_params)) assert res.status_code == 200 assert item_lib_martigny.status == ItemStatus.ON_SHELF diff --git
a/tests/api/circulation/scenarios/test_scenario_b.py b/tests/api/circulation/scenarios/test_scenario_b.py index de18f66cb2..9c2d8a38df 100644 --- a/tests/api/circulation/scenarios/test_scenario_b.py +++ b/tests/api/circulation/scenarios/test_scenario_b.py @@ -25,9 +25,17 @@ def test_circ_scenario_b( - client, librarian_martigny, lib_martigny, lib_saxon, - patron_martigny, loc_public_martigny, item_lib_martigny, - circulation_policies, loc_public_saxon, librarian_saxon): + client, + librarian_martigny, + lib_martigny, + lib_saxon, + patron_martigny, + loc_public_martigny, + item_lib_martigny, + circulation_policies, + loc_public_saxon, + librarian_saxon, +): """Test the second circulation scenario.""" # https://github.com/rero/rero-ils/blob/dev/doc/circulation/scenarios.md # A request is made on an item of library A, on-shelf without previous @@ -38,53 +46,47 @@ def test_circ_scenario_b( login_user_via_session(client, librarian_martigny.user) circ_params = { - 'item_pid': item_lib_martigny.pid, - 'patron_pid': patron_martigny.pid, - 'pickup_location_pid': loc_public_saxon.pid, - 'transaction_library_pid': lib_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid + "item_pid": item_lib_martigny.pid, + "patron_pid": patron_martigny.pid, + "pickup_location_pid": loc_public_saxon.pid, + "transaction_library_pid": lib_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, } - res, data = postdata( - client, 'api_item.librarian_request', dict(circ_params)) + res, data = postdata(client, "api_item.librarian_request", dict(circ_params)) assert res.status_code == 200 - request_loan_pid = get_json(res)['action_applied']['request']['pid'] + request_loan_pid = get_json(res)["action_applied"]["request"]["pid"] - circ_params['pid'] = request_loan_pid - res, data = postdata( - client, 'api_item.validate_request', dict(circ_params)) + circ_params["pid"] = request_loan_pid + res, data = postdata(client, "api_item.validate_request", dict(circ_params)) assert res.status_code == 200 login_user_via_session(client, librarian_saxon.user) circ_params = { - 'item_pid': item_lib_martigny.pid, - 'patron_pid': patron_martigny.pid, - 'pickup_location_pid': loc_public_saxon.pid, - 'transaction_library_pid': lib_saxon.pid, - 'transaction_user_pid': librarian_saxon.pid + "item_pid": item_lib_martigny.pid, + "patron_pid": patron_martigny.pid, + "pickup_location_pid": loc_public_saxon.pid, + "transaction_library_pid": lib_saxon.pid, + "transaction_user_pid": librarian_saxon.pid, } - res, data = postdata( - client, 'api_item.checkin', dict(circ_params)) + res, data = postdata(client, "api_item.checkin", dict(circ_params)) assert res.status_code == 200 - res, data = postdata( - client, 'api_item.checkout', dict(circ_params)) + res, data = postdata(client, "api_item.checkout", dict(circ_params)) assert res.status_code == 200 - res, data = postdata( - client, 'api_item.checkin', dict(circ_params)) + res, data = postdata(client, "api_item.checkin", dict(circ_params)) assert res.status_code == 200 assert item_lib_martigny.status == ItemStatus.ON_SHELF login_user_via_session(client, librarian_martigny.user) circ_params = { - 'item_pid': item_lib_martigny.pid, - 'patron_pid': patron_martigny.pid, - 'pickup_location_pid': loc_public_martigny.pid, - 'transaction_library_pid': lib_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid + "item_pid": item_lib_martigny.pid, + "patron_pid": patron_martigny.pid, + "pickup_location_pid": loc_public_martigny.pid, + "transaction_library_pid": lib_martigny.pid, +
"transaction_user_pid": librarian_martigny.pid, } - res, data = postdata( - client, 'api_item.checkin', dict(circ_params)) + res, data = postdata(client, "api_item.checkin", dict(circ_params)) assert res.status_code == 200 assert item_lib_martigny.status == ItemStatus.ON_SHELF diff --git a/tests/api/circulation/scenarios/test_scenario_c.py b/tests/api/circulation/scenarios/test_scenario_c.py index 2e5fbec115..1cbbedc345 100644 --- a/tests/api/circulation/scenarios/test_scenario_c.py +++ b/tests/api/circulation/scenarios/test_scenario_c.py @@ -26,11 +26,21 @@ def test_circ_scenario_c( - client, librarian_martigny, lib_martigny, lib_saxon, - patron_martigny, loc_public_martigny, item_lib_martigny, - circulation_policies, loc_public_saxon, librarian_saxon, - patron2_martigny, lib_fully, loc_public_fully, - librarian_fully): + client, + librarian_martigny, + lib_martigny, + lib_saxon, + patron_martigny, + loc_public_martigny, + item_lib_martigny, + circulation_policies, + loc_public_saxon, + librarian_saxon, + patron2_martigny, + lib_fully, + loc_public_fully, + librarian_fully, +): """Test the third circulation scenario.""" # https://github.com/rero/rero-ils/blob/dev/doc/circulation/scenarios.md # A request is made on item of library A, on-shelf without previous @@ -45,97 +55,87 @@ def test_circ_scenario_c( login_user_via_session(client, librarian_martigny.user) circ_params = { - 'item_pid': item_lib_martigny.pid, - 'patron_pid': patron_martigny.pid, - 'pickup_location_pid': loc_public_saxon.pid, - 'transaction_library_pid': lib_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid + "item_pid": item_lib_martigny.pid, + "patron_pid": patron_martigny.pid, + "pickup_location_pid": loc_public_saxon.pid, + "transaction_library_pid": lib_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, } - res, data = postdata( - client, 'api_item.librarian_request', dict(circ_params)) + res, data = postdata(client, "api_item.librarian_request", dict(circ_params)) assert res.status_code == 200 - request_loan_pid = get_json(res)['action_applied']['request']['pid'] + request_loan_pid = get_json(res)["action_applied"]["request"]["pid"] - circ_params['pid'] = request_loan_pid - res, data = postdata( - client, 'api_item.validate_request', dict(circ_params)) + circ_params["pid"] = request_loan_pid + res, data = postdata(client, "api_item.validate_request", dict(circ_params)) assert res.status_code == 200 login_user_via_session(client, librarian_saxon.user) circ_params = { - 'item_pid': item_lib_martigny.pid, - 'patron_pid': patron_martigny.pid, - 'pickup_location_pid': loc_public_saxon.pid, - 'transaction_library_pid': lib_saxon.pid, - 'transaction_user_pid': librarian_saxon.pid + "item_pid": item_lib_martigny.pid, + "patron_pid": patron_martigny.pid, + "pickup_location_pid": loc_public_saxon.pid, + "transaction_library_pid": lib_saxon.pid, + "transaction_user_pid": librarian_saxon.pid, } - res, data = postdata( - client, 'api_item.checkin', dict(circ_params)) + res, data = postdata(client, "api_item.checkin", dict(circ_params)) assert res.status_code == 200 - res, data = postdata( - client, 'api_item.checkout', dict(circ_params)) + res, data = postdata(client, "api_item.checkout", dict(circ_params)) assert res.status_code == 200 circ_params = { - 'item_pid': item_lib_martigny.pid, - 'patron_pid': patron2_martigny.pid, - 'pickup_location_pid': loc_public_fully.pid, - 'transaction_library_pid': lib_fully.pid, - 'transaction_user_pid': librarian_fully.pid + "item_pid": item_lib_martigny.pid, + 
"patron_pid": patron2_martigny.pid, + "pickup_location_pid": loc_public_fully.pid, + "transaction_library_pid": lib_fully.pid, + "transaction_user_pid": librarian_fully.pid, } - res, data = postdata( - client, 'api_item.librarian_request', dict(circ_params)) + res, data = postdata(client, "api_item.librarian_request", dict(circ_params)) assert res.status_code == 200 - fully_loan_pid = get_json(res)['action_applied']['request']['pid'] + fully_loan_pid = get_json(res)["action_applied"]["request"]["pid"] circ_params = { - 'item_pid': item_lib_martigny.pid, - 'patron_pid': patron_martigny.pid, - 'pickup_location_pid': loc_public_saxon.pid, - 'transaction_library_pid': lib_saxon.pid, - 'transaction_user_pid': librarian_saxon.pid + "item_pid": item_lib_martigny.pid, + "patron_pid": patron_martigny.pid, + "pickup_location_pid": loc_public_saxon.pid, + "transaction_library_pid": lib_saxon.pid, + "transaction_user_pid": librarian_saxon.pid, } - res, data = postdata( - client, 'api_item.checkin', dict(circ_params)) + res, data = postdata(client, "api_item.checkin", dict(circ_params)) assert res.status_code == 200 assert item_lib_martigny.status == ItemStatus.ON_SHELF login_user_via_session(client, librarian_fully.user) circ_params = { - 'item_pid': item_lib_martigny.pid, - 'patron_pid': patron2_martigny.pid, - 'pickup_location_pid': loc_public_fully.pid, - 'transaction_library_pid': lib_fully.pid, - 'transaction_user_pid': librarian_fully.pid + "item_pid": item_lib_martigny.pid, + "patron_pid": patron2_martigny.pid, + "pickup_location_pid": loc_public_fully.pid, + "transaction_library_pid": lib_fully.pid, + "transaction_user_pid": librarian_fully.pid, } - res, data = postdata( - client, 'api_item.checkout', dict(circ_params)) + res, data = postdata(client, "api_item.checkout", dict(circ_params)) assert res.status_code == 200 # Update loan end_date to allow direct renewal - loan = Loan.get_record_by_pid(data['action_applied']['checkout']['pid']) - loan['end_date'] = loan['start_date'] + loan = Loan.get_record_by_pid(data["action_applied"]["checkout"]["pid"]) + loan["end_date"] = loan["start_date"] loan.update(loan, dbcommit=True, reindex=True) - res, data = postdata( - client, 'api_item.extend_loan', dict(circ_params)) + res, data = postdata(client, "api_item.extend_loan", dict(circ_params)) assert res.status_code == 200 - res, data = postdata( - client, 'api_item.checkin', dict(circ_params)) + res, data = postdata(client, "api_item.checkin", dict(circ_params)) assert res.status_code == 200 login_user_via_session(client, librarian_martigny.user) circ_params = { - 'item_pid': item_lib_martigny.pid, - 'patron_pid': patron2_martigny.pid, - 'pickup_location_pid': loc_public_martigny.pid, - 'transaction_library_pid': lib_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid + "item_pid": item_lib_martigny.pid, + "patron_pid": patron2_martigny.pid, + "pickup_location_pid": loc_public_martigny.pid, + "transaction_library_pid": lib_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, } - res, data = postdata( - client, 'api_item.checkin', dict(circ_params)) + res, data = postdata(client, "api_item.checkin", dict(circ_params)) assert res.status_code == 200 assert item_lib_martigny.status == ItemStatus.ON_SHELF diff --git a/tests/api/circulation/scenarios/test_scenario_d.py b/tests/api/circulation/scenarios/test_scenario_d.py index ee01a9a35a..3e268b9976 100644 --- a/tests/api/circulation/scenarios/test_scenario_d.py +++ b/tests/api/circulation/scenarios/test_scenario_d.py @@ -23,11 +23,21 
@@ def test_circ_scenario_d( - client, librarian_martigny, lib_martigny, lib_saxon, - patron_martigny, loc_public_martigny, item_lib_martigny, - circulation_policies, loc_public_saxon, librarian_saxon, - patron2_martigny, lib_fully, loc_public_fully, - librarian_fully): + client, + librarian_martigny, + lib_martigny, + lib_saxon, + patron_martigny, + loc_public_martigny, + item_lib_martigny, + circulation_policies, + loc_public_saxon, + librarian_saxon, + patron2_martigny, + lib_fully, + loc_public_fully, + librarian_fully, +): """Test the fourth circulation scenario.""" # https://github.com/rero/rero-ils/blob/dev/doc/circulation/scenarios.md # An inexperienced librarian A (library A) makes a checkin on item A, @@ -52,122 +62,110 @@ def test_circ_scenario_d( # which is on shelf at library A and without requests (-> nothing happens). login_user_via_session(client, librarian_martigny.user) circ_params = { - 'item_pid': item_lib_martigny.pid, - 'pickup_location_pid': loc_public_martigny.pid, - 'transaction_library_pid': lib_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid + "item_pid": item_lib_martigny.pid, + "pickup_location_pid": loc_public_martigny.pid, + "transaction_library_pid": lib_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, } - res, data = postdata( - client, 'api_item.checkin', dict(circ_params)) + res, data = postdata(client, "api_item.checkin", dict(circ_params)) assert res.status_code == 400 # Item A is requested by patron A. - circ_params['patron_pid'] = patron_martigny.pid - res, data = postdata( - client, 'api_item.librarian_request', dict(circ_params)) + circ_params["patron_pid"] = patron_martigny.pid + res, data = postdata(client, "api_item.librarian_request", dict(circ_params)) assert res.status_code == 200 - martigny_loan_pid = get_json(res)['action_applied']['request']['pid'] + martigny_loan_pid = get_json(res)["action_applied"]["request"]["pid"] # Another librarian B of library B tries # to check it out for patron B (-> denied). login_user_via_session(client, librarian_saxon.user) circ_params = { - 'item_pid': item_lib_martigny.pid, - 'patron_pid': patron2_martigny.pid, - 'pickup_location_pid': loc_public_saxon.pid, - 'transaction_library_pid': lib_saxon.pid, - 'transaction_user_pid': librarian_saxon.pid + "item_pid": item_lib_martigny.pid, + "patron_pid": patron2_martigny.pid, + "pickup_location_pid": loc_public_saxon.pid, + "transaction_library_pid": lib_saxon.pid, + "transaction_user_pid": librarian_saxon.pid, } - res, data = postdata( - client, 'api_item.checkout', dict(circ_params)) + res, data = postdata(client, "api_item.checkout", dict(circ_params)) assert res.status_code == 403 # The item is requested by # patron B with pickup library B. 
- res, data = postdata( - client, 'api_item.librarian_request', dict(circ_params)) + res, data = postdata(client, "api_item.librarian_request", dict(circ_params)) assert res.status_code == 200 - saxon_loan_pid = get_json(res)['action_applied']['request']['pid'] + saxon_loan_pid = get_json(res)["action_applied"]["request"]["pid"] # Librarian B tries again to check it out # for patron B (-> denied), circ_params = { - 'item_pid': item_lib_martigny.pid, - 'patron_pid': patron2_martigny.pid, - 'pickup_location_pid': loc_public_saxon.pid, - 'transaction_library_pid': lib_saxon.pid, - 'transaction_user_pid': librarian_saxon.pid + "item_pid": item_lib_martigny.pid, + "patron_pid": patron2_martigny.pid, + "pickup_location_pid": loc_public_saxon.pid, + "transaction_library_pid": lib_saxon.pid, + "transaction_user_pid": librarian_saxon.pid, } - res, data = postdata( - client, 'api_item.checkout', dict(circ_params)) + res, data = postdata(client, "api_item.checkout", dict(circ_params)) assert res.status_code == 403 # then for patron A (-> ok). circ_params = { - 'item_pid': item_lib_martigny.pid, - 'patron_pid': patron_martigny.pid, - 'pickup_location_pid': loc_public_saxon.pid, - 'transaction_library_pid': lib_saxon.pid, - 'transaction_user_pid': librarian_saxon.pid + "item_pid": item_lib_martigny.pid, + "patron_pid": patron_martigny.pid, + "pickup_location_pid": loc_public_saxon.pid, + "transaction_library_pid": lib_saxon.pid, + "transaction_user_pid": librarian_saxon.pid, } - res, data = postdata( - client, 'api_item.checkout', dict(circ_params)) + res, data = postdata(client, "api_item.checkout", dict(circ_params)) assert res.status_code == 200 # Patron A tries to renew item A (-> denied). - res, data = postdata( - client, 'api_item.extend_loan', dict(circ_params)) + res, data = postdata(client, "api_item.extend_loan", dict(circ_params)) assert res.status_code == 400 # Patron A returns item A at library B. The item is at desk for patron B. - res, data = postdata( - client, 'api_item.checkin', dict(circ_params)) + res, data = postdata(client, "api_item.checkin", dict(circ_params)) assert res.status_code == 200 # Patron A requests it again, with pickup library A. login_user_via_session(client, librarian_martigny.user) circ_params = { - 'item_pid': item_lib_martigny.pid, - 'patron_pid': patron_martigny.pid, - 'pickup_location_pid': loc_public_martigny.pid, - 'transaction_library_pid': lib_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid + "item_pid": item_lib_martigny.pid, + "patron_pid": patron_martigny.pid, + "pickup_location_pid": loc_public_martigny.pid, + "transaction_library_pid": lib_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, } - res, data = postdata( - client, 'api_item.librarian_request', dict(circ_params)) + res, data = postdata(client, "api_item.librarian_request", dict(circ_params)) assert res.status_code == 200 - martigny_loan_pid = get_json(res)['action_applied']['request']['pid'] + martigny_loan_pid = get_json(res)["action_applied"]["request"]["pid"] # Unexpectedly, librarian A tries to check out item A for patron A # (-> denied). - res, data = postdata( - client, 'api_item.checkout', dict(circ_params)) + res, data = postdata(client, "api_item.checkout", dict(circ_params)) assert res.status_code == 403 # He then checks it out for patron B.
circ_params = { - 'item_pid': item_lib_martigny.pid, - 'patron_pid': patron2_martigny.pid, - 'pickup_location_pid': loc_public_martigny.pid, - 'transaction_library_pid': lib_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid + "item_pid": item_lib_martigny.pid, + "patron_pid": patron2_martigny.pid, + "pickup_location_pid": loc_public_martigny.pid, + "transaction_library_pid": lib_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, } - res, data = postdata( - client, 'api_item.checkout', dict(circ_params)) + res, data = postdata(client, "api_item.checkout", dict(circ_params)) assert res.status_code == 200 # Patron B returns item A at library C. # It goes in transit to library A for patron A. login_user_via_session(client, librarian_fully.user) circ_params = { - 'item_pid': item_lib_martigny.pid, - 'patron_pid': patron2_martigny.pid, - 'pickup_location_pid': loc_public_fully.pid, - 'transaction_library_pid': lib_fully.pid, - 'transaction_user_pid': librarian_fully.pid + "item_pid": item_lib_martigny.pid, + "patron_pid": patron2_martigny.pid, + "pickup_location_pid": loc_public_fully.pid, + "transaction_library_pid": lib_fully.pid, + "transaction_user_pid": librarian_fully.pid, } - res, data = postdata( - client, 'api_item.checkin', dict(circ_params)) + res, data = postdata(client, "api_item.checkin", dict(circ_params)) assert res.status_code == 200 # Before arriving at library A, it transits through library B. @@ -176,22 +174,20 @@ def test_circ_scenario_d( # cancels his request. Item A transits through library C. It is then # received at its owning library A. circ_params = { - 'pid': martigny_loan_pid, - 'transaction_library_pid': lib_saxon.pid, - 'transaction_user_pid': librarian_saxon.pid + "pid": martigny_loan_pid, + "transaction_library_pid": lib_saxon.pid, + "transaction_user_pid": librarian_saxon.pid, } - res, data = postdata( - client, 'api_item.cancel_item_request', dict(circ_params)) + res, data = postdata(client, "api_item.cancel_item_request", dict(circ_params)) assert res.status_code == 200 login_user_via_session(client, librarian_martigny.user) circ_params = { - 'item_pid': item_lib_martigny.pid, - 'patron_pid': patron_martigny.pid, - 'pickup_location_pid': loc_public_martigny.pid, - 'transaction_library_pid': lib_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid + "item_pid": item_lib_martigny.pid, + "patron_pid": patron_martigny.pid, + "pickup_location_pid": loc_public_martigny.pid, + "transaction_library_pid": lib_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, } - res, data = postdata( - client, 'api_item.checkin', dict(circ_params)) + res, data = postdata(client, "api_item.checkin", dict(circ_params)) assert res.status_code == 200 diff --git a/tests/api/circulation/test_actions_views_add_request.py b/tests/api/circulation/test_actions_views_add_request.py index 4710ebfab1..8cc5e75f4c 100644 --- a/tests/api/circulation/test_actions_views_add_request.py +++ b/tests/api/circulation/test_actions_views_add_request.py @@ -23,20 +23,22 @@ def test_add_request_failed_actions( - client, librarian_martigny, lib_martigny, - patron_martigny, loc_public_martigny, item_lib_martigny, - circulation_policies): + client, + librarian_martigny, + lib_martigny, + patron_martigny, + loc_public_martigny, + item_lib_martigny, + circulation_policies, +): """Test item failed actions.""" login_user_via_session(client, librarian_martigny.user) # test fails for a request with a missing parameter pickup_location_pid res, data = postdata( client, -
'api_item.librarian_request', - dict( - item_pid=item_lib_martigny.pid, - patron_pid=patron_martigny.pid - ) + "api_item.librarian_request", + dict(item_pid=item_lib_martigny.pid, patron_pid=patron_martigny.pid), ) assert res.status_code == 400 @@ -44,56 +46,60 @@ def test_add_request_failed_actions( # when item record not found in database, api returns 404 res, data = postdata( client, - 'api_item.librarian_request', + "api_item.librarian_request", dict( - patron_pid=patron_martigny.pid, - pickup_location_pid=loc_public_martigny.pid - ) + patron_pid=patron_martigny.pid, pickup_location_pid=loc_public_martigny.pid + ), ) assert res.status_code == 404 # test fails for a request with a missing parameter patron_pid res, data = postdata( client, - 'api_item.librarian_request', + "api_item.librarian_request", dict( - item_pid=item_lib_martigny.pid, - pickup_location_pid=loc_public_martigny.pid - ) + item_pid=item_lib_martigny.pid, pickup_location_pid=loc_public_martigny.pid + ), ) assert res.status_code == 400 # test fails for a request with a missing parameter transaction_library_pid res, data = postdata( client, - 'api_item.librarian_request', + "api_item.librarian_request", dict( item_pid=item_lib_martigny.pid, patron_pid=patron_martigny.pid, - pickup_location_pid=loc_public_martigny.pid - ) + pickup_location_pid=loc_public_martigny.pid, + ), ) assert res.status_code == 400 def test_add_request( - client, librarian_martigny, lib_martigny, - patron_martigny, loc_public_martigny, item_lib_martigny, - circulation_policies, patron2_martigny): + client, + librarian_martigny, + lib_martigny, + patron_martigny, + loc_public_martigny, + item_lib_martigny, + circulation_policies, + patron2_martigny, +): """Test a successful frontend add request action.""" # test passes when all required parameters are given # test passes when the transaction library pid is given login_user_via_session(client, librarian_martigny.user) res, data = postdata( client, - 'api_item.librarian_request', + "api_item.librarian_request", dict( item_pid=item_lib_martigny.pid, patron_pid=patron_martigny.pid, pickup_location_pid=loc_public_martigny.pid, transaction_library_pid=lib_martigny.pid, - transaction_user_pid=librarian_martigny.pid - ) + transaction_user_pid=librarian_martigny.pid, + ), ) assert res.status_code == 200 @@ -101,13 +107,13 @@ def test_add_request( login_user_via_session(client, librarian_martigny.user) res, data = postdata( client, - 'api_item.librarian_request', + "api_item.librarian_request", dict( item_pid=item_lib_martigny.pid, patron_pid=patron2_martigny.pid, pickup_location_pid=loc_public_martigny.pid, transaction_location_pid=loc_public_martigny.pid, - transaction_user_pid=librarian_martigny.pid - ) + transaction_user_pid=librarian_martigny.pid, + ), ) assert res.status_code == 200 diff --git a/tests/api/circulation/test_actions_views_cancel_request.py b/tests/api/circulation/test_actions_views_cancel_request.py index d35bd61694..0cba363cc3 100644 --- a/tests/api/circulation/test_actions_views_cancel_request.py +++ b/tests/api/circulation/test_actions_views_cancel_request.py @@ -23,33 +23,28 @@ def test_cancel_an_item_request( - client, librarian_martigny, lib_martigny, - item_at_desk_martigny_patron_and_loan_at_desk, - item_on_shelf_martigny_patron_and_loan_pending, loc_public_martigny, - circulation_policies): + client, + librarian_martigny, + lib_martigny, + item_at_desk_martigny_patron_and_loan_at_desk, + item_on_shelf_martigny_patron_and_loan_pending, + loc_public_martigny, + circulation_policies,
+): """Test the frontend cancel an item request action.""" # test passes when all required parameters are given login_user_via_session(client, librarian_martigny.user) item, patron, loan = item_on_shelf_martigny_patron_and_loan_pending # test fails when there is a missing required parameter - res, data = postdata( - client, - 'api_item.cancel_item_request', - dict( - pid=loan.pid - ) - ) + res, data = postdata(client, "api_item.cancel_item_request", dict(pid=loan.pid)) assert res.status_code == 400 # test fails when there is a missing required parameter res, data = postdata( client, - 'api_item.cancel_item_request', - dict( - pid=loan.pid, - transaction_location_pid=loc_public_martigny.pid - ) + "api_item.cancel_item_request", + dict(pid=loan.pid, transaction_location_pid=loc_public_martigny.pid), ) assert res.status_code == 400 @@ -57,23 +52,23 @@ def test_cancel_an_item_request( # when item record not found in database, api returns 404 res, data = postdata( client, - 'api_item.cancel_item_request', + "api_item.cancel_item_request", dict( transaction_location_pid=loc_public_martigny.pid, - transaction_user_pid=librarian_martigny.pid - ) + transaction_user_pid=librarian_martigny.pid, + ), ) assert res.status_code == 404 # test passes when the transaction location pid is given res, data = postdata( client, - 'api_item.cancel_item_request', + "api_item.cancel_item_request", dict( pid=loan.pid, transaction_location_pid=loc_public_martigny.pid, - transaction_user_pid=librarian_martigny.pid - ) + transaction_user_pid=librarian_martigny.pid, + ), ) assert res.status_code == 200 @@ -82,11 +77,11 @@ def test_cancel_an_item_request( item, patron, loan = item_at_desk_martigny_patron_and_loan_at_desk res, data = postdata( client, - 'api_item.cancel_item_request', + "api_item.cancel_item_request", dict( pid=loan.pid, transaction_library_pid=lib_martigny.pid, - transaction_user_pid=librarian_martigny.pid - ) + transaction_user_pid=librarian_martigny.pid, + ), ) assert res.status_code == 200 diff --git a/tests/api/circulation/test_actions_views_change_pickup.py b/tests/api/circulation/test_actions_views_change_pickup.py index baff46b199..06e0ee70fb 100644 --- a/tests/api/circulation/test_actions_views_change_pickup.py +++ b/tests/api/circulation/test_actions_views_change_pickup.py @@ -23,31 +23,30 @@ def test_change_pickup_location_request( - client, librarian_martigny, lib_martigny, - item_at_desk_martigny_patron_and_loan_at_desk, - item_on_shelf_martigny_patron_and_loan_pending, loc_public_martigny, - circulation_policies, loc_public_fully): + client, + librarian_martigny, + lib_martigny, + item_at_desk_martigny_patron_and_loan_at_desk, + item_on_shelf_martigny_patron_and_loan_pending, + loc_public_martigny, + circulation_policies, + loc_public_fully, +): """Test the frontend update pickup location calls.""" login_user_via_session(client, librarian_martigny.user) item, patron, loan = item_on_shelf_martigny_patron_and_loan_pending # test fails when there is a missing required parameter res, data = postdata( - client, - 'api_item.update_loan_pickup_location', - dict( - pid=loan.pid - ) + client, "api_item.update_loan_pickup_location", dict(pid=loan.pid) ) assert res.status_code == 400 # test fails when there is a missing required parameter res, data = postdata( client, - 'api_item.update_loan_pickup_location', - dict( - pickup_location_pid=loc_public_martigny.pid - ) + "api_item.update_loan_pickup_location", + dict(pickup_location_pid=loc_public_martigny.pid), ) assert res.status_code == 400 @@ -55,65 
+54,55 @@ def test_change_pickup_location_request( # CHANGE_PICKUP_LOCATION_1_2: update is allowed on PENDING loans. res, data = postdata( client, - 'api_item.update_loan_pickup_location', - dict( - pid=loan.pid, - pickup_location_pid=loc_public_fully.pid - ) + "api_item.update_loan_pickup_location", + dict(pid=loan.pid, pickup_location_pid=loc_public_fully.pid), ) assert res.status_code == 200 def test_change_pickup_location_request_for_other_loans( - client, librarian_martigny, lib_martigny, - item_at_desk_martigny_patron_and_loan_at_desk, - loc_public_martigny, circulation_policies, loc_public_fully, - item_on_loan_martigny_patron_and_loan_on_loan, - item_in_transit_martigny_patron_and_loan_for_pickup, - item_in_transit_martigny_patron_and_loan_to_house): + client, + librarian_martigny, + lib_martigny, + item_at_desk_martigny_patron_and_loan_at_desk, + loc_public_martigny, + circulation_policies, + loc_public_fully, + item_on_loan_martigny_patron_and_loan_on_loan, + item_in_transit_martigny_patron_and_loan_for_pickup, + item_in_transit_martigny_patron_and_loan_to_house, +): """Test the frontend update pickup location calls of other loan states.""" login_user_via_session(client, librarian_martigny.user) # CHANGE_PICKUP_LOCATION_3_1: update denied on ITEM_AT_DESK loans. item, patron, loan = item_at_desk_martigny_patron_and_loan_at_desk res, data = postdata( client, - 'api_item.update_loan_pickup_location', - dict( - pid=loan.pid, - pickup_location_pid=loc_public_fully.pid - ) + "api_item.update_loan_pickup_location", + dict(pid=loan.pid, pickup_location_pid=loc_public_fully.pid), ) assert res.status_code == 403 # CHANGE_PICKUP_LOCATION_2_1: update denied on ITEM_ON_LOAN loans. item, patron, loan = item_on_loan_martigny_patron_and_loan_on_loan res, data = postdata( client, - 'api_item.update_loan_pickup_location', - dict( - pid=loan.pid, - pickup_location_pid=loc_public_fully.pid - ) + "api_item.update_loan_pickup_location", + dict(pid=loan.pid, pickup_location_pid=loc_public_fully.pid), ) assert res.status_code == 403 # CHANGE_PICKUP_LOCATION_4: update allowed on IN_TRANSIT_FOR_PICKUP loans. item, patron, loan = item_in_transit_martigny_patron_and_loan_for_pickup res, data = postdata( client, - 'api_item.update_loan_pickup_location', - dict( - pid=loan.pid, - pickup_location_pid=loc_public_fully.pid - ) + "api_item.update_loan_pickup_location", + dict(pid=loan.pid, pickup_location_pid=loc_public_fully.pid), ) assert res.status_code == 200 # CHANGE_PICKUP_LOCATION_5: update denied on IN_TRANSIT_TO_HOUSE loans.
item, patron, loan = item_in_transit_martigny_patron_and_loan_to_house res, data = postdata( client, - 'api_item.update_loan_pickup_location', - dict( - pid=loan.pid, - pickup_location_pid=loc_public_fully.pid - ) + "api_item.update_loan_pickup_location", + dict(pid=loan.pid, pickup_location_pid=loc_public_fully.pid), ) assert res.status_code == 403 diff --git a/tests/api/circulation/test_actions_views_checkin.py b/tests/api/circulation/test_actions_views_checkin.py index 9f7ca3bbea..b9bbb7bedd 100644 --- a/tests/api/circulation/test_actions_views_checkin.py +++ b/tests/api/circulation/test_actions_views_checkin.py @@ -26,38 +26,32 @@ from rero_ils.modules.items.api import Item from rero_ils.modules.items.models import ItemStatus from rero_ils.modules.loans.utils import get_circ_policy, sum_for_fees -from rero_ils.modules.patron_transactions.utils import \ - get_last_transaction_by_loan_pid +from rero_ils.modules.patron_transactions.utils import get_last_transaction_by_loan_pid def test_checkin_an_item( - client, librarian_martigny, lib_martigny, - item_on_loan_martigny_patron_and_loan_on_loan, loc_public_martigny, - item2_on_loan_martigny_patron_and_loan_on_loan, - circulation_policies): + client, + librarian_martigny, + lib_martigny, + item_on_loan_martigny_patron_and_loan_on_loan, + loc_public_martigny, + item2_on_loan_martigny_patron_and_loan_on_loan, + circulation_policies, +): """Test the frontend return a checked-out item action.""" # test passes when all required parameters are given login_user_via_session(client, librarian_martigny.user) item, patron, loan = item_on_loan_martigny_patron_and_loan_on_loan # test fails when there is a missing required parameter - res, data = postdata( - client, - 'api_item.checkin', - dict( - item_pid=item.pid - ) - ) + res, data = postdata(client, "api_item.checkin", dict(item_pid=item.pid)) assert res.status_code == 400 # test fails when there is a missing required parameter res, data = postdata( client, - 'api_item.checkin', - dict( - item_pid=item.pid, - transaction_location_pid=loc_public_martigny.pid - ) + "api_item.checkin", + dict(item_pid=item.pid, transaction_location_pid=loc_public_martigny.pid), ) assert res.status_code == 400 @@ -65,23 +59,23 @@ def test_checkin_an_item( # when item record not found in database, api returns 404 res, data = postdata( client, - 'api_item.checkin', + "api_item.checkin", dict( transaction_location_pid=loc_public_martigny.pid, - transaction_user_pid=librarian_martigny.pid - ) + transaction_user_pid=librarian_martigny.pid, + ), ) assert res.status_code == 404 # test passes when the transaction location pid is given res, data = postdata( client, - 'api_item.checkin', + "api_item.checkin", dict( item_pid=item.pid, transaction_location_pid=loc_public_martigny.pid, - transaction_user_pid=librarian_martigny.pid - ) + transaction_user_pid=librarian_martigny.pid, + ), ) assert res.status_code == 200 item = Item.get_record_by_pid(item.pid) @@ -91,41 +85,51 @@ def test_checkin_an_item( item, patron, loan = item2_on_loan_martigny_patron_and_loan_on_loan res, data = postdata( client, - 'api_item.checkin', + "api_item.checkin", dict( item_pid=item.pid, transaction_library_pid=lib_martigny.pid, - transaction_user_pid=librarian_martigny.pid - ) + transaction_user_pid=librarian_martigny.pid, + ), ) assert res.status_code == 200 item = Item.get_record_by_pid(item.pid) assert item.status == ItemStatus.ON_SHELF -def test_auto_checkin_else(client, librarian_martigny, - patron_martigny, loc_public_martigny, - item_lib_martigny, 
json_header, lib_martigny, - loc_public_saxon): +def test_auto_checkin_else( + client, + librarian_martigny, + patron_martigny, + loc_public_martigny, + item_lib_martigny, + json_header, + lib_martigny, + loc_public_saxon, +): """Test item checkin no action.""" login_user_via_session(client, librarian_martigny.user) res, data = postdata( client, - 'api_item.checkin', + "api_item.checkin", dict( item_pid=item_lib_martigny.pid, transaction_library_pid=lib_martigny.pid, - transaction_user_pid=librarian_martigny.pid - ) + transaction_user_pid=librarian_martigny.pid, + ), ) assert res.status_code == 400 - assert get_json(res)['status'] == \ - _('error: No circulation action performed: on shelf') + assert get_json(res)["status"] == _( + "error: No circulation action performed: on shelf" + ) def test_checkin_overdue_item( - client, librarian_martigny, loc_public_martigny, - item_on_loan_martigny_patron_and_loan_on_loan): + client, + librarian_martigny, + loc_public_martigny, + item_on_loan_martigny_patron_and_loan_on_loan, +): """Test a checkin for an overdue item with incremental fees.""" item, patron, loan = item_on_loan_martigny_patron_and_loan_on_loan @@ -133,18 +137,18 @@ def test_checkin_overdue_item( # Update the circulation policy corresponding to the loan # Update the loan due date cipo = get_circ_policy(loan) - cipo['overdue_fees'] = { - 'intervals': [ - {'from': 1, 'to': 5, 'fee_amount': 0.50}, - {'from': 6, 'to': 10, 'fee_amount': 1}, - {'from': 11, 'fee_amount': 2}, + cipo["overdue_fees"] = { + "intervals": [ + {"from": 1, "to": 5, "fee_amount": 0.50}, + {"from": 6, "to": 10, "fee_amount": 1}, + {"from": 11, "fee_amount": 2}, ] } cipo.update(data=cipo, dbcommit=True, reindex=True) end = date.today() - timedelta(days=30) end = datetime(end.year, end.month, end.day, tzinfo=timezone.utc) end = end - timedelta(microseconds=1) - loan['end_date'] = end.isoformat() + loan["end_date"] = end.isoformat() loan = loan.update(loan, dbcommit=True, reindex=True) fees = loan.get_overdue_fees @@ -153,35 +157,32 @@ def test_checkin_overdue_item( assert total_fees > 0 # Check overdues preview API and check result - url = url_for('api_loan.preview_loan_overdue', loan_pid=loan.pid) + url = url_for("api_loan.preview_loan_overdue", loan_pid=loan.pid) login_user_via_session(client, librarian_martigny.user) res = client.get(url) data = get_json(res) assert res.status_code == 200 - assert len(data['steps']) > 0 - assert data['total'] > 0 + assert len(data["steps"]) > 0 + assert data["total"] > 0 - url = url_for( - 'api_patrons.patron_overdue_preview_api', - patron_pid=patron.pid - ) + url = url_for("api_patrons.patron_overdue_preview_api", patron_pid=patron.pid) res = client.get(url) data = get_json(res) assert res.status_code == 200 assert len(data) == 1 - assert data[0]['loan']['pid'] == loan.pid - assert len(data[0]['fees']['steps']) > 0 - assert data[0]['fees']['total'] > 0 + assert data[0]["loan"]["pid"] == loan.pid + assert len(data[0]["fees"]["steps"]) > 0 + assert data[0]["fees"]["total"] > 0 # Do the checkin on the item res, data = postdata( client, - 'api_item.checkin', + "api_item.checkin", dict( item_pid=item.pid, transaction_location_pid=loc_public_martigny.pid, - transaction_user_pid=librarian_martigny.pid - ) + transaction_user_pid=librarian_martigny.pid, + ), ) assert res.status_code == 200 item = Item.get_record_by_pid(item.pid) @@ -192,8 +193,8 @@ def test_checkin_overdue_item( assert trans.total_amount == total_fees events = list(trans.events) assert len(events) == 1 - assert 
len(events[0].get('steps', [])) == len(fees) + assert len(events[0].get("steps", [])) == len(fees) # reset the cipo - del cipo['overdue_fees'] + del cipo["overdue_fees"] cipo.update(data=cipo, dbcommit=True, reindex=True) diff --git a/tests/api/circulation/test_actions_views_checkout.py b/tests/api/circulation/test_actions_views_checkout.py index cba183c57f..b153298753 100644 --- a/tests/api/circulation/test_actions_views_checkout.py +++ b/tests/api/circulation/test_actions_views_checkout.py @@ -26,13 +26,14 @@ def test_checkout_missing_parameters( - client, - librarian_martigny, - lib_martigny, - loc_public_martigny, - patron_martigny, - item_lib_martigny, - circulation_policies): + client, + librarian_martigny, + lib_martigny, + loc_public_martigny, + patron_martigny, + item_lib_martigny, + circulation_policies, +): """Test checkout with missing parameters. Are needed: @@ -46,44 +47,36 @@ def test_checkout_missing_parameters( assert item.status == ItemStatus.ON_SHELF # test fails when missing required parameter - res, _ = postdata( - client, - 'api_item.checkout', - dict( - item_pid=item.pid - ) - ) + res, _ = postdata(client, "api_item.checkout", dict(item_pid=item.pid)) assert res.status_code == 400 res, _ = postdata( client, - 'api_item.checkout', - dict( - item_pid=item.pid, - patron_pid=patron_martigny.pid - ) + "api_item.checkout", + dict(item_pid=item.pid, patron_pid=patron_martigny.pid), ) assert res.status_code == 400 res, _ = postdata( client, - 'api_item.checkout', + "api_item.checkout", dict( item_pid=item.pid, patron_pid=patron_martigny.pid, - transaction_user_pid=librarian_martigny.pid - ) + transaction_user_pid=librarian_martigny.pid, + ), ) assert res.status_code == 400 def test_checkout( - client, - librarian_martigny, - lib_martigny, - loc_public_martigny, - patron_martigny, - item_lib_martigny, - circulation_policies, - item_on_shelf_martigny_patron_and_loan_pending): + client, + librarian_martigny, + lib_martigny, + loc_public_martigny, + patron_martigny, + item_lib_martigny, + circulation_policies, + item_on_shelf_martigny_patron_and_loan_pending, +): """Test a successful frontend checkout action.""" login_user_via_session(client, librarian_martigny.user) item = item_lib_martigny @@ -93,27 +86,19 @@ def test_checkout( item_pid=item.pid, patron_pid=patron_martigny.pid, transaction_user_pid=librarian_martigny.pid, - transaction_location_pid=loc_public_martigny.pid + transaction_location_pid=loc_public_martigny.pid, ) # test is done WITHOUT loan PID - res, _ = postdata( - client, - 'api_item.checkout', - params - ) + res, _ = postdata(client, "api_item.checkout", params) assert res.status_code == 200 # test WITH loan PID & WITH SPECIFIED END-DATE item, patron_pid, loan = item_on_shelf_martigny_patron_and_loan_pending assert item.status == ItemStatus.ON_SHELF - params['item_pid'] = item.pid - params['pid'] = loan.pid - res, _ = postdata( - client, - 'api_item.checkout', - params - ) + params["item_pid"] = item.pid + params["pid"] = loan.pid + res, _ = postdata(client, "api_item.checkout", params) assert res.status_code == 200 # TEST CHECKOUT WITH SPECIFIED END-DATE @@ -124,12 +109,12 @@ def test_checkout( # business open day res, _ = postdata( client, - 'api_item.checkin', + "api_item.checkin", dict( item_pid=item.pid, transaction_library_pid=lib_martigny.pid, - transaction_user_pid=librarian_martigny.pid - ) + transaction_user_pid=librarian_martigny.pid, + ), ) assert res.status_code == 200 @@ -143,15 +128,11 @@ def test_checkout( patron_pid=patron_martigny.pid, 
transaction_user_pid=librarian_martigny.pid, transaction_location_pid=loc_public_martigny.pid, - end_date=next_saturday.isoformat() - ) - res, data = postdata( - client, - 'api_item.checkout', - params + end_date=next_saturday.isoformat(), ) + res, data = postdata(client, "api_item.checkout", params) assert res.status_code == 200 - transaction_end_date = data['action_applied']['checkout']['end_date'] + transaction_end_date = data["action_applied"]["checkout"]["end_date"] transaction_end_date = ciso8601.parse_datetime(transaction_end_date) next_open_date = lib_martigny.next_open(next_saturday) assert next_open_date.date() == transaction_end_date.date() diff --git a/tests/api/circulation/test_actions_views_extend_loan_request.py b/tests/api/circulation/test_actions_views_extend_loan_request.py index 958c2fc230..3f6b510588 100644 --- a/tests/api/circulation/test_actions_views_extend_loan_request.py +++ b/tests/api/circulation/test_actions_views_extend_loan_request.py @@ -25,12 +25,13 @@ def test_extend_loan_missing_parameters( - client, - librarian_martigny, - lib_martigny, - loc_public_martigny, - circulation_policies, - item_on_loan_martigny_patron_and_loan_on_loan): + client, + librarian_martigny, + lib_martigny, + loc_public_martigny, + circulation_policies, + item_on_loan_martigny_patron_and_loan_on_loan, +): """Test extend loan with missing parameters. Are needed: @@ -42,78 +43,67 @@ def test_extend_loan_missing_parameters( assert item.status == ItemStatus.ON_LOAN # test fails when missing required parameter - res, _ = postdata( - client, - 'api_item.extend_loan', - dict( - item_pid=item.pid - ) - ) + res, _ = postdata(client, "api_item.extend_loan", dict(item_pid=item.pid)) assert res.status_code == 400 res, _ = postdata( client, - 'api_item.extend_loan', - dict( - item_pid=item.pid, - transaction_location_pid=loc_public_martigny.pid - ) + "api_item.extend_loan", + dict(item_pid=item.pid, transaction_location_pid=loc_public_martigny.pid), ) assert res.status_code == 400 res, _ = postdata( client, - 'api_item.extend_loan', - dict( - item_pid=item.pid, - transaction_user_pid=librarian_martigny.pid - ) + "api_item.extend_loan", + dict(item_pid=item.pid, transaction_user_pid=librarian_martigny.pid), ) assert res.status_code == 400 def test_extend_loan( - client, - librarian_martigny, - lib_martigny, - loc_public_martigny, - circulation_policies, - item_on_loan_martigny_patron_and_loan_on_loan, - yesterday): + client, + librarian_martigny, + lib_martigny, + loc_public_martigny, + circulation_policies, + item_on_loan_martigny_patron_and_loan_on_loan, + yesterday, +): """Test frontend extend action.""" item, patron, loan = item_on_loan_martigny_patron_and_loan_on_loan # Update loan `end_date` to play with "extend" function without problem - loan['end_date'] = loan['start_date'] + loan["end_date"] = loan["start_date"] loan.update(loan, dbcommit=True, reindex=True) assert item.status == ItemStatus.ON_LOAN # Test extend for a blocked patron - patron['patron']['blocked'] = True - patron['patron']['blocked_note'] = 'Dummy reason' + patron["patron"]["blocked"] = True + patron["patron"]["blocked_note"] = "Dummy reason" patron.update(patron, dbcommit=True, reindex=True) params = dict( item_pid=item.pid, transaction_user_pid=librarian_martigny.pid, - transaction_location_pid=loc_public_martigny.pid + transaction_location_pid=loc_public_martigny.pid, ) login_user_via_session(client, librarian_martigny.user) - res, _ = postdata(client, 'api_item.extend_loan', params) + res, _ = postdata(client, 
"api_item.extend_loan", params) assert res.status_code == 403 # Test for an expired patron - del patron['patron']['blocked'] - del patron['patron']['blocked_note'] - original_exp_date = patron['patron']['expiration_date'] - patron['patron']['expiration_date'] = yesterday.strftime('%Y-%m-%d') + del patron["patron"]["blocked"] + del patron["patron"]["blocked_note"] + original_exp_date = patron["patron"]["expiration_date"] + patron["patron"]["expiration_date"] = yesterday.strftime("%Y-%m-%d") patron.update(patron, dbcommit=True, reindex=True) - res, _ = postdata(client, 'api_item.extend_loan', params) + res, _ = postdata(client, "api_item.extend_loan", params) assert res.status_code == 403 - patron['patron']['expiration_date'] = original_exp_date + patron["patron"]["expiration_date"] = original_exp_date patron.update(patron, dbcommit=True, reindex=True) # With only needed parameters - res, _ = postdata(client, 'api_item.extend_loan', params) + res, _ = postdata(client, "api_item.extend_loan", params) assert res.status_code == 200 diff --git a/tests/api/circulation/test_actions_views_validate_request.py b/tests/api/circulation/test_actions_views_validate_request.py index 67ef9f56e6..4c2f2884cb 100644 --- a/tests/api/circulation/test_actions_views_validate_request.py +++ b/tests/api/circulation/test_actions_views_validate_request.py @@ -23,10 +23,13 @@ def test_validate_item_request( - client, librarian_martigny, lib_martigny, + client, + librarian_martigny, + lib_martigny, item2_on_shelf_martigny_patron_and_loan_pending, - item_on_shelf_martigny_patron_and_loan_pending, loc_public_martigny, - circulation_policies + item_on_shelf_martigny_patron_and_loan_pending, + loc_public_martigny, + circulation_policies, ): """Test the frontend validate an item request action.""" login_user_via_session(client, librarian_martigny.user) @@ -36,42 +39,51 @@ def test_validate_item_request( # --> `pid` parameter is required into data. Representing the loan pid. 
# --> `transaction_location_pid` parameter is required into data # --> `transaction_user_pid` parameter is required into data - data = [{ - 'parameters': {'pid': loan.pid}, - 'return_code': 400 - }, { - 'parameters': { - 'pid': loan.pid, - 'transaction_location_pid': loc_public_martigny.pid + data = [ + {"parameters": {"pid": loan.pid}, "return_code": 400}, + { + "parameters": { + "pid": loan.pid, + "transaction_location_pid": loc_public_martigny.pid, + }, + "return_code": 400, }, - 'return_code': 400 - }, { - 'parameters': { - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid + { + "parameters": { + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, + }, + "return_code": 404, }, - 'return_code': 404 - }] + ] for hit in data: - params = hit['parameters'] - res, _ = postdata(client, 'api_item.validate_request', data=params) - assert res.status_code == hit['return_code'] + params = hit["parameters"] + res, _ = postdata(client, "api_item.validate_request", data=params) + assert res.status_code == hit["return_code"] # TEST SUCCESS # --> test passes when the transaction location pid is given # --> test passes when the transaction library pid is given - res, data = postdata(client, 'api_item.validate_request', data={ - 'pid': loan.pid, - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid - }) + res, data = postdata( + client, + "api_item.validate_request", + data={ + "pid": loan.pid, + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, + }, + ) assert res.status_code == 200 login_user_via_session(client, librarian_martigny.user) item, patron, loan = item2_on_shelf_martigny_patron_and_loan_pending - res, data = postdata(client, 'api_item.validate_request', data={ - 'pid': loan.pid, - 'transaction_library_pid': lib_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid - }) + res, data = postdata( + client, + "api_item.validate_request", + data={ + "pid": loan.pid, + "transaction_library_pid": lib_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, + }, + ) assert res.status_code == 200 diff --git a/tests/api/circulation/test_borrow_limits.py b/tests/api/circulation/test_borrow_limits.py index 891d147106..4f3c896124 100644 --- a/tests/api/circulation/test_borrow_limits.py +++ b/tests/api/circulation/test_borrow_limits.py @@ -22,29 +22,37 @@ from flask import url_for from freezegun import freeze_time from invenio_accounts.testutils import login_user_via_session -from invenio_circulation.search.api import LoansSearch -from utils import flush_index, get_json, postdata +from utils import get_json, postdata from rero_ils.modules.items.api import Item -from rero_ils.modules.loans.api import Loan, get_overdue_loans +from rero_ils.modules.loans.api import Loan, LoansSearch, get_overdue_loans from rero_ils.modules.loans.models import LoanAction from rero_ils.modules.notifications.api import NotificationsSearch from rero_ils.modules.notifications.dispatcher import Dispatcher from rero_ils.modules.notifications.models import NotificationType from rero_ils.modules.notifications.utils import get_notification -from rero_ils.modules.patron_transaction_events.api import \ - PatronTransactionEvent +from rero_ils.modules.patron_transaction_events.api import PatronTransactionEvent from rero_ils.modules.patron_types.api import PatronType from rero_ils.modules.patrons.api import Patron from 
rero_ils.modules.utils import get_ref_for_pid def test_checkout_library_limit( - client, app, librarian_martigny, lib_martigny, - patron_type_children_martigny, item_lib_martigny, item2_lib_martigny, - item3_lib_martigny, item_lib_martigny_data, item2_lib_martigny_data, - item3_lib_martigny_data, loc_public_martigny, patron_martigny, - circ_policy_short_martigny): + client, + app, + librarian_martigny, + lib_martigny, + patron_type_children_martigny, + item_lib_martigny, + item2_lib_martigny, + item3_lib_martigny, + item_lib_martigny_data, + item2_lib_martigny_data, + item3_lib_martigny_data, + loc_public_martigny, + patron_martigny, + circ_policy_short_martigny, +): """Test checkout library limits.""" patron = patron_martigny @@ -53,8 +61,8 @@ def test_checkout_library_limit( item1 = item_lib_martigny item2 = item2_lib_martigny item3 = item3_lib_martigny - library_ref = get_ref_for_pid('lib', lib_martigny.pid) - location_ref = get_ref_for_pid('loc', loc_public_martigny.pid) + library_ref = get_ref_for_pid("lib", lib_martigny.pid) + location_ref = get_ref_for_pid("loc", loc_public_martigny.pid) login_user_via_session(client, librarian_martigny.user) @@ -62,137 +70,164 @@ # * Update the patron_type to set checkout limits # * All items are linked to the same library/location patron_type = patron_type_children_martigny - patron_type['limits'] = { - 'checkout_limits': { - 'global_limit': 3, - 'library_limit': 2, - 'library_exceptions': [{ - 'library': {'$ref': library_ref}, - 'value': 1 - }] + patron_type["limits"] = { + "checkout_limits": { + "global_limit": 3, + "library_limit": 2, + "library_exceptions": [{"library": {"$ref": library_ref}, "value": 1}], } } patron_type.update(patron_type, dbcommit=True, reindex=True) patron_type = PatronType.get_record_by_pid(patron_type.pid) - item2_lib_martigny_data['location']['$ref'] = location_ref + item2_lib_martigny_data["location"]["$ref"] = location_ref item2.update(item2_lib_martigny_data, dbcommit=True, reindex=True) - item3_lib_martigny_data['location']['$ref'] = location_ref + item3_lib_martigny_data["location"]["$ref"] = location_ref item3.update(item3_lib_martigny_data, dbcommit=True, reindex=True) # First checkout - All should be fine. - res, data = postdata(client, 'api_item.checkout', dict( - item_pid=item1.pid, - patron_pid=patron.pid, - transaction_location_pid=loc_public_martigny.pid, - transaction_user_pid=librarian_martigny.pid, - )) + res, data = postdata( + client, + "api_item.checkout", + dict( + item_pid=item1.pid, + patron_pid=patron.pid, + transaction_location_pid=loc_public_martigny.pid, + transaction_user_pid=librarian_martigny.pid, + ), + ) assert res.status_code == 200 - loan1_pid = data.get('action_applied')[LoanAction.CHECKOUT].get('pid') + loan1_pid = data.get("action_applied")[LoanAction.CHECKOUT].get("pid") # Second checkout # --> The library limit exception should be raised.
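# --> (the patron already holds one open checkout at this library, which equals the library exception limit of 1 set above)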
- res, data = postdata(client, 'api_item.checkout', dict( - item_pid=item2.pid, - patron_pid=patron.pid, - transaction_location_pid=loc_public_martigny.pid, - transaction_user_pid=librarian_martigny.pid, - )) + res, data = postdata( + client, + "api_item.checkout", + dict( + item_pid=item2.pid, + patron_pid=patron.pid, + transaction_location_pid=loc_public_martigny.pid, + transaction_user_pid=librarian_martigny.pid, + ), + ) assert res.status_code == 403 - assert 'Checkout denied' in data['message'] + assert "Checkout denied" in data["message"] # remove the library specific exception and try a new checkout # --> As the limit by library is now '2', the checkout will be done. # --> Try a third checkout : the default library_limit exception should # be raised - patron_type['limits'] = { - 'checkout_limits': { - 'global_limit': 3, - 'library_limit': 2, + patron_type["limits"] = { + "checkout_limits": { + "global_limit": 3, + "library_limit": 2, } } patron_type.update(patron_type, dbcommit=True, reindex=True) - res, data = postdata(client, 'api_item.checkout', dict( - item_pid=item2.pid, - patron_pid=patron.pid, - transaction_location_pid=loc_public_martigny.pid, - transaction_user_pid=librarian_martigny.pid, - )) + res, data = postdata( + client, + "api_item.checkout", + dict( + item_pid=item2.pid, + patron_pid=patron.pid, + transaction_location_pid=loc_public_martigny.pid, + transaction_user_pid=librarian_martigny.pid, + ), + ) assert res.status_code == 200 - loan2_pid = data.get('action_applied')[LoanAction.CHECKOUT].get('pid') - res, data = postdata(client, 'api_item.checkout', dict( - item_pid=item3.pid, - patron_pid=patron.pid, - transaction_location_pid=loc_public_martigny.pid, - transaction_user_pid=librarian_martigny.pid, - )) + loan2_pid = data.get("action_applied")[LoanAction.CHECKOUT].get("pid") + res, data = postdata( + client, + "api_item.checkout", + dict( + item_pid=item3.pid, + patron_pid=patron.pid, + transaction_location_pid=loc_public_martigny.pid, + transaction_user_pid=librarian_martigny.pid, + ), + ) assert res.status_code == 403 - assert 'Checkout denied' in data['message'] + assert "Checkout denied" in data["message"] # remove the library default limit and update the global_limit to 2. 
# --> try the third checkout: the global_limit exception should now be # raised - patron_type['limits'] = { - 'checkout_limits': { - 'global_limit': 2 - } - } + patron_type["limits"] = {"checkout_limits": {"global_limit": 2}} patron_type.update(patron_type, dbcommit=True, reindex=True) - res, data = postdata(client, 'api_item.checkout', dict( - item_pid=item3.pid, - patron_pid=patron.pid, - transaction_location_pid=loc_public_martigny.pid, - transaction_user_pid=librarian_martigny.pid, - )) + res, data = postdata( + client, + "api_item.checkout", + dict( + item_pid=item3.pid, + patron_pid=patron.pid, + transaction_location_pid=loc_public_martigny.pid, + transaction_user_pid=librarian_martigny.pid, + ), + ) assert res.status_code == 403 - assert 'Checkout denied' in data['message'] + assert "Checkout denied" in data["message"] # check the circulation information API - url = url_for( - 'api_patrons.patron_circulation_informations', - patron_pid=patron.pid - ) + url = url_for("api_patrons.patron_circulation_informations", patron_pid=patron.pid) res = client.get(url) assert res.status_code == 200 data = get_json(res) - assert 'error' == data['messages'][0]['type'] - assert 'Checkout denied' in data['messages'][0]['content'] + assert "error" == data["messages"][0]["type"] + assert "Checkout denied" in data["messages"][0]["content"] # try a checkout with 'override_blocking' parameter. # --> the restriction is no longer checked, the checkout will succeed. - res, data = postdata(client, 'api_item.checkout', dict( - item_pid=item3.pid, - patron_pid=patron.pid, - transaction_location_pid=loc_public_martigny.pid, - transaction_user_pid=librarian_martigny.pid, - ), url_data={'override_blocking': 'true'}) + res, data = postdata( + client, + "api_item.checkout", + dict( + item_pid=item3.pid, + patron_pid=patron.pid, + transaction_location_pid=loc_public_martigny.pid, + transaction_user_pid=librarian_martigny.pid, + ), + url_data={"override_blocking": "true"}, + ) assert res.status_code == 200 - loan3_pid = data.get('action_applied')[LoanAction.CHECKOUT].get('pid') + loan3_pid = data.get("action_applied")[LoanAction.CHECKOUT].get("pid") # reset fixtures # --> check in the three loaned items # --> reset patron_type to original value # --> reset items to original values - res, data = postdata(client, 'api_item.checkin', dict( - item_pid=item3.pid, - pid=loan3_pid, - transaction_location_pid=loc_public_martigny.pid, - transaction_user_pid=librarian_martigny.pid, - )) - res, data = postdata(client, 'api_item.checkin', dict( - item_pid=item2.pid, - pid=loan2_pid, - transaction_location_pid=loc_public_martigny.pid, - transaction_user_pid=librarian_martigny.pid, - )) + res, data = postdata( + client, + "api_item.checkin", + dict( + item_pid=item3.pid, + pid=loan3_pid, + transaction_location_pid=loc_public_martigny.pid, + transaction_user_pid=librarian_martigny.pid, + ), + ) + res, data = postdata( + client, + "api_item.checkin", + dict( + item_pid=item2.pid, + pid=loan2_pid, + transaction_location_pid=loc_public_martigny.pid, + transaction_user_pid=librarian_martigny.pid, + ), + ) assert res.status_code == 200 - res, data = postdata(client, 'api_item.checkin', dict( - item_pid=item1.pid, - pid=loan1_pid, - transaction_location_pid=loc_public_martigny.pid, - transaction_user_pid=librarian_martigny.pid, - )) + res, data = postdata( + client, + "api_item.checkin", + dict( + item_pid=item1.pid, + pid=loan1_pid, + transaction_location_pid=loc_public_martigny.pid, + transaction_user_pid=librarian_martigny.pid, + ), + )
assert res.status_code == 200 - del patron_type['limits'] + del patron_type["limits"] patron_type.update(patron_type, dbcommit=True, reindex=True) item2.update(item2_original_data, dbcommit=True, reindex=True) item3.update(item3_original_data, dbcommit=True, reindex=True) @@ -200,36 +235,50 @@ def test_checkout_library_limit( @freeze_time("2021-06-15") def test_overdue_limit( - client, app, librarian_martigny, lib_martigny, item_lib_martigny, - item2_lib_martigny, patron_type_children_martigny, - item3_lib_martigny, item_lib_martigny_data, item2_lib_martigny_data, - item3_lib_martigny_data, loc_public_martigny, patron_martigny, - circ_policy_short_martigny, lib_saxon, loc_public_saxon): + client, + app, + librarian_martigny, + lib_martigny, + item_lib_martigny, + item2_lib_martigny, + patron_type_children_martigny, + item3_lib_martigny, + item_lib_martigny_data, + item2_lib_martigny_data, + item3_lib_martigny_data, + loc_public_martigny, + patron_martigny, + circ_policy_short_martigny, + lib_saxon, + loc_public_saxon, +): """Test overdue limit.""" item = item_lib_martigny item_pid = item.pid patron_pid = patron_martigny.pid - date_format = '%Y/%m/%dT%H:%M:%S.000Z' + date_format = "%Y/%m/%dT%H:%M:%S.000Z" today = datetime.utcnow() - eod = today.replace(hour=23, minute=59, second=0, microsecond=0, - tzinfo=lib_martigny.get_timezone()) + eod = today.replace( + hour=23, minute=59, second=0, microsecond=0, tzinfo=lib_martigny.get_timezone() + ) # STEP 0 :: Prepare data for test # * Update the patron_type to set an overdue_items_limit rule. # We define that only 1 overdue item is allowed. Trying a second # checkout is disallowed if the patron has an overdue item patron_type = patron_type_children_martigny - patron_type \ - .setdefault('limits', {}) \ - .setdefault('overdue_items_limits', {}) \ - .setdefault('default_value', 1) + patron_type.setdefault("limits", {}).setdefault( + "overdue_items_limits", {} + ).setdefault("default_value", 1) patron_type.update(patron_type, dbcommit=True, reindex=True) patron_type = PatronType.get_record_by_pid(patron_type.pid) - assert patron_type\ - .get('limits', {})\ - .get('overdue_items_limits', {}) \ - .get('default_value') == 1 + assert ( + patron_type.get("limits", {}) + .get("overdue_items_limits", {}) + .get("default_value") + == 1 + ) # STEP 1 :: Create a checkout with an end_date at the current date # * Create a checkout and set end_date to a fixed_date equal to @@ -238,47 +287,46 @@ def test_overdue_limit( login_user_via_session(client, librarian_martigny.user) res, data = postdata( client, - 'api_item.checkout', + "api_item.checkout", dict( item_pid=item_pid, patron_pid=patron_pid, transaction_location_pid=loc_public_martigny.pid, transaction_user_pid=librarian_martigny.pid, - end_date=eod.strftime(date_format) - ) + end_date=eod.strftime(date_format), + ), ) assert res.status_code == 200 - loan_pid = data.get('action_applied')[LoanAction.CHECKOUT].get('pid') + loan_pid = data.get("action_applied")[LoanAction.CHECKOUT].get("pid") loan = Loan.get_record_by_pid(loan_pid) assert not loan.is_loan_overdue() tmp_item_data = deepcopy(item_lib_martigny_data) - del tmp_item_data['pid'] - del tmp_item_data['barcode'] - tmp_item_data['library']['$ref'] = get_ref_for_pid('lib', lib_saxon.pid) - tmp_item_data['location']['$ref'] = \ - get_ref_for_pid('loc', loc_public_saxon.pid) + del tmp_item_data["pid"] + del tmp_item_data["barcode"] + tmp_item_data["library"]["$ref"] = get_ref_for_pid("lib", lib_saxon.pid) + tmp_item_data["location"]["$ref"] =
get_ref_for_pid("loc", loc_public_saxon.pid) tmp_item = Item.create(tmp_item_data, dbcommit=True, reindex=True) res, data = postdata( client, - 'api_item.checkout', + "api_item.checkout", dict( item_pid=tmp_item.pid, patron_pid=patron_pid, transaction_location_pid=loc_public_martigny.pid, - transaction_user_pid=librarian_martigny.pid - ) + transaction_user_pid=librarian_martigny.pid, + ), ) assert res.status_code == 200 res, _ = postdata( client, - 'api_item.checkin', + "api_item.checkin", dict( item_pid=tmp_item.pid, - pid=data.get('action_applied')[LoanAction.CHECKOUT].get('pid'), + pid=data.get("action_applied")[LoanAction.CHECKOUT].get("pid"), transaction_location_pid=loc_public_martigny.pid, transaction_user_pid=librarian_martigny.pid, - ) + ), ) assert res.status_code == 200 @@ -286,144 +334,151 @@ def test_overdue_limit( # Now there is one loan in overdue, then the limit is reached and a new # checkout shouldn't be possible end_date = eod - timedelta(days=7) - loan['end_date'] = end_date.isoformat() + loan["end_date"] = end_date.isoformat() loan.update(loan, dbcommit=True, reindex=True) overdue_loans = list(get_overdue_loans(patron_pid=patron_pid)) assert loan.is_loan_overdue() assert loan.end_date == end_date.isoformat() - assert overdue_loans[0].get('pid') == loan_pid + assert overdue_loans[0].get("pid") == loan_pid assert not get_notification(loan, NotificationType.OVERDUE) - notification = loan.create_notification( - _type=NotificationType.OVERDUE).pop() - Dispatcher.dispatch_notifications([notification.get('pid')]) - flush_index(NotificationsSearch.Meta.index) - flush_index(LoansSearch.Meta.index) + notification = loan.create_notification(_type=NotificationType.OVERDUE).pop() + Dispatcher.dispatch_notifications([notification.get("pid")]) + NotificationsSearch.flush_and_refresh() + LoansSearch.flush_and_refresh() assert get_notification(loan, NotificationType.OVERDUE) # Try a second checkout - limit should be reached res, data = postdata( client, - 'api_item.checkout', + "api_item.checkout", dict( item_pid=item2_lib_martigny.pid, patron_pid=patron_pid, transaction_location_pid=loc_public_martigny.pid, transaction_user_pid=librarian_martigny.pid, - ) + ), ) assert res.status_code == 403 - assert 'Checkout denied' in data['message'] + assert "Checkout denied" in data["message"] # Try a request - limit should be reached res, data = postdata( client, - 'api_item.librarian_request', + "api_item.librarian_request", dict( item_pid=item2_lib_martigny.pid, patron_pid=patron_pid, pickup_location_pid=loc_public_martigny.pid, transaction_library_pid=lib_martigny.pid, - transaction_user_pid=librarian_martigny.pid - ) + transaction_user_pid=librarian_martigny.pid, + ), ) assert res.status_code == 403 - assert 'maximal number of overdue items is reached' in data['message'] + assert "maximal number of overdue items is reached" in data["message"] # Try to extend - limit should be reached res, _ = postdata( client, - 'api_item.extend_loan', + "api_item.extend_loan", dict( item_pid=item_pid, transaction_user_pid=librarian_martigny.pid, - transaction_location_pid=loc_public_martigny.pid - ) + transaction_location_pid=loc_public_martigny.pid, + ), ) assert res.status_code == 403 - assert 'maximal number of overdue items is reached' in data['message'] + assert "maximal number of overdue items is reached" in data["message"] # reset the patron_type with default value - del patron_type['limits'] + del patron_type["limits"] # [2] test fee amount limit # Update the patron_type to set a fee_amount_limit 
rule - patron_type \ - .setdefault('limits', {}) \ - .setdefault('fee_amount_limits', {}) \ - .setdefault('default_value', 0.5) + patron_type.setdefault("limits", {}).setdefault("fee_amount_limits", {}).setdefault( + "default_value", 0.5 + ) patron_type.update(patron_type, dbcommit=True, reindex=True) patron_type = PatronType.get_record_by_pid(patron_type.pid) - assert patron_type.get('limits', {}).get('fee_amount_limits', {}) \ - .get('default_value') == 0.5 + assert ( + patron_type.get("limits", {}).get("fee_amount_limits", {}).get("default_value") + == 0.5 + ) # [2.1] test fee amount limit when we try to checkout a second item res, data = postdata( client, - 'api_item.checkout', + "api_item.checkout", dict( item_pid=item2_lib_martigny.pid, patron_pid=patron_pid, transaction_location_pid=loc_public_martigny.pid, transaction_user_pid=librarian_martigny.pid, - ) + ), ) assert res.status_code == 403 - assert 'maximal overdue fee amount is reached' in data['message'] + assert "maximal overdue fee amount is reached" in data["message"] # [2.2] test fee amount limit when we try to request another item res, data = postdata( client, - 'api_item.librarian_request', + "api_item.librarian_request", dict( item_pid=item2_lib_martigny.pid, patron_pid=patron_pid, pickup_location_pid=loc_public_martigny.pid, transaction_library_pid=lib_martigny.pid, - transaction_user_pid=librarian_martigny.pid - ) + transaction_user_pid=librarian_martigny.pid, + ), ) assert res.status_code == 403 - assert 'maximal overdue fee amount is reached' in data['message'] + assert "maximal overdue fee amount is reached" in data["message"] # [2.3] test fee amount limit when we try to extend loan res, _ = postdata( client, - 'api_item.extend_loan', + "api_item.extend_loan", dict( item_pid=item_pid, transaction_user_pid=librarian_martigny.pid, - transaction_location_pid=loc_public_martigny.pid - ) + transaction_location_pid=loc_public_martigny.pid, + ), ) assert res.status_code == 403 - assert 'maximal overdue fee amount is reached' in data['message'] + assert "maximal overdue fee amount is reached" in data["message"] # reset the patron_type with default value - del patron_type['limits'] + del patron_type["limits"] patron_type.update(patron_type, dbcommit=True, reindex=True) patron_type = PatronType.get_record_by_pid(patron_type.pid) - assert patron_type.get('limits') is None + assert patron_type.get("limits") is None # # check in the item to put it back to its original state res, _ = postdata( client, - 'api_item.checkin', + "api_item.checkin", dict( item_pid=item_pid, pid=loan_pid, transaction_location_pid=loc_public_martigny.pid, transaction_user_pid=librarian_martigny.pid, - ) + ), ) assert res.status_code == 200 def test_unpaid_subscription( - client, app, librarian_martigny, lib_martigny, item_lib_martigny, - patron_type_children_martigny, patron_type_children_martigny_data, - loc_public_martigny, patron_martigny, circ_policy_short_martigny + client, + app, + librarian_martigny, + lib_martigny, + item_lib_martigny, + patron_type_children_martigny, + patron_type_children_martigny_data, + loc_public_martigny, + patron_martigny, + circ_policy_short_martigny, ): """Test unpaid subscription restriction limit.""" item = item_lib_martigny @@ -436,10 +491,8 @@ def test_unpaid_subscription( # * Update the patron_type to set an unpaid_subscription limit rule to # True => as long as the patron has an unpaid subscription, circulation # operations should be denied.
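# (the limit is applied just below; STEP#2 then expects the checkout to be denied with HTTP 403)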
- patron_type \ - .setdefault('limits', {}) \ - .setdefault('unpaid_subscription', True) - patron_type.setdefault('subscription_amount', 10) + patron_type.setdefault("limits", {}).setdefault("unpaid_subscription", True) + patron_type.setdefault("subscription_amount", 10) patron_type = patron_type.update(patron_type, dbcommit=True, reindex=True) # STEP#1 :: Create an unpaid subscription for patron @@ -454,45 +507,56 @@ def test_unpaid_subscription( # STEP#2 :: Try a circulation operation # As the patron hasn't yet paid the subscription and we set a limit at # STEP#0, any checkout operation should be denied. - res, data = postdata(client, 'api_item.checkout', dict( - item_pid=item.pid, - patron_pid=patron.pid, - transaction_location_pid=loc_public_martigny.pid, - transaction_user_pid=librarian_martigny.pid, - )) + res, data = postdata( + client, + "api_item.checkout", + dict( + item_pid=item.pid, + patron_pid=patron.pid, + transaction_location_pid=loc_public_martigny.pid, + transaction_user_pid=librarian_martigny.pid, + ), + ) assert res.status_code == 403 - assert 'Checkout denied' in data['message'] and 'unpaid' in data['message'] + assert "Checkout denied" in data["message"] and "unpaid" in data["message"] # STEP#3 :: Pay the subscription and retry a circulation operation # As the subscription is now paid, the checkout operation must be granted data = { - 'parent': subscription['patron_transaction'], - 'creation_date': datetime.now().isoformat(), - 'type': 'payment', - 'subtype': 'cash', - 'amount': patron_type['subscription_amount'], - 'operator': {'$ref': get_ref_for_pid(Patron, librarian_martigny.pid)} + "parent": subscription["patron_transaction"], + "creation_date": datetime.now().isoformat(), + "type": "payment", + "subtype": "cash", + "amount": patron_type["subscription_amount"], + "operator": {"$ref": get_ref_for_pid(Patron, librarian_martigny.pid)}, } PatronTransactionEvent.create(data, dbcommit=True, reindex=True) assert not patron.pending_subscriptions - res, data = postdata(client, 'api_item.checkout', dict( - item_pid=item.pid, - patron_pid=patron.pid, - transaction_location_pid=loc_public_martigny.pid, - transaction_user_pid=librarian_martigny.pid, - )) + res, data = postdata( + client, + "api_item.checkout", + dict( + item_pid=item.pid, + patron_pid=patron.pid, + transaction_location_pid=loc_public_martigny.pid, + transaction_user_pid=librarian_martigny.pid, + ), + ) assert res.status_code == 200 - loan_pid = data['action_applied']['checkout']['pid'] + loan_pid = data["action_applied"]["checkout"]["pid"] # RESET DATA # * check-in the item # * reset the patron_type - res, _ = postdata(client, 'api_item.checkin', dict( - item_pid=item.pid, - pid=loan_pid, - transaction_location_pid=loc_public_martigny.pid, - transaction_user_pid=librarian_martigny.pid, - )) + res, _ = postdata( + client, + "api_item.checkin", + dict( + item_pid=item.pid, + pid=loan_pid, + transaction_location_pid=loc_public_martigny.pid, + transaction_user_pid=librarian_martigny.pid, + ), + ) assert res.status_code == 200 - patron_type.update(patron_type_children_martigny_data, - dbcommit=True, reindex=True) + patron_type.update(patron_type_children_martigny_data, dbcommit=True, reindex=True) diff --git a/tests/api/circulation/test_inhouse_cipo.py b/tests/api/circulation/test_inhouse_cipo.py index d4f54ed7b6..d44e836cc4 100644 --- a/tests/api/circulation/test_inhouse_cipo.py +++ b/tests/api/circulation/test_inhouse_cipo.py @@ -31,27 +31,28 @@ def test_less_than_one_day_checkout( - client, -
circ_policy_less_than_one_day_martigny, - patron_martigny, - patron2_martigny, - item_lib_martigny, - loc_public_martigny, - librarian_martigny, - item_on_shelf_martigny_patron_and_loan_pending): + client, + circ_policy_less_than_one_day_martigny, + patron_martigny, + patron2_martigny, + item_lib_martigny, + loc_public_martigny, + librarian_martigny, + item_on_shelf_martigny_patron_and_loan_pending, +): """Test checkout on an ON_SHELF item with 'less than one day' cipo.""" # Create a new item in ON_SHELF (without Loan) data = deepcopy(item_lib_martigny) - data.pop('barcode') - data.setdefault('status', ItemStatus.ON_SHELF) - created_item = Item.create( - data=data, dbcommit=True, reindex=True, delete_pid=True) + data.pop("barcode") + data.setdefault("status", ItemStatus.ON_SHELF) + created_item = Item.create(data=data, dbcommit=True, reindex=True, delete_pid=True) # Check item is ON_SHELF and NO PENDING loan exists! assert created_item.number_of_requests() == 0 assert created_item.status == ItemStatus.ON_SHELF assert not created_item.is_requested_by_patron( - patron2_martigny.get('patron', {}).get('barcode')[0]) + patron2_martigny.get("patron", {}).get("barcode")[0] + ) # Ensure that the transaction date used will be an open_day. owner_lib = Library.get_record_by_pid(created_item.library_pid) @@ -65,25 +66,25 @@ def test_less_than_one_day_checkout( login_user_via_session(client, librarian_martigny.user) res, data = postdata( client, - 'api_item.checkout', + "api_item.checkout", dict( item_pid=created_item.pid, patron_pid=patron2_martigny.pid, transaction_location_pid=loc_public_martigny.pid, transaction_user_pid=librarian_martigny.pid, - pickup_location_pid=loc_public_martigny.pid - ) + pickup_location_pid=loc_public_martigny.pid, + ), ) assert res.status_code == 200 - actions = data['action_applied'] - onloan_item = Item.get_record_by_pid(data['metadata']['pid']) - loan = Loan.get_record_by_pid(actions[LoanAction.CHECKOUT].get('pid')) + actions = data["action_applied"] + onloan_item = Item.get_record_by_pid(data["metadata"]["pid"]) + loan = Loan.get_record_by_pid(actions[LoanAction.CHECKOUT].get("pid")) # Check loan is ITEM_ON_LOAN and item is ON_LOAN assert onloan_item.number_of_requests() == 0 assert onloan_item.status == ItemStatus.ON_LOAN - assert loan['state'] == LoanState.ITEM_ON_LOAN + assert loan["state"] == LoanState.ITEM_ON_LOAN - loan_end_date = ciso8601.parse_datetime(loan.get('end_date')) - loan_end_date_formatted = loan_end_date.strftime('%Y-%m-%d') - transaction_date_formatted = transaction_date.strftime('%Y-%m-%d') + loan_end_date = ciso8601.parse_datetime(loan.get("end_date")) + loan_end_date_formatted = loan_end_date.strftime("%Y-%m-%d") + transaction_date_formatted = transaction_date.strftime("%Y-%m-%d") assert loan_end_date_formatted == transaction_date_formatted diff --git a/tests/api/circulation/test_library_calendar_changes.py b/tests/api/circulation/test_library_calendar_changes.py index ea156713e2..85ba08dfd6 100644 --- a/tests/api/circulation/test_library_calendar_changes.py +++ b/tests/api/circulation/test_library_calendar_changes.py @@ -27,13 +27,19 @@ def test_library_calendar_changes( - client, librarian_martigny, lib_martigny, lib_martigny_data, - patron_martigny, loc_public_martigny, item_lib_martigny, - circulation_policies + client, + librarian_martigny, + lib_martigny, + lib_martigny_data, + patron_martigny, + loc_public_martigny, + item_lib_martigny, + circulation_policies, ): """Test changes on library calendar and loan implications.""" + def
get_cache(library_pid): - cache_content = current_cache.get('library-calendar-changes') or {} + cache_content = current_cache.get("library-calendar-changes") or {} return cache_content.get(library_pid, {}) login_user_via_session(client, librarian_martigny.user) @@ -41,21 +47,21 @@ def get_cache(library_pid): # INITIALIZATION :: Create a loan circ_params = { - 'item_pid': item_lib_martigny.pid, - 'patron_pid': patron_martigny.pid, - 'pickup_location_pid': loc_public_martigny.pid, - 'transaction_library_pid': library.pid, - 'transaction_user_pid': librarian_martigny.pid + "item_pid": item_lib_martigny.pid, + "patron_pid": patron_martigny.pid, + "pickup_location_pid": loc_public_martigny.pid, + "transaction_library_pid": library.pid, + "transaction_user_pid": librarian_martigny.pid, } - res, data = postdata(client, 'api_item.checkout', circ_params) + res, data = postdata(client, "api_item.checkout", circ_params) assert res.status_code == 200 - loan_pid = res.json['action_applied']['checkout']['pid'] + loan_pid = res.json["action_applied"]["checkout"]["pid"] loan = Loan.get_record_by_pid(loan_pid) # TEST#1 :: Changes on an untracked field. # Related loan shouldn't be updated initial_enddate = loan.end_date - library['code'] = 'new code' + library["code"] = "new code" library = library.update(library, dbcommit=True, reindex=False) loan = Loan.get_record_by_pid(loan_pid) assert loan.end_date == initial_enddate @@ -66,12 +72,10 @@ def get_cache(library_pid): # * A `calendar_changes_update_loans` task should be run ; wait to the # end of the task to check if the "end date" of the loan has been # updated. - library['opening_hours'][0] = {'day': 'sunday', 'is_open': False} - library['exception_dates'].append({ - 'is_open': False, - 'title': 'exception date', - 'start_date': loan.end_date[:10] - }) + library["opening_hours"][0] = {"day": "sunday", "is_open": False} + library["exception_dates"].append( + {"is_open": False, "title": "exception date", "start_date": loan.end_date[:10]} + ) library = library.update(library, dbcommit=True, reindex=False) time.sleep(5) # TODO :: find a better way to detect task is finished. 
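# (the calendar_changes_update_loans task runs asynchronously; the fixed 5-second sleep is a rough upper bound for its completion)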
@@ -80,10 +84,10 @@ def get_cache(library_pid): # RESET FIXTURES circ_params = { - 'item_pid': item_lib_martigny.pid, - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid + "item_pid": item_lib_martigny.pid, + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, } - res, _ = postdata(client, 'api_item.checkin', circ_params) + res, _ = postdata(client, "api_item.checkin", circ_params) assert res.status_code == 200 library.update(lib_martigny_data, dbcommit=True, reindex=True) diff --git a/tests/api/circulation/test_library_with_no_circulation.py b/tests/api/circulation/test_library_with_no_circulation.py index 5147b65606..57fd570ae2 100644 --- a/tests/api/circulation/test_library_with_no_circulation.py +++ b/tests/api/circulation/test_library_with_no_circulation.py @@ -29,27 +29,30 @@ def test_checking_out_external_items_at_non_circ_library( - client, librarian_martigny, lib_martigny, lib_martigny_bourg, - patron_martigny, loc_public_martigny, loc_public_martigny_bourg, - item_lib_martigny_bourg, circulation_policies, item_lib_martigny, - librarian_martigny_bourg): + client, + librarian_martigny, + lib_martigny, + lib_martigny_bourg, + patron_martigny, + loc_public_martigny, + loc_public_martigny_bourg, + item_lib_martigny_bourg, + circulation_policies, + item_lib_martigny, + librarian_martigny_bourg, +): """Test checkout of external items at non-circ library.""" login_user_via_session(client, librarian_martigny_bourg.user) # A non-circulation library (no pickup location configured) whose library hours are # well configured opening_hours = [ - { - "day": "monday", - "is_open": True, - "times": [ - { - "start_time": "07:00", - "end_time": "19:00" - } - ] - } + { + "day": "monday", + "is_open": True, + "times": [{"start_time": "07:00", "end_time": "19:00"}], + } ] - lib_martigny_bourg['opening_hours'] = opening_hours + lib_martigny_bourg["opening_hours"] = opening_hours lib_martigny_bourg.update(lib_martigny_bourg, dbcommit=True, reindex=True) # a librarian from the non-circulating library can check out items from # another library at his own library @@ -59,22 +62,18 @@ def test_checking_out_external_items_at_non_circ_library( transaction_user_pid=librarian_martigny_bourg.pid, transaction_library_pid=lib_martigny_bourg.pid, ) - res, data = postdata( - client, - 'api_item.checkout', - params - ) + res, data = postdata(client, "api_item.checkout", params) assert res.status_code == 200 # the checkin is possible at the non-circulating library and the item goes # directly to in-transit res, data = postdata( client, - 'api_item.checkin', + "api_item.checkin", dict( item_pid=item_lib_martigny.pid, transaction_library_pid=lib_martigny_bourg.pid, transaction_user_pid=librarian_martigny_bourg.pid, - ) + ), ) assert res.status_code == 200 item = Item.get_record_by_pid(item_lib_martigny.pid) @@ -82,28 +81,30 @@ def test_checking_out_external_items_at_non_circ_library( def test_requesting_item_from_non_circulating_library( - client, librarian_martigny, lib_martigny, lib_martigny_bourg, - loc_restricted_martigny_bourg, - patron_martigny, loc_public_martigny, loc_public_martigny_bourg, - item_lib_martigny_bourg, circulation_policies, - librarian_martigny_bourg): + client, + librarian_martigny, + lib_martigny, + lib_martigny_bourg, + loc_restricted_martigny_bourg, + patron_martigny, + loc_public_martigny, + loc_public_martigny_bourg, + item_lib_martigny_bourg, + circulation_policies, + librarian_martigny_bourg, +):
"""Test requests on items of a non-circulating library.""" login_user_via_session(client, librarian_martigny_bourg.user) # Test a checkout of an item at a library with open-hours and no pickup # locations defined is possible. opening_hours = [ - { - "day": "monday", - "is_open": True, - "times": [ - { - "start_time": "07:00", - "end_time": "19:00" - } - ] - } + { + "day": "monday", + "is_open": True, + "times": [{"start_time": "07:00", "end_time": "19:00"}], + } ] - lib_martigny_bourg['opening_hours'] = opening_hours + lib_martigny_bourg["opening_hours"] = opening_hours lib_martigny_bourg.update(lib_martigny_bourg, dbcommit=True, reindex=True) params = dict( item_pid=item_lib_martigny_bourg.pid, @@ -111,29 +112,24 @@ def test_requesting_item_from_non_circulating_library( transaction_user_pid=librarian_martigny_bourg.pid, transaction_library_pid=lib_martigny_bourg.pid, ) - res, data = postdata( - client, - 'api_item.checkout', - params - ) + res, data = postdata(client, "api_item.checkout", params) assert res.status_code == 200 - loan_pid = data.get('action_applied').get('checkout').get('pid') + loan_pid = data.get("action_applied").get("checkout").get("pid") loan = Loan.get_record_by_pid(loan_pid) transaction_loc = Location.get_record_by_pid(loan.transaction_location_pid) assert transaction_loc.library_pid == lib_martigny_bourg.pid - pickup_lib_pid = Location.get_record_by_pid( - loan.pickup_location_pid).library_pid + pickup_lib_pid = Location.get_record_by_pid(loan.pickup_location_pid).library_pid assert pickup_lib_pid == lib_martigny_bourg.pid - assert loan.get('state') == LoanState.ITEM_ON_LOAN + assert loan.get("state") == LoanState.ITEM_ON_LOAN res, data = postdata( client, - 'api_item.checkin', + "api_item.checkin", dict( item_pid=item_lib_martigny_bourg.pid, transaction_library_pid=lib_martigny_bourg.pid, transaction_user_pid=librarian_martigny_bourg.pid, - ) + ), ) assert res.status_code == 200 item = Item.get_record_by_pid(item_lib_martigny_bourg.pid) @@ -141,102 +137,91 @@ def test_requesting_item_from_non_circulating_library( # TEST: a librarian from an external library can request and item from a # non-circulating library to be picked-up at his own library. 
- lib_martigny_bourg.pop('opening_hours', None) + lib_martigny_bourg.pop("opening_hours", None) lib_martigny_bourg.update(lib_martigny_bourg, dbcommit=True, reindex=True) login_user_via_session(client, librarian_martigny.user) res, data = postdata( client, - 'api_item.librarian_request', + "api_item.librarian_request", dict( item_pid=item_lib_martigny_bourg.pid, patron_pid=patron_martigny.pid, pickup_location_pid=loc_public_martigny.pid, transaction_library_pid=lib_martigny_bourg.pid, - transaction_user_pid=librarian_martigny.pid - ) + transaction_user_pid=librarian_martigny.pid, + ), ) assert res.status_code == 200 - loan = Loan(data.get('metadata').get('pending_loans')[0]) - transaction_loc = Location.get_record_by_pid( - loan.transaction_location_pid) + loan = Loan(data.get("metadata").get("pending_loans")[0]) + transaction_loc = Location.get_record_by_pid(loan.transaction_location_pid) assert transaction_loc.library_pid == lib_martigny_bourg.pid - pickup_lib_pid = Location.get_record_by_pid( - loan.pickup_location_pid).library_pid + pickup_lib_pid = Location.get_record_by_pid(loan.pickup_location_pid).library_pid assert pickup_lib_pid == lib_martigny.pid # non-circulating library send items to requesting library login_user_via_session(client, librarian_martigny_bourg.user) res, data = postdata( client, - 'api_item.validate_request', + "api_item.validate_request", dict( - pid=loan.get('pid'), + pid=loan.get("pid"), transaction_library_pid=lib_martigny_bourg.pid, - transaction_user_pid=librarian_martigny_bourg.pid - ) + transaction_user_pid=librarian_martigny_bourg.pid, + ), ) assert res.status_code == 200 - loan = Loan(data.get('metadata').get('pending_loans')[0]) - transaction_loc = Location.get_record_by_pid( - loan.transaction_location_pid) + loan = Loan(data.get("metadata").get("pending_loans")[0]) + transaction_loc = Location.get_record_by_pid(loan.transaction_location_pid) assert transaction_loc.library_pid == lib_martigny_bourg.pid - pickup_lib_pid = Location.get_record_by_pid( - loan.pickup_location_pid).library_pid + pickup_lib_pid = Location.get_record_by_pid(loan.pickup_location_pid).library_pid assert pickup_lib_pid == lib_martigny.pid - assert loan.get('state') == LoanState.ITEM_IN_TRANSIT_FOR_PICKUP + assert loan.get("state") == LoanState.ITEM_IN_TRANSIT_FOR_PICKUP # requesting library receives an item from non-circulating library. 
login_user_via_session(client, librarian_martigny.user) res, data = postdata( client, - 'api_item.receive', + "api_item.receive", dict( - pid=loan.get('pid'), + pid=loan.get("pid"), transaction_library_pid=lib_martigny.pid, - transaction_user_pid=librarian_martigny.pid - ) + transaction_user_pid=librarian_martigny.pid, + ), ) assert res.status_code == 200 - loan = Loan(data.get('metadata').get('pending_loans')[0]) + loan = Loan(data.get("metadata").get("pending_loans")[0]) - transaction_loc = Location.get_record_by_pid( - loan.transaction_location_pid) + transaction_loc = Location.get_record_by_pid(loan.transaction_location_pid) assert transaction_loc.library_pid == lib_martigny.pid - pickup_lib_pid = \ - Location.get_record_by_pid(loan.pickup_location_pid).library_pid + pickup_lib_pid = Location.get_record_by_pid(loan.pickup_location_pid).library_pid assert pickup_lib_pid == lib_martigny.pid - assert loan.get('state') == LoanState.ITEM_AT_DESK + assert loan.get("state") == LoanState.ITEM_AT_DESK # checkout item to requested patron login_user_via_session(client, librarian_martigny.user) - date_format = '%Y/%m/%dT%H:%M:%S.000Z' + date_format = "%Y/%m/%dT%H:%M:%S.000Z" today = datetime.utcnow() - eod = today.replace(hour=23, minute=59, second=0, microsecond=0, - tzinfo=lib_martigny.get_timezone()) + eod = today.replace( + hour=23, minute=59, second=0, microsecond=0, tzinfo=lib_martigny.get_timezone() + ) params = dict( item_pid=item_lib_martigny_bourg.pid, patron_pid=patron_martigny.pid, transaction_user_pid=librarian_martigny.pid, transaction_library_pid=lib_martigny.pid, - end_date=eod.strftime(date_format) - ) - res, data = postdata( - client, - 'api_item.checkout', - params + end_date=eod.strftime(date_format), ) + res, data = postdata(client, "api_item.checkout", params) assert res.status_code == 200 - loan_pid = data.get('action_applied').get('checkout').get('pid') + loan_pid = data.get("action_applied").get("checkout").get("pid") loan = Loan.get_record_by_pid(loan_pid) - transaction_loc = Location.get_record_by_pid( - loan.transaction_location_pid) + transaction_loc = Location.get_record_by_pid(loan.transaction_location_pid) assert transaction_loc.library_pid == lib_martigny.pid - pickup_lib_pid = Location.get_record_by_pid( - loan.pickup_location_pid).library_pid + pickup_lib_pid = Location.get_record_by_pid(loan.pickup_location_pid).library_pid assert pickup_lib_pid == lib_martigny.pid - assert loan.get('state') == LoanState.ITEM_ON_LOAN + assert loan.get("state") == LoanState.ITEM_ON_LOAN diff --git a/tests/api/circulation/test_locations_restrictions.py b/tests/api/circulation/test_locations_restrictions.py index efe9a526a6..d9c3911321 100644 --- a/tests/api/circulation/test_locations_restrictions.py +++ b/tests/api/circulation/test_locations_restrictions.py @@ -28,60 +28,56 @@ from rero_ils.modules.utils import get_ref_for_pid -def test_item_pickup_location( - client, librarian_martigny, item2_lib_martigny): +def test_item_pickup_location(client, librarian_martigny, item2_lib_martigny): """Test get item pickup locations.""" login_user_via_session(client, librarian_martigny.user) # test with dummy data will return 404 - res = client.get( - url_for( - 'api_item.get_pickup_locations', - item_pid='dummy_pid' - ) - ) + res = client.get(url_for("api_item.get_pickup_locations", item_pid="dummy_pid")) assert res.status_code == 404 # test with an existing item res = client.get( - url_for( - 'api_item.get_pickup_locations', - item_pid=item2_lib_martigny.pid - ) + 
url_for("api_item.get_pickup_locations", item_pid=item2_lib_martigny.pid) ) assert res.status_code == 200 data = get_json(res) - assert 'locations' in data + assert "locations" in data def test_location_disallow_request( - item_lib_martigny, item_lib_martigny_data, loc_public_martigny, - loc_public_martigny_data, loc_public_saxon, lib_martigny, patron_martigny, - circulation_policies + item_lib_martigny, + item_lib_martigny_data, + loc_public_martigny, + loc_public_martigny_data, + loc_public_saxon, + lib_martigny, + patron_martigny, + circulation_policies, ): """Test a request when location disallow request.""" item = item_lib_martigny location = loc_public_martigny # update location to disallow request - location['allow_request'] = False + location["allow_request"] = False location.update(location, dbcommit=True, reindex=True) # Create "virtual" Loan (not registered) - loan = Loan({ - 'item_pid': item_pid_to_object(item_lib_martigny.pid), - 'library_pid': lib_martigny.pid, - 'patron_pid': patron_martigny.pid - }) + loan = Loan( + { + "item_pid": item_pid_to_object(item_lib_martigny.pid), + "library_pid": lib_martigny.pid, + "patron_pid": patron_martigny.pid, + } + ) assert not can_be_requested(loan) # update item to set a temporary location allowing request # owning location disallow request, but temporary location allow request # then the request could be accepted (for locations checks) - item['temporary_location'] = { - '$ref': get_ref_for_pid('loc', loc_public_saxon.pid) - } + item["temporary_location"] = {"$ref": get_ref_for_pid("loc", loc_public_saxon.pid)} item.update(item, dbcommit=True, reindex=True) - assert loc_public_saxon['allow_request'] + assert loc_public_saxon["allow_request"] assert can_be_requested(loan) # reset fixtures @@ -89,25 +85,20 @@ def test_location_disallow_request( location.update(loc_public_martigny_data, dbcommit=True, reindex=True) -def test_holding_pickup_location( - client, patron_martigny, holding_lib_martigny): +def test_holding_pickup_location(client, patron_martigny, holding_lib_martigny): """Test get holding pickup locations for patron.""" login_user_via_session(client, patron_martigny.user) # test with dummy data will return 404 res = client.get( - url_for( - 'api_holding.get_pickup_locations', - holding_pid='dummy_pid' - ) + url_for("api_holding.get_pickup_locations", holding_pid="dummy_pid") ) assert res.status_code == 404 # test with an existing holding res = client.get( url_for( - 'api_holding.get_pickup_locations', - holding_pid=holding_lib_martigny.pid + "api_holding.get_pickup_locations", holding_pid=holding_lib_martigny.pid ) ) assert res.status_code == 200 data = get_json(res) - assert 'locations' in data + assert "locations" in data diff --git a/tests/api/circulation/test_temporary_item_type.py b/tests/api/circulation/test_temporary_item_type.py index 399eedf219..278ab9d6fe 100644 --- a/tests/api/circulation/test_temporary_item_type.py +++ b/tests/api/circulation/test_temporary_item_type.py @@ -29,16 +29,17 @@ def test_checkout_temporary_item_type( - client, - document, - librarian_martigny, - lib_martigny, - loc_public_martigny, - patron_martigny, - item_lib_martigny, - item_type_on_site_martigny, - circ_policy_short_martigny, - circ_policy_default_martigny): + client, + document, + librarian_martigny, + lib_martigny, + loc_public_martigny, + patron_martigny, + item_lib_martigny, + item_type_on_site_martigny, + circ_policy_short_martigny, + circ_policy_default_martigny, +): """Test checkout or item with temporary item_types""" 
login_user_via_session(client, librarian_martigny.user) item = item_lib_martigny @@ -49,40 +50,40 @@ def test_checkout_temporary_item_type( lib_martigny.organisation_pid, lib_martigny.pid, patron_martigny.patron_type_pid, - item.item_type_circulation_category_pid + item.item_type_circulation_category_pid, ) assert cipo_used == circ_policy_short_martigny # add a temporary_item_type on item # due to this change, the cipo used during circulation operation should # be different from the first cipo found. - item['temporary_item_type'] = { - '$ref': get_ref_for_pid('itty', item_type_on_site_martigny.pid) + item["temporary_item_type"] = { + "$ref": get_ref_for_pid("itty", item_type_on_site_martigny.pid) } item = item.update(data=item, dbcommit=True, reindex=True) # check temporary_circulation_category is indexed in document doc_list = url_for( - 'invenio_records_rest.doc_list', - q=f'holdings.circulation_category.pid\ - :{item_type_on_site_martigny.pid}' - ) + "invenio_records_rest.doc_list", + q=f"holdings.circulation_category.pid\ + :{item_type_on_site_martigny.pid}", + ) res = client.get(doc_list) data = get_json(res) - assert len(data['hits']['hits']) == 1 + assert len(data["hits"]["hits"]) == 1 cipo_tmp_used = CircPolicy.provide_circ_policy( lib_martigny.organisation_pid, lib_martigny.pid, patron_martigny.patron_type_pid, - item.item_type_circulation_category_pid + item.item_type_circulation_category_pid, ) assert cipo_tmp_used == circ_policy_default_martigny - delta = timedelta(cipo_tmp_used.get('checkout_duration')) + delta = timedelta(cipo_tmp_used.get("checkout_duration")) expected_date = datetime.now() + delta expected_dates = [expected_date, lib_martigny.next_open(expected_date)] - expected_dates = [d.strftime('%Y-%m-%d') for d in expected_dates] + expected_dates = [d.strftime("%Y-%m-%d") for d in expected_dates] # try a checkout and check the transaction end_date is related to the cipo # corresponding to the temporary item_type @@ -90,15 +91,15 @@ def test_checkout_temporary_item_type( item_pid=item.pid, patron_pid=patron_martigny.pid, transaction_user_pid=librarian_martigny.pid, - transaction_location_pid=loc_public_martigny.pid + transaction_location_pid=loc_public_martigny.pid, ) - res, data = postdata(client, 'api_item.checkout', params) + res, data = postdata(client, "api_item.checkout", params) assert res.status_code == 200 - transaction_end_date = data['action_applied']['checkout']['end_date'] + transaction_end_date = data["action_applied"]["checkout"]["end_date"] transaction_end_date = ciso8601.parse_datetime(transaction_end_date).date() - transaction_end_date = transaction_end_date.strftime('%Y-%m-%d') + transaction_end_date = transaction_end_date.strftime("%Y-%m-%d") assert transaction_end_date in expected_dates # reset the item to original value - del item['temporary_item_type'] + del item["temporary_item_type"] item.update(data=item, dbcommit=True, reindex=True) diff --git a/tests/api/collections/test_collections_api.py b/tests/api/collections/test_collections_api.py index 06c25a23c3..92f20cdb3a 100644 --- a/tests/api/collections/test_collections_api.py +++ b/tests/api/collections/test_collections_api.py @@ -18,9 +18,6 @@ """Test item collections.""" -def test_get_items( - item_lib_martigny, item2_lib_martigny, - coll_martigny_1): +def test_get_items(item_lib_martigny, item2_lib_martigny, coll_martigny_1): """Test get items for a collection""" - assert coll_martigny_1.get_items() == \ - [item_lib_martigny, item2_lib_martigny] + assert coll_martigny_1.get_items() == 
[item_lib_martigny, item2_lib_martigny] diff --git a/tests/api/collections/test_collections_permissions.py b/tests/api/collections/test_collections_permissions.py index f58dbe3d1d..4e77ba3133 100644 --- a/tests/api/collections/test_collections_permissions.py +++ b/tests/api/collections/test_collections_permissions.py @@ -21,16 +21,22 @@ from flask import current_app from flask_principal import AnonymousIdentity, identity_changed from flask_security import login_user -from utils import check_permission, flush_index +from utils import check_permission from rero_ils.modules.collections.permissions import CollectionPermissionPolicy from rero_ils.modules.patrons.api import Patron, PatronsSearch -@mock.patch.object(Patron, '_extensions', []) +@mock.patch.object(Patron, "_extensions", []) def test_collections_permissions( - patron_martigny, librarian_martigny, system_librarian_martigny, - coll_martigny_1, coll_sion_1, coll_saxon_1, lib_martigny, org_martigny + patron_martigny, + librarian_martigny, + system_librarian_martigny, + coll_martigny_1, + coll_sion_1, + coll_saxon_1, + lib_martigny, + org_martigny, ): """Test collection permissions class.""" @@ -40,113 +46,133 @@ def test_collections_permissions( identity_changed.send( current_app._get_current_object(), identity=AnonymousIdentity() ) - check_permission(CollectionPermissionPolicy, { - 'search': True, - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, None) - check_permission(CollectionPermissionPolicy, { - 'search': True, - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, coll_martigny_1) + check_permission( + CollectionPermissionPolicy, + { + "search": True, + "read": True, + "create": False, + "update": False, + "delete": False, + }, + None, + ) + check_permission( + CollectionPermissionPolicy, + { + "search": True, + "read": True, + "create": False, + "update": False, + "delete": False, + }, + coll_martigny_1, + ) login_user(patron_martigny.user) - check_permission(CollectionPermissionPolicy, {'create': False}, {}) - check_permission(CollectionPermissionPolicy, { - 'search': True, - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, coll_martigny_1) - check_permission(CollectionPermissionPolicy, { - 'search': True, - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, coll_sion_1) + check_permission(CollectionPermissionPolicy, {"create": False}, {}) + check_permission( + CollectionPermissionPolicy, + { + "search": True, + "read": True, + "create": False, + "update": False, + "delete": False, + }, + coll_martigny_1, + ) + check_permission( + CollectionPermissionPolicy, + { + "search": True, + "read": True, + "create": False, + "update": False, + "delete": False, + }, + coll_sion_1, + ) # Librarian with specific role # - search/read: any items # - create/update/delete: allowed for items of its own library login_user(librarian_martigny.user) - check_permission(CollectionPermissionPolicy, { - 'search': True, - 'read': True, - 'create': True, - 'update': True, - 'delete': True - }, coll_martigny_1) - check_permission(CollectionPermissionPolicy, { - 'search': True, - 'read': True, - 'create': True, - 'update': True, - 'delete': True - }, coll_saxon_1) - check_permission(CollectionPermissionPolicy, { - 'search': True, - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, coll_sion_1) + check_permission( + CollectionPermissionPolicy, + {"search": True, "read": True, "create": True, "update": True, "delete": True}, + 
coll_martigny_1, + ) + check_permission( + CollectionPermissionPolicy, + {"search": True, "read": True, "create": True, "update": True, "delete": True}, + coll_saxon_1, + ) + check_permission( + CollectionPermissionPolicy, + { + "search": True, + "read": True, + "create": False, + "update": False, + "delete": False, + }, + coll_sion_1, + ) # Librarian without specific role # - search/read: any items # - create/update/delete: disallowed for any items except for # "pro_circulation_manager" as create/update are allowed. - original_roles = librarian_martigny.get('roles', []) - librarian_martigny['roles'] = ['pro_circulation_manager'] + original_roles = librarian_martigny.get("roles", []) + librarian_martigny["roles"] = ["pro_circulation_manager"] librarian_martigny.update(librarian_martigny, dbcommit=True, reindex=True) - flush_index(PatronsSearch.Meta.index) + PatronsSearch.flush_and_refresh() login_user(librarian_martigny.user) # to refresh identity ! - check_permission(CollectionPermissionPolicy, { - 'search': True, - 'read': True, - 'create': True, - 'update': True, - 'delete': False - }, coll_martigny_1) + check_permission( + CollectionPermissionPolicy, + {"search": True, "read": True, "create": True, "update": True, "delete": False}, + coll_martigny_1, + ) - librarian_martigny['roles'] = ['pro_user_manager'] + librarian_martigny["roles"] = ["pro_user_manager"] librarian_martigny.update(librarian_martigny, dbcommit=True, reindex=True) - flush_index(PatronsSearch.Meta.index) + PatronsSearch.flush_and_refresh() login_user(librarian_martigny.user) # to refresh identity ! - check_permission(CollectionPermissionPolicy, { - 'search': True, - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, coll_martigny_1) + check_permission( + CollectionPermissionPolicy, + { + "search": True, + "read": True, + "create": False, + "update": False, + "delete": False, + }, + coll_martigny_1, + ) # reset the librarian - librarian_martigny['roles'] = original_roles + librarian_martigny["roles"] = original_roles librarian_martigny.update(librarian_martigny, dbcommit=True, reindex=True) - flush_index(PatronsSearch.Meta.index) + PatronsSearch.flush_and_refresh() # System librarian (aka. 
full-permissions)
     # - create/update/delete: allowed for collections of its own organisation
     login_user(system_librarian_martigny.user)
-    check_permission(CollectionPermissionPolicy, {
-        'search': True,
-        'read': True,
-        'create': True,
-        'update': True,
-        'delete': True
-    }, coll_saxon_1)
-    check_permission(CollectionPermissionPolicy, {
-        'search': True,
-        'read': True,
-        'create': False,
-        'update': False,
-        'delete': False
-    }, coll_sion_1)
+    check_permission(
+        CollectionPermissionPolicy,
+        {"search": True, "read": True, "create": True, "update": True, "delete": True},
+        coll_saxon_1,
+    )
+    check_permission(
+        CollectionPermissionPolicy,
+        {
+            "search": True,
+            "read": True,
+            "create": False,
+            "update": False,
+            "delete": False,
+        },
+        coll_sion_1,
+    )
diff --git a/tests/api/collections/test_collections_rest.py b/tests/api/collections/test_collections_rest.py
index 8a73010b1b..c295f4c32e 100644
--- a/tests/api/collections/test_collections_rest.py
+++ b/tests/api/collections/test_collections_rest.py
@@ -19,61 +19,65 @@
 import mock
 from flask import url_for
-from utils import VerifyRecordPermissionPatch, flush_index, get_json
+from utils import VerifyRecordPermissionPatch, get_json

 from rero_ils.modules.collections.api import CollectionsSearch


-@mock.patch('invenio_records_rest.views.verify_record_permission',
-            mock.MagicMock(return_value=VerifyRecordPermissionPatch))
+@mock.patch(
+    "invenio_records_rest.views.verify_record_permission",
+    mock.MagicMock(return_value=VerifyRecordPermissionPatch),
+)
 def test_collections_facets(client, rero_json_header, coll_martigny_1):
     """Test record retrieval."""
-    list_url = url_for('invenio_records_rest.coll_list')
+    list_url = url_for("invenio_records_rest.coll_list")

     res = client.get(list_url, headers=rero_json_header)
     data = get_json(res)
-    aggs = data['aggregations']
+    aggs = data["aggregations"]
     # check all facets are present
-    for facet in ['type', 'library', 'subject', 'teacher']:
+    for facet in ["type", "library", "subject", "teacher"]:
         assert aggs[facet]

     # FILTERS
     # type
-    list_url = url_for('invenio_records_rest.coll_list', type='course')
+    list_url = url_for("invenio_records_rest.coll_list", type="course")
     res = client.get(list_url, headers=rero_json_header)
     data = get_json(res)
-    assert data['hits']['total']['value'] == 1
+    assert data["hits"]["total"]["value"] == 1

     # library
-    list_url = url_for('invenio_records_rest.coll_list', library='lib1')
+    list_url = url_for("invenio_records_rest.coll_list", library="lib1")
     res = client.get(list_url, headers=rero_json_header)
     data = get_json(res)
-    assert data['hits']['total']['value'] == 1
+    assert data["hits"]["total"]["value"] == 1

     # subject
-    list_url = url_for('invenio_records_rest.coll_list', subject='geographic')
+    list_url = url_for("invenio_records_rest.coll_list", subject="geographic")
     res = client.get(list_url, headers=rero_json_header)
     data = get_json(res)
-    assert data['hits']['total']['value'] == 1
+    assert data["hits"]["total"]["value"] == 1

     # teacher
-    list_url = url_for(
-        'invenio_records_rest.coll_list', teacher='Pr. Smith, John'
-    )
+    list_url = url_for("invenio_records_rest.coll_list", teacher="Pr. 
Smith, John") res = client.get(list_url, headers=rero_json_header) data = get_json(res) - assert data['hits']['total']['value'] == 1 + assert data["hits"]["total"]["value"] == 1 -def test_collection_enrich_data(client, document, item_type_standard_martigny, - item_lib_martigny, item2_lib_martigny, - loc_public_martigny, coll_martigny_1, - json_header): +def test_collection_enrich_data( + client, + document, + item_type_standard_martigny, + item_lib_martigny, + item2_lib_martigny, + loc_public_martigny, + coll_martigny_1, + json_header, +): """Test record retrieval.""" coll_martigny_1.reindex() - flush_index(CollectionsSearch.Meta.index) - query = CollectionsSearch()\ - .filter('term', pid=coll_martigny_1.pid)\ - .source().scan() + CollectionsSearch.flush_and_refresh() + query = CollectionsSearch().filter("term", pid=coll_martigny_1.pid).source().scan() coll_martigny_1_es_data = next(query) - assert coll_martigny_1_es_data.organisation.pid == 'org1' + assert coll_martigny_1_es_data.organisation.pid == "org1" diff --git a/tests/api/conftest.py b/tests/api/conftest.py index 6ec9c78c7b..f9bb84406f 100644 --- a/tests/api/conftest.py +++ b/tests/api/conftest.py @@ -25,19 +25,18 @@ import pytest from flask_security.utils import hash_password from invenio_accounts.models import User -from utils import flush_index from rero_ils.modules.documents.api import Document, DocumentsSearch from rero_ils.modules.documents.models import DocumentFictionType from rero_ils.modules.items.api import Item, ItemsSearch -@pytest.fixture(scope='function') +@pytest.fixture(scope="function") def user_without_profile(db, default_user_password): """Create a simple invenio user with a profile.""" with db.session.begin_nested(): user = User( - email='user_without_profile@test.com', + email="user_without_profile@test.com", password=hash_password(default_user_password), user_profile=None, active=True, @@ -47,12 +46,12 @@ def user_without_profile(db, default_user_password): return user -@pytest.fixture(scope='function') +@pytest.fixture(scope="function") def user_with_profile(db, default_user_password): """Create a simple invenio user with a profile.""" with db.session.begin_nested(): user = User( - email='user_with_profile@test.com', + email="user_with_profile@test.com", password=hash_password(default_user_password), user_profile={}, active=True, @@ -60,18 +59,18 @@ def user_with_profile(db, default_user_password): db.session.add(user) profile = dict( birth_date=datetime(1990, 1, 1), - first_name='User', - last_name='With Profile', - city='Nowhere' + first_name="User", + last_name="With Profile", + city="Nowhere", ) - profile.username = 'user_with_profile' + profile.username = "user_with_profile" user.user_profile = profile db.session.merge(user) db.session.commit() return user -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def create_app(): """Create test app.""" from invenio_app.factory import create_api @@ -79,134 +78,89 @@ def create_app(): return create_api -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def doc_title_travailleurs(app): """Document with title with travailleur.""" data = { - '$schema': 'https://bib.rero.ch/schemas/documents/' - 'document-v0.0.1.json', - 'pid': 'doc_title_test1', - 'type': [{ - "main_type": "docmaintype_book", - "subtype": "docsubtype_other_book" - }], - 'fiction_statement': DocumentFictionType.Unspecified.value, - 'language': [{'type': 'bf:Language', 'value': 'fre'}], - 'title': [{ - 'type': 'bf:Title', - 'mainTitle': [{ - 'value': 'Les travailleurs 
assidus sont de retours' - }], - 'subtitle': [ - {'value': 'les jeunes arrivent bientôt ? Quelle histoire!'}] - }], - "provisionActivity": [ - { - "type": "bf:Publication", - "startDate": 1818 - } + "$schema": "https://bib.rero.ch/schemas/documents/" "document-v0.0.1.json", + "pid": "doc_title_test1", + "type": [{"main_type": "docmaintype_book", "subtype": "docsubtype_other_book"}], + "fiction_statement": DocumentFictionType.Unspecified.value, + "language": [{"type": "bf:Language", "value": "fre"}], + "title": [ + { + "type": "bf:Title", + "mainTitle": [{"value": "Les travailleurs assidus sont de retours"}], + "subtitle": [ + {"value": "les jeunes arrivent bientôt ? Quelle histoire!"} + ], + } ], - 'issuance': { - 'main_type': 'rdami:1001', - 'subtype': 'materialUnit' - }, - 'adminMetadata': { - 'encodingLevel': 'Minimal level' - }, - "seriesStatement": [{ - "seriesTitle": [ - { - "value": "Boy & Girl" - } - ] - }], + "provisionActivity": [{"type": "bf:Publication", "startDate": 1818}], + "issuance": {"main_type": "rdami:1001", "subtype": "materialUnit"}, + "adminMetadata": {"encodingLevel": "Minimal level"}, + "seriesStatement": [{"seriesTitle": [{"value": "Boy & Girl"}]}], } - doc = Document.create( - data=data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(DocumentsSearch.Meta.index) + doc = Document.create(data=data, delete_pid=False, dbcommit=True, reindex=True) + DocumentsSearch.flush_and_refresh() return doc -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def doc_title_travailleuses(app): """Document with title with travailleuses.""" data = { - '$schema': 'https://bib.rero.ch/schemas/documents/' - 'document-v0.0.1.json', - 'pid': 'doc_title_test2', - 'type': [{ - "main_type": "docmaintype_book", - "subtype": "docsubtype_other_book" - }], - 'fiction_statement': DocumentFictionType.Unspecified.value, - 'language': [{'type': 'bf:Language', 'value': 'fre'}], - 'title': [{ - 'type': 'bf:Title', - 'mainTitle': [{ - 'value': "Les travailleuses partent à l'école 100" - }], - 'subtitle': [{'value': "lorsqu'un est bœuf ex aequo"}] - - }], - 'contribution': [ + "$schema": "https://bib.rero.ch/schemas/documents/" "document-v0.0.1.json", + "pid": "doc_title_test2", + "type": [{"main_type": "docmaintype_book", "subtype": "docsubtype_other_book"}], + "fiction_statement": DocumentFictionType.Unspecified.value, + "language": [{"type": "bf:Language", "value": "fre"}], + "title": [ + { + "type": "bf:Title", + "mainTitle": [{"value": "Les travailleuses partent à l'école 100"}], + "subtitle": [{"value": "lorsqu'un est bœuf ex aequo"}], + } + ], + "contribution": [ { - 'entity': { - 'authorized_access_point': 'Müller, John', - 'type': 'bf:Person' + "entity": { + "authorized_access_point": "Müller, John", + "type": "bf:Person", }, - 'role': ['aut'] + "role": ["aut"], }, { - 'entity': { - 'authorized_access_point': 'Corminbœuf, Gruß', - 'type': 'bf:Person' + "entity": { + "authorized_access_point": "Corminbœuf, Gruß", + "type": "bf:Person", }, - 'role': ['aut'] - } - ], - "provisionActivity": [ - { - "type": "bf:Publication", - "startDate": 1818 - } + "role": ["aut"], + }, ], - 'issuance': { - 'main_type': 'rdami:1001', - 'subtype': 'materialUnit' - }, - 'adminMetadata': { - 'encodingLevel': 'Minimal level' - } + "provisionActivity": [{"type": "bf:Publication", "startDate": 1818}], + "issuance": {"main_type": "rdami:1001", "subtype": "materialUnit"}, + "adminMetadata": {"encodingLevel": "Minimal level"}, } - doc = Document.create( - data=data, - delete_pid=False, - 
dbcommit=True, - reindex=True) - flush_index(DocumentsSearch.Meta.index) + doc = Document.create(data=data, delete_pid=False, dbcommit=True, reindex=True) + DocumentsSearch.flush_and_refresh() return doc @pytest.fixture(scope="function") def item_lib_martigny_masked( - app, - document, - item_lib_martigny_data, - loc_public_martigny, - item_type_standard_martigny): + app, + document, + item_lib_martigny_data, + loc_public_martigny, + item_type_standard_martigny, +): """Create item of martigny library.""" data = deepcopy(item_lib_martigny_data) - data['barcode'] = 'masked' - data['pid'] = f'maked-{data["pid"]}' - data['_masked'] = True - item = Item.create( - data=data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(ItemsSearch.Meta.index) + data["barcode"] = "masked" + data["pid"] = f'maked-{data["pid"]}' + data["_masked"] = True + item = Item.create(data=data, delete_pid=False, dbcommit=True, reindex=True) + ItemsSearch.flush_and_refresh() yield item item.delete(True, True, True) diff --git a/tests/api/documents/test_documents_dumpers.py b/tests/api/documents/test_documents_dumpers.py index 7a3d6b6a39..f6535d9c55 100644 --- a/tests/api/documents/test_documents_dumpers.py +++ b/tests/api/documents/test_documents_dumpers.py @@ -22,34 +22,36 @@ from rero_ils.modules.acquisition.dumpers import document_acquisition_dumper from rero_ils.modules.commons.exceptions import RecordNotFound from rero_ils.modules.documents.api import Document -from rero_ils.modules.documents.dumpers import document_replace_refs_dumper, \ - document_title_dumper +from rero_ils.modules.documents.dumpers import ( + document_replace_refs_dumper, + document_title_dumper, +) from rero_ils.modules.entities.models import EntityType def test_document_dumpers(document, document_data): """Test document dumpers.""" dump_data = document.dumps(dumper=document_title_dumper) - assert dump_data['pid'] - assert dump_data['title_text'] + assert dump_data["pid"] + assert dump_data["title_text"] dump_data = document.dumps(dumper=document_acquisition_dumper) - assert dump_data['pid'] - assert dump_data['title_text'] - assert dump_data['identifiers'] + assert dump_data["pid"] + assert dump_data["title_text"] + assert dump_data["identifiers"] entity_data = { - 'entity': { - '$ref': 'https://mef.rero.ch/api/agents/idref/dummy_idref', - 'pid': 'dummy_pid', - 'type': EntityType.PERSON + "entity": { + "$ref": "https://mef.rero.ch/api/agents/idref/dummy_idref", + "pid": "dummy_pid", + "type": EntityType.PERSON, } } - document['contribution'] = [entity_data] + document["contribution"] = [entity_data] with pytest.raises(RecordNotFound): document.dumps(dumper=document_replace_refs_dumper) - document['contribution'] = document_data['contribution'] - document['subjects'] = [entity_data] + document["contribution"] = document_data["contribution"] + document["subjects"] = [entity_data] with pytest.raises(RecordNotFound): document.dumps(dumper=document_replace_refs_dumper) diff --git a/tests/api/documents/test_documents_files_rest.py b/tests/api/documents/test_documents_files_rest.py index 6516b897c2..400c811d2b 100644 --- a/tests/api/documents/test_documents_files_rest.py +++ b/tests/api/documents/test_documents_files_rest.py @@ -41,7 +41,7 @@ def test_document_files( res = client.get(list_url) hits = get_json(res)["hits"] aggregations = get_json(res)["aggregations"]["organisation"] - assert aggregations['buckets'][0]['doc_count'] == 1 + assert aggregations["buckets"][0]["doc_count"] == 1 assert hits["total"]["value"] == 1 # check for 
collections @@ -55,9 +55,7 @@ def test_document_files( # check for collections list_url = url_for( - "invenio_records_rest.doc_list", - q=f"_exists_:files", - view=org_martigny.pid + "invenio_records_rest.doc_list", q=f"_exists_:files", view=org_martigny.pid ) res = client.get(list_url) hits = get_json(res)["hits"] @@ -73,28 +71,32 @@ def test_document_files( json={ "metadata": { "collections": ["new col"], - "library": {'$ref': get_ref_for_pid('lib', 'lib1')}, - "document": {'$ref': get_ref_for_pid('doc', 'doc1')}, + "library": {"$ref": get_ref_for_pid("lib", "lib1")}, + "document": {"$ref": get_ref_for_pid("doc", "doc1")}, } }, ) assert res.status_code == 200 res = client.get(f"/records/{file_data['rec_id']}", headers=json_header) assert res.status_code == 200 - assert res.json['metadata'] == { - 'collections': ['new col'], - 'document': {'$ref': 'https://bib.rero.ch/api/documents/doc1'}, - 'library': {'$ref': 'https://bib.rero.ch/api/libraries/lib1'} + assert res.json["metadata"] == { + "collections": ["new col"], + "document": {"$ref": "https://bib.rero.ch/api/documents/doc1"}, + "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"}, } res = client.get(f"/records?q={file_data['rec_id']}", headers=json_header) assert res.status_code == 200 - metadata = res.json['hits']['hits'][0]['metadata'] + metadata = res.json["hits"]["hits"][0]["metadata"] assert set(metadata.keys()) == { - 'collections', 'document', 'file_size', 'library', 'n_files' + "collections", + "document", + "file_size", + "library", + "n_files", } - assert metadata['library'] == {'pid': 'lib1', 'type': 'lib'} - assert metadata['document'] == {'pid': 'doc1', 'type': 'doc'} + assert metadata["library"] == {"pid": "lib1", "type": "lib"} + assert metadata["document"] == {"pid": "doc1", "type": "doc"} # check for modifications in document res = client.get(list_url) diff --git a/tests/api/documents/test_documents_permissions.py b/tests/api/documents/test_documents_permissions.py index 8b8c4c089f..10c380ab8b 100644 --- a/tests/api/documents/test_documents_permissions.py +++ b/tests/api/documents/test_documents_permissions.py @@ -20,16 +20,14 @@ from flask import current_app from flask_principal import AnonymousIdentity, identity_changed from flask_security import login_user -from utils import check_permission, flush_index +from utils import check_permission from rero_ils.modules.documents.permissions import DocumentPermissionPolicy from rero_ils.modules.patrons.api import Patron, PatronsSearch -@mock.patch.object(Patron, '_extensions', []) -def test_documents_permissions( - patron_martigny, librarian_martigny, document -): +@mock.patch.object(Patron, "_extensions", []) +def test_documents_permissions(patron_martigny, librarian_martigny, document): """Test documents permissions class.""" # Anonymous user & Patron user # - search/read any document are allowed. 
@@ -37,70 +35,88 @@ def test_documents_permissions( identity_changed.send( current_app._get_current_object(), identity=AnonymousIdentity() ) - check_permission(DocumentPermissionPolicy, { - 'search': True, - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, None) - check_permission(DocumentPermissionPolicy, { - 'search': True, - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, document) + check_permission( + DocumentPermissionPolicy, + { + "search": True, + "read": True, + "create": False, + "update": False, + "delete": False, + }, + None, + ) + check_permission( + DocumentPermissionPolicy, + { + "search": True, + "read": True, + "create": False, + "update": False, + "delete": False, + }, + document, + ) login_user(patron_martigny.user) - check_permission(DocumentPermissionPolicy, {'create': False}, {}) - check_permission(DocumentPermissionPolicy, { - 'search': True, - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, document) + check_permission(DocumentPermissionPolicy, {"create": False}, {}) + check_permission( + DocumentPermissionPolicy, + { + "search": True, + "read": True, + "create": False, + "update": False, + "delete": False, + }, + document, + ) # Librarian with specific role # - search/read: any document # - create/update/delete: allowed for any document login_user(librarian_martigny.user) - check_permission(DocumentPermissionPolicy, { - 'search': True, - 'read': True, - 'create': True, - 'update': True, - 'delete': True - }, document) + check_permission( + DocumentPermissionPolicy, + {"search": True, "read": True, "create": True, "update": True, "delete": True}, + document, + ) # Librarian without specific role # - search/read: any document # - create/update/delete: disallowed for any document !! - original_roles = librarian_martigny.get('roles', []) - librarian_martigny['roles'] = ['pro_circulation_manager'] + original_roles = librarian_martigny.get("roles", []) + librarian_martigny["roles"] = ["pro_circulation_manager"] librarian_martigny.update(librarian_martigny, dbcommit=True, reindex=True) - flush_index(PatronsSearch.Meta.index) + PatronsSearch.flush_and_refresh() login_user(librarian_martigny.user) # to refresh identity ! - check_permission(DocumentPermissionPolicy, { - 'search': True, - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, document) + check_permission( + DocumentPermissionPolicy, + { + "search": True, + "read": True, + "create": False, + "update": False, + "delete": False, + }, + document, + ) # reset the librarian - librarian_martigny['roles'] = original_roles + librarian_martigny["roles"] = original_roles librarian_martigny.update(librarian_martigny, dbcommit=True, reindex=True) - flush_index(PatronsSearch.Meta.index) + PatronsSearch.flush_and_refresh() # Test if the document cannot be edited (harvested documents, ...) 
- with mock.patch('rero_ils.modules.documents.api.Document.can_edit', False): - check_permission(DocumentPermissionPolicy, { - 'search': True, - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, document) + with mock.patch("rero_ils.modules.documents.api.Document.can_edit", False): + check_permission( + DocumentPermissionPolicy, + { + "search": True, + "read": True, + "create": False, + "update": False, + "delete": False, + }, + document, + ) diff --git a/tests/api/documents/test_documents_rest.py b/tests/api/documents/test_documents_rest.py index b2e1dce193..8d8567282b 100644 --- a/tests/api/documents/test_documents_rest.py +++ b/tests/api/documents/test_documents_rest.py @@ -23,14 +23,22 @@ import mock from flask import url_for from invenio_accounts.testutils import login_user_via_session -from utils import VerifyRecordPermissionPatch, clean_text, flush_index, \ - get_json, mock_response, postdata, to_relative_url +from utils import ( + VerifyRecordPermissionPatch, + clean_text, + get_json, + mock_response, + postdata, + to_relative_url, +) from rero_ils.modules.commons.identifiers import IdentifierType from rero_ils.modules.documents.api import DocumentsSearch from rero_ils.modules.documents.utils import get_remote_cover -from rero_ils.modules.documents.views import can_request, \ - record_library_pickup_locations +from rero_ils.modules.documents.views import ( + can_request, + record_library_pickup_locations, +) from rero_ils.modules.operation_logs.api import OperationLogsSearch from rero_ils.modules.utils import get_ref_for_pid @@ -89,8 +97,7 @@ def clean_es_metadata(metadata): document_data.pop("identifiedBy", None) assert document_data == clean_es_metadata(data["metadata"]) - list_url = url_for( - "invenio_records_rest.doc_list", q=f"pid:{document.pid}") + list_url = url_for("invenio_records_rest.doc_list", q=f"pid:{document.pid}") res = client.get(list_url) assert res.status_code == 200 data = get_json(res) @@ -161,8 +168,7 @@ def datetime_milliseconds(date): assert res.status_code == 201 # check item creation and indexation - doc_list = url_for( - "invenio_records_rest.doc_list", view="global", pid="doc1") + doc_list = url_for("invenio_records_rest.doc_list", view="global", pid="doc1") res = client.get(doc_list, headers=rero_json_header) data = get_json(res) assert len(data["hits"]["hits"]) == 1 @@ -270,7 +276,7 @@ def test_documents_facets( "intendedAudience", "year", "status", - "acquisition" + "acquisition", ] assert all(key in data["aggregations"] for key in facet_keys) @@ -320,9 +326,7 @@ def test_documents_facets( }, 0, ), - ({ - "view": "global", - "author": "Nebehay, Christian Michael", "lang": "thl"}, 1), + ({"view": "global", "author": "Nebehay, Christian Michael", "lang": "thl"}, 1), ({"view": "global", "online": "true"}, 1), ] for params, value in checks: @@ -383,8 +387,7 @@ def test_documents_post_put_delete( list_url = url_for("invenio_records_rest.doc_list", q="pid:4") document_chinese_data["pid"] = "4" - res, data = postdata( - client, "invenio_records_rest.doc_list", document_chinese_data) + res, data = postdata(client, "invenio_records_rest.doc_list", document_chinese_data) assert res.status_code == 201 @@ -422,15 +425,11 @@ def test_documents_post_put_delete( { "partNumber": [ {"value": "Part Number (Latin)"}, - { - "value": "Part Number (Chinese)", - "language": "chi-hani"}, + {"value": "Part Number (Chinese)", "language": "chi-hani"}, ], "partName": [ {"value": "Part Name (Latin)"}, - { - "language": "chi-hani", - "value": "Part Name 
(Chinese)"}, + {"language": "chi-hani", "value": "Part Name (Chinese)"}, ], } ], @@ -439,15 +438,11 @@ def test_documents_post_put_delete( { "mainTitle": [ {"value": "International law (Latin)"}, - { - "value": "International law (Chinese)", - "language": "chi-hani"}, + {"value": "International law (Chinese)", "language": "chi-hani"}, ], "subtitle": [ {"value": "Parallel Subtitle (Latin)"}, - { - "value": "Parallel Subtitle (Chinese)", - "language": "chi-hani"}, + {"value": "Parallel Subtitle (Chinese)", "language": "chi-hani"}, ], "part": [ { @@ -472,15 +467,11 @@ def test_documents_post_put_delete( { "mainTitle": [ {"value": "Parallel Title 2 (Latin)"}, - { - "value": "Parallel Title 2 (Chinese)", - "language": "chi-hani"}, + {"value": "Parallel Title 2 (Chinese)", "language": "chi-hani"}, ], "subtitle": [ {"value": "Parallel Subtitle 2 (Latin)"}, - { - "value": "Parallel Subtitle 2 (Chinese)", - "language": "chi-hani"}, + {"value": "Parallel Subtitle 2 (Chinese)", "language": "chi-hani"}, ], "type": "bf:ParallelTitle", }, @@ -526,8 +517,7 @@ def test_documents_get_resolve_rero_json( rero_json_header, ): """Test record get with resolve and mimetype rero+json.""" - api_url = url_for( - "invenio_records_rest.doc_item", pid_value="doc2", resolve="1") + api_url = url_for("invenio_records_rest.doc_item", pid_value="doc2", resolve="1") res = client.get(api_url, headers=rero_json_header) assert res.status_code == 200 metadata = get_json(res).get("metadata", {}) @@ -588,8 +578,7 @@ def test_documents_resolve( entity_person_response_data, ): """Test document detailed view with items filter.""" - res = client.get( - url_for("invenio_records_rest.doc_item", pid_value="doc2")) + res = client.get(url_for("invenio_records_rest.doc_item", pid_value="doc2")) assert res.json["metadata"]["contribution"] == [ { "entity": { @@ -762,7 +751,7 @@ def failure(response_data): {"type": IdentifierType.EAN, "value": "invalid_ean_identifier"} ) document.update(document, dbcommit=True, reindex=True) - flush_index(DocumentsSearch.Meta.index) + DocumentsSearch.flush_and_refresh() params = {"identifiers": ["(bf:Ean)invalid_ean_identifier"]} url = url_for("invenio_records_rest.doc_list", **params) @@ -792,7 +781,7 @@ def test_document_current_library_on_request_parameter( doc_url = url_for("invenio_records_rest.doc_item", pid_value=document.pid) res = client.put(doc_url, data=json.dumps(document), headers=json_header) assert res.status_code == 200 - flush_index(OperationLogsSearch.Meta.index) + OperationLogsSearch.flush_and_refresh() oplg = next( OperationLogsSearch() .filter("term", record__type="doc") @@ -813,7 +802,7 @@ def test_document_current_library_on_request_parameter( ) res = client.put(doc_url, data=json.dumps(document), headers=json_header) assert res.status_code == 200 - flush_index(OperationLogsSearch.Meta.index) + OperationLogsSearch.flush_and_refresh() oplg = next( OperationLogsSearch() .filter("term", record__type="doc") @@ -887,21 +876,16 @@ def check_field_data(key, field_data, data): ] assert data_keys == list(field_data.keys()) + check_field_data("canton", field_data, {"label": "canton_ag", "value": "ag"}) + check_field_data("country", field_data, {"label": "country_aa", "value": "aa"}) check_field_data( - "canton", field_data, {"label": "canton_ag", "value": "ag"}) - check_field_data( - "country", field_data, {"label": "country_aa", "value": "aa"}) - check_field_data( - "rdaCarrierType", field_data, - {"label": "rdact:1002", "value": "rdact:1002"} + "rdaCarrierType", field_data, {"label": 
"rdact:1002", "value": "rdact:1002"} ) check_field_data( - "rdaContentType", field_data, - {"label": "rdaco:1002", "value": "rdaco:1002"} + "rdaContentType", field_data, {"label": "rdaco:1002", "value": "rdaco:1002"} ) check_field_data( - "rdaMediaType", field_data, - {"label": "rdamt:1001", "value": "rdamt:1001"} + "rdaMediaType", field_data, {"label": "rdamt:1001", "value": "rdamt:1001"} ) @@ -945,8 +929,7 @@ def test_document_fulltext(client, document_with_files, document_with_issn): assert data["pid"] == document_with_files.pid list_url = url_for( - "invenio_records_rest.doc_list", - q=f'"Document ({document_with_files.pid})"' + "invenio_records_rest.doc_list", q=f'"Document ({document_with_files.pid})"' ) res = client.get(list_url) hits = get_json(res)["hits"] diff --git a/tests/api/documents/test_export_serializers.py b/tests/api/documents/test_export_serializers.py index 22aee9b9a1..bfbd771be4 100644 --- a/tests/api/documents/test_export_serializers.py +++ b/tests/api/documents/test_export_serializers.py @@ -22,43 +22,61 @@ from utils import VerifyRecordPermissionPatch, get_json -@mock.patch('invenio_records_rest.views.verify_record_permission', - mock.MagicMock(return_value=VerifyRecordPermissionPatch)) -def test_json_export_serializers(client, export_json_header, document, - export_document): +@mock.patch( + "invenio_records_rest.views.verify_record_permission", + mock.MagicMock(return_value=VerifyRecordPermissionPatch), +) +def test_json_export_serializers(client, export_json_header, document, export_document): """Test JSON export serializers for documents.""" - item_url = url_for('invenio_records_rest.doc_item', - pid_value=export_document.pid) + item_url = url_for("invenio_records_rest.doc_item", pid_value=export_document.pid) response = client.get(item_url, headers=export_json_header) assert response.status_code == 200 # Get the first result. 
    data = get_json(response)
     # Check that all desired keys are not in data
-    for key in ['created', 'updated', 'id', 'links', 'metadata']:
+    for key in ["created", "updated", "id", "links", "metadata"]:
         assert key not in data

-    list_url = url_for(
-        'invenio_records_rest.doc_list', q=f'pid:{export_document.pid}'
-    )
+    list_url = url_for("invenio_records_rest.doc_list", q=f"pid:{export_document.pid}")
     response = client.get(list_url, headers=export_json_header)
     assert response.status_code == 200
     data = get_json(response)
-    for key in ['created', 'updated', 'id', 'links', 'metadata']:
+    for key in ["created", "updated", "id", "links", "metadata"]:
         assert key not in data


-@mock.patch('invenio_records_rest.views.verify_record_permission',
-            mock.MagicMock(return_value=VerifyRecordPermissionPatch))
+@mock.patch(
+    "invenio_records_rest.views.verify_record_permission",
+    mock.MagicMock(return_value=VerifyRecordPermissionPatch),
+)
 def test_ris_serializer(client, ris_header, document, export_document):
     """Test RIS formatter."""
     ris_tag = [
-        'TY -', 'ID -', 'TI -', 'T2 -', 'AU -', 'A2 -', 'DA -',
-        'SP -', 'EP -', 'CY -', 'LA -', 'PB -', 'SN -', 'UR -',
-        'KW -', 'ET -', 'DO -', 'VL -', 'IS -', 'PP -', 'Y1 -',
-        'PY -', 'ER -'
+        "TY -",
+        "ID -",
+        "TI -",
+        "T2 -",
+        "AU -",
+        "A2 -",
+        "DA -",
+        "SP -",
+        "EP -",
+        "CY -",
+        "LA -",
+        "PB -",
+        "SN -",
+        "UR -",
+        "KW -",
+        "ET -",
+        "DO -",
+        "VL -",
+        "IS -",
+        "PP -",
+        "Y1 -",
+        "PY -",
+        "ER -",
     ]
-    list_url = url_for('invenio_records_rest.doc_list',
-                       q=f'pid:{export_document.pid}')
+    list_url = url_for("invenio_records_rest.doc_list", q=f"pid:{export_document.pid}")
     response = client.get(list_url, headers=ris_header)
     assert response.status_code == 200
     ris_data = response.get_data(as_text=True)
diff --git a/tests/api/documents/test_marcxml_rest_api.py b/tests/api/documents/test_marcxml_rest_api.py
index 120ca37925..7c947f6d85 100644
--- a/tests/api/documents/test_marcxml_rest_api.py
+++ b/tests/api/documents/test_marcxml_rest_api.py
@@ -25,59 +25,57 @@
 def test_marcxml_documents_create(
-    client, document_marcxml, documents_marcxml, rero_marcxml_header,
-    librarian_martigny):
+    client, document_marcxml, documents_marcxml, rero_marcxml_header, librarian_martigny
+):
     """Test post of marcxml document for logged users."""
     res, data = postdata(
         client,
-        'invenio_records_rest.doc_list',
+        "invenio_records_rest.doc_list",
         document_marcxml,
         headers=rero_marcxml_header,
-        force_data_as_json=False
+        force_data_as_json=False,
     )
     assert res.status_code == 401

     login_user_via_session(client, librarian_martigny.user)
     res, data = postdata(
         client,
-        'invenio_records_rest.doc_list',
+        "invenio_records_rest.doc_list",
         document_marcxml,
         headers=rero_marcxml_header,
-        force_data_as_json=False
+        force_data_as_json=False,
     )
     assert res.status_code == 201
-    assert data['metadata']['_draft']
+    assert data["metadata"]["_draft"]

     # test fails when multiple xml records are sent.
res, data = postdata( client, - 'invenio_records_rest.doc_list', + "invenio_records_rest.doc_list", documents_marcxml, headers=rero_marcxml_header, - force_data_as_json=False + force_data_as_json=False, ) assert res.status_code == 400 def test_marcxml_documents_create_with_a_token( - app, client, document_marcxml, rero_marcxml_header, - librarian_martigny, script_info): + app, client, document_marcxml, rero_marcxml_header, librarian_martigny +): """Test post of marcxml document with an access token.""" runner = CliRunner() res = runner.invoke( token_create, - ['-n', 'test', '-u', librarian_martigny.dumps().get('email'), - '-t', 'my_token'], - obj=script_info + ["-n", "test", "-u", librarian_martigny.dumps().get("email"), "-t", "my_token"], ) - access_token = res.output.strip().split('\n')[0] + access_token = res.output.strip().split("\n")[0] res, data = postdata( client, - 'invenio_records_rest.doc_list', + "invenio_records_rest.doc_list", document_marcxml, - url_data={'access_token': access_token}, + url_data={"access_token": access_token}, headers=rero_marcxml_header, - force_data_as_json=False + force_data_as_json=False, ) assert res.status_code == 201 - assert data['metadata']['_draft'] + assert data["metadata"]["_draft"] diff --git a/tests/api/entities/local_entities/test_local_entities_extensions.py b/tests/api/entities/local_entities/test_local_entities_extensions.py index f5c1427ae1..c3eb6169dc 100644 --- a/tests/api/entities/local_entities/test_local_entities_extensions.py +++ b/tests/api/entities/local_entities/test_local_entities_extensions.py @@ -19,22 +19,27 @@ """Tests `LocalEntity` authorized access point.""" -def test_local_entities_authorized_access_point(local_entity_person, - local_entity_person2, - local_entity_org, - local_entity_org2): +def test_local_entities_authorized_access_point( + local_entity_person, local_entity_person2, local_entity_org, local_entity_org2 +): """Test authorized access point calculation.""" dumped_record = local_entity_person.dumps() - assert dumped_record['authorized_access_point'] == 'Loy, Georg (1881-1968)' + assert dumped_record["authorized_access_point"] == "Loy, Georg (1881-1968)" dumped_record = local_entity_person2.dumps() - assert dumped_record['authorized_access_point'] == \ - 'William III, King of England (1650-1702)' + assert ( + dumped_record["authorized_access_point"] + == "William III, King of England (1650-1702)" + ) dumped_record = local_entity_org.dumps() - assert dumped_record['authorized_access_point'] == \ - 'Convegno internazionale di Italianistica' + assert ( + dumped_record["authorized_access_point"] + == "Convegno internazionale di Italianistica" + ) # dumped_record = local_entity_org2.dumps() - assert dumped_record['authorized_access_point'] == \ - 'Catholic Church. Concilium Plenarium Americae ' \ - 'Latinae (5th ; 1899 ; Rome, Italy)' + assert ( + dumped_record["authorized_access_point"] + == "Catholic Church. 
Concilium Plenarium Americae " + "Latinae (5th ; 1899 ; Rome, Italy)" + ) diff --git a/tests/api/entities/local_entities/test_local_entities_permissions.py b/tests/api/entities/local_entities/test_local_entities_permissions.py index 344e20b1f6..7e2e369863 100644 --- a/tests/api/entities/local_entities/test_local_entities_permissions.py +++ b/tests/api/entities/local_entities/test_local_entities_permissions.py @@ -21,15 +21,18 @@ from flask_security.utils import login_user from utils import check_permission -from rero_ils.modules.entities.local_entities.permissions import \ - LocalEntityPermissionPolicy +from rero_ils.modules.entities.local_entities.permissions import ( + LocalEntityPermissionPolicy, +) -def test_local_entity_permissions(patron_martigny, - librarian_martigny, - librarian2_martigny, - system_librarian_martigny, - local_entity_person): +def test_local_entity_permissions( + patron_martigny, + librarian_martigny, + librarian2_martigny, + system_librarian_martigny, + local_entity_person, +): """Test entity permissions class.""" permission_policy = LocalEntityPermissionPolicy @@ -39,54 +42,62 @@ def test_local_entity_permissions(patron_martigny, identity_changed.send( current_app._get_current_object(), identity=AnonymousIdentity() ) - check_permission(permission_policy, { - 'search': True, - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, {}) + check_permission( + permission_policy, + { + "search": True, + "read": True, + "create": False, + "update": False, + "delete": False, + }, + {}, + ) # Patron user # - Allow search/read actions on any local entity # - Deny create/update/delete actions on any local entity login_user(patron_martigny.user) - check_permission(permission_policy, { - 'search': True, - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, local_entity_person) + check_permission( + permission_policy, + { + "search": True, + "read": True, + "create": False, + "update": False, + "delete": False, + }, + local_entity_person, + ) # As staff member without `pro_entity_manager` role : # - Allow search/read actions on any local entity # - Deny create/update/delete actions on any local entity login_user(librarian2_martigny.user) - check_permission(permission_policy, { - 'search': True, - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, local_entity_person) + check_permission( + permission_policy, + { + "search": True, + "read": True, + "create": False, + "update": False, + "delete": False, + }, + local_entity_person, + ) # As staff member with `pro_entity_manager` role : # - Allow search/read actions on any local entity # - Allow create/update/delete actions on any local entity login_user(librarian_martigny.user) - check_permission(permission_policy, { - 'search': True, - 'read': True, - 'create': True, - 'update': True, - 'delete': True - }, local_entity_person) + check_permission( + permission_policy, + {"search": True, "read": True, "create": True, "update": True, "delete": True}, + local_entity_person, + ) # Full permission user # - Allow search/read actions on any local entity # - Allow create/update/delete actions on any local entity login_user(system_librarian_martigny.user) - check_permission(permission_policy, { - 'search': True, - 'read': True, - 'create': True, - 'update': True, - 'delete': True - }, local_entity_person) + check_permission( + permission_policy, + {"search": True, "read": True, "create": True, "update": True, "delete": True}, + local_entity_person, + ) diff --git 
a/tests/api/entities/local_entities/test_local_entities_rest.py b/tests/api/entities/local_entities/test_local_entities_rest.py index 23ae3c21ab..17a11959ff 100644 --- a/tests/api/entities/local_entities/test_local_entities_rest.py +++ b/tests/api/entities/local_entities/test_local_entities_rest.py @@ -22,8 +22,7 @@ import mock from flask import url_for -from utils import VerifyRecordPermissionPatch, get_json, postdata, \ - to_relative_url +from utils import VerifyRecordPermissionPatch, get_json, postdata, to_relative_url from rero_ils.modules.documents.dumpers import document_replace_refs_dumper from rero_ils.modules.entities.dumpers import indexer_dumper @@ -32,21 +31,19 @@ from rero_ils.modules.utils import get_ref_for_pid -def test_local_entities_permissions(client, roles, local_entity_person, - json_header): +def test_local_entities_permissions(client, roles, local_entity_person, json_header): """Test record retrieval.""" - item_url = url_for('invenio_records_rest.locent_item', - pid_value='locent_pers') + item_url = url_for("invenio_records_rest.locent_item", pid_value="locent_pers") res = client.get(item_url) assert res.status_code == 200 - res, _ = postdata(client, 'invenio_records_rest.locent_list', {}) + res, _ = postdata(client, "invenio_records_rest.locent_list", {}) assert res.status_code == 401 client.put( - url_for('invenio_records_rest.locent_item', pid_value='locent_pers'), + url_for("invenio_records_rest.locent_item", pid_value="locent_pers"), data={}, - headers=json_header + headers=json_header, ) assert res.status_code == 401 @@ -54,92 +51,87 @@ def test_local_entities_permissions(client, roles, local_entity_person, assert res.status_code == 401 -@mock.patch('invenio_records_rest.views.verify_record_permission', - mock.MagicMock(return_value=VerifyRecordPermissionPatch)) +@mock.patch( + "invenio_records_rest.views.verify_record_permission", + mock.MagicMock(return_value=VerifyRecordPermissionPatch), +) def test_local_entities_get(client, local_entity_person): """Test record retrieval.""" - item_url = url_for('invenio_records_rest.locent_item', - pid_value='locent_pers') + item_url = url_for("invenio_records_rest.locent_item", pid_value="locent_pers") res = client.get(item_url) assert res.status_code == 200 - assert res.headers['ETag'] == f'"{local_entity_person.revision_id}"' + assert res.headers["ETag"] == f'"{local_entity_person.revision_id}"' data = get_json(res) - assert local_entity_person.dumps() == data['metadata'] + assert local_entity_person.dumps() == data["metadata"] # Check metadata - for k in ['created', 'updated', 'metadata', 'links']: + for k in ["created", "updated", "metadata", "links"]: assert k in data # Check self links - res = client.get(to_relative_url(data['links']['self'])) + res = client.get(to_relative_url(data["links"]["self"])) assert res.status_code == 200 assert data == get_json(res) - assert local_entity_person.dumps() == data['metadata'] + assert local_entity_person.dumps() == data["metadata"] - list_url = url_for('invenio_records_rest.locent_list', pid='locent_pers') + list_url = url_for("invenio_records_rest.locent_list", pid="locent_pers") res = client.get(list_url) assert res.status_code == 200 data = get_json(res) entity_person = local_entity_person.replace_refs() - entity_person['type'] = EntityType.PERSON - assert data['hits']['hits'][0]['metadata'] == \ - entity_person.dumps(dumper=indexer_dumper) + entity_person["type"] = EntityType.PERSON + assert data["hits"]["hits"][0]["metadata"] == entity_person.dumps( + 
dumper=indexer_dumper + ) -@mock.patch('invenio_records_rest.views.verify_record_permission', - mock.MagicMock(return_value=VerifyRecordPermissionPatch)) -def test_local_entities_post_put_delete(client, local_entity_person_data, - json_header): +@mock.patch( + "invenio_records_rest.views.verify_record_permission", + mock.MagicMock(return_value=VerifyRecordPermissionPatch), +) +def test_local_entities_post_put_delete(client, local_entity_person_data, json_header): """Test record api post, put and delete.""" - item_url = url_for('invenio_records_rest.locent_item', pid_value='1') - list_url = url_for('invenio_records_rest.locent_list', q='pid:1') + item_url = url_for("invenio_records_rest.locent_item", pid_value="1") + list_url = url_for("invenio_records_rest.locent_list", q="pid:1") local_entity_data = local_entity_person_data # Create record / POST - local_entity_data['pid'] = '1' - res, data = postdata( - client, - 'invenio_records_rest.locent_list', - local_entity_data - ) + local_entity_data["pid"] = "1" + res, data = postdata(client, "invenio_records_rest.locent_list", local_entity_data) assert res.status_code == 201 - local_entity = LocalEntity.get_record_by_pid(data['metadata']['pid']) + local_entity = LocalEntity.get_record_by_pid(data["metadata"]["pid"]) # Check that the returned record matches the given data - assert local_entity.dumps() == data['metadata'] + assert local_entity.dumps() == data["metadata"] res = client.get(item_url) assert res.status_code == 200 data = get_json(res) - assert local_entity.dumps() == data['metadata'] + assert local_entity.dumps() == data["metadata"] # Update record/PUT data = local_entity_data - data['name'] = 'Test Name' - res = client.put( - item_url, - data=json.dumps(data), - headers=json_header - ) + data["name"] = "Test Name" + res = client.put(item_url, data=json.dumps(data), headers=json_header) assert res.status_code == 200 # Check that the returned record matches the given data data = get_json(res) - assert data['metadata']['name'] == 'Test Name' + assert data["metadata"]["name"] == "Test Name" # Check value from record API res = client.get(item_url) assert res.status_code == 200 data = get_json(res) - assert data['metadata']['name'] == 'Test Name' + assert data["metadata"]["name"] == "Test Name" # Check value from Elasticsearch res = client.get(list_url) assert res.status_code == 200 - data = get_json(res)['hits']['hits'][0] - assert data['metadata']['name'] == 'Test Name' + data = get_json(res)["hits"]["hits"][0] + assert data["metadata"]["name"] == "Test Name" # Delete record/DELETE res = client.delete(item_url) @@ -149,73 +141,79 @@ def test_local_entities_post_put_delete(client, local_entity_person_data, assert res.status_code == 410 -@mock.patch('rero_ils.modules.decorators.login_and_librarian', - mock.MagicMock()) -def test_local_search_by_proxy( - client, local_entity_genre_form, local_entity_org -): +@mock.patch("rero_ils.modules.decorators.login_and_librarian", mock.MagicMock()) +def test_local_search_by_proxy(client, local_entity_genre_form, local_entity_org): """Test local entity search proxy.""" - response = client.get(url_for( - 'api_local_entities.local_search_proxy', - entity_type='concepts-genreForm', - term='personal', - size='dummy_qs_arg' - )) + response = client.get( + url_for( + "api_local_entities.local_search_proxy", + entity_type="concepts-genreForm", + term="personal", + size="dummy_qs_arg", + ) + ) assert response.status_code == 200 assert len(response.json) == 1 - assert response.json[0]['pid'] == 
local_entity_genre_form.pid - - response = client.get(url_for( - 'api_local_entities.local_search_proxy', - entity_type='concepts-genreForm', - term='personal', - size='0' - )) + assert response.json[0]["pid"] == local_entity_genre_form.pid + + response = client.get( + url_for( + "api_local_entities.local_search_proxy", + entity_type="concepts-genreForm", + term="personal", + size="0", + ) + ) assert response.status_code == 200 assert len(response.json) == 0 - response = client.get(url_for( - 'api_local_entities.local_search_proxy', - entity_type='concepts-genreForm', - term='dummy_key' - )) + response = client.get( + url_for( + "api_local_entities.local_search_proxy", + entity_type="concepts-genreForm", + term="dummy_key", + ) + ) assert response.status_code == 200 assert len(response.json) == 0 - response = client.get(url_for( - 'api_local_entities.local_search_proxy', - entity_type='bf:Organisation', - term='Convegno' - )) + response = client.get( + url_for( + "api_local_entities.local_search_proxy", + entity_type="bf:Organisation", + term="Convegno", + ) + ) assert response.status_code == 200 assert len(response.json) == 1 - assert response.json[0]['pid'] == local_entity_org.pid + assert response.json[0]["pid"] == local_entity_org.pid -@mock.patch('invenio_records_rest.views.verify_record_permission', - mock.MagicMock(return_value=VerifyRecordPermissionPatch)) -def test_local_entities_resolve( - client, mef_agents_url, local_entity_person, document -): +@mock.patch( + "invenio_records_rest.views.verify_record_permission", + mock.MagicMock(return_value=VerifyRecordPermissionPatch), +) +def test_local_entities_resolve(client, mef_agents_url, local_entity_person, document): """Test local entity resolver""" # LOCAL ENTITY RESOLVER =================================================== - res = client.get(url_for( - 'invenio_records_rest.locent_item', - pid_value=local_entity_person.pid, - resolve='1' - )) + res = client.get( + url_for( + "invenio_records_rest.locent_item", + pid_value=local_entity_person.pid, + resolve="1", + ) + ) assert res.status_code == 200 # LOCAL ENTITY INTO A DOCUMENT RESOLVER =================================== - ent_ref = get_ref_for_pid('locent', local_entity_person.pid) - document.setdefault('contribution', []).append({ - 'entity': {'$ref': ent_ref}, - 'role': ['aut'] - }) + ent_ref = get_ref_for_pid("locent", local_entity_person.pid) + document.setdefault("contribution", []).append( + {"entity": {"$ref": ent_ref}, "role": ["aut"]} + ) document = document.update(document, dbcommit=True, reindex=True) data = document.dumps(dumper=document_replace_refs_dumper) assert any( - contribution['entity'].get('pid') == local_entity_person.pid - for contribution in data['contribution'] + contribution["entity"].get("pid") == local_entity_person.pid + for contribution in data["contribution"] ) diff --git a/tests/api/entities/remote_entities/test_remote_entities_permissions.py b/tests/api/entities/remote_entities/test_remote_entities_permissions.py index c71d50e4b8..168bef6305 100644 --- a/tests/api/entities/remote_entities/test_remote_entities_permissions.py +++ b/tests/api/entities/remote_entities/test_remote_entities_permissions.py @@ -22,22 +22,22 @@ from invenio_accounts.testutils import login_user_via_session from utils import check_permission, get_json -from rero_ils.modules.entities.remote_entities.permissions import \ - RemoteEntityPermissionPolicy +from rero_ils.modules.entities.remote_entities.permissions import ( + RemoteEntityPermissionPolicy, +) -def 
test_remote_entity_permissions_api(client, patron_martigny, - entity_person, - librarian_martigny): +def test_remote_entity_permissions_api( + client, patron_martigny, entity_person, librarian_martigny +): """Test entities permissions api.""" prs_permissions_url = url_for( - 'api_blueprint.permissions', - route_name='remote_entities' + "api_blueprint.permissions", route_name="remote_entities" ) prs_real_permission_url = url_for( - 'api_blueprint.permissions', - route_name='remote_entities', - record_pid=entity_person.pid + "api_blueprint.permissions", + route_name="remote_entities", + record_pid=entity_person.pid, ) # Not logged @@ -54,16 +54,16 @@ def test_remote_entity_permissions_api(client, patron_martigny, res = client.get(prs_real_permission_url) assert res.status_code == 200 data = get_json(res) - assert data['read']['can'] - assert data['list']['can'] - assert not data['create']['can'] - assert not data['update']['can'] - assert not data['delete']['can'] + assert data["read"]["can"] + assert data["list"]["can"] + assert not data["create"]["can"] + assert not data["update"]["can"] + assert not data["delete"]["can"] -def test_remote_entity_permissions(patron_martigny, - librarian_martigny, - system_librarian_martigny): +def test_remote_entity_permissions( + patron_martigny, librarian_martigny, system_librarian_martigny +): """Test entity permissions class.""" permission_policy = RemoteEntityPermissionPolicy @@ -73,32 +73,44 @@ def test_remote_entity_permissions(patron_martigny, identity_changed.send( current_app._get_current_object(), identity=AnonymousIdentity() ) - check_permission(permission_policy, { - 'search': True, - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, {}) + check_permission( + permission_policy, + { + "search": True, + "read": True, + "create": False, + "update": False, + "delete": False, + }, + {}, + ) # Patron user # - Allow search/read actions on any entity # - Deny create/update/delete actions on any entity login_user(patron_martigny.user) - check_permission(permission_policy, { - 'search': True, - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, {}) + check_permission( + permission_policy, + { + "search": True, + "read": True, + "create": False, + "update": False, + "delete": False, + }, + {}, + ) # Full permission user # - Allow search/read actions on any entity # - Deny create/update/delete actions on any entity login_user(system_librarian_martigny.user) - check_permission(permission_policy, { - 'search': True, - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, {}) + check_permission( + permission_policy, + { + "search": True, + "read": True, + "create": False, + "update": False, + "delete": False, + }, + {}, + ) diff --git a/tests/api/entities/remote_entities/test_remote_entities_rest.py b/tests/api/entities/remote_entities/test_remote_entities_rest.py index 066d72ed64..d4c6e378e6 100644 --- a/tests/api/entities/remote_entities/test_remote_entities_rest.py +++ b/tests/api/entities/remote_entities/test_remote_entities_rest.py @@ -28,18 +28,17 @@ def test_remote_entities_permissions(client, entity_person, json_header): """Test record retrieval.""" - item_url = url_for('invenio_records_rest.rement_item', - pid_value='ent_pers') + item_url = url_for("invenio_records_rest.rement_item", pid_value="ent_pers") res = client.get(item_url) assert res.status_code == 200 - res, _ = postdata(client, 'invenio_records_rest.rement_list', {}) + res, _ = postdata(client, 
"invenio_records_rest.rement_list", {}) assert res.status_code == 401 client.put( - url_for('invenio_records_rest.rement_item', pid_value='ent_pers'), + url_for("invenio_records_rest.rement_item", pid_value="ent_pers"), data={}, - headers=json_header + headers=json_header, ) res = client.delete(item_url) @@ -48,111 +47,128 @@ def test_remote_entities_permissions(client, entity_person, json_header): def test_remote_entities_get(client, entity_person): """Test record retrieval.""" - item_url = url_for('invenio_records_rest.rement_item', - pid_value='ent_pers') + item_url = url_for("invenio_records_rest.rement_item", pid_value="ent_pers") res = client.get(item_url) assert res.status_code == 200 - assert res.headers['ETag'] == f'"{entity_person.revision_id}"' + assert res.headers["ETag"] == f'"{entity_person.revision_id}"' data = get_json(res) - assert entity_person.dumps() == data['metadata'] + assert entity_person.dumps() == data["metadata"] # Check metadata - for k in ['created', 'updated', 'metadata', 'links']: + for k in ["created", "updated", "metadata", "links"]: assert k in data # Check self links - res = client.get(to_relative_url(data['links']['self'])) + res = client.get(to_relative_url(data["links"]["self"])) assert res.status_code == 200 assert data == get_json(res) - assert entity_person.dumps() == data['metadata'] + assert entity_person.dumps() == data["metadata"] - list_url = url_for('invenio_records_rest.rement_list', pid='ent_pers') + list_url = url_for("invenio_records_rest.rement_list", pid="ent_pers") res = client.get(list_url) assert res.status_code == 200 data = get_json(res) entity_person = entity_person.replace_refs() - entity_person['organisations'] = entity_person.organisation_pids - entity_person['type'] = EntityType.PERSON - assert data['hits']['hits'][0]['metadata'] == \ - entity_person.dumps(indexer_dumper) + entity_person["organisations"] = entity_person.organisation_pids + entity_person["type"] = EntityType.PERSON + assert data["hits"]["hits"][0]["metadata"] == entity_person.dumps(indexer_dumper) -@mock.patch('rero_ils.modules.decorators.login_and_librarian', - mock.MagicMock()) -@mock.patch('requests.request') +@mock.patch("rero_ils.modules.decorators.login_and_librarian", mock.MagicMock()) +@mock.patch("requests.request") def test_remote_search_proxy( - mock_es_concept_get, app, client, - mef_concept2_es_response, mef_agents1_es_response, mef_places1_es_response + mock_es_concept_get, + app, + client, + mef_concept2_es_response, + mef_agents1_es_response, + mef_places1_es_response, ): """Test entities search on remote servers.""" # TEST#1 :: Concepts # All results must include a `type` key if a root `metadata` field # exists. 
- mock_es_concept_get.return_value = mock_response( - json_data=mef_concept2_es_response) - - response = client.get(url_for( - 'api_remote_entities.remote_search_proxy', - entity_type='concepts-genreForm', - term='side-car' - )) + mock_es_concept_get.return_value = mock_response(json_data=mef_concept2_es_response) + + response = client.get( + url_for( + "api_remote_entities.remote_search_proxy", + entity_type="concepts-genreForm", + term="side-car", + ) + ) assert response.status_code == 200 assert all( - hit.get('metadata', {}).get('type') == EntityType.TOPIC - for hit in response.json['hits']['hits'] - if 'metadata' in hit + hit.get("metadata", {}).get("type") == EntityType.TOPIC + for hit in response.json["hits"]["hits"] + if "metadata" in hit ) # TEST#2 :: Agents # All results must include an `identifiedBy` object if a root # `metadata` field exists. - mock_es_concept_get.return_value = mock_response( - json_data=mef_agents1_es_response) - response = client.get(url_for( - 'api_remote_entities.remote_search_proxy', - entity_type='agents', - term='UCLouvain' - )) - identifier = mef_agents1_es_response['hits']['hits'][0][ - 'metadata']['idref']['identifier'] - assert identifier == response.json['hits']['hits'][0][ - 'metadata']['idref']['identifiedBy'][0]['value'] + mock_es_concept_get.return_value = mock_response(json_data=mef_agents1_es_response) + response = client.get( + url_for( + "api_remote_entities.remote_search_proxy", + entity_type="agents", + term="UCLouvain", + ) + ) + identifier = mef_agents1_es_response["hits"]["hits"][0]["metadata"]["idref"][ + "identifier" + ] + assert ( + identifier + == response.json["hits"]["hits"][0]["metadata"]["idref"]["identifiedBy"][0][ + "value" + ] + ) # TEST#3 :: Places # All results must include an `identifiedBy` object if a root # `metadata` field exists. - mock_es_concept_get.return_value = mock_response( - json_data=mef_places1_es_response) - response = client.get(url_for( - 'api_remote_entities.remote_search_proxy', - entity_type='places', - term='Rouen' - )) - authorized_access_point = mef_places1_es_response['hits']['hits'][0][ - 'metadata']['idref']['authorized_access_point'] - assert authorized_access_point == response.json['hits']['hits'][0][ - 'metadata']['idref']['authorized_access_point'] + mock_es_concept_get.return_value = mock_response(json_data=mef_places1_es_response) + response = client.get( + url_for( + "api_remote_entities.remote_search_proxy", + entity_type="places", + term="Rouen", + ) + ) + authorized_access_point = mef_places1_es_response["hits"]["hits"][0]["metadata"][ + "idref" + ]["authorized_access_point"] + assert ( + authorized_access_point + == response.json["hits"]["hits"][0]["metadata"]["idref"][ + "authorized_access_point" + ] + ) # TEST#4 :: Unknown MEF search type # Try to execute a search on a not-configured MEF category. It should # raise a `ValueError`, caught by Flask to return an HTTP 400 response - category = 'unknown_category' - response = client.get(url_for( - 'api_remote_entities.remote_search_proxy', - entity_type=category, - term='search_term' - )) + category = "unknown_category" + response = client.get( + url_for( + "api_remote_entities.remote_search_proxy", + entity_type=category, + term="search_term", + ) + ) assert response.status_code == 400 - assert response.json['message'] == \ - f'Unable to find a MEF factory for {category}' + assert response.json["message"] == f"Unable to find a MEF factory for {category}" # TEST#5 :: Simulate MEF errors # Simulate that a MEF call returns an HTTP error and check the response. 
mock_es_concept_get.return_value = mock_response(status=404) - response = client.get(url_for( - 'api_remote_entities.remote_search_proxy', - entity_type='agents', - term='UCLouvain' - )) + response = client.get( + url_for( + "api_remote_entities.remote_search_proxy", + entity_type="agents", + term="UCLouvain", + ) + ) assert response.status_code == 404 diff --git a/tests/api/entities/test_entities_rest.py b/tests/api/entities/test_entities_rest.py index 4954fe49d2..84b8983daf 100644 --- a/tests/api/entities/test_entities_rest.py +++ b/tests/api/entities/test_entities_rest.py @@ -22,29 +22,26 @@ from utils import get_json, postdata -def test_entities_permissions(client, entity_person, - local_entity_person, json_header): +def test_entities_permissions(client, entity_person, local_entity_person, json_header): """Test record retrieval.""" - item_url = url_for('invenio_records_rest.ent_item', - pid_value='locent_pers') + item_url = url_for("invenio_records_rest.ent_item", pid_value="locent_pers") res = client.get(item_url) assert res.status_code == 401 - item_url = url_for('invenio_records_rest.ent_item', - pid_value='ent_pers') + item_url = url_for("invenio_records_rest.ent_item", pid_value="ent_pers") res = client.get(item_url) assert res.status_code == 401 - res = client.get(url_for('invenio_records_rest.ent_list')) + res = client.get(url_for("invenio_records_rest.ent_list")) assert res.status_code == 200 - res, _ = postdata(client, 'invenio_records_rest.ent_list', {}) + res, _ = postdata(client, "invenio_records_rest.ent_list", {}) assert res.status_code == 401 client.put( - url_for('invenio_records_rest.ent_item', pid_value='ent_pers'), + url_for("invenio_records_rest.ent_item", pid_value="ent_pers"), data={}, - headers=json_header + headers=json_header, ) res = client.delete(item_url) @@ -53,34 +50,32 @@ def test_entities_permissions(client, entity_person, def test_entities_get(client, entity_person, local_entity_person): """Test record retrieval.""" - item_url = url_for('invenio_records_rest.ent_item', - pid_value='locent_pers') + item_url = url_for("invenio_records_rest.ent_item", pid_value="locent_pers") res = client.get(item_url) assert res.status_code == 401 - item_url = url_for('invenio_records_rest.ent_item', - pid_value='ent_pers') + item_url = url_for("invenio_records_rest.ent_item", pid_value="ent_pers") res = client.get(item_url) assert res.status_code == 401 - res = client.get(url_for('invenio_records_rest.ent_list')) + res = client.get(url_for("invenio_records_rest.ent_list")) assert res.status_code == 200 # Check remote/local entities self links data = get_json(res) pid_link_map = { - 'ent_pers': 'http://localhost/remote_entities/ent_pers', - 'locent_pers': 'http://localhost/local_entities/locent_pers' + "ent_pers": "http://localhost/remote_entities/ent_pers", + "locent_pers": "http://localhost/local_entities/locent_pers", } - for hit in data['hits']['hits']: - assert hit['links']['self'] == pid_link_map.get(hit['id']) + for hit in data["hits"]["hits"]: + assert hit["links"]["self"] == pid_link_map.get(hit["id"]) # search entity record - list_url = url_for('invenio_records_rest.ent_list', pid='ent_pers') + list_url = url_for("invenio_records_rest.ent_list", pid="ent_pers") res = client.get(list_url) assert res.status_code == 200 # search local entity record - list_url = url_for('invenio_records_rest.ent_list', pid='locent_pers') + list_url = url_for("invenio_records_rest.ent_list", pid="locent_pers") res = client.get(list_url) assert res.status_code == 200 diff --git 
a/tests/api/entities/test_entities_search.py b/tests/api/entities/test_entities_search.py index 3694940420..85d428e735 100644 --- a/tests/api/entities/test_entities_search.py +++ b/tests/api/entities/test_entities_search.py @@ -21,38 +21,35 @@ from utils import VerifyRecordPermissionPatch, get_json -@mock.patch('invenio_records_rest.views.verify_record_permission', - mock.MagicMock(return_value=VerifyRecordPermissionPatch)) -def test_unified_entity_search(client, entity_person, local_entity_person, - entity_organisation): +@mock.patch( + "invenio_records_rest.views.verify_record_permission", + mock.MagicMock(return_value=VerifyRecordPermissionPatch), +) +def test_unified_entity_search( + client, entity_person, local_entity_person, entity_organisation +): """Test unified entity search queries.""" # unified entity search - list_url = url_for( - 'invenio_records_rest.ent_list', - q='"Loy, Georg"', - simple='1' - ) + list_url = url_for("invenio_records_rest.ent_list", q='"Loy, Georg"', simple="1") res = client.get(list_url) - hits = get_json(res)['hits'] - assert hits['total']['value'] == 2 + hits = get_json(res)["hits"] + assert hits["total"]["value"] == 2 # unified entity search organisation list_url = url_for( - 'invenio_records_rest.ent_list', + "invenio_records_rest.ent_list", q='"Convegno internazionale di italianistica Craiova"', - simple='1' + simple="1", ) res = client.get(list_url) - hits = get_json(res)['hits'] - assert hits['total']['value'] == 1 + hits = get_json(res)["hits"] + assert hits["total"]["value"] == 1 # empty search list_url = url_for( - 'invenio_records_rest.ent_list', - q='"Nebehay, Christian Michael"', - simple='1' + "invenio_records_rest.ent_list", q='"Nebehay, Christian Michael"', simple="1" ) res = client.get(list_url) - hits = get_json(res)['hits'] - assert hits['total']['value'] == 0 + hits = get_json(res)["hits"] + assert hits["total"]["value"] == 0 diff --git a/tests/api/files/test_files_permissions.py b/tests/api/files/test_files_permissions.py index 9a42c88597..1fea117c8f 100644 --- a/tests/api/files/test_files_permissions.py +++ b/tests/api/files/test_files_permissions.py @@ -20,26 +20,27 @@ from flask_principal import AnonymousIdentity, identity_changed from flask_security import login_user from invenio_accounts.testutils import login_user_via_session -from utils import check_permission, flush_index, get_json +from utils import check_permission, get_json from rero_ils.modules.files.permissions import FilePermissionPolicy from rero_ils.modules.patrons.api import Patron, PatronsSearch -def test_files_permissions_api(client, librarian_martigny, - librarian_sion, patron_martigny, - system_librarian_martigny, - document_with_files): +def test_files_permissions_api( + client, + librarian_martigny, + librarian_sion, + patron_martigny, + system_librarian_martigny, + document_with_files, +): """Test files permissions api.""" record_file = next(document_with_files.get_records_files()) - permissions_list_url = url_for( - 'api_blueprint.permissions', - route_name='records' - ) + permissions_list_url = url_for("api_blueprint.permissions", route_name="records") permissions_item_url = url_for( - 'api_blueprint.permissions', - route_name='records', - record_pid=record_file.pid.pid_value + "api_blueprint.permissions", + route_name="records", + record_pid=record_file.pid.pid_value, ) # Not logged @@ -62,50 +63,53 @@ def test_files_permissions_api(client, librarian_martigny, res = client.get(permissions_item_url) assert res.status_code == 200 data = get_json(res) - for 
action in ['list', 'read', 'create', 'update', 'delete']: - assert data[action]['can'] + for action in ["list", "read", "create", "update", "delete"]: + assert data[action]["can"] res = client.get(permissions_list_url) assert res.status_code == 200 data = get_json(res) - for action in ['list', 'create']: - assert data[action]['can'] + for action in ["list", "create"]: + assert data[action]["can"] # logged as librarian login_user_via_session(client, system_librarian_martigny.user) res = client.get(permissions_item_url) assert res.status_code == 200 data = get_json(res) - for action in ['list', 'read', 'create', 'update', 'delete']: - assert data[action]['can'] + for action in ["list", "read", "create", "update", "delete"]: + assert data[action]["can"] res = client.get(permissions_list_url) assert res.status_code == 200 data = get_json(res) - for action in ['list', 'create']: - assert data[action]['can'] + for action in ["list", "create"]: + assert data[action]["can"] # logged as librarian login_user_via_session(client, librarian_sion.user) res = client.get(permissions_item_url) assert res.status_code == 200 data = get_json(res) - for action in ['list', 'create', 'read']: - assert data[action]['can'] - for action in ['update', 'delete']: - assert not data[action]['can'] + for action in ["list", "create", "read"]: + assert data[action]["can"] + for action in ["update", "delete"]: + assert not data[action]["can"] res = client.get(permissions_list_url) assert res.status_code == 200 data = get_json(res) - for action in ['list', 'create']: - assert data[action]['can'] + for action in ["list", "create"]: + assert data[action]["can"] -@mock.patch.object(Patron, '_extensions', []) +@mock.patch.object(Patron, "_extensions", []) def test_files_permissions( - patron_martigny, librarian_martigny, librarian_sion, - system_librarian_martigny, document_with_files + patron_martigny, + librarian_martigny, + librarian_sion, + system_librarian_martigny, + document_with_files, ): """Test files permissions.""" @@ -115,97 +119,111 @@ def test_files_permissions( identity_changed.send( current_app._get_current_object(), identity=AnonymousIdentity() ) - check_permission(FilePermissionPolicy, { - 'search': True, - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, None) + check_permission( + FilePermissionPolicy, + { + "search": True, + "read": True, + "create": False, + "update": False, + "delete": False, + }, + None, + ) record_file = next(document_with_files.get_records_files()) - check_permission(FilePermissionPolicy, { - 'search': True, - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, record_file) + check_permission( + FilePermissionPolicy, + { + "search": True, + "read": True, + "create": False, + "update": False, + "delete": False, + }, + record_file, + ) login_user(patron_martigny.user) - check_permission(FilePermissionPolicy, {'create': False}, {}) - check_permission(FilePermissionPolicy, { - 'search': True, - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, record_file) + check_permission(FilePermissionPolicy, {"create": False}, {}) + check_permission( + FilePermissionPolicy, + { + "search": True, + "read": True, + "create": False, + "update": False, + "delete": False, + }, + record_file, + ) # Librarian with specific role # - search/read: any files # - create/update/delete: allowed for files of its own library login_user(librarian_martigny.user) - check_permission(FilePermissionPolicy, { - 'search': True, - 'read': True, - 
'create': True, - 'update': True, - 'delete': True - }, record_file) + check_permission( + FilePermissionPolicy, + {"search": True, "read": True, "create": True, "update": True, "delete": True}, + record_file, + ) # Librarian with specific role # - search/read: any files # - update/delete: disallowed for files of other libraries login_user(librarian_sion.user) - check_permission(FilePermissionPolicy, { - 'search': True, - 'read': True, - 'create': True, - 'update': False, - 'delete': False - }, record_file) + check_permission( + FilePermissionPolicy, + { + "search": True, + "read": True, + "create": True, + "update": False, + "delete": False, + }, + record_file, + ) # Librarian without specific role # - search/read: any files # - create/update/delete: allowed for any files of its own library - original_roles = librarian_martigny.get('roles', []) - librarian_martigny['roles'] = ['pro_catalog_manager'] + original_roles = librarian_martigny.get("roles", []) + librarian_martigny["roles"] = ["pro_catalog_manager"] librarian_martigny.update(librarian_martigny, dbcommit=True, reindex=True) - flush_index(PatronsSearch.Meta.index) + PatronsSearch.flush_and_refresh() login_user(librarian_martigny.user) # to refresh identity ! - check_permission(FilePermissionPolicy, { - 'search': True, - 'read': True, - 'create': True, - 'update': True, - 'delete': True - }, record_file) - - librarian_martigny['roles'] = ['pro_user_manager'] + check_permission( + FilePermissionPolicy, + {"search": True, "read": True, "create": True, "update": True, "delete": True}, + record_file, + ) + + librarian_martigny["roles"] = ["pro_user_manager"] librarian_martigny.update(librarian_martigny, dbcommit=True, reindex=True) - flush_index(PatronsSearch.Meta.index) + PatronsSearch.flush_and_refresh() login_user(librarian_martigny.user) # to refresh identity ! - check_permission(FilePermissionPolicy, { - 'search': True, - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, record_file) + check_permission( + FilePermissionPolicy, + { + "search": True, + "read": True, + "create": False, + "update": False, + "delete": False, + }, + record_file, + ) # reset the librarian - librarian_martigny['roles'] = original_roles + librarian_martigny["roles"] = original_roles librarian_martigny.update(librarian_martigny, dbcommit=True, reindex=True) - flush_index(PatronsSearch.Meta.index) + PatronsSearch.flush_and_refresh() # System librarian (aka. 
full-permissions) # - create/update/delete: allow for files if its own org login_user(system_librarian_martigny.user) - check_permission(FilePermissionPolicy, { - 'search': True, - 'read': True, - 'create': True, - 'update': True, - 'delete': True - }, record_file) + check_permission( + FilePermissionPolicy, + {"search": True, "read": True, "create": True, "update": True, "delete": True}, + record_file, + ) diff --git a/tests/api/holdings/test_holdings_permissions.py b/tests/api/holdings/test_holdings_permissions.py index f64be0d3fc..921525b9ba 100644 --- a/tests/api/holdings/test_holdings_permissions.py +++ b/tests/api/holdings/test_holdings_permissions.py @@ -20,18 +20,24 @@ from flask import current_app from flask_principal import AnonymousIdentity, identity_changed from flask_security import login_user -from utils import check_permission, flush_index +from utils import check_permission from rero_ils.modules.holdings.permissions import HoldingsPermissionPolicy from rero_ils.modules.patrons.api import Patron, PatronsSearch -@mock.patch.object(Patron, '_extensions', []) +@mock.patch.object(Patron, "_extensions", []) def test_holdings_permissions( - patron_martigny, org_martigny, librarian_martigny, - system_librarian_martigny, holding_lib_sion, holding_lib_saxon, - holding_lib_martigny, holding_lib_martigny_w_patterns, - holding_lib_saxon_w_patterns, holding_lib_sion_w_patterns + patron_martigny, + org_martigny, + librarian_martigny, + system_librarian_martigny, + holding_lib_sion, + holding_lib_saxon, + holding_lib_martigny, + holding_lib_martigny_w_patterns, + holding_lib_saxon_w_patterns, + holding_lib_sion_w_patterns, ): """Test holdings permissions class.""" @@ -41,29 +47,41 @@ def test_holdings_permissions( identity_changed.send( current_app._get_current_object(), identity=AnonymousIdentity() ) - check_permission(HoldingsPermissionPolicy, { - 'search': True, - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, None) - check_permission(HoldingsPermissionPolicy, { - 'search': True, - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, holding_lib_martigny) + check_permission( + HoldingsPermissionPolicy, + { + "search": True, + "read": True, + "create": False, + "update": False, + "delete": False, + }, + None, + ) + check_permission( + HoldingsPermissionPolicy, + { + "search": True, + "read": True, + "create": False, + "update": False, + "delete": False, + }, + holding_lib_martigny, + ) login_user(patron_martigny.user) - check_permission(HoldingsPermissionPolicy, {'create': False}, {}) - check_permission(HoldingsPermissionPolicy, { - 'search': True, - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, holding_lib_sion) + check_permission(HoldingsPermissionPolicy, {"create": False}, {}) + check_permission( + HoldingsPermissionPolicy, + { + "search": True, + "read": True, + "create": False, + "update": False, + "delete": False, + }, + holding_lib_sion, + ) # Librarian with specific role # - search/read: any document @@ -71,78 +89,98 @@ def test_holdings_permissions( # -- allowed for serial holdings of its own library # -- disallowed for standard holdings despite its own library login_user(librarian_martigny.user) - check_permission(HoldingsPermissionPolicy, { - 'search': True, - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, holding_lib_martigny) - check_permission(HoldingsPermissionPolicy, { - 'search': True, - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, 
holding_lib_saxon) - check_permission(HoldingsPermissionPolicy, { - 'search': True, - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, holding_lib_sion) - check_permission(HoldingsPermissionPolicy, { - 'search': True, - 'read': True, - 'create': True, - 'update': True, - 'delete': True - }, holding_lib_martigny_w_patterns) - check_permission(HoldingsPermissionPolicy, { - 'search': True, - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, holding_lib_saxon_w_patterns) - check_permission(HoldingsPermissionPolicy, { - 'search': True, - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, holding_lib_sion_w_patterns) + check_permission( + HoldingsPermissionPolicy, + { + "search": True, + "read": True, + "create": False, + "update": False, + "delete": False, + }, + holding_lib_martigny, + ) + check_permission( + HoldingsPermissionPolicy, + { + "search": True, + "read": True, + "create": False, + "update": False, + "delete": False, + }, + holding_lib_saxon, + ) + check_permission( + HoldingsPermissionPolicy, + { + "search": True, + "read": True, + "create": False, + "update": False, + "delete": False, + }, + holding_lib_sion, + ) + check_permission( + HoldingsPermissionPolicy, + {"search": True, "read": True, "create": True, "update": True, "delete": True}, + holding_lib_martigny_w_patterns, + ) + check_permission( + HoldingsPermissionPolicy, + { + "search": True, + "read": True, + "create": False, + "update": False, + "delete": False, + }, + holding_lib_saxon_w_patterns, + ) + check_permission( + HoldingsPermissionPolicy, + { + "search": True, + "read": True, + "create": False, + "update": False, + "delete": False, + }, + holding_lib_sion_w_patterns, + ) # Librarian without specific role # - search/read: any document # - create/update/delete: disallowed for any holdings !! - original_roles = librarian_martigny.get('roles', []) - librarian_martigny['roles'] = ['pro_circulation_manager'] + original_roles = librarian_martigny.get("roles", []) + librarian_martigny["roles"] = ["pro_circulation_manager"] librarian_martigny.update(librarian_martigny, dbcommit=True, reindex=True) - flush_index(PatronsSearch.Meta.index) + PatronsSearch.flush_and_refresh() login_user(librarian_martigny.user) # to refresh identity ! - check_permission(HoldingsPermissionPolicy, { - 'search': True, - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, holding_lib_martigny_w_patterns) + check_permission( + HoldingsPermissionPolicy, + { + "search": True, + "read": True, + "create": False, + "update": False, + "delete": False, + }, + holding_lib_martigny_w_patterns, + ) # reset the librarian - librarian_martigny['roles'] = original_roles + librarian_martigny["roles"] = original_roles librarian_martigny.update(librarian_martigny, dbcommit=True, reindex=True) - flush_index(PatronsSearch.Meta.index) + PatronsSearch.flush_and_refresh() # System librarian (aka. 
full-permissions) # - create/update/delete: allow for serial holding if its own org login_user(system_librarian_martigny.user) - check_permission(HoldingsPermissionPolicy, { - 'search': True, - 'read': True, - 'create': True, - 'update': True, - 'delete': True - }, holding_lib_saxon_w_patterns) + check_permission( + HoldingsPermissionPolicy, + {"search": True, "read": True, "create": True, "update": True, "delete": True}, + holding_lib_saxon_w_patterns, + ) diff --git a/tests/api/holdings/test_holdings_rest.py b/tests/api/holdings/test_holdings_rest.py index 0010ef5ce9..f3a012522a 100644 --- a/tests/api/holdings/test_holdings_rest.py +++ b/tests/api/holdings/test_holdings_rest.py @@ -23,161 +23,171 @@ import mock from flask import url_for from invenio_accounts.testutils import login_user_via_session -from utils import VerifyRecordPermissionPatch, get_json, postdata, \ - to_relative_url +from utils import VerifyRecordPermissionPatch, get_json, postdata, to_relative_url from rero_ils.modules.holdings.api import Holding -def test_holding_can_delete_and_utils(client, holding_lib_martigny, document, - item_type_standard_martigny): +def test_holding_can_delete_and_utils( + client, holding_lib_martigny, document, item_type_standard_martigny +): """Test can delete a holding.""" can, reasons = holding_lib_martigny.can_delete assert can assert reasons == {} assert holding_lib_martigny.document_pid == document.pid - assert holding_lib_martigny.circulation_category_pid == \ - item_type_standard_martigny.pid - assert Holding.get_document_pid_by_holding_pid( - holding_lib_martigny.pid) == document.pid - assert list(Holding.get_holdings_pid_by_document_pid(document.pid))[0] == \ - holding_lib_martigny.pid + assert ( + holding_lib_martigny.circulation_category_pid == item_type_standard_martigny.pid + ) + assert ( + Holding.get_document_pid_by_holding_pid(holding_lib_martigny.pid) + == document.pid + ) + assert ( + list(Holding.get_holdings_pid_by_document_pid(document.pid))[0] + == holding_lib_martigny.pid + ) -@mock.patch('invenio_records_rest.views.verify_record_permission', - mock.MagicMock(return_value=VerifyRecordPermissionPatch)) +@mock.patch( + "invenio_records_rest.views.verify_record_permission", + mock.MagicMock(return_value=VerifyRecordPermissionPatch), +) def test_holdings_get( client, item_lib_martigny, item_lib_martigny_masked, rero_json_header ): """Test record retrieval.""" holding = Holding.get_record_by_pid(item_lib_martigny.holding_pid) - url = url_for('invenio_records_rest.hold_item', pid_value=holding.pid) + url = url_for("invenio_records_rest.hold_item", pid_value=holding.pid) # Check REST API for a single holdings res = client.get(url) assert res.status_code == 200 - assert res.headers['ETag'] == f'"{holding.revision_id}"' + assert res.headers["ETag"] == f'"{holding.revision_id}"' data = get_json(res) - assert all(k in data for k in ['created', 'updated', 'metadata', 'links']) - assert holding.dumps() == data['metadata'] + assert all(k in data for k in ["created", "updated", "metadata", "links"]) + assert holding.dumps() == data["metadata"] # Check self links - res = client.get(to_relative_url(data['links']['self'])) + res = client.get(to_relative_url(data["links"]["self"])) assert res.status_code == 200 assert data == get_json(res) - assert holding.dumps() == data['metadata'] + assert holding.dumps() == data["metadata"] # Check REST API for a single holdings with reference resolver - url = url_for('invenio_records_rest.hold_item', pid_value=holding.pid, - resolve=1, sources=1) + 
url = url_for( + "invenio_records_rest.hold_item", pid_value=holding.pid, resolve=1, sources=1 + ) res = client.get(url) assert res.status_code == 200 data = get_json(res) - assert holding.replace_refs().dumps() == data['metadata'] + assert holding.replace_refs().dumps() == data["metadata"] # Check REST API for holdings query - url = url_for('invenio_records_rest.hold_list', q=f'pid:{holding.pid}') + url = url_for("invenio_records_rest.hold_list", q=f"pid:{holding.pid}") res = client.get(url) assert res.status_code == 200 data = get_json(res) - hit = data['hits']['hits'][0]['metadata'] - assert hit.pop('public_items_count') == 1 - assert hit.pop('items_count') == 2 + hit = data["hits"]["hits"][0]["metadata"] + assert hit.pop("public_items_count") == 1 + assert hit.pop("items_count") == 2 assert hit == holding.replace_refs() # Check REST API for holdings query for `rero+json` header res = client.get(url, headers=rero_json_header) assert res.status_code == 200 data = get_json(res) - hit = data['hits']['hits'][0]['metadata'] + hit = data["hits"]["hits"][0]["metadata"] def test_filtered_holdings_get( - client, librarian_martigny, holding_lib_martigny, - holding_lib_fully, holding_lib_saxon, holding_lib_sion, - patron_sion): + client, + librarian_martigny, + holding_lib_martigny, + holding_lib_fully, + holding_lib_saxon, + holding_lib_sion, + patron_sion, +): """Test holding filter by organisation.""" # Librarian Martigny login_user_via_session(client, librarian_martigny.user) - list_url = url_for('invenio_records_rest.hold_list') + list_url = url_for("invenio_records_rest.hold_list") res = client.get(list_url) assert res.status_code == 200 data = get_json(res) - assert data['hits']['total']['value'] == 4 + assert data["hits"]["total"]["value"] == 4 # Patron Sion login_user_via_session(client, patron_sion.user) - list_url = url_for('invenio_records_rest.hold_list', view='org2') + list_url = url_for("invenio_records_rest.hold_list", view="org2") res = client.get(list_url) assert res.status_code == 200 data = get_json(res) - assert data['hits']['total']['value'] == 1 + assert data["hits"]["total"]["value"] == 1 -def test_holdings_items_filter(client, holding_lib_martigny, holding_lib_sion, - item_lib_martigny): +def test_holdings_items_filter( + client, holding_lib_martigny, holding_lib_sion, item_lib_martigny +): """Test filter for holdings items.""" - assert len( - holding_lib_martigny.get_items_filter_by_viewcode('global')) == 1 + assert len(holding_lib_martigny.get_items_filter_by_viewcode("global")) == 1 - assert len( - holding_lib_martigny.get_items_filter_by_viewcode('org2')) == 0 + assert len(holding_lib_martigny.get_items_filter_by_viewcode("org2")) == 0 -@mock.patch('invenio_records_rest.views.verify_record_permission', - mock.MagicMock(return_value=VerifyRecordPermissionPatch)) -def test_holdings_post_put_delete(client, holding_lib_martigny_data_tmp, - json_header, holding_lib_martigny, - loc_public_martigny): +@mock.patch( + "invenio_records_rest.views.verify_record_permission", + mock.MagicMock(return_value=VerifyRecordPermissionPatch), +) +def test_holdings_post_put_delete( + client, + holding_lib_martigny_data_tmp, + json_header, + holding_lib_martigny, + loc_public_martigny, +): """Test record create and delete.""" - item_url = url_for('invenio_records_rest.hold_item', pid_value='2') - list_url = url_for('invenio_records_rest.hold_list', q='pid:2') + item_url = url_for("invenio_records_rest.hold_item", pid_value="2") + list_url = url_for("invenio_records_rest.hold_list", 
q="pid:2") holding_data = holding_lib_martigny_data_tmp # Create record / POST # We can not use pid=1 here. It is already used! - holding_data['pid'] = '2' - res, data = postdata( - client, - 'invenio_records_rest.hold_list', - holding_data - ) + holding_data["pid"] = "2" + res, data = postdata(client, "invenio_records_rest.hold_list", holding_data) assert res.status_code == 201 # Check that the returned record matches the given data - assert data['metadata'] == holding_data + assert data["metadata"] == holding_data res = client.get(item_url) assert res.status_code == 200 data = get_json(res) - assert holding_data == data['metadata'] + assert holding_data == data["metadata"] # Update record/PUT data = holding_data - data['call_number'] = 'call number' - res = client.put( - item_url, - data=json.dumps(data), - headers=json_header - ) + data["call_number"] = "call number" + res = client.put(item_url, data=json.dumps(data), headers=json_header) assert res.status_code == 200 # Check that the returned record matches the given data data = get_json(res) - assert data['metadata']['call_number'] == 'call number' + assert data["metadata"]["call_number"] == "call number" res = client.get(item_url) assert res.status_code == 200 data = get_json(res) - assert data['metadata']['call_number'] == 'call number' + assert data["metadata"]["call_number"] == "call number" res = client.get(list_url) assert res.status_code == 200 - data = get_json(res)['hits']['hits'][0] - assert data['metadata']['call_number'] == 'call number' + data = get_json(res)["hits"]["hits"][0] + assert data["metadata"]["call_number"] == "call number" # Delete record/DELETE res = client.delete(item_url) @@ -187,9 +197,15 @@ def test_holdings_post_put_delete(client, holding_lib_martigny_data_tmp, assert res.status_code == 410 -def test_holding_request(client, librarian_martigny, patron_martigny, - holding_lib_martigny, holding_lib_martigny_w_patterns, - lib_martigny, circ_policy_short_martigny): +def test_holding_request( + client, + librarian_martigny, + patron_martigny, + holding_lib_martigny, + holding_lib_martigny_w_patterns, + lib_martigny, + circ_policy_short_martigny, +): """Test holding can be requested""" # test patron can request holding login_user_via_session(client, patron_martigny.user) @@ -197,14 +213,14 @@ def test_holding_request(client, librarian_martigny, patron_martigny, res = client.get( url_for( - 'api_holding.can_request', + "api_holding.can_request", holding_pid=holding_lib_martigny_w_patterns.pid, library_pid=lib_martigny.pid, - patron_barcode=patron.patron.get('barcode') + patron_barcode=patron.patron.get("barcode"), ) ) response = json.loads(res.data) - assert response['can'] + assert response["can"] # test librarian can request holding login_user_via_session(client, librarian_martigny.user) @@ -212,14 +228,14 @@ def test_holding_request(client, librarian_martigny, patron_martigny, res = client.get( url_for( - 'api_holding.can_request', + "api_holding.can_request", holding_pid=holding_lib_martigny_w_patterns.pid, library_pid=lib_martigny.pid, - patron_barcode=patron.patron.get('barcode') + patron_barcode=patron.patron.get("barcode"), ) ) response = json.loads(res.data) - assert response['can'] + assert response["can"] # test patron cannot request holding login_user_via_session(client, patron_martigny.user) @@ -227,14 +243,14 @@ def test_holding_request(client, librarian_martigny, patron_martigny, res = client.get( url_for( - 'api_holding.can_request', + "api_holding.can_request", 
holding_pid=holding_lib_martigny.pid, library_pid=lib_martigny.pid, - patron_barcode=patron.patron.get('barcode') + patron_barcode=patron.patron.get("barcode"), ) ) response = json.loads(res.data) - assert not response['can'] + assert not response["can"] # test librarian cannot request holding login_user_via_session(client, librarian_martigny.user) @@ -242,11 +258,11 @@ def test_holding_request(client, librarian_martigny, patron_martigny, res = client.get( url_for( - 'api_holding.can_request', + "api_holding.can_request", holding_pid=holding_lib_martigny.pid, library_pid=lib_martigny.pid, - patron_barcode=patron.patron.get('barcode') + patron_barcode=patron.patron.get("barcode"), ) ) response = json.loads(res.data) - assert not response['can'] + assert not response["can"] diff --git a/tests/api/holdings/test_patterns.py b/tests/api/holdings/test_patterns.py index 1ac1d1025e..1f76c74a0e 100644 --- a/tests/api/holdings/test_patterns.py +++ b/tests/api/holdings/test_patterns.py @@ -37,105 +37,85 @@ def test_pattern_preview_api( - client, holding_lib_martigny_w_patterns, librarian_martigny): + client, holding_lib_martigny_w_patterns, librarian_martigny +): """Test holdings patterns preview api.""" login_user_via_session(client, librarian_martigny.user) holding = holding_lib_martigny_w_patterns # holding = Holding.get_record_by_pid(holding.pid) # test preview by default 10 issues returned - res = client.get( - url_for( - 'api_holding.patterns_preview', - holding_pid=holding.pid - ) - ) + res = client.get(url_for("api_holding.patterns_preview", holding_pid=holding.pid)) assert res.status_code == 200 - issues = get_json(res).get('issues') - assert issues[0]['issue'] == 'no 61 mars 2020' + issues = get_json(res).get("issues") + assert issues[0]["issue"] == "no 61 mars 2020" assert len(issues) == 10 # test invalid size res = client.get( - url_for( - 'api_holding.patterns_preview', - holding_pid=holding.pid, - size='no size' - ) + url_for("api_holding.patterns_preview", holding_pid=holding.pid, size="no size") ) assert res.status_code == 200 - issues = get_json(res).get('issues') - assert issues[0]['issue'] == 'no 61 mars 2020' + issues = get_json(res).get("issues") + assert issues[0]["issue"] == "no 61 mars 2020" assert len(issues) == 10 # test preview for a given size res = client.get( - url_for( - 'api_holding.patterns_preview', - holding_pid=holding.pid, - size=13 - ) + url_for("api_holding.patterns_preview", holding_pid=holding.pid, size=13) ) assert res.status_code == 200 - issues = get_json(res).get('issues') - assert issues[12]['issue'] == 'no 73 mars 2023' + issues = get_json(res).get("issues") + assert issues[12]["issue"] == "no 73 mars 2023" assert len(issues) == 13 def test_pattern_preview_api( - client, holding_lib_martigny_w_patterns, librarian_martigny): + client, holding_lib_martigny_w_patterns, librarian_martigny +): """Test holdings patterns preview api.""" login_user_via_session(client, librarian_martigny.user) holding = holding_lib_martigny_w_patterns # holding = Holding.get_record_by_pid(holding.pid) # test preview by default 10 issues returned - res = client.get( - url_for( - 'api_holding.patterns_preview', - holding_pid=holding.pid - ) - ) + res = client.get(url_for("api_holding.patterns_preview", holding_pid=holding.pid)) assert res.status_code == 200 - issues = get_json(res).get('issues') - assert issues[0]['issue'] == 'no 61 mars 2020' + issues = get_json(res).get("issues") + assert issues[0]["issue"] == "no 61 mars 2020" assert len(issues) == 10 # test invalid size res = 
client.get( - url_for( - 'api_holding.patterns_preview', - holding_pid=holding.pid, - size='no size' - ) + url_for("api_holding.patterns_preview", holding_pid=holding.pid, size="no size") ) assert res.status_code == 200 - issues = get_json(res).get('issues') - assert issues[0]['issue'] == 'no 61 mars 2020' + issues = get_json(res).get("issues") + assert issues[0]["issue"] == "no 61 mars 2020" assert len(issues) == 10 # test preview for a given size res = client.get( - url_for( - 'api_holding.patterns_preview', - holding_pid=holding.pid, - size=13 - ) + url_for("api_holding.patterns_preview", holding_pid=holding.pid, size=13) ) assert res.status_code == 200 - issues = get_json(res).get('issues') - assert issues[12]['issue'] == 'no 73 mars 2023' + issues = get_json(res).get("issues") + assert issues[12]["issue"] == "no 73 mars 2023" assert len(issues) == 13 def test_receive_regular_issue_api( - client, holding_lib_martigny_w_patterns, - librarian_fully, librarian_martigny, - system_librarian_sion): + client, + holding_lib_martigny_w_patterns, + librarian_fully, + librarian_martigny, + system_librarian_sion, +): """Test holdings receive regular issues API.""" holding = holding_lib_martigny_w_patterns holding = Holding.get_record_by_pid(holding.pid) issue_display, expected_date = holding._get_next_issue_display_text( - holding.get('patterns')) + holding.get("patterns") + ) # users who are not logged in are not authorized res, data = postdata( client, - 'api_holding.receive_regular_issue', - url_data=dict(holding_pid=holding.pid) + "api_holding.receive_regular_issue", + url_data=dict(holding_pid=holding.pid), ) assert res.status_code == 401 @@ -144,89 +124,95 @@ def test_receive_regular_issue_api( login_user_via_session(client, librarian_fully.user) res, data = postdata( client, - 'api_holding.receive_regular_issue', - url_data=dict(holding_pid=holding.pid) + "api_holding.receive_regular_issue", + url_data=dict(holding_pid=holding.pid), ) assert res.status_code == 401 # only users of the same organisation may receive issues. 
login_user_via_session(client, system_librarian_sion.user) res, data = postdata( client, - 'api_holding.receive_regular_issue', - url_data=dict(holding_pid=holding.pid) + "api_holding.receive_regular_issue", + url_data=dict(holding_pid=holding.pid), ) assert res.status_code == 401 login_user_via_session(client, librarian_martigny.user) res, data = postdata( client, - 'api_holding.receive_regular_issue', - url_data=dict(holding_pid=holding.pid) + "api_holding.receive_regular_issue", + url_data=dict(holding_pid=holding.pid), ) assert res.status_code == 200 - issue = get_json(res).get('issue') - assert issue.get('enumerationAndChronology') == issue_display - assert issue.get('issue').get('expected_date') == expected_date + issue = get_json(res).get("issue") + assert issue.get("enumerationAndChronology") == issue_display + assert issue.get("issue").get("expected_date") == expected_date item = { - 'issue': { - 'regular': True, - 'status': ItemIssueStatus.RECEIVED, - 'expected_date': datetime.now().strftime('%Y-%m-%d'), - 'received_date': datetime.now().strftime('%Y-%m-%d') + "issue": { + "regular": True, + "status": ItemIssueStatus.RECEIVED, + "expected_date": datetime.now().strftime("%Y-%m-%d"), + "received_date": datetime.now().strftime("%Y-%m-%d"), }, - 'enumerationAndChronology': 'free_text' + "enumerationAndChronology": "free_text", } res, data = postdata( client, - 'api_holding.receive_regular_issue', + "api_holding.receive_regular_issue", data=dict(item=item), - url_data=dict(holding_pid=holding.pid) + url_data=dict(holding_pid=holding.pid), ) assert res.status_code == 200 - issue = get_json(res).get('issue') - assert issue.get('enumerationAndChronology') == 'free_text' - assert issue.get('issue').get('expected_date') == \ - datetime.now().strftime('%Y-%m-%d') + issue = get_json(res).get("issue") + assert issue.get("enumerationAndChronology") == "free_text" + assert issue.get("issue").get("expected_date") == datetime.now().strftime( + "%Y-%m-%d" + ) def test_create_holdings_with_pattern( - client, librarian_martigny, loc_public_martigny, - journal, item_type_standard_martigny, document, - json_header, holding_lib_martigny_data, pattern_yearly_one_level_data, - holding_lib_martigny_w_patterns_data): + client, + librarian_martigny, + loc_public_martigny, + journal, + item_type_standard_martigny, + document, + json_header, + holding_lib_martigny_data, + pattern_yearly_one_level_data, + holding_lib_martigny_w_patterns_data, +): """Test create holding type serial with patterns.""" login_user_via_session(client, librarian_martigny.user) - post_entrypoint = 'invenio_records_rest.hold_list' + post_entrypoint = "invenio_records_rest.hold_list" - del holding_lib_martigny_data['pid'] - holding_lib_martigny_data['holdings_type'] = 'serial' - res, _ = postdata( - client, - post_entrypoint, - holding_lib_martigny_data - ) + del holding_lib_martigny_data["pid"] + holding_lib_martigny_data["holdings_type"] = "serial" + res, _ = postdata(client, post_entrypoint, holding_lib_martigny_data) assert res.status_code == 201 - holding_lib_martigny_data['patterns'] = \ - pattern_yearly_one_level_data['patterns'] + holding_lib_martigny_data["patterns"] = pattern_yearly_one_level_data["patterns"] # test will fail when creating a serial holding for a standard document. 
- res, _ = postdata( - client, - post_entrypoint, - holding_lib_martigny_data - ) + res, _ = postdata(client, post_entrypoint, holding_lib_martigny_data) assert res.status_code == 201 # test will not fail when creating a standard holding for a journal doc. - holding_lib_martigny_w_patterns_data['holdings_type'] = 'standard' + holding_lib_martigny_w_patterns_data["holdings_type"] = "standard" # delete serials fields fields = [ - 'enumerationAndChronology', 'notes', 'index', 'missing_issues', - 'supplementaryContent', 'acquisition_status', - 'acquisition_method', 'acquisition_expected_end_date', - 'general_retention_policy', 'completeness', - 'composite_copy_report', 'issue_binding' + "enumerationAndChronology", + "notes", + "index", + "missing_issues", + "supplementaryContent", + "acquisition_status", + "acquisition_method", + "acquisition_expected_end_date", + "general_retention_policy", + "completeness", + "composite_copy_report", + "issue_binding", ] for field in fields: del holding_lib_martigny_w_patterns_data[field] @@ -234,158 +220,148 @@ def test_create_holdings_with_pattern( data=holding_lib_martigny_w_patterns_data, delete_pid=True, dbcommit=True, - reindex=True) + reindex=True, + ) journal_pids = list(Document.get_all_serial_pids()) assert journal_pids == [journal.pid] def test_holding_pattern_preview_api( - client, pattern_yearly_one_level_data, - librarian_martigny): + client, pattern_yearly_one_level_data, librarian_martigny +): """Test holdings patterns preview api.""" login_user_via_session(client, librarian_martigny.user) - patterns = pattern_yearly_one_level_data.get('patterns') + patterns = pattern_yearly_one_level_data.get("patterns") # test preview by default 10 issues returned res, data = postdata( - client, - 'api_holding.pattern_preview', - dict(data=patterns, size=15) + client, "api_holding.pattern_preview", dict(data=patterns, size=15) ) assert res.status_code == 200 - issues = get_json(res).get('issues') - assert issues[0]['issue'] == '82 2020' + issues = get_json(res).get("issues") + assert issues[0]["issue"] == "82 2020" assert len(issues) == 15 # test invalid patterns - del patterns['values'] - res, data = postdata( - client, - 'api_holding.pattern_preview', - dict(data=patterns) - ) + del patterns["values"] + res, data = postdata(client, "api_holding.pattern_preview", dict(data=patterns)) assert res.status_code == 200 - issues = get_json(res).get('issues') + issues = get_json(res).get("issues") assert issues == [] assert len(issues) == 0 def test_automatic_item_creation_no_serials( - client, json_header, holding_lib_martigny_w_patterns, - item_lib_martigny_data, librarian_martigny): + client, + json_header, + holding_lib_martigny_w_patterns, + item_lib_martigny_data, + librarian_martigny, +): """Test automatically created items are not attached to serials.""" login_user_via_session(client, librarian_martigny.user) - post_url = 'invenio_records_rest.item_list' - res, _ = postdata( - client, - post_url, - item_lib_martigny_data - ) + post_url = "invenio_records_rest.item_list" + res, _ = postdata(client, post_url, item_lib_martigny_data) assert res.status_code == 201 - item = Item.get_record_by_pid(item_lib_martigny_data.get('pid')) + item = Item.get_record_by_pid(item_lib_martigny_data.get("pid")) holding = Holding.get_record_by_pid(item.holding_pid) assert holding.pid != holding_lib_martigny_w_patterns.pid assert holding.location_pid == holding_lib_martigny_w_patterns.location_pid - assert holding.get('circulation_category') == \ - 
holding_lib_martigny_w_patterns.get('circulation_category') + assert holding.get("circulation_category") == holding_lib_martigny_w_patterns.get( + "circulation_category" + ) def test_pattern_validate_next_expected_date( - client, librarian_martigny, - journal, loc_public_sion, item_type_regular_sion, document, - pattern_yearly_two_times_data, json_header, - holding_lib_sion_w_patterns_data): + client, + librarian_martigny, + journal, + loc_public_sion, + item_type_regular_sion, + document, + pattern_yearly_two_times_data, + json_header, + holding_lib_sion_w_patterns_data, +): """Test create holding with regular frequency and missing the next_expected_date. """ login_user_via_session(client, librarian_martigny.user) holding = holding_lib_sion_w_patterns_data - holding['holdings_type'] = 'serial' - holding['patterns'] = \ - pattern_yearly_two_times_data['patterns'] - del holding['pid'] - del holding['patterns']['next_expected_date'] + holding["holdings_type"] = "serial" + holding["patterns"] = pattern_yearly_two_times_data["patterns"] + del holding["pid"] + del holding["patterns"]["next_expected_date"] # test will fail when the serial holding has no field # next_expected_date for the regular frequency with pytest.raises(ValidationError): - Holding.create( - data=holding, - delete_pid=False, - dbcommit=True, - reindex=True) + Holding.create(data=holding, delete_pid=False, dbcommit=True, reindex=True) def test_irregular_issue_creation_update_delete_api( - client, holding_lib_martigny_w_patterns, - librarian_martigny): + client, holding_lib_martigny_w_patterns, librarian_martigny +): """Test create, update and delete of an irregular issue API.""" holding = holding_lib_martigny_w_patterns issue_display, expected_date = holding._get_next_issue_display_text( - holding.get('patterns')) + holding.get("patterns") + ) login_user_via_session(client, librarian_martigny.user) item = { - 'issue': { - 'status': ItemIssueStatus.RECEIVED, - 'received_date': datetime.now().strftime('%Y-%m-%d'), - 'expected_date': datetime.now().strftime('%Y-%m-%d'), - 'regular': False + "issue": { + "status": ItemIssueStatus.RECEIVED, + "received_date": datetime.now().strftime("%Y-%m-%d"), + "expected_date": datetime.now().strftime("%Y-%m-%d"), + "regular": False, }, - 'enumerationAndChronology': 'irregular_issue', - 'status': ItemStatus.ON_SHELF, - 'holding': {'$ref': get_ref_for_pid('hold', holding.pid)}, - '$schema': get_schema_for_resource(Item), - 'location': holding.get('location'), - 'document': holding.get('document'), - 'item_type': holding.get('circulation_category'), - 'type': 'issue', - 'organisation': - {'$ref': get_ref_for_pid('org', holding.organisation_pid)} + "enumerationAndChronology": "irregular_issue", + "status": ItemStatus.ON_SHELF, + "holding": {"$ref": get_ref_for_pid("hold", holding.pid)}, + "$schema": get_schema_for_resource(Item), + "location": holding.get("location"), + "document": holding.get("document"), + "item_type": holding.get("circulation_category"), + "type": "issue", + "organisation": {"$ref": get_ref_for_pid("org", holding.organisation_pid)}, } - res, data = postdata( - client, - 'invenio_records_rest.item_list', - item - ) + res, data = postdata(client, "invenio_records_rest.item_list", item) assert res.status_code == 201 - created_item = Item.get_record_by_pid(data['metadata'].get('pid')) - assert created_item.get('barcode').startswith('f-') - assert created_item.get('type') == 'issue' - assert not created_item.get('issue').get('regular') - assert 
created_item.get('enumerationAndChronology') == 'irregular_issue' - new_issue_display, new_expected_date = \ - holding._get_next_issue_display_text(holding.get('patterns')) + created_item = Item.get_record_by_pid(data["metadata"].get("pid")) + assert created_item.get("barcode").startswith("f-") + assert created_item.get("type") == "issue" + assert not created_item.get("issue").get("regular") + assert created_item.get("enumerationAndChronology") == "irregular_issue" + new_issue_display, new_expected_date = holding._get_next_issue_display_text( + holding.get("patterns") + ) assert new_issue_display == issue_display assert new_expected_date == expected_date # No Validation error if you try to create an issue with no holdings links item = { - 'issue': { - 'status': ItemIssueStatus.RECEIVED, - 'received_date': datetime.now().strftime('%Y-%m-%d'), - 'expected_date': datetime.now().strftime('%Y-%m-%d'), - 'regular': False + "issue": { + "status": ItemIssueStatus.RECEIVED, + "received_date": datetime.now().strftime("%Y-%m-%d"), + "expected_date": datetime.now().strftime("%Y-%m-%d"), + "regular": False, }, - 'enumerationAndChronology': 'irregular_issue', - 'status': ItemStatus.ON_SHELF, - 'location': holding.get('location'), - 'document': holding.get('document'), - 'item_type': holding.get('circulation_category'), - 'type': 'issue' + "enumerationAndChronology": "irregular_issue", + "status": ItemStatus.ON_SHELF, + "location": holding.get("location"), + "document": holding.get("document"), + "item_type": holding.get("circulation_category"), + "type": "issue", } - res, data = postdata( - client, - 'invenio_records_rest.item_list', - item - ) + res, data = postdata(client, "invenio_records_rest.item_list", item) # NO validation error if you try to update an issue with a holdings link item = deepcopy(created_item) created_item.update(data=item, dbcommit=True, reindex=True) # Validation error if you try to update an issue with no holdings links - item.pop('holding') + item.pop("holding") # with pytest.raises(ValidationError): created_item.update(data=item, dbcommit=True, reindex=True) # no errors when deleting an irregular issue diff --git a/tests/api/holdings/test_provisional_items.py b/tests/api/holdings/test_provisional_items.py index bddc2764b6..9547a50046 100644 --- a/tests/api/holdings/test_provisional_items.py +++ b/tests/api/holdings/test_provisional_items.py @@ -29,32 +29,36 @@ from rero_ils.modules.items.api import Item from rero_ils.modules.items.models import ItemStatus, TypeOfItem from rero_ils.modules.items.tasks import delete_provisional_items -from rero_ils.modules.items.utils import \ - get_provisional_items_candidate_to_delete +from rero_ils.modules.items.utils import get_provisional_items_candidate_to_delete from rero_ils.modules.loans.api import Loan from rero_ils.modules.loans.models import LoanAction, LoanState from rero_ils.modules.patron_transactions.api import PatronTransaction from rero_ils.modules.utils import get_ref_for_pid -def test_provisional_items_creation(client, document, org_martigny, - holding_lib_martigny_w_patterns, - provisional_item_lib_martigny_data, - json_header, item_lib_martigny, - patron_martigny, - system_librarian_martigny): +def test_provisional_items_creation( + client, + document, + org_martigny, + holding_lib_martigny_w_patterns, + provisional_item_lib_martigny_data, + json_header, + item_lib_martigny, + patron_martigny, + system_librarian_martigny, +): """Test creation of provisional items.""" holding = holding_lib_martigny_w_patterns - 
-    provisional_item_lib_martigny_data['holding'] = \
-        {'$ref': get_ref_for_pid('hold', holding.pid)}
-    item = Item.create(
-        provisional_item_lib_martigny_data, dbcommit=True, reindex=True)
+    provisional_item_lib_martigny_data["holding"] = {
+        "$ref": get_ref_for_pid("hold", holding.pid)
+    }
+    item = Item.create(provisional_item_lib_martigny_data, dbcommit=True, reindex=True)

-    item_url = url_for('invenio_records_rest.item_item', pid_value=item.pid)
+    item_url = url_for("invenio_records_rest.item_item", pid_value=item.pid)
     res = client.get(item_url)
     assert res.status_code == 200
-    item_es = Item(get_json(res).get('metadata'))
-    assert item_es.get('type') == TypeOfItem.PROVISIONAL
+    item_es = Item(get_json(res).get("metadata"))
+    assert item_es.get("type") == TypeOfItem.PROVISIONAL
     assert item_es.status == ItemStatus.ON_SHELF
     assert item_es.holding_pid == holding.pid

@@ -62,118 +66,124 @@ def test_provisional_items_creation(client, document, org_martigny,
     # for both global and institutional view.
     login_user_via_session(client, patron_martigny.user)

-    list_url = url_for('invenio_records_rest.item_list', view='org1')
+    list_url = url_for("invenio_records_rest.item_list", view="org1")
     response = client.get(list_url, headers=json_header)
     assert response.status_code == 200
     data = get_json(response)
-    items = data['hits']['hits']
+    items = data["hits"]["hits"]
     assert len(items) == 1
-    assert items[0]['metadata']['pid'] == item_lib_martigny.pid
+    assert items[0]["metadata"]["pid"] == item_lib_martigny.pid

-    list_url = url_for('invenio_records_rest.item_list', view='global')
+    list_url = url_for("invenio_records_rest.item_list", view="global")
     response = client.get(list_url, headers=json_header)
     assert response.status_code == 200
     data = get_json(response)
-    items = data['hits']['hits']
+    items = data["hits"]["hits"]
     assert len(items) == 1
-    assert items[0]['metadata']['pid'] == item_lib_martigny.pid
+    assert items[0]["metadata"]["pid"] == item_lib_martigny.pid

     # TEST: logged librarians can have the provisional items in the results.
     # provisional items are still not available for the global and other views.
     login_user_via_session(client, system_librarian_martigny.user)
-    list_url = url_for('invenio_records_rest.item_list')
+    list_url = url_for("invenio_records_rest.item_list")
     response = client.get(list_url, headers=json_header)
     assert response.status_code == 200
     data = get_json(response)
-    items = data['hits']['hits']
+    items = data["hits"]["hits"]
     assert len(items) == 2

-    list_url = url_for('invenio_records_rest.item_list', view='global')
+    list_url = url_for("invenio_records_rest.item_list", view="global")
     response = client.get(list_url, headers=json_header)
     assert response.status_code == 200
     data = get_json(response)
-    items = data['hits']['hits']
+    items = data["hits"]["hits"]
     assert len(items) == 1
-    assert items[0]['metadata']['pid'] == item_lib_martigny.pid
+    assert items[0]["metadata"]["pid"] == item_lib_martigny.pid

-    list_url = url_for('invenio_records_rest.item_list', view='org1')
+    list_url = url_for("invenio_records_rest.item_list", view="org1")
     response = client.get(list_url, headers=json_header)
     assert response.status_code == 200
     data = get_json(response)
-    items = data['hits']['hits']
+    items = data["hits"]["hits"]
     assert len(items) == 1
-    assert items[0]['metadata']['pid'] == item_lib_martigny.pid
+    assert items[0]["metadata"]["pid"] == item_lib_martigny.pid


-def test_holding_requests(client, patron_martigny, loc_public_martigny,
-                          circulation_policies, librarian_martigny,
-                          holding_lib_martigny_w_patterns, lib_martigny,
-                          item_lib_martigny, org_martigny):
+def test_holding_requests(
+    client,
+    patron_martigny,
+    loc_public_martigny,
+    circulation_policies,
+    librarian_martigny,
+    holding_lib_martigny_w_patterns,
+    lib_martigny,
+    item_lib_martigny,
+    org_martigny,
+):
     """Test holding patron request."""
     login_user_via_session(client, patron_martigny.user)
     holding = holding_lib_martigny_w_patterns
-    description = 'Year: 2000 / volume: 15 / number: 22 / pages: 11-12'
+    description = "Year: 2000 / volume: 15 / number: 22 / pages: 11-12"

     # test fails when there is a missing description or holding_pid
     res, data = postdata(
         client,
-        'api_holding.patron_request',
-        dict(
-            holding_pid=holding.pid,
-            pickup_location_pid=loc_public_martigny.pid
-        )
+        "api_holding.patron_request",
+        dict(holding_pid=holding.pid, pickup_location_pid=loc_public_martigny.pid),
     )
     assert res.status_code == 400

     res, data = postdata(
         client,
-        'api_holding.patron_request',
-        dict(
-            description=description,
-            pickup_location_pid=loc_public_martigny.pid
-        )
+        "api_holding.patron_request",
+        dict(description=description, pickup_location_pid=loc_public_martigny.pid),
     )
     assert res.status_code == 404

     # test passes when all required parameters are given
     res, data = postdata(
         client,
-        'api_holding.patron_request',
+        "api_holding.patron_request",
         dict(
             holding_pid=holding.pid,
             pickup_location_pid=loc_public_martigny.pid,
-            description=description
-        )
+            description=description,
+        ),
     )
     assert res.status_code == 200
     loan = Loan.get_record_by_pid(
-        data.get('action_applied')[LoanAction.REQUEST].get('pid'))
+        data.get("action_applied")[LoanAction.REQUEST].get("pid")
+    )
     assert loan.state == LoanState.PENDING
     item = Item.get_record_by_pid(loan.item_pid)
-    assert item.get('type') == TypeOfItem.PROVISIONAL
+    assert item.get("type") == TypeOfItem.PROVISIONAL
     assert item.status == ItemStatus.ON_SHELF
     assert item.holding_pid == holding.pid
-    assert item.get('enumerationAndChronology') == description
+    assert item.get("enumerationAndChronology") == description

     # checkout the item to the requested patron
     login_user_via_session(client, librarian_martigny.user)
-    res, data = postdata(client, 'api_item.checkout', dict(
-        item_pid=item.pid,
-        patron_pid=patron_martigny.pid,
-        transaction_location_pid=loc_public_martigny.pid,
-        transaction_user_pid=librarian_martigny.pid,
-    ))
+    res, data = postdata(
+        client,
+        "api_item.checkout",
+        dict(
+            item_pid=item.pid,
+            patron_pid=patron_martigny.pid,
+            transaction_location_pid=loc_public_martigny.pid,
+            transaction_user_pid=librarian_martigny.pid,
+        ),
+    )
     assert res.status_code == 200
-    loan_pid = data.get('action_applied')[LoanAction.CHECKOUT].get('pid')
+    loan_pid = data.get("action_applied")[LoanAction.CHECKOUT].get("pid")
     assert loan_pid == loan.pid
     item = Item.get_record_by_pid(item.pid)
     assert item.status == ItemStatus.ON_LOAN

     # return the item at the owning library
     res, data = postdata(
         client,
-        'api_item.checkin',
+        "api_item.checkin",
         dict(
             item_pid=item.pid,
             transaction_location_pid=loc_public_martigny.pid,
-            transaction_user_pid=librarian_martigny.pid
-        )
+            transaction_user_pid=librarian_martigny.pid,
+        ),
     )
     assert res.status_code == 200
     item = Item.get_record_by_pid(item.pid)
@@ -183,62 +193,63 @@ def test_holding_requests(client, patron_martigny, loc_public_martigny,

     # test fails when there are missing parameters
     res, data = postdata(
         client,
-        'api_holding.librarian_request',
+        "api_holding.librarian_request",
         dict(
             holding_pid=holding.pid,
             pickup_location_pid=loc_public_martigny.pid,
             description=description,
             transaction_library_pid=lib_martigny.pid,
-            transaction_user_pid=librarian_martigny.pid
-        )
+            transaction_user_pid=librarian_martigny.pid,
+        ),
     )
     assert res.status_code == 400

     res, data = postdata(
         client,
-        'api_holding.librarian_request',
+        "api_holding.librarian_request",
         dict(
             holding_pid=holding.pid,
             pickup_location_pid=loc_public_martigny.pid,
             description=description,
             patron_pid=patron_martigny.pid,
-            transaction_library_pid=lib_martigny.pid
-        )
+            transaction_library_pid=lib_martigny.pid,
+        ),
     )
     assert res.status_code == 400

     res, data = postdata(
         client,
-        'api_holding.librarian_request',
+        "api_holding.librarian_request",
         dict(
             holding_pid=holding.pid,
             pickup_location_pid=loc_public_martigny.pid,
             patron_pid=patron_martigny.pid,
             transaction_library_pid=lib_martigny.pid,
-            transaction_user_pid=librarian_martigny.pid
-        )
+            transaction_user_pid=librarian_martigny.pid,
+        ),
     )
     assert res.status_code == 400

     # test passes when all required parameters are given
     res, data = postdata(
         client,
-        'api_holding.librarian_request',
+        "api_holding.librarian_request",
         dict(
             holding_pid=holding.pid,
             pickup_location_pid=loc_public_martigny.pid,
             description=description,
             patron_pid=patron_martigny.pid,
             transaction_library_pid=lib_martigny.pid,
-            transaction_user_pid=librarian_martigny.pid
-        )
+            transaction_user_pid=librarian_martigny.pid,
+        ),
     )
     assert res.status_code == 200
     loan_2 = Loan.get_record_by_pid(
-        data.get('action_applied')[LoanAction.REQUEST].get('pid'))
+        data.get("action_applied")[LoanAction.REQUEST].get("pid")
+    )
     assert loan_2.state == LoanState.PENDING
     item_2 = Item.get_record_by_pid(loan_2.item_pid)
-    assert item_2.get('type') == TypeOfItem.PROVISIONAL
+    assert item_2.get("type") == TypeOfItem.PROVISIONAL
     assert item_2.status == ItemStatus.ON_SHELF
     assert item_2.holding_pid == holding.pid
-    assert item_2.get('enumerationAndChronology') == description
+    assert item_2.get("enumerationAndChronology") == description
     assert item_2.pid != item.pid

     all_item_pids = list(Item.get_all_pids())
@@ -246,8 +257,8 @@ def test_holding_requests(client, patron_martigny, loc_public_martigny,
     # test delete provisional items with no active fees/loans
     report = delete_provisional_items()
-    assert report.get('number_of_deleted_items')
-    assert report.get('number_of_candidate_items_to_delete')
+    assert report.get("number_of_deleted_items")
+    assert report.get("number_of_candidate_items_to_delete")
     # assert that items which were not deleted either have loans/fees or are
     # not provisional items
     left_item_pids = list(Item.get_all_pids())
@@ -255,19 +266,19 @@ def test_holding_requests(client, patron_martigny, loc_public_martigny,
     for pid in left_item_pids:
         record = Item.get_record_by_pid(pid)
         can, _ = record.can_delete
-        assert not can or record.get('type') != TypeOfItem.PROVISIONAL
+        assert not can or record.get("type") != TypeOfItem.PROVISIONAL
     # item_2 has pending loans, so it should not be removed
     assert item_2.pid in left_item_pids
     assert item_2 in get_provisional_items_candidate_to_delete()
     # add a fee to item_2 and make sure it will not be a candidate for deletion.
     data = {
-        'loan': {'$ref': get_ref_for_pid('loanid', loan_2.pid)},
-        'patron': {'$ref': get_ref_for_pid('patrons', patron_martigny.pid)},
-        'organisation': {'$ref': get_ref_for_pid('org', org_martigny.pid)},
-        'status': 'open',
-        'total_amount': 0.6,
-        'type': 'overdue',
-        'creation_date': datetime.now(timezone.utc).isoformat()
+        "loan": {"$ref": get_ref_for_pid("loanid", loan_2.pid)},
+        "patron": {"$ref": get_ref_for_pid("patrons", patron_martigny.pid)},
+        "organisation": {"$ref": get_ref_for_pid("org", org_martigny.pid)},
+        "status": "open",
+        "total_amount": 0.6,
+        "type": "overdue",
+        "creation_date": datetime.now(timezone.utc).isoformat(),
     }
     PatronTransaction.create(data, dbcommit=True, reindex=True)
     assert item_2 not in get_provisional_items_candidate_to_delete()
diff --git a/tests/api/ill_requests/test_ill_requests_permissions.py b/tests/api/ill_requests/test_ill_requests_permissions.py
index 97fe02111c..baca24275d 100644
--- a/tests/api/ill_requests/test_ill_requests_permissions.py
+++ b/tests/api/ill_requests/test_ill_requests_permissions.py
@@ -21,62 +21,79 @@
 from flask_security import login_user
 from utils import check_permission

-from rero_ils.modules.ill_requests.permissions import \
-    ILLRequestPermissionPolicy
+from rero_ils.modules.ill_requests.permissions import ILLRequestPermissionPolicy


 def test_ill_requests_permissions(
-    patron_martigny, librarian_martigny, system_librarian_martigny,
-    ill_request_martigny, ill_request_sion, org_martigny
+    patron_martigny,
+    librarian_martigny,
+    system_librarian_martigny,
+    ill_request_martigny,
+    ill_request_sion,
+    org_martigny,
 ):
     """Test ill requests permissions class."""
     # Anonymous user
     identity_changed.send(
         current_app._get_current_object(), identity=AnonymousIdentity()
     )
-    check_permission(ILLRequestPermissionPolicy, {
-        'search': False,
-        'read': False,
-        'create': False,
-        'update': False,
-        'delete': False
-    }, {})
+    check_permission(
+        ILLRequestPermissionPolicy,
+        {
+            "search": False,
+            "read": False,
+            "create": False,
+            "update": False,
+            "delete": False,
+        },
+        {},
+    )

     # Patron
     # * can : search, read (own record), create
     # * can't : update, delete
     login_user(patron_martigny.user)
-    check_permission(ILLRequestPermissionPolicy, {
-        'search': True,
-        'read': True,
-        'create': True,
-        'update': False,
-        'delete': False
-    }, ill_request_martigny)
-    check_permission(ILLRequestPermissionPolicy, {
-        'search': True,
-        'read': False,
-        'create': True,
-        'update': False,
-        'delete': False
-    }, ill_request_sion)
+    check_permission(
+        ILLRequestPermissionPolicy,
+        {
+            "search": True,
+            "read": True,
+            "create": True,
+            "update": False,
+            "delete": False,
+        },
+        ill_request_martigny,
+    )
+    check_permission(
+        ILLRequestPermissionPolicy,
+        {
+            "search": True,
+            "read": False,
+            "create": True,
+            "update": False,
+            "delete": False,
+        },
+        ill_request_sion,
+    )

     # Librarian without correct role
     # - can : search, read (own organisation), create
     # - update : only request for its own organisation
     # - delete : disallowed (missing ActionNeed)
     login_user(librarian_martigny.user)
-    check_permission(ILLRequestPermissionPolicy, {
-        'search': True,
-        'read': True,
-        'create': True,
-        'update': True,
-        'delete': False
-    }, ill_request_martigny)
-    check_permission(ILLRequestPermissionPolicy, {
-        'search': True,
-        'read': False,
-        'create': True,
-        'update': False,
-        'delete': False
-    }, ill_request_sion)
+    check_permission(
+        ILLRequestPermissionPolicy,
+        {"search": True, "read": True, "create": True, "update": True, "delete": False},
+        ill_request_martigny,
+    )
+    check_permission(
+        ILLRequestPermissionPolicy,
+        {
+            "search": True,
+            "read": False,
+            "create": True,
+            "update": False,
+            "delete": False,
+        },
+        ill_request_sion,
+    )
diff --git a/tests/api/ill_requests/test_ill_requests_rest.py b/tests/api/ill_requests/test_ill_requests_rest.py
index f3fb4aa880..b189410349 100644
--- a/tests/api/ill_requests/test_ill_requests_rest.py
+++ b/tests/api/ill_requests/test_ill_requests_rest.py
@@ -25,8 +25,7 @@
 from dateutil.relativedelta import *
 from flask import url_for
 from invenio_accounts.testutils import login_user_via_session
-from utils import VerifyRecordPermissionPatch, get_json, postdata, \
-    to_relative_url
+from utils import VerifyRecordPermissionPatch, get_json, postdata, to_relative_url

 from rero_ils.modules.ill_requests.api import ILLRequest
 from rero_ils.modules.ill_requests.models import ILLRequestStatus
@@ -34,12 +33,12 @@ def test_ill_requests_permissions(client, ill_request_martigny, json_header):
     """Test record retrieval."""
-    item_url = url_for('invenio_records_rest.illr_item', pid_value='illr1')
+    item_url = url_for("invenio_records_rest.illr_item", pid_value="illr1")

     # Anonymous user
     res = client.get(item_url)
     assert res.status_code == 401

-    res, _ = postdata(client, 'invenio_records_rest.illr_list', {})
+    res, _ = postdata(client, "invenio_records_rest.illr_list", {})
     assert res.status_code == 401

     res = client.put(item_url, data={}, headers=json_header)
     assert res.status_code == 401
@@ -47,102 +46,104 @@ def test_ill_requests_permissions(client, ill_request_martigny, json_header):
     assert res.status_code == 401


-@mock.patch('invenio_records_rest.views.verify_record_permission',
-            mock.MagicMock(return_value=VerifyRecordPermissionPatch))
+@mock.patch(
+    "invenio_records_rest.views.verify_record_permission",
+    mock.MagicMock(return_value=VerifyRecordPermissionPatch),
+)
 def test_ill_requests_facets(client, ill_request_martigny, rero_json_header):
     """Test record retrieval."""
-    url = url_for('invenio_records_rest.illr_list', view='org1')
+    url = url_for("invenio_records_rest.illr_list", view="org1")

     res = client.get(url, headers=rero_json_header)
     data = get_json(res)
-    facets = ['library']
-    assert all(facet_name in data['aggregations'] for facet_name in facets)
-    aggr_library = data['aggregations']['library']
-    assert all('name' in term for term in aggr_library['buckets'])
+    facets = ["library"]
+    assert all(facet_name in data["aggregations"] for facet_name in facets)
+    aggr_library = data["aggregations"]["library"]
+    assert all("name" in term for term in aggr_library["buckets"])


-@mock.patch('invenio_records_rest.views.verify_record_permission',
-            mock.MagicMock(return_value=VerifyRecordPermissionPatch))
+@mock.patch(
+    "invenio_records_rest.views.verify_record_permission",
+    mock.MagicMock(return_value=VerifyRecordPermissionPatch),
+)
 def test_ill_requests_get(client, ill_request_martigny):
     """Test record retrieval."""
     ill_request = ill_request_martigny
-    item_url = url_for('invenio_records_rest.illr_item', pid_value='illr1')
+    item_url = url_for("invenio_records_rest.illr_item", pid_value="illr1")

     res = client.get(item_url)
     assert res.status_code == 200

-    assert res.headers['ETag'] == f'"{ill_request.revision_id}"'
+    assert res.headers["ETag"] == f'"{ill_request.revision_id}"'

     data = get_json(res)
-    assert ill_request.dumps() == data['metadata']
+    assert ill_request.dumps() == data["metadata"]

     # Check metadata
-    for k in ['created', 'updated', 'metadata', 'links']:
+    for k in ["created", "updated", "metadata", "links"]:
         assert k in data

     # Check self links
-    res = client.get(to_relative_url(data['links']['self']))
+    res = client.get(to_relative_url(data["links"]["self"]))
     assert res.status_code == 200
     assert data == get_json(res)
-    assert ill_request.dumps() == data['metadata']
+    assert ill_request.dumps() == data["metadata"]

-    list_url = url_for('invenio_records_rest.illr_list', pid='illr1')
+    list_url = url_for("invenio_records_rest.illr_list", pid="illr1")
     res = client.get(list_url)
     assert res.status_code == 200
     data = get_json(res)
-    metadata = data['hits']['hits'][0]['metadata']
-    del metadata['organisation']  # organisation is added only for indexation
-    del metadata['library']  # library is added only for indexation
-    del metadata['patron']['name']  # patron name is added only for indexation
+    metadata = data["hits"]["hits"][0]["metadata"]
+    del metadata["organisation"]  # organisation is added only for indexation
+    del metadata["library"]  # library is added only for indexation
+    del metadata["patron"]["name"]  # patron name is added only for indexation
     assert metadata == ill_request.replace_refs()


-@mock.patch('invenio_records_rest.views.verify_record_permission',
-            mock.MagicMock(return_value=VerifyRecordPermissionPatch))
-def test_ill_requests_post_put_delete(client, org_martigny, json_header,
-                                      patron_martigny,
-                                      loc_public_martigny,
-                                      ill_request_martigny_data):
+@mock.patch(
+    "invenio_records_rest.views.verify_record_permission",
+    mock.MagicMock(return_value=VerifyRecordPermissionPatch),
+)
+def test_ill_requests_post_put_delete(
+    client,
+    org_martigny,
+    json_header,
+    patron_martigny,
+    loc_public_martigny,
+    ill_request_martigny_data,
+):
     """Test record retrieval."""
     # Create record / POST
-    item_url = url_for('invenio_records_rest.illr_item', pid_value='1')
-    list_url = url_for('invenio_records_rest.illr_list', q='pid:1')
+    item_url = url_for("invenio_records_rest.illr_item", pid_value="1")
+    list_url = url_for("invenio_records_rest.illr_list", q="pid:1")

     ill_request_data = deepcopy(ill_request_martigny_data)
-    ill_request_data['pid'] = '1'
-    res, data = postdata(
-        client,
-        'invenio_records_rest.illr_list',
-        ill_request_data
-    )
+    ill_request_data["pid"] = "1"
+    res, data = postdata(client, "invenio_records_rest.illr_list", ill_request_data)
     assert res.status_code == 201

     # Check that the returned record matches the given data
-    assert data['metadata'] == ill_request_data
+    assert data["metadata"] == ill_request_data

     res = client.get(item_url)
     assert res.status_code == 200
     data = get_json(res)
-    assert ill_request_data == data['metadata']
+    assert ill_request_data == data["metadata"]

     # Update record/PUT
     data = ill_request_data
-    data['document']['title'] = 'Title test'
-    res = client.put(
-        item_url,
-        data=json.dumps(data),
-        headers=json_header
-    )
+    data["document"]["title"] = "Title test"
+    res = client.put(item_url, data=json.dumps(data), headers=json_header)
     assert res.status_code == 200

     # Check that the returned record matches the given data
     data = get_json(res)
-    assert data['metadata']['document']['title'] == 'Title test'
+    assert data["metadata"]["document"]["title"] == "Title test"

     res = client.get(item_url)
     assert res.status_code == 200
     data = get_json(res)
-    assert data['metadata']['document']['title'] == 'Title test'
+    assert data["metadata"]["document"]["title"] == "Title test"

     res = client.get(list_url)
     assert res.status_code == 200
-    data = get_json(res)['hits']['hits'][0]
-    assert data['metadata']['document']['title'] == 'Title test'
+    data = get_json(res)["hits"]["hits"][0]
+    assert data["metadata"]["document"]["title"] == "Title test"


 def test_ill_requests_can_delete(client, ill_request_martigny):
@@ -153,16 +154,16 @@ def test_ill_requests_can_delete(client, ill_request_martigny):


 def test_filtered_ill_requests_get(
-        client, librarian_martigny, ill_request_martigny,
-        librarian_sion, ill_request_sion):
+    client, librarian_martigny, ill_request_martigny, librarian_sion, ill_request_sion
+):
     """Test ill_requests filter by organisation."""
-    list_url = url_for('invenio_records_rest.illr_list')
+    list_url = url_for("invenio_records_rest.illr_list")

     # Martigny
     login_user_via_session(client, librarian_martigny.user)
     res = client.get(list_url)
     assert res.status_code == 200
     data = get_json(res)
-    pids = [hit['metadata']['pid'] for hit in data['hits']['hits']]
+    pids = [hit["metadata"]["pid"] for hit in data["hits"]["hits"]]
     assert ill_request_martigny.pid in pids

     # Sion
@@ -170,19 +171,24 @@ def test_filtered_ill_requests_get(
     res = client.get(list_url)
     assert res.status_code == 200
     data = get_json(res)
-    pids = [hit['metadata']['pid'] for hit in data['hits']['hits']]
+    pids = [hit["metadata"]["pid"] for hit in data["hits"]["hits"]]
     assert ill_request_sion.pid in pids


-def test_ill_request_secure_api(client, json_header, ill_request_martigny,
-                                ill_request_martigny_data, ill_request_sion,
-                                librarian_martigny,
-                                system_librarian_sion):
+def test_ill_request_secure_api(
+    client,
+    json_header,
+    ill_request_martigny,
+    ill_request_martigny_data,
+    ill_request_sion,
+    librarian_martigny,
+    system_librarian_sion,
+):
     """Test ill request secure api access."""
-    martigny_url = url_for('invenio_records_rest.illr_item',
-                           pid_value=ill_request_martigny.pid)
-    sion_url = url_for('invenio_records_rest.illr_item',
-                       pid_value=ill_request_sion.pid)
+    martigny_url = url_for(
+        "invenio_records_rest.illr_item", pid_value=ill_request_martigny.pid
+    )
+    sion_url = url_for("invenio_records_rest.illr_item", pid_value=ill_request_sion.pid)
     # Logged as Martigny librarian
     # * can read martigny request
     # * can't read sion request
@@ -193,58 +199,53 @@ def test_ill_request_secure_api(client, json_header, ill_request_martigny,
     assert res.status_code == 403


-def test_ill_request_secure_api_update(client, json_header,
-                                       ill_request_martigny_data,
-                                       ill_request_martigny, ill_request_sion,
-                                       system_librarian_martigny,
-                                       system_librarian_sion):
+def test_ill_request_secure_api_update(
+    client,
+    json_header,
+    ill_request_martigny_data,
+    ill_request_martigny,
+    ill_request_sion,
+    system_librarian_martigny,
+    system_librarian_sion,
+):
     """Test ill request secure api update."""
-    martigny_url = url_for('invenio_records_rest.illr_item',
-                           pid_value=ill_request_martigny.pid)
-    sion_url = url_for('invenio_records_rest.illr_item',
-                       pid_value=ill_request_sion.pid)
+    martigny_url = url_for(
+        "invenio_records_rest.illr_item", pid_value=ill_request_martigny.pid
+    )
+    sion_url = url_for("invenio_records_rest.illr_item", pid_value=ill_request_sion.pid)

     # Martigny
     login_user_via_session(client, system_librarian_martigny.user)
     data = ill_request_martigny_data
-    data['document']['title'] = 'Test title'
-    res = client.put(
-        martigny_url,
-        data=json.dumps(data),
-        headers=json_header
-    )
+    data["document"]["title"] = "Test title"
+    res = client.put(martigny_url, data=json.dumps(data), headers=json_header)
     assert res.status_code == 200

     # Sion
     login_user_via_session(client, system_librarian_sion.user)
-    res = client.put(
-        martigny_url,
-        data=json.dumps(data),
-        headers=json_header
-    )
+    res = client.put(martigny_url, data=json.dumps(data), headers=json_header)
     assert res.status_code == 403


-def test_ill_request_secure_api_delete(client, ill_request_martigny,
-                                       ill_request_sion,
-                                       system_librarian_martigny):
+def test_ill_request_secure_api_delete(
+    client, ill_request_martigny, ill_request_sion, system_librarian_martigny
+):
     """Test ill requests secure api delete."""
     login_user_via_session(client, system_librarian_martigny.user)
     record_url = url_for(
-        'invenio_records_rest.illr_item',
-        pid_value=ill_request_sion.pid
+        "invenio_records_rest.illr_item", pid_value=ill_request_sion.pid
     )

     res = client.delete(record_url)
     assert res.status_code == 403

     record_url = url_for(
-        'invenio_records_rest.illr_item',
-        pid_value=ill_request_martigny.pid
+        "invenio_records_rest.illr_item", pid_value=ill_request_martigny.pid
     )

     res = client.delete(record_url)
     assert res.status_code == 403


 def test_filtered_ill_requests_get_pending_months_filters(
-        client, app, db, librarian_martigny, ill_request_martigny):
+    client, app, db, librarian_martigny, ill_request_martigny
+):
     """Test ill_requests filter by pending and months."""

     def date_delta(months):
@@ -261,38 +262,34 @@ def db_commit_reindex(record):
     # Initial status is pending
     list_url = url_for(
-        'invenio_records_rest.illr_list',
-        q=f'pid:{ill_request_martigny["pid"]}'
+        "invenio_records_rest.illr_list", q=f'pid:{ill_request_martigny["pid"]}'
     )
     res = client.get(list_url)
     result = res.json
-    assert result['hits']['total']['value'] == 1
+    assert result["hits"]["total"]["value"] == 1

     # Closed record
-    ill_request_martigny = ILLRequest\
-        .get_record_by_pid(ill_request_martigny.pid)
-    ill_request_martigny['status'] = ILLRequestStatus.CLOSED
-    ill_request_martigny.update(
-        ill_request_martigny, dbcommit=True, reindex=True)
+    ill_request_martigny = ILLRequest.get_record_by_pid(ill_request_martigny.pid)
+    ill_request_martigny["status"] = ILLRequestStatus.CLOSED
+    ill_request_martigny.update(ill_request_martigny, dbcommit=True, reindex=True)

     # Without filter (show record)
     list_url = url_for(
-        'invenio_records_rest.illr_list',
-        q=f'pid:{ill_request_martigny["pid"]}'
+        "invenio_records_rest.illr_list", q=f'pid:{ill_request_martigny["pid"]}'
     )
     res = client.get(list_url)
     result = res.json
-    assert result['hits']['total']['value'] == 1
+    assert result["hits"]["total"]["value"] == 1

     # With filter (hide record)
     list_url = url_for(
-        'invenio_records_rest.illr_list',
"invenio_records_rest.illr_list", q=f'pid:{ill_request_martigny["pid"]}', - remove_archived='1' + remove_archived="1", ) res = client.get(list_url) result = res.json - assert result['hits']['total']['value'] == 1 + assert result["hits"]["total"]["value"] == 1 # Change created date initial_create = ill_request_martigny.model.created @@ -301,54 +298,48 @@ def db_commit_reindex(record): # Without filter (show record) list_url = url_for( - 'invenio_records_rest.illr_list', - q=f'pid:{ill_request_martigny["pid"]}' + "invenio_records_rest.illr_list", q=f'pid:{ill_request_martigny["pid"]}' ) res = client.get(list_url) result = res.json - assert result['hits']['total']['value'] == 1 + assert result["hits"]["total"]["value"] == 1 # With filter (show record) list_url = url_for( - 'invenio_records_rest.illr_list', + "invenio_records_rest.illr_list", q=f'pid:{ill_request_martigny["pid"]}', - remove_archived='1' + remove_archived="1", ) res = client.get(list_url) result = res.json - assert result['hits']['total']['value'] == 0 + assert result["hits"]["total"]["value"] == 0 # Make record to pending status - ill_request_martigny = ILLRequest\ - .get_record_by_pid(ill_request_martigny.pid) - ill_request_martigny['status'] = ILLRequestStatus.PENDING - ill_request_martigny.update( - ill_request_martigny, dbcommit=True, reindex=True) + ill_request_martigny = ILLRequest.get_record_by_pid(ill_request_martigny.pid) + ill_request_martigny["status"] = ILLRequestStatus.PENDING + ill_request_martigny.update(ill_request_martigny, dbcommit=True, reindex=True) # Without filter (show record) list_url = url_for( - 'invenio_records_rest.illr_list', - q=f'pid:{ill_request_martigny["pid"]}' + "invenio_records_rest.illr_list", q=f'pid:{ill_request_martigny["pid"]}' ) res = client.get(list_url) result = res.json - assert result['hits']['total']['value'] == 1 + assert result["hits"]["total"]["value"] == 1 # With filter (show record) list_url = url_for( - 'invenio_records_rest.illr_list', + "invenio_records_rest.illr_list", q=f'pid:{ill_request_martigny["pid"]}', - remove_archived='1' + remove_archived="1", ) res = client.get(list_url) result = res.json - assert result['hits']['total']['value'] == 1 + assert result["hits"]["total"]["value"] == 1 # Initial state ill_request_martigny.model.created = initial_create db_commit_reindex(ill_request_martigny) - ill_request_martigny = ILLRequest\ - .get_record_by_pid(ill_request_martigny.pid) - ill_request_martigny['status'] = ILLRequestStatus.PENDING - ill_request_martigny.update( - ill_request_martigny, dbcommit=True, reindex=True) + ill_request_martigny = ILLRequest.get_record_by_pid(ill_request_martigny.pid) + ill_request_martigny["status"] = ILLRequestStatus.PENDING + ill_request_martigny.update(ill_request_martigny, dbcommit=True, reindex=True) diff --git a/tests/api/item_types/test_item_types_permissions.py b/tests/api/item_types/test_item_types_permissions.py index 055cc22326..94db72a3a2 100644 --- a/tests/api/item_types/test_item_types_permissions.py +++ b/tests/api/item_types/test_item_types_permissions.py @@ -22,28 +22,27 @@ from invenio_accounts.testutils import login_user_via_session from utils import check_permission, get_json -from rero_ils.modules.patron_types.permissions import \ - PatronTypePermissionPolicy +from rero_ils.modules.patron_types.permissions import PatronTypePermissionPolicy -def test_item_types_permissions_api(client, librarian_martigny, - system_librarian_martigny, - item_type_standard_martigny, - item_type_regular_sion): +def 
test_item_types_permissions_api( + client, + librarian_martigny, + system_librarian_martigny, + item_type_standard_martigny, + item_type_regular_sion, +): """Test patron types permissions api.""" - itty_permissions_url = url_for( - 'api_blueprint.permissions', - route_name='item_types' - ) + itty_permissions_url = url_for("api_blueprint.permissions", route_name="item_types") itty_martigny_permissions_url = url_for( - 'api_blueprint.permissions', - route_name='item_types', - record_pid=item_type_standard_martigny.pid + "api_blueprint.permissions", + route_name="item_types", + record_pid=item_type_standard_martigny.pid, ) itty_sion_permissions_url = url_for( - 'api_blueprint.permissions', - route_name='item_types', - record_pid=item_type_regular_sion.pid + "api_blueprint.permissions", + route_name="item_types", + record_pid=item_type_regular_sion.pid, ) # Not logged @@ -58,13 +57,13 @@ def test_item_types_permissions_api(client, librarian_martigny, res = client.get(itty_martigny_permissions_url) assert res.status_code == 200 data = get_json(res) - for action in ['list', 'read']: - assert data[action]['can'] - for action in ['create', 'update', 'delete']: - assert not data[action]['can'] + for action in ["list", "read"]: + assert data[action]["can"] + for action in ["create", "update", "delete"]: + assert not data[action]["can"] res = client.get(itty_sion_permissions_url) data = get_json(res) - assert not data['read']['can'] + assert not data["read"]["can"] # Logged as system librarian # * sys_lib can do anything about item_type for its own organisation @@ -73,20 +72,22 @@ def test_item_types_permissions_api(client, librarian_martigny, res = client.get(itty_martigny_permissions_url) assert res.status_code == 200 data = get_json(res) - for action in ['list', 'read', 'create', 'update', 'delete']: - assert data[action]['can'] + for action in ["list", "read", "create", "update", "delete"]: + assert data[action]["can"] res = client.get(itty_sion_permissions_url) assert res.status_code == 200 data = get_json(res) - for action in ['update', 'delete']: - assert not data[action]['can'] + for action in ["update", "delete"]: + assert not data[action]["can"] -def test_item_types_permissions(patron_martigny, - librarian_martigny, - system_librarian_martigny, - item_type_standard_martigny, - item_type_regular_sion): +def test_item_types_permissions( + patron_martigny, + librarian_martigny, + system_librarian_martigny, + item_type_standard_martigny, + item_type_regular_sion, +): """Test patron types permissions class.""" permission_policy = PatronTypePermissionPolicy @@ -95,63 +96,57 @@ def test_item_types_permissions(patron_martigny, identity_changed.send( current_app._get_current_object(), identity=AnonymousIdentity() ) - check_permission(permission_policy, {'search': False}, None) - check_permission(permission_policy, {'create': False}, {}) - check_permission(permission_policy, { - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, item_type_standard_martigny) + check_permission(permission_policy, {"search": False}, None) + check_permission(permission_policy, {"create": False}, {}) + check_permission( + permission_policy, + {"read": False, "create": False, "update": False, "delete": False}, + item_type_standard_martigny, + ) # Patron # A simple patron can't operate any operation about item type login_user(patron_martigny.user) - check_permission(permission_policy, {'search': False}, None) - check_permission(permission_policy, {'create': False}, {}) - 
check_permission(permission_policy, { - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, item_type_standard_martigny) + check_permission(permission_policy, {"search": False}, None) + check_permission(permission_policy, {"create": False}, {}) + check_permission( + permission_policy, + {"read": False, "create": False, "update": False, "delete": False}, + item_type_standard_martigny, + ) # Librarian # - search : any item type despite organisation owner # - read : only item type for its own organisation # - create/update/delete: disallowed login_user(librarian_martigny.user) - check_permission(permission_policy, {'search': True}, None) - check_permission(permission_policy, {'create': False}, {}) - check_permission(permission_policy, { - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, item_type_standard_martigny) - check_permission(permission_policy, { - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, item_type_regular_sion) + check_permission(permission_policy, {"search": True}, None) + check_permission(permission_policy, {"create": False}, {}) + check_permission( + permission_policy, + {"read": True, "create": False, "update": False, "delete": False}, + item_type_standard_martigny, + ) + check_permission( + permission_policy, + {"read": False, "create": False, "update": False, "delete": False}, + item_type_regular_sion, + ) # SystemLibrarian # - search : any item type despite organisation owner # - read/create/update/delete : only item type for its own # organisation login_user(system_librarian_martigny.user) - check_permission(permission_policy, {'search': True}, None) - check_permission(permission_policy, {'create': True}, {}) - check_permission(permission_policy, { - 'read': True, - 'create': True, - 'update': True, - 'delete': True - }, item_type_standard_martigny) - check_permission(permission_policy, { - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, item_type_regular_sion) + check_permission(permission_policy, {"search": True}, None) + check_permission(permission_policy, {"create": True}, {}) + check_permission( + permission_policy, + {"read": True, "create": True, "update": True, "delete": True}, + item_type_standard_martigny, + ) + check_permission( + permission_policy, + {"read": False, "create": False, "update": False, "delete": False}, + item_type_regular_sion, + ) diff --git a/tests/api/item_types/test_item_types_rest.py b/tests/api/item_types/test_item_types_rest.py index 3438267a50..5342d8a030 100644 --- a/tests/api/item_types/test_item_types_rest.py +++ b/tests/api/item_types/test_item_types_rest.py @@ -23,125 +23,117 @@ import pytest from flask import url_for from invenio_accounts.testutils import login_user_via_session -from utils import VerifyRecordPermissionPatch, get_json, postdata, \ - to_relative_url +from utils import VerifyRecordPermissionPatch, get_json, postdata, to_relative_url from rero_ils.modules.api import IlsRecordError -def test_item_types_permissions(client, item_type_standard_martigny, - json_header): +def test_item_types_permissions(client, item_type_standard_martigny, json_header): """Test record retrieval.""" - item_url = url_for('invenio_records_rest.itty_item', pid_value='itty1') + item_url = url_for("invenio_records_rest.itty_item", pid_value="itty1") res = client.get(item_url) assert res.status_code == 401 - res, _ = postdata( - client, - 'invenio_records_rest.itty_list', - {} - ) + res, _ = postdata(client, "invenio_records_rest.itty_list", 
{}) assert res.status_code == 401 res = client.put( - url_for('invenio_records_rest.itty_item', pid_value='itty1'), + url_for("invenio_records_rest.itty_item", pid_value="itty1"), data={}, - headers=json_header + headers=json_header, ) assert res.status_code == 401 res = client.delete(item_url) assert res.status_code == 401 - res = client.get(url_for('item_types.name_validate', name='standard')) + res = client.get(url_for("item_types.name_validate", name="standard")) assert res.status_code == 401 -@mock.patch('invenio_records_rest.views.verify_record_permission', - mock.MagicMock(return_value=VerifyRecordPermissionPatch)) +@mock.patch( + "invenio_records_rest.views.verify_record_permission", + mock.MagicMock(return_value=VerifyRecordPermissionPatch), +) def test_item_types_get(client, item_type_standard_martigny): """Test record retrieval.""" - item_url = url_for('invenio_records_rest.itty_item', pid_value='itty1') + item_url = url_for("invenio_records_rest.itty_item", pid_value="itty1") item_type = item_type_standard_martigny res = client.get(item_url) assert res.status_code == 200 - assert res.headers['ETag'] == f'"{item_type.revision_id}"' + assert res.headers["ETag"] == f'"{item_type.revision_id}"' data = get_json(res) - assert item_type.dumps() == data['metadata'] + assert item_type.dumps() == data["metadata"] # Check metadata - for k in ['created', 'updated', 'metadata', 'links']: + for k in ["created", "updated", "metadata", "links"]: assert k in data # Check self links - res = client.get(to_relative_url(data['links']['self'])) + res = client.get(to_relative_url(data["links"]["self"])) assert res.status_code == 200 assert data == get_json(res) - assert item_type.dumps() == data['metadata'] + assert item_type.dumps() == data["metadata"] - list_url = url_for('invenio_records_rest.itty_list', pid='itty1') + list_url = url_for("invenio_records_rest.itty_list", pid="itty1") res = client.get(list_url) assert res.status_code == 200 data = get_json(res) - assert data['hits']['hits'][0]['metadata'] == item_type.replace_refs() + assert data["hits"]["hits"][0]["metadata"] == item_type.replace_refs() -@mock.patch('invenio_records_rest.views.verify_record_permission', - mock.MagicMock(return_value=VerifyRecordPermissionPatch)) -def test_item_types_post_put_delete(client, org_martigny, - item_type_standard_martigny_data, - json_header): +@mock.patch( + "invenio_records_rest.views.verify_record_permission", + mock.MagicMock(return_value=VerifyRecordPermissionPatch), +) +def test_item_types_post_put_delete( + client, org_martigny, item_type_standard_martigny_data, json_header +): """Test record retrieval.""" # Create record / POST - item_url = url_for('invenio_records_rest.itty_item', pid_value='1') - list_url = url_for('invenio_records_rest.itty_list', q='pid:1') + item_url = url_for("invenio_records_rest.itty_item", pid_value="1") + list_url = url_for("invenio_records_rest.itty_list", q="pid:1") - item_type_standard_martigny_data['pid'] = '1' + item_type_standard_martigny_data["pid"] = "1" res, data = postdata( - client, - 'invenio_records_rest.itty_list', - item_type_standard_martigny_data + client, "invenio_records_rest.itty_list", item_type_standard_martigny_data ) assert res.status_code == 201 # Check that the returned record matches the given data - assert data['metadata'] == item_type_standard_martigny_data + assert data["metadata"] == item_type_standard_martigny_data res = client.get(item_url) assert res.status_code == 200 data = get_json(res) - assert item_type_standard_martigny_data == 
data['metadata'] + assert item_type_standard_martigny_data == data["metadata"] # Update record/PUT data = item_type_standard_martigny_data - data['name'] = 'Test Name' - res = client.put( - item_url, - data=json.dumps(data), - headers=json_header - ) + data["name"] = "Test Name" + res = client.put(item_url, data=json.dumps(data), headers=json_header) assert res.status_code == 200 # assert res.headers['ETag'] != f'"{librarie.revision_id}"' # Check that the returned record matches the given data data = get_json(res) - assert data['metadata']['name'] == 'Test Name' + assert data["metadata"]["name"] == "Test Name" res = client.get(item_url) assert res.status_code == 200 data = get_json(res) - assert data['metadata']['name'] == 'Test Name' + assert data["metadata"]["name"] == "Test Name" res = client.get(list_url) assert res.status_code == 200 - data = get_json(res)['hits']['hits'][0] - assert data['metadata']['name'] == 'Test Name' + data = get_json(res)["hits"]["hits"][0] + assert data["metadata"]["name"] == "Test Name" # Delete record/DELETE res = client.delete(item_url) @@ -151,164 +143,163 @@ def test_item_types_post_put_delete(client, org_martigny, assert res.status_code == 410 -@mock.patch('rero_ils.modules.decorators.login_and_librarian', - mock.MagicMock()) +@mock.patch("rero_ils.modules.decorators.login_and_librarian", mock.MagicMock()) def test_item_types_name_validate(client): """Test record name validation.""" - url = url_for('item_types.name_validate', name='standard') + url = url_for("item_types.name_validate", name="standard") class current_librarian: class organisation: - pid = 'org1' + pid = "org1" + with mock.patch( - 'rero_ils.modules.item_types.views.current_librarian', - current_librarian + "rero_ils.modules.item_types.views.current_librarian", current_librarian ): res = client.get(url) assert res.status_code == 200 - assert get_json(res) == {'name': 'standard'} + assert get_json(res) == {"name": "standard"} class current_librarian: class organisation: - pid = 'does not exists' + pid = "does not exists" + with mock.patch( - 'rero_ils.modules.item_types.views.current_librarian', - current_librarian + "rero_ils.modules.item_types.views.current_librarian", current_librarian ): res = client.get(url) assert res.status_code == 200 - assert get_json(res) == {'name': None} + assert get_json(res) == {"name": None} -def test_item_types_can_delete(client, item_type_standard_martigny, - item_lib_martigny, - circulation_policies): +def test_item_types_can_delete( + client, item_type_standard_martigny, item_lib_martigny, circulation_policies +): """Test can delete an item type.""" can, reasons = item_type_standard_martigny.can_delete assert not can - assert reasons['links']['circ_policies'] - assert reasons['links']['items'] + assert reasons["links"]["circ_policies"] + assert reasons["links"]["items"] def test_filtered_item_types_get( - client, librarian_martigny, item_type_standard_martigny, - item_type_on_site_martigny, item_type_specific_martigny, - librarian_sion, item_type_regular_sion, - item_type_internal_sion, item_type_particular_sion): + client, + librarian_martigny, + item_type_standard_martigny, + item_type_on_site_martigny, + item_type_specific_martigny, + librarian_sion, + item_type_regular_sion, + item_type_internal_sion, + item_type_particular_sion, +): """Test item types filter by organisation.""" # Martigny login_user_via_session(client, librarian_martigny.user) - list_url = url_for('invenio_records_rest.itty_list') + list_url = 
url_for("invenio_records_rest.itty_list") res = client.get(list_url) assert res.status_code == 200 data = get_json(res) - assert data['hits']['total']['value'] == 4 + assert data["hits"]["total"]["value"] == 4 # Sion login_user_via_session(client, librarian_sion.user) - list_url = url_for('invenio_records_rest.itty_list') + list_url = url_for("invenio_records_rest.itty_list") res = client.get(list_url) assert res.status_code == 200 data = get_json(res) - assert data['hits']['total']['value'] == 4 + assert data["hits"]["total"]["value"] == 4 -def test_item_type_secure_api(client, json_header, - item_type_standard_martigny, - librarian_martigny, - librarian_sion): +def test_item_type_secure_api( + client, json_header, item_type_standard_martigny, librarian_martigny, librarian_sion +): """Test item type secure api access.""" # Martigny login_user_via_session(client, librarian_martigny.user) - record_url = url_for('invenio_records_rest.itty_item', - pid_value=item_type_standard_martigny.pid) + record_url = url_for( + "invenio_records_rest.itty_item", pid_value=item_type_standard_martigny.pid + ) res = client.get(record_url) assert res.status_code == 200 # Sion login_user_via_session(client, librarian_sion.user) - record_url = url_for('invenio_records_rest.itty_item', - pid_value=item_type_standard_martigny.pid) + record_url = url_for( + "invenio_records_rest.itty_item", pid_value=item_type_standard_martigny.pid + ) res = client.get(record_url) assert res.status_code == 403 -def test_item_type_secure_api_create(client, json_header, - item_type_standard_martigny, - system_librarian_martigny, - system_librarian_sion, - item_type_standard_martigny_data): +def test_item_type_secure_api_create( + client, + json_header, + item_type_standard_martigny, + system_librarian_martigny, + system_librarian_sion, + item_type_standard_martigny_data, +): """Test item type secure api create.""" # Martigny login_user_via_session(client, system_librarian_martigny.user) - post_entrypoint = 'invenio_records_rest.itty_list' + post_entrypoint = "invenio_records_rest.itty_list" - del item_type_standard_martigny_data['pid'] - res, _ = postdata( - client, - post_entrypoint, - item_type_standard_martigny_data - ) + del item_type_standard_martigny_data["pid"] + res, _ = postdata(client, post_entrypoint, item_type_standard_martigny_data) assert res.status_code == 201 # Sion login_user_via_session(client, system_librarian_sion.user) - res, _ = postdata( - client, - post_entrypoint, - item_type_standard_martigny_data - ) + res, _ = postdata(client, post_entrypoint, item_type_standard_martigny_data) assert res.status_code == 403 -def test_item_type_secure_api_update(client, - item_type_on_site_martigny, - system_librarian_martigny, - system_librarian_sion, - item_type_on_site_martigny_data, - json_header): +def test_item_type_secure_api_update( + client, + item_type_on_site_martigny, + system_librarian_martigny, + system_librarian_sion, + item_type_on_site_martigny_data, + json_header, +): """Test item type secure api update.""" # Martigny login_user_via_session(client, system_librarian_martigny.user) - record_url = url_for('invenio_records_rest.itty_item', - pid_value=item_type_on_site_martigny.pid) + record_url = url_for( + "invenio_records_rest.itty_item", pid_value=item_type_on_site_martigny.pid + ) data = item_type_on_site_martigny - data['name'] = 'Test Name' - res = client.put( - record_url, - data=json.dumps(data), - headers=json_header - ) + data["name"] = "Test Name" + res = client.put(record_url, 
data=json.dumps(data), headers=json_header) assert res.status_code == 200 # Sion login_user_via_session(client, system_librarian_sion.user) - res = client.put( - record_url, - data=json.dumps(data), - headers=json_header - ) + res = client.put(record_url, data=json.dumps(data), headers=json_header) assert res.status_code == 403 -def test_item_type_secure_api_delete(client, - item_type_on_site_martigny, - system_librarian_martigny, - system_librarian_sion, - item_type_on_site_martigny_data, - json_header): +def test_item_type_secure_api_delete( + client, + item_type_on_site_martigny, + system_librarian_martigny, + system_librarian_sion, + item_type_on_site_martigny_data, + json_header, +): """Test item type secure api delete.""" # Martigny login_user_via_session(client, system_librarian_martigny.user) - record_url = url_for('invenio_records_rest.itty_item', - pid_value=item_type_on_site_martigny.pid) + record_url = url_for( + "invenio_records_rest.itty_item", pid_value=item_type_on_site_martigny.pid + ) with pytest.raises(IlsRecordError.NotDeleted): res = client.delete(record_url) diff --git a/tests/api/items/test_items_in_transit.py b/tests/api/items/test_items_in_transit.py index 5d1031dd32..b48d4ad9b1 100644 --- a/tests/api/items/test_items_in_transit.py +++ b/tests/api/items/test_items_in_transit.py @@ -27,43 +27,52 @@ def test_items_in_transit_between_libraries( - client, librarian_martigny, librarian_saxon, - patron_martigny, loc_public_martigny, - item_type_standard_martigny, loc_public_saxon, item_lib_martigny, - json_header, circulation_policies): + client, + librarian_martigny, + librarian_saxon, + patron_martigny, + loc_public_martigny, + item_type_standard_martigny, + loc_public_saxon, + item_lib_martigny, + json_header, + circulation_policies, +): """Test item in-transit scenarios.""" login_user_via_session(client, librarian_martigny.user) # checkout the item at location A res, data = postdata( client, - 'api_item.checkout', + "api_item.checkout", dict( item_pid=item_lib_martigny.pid, patron_pid=patron_martigny.pid, transaction_location_pid=loc_public_saxon.pid, - transaction_user_pid=librarian_martigny.pid - ) + transaction_user_pid=librarian_martigny.pid, + ), ) assert res.status_code == 200 - assert Item.get_record_by_pid(item_lib_martigny.pid).get('status') \ + assert ( + Item.get_record_by_pid(item_lib_martigny.pid).get("status") == ItemStatus.ON_LOAN - item_data = data.get('metadata') - actions = data.get('action_applied') - assert item_data.get('status') == ItemStatus.ON_LOAN - loan_pid = actions[LoanAction.CHECKOUT].get('pid') + ) + item_data = data.get("metadata") + actions = data.get("action_applied") + assert item_data.get("status") == ItemStatus.ON_LOAN + loan_pid = actions[LoanAction.CHECKOUT].get("pid") # checkin the item at location B res, data = postdata( client, - 'api_item.checkin', + "api_item.checkin", dict( item_pid=item_lib_martigny.pid, pid=loan_pid, transaction_location_pid=loc_public_martigny.pid, - transaction_user_pid=librarian_martigny.pid - ) + transaction_user_pid=librarian_martigny.pid, + ), ) assert res.status_code == 200 - item_data = data.get('metadata') - item = Item.get_record_by_pid(item_data.get('pid')) - assert item.get('status') == ItemStatus.ON_SHELF + item_data = data.get("metadata") + item = Item.get_record_by_pid(item_data.get("pid")) + assert item.get("status") == ItemStatus.ON_SHELF diff --git a/tests/api/items/test_items_issue.py b/tests/api/items/test_items_issue.py index bc59a59053..55b02df5cd 100644 --- 
a/tests/api/items/test_items_issue.py +++ b/tests/api/items/test_items_issue.py @@ -20,15 +20,13 @@ from flask import url_for from invenio_accounts.testutils import login_user_via_session from jinja2 import UndefinedError -from utils import VerifyRecordPermissionPatch, flush_index, get_csv, \ - get_json, parse_csv, postdata +from utils import VerifyRecordPermissionPatch, get_csv, get_json, parse_csv, postdata from rero_ils.modules.commons.exceptions import MissingDataException from rero_ils.modules.holdings.api import Holding from rero_ils.modules.items.api import Item from rero_ils.modules.items.dumpers import ClaimIssueNotificationDumper -from rero_ils.modules.notifications.api import Notification, \ - NotificationsSearch +from rero_ils.modules.notifications.api import Notification, NotificationsSearch from rero_ils.modules.notifications.models import RecipientType from rero_ils.modules.vendors.dumpers import VendorClaimIssueNotificationDumper @@ -36,19 +34,21 @@ def _receive_regular_issue(client, holding): res, data = postdata( client, - 'api_holding.receive_regular_issue', - url_data=dict(holding_pid=holding.pid) + "api_holding.receive_regular_issue", + url_data=dict(holding_pid=holding.pid), ) assert res.status_code == 200 data = get_json(res) - issue_item = Item.get_record_by_pid(data.get('issue', {}).get('pid')) + issue_item = Item.get_record_by_pid(data.get("issue", {}).get("pid")) assert issue_item is not None assert issue_item.issue_is_regular return issue_item -@mock.patch('invenio_records_rest.views.verify_record_permission', - mock.MagicMock(return_value=VerifyRecordPermissionPatch)) +@mock.patch( + "invenio_records_rest.views.verify_record_permission", + mock.MagicMock(return_value=VerifyRecordPermissionPatch), +) def test_issues_permissions( client, holding_lib_martigny_w_patterns, librarian_martigny ): @@ -62,19 +62,22 @@ def test_issues_permissions( res = client.get( url_for( - 'api_blueprint.permissions', - route_name='items', - record_pid=issue_item.pid + "api_blueprint.permissions", route_name="items", record_pid=issue_item.pid ) ) assert res.status_code == 200 data = get_json(res) - assert data['delete']['can'] + assert data["delete"]["can"] def test_issues_claim_notifications( - client, holding_lib_martigny_w_patterns, librarian_martigny, mailbox, - csv_header, rero_json_header, item_lib_sion + client, + holding_lib_martigny_w_patterns, + librarian_martigny, + mailbox, + csv_header, + rero_json_header, + item_lib_sion, ): """Test claim notification creation.""" item = item_lib_sion @@ -94,46 +97,53 @@ def test_issues_claim_notifications( # 3) simulate a template rendering error --> return 500 # 4) missing data --> return 500 # 5) call with an issue item --> return 200 - for pid, ret_code in [('dummy_pid', 404), (item.pid, 400)]: - url = url_for('api_item.claim_notification_preview', item_pid=pid) + for pid, ret_code in [("dummy_pid", 404), (item.pid, 400)]: + url = url_for("api_item.claim_notification_preview", item_pid=pid) response = client.get(url) assert response.status_code == ret_code issue_pid = issue_item.pid - url = url_for('api_item.claim_notification_preview', item_pid=issue_pid) - with mock.patch('rero_ils.modules.items.views.api_views.render_template', - mock.MagicMock(side_effect=UndefinedError('my_error'))): + url = url_for("api_item.claim_notification_preview", item_pid=issue_pid) + with mock.patch( + "rero_ils.modules.items.views.api_views.render_template", + mock.MagicMock(side_effect=UndefinedError("my_error")), + ): response = client.get(url)
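# A hedged aside, not part of the original test: patching with
# mock.MagicMock(side_effect=...) makes every call to the patched target
# raise, which is how the template-rendering failure above is simulated.
# A minimal, self-contained sketch of the same technique:
#
#     from unittest import mock
#     from jinja2 import UndefinedError
#
#     render = mock.MagicMock(side_effect=UndefinedError("my_error"))
#     try:
#         render("claim_issue.txt")  # any call now raises UndefinedError
#     except UndefinedError as error:
#         assert "my_error" in str(error)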
assert response.status_code == 500 - assert 'my_error' in response.json['message'] + assert "my_error" in response.json["message"] with mock.patch.object( - ClaimIssueNotificationDumper, 'dump', - mock.MagicMock(side_effect=MissingDataException('Test!')) + ClaimIssueNotificationDumper, + "dump", + mock.MagicMock(side_effect=MissingDataException("Test!")), ): response = client.get(url) assert response.status_code == 500 - assert 'Test!' in response.json['message'] + assert "Test!" in response.json["message"] response = client.get(url) assert response.status_code == 200 - assert all(field in response.json - for field in ['recipient_suggestions', 'preview']) - assert 'message' not in response.json + assert all(field in response.json for field in ["recipient_suggestions", "preview"]) + assert "message" not in response.json # update the vendor communication_language to force it to an unknown # related template and retry. - with mock.patch.object(VendorClaimIssueNotificationDumper, 'dump', - mock.MagicMock(return_value={ - 'name': 'test vendor name', - 'email': 'test@vendor.com', - 'language': 'dummy' - })): + with mock.patch.object( + VendorClaimIssueNotificationDumper, + "dump", + mock.MagicMock( + return_value={ + "name": "test vendor name", + "email": "test@vendor.com", + "language": "dummy", + } + ), + ): response = client.get(url) assert response.status_code == 200 assert all( field in response.json - for field in ['recipient_suggestions', 'preview', 'message'] + for field in ["recipient_suggestions", "preview", "message"] ) # Now really claim the issue @@ -141,79 +151,81 @@ def test_issues_claim_notifications( # 2) not sending recipients data --> return 400 HTTP code # 3) sending all correct data : the notification is created, dispatched # and returned - for pid, ret_code in [('dummy_pid', 404), (item.pid, 400)]: - url = url_for('api_item.claim_issue', item_pid=pid) + for pid, ret_code in [("dummy_pid", 404), (item.pid, 400)]: + url = url_for("api_item.claim_issue", item_pid=pid) response = client.post(url) assert response.status_code == ret_code response, data = postdata( client, - 'api_item.claim_issue', - url_data={'item_pid': issue_pid}, - data={'recipients': []} + "api_item.claim_issue", + url_data={"item_pid": issue_pid}, + data={"recipients": []}, ) assert response.status_code == 400 - assert data['message'] == 'Missing recipients emails.' + assert data["message"] == "Missing recipients emails." 
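# Sketch (assumption -- the real helper lives in tests/utils.py, which is not
# part of this diff): `postdata` appears to resolve a named endpoint with
# url_for(**url_data), POST `data` as JSON and return both the response and
# its parsed body. A minimal equivalent consistent with its uses here:
#
#     import json
#     from flask import url_for
#
#     def postdata(client, endpoint, data=None, url_data=None):
#         """POST `data` as JSON to `endpoint`; return (response, JSON body)."""
#         url = url_for(endpoint, **(url_data or {}))
#         response = client.post(
#             url,
#             data=json.dumps(data or {}),
#             headers=[("Content-Type", "application/json")],
#         )
#         return response, response.json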
response, data = postdata( client, - 'api_item.claim_issue', - url_data={'item_pid': issue_pid}, - data={'recipients': [ - {'type': RecipientType.TO, 'address': 'to@domain.com'}, - {'type': RecipientType.REPLY_TO, 'address': 'noreply@domain.com'}, - {'type': RecipientType.CC, 'address': 'cc1@domain.com'}, - {'type': RecipientType.CC, 'address': 'cc2@domain.com'}, - {'type': RecipientType.BCC, 'address': 'bcc@domain.com'}, - ]} + "api_item.claim_issue", + url_data={"item_pid": issue_pid}, + data={ + "recipients": [ + {"type": RecipientType.TO, "address": "to@domain.com"}, + {"type": RecipientType.REPLY_TO, "address": "noreply@domain.com"}, + {"type": RecipientType.CC, "address": "cc1@domain.com"}, + {"type": RecipientType.CC, "address": "cc2@domain.com"}, + {"type": RecipientType.BCC, "address": "bcc@domain.com"}, + ] + }, ) assert response.status_code == 200 - notification = Notification.get_record_by_pid(data['data']['pid']) + notification = Notification.get_record_by_pid(data["data"]["pid"]) assert notification assert len(mailbox) == 1 - assert notification['context']['number'] == 1 + assert notification["context"]["number"] == 1 # Send a second claim... just for fun (and also testing the incremented number) mailbox.clear() response, data = postdata( client, - 'api_item.claim_issue', - url_data={'item_pid': issue_pid}, - data={'recipients': [ - {'type': RecipientType.TO, 'address': 'to2@domain.com'}, - {'type': RecipientType.REPLY_TO, 'address': 'noreply2@domain.com'} - ]} + "api_item.claim_issue", + url_data={"item_pid": issue_pid}, + data={ + "recipients": [ + {"type": RecipientType.TO, "address": "to2@domain.com"}, + {"type": RecipientType.REPLY_TO, "address": "noreply2@domain.com"}, + ] + }, ) assert response.status_code == 200 - notification = Notification.get_record_by_pid(data['data']['pid']) + notification = Notification.get_record_by_pid(data["data"]["pid"]) assert notification assert len(mailbox) == 1 - assert notification['context']['number'] == 2 + assert notification["context"]["number"] == 2 # As a claim notification has been created, the number of claims for this # issue should be incremented - flush_index(NotificationsSearch.Meta.index) + NotificationsSearch.flush_and_refresh() assert issue_item.claims_count == 2 # Check that all is correctly indexed into ES url = url_for( - 'invenio_records_rest.item_list', - q=f'pid:{issue_pid}', - facets='claims_date' + "invenio_records_rest.item_list", q=f"pid:{issue_pid}", facets="claims_date" ) response = client.get(url) - data = response.json['hits']['hits'][0]['metadata'] - assert data['issue']['claims']['counter'] == 2 - assert len(data['issue']['claims']['dates']) == 2 + data = response.json["hits"]["hits"][0]["metadata"] + assert data["issue"]["claims"]["counter"] == 2 + assert len(data["issue"]["claims"]["dates"]) == 2 # Ensure that item serialization includes claim keys - url = url_for('invenio_records_rest.item_item', pid_value=issue_pid) + url = url_for("invenio_records_rest.item_item", pid_value=issue_pid) response = client.get(url, headers=rero_json_header) - assert response.json['metadata']['issue']['claims']['counter'] == 2 - assert len(response.json['metadata']['issue']['claims']['dates']) == 2 + assert response.json["metadata"]["issue"]["claims"]["counter"] == 2 + assert len(response.json["metadata"]["issue"]["claims"]["dates"]) == 2 # Export this issue as CSV and check issue claims_count column - list_url = url_for('api_item.inventory_search', q=f'pid:{issue_pid}') + list_url = url_for("api_item.inventory_search", 
q=f"pid:{issue_pid}") response = client.get(list_url, headers=csv_header) assert response.status_code == 200 data = list(parse_csv(get_csv(response))) diff --git a/tests/api/items/test_items_permissions.py b/tests/api/items/test_items_permissions.py index 69a0f8a954..95a95724cb 100644 --- a/tests/api/items/test_items_permissions.py +++ b/tests/api/items/test_items_permissions.py @@ -20,17 +20,21 @@ from flask import current_app from flask_principal import AnonymousIdentity, identity_changed from flask_security import login_user -from utils import check_permission, flush_index +from utils import check_permission from rero_ils.modules.items.permissions import ItemPermissionPolicy from rero_ils.modules.patrons.api import Patron, PatronsSearch -@mock.patch.object(Patron, '_extensions', []) +@mock.patch.object(Patron, "_extensions", []) def test_items_permissions( - patron_martigny, org_martigny, librarian_martigny, - system_librarian_martigny, item_lib_sion, item_lib_saxon, - item_lib_martigny + patron_martigny, + org_martigny, + librarian_martigny, + system_librarian_martigny, + item_lib_sion, + item_lib_saxon, + item_lib_martigny, ): """Test item permissions class.""" @@ -40,99 +44,117 @@ def test_items_permissions( identity_changed.send( current_app._get_current_object(), identity=AnonymousIdentity() ) - check_permission(ItemPermissionPolicy, { - 'search': True, - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, None) - check_permission(ItemPermissionPolicy, { - 'search': True, - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, item_lib_martigny) + check_permission( + ItemPermissionPolicy, + { + "search": True, + "read": True, + "create": False, + "update": False, + "delete": False, + }, + None, + ) + check_permission( + ItemPermissionPolicy, + { + "search": True, + "read": True, + "create": False, + "update": False, + "delete": False, + }, + item_lib_martigny, + ) login_user(patron_martigny.user) - check_permission(ItemPermissionPolicy, {'create': False}, {}) - check_permission(ItemPermissionPolicy, { - 'search': True, - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, item_lib_sion) + check_permission(ItemPermissionPolicy, {"create": False}, {}) + check_permission( + ItemPermissionPolicy, + { + "search": True, + "read": True, + "create": False, + "update": False, + "delete": False, + }, + item_lib_sion, + ) # Librarian with specific role # - search/read: any items # - create/update/delete: allowed for items of its own library login_user(librarian_martigny.user) - check_permission(ItemPermissionPolicy, { - 'search': True, - 'read': True, - 'create': True, - 'update': True, - 'delete': True - }, item_lib_martigny) - check_permission(ItemPermissionPolicy, { - 'search': True, - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, item_lib_saxon) - check_permission(ItemPermissionPolicy, { - 'search': True, - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, item_lib_sion) + check_permission( + ItemPermissionPolicy, + {"search": True, "read": True, "create": True, "update": True, "delete": True}, + item_lib_martigny, + ) + check_permission( + ItemPermissionPolicy, + { + "search": True, + "read": True, + "create": False, + "update": False, + "delete": False, + }, + item_lib_saxon, + ) + check_permission( + ItemPermissionPolicy, + { + "search": True, + "read": True, + "create": False, + "update": False, + "delete": False, + }, + item_lib_sion, + ) # Librarian without 
specific role # - search/read: any items # - create/update/delete: disallowed for any items except for # "pro_circulation_manager" as create/update are allowed. - original_roles = librarian_martigny.get('roles', []) - librarian_martigny['roles'] = ['pro_circulation_manager'] + original_roles = librarian_martigny.get("roles", []) + librarian_martigny["roles"] = ["pro_circulation_manager"] librarian_martigny.update(librarian_martigny, dbcommit=True, reindex=True) - flush_index(PatronsSearch.Meta.index) + PatronsSearch.flush_and_refresh() login_user(librarian_martigny.user) # to refresh identity ! - check_permission(ItemPermissionPolicy, { - 'search': True, - 'read': True, - 'create': True, - 'update': True, - 'delete': False - }, item_lib_martigny) + check_permission( + ItemPermissionPolicy, + {"search": True, "read": True, "create": True, "update": True, "delete": False}, + item_lib_martigny, + ) - librarian_martigny['roles'] = ['pro_user_manager'] + librarian_martigny["roles"] = ["pro_user_manager"] librarian_martigny.update(librarian_martigny, dbcommit=True, reindex=True) - flush_index(PatronsSearch.Meta.index) + PatronsSearch.flush_and_refresh() login_user(librarian_martigny.user) # to refresh identity ! - check_permission(ItemPermissionPolicy, { - 'search': True, - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, item_lib_martigny) + check_permission( + ItemPermissionPolicy, + { + "search": True, + "read": True, + "create": False, + "update": False, + "delete": False, + }, + item_lib_martigny, + ) # reset the librarian - librarian_martigny['roles'] = original_roles + librarian_martigny["roles"] = original_roles librarian_martigny.update(librarian_martigny, dbcommit=True, reindex=True) - flush_index(PatronsSearch.Meta.index) + PatronsSearch.flush_and_refresh() # System librarian (aka. 
full-permissions) # - create/update/delete: allow for serial holding if its own org login_user(system_librarian_martigny.user) - check_permission(ItemPermissionPolicy, { - 'search': True, - 'read': True, - 'create': True, - 'update': True, - 'delete': True - }, item_lib_saxon) + check_permission( + ItemPermissionPolicy, + {"search": True, "read": True, "create": True, "update": True, "delete": True}, + item_lib_saxon, + ) diff --git a/tests/api/items/test_items_rest.py b/tests/api/items/test_items_rest.py index 7f6a6b1303..6ad5056d13 100644 --- a/tests/api/items/test_items_rest.py +++ b/tests/api/items/test_items_rest.py @@ -27,7 +27,7 @@ from invenio_accounts.testutils import login_user_via_session from invenio_db import db from invenio_pidstore.models import PersistentIdentifier -from utils import VerifyRecordPermissionPatch, flush_index, get_json, postdata +from utils import VerifyRecordPermissionPatch, get_json, postdata from rero_ils.modules.circ_policies.api import CircPoliciesSearch from rero_ils.modules.holdings.api import Holding @@ -39,176 +39,164 @@ from rero_ils.modules.utils import get_ref_for_pid -@mock.patch('invenio_records_rest.views.verify_record_permission', - mock.MagicMock(return_value=VerifyRecordPermissionPatch)) +@mock.patch( + "invenio_records_rest.views.verify_record_permission", + mock.MagicMock(return_value=VerifyRecordPermissionPatch), +) def test_orphean_pids( - client, document, loc_public_martigny, item_type_standard_martigny, - item_lib_martigny_data_tmp, json_header + client, + document, + loc_public_martigny, + item_type_standard_martigny, + item_lib_martigny_data_tmp, + json_header, ): """Test record retrieval.""" item_data = item_lib_martigny_data_tmp - item_data.pop('pid', None) - item_data['foo'] = 'foo' - n_item_pids = PersistentIdentifier.query.filter_by(pid_type='item').count() + item_data.pop("pid", None) + item_data["foo"] = "foo" + n_item_pids = PersistentIdentifier.query.filter_by(pid_type="item").count() n_holdings = Holding.count() - res, _ = postdata( - client, - 'invenio_records_rest.item_list', - item_data - ) + res, _ = postdata(client, "invenio_records_rest.item_list", item_data) # close the session as it is shared with the client db.session.close() assert res.status_code == 400 # no holding has been created assert Holding.count() == n_holdings # no orphean pids - assert PersistentIdentifier.query.filter_by(pid_type='item').count() \ - == n_item_pids + assert PersistentIdentifier.query.filter_by(pid_type="item").count() == n_item_pids -def test_items_permissions(client, item_lib_martigny, - patron_martigny, - json_header): +def test_items_permissions(client, item_lib_martigny, patron_martigny, json_header): """Test record retrieval.""" - item_url = url_for('invenio_records_rest.item_item', pid_value='item1') + item_url = url_for("invenio_records_rest.item_item", pid_value="item1") res = client.get(item_url) assert res.status_code == 200 - res, _ = postdata( - client, - 'invenio_records_rest.item_list', - {} - ) + res, _ = postdata(client, "invenio_records_rest.item_list", {}) assert res.status_code == 401 res = client.put( - url_for('invenio_records_rest.item_item', pid_value='item1'), + url_for("invenio_records_rest.item_item", pid_value="item1"), data={}, - headers=json_header + headers=json_header, ) res = client.delete(item_url) assert res.status_code == 401 views = { - 'api_item.checkout': 403, - 'api_item.checkin': 403, - 'api_item.cancel_item_request': 404, # auth. 
OK but send bad data - 'api_item.validate_request': 403, - 'api_item.receive': 403, - 'api_item.return_missing': 403, - 'api_item.extend_loan': 404, # auth. OK but send bad data - 'api_item.librarian_request': 403, - 'api_item.patron_request': 404 # auth. OK but send bad data + "api_item.checkout": 403, + "api_item.checkin": 403, + "api_item.cancel_item_request": 404, # auth. OK but send bad data + "api_item.validate_request": 403, + "api_item.receive": 403, + "api_item.return_missing": 403, + "api_item.extend_loan": 404, # auth. OK but send bad data + "api_item.librarian_request": 403, + "api_item.patron_request": 404, # auth. OK but send bad data } for view in views: res, _ = postdata(client, view, {}) assert res.status_code == 401 - res = client.get( - url_for('api_item.requested_loans', library_pid='test'), - data={} - ) + res = client.get(url_for("api_item.requested_loans", library_pid="test"), data={}) assert res.status_code == 401 login_user_via_session(client, patron_martigny.user) for view, status in views.items(): res, _ = postdata(client, view, {}) assert res.status_code == status - res = client.get( - url_for('api_item.requested_loans', library_pid='test'), - data={} - ) + res = client.get(url_for("api_item.requested_loans", library_pid="test"), data={}) assert res.status_code == 403 -@mock.patch('invenio_records_rest.views.verify_record_permission', - mock.MagicMock(return_value=VerifyRecordPermissionPatch)) -def test_items_post_put_delete(client, document, loc_public_martigny, - item_type_standard_martigny, - item_lib_martigny_data, json_header): +@mock.patch( + "invenio_records_rest.views.verify_record_permission", + mock.MagicMock(return_value=VerifyRecordPermissionPatch), +) +def test_items_post_put_delete( + client, + document, + loc_public_martigny, + item_type_standard_martigny, + item_lib_martigny_data, + json_header, +): """Test record retrieval.""" # Create record / POST - item_url = url_for('invenio_records_rest.item_item', pid_value='1') - list_url = url_for('invenio_records_rest.item_list', q='pid:1') + item_url = url_for("invenio_records_rest.item_item", pid_value="1") + list_url = url_for("invenio_records_rest.item_list", q="pid:1") # test when item has no barcode item_record_with_no_barcode = deepcopy(item_lib_martigny_data) - item_record_with_no_barcode['pid'] = 'pid' - del item_record_with_no_barcode['barcode'] + item_record_with_no_barcode["pid"] = "pid" + del item_record_with_no_barcode["barcode"] res, data = postdata( - client, - 'invenio_records_rest.item_list', - item_record_with_no_barcode + client, "invenio_records_rest.item_list", item_record_with_no_barcode ) assert res.status_code == 201 - item_barcode = data['metadata']['barcode'] - assert item_barcode.startswith('f-') + item_barcode = data["metadata"]["barcode"] + assert item_barcode.startswith("f-") # test updating an item with no barcode, keeps the old barcode - created_item = Item.get_record_by_pid('pid') - assert created_item.pid == 'pid' + created_item = Item.get_record_by_pid("pid") + assert created_item.pid == "pid" item_to_update = deepcopy(created_item) - del item_to_update['barcode'] - updated_item = created_item.update( - data=item_to_update, dbcommit=True, reindex=True) - assert updated_item['barcode'].startswith('f-') + del item_to_update["barcode"] + updated_item = created_item.update(data=item_to_update, dbcommit=True, reindex=True) + assert updated_item["barcode"].startswith("f-") # test replacing an item with no barcode, regenerates a new barcode item_to_replace = 
deepcopy(updated_item) - del item_to_replace['barcode'] + del item_to_replace["barcode"] replaced_item = created_item.replace( - data=item_to_replace, dbcommit=True, reindex=True) - assert replaced_item['barcode'].startswith('f-') + data=item_to_replace, dbcommit=True, reindex=True + ) + assert replaced_item["barcode"].startswith("f-") # test when item has a dirty barcode - item_lib_martigny_data['pid'] = '1' - item_lib_martigny_data['barcode'] = '123456' + item_lib_martigny_data["pid"] = "1" + item_lib_martigny_data["barcode"] = "123456" item_record_with_dirty_barcode = deepcopy(item_lib_martigny_data) - barcode = item_record_with_dirty_barcode.get('barcode') - item_record_with_dirty_barcode['barcode'] = f' {barcode} ' + barcode = item_record_with_dirty_barcode.get("barcode") + item_record_with_dirty_barcode["barcode"] = f" {barcode} " res, data = postdata( - client, - 'invenio_records_rest.item_list', - item_record_with_dirty_barcode + client, "invenio_records_rest.item_list", item_record_with_dirty_barcode ) assert res.status_code == 201 # Check that the returned record matches the given data - assert data['metadata'] == item_lib_martigny_data + assert data["metadata"] == item_lib_martigny_data res = client.get(item_url) assert res.status_code == 200 data = get_json(res) - assert item_lib_martigny_data == data['metadata'] + assert item_lib_martigny_data == data["metadata"] # Update record/PUT data = item_lib_martigny_data - data['call_number'] = 'Test Name' - res = client.put( - item_url, - data=json.dumps(data), - headers=json_header - ) + data["call_number"] = "Test Name" + res = client.put(item_url, data=json.dumps(data), headers=json_header) assert res.status_code == 200 # assert res.headers['ETag'] != f'"{librarie.revision_id}"' # Check that the returned record matches the given data data = get_json(res) - assert data['metadata']['call_number'] == 'Test Name' + assert data["metadata"]["call_number"] == "Test Name" res = client.get(item_url) assert res.status_code == 200 data = get_json(res) - assert data['metadata']['call_number'] == 'Test Name' + assert data["metadata"]["call_number"] == "Test Name" res = client.get(list_url) assert res.status_code == 200 - data = get_json(res)['hits']['hits'][0] - assert data['metadata']['call_number'] == 'Test Name' + data = get_json(res)["hits"]["hits"][0] + assert data["metadata"]["call_number"] == "Test Name" # Delete record/DELETE res = client.delete(item_url) @@ -217,18 +205,22 @@ def test_items_post_put_delete(client, document, loc_public_martigny, assert res.status_code == 410 # Reset fixtures - item_url = url_for('invenio_records_rest.item_item', pid_value='pid') + item_url = url_for("invenio_records_rest.item_item", pid_value="pid") res = client.delete(item_url) assert res.status_code == 204 -def test_checkout_default_policy(client, lib_martigny, - librarian_martigny, - patron_martigny, - loc_public_martigny, - item_type_standard_martigny, - item_lib_martigny, json_header, - circulation_policies): +def test_checkout_default_policy( + client, + lib_martigny, + librarian_martigny, + patron_martigny, + loc_public_martigny, + item_type_standard_martigny, + item_lib_martigny, + json_header, + circulation_policies, +): """Test circ policy parameters""" login_user_via_session(client, librarian_martigny.user) item = item_lib_martigny @@ -236,56 +228,59 @@ def test_checkout_default_policy(client, lib_martigny, patron_pid = patron_martigny.pid from rero_ils.modules.circ_policies.api import CircPolicy + circ_policy = 
CircPolicy.provide_circ_policy( - item.organisation_pid, - item.library_pid, - 'ptty1', - 'itty1' + item.organisation_pid, item.library_pid, "ptty1", "itty1" ) # checkout res, data = postdata( client, - 'api_item.checkout', + "api_item.checkout", dict( item_pid=item_pid, patron_pid=patron_pid, transaction_user_pid=librarian_martigny.pid, - transaction_location_pid=loc_public_martigny.pid - ) + transaction_location_pid=loc_public_martigny.pid, + ), ) assert res.status_code == 200 - actions = data.get('action_applied') + actions = data.get("action_applied") loan = actions[LoanAction.CHECKOUT] - end_date = loan.get('end_date') - start_date = loan.get('start_date') - checkout_duration = (ciso8601.parse_datetime( - end_date) - ciso8601.parse_datetime(start_date)).days + end_date = loan.get("end_date") + start_date = loan.get("start_date") + checkout_duration = ( + ciso8601.parse_datetime(end_date) - ciso8601.parse_datetime(start_date) + ).days - assert checkout_duration >= circ_policy.get('checkout_duration') + assert checkout_duration >= circ_policy.get("checkout_duration") # checkin res, _ = postdata( client, - 'api_item.checkin', + "api_item.checkin", dict( item_pid=item_pid, - pid=loan.get('pid'), + pid=loan.get("pid"), transaction_user_pid=librarian_martigny.pid, - transaction_location_pid=loc_public_martigny.pid - ) + transaction_location_pid=loc_public_martigny.pid, + ), ) assert res.status_code == 200 -def test_checkout_library_level_policy(client, lib_martigny, - librarian_martigny, - patron_martigny, - loc_public_martigny, - item_type_standard_martigny, - item_lib_martigny, json_header, - circ_policy_short_martigny): +def test_checkout_library_level_policy( + client, + lib_martigny, + librarian_martigny, + patron_martigny, + loc_public_martigny, + item_type_standard_martigny, + item_lib_martigny, + json_header, + circ_policy_short_martigny, +): """Test circ policy parameters""" login_user_via_session(client, librarian_martigny.user) item = item_lib_martigny @@ -295,46 +290,50 @@ def test_checkout_library_level_policy(client, lib_martigny, # checkout res, data = postdata( client, - 'api_item.checkout', + "api_item.checkout", dict( item_pid=item_pid, patron_pid=patron_pid, transaction_user_pid=librarian_martigny.pid, - transaction_location_pid=loc_public_martigny.pid - ) + transaction_location_pid=loc_public_martigny.pid, + ), ) assert res.status_code == 200 - actions = data.get('action_applied') + actions = data.get("action_applied") loan = actions[LoanAction.CHECKOUT] - end_date = loan.get('end_date') - start_date = loan.get('start_date') - checkout_duration = (ciso8601.parse_datetime( - end_date) - ciso8601.parse_datetime(start_date)).days - assert checkout_duration >= circ_policy_short_martigny.get( - 'checkout_duration') + end_date = loan.get("end_date") + start_date = loan.get("start_date") + checkout_duration = ( + ciso8601.parse_datetime(end_date) - ciso8601.parse_datetime(start_date) + ).days + assert checkout_duration >= circ_policy_short_martigny.get("checkout_duration") # checkin res, _ = postdata( client, - 'api_item.checkin', + "api_item.checkin", dict( item_pid=item_pid, - pid=loan.get('pid'), + pid=loan.get("pid"), transaction_user_pid=librarian_martigny.pid, - transaction_location_pid=loc_public_martigny.pid - ) + transaction_location_pid=loc_public_martigny.pid, + ), ) assert res.status_code == 200 -def test_checkout_organisation_policy(client, lib_martigny, - librarian_martigny, - patron_martigny, - loc_public_martigny, - item_type_standard_martigny, - 
item_lib_martigny, json_header, - circ_policy_short_martigny): +def test_checkout_organisation_policy( + client, + lib_martigny, + librarian_martigny, + patron_martigny, + loc_public_martigny, + item_type_standard_martigny, + item_lib_martigny, + json_header, + circ_policy_short_martigny, +): """Test circ policy parameters""" login_user_via_session(client, librarian_martigny.user) item = item_lib_martigny @@ -344,44 +343,50 @@ def test_checkout_organisation_policy(client, lib_martigny, # checkout res, data = postdata( client, - 'api_item.checkout', + "api_item.checkout", dict( item_pid=item_pid, patron_pid=patron_pid, transaction_user_pid=librarian_martigny.pid, - transaction_location_pid=loc_public_martigny.pid - ) + transaction_location_pid=loc_public_martigny.pid, + ), ) assert res.status_code == 200 - actions = data.get('action_applied') + actions = data.get("action_applied") loan = actions[LoanAction.CHECKOUT] - end_date = loan.get('end_date') - start_date = loan.get('start_date') - checkout_duration = (ciso8601.parse_datetime( - end_date) - ciso8601.parse_datetime(start_date)).days - assert checkout_duration >= circ_policy_short_martigny.get( - 'checkout_duration') + end_date = loan.get("end_date") + start_date = loan.get("start_date") + checkout_duration = ( + ciso8601.parse_datetime(end_date) - ciso8601.parse_datetime(start_date) + ).days + assert checkout_duration >= circ_policy_short_martigny.get("checkout_duration") # checkin res, _ = postdata( client, - 'api_item.checkin', + "api_item.checkin", dict( item_pid=item_pid, - pid=loan.get('pid'), + pid=loan.get("pid"), transaction_user_pid=librarian_martigny.pid, - transaction_location_pid=loc_public_martigny.pid - ) + transaction_location_pid=loc_public_martigny.pid, + ), ) assert res.status_code == 200 -def test_items_receive(client, librarian_martigny, - patron_martigny, loc_public_martigny, - item_type_standard_martigny, loc_restricted_martigny, - item_lib_martigny, json_header, - circulation_policies): +def test_items_receive( + client, + librarian_martigny, + patron_martigny, + loc_public_martigny, + item_type_standard_martigny, + loc_restricted_martigny, + item_lib_martigny, + json_header, + circulation_policies, +): """Test item receive.""" login_user_via_session(client, librarian_martigny.user) item = item_lib_martigny @@ -392,45 +397,50 @@ def test_items_receive(client, librarian_martigny, # checkout res, data = postdata( client, - 'api_item.checkout', + "api_item.checkout", dict( item_pid=item_pid, patron_pid=patron_pid, transaction_user_pid=librarian_martigny.pid, - transaction_location_pid=loc_public_martigny.pid - ) + transaction_location_pid=loc_public_martigny.pid, + ), ) assert res.status_code == 200 - item_data = data.get('metadata') - actions = data.get('action_applied') - assert item_data.get('status') == ItemStatus.ON_LOAN + item_data = data.get("metadata") + actions = data.get("action_applied") + assert item_data.get("status") == ItemStatus.ON_LOAN assert actions.get(LoanAction.CHECKOUT) assert item.patron_has_an_active_loan_on_item(patron_martigny) - loan_pid = actions[LoanAction.CHECKOUT].get('pid') + loan_pid = actions[LoanAction.CHECKOUT].get("pid") # checkin res, data = postdata( client, - 'api_item.checkin', + "api_item.checkin", dict( item_pid=item_pid, pid=loan_pid, transaction_location_pid=loc_restricted_martigny.pid, transaction_user_pid=librarian_martigny.pid, - ) + ), ) assert res.status_code == 200 - item_data = data.get('metadata') - actions = data.get('action_applied') - assert 
item_data.get('status') == ItemStatus.ON_SHELF + item_data = data.get("metadata") + actions = data.get("action_applied") + assert item_data.get("status") == ItemStatus.ON_SHELF assert actions.get(LoanAction.CHECKIN) -def test_items_no_extend(client, librarian_martigny, - patron_martigny, loc_public_martigny, - item_type_standard_martigny, - item_lib_martigny, json_header, - circ_policy_short_martigny): +def test_items_no_extend( + client, + librarian_martigny, + patron_martigny, + loc_public_martigny, + item_type_standard_martigny, + item_lib_martigny, + json_header, + circ_policy_short_martigny, +): """Test items when no renewal is possible.""" login_user_via_session(client, librarian_martigny.user) item = item_lib_martigny @@ -441,76 +451,79 @@ def test_items_no_extend(client, librarian_martigny, # checkout res, data = postdata( client, - 'api_item.checkout', + "api_item.checkout", dict( item_pid=item_pid, patron_pid=patron_pid, transaction_user_pid=librarian_martigny.pid, - transaction_location_pid=loc_public_martigny.pid - ) + transaction_location_pid=loc_public_martigny.pid, + ), ) assert res.status_code == 200 - actions = data.get('action_applied') - loan_pid = actions[LoanAction.CHECKOUT].get('pid') + actions = data.get("action_applied") + loan_pid = actions[LoanAction.CHECKOUT].get("pid") assert not item.get_extension_count() - circ_policy_short_martigny['number_renewals'] = 0 + circ_policy_short_martigny["number_renewals"] = 0 circ_policy_short_martigny.update( - data=circ_policy_short_martigny, - dbcommit=True, - reindex=True) - flush_index(CircPoliciesSearch.Meta.index) + data=circ_policy_short_martigny, dbcommit=True, reindex=True + ) + CircPoliciesSearch.flush_and_refresh() # extend loan res, _ = postdata( client, - 'api_item.extend_loan', + "api_item.extend_loan", dict( item_pid=item_pid, pid=loan_pid, transaction_user_pid=librarian_martigny.pid, - transaction_location_pid=loc_public_martigny.pid - ) + transaction_location_pid=loc_public_martigny.pid, + ), ) assert res.status_code == 403 - circ_policy_short_martigny['number_renewals'] = 1 + circ_policy_short_martigny["number_renewals"] = 1 circ_policy_short_martigny.update( - data=circ_policy_short_martigny, - dbcommit=True, - reindex=True) - flush_index(CircPoliciesSearch.Meta.index) + data=circ_policy_short_martigny, dbcommit=True, reindex=True + ) + CircPoliciesSearch.flush_and_refresh() # checkin res, _ = postdata( client, - 'api_item.checkin', + "api_item.checkin", dict( item_pid=item_pid, pid=loan_pid, transaction_user_pid=librarian_martigny.pid, - transaction_location_pid=loc_public_martigny.pid - ) + transaction_location_pid=loc_public_martigny.pid, + ), ) assert res.status_code == 200 -def test_items_deny_requests(client, librarian_martigny, - patron_martigny, loc_public_martigny, - item_type_standard_martigny, lib_martigny, - item_lib_martigny, json_header, - circ_policy_short_martigny): +def test_items_deny_requests( + client, + librarian_martigny, + patron_martigny, + loc_public_martigny, + item_type_standard_martigny, + lib_martigny, + item_lib_martigny, + json_header, + circ_policy_short_martigny, +): """Test items when requests are denied.""" location = loc_public_martigny - circ_policy_short_martigny['allow_requests'] = False + circ_policy_short_martigny["allow_requests"] = False circ_policy_short_martigny.update( - data=circ_policy_short_martigny, - dbcommit=True, - reindex=True) - flush_index(CircPoliciesSearch.Meta.index) + data=circ_policy_short_martigny, dbcommit=True, reindex=True ) + 
CircPoliciesSearch.flush_and_refresh() login_user_via_session(client, librarian_martigny.user) item = item_lib_martigny item_pid = item.pid @@ -520,124 +533,118 @@ def test_items_deny_requests(client, librarian_martigny, # request res, _ = postdata( client, - 'api_item.librarian_request', + "api_item.librarian_request", dict( item_pid=item_pid, pickup_location_pid=location.pid, patron_pid=patron_pid, transaction_user_pid=librarian_martigny.pid, - transaction_location_pid=loc_public_martigny.pid - ) + transaction_location_pid=loc_public_martigny.pid, + ), ) assert res.status_code == 403 # test can_request when the circulation policy does not allow requests res = client.get( url_for( - 'api_item.can_request', + "api_item.can_request", item_pid=item_pid, library_pid=lib_martigny.pid, - patron_barcode=patron.patron.get('barcode') + patron_barcode=patron.patron.get("barcode"), ) ) assert res.status_code == 200 data = get_json(res) - assert not data.get('can_request') + assert not data.get("can_request") - circ_policy_short_martigny['allow_requests'] = True + circ_policy_short_martigny["allow_requests"] = True circ_policy_short_martigny.update( - data=circ_policy_short_martigny, - dbcommit=True, - reindex=True) - flush_index(CircPoliciesSearch.Meta.index) - assert circ_policy_short_martigny.get('allow_requests') - - -def test_extend_possible_actions(client, item_lib_martigny, - loc_public_martigny, - librarian_martigny, - patron_martigny, - circ_policy_short_martigny): + data=circ_policy_short_martigny, dbcommit=True, reindex=True + ) + CircPoliciesSearch.flush_and_refresh() + assert circ_policy_short_martigny.get("allow_requests") + + +def test_extend_possible_actions( + client, + item_lib_martigny, + loc_public_martigny, + librarian_martigny, + patron_martigny, + circ_policy_short_martigny, +): """Extend action changes according to the circulation policy (cipo) parameters.""" login_user_via_session(client, librarian_martigny.user) item = item_lib_martigny patron_pid = patron_martigny.pid res, _ = postdata( client, - 'api_item.checkout', + "api_item.checkout", dict( item_pid=item.pid, patron_pid=patron_pid, transaction_user_pid=librarian_martigny.pid, - transaction_location_pid=loc_public_martigny.pid - ) + transaction_location_pid=loc_public_martigny.pid, + ), ) # check the item now appears in the patron's loaned items - res = client.get( - url_for('api_item.loans', patron_pid=patron_pid) - ) + res = client.get(url_for("api_item.loans", patron_pid=patron_pid)) assert res.status_code == 200 data = get_json(res) - assert data['hits']['total']['value'] == 1 - hit = data.get('hits').get('hits')[0].get('item') - assert hit.get('barcode') == item.get('barcode') + assert data["hits"]["total"]["value"] == 1 + hit = data.get("hits").get("hits")[0].get("item") + assert hit.get("barcode") == item.get("barcode") # check the item can be checked-in - res = client.get( - url_for('api_item.item', item_barcode=item.get('barcode')) - ) + res = client.get(url_for("api_item.item", item_barcode=item.get("barcode"))) assert res.status_code == 200 data = get_json(res) - actions = data.get('metadata').get('item').get('actions', []) - assert 'checkin' in actions + actions = data.get("metadata").get("item").get("actions", []) + assert "checkin" in actions from rero_ils.modules.circ_policies.api import CircPolicy + circ_policy = CircPolicy.provide_circ_policy( - item.organisation_pid, - item.library_pid, - 'ptty1', - 'itty1' + item.organisation_pid, item.library_pid, "ptty1", "itty1" ) - circ_policy['number_renewals'] = 0 + circ_policy["number_renewals"] 
= 0 circ_policy.update(circ_policy, dbcommit=True, reindex=True) - res = client.get( - url_for('api_item.item', item_barcode=item.get('barcode')) - ) + res = client.get(url_for("api_item.item", item_barcode=item.get("barcode"))) assert res.status_code == 200 data = get_json(res) - actions = data.get('metadata').get('item').get('actions', []) - assert 'extend_loan' not in actions - assert 'checkin' in actions + actions = data.get("metadata").get("item").get("actions", []) + assert "extend_loan" not in actions + assert "checkin" in actions # reset used objects - loan_pid = data.get('metadata').get('loan').get('pid') + loan_pid = data.get("metadata").get("loan").get("pid") res, _ = postdata( client, - 'api_item.checkin', + "api_item.checkin", dict( item_pid=item.pid, pid=loan_pid, transaction_user_pid=librarian_martigny.pid, - transaction_location_pid=loc_public_martigny.pid - ) + transaction_location_pid=loc_public_martigny.pid, + ), ) assert res.status_code == 200 - circ_policy['number_renewals'] = 1 - circ_policy.update( - circ_policy, - dbcommit=True, - reindex=True - ) - assert circ_policy['number_renewals'] == 1 - - -def test_items_extend_end_date(client, librarian_martigny, - patron_martigny, - loc_public_martigny, - item_type_standard_martigny, - item_lib_martigny, json_header, - circ_policy_short_martigny): + circ_policy["number_renewals"] = 1 + circ_policy.update(circ_policy, dbcommit=True, reindex=True) + assert circ_policy["number_renewals"] == 1 + + +def test_items_extend_end_date( + client, + librarian_martigny, + patron_martigny, + loc_public_martigny, + item_type_standard_martigny, + item_lib_martigny, + json_header, + circ_policy_short_martigny, +): """Test correct renewal due date for items.""" login_user_via_session(client, librarian_martigny.user) item = item_lib_martigny @@ -647,81 +654,84 @@ def test_items_extend_end_date(client, librarian_martigny, # checkout res, data = postdata( client, - 'api_item.checkout', + "api_item.checkout", dict( item_pid=item_pid, patron_pid=patron_pid, transaction_user_pid=librarian_martigny.pid, - transaction_location_pid=loc_public_martigny.pid - ) + transaction_location_pid=loc_public_martigny.pid, + ), ) assert res.status_code == 200 - actions = data.get('action_applied') - loan_pid = actions[LoanAction.CHECKOUT].get('pid') + actions = data.get("action_applied") + loan_pid = actions[LoanAction.CHECKOUT].get("pid") loan = Loan.get_record_by_pid(loan_pid) assert not item.get_extension_count() - renewal_duration_policy = circ_policy_short_martigny['renewal_duration'] + renewal_duration_policy = circ_policy_short_martigny["renewal_duration"] renewal_duration = get_extension_params( - loan=loan, parameter_name='duration_default') + loan=loan, parameter_name="duration_default" + ) assert renewal_duration_policy <= renewal_duration.days # Update loan end_date to allow direct renewal - loan['end_date'] = loan['start_date'] + loan["end_date"] = loan["start_date"] loan.update(loan, dbcommit=True, reindex=True) # extend loan res, data = postdata( client, - 'api_item.extend_loan', + "api_item.extend_loan", dict( item_pid=item_pid, pid=loan_pid, transaction_user_pid=librarian_martigny.pid, - transaction_location_pid=loc_public_martigny.pid - ) + transaction_location_pid=loc_public_martigny.pid, + ), ) assert res.status_code == 200 # Compare expected loan date with processed one # first get loan UTC date - actions = data.get('action_applied') - loan_pid = actions[LoanAction.EXTEND].get('pid') + actions = data.get("action_applied") + loan_pid = 
actions[LoanAction.EXTEND].get("pid") loan = Loan.get_record_by_pid(loan_pid) - loan_date = loan.get('end_date') + loan_date = loan.get("end_date") # then process a date with current UTC date + renewal current_date = datetime.now(timezone.utc) calc_date = current_date + renewal_duration # finally the comparison should give the same date (in UTC)! - assert ( - calc_date.strftime('%Y-%m-%d') == ciso8601.parse_datetime( - loan_date).astimezone(timezone.utc).strftime('%Y-%m-%d') - ) + assert calc_date.strftime("%Y-%m-%d") == ciso8601.parse_datetime( + loan_date + ).astimezone(timezone.utc).strftime("%Y-%m-%d") # checkin res, _ = postdata( client, - 'api_item.checkin', + "api_item.checkin", dict( item_pid=item_pid, pid=loan_pid, transaction_user_pid=librarian_martigny.pid, - transaction_location_pid=loc_public_martigny.pid - ) + transaction_location_pid=loc_public_martigny.pid, + ), ) assert res.status_code == 200 -def test_multiple_loans_on_item_error(client, - patron_martigny, - patron2_martigny, - loc_public_martigny, - item_type_standard_martigny, - item_lib_martigny, json_header, - circulation_policies, - loc_public_fully, - librarian_martigny): +def test_multiple_loans_on_item_error( + client, + patron_martigny, + patron2_martigny, + loc_public_martigny, + item_type_standard_martigny, + item_lib_martigny, + json_header, + circulation_policies, + loc_public_fully, + librarian_martigny, +): """Test MultipleLoansOnItemError.""" login_user_via_session(client, librarian_martigny.user) item = item_lib_martigny @@ -731,131 +741,140 @@ def test_multiple_loans_on_item_error(client, # checkout to checked_patron res, data = postdata( client, - 'api_item.checkout', + "api_item.checkout", dict( item_pid=item.pid, patron_pid=checked_patron, transaction_location_pid=location.pid, - transaction_user_pid=librarian_martigny.pid - ) + transaction_user_pid=librarian_martigny.pid, + ), ) assert res.status_code == 200 - assert Item.get_record_by_pid(item.pid).get('status') == ItemStatus.ON_LOAN - item_data = data.get('metadata') - actions = data.get('action_applied') - assert item_data.get('status') == ItemStatus.ON_LOAN + assert Item.get_record_by_pid(item.pid).get("status") == ItemStatus.ON_LOAN + item_data = data.get("metadata") + actions = data.get("action_applied") + assert item_data.get("status") == ItemStatus.ON_LOAN assert actions.get(LoanAction.CHECKOUT) - loan_pid = actions[LoanAction.CHECKOUT].get('pid') + loan_pid = actions[LoanAction.CHECKOUT].get("pid") item = Item.get_record_by_pid(item.pid) # request by requested patron to pick at another location res, data = postdata( client, - 'api_item.librarian_request', + "api_item.librarian_request", dict( item_pid=item.pid, pickup_location_pid=loc_public_fully.pid, patron_pid=requested_patron, transaction_user_pid=librarian_martigny.pid, - transaction_location_pid=loc_public_martigny.pid - ) + transaction_location_pid=loc_public_martigny.pid, + ), ) assert res.status_code == 200 - item_data = data.get('metadata') - actions = data.get('action_applied') - assert item_data.get('status') == ItemStatus.ON_LOAN + item_data = data.get("metadata") + actions = data.get("action_applied") + assert item_data.get("status") == ItemStatus.ON_LOAN assert actions.get(LoanAction.REQUEST) - req_loan_pid = actions[LoanAction.REQUEST].get('pid') + req_loan_pid = actions[LoanAction.REQUEST].get("pid") item = Item.get_record_by_pid(item.pid) # checkin at the request location res, data = postdata( client, - 'api_item.checkin', + "api_item.checkin", dict( item_pid=item.pid, 
pid=loan_pid, transaction_location_pid=loc_public_fully.pid, - transaction_user_pid=librarian_martigny.pid - ) + transaction_user_pid=librarian_martigny.pid, + ), ) assert res.status_code == 200 # test the returned three actions - loans = data.get('action_applied') + loans = data.get("action_applied") checked_in_loan = loans.get(LoanAction.CHECKIN) cancelled_loan = loans.get(LoanAction.CANCEL) validated_loan = loans.get(LoanAction.VALIDATE) - assert checked_in_loan.get('pid') == cancelled_loan.get('pid') - assert validated_loan.get('pid') == req_loan_pid + assert checked_in_loan.get("pid") == cancelled_loan.get("pid") + assert validated_loan.get("pid") == req_loan_pid - assert Loan.get_record_by_pid(loan_pid).get('state') == 'CANCELLED' + assert Loan.get_record_by_pid(loan_pid).get("state") == "CANCELLED" new_loan = Loan.get_record_by_pid(req_loan_pid) - assert new_loan.get('state') == 'ITEM_AT_DESK' - assert Item.get_record_by_pid(item.pid).get('status') == \ - ItemStatus.AT_DESK + assert new_loan.get("state") == "ITEM_AT_DESK" + assert Item.get_record_by_pid(item.pid).get("status") == ItemStatus.AT_DESK # cancel request res, _ = postdata( client, - 'api_item.cancel_item_request', + "api_item.cancel_item_request", dict( item_pid=item.pid, pid=req_loan_pid, transaction_user_pid=librarian_martigny.pid, - transaction_location_pid=loc_public_martigny.pid - ) + transaction_location_pid=loc_public_martigny.pid, + ), ) assert res.status_code == 200 def test_filtered_items_get( - client, librarian_martigny, item_lib_martigny, - item_lib_saxon, item_lib_fully, - item_lib_sion, patron_sion): + client, + librarian_martigny, + item_lib_martigny, + item_lib_saxon, + item_lib_fully, + item_lib_sion, + patron_sion, +): """Test items filter by organisation.""" # Librarian Martigny login_user_via_session(client, librarian_martigny.user) - list_url = url_for('invenio_records_rest.item_list') + list_url = url_for("invenio_records_rest.item_list") res = client.get(list_url) assert res.status_code == 200 data = get_json(res) - assert data['hits']['total']['value'] == 4 + assert data["hits"]["total"]["value"] == 4 # Patron Sion login_user_via_session(client, patron_sion.user) - list_url = url_for('invenio_records_rest.item_list', view='org2') + list_url = url_for("invenio_records_rest.item_list", view="org2") res = client.get(list_url) assert res.status_code == 200 data = get_json(res) - assert data['hits']['total']['value'] == 1 + assert data["hits"]["total"]["value"] == 1 def test_local_fields_items_get( - client, librarian_martigny, item_lib_martigny, - item_lib_fully, local_field_3_martigny): + client, + librarian_martigny, + item_lib_martigny, + item_lib_fully, + local_field_3_martigny, +): """Test items filter by local_fields.""" # Librarian Martigny login_user_via_session(client, librarian_martigny.user) - list_url = url_for('invenio_records_rest.item_list', - q='local_fields.fields.field_1:testfield1') + list_url = url_for( + "invenio_records_rest.item_list", q="local_fields.fields.field_1:testfield1" + ) res = client.get(list_url) assert res.status_code == 200 data = get_json(res) - assert data['hits']['total']['value'] == 1 + assert data["hits"]["total"]["value"] == 1 - list_url = url_for('invenio_records_rest.item_list', - q='local_fields.fields.field_1:testfield2') + list_url = url_for( + "invenio_records_rest.item_list", q="local_fields.fields.field_1:testfield2" + ) res = client.get(list_url) assert res.status_code == 200 data = get_json(res) - assert data['hits']['total']['value'] == 0 + assert 
data["hits"]["total"]["value"] == 0 -def test_items_notes(client, librarian_martigny, item_lib_martigny, - json_header): +def test_items_notes(client, librarian_martigny, item_lib_martigny, json_header): """Test items notes.""" item = item_lib_martigny @@ -865,33 +884,32 @@ def test_items_notes(client, librarian_martigny, item_lib_martigny, assert len(item.notes) == 1 # set one public & one staff note - item['notes'] = [ - {'type': ItemNoteTypes.GENERAL, 'content': 'Public note'}, - {'type': ItemNoteTypes.STAFF, 'content': 'Staff note'} + item["notes"] = [ + {"type": ItemNoteTypes.GENERAL, "content": "Public note"}, + {"type": ItemNoteTypes.STAFF, "content": "Staff note"}, ] res = client.put( - url_for('invenio_records_rest.item_item', pid_value=item.pid), + url_for("invenio_records_rest.item_item", pid_value=item.pid), data=json.dumps(item), - headers=json_header + headers=json_header, ) assert res.status_code == 200 # add a second public note -- This should fail because we can only have one # note of each type for an item - item['notes'].append( - {'type': ItemNoteTypes.GENERAL, 'content': 'Second public note'} + item["notes"].append( + {"type": ItemNoteTypes.GENERAL, "content": "Second public note"} ) res = client.put( - url_for('invenio_records_rest.item_item', pid_value=item.pid), + url_for("invenio_records_rest.item_item", pid_value=item.pid), data=json.dumps(item), - headers=json_header + headers=json_header, ) assert get_json(res) == { - 'status': 400, - 'message': - 'Validation error: Can not have multiple notes of the same type..' + "status": 400, + "message": "Validation error: Can not have multiple notes of the same type..", } - item['notes'] = item.notes[:-1] + item["notes"] = item.notes[:-1] # get a specific type of notes # --> public : should return a note @@ -899,14 +917,21 @@ def test_items_notes(client, librarian_martigny, item_lib_martigny, # --> dummy : should never return something ! assert item.get_note(ItemNoteTypes.GENERAL) assert item.get_note(ItemNoteTypes.CHECKIN) is None - assert item.get_note('dummy') is None + assert item.get_note("dummy") is None def test_requested_loans_to_validate( - client, librarian_martigny, loc_public_martigny, - loc_restricted_martigny, item_type_standard_martigny, - item2_lib_martigny, json_header, item_type_missing_martigny, - patron_sion, circulation_policies): + client, + librarian_martigny, + loc_public_martigny, + loc_restricted_martigny, + item_type_standard_martigny, + item2_lib_martigny, + json_header, + item_type_missing_martigny, + patron_sion, + circulation_policies, +): """Test requested loans to validate.""" holding_pid = item2_lib_martigny.holding_pid @@ -919,210 +944,207 @@ def test_requested_loans_to_validate( # forget to reset data before leaving method. 
holding_pid = item2_lib_martigny.holding_pid holding = Holding.get_record_by_pid(holding_pid) - holding['call_number'] = item2_lib_martigny.pop('call_number', None) - item2_lib_martigny['item_type'] = { - '$ref': get_ref_for_pid('itty', item_type_missing_martigny.pid) + holding["call_number"] = item2_lib_martigny.pop("call_number", None) + item2_lib_martigny["item_type"] = { + "$ref": get_ref_for_pid("itty", item_type_missing_martigny.pid) } - item2_lib_martigny['temporary_item_type'] = { - '$ref': get_ref_for_pid('itty', item_type_standard_martigny.pid) + item2_lib_martigny["temporary_item_type"] = { + "$ref": get_ref_for_pid("itty", item_type_standard_martigny.pid) } - item2_lib_martigny['temporary_location'] = { - '$ref': get_ref_for_pid('loc', loc_restricted_martigny.pid) + item2_lib_martigny["temporary_location"] = { + "$ref": get_ref_for_pid("loc", loc_restricted_martigny.pid) } holding.update(holding, dbcommit=True, reindex=True) item2_lib_martigny.update(item2_lib_martigny, dbcommit=True, reindex=True) - library_pid = librarian_martigny.replace_refs()['libraries'][0]['pid'] + library_pid = librarian_martigny.replace_refs()["libraries"][0]["pid"] login_user_via_session(client, librarian_martigny.user) res, _ = postdata( client, - 'api_item.librarian_request', + "api_item.librarian_request", dict( item_pid=item2_lib_martigny.pid, patron_pid=patron_sion.pid, pickup_location_pid=loc_public_martigny.pid, transaction_user_pid=librarian_martigny.pid, - transaction_location_pid=loc_public_martigny.pid - ) + transaction_location_pid=loc_public_martigny.pid, + ), ) - res = client.get( - url_for('api_item.requested_loans', library_pid=library_pid)) + res = client.get(url_for("api_item.requested_loans", library_pid=library_pid)) assert res.status_code == 200 data = get_json(res) - assert 1 == data['hits']['total']['value'] - requested_loan = data['hits']['hits'][0] - assert item2_lib_martigny.pid == requested_loan['item']['pid'] - assert item2_lib_martigny.pid == \ - requested_loan['loan']['item_pid']['value'] - assert LoanState.PENDING == requested_loan['loan']['state'] - assert patron_sion.pid == requested_loan['loan']['patron_pid'] + assert 1 == data["hits"]["total"]["value"] + requested_loan = data["hits"]["hits"][0] + assert item2_lib_martigny.pid == requested_loan["item"]["pid"] + assert item2_lib_martigny.pid == requested_loan["loan"]["item_pid"]["value"] + assert LoanState.PENDING == requested_loan["loan"]["state"] + assert patron_sion.pid == requested_loan["loan"]["patron_pid"] - assert requested_loan['item']['temporary_location']['name'] + assert requested_loan["item"]["temporary_location"]["name"] # RESET - the item - del item2_lib_martigny['temporary_item_type'] - del item2_lib_martigny['temporary_location'] + del item2_lib_martigny["temporary_item_type"] + del item2_lib_martigny["temporary_location"] holding.update(original_holding, dbcommit=True, reindex=True) item2_lib_martigny.update(original_item, dbcommit=True, reindex=True) -def test_patron_request(client, patron_martigny, loc_public_martigny, - item_lib_martigny, circulation_policies): +def test_patron_request( + client, + patron_martigny, + loc_public_martigny, + item_lib_martigny, + circulation_policies, +): """Test patron request.""" login_user_via_session(client, patron_martigny.user) res, data = postdata( client, - 'api_item.patron_request', + "api_item.patron_request", dict( - item_pid=item_lib_martigny.pid, - pickup_location_pid=loc_public_martigny.pid - ) + item_pid=item_lib_martigny.pid, 
pickup_location_pid=loc_public_martigny.pid + ), ) assert res.status_code == 200 - loan_pid = data.get('action_applied')[LoanAction.REQUEST].get('pid') + loan_pid = data.get("action_applied")[LoanAction.REQUEST].get("pid") params = { - 'pid': loan_pid, - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': patron_martigny.pid + "pid": loan_pid, + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": patron_martigny.pid, } item_lib_martigny.cancel_item_request(**params) def test_requests_with_different_locations( - client, patron_martigny, librarian_saxon, loc_public_saxon, - loc_public_martigny, item_lib_martigny, circulation_policies, lib_saxon + client, + patron_martigny, + librarian_saxon, + loc_public_saxon, + loc_public_martigny, + item_lib_martigny, + circulation_policies, + lib_saxon, ): """Test patron and librarian request with different locations.""" login_user_via_session(client, patron_martigny.user) - loc_public_saxon['allow_request'] = False + loc_public_saxon["allow_request"] = False loc_public_saxon.update(loc_public_saxon, True, True) res, data = postdata( client, - 'api_item.patron_request', - dict( - item_pid=item_lib_martigny.pid, - pickup_location_pid=loc_public_saxon.pid - ) + "api_item.patron_request", + dict(item_pid=item_lib_martigny.pid, pickup_location_pid=loc_public_saxon.pid), ) assert res.status_code == 200 - loan_pid = data.get('action_applied')[LoanAction.REQUEST].get('pid') + loan_pid = data.get("action_applied")[LoanAction.REQUEST].get("pid") params = { - 'pid': loan_pid, - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': patron_martigny.pid + "pid": loan_pid, + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": patron_martigny.pid, } item_lib_martigny.cancel_item_request(**params) login_user_via_session(client, librarian_saxon.user) res, data = postdata( client, - 'api_item.librarian_request', + "api_item.librarian_request", dict( item_pid=item_lib_martigny.pid, patron_pid=patron_martigny.pid, pickup_location_pid=loc_public_martigny.pid, transaction_library_pid=lib_saxon.pid, - transaction_user_pid=librarian_saxon.pid - ) + transaction_user_pid=librarian_saxon.pid, + ), ) assert res.status_code == 200 - loan_pid = data.get('action_applied')[LoanAction.REQUEST].get('pid') + loan_pid = data.get("action_applied")[LoanAction.REQUEST].get("pid") params = { - 'pid': loan_pid, - 'transaction_location_pid': loc_public_saxon.pid, - 'transaction_user_pid': librarian_saxon.pid + "pid": loan_pid, + "transaction_location_pid": loc_public_saxon.pid, + "transaction_user_pid": librarian_saxon.pid, } item_lib_martigny.cancel_item_request(**params) - loc_public_saxon['allow_request'] = True + loc_public_saxon["allow_request"] = True loc_public_saxon.update(loc_public_saxon, True, True) -def test_item_possible_actions(client, item_lib_martigny, - librarian_martigny, - patron_martigny, - circulation_policies): +def test_item_possible_actions( + client, item_lib_martigny, librarian_martigny, patron_martigny, circulation_policies +): """Possible action changes according to params of cipo.""" login_user_via_session(client, librarian_martigny.user) item = item_lib_martigny patron_pid = patron_martigny.pid res = client.get( url_for( - 'api_item.item', - item_barcode=item.get('barcode'), - patron_pid=patron_pid + "api_item.item", item_barcode=item.get("barcode"), patron_pid=patron_pid ) ) data = get_json(res) assert res.status_code == 200 - actions = 
data.get('metadata').get('item').get('actions') - assert 'checkout' in actions + actions = data.get("metadata").get("item").get("actions") + assert "checkout" in actions from rero_ils.modules.circ_policies.api import CircPolicy + circ_policy = CircPolicy.provide_circ_policy( - item.organisation_pid, - item.library_pid, - 'ptty1', - 'itty1' + item.organisation_pid, item.library_pid, "ptty1", "itty1" ) - original_checkout_duration = circ_policy.get('checkout_duration') + original_checkout_duration = circ_policy.get("checkout_duration") if original_checkout_duration is not None: - del circ_policy['checkout_duration'] - circ_policy.update( - circ_policy, - dbcommit=True, - reindex=True - ) + del circ_policy["checkout_duration"] + circ_policy.update(circ_policy, dbcommit=True, reindex=True) res = client.get( url_for( - 'api_item.item', - item_barcode=item.get('barcode'), - patron_pid=patron_pid + "api_item.item", item_barcode=item.get("barcode"), patron_pid=patron_pid ) ) assert res.status_code == 200 data = get_json(res) - actions = data.get('metadata').get('item').get('actions') - assert 'checkout' not in actions + actions = data.get("metadata").get("item").get("actions") + assert "checkout" not in actions if original_checkout_duration is not None: - circ_policy['checkout_duration'] = original_checkout_duration - circ_policy.update( - circ_policy, - dbcommit=True, - reindex=True - ) + circ_policy["checkout_duration"] = original_checkout_duration + circ_policy.update(circ_policy, dbcommit=True, reindex=True) assert circ_policy.can_checkout def test_items_facets( - client, librarian_martigny, rero_json_header, + client, + librarian_martigny, + rero_json_header, item_lib_martigny, # on shelf item_lib_fully, # on loan ): """Test record retrieval.""" login_user_via_session(client, librarian_martigny.user) - list_url = url_for('invenio_records_rest.item_list') + list_url = url_for("invenio_records_rest.item_list") response = client.get(list_url, headers=rero_json_header) assert response.status_code == 200 facet_names = [ - 'document_type', 'item_type', 'library', 'location', - 'status', 'temporary_item_type', 'temporary_location', 'vendor', - 'claims_count', 'claims_date', 'current_requests' + "document_type", + "item_type", + "library", + "location", + "status", + "temporary_item_type", + "temporary_location", + "vendor", + "claims_count", + "claims_date", + "current_requests", ] - assert all( - name in response.json['aggregations'] - for name in facet_names - ) + assert all(name in response.json["aggregations"] for name in facet_names) def test_items_rest_api_sort( @@ -1130,54 +1152,53 @@ def test_items_rest_api_sort( ): """Test sorting option on `Item` REST API endpoints.""" - item_lib_fully['second_call_number'] = 'second_call_number' + item_lib_fully["second_call_number"] = "second_call_number" item_lib_fully.update(item_lib_fully, dbcommit=True, reindex=True) - flush_index(ItemsSearch.Meta.index) + ItemsSearch.flush_and_refresh() # STEP 1 :: Sort on 'call_number' # * Ensure sort on `call_number` is possible # * Ensure `call_number.raw` ES field contains correct value - url = url_for('invenio_records_rest.item_list', sort='call_number') + url = url_for("invenio_records_rest.item_list", sort="call_number") response = client.get(url, headers=rero_json_header) assert response.status_code == 200 data = response.json - first_hit = data['hits']['hits'][0]['metadata'] - assert first_hit['call_number'] == item_lib_martigny['call_number'] + first_hit = data["hits"]["hits"][0]["metadata"] + assert 
first_hit["call_number"] == item_lib_martigny["call_number"] url = url_for( - 'invenio_records_rest.item_list', - q=f'call_number.raw:{item_lib_martigny["call_number"]}' + "invenio_records_rest.item_list", + q=f'call_number.raw:{item_lib_martigny["call_number"]}', ) response = client.get(url, headers=rero_json_header) assert response.status_code == 200 - assert response.json['hits']['total']['value'] == 1 + assert response.json["hits"]["total"]["value"] == 1 # STEP 2 :: Sort on 'second_call_number' # * Ensure sort `second_call_number` is possible # * Ensure `second_call_number.raw` ES field contains correct value - url = url_for('invenio_records_rest.item_list', sort='second_call_number') + url = url_for("invenio_records_rest.item_list", sort="second_call_number") response = client.get(url, headers=rero_json_header) assert response.status_code == 200 data = response.json - first_hit = data['hits']['hits'][0]['metadata'] - assert first_hit['second_call_number'] == \ - item_lib_fully['second_call_number'] + first_hit = data["hits"]["hits"][0]["metadata"] + assert first_hit["second_call_number"] == item_lib_fully["second_call_number"] url = url_for( - 'invenio_records_rest.item_list', - q=f'second_call_number.raw:"{item_lib_fully["second_call_number"]}"' + "invenio_records_rest.item_list", + q=f'second_call_number.raw:"{item_lib_fully["second_call_number"]}"', ) response = client.get(url, headers=rero_json_header) assert response.status_code == 200 - assert response.json['hits']['total']['value'] == 1 + assert response.json["hits"]["total"]["value"] == 1 url = url_for( - 'invenio_records_rest.item_list', - q=f'second_call_number.raw:"{item_lib_fully["second_call_number"]} "' + "invenio_records_rest.item_list", + q=f'second_call_number.raw:"{item_lib_fully["second_call_number"]} "', ) response = client.get(url, headers=rero_json_header) assert response.status_code == 200 - assert response.json['hits']['total']['value'] == 0 + assert response.json["hits"]["total"]["value"] == 0 # Reset fixtures - del item_lib_fully['second_call_number'] + del item_lib_fully["second_call_number"] item_lib_fully.update(item_lib_fully, dbcommit=True, reindex=True) diff --git a/tests/api/items/test_items_rest_views.py b/tests/api/items/test_items_rest_views.py index 03d0997a79..e24ca073b5 100644 --- a/tests/api/items/test_items_rest_views.py +++ b/tests/api/items/test_items_rest_views.py @@ -17,6 +17,8 @@ # along with this program. If not, see . """Tests REST API items.""" +import time + from elasticsearch_dsl.search import Response from flask import url_for from invenio_accounts.testutils import login_user_via_session @@ -25,186 +27,189 @@ from rero_ils.modules.items.api import Item from rero_ils.modules.operation_logs.api import OperationLogsSearch +from rero_ils.modules.stats.api.api import StatsSearch def test_item_stats_endpoint( - item_at_desk_martigny_patron_and_loan_at_desk, - client, librarian_martigny + item_at_desk_martigny_patron_and_loan_at_desk, client, librarian_martigny ): """Test loan filter on stats endpoint with real data.""" + item, _, _ = item_at_desk_martigny_patron_and_loan_at_desk + StatsSearch.flush_and_refresh() + time.sleep(1) # TODO :: find a better way to wait for stats. 
login_user_via_session(client, librarian_martigny.user) - res = client.get(url_for( - 'api_item.stats', - item_pid=item_at_desk_martigny_patron_and_loan_at_desk[0].pid - )) - assert res.json['total']['request'] == 1 + res = client.get( + url_for( + "api_item.stats", + item_pid=item.pid, + ) + ) + assert res.json["total"]["request"] == 1 -def test_item_dumps(client, item_lib_martigny, org_martigny, - librarian_martigny): +def test_item_dumps(client, item_lib_martigny, org_martigny, librarian_martigny): """Test item dumps and Elasticsearch version.""" item_dumps = Item(item_lib_martigny.dumps()).replace_refs() - assert item_dumps.get('organisation').get('pid') == org_martigny.pid + assert item_dumps.get("organisation").get("pid") == org_martigny.pid login_user_via_session(client, librarian_martigny.user) - record_url = url_for('invenio_records_rest.item_item', - pid_value=item_lib_martigny.pid) + record_url = url_for( + "invenio_records_rest.item_item", pid_value=item_lib_martigny.pid + ) res = client.get(record_url) assert res.status_code == 200 - item_es = Item(get_json(res).get('metadata')) + item_es = Item(get_json(res).get("metadata")) assert item_es.is_available() assert item_es.organisation_pid == org_martigny.pid -def test_patron_checkouts_order(client, librarian_martigny, - patron_martigny, loc_public_martigny, - item_type_standard_martigny, - item3_lib_martigny, json_header, - item4_lib_martigny, - circulation_policies): +def test_patron_checkouts_order( + client, + librarian_martigny, + patron_martigny, + loc_public_martigny, + item_type_standard_martigny, + item3_lib_martigny, + json_header, + item4_lib_martigny, + circulation_policies, +): """Test sort of checkout loans.""" login_user_via_session(client, librarian_martigny.user) res, _ = postdata( client, - 'api_item.checkout', + "api_item.checkout", dict( item_pid=item3_lib_martigny.pid, patron_pid=patron_martigny.pid, transaction_user_pid=librarian_martigny.pid, - transaction_location_pid=loc_public_martigny.pid + transaction_location_pid=loc_public_martigny.pid, ), ) assert res.status_code == 200 res, _ = postdata( client, - 'api_item.checkout', + "api_item.checkout", dict( item_pid=item4_lib_martigny.pid, patron_pid=patron_martigny.pid, transaction_user_pid=librarian_martigny.pid, - transaction_location_pid=loc_public_martigny.pid + transaction_location_pid=loc_public_martigny.pid, ), ) assert res.status_code == 200 # sort by transaction_date asc res = client.get( - url_for( - 'api_item.loans', patron_pid=patron_martigny.pid, - sort='_created')) + url_for("api_item.loans", patron_pid=patron_martigny.pid, sort="_created") + ) assert res.status_code == 200 data = get_json(res) - items = data['hits']['hits'] + items = data["hits"]["hits"] - assert items[0]['item']['pid'] == item3_lib_martigny.pid - assert items[1]['item']['pid'] == item4_lib_martigny.pid + assert items[0]["item"]["pid"] == item3_lib_martigny.pid + assert items[1]["item"]["pid"] == item4_lib_martigny.pid # sort by transaction_date desc res = client.get( url_for( - 'api_item.loans', patron_pid=patron_martigny.pid, - sort='-transaction_date')) + "api_item.loans", patron_pid=patron_martigny.pid, sort="-transaction_date" + ) + ) assert res.status_code == 200 data = get_json(res) - items = data['hits']['hits'] + items = data["hits"]["hits"] - assert items[0]['item']['pid'] == item4_lib_martigny.pid - assert items[1]['item']['pid'] == item3_lib_martigny.pid + assert items[0]["item"]["pid"] == item4_lib_martigny.pid + assert items[1]["item"]["pid"] == 
item3_lib_martigny.pid # sort by invalid field res = client.get( - url_for( - 'api_item.loans', patron_pid=patron_martigny.pid, - sort='does not exist')) + url_for("api_item.loans", patron_pid=patron_martigny.pid, sort="does not exist") + ) assert res.status_code == 500 data = get_json(res) - assert 'RequestError(400' in data['status'] + assert "RequestError(400" in data["status"] -def test_item_stats( - app, client, librarian_martigny, item_lib_martigny -): +def test_item_stats(app, client, librarian_martigny, item_lib_martigny): """Test item stats.""" # A mock on the answer has been created, because it is not possible # to freeze on the date, because the string "now-1y" passed to # the configuration of the "year" facet is calculated # on the Elasticsearch instance. - es_response = Response(OperationLogsSearch(), { - 'aggregations': { - 'trigger': { - 'buckets': [{ - 'doc_count': 1, - 'key': 'checkout', - 'year': { - 'doc_count': 1, - 'meta': {} - } - }, { - 'doc_count': 2, - 'key': 'extend', - 'year': { - 'doc_count': 1, - 'meta': {} - } - }, { - 'doc_count': 2, - 'key': 'checkin', - 'year': { - 'doc_count': 1, - 'meta': {} - } - }] + es_response = Response( + OperationLogsSearch(), + { + "aggregations": { + "trigger": { + "buckets": [ + { + "doc_count": 1, + "key": "checkout", + "year": {"doc_count": 1, "meta": {}}, + }, + { + "doc_count": 2, + "key": "extend", + "year": {"doc_count": 1, "meta": {}}, + }, + { + "doc_count": 2, + "key": "checkin", + "year": {"doc_count": 1, "meta": {}}, + }, + ] + } } - } - }) - - es_response_checkin = Response(OperationLogsSearch(), { - 'aggregations': { - 'trigger': { - 'buckets': [{ - 'doc_count': 2, - 'key': 'checkin', - 'year': { - 'doc_count': 1, - 'meta': {} - } - }] + }, + ) + + es_response_checkin = Response( + OperationLogsSearch(), + { + "aggregations": { + "trigger": { + "buckets": [ + { + "doc_count": 2, + "key": "checkin", + "year": {"doc_count": 1, "meta": {}}, + } + ] + } } - } - }) + }, + ) login_user_via_session(client, librarian_martigny.user) with mock.patch.object( - OperationLogsSearch, - 'execute', - mock.MagicMock(return_value=es_response) + OperationLogsSearch, "execute", mock.MagicMock(return_value=es_response) ): # We sum the Legacy_count field in the checkout field - res = client.get(url_for('api_item.stats', item_pid='item1')) - assert res.json == \ - { - 'total': {'checkout': 5, 'extend': 2, 'checkin': 2}, - 'total_year': {'checkout': 1, 'extend': 1, 'checkin': 1}} + StatsSearch.flush_and_refresh() + res = client.get(url_for("api_item.stats", item_pid="item1")) + assert res.json == { + "total": {"checkout": 5, "extend": 2, "checkin": 2}, + "total_year": {"checkout": 1, "extend": 1, "checkin": 1}, + } with mock.patch.object( - OperationLogsSearch, - 'execute', - mock.MagicMock(return_value=es_response_checkin) + OperationLogsSearch, "execute", mock.MagicMock(return_value=es_response_checkin) ): # item found # We add the legacy_checkout_count field to the checkout field - res = client.get(url_for('api_item.stats', item_pid='item1')) - assert res.json == \ - { - 'total': {'checkout': 4, 'checkin': 2}, - 'total_year': {'checkin': 1}} + StatsSearch.flush_and_refresh() + res = client.get(url_for("api_item.stats", item_pid="item1")) + assert res.json == { + "total": {"checkout": 4, "checkin": 2}, + "total_year": {"checkin": 1}, + } # No item found - res = client.get(url_for('api_item.stats', item_pid='foot')) - assert res.json == \ - { - 'total': {'checkin': 2}, - 'total_year': {'checkin': 1}} + StatsSearch.flush_and_refresh() + 
res = client.get(url_for("api_item.stats", item_pid="foot")) + assert res.json == {"total": {"checkin": 2}, "total_year": {"checkin": 1}} diff --git a/tests/api/items/test_items_serializer.py b/tests/api/items/test_items_serializer.py index 6f471364ac..bd6e60f3ee 100644 --- a/tests/api/items/test_items_serializer.py +++ b/tests/api/items/test_items_serializer.py @@ -35,87 +35,130 @@ def test_serializers( loan_due_soon_martigny, loc_public_martigny, item_type_standard_martigny, - item_type_on_site_martigny + item_type_on_site_martigny, ): """Test Serializers.""" login_user(client, librarian_martigny) item = item_lib_martigny - loc_ref = get_ref_for_pid('locations', loc_public_martigny.pid) - item.setdefault('temporary_location', {})['$ref'] = loc_ref + loc_ref = get_ref_for_pid("locations", loc_public_martigny.pid) + item.setdefault("temporary_location", {})["$ref"] = loc_ref item.commit() item.reindex() - item_url = url_for( - 'invenio_records_rest.item_item', pid_value=item_lib_fully.pid) + item_url = url_for("invenio_records_rest.item_item", pid_value=item_lib_fully.pid) response = client.get(item_url, headers=json_header) assert response.status_code == 200 - assert response.json['metadata'].get('item_type', {}).get('$ref') + assert response.json["metadata"].get("item_type", {}).get("$ref") item_url = url_for( - 'invenio_records_rest.item_item', pid_value=item_lib_martigny.pid) + "invenio_records_rest.item_item", pid_value=item_lib_martigny.pid + ) response = client.get(item_url, headers=json_header) assert response.status_code == 200 - assert response.json['metadata'].get('item_type', {}).get('$ref') + assert response.json["metadata"].get("item_type", {}).get("$ref") item_url = url_for( - 'invenio_records_rest.item_item', - pid_value=item_lib_fully.pid, resolve=1) + "invenio_records_rest.item_item", pid_value=item_lib_fully.pid, resolve=1 + ) response = client.get(item_url, headers=json_header) data = response.json - assert data['metadata'].get('item_type', {}).get('pid') + assert data["metadata"].get("item_type", {}).get("pid") # test if all key exist into response with a value - for key in ['created', 'updated', 'id', 'links', 'metadata']: + for key in ["created", "updated", "id", "links", "metadata"]: assert key in data assert data[key] - list_url = url_for('invenio_records_rest.item_list') + list_url = url_for("invenio_records_rest.item_list") response = client.get(list_url, headers=rero_json_header) - data = response.json['hits']['hits'] + data = response.json["hits"]["hits"] assert response.status_code == 200 - list_url = url_for('api_item.inventory_search') + list_url = url_for("api_item.inventory_search") response = client.get(list_url, headers=csv_header) assert response.status_code == 200 data = get_csv(response) assert data fields = [ - 'item_pid', 'item_create_date', 'document_pid', 'document_title', - 'document_creator', 'document_main_type', 'document_sub_type', - 'document_masked', 'document_isbn', 'document_issn', - 'document_series_statement', 'document_edition_statement', - 'document_publication_year', 'document_publisher', - 'document_local_field_1', 'document_local_field_2', - 'document_local_field_3', 'document_local_field_4', - 'document_local_field_5', 'document_local_field_6', - 'document_local_field_7', 'document_local_field_8', - 'document_local_field_9', 'document_local_field_10', - 'item_acquisition_date', 'item_barcode', 'item_call_number', - 'item_second_call_number', 'item_legacy_checkout_count', - 'item_type', 'item_library_name', 'item_location_name', - 
'item_pac_code', 'item_holding_pid', 'item_price', 'item_status', - 'item_item_type', 'item_general_note', 'item_staff_note', - 'item_checkin_note', 'item_checkout_note', 'item_acquisition_note', - 'item_binding_note', 'item_condition_note', 'item_patrimonial_note', - 'item_provenance_note', 'temporary_item_type', - 'temporary_item_type_expiry_date', 'item_masked', - 'item_enumerationAndChronology', 'item_local_field_1', - 'item_local_field_2', 'item_local_field_3', 'item_local_field_4', - 'item_local_field_5', 'item_local_field_6', 'item_local_field_7', - 'item_local_field_8', 'item_local_field_9', 'item_local_field_10', - 'issue_status', 'issue_status_date', 'issue_claims_count', - 'issue_expected_date', 'issue_regular', 'item_checkouts_count', - 'item_renewals_count', 'last_transaction_date', 'last_checkout_date' + "item_pid", + "item_create_date", + "document_pid", + "document_title", + "document_creator", + "document_main_type", + "document_sub_type", + "document_masked", + "document_isbn", + "document_issn", + "document_series_statement", + "document_edition_statement", + "document_publication_year", + "document_publisher", + "document_local_field_1", + "document_local_field_2", + "document_local_field_3", + "document_local_field_4", + "document_local_field_5", + "document_local_field_6", + "document_local_field_7", + "document_local_field_8", + "document_local_field_9", + "document_local_field_10", + "item_acquisition_date", + "item_barcode", + "item_call_number", + "item_second_call_number", + "item_legacy_checkout_count", + "item_type", + "item_library_name", + "item_location_name", + "item_pac_code", + "item_holding_pid", + "item_price", + "item_status", + "item_item_type", + "item_general_note", + "item_staff_note", + "item_checkin_note", + "item_checkout_note", + "item_acquisition_note", + "item_binding_note", + "item_condition_note", + "item_patrimonial_note", + "item_provenance_note", + "temporary_item_type", + "temporary_item_type_expiry_date", + "item_masked", + "item_enumerationAndChronology", + "item_local_field_1", + "item_local_field_2", + "item_local_field_3", + "item_local_field_4", + "item_local_field_5", + "item_local_field_6", + "item_local_field_7", + "item_local_field_8", + "item_local_field_9", + "item_local_field_10", + "issue_status", + "issue_status_date", + "issue_claims_count", + "issue_expected_date", + "issue_regular", + "item_checkouts_count", + "item_renewals_count", + "last_transaction_date", + "last_checkout_date", ] for field in fields: assert field in data # test provisionActivity without type bf:Publication - document['provisionActivity'][0]['type'] = 'bf:Manufacture' + document["provisionActivity"][0]["type"] = "bf:Manufacture" document.commit() document.reindex() - list_url = url_for('api_item.inventory_search') + list_url = url_for("api_item.inventory_search") response = client.get(list_url, headers=csv_header) assert response.status_code == 200 data = get_csv(response) @@ -124,21 +167,18 @@ def test_serializers( # with temporary_item_type item_type = item_type_on_site_martigny circulation = [ - {'label': 'On site DE', 'language': 'de'}, - {'label': 'On site EN', 'language': 'en'}, - {'label': 'On site FR', 'language': 'fr'}, - {'label': 'On site IT', 'language': 'it'} + {"label": "On site DE", "language": "de"}, + {"label": "On site EN", "language": "en"}, + {"label": "On site FR", "language": "fr"}, + {"label": "On site IT", "language": "it"}, ] - item_type['circulation_information'] = circulation + item_type["circulation_information"] = 
circulation item_type.commit() item_type.reindex() - item['temporary_item_type'] = { - '$ref': get_ref_for_pid('itty', item_type.pid) - } + item["temporary_item_type"] = {"$ref": get_ref_for_pid("itty", item_type.pid)} item.commit() item.reindex() - list_url = url_for('invenio_records_rest.item_list', q=f'pid:{item.pid}') + list_url = url_for("invenio_records_rest.item_list", q=f"pid:{item.pid}") response = client.get(list_url, headers=rero_json_header) - data = response.json['hits']['hits'] - assert circulation == \ - data[0]['metadata']['item_type']['circulation_information'] + data = response.json["hits"]["hits"] + assert circulation == data[0]["metadata"]["item_type"]["circulation_information"] diff --git a/tests/api/libraries/test_libraries_permissions.py b/tests/api/libraries/test_libraries_permissions.py index 19e1e60854..d750802f7f 100644 --- a/tests/api/libraries/test_libraries_permissions.py +++ b/tests/api/libraries/test_libraries_permissions.py @@ -20,115 +20,120 @@ from flask import current_app from flask_principal import AnonymousIdentity, identity_changed from flask_security import login_user -from utils import check_permission, flush_index +from utils import check_permission from rero_ils.modules.libraries.permissions import LibraryPermissionPolicy from rero_ils.modules.patrons.api import Patron, PatronsSearch -@mock.patch.object(Patron, '_extensions', []) -def test_library_permissions(patron_martigny, - librarian_martigny, - system_librarian_martigny, - org_martigny, lib_martigny, lib_saxon, lib_sion): +@mock.patch.object(Patron, "_extensions", []) +def test_library_permissions( + patron_martigny, + librarian_martigny, + system_librarian_martigny, + org_martigny, + lib_martigny, + lib_saxon, + lib_sion, +): """Test library permissions class.""" # Anonymous user identity_changed.send( current_app._get_current_object(), identity=AnonymousIdentity() ) - check_permission(LibraryPermissionPolicy, { - 'search': False, - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, {}) + check_permission( + LibraryPermissionPolicy, + { + "search": False, + "read": False, + "create": False, + "update": False, + "delete": False, + }, + {}, + ) # Patron # A simple patron can't operate any operation about Library login_user(patron_martigny.user) - check_permission(LibraryPermissionPolicy, { - 'search': False, - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, lib_martigny) + check_permission( + LibraryPermissionPolicy, + { + "search": False, + "read": False, + "create": False, + "update": False, + "delete": False, + }, + lib_martigny, + ) # Librarian without 'pro_library_administrator' role # - search : any Library despite organisation owner # - read : only Library for its own organisation # - create/update/delete : disallowed - librarian_martigny['roles'].remove('pro_library_administrator') + librarian_martigny["roles"].remove("pro_library_administrator") librarian_martigny.update(librarian_martigny, dbcommit=True, reindex=True) - flush_index(PatronsSearch.Meta.index) + PatronsSearch.flush_and_refresh() login_user(librarian_martigny.user) - check_permission(LibraryPermissionPolicy, {'search': True}, None) - check_permission(LibraryPermissionPolicy, {'create': False}, {}) - check_permission(LibraryPermissionPolicy, { - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, lib_martigny) - check_permission(LibraryPermissionPolicy, { - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, lib_sion) + 
check_permission(LibraryPermissionPolicy, {"search": True}, None) + check_permission(LibraryPermissionPolicy, {"create": False}, {}) + check_permission( + LibraryPermissionPolicy, + {"read": True, "create": False, "update": False, "delete": False}, + lib_martigny, + ) + check_permission( + LibraryPermissionPolicy, + {"read": False, "create": False, "update": False, "delete": False}, + lib_sion, + ) # reset the librarian. - librarian_martigny['roles'].append('pro_library_administrator') + librarian_martigny["roles"].append("pro_library_administrator") librarian_martigny.update(librarian_martigny, dbcommit=True, reindex=True) - flush_index(PatronsSearch.Meta.index) + PatronsSearch.flush_and_refresh() # Librarian with 'pro_library_administrator' role # - search/read : same as common librarian # - create/update/delete : if patron is manager for this library login_user(librarian_martigny.user) - check_permission(LibraryPermissionPolicy, { - 'read': True, - 'create': True, - 'update': True, - 'delete': True - }, lib_martigny) - check_permission(LibraryPermissionPolicy, { - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, lib_saxon) - check_permission(LibraryPermissionPolicy, { - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, lib_sion) + check_permission( + LibraryPermissionPolicy, + {"read": True, "create": True, "update": True, "delete": True}, + lib_martigny, + ) + check_permission( + LibraryPermissionPolicy, + {"read": True, "create": False, "update": False, "delete": False}, + lib_saxon, + ) + check_permission( + LibraryPermissionPolicy, + {"read": False, "create": False, "update": False, "delete": False}, + lib_sion, + ) # SystemLibrarian # - search : any Library despite organisation owner # - read : only Library for its own organisation # - create/update/delete : only Library for its own organisation login_user(system_librarian_martigny.user) - check_permission(LibraryPermissionPolicy, { - 'read': True, - 'create': True, - 'update': True, - 'delete': True - }, lib_martigny) - check_permission(LibraryPermissionPolicy, { - 'read': True, - 'create': True, - 'update': True, - 'delete': True - }, lib_saxon) - check_permission(LibraryPermissionPolicy, { - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, lib_sion) + check_permission( + LibraryPermissionPolicy, + {"read": True, "create": True, "update": True, "delete": True}, + lib_martigny, + ) + check_permission( + LibraryPermissionPolicy, + {"read": True, "create": True, "update": True, "delete": True}, + lib_saxon, + ) + check_permission( + LibraryPermissionPolicy, + {"read": False, "create": False, "update": False, "delete": False}, + lib_sion, + ) diff --git a/tests/api/libraries/test_libraries_rest.py b/tests/api/libraries/test_libraries_rest.py index 8092e88322..88674bba03 100644 --- a/tests/api/libraries/test_libraries_rest.py +++ b/tests/api/libraries/test_libraries_rest.py @@ -24,20 +24,19 @@ import pytest from flask import url_for from invenio_accounts.testutils import login_user_via_session -from utils import VerifyRecordPermissionPatch, get_json, postdata, \ - to_relative_url +from utils import VerifyRecordPermissionPatch, get_json, postdata, to_relative_url from rero_ils.modules.libraries.api import LibraryNeverOpen def test_libraries_permissions(client, lib_martigny, json_header): """Test record retrieval.""" - item_url = url_for('invenio_records_rest.lib_item', pid_value='lib1') + item_url = url_for("invenio_records_rest.lib_item", pid_value="lib1") 
res = client.get(item_url) assert res.status_code == 401 - res, _ = postdata(client, 'invenio_records_rest.lib_list', {}) + res, _ = postdata(client, "invenio_records_rest.lib_list", {}) assert res.status_code == 401 client.put(item_url, data={}, headers=json_header) @@ -45,88 +44,84 @@ def test_libraries_permissions(client, lib_martigny, json_header): assert res.status_code == 401 -@mock.patch('invenio_records_rest.views.verify_record_permission', - mock.MagicMock(return_value=VerifyRecordPermissionPatch)) +@mock.patch( + "invenio_records_rest.views.verify_record_permission", + mock.MagicMock(return_value=VerifyRecordPermissionPatch), +) def test_libraries_get(client, lib_martigny): """Test record retrieval.""" - item_url = url_for('invenio_records_rest.lib_item', pid_value='lib1') - list_url = url_for('invenio_records_rest.lib_list', q='pid:lib1') + item_url = url_for("invenio_records_rest.lib_item", pid_value="lib1") + list_url = url_for("invenio_records_rest.lib_list", q="pid:lib1") res = client.get(item_url) assert res.status_code == 200 - assert res.headers['ETag'] == f'"{lib_martigny.revision_id}"' + assert res.headers["ETag"] == f'"{lib_martigny.revision_id}"' data = get_json(res) - assert lib_martigny.dumps() == data['metadata'] + assert lib_martigny.dumps() == data["metadata"] # Check metadata - for k in ['created', 'updated', 'metadata', 'links']: + for k in ["created", "updated", "metadata", "links"]: assert k in data # Check self links - res = client.get(to_relative_url(data['links']['self'])) + res = client.get(to_relative_url(data["links"]["self"])) assert res.status_code == 200 assert data == get_json(res) - assert lib_martigny.dumps() == data['metadata'] + assert lib_martigny.dumps() == data["metadata"] res = client.get(list_url) assert res.status_code == 200 data = get_json(res) - assert data['hits']['hits'][0]['metadata'] == lib_martigny.replace_refs() + assert data["hits"]["hits"][0]["metadata"] == lib_martigny.replace_refs() -@mock.patch('invenio_records_rest.views.verify_record_permission', - mock.MagicMock(return_value=VerifyRecordPermissionPatch)) +@mock.patch( + "invenio_records_rest.views.verify_record_permission", + mock.MagicMock(return_value=VerifyRecordPermissionPatch), +) def test_libraries_post_put_delete(client, lib_martigny_data, json_header): """Test record retrieval.""" - item_url = url_for('invenio_records_rest.lib_item', pid_value='1') - list_url = url_for('invenio_records_rest.lib_list', q='pid:1') + item_url = url_for("invenio_records_rest.lib_item", pid_value="1") + list_url = url_for("invenio_records_rest.lib_list", q="pid:1") # Create record / POST - lib_martigny_data['pid'] = '1' - res, data = postdata( - client, - 'invenio_records_rest.lib_list', - lib_martigny_data - ) + lib_martigny_data["pid"] = "1" + res, data = postdata(client, "invenio_records_rest.lib_list", lib_martigny_data) assert res.status_code == 201 # Check that the returned record matches the given data - assert data['metadata'] == lib_martigny_data + assert data["metadata"] == lib_martigny_data res = client.get(item_url) assert res.status_code == 200 data = get_json(res) - assert lib_martigny_data == data['metadata'] + assert lib_martigny_data == data["metadata"] # Update record/PUT data = lib_martigny_data - data['name'] = 'Test Name' - res = client.put( - item_url, - data=json.dumps(data), - headers=json_header - ) + data["name"] = "Test Name" + res = client.put(item_url, data=json.dumps(data), headers=json_header) assert res.status_code == 200 # assert res.headers['ETag'] != 
f'"{librarie.revision_id}"' # Check that the returned record matches the given data data = get_json(res) - assert data['metadata']['name'] == 'Test Name' + assert data["metadata"]["name"] == "Test Name" res = client.get(item_url) assert res.status_code == 200 data = get_json(res) - assert data['metadata']['name'] == 'Test Name' + assert data["metadata"]["name"] == "Test Name" res = client.get(list_url) assert res.status_code == 200 - data = get_json(res)['hits']['hits'][0] - assert data['metadata']['name'] == 'Test Name' + data = get_json(res)["hits"]["hits"][0] + assert data["metadata"]["name"] == "Test Name" # Delete record/DELETE res = client.delete(item_url) @@ -137,8 +132,12 @@ def test_libraries_post_put_delete(client, lib_martigny_data, json_header): def test_non_circulating_libraries( - lib_sion, lib_martigny, lib_martigny_bourg, loc_public_martigny, - loc_public_martigny_bourg): + lib_sion, + lib_martigny, + lib_martigny_bourg, + loc_public_martigny, + loc_public_martigny_bourg, +): """Test pickup vs transaction locations.""" assert not lib_sion.get_pickup_location_pid() assert not lib_martigny_bourg.get_pickup_location_pid() @@ -151,21 +150,16 @@ def test_library_never_open(lib_sion): assert lib_sion._has_is_open() assert lib_sion.next_open() - del lib_sion['opening_hours'] + del lib_sion["opening_hours"] # add an exception date in the past open_exception = { - 'is_open': True, - 'start_date': '2012-01-09', - 'title': 'Ouverture exceptionnelle', - 'times': [ - { - 'end_time': '16:00', - 'start_time': '12:00' - } - ] + "is_open": True, + "start_date": "2012-01-09", + "title": "Ouverture exceptionnelle", + "times": [{"end_time": "16:00", "start_time": "12:00"}], } - lib_sion['exception_dates'].append(open_exception) + lib_sion["exception_dates"].append(open_exception) lib_sion.update(lib_sion, dbcommit=True, reindex=True) # check that the exception in the past is not considered for next open date @@ -174,90 +168,96 @@ def test_library_never_open(lib_sion): # compute a date in the future and add it as exception date today = datetime.today() - future_date = (today + timedelta(days=14)).strftime('%Y-%m-%d') - open_exception['start_date'] = future_date + future_date = (today + timedelta(days=14)).strftime("%Y-%m-%d") + open_exception["start_date"] = future_date lib_sion.update(lib_sion, dbcommit=True, reindex=True) # check that the exception in the future is considered as open date assert lib_sion._has_is_open() - del lib_sion['exception_dates'] + del lib_sion["exception_dates"] lib_sion.update(lib_sion, dbcommit=True, reindex=True) with pytest.raises(LibraryNeverOpen): assert lib_sion.next_open() -def test_library_can_delete(lib_martigny, librarian_martigny, - loc_public_martigny, acq_receipt_fiction_martigny): +def test_library_can_delete( + lib_martigny, librarian_martigny, loc_public_martigny, acq_receipt_fiction_martigny +): """Test can delete a library.""" can, reasons = lib_martigny.can_delete assert not can - assert reasons['links']['locations'] - assert reasons['links']['patrons'] - assert reasons['links']['acq_receipts'] + assert reasons["links"]["locations"] + assert reasons["links"]["patrons"] + assert reasons["links"]["acq_receipts"] def test_filtered_libraries_get( - client, librarian_martigny, lib_martigny, lib_saxon, - lib_fully, librarian_sion, lib_sion): - """Test libraries filter by organisation.""" # Martigny + client, + librarian_martigny, + lib_martigny, + lib_saxon, + lib_fully, + librarian_sion, + lib_sion, +): + """Test libraries filter by organisation.""" # 
Martigny login_user_via_session(client, librarian_martigny.user) - list_url = url_for('invenio_records_rest.lib_list') + list_url = url_for("invenio_records_rest.lib_list") res = client.get(list_url) assert res.status_code == 200 data = get_json(res) - assert data['hits']['total']['value'] == 4 + assert data["hits"]["total"]["value"] == 4 # Sion login_user_via_session(client, librarian_sion.user) - list_url = url_for('invenio_records_rest.lib_list') + list_url = url_for("invenio_records_rest.lib_list") res = client.get(list_url) assert res.status_code == 200 data = get_json(res) - assert data['hits']['total']['value'] == 1 - - -def test_library_secure_api(client, lib_martigny, lib_fully, - librarian_martigny, - librarian_sion, - system_librarian_martigny, - system_librarian_sion): + assert data["hits"]["total"]["value"] == 1 + + +def test_library_secure_api( + client, + lib_martigny, + lib_fully, + librarian_martigny, + librarian_sion, + system_librarian_martigny, + system_librarian_sion, +): """Test library secure api access.""" # Martigny login_user_via_session(client, librarian_martigny.user) - record_url = url_for('invenio_records_rest.lib_item', - pid_value=lib_martigny.pid) + record_url = url_for("invenio_records_rest.lib_item", pid_value=lib_martigny.pid) res = client.get(record_url) # a librarian is authorized to access its library record of its org assert res.status_code == 200 - record_url = url_for('invenio_records_rest.lib_item', - pid_value=lib_fully.pid) + record_url = url_for("invenio_records_rest.lib_item", pid_value=lib_fully.pid) res = client.get(record_url) # a librarian is authorized to access other library records of its org assert res.status_code == 200 login_user_via_session(client, system_librarian_martigny.user) - record_url = url_for('invenio_records_rest.lib_item', - pid_value=lib_martigny.pid) + record_url = url_for("invenio_records_rest.lib_item", pid_value=lib_martigny.pid) res = client.get(record_url) # a sys_lib is authorized to access its library record of its org assert res.status_code == 200 - record_url = url_for('invenio_records_rest.lib_item', - pid_value=lib_fully.pid) + record_url = url_for("invenio_records_rest.lib_item", pid_value=lib_fully.pid) res = client.get(record_url) # a sys_lib is authorized to access libraries of its org assert res.status_code == 200 # Sion login_user_via_session(client, librarian_sion.user) - record_url = url_for('invenio_records_rest.lib_item', - pid_value=lib_martigny.pid) + record_url = url_for("invenio_records_rest.lib_item", pid_value=lib_martigny.pid) res = client.get(record_url) # a librarian is not authorized to access library record of another org @@ -269,23 +269,27 @@ def test_library_secure_api(client, lib_martigny, lib_fully, assert res.status_code == 403 -def test_library_secure_api_create(client, lib_martigny, - lib_fully_data, librarian2_martigny, - librarian_sion, - lib_martigny_data, - system_librarian_martigny, - system_librarian_sion): +def test_library_secure_api_create( + client, + lib_martigny, + lib_fully_data, + librarian2_martigny, + librarian_sion, + lib_martigny_data, + system_librarian_martigny, + system_librarian_sion, +): """Test library secure api create.""" # Martigny login_user_via_session(client, librarian2_martigny.user) - post_entrypoint = 'invenio_records_rest.lib_list' + post_entrypoint = "invenio_records_rest.lib_list" - del lib_martigny_data['pid'] + del lib_martigny_data["pid"] res, _ = postdata(client, post_entrypoint, lib_martigny_data) # a not library manager is not authorized 
to create its library record assert res.status_code == 403 - del lib_fully_data['pid'] + del lib_fully_data["pid"] res, _ = postdata(client, post_entrypoint, lib_fully_data) # a not library manager is not authorized to create library record assert res.status_code == 403 @@ -299,101 +303,79 @@ def test_library_secure_api_create(client, lib_martigny, assert res.status_code == 201 -def test_library_secure_api_update(client, lib_fully, lib_martigny, - librarian_martigny, - librarian_sion, - json_header, - system_librarian_martigny, - system_librarian_sion): +def test_library_secure_api_update( + client, + lib_fully, + lib_martigny, + librarian_martigny, + librarian_sion, + json_header, + system_librarian_martigny, + system_librarian_sion, +): """Test library secure api update.""" # Martigny login_user_via_session(client, librarian_martigny.user) - record_url = url_for('invenio_records_rest.lib_item', - pid_value=lib_martigny.pid) - - lib_martigny['name'] = 'New Name' - res = client.put( - record_url, - data=json.dumps(lib_martigny), - headers=json_header - ) + record_url = url_for("invenio_records_rest.lib_item", pid_value=lib_martigny.pid) + + lib_martigny["name"] = "New Name" + res = client.put(record_url, data=json.dumps(lib_martigny), headers=json_header) # a librarian is authorized to update its library in its org assert res.status_code == 200 - record_url = url_for('invenio_records_rest.lib_item', - pid_value=lib_fully.pid) + record_url = url_for("invenio_records_rest.lib_item", pid_value=lib_fully.pid) - lib_fully['name'] = 'New Name' - res = client.put( - record_url, - data=json.dumps(lib_fully), - headers=json_header - ) + lib_fully["name"] = "New Name" + res = client.put(record_url, data=json.dumps(lib_fully), headers=json_header) # a librarian is not authorized to update an external library of its org assert res.status_code == 403 login_user_via_session(client, system_librarian_martigny.user) - res = client.put( - record_url, - data=json.dumps(lib_fully), - headers=json_header - ) + res = client.put(record_url, data=json.dumps(lib_fully), headers=json_header) # a sys_librarian is authorized to update any library of its org assert res.status_code == 200 - record_url = url_for('invenio_records_rest.lib_item', - pid_value=lib_martigny.pid) - lib_martigny['name'] = 'New Name 2' - res = client.put( - record_url, - data=json.dumps(lib_martigny), - headers=json_header - ) + record_url = url_for("invenio_records_rest.lib_item", pid_value=lib_martigny.pid) + lib_martigny["name"] = "New Name 2" + res = client.put(record_url, data=json.dumps(lib_martigny), headers=json_header) # a sys_librarian is authorized to update any library of its org assert res.status_code == 200 # Sion login_user_via_session(client, librarian_sion.user) - record_url = url_for('invenio_records_rest.lib_item', - pid_value=lib_fully.pid) + record_url = url_for("invenio_records_rest.lib_item", pid_value=lib_fully.pid) - lib_fully['name'] = 'New Name 2' - res = client.put( - record_url, - data=json.dumps(lib_fully), - headers=json_header - ) + lib_fully["name"] = "New Name 2" + res = client.put(record_url, data=json.dumps(lib_fully), headers=json_header) # librarian is not authorized to update an external library of another org assert res.status_code == 403 login_user_via_session(client, system_librarian_sion.user) - res = client.put( - record_url, - data=json.dumps(lib_fully), - headers=json_header - ) + res = client.put(record_url, data=json.dumps(lib_fully), headers=json_header) # sys_lib is not authorized to update an 
external library of another org assert res.status_code == 403 -def test_library_secure_api_delete(client, lib_fully, lib_martigny, - librarian2_martigny, - librarian_sion, - system_librarian_martigny, - system_librarian_sion): +def test_library_secure_api_delete( + client, + lib_fully, + lib_martigny, + librarian2_martigny, + librarian_sion, + system_librarian_martigny, + system_librarian_sion, +): """Test library secure api delete.""" # Martigny login_user_via_session(client, librarian2_martigny.user) - record_url = url_for('invenio_records_rest.lib_item', - pid_value=lib_martigny.pid) + record_url = url_for("invenio_records_rest.lib_item", pid_value=lib_martigny.pid) res = client.delete(record_url) # librarian is not authorized to delete its library of its org assert res.status_code == 403 - record_url = url_for('invenio_records_rest.lib_item', - pid_value=lib_fully.pid) + record_url = url_for("invenio_records_rest.lib_item", pid_value=lib_fully.pid) res = client.delete(record_url) # librarian is not authorized to delete an external library of its org @@ -413,8 +395,7 @@ def test_library_secure_api_delete(client, lib_fully, lib_martigny, assert res.status_code == 204 login_user_via_session(client, system_librarian_sion.user) - record_url = url_for('invenio_records_rest.lib_item', - pid_value=lib_martigny.pid) + record_url = url_for("invenio_records_rest.lib_item", pid_value=lib_martigny.pid) res = client.delete(record_url) # sys_librarian is not authorized to delete any library of other org diff --git a/tests/api/libraries/test_libraries_rest_views.py b/tests/api/libraries/test_libraries_rest_views.py index d007b96727..312e867718 100644 --- a/tests/api/libraries/test_libraries_rest_views.py +++ b/tests/api/libraries/test_libraries_rest_views.py @@ -27,53 +27,37 @@ def test_library_closed_date_api(client, lib_martigny, librarian_martigny): """Test closed date api.""" login_user_via_session(client, librarian_martigny.user) # CHECK#0 :: unknown library - url = url_for( - 'api_library.list_closed_dates', - library_pid='dummy_pid' - ) + url = url_for("api_library.list_closed_dates", library_pid="dummy_pid") res = client.get(url) assert res.status_code == 404 # CHECK#1 :: no specified dates - url = url_for( - 'api_library.list_closed_dates', - library_pid=lib_martigny.pid - ) + url = url_for("api_library.list_closed_dates", library_pid=lib_martigny.pid) res = client.get(url) assert res.status_code == 200 data = get_json(res) - assert 'closed_dates' in data - assert isinstance(data['closed_dates'], list) + assert "closed_dates" in data + assert isinstance(data["closed_dates"], list) # CHECK#2 :: with specified dates - params = { - 'from': '2020-01-01', - 'until': '2020-02-01' - } + params = {"from": "2020-01-01", "until": "2020-02-01"} url = url_for( - 'api_library.list_closed_dates', - library_pid=lib_martigny.pid, - **params + "api_library.list_closed_dates", library_pid=lib_martigny.pid, **params ) res = client.get(url) assert res.status_code == 200 data = get_json(res) - assert data['params']['from'] == params['from'] - assert data['params']['until'] == params['until'] + assert data["params"]["from"] == params["from"] + assert data["params"]["until"] == params["until"] # CHECK#3 :: with bad specified dates - params = { - 'until': '2020-01-01', - 'from': '2020-02-01' - } + params = {"until": "2020-01-01", "from": "2020-02-01"} url = url_for( - 'api_library.list_closed_dates', - library_pid=lib_martigny.pid, - **params + "api_library.list_closed_dates", library_pid=lib_martigny.pid, **params 
) res = client.get(url) assert res.status_code == 200 data = get_json(res) - assert data['params']['from'] == params['from'] - assert data['params']['until'] == params['until'] - assert data['closed_dates'] == [] + assert data["params"]["from"] == params["from"] + assert data["params"]["until"] == params["until"] + assert data["closed_dates"] == [] diff --git a/tests/api/loans/test_loans_delete_item_rest.py b/tests/api/loans/test_loans_delete_item_rest.py index 180bad8229..d0cd387202 100644 --- a/tests/api/loans/test_loans_delete_item_rest.py +++ b/tests/api/loans/test_loans_delete_item_rest.py @@ -23,27 +23,40 @@ def test_loans_serializer_with_deleted_item( - client, item_lib_martigny, patron2_martigny, librarian_martigny, - lib_martigny, rero_json_header, circulation_policies + client, + item_lib_martigny, + patron2_martigny, + librarian_martigny, + lib_martigny, + rero_json_header, + circulation_policies, ): """Test loan serializer with a deleted item.""" login_user_via_session(client, librarian_martigny.user) - res, _ = postdata(client, 'api_item.checkout', dict( - item_pid=item_lib_martigny.pid, - patron_pid=patron2_martigny.pid, - transaction_library_pid=lib_martigny.pid, - transaction_user_pid=librarian_martigny.pid - )) + res, _ = postdata( + client, + "api_item.checkout", + dict( + item_pid=item_lib_martigny.pid, + patron_pid=patron2_martigny.pid, + transaction_library_pid=lib_martigny.pid, + transaction_user_pid=librarian_martigny.pid, + ), + ) assert res.status_code == 200 - res, data = postdata(client, 'api_item.checkin', dict( - item_pid=item_lib_martigny.pid, - transaction_library_pid=lib_martigny.pid, - transaction_user_pid=librarian_martigny.pid - )) + res, data = postdata( + client, + "api_item.checkin", + dict( + item_pid=item_lib_martigny.pid, + transaction_library_pid=lib_martigny.pid, + transaction_user_pid=librarian_martigny.pid, + ), + ) assert res.status_code == 200 item_lib_martigny.delete(False, True, True) - loan_list_url = url_for('invenio_records_rest.loanid_list') + loan_list_url = url_for("invenio_records_rest.loanid_list") res = client.get(loan_list_url, headers=rero_json_header) assert res.status_code == 200 diff --git a/tests/api/loans/test_loans_permissions.py b/tests/api/loans/test_loans_permissions.py index 9a600e2bc0..3fd730a5b1 100644 --- a/tests/api/loans/test_loans_permissions.py +++ b/tests/api/loans/test_loans_permissions.py @@ -25,67 +25,90 @@ def test_loan_permissions( - patron_martigny, librarian_martigny, - loan_overdue_martigny, loan_overdue_sion + patron_martigny, librarian_martigny, loan_overdue_martigny, loan_overdue_sion ): """Test loans permissions api.""" # Anonymous user identity_changed.send( current_app._get_current_object(), identity=AnonymousIdentity() ) - check_permission(LoanPermissionPolicy, { - 'search': False, - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, {}) + check_permission( + LoanPermissionPolicy, + { + "search": False, + "read": False, + "create": False, + "update": False, + "delete": False, + }, + {}, + ) # Patron # * can : search, read (own record), create # * can't : update, delete login_user(patron_martigny.user) - check_permission(LoanPermissionPolicy, { - 'search': True, - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, loan_overdue_martigny) - check_permission(LoanPermissionPolicy, { - 'search': False, - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, loan_overdue_sion) + check_permission( + LoanPermissionPolicy, + { + "search": 
True, + "read": True, + "create": False, + "update": False, + "delete": False, + }, + loan_overdue_martigny, + ) + check_permission( + LoanPermissionPolicy, + { + "search": False, + "read": False, + "create": False, + "update": False, + "delete": False, + }, + loan_overdue_sion, + ) # Librarian without correct role # - can : search, read (own organisation), create # - update, delete : disallowed (missing ActionNeed) login_user(librarian_martigny.user) - check_permission(LoanPermissionPolicy, { - 'search': True, - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, loan_overdue_martigny) - check_permission(LoanPermissionPolicy, { - 'search': False, - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, loan_overdue_sion) + check_permission( + LoanPermissionPolicy, + { + "search": True, + "read": True, + "create": False, + "update": False, + "delete": False, + }, + loan_overdue_martigny, + ) + check_permission( + LoanPermissionPolicy, + { + "search": False, + "read": False, + "create": False, + "update": False, + "delete": False, + }, + loan_overdue_sion, + ) # Loan anonymized - loan_overdue_martigny['to_anonymize'] = True + loan_overdue_martigny["to_anonymize"] = True login_user(librarian_martigny.user) - check_permission(LoanPermissionPolicy, { - 'search': True, - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, loan_overdue_martigny) + check_permission( + LoanPermissionPolicy, + { + "search": True, + "read": False, + "create": False, + "update": False, + "delete": False, + }, + loan_overdue_martigny, + ) diff --git a/tests/api/loans/test_loans_rest.py b/tests/api/loans/test_loans_rest.py index d1a649f9c8..3d9ea0903f 100644 --- a/tests/api/loans/test_loans_rest.py +++ b/tests/api/loans/test_loans_rest.py @@ -24,14 +24,18 @@ from flask import url_for from invenio_accounts.testutils import login_user_via_session from invenio_circulation.api import get_loan_for_item -from invenio_circulation.search.api import LoansSearch -from utils import check_timezone_date, flush_index, get_json, postdata +from utils import check_timezone_date, get_json, postdata from rero_ils.modules.items.api import Item from rero_ils.modules.items.utils import item_pid_to_object from rero_ils.modules.libraries.api import LibrariesSearch, Library -from rero_ils.modules.loans.api import Loan, get_due_soon_loans, \ - get_last_transaction_loc_for_item, get_overdue_loans +from rero_ils.modules.loans.api import ( + Loan, + LoansSearch, + get_due_soon_loans, + get_last_transaction_loc_for_item, + get_overdue_loans, +) from rero_ils.modules.loans.models import LoanAction, LoanState from rero_ils.modules.notifications.api import NotificationsSearch from rero_ils.modules.notifications.dispatcher import Dispatcher @@ -41,8 +45,7 @@ def test_loans_search( - client, loan_pending_martigny, rero_json_header, librarian_martigny, - yesterday + client, loan_pending_martigny, rero_json_header, librarian_martigny, yesterday ): """Test record retrieval.""" login_user_via_session(client, librarian_martigny.user) @@ -50,82 +53,91 @@ def test_loans_search( original_loan = deepcopy(loan) # STEP#1 :: CHECK FACETS ARE PRESENT INTO SEARCH RESULT - url = url_for('invenio_records_rest.loanid_list', - exclude_status=LoanState.ITEM_RETURNED) + url = url_for( + "invenio_records_rest.loanid_list", exclude_status=LoanState.ITEM_RETURNED + ) res = client.get(url, headers=rero_json_header) data = get_json(res) - facet_keys = ['end_date', 'misc_status', 'owner_library', 'patron_type', 
- 'pickup_library', 'request_expire_date', 'status', - 'transaction_library'] - assert all(key in data['aggregations'] for key in facet_keys) - assert data['hits']['total']['value'] == 1 + facet_keys = [ + "end_date", + "misc_status", + "owner_library", + "patron_type", + "pickup_library", + "request_expire_date", + "status", + "transaction_library", + ] + assert all(key in data["aggregations"] for key in facet_keys) + assert data["hits"]["total"]["value"] == 1 # STEP#2 :: REQUEST EXPIRED # Update the loan to simulate that this request is now expired. - params = {'misc_status': 'expired_request'} - url = url_for('invenio_records_rest.loanid_list', **params) + params = {"misc_status": "expired_request"} + url = url_for("invenio_records_rest.loanid_list", **params) res = client.get(url, headers=rero_json_header) data = get_json(res) - assert data['hits']['total']['value'] == 0 + assert data["hits"]["total"]["value"] == 0 - loan['request_expire_date'] = yesterday.isoformat() + loan["request_expire_date"] = yesterday.isoformat() loan.update(loan, dbcommit=True, reindex=True) res = client.get(url, headers=rero_json_header) data = get_json(res) - assert data['hits']['total']['value'] == 1 + assert data["hits"]["total"]["value"] == 1 # STEP#3 :: LOAN IS OVERDUE # Update the loan to be overdue and test the API search. - params = {'misc_status': 'overdue'} - url = url_for('invenio_records_rest.loanid_list', **params) + params = {"misc_status": "overdue"} + url = url_for("invenio_records_rest.loanid_list", **params) res = client.get(url, headers=rero_json_header) data = get_json(res) - assert data['hits']['total']['value'] == 0 + assert data["hits"]["total"]["value"] == 0 - loan['end_date'] = yesterday.isoformat() + loan["end_date"] = yesterday.isoformat() loan.update(loan, dbcommit=True, reindex=True) res = client.get(url, headers=rero_json_header) data = get_json(res) - assert data['hits']['total']['value'] == 1 + assert data["hits"]["total"]["value"] == 1 # RESET THE LOAN (for next tests) loan.update(original_loan, dbcommit=True, reindex=True) -def test_loan_access_permissions(client, librarian_martigny, - loc_public_saxon, - patron_martigny, - item_lib_sion, - item2_lib_sion, - patron_sion_multiple, - librarian_sion, - patron_sion, - patron2_martigny, - circulation_policies, - loan_pending_martigny, - item_lib_martigny, - loc_public_sion - ): +def test_loan_access_permissions( + client, + librarian_martigny, + loc_public_saxon, + patron_martigny, + item_lib_sion, + item2_lib_sion, + patron_sion_multiple, + librarian_sion, + patron_sion, + patron2_martigny, + circulation_policies, + loan_pending_martigny, + item_lib_martigny, + loc_public_sion, +): """Test loans read permissions.""" # ensure we have loans from the two configured organisation. 
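The scoping rule this comment sets up — a user who is a patron in one organisation and a librarian in another sees their own loans plus every loan of the employing organisation, as the assertions further below spell out — amounts to an OR of two term filters at query time. A purely illustrative sketch with elasticsearch-dsl (the function and the organisation_pid field name are assumptions; patron_pid matches the query string used later in this test):

from elasticsearch_dsl import Q


def visible_loans_filter(patron_pid, employee_org_pid):
    """Loans a multiple-organisation user may list: own loans in any
    organisation, plus all loans of the employing organisation."""
    # `organisation_pid` is an assumed field name for illustration only.
    return Q("term", patron_pid=patron_pid) | Q(
        "term", organisation_pid=employee_org_pid
    )

The checkouts below first seed a loan in each organisation so that both branches of this OR are exercised.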
login_user_via_session(client, librarian_sion.user) res, _ = postdata( client, - 'api_item.checkout', + "api_item.checkout", dict( item_pid=item_lib_sion.pid, patron_pid=patron_sion.pid, transaction_location_pid=loc_public_saxon.pid, transaction_user_pid=librarian_martigny.pid, - ) + ), ) assert res.status_code == 200 loan_pids = Loan.get_all_pids() loans = [Loan.get_record_by_pid(pid) for pid in loan_pids] - loans_martigny = [ - loan for loan in loans if loan.organisation_pid == 'org1'] - loans_sion = [loan for loan in loans if loan.organisation_pid == 'org2'] + loans_martigny = [loan for loan in loans if loan.organisation_pid == "org1"] + loans_sion = [loan for loan in loans if loan.organisation_pid == "org2"] assert loans assert loan_pids assert loans_martigny @@ -137,13 +149,13 @@ def test_loan_access_permissions(client, librarian_martigny, # create a loan for itself res, _ = postdata( client, - 'api_item.checkout', + "api_item.checkout", dict( item_pid=item2_lib_sion.pid, patron_pid=patron_sion_multiple.pid, transaction_location_pid=loc_public_sion.pid, transaction_user_pid=librarian_sion.pid, - ) + ), ) assert res.status_code == 200 @@ -152,55 +164,59 @@ def test_loan_access_permissions(client, librarian_martigny, # without a query filter I should get 3 loans: one of mine and two # in my employing organisation; the other patron's loan in my patron org # should be filtered out - loan_list = url_for( - 'invenio_records_rest.loanid_list', - q=f'') + loan_list = url_for("invenio_records_rest.loanid_list", q="") res = client.get(loan_list) assert res.status_code == 200 data = get_json(res) - assert len(data['hits']['hits']) == 3 + assert len(data["hits"]["hits"]) == 3 # see only my loan loan_list = url_for( - 'invenio_records_rest.loanid_list', - q=f'patron_pid:{patron_sion_multiple.pid}') + "invenio_records_rest.loanid_list", q=f"patron_pid:{patron_sion_multiple.pid}" + ) res = client.get(loan_list) assert res.status_code == 200 data = get_json(res) - assert len(data['hits']['hits']) == 1 + assert len(data["hits"]["hits"]) == 1 # checkin the item to put it back to its original state login_user_via_session(client, librarian_sion.user) res, data = postdata( client, - 'api_item.checkin', + "api_item.checkin", dict( item_pid=item2_lib_sion.pid, transaction_location_pid=loc_public_sion.pid, transaction_user_pid=librarian_sion.pid, - ) + ), ) assert res.status_code == 200 res, _ = postdata( client, - 'api_item.checkin', + "api_item.checkin", dict( item_pid=item_lib_sion.pid, transaction_location_pid=loc_public_saxon.pid, transaction_user_pid=librarian_martigny.pid, - ) + ), ) assert res.status_code == 200 -def test_due_soon_loans(client, librarian_martigny, - lib_martigny_data, lib_martigny, - patron_martigny, loc_public_martigny, - item_type_standard_martigny, - item_lib_martigny, - circ_policy_short_martigny, yesterday): +def test_due_soon_loans( + client, + librarian_martigny, + lib_martigny_data, + lib_martigny, + patron_martigny, + loc_public_martigny, + item_type_standard_martigny, + item_lib_martigny, + circ_policy_short_martigny, + yesterday, +): """Test due soon loans.""" login_user_via_session(client, librarian_martigny.user) item = item_lib_martigny @@ -214,67 +230,68 @@ def test_due_soon_loans(client, librarian_martigny, assert not item.patron_has_an_active_loan_on_item(patron_martigny) from rero_ils.modules.circ_policies.api import CircPolicy + circ_policy = CircPolicy.provide_circ_policy( item.organisation_pid, item.library_pid, patron_martigny.patron_type_pid, - item.item_type_pid +
item.item_type_pid, ) - circ_policy['reminders'][0]['days_delay'] = 7 - circ_policy['checkout_duration'] = 3 + circ_policy["reminders"][0]["days_delay"] = 7 + circ_policy["checkout_duration"] = 3 circ_policy.update(circ_policy, dbcommit=True, reindex=True) # Remove library exception dates to ensure we are not affected by # closed dates. custom_lib_data = deepcopy(lib_martigny_data) - custom_lib_data['exception_dates'] = [] + custom_lib_data["exception_dates"] = [] lib_martigny.update(custom_lib_data, dbcommit=True, reindex=True) - flush_index(LibrariesSearch.Meta.index) + LibrariesSearch.flush_and_refresh() # checkout res, data = postdata( client, - 'api_item.checkout', + "api_item.checkout", dict( item_pid=item_pid, patron_pid=patron_pid, transaction_location_pid=loc_public_martigny.pid, transaction_user_pid=librarian_martigny.pid, - ) + ), ) assert res.status_code == 200 - loan_pid = data.get('action_applied')[LoanAction.CHECKOUT].get('pid') + loan_pid = data.get("action_applied")[LoanAction.CHECKOUT].get("pid") # To be considered as 'due_soon', we need to update the loan start date # to ensure that start_date occurs before due_date. loan = Loan.get_record_by_pid(loan_pid) - start_date = ciso8601.parse_datetime(loan.get('start_date')) - loan['start_date'] = (start_date - timedelta(days=30)).isoformat() + start_date = ciso8601.parse_datetime(loan.get("start_date")) + loan["start_date"] = (start_date - timedelta(days=30)).isoformat() loan.update(loan, dbcommit=True, reindex=True) due_soon_loans = list(get_due_soon_loans()) - assert due_soon_loans[0].get('pid') == loan_pid + assert due_soon_loans[0].get("pid") == loan_pid # test due date regarding multiple timezones checkout_loan = Loan.get_record_by_pid(loan_pid) - loan_date = ciso8601.parse_datetime(checkout_loan.get('end_date')) + loan_date = ciso8601.parse_datetime(checkout_loan.get("end_date")) # as instance timezone is Europe/Zurich, it should be either 21 or 22 check_timezone_date(pytz.utc, loan_date, [21, 22]) # should be 14:59/15:59 in US/Pacific (because of daylight saving time) - check_timezone_date(pytz.timezone('US/Pacific'), loan_date, [14, 15]) - check_timezone_date(pytz.timezone('Europe/Amsterdam'), loan_date) + check_timezone_date(pytz.timezone("US/Pacific"), loan_date, [14, 15]) + check_timezone_date(pytz.timezone("Europe/Amsterdam"), loan_date) # checkin the item to put it back to its original state res, _ = postdata( client, - 'api_item.checkin', + "api_item.checkin", dict( item_pid=item_pid, pid=loan_pid, transaction_location_pid=loc_public_martigny.pid, - transaction_user_pid=librarian_martigny.pid - ) + transaction_user_pid=librarian_martigny.pid, + ), ) assert res.status_code == 200 @@ -282,13 +299,18 @@ def test_due_soon_loans(client, librarian_martigny, lib_martigny.update(lib_martigny_data, dbcommit=True, reindex=True) -def test_overdue_loans(client, librarian_martigny, - patron_martigny, loc_public_martigny, - item_type_standard_martigny, - item_lib_martigny, item2_lib_martigny, - patron_type_children_martigny, - circ_policy_short_martigny, - patron3_martigny_blocked): +def test_overdue_loans( + client, + librarian_martigny, + patron_martigny, + loc_public_martigny, + item_type_standard_martigny, + item_lib_martigny, + item2_lib_martigny, + patron_type_children_martigny, + circ_policy_short_martigny, + patron3_martigny_blocked, +): """Test overdue loans.""" login_user_via_session(client, librarian_martigny.user) item = item_lib_martigny @@ -298,75 +320,83 @@ def test_overdue_loans(client, librarian_martigny, #
checkout res, data = postdata( client, - 'api_item.checkout', + "api_item.checkout", dict( item_pid=item_pid, patron_pid=patron_pid, transaction_location_pid=loc_public_martigny.pid, transaction_user_pid=librarian_martigny.pid, - ) + ), ) - assert res.status_code == 200, "It probably failed while \ + assert ( + res.status_code == 200 + ), "It probably failed because \ test_due_soon_loans failed" - loan_pid = data.get('action_applied')[LoanAction.CHECKOUT].get('pid') + loan_pid = data.get("action_applied")[LoanAction.CHECKOUT].get("pid") loan = Loan.get_record_by_pid(loan_pid) end_date = datetime.now(timezone.utc) - timedelta(days=7) - loan['end_date'] = end_date.isoformat() + loan["end_date"] = end_date.isoformat() - loan.update( - loan, - dbcommit=True, - reindex=True - ) + loan.update(loan, dbcommit=True, reindex=True) overdue_loans = list(get_overdue_loans(patron_pid=patron_pid)) - assert overdue_loans[0].get('pid') == loan_pid + assert overdue_loans[0].get("pid") == loan_pid assert number_of_notifications_sent(loan) == 0 - notification = loan.create_notification( - _type=NotificationType.OVERDUE).pop() - Dispatcher.dispatch_notifications([notification.get('pid')]) - flush_index(NotificationsSearch.Meta.index) - flush_index(LoansSearch.Meta.index) - flush_index(OperationLogsSearch.Meta.index) + notification = loan.create_notification(_type=NotificationType.OVERDUE).pop() + Dispatcher.dispatch_notifications([notification.get("pid")]) + NotificationsSearch.flush_and_refresh() + LoansSearch.flush_and_refresh() + OperationLogsSearch.flush_and_refresh() assert number_of_notifications_sent(loan) == 1 # Check notification is created on operation logs - assert len(list( - OperationLogsSearch() - .get_logs_by_notification_pid(notification.get('pid')))) == 1 + assert ( + len( + list( + OperationLogsSearch().get_logs_by_notification_pid( + notification.get("pid") + ) + ) + ) + == 1 + ) # Try a checkout for a blocked user :: It should be blocked res, data = postdata( client, - 'api_item.checkout', + "api_item.checkout", dict( item_pid=item2_lib_martigny.pid, patron_pid=patron3_martigny_blocked.pid, transaction_location_pid=loc_public_martigny.pid, transaction_user_pid=librarian_martigny.pid, - ) + ), ) assert res.status_code == 403 - assert 'This patron is currently blocked' in data['message'] + assert "This patron is currently blocked" in data["message"] # checkin the item to put it back to its original state res, _ = postdata( client, - 'api_item.checkin', + "api_item.checkin", dict( item_pid=item_pid, pid=loan_pid, transaction_location_pid=loc_public_martigny.pid, transaction_user_pid=librarian_martigny.pid, - ) + ), ) assert res.status_code == 200 -def test_last_end_date_loans(client, librarian_martigny, - patron_martigny, loc_public_martigny, - item_lib_martigny, - circ_policy_short_martigny): +def test_last_end_date_loans( + client, + librarian_martigny, + patron_martigny, + loc_public_martigny, + item_lib_martigny, + circ_policy_short_martigny, +): """Test last_end_date of loan.""" login_user_via_session(client, librarian_martigny.user) item = item_lib_martigny @@ -376,105 +406,101 @@ def test_last_end_date_loans(client, librarian_martigny, # checkout the item res, data = postdata( client, - 'api_item.checkout', + "api_item.checkout", dict( item_pid=item_pid, patron_pid=patron_pid, transaction_location_pid=loc_public_martigny.pid, transaction_user_pid=librarian_martigny.pid, - ) + ), ) assert res.status_code == 200 - loan_pid = data.get('action_applied')[LoanAction.CHECKOUT].get('pid') +
loan_pid = data.get("action_applied")[LoanAction.CHECKOUT].get("pid") loan = Loan.get_record_by_pid(loan_pid) - assert loan['end_date'] == loan['last_end_date'] + assert loan["end_date"] == loan["last_end_date"] - end_date = loan['end_date'] + end_date = loan["end_date"] # checkin the item res, _ = postdata( client, - 'api_item.checkin', + "api_item.checkin", dict( item_pid=item_pid, pid=loan_pid, transaction_location_pid=loc_public_martigny.pid, transaction_user_pid=librarian_martigny.pid, - ) + ), ) assert res.status_code == 200 loan = Loan.get_record_by_pid(loan_pid) # check last_end_date is the last end_date - assert loan['last_end_date'] == end_date + assert loan["last_end_date"] == end_date # check end_date is equal to transaction_date - assert loan['end_date'] == loan['transaction_date'] - - -def test_checkout_item_transit(client, mailbox, item2_lib_martigny, - librarian_martigny, - librarian_saxon, - patron_martigny, - loc_public_saxon, lib_martigny, - loc_public_martigny, - circulation_policies): + assert loan["end_date"] == loan["transaction_date"] + + +def test_checkout_item_transit( + client, + mailbox, + item2_lib_martigny, + librarian_martigny, + librarian_saxon, + patron_martigny, + loc_public_saxon, + lib_martigny, + loc_public_martigny, + circulation_policies, +): """Test checkout of an item in transit.""" assert item2_lib_martigny.is_available() mailbox.clear() # request login_user_via_session(client, librarian_martigny.user) - loc_public_martigny['notification_email'] = 'dummy_email@fake.domain' - loc_public_martigny['send_notification'] = True - loc_public_martigny.update( - loc_public_martigny.dumps(), - dbcommit=True, - reindex=True - ) + loc_public_martigny["notification_email"] = "dummy_email@fake.domain" + loc_public_martigny["send_notification"] = True + loc_public_martigny.update(loc_public_martigny.dumps(), dbcommit=True, reindex=True) res, data = postdata( client, - 'api_item.librarian_request', + "api_item.librarian_request", dict( item_pid=item2_lib_martigny.pid, pickup_location_pid=loc_public_saxon.pid, patron_pid=patron_martigny.pid, transaction_library_pid=lib_martigny.pid, - transaction_user_pid=librarian_martigny.pid - ) + transaction_user_pid=librarian_martigny.pid, + ), ) assert res.status_code == 200 - actions = data.get('action_applied') - loan_pid = actions[LoanAction.REQUEST].get('pid') + actions = data.get("action_applied") + loan_pid = actions[LoanAction.REQUEST].get("pid") assert not item2_lib_martigny.is_available() assert len(mailbox) == 1 - assert mailbox[-1].recipients == [ - loc_public_martigny['notification_email']] + assert mailbox[-1].recipients == [loc_public_martigny["notification_email"]] loan = Loan.get_record_by_pid(loan_pid) - assert loan['state'] == LoanState.PENDING + assert loan["state"] == LoanState.PENDING # reset the location - del loc_public_martigny['notification_email'] - del loc_public_martigny['send_notification'] - loc_public_martigny.update( - loc_public_martigny.dumps(), - dbcommit=True, - reindex=True - ) + del loc_public_martigny["notification_email"] + del loc_public_martigny["send_notification"] + loc_public_martigny.update(loc_public_martigny.dumps(), dbcommit=True, reindex=True) # validate request res, _ = postdata( client, - 'api_item.validate_request', + "api_item.validate_request", dict( item_pid=item2_lib_martigny.pid, pid=loan_pid, transaction_library_pid=lib_martigny.pid, - transaction_user_pid=librarian_martigny.pid - ) + transaction_user_pid=librarian_martigny.pid, + ), ) assert res.status_code == 200 
assert not item2_lib_martigny.is_available() @@ -482,19 +508,19 @@ def test_checkout_item_transit(client, mailbox, item2_lib_martigny, assert not item.is_available() loan = Loan.get_record_by_pid(loan_pid) - assert loan['state'] == LoanState.ITEM_IN_TRANSIT_FOR_PICKUP + assert loan["state"] == LoanState.ITEM_IN_TRANSIT_FOR_PICKUP login_user_via_session(client, librarian_saxon.user) # receive res, _ = postdata( client, - 'api_item.receive', + "api_item.receive", dict( item_pid=item2_lib_martigny.pid, pid=loan_pid, transaction_library_pid=lib_martigny.pid, - transaction_user_pid=librarian_martigny.pid - ) + transaction_user_pid=librarian_martigny.pid, + ), ) assert res.status_code == 200 assert not item2_lib_martigny.is_available() @@ -502,76 +528,51 @@ def test_checkout_item_transit(client, mailbox, item2_lib_martigny, assert not item.is_available() loan_before_checkout = get_loan_for_item(item_pid_to_object(item.pid)) - assert loan_before_checkout.get('state') == LoanState.ITEM_AT_DESK + assert loan_before_checkout.get("state") == LoanState.ITEM_AT_DESK # checkout res, _ = postdata( client, - 'api_item.checkout', + "api_item.checkout", dict( item_pid=item2_lib_martigny.pid, patron_pid=patron_martigny.pid, transaction_location_pid=loc_public_martigny.pid, transaction_user_pid=librarian_martigny.pid, - ) + ), ) assert res.status_code == 200 item = Item.get_record_by_pid(item2_lib_martigny.pid) loan_after_checkout = get_loan_for_item(item_pid_to_object(item.pid)) - assert loan_after_checkout.get('state') == LoanState.ITEM_ON_LOAN - assert loan_before_checkout.get('pid') == loan_after_checkout.get('pid') - - -def test_timezone_due_date(client, librarian_martigny, - patron_martigny, loc_public_martigny, - item_type_standard_martigny, - item3_lib_martigny, - circ_policy_short_martigny, - lib_martigny): + assert loan_after_checkout.get("state") == LoanState.ITEM_ON_LOAN + assert loan_before_checkout.get("pid") == loan_after_checkout.get("pid") + + +def test_timezone_due_date( + client, + librarian_martigny, + patron_martigny, + loc_public_martigny, + item_type_standard_martigny, + item3_lib_martigny, + circ_policy_short_martigny, + lib_martigny, +): """Test that timezone affects due date regarding library location.""" # Close the library all days. Except Monday. 
- del lib_martigny['exception_dates'] - lib_martigny['opening_hours'] = [ + del lib_martigny["exception_dates"] + lib_martigny["opening_hours"] = [ { "day": "monday", "is_open": True, - "times": [ - { - "start_time": "07:00", - "end_time": "19:00" - } - ] - }, - { - "day": "tuesday", - "is_open": False, - "times": [] - }, - { - "day": "wednesday", - "is_open": False, - "times": [] + "times": [{"start_time": "07:00", "end_time": "19:00"}], }, - { - "day": "thursday", - "is_open": False, - "times": [] - }, - { - "day": "friday", - "is_open": False, - "times": [] - }, - { - "day": "saturday", - "is_open": False, - "times": [] - }, - { - "day": "sunday", - "is_open": False, - "times": [] - } + {"day": "tuesday", "is_open": False, "times": []}, + {"day": "wednesday", "is_open": False, "times": []}, + {"day": "thursday", "is_open": False, "times": []}, + {"day": "friday", "is_open": False, "times": []}, + {"day": "saturday", "is_open": False, "times": []}, + {"day": "sunday", "is_open": False, "times": []}, ] lib_martigny.update(lib_martigny, dbcommit=True, reindex=True) @@ -581,43 +582,40 @@ def test_timezone_due_date(client, librarian_martigny, item_pid = item.pid patron_pid = patron_martigny.pid from rero_ils.modules.circ_policies.api import CircPolicy + circ_policy = CircPolicy.provide_circ_policy( item.organisation_pid, item.library_pid, patron_martigny.patron_type_pid, - item.item_type_pid - ) - circ_policy['reminders'][0]['days_delay'] = 7 - circ_policy['checkout_duration'] = checkout_duration - circ_policy.update( - circ_policy, - dbcommit=True, - reindex=True + item.item_type_pid, ) + circ_policy["reminders"][0]["days_delay"] = 7 + circ_policy["checkout_duration"] = checkout_duration + circ_policy.update(circ_policy, dbcommit=True, reindex=True) # Login to perform action login_user_via_session(client, librarian_martigny.user) # Checkout the item res, data = postdata( client, - 'api_item.checkout', + "api_item.checkout", dict( item_pid=item_pid, patron_pid=patron_pid, transaction_location_pid=loc_public_martigny.pid, transaction_user_pid=librarian_martigny.pid, - ) + ), ) assert res.status_code == 200 # Get Loan date (should be in UTC) - loan_pid = data.get('action_applied')[LoanAction.CHECKOUT].get('pid') + loan_pid = data.get("action_applied")[LoanAction.CHECKOUT].get("pid") loan = Loan.get_record_by_pid(loan_pid) - loan_end_date = loan.get('end_date') + loan_end_date = loan.get("end_date") # Get next library open date (should be next monday after X-1 days) where # X is checkout_duration - soon = datetime.now(pytz.utc) + timedelta(days=(checkout_duration-1)) + soon = datetime.now(pytz.utc) + timedelta(days=(checkout_duration - 1)) lib = Library.get_record_by_pid(item.library_pid) lib_datetime = lib.next_open(soon) @@ -635,10 +633,14 @@ def test_timezone_due_date(client, librarian_martigny, def test_librarian_request_on_blocked_user( - client, item_lib_martigny, lib_martigny, - librarian_martigny, loc_public_martigny, - patron3_martigny_blocked, - circulation_policies): + client, + item_lib_martigny, + lib_martigny, + librarian_martigny, + loc_public_martigny, + patron3_martigny_blocked, + circulation_policies, +): """Librarian request on blocked user returns a specific 403 message.""" assert item_lib_martigny.is_available() @@ -646,15 +648,15 @@ def test_librarian_request_on_blocked_user( login_user_via_session(client, librarian_martigny.user) res, data = postdata( client, - 'api_item.librarian_request', + "api_item.librarian_request", dict( item_pid=item_lib_martigny.pid, 
patron_pid=patron3_martigny_blocked.pid, pickup_location_pid=loc_public_martigny.pid, transaction_library_pid=lib_martigny.pid, - transaction_user_pid=librarian_martigny.pid - ) + transaction_user_pid=librarian_martigny.pid, + ), ) assert res.status_code == 403 data = get_json(res) - assert 'blocked' in data.get('message') + assert "blocked" in data.get("message") diff --git a/tests/api/loans/test_loans_rest_views.py b/tests/api/loans/test_loans_rest_views.py index 3f18e54ad1..4ee572dd6d 100644 --- a/tests/api/loans/test_loans_rest_views.py +++ b/tests/api/loans/test_loans_rest_views.py @@ -26,40 +26,50 @@ from rero_ils.modules.utils import get_schema_for_resource -def test_loan_can_extend(client, patron_martigny, item_lib_martigny, - loc_public_martigny, librarian_martigny, - circulation_policies, json_header): +def test_loan_can_extend( + client, + patron_martigny, + item_lib_martigny, + loc_public_martigny, + librarian_martigny, + circulation_policies, + json_header, +): """Test if a loan can be extended.""" params = { - 'patron_pid': patron_martigny.pid, - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid, - 'pickup_location_pid': loc_public_martigny.pid, + "patron_pid": patron_martigny.pid, + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, + "pickup_location_pid": loc_public_martigny.pid, } item, loan = item_record_to_a_specific_loan_state( - item=item_lib_martigny, loan_state=LoanState.ITEM_ON_LOAN, - params=params, copy_item=True) + item=item_lib_martigny, + loan_state=LoanState.ITEM_ON_LOAN, + params=params, + copy_item=True, + ) - list_url = url_for( - 'api_loan.can_extend', loan_pid=loan.pid) + list_url = url_for("api_loan.can_extend", loan_pid=loan.pid) login_user(client, patron_martigny) response = client.get(list_url, headers=json_header) assert response.status_code == 200 assert get_json(response) == { - 'can': False, - 'reasons': ['Circulation policies disallows the operation.'] + "can": False, + "reasons": ["Circulation policies disallows the operation."], } def test_loan_circulation_policy( - client, patron_martigny, librarian_martigny, - item_on_loan_martigny_patron_and_loan_on_loan + client, + patron_martigny, + librarian_martigny, + item_on_loan_martigny_patron_and_loan_on_loan, ): """Test dumping of circulation policy related to a loan.""" _, _, loan = item_on_loan_martigny_patron_and_loan_on_loan - base_url_for = 'api_loan.dump_loan_current_circulation_policy' + base_url_for = "api_loan.dump_loan_current_circulation_policy" api_url = url_for(base_url_for, loan_pid=loan.pid) - dummy_url = url_for(base_url_for, loan_pid='dummy_pid') + dummy_url = url_for(base_url_for, loan_pid="dummy_pid") # Patron user cannot access this API login_user_via_session(client, patron_martigny.user) @@ -71,8 +81,8 @@ def test_loan_circulation_policy( response = client.get(api_url) assert response.status_code == 200 data = get_json(response) - cipo_schema = get_schema_for_resource('cipo') - data['$schema'] = current_jsonschemas.path_to_url(cipo_schema) + cipo_schema = get_schema_for_resource("cipo") + data["$schema"] = current_jsonschemas.path_to_url(cipo_schema) response = client.get(dummy_url) assert response.status_code == 404 diff --git a/tests/api/loans/test_loans_utils.py b/tests/api/loans/test_loans_utils.py index 61cececb16..31583c1ea9 100644 --- a/tests/api/loans/test_loans_utils.py +++ b/tests/api/loans/test_loans_utils.py @@ -18,25 +18,30 @@ """Tests loans utils.""" from
rero_ils.modules.items.utils import item_pid_to_object from rero_ils.modules.loans.api import Loan -from rero_ils.modules.loans.utils import loan_build_document_ref, \ - loan_build_item_ref, loan_build_patron_ref +from rero_ils.modules.loans.utils import ( + loan_build_document_ref, + loan_build_item_ref, + loan_build_patron_ref, +) from rero_ils.modules.utils import get_ref_for_pid -def test_loans_build_refs(item_lib_martigny, patron_martigny, - document): +def test_loans_build_refs(item_lib_martigny, patron_martigny, document): """Test functions buildings refs.""" # Create "virtual" Loan (not registered) - loan = Loan({ - 'item_pid': item_pid_to_object(item_lib_martigny.pid), - 'document_pid': document.pid, - 'patron_pid': patron_martigny.pid - }) + loan = Loan( + { + "item_pid": item_pid_to_object(item_lib_martigny.pid), + "document_pid": document.pid, + "patron_pid": patron_martigny.pid, + } + ) - assert loan_build_item_ref(None, loan) == \ - get_ref_for_pid('items', item_lib_martigny.pid) - assert loan_build_document_ref(None, loan) == \ - get_ref_for_pid('doc', document.pid) - assert loan_build_patron_ref(None, loan) == \ - get_ref_for_pid('patrons', patron_martigny.pid) + assert loan_build_item_ref(None, loan) == get_ref_for_pid( + "items", item_lib_martigny.pid + ) + assert loan_build_document_ref(None, loan) == get_ref_for_pid("doc", document.pid) + assert loan_build_patron_ref(None, loan) == get_ref_for_pid( + "patrons", patron_martigny.pid + ) diff --git a/tests/api/local_fields/test_local_fields_permissions.py b/tests/api/local_fields/test_local_fields_permissions.py index f57347598e..56f373c671 100644 --- a/tests/api/local_fields/test_local_fields_permissions.py +++ b/tests/api/local_fields/test_local_fields_permissions.py @@ -19,14 +19,13 @@ from flask import current_app from flask_principal import AnonymousIdentity, identity_changed from flask_security import login_user -from utils import check_permission, flush_index +from utils import check_permission -from rero_ils.modules.local_fields.permissions import \ - LocalFieldPermissionPolicy +from rero_ils.modules.local_fields.permissions import LocalFieldPermissionPolicy from rero_ils.modules.patrons.api import Patron, PatronsSearch -@mock.patch.object(Patron, '_extensions', []) +@mock.patch.object(Patron, "_extensions", []) def test_local_fields_permissions( local_field_martigny, librarian_martigny, local_field_sion ): @@ -38,59 +37,73 @@ def test_local_fields_permissions( identity_changed.send( current_app._get_current_object(), identity=AnonymousIdentity() ) - check_permission(LocalFieldPermissionPolicy, { - 'search': True, - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, None) - check_permission(LocalFieldPermissionPolicy, { - 'search': True, - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, local_field_martigny) + check_permission( + LocalFieldPermissionPolicy, + { + "search": True, + "read": True, + "create": False, + "update": False, + "delete": False, + }, + None, + ) + check_permission( + LocalFieldPermissionPolicy, + { + "search": True, + "read": True, + "create": False, + "update": False, + "delete": False, + }, + local_field_martigny, + ) # Librarian with specific role # - search/read: any items # - create/update/delete: allowed for items of its own library login_user(librarian_martigny.user) - check_permission(LocalFieldPermissionPolicy, { - 'search': True, - 'read': True, - 'create': True, - 'update': True, - 'delete': True - }, local_field_martigny) - 
check_permission(LocalFieldPermissionPolicy, { - 'search': True, - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, local_field_sion) + check_permission( + LocalFieldPermissionPolicy, + {"search": True, "read": True, "create": True, "update": True, "delete": True}, + local_field_martigny, + ) + check_permission( + LocalFieldPermissionPolicy, + { + "search": True, + "read": True, + "create": False, + "update": False, + "delete": False, + }, + local_field_sion, + ) # Librarian without specific role # - search/read: any items # - create/update/delete: disallowed for any items except for # "pro_circulation_manager" as create/update are allowed. - original_roles = librarian_martigny.get('roles', []) - librarian_martigny['roles'] = ['pro_user_manager'] + original_roles = librarian_martigny.get("roles", []) + librarian_martigny["roles"] = ["pro_user_manager"] librarian_martigny.update(librarian_martigny, dbcommit=True, reindex=True) - flush_index(PatronsSearch.Meta.index) + PatronsSearch.flush_and_refresh() login_user(librarian_martigny.user) # to refresh identity ! - check_permission(LocalFieldPermissionPolicy, { - 'search': True, - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, local_field_martigny) + check_permission( + LocalFieldPermissionPolicy, + { + "search": True, + "read": True, + "create": False, + "update": False, + "delete": False, + }, + local_field_martigny, + ) # reset the librarian - librarian_martigny['roles'] = original_roles + librarian_martigny["roles"] = original_roles librarian_martigny.update(librarian_martigny, dbcommit=True, reindex=True) - flush_index(PatronsSearch.Meta.index) + PatronsSearch.flush_and_refresh() diff --git a/tests/api/local_fields/test_local_fields_rest.py b/tests/api/local_fields/test_local_fields_rest.py index 6c7465e173..7baa422715 100644 --- a/tests/api/local_fields/test_local_fields_rest.py +++ b/tests/api/local_fields/test_local_fields_rest.py @@ -22,82 +22,81 @@ import mock from flask import url_for -from utils import VerifyRecordPermissionPatch, flush_index, get_json, postdata +from utils import VerifyRecordPermissionPatch, get_json, postdata from rero_ils.modules.local_fields.api import LocalFieldsSearch -@mock.patch('invenio_records_rest.views.verify_record_permission', - mock.MagicMock(return_value=VerifyRecordPermissionPatch)) +@mock.patch( + "invenio_records_rest.views.verify_record_permission", + mock.MagicMock(return_value=VerifyRecordPermissionPatch), +) def test_local_fields_get(client, local_field_martigny): """Test GET on local fields.""" local_fields_url = url_for( - 'invenio_records_rest.lofi_item', - pid_value=local_field_martigny.pid + "invenio_records_rest.lofi_item", pid_value=local_field_martigny.pid ) res = client.get(local_fields_url) assert res.status_code == 200 data = get_json(res) - assert local_field_martigny == data['metadata'] + assert local_field_martigny == data["metadata"] - list_url = url_for( - 'invenio_records_rest.lofi_list', - pid=local_field_martigny.pid - ) + list_url = url_for("invenio_records_rest.lofi_list", pid=local_field_martigny.pid) res = client.get(list_url) assert res.status_code == 200 data = get_json(res) - assert data['hits']['hits'][0]['metadata'] == \ - local_field_martigny.replace_refs() + assert data["hits"]["hits"][0]["metadata"] == local_field_martigny.replace_refs() -@mock.patch('invenio_records_rest.views.verify_record_permission', - mock.MagicMock(return_value=VerifyRecordPermissionPatch)) -def test_local_fields_post_put_delete(client, 
org_sion, document, - local_field_sion_data, json_header): +@mock.patch( + "invenio_records_rest.views.verify_record_permission", + mock.MagicMock(return_value=VerifyRecordPermissionPatch), +) +def test_local_fields_post_put_delete( + client, org_sion, document, local_field_sion_data, json_header +): """Test POST and PUT on local fields.""" - lf_pid = local_field_sion_data['pid'] - item_url = url_for('invenio_records_rest.lofi_item', pid_value=lf_pid) - list_url = url_for('invenio_records_rest.lofi_list', q=f'pid:{lf_pid}') + lf_pid = local_field_sion_data["pid"] + item_url = url_for("invenio_records_rest.lofi_item", pid_value=lf_pid) + list_url = url_for("invenio_records_rest.lofi_list", q=f"pid:{lf_pid}") - res, data = postdata(client, 'invenio_records_rest.lofi_list', - local_field_sion_data) + res, data = postdata( + client, "invenio_records_rest.lofi_list", local_field_sion_data + ) assert res.status_code == 201 - assert data['metadata'] == local_field_sion_data + assert data["metadata"] == local_field_sion_data res = client.get(item_url) assert res.status_code == 200 data = get_json(res) - assert local_field_sion_data == data['metadata'] + assert local_field_sion_data == data["metadata"] new_lofi = deepcopy(local_field_sion_data) - new_lofi['fields']['field_2'] = ['field 2'] - res = client.put( - item_url, - data=json.dumps(new_lofi), - headers=json_header - ) + new_lofi["fields"]["field_2"] = ["field 2"] + res = client.put(item_url, data=json.dumps(new_lofi), headers=json_header) assert res.status_code == 200 data = get_json(res) - assert data['metadata']['fields']['field_2'][0] == 'field 2' + assert data["metadata"]["fields"]["field_2"][0] == "field 2" res = client.get(list_url) assert res.status_code == 200 - data = get_json(res)['hits']['hits'][0] - assert data['metadata']['fields']['field_2'][0] == 'field 2' + data = get_json(res)["hits"]["hits"][0] + assert data["metadata"]["fields"]["field_2"][0] == "field 2" # Check duplicate record - flush_index(LocalFieldsSearch.Meta.index) - del new_lofi['pid'] - res, _ = postdata(client, 'invenio_records_rest.lofi_list', new_lofi) + LocalFieldsSearch.flush_and_refresh() + del new_lofi["pid"] + res, _ = postdata(client, "invenio_records_rest.lofi_list", new_lofi) assert res.status_code == 400 - res = client.get(url_for( - 'invenio_records_rest.lofi_list', - q=f'organisation.pid:{data["metadata"]["organisation"]["pid"]}' - )) + res = client.get( + url_for( + "invenio_records_rest.lofi_list", + q=f'organisation.pid:{data["metadata"]["organisation"]["pid"]}', + ) + ) data = get_json(res) - assert data['hits']['total']['value'] == 1 + assert data["hits"]["total"]["value"] == 1 # Delete record res = client.delete(item_url) diff --git a/tests/api/locations/test_locations_permissions.py b/tests/api/locations/test_locations_permissions.py index 79ced18e89..b4ff7e9107 100644 --- a/tests/api/locations/test_locations_permissions.py +++ b/tests/api/locations/test_locations_permissions.py @@ -24,101 +24,105 @@ from rero_ils.modules.locations.permissions import LocationPermissionPolicy -def test_location_permissions(patron_martigny, - librarian_martigny, - librarian2_martigny, - system_librarian_martigny, - org_martigny, loc_public_martigny, - loc_public_saxon, loc_public_sion): +def test_location_permissions( + patron_martigny, + librarian_martigny, + librarian2_martigny, + system_librarian_martigny, + org_martigny, + loc_public_martigny, + loc_public_saxon, + loc_public_sion, +): """Test location permissions class.""" # Anonymous user 
identity_changed.send( current_app._get_current_object(), identity=AnonymousIdentity() ) - check_permission(LocationPermissionPolicy, { - 'search': False, - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, {}) + check_permission( + LocationPermissionPolicy, + { + "search": False, + "read": False, + "create": False, + "update": False, + "delete": False, + }, + {}, + ) # Patron # A simple patron can't operate any operation about Location login_user(patron_martigny.user) - check_permission(LocationPermissionPolicy, { - 'search': False, - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, loc_public_martigny) + check_permission( + LocationPermissionPolicy, + { + "search": False, + "read": False, + "create": False, + "update": False, + "delete": False, + }, + loc_public_martigny, + ) # Librarian without 'pro_library_administrator' role # - search : any Library despite organisation owner # - read : only Library for its own organisation # - create/update/delete : disallowed login_user(librarian2_martigny.user) - check_permission(LocationPermissionPolicy, {'search': True}, None) - check_permission(LocationPermissionPolicy, {'create': False}, {}) - check_permission(LocationPermissionPolicy, { - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, loc_public_martigny) - check_permission(LocationPermissionPolicy, { - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, loc_public_sion) + check_permission(LocationPermissionPolicy, {"search": True}, None) + check_permission(LocationPermissionPolicy, {"create": False}, {}) + check_permission( + LocationPermissionPolicy, + {"read": True, "create": False, "update": False, "delete": False}, + loc_public_martigny, + ) + check_permission( + LocationPermissionPolicy, + {"read": False, "create": False, "update": False, "delete": False}, + loc_public_sion, + ) # Librarian with 'pro_library_administrator' role # - search/read : same as common librarian # - create/update/delete : if patron is manager for this library login_user(librarian_martigny.user) - check_permission(LocationPermissionPolicy, { - 'read': True, - 'create': True, - 'update': True, - 'delete': True - }, loc_public_martigny) - check_permission(LocationPermissionPolicy, { - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, loc_public_saxon) - check_permission(LocationPermissionPolicy, { - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, loc_public_sion) + check_permission( + LocationPermissionPolicy, + {"read": True, "create": True, "update": True, "delete": True}, + loc_public_martigny, + ) + check_permission( + LocationPermissionPolicy, + {"read": True, "create": False, "update": False, "delete": False}, + loc_public_saxon, + ) + check_permission( + LocationPermissionPolicy, + {"read": False, "create": False, "update": False, "delete": False}, + loc_public_sion, + ) # SystemLibrarian # - search : any Library despite organisation owner # - read : only Library for its own organisation # - create/update/delete : only Library for its own organisation login_user(system_librarian_martigny.user) - check_permission(LocationPermissionPolicy, { - 'read': True, - 'create': True, - 'update': True, - 'delete': True - }, loc_public_martigny) - check_permission(LocationPermissionPolicy, { - 'read': True, - 'create': True, - 'update': True, - 'delete': True - }, loc_public_saxon) - check_permission(LocationPermissionPolicy, { - 'read': False, - 'create': False, - 
'update': False, - 'delete': False - }, loc_public_sion) + check_permission( + LocationPermissionPolicy, + {"read": True, "create": True, "update": True, "delete": True}, + loc_public_martigny, + ) + check_permission( + LocationPermissionPolicy, + {"read": True, "create": True, "update": True, "delete": True}, + loc_public_saxon, + ) + check_permission( + LocationPermissionPolicy, + {"read": False, "create": False, "update": False, "delete": False}, + loc_public_sion, + ) diff --git a/tests/api/locations/test_locations_rest.py b/tests/api/locations/test_locations_rest.py index ebbc7751f4..59df81afbd 100644 --- a/tests/api/locations/test_locations_rest.py +++ b/tests/api/locations/test_locations_rest.py @@ -23,55 +23,49 @@ import mock from flask import url_for from invenio_accounts.testutils import login_user_via_session -from utils import VerifyRecordPermissionPatch, flush_index, get_json, \ - postdata, to_relative_url +from utils import VerifyRecordPermissionPatch, get_json, postdata, to_relative_url from rero_ils.modules.documents.views import record_library_pickup_locations from rero_ils.modules.locations.api import Location, LocationsSearch -def test_location_pickup_locations(locations, patron_martigny, - patron_sion, loc_public_martigny, - item2_lib_martigny): +def test_location_pickup_locations( + locations, patron_martigny, patron_sion, loc_public_martigny, item2_lib_martigny +): """Test for pickup locations.""" # At the beginning, if we load all locations from fixtures, there are 4 # pickup locations (loc1, loc3, loc5, loc7) pickup_locations = Location.get_pickup_location_pids() - assert set(pickup_locations) == {'loc1', 'loc3', 'loc5', 'loc7'} + assert set(pickup_locations) == {"loc1", "loc3", "loc5", "loc7"} # check pickup restrictions by patron_pid - pickup_locations = Location.get_pickup_location_pids( - patron_pid=patron_martigny.pid) - assert set(pickup_locations) == {'loc1', 'loc3', 'loc5'} - pickup_locations = Location.get_pickup_location_pids( - patron_pid=patron_sion.pid) - assert set(pickup_locations) == {'loc7'} + pickup_locations = Location.get_pickup_location_pids(patron_pid=patron_martigny.pid) + assert set(pickup_locations) == {"loc1", "loc3", "loc5"} + pickup_locations = Location.get_pickup_location_pids(patron_pid=patron_sion.pid) + assert set(pickup_locations) == {"loc7"} # check ill pickup pickup_locations = Location.get_pickup_location_pids(is_ill_pickup=True) - assert set(pickup_locations) == {'loc1', 'loc3', 'loc5'} + assert set(pickup_locations) == {"loc1", "loc3", "loc5"} # check pickup restrictions by item_barcode # * update `loc1` to restrict_pickup_to 'loc3' and 'loc6' # --> 'loc6' isn't a pickup location... 
it's just for test - loc_public_martigny['restrict_pickup_to'] = [ - {'$ref': 'https://bib.rero.ch/api/locations/loc3'}, - {'$ref': 'https://bib.rero.ch/api/locations/loc6'}, + loc_public_martigny["restrict_pickup_to"] = [ + {"$ref": "https://bib.rero.ch/api/locations/loc3"}, + {"$ref": "https://bib.rero.ch/api/locations/loc6"}, ] - loc_public_martigny.update( - loc_public_martigny, - dbcommit=True, - reindex=True - ) - flush_index(LocationsSearch.Meta.index) + loc_public_martigny.update(loc_public_martigny, dbcommit=True, reindex=True) + LocationsSearch.flush_and_refresh() pickup_locations = Location.get_pickup_location_pids( - item_pid=item2_lib_martigny.pid) - assert set(pickup_locations) == {'loc3'} + item_pid=item2_lib_martigny.pid + ) + assert set(pickup_locations) == {"loc3"} pickup_locations = Location.get_pickup_location_pids( - patron_pid=patron_sion.pid, - item_pid=item2_lib_martigny.pid) + patron_pid=patron_sion.pid, item_pid=item2_lib_martigny.pid + ) assert set(pickup_locations) == set([]) # check document.views::record_library_pickup_locations @@ -82,115 +76,104 @@ def test_location_pickup_locations(locations, patron_martigny, assert len(picks) == 2 # reset the location to default value before leaving - del loc_public_martigny['restrict_pickup_to'] - loc_public_martigny.update( - loc_public_martigny, - dbcommit=True, - reindex=True - ) - flush_index(LocationsSearch.Meta.index) + del loc_public_martigny["restrict_pickup_to"] + loc_public_martigny.update(loc_public_martigny, dbcommit=True, reindex=True) + LocationsSearch.flush_and_refresh() -@mock.patch('invenio_records_rest.views.verify_record_permission', - mock.MagicMock(return_value=VerifyRecordPermissionPatch)) -def test_locations_get( - client, loc_public_martigny, lib_martigny, org_martigny -): +@mock.patch( + "invenio_records_rest.views.verify_record_permission", + mock.MagicMock(return_value=VerifyRecordPermissionPatch), +) +def test_locations_get(client, loc_public_martigny, lib_martigny, org_martigny): """Test record retrieval.""" location = loc_public_martigny - item_url = url_for('invenio_records_rest.loc_item', pid_value=location.pid) - list_url = url_for( - 'invenio_records_rest.loc_list', q=f'pid:{location.pid}') + item_url = url_for("invenio_records_rest.loc_item", pid_value=location.pid) + list_url = url_for("invenio_records_rest.loc_list", q=f"pid:{location.pid}") item_url_with_resolve = url_for( - 'invenio_records_rest.loc_item', - pid_value=location.pid, - resolve=1 + "invenio_records_rest.loc_item", pid_value=location.pid, resolve=1 ) res = client.get(item_url) assert res.status_code == 200 - assert res.headers['ETag'] == f'"{location.revision_id}"' + assert res.headers["ETag"] == f'"{location.revision_id}"' data = get_json(res) - assert location.dumps() == data['metadata'] + assert location.dumps() == data["metadata"] # Check metadata - for k in ['created', 'updated', 'metadata', 'links']: + for k in ["created", "updated", "metadata", "links"]: assert k in data # Check self links - res = client.get(to_relative_url(data['links']['self'])) + res = client.get(to_relative_url(data["links"]["self"])) assert res.status_code == 200 assert data == get_json(res) - assert location.dumps() == data['metadata'] + assert location.dumps() == data["metadata"] # check resolve res = client.get(item_url_with_resolve) assert res.status_code == 200 - resolved_data = res.json['metadata'] - assert '$ref' not in resolved_data['library'] and \ - lib_martigny.pid == resolved_data['library']['pid'] and \ - 'lib' in 
resolved_data['library']['type'] + resolved_data = res.json["metadata"] + assert ( + "$ref" not in resolved_data["library"] + and lib_martigny.pid == resolved_data["library"]["pid"] + and "lib" in resolved_data["library"]["type"] + ) res = client.get(list_url) assert res.status_code == 200 - hit = res.json['hits']['hits'][0]['metadata'] + hit = res.json["hits"]["hits"][0]["metadata"] # organisation has been added during the indexing - assert {'pid': org_martigny.pid, 'type': 'org'} == hit.pop('organisation') + assert {"pid": org_martigny.pid, "type": "org"} == hit.pop("organisation") assert hit == resolved_data -@mock.patch('invenio_records_rest.views.verify_record_permission', - mock.MagicMock(return_value=VerifyRecordPermissionPatch)) -def test_locations_post_put_delete(client, lib_martigny, - loc_public_martigny_data, - json_header): +@mock.patch( + "invenio_records_rest.views.verify_record_permission", + mock.MagicMock(return_value=VerifyRecordPermissionPatch), +) +def test_locations_post_put_delete( + client, lib_martigny, loc_public_martigny_data, json_header +): """Test record retrieval.""" - item_url = url_for('invenio_records_rest.loc_item', pid_value='1') - list_url = url_for('invenio_records_rest.loc_list', q='pid:1') + item_url = url_for("invenio_records_rest.loc_item", pid_value="1") + list_url = url_for("invenio_records_rest.loc_list", q="pid:1") location_data = loc_public_martigny_data # Create record / POST - location_data['pid'] = '1' - res, data = postdata( - client, - 'invenio_records_rest.loc_list', - location_data - ) + location_data["pid"] = "1" + res, data = postdata(client, "invenio_records_rest.loc_list", location_data) assert res.status_code == 201 # Check that the returned record matches the given data - assert data['metadata'] == location_data + assert data["metadata"] == location_data res = client.get(item_url) assert res.status_code == 200 data = get_json(res) - assert location_data == data['metadata'] + assert location_data == data["metadata"] # Update record/PUT data = location_data - data['name'] = 'Test Name' - res = client.put( - item_url, - data=json.dumps(data), - headers=json_header - ) + data["name"] = "Test Name" + res = client.put(item_url, data=json.dumps(data), headers=json_header) assert res.status_code == 200 # Check that the returned record matches the given data data = get_json(res) - assert data['metadata']['name'] == 'Test Name' + assert data["metadata"]["name"] == "Test Name" res = client.get(item_url) assert res.status_code == 200 data = get_json(res) - assert data['metadata']['name'] == 'Test Name' + assert data["metadata"]["name"] == "Test Name" res = client.get(list_url) assert res.status_code == 200 - data = get_json(res)['hits']['hits'][0] - assert data['metadata']['name'] == 'Test Name' + data = get_json(res)["hits"]["hits"][0] + assert data["metadata"]["name"] == "Test Name" # Delete record/DELETE res = client.delete(item_url) @@ -204,69 +187,66 @@ def test_location_can_delete(client, item_lib_martigny, loc_public_martigny): """Test can delete a location.""" can, reasons = loc_public_martigny.can_delete assert not can - assert reasons['links']['items'] + assert reasons["links"]["items"] -def test_filtered_locations_get(client, librarian_martigny, - librarian_sion, locations): +def test_filtered_locations_get(client, librarian_martigny, librarian_sion, locations): """Test location filter by organisation.""" # Martigny login_user_via_session(client, librarian_martigny.user) - list_url = url_for('invenio_records_rest.loc_list') + 
list_url = url_for("invenio_records_rest.loc_list") res = client.get(list_url) assert res.status_code == 200 data = get_json(res) - assert data['hits']['total']['value'] == 9 + assert data["hits"]["total"]["value"] == 9 # Sion login_user_via_session(client, librarian_sion.user) - list_url = url_for('invenio_records_rest.loc_list') + list_url = url_for("invenio_records_rest.loc_list") res = client.get(list_url) assert res.status_code == 200 data = get_json(res) - assert data['hits']['total']['value'] == 4 - - -def test_location_secure_api_create(client, lib_fully, lib_martigny, - librarian_martigny, - librarian_sion, - loc_public_martigny_data, - loc_public_fully_data, - system_librarian_martigny, - system_librarian_sion): + assert data["hits"]["total"]["value"] == 4 + + +def test_location_secure_api_create( + client, + lib_fully, + lib_martigny, + librarian_martigny, + librarian_sion, + loc_public_martigny_data, + loc_public_fully_data, + system_librarian_martigny, + system_librarian_sion, +): """Test location secure api create.""" # try to create a pickup location without pickup location name. This should # be failed due to `extended_validation` rules login_user_via_session(client, librarian_martigny.user) - post_entrypoint = 'invenio_records_rest.loc_list' + post_entrypoint = "invenio_records_rest.loc_list" fake_location_data = deepcopy(loc_public_martigny_data) - del fake_location_data['pid'] - if 'pickup_name' in fake_location_data: - del fake_location_data['pickup_name'] - fake_location_data['is_pickup'] = True - res, _ = postdata( - client, - post_entrypoint, - fake_location_data - ) + del fake_location_data["pid"] + if "pickup_name" in fake_location_data: + del fake_location_data["pickup_name"] + fake_location_data["is_pickup"] = True + res, _ = postdata(client, post_entrypoint, fake_location_data) assert get_json(res) == { - 'status': 400, - 'message': 'Validation error: Pickup location name field is required..' 
+ "status": 400, + "message": "Validation error: Pickup location name field is required..", } -def test_location_serializers( - client, locations, librarian_martigny, rero_json_header -): +def test_location_serializers(client, locations, librarian_martigny, rero_json_header): """Test location serializers.""" login_user_via_session(client, librarian_martigny.user) - list_url = url_for('invenio_records_rest.loc_list') + list_url = url_for("invenio_records_rest.loc_list") response = client.get(list_url, headers=rero_json_header) assert response.status_code == 200 assert all( - hit['metadata']['library'].get('code') - and hit['metadata']['library'].get('name') - for hit in response.json['hits']['hits'] + hit["metadata"]["library"].get("code") + and hit["metadata"]["library"].get("name") + for hit in response.json["hits"]["hits"] ) diff --git a/tests/api/notifications/test_notifications_permissions.py b/tests/api/notifications/test_notifications_permissions.py index 1526a552d5..535ea90e43 100644 --- a/tests/api/notifications/test_notifications_permissions.py +++ b/tests/api/notifications/test_notifications_permissions.py @@ -20,18 +20,21 @@ from flask import current_app from flask_principal import AnonymousIdentity, identity_changed from flask_security import login_user -from utils import check_permission, flush_index +from utils import check_permission -from rero_ils.modules.notifications.permissions import \ - NotificationPermissionPolicy +from rero_ils.modules.notifications.permissions import NotificationPermissionPolicy from rero_ils.modules.patrons.api import Patron, PatronsSearch -@mock.patch.object(Patron, '_extensions', []) +@mock.patch.object(Patron, "_extensions", []) def test_notifcations_permissions( - patron_martigny, librarian2_martigny, system_librarian_martigny, - org_martigny, notification_late_sion, notification_late_martigny, - notification_late_saxon + patron_martigny, + librarian2_martigny, + system_librarian_martigny, + org_martigny, + notification_late_sion, + notification_late_martigny, + notification_late_saxon, ): """Test notifications permissions class.""" # Anonymous user & Patron user @@ -40,110 +43,123 @@ def test_notifcations_permissions( identity_changed.send( current_app._get_current_object(), identity=AnonymousIdentity() ) - check_permission(NotificationPermissionPolicy, { - 'search': False, - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, None) - check_permission(NotificationPermissionPolicy, { - 'search': False, - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, notification_late_martigny) + check_permission( + NotificationPermissionPolicy, + { + "search": False, + "read": False, + "create": False, + "update": False, + "delete": False, + }, + None, + ) + check_permission( + NotificationPermissionPolicy, + { + "search": False, + "read": False, + "create": False, + "update": False, + "delete": False, + }, + notification_late_martigny, + ) login_user(patron_martigny.user) - check_permission(NotificationPermissionPolicy, {'create': False}, {}) - check_permission(NotificationPermissionPolicy, { - 'search': True, - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, notification_late_martigny) + check_permission(NotificationPermissionPolicy, {"create": False}, {}) + check_permission( + NotificationPermissionPolicy, + { + "search": True, + "read": True, + "create": False, + "update": False, + "delete": False, + }, + notification_late_martigny, + ) # Librarian without specific role # - 
search/read: any notifications # - create/update/delete: disallowed for any notifications login_user(librarian2_martigny.user) - check_permission(NotificationPermissionPolicy, { - 'search': True, - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, notification_late_martigny) - check_permission(NotificationPermissionPolicy, { - 'search': True, - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, notification_late_saxon) - check_permission(NotificationPermissionPolicy, { - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, notification_late_sion) + check_permission( + NotificationPermissionPolicy, + { + "search": True, + "read": True, + "create": False, + "update": False, + "delete": False, + }, + notification_late_martigny, + ) + check_permission( + NotificationPermissionPolicy, + { + "search": True, + "read": True, + "create": False, + "update": False, + "delete": False, + }, + notification_late_saxon, + ) + check_permission( + NotificationPermissionPolicy, + {"read": False, "create": False, "update": False, "delete": False}, + notification_late_sion, + ) # Librarian administrator - original_roles = librarian2_martigny.get('roles', []) - librarian2_martigny['roles'] = ['pro_library_administrator'] - librarian2_martigny.update(librarian2_martigny, - dbcommit=True, reindex=True) - flush_index(PatronsSearch.Meta.index) + original_roles = librarian2_martigny.get("roles", []) + librarian2_martigny["roles"] = ["pro_library_administrator"] + librarian2_martigny.update(librarian2_martigny, dbcommit=True, reindex=True) + PatronsSearch.flush_and_refresh() login_user(librarian2_martigny.user) - check_permission(NotificationPermissionPolicy, { - 'search': True, - 'read': True, - 'create': True, - 'update': True, - 'delete': True - }, notification_late_martigny) - check_permission(NotificationPermissionPolicy, { - 'search': True, - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, notification_late_saxon) - check_permission(NotificationPermissionPolicy, { - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, notification_late_sion) + check_permission( + NotificationPermissionPolicy, + {"search": True, "read": True, "create": True, "update": True, "delete": True}, + notification_late_martigny, + ) + check_permission( + NotificationPermissionPolicy, + { + "search": True, + "read": True, + "create": False, + "update": False, + "delete": False, + }, + notification_late_saxon, + ) + check_permission( + NotificationPermissionPolicy, + {"read": False, "create": False, "update": False, "delete": False}, + notification_late_sion, + ) # reset the librarian - librarian2_martigny['roles'] = original_roles - librarian2_martigny.update(librarian2_martigny, - dbcommit=True, reindex=True) - flush_index(PatronsSearch.Meta.index) + librarian2_martigny["roles"] = original_roles + librarian2_martigny.update(librarian2_martigny, dbcommit=True, reindex=True) + PatronsSearch.flush_and_refresh() # System librarian (aka. 
full-permissions) # - create/update/delete: allow for notification if its own org login_user(system_librarian_martigny.user) - check_permission(NotificationPermissionPolicy, { - 'search': True, - 'read': True, - 'create': True, - 'update': True, - 'delete': True - }, notification_late_martigny) - check_permission(NotificationPermissionPolicy, { - 'search': True, - 'read': True, - 'create': True, - 'update': True, - 'delete': True - }, notification_late_saxon) - check_permission(NotificationPermissionPolicy, { - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, notification_late_sion) + check_permission( + NotificationPermissionPolicy, + {"search": True, "read": True, "create": True, "update": True, "delete": True}, + notification_late_martigny, + ) + check_permission( + NotificationPermissionPolicy, + {"search": True, "read": True, "create": True, "update": True, "delete": True}, + notification_late_saxon, + ) + check_permission( + NotificationPermissionPolicy, + {"read": False, "create": False, "update": False, "delete": False}, + notification_late_sion, + ) diff --git a/tests/api/notifications/test_notifications_rest.py b/tests/api/notifications/test_notifications_rest.py index 2ba7eea324..7b076c0b3c 100644 --- a/tests/api/notifications/test_notifications_rest.py +++ b/tests/api/notifications/test_notifications_rest.py @@ -27,44 +27,49 @@ import pytz from flask import url_for from invenio_accounts.testutils import login_user_via_session -from utils import VerifyRecordPermissionPatch, flush_index, get_json, \ - item_record_to_a_specific_loan_state, postdata, to_relative_url +from utils import ( + VerifyRecordPermissionPatch, + get_json, + item_record_to_a_specific_loan_state, + postdata, + to_relative_url, +) from rero_ils.modules.api import IlsRecordError from rero_ils.modules.circ_policies.api import DUE_SOON_REMINDER_TYPE from rero_ils.modules.loans.api import Loan from rero_ils.modules.loans.models import LoanAction, LoanState from rero_ils.modules.loans.utils import get_circ_policy -from rero_ils.modules.notifications.api import Notification, \ - NotificationsSearch +from rero_ils.modules.notifications.api import Notification, NotificationsSearch from rero_ils.modules.notifications.dispatcher import Dispatcher -from rero_ils.modules.notifications.models import NotificationStatus, \ - NotificationType -from rero_ils.modules.notifications.tasks import create_notifications, \ - process_notifications +from rero_ils.modules.notifications.models import NotificationStatus, NotificationType +from rero_ils.modules.notifications.tasks import ( + create_notifications, + process_notifications, +) from rero_ils.modules.notifications.utils import get_notification from rero_ils.modules.utils import get_ref_for_pid def test_delayed_notifications( - loan_validated_martigny, item2_lib_martigny, - mailbox, patron_martigny, lib_martigny): + loan_validated_martigny, item2_lib_martigny, mailbox, patron_martigny, lib_martigny +): """Test availability notification created from a loan.""" mailbox.clear() loan = loan_validated_martigny # ensure an availability notification exists (possibly not yet sent) notification = get_notification(loan, NotificationType.AVAILABILITY) assert notification - assert notification.loan_pid == loan_validated_martigny.get('pid') + assert notification.loan_pid == loan_validated_martigny.get("pid") assert notification.item_pid == item2_lib_martigny.pid assert notification.patron_pid == patron_martigny.pid # ensure an at_desk notification exists (possibly 
not yet sent) notification = get_notification(loan, NotificationType.AT_DESK) assert notification - assert notification.loan_pid == loan_validated_martigny.get('pid') + assert notification.loan_pid == loan_validated_martigny.get("pid") assert notification.item_pid == item2_lib_martigny.pid - flush_index(NotificationsSearch.Meta.index) + NotificationsSearch.flush_and_refresh() assert not get_notification(loan, NotificationType.RECALL) for notification_type in NotificationType.ALL_NOTIFICATIONS: @@ -76,80 +81,72 @@ def test_delayed_notifications( # The notification should be sent to library AT_DESK email setting. notification = Notification.get_record(notification.id) effective_recipients = [ - recipient['address'] - for recipient in notification.get('effective_recipients') + recipient["address"] for recipient in notification.get("effective_recipients") ] - assert effective_recipients == \ - [lib_martigny.get_email(NotificationType.AT_DESK)] + assert effective_recipients == [lib_martigny.get_email(NotificationType.AT_DESK)] # One notification will be sent : AVAILABILITY (sent to patron). # Get the last message from mailbox and check it. availability_msg = mailbox[-1] - assert availability_msg.reply_to == lib_martigny.get('email') + assert availability_msg.reply_to == lib_martigny.get("email") mailbox.clear() def test_filtered_notifications_get( - client, notification_availability_martigny, - librarian_martigny, - librarian_sion): + client, notification_availability_martigny, librarian_martigny, librarian_sion +): """Test notification filter by organisation.""" # Martigny login_user_via_session(client, librarian_martigny.user) - list_url = url_for('invenio_records_rest.notif_list') + list_url = url_for("invenio_records_rest.notif_list") res = client.get(list_url) assert res.status_code == 200 data = get_json(res) - assert data['hits']['total']['value'] > 0 + assert data["hits"]["total"]["value"] > 0 # Sion login_user_via_session(client, librarian_sion.user) - list_url = url_for('invenio_records_rest.notif_list') + list_url = url_for("invenio_records_rest.notif_list") res = client.get(list_url) assert res.status_code == 200 data = get_json(res) - assert data['hits']['total']['value'] == 0 + assert data["hits"]["total"]["value"] == 0 -def test_notification_secure_api(client, json_header, - librarian_martigny, - librarian_sion, - dummy_notification, - loan_validated_martigny): +def test_notification_secure_api( + client, + json_header, + librarian_martigny, + librarian_sion, + dummy_notification, + loan_validated_martigny, +): """Test notification secure api create.""" # Martigny login_user_via_session(client, librarian_martigny.user) - post_entrypoint = 'invenio_records_rest.notif_list' - item_url = url_for('invenio_records_rest.notif_item', pid_value='notif1') + post_entrypoint = "invenio_records_rest.notif_list" + item_url = url_for("invenio_records_rest.notif_item", pid_value="notif1") # test notification creation notif = deepcopy(dummy_notification) - loan_pid = loan_validated_martigny.get('pid') - loan_ref = f'https://bib.rero.ch/api/loans/{loan_pid}' - notif['context']['loan'] = {"$ref": loan_ref} - res, _ = postdata( - client, - post_entrypoint, - notif - ) + loan_pid = loan_validated_martigny.get("pid") + loan_ref = f"https://bib.rero.ch/api/loans/{loan_pid}" + notif["context"]["loan"] = {"$ref": loan_ref} + res, _ = postdata(client, post_entrypoint, notif) assert res.status_code == 201 # test get notification res = client.get(item_url) assert res.status_code == 200 data = get_json(res) 
- assert notif == data['metadata'] + assert notif == data["metadata"] # test notification update new_creation_date = datetime.now(timezone.utc).isoformat() - notif['creation_date'] = new_creation_date - res = client.put( - item_url, - data=json.dumps(notif), - headers=json_header - ) + notif["creation_date"] = new_creation_date + res = client.put(item_url, data=json.dumps(notif), headers=json_header) assert res.status_code == 200 # Sion @@ -159,21 +156,13 @@ def test_notification_secure_api(client, json_header, res = client.get(item_url) assert res.status_code == 403 - res, _ = postdata( - client, - post_entrypoint, - notif - ) + res, _ = postdata(client, post_entrypoint, notif) assert res.status_code == 403 # test notification update new_creation_date = datetime.now(timezone.utc).isoformat() - notif['creation_date'] = new_creation_date - res = client.put( - item_url, - data=json.dumps(notif), - headers=json_header - ) + notif["creation_date"] = new_creation_date + res = client.put(item_url, data=json.dumps(notif), headers=json_header) assert res.status_code == 403 # test notification delete @@ -186,125 +175,111 @@ def test_notification_secure_api(client, json_header, assert res.status_code == 204 -@mock.patch('invenio_records_rest.views.verify_record_permission', - mock.MagicMock(return_value=VerifyRecordPermissionPatch)) -def test_notifications_get( - client, notification_availability_martigny): +@mock.patch( + "invenio_records_rest.views.verify_record_permission", + mock.MagicMock(return_value=VerifyRecordPermissionPatch), +) +def test_notifications_get(client, notification_availability_martigny): """Test record retrieval.""" record = notification_availability_martigny - pid = record.get('pid') + pid = record.get("pid") - item_url = url_for( - 'invenio_records_rest.notif_item', pid_value=pid) - list_url = url_for( - 'invenio_records_rest.notif_list', q='pid:' + pid) + item_url = url_for("invenio_records_rest.notif_item", pid_value=pid) + list_url = url_for("invenio_records_rest.notif_list", q="pid:" + pid) item_url_with_resolve = url_for( - 'invenio_records_rest.notif_item', - pid_value=record.pid, - resolve=1, - sources=1 + "invenio_records_rest.notif_item", pid_value=record.pid, resolve=1, sources=1 ) res = client.get(item_url) assert res.status_code == 200 - assert res.headers['ETag'] == f'"{record.revision_id}"' + assert res.headers["ETag"] == f'"{record.revision_id}"' data = get_json(res) - assert record.dumps() == data['metadata'] + assert record.dumps() == data["metadata"] # Check metadata - for k in ['created', 'updated', 'metadata', 'links']: + for k in ["created", "updated", "metadata", "links"]: assert k in data # Check self links - res = client.get(to_relative_url(data['links']['self'])) + res = client.get(to_relative_url(data["links"]["self"])) assert res.status_code == 200 assert data == get_json(res) - assert record.dumps() == data['metadata'] + assert record.dumps() == data["metadata"] # check resolve res = client.get(item_url_with_resolve) assert res.status_code == 200 data = get_json(res) - assert record.replace_refs().dumps() == data['metadata'] + assert record.replace_refs().dumps() == data["metadata"] res = client.get(list_url) assert res.status_code == 200 data = get_json(res) - result = data['hits']['hits'][0]['metadata'] + result = data["hits"]["hits"][0]["metadata"] # organisation has been added during the indexing - del result['organisation'] + del result["organisation"] assert result == record.replace_refs() record.delete(dbcommit=True, delindex=True) - 
flush_index(NotificationsSearch.Meta.index) + NotificationsSearch.flush_and_refresh() -@mock.patch('invenio_records_rest.views.verify_record_permission', - mock.MagicMock(return_value=VerifyRecordPermissionPatch)) +@mock.patch( + "invenio_records_rest.views.verify_record_permission", + mock.MagicMock(return_value=VerifyRecordPermissionPatch), +) def test_notifications_post_put_delete( - client, dummy_notification, loan_validated_martigny, json_header): + client, dummy_notification, loan_validated_martigny, json_header +): """Test record delete and update.""" record = deepcopy(dummy_notification) - del record['pid'] - loan_ref = get_ref_for_pid('loans', loan_validated_martigny.get('pid')) - record['context']['loan'] = {'$ref': loan_ref} - notif = Notification.create( - record, - dbcommit=True, - reindex=True, - delete_pid=True - ) + del record["pid"] + loan_ref = get_ref_for_pid("loans", loan_validated_martigny.get("pid")) + record["context"]["loan"] = {"$ref": loan_ref} + notif = Notification.create(record, dbcommit=True, reindex=True, delete_pid=True) assert notif == record - flush_index(NotificationsSearch.Meta.index) - pid = notif.get('pid') + NotificationsSearch.flush_and_refresh() + pid = notif.get("pid") - item_url = url_for('invenio_records_rest.notif_item', pid_value=pid) - list_url = url_for('invenio_records_rest.notif_list', q='pid:pid') + item_url = url_for("invenio_records_rest.notif_item", pid_value=pid) + list_url = url_for("invenio_records_rest.notif_list", q="pid:pid") new_record = deepcopy(record) # Create record / POST - new_record['pid'] = 'x' - res, data = postdata( - client, - 'invenio_records_rest.notif_list', - new_record - ) + new_record["pid"] = "x" + res, data = postdata(client, "invenio_records_rest.notif_list", new_record) assert res.status_code == 201 - flush_index(NotificationsSearch.Meta.index) + NotificationsSearch.flush_and_refresh() # Check that the returned record matches the given data - assert data['metadata'] == new_record + assert data["metadata"] == new_record res = client.get(item_url) assert res.status_code == 200 data = get_json(res) - assert notif == data['metadata'] + assert notif == data["metadata"] # Update record/PUT - data = data['metadata'] - data['notification_type'] = NotificationType.DUE_SOON - res = client.put( - item_url, - data=json.dumps(data), - headers=json_header - ) + data = data["metadata"] + data["notification_type"] = NotificationType.DUE_SOON + res = client.put(item_url, data=json.dumps(data), headers=json_header) assert res.status_code == 200 # Check that the returned record matches the given data data = get_json(res) - assert data['metadata']['notification_type'] == NotificationType.DUE_SOON + assert data["metadata"]["notification_type"] == NotificationType.DUE_SOON res = client.get(item_url) assert res.status_code == 200 data = get_json(res) - assert data['metadata']['notification_type'] == NotificationType.DUE_SOON + assert data["metadata"]["notification_type"] == NotificationType.DUE_SOON res = client.get(list_url) assert res.status_code == 200 @@ -324,46 +299,53 @@ def test_notifications_post_put_delete( def test_recall_notification( - client, patron_sion, lib_sion, json_header, patron2_martigny, - patron_martigny, item_lib_sion, librarian_sion, circulation_policies, - loc_public_sion, mailbox + client, + patron_sion, + lib_sion, + json_header, + patron2_martigny, + patron_martigny, + item_lib_sion, + librarian_sion, + circulation_policies, + loc_public_sion, + mailbox, ): """Test recall notification.""" mailbox.clear() 
login_user_via_session(client, librarian_sion.user) res, data = postdata( client, - 'api_item.checkout', + "api_item.checkout", dict( item_pid=item_lib_sion.pid, patron_pid=patron_sion.pid, transaction_location_pid=loc_public_sion.pid, transaction_user_pid=librarian_sion.pid, - ) + ), ) assert res.status_code == 200 - loan_pid = data.get('action_applied')[LoanAction.CHECKOUT].get('pid') + loan_pid = data.get("action_applied")[LoanAction.CHECKOUT].get("pid") loan = Loan.get_record_by_pid(loan_pid) assert not get_notification(loan, NotificationType.RECALL) # test notification permissions res, data = postdata( client, - 'api_item.librarian_request', + "api_item.librarian_request", dict( item_pid=item_lib_sion.pid, pickup_location_pid=loc_public_sion.pid, patron_pid=patron2_martigny.pid, transaction_library_pid=lib_sion.pid, - transaction_user_pid=librarian_sion.pid - ) + transaction_user_pid=librarian_sion.pid, + ), ) assert res.status_code == 200 - request_loan_pid = data.get( - 'action_applied')[LoanAction.REQUEST].get('pid') + request_loan_pid = data.get("action_applied")[LoanAction.REQUEST].get("pid") request_loan = Loan.get_record_by_pid(request_loan_pid) - flush_index(NotificationsSearch.Meta.index) + NotificationsSearch.flush_and_refresh() notification = get_notification(loan, NotificationType.RECALL) assert notification and notification.loan_pid == loan.pid @@ -374,50 +356,47 @@ def test_recall_notification( for notification_type in NotificationType.ALL_NOTIFICATIONS: process_notifications(notification_type) # one new email for the patron - assert mailbox[-1].recipients == [patron_sion.dumps()['email']] + assert mailbox[-1].recipients == [patron_sion.dumps()["email"]] assert loan.is_notified(notification_type=NotificationType.RECALL) mailbox.clear() # cancel request res, _ = postdata( client, - 'api_item.cancel_item_request', + "api_item.cancel_item_request", dict( item_pid=item_lib_sion.pid, pid=request_loan_pid, transaction_user_pid=librarian_sion.pid, - transaction_location_pid=loc_public_sion.pid - ) + transaction_location_pid=loc_public_sion.pid, + ), ) assert res.status_code == 200 # no new notification is sent for the second time res, _ = postdata( client, - 'api_item.librarian_request', + "api_item.librarian_request", dict( item_pid=item_lib_sion.pid, pickup_location_pid=loc_public_sion.pid, patron_pid=patron2_martigny.pid, transaction_library_pid=lib_sion.pid, - transaction_user_pid=librarian_sion.pid - ) + transaction_user_pid=librarian_sion.pid, + ), ) assert res.status_code == 200 - flush_index(NotificationsSearch.Meta.index) + NotificationsSearch.flush_and_refresh() - assert not loan.is_notified( - notification_type=NotificationType.RECALL, counter=1) - assert not loan.is_notified( - notification_type=NotificationType.AVAILABILITY) + assert not loan.is_notified(notification_type=NotificationType.RECALL, counter=1) + assert not loan.is_notified(notification_type=NotificationType.AVAILABILITY) assert not get_notification(loan, NotificationType.AVAILABILITY) assert not get_notification(request_loan, NotificationType.REQUEST) - assert not request_loan.is_notified( - notification_type=NotificationType.REQUEST) + assert not request_loan.is_notified(notification_type=NotificationType.REQUEST) assert len(mailbox) == 0 params = { - 'transaction_location_pid': loc_public_sion.pid, - 'transaction_user_pid': librarian_sion.pid + "transaction_location_pid": loc_public_sion.pid, + "transaction_user_pid": librarian_sion.pid, } item_lib_sion.checkin(**params) mailbox.clear() @@ -426,8 
+405,14 @@ def test_recall_notification( def test_recall2_notifications( - client, librarian_martigny, item_lib_martigny, patron_martigny, - patron2_martigny, loc_public_martigny, circulation_policies, mailbox + client, + librarian_martigny, + item_lib_martigny, + patron_martigny, + patron2_martigny, + loc_public_martigny, + circulation_policies, + mailbox, ): mailbox.clear() login_user_via_session(client, librarian_martigny.user) @@ -439,21 +424,21 @@ def test_recall2_notifications( item, actions = item_lib_martigny.request( patron_pid=patron_martigny.pid, transaction_location_pid=loc_public_martigny.pid, - transaction_user_pid=librarian_martigny.pid + transaction_user_pid=librarian_martigny.pid, ) - loan_request1 = Loan.get_record_by_pid(actions['request']['pid']) + loan_request1 = Loan.get_record_by_pid(actions["request"]["pid"]) item, actions = item_lib_martigny.request( patron_pid=patron2_martigny.pid, transaction_location_pid=loc_public_martigny.pid, - transaction_user_pid=librarian_martigny.pid + transaction_user_pid=librarian_martigny.pid, ) - loan_request2 = Loan.get_record_by_pid(actions['request']['pid']) + loan_request2 = Loan.get_record_by_pid(actions["request"]["pid"]) assert loan_request1.pid != loan_request2.pid item_lib_martigny.validate_request( transaction_location_pid=loc_public_martigny.pid, transaction_user_pid=librarian_martigny.pid, - pid=loan_request1.pid + pid=loan_request1.pid, ) # Checkout the first request, as another request already exists then a @@ -462,12 +447,13 @@ def test_recall2_notifications( item_lib_martigny.checkout( transaction_location_pid=loc_public_martigny.pid, transaction_user_pid=librarian_martigny.pid, - pid=loan_request1.pid + pid=loan_request1.pid, ) process_notifications(NotificationType.RECALL) - flush_index(NotificationsSearch.Meta.index) + NotificationsSearch.flush_and_refresh() assert not loan_request1.is_notified( - notification_type=NotificationType.RECALL, counter=1) + notification_type=NotificationType.RECALL, counter=1 + ) assert len(mailbox) == 1 # RESET @@ -476,37 +462,48 @@ def test_recall2_notifications( item_lib_martigny.cancel_item_request( loan_request2.pid, transaction_user_pid=patron2_martigny.pid, - transaction_location_pid=loc_public_martigny.pid + transaction_location_pid=loc_public_martigny.pid, ) item_lib_martigny.checkin( transaction_location_pid=loc_public_martigny.pid, - transaction_user_pid=librarian_martigny.pid + transaction_user_pid=librarian_martigny.pid, ) def test_recall_notification_with_disabled_config( - app, client, librarian_martigny, item3_lib_martigny, - patron_sion, loc_public_martigny, patron2_martigny, lib_martigny, - circulation_policies, mailbox + app, + client, + librarian_martigny, + item3_lib_martigny, + patron_sion, + loc_public_martigny, + patron2_martigny, + lib_martigny, + circulation_policies, + mailbox, ): """Test the recall notification if app config disable it.""" - initial_config = deepcopy( - app.config.get('RERO_ILS_DISABLED_NOTIFICATION_TYPE', [])) - app.config.setdefault('RERO_ILS_DISABLED_NOTIFICATION_TYPE', []).append( - NotificationType.RECALL) + initial_config = deepcopy(app.config.get("RERO_ILS_DISABLED_NOTIFICATION_TYPE", [])) + app.config.setdefault("RERO_ILS_DISABLED_NOTIFICATION_TYPE", []).append( + NotificationType.RECALL + ) # STEP#0 :: INIT # Create a checkout mailbox.clear() login_user_via_session(client, librarian_martigny.user) - res, data = postdata(client, 'api_item.checkout', dict( - item_pid=item3_lib_martigny.pid, - patron_pid=patron_sion.pid, - 
transaction_location_pid=loc_public_martigny.pid, - transaction_user_pid=librarian_martigny.pid, - )) + res, data = postdata( + client, + "api_item.checkout", + dict( + item_pid=item3_lib_martigny.pid, + patron_pid=patron_sion.pid, + transaction_location_pid=loc_public_martigny.pid, + transaction_user_pid=librarian_martigny.pid, + ), + ) assert res.status_code == 200 - loan_pid = data.get('action_applied')[LoanAction.CHECKOUT].get('pid') + loan_pid = data.get("action_applied")[LoanAction.CHECKOUT].get("pid") loan = Loan.get_record_by_pid(loan_pid) assert not get_notification(loan, NotificationType.RECALL) @@ -514,17 +511,20 @@ def test_recall_notification_with_disabled_config( # A request on a checkout item should be create a 'recall' notification. # But as 'recall' type is disabled from app config, no notification # must be created/sent. - res, data = postdata(client, 'api_item.librarian_request', dict( - item_pid=item3_lib_martigny.pid, - pickup_location_pid=loc_public_martigny.pid, - patron_pid=patron2_martigny.pid, - transaction_library_pid=lib_martigny.pid, - transaction_user_pid=librarian_martigny.pid - )) - request_loan_pid = data.get( - 'action_applied')[LoanAction.REQUEST].get('pid') + res, data = postdata( + client, + "api_item.librarian_request", + dict( + item_pid=item3_lib_martigny.pid, + pickup_location_pid=loc_public_martigny.pid, + patron_pid=patron2_martigny.pid, + transaction_library_pid=lib_martigny.pid, + transaction_user_pid=librarian_martigny.pid, + ), + ) + request_loan_pid = data.get("action_applied")[LoanAction.REQUEST].get("pid") assert res.status_code == 200 - flush_index(NotificationsSearch.Meta.index) + NotificationsSearch.flush_and_refresh() notification = get_notification(loan, NotificationType.RECALL) assert not notification @@ -533,17 +533,21 @@ def test_recall_notification_with_disabled_config( # RESET # * Reset application configuration # * Cancel the request, checkin the item - app.config['RERO_ILS_DISABLED_NOTIFICATION_TYPE'] = initial_config - res, _ = postdata(client, 'api_item.cancel_item_request', dict( - item_pid=item3_lib_martigny.pid, - pid=request_loan_pid, - transaction_user_pid=librarian_martigny.pid, - transaction_library_pid=lib_martigny.pid - )) + app.config["RERO_ILS_DISABLED_NOTIFICATION_TYPE"] = initial_config + res, _ = postdata( + client, + "api_item.cancel_item_request", + dict( + item_pid=item3_lib_martigny.pid, + pid=request_loan_pid, + transaction_user_pid=librarian_martigny.pid, + transaction_library_pid=lib_martigny.pid, + ), + ) assert res.status_code == 200 params = { - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, } item3_lib_martigny.checkin(**params) for notification_type in NotificationType.ALL_NOTIFICATIONS: @@ -551,45 +555,50 @@ def test_recall_notification_with_disabled_config( def test_recall_notification_without_email( - client, patron_sion_without_email1, lib_martigny, - json_header, patron2_martigny, - item3_lib_martigny, librarian_martigny, - circulation_policies, loc_public_martigny, - mailbox): + client, + patron_sion_without_email1, + lib_martigny, + json_header, + patron2_martigny, + item3_lib_martigny, + librarian_martigny, + circulation_policies, + loc_public_martigny, + mailbox, +): """Test recall notification.""" mailbox.clear() login_user_via_session(client, librarian_martigny.user) res, data = postdata( client, - 'api_item.checkout', + 
"api_item.checkout", dict( item_pid=item3_lib_martigny.pid, patron_pid=patron_sion_without_email1.pid, transaction_location_pid=loc_public_martigny.pid, transaction_user_pid=librarian_martigny.pid, - ) + ), ) assert res.status_code == 200 - loan_pid = data.get('action_applied')[LoanAction.CHECKOUT].get('pid') + loan_pid = data.get("action_applied")[LoanAction.CHECKOUT].get("pid") loan = Loan.get_record_by_pid(loan_pid) assert not get_notification(loan, NotificationType.RECALL) # test notification res, data = postdata( client, - 'api_item.librarian_request', + "api_item.librarian_request", dict( item_pid=item3_lib_martigny.pid, pickup_location_pid=loc_public_martigny.pid, patron_pid=patron2_martigny.pid, transaction_library_pid=lib_martigny.pid, - transaction_user_pid=librarian_martigny.pid - ) + transaction_user_pid=librarian_martigny.pid, + ), ) - request_loan_pid = data.get( - 'action_applied')[LoanAction.REQUEST].get('pid') + request_loan_pid = data.get("action_applied")[LoanAction.REQUEST].get("pid") assert res.status_code == 200 - flush_index(NotificationsSearch.Meta.index) + NotificationsSearch.flush_and_refresh() notification = get_notification(loan, NotificationType.RECALL) assert notification and notification.loan_pid == loan.pid @@ -598,26 +607,26 @@ def test_recall_notification_without_email( for notification_type in NotificationType.ALL_NOTIFICATIONS: process_notifications(notification_type) # one new email for the librarian - recipient = lib_martigny.get_email(notification['notification_type']) + recipient = lib_martigny.get_email(notification["notification_type"]) assert recipient assert mailbox[0].recipients == [recipient] # check the address block - assert patron2_martigny.dumps()['street'] in mailbox[0].body + assert patron2_martigny.dumps()["street"] in mailbox[0].body mailbox.clear() params = { - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, } # cancel request res, _ = postdata( client, - 'api_item.cancel_item_request', + "api_item.cancel_item_request", dict( item_pid=item3_lib_martigny.pid, pid=request_loan_pid, transaction_user_pid=librarian_martigny.pid, - transaction_library_pid=lib_martigny.pid - ) + transaction_library_pid=lib_martigny.pid, + ), ) assert res.status_code == 200 item3_lib_martigny.checkin(**params) @@ -626,68 +635,73 @@ def test_recall_notification_without_email( def test_recall_notification_with_patron_additional_email_only( - client, patron_sion_with_additional_email, lib_martigny, - json_header, patron2_martigny, - item3_lib_martigny, librarian_martigny, - circulation_policies, loc_public_martigny, - mailbox): + client, + patron_sion_with_additional_email, + lib_martigny, + json_header, + patron2_martigny, + item3_lib_martigny, + librarian_martigny, + circulation_policies, + loc_public_martigny, + mailbox, +): """Test recall notification.""" mailbox.clear() login_user_via_session(client, librarian_martigny.user) res, data = postdata( client, - 'api_item.checkout', + "api_item.checkout", dict( item_pid=item3_lib_martigny.pid, patron_pid=patron_sion_with_additional_email.pid, transaction_location_pid=loc_public_martigny.pid, transaction_user_pid=librarian_martigny.pid, - ) + ), ) assert res.status_code == 200 - loan_pid = data.get('action_applied')[LoanAction.CHECKOUT].get('pid') + loan_pid = data.get("action_applied")[LoanAction.CHECKOUT].get("pid") loan = Loan.get_record_by_pid(loan_pid) 
assert not get_notification(loan, NotificationType.RECALL) # test notification res, data = postdata( client, - 'api_item.librarian_request', + "api_item.librarian_request", dict( item_pid=item3_lib_martigny.pid, pickup_location_pid=loc_public_martigny.pid, patron_pid=patron2_martigny.pid, transaction_library_pid=lib_martigny.pid, - transaction_user_pid=librarian_martigny.pid - ) + transaction_user_pid=librarian_martigny.pid, + ), ) assert res.status_code == 200 - flush_index(NotificationsSearch.Meta.index) + NotificationsSearch.flush_and_refresh() - request_loan_pid = data.get( - 'action_applied')[LoanAction.REQUEST].get('pid') + request_loan_pid = data.get("action_applied")[LoanAction.REQUEST].get("pid") for notification_type in NotificationType.ALL_NOTIFICATIONS: process_notifications(notification_type) # one new email for the librarian - assert mailbox[0].recipients == \ - [patron_sion_with_additional_email[ - 'patron']['additional_communication_email']] + assert mailbox[0].recipients == [ + patron_sion_with_additional_email["patron"]["additional_communication_email"] + ] mailbox.clear() params = { - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, } # cancel request res, _ = postdata( client, - 'api_item.cancel_item_request', + "api_item.cancel_item_request", dict( item_pid=item3_lib_martigny.pid, pid=request_loan_pid, transaction_user_pid=librarian_martigny.pid, - transaction_library_pid=lib_martigny.pid - ) + transaction_library_pid=lib_martigny.pid, + ), ) assert res.status_code == 200 item3_lib_martigny.checkin(**params) @@ -697,55 +711,63 @@ def test_recall_notification_with_patron_additional_email_only( def test_notification_templates_list(client, librarian_martigny): """Test notification templates list API.""" - url = url_for('notifications.list_available_template') + url = url_for("notifications.list_available_template") res = client.get(url) assert res.status_code == 401 login_user_via_session(client, librarian_martigny.user) res = client.get(url) assert res.status_code == 200 data = get_json(res) - assert isinstance(data.get('templates'), list) + assert isinstance(data.get("templates"), list) -def test_multiple_notifications(client, patron_martigny, patron_sion, - lib_martigny, lib_fully, - item_lib_martigny, librarian_martigny, - loc_public_martigny, circulation_policies, - loc_public_fully, mailbox): +def test_multiple_notifications( + client, + patron_martigny, + patron_sion, + lib_martigny, + lib_fully, + item_lib_martigny, + librarian_martigny, + loc_public_martigny, + circulation_policies, + loc_public_fully, + mailbox, +): """Test multiple notifications.""" mailbox.clear() login_user_via_session(client, librarian_martigny.user) res, data = postdata( client, - 'api_item.librarian_request', + "api_item.librarian_request", dict( item_pid=item_lib_martigny.pid, pickup_location_pid=loc_public_martigny.pid, patron_pid=patron_martigny.pid, transaction_library_pid=lib_martigny.pid, - transaction_user_pid=librarian_martigny.pid - ) + transaction_user_pid=librarian_martigny.pid, + ), ) assert res.status_code == 200 - request_loan_pid = data.get( - 'action_applied')[LoanAction.REQUEST].get('pid') + request_loan_pid = data.get("action_applied")[LoanAction.REQUEST].get("pid") - flush_index(NotificationsSearch.Meta.index) + NotificationsSearch.flush_and_refresh() # REQUEST loan = Loan.get_record_by_pid(request_loan_pid) assert 
loan.state == LoanState.PENDING assert mailbox[-1].recipients == [ - lib_martigny.get('notification_settings')[5].get('email')] + lib_martigny.get("notification_settings")[5].get("email") + ] mailbox.clear() # validate request params = { - 'transaction_location_pid': loc_public_fully.pid, - 'transaction_user_pid': librarian_martigny.pid, - 'pid': loan.pid + "transaction_location_pid": loc_public_fully.pid, + "transaction_user_pid": librarian_martigny.pid, + "pid": loan.pid, } item_lib_martigny.validate_request(**params) loan = Loan.get_record_by_pid(request_loan_pid) @@ -760,14 +782,15 @@ def test_multiple_notifications(client, patron_martigny, patron_sion, loan = Loan.get_record_by_pid(request_loan_pid) assert loan.state == LoanState.ITEM_IN_TRANSIT_TO_HOUSE assert mailbox[-1].recipients == [ - lib_fully.get('notification_settings')[5].get('email')] + lib_fully.get("notification_settings")[5].get("email") + ] mailbox.clear() # back on shelf: required to restore the initial stat for other tests params = { - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid, - 'pid': loan.pid + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, + "pid": loan.pid, } item_lib_martigny.receive(**params) for notification_type in NotificationType.ALL_NOTIFICATIONS: @@ -775,140 +798,162 @@ def test_multiple_notifications(client, patron_martigny, patron_sion, def test_request_notifications_temp_item_type( - client, patron_martigny, patron_sion, lib_martigny, lib_fully, - item_lib_martigny, librarian_martigny, loc_public_martigny, - circulation_policies, loc_public_fully, item_type_missing_martigny, mailbox + client, + patron_martigny, + patron_sion, + lib_martigny, + lib_fully, + item_lib_martigny, + librarian_martigny, + loc_public_martigny, + circulation_policies, + loc_public_fully, + item_type_missing_martigny, + mailbox, ): """Test request notifications with item type with negative availability.""" mailbox.clear() - item_lib_martigny['temporary_item_type'] = { - '$ref': get_ref_for_pid('itty', item_type_missing_martigny.pid) + item_lib_martigny["temporary_item_type"] = { + "$ref": get_ref_for_pid("itty", item_type_missing_martigny.pid) } item_lib_martigny.update(item_lib_martigny, dbcommit=True, reindex=True) login_user_via_session(client, librarian_martigny.user) res, data = postdata( client, - 'api_item.librarian_request', + "api_item.librarian_request", dict( item_pid=item_lib_martigny.pid, pickup_location_pid=loc_public_fully.pid, patron_pid=patron_martigny.pid, transaction_library_pid=lib_martigny.pid, - transaction_user_pid=librarian_martigny.pid - ) + transaction_user_pid=librarian_martigny.pid, + ), ) assert res.status_code == 200 - request_loan_pid = data.get( - 'action_applied')[LoanAction.REQUEST].get('pid') + request_loan_pid = data.get("action_applied")[LoanAction.REQUEST].get("pid") - flush_index(NotificationsSearch.Meta.index) + NotificationsSearch.flush_and_refresh() assert len(mailbox) == 0 # cancel request res, _ = postdata( client, - 'api_item.cancel_item_request', + "api_item.cancel_item_request", dict( item_pid=item_lib_martigny.pid, pid=request_loan_pid, transaction_user_pid=librarian_martigny.pid, - transaction_library_pid=lib_martigny.pid - ) + transaction_library_pid=lib_martigny.pid, + ), ) assert res.status_code == 200 mailbox.clear() - del item_lib_martigny['temporary_item_type'] + del item_lib_martigny["temporary_item_type"] item_lib_martigny.update(item_lib_martigny, 
dbcommit=True, reindex=True) -def test_request_notifications(client, patron_martigny, patron_sion, - lib_martigny, - lib_fully, - item_lib_martigny, librarian_martigny, - loc_public_martigny, circulation_policies, - loc_public_fully, mailbox): +def test_request_notifications( + client, + patron_martigny, + patron_sion, + lib_martigny, + lib_fully, + item_lib_martigny, + librarian_martigny, + loc_public_martigny, + circulation_policies, + loc_public_fully, + mailbox, +): """Test request notifications.""" mailbox.clear() login_user_via_session(client, librarian_martigny.user) res, data = postdata( client, - 'api_item.librarian_request', + "api_item.librarian_request", dict( item_pid=item_lib_martigny.pid, pickup_location_pid=loc_public_fully.pid, patron_pid=patron_martigny.pid, transaction_library_pid=lib_martigny.pid, - transaction_user_pid=librarian_martigny.pid - ) + transaction_user_pid=librarian_martigny.pid, + ), ) assert res.status_code == 200 - request_loan_pid = data.get( - 'action_applied')[LoanAction.REQUEST].get('pid') + request_loan_pid = data.get("action_applied")[LoanAction.REQUEST].get("pid") - flush_index(NotificationsSearch.Meta.index) + NotificationsSearch.flush_and_refresh() assert len(mailbox) == 1 assert mailbox[-1].recipients == [ - lib_martigny.get('notification_settings')[5].get('email')] + lib_martigny.get("notification_settings")[5].get("email") + ] # cancel request res, _ = postdata( client, - 'api_item.cancel_item_request', + "api_item.cancel_item_request", dict( item_pid=item_lib_martigny.pid, pid=request_loan_pid, transaction_user_pid=librarian_martigny.pid, - transaction_library_pid=lib_martigny.pid - ) + transaction_library_pid=lib_martigny.pid, + ), ) assert res.status_code == 200 mailbox.clear() -@mock.patch.object(Dispatcher, '_process_notification', - mock.MagicMock(side_effect=Exception('Test!'))) -def test_dispatch_error(client, patron_martigny, patron_sion, - lib_martigny, - lib_fully, - item_lib_martigny, librarian_martigny, - loc_public_martigny, circulation_policies, - loc_public_fully, mailbox): +@mock.patch.object( + Dispatcher, "_process_notification", mock.MagicMock(side_effect=Exception("Test!")) +) +def test_dispatch_error( + client, + patron_martigny, + patron_sion, + lib_martigny, + lib_fully, + item_lib_martigny, + librarian_martigny, + loc_public_martigny, + circulation_policies, + loc_public_fully, + mailbox, +): """Test request notifications.""" mailbox.clear() login_user_via_session(client, librarian_martigny.user) res, data = postdata( client, - 'api_item.librarian_request', + "api_item.librarian_request", dict( item_pid=item_lib_martigny.pid, pickup_location_pid=loc_public_fully.pid, patron_pid=patron_martigny.pid, transaction_library_pid=lib_martigny.pid, - transaction_user_pid=librarian_martigny.pid - ) + transaction_user_pid=librarian_martigny.pid, + ), ) assert res.status_code == 200 - request_loan_pid = data.get( - 'action_applied')[LoanAction.REQUEST].get('pid') + request_loan_pid = data.get("action_applied")[LoanAction.REQUEST].get("pid") # check that the email has not been sent - flush_index(NotificationsSearch.Meta.index) + NotificationsSearch.flush_and_refresh() assert len(mailbox) == 0 # cancel request res, _ = postdata( client, - 'api_item.cancel_item_request', + "api_item.cancel_item_request", dict( item_pid=item_lib_martigny.pid, pid=request_loan_pid, transaction_user_pid=librarian_martigny.pid, - transaction_library_pid=lib_martigny.pid - ) + transaction_library_pid=lib_martigny.pid, + ), ) assert res.status_code == 200 
mailbox.clear() @@ -916,53 +961,63 @@ def test_dispatch_error(client, patron_martigny, patron_sion, def test_multiple_request_booking_notifications( client, - patron_martigny, patron2_martigny, patron4_martigny, - librarian_martigny, librarian_sion, librarian_saxon, - loc_public_martigny, loc_public_sion, loc_public_saxon, - lib_martigny, lib_sion, lib_saxon, - item_lib_martigny, circulation_policies, mailbox + patron_martigny, + patron2_martigny, + patron4_martigny, + librarian_martigny, + librarian_sion, + librarian_saxon, + loc_public_martigny, + loc_public_sion, + loc_public_saxon, + lib_martigny, + lib_sion, + lib_saxon, + item_lib_martigny, + circulation_policies, + mailbox, ): """Test multiple requests booking notifications.""" # request 1 login_user_via_session(client, librarian_martigny.user) res, data = postdata( client, - 'api_item.librarian_request', + "api_item.librarian_request", dict( item_pid=item_lib_martigny.pid, pickup_location_pid=loc_public_martigny.pid, patron_pid=patron_martigny.pid, transaction_library_pid=lib_martigny.pid, - transaction_user_pid=librarian_martigny.pid - ) + transaction_user_pid=librarian_martigny.pid, + ), ) assert res.status_code == 200 # request 2 login_user_via_session(client, librarian_sion.user) res, data = postdata( client, - 'api_item.librarian_request', + "api_item.librarian_request", dict( item_pid=item_lib_martigny.pid, pickup_location_pid=loc_public_sion.pid, patron_pid=patron2_martigny.pid, transaction_library_pid=lib_sion.pid, - transaction_user_pid=librarian_sion.pid - ) + transaction_user_pid=librarian_sion.pid, + ), ) assert res.status_code == 200 # request 3 login_user_via_session(client, librarian_saxon.user) res, data = postdata( client, - 'api_item.librarian_request', + "api_item.librarian_request", dict( item_pid=item_lib_martigny.pid, pickup_location_pid=loc_public_saxon.pid, patron_pid=patron4_martigny.pid, transaction_library_pid=lib_saxon.pid, - transaction_user_pid=librarian_saxon.pid - ) + transaction_user_pid=librarian_saxon.pid, + ), ) assert res.status_code == 200 mailbox.clear() @@ -970,9 +1025,9 @@ def test_multiple_request_booking_notifications( # CHECKOUT FOR REQUEST#1 # After the checkout, no new notification will be sent. 
params = { - 'patron_pid': patron_martigny.pid, - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid + "patron_pid": patron_martigny.pid, + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, } loan, actions = item_lib_martigny.checkout(**params) assert actions.get(LoanAction.CHECKOUT) @@ -982,8 +1037,8 @@ def test_multiple_request_booking_notifications( # - BOOKING notification --> sent to library # - AT_DESK notification --> sent to library too params = { - 'transaction_location_pid': loc_public_sion.pid, - 'transaction_user_pid': librarian_sion.pid + "transaction_location_pid": loc_public_sion.pid, + "transaction_user_pid": librarian_sion.pid, } _, actions = item_lib_martigny.checkin(**params) assert actions.get(LoanAction.CHECKIN) @@ -993,15 +1048,15 @@ def test_multiple_request_booking_notifications( # CHECKOUT & CHECKIN FOR PATRON#2 mailbox.clear() params = { - 'patron_pid': patron2_martigny.pid, - 'transaction_location_pid': loc_public_sion.pid, - 'transaction_user_pid': librarian_sion.pid + "patron_pid": patron2_martigny.pid, + "transaction_location_pid": loc_public_sion.pid, + "transaction_user_pid": librarian_sion.pid, } loan, actions = item_lib_martigny.checkout(**params) assert actions.get(LoanAction.CHECKOUT) params = { - 'transaction_location_pid': loc_public_saxon.pid, - 'transaction_user_pid': librarian_saxon.pid + "transaction_location_pid": loc_public_saxon.pid, + "transaction_user_pid": librarian_saxon.pid, } # checkin at the request pickup of patron3 loan, actions = item_lib_martigny.checkin(**params) @@ -1012,15 +1067,15 @@ def test_multiple_request_booking_notifications( # checkout for patron3 mailbox.clear() params = { - 'patron_pid': patron4_martigny.pid, - 'transaction_location_pid': loc_public_saxon.pid, - 'transaction_user_pid': librarian_saxon.pid + "patron_pid": patron4_martigny.pid, + "transaction_location_pid": loc_public_saxon.pid, + "transaction_user_pid": librarian_saxon.pid, } loan, actions = item_lib_martigny.checkout(**params) assert actions.get(LoanAction.CHECKOUT) params = { - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, } # checkin at the request pickup of patron3 _, actions = item_lib_martigny.checkin(**params) @@ -1028,62 +1083,66 @@ def test_multiple_request_booking_notifications( process_notifications(notification_type) -@mock.patch('flask.current_app.logger.error', - mock.MagicMock(side_effect=Exception('Test!'))) +@mock.patch( + "flask.current_app.logger.error", mock.MagicMock(side_effect=Exception("Test!")) +) def test_cancel_notifications( - client, patron_martigny, lib_martigny, item_lib_martigny, - librarian_martigny, loc_public_martigny, circulation_policies, mailbox + client, + patron_martigny, + lib_martigny, + item_lib_martigny, + librarian_martigny, + loc_public_martigny, + circulation_policies, + mailbox, ): """Test cancel notifications.""" login_user_via_session(client, librarian_martigny.user) # CREATE and VALIDATE a request ... 
res, data = postdata( client, - 'api_item.librarian_request', + "api_item.librarian_request", dict( item_pid=item_lib_martigny.pid, pickup_location_pid=loc_public_martigny.pid, patron_pid=patron_martigny.pid, transaction_library_pid=lib_martigny.pid, - transaction_user_pid=librarian_martigny.pid - ) + transaction_user_pid=librarian_martigny.pid, + ), ) assert res.status_code == 200 - request_loan_pid = data.get( - 'action_applied')[LoanAction.REQUEST].get('pid') + request_loan_pid = data.get("action_applied")[LoanAction.REQUEST].get("pid") - flush_index(NotificationsSearch.Meta.index) + NotificationsSearch.flush_and_refresh() res, data = postdata( client, - 'api_item.validate_request', + "api_item.validate_request", dict( pid=request_loan_pid, transaction_location_pid=loc_public_martigny.pid, - transaction_user_pid=librarian_martigny.pid - ) + transaction_user_pid=librarian_martigny.pid, + ), ) assert res.status_code == 200 # At this time, an AVAILABILITY notification should be create but not yet # dispatched loan = Loan.get_record_by_pid(request_loan_pid) notification = get_notification(loan, NotificationType.AVAILABILITY) - assert notification \ - and notification['status'] == NotificationStatus.CREATED + assert notification and notification["status"] == NotificationStatus.CREATED # BORROW the requested item res, data = postdata( client, - 'api_item.checkout', + "api_item.checkout", dict( item_pid=item_lib_martigny.pid, patron_pid=patron_martigny.pid, transaction_location_pid=loc_public_martigny.pid, transaction_user_pid=librarian_martigny.pid, - ) + ), ) assert res.status_code == 200 - loan_pid = data.get( - 'action_applied')[LoanAction.CHECKOUT].get('pid') + loan_pid = data.get("action_applied")[LoanAction.CHECKOUT].get("pid") loan = Loan.get_record_by_pid(loan_pid) # Try to dispatch pending availability notifications. 
@@ -1093,20 +1152,19 @@ def test_cancel_notifications( process_notifications(NotificationType.AVAILABILITY) notification = get_notification(loan, NotificationType.AVAILABILITY) - assert notification and \ - notification['status'] == NotificationStatus.CANCELLED + assert notification and notification["status"] == NotificationStatus.CANCELLED assert len(mailbox) == 0 # restore to initial state res, data = postdata( client, - 'api_item.checkin', + "api_item.checkin", dict( item_pid=item_lib_martigny.pid, # patron_pid=patron_martigny.pid, transaction_location_pid=loc_public_martigny.pid, transaction_user_pid=librarian_martigny.pid, - ) + ), ) assert res.status_code == 200 mailbox.clear() @@ -1120,37 +1178,45 @@ def test_cancel_notifications( assert not can_cancel process_notifications(NotificationType.DUE_SOON) notification = Notification.get_record_by_pid(notification.pid) - assert notification['status'] == NotificationStatus.DONE - flush_index(NotificationsSearch.Meta.index) + assert notification["status"] == NotificationStatus.DONE + NotificationsSearch.flush_and_refresh() # try to create a new DUE_SOON notification for the same loan record = { - 'creation_date': datetime.now(timezone.utc).isoformat(), - 'notification_type': NotificationType.DUE_SOON, - 'context': { - 'loan': {'$ref': get_ref_for_pid('loans', loan.pid)}, - 'reminder_counter': 0 - } + "creation_date": datetime.now(timezone.utc).isoformat(), + "notification_type": NotificationType.DUE_SOON, + "context": { + "loan": {"$ref": get_ref_for_pid("loans", loan.pid)}, + "reminder_counter": 0, + }, } notification = Notification.create(record) can_cancel, _ = notification.can_be_cancelled() assert can_cancel Dispatcher.dispatch_notifications([notification.pid]) notification = Notification.get_record_by_pid(notification.pid) - assert notification['status'] == NotificationStatus.CANCELLED + assert notification["status"] == NotificationStatus.CANCELLED -def test_booking_notifications(client, patron_martigny, patron_sion, - lib_martigny, lib_fully, - librarian_fully, - item_lib_martigny, librarian_martigny, - loc_public_martigny, circulation_policies, - loc_public_fully, mailbox): +def test_booking_notifications( + client, + patron_martigny, + patron_sion, + lib_martigny, + lib_fully, + librarian_fully, + item_lib_martigny, + librarian_martigny, + loc_public_martigny, + circulation_policies, + loc_public_fully, + mailbox, +): """Test booking notifications.""" params = { - 'patron_pid': patron_sion.pid, - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid + "patron_pid": patron_sion.pid, + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, } item_lib_martigny.checkout(**params) mailbox.clear() @@ -1158,26 +1224,25 @@ def test_booking_notifications(client, patron_martigny, patron_sion, res, data = postdata( client, - 'api_item.librarian_request', + "api_item.librarian_request", dict( item_pid=item_lib_martigny.pid, pickup_location_pid=loc_public_martigny.pid, patron_pid=patron_martigny.pid, transaction_library_pid=lib_martigny.pid, - transaction_user_pid=librarian_martigny.pid - ) + transaction_user_pid=librarian_martigny.pid, + ), ) assert res.status_code == 200 - request_loan_pid = data.get( - 'action_applied')[LoanAction.REQUEST].get('pid') + request_loan_pid = data.get("action_applied")[LoanAction.REQUEST].get("pid") - flush_index(NotificationsSearch.Meta.index) + NotificationsSearch.flush_and_refresh() # BOOKING params = { - 
'transaction_location_pid': loc_public_fully.pid, - 'transaction_user_pid': librarian_fully.pid + "transaction_location_pid": loc_public_fully.pid, + "transaction_user_pid": librarian_fully.pid, } _, actions = item_lib_martigny.checkin(**params) # the checked in loan is cancelled and the requested loan is in transit for @@ -1185,24 +1250,27 @@ def test_booking_notifications(client, patron_martigny, patron_sion, loan = Loan.get_record_by_pid(request_loan_pid) assert loan.state == LoanState.ITEM_IN_TRANSIT_FOR_PICKUP assert mailbox[0].recipients == [ - lib_fully.get('notification_settings')[5].get('email')] + lib_fully.get("notification_settings")[5].get("email") + ] # the patron information is the patron request - assert patron_martigny['patron']['barcode'][0] in mailbox[0].body + assert patron_martigny["patron"]["barcode"][0] in mailbox[0].body mailbox.clear() params = { - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid, - 'pid': loan.pid + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, + "pid": loan.pid, } item_lib_martigny.cancel_item_request( request_loan_pid, transaction_user_pid=librarian_martigny.pid, - transaction_location_pid=loc_public_martigny.pid) + transaction_location_pid=loc_public_martigny.pid, + ) item_lib_martigny.receive(**params) def test_delete_pickup_location( - loan2_validated_martigny, loc_restricted_martigny, mailbox): + loan2_validated_martigny, loc_restricted_martigny, mailbox +): """Test delete pickup location.""" mailbox.clear() loan = loan2_validated_martigny @@ -1211,14 +1279,19 @@ def test_delete_pickup_location( # We can not delete location used as transaction or pickup location # # any more. reasons_not_to_delete = loc_restricted_martigny.reasons_not_to_delete() - assert reasons_not_to_delete == {'links': {'loans': 1}} + assert reasons_not_to_delete == {"links": {"loans": 1}} with pytest.raises(IlsRecordError.NotDeleted): loc_restricted_martigny.delete(dbcommit=True, delindex=True) def test_reminder_notifications_after_extend( - item_lib_martigny, patron_martigny, loc_public_martigny, - librarian_martigny, circulation_policies, mailbox, client + item_lib_martigny, + patron_martigny, + loc_public_martigny, + librarian_martigny, + circulation_policies, + mailbox, + client, ): """Test any reminder notification could be resend after loan extension.""" @@ -1227,15 +1300,17 @@ def test_reminder_notifications_after_extend( # * Run the `notification-creation` task to create a DUE_SOON # notification params = { - 'patron_pid': patron_martigny.pid, - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid, - 'pickup_location_pid': loc_public_martigny.pid + "patron_pid": patron_martigny.pid, + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, + "pickup_location_pid": loc_public_martigny.pid, } item, loan = item_record_to_a_specific_loan_state( item=item_lib_martigny, loan_state=LoanState.ITEM_ON_LOAN, - params=params, copy_item=True) + params=params, + copy_item=True, + ) # get the related cipo and check than an due_soon reminder exists cipo = get_circ_policy(loan) @@ -1243,11 +1318,11 @@ def test_reminder_notifications_after_extend( assert due_soon_reminder # Update the loan - delay = due_soon_reminder.get('days_delay') - 1 + delay = due_soon_reminder.get("days_delay") - 1 due_soon_date = datetime.now() - timedelta(days=delay) end_date = datetime.now() + 
timedelta(days=1) - loan['due_soon_date'] = due_soon_date.astimezone(pytz.utc).isoformat() - loan['end_date'] = end_date.astimezone(pytz.utc).isoformat() + loan["due_soon_date"] = due_soon_date.astimezone(pytz.utc).isoformat() + loan["end_date"] = end_date.astimezone(pytz.utc).isoformat() loan = loan.update(loan, dbcommit=True, reindex=True) assert loan.is_loan_due_soon() @@ -1257,13 +1332,16 @@ def test_reminder_notifications_after_extend( process_notifications(NotificationType.DUE_SOON) first_notification = get_notification(loan, NotificationType.DUE_SOON) - assert first_notification \ - and first_notification['status'] == NotificationStatus.DONE + assert ( + first_notification and first_notification["status"] == NotificationStatus.DONE + ) assert len(mailbox) == 1 - counter = NotificationsSearch()\ - .filter('term', context__loan__pid=loan.pid)\ - .filter('term', notification_type=NotificationType.DUE_SOON)\ + counter = ( + NotificationsSearch() + .filter("term", context__loan__pid=loan.pid) + .filter("term", notification_type=NotificationType.DUE_SOON) .count() + ) assert counter == 1 # STEP 2 - CHECK NOTIFICATIONS CREATION @@ -1271,10 +1349,13 @@ def test_reminder_notifications_after_extend( # As a notification already exists, no new DUE_SOON#1 notifications # should be created create_notifications(types=[NotificationType.DUE_SOON]) - query = NotificationsSearch() \ - .filter('term', context__loan__pid=loan.pid) \ - .filter('term', notification_type=NotificationType.DUE_SOON) \ - .source('pid').scan() + query = ( + NotificationsSearch() + .filter("term", context__loan__pid=loan.pid) + .filter("term", notification_type=NotificationType.DUE_SOON) + .source("pid") + .scan() + ) notification_pids = [hit.pid for hit in query] assert len(notification_pids) == 1 assert notification_pids[0] == first_notification.pid @@ -1287,12 +1368,12 @@ def test_reminder_notifications_after_extend( params = dict( item_pid=item.pid, transaction_user_pid=librarian_martigny.pid, - transaction_location_pid=loc_public_martigny.pid + transaction_location_pid=loc_public_martigny.pid, ) - res, _ = postdata(client, 'api_item.extend_loan', params) + res, _ = postdata(client, "api_item.extend_loan", params) assert res.status_code == 200 loan = Loan.get_record_by_pid(loan.pid) - due_soon_date = ciso8601.parse_datetime(loan.get('due_soon_date')) + due_soon_date = ciso8601.parse_datetime(loan.get("due_soon_date")) # STEP 4 - CHECK NOTIFICATIONS CREATION # Run again the `create_notification` task, again for DUE_SOON @@ -1301,25 +1382,25 @@ def test_reminder_notifications_after_extend( # Process the notification, check that this new notification isn't # cancelled and well processed. 
process_date = due_soon_date + timedelta(days=1) - create_notifications( - types=[NotificationType.DUE_SOON], - tstamp=process_date - ) - counter = NotificationsSearch() \ - .filter('term', context__loan__pid=loan.pid) \ - .filter('term', notification_type=NotificationType.DUE_SOON) \ + create_notifications(types=[NotificationType.DUE_SOON], tstamp=process_date) + counter = ( + NotificationsSearch() + .filter("term", context__loan__pid=loan.pid) + .filter("term", notification_type=NotificationType.DUE_SOON) .count() + ) assert counter == 2 process_notifications(NotificationType.DUE_SOON) assert len(mailbox) == 2 second_notification = get_notification(loan, NotificationType.DUE_SOON) - assert second_notification \ - and second_notification['status'] == NotificationStatus.DONE + assert ( + second_notification and second_notification["status"] == NotificationStatus.DONE + ) assert second_notification.pid != first_notification # should be at the end to avoid notifications in other tests -def test_transaction_library_pid(notification_late_martigny, - lib_martigny_data): - assert notification_late_martigny.transaction_library_pid == \ - lib_martigny_data.get('pid') +def test_transaction_library_pid(notification_late_martigny, lib_martigny_data): + assert notification_late_martigny.transaction_library_pid == lib_martigny_data.get( + "pid" + ) diff --git a/tests/api/operation_logs/test_operation_logs_permissions.py b/tests/api/operation_logs/test_operation_logs_permissions.py index 9e104fb321..8cc3690234 100644 --- a/tests/api/operation_logs/test_operation_logs_permissions.py +++ b/tests/api/operation_logs/test_operation_logs_permissions.py @@ -20,17 +20,14 @@ from flask import current_app from flask_principal import AnonymousIdentity, identity_changed from flask_security import login_user -from utils import check_permission, flush_index +from utils import check_permission -from rero_ils.modules.operation_logs.permissions import \ - OperationLogPermissionPolicy +from rero_ils.modules.operation_logs.permissions import OperationLogPermissionPolicy from rero_ils.modules.patrons.api import Patron, PatronsSearch -@mock.patch.object(Patron, '_extensions', []) -def test_operation_logs_permissions( - patron_martigny, operation_log, librarian_martigny -): +@mock.patch.object(Patron, "_extensions", []) +def test_operation_logs_permissions(patron_martigny, operation_log, librarian_martigny): """Test item permissions class.""" # Anonymous user & Patron user @@ -39,58 +36,78 @@ def test_operation_logs_permissions( identity_changed.send( current_app._get_current_object(), identity=AnonymousIdentity() ) - check_permission(OperationLogPermissionPolicy, { - 'search': False, - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, None) - check_permission(OperationLogPermissionPolicy, { - 'search': False, - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, operation_log) + check_permission( + OperationLogPermissionPolicy, + { + "search": False, + "read": False, + "create": False, + "update": False, + "delete": False, + }, + None, + ) + check_permission( + OperationLogPermissionPolicy, + { + "search": False, + "read": False, + "create": False, + "update": False, + "delete": False, + }, + operation_log, + ) login_user(patron_martigny.user) - check_permission(OperationLogPermissionPolicy, {'create': False}, {}) - check_permission(OperationLogPermissionPolicy, { - 'search': True, - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, operation_log) + 
check_permission(OperationLogPermissionPolicy, {"create": False}, {}) + check_permission( + OperationLogPermissionPolicy, + { + "search": True, + "read": False, + "create": False, + "update": False, + "delete": False, + }, + operation_log, + ) # Librarian with specific role # - search/read: any items # - create/update/delete: allowed for items of its own library login_user(librarian_martigny.user) - check_permission(OperationLogPermissionPolicy, { - 'search': True, - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, operation_log) + check_permission( + OperationLogPermissionPolicy, + { + "search": True, + "read": True, + "create": False, + "update": False, + "delete": False, + }, + operation_log, + ) # Librarian without specific role :: No action allowed - original_roles = librarian_martigny.get('roles', []) - librarian_martigny['roles'] = ['pro_user_manager'] + original_roles = librarian_martigny.get("roles", []) + librarian_martigny["roles"] = ["pro_user_manager"] librarian_martigny.update(librarian_martigny, dbcommit=True, reindex=True) - flush_index(PatronsSearch.Meta.index) + PatronsSearch.flush_and_refresh() login_user(librarian_martigny.user) # to refresh identity ! - check_permission(OperationLogPermissionPolicy, { - 'search': True, - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, operation_log) + check_permission( + OperationLogPermissionPolicy, + { + "search": True, + "read": False, + "create": False, + "update": False, + "delete": False, + }, + operation_log, + ) # reset the librarian - librarian_martigny['roles'] = original_roles + librarian_martigny["roles"] = original_roles librarian_martigny.update(librarian_martigny, dbcommit=True, reindex=True) - flush_index(PatronsSearch.Meta.index) + PatronsSearch.flush_and_refresh() diff --git a/tests/api/operation_logs/test_operation_logs_rest.py b/tests/api/operation_logs/test_operation_logs_rest.py index 5e511a7d9b..643d198a2e 100644 --- a/tests/api/operation_logs/test_operation_logs_rest.py +++ b/tests/api/operation_logs/test_operation_logs_rest.py @@ -23,55 +23,65 @@ from flask import current_app, url_for from invenio_access.permissions import system_identity from invenio_accounts.testutils import login_user_via_session -from utils import VerifyRecordPermissionPatch, flush_index, get_json, postdata +from utils import VerifyRecordPermissionPatch, get_json, postdata from rero_ils.modules.documents.api import DocumentsSearch from rero_ils.modules.files.cli import create_pdf_record_files from rero_ils.modules.items.api import Item from rero_ils.modules.items.models import ItemStatus -from rero_ils.modules.operation_logs.api import OperationLog +from rero_ils.modules.operation_logs.api import OperationLog, OperationLogsSearch from rero_ils.modules.operation_logs.models import OperationLogOperation from rero_ils.modules.utils import get_ref_for_pid -@mock.patch('invenio_records_rest.views.verify_record_permission', - mock.MagicMock(return_value=VerifyRecordPermissionPatch)) -def test_operation_logs_permissions(client, operation_log, - librarian_martigny, patron_martigny, - librarian_patron_martigny, json_header): +@mock.patch( + "invenio_records_rest.views.verify_record_permission", + mock.MagicMock(return_value=VerifyRecordPermissionPatch), +) +def test_operation_logs_permissions( + client, + operation_log, + librarian_martigny, + patron_martigny, + librarian_patron_martigny, + json_header, +): """Test operation logs permissions.""" - item_list = 
url_for('invenio_records_rest.oplg_list') + item_list = url_for("invenio_records_rest.oplg_list") # Check access for librarian role login_user_via_session(client, librarian_martigny.user) res = client.get(item_list) assert res.status_code == 200 data = get_json(res) - assert data['hits']['total']['value'] == 4 + assert data["hits"]["total"]["value"] == 4 # Check access for patron role login_user_via_session(client, patron_martigny.user) res = client.get(item_list) assert res.status_code == 200 data = get_json(res) - assert data['hits']['total']['value'] == 0 + assert data["hits"]["total"]["value"] == 0 # Check access for patron and librarian roles login_user_via_session(client, librarian_patron_martigny.user) res = client.get(item_list) assert res.status_code == 200 data = get_json(res) - assert data['hits']['total']['value'] == 4 + assert data["hits"]["total"]["value"] == 4 -def test_operation_logs_rest(client, loan_pending_martigny, - librarian_martigny, - json_header, - loan_overdue_martigny): +def test_operation_logs_rest( + client, + loan_pending_martigny, + librarian_martigny, + json_header, + loan_overdue_martigny, +): """Test operation logs REST API.""" login_user_via_session(client, librarian_martigny.user) - item_url = url_for('invenio_records_rest.oplg_item', pid_value='1') - item_list = url_for('invenio_records_rest.oplg_list') + item_url = url_for("invenio_records_rest.oplg_item", pid_value="1") + item_list = url_for("invenio_records_rest.oplg_list") res = client.get(item_url) assert res.status_code == 404 @@ -79,24 +89,20 @@ def test_operation_logs_rest(client, loan_pending_martigny, res = client.get(item_list) assert res.status_code == 200 data = get_json(res) - assert data['hits']['total']['value'] > 0 - pid = data['hits']['hits'][0]['metadata']['pid'] + assert data["hits"]["total"]["value"] > 0 + pid = data["hits"]["hits"][0]["metadata"]["pid"] assert pid - assert data['hits']['hits'][0]['id'] == pid - assert data['hits']['hits'][0]['created'] - assert data['hits']['hits'][0]['updated'] - - res, _ = postdata( - client, - 'invenio_records_rest.oplg_list', - {} - ) + assert data["hits"]["hits"][0]["id"] == pid + assert data["hits"]["hits"][0]["created"] + assert data["hits"]["hits"][0]["updated"] + + res, _ = postdata(client, "invenio_records_rest.oplg_list", {}) assert res.status_code == 403 res = client.put( - url_for('invenio_records_rest.oplg_item', pid_value='1'), + url_for("invenio_records_rest.oplg_item", pid_value="1"), data={}, - headers=json_header + headers=json_header, ) assert res.status_code == 404 @@ -105,101 +111,99 @@ def test_operation_logs_rest(client, loan_pending_martigny, def test_operation_log_on_item( - client, item_lib_martigny_data_tmp, librarian_martigny, json_header, - item_lib_martigny + client, + item_lib_martigny_data_tmp, + librarian_martigny, + json_header, + item_lib_martigny, ): """Test operation log on Item.""" - # Get the operation log index - fake_data = {'date': datetime.now().isoformat()} - oplg_index = OperationLog.get_index(fake_data) - # STEP #1 : Create an item. 
This will generate an operation log item_data = deepcopy(item_lib_martigny_data_tmp) - del item_data['pid'] + del item_data["pid"] item = Item.create(item_data, dbcommit=True, reindex=True) - flush_index(oplg_index) + OperationLogsSearch.flush_and_refresh() - q = f'record.type:item AND record.value:{item.pid}' - es_url = url_for('invenio_records_rest.oplg_list', q=q, sort='mostrecent') + q = f"record.type:item AND record.value:{item.pid}" + es_url = url_for("invenio_records_rest.oplg_list", q=q, sort="mostrecent") login_user_via_session(client, librarian_martigny.user) res = client.get(es_url) data = get_json(res) - assert data['hits']['total']['value'] == 1 - metadata = data['hits']['hits'][0]['metadata'] - assert metadata['operation'] == OperationLogOperation.CREATE + assert data["hits"]["total"]["value"] == 1 + metadata = data["hits"]["hits"][0]["metadata"] + assert metadata["operation"] == OperationLogOperation.CREATE # STEP #2 : Update the item ``price`` attribute. # As any changes on this attribute must be logged, a new operation log # will be generated. - item['price'] = 10 + item["price"] = 10 item = item.update(item, dbcommit=True, reindex=True) - flush_index(oplg_index) + OperationLogsSearch.flush_and_refresh() res = client.get(es_url) data = get_json(res) - assert data['hits']['total']['value'] == 2 - metadata = data['hits']['hits'][0]['metadata'] - assert metadata['operation'] == OperationLogOperation.UPDATE + assert data["hits"]["total"]["value"] == 2 + metadata = data["hits"]["hits"][0]["metadata"] + assert metadata["operation"] == OperationLogOperation.UPDATE # STEP #3 : Update the item ``status`` attribute. # This attribute doesn't need to be tracked. So if it's the only change # on this record then no OpLog should be created. - item['status'] = ItemStatus.EXCLUDED + item["status"] = ItemStatus.EXCLUDED item = item.update(item, dbcommit=True, reindex=True) - flush_index(oplg_index) + OperationLogsSearch.flush_and_refresh() res = client.get(es_url) data = get_json(res) - assert data['hits']['total']['value'] == 2 + assert data["hits"]["total"]["value"] == 2 # STEP #4 : Update the item ``status`` and ``price`` attributes. # As we update at least one attribute that need to be tracked, this # update will generate a new OpLog (UPDATE) - item['status'] = ItemStatus.AT_DESK - item['price'] = 12 + item["status"] = ItemStatus.AT_DESK + item["price"] = 12 item = item.update(item, dbcommit=True, reindex=True) - flush_index(oplg_index) + OperationLogsSearch.flush_and_refresh() res = client.get(es_url) data = get_json(res) - assert data['hits']['total']['value'] == 3 - metadata = data['hits']['hits'][0]['metadata'] - assert metadata['operation'] == OperationLogOperation.UPDATE + assert data["hits"]["total"]["value"] == 3 + metadata = data["hits"]["hits"][0]["metadata"] + assert metadata["operation"] == OperationLogOperation.UPDATE # STEP #5 : Delete the item # This will generate the last OpLog about the item. 
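    # Deleting the item does not purge its earlier operation logs: a fourth
    # OpLog, with a DELETE operation, is simply appended on top of them.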
item.delete(dbcommit=True, delindex=True)
-    flush_index(oplg_index)
+    OperationLogsSearch.flush_and_refresh()
     res = client.get(es_url)
     data = get_json(res)
-    assert data['hits']['total']['value'] == 4
-    metadata = data['hits']['hits'][0]['metadata']
-    assert metadata['operation'] == OperationLogOperation.DELETE
+    assert data["hits"]["total"]["value"] == 4
+    metadata = data["hits"]["hits"][0]["metadata"]
+    assert metadata["operation"] == OperationLogOperation.DELETE


-def test_operation_log_on_ill_request(client, ill_request_martigny,
-                                      librarian_martigny):
+def test_operation_log_on_ill_request(client, ill_request_martigny, librarian_martigny):
     """Test operation log on ILL request."""
     # Using the ``ill_request_martigny`` fixture, an operation log is created
     # for the 'create' operation. Inspect this operation log to verify that
     # the special additional information is included in this OpLog.
     login_user_via_session(client, librarian_martigny.user)
-    fake_data = {'date': datetime.now().isoformat()}
+    fake_data = {"date": datetime.now().isoformat()}
     oplg_index = OperationLog.get_index(fake_data)
-    flush_index(oplg_index)
+    OperationLogsSearch.flush_and_refresh()

-    q = f'record.type:illr AND record.value:{ill_request_martigny.pid}'
-    es_url = url_for('invenio_records_rest.oplg_list', q=q, sort='mostrecent')
+    q = f"record.type:illr AND record.value:{ill_request_martigny.pid}"
+    es_url = url_for("invenio_records_rest.oplg_list", q=q, sort="mostrecent")
     res = client.get(es_url)
     data = get_json(res)
-    assert data['hits']['total']['value'] == 1
-    metadata = data['hits']['hits'][0]['metadata']
-    assert metadata['operation'] == OperationLogOperation.CREATE
-    assert 'ill_request' in metadata
-    assert 'status' in metadata['ill_request']
+    assert data["hits"]["total"]["value"] == 1
+    metadata = data["hits"]["hits"][0]["metadata"]
+    assert metadata["operation"] == OperationLogOperation.CREATE
+    assert "ill_request" in metadata
+    assert "status" in metadata["ill_request"]


 def test_operation_log_on_file(
@@ -208,13 +212,13 @@ def test_operation_log_on_file(
     """Test files operation log."""

     # get the op index
-    fake_data = {'date': datetime.now().isoformat()}
+    fake_data = {"date": datetime.now().isoformat()}
     oplg_index = OperationLog.get_index(fake_data)

     # create a pdf file
     metadata = dict(
-        library={'$ref': get_ref_for_pid('lib', lib_martigny.pid)},
-        collections=['col1', 'col2']
+        library={"$ref": get_ref_for_pid("lib", lib_martigny.pid)},
+        collections=["col1", "col2"],
     )
     record = create_pdf_record_files(document, metadata, flush=True)
     recid = record["id"]
@@ -225,69 +229,71 @@ def test_operation_log_on_file(
     record_service = ext.records_service

     # flush indices
-    flush_index(DocumentsSearch.Meta.index)
-    flush_index(oplg_index)
+    DocumentsSearch.flush_and_refresh()
+    OperationLogsSearch.flush_and_refresh()

     # REST APIs are restricted, thus a login is needed
     login_user_via_session(client, librarian_martigny.user)

     # record file creation is in the op
     es_url = url_for(
-        'invenio_records_rest.oplg_list',
-        q=f'record.type:recid AND operation:create')
+        "invenio_records_rest.oplg_list", q="record.type:recid AND operation:create"
+    )
     res = client.get(es_url)
     data = get_json(res)
-    assert data['hits']['total']['value'] == 1
-    metadata = data['hits']['hits'][0]['metadata']
-    assert set(metadata['record'].keys()) == \
-        set(['library_pid', 'organisation_pid', 'type', 'value'])
-    assert set(metadata['file']['document']) == {'pid', 'type', 'title'}
+    assert data["hits"]["total"]["value"] == 1
+    metadata = 
data["hits"]["hits"][0]["metadata"] + assert set(metadata["record"].keys()) == set( + ["library_pid", "organisation_pid", "type", "value"] + ) + assert set(metadata["file"]["document"]) == {"pid", "type", "title"} # record file update is in the op - record_service.update( - system_identity, recid, dict(metadata=record['metadata'])) - flush_index(oplg_index) + record_service.update(system_identity, recid, dict(metadata=record["metadata"])) + OperationLogsSearch.flush_and_refresh() es_url = url_for( - 'invenio_records_rest.oplg_list', - q=f'record.type:recid AND operation:update') + "invenio_records_rest.oplg_list", q=f"record.type:recid AND operation:update" + ) res = client.get(es_url) data = get_json(res) - assert data['hits']['total']['value'] == 1 + assert data["hits"]["total"]["value"] == 1 # file creation is in the op - pdf_file_name = 'doc_doc1_1.pdf' + pdf_file_name = "doc_doc1_1.pdf" es_url = url_for( - 'invenio_records_rest.oplg_list', - q='record.type:file AND operation:create ' - f'AND record.value:{pdf_file_name}') + "invenio_records_rest.oplg_list", + q="record.type:file AND operation:create " f"AND record.value:{pdf_file_name}", + ) res = client.get(es_url) data = get_json(res) - metadata = data['hits']['hits'][0]['metadata'] - assert data['hits']['total']['value'] == 1 - assert set(data['hits']['hits'][0]['metadata']['record'].keys()) == \ - set(['library_pid', 'organisation_pid', 'type', 'value']) - assert set(metadata['file']['document']) == {'pid', 'type', 'title'} - assert metadata['file']['recid'] == recid + metadata = data["hits"]["hits"][0]["metadata"] + assert data["hits"]["total"]["value"] == 1 + assert set(data["hits"]["hits"][0]["metadata"]["record"].keys()) == set( + ["library_pid", "organisation_pid", "type", "value"] + ) + assert set(metadata["file"]["document"]) == {"pid", "type", "title"} + assert metadata["file"]["recid"] == recid # file deletion is in the op file_service.delete_file( - identity=system_identity, id_=recid, file_key=pdf_file_name) - flush_index(oplg_index) + identity=system_identity, id_=recid, file_key=pdf_file_name + ) + OperationLogsSearch.flush_and_refresh() es_url = url_for( - 'invenio_records_rest.oplg_list', - q='record.type:file AND operation:delete ' - f'AND record.value:{pdf_file_name}') + "invenio_records_rest.oplg_list", + q="record.type:file AND operation:delete " f"AND record.value:{pdf_file_name}", + ) res = client.get(es_url) data = get_json(res) - assert data['hits']['total']['value'] == 1 + assert data["hits"]["total"]["value"] == 1 # record file deletion is in the op record_service.delete(identity=system_identity, id_=recid) - flush_index(oplg_index) + OperationLogsSearch.flush_and_refresh() es_url = url_for( - 'invenio_records_rest.oplg_list', - q=f'record.type:recid AND operation:delete') + "invenio_records_rest.oplg_list", q=f"record.type:recid AND operation:delete" + ) res = client.get(es_url) data = get_json(res) - assert data['hits']['total']['value'] == 1 + assert data["hits"]["total"]["value"] == 1 diff --git a/tests/api/organisations/test_organisations_permissions.py b/tests/api/organisations/test_organisations_permissions.py index 7c6af51afb..f5613abb74 100644 --- a/tests/api/organisations/test_organisations_permissions.py +++ b/tests/api/organisations/test_organisations_permissions.py @@ -22,27 +22,23 @@ from invenio_accounts.testutils import login_user_via_session from utils import check_permission, get_json -from rero_ils.modules.organisations.permissions import \ - OrganisationPermissionPolicy +from 
rero_ils.modules.organisations.permissions import OrganisationPermissionPolicy -def test_organisation_permissions_api(client, patron_martigny, - org_martigny, org_sion, - system_librarian_martigny): +def test_organisation_permissions_api( + client, patron_martigny, org_martigny, org_sion, system_librarian_martigny +): """Test organisations permissions api.""" org_permissions_url = url_for( - 'api_blueprint.permissions', - route_name='organisations' + "api_blueprint.permissions", route_name="organisations" ) org_martigny_permission_url = url_for( - 'api_blueprint.permissions', - route_name='organisations', - record_pid=org_martigny.pid + "api_blueprint.permissions", + route_name="organisations", + record_pid=org_martigny.pid, ) org_sion_permission_url = url_for( - 'api_blueprint.permissions', - route_name='organisations', - record_pid=org_sion.pid + "api_blueprint.permissions", route_name="organisations", record_pid=org_sion.pid ) # Not logged @@ -62,23 +58,26 @@ def test_organisation_permissions_api(client, patron_martigny, res = client.get(org_martigny_permission_url) assert res.status_code == 200 data = get_json(res) - assert data['read']['can'] - assert data['list']['can'] - assert not data['create']['can'] - assert data['update']['can'] - assert not data['delete']['can'] + assert data["read"]["can"] + assert data["list"]["can"] + assert not data["create"]["can"] + assert data["update"]["can"] + assert not data["delete"]["can"] res = client.get(org_sion_permission_url) assert res.status_code == 200 data = get_json(res) - assert not data['read']['can'] - assert not data['update']['can'] + assert not data["read"]["can"] + assert not data["update"]["can"] -def test_organisation_permissions(patron_martigny, - librarian_martigny, - system_librarian_martigny, - org_martigny, org_sion): +def test_organisation_permissions( + patron_martigny, + librarian_martigny, + system_librarian_martigny, + org_martigny, + org_sion, +): """Test organisation permissions class.""" permission_policy = OrganisationPermissionPolicy @@ -87,61 +86,65 @@ def test_organisation_permissions(patron_martigny, identity_changed.send( current_app._get_current_object(), identity=AnonymousIdentity() ) - check_permission(permission_policy, { - 'search': False, - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, {}) + check_permission( + permission_policy, + { + "search": False, + "read": False, + "create": False, + "update": False, + "delete": False, + }, + {}, + ) # Patron # A simple patron can't operate any operation about Organisation login_user(patron_martigny.user) - check_permission(permission_policy, { - 'search': False, - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, org_martigny) + check_permission( + permission_policy, + { + "search": False, + "read": False, + "create": False, + "update": False, + "delete": False, + }, + org_martigny, + ) # Librarian # - search : any Organisation despite organisation owner # - read : only Organisation for its own organisation # - create/update/delete: disallowed login_user(librarian_martigny.user) - check_permission(permission_policy, {'search': True}, None) - check_permission(permission_policy, {'create': False}, {}) - check_permission(permission_policy, { - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, org_martigny) - check_permission(permission_policy, { - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, org_sion) + check_permission(permission_policy, {"search": True}, 
None) + check_permission(permission_policy, {"create": False}, {}) + check_permission( + permission_policy, + {"read": True, "create": False, "update": False, "delete": False}, + org_martigny, + ) + check_permission( + permission_policy, + {"read": False, "create": False, "update": False, "delete": False}, + org_sion, + ) # SystemLibrarian # - search : any Organisation despite organisation owner # - read/update : only Organisation for its own organisation # - create/delete : always disallowed (only CLI command) login_user(system_librarian_martigny.user) - check_permission(permission_policy, {'search': True}, None) - check_permission(permission_policy, {'create': False}, {}) - check_permission(permission_policy, { - 'read': True, - 'create': False, - 'update': True, - 'delete': False - }, org_martigny) - check_permission(permission_policy, { - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, org_sion) + check_permission(permission_policy, {"search": True}, None) + check_permission(permission_policy, {"create": False}, {}) + check_permission( + permission_policy, + {"read": True, "create": False, "update": True, "delete": False}, + org_martigny, + ) + check_permission( + permission_policy, + {"read": False, "create": False, "update": False, "delete": False}, + org_sion, + ) diff --git a/tests/api/organisations/test_organisations_rest_api.py b/tests/api/organisations/test_organisations_rest_api.py index b240bd248f..45083679d9 100644 --- a/tests/api/organisations/test_organisations_rest_api.py +++ b/tests/api/organisations/test_organisations_rest_api.py @@ -29,126 +29,115 @@ def test_get_record_by_viewcode(org_martigny): """Test Organisation.get_record_by_viewcode.""" - data = Organisation.get_record_by_viewcode('org1') - assert data['pid'] == org_martigny.pid + data = Organisation.get_record_by_viewcode("org1") + assert data["pid"] == org_martigny.pid with pytest.raises(Exception): - assert Organisation.get_record_by_viewcode('dummy') + assert Organisation.get_record_by_viewcode("dummy") def test_get_record_by_online_harvested_source(org_martigny): """Test get_record_by_online_harvested_source.""" - source = org_martigny.get('online_harvested_source')[0] + source = org_martigny.get("online_harvested_source")[0] org = Organisation.get_record_by_online_harvested_source(source) assert org.pid == org_martigny.pid - assert Organisation.get_record_by_online_harvested_source('dummy') is None - - -def test_organisation_secure_api_update(client, json_header, org_martigny, - librarian_martigny, - system_librarian_martigny, - librarian_sion, - org_martigny_data): + assert Organisation.get_record_by_online_harvested_source("dummy") is None + + +def test_organisation_secure_api_update( + client, + json_header, + org_martigny, + librarian_martigny, + system_librarian_martigny, + librarian_sion, + org_martigny_data, +): """Test organisation secure api create.""" login_user_via_session(client, system_librarian_martigny.user) - record_url = url_for('invenio_records_rest.org_item', - pid_value=org_martigny.pid) + record_url = url_for("invenio_records_rest.org_item", pid_value=org_martigny.pid) data = org_martigny_data - data['name'] = 'New Name 1' - res = client.put( - record_url, - data=json.dumps(data), - headers=json_header - ) + data["name"] = "New Name 1" + res = client.put(record_url, data=json.dumps(data), headers=json_header) assert res.status_code == 200 - list_url = url_for('invenio_records_rest.org_list') + list_url = url_for("invenio_records_rest.org_list") 
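    # Note that the result of this GET is not reassigned to ``res``; the
    # status assertion just below therefore re-checks the previous PUT
    # response.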
client.get(list_url) assert res.status_code == 200 login_user_via_session(client, librarian_martigny.user) - data['name'] = 'New Name 2' - res = client.put( - record_url, - data=json.dumps(data), - headers=json_header - ) + data["name"] = "New Name 2" + res = client.put(record_url, data=json.dumps(data), headers=json_header) assert res.status_code == 403 # Sion login_user_via_session(client, librarian_sion.user) - res = client.put( - record_url, - data=json.dumps(data), - headers=json_header - ) + res = client.put(record_url, data=json.dumps(data), headers=json_header) assert res.status_code == 403 -def test_organisation_can_delete(client, org_martigny, lib_martigny, - acq_receipt_fiction_martigny): +def test_organisation_can_delete( + client, org_martigny, lib_martigny, acq_receipt_fiction_martigny +): """Test can delete an organisation.""" can, reasons = org_martigny.can_delete assert not can - assert reasons['links']['libraries'] - assert reasons['links']['acq_receipts'] + assert reasons["links"]["libraries"] + assert reasons["links"]["acq_receipts"] -def test_organisation_secure_api(client, json_header, org_martigny, - librarian_martigny, - librarian_sion): +def test_organisation_secure_api( + client, json_header, org_martigny, librarian_martigny, librarian_sion +): """Test organisation secure api access.""" # Martigny login_user_via_session(client, librarian_martigny.user) - record_url = url_for('invenio_records_rest.org_item', - pid_value=org_martigny.pid) + record_url = url_for("invenio_records_rest.org_item", pid_value=org_martigny.pid) res = client.get(record_url) assert res.status_code == 200 # Sion login_user_via_session(client, librarian_sion.user) - record_url = url_for('invenio_records_rest.org_item', - pid_value=org_martigny.pid) + record_url = url_for("invenio_records_rest.org_item", pid_value=org_martigny.pid) -def test_organisation_secure_api_create(client, json_header, org_martigny, - librarian_martigny, - librarian_sion, - org_martigny_data): +def test_organisation_secure_api_create( + client, + json_header, + org_martigny, + librarian_martigny, + librarian_sion, + org_martigny_data, +): """Test organisation secure api create.""" # Martigny login_user_via_session(client, librarian_martigny.user) - post_entrypoint = 'invenio_records_rest.org_list' - - del org_martigny_data['pid'] - res, _ = postdata( - client, - post_entrypoint, - org_martigny_data - ) + post_entrypoint = "invenio_records_rest.org_list" + + del org_martigny_data["pid"] + res, _ = postdata(client, post_entrypoint, org_martigny_data) assert res.status_code == 403 # Sion login_user_via_session(client, librarian_sion.user) - res, _ = postdata( - client, - post_entrypoint, - org_martigny_data - ) + res, _ = postdata(client, post_entrypoint, org_martigny_data) assert res.status_code == 403 -def test_organisation_secure_api_delete(client, json_header, org_martigny, - librarian_martigny, - librarian_sion, - org_martigny_data): +def test_organisation_secure_api_delete( + client, + json_header, + org_martigny, + librarian_martigny, + librarian_sion, + org_martigny_data, +): """Test organisation secure api delete.""" login_user_via_session(client, librarian_martigny.user) - record_url = url_for('invenio_records_rest.org_item', - pid_value=org_martigny.pid) + record_url = url_for("invenio_records_rest.org_item", pid_value=org_martigny.pid) res = client.delete(record_url) assert res.status_code == 403 diff --git a/tests/api/patron_transaction_events/test_patron_payments_rest.py 
b/tests/api/patron_transaction_events/test_patron_payments_rest.py
index f41101fc10..139c50f889 100644
--- a/tests/api/patron_transaction_events/test_patron_payments_rest.py
+++ b/tests/api/patron_transaction_events/test_patron_payments_rest.py
@@ -25,15 +25,14 @@
 from utils import VerifyRecordPermissionPatch, get_json, postdata

 from rero_ils.modules.loans.api import Loan
-from rero_ils.modules.patron_transaction_events.models import \
-    PatronTransactionEventType
+from rero_ils.modules.patron_transaction_events.models import PatronTransactionEventType
 from rero_ils.modules.patron_transactions.api import PatronTransaction
 from rero_ils.modules.utils import get_ref_for_pid


 def test_patron_payment(
-        client, librarian_martigny,
-        patron_transaction_overdue_event_martigny):
+    client, librarian_martigny, patron_transaction_overdue_event_martigny
+):
     """Test patron payment."""
     ptre = patron_transaction_overdue_event_martigny
     transaction = ptre.patron_transaction
@@ -42,158 +41,168 @@ def test_patron_payment(
     assert calculated_amount == transaction.total_amount == 2.00

     login_user_via_session(client, librarian_martigny.user)
-    post_entrypoint = 'invenio_records_rest.ptre_list'
+    post_entrypoint = "invenio_records_rest.ptre_list"
     payment = deepcopy(ptre)

     # STEP#1 :: PARTIAL PAYMENT WITH TOO MANY DECIMALS
     # Try to pay a part of the transaction amount, but according to the
     # event amount restriction, only 2 decimals are allowed.
-    del payment['pid']
-    payment['type'] = PatronTransactionEventType.PAYMENT
-    payment['subtype'] = 'cash'
-    payment['amount'] = 0.545
-    payment['operator'] = {
-        '$ref': get_ref_for_pid('patrons', librarian_martigny.pid)
-    }
+    del payment["pid"]
+    payment["type"] = PatronTransactionEventType.PAYMENT
+    payment["subtype"] = "cash"
+    payment["amount"] = 0.545
+    payment["operator"] = {"$ref": get_ref_for_pid("patrons", librarian_martigny.pid)}
     res, _ = postdata(client, post_entrypoint, payment)
     assert res.status_code == 400

     # STEP#2 :: PARTIAL PAYMENT WITH A GOOD NUMBER OF DECIMALS
     # Even if we set a number with 3 decimals, it is allowed as long as it
     # represents a 2-decimal value.
-    payment['amount'] = 0.540
+    payment["amount"] = 0.540
     res, _ = postdata(client, post_entrypoint, payment)
     assert res.status_code == 201
     transaction = PatronTransaction.get_record_by_pid(transaction.pid)
     assert transaction.total_amount == 1.46
-    assert transaction.status == 'open'
+    assert transaction.status == "open"

     # STEP#3 :: PAY TOO MUCH MONEY
     # Try to process a payment with too much money; the system must
     # reject the payment.
-    payment['amount'] = 2
+    payment["amount"] = 2
     res, data = postdata(client, post_entrypoint, payment)
     assert res.status_code == 400

     # STEP#4 :: ADD A DISPUTE
     # Just to test that a ptre without an amount doesn't break the process.
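    # A dispute has a note but no amount nor subtype, so its creation must
    # leave the transaction total (1.46) and its 'open' status untouched.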
dispute = deepcopy(ptre) - del dispute['pid'] - del dispute['subtype'] - del dispute['amount'] - dispute['type'] = PatronTransactionEventType.DISPUTE - dispute['note'] = 'this is a dispute note' - dispute['operator'] = { - '$ref': get_ref_for_pid('patrons', librarian_martigny.pid) - } + del dispute["pid"] + del dispute["subtype"] + del dispute["amount"] + dispute["type"] = PatronTransactionEventType.DISPUTE + dispute["note"] = "this is a dispute note" + dispute["operator"] = {"$ref": get_ref_for_pid("patrons", librarian_martigny.pid)} res, data = postdata(client, post_entrypoint, dispute) assert res.status_code == 201 transaction = PatronTransaction.get_record_by_pid(transaction.pid) assert transaction.total_amount == 1.46 - assert transaction.status == 'open' + assert transaction.status == "open" # STEP#5 :: PAY THE REST # Conclude the transaction by creation of a payment for the rest of the # transaction - payment['amount'] = transaction.total_amount + payment["amount"] = transaction.total_amount res, _ = postdata(client, post_entrypoint, payment) assert res.status_code == 201 transaction = PatronTransaction.get_record_by_pid(transaction.pid) assert transaction.total_amount == 0 - assert transaction.status == 'closed' + assert transaction.status == "closed" -@mock.patch('invenio_records_rest.views.verify_record_permission', - mock.MagicMock(return_value=VerifyRecordPermissionPatch)) +@mock.patch( + "invenio_records_rest.views.verify_record_permission", + mock.MagicMock(return_value=VerifyRecordPermissionPatch), +) def test_patron_transaction_events_facets( - client, patron_transaction_overdue_event_martigny, loc_public_martigny, - item4_lib_martigny, rero_json_header + client, + patron_transaction_overdue_event_martigny, + loc_public_martigny, + item4_lib_martigny, + rero_json_header, ): """Test record retrieval.""" def _find_bucket(buckets, bucket_key): - for bucket in buckets['buckets']: - if bucket['key'] == bucket_key: + for bucket in buckets["buckets"]: + if bucket["key"] == bucket_key: return bucket # STEP#1 :: CHECK FACETS ARE PRESENT INTO SEARCH RESULT - url = url_for('invenio_records_rest.ptre_list') + url = url_for("invenio_records_rest.ptre_list") res = client.get(url, headers=rero_json_header) data = get_json(res) facet_keys = [ - 'category', 'owning_library', 'patron_type', 'total', - 'transaction_date', 'transaction_library', 'type' + "category", + "owning_library", + "patron_type", + "total", + "transaction_date", + "transaction_library", + "type", ] - assert all(key in data['aggregations'] for key in facet_keys) - - owning_library = data['aggregations']['owning_library']['buckets'] - assert owning_library[0]['owning_location']['buckets'][0]['name'] == \ - loc_public_martigny['name'] - params = {'facets': ''} - url = url_for('invenio_records_rest.ptre_list', **params) + assert all(key in data["aggregations"] for key in facet_keys) + + owning_library = data["aggregations"]["owning_library"]["buckets"] + assert ( + owning_library[0]["owning_location"]["buckets"][0]["name"] + == loc_public_martigny["name"] + ) + params = {"facets": ""} + url = url_for("invenio_records_rest.ptre_list", **params) res = client.get(url, headers=rero_json_header) data = get_json(res) - assert not data['aggregations'] + assert not data["aggregations"] - params = {'facets': 'type'} - url = url_for('invenio_records_rest.ptre_list', **params) + params = {"facets": "type"} + url = url_for("invenio_records_rest.ptre_list", **params) res = client.get(url, headers=rero_json_header) data = get_json(res) - 
assert list(data['aggregations'].keys()) == ['type'] + assert list(data["aggregations"].keys()) == ["type"] # CHECK NESTED FACETS :: TYPE & SUBTYPE + OR SEARCH # This test must be executed after `test_patron_payment` to retrieve # some payments. - params = {'facets': 'total'} - url = url_for('invenio_records_rest.ptre_list', **params) + params = {"facets": "total"} + url = url_for("invenio_records_rest.ptre_list", **params) res = client.get(url, headers=rero_json_header) data = get_json(res) - total_bucket = data['aggregations']['total'] - assert total_bucket['doc_count'] == 2 - cash_subtype_aggr = _find_bucket(total_bucket['subtype'], 'cash') - assert cash_subtype_aggr['doc_count'] == 2 - assert cash_subtype_aggr['subtotal']['value'] == 2.0 + total_bucket = data["aggregations"]["total"] + assert total_bucket["doc_count"] == 2 + cash_subtype_aggr = _find_bucket(total_bucket["subtype"], "cash") + assert cash_subtype_aggr["doc_count"] == 2 + assert cash_subtype_aggr["subtotal"]["value"] == 2.0 # filter with dummy subtypes :: no payment must be found params = { - 'facets': 'total', - 'type': PatronTransactionEventType.PAYMENT, - 'subtype': ['foo', 'bar'] + "facets": "total", + "type": PatronTransactionEventType.PAYMENT, + "subtype": ["foo", "bar"], } - url = url_for('invenio_records_rest.ptre_list', **params) + url = url_for("invenio_records_rest.ptre_list", **params) res = client.get(url, headers=rero_json_header) data = get_json(res) - total_bucket = data['aggregations']['total'] - assert total_bucket['doc_count'] == 0 - assert not _find_bucket(total_bucket['subtype'], 'cash') + total_bucket = data["aggregations"]["total"] + assert total_bucket["doc_count"] == 0 + assert not _find_bucket(total_bucket["subtype"], "cash") # filter with an available subtype (cash) and an absent subtype # (credit_card) :: payments must be found but only 'cash' subtype must # exist params = { - 'facets': 'total', - 'type': PatronTransactionEventType.PAYMENT, - 'subtype': ['cash', 'credit_card'] + "facets": "total", + "type": PatronTransactionEventType.PAYMENT, + "subtype": ["cash", "credit_card"], } - url = url_for('invenio_records_rest.ptre_list', **params) + url = url_for("invenio_records_rest.ptre_list", **params) res = client.get(url, headers=rero_json_header) data = get_json(res) - total_bucket = data['aggregations']['total'] - assert total_bucket['doc_count'] == 2 - assert _find_bucket(total_bucket['subtype'], 'cash') - assert not _find_bucket(total_bucket['subtype'], 'credit_card') + total_bucket = data["aggregations"]["total"] + assert total_bucket["doc_count"] == 2 + assert _find_bucket(total_bucket["subtype"], "cash") + assert not _find_bucket(total_bucket["subtype"], "credit_card") # delete Location item4_links = item4_lib_martigny.get_links_to_me(get_pids=True) - loan = Loan.get_record_by_pid(item4_links['loans'][0]) + loan = Loan.get_record_by_pid(item4_links["loans"][0]) loan.delete(dbcommit=True, delindex=True) item4_lib_martigny.delete(dbcommit=True, delindex=True) loc_pid = loc_public_martigny.pid loc_public_martigny.delete(dbcommit=True, delindex=True) - url = url_for('invenio_records_rest.ptre_list') + url = url_for("invenio_records_rest.ptre_list") res = client.get(url, headers=rero_json_header) data = get_json(res) - owning_library = data['aggregations']['owning_library']['buckets'] - assert owning_library[0]['owning_location']['buckets'][0]['name'] == \ - f'Unknown ({loc_pid})' + owning_library = data["aggregations"]["owning_library"]["buckets"] + assert ( + 
owning_library[0]["owning_location"]["buckets"][0]["name"] + == f"Unknown ({loc_pid})" + ) diff --git a/tests/api/patron_transaction_events/test_patron_transaction_events_permissions.py b/tests/api/patron_transaction_events/test_patron_transaction_events_permissions.py index 438ca675f3..45ca89e8c3 100644 --- a/tests/api/patron_transaction_events/test_patron_transaction_events_permissions.py +++ b/tests/api/patron_transaction_events/test_patron_transaction_events_permissions.py @@ -20,19 +20,22 @@ from flask import current_app from flask_principal import AnonymousIdentity, identity_changed from flask_security import login_user -from utils import check_permission, flush_index +from utils import check_permission -from rero_ils.modules.patron_transaction_events.permissions import \ - PatronTransactionEventPermissionPolicy +from rero_ils.modules.patron_transaction_events.permissions import ( + PatronTransactionEventPermissionPolicy, +) from rero_ils.modules.patrons.api import Patron, PatronsSearch -@mock.patch.object(Patron, '_extensions', []) +@mock.patch.object(Patron, "_extensions", []) def test_ptre_permissions( - patron_martigny, librarian_martigny, system_librarian_martigny, + patron_martigny, + librarian_martigny, + system_librarian_martigny, patron_transaction_overdue_event_saxon, patron_transaction_overdue_event_sion, - patron_transaction_overdue_event_martigny + patron_transaction_overdue_event_martigny, ): """Test patron transaction event permissions class.""" @@ -44,89 +47,111 @@ def test_ptre_permissions( identity_changed.send( current_app._get_current_object(), identity=AnonymousIdentity() ) - check_permission(PatronTransactionEventPermissionPolicy, { - 'search': False, - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, None) - check_permission(PatronTransactionEventPermissionPolicy, { - 'search': False, - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, ptre_martigny) + check_permission( + PatronTransactionEventPermissionPolicy, + { + "search": False, + "read": False, + "create": False, + "update": False, + "delete": False, + }, + None, + ) + check_permission( + PatronTransactionEventPermissionPolicy, + { + "search": False, + "read": False, + "create": False, + "update": False, + "delete": False, + }, + ptre_martigny, + ) # Patron user :: could search any, could read own pttr login_user(patron_martigny.user) - check_permission(PatronTransactionEventPermissionPolicy, { - 'create': False - }, {}) - check_permission(PatronTransactionEventPermissionPolicy, { - 'search': True, - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, ptre_martigny) - check_permission(PatronTransactionEventPermissionPolicy, { - 'read': False, - }, ptre_sion) + check_permission(PatronTransactionEventPermissionPolicy, {"create": False}, {}) + check_permission( + PatronTransactionEventPermissionPolicy, + { + "search": True, + "read": True, + "create": False, + "update": False, + "delete": False, + }, + ptre_martigny, + ) + check_permission( + PatronTransactionEventPermissionPolicy, + { + "read": False, + }, + ptre_sion, + ) # Librarian with specific role # - search: any pttr # - other operations : allowed for pttr of its own organisation login_user(librarian_martigny.user) - check_permission(PatronTransactionEventPermissionPolicy, { - 'search': True, - 'read': True, - 'create': True, - 'update': True, - 'delete': True - }, ptre_martigny) - check_permission(PatronTransactionEventPermissionPolicy, { - 'search': True, - 'read': True, - 
'create': True, - 'update': True, - 'delete': True - }, ptre_saxon) - check_permission(PatronTransactionEventPermissionPolicy, { - 'search': True, - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, ptre_sion) + check_permission( + PatronTransactionEventPermissionPolicy, + {"search": True, "read": True, "create": True, "update": True, "delete": True}, + ptre_martigny, + ) + check_permission( + PatronTransactionEventPermissionPolicy, + {"search": True, "read": True, "create": True, "update": True, "delete": True}, + ptre_saxon, + ) + check_permission( + PatronTransactionEventPermissionPolicy, + { + "search": True, + "read": False, + "create": False, + "update": False, + "delete": False, + }, + ptre_sion, + ) # Librarian without specific role # - search: any items # - read: only record of own organisation # - all other operations are disallowed - original_roles = librarian_martigny.get('roles', []) - librarian_martigny['roles'] = ['pro_read_only'] + original_roles = librarian_martigny.get("roles", []) + librarian_martigny["roles"] = ["pro_read_only"] librarian_martigny.update(librarian_martigny, dbcommit=True, reindex=True) - flush_index(PatronsSearch.Meta.index) + PatronsSearch.flush_and_refresh() login_user(librarian_martigny.user) # to refresh identity ! - check_permission(PatronTransactionEventPermissionPolicy, { - 'search': True, - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, ptre_saxon) - check_permission(PatronTransactionEventPermissionPolicy, { - 'search': True, - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, ptre_sion) + check_permission( + PatronTransactionEventPermissionPolicy, + { + "search": True, + "read": True, + "create": False, + "update": False, + "delete": False, + }, + ptre_saxon, + ) + check_permission( + PatronTransactionEventPermissionPolicy, + { + "search": True, + "read": False, + "create": False, + "update": False, + "delete": False, + }, + ptre_sion, + ) # reset the librarian - librarian_martigny['roles'] = original_roles + librarian_martigny["roles"] = original_roles librarian_martigny.update(librarian_martigny, dbcommit=True, reindex=True) - flush_index(PatronsSearch.Meta.index) + PatronsSearch.flush_and_refresh() diff --git a/tests/api/patron_transaction_events/test_patron_transaction_events_rest.py b/tests/api/patron_transaction_events/test_patron_transaction_events_rest.py index 98bd4e069b..a49979f0f4 100644 --- a/tests/api/patron_transaction_events/test_patron_transaction_events_rest.py +++ b/tests/api/patron_transaction_events/test_patron_transaction_events_rest.py @@ -23,127 +23,119 @@ import mock from flask import url_for from invenio_accounts.testutils import login_user_via_session -from utils import VerifyRecordPermissionPatch, get_json, postdata, \ - to_relative_url +from utils import VerifyRecordPermissionPatch, get_json, postdata, to_relative_url from rero_ils.modules.notifications.models import NotificationType -from rero_ils.modules.patron_transaction_events.api import \ - PatronTransactionEvent +from rero_ils.modules.patron_transaction_events.api import PatronTransactionEvent -@mock.patch('invenio_records_rest.views.verify_record_permission', - mock.MagicMock(return_value=VerifyRecordPermissionPatch)) +@mock.patch( + "invenio_records_rest.views.verify_record_permission", + mock.MagicMock(return_value=VerifyRecordPermissionPatch), +) def test_patron_transaction_events_get( - client, patron_transaction_overdue_event_martigny): + client, 
patron_transaction_overdue_event_martigny +): """Test record retrieval.""" patron_event = patron_transaction_overdue_event_martigny pid = patron_event.pid - item_url = url_for('invenio_records_rest.ptre_item', pid_value=pid) - list_url = url_for('invenio_records_rest.ptre_list', q=f'pid:{pid}') + item_url = url_for("invenio_records_rest.ptre_item", pid_value=pid) + list_url = url_for("invenio_records_rest.ptre_list", q=f"pid:{pid}") item_url_with_resolve = url_for( - 'invenio_records_rest.ptre_item', - pid_value=pid, - resolve=1, - sources=1 + "invenio_records_rest.ptre_item", pid_value=pid, resolve=1, sources=1 ) res = client.get(item_url) assert res.status_code == 200 - assert res.headers['ETag'] == f'"{patron_event.revision_id}"' + assert res.headers["ETag"] == f'"{patron_event.revision_id}"' data = get_json(res) - assert patron_event.dumps() == data['metadata'] + assert patron_event.dumps() == data["metadata"] # Check metadata - for k in ['created', 'updated', 'metadata', 'links']: + for k in ["created", "updated", "metadata", "links"]: assert k in data # Check self links - res = client.get(to_relative_url(data['links']['self'])) + res = client.get(to_relative_url(data["links"]["self"])) assert res.status_code == 200 assert data == get_json(res) - assert patron_event.dumps() == data['metadata'] + assert patron_event.dumps() == data["metadata"] # check resolve res = client.get(item_url_with_resolve) assert res.status_code == 200 data = get_json(res) - assert patron_event.replace_refs().dumps() == data['metadata'] + assert patron_event.replace_refs().dumps() == data["metadata"] res = client.get(list_url) assert res.status_code == 200 data = get_json(res) - result = data['hits']['hits'][0]['metadata'] + result = data["hits"]["hits"][0]["metadata"] # delete dynamically added keys (listener) - assert result.pop('organisation') == {'pid': 'org1', 'type': 'org'} - assert result.pop('patron') == { - 'barcode': '4098124352', - 'pid': 'ptrn6', - 'type': 'ptrn' + assert result.pop("organisation") == {"pid": "org1", "type": "org"} + assert result.pop("patron") == { + "barcode": "4098124352", + "pid": "ptrn6", + "type": "ptrn", } - assert result.pop('category') == NotificationType.OVERDUE - assert result.pop('owning_library') == {'pid': 'lib1', 'type': 'lib'} - assert result.pop('owning_location') == {'pid': 'loc1', 'type': 'loc'} - assert result.pop('patron_type') == {'pid': 'ptty1', 'type': 'ptty'} - assert result.pop('document') == {'pid': 'doc1', 'type': 'doc'} - item_data = result.pop('item') - assert item_data.pop('barcode').startswith('f-') - assert item_data == {'pid': 'item8', 'type': 'item'} + assert result.pop("category") == NotificationType.OVERDUE + assert result.pop("owning_library") == {"pid": "lib1", "type": "lib"} + assert result.pop("owning_location") == {"pid": "loc1", "type": "loc"} + assert result.pop("patron_type") == {"pid": "ptty1", "type": "ptty"} + assert result.pop("document") == {"pid": "doc1", "type": "doc"} + item_data = result.pop("item") + assert item_data.pop("barcode").startswith("f-") + assert item_data == {"pid": "item8", "type": "item"} assert result == patron_event.replace_refs() -@mock.patch('invenio_records_rest.views.verify_record_permission', - mock.MagicMock(return_value=VerifyRecordPermissionPatch)) +@mock.patch( + "invenio_records_rest.views.verify_record_permission", + mock.MagicMock(return_value=VerifyRecordPermissionPatch), +) def test_patron_transaction_events_post_put_delete( - client, patron_transaction_overdue_event_martigny, - json_header): + 
client, patron_transaction_overdue_event_martigny, json_header +): """Test record retrieval.""" - item_url = url_for('invenio_records_rest.ptre_item', pid_value='new_ptre') - list_url = url_for('invenio_records_rest.ptre_list', q='pid:new_ptre') + item_url = url_for("invenio_records_rest.ptre_item", pid_value="new_ptre") + list_url = url_for("invenio_records_rest.ptre_list", q="pid:new_ptre") event_data = deepcopy(patron_transaction_overdue_event_martigny) # Create record / POST - event_data['pid'] = 'new_ptre' - res, data = postdata( - client, - 'invenio_records_rest.ptre_list', - event_data - ) + event_data["pid"] = "new_ptre" + res, data = postdata(client, "invenio_records_rest.ptre_list", event_data) assert res.status_code == 201 # Check that the returned record matches the given data - assert data['metadata'] == event_data + assert data["metadata"] == event_data res = client.get(item_url) assert res.status_code == 200 data = get_json(res) - assert event_data == data['metadata'] + assert event_data == data["metadata"] # Update record/PUT data = event_data - data['note'] = 'Test Note' - res = client.put( - item_url, - data=json.dumps(data), - headers=json_header - ) + data["note"] = "Test Note" + res = client.put(item_url, data=json.dumps(data), headers=json_header) assert res.status_code == 200 - assert res.headers['ETag'] != f'"{event_data.revision_id}"' + assert res.headers["ETag"] != f'"{event_data.revision_id}"' # Check that the returned record matches the given data data = get_json(res) - assert data['metadata']['note'] == 'Test Note' + assert data["metadata"]["note"] == "Test Note" res = client.get(item_url) assert res.status_code == 200 data = get_json(res) - assert data['metadata']['note'] == 'Test Note' + assert data["metadata"]["note"] == "Test Note" res = client.get(list_url) assert res.status_code == 200 - data = get_json(res)['hits']['hits'][0] - assert data['metadata']['note'] == 'Test Note' + data = get_json(res)["hits"]["hits"][0] + assert data["metadata"]["note"] == "Test Note" # Delete record/DELETE res = client.delete(item_url) @@ -154,24 +146,28 @@ def test_patron_transaction_events_post_put_delete( def test_patron_transaction_event_utils_shortcuts( - client, patron_transaction_overdue_event_martigny, - loan_overdue_martigny): + client, patron_transaction_overdue_event_martigny, loan_overdue_martigny +): """Test patron transaction utils and shortcuts.""" can, reasons = patron_transaction_overdue_event_martigny.can_delete assert can assert reasons == {} - assert patron_transaction_overdue_event_martigny.patron_pid == \ - loan_overdue_martigny.patron_pid + assert ( + patron_transaction_overdue_event_martigny.patron_pid + == loan_overdue_martigny.patron_pid + ) def test_filtered_patron_transaction_events_get( - client, librarian_martigny, - patron_transaction_overdue_event_martigny, - librarian_sion, patron_martigny + client, + librarian_martigny, + patron_transaction_overdue_event_martigny, + librarian_sion, + patron_martigny, ): """Test patron transaction event filter by organisation.""" - list_url = url_for('invenio_records_rest.ptre_list') + list_url = url_for("invenio_records_rest.ptre_list") login_user_via_session(client, patron_martigny.user) res = client.get(list_url) @@ -183,26 +179,32 @@ def test_filtered_patron_transaction_events_get( res = client.get(list_url) assert res.status_code == 200 data = get_json(res) - assert data['hits']['total']['value'] == 1 + assert data["hits"]["total"]["value"] == 1 # Sion login_user_via_session(client, librarian_sion.user) 
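    # Patron transaction events are scoped by organisation, so a Sion
    # librarian must not see the Martigny event counted above (0 hits).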
- list_url = url_for('invenio_records_rest.ptre_list') + list_url = url_for("invenio_records_rest.ptre_list") res = client.get(list_url) assert res.status_code == 200 data = get_json(res) - assert data['hits']['total']['value'] == 0 + assert data["hits"]["total"]["value"] == 0 def test_patron_transaction_event_secure_api( - client, json_header, patron_transaction_overdue_event_martigny, - librarian_martigny, librarian_sion, - system_librarian_martigny, system_librarian_sion, - patron_transaction_overdue_event_saxon, patron_martigny): + client, + json_header, + patron_transaction_overdue_event_martigny, + librarian_martigny, + librarian_sion, + system_librarian_martigny, + system_librarian_sion, + patron_transaction_overdue_event_saxon, + patron_martigny, +): """Test patron transaction event secure api access.""" # test if a 'creation_date' attribute is created if not present into data trans_data = deepcopy(patron_transaction_overdue_event_martigny) - del trans_data['creation_date'] + del trans_data["creation_date"] trans = PatronTransactionEvent.create(trans_data, delete_pid=True) - assert trans.get('creation_date') + assert trans.get("creation_date") diff --git a/tests/api/patron_transactions/test_patron_transactions_permissions.py b/tests/api/patron_transactions/test_patron_transactions_permissions.py index cd37dea88b..355b7c5e82 100644 --- a/tests/api/patron_transactions/test_patron_transactions_permissions.py +++ b/tests/api/patron_transactions/test_patron_transactions_permissions.py @@ -20,18 +20,22 @@ from flask import current_app from flask_principal import AnonymousIdentity, identity_changed from flask_security import login_user -from utils import check_permission, flush_index +from utils import check_permission -from rero_ils.modules.patron_transactions.permissions import \ - PatronTransactionPermissionPolicy +from rero_ils.modules.patron_transactions.permissions import ( + PatronTransactionPermissionPolicy, +) from rero_ils.modules.patrons.api import Patron, PatronsSearch -@mock.patch.object(Patron, '_extensions', []) +@mock.patch.object(Patron, "_extensions", []) def test_pttr_permissions( - patron_martigny, librarian_martigny, system_librarian_martigny, - patron_transaction_overdue_saxon, patron_transaction_overdue_sion, - patron_transaction_overdue_martigny + patron_martigny, + librarian_martigny, + system_librarian_martigny, + patron_transaction_overdue_saxon, + patron_transaction_overdue_sion, + patron_transaction_overdue_martigny, ): """Test patron transaction permissions class.""" @@ -43,87 +47,111 @@ def test_pttr_permissions( identity_changed.send( current_app._get_current_object(), identity=AnonymousIdentity() ) - check_permission(PatronTransactionPermissionPolicy, { - 'search': False, - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, None) - check_permission(PatronTransactionPermissionPolicy, { - 'search': False, - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, pttr_martigny) + check_permission( + PatronTransactionPermissionPolicy, + { + "search": False, + "read": False, + "create": False, + "update": False, + "delete": False, + }, + None, + ) + check_permission( + PatronTransactionPermissionPolicy, + { + "search": False, + "read": False, + "create": False, + "update": False, + "delete": False, + }, + pttr_martigny, + ) # Patron user :: could search any, could read own pttr login_user(patron_martigny.user) - check_permission(PatronTransactionPermissionPolicy, {'create': False}, {}) - 
check_permission(PatronTransactionPermissionPolicy, { - 'search': True, - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, pttr_martigny) - check_permission(PatronTransactionPermissionPolicy, { - 'read': False, - }, pttr_sion) + check_permission(PatronTransactionPermissionPolicy, {"create": False}, {}) + check_permission( + PatronTransactionPermissionPolicy, + { + "search": True, + "read": True, + "create": False, + "update": False, + "delete": False, + }, + pttr_martigny, + ) + check_permission( + PatronTransactionPermissionPolicy, + { + "read": False, + }, + pttr_sion, + ) # Librarian with specific role # - search: any pttr # - other operations : allowed for pttr of its own organisation login_user(librarian_martigny.user) - check_permission(PatronTransactionPermissionPolicy, { - 'search': True, - 'read': True, - 'create': True, - 'update': True, - 'delete': True - }, pttr_martigny) - check_permission(PatronTransactionPermissionPolicy, { - 'search': True, - 'read': True, - 'create': True, - 'update': True, - 'delete': True - }, pttr_saxon) - check_permission(PatronTransactionPermissionPolicy, { - 'search': True, - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, pttr_sion) + check_permission( + PatronTransactionPermissionPolicy, + {"search": True, "read": True, "create": True, "update": True, "delete": True}, + pttr_martigny, + ) + check_permission( + PatronTransactionPermissionPolicy, + {"search": True, "read": True, "create": True, "update": True, "delete": True}, + pttr_saxon, + ) + check_permission( + PatronTransactionPermissionPolicy, + { + "search": True, + "read": False, + "create": False, + "update": False, + "delete": False, + }, + pttr_sion, + ) # Librarian without specific role # - search: any items # - read: only record of own organisation # - all other operations are disallowed - original_roles = librarian_martigny.get('roles', []) - librarian_martigny['roles'] = ['pro_read_only'] + original_roles = librarian_martigny.get("roles", []) + librarian_martigny["roles"] = ["pro_read_only"] librarian_martigny.update(librarian_martigny, dbcommit=True, reindex=True) - flush_index(PatronsSearch.Meta.index) + PatronsSearch.flush_and_refresh() login_user(librarian_martigny.user) # to refresh identity ! 
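    # ``flush_and_refresh()`` is presumably a small classmethod on the
    # search classes wrapping invenio-search, along the lines of:
    #
    #     @classmethod
    #     def flush_and_refresh(cls):
    #         current_search.flush_and_refresh(index=cls.Meta.index)
    #
    # which is why the explicit ``flush_index(PatronsSearch.Meta.index)``
    # calls could be replaced throughout these tests.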
- check_permission(PatronTransactionPermissionPolicy, { - 'search': True, - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, pttr_saxon) - check_permission(PatronTransactionPermissionPolicy, { - 'search': True, - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, pttr_sion) + check_permission( + PatronTransactionPermissionPolicy, + { + "search": True, + "read": True, + "create": False, + "update": False, + "delete": False, + }, + pttr_saxon, + ) + check_permission( + PatronTransactionPermissionPolicy, + { + "search": True, + "read": False, + "create": False, + "update": False, + "delete": False, + }, + pttr_sion, + ) # reset the librarian - librarian_martigny['roles'] = original_roles + librarian_martigny["roles"] = original_roles librarian_martigny.update(librarian_martigny, dbcommit=True, reindex=True) - flush_index(PatronsSearch.Meta.index) + PatronsSearch.flush_and_refresh() diff --git a/tests/api/patron_transactions/test_patron_transactions_rest.py b/tests/api/patron_transactions/test_patron_transactions_rest.py index efcef52db0..af256af7c1 100644 --- a/tests/api/patron_transactions/test_patron_transactions_rest.py +++ b/tests/api/patron_transactions/test_patron_transactions_rest.py @@ -25,142 +25,135 @@ import pytest from flask import url_for from invenio_accounts.testutils import login_user_via_session -from utils import VerifyRecordPermissionPatch, get_json, postdata, \ - to_relative_url +from utils import VerifyRecordPermissionPatch, get_json, postdata, to_relative_url from rero_ils.modules.api import IlsRecordError from rero_ils.modules.patron_transactions.api import PatronTransaction -from rero_ils.modules.patron_transactions.utils import \ - create_subscription_for_patron, get_transactions_pids_for_patron -from rero_ils.modules.utils import add_years, extracted_data_from_ref, \ - get_ref_for_pid +from rero_ils.modules.patron_transactions.utils import ( + create_subscription_for_patron, + get_transactions_pids_for_patron, +) +from rero_ils.modules.utils import add_years, extracted_data_from_ref, get_ref_for_pid -@mock.patch('invenio_records_rest.views.verify_record_permission', - mock.MagicMock(return_value=VerifyRecordPermissionPatch)) +@mock.patch( + "invenio_records_rest.views.verify_record_permission", + mock.MagicMock(return_value=VerifyRecordPermissionPatch), +) def test_patron_transactions_get( client, patron_transaction_overdue_martigny, rero_json_header ): """Test record retrieval.""" transaction = patron_transaction_overdue_martigny pid = transaction.pid - item_url = url_for('invenio_records_rest.pttr_item', pid_value=pid) - list_url = url_for('invenio_records_rest.pttr_list', q=f'pid:{pid}') + item_url = url_for("invenio_records_rest.pttr_item", pid_value=pid) + list_url = url_for("invenio_records_rest.pttr_list", q=f"pid:{pid}") item_url_with_resolve = url_for( - 'invenio_records_rest.pttr_item', - pid_value=pid, - resolve=1, - sources=1 + "invenio_records_rest.pttr_item", pid_value=pid, resolve=1, sources=1 ) res = client.get(item_url) assert res.status_code == 200 - assert res.headers['ETag'] == f'"{transaction.revision_id}"' + assert res.headers["ETag"] == f'"{transaction.revision_id}"' data = get_json(res) - assert transaction.dumps() == data['metadata'] + assert transaction.dumps() == data["metadata"] # Check metadata - for k in ['created', 'updated', 'metadata', 'links']: + for k in ["created", "updated", "metadata", "links"]: assert k in data # Check self links - res = 
client.get(to_relative_url(data['links']['self'])) + res = client.get(to_relative_url(data["links"]["self"])) assert res.status_code == 200 assert data == get_json(res) - assert transaction.dumps() == data['metadata'] + assert transaction.dumps() == data["metadata"] # check resolve res = client.get(item_url_with_resolve) assert res.status_code == 200 data = get_json(res) - assert transaction.replace_refs().dumps() == data['metadata'] + assert transaction.replace_refs().dumps() == data["metadata"] res = client.get(list_url) assert res.status_code == 200 data = get_json(res) - result = data['hits']['hits'][0]['metadata'] - assert result.pop('document') == {'pid': 'doc1', 'type': 'doc'} - assert result.pop('library') == {'pid': 'lib1', 'type': 'lib'} - assert result.pop('item') == {'pid': 'item8', 'type': 'item'} - del result['patron']['barcode'] + result = data["hits"]["hits"][0]["metadata"] + assert result.pop("document") == {"pid": "doc1", "type": "doc"} + assert result.pop("library") == {"pid": "lib1", "type": "lib"} + assert result.pop("item") == {"pid": "item8", "type": "item"} + del result["patron"]["barcode"] assert result == transaction.replace_refs() # Check for `rero+json` mime type response response = client.get(list_url, headers=rero_json_header) assert response.status_code == 200 data = get_json(response) - record = data.get('hits', {}).get('hits', [])[0] - assert record.get('metadata', {}).get('document') - assert record.get('metadata', {}).get('loan') - assert record.get('metadata', {}).get('loan', {}).get('item') + record = data.get("hits", {}).get("hits", [])[0] + assert record.get("metadata", {}).get("document") + assert record.get("metadata", {}).get("loan") + assert record.get("metadata", {}).get("loan", {}).get("item") -@mock.patch('invenio_records_rest.views.verify_record_permission', - mock.MagicMock(return_value=VerifyRecordPermissionPatch)) +@mock.patch( + "invenio_records_rest.views.verify_record_permission", + mock.MagicMock(return_value=VerifyRecordPermissionPatch), +) def test_patron_transactions_get_delete_resources( - client, - patron_transaction_overdue_martigny, - item4_lib_martigny + client, patron_transaction_overdue_martigny, item4_lib_martigny ): """Test patron transaction list if related resources are unavailable.""" - list_url = url_for('invenio_records_rest.pttr_list', format='rero') + list_url = url_for("invenio_records_rest.pttr_list", format="rero") item4_lib_martigny.delete(force=True, dbcommit=False, delindex=False) res = client.get(list_url) assert res.status_code == 200 -@mock.patch('invenio_records_rest.views.verify_record_permission', - mock.MagicMock(return_value=VerifyRecordPermissionPatch)) +@mock.patch( + "invenio_records_rest.views.verify_record_permission", + mock.MagicMock(return_value=VerifyRecordPermissionPatch), +) def test_patron_transactions_post_put_delete( - client, lib_martigny, patron_transaction_overdue_martigny, - json_header): + client, lib_martigny, patron_transaction_overdue_martigny, json_header +): """Test record retrieval.""" - pttr_pid = 'new_pttr' - item_url = url_for('invenio_records_rest.pttr_item', pid_value=pttr_pid) - list_url = url_for('invenio_records_rest.pttr_list', q='pid:new_pttr') + pttr_pid = "new_pttr" + item_url = url_for("invenio_records_rest.pttr_item", pid_value=pttr_pid) + list_url = url_for("invenio_records_rest.pttr_list", q="pid:new_pttr") transaction_data = deepcopy(patron_transaction_overdue_martigny) # Create record / POST - transaction_data['pid'] = pttr_pid - res, data = postdata( - client, 
- 'invenio_records_rest.pttr_list', - transaction_data - ) + transaction_data["pid"] = pttr_pid + res, data = postdata(client, "invenio_records_rest.pttr_list", transaction_data) assert res.status_code == 201 # Check that the returned record matches the given data - assert data['metadata'] == transaction_data + assert data["metadata"] == transaction_data res = client.get(item_url) assert res.status_code == 200 data = get_json(res) - assert transaction_data == data['metadata'] + assert transaction_data == data["metadata"] # Update record/PUT data = transaction_data - data['note'] = 'Test Note' - res = client.put( - item_url, - data=json.dumps(data), - headers=json_header - ) + data["note"] = "Test Note" + res = client.put(item_url, data=json.dumps(data), headers=json_header) assert res.status_code == 200 - assert res.headers['ETag'] != f'"{transaction_data.revision_id}"' + assert res.headers["ETag"] != f'"{transaction_data.revision_id}"' # Check that the returned record matches the given data data = get_json(res) - assert data['metadata']['note'] == 'Test Note' + assert data["metadata"]["note"] == "Test Note" res = client.get(item_url) assert res.status_code == 200 data = get_json(res) - assert data['metadata']['note'] == 'Test Note' + assert data["metadata"]["note"] == "Test Note" res = client.get(list_url) assert res.status_code == 200 - data = get_json(res)['hits']['hits'][0] - assert data['metadata']['note'] == 'Test Note' + data = get_json(res)["hits"]["hits"][0] + assert data["metadata"]["note"] == "Test Note" # Delete record # Deleting a transaction is not allowed, @@ -172,24 +165,24 @@ def test_patron_transactions_post_put_delete( clear_patron_transaction_data(pttr_pid) -@mock.patch('invenio_records_rest.views.verify_record_permission', - mock.MagicMock(return_value=VerifyRecordPermissionPatch)) +@mock.patch( + "invenio_records_rest.views.verify_record_permission", + mock.MagicMock(return_value=VerifyRecordPermissionPatch), +) def test_patron_transaction_photocopy_create( - client, lib_martigny, - patron_transaction_photocopy_martigny_data, system_librarian_martigny, + client, + lib_martigny, + patron_transaction_photocopy_martigny_data, + system_librarian_martigny, ): """Test creation and delete of photocopy fee transaction.""" # Create another kind of transaction transaction_data = deepcopy(patron_transaction_photocopy_martigny_data) - del transaction_data['pid'] - res, data = postdata( - client, - 'invenio_records_rest.pttr_list', - transaction_data - ) + del transaction_data["pid"] + res, data = postdata(client, "invenio_records_rest.pttr_list", transaction_data) assert res.status_code == 201 - pid = data['metadata']['pid'] - item_url = url_for('invenio_records_rest.pttr_item', pid_value=pid) + pid = data["metadata"]["pid"] + item_url = url_for("invenio_records_rest.pttr_item", pid_value=pid) with pytest.raises(IlsRecordError.NotDeleted): client.delete(item_url) @@ -198,26 +191,30 @@ def test_patron_transaction_photocopy_create( def test_patron_transaction_shortcuts_utils( - client, patron_transaction_overdue_martigny, loan_overdue_martigny): + client, patron_transaction_overdue_martigny, loan_overdue_martigny +): """Test patron transaction shortcuts and utils.""" can, reasons = patron_transaction_overdue_martigny.can_delete assert not can - assert reasons['links']['events'] + assert reasons["links"]["events"] - assert patron_transaction_overdue_martigny.loan_pid == \ - loan_overdue_martigny.pid + assert patron_transaction_overdue_martigny.loan_pid == loan_overdue_martigny.pid - 
assert patron_transaction_overdue_martigny.patron_pid == \ - loan_overdue_martigny.patron_pid + assert ( + patron_transaction_overdue_martigny.patron_pid + == loan_overdue_martigny.patron_pid + ) def test_filtered_patron_transactions_get( - client, librarian_martigny, - patron_transaction_overdue_martigny, - librarian_sion, patron_martigny + client, + librarian_martigny, + patron_transaction_overdue_martigny, + librarian_sion, + patron_martigny, ): """Test patron transaction filter by organisation.""" - list_url = url_for('invenio_records_rest.pttr_list') + list_url = url_for("invenio_records_rest.pttr_list") res = client.get(list_url) assert res.status_code == 401 @@ -227,7 +224,7 @@ def test_filtered_patron_transactions_get( res = client.get(list_url) assert res.status_code == 200 data = get_json(res) - assert data['hits']['total']['value'] == 1 + assert data["hits"]["total"]["value"] == 1 # Martigny login_user_via_session(client, librarian_martigny.user) @@ -235,20 +232,19 @@ def test_filtered_patron_transactions_get( res = client.get(list_url) assert res.status_code == 200 data = get_json(res) - assert data['hits']['total']['value'] == 1 + assert data["hits"]["total"]["value"] == 1 # Sion login_user_via_session(client, librarian_sion.user) - list_url = url_for('invenio_records_rest.pttr_list') + list_url = url_for("invenio_records_rest.pttr_list") res = client.get(list_url) assert res.status_code == 200 data = get_json(res) - assert data['hits']['total']['value'] == 0 + assert data["hits"]["total"]["value"] == 0 -def test_patron_subscription_transaction( - patron_type_youngsters_sion, patron_sion): +def test_patron_subscription_transaction(patron_type_youngsters_sion, patron_sion): """Test the creation of a subscription transaction for a patron.""" subscription_start_date = datetime.now() subscription_end_date = add_years(subscription_start_date, 1) @@ -263,14 +259,14 @@ def test_patron_subscription_transaction( subscription_end_date, dbcommit=True, reindex=True, - delete_pid=True + delete_pid=True, ) - assert subscription.get_links_to_me() == {'events': 1} + assert subscription.get_links_to_me() == {"events": 1} assert subscription.get_links_to_me(get_pids=True) event = list(subscription.events)[0] - assert event.get('type') == 'fee' - assert event.get('subtype') == 'other' - assert event.get('amount') == subscription.get('total_amount') + assert event.get("type") == "fee" + assert event.get("subtype") == "other" + assert event.get("amount") == subscription.get("total_amount") # Delete the record created above clear_patron_transaction_data(subscription.pid) @@ -278,57 +274,44 @@ def test_patron_subscription_transaction( def test_get_transactions_pids_for_patron(patron_sion): """Test function get_transactions_pids_for_patron.""" - assert len(list(get_transactions_pids_for_patron( - patron_sion.pid, status='open' - ))) == 1 - assert not list(get_transactions_pids_for_patron( - patron_sion.pid, status='closed')) + assert ( + len(list(get_transactions_pids_for_patron(patron_sion.pid, status="open"))) == 1 + ) + assert not list(get_transactions_pids_for_patron(patron_sion.pid, status="closed")) -def test_transactions_add_manual_fee(client, librarian_sion, org_sion, - patron_sion): +def test_transactions_add_manual_fee(client, librarian_sion, org_sion, patron_sion): """Test for adding manual fees.""" # Sion login_user_via_session(client, librarian_sion.user) data = { - 'type': 'photocopy', - 'total_amount': 20, - 'creation_date': datetime.now().isoformat(), - 'patron': { - '$ref': 
get_ref_for_pid('ptrn', patron_sion.get('pid')) - }, - 'organisation': { - '$ref': get_ref_for_pid('org', org_sion.get('pid')) + "type": "photocopy", + "total_amount": 20, + "creation_date": datetime.now().isoformat(), + "patron": {"$ref": get_ref_for_pid("ptrn", patron_sion.get("pid"))}, + "organisation": {"$ref": get_ref_for_pid("org", org_sion.get("pid"))}, + "library": librarian_sion.get("libraries")[0], + "note": "Thesis", + "status": "open", + "event": { + "operator": {"$ref": get_ref_for_pid("ptrn", librarian_sion.get("pid"))}, + "library": librarian_sion.get("libraries")[0], }, - 'library': librarian_sion.get('libraries')[0], - 'note': 'Thesis', - 'status': 'open', - 'event': { - 'operator': { - '$ref': get_ref_for_pid('ptrn', librarian_sion.get('pid')) - }, - 'library': librarian_sion.get('libraries')[0] - } } - post_entrypoint = 'invenio_records_rest.pttr_list' - res, data = postdata( - client, - post_entrypoint, - data - ) + post_entrypoint = "invenio_records_rest.pttr_list" + res, data = postdata(client, post_entrypoint, data) assert res.status_code == 201 - metadata = data['metadata'] - record = PatronTransaction.get_record_by_pid(metadata['pid']) - assert record.get('library') == librarian_sion.get('libraries')[0] + metadata = data["metadata"] + record = PatronTransaction.get_record_by_pid(metadata["pid"]) + assert record.get("library") == librarian_sion.get("libraries")[0] event = next(record.events) - assert extracted_data_from_ref(event.get('operator')) \ - == librarian_sion.get('pid') - assert event.get('library') == librarian_sion.get('libraries')[0] + assert extracted_data_from_ref(event.get("operator")) == librarian_sion.get("pid") + assert event.get("library") == librarian_sion.get("libraries")[0] # Delete the record created above - clear_patron_transaction_data(metadata['pid']) + clear_patron_transaction_data(metadata["pid"]) def clear_patron_transaction_data(pid): diff --git a/tests/api/patron_types/test_patron_types_permissions.py b/tests/api/patron_types/test_patron_types_permissions.py index f8a39f5c5d..ebb1345773 100644 --- a/tests/api/patron_types/test_patron_types_permissions.py +++ b/tests/api/patron_types/test_patron_types_permissions.py @@ -22,28 +22,29 @@ from invenio_accounts.testutils import login_user_via_session from utils import check_permission, get_json -from rero_ils.modules.patron_types.permissions import \ - PatronTypePermissionPolicy +from rero_ils.modules.patron_types.permissions import PatronTypePermissionPolicy -def test_patron_types_permissions_api(client, librarian_martigny, - system_librarian_martigny, - patron_type_adults_martigny, - patron_type_youngsters_sion): +def test_patron_types_permissions_api( + client, + librarian_martigny, + system_librarian_martigny, + patron_type_adults_martigny, + patron_type_youngsters_sion, +): """Test patron types permissions api.""" ptty_permissions_url = url_for( - 'api_blueprint.permissions', - route_name='patron_types' + "api_blueprint.permissions", route_name="patron_types" ) ptty_adult_martigny_permissions_url = url_for( - 'api_blueprint.permissions', - route_name='patron_types', - record_pid=patron_type_adults_martigny.pid + "api_blueprint.permissions", + route_name="patron_types", + record_pid=patron_type_adults_martigny.pid, ) ptty_youngsters_sion_permissions_url = url_for( - 'api_blueprint.permissions', - route_name='patron_types', - record_pid=patron_type_youngsters_sion.pid + "api_blueprint.permissions", + route_name="patron_types", + record_pid=patron_type_youngsters_sion.pid, ) # Not logged 
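Between these hunks it is worth spelling out the `check_permission` helper that all of these permission tests lean on; the call sites show its contract: a policy class, a mapping of action name to expected boolean, and the record under test. A plausible sketch, assuming invenio-records-permissions-style policies that are instantiated per action (this is an illustration, not the actual `tests/utils.py` code):

    def check_permission(policy, actions, record):
        # ``actions`` maps 'search' / 'read' / 'create' / 'update' /
        # 'delete' to the boolean the policy should return for the
        # currently logged-in identity on ``record``.
        for action, expected in actions.items():
            permission = policy(action, record=record)
            assert permission.can() == expected, (
                f"{action!r} expected {expected} on {record}"
            )

This also explains why each block is preceded by `login_user(...)`, or by an `identity_changed` signal carrying `AnonymousIdentity()`: `permission.can()` is evaluated against whatever identity is current at call time.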
@@ -59,13 +60,13 @@ def test_patron_types_permissions_api(client, librarian_martigny, res = client.get(ptty_adult_martigny_permissions_url) assert res.status_code == 200 data = get_json(res) - for action in ['list', 'read']: - assert data[action]['can'] - for action in ['create', 'update', 'delete']: - assert not data[action]['can'] + for action in ["list", "read"]: + assert data[action]["can"] + for action in ["create", "update", "delete"]: + assert not data[action]["can"] res = client.get(ptty_youngsters_sion_permissions_url) data = get_json(res) - assert not data['read']['can'] + assert not data["read"]["can"] # Logged as system librarian # * sys_lib can do anything about patron_type for its own organisation @@ -74,20 +75,22 @@ def test_patron_types_permissions_api(client, librarian_martigny, res = client.get(ptty_adult_martigny_permissions_url) assert res.status_code == 200 data = get_json(res) - for action in ['list', 'read', 'create', 'update', 'delete']: - assert data[action]['can'] + for action in ["list", "read", "create", "update", "delete"]: + assert data[action]["can"] res = client.get(ptty_youngsters_sion_permissions_url) assert res.status_code == 200 data = get_json(res) - for action in ['update', 'delete']: - assert not data[action]['can'] + for action in ["update", "delete"]: + assert not data[action]["can"] -def test_patron_types_permissions(patron_martigny, - librarian_martigny, - system_librarian_martigny, - patron_type_adults_martigny, - patron_type_youngsters_sion): +def test_patron_types_permissions( + patron_martigny, + librarian_martigny, + system_librarian_martigny, + patron_type_adults_martigny, + patron_type_youngsters_sion, +): """Test patron types permissions class.""" permission_policy = PatronTypePermissionPolicy @@ -97,63 +100,57 @@ def test_patron_types_permissions(patron_martigny, identity_changed.send( current_app._get_current_object(), identity=AnonymousIdentity() ) - check_permission(permission_policy, {'search': False}, None) - check_permission(permission_policy, {'create': False}, {}) - check_permission(permission_policy, { - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, patron_type_adults_martigny) + check_permission(permission_policy, {"search": False}, None) + check_permission(permission_policy, {"create": False}, {}) + check_permission( + permission_policy, + {"read": False, "create": False, "update": False, "delete": False}, + patron_type_adults_martigny, + ) # Patron # A simple patron can't operate any operation about PatronType login_user(patron_martigny.user) - check_permission(permission_policy, {'search': False}, None) - check_permission(permission_policy, {'create': False}, {}) - check_permission(permission_policy, { - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, patron_type_adults_martigny) + check_permission(permission_policy, {"search": False}, None) + check_permission(permission_policy, {"create": False}, {}) + check_permission( + permission_policy, + {"read": False, "create": False, "update": False, "delete": False}, + patron_type_adults_martigny, + ) # Librarian # - search : any PatronType despite organisation owner # - read : only PatronType for its own organisation # - create/update/delete: disallowed login_user(librarian_martigny.user) - check_permission(permission_policy, {'search': True}, None) - check_permission(permission_policy, {'create': False}, {}) - check_permission(permission_policy, { - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, 
patron_type_adults_martigny) - check_permission(permission_policy, { - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, patron_type_youngsters_sion) + check_permission(permission_policy, {"search": True}, None) + check_permission(permission_policy, {"create": False}, {}) + check_permission( + permission_policy, + {"read": True, "create": False, "update": False, "delete": False}, + patron_type_adults_martigny, + ) + check_permission( + permission_policy, + {"read": False, "create": False, "update": False, "delete": False}, + patron_type_youngsters_sion, + ) # SystemLibrarian # - search : any PatronType despite organisation owner # - read/create/update/delete : only PatronType for its own # organisation login_user(system_librarian_martigny.user) - check_permission(permission_policy, {'search': True}, None) - check_permission(permission_policy, {'create': True}, {}) - check_permission(permission_policy, { - 'read': True, - 'create': True, - 'update': True, - 'delete': True - }, patron_type_adults_martigny) - check_permission(permission_policy, { - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, patron_type_youngsters_sion) + check_permission(permission_policy, {"search": True}, None) + check_permission(permission_policy, {"create": True}, {}) + check_permission( + permission_policy, + {"read": True, "create": True, "update": True, "delete": True}, + patron_type_adults_martigny, + ) + check_permission( + permission_policy, + {"read": False, "create": False, "update": False, "delete": False}, + patron_type_youngsters_sion, + ) diff --git a/tests/api/patron_types/test_patron_types_rest.py b/tests/api/patron_types/test_patron_types_rest.py index 08a7e50653..9fe0f6a983 100644 --- a/tests/api/patron_types/test_patron_types_rest.py +++ b/tests/api/patron_types/test_patron_types_rest.py @@ -23,127 +23,119 @@ import pytest from flask import url_for from invenio_accounts.testutils import login_user_via_session -from utils import VerifyRecordPermissionPatch, get_json, postdata, \ - to_relative_url +from utils import VerifyRecordPermissionPatch, get_json, postdata, to_relative_url from rero_ils.modules.api import IlsRecordError from rero_ils.modules.patron_types.api import PatronType from rero_ils.modules.utils import get_ref_for_pid -def test_patron_types_permissions(client, patron_type_children_martigny, - json_header): +def test_patron_types_permissions(client, patron_type_children_martigny, json_header): """Test record retrieval.""" - item_url = url_for('invenio_records_rest.ptty_item', pid_value='ptty1') + item_url = url_for("invenio_records_rest.ptty_item", pid_value="ptty1") res = client.get(item_url) assert res.status_code == 401 - res, _ = postdata( - client, - 'invenio_records_rest.ptty_list', - {} - ) + res, _ = postdata(client, "invenio_records_rest.ptty_list", {}) assert res.status_code == 401 res = client.put( - url_for('invenio_records_rest.ptty_item', pid_value='ptty1'), + url_for("invenio_records_rest.ptty_item", pid_value="ptty1"), data={}, - headers=json_header + headers=json_header, ) res = client.delete(item_url) assert res.status_code == 401 - res = client.get(url_for('patron_types.name_validate', name='standard')) + res = client.get(url_for("patron_types.name_validate", name="standard")) assert res.status_code == 401 -@mock.patch('invenio_records_rest.views.verify_record_permission', - mock.MagicMock(return_value=VerifyRecordPermissionPatch)) +@mock.patch( + "invenio_records_rest.views.verify_record_permission", + 
mock.MagicMock(return_value=VerifyRecordPermissionPatch), +) def test_patron_types_get(client, patron_type_children_martigny): """Test record retrieval.""" - item_url = url_for('invenio_records_rest.ptty_item', pid_value='ptty1') + item_url = url_for("invenio_records_rest.ptty_item", pid_value="ptty1") patron_type = patron_type_children_martigny res = client.get(item_url) assert res.status_code == 200 - assert res.headers['ETag'] == f'"{patron_type.revision_id}"' + assert res.headers["ETag"] == f'"{patron_type.revision_id}"' data = get_json(res) - assert patron_type.dumps() == data['metadata'] + assert patron_type.dumps() == data["metadata"] # Check metadata - for k in ['created', 'updated', 'metadata', 'links']: + for k in ["created", "updated", "metadata", "links"]: assert k in data # Check self links - res = client.get(to_relative_url(data['links']['self'])) + res = client.get(to_relative_url(data["links"]["self"])) assert res.status_code == 200 assert data == get_json(res) - assert patron_type.dumps() == data['metadata'] + assert patron_type.dumps() == data["metadata"] - list_url = url_for('invenio_records_rest.ptty_list', pid='ptty1') + list_url = url_for("invenio_records_rest.ptty_list", pid="ptty1") res = client.get(list_url) assert res.status_code == 200 data = get_json(res) - assert data['hits']['hits'][0]['metadata'] == patron_type.replace_refs() + assert data["hits"]["hits"][0]["metadata"] == patron_type.replace_refs() -@mock.patch('invenio_records_rest.views.verify_record_permission', - mock.MagicMock(return_value=VerifyRecordPermissionPatch)) -def test_patron_types_post_put_delete(client, org_martigny, - patron_type_children_martigny_data, - json_header): +@mock.patch( + "invenio_records_rest.views.verify_record_permission", + mock.MagicMock(return_value=VerifyRecordPermissionPatch), +) +def test_patron_types_post_put_delete( + client, org_martigny, patron_type_children_martigny_data, json_header +): """Test record retrieval.""" # Create record / POST - item_url = url_for('invenio_records_rest.ptty_item', pid_value='1') - list_url = url_for('invenio_records_rest.ptty_list', q='pid:1') + item_url = url_for("invenio_records_rest.ptty_item", pid_value="1") + list_url = url_for("invenio_records_rest.ptty_list", q="pid:1") - patron_type_children_martigny_data['pid'] = '1' + patron_type_children_martigny_data["pid"] = "1" res, _ = postdata( - client, - 'invenio_records_rest.ptty_list', - patron_type_children_martigny_data + client, "invenio_records_rest.ptty_list", patron_type_children_martigny_data ) assert res.status_code == 201 # Check that the returned record matches the given data data = get_json(res) - assert data['metadata'] == patron_type_children_martigny_data + assert data["metadata"] == patron_type_children_martigny_data res = client.get(item_url) assert res.status_code == 200 data = get_json(res) - assert patron_type_children_martigny_data == data['metadata'] + assert patron_type_children_martigny_data == data["metadata"] # Update record/PUT data = patron_type_children_martigny_data - data['name'] = 'Test Name' - res = client.put( - item_url, - data=json.dumps(data), - headers=json_header - ) + data["name"] = "Test Name" + res = client.put(item_url, data=json.dumps(data), headers=json_header) assert res.status_code == 200 # assert res.headers['ETag'] != f'"{librarie.revision_id}"' # Check that the returned record matches the given data data = get_json(res) - assert data['metadata']['name'] == 'Test Name' + assert data["metadata"]["name"] == "Test Name" res = 
client.get(item_url) assert res.status_code == 200 data = get_json(res) - assert data['metadata']['name'] == 'Test Name' + assert data["metadata"]["name"] == "Test Name" res = client.get(list_url) assert res.status_code == 200 - data = get_json(res)['hits']['hits'][0] - assert data['metadata']['name'] == 'Test Name' + data = get_json(res)["hits"]["hits"][0] + assert data["metadata"]["name"] == "Test Name" # Delete record/DELETE res = client.delete(item_url) @@ -153,107 +145,115 @@ def test_patron_types_post_put_delete(client, org_martigny, assert res.status_code == 410 -@mock.patch('rero_ils.modules.decorators.login_and_librarian', - mock.MagicMock()) +@mock.patch("rero_ils.modules.decorators.login_and_librarian", mock.MagicMock()) def test_patron_types_name_validate(client): """Test patron type name validation.""" - url = url_for('patron_types.name_validate', name='children') + url = url_for("patron_types.name_validate", name="children") class current_librarian: class organisation: - pid = 'org1' + pid = "org1" + with mock.patch( - 'rero_ils.modules.patron_types.views.current_librarian', - current_librarian + "rero_ils.modules.patron_types.views.current_librarian", current_librarian ): res = client.get(url) assert res.status_code == 200 - assert get_json(res) == {'name': 'children'} + assert get_json(res) == {"name": "children"} class current_librarian: class organisation: - pid = 'does not exists' + pid = "does not exists" + with mock.patch( - 'rero_ils.modules.patron_types.views.current_librarian', - current_librarian + "rero_ils.modules.patron_types.views.current_librarian", current_librarian ): res = client.get(url) assert res.status_code == 200 - assert get_json(res) == {'name': None} + assert get_json(res) == {"name": None} -def test_patron_types_can_delete(client, patron_type_children_martigny, - patron_martigny, - circulation_policies): +def test_patron_types_can_delete( + client, patron_type_children_martigny, patron_martigny, circulation_policies +): """Test can delete a patron type.""" can, reasons = patron_type_children_martigny.can_delete assert not can - assert reasons['links']['circ_policies'] - assert reasons['links']['patrons'] + assert reasons["links"]["circ_policies"] + assert reasons["links"]["patrons"] def test_filtered_patron_types_get( - client, librarian_martigny, patron_type_children_martigny, - patron_type_adults_martigny, librarian_sion, - patron_type_youngsters_sion, patron_type_grown_sion): + client, + librarian_martigny, + patron_type_children_martigny, + patron_type_adults_martigny, + librarian_sion, + patron_type_youngsters_sion, + patron_type_grown_sion, +): """Test patron types filter by organisation.""" # Martigny login_user_via_session(client, librarian_martigny.user) - list_url = url_for('invenio_records_rest.ptty_list') + list_url = url_for("invenio_records_rest.ptty_list") res = client.get(list_url) assert res.status_code == 200 data = get_json(res) - assert data['hits']['total']['value'] == 2 + assert data["hits"]["total"]["value"] == 2 # Sion login_user_via_session(client, librarian_sion.user) - list_url = url_for('invenio_records_rest.ptty_list') + list_url = url_for("invenio_records_rest.ptty_list") res = client.get(list_url) assert res.status_code == 200 data = get_json(res) - assert data['hits']['total']['value'] == 2 + assert data["hits"]["total"]["value"] == 2 -def test_patron_type_secure_api(client, json_header, - patron_type_children_martigny, - librarian_martigny, - librarian_sion): +def test_patron_type_secure_api( + client, + 
json_header, + patron_type_children_martigny, + librarian_martigny, + librarian_sion, +): """Test patron type secure api access.""" # Martigny login_user_via_session(client, librarian_martigny.user) - record_url = url_for('invenio_records_rest.ptty_item', - pid_value=patron_type_children_martigny.pid) + record_url = url_for( + "invenio_records_rest.ptty_item", pid_value=patron_type_children_martigny.pid + ) res = client.get(record_url) assert res.status_code == 200 # Sion login_user_via_session(client, librarian_sion.user) - record_url = url_for('invenio_records_rest.ptty_item', - pid_value=patron_type_children_martigny.pid) + record_url = url_for( + "invenio_records_rest.ptty_item", pid_value=patron_type_children_martigny.pid + ) res = client.get(record_url) assert res.status_code == 403 -def test_patron_type_secure_api_create(client, json_header, - patron_type_children_martigny, - system_librarian_martigny, - system_librarian_sion, - patron_type_children_martigny_data): +def test_patron_type_secure_api_create( + client, + json_header, + patron_type_children_martigny, + system_librarian_martigny, + system_librarian_sion, + patron_type_children_martigny_data, +): """Test patron type secure api create.""" # Martigny login_user_via_session(client, system_librarian_martigny.user) - post_entrypoint = 'invenio_records_rest.ptty_list' + post_entrypoint = "invenio_records_rest.ptty_list" - del patron_type_children_martigny_data['pid'] - res, _ = postdata( - client, - post_entrypoint, - patron_type_children_martigny_data - ) + del patron_type_children_martigny_data["pid"] + res, _ = postdata(client, post_entrypoint, patron_type_children_martigny_data) assert res.status_code == 201 # Sion @@ -267,45 +267,45 @@ def test_patron_type_secure_api_create(client, json_header, assert res.status_code == 403 -def test_patron_type_secure_api_update(client, json_header, - patron_type_adults_martigny, - system_librarian_martigny, - system_librarian_sion, - patron_type_adults_martigny_data): +def test_patron_type_secure_api_update( + client, + json_header, + patron_type_adults_martigny, + system_librarian_martigny, + system_librarian_sion, + patron_type_adults_martigny_data, +): """Test patron type secure api create.""" login_user_via_session(client, system_librarian_martigny.user) - record_url = url_for('invenio_records_rest.ptty_item', - pid_value=patron_type_adults_martigny.pid) + record_url = url_for( + "invenio_records_rest.ptty_item", pid_value=patron_type_adults_martigny.pid + ) data = patron_type_adults_martigny_data - data['name'] = 'New Name' - res = client.put( - record_url, - data=json.dumps(data), - headers=json_header - ) + data["name"] = "New Name" + res = client.put(record_url, data=json.dumps(data), headers=json_header) assert res.status_code == 200 # Sion login_user_via_session(client, system_librarian_sion.user) - res = client.put( - record_url, - data=json.dumps(data), - headers=json_header - ) + res = client.put(record_url, data=json.dumps(data), headers=json_header) assert res.status_code == 403 -def test_patron_type_secure_api_delete(client, json_header, - patron_type_adults_martigny, - system_librarian_martigny, - system_librarian_sion, - patron_type_adults_martigny_data): +def test_patron_type_secure_api_delete( + client, + json_header, + patron_type_adults_martigny, + system_librarian_martigny, + system_librarian_sion, + patron_type_adults_martigny_data, +): """Test patron type secure api delete.""" login_user_via_session(client, system_librarian_martigny.user) - record_url = 
url_for('invenio_records_rest.ptty_item', - pid_value=patron_type_adults_martigny.pid) + record_url = url_for( + "invenio_records_rest.ptty_item", pid_value=patron_type_adults_martigny.pid + ) with pytest.raises(IlsRecordError.NotDeleted): res = client.delete(record_url) @@ -319,17 +319,20 @@ def test_patron_type_secure_api_delete(client, json_header, def test_patron_types_subscription( - patron_type_youngsters_sion, patron_type_adults_martigny, - patron_type_grown_sion, patron_sion): + patron_type_youngsters_sion, + patron_type_adults_martigny, + patron_type_grown_sion, + patron_sion, +): """Test subscription behavior for patron_types.""" assert patron_type_youngsters_sion.is_subscription_required # A patron_type with a subscription amount equal to 0 doesn't # require a subscription - patron_type_youngsters_sion['subscription_amount'] = 0 + patron_type_youngsters_sion["subscription_amount"] = 0 assert not patron_type_youngsters_sion.is_subscription_required - del patron_type_youngsters_sion['subscription_amount'] + del patron_type_youngsters_sion["subscription_amount"] assert not patron_type_youngsters_sion.is_subscription_required # Test the 'get_yearly_subscription_patron_types' function. @@ -338,13 +341,15 @@ def test_patron_types_subscription( # Test 'get_linked_patrons' functions. assert len(list(patron_type_grown_sion.get_linked_patron())) == 1 assert len(list(patron_type_youngsters_sion.get_linked_patron())) == 0 - patron_sion['patron']['type']['$ref'] = get_ref_for_pid( - 'ptty', patron_type_youngsters_sion.pid) + patron_sion["patron"]["type"]["$ref"] = get_ref_for_pid( + "ptty", patron_type_youngsters_sion.pid + ) patron_sion.update(patron_sion, dbcommit=True) patron_sion.reindex() assert len(list(patron_type_grown_sion.get_linked_patron())) == 0 assert len(list(patron_type_youngsters_sion.get_linked_patron())) == 1 - patron_sion['patron']['type']['$ref'] = get_ref_for_pid( - 'ptty', patron_type_grown_sion.pid) + patron_sion["patron"]["type"]["$ref"] = get_ref_for_pid( + "ptty", patron_type_grown_sion.pid + ) patron_sion.update(patron_sion, dbcommit=True) patron_sion.reindex() diff --git a/tests/api/patrons/test_patrons_blocked.py b/tests/api/patrons/test_patrons_blocked.py index 041660cbd5..329afdd9c2 100644 --- a/tests/api/patrons/test_patrons_blocked.py +++ b/tests/api/patrons/test_patrons_blocked.py @@ -26,93 +26,88 @@ def test_blocked_field_exists( - client, - librarian_martigny, - patron_martigny, - patron3_martigny_blocked): + client, librarian_martigny, patron_martigny, patron3_martigny_blocked +): """Test ptrn6 have blocked field present and set to False.""" login_user_via_session(client, librarian_martigny.user) patron3 = patron3_martigny_blocked # non blocked patron non_blocked_patron_url = url_for( - 'invenio_records_rest.ptrn_item', - pid_value=patron_martigny.pid) + "invenio_records_rest.ptrn_item", pid_value=patron_martigny.pid + ) res = client.get(non_blocked_patron_url) assert res.status_code == 200 data = get_json(res) - assert 'blocked' in data['metadata']['patron'] - assert data['metadata']['patron']['blocked'] is False + assert "blocked" in data["metadata"]["patron"] + assert data["metadata"]["patron"]["blocked"] is False # blocked patron blocked_patron_url = url_for( - 'invenio_records_rest.ptrn_item', - pid_value=patron3_martigny_blocked.pid) + "invenio_records_rest.ptrn_item", pid_value=patron3_martigny_blocked.pid + ) res = client.get(blocked_patron_url) assert res.status_code == 200 data = get_json(res) - assert 'blocked' in data['metadata']['patron'] - 
assert data['metadata']['patron']['blocked'] is True + assert "blocked" in data["metadata"]["patron"] + assert data["metadata"]["patron"]["blocked"] is True assert patron3.is_blocked - note = patron3.patron.get('blocked_note') + note = patron3.patron.get("blocked_note") assert note and note in patron3.get_blocked_message() -def test_blocked_field_not_present( - client, - librarian_martigny, - patron2_martigny): +def test_blocked_field_not_present(client, librarian_martigny, patron2_martigny): """Test ptrn7 do not have any blocked field.""" login_user_via_session(client, librarian_martigny.user) - item_url = url_for( - 'invenio_records_rest.ptrn_item', - pid_value=patron2_martigny.pid) + item_url = url_for("invenio_records_rest.ptrn_item", pid_value=patron2_martigny.pid) res = client.get(item_url) assert res.status_code == 200 data = get_json(res) - assert 'blocked' not in data['metadata']['patron'] + assert "blocked" not in data["metadata"]["patron"] -def test_blocked_patron_cannot_request(client, - librarian_martigny, - item_lib_martigny, - lib_martigny, - patron_martigny, - patron3_martigny_blocked, - circulation_policies): +def test_blocked_patron_cannot_request( + client, + librarian_martigny, + item_lib_martigny, + lib_martigny, + patron_martigny, + patron3_martigny_blocked, + circulation_policies, +): login_user_via_session(client, librarian_martigny.user) res = client.get( url_for( - 'api_item.can_request', + "api_item.can_request", item_pid=item_lib_martigny.pid, library_pid=lib_martigny.pid, - patron_barcode=patron3_martigny_blocked.get( - 'patron', {}).get('barcode')[0] + patron_barcode=patron3_martigny_blocked.get("patron", {}).get("barcode")[0], ) ) assert res.status_code == 200 data = get_json(res) - assert not data['can'] + assert not data["can"] # Check with valid patron res = client.get( url_for( - 'api_item.can_request', + "api_item.can_request", item_pid=item_lib_martigny.pid, library_pid=lib_martigny.pid, - patron_barcode=patron_martigny.get( - 'patron', {}).get('barcode')[0] + patron_barcode=patron_martigny.get("patron", {}).get("barcode")[0], ) ) assert res.status_code == 200 data = get_json(res) - assert data['can'] + assert data["can"] # Create "virtual" Loan (not registered) - loan = Loan({ - 'item_pid': item_pid_to_object(item_lib_martigny.pid), - 'library_pid': lib_martigny.pid, - 'patron_pid': patron3_martigny_blocked.pid - }) + loan = Loan( + { + "item_pid": item_pid_to_object(item_lib_martigny.pid), + "library_pid": lib_martigny.pid, + "patron_pid": patron3_martigny_blocked.pid, + } + ) assert not can_be_requested(loan) diff --git a/tests/api/patrons/test_patrons_marshmallow.py b/tests/api/patrons/test_patrons_marshmallow.py index 9adc188374..b537ffca84 100644 --- a/tests/api/patrons/test_patrons_marshmallow.py +++ b/tests/api/patrons/test_patrons_marshmallow.py @@ -29,8 +29,7 @@ def test_patrons_marshmallow_loaders( - client, librarian_martigny, system_librarian_martigny_data_tmp, - json_header + client, librarian_martigny, system_librarian_martigny_data_tmp, json_header ): """Test marshmallow schema/restrictions for Patron resources.""" @@ -39,15 +38,16 @@ def test_patrons_marshmallow_loaders( # allowed on a Patron, even changes any roles. 
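The steps that follow exercise the marshmallow loader restriction on `roles`: a plain `Patron.create()` from the console bypasses it, while the REST loader rejects roles the logged-in user is not allowed to assign. A compact sketch of the idea, assuming a marshmallow field validator; the schema name and the hard-coded allowed set are purely illustrative, not the actual rero-ils loader:

    from marshmallow import Schema, ValidationError, fields, validates

    class PatronRolesSchemaSketch(Schema):
        """Illustrative loader schema, not the real rero-ils one."""

        roles = fields.List(fields.String())

        @validates("roles")
        def validate_roles(self, value, **kwargs):
            # In a real loader the allowed set would be derived from the
            # current logged-in user's own roles; hard-coded for the sketch.
            allowed = {"patron", "pro_user_manager"}
            if not set(value) <= allowed:
                raise ValidationError("Validation error: unauthorized role.")

A POST payload carrying an out-of-scope role then fails to load, which invenio-records-rest surfaces as the HTTP 400 with 'Validation error' in the message asserted in the steps below.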
user_data = create_user_from_data(system_librarian_martigny_data_tmp) from rero_ils.modules.users.api import User + user_data = User.remove_fields(user_data) patron = Patron.create(user_data, dbcommit=True, reindex=True) - assert patron and patron['roles'] == [UserRole.FULL_PERMISSIONS] + assert patron and patron["roles"] == [UserRole.FULL_PERMISSIONS] roles = [UserRole.ACQUISITION_MANAGER, UserRole.CATALOG_MANAGER] - patron['roles'] = roles + patron["roles"] = roles patron = patron.update(patron, dbcommit=True, reindex=True) patron = Patron.get_record_by_pid(patron.pid) - assert patron['roles'] == roles + assert patron["roles"] == roles patron.delete(dbcommit=True, delindex=True) patron = Patron.get_record_by_pid(patron.pid) @@ -59,48 +59,46 @@ def test_patrons_marshmallow_loaders( # roles. login_user_via_session(client, librarian_martigny.user) - user_data['roles'] = [UserRole.FULL_PERMISSIONS] + user_data["roles"] = [UserRole.FULL_PERMISSIONS] user_data = create_user_from_data(system_librarian_martigny_data_tmp) - del (user_data['pid']) + del user_data["pid"] # Step 1 :: Send POST API to create user with bad roles --> 400 # Should fail because the current logged user doesn't have authorization # to deal with `roles` of the user data. - res, response_data = postdata( - client, 'invenio_records_rest.ptrn_list', user_data) + res, response_data = postdata(client, "invenio_records_rest.ptrn_list", user_data) assert res.status_code == 400 - assert 'Validation error' in response_data['message'] + assert "Validation error" in response_data["message"] # Step 2 :: Send POST API to create user with correct roles --> 201 # Update user data with correct `roles` values and create the # user. - original_roles = librarian_martigny['roles'] - librarian_martigny['roles'] = [UserRole.LIBRARY_ADMINISTRATOR] + original_roles = librarian_martigny["roles"] + librarian_martigny["roles"] = [UserRole.LIBRARY_ADMINISTRATOR] librarian_martigny.update(librarian_martigny, dbcommit=True, reindex=True) login_user_via_session(client, librarian_martigny.user) - user_data['roles'] = [UserRole.USER_MANAGER] - res, response_data = postdata( - client, 'invenio_records_rest.ptrn_list', user_data) + user_data["roles"] = [UserRole.USER_MANAGER] + res, response_data = postdata(client, "invenio_records_rest.ptrn_list", user_data) assert res.status_code == 201 - pid = response_data['metadata']['pid'] + pid = response_data["metadata"]["pid"] # Step 3 :: Send PUT API to update roles --> 400 # Try to update the created patron to add it some unauthorized roles - item_url = url_for('invenio_records_rest.ptrn_item', pid_value=pid) - user_data['pid'] = pid - user_data['roles'] = [UserRole.FULL_PERMISSIONS] + item_url = url_for("invenio_records_rest.ptrn_item", pid_value=pid) + user_data["pid"] = pid + user_data["roles"] = [UserRole.FULL_PERMISSIONS] res = client.put(item_url, data=json.dumps(user_data), headers=json_header) assert res.status_code == 400 - assert 'Validation error' in res.json['message'] + assert "Validation error" in res.json["message"] # Step 4 :: Update the patron using console # Force the patron update using console patron = Patron.get_record_by_pid(pid) - patron['roles'] = [UserRole.FULL_PERMISSIONS] + patron["roles"] = [UserRole.FULL_PERMISSIONS] patron.update(patron, dbcommit=True, reindex=True) patron = Patron.get_record_by_pid(pid) - assert patron['roles'] == [UserRole.FULL_PERMISSIONS] + assert patron["roles"] == [UserRole.FULL_PERMISSIONS] # Step 5 :: Delete patron through API # This should be disallowed due 
to role management restrictions. @@ -109,5 +107,5 @@ def test_patrons_marshmallow_loaders( # Reset the fixtures patron.delete(True, True, True) - librarian_martigny['roles'] = original_roles + librarian_martigny["roles"] = original_roles librarian_martigny.update(librarian_martigny, dbcommit=True, reindex=True) diff --git a/tests/api/patrons/test_patrons_permissions.py b/tests/api/patrons/test_patrons_permissions.py index 5ca602d738..07a274fa64 100644 --- a/tests/api/patrons/test_patrons_permissions.py +++ b/tests/api/patrons/test_patrons_permissions.py @@ -22,18 +22,25 @@ from flask import current_app from flask_principal import AnonymousIdentity, identity_changed from flask_security import login_user -from utils import check_permission, flush_index +from utils import check_permission from rero_ils.modules.patrons.api import Patron, PatronsSearch from rero_ils.modules.patrons.permissions import PatronPermissionPolicy from rero_ils.modules.users.models import UserRole -@mock.patch.object(Patron, '_extensions', []) +@mock.patch.object(Patron, "_extensions", []) def test_patrons_permissions( - patron_martigny, librarian_martigny, system_librarian_martigny, - org_martigny, librarian_saxon, patron_sion, patron2_martigny, - librarian2_martigny, librarian2_martigny_data, lib_saxon + patron_martigny, + librarian_martigny, + system_librarian_martigny, + org_martigny, + librarian_saxon, + patron_sion, + patron2_martigny, + librarian2_martigny, + librarian2_martigny_data, + lib_saxon, ): """Test patrons permissions class.""" @@ -41,57 +48,51 @@ def test_patrons_permissions( identity_changed.send( current_app._get_current_object(), identity=AnonymousIdentity() ) - check_permission(PatronPermissionPolicy, {'search': False}, {}) - check_permission(PatronPermissionPolicy, { - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, None) - check_permission(PatronPermissionPolicy, { - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, patron_martigny) + check_permission(PatronPermissionPolicy, {"search": False}, {}) + check_permission( + PatronPermissionPolicy, + {"read": False, "create": False, "update": False, "delete": False}, + None, + ) + check_permission( + PatronPermissionPolicy, + {"read": False, "create": False, "update": False, "delete": False}, + patron_martigny, + ) login_user(patron_martigny.user) - check_permission(PatronPermissionPolicy, {'search': False}, {}) - check_permission(PatronPermissionPolicy, {'create': False}, {}) - check_permission(PatronPermissionPolicy, { - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, patron_martigny) - check_permission(PatronPermissionPolicy, { - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, patron2_martigny) + check_permission(PatronPermissionPolicy, {"search": False}, {}) + check_permission(PatronPermissionPolicy, {"create": False}, {}) + check_permission( + PatronPermissionPolicy, + {"read": True, "create": False, "update": False, "delete": False}, + patron_martigny, + ) + check_permission( + PatronPermissionPolicy, + {"read": False, "create": False, "update": False, "delete": False}, + patron2_martigny, + ) # Librarian without any specific role # - search/read: any patrons of its own organisation # - create/update/delete: disallowed - original_roles = librarian_martigny.get('roles', []) - librarian_martigny['roles'] = [UserRole.CIRCULATION_MANAGER] + original_roles = librarian_martigny.get("roles", []) + librarian_martigny["roles"] = 
[UserRole.CIRCULATION_MANAGER] librarian_martigny.update(librarian_martigny, dbcommit=True, reindex=True) - flush_index(PatronsSearch.Meta.index) + PatronsSearch.flush_and_refresh() login_user(librarian_martigny.user) # to refresh identity ! - check_permission(PatronPermissionPolicy, {'search': True}, {}) - check_permission(PatronPermissionPolicy, { - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, patron_martigny) - check_permission(PatronPermissionPolicy, { - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, patron_sion) + check_permission(PatronPermissionPolicy, {"search": True}, {}) + check_permission( + PatronPermissionPolicy, + {"read": True, "create": False, "update": False, "delete": False}, + patron_martigny, + ) + check_permission( + PatronPermissionPolicy, + {"read": False, "create": False, "update": False, "delete": False}, + patron_sion, + ) # Librarian with specific 'user-management' # - operation allowed on any 'patron' of its own organisation @@ -100,73 +101,71 @@ def test_patrons_permissions( # - can only manage 'patron' roles and 'pro_user_manager' role. Any # operation including roles management outside this scope must be # denied. - librarian_martigny['roles'] = [UserRole.USER_MANAGER] + librarian_martigny["roles"] = [UserRole.USER_MANAGER] librarian_martigny.update(librarian_martigny, dbcommit=True, reindex=True) - flush_index(PatronsSearch.Meta.index) + PatronsSearch.flush_and_refresh() login_user(librarian_martigny.user) # to refresh identity ! - check_permission(PatronPermissionPolicy, {'search': True}, {}) - check_permission(PatronPermissionPolicy, { - 'read': True, - 'create': True, - 'update': True, - 'delete': True - }, patron_martigny) - check_permission(PatronPermissionPolicy, { - 'read': True, - 'create': True, - 'update': True, - 'delete': True - }, patron2_martigny) - check_permission(PatronPermissionPolicy, { - 'read': True, - 'create': True, - 'update': True, - 'delete': False # simple librarian cannot delete other librarian - }, librarian2_martigny) - check_permission(PatronPermissionPolicy, { - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, librarian_saxon) - check_permission(PatronPermissionPolicy, { - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, patron_sion) + check_permission(PatronPermissionPolicy, {"search": True}, {}) + check_permission( + PatronPermissionPolicy, + {"read": True, "create": True, "update": True, "delete": True}, + patron_martigny, + ) + check_permission( + PatronPermissionPolicy, + {"read": True, "create": True, "update": True, "delete": True}, + patron2_martigny, + ) + check_permission( + PatronPermissionPolicy, + { + "read": True, + "create": True, + "update": True, + "delete": False, # simple librarian cannot delete other librarian + }, + librarian2_martigny, + ) + check_permission( + PatronPermissionPolicy, + {"read": True, "create": False, "update": False, "delete": False}, + librarian_saxon, + ) + check_permission( + PatronPermissionPolicy, + {"read": False, "create": False, "update": False, "delete": False}, + patron_sion, + ) # reset librarian # reset the librarian - librarian_martigny['roles'] = original_roles + librarian_martigny["roles"] = original_roles librarian_martigny.update(librarian_martigny, dbcommit=True, reindex=True) - flush_index(PatronsSearch.Meta.index) + PatronsSearch.flush_and_refresh() - original_roles = patron_martigny.get('roles', []) + original_roles = patron_martigny.get("roles", []) # 
librarian + patron roles - patron_martigny['roles'] = [UserRole.FULL_PERMISSIONS, UserRole.PATRON] - patron_martigny['libraries'] = librarian_martigny['libraries'] + patron_martigny["roles"] = [UserRole.FULL_PERMISSIONS, UserRole.PATRON] + patron_martigny["libraries"] = librarian_martigny["libraries"] patron_martigny.update(patron_martigny, dbcommit=True, reindex=True) - flush_index(PatronsSearch.Meta.index) + PatronsSearch.flush_and_refresh() login_user(patron_martigny.user) # to refresh identity ! - check_permission(PatronPermissionPolicy, {'search': True}, {}) - check_permission(PatronPermissionPolicy, { - 'read': True, - 'create': True, - 'update': True, - 'delete': True - }, patron_martigny) - check_permission(PatronPermissionPolicy, { - 'read': True, - 'create': True, - 'update': True, - 'delete': True - }, patron2_martigny) + check_permission(PatronPermissionPolicy, {"search": True}, {}) + check_permission( + PatronPermissionPolicy, + {"read": True, "create": True, "update": True, "delete": True}, + patron_martigny, + ) + check_permission( + PatronPermissionPolicy, + {"read": True, "create": True, "update": True, "delete": True}, + patron2_martigny, + ) - patron_martigny['roles'] = original_roles - del patron_martigny['libraries'] + patron_martigny["roles"] = original_roles + del patron_martigny["libraries"] patron_martigny.update(patron_martigny, dbcommit=True, reindex=True) - flush_index(PatronsSearch.Meta.index) + PatronsSearch.flush_and_refresh() diff --git a/tests/api/patrons/test_patrons_rest.py b/tests/api/patrons/test_patrons_rest.py index d1670f1141..855e9de4f5 100644 --- a/tests/api/patrons/test_patrons_rest.py +++ b/tests/api/patrons/test_patrons_rest.py @@ -26,8 +26,7 @@ from invenio_accounts.testutils import login_user_via_session from invenio_db import db from invenio_oauth2server.models import Client, Token -from utils import VerifyRecordPermissionPatch, get_json, postdata, \ - to_relative_url +from utils import VerifyRecordPermissionPatch, get_json, postdata, to_relative_url from rero_ils.modules.patron_transactions.api import PatronTransaction from rero_ils.modules.patrons.api import Patron @@ -37,26 +36,26 @@ def test_patrons_shortcuts( - client, librarian_martigny, patron_martigny, - librarian_sion, yesterday, tomorrow): + client, librarian_martigny, patron_martigny, librarian_sion, yesterday, tomorrow +): """Test patron shortcuts.""" new_patron = deepcopy(patron_martigny) assert new_patron.patron_type_pid assert new_patron.organisation_pid - del new_patron['patron']['type'] + del new_patron["patron"]["type"] assert not new_patron.patron_type_pid assert not new_patron.organisation_pid assert new_patron.formatted_name == "Roduit, Louis" # check for expiration_date - expiration_date = new_patron['patron']['expiration_date'] - expiration_date = datetime.strptime(expiration_date, '%Y-%m-%d') + expiration_date = new_patron["patron"]["expiration_date"] + expiration_date = datetime.strptime(expiration_date, "%Y-%m-%d") assert new_patron.expiration_date == expiration_date - new_patron['patron']['expiration_date'] = yesterday.strftime('%Y-%m-%d') + new_patron["patron"]["expiration_date"] = yesterday.strftime("%Y-%m-%d") assert new_patron.is_expired - new_patron['patron']['expiration_date'] = tomorrow.strftime('%Y-%m-%d') + new_patron["patron"]["expiration_date"] = tomorrow.strftime("%Y-%m-%d") assert not new_patron.is_expired @@ -66,28 +65,32 @@ def test_filtered_patrons_get( """Test patron filter by organisation.""" # Martigny login_user_via_session(client, 
librarian_martigny.user)
-    list_url = url_for('invenio_records_rest.ptrn_list')
+    list_url = url_for("invenio_records_rest.ptrn_list")
     res = client.get(list_url)
     assert res.status_code == 200
     data = get_json(res)
-    assert data['hits']['total']['value'] == 2
+    assert data["hits"]["total"]["value"] == 2

     # Sion
     # TODO: find out why it fails
     login_user_via_session(client, librarian_sion.user)
-    list_url = url_for('invenio_records_rest.ptrn_list')
+    list_url = url_for("invenio_records_rest.ptrn_list")
     res = client.get(list_url)
     assert res.status_code == 200
     data = get_json(res)
-    assert data['hits']['total']['value'] == 1
+    assert data["hits"]["total"]["value"] == 1


 def test_patron_has_valid_subscriptions(
-        patron_type_grown_sion, patron_sion, patron_sion_data,
-        patron_type_adults_martigny, patron2_martigny,
-        patron_type_youngsters_sion):
+    patron_type_grown_sion,
+    patron_sion,
+    patron_sion_data,
+    patron_type_adults_martigny,
+    patron2_martigny,
+    patron_type_youngsters_sion,
+):
     """Test patron subscriptions."""
     patron_sion = patron_sion
     patron_martigny = patron2_martigny
@@ -100,8 +103,8 @@ def test_patron_has_valid_subscriptions(

     # 'patron_type_grown_sion' requires a subscription
     # remove all stored subscriptions and test whether a subscription exists
-    if patron_sion.get('patron', {}).get('subscriptions'):
-        del patron_sion['patron']['subscriptions']
+    if patron_sion.get("patron", {}).get("subscriptions"):
+        del patron_sion["patron"]["subscriptions"]
     assert patron_type_grown_sion.is_subscription_required
     assert not patron_sion.has_valid_subscription

@@ -113,18 +116,18 @@ def test_patron_has_valid_subscriptions(
     assert patron_sion.has_valid_subscription
     assert len(patron_sion.valid_subscriptions) == 1
     subscription = patron_sion.valid_subscriptions[0]
-    assert subscription.get('start_date') == start.strftime('%Y-%m-%d')
+    assert subscription.get("start_date") == start.strftime("%Y-%m-%d")

     # Create an old subscription for this patron and check validity
     start = datetime.now() - timedelta(days=20)
     end = start + timedelta(days=10)
     patron_sion.add_subscription(patron_type_grown_sion, start, end)
-    assert len(patron_sion.get('patron', {}).get('subscriptions', [])) == 2
+    assert len(patron_sion.get("patron", {}).get("subscriptions", [])) == 2
     assert len(patron_sion.valid_subscriptions) == 1

     # remove old subscriptions. Create an old one and check the patron doesn't
     # have any valid subscription
-    del patron_sion['patron']['subscriptions']
+    del patron_sion["patron"]["subscriptions"]
     patron_sion.add_subscription(patron_type_grown_sion, start, end)
     # !! As `add_subscription` uses the method `Patron.update`, the signal
     # `after_record_update` is sent by invenio_records and the patron
@@ -132,13 +135,13 @@ def test_patron_has_valid_subscriptions(
     # listener finds that the user doesn't have any subscription and adds a
     # valid one for this patron. So after the `add_subscription` call, we just
     # remove the valid subscription created.
-    del patron_sion['patron']['subscriptions'][1]
+    del patron_sion["patron"]["subscriptions"][1]
     assert not patron_sion.has_valid_subscription

     # remove all subscriptions. Create a valid subscription with another
     # patron_type than the current patron.patron_type. Check if the patron
     # has a valid subscription
-    del patron_sion['patron']['subscriptions']
+    del patron_sion["patron"]["subscriptions"]
     start = datetime.now() - timedelta(seconds=10)
     end = datetime.now() + timedelta(days=10)
     patron_sion.add_subscription(patron_type_youngsters_sion, start, end)
@@ -159,17 +162,20 @@ def test_patron_has_valid_subscriptions(
     assert not patrons

     # Reset the patron as at the beginning
-    del patron_sion['patron']['subscriptions']
+    del patron_sion["patron"]["subscriptions"]
     start = datetime.now()
     end = datetime.now() + timedelta(days=10)
     patron_sion.add_subscription(patron_type_grown_sion, start, end)


-def test_patron_pending_subscription(client, patron_type_grown_sion,
-                                     patron_sion,
-                                     librarian_sion,
-                                     patron_transaction_overdue_event_martigny,
-                                     lib_sion):
+def test_patron_pending_subscription(
+    client,
+    patron_type_grown_sion,
+    patron_sion,
+    librarian_sion,
+    patron_transaction_overdue_event_martigny,
+    lib_sion,
+):
     """Test get pending subscription for patron."""
     # At the beginning, `patron_sion` should have one pending
     # subscription.
@@ -178,29 +184,23 @@ def test_patron_pending_subscription(client, patron_type_grown_sion,

     # Pay this subscription.
     login_user_via_session(client, librarian_sion.user)
-    post_entrypoint = 'invenio_records_rest.ptre_list'
+    post_entrypoint = "invenio_records_rest.ptre_list"
     trans_pid = extracted_data_from_ref(
-        pending_subscription[0]['patron_transaction'], data='pid'
+        pending_subscription[0]["patron_transaction"], data="pid"
     )
     transaction = PatronTransaction.get_record_by_pid(trans_pid)
     payment = deepcopy(patron_transaction_overdue_event_martigny)
-    del payment['pid']
-    payment['type'] = 'payment'
-    payment['subtype'] = 'cash'
-    payment['amount'] = transaction.total_amount
-    payment['operator'] = {
-        '$ref': get_ref_for_pid(
-            'patrons', librarian_sion.pid
-        )
-    }
-    payment['library'] = {
-        '$ref': get_ref_for_pid('libraries', lib_sion.pid)
-    }
-    payment['parent'] = pending_subscription[0]['patron_transaction']
+    del payment["pid"]
+    payment["type"] = "payment"
+    payment["subtype"] = "cash"
+    payment["amount"] = transaction.total_amount
+    payment["operator"] = {"$ref": get_ref_for_pid("patrons", librarian_sion.pid)}
+    payment["library"] = {"$ref": get_ref_for_pid("libraries", lib_sion.pid)}
+    payment["parent"] = pending_subscription[0]["patron_transaction"]
     res, _ = postdata(client, post_entrypoint, payment)
     assert res.status_code == 201
     transaction = PatronTransaction.get_record_by_pid(transaction.pid)
-    assert transaction.status == 'closed'
+    assert transaction.status == "closed"

     # reload the patron and check the pending subscription. As we paid the
     # previous subscription, there will be no pending subscription
@@ -209,75 +209,77 @@ def test_patron_pending_subscription(client, patron_type_grown_sion,
     assert len(pending_subscription) == 0


-@mock.patch('invenio_records_rest.views.verify_record_permission',
-            mock.MagicMock(return_value=VerifyRecordPermissionPatch))
+@mock.patch(
+    "invenio_records_rest.views.verify_record_permission",
+    mock.MagicMock(return_value=VerifyRecordPermissionPatch),
+)
 def test_patrons_get(client, librarian_martigny):
     """Test record retrieval."""
     patron = librarian_martigny
     item_url = url_for(
-        'invenio_records_rest.ptrn_item',
-        pid_value=librarian_martigny.pid
+        "invenio_records_rest.ptrn_item", pid_value=librarian_martigny.pid
     )
     list_url = url_for(
-        'invenio_records_rest.ptrn_list',
-        q=f'pid:{librarian_martigny.pid}'
+        "invenio_records_rest.ptrn_list", q=f"pid:{librarian_martigny.pid}"
     )

     res = client.get(item_url)
     assert res.status_code == 200

-    assert res.headers['ETag'] == f'"{patron.revision_id}"'
+    assert res.headers["ETag"] == f'"{patron.revision_id}"'

     data = get_json(res)
-    assert patron.dumps() == data['metadata']
+    assert patron.dumps() == data["metadata"]

     # Check metadata
-    for k in ['created', 'updated', 'metadata', 'links']:
+    for k in ["created", "updated", "metadata", "links"]:
         assert k in data

     # Check self links
-    res = client.get(to_relative_url(data['links']['self']))
+    res = client.get(to_relative_url(data["links"]["self"]))
     assert res.status_code == 200
     assert data == get_json(res)
-    assert patron.dumps() == data['metadata']
+    assert patron.dumps() == data["metadata"]

     res = client.get(list_url)
     assert res.status_code == 200
     data = get_json(res)
-    result = data['hits']['hits'][0]['metadata']
+    result = data["hits"]["hits"][0]["metadata"]
     # organisation has been added during the indexing
-    del result['organisation']
+    del result["organisation"]
     assert result == patron.replace_refs().dumps()


 def test_patrons_post_put_delete(
-    app, client, lib_martigny, system_librarian_martigny,
-    patron_type_children_martigny, patron_martigny_data_tmp, json_header,
-    roles, mailbox
+    app,
+    client,
+    lib_martigny,
+    system_librarian_martigny,
+    patron_type_children_martigny,
+    patron_martigny_data_tmp,
+    json_header,
+    roles,
+    mailbox,
 ):
     """Test record retrieval."""
     login_user_via_session(client, system_librarian_martigny.user)
-    pid_value = 'ptrn_1'
-    item_url = url_for('invenio_records_rest.ptrn_item', pid_value=pid_value)
-    list_url = url_for('invenio_records_rest.ptrn_list', q=f'pid:{pid_value}')
+    pid_value = "ptrn_1"
+    item_url = url_for("invenio_records_rest.ptrn_item", pid_value=pid_value)
+    list_url = url_for("invenio_records_rest.ptrn_list", q=f"pid:{pid_value}")
     patron_data = deepcopy(patron_martigny_data_tmp)
-    patron_data['email'] = 'post_put_delete@test.ch'
-    patron_data['patron']['barcode'] = ['2384768231']
-    patron_data['username'] = 'post_put_delete'
+    patron_data["email"] = "post_put_delete@test.ch"
+    patron_data["patron"]["barcode"] = ["2384768231"]
+    patron_data["username"] = "post_put_delete"
     patron_data = create_user_from_data(patron_data)

     pids = Patron.count()
     assert len(mailbox) == 0

     # Create record / POST
-    patron_data['pid'] = pid_value
+    patron_data["pid"] = pid_value
     # patron_data['email'] = 'test_librarian@rero.ch'
     # patron_data['username'] = 'test_librarian'
-    res, _ = postdata(
-        client,
-        'invenio_records_rest.ptrn_list',
-        patron_data
-    )
+    res, _ = postdata(client, "invenio_records_rest.ptrn_list", patron_data)
     assert res.status_code == 201
     assert Patron.count() == pids + 1

@@ -294,34 +296,30 @@ def test_patrons_post_put_delete(
     assert res.status_code == 200
     data = get_json(res)
     # add dynamic property
-    patron_data['user_id'] = data['metadata']['user_id']
+    patron_data["user_id"] = data["metadata"]["user_id"]

     # Update record/PUT
     data = patron_data
-    data['patron']['barcode'] = ['barcode_test']
-    res = client.put(
-        item_url,
-        data=json.dumps(data),
-        headers=json_header
-    )
+    data["patron"]["barcode"] = ["barcode_test"]
+    res = client.put(item_url, data=json.dumps(data), headers=json_header)
     assert res.status_code == 200
     # assert res.headers['ETag'] != f'"{ptrnrarie.revision_id}"'

     # Check that the returned record matches the given data
     data = get_json(res)
-    assert data['metadata']['patron']['barcode'][0] == 'barcode_test'
+    assert data["metadata"]["patron"]["barcode"][0] == "barcode_test"

     res = client.get(item_url)
     assert res.status_code == 200
     data = get_json(res)
-    assert data['metadata']['patron']['barcode'][0] == 'barcode_test'
+    assert data["metadata"]["patron"]["barcode"][0] == "barcode_test"

     res = client.get(list_url)
     assert res.status_code == 200
-    data = get_json(res)['hits']['hits'][0]
-    assert data['metadata']['patron']['barcode'][0] == 'barcode_test'
+    data = get_json(res)["hits"]["hits"][0]
+    assert data["metadata"]["patron"]["barcode"][0] == "barcode_test"

     # Delete record/DELETE
     res = client.delete(item_url)
@@ -329,35 +327,37 @@ def test_patrons_post_put_delete(
     res = client.get(item_url)
     assert res.status_code == 410

-    ds = app.extensions['invenio-accounts'].datastore
-    ds.delete_user(ds.find_user(id=patron_data['user_id']))
+    ds = app.extensions["invenio-accounts"].datastore
+    ds.delete_user(ds.find_user(id=patron_data["user_id"]))


 def test_patrons_post_without_email(
-    app, client, lib_martigny, patron_type_children_martigny,
-    patron_martigny_data_tmp, json_header, roles, mailbox,
-    system_librarian_martigny
+    app,
+    client,
+    lib_martigny,
+    patron_type_children_martigny,
+    patron_martigny_data_tmp,
+    json_header,
+    roles,
+    mailbox,
+    system_librarian_martigny,
 ):
     """Test record retrieval."""
     login_user_via_session(client, system_librarian_martigny.user)
     patron_data = deepcopy(patron_martigny_data_tmp)
-    patron_data['email'] = 'post_without_email@test.ch'
-    patron_data['username'] = 'post_without_email'
-    patron_data['patron']['barcode'] = ['23841238231']
-    del patron_data['pid']
-    del patron_data['email']
-    patron_data['patron']['communication_channel'] = CommunicationChannel.MAIL
+    patron_data["email"] = "post_without_email@test.ch"
+    patron_data["username"] = "post_without_email"
+    patron_data["patron"]["barcode"] = ["23841238231"]
+    del patron_data["pid"]
+    del patron_data["email"]
+    patron_data["patron"]["communication_channel"] = CommunicationChannel.MAIL
     patron_data = create_user_from_data(patron_data)

     pids = Patron.count()
     assert len(mailbox) == 0

     # Create record / POST
-    res, _ = postdata(
-        client,
-        'invenio_records_rest.ptrn_list',
-        patron_data
-    )
+    res, _ = postdata(client, "invenio_records_rest.ptrn_list", patron_data)
     assert res.status_code == 201

     assert Patron.count() == pids + 1
@@ -365,35 +365,37 @@ def test_patrons_post_without_email(
     # Check that the returned record matches the given data
     data = get_json(res)
-    data['metadata']['patron']['communication_channel'] = \
-        CommunicationChannel.MAIL
+    data["metadata"]["patron"]["communication_channel"] = CommunicationChannel.MAIL

-    ds = app.extensions['invenio-accounts'].datastore
-    ds.delete_user(ds.find_user(id=patron_data['user_id']))
+    ds = app.extensions["invenio-accounts"].datastore
+    ds.delete_user(ds.find_user(id=patron_data["user_id"]))


 def test_patrons_dirty_barcode(client, patron_martigny, librarian_martigny):
     """Test patron update with dirty barcode."""
-    barcode = patron_martigny.get('patron', {}).get('barcode')[0]
-    patron_martigny['patron']['barcode'] = [f' {barcode} ']
-    patron_martigny.update(
-        patron_martigny, dbcommit=True, reindex=True)
+    barcode = patron_martigny.get("patron", {}).get("barcode")[0]
+    patron_martigny["patron"]["barcode"] = [f" {barcode} "]
+    patron_martigny.update(patron_martigny, dbcommit=True, reindex=True)
     patron = Patron.get_record_by_pid(patron_martigny.pid)
-    assert patron.patron.get('barcode') == [barcode]
+    assert patron.patron.get("barcode") == [barcode]

     # Ensure that users with no patron role will not have a barcode
-    librarian_martigny.update(
-        librarian_martigny, dbcommit=True, reindex=True)
-    assert not librarian_martigny.get('patron', {}).get('barcode')
+    librarian_martigny.update(librarian_martigny, dbcommit=True, reindex=True)
+    assert not librarian_martigny.get("patron", {}).get("barcode")


 def test_patrons_circulation_informations(
-        client, patron_sion, librarian_martigny,
-        patron3_martigny_blocked, yesterday, tomorrow, ill_request_sion):
+    client,
+    patron_sion,
+    librarian_martigny,
+    patron3_martigny_blocked,
+    yesterday,
+    tomorrow,
+    ill_request_sion,
+):
     """Test patron circulation information."""
     url = url_for(
-        'api_patrons.patron_circulation_informations',
-        patron_pid=patron_sion.pid
+        "api_patrons.patron_circulation_informations", patron_pid=patron_sion.pid
     )
     res = client.get(url)
     assert res.status_code == 401
@@ -402,51 +404,45 @@ def test_patrons_circulation_informations(
     res = client.get(url)
     assert res.status_code == 200
     data = res.json
-    assert len(data['messages']) == 0
+    assert len(data["messages"]) == 0

     url = url_for(
-        'api_patrons.patron_circulation_informations',
-        patron_pid=patron_sion.pid
+        "api_patrons.patron_circulation_informations", patron_pid=patron_sion.pid
     )
     res = client.get(url)
     data = res.json
     assert res.status_code == 200
-    assert 'engaged' in data['fees']
-    assert 'preview' in data['fees']
-    assert data['messages'] == []
-    assert data['statistics'] == {
-        'ill_requests': 1
-    }
+    assert "engaged" in data["fees"]
+    assert "preview" in data["fees"]
+    assert data["messages"] == []
+    assert data["statistics"] == {"ill_requests": 1}

     url = url_for(
-        'api_patrons.patron_circulation_informations',
-        patron_pid=patron3_martigny_blocked.pid
+        "api_patrons.patron_circulation_informations",
+        patron_pid=patron3_martigny_blocked.pid,
     )
     res = client.get(url)
     assert res.status_code == 200
     data = res.json
-    assert 'error' == data['messages'][0]['type']
-    assert 'This patron is currently blocked' in data['messages'][0]['content']
+    assert "error" == data["messages"][0]["type"]
+    assert "This patron is currently blocked" in data["messages"][0]["content"]

     patron = patron3_martigny_blocked
-    original_expiration_date = patron['patron']['expiration_date']
-    patron['patron']['expiration_date'] = yesterday.strftime('%Y-%m-%d')
-    patron['patron']['blocked'] = False
+    original_expiration_date = patron["patron"]["expiration_date"]
+    patron["patron"]["expiration_date"] = yesterday.strftime("%Y-%m-%d")
+    patron["patron"]["blocked"] = False
     patron.update(patron, dbcommit=True, reindex=True)
     res = client.get(url)
     data = res.json
-    assert 'error' == data['messages'][0]['type']
-    assert 'Patron rights expired.' in data['messages'][0]['content']
+    assert "error" == data["messages"][0]["type"]
+    assert "Patron rights expired." in data["messages"][0]["content"]

     # reset the patron
-    patron['patron']['blocked'] = True
-    patron['patron']['expiration_date'] = original_expiration_date
+    patron["patron"]["blocked"] = True
+    patron["patron"]["expiration_date"] = original_expiration_date
     patron.update(patron, dbcommit=True, reindex=True)

-    url = url_for(
-        'api_patrons.patron_circulation_informations',
-        patron_pid='dummy_pid'
-    )
+    url = url_for("api_patrons.patron_circulation_informations", patron_pid="dummy_pid")
     res = client.get(url)
     assert res.status_code == 404
@@ -454,19 +450,21 @@
 def test_patron_messages(client, patron_martigny):
     """Test for patron messages."""
     patron_pid = patron_martigny.pid
-    url = url_for('api_patrons.get_messages', patron_pid=patron_pid)
+    url = url_for("api_patrons.get_messages", patron_pid=patron_pid)
     res = client.get(url)
     assert res.status_code == 401

     login_user_via_session(client, patron_martigny.user)
-    url = url_for('api_patrons.get_messages', patron_pid=patron_pid)
+    url = url_for("api_patrons.get_messages", patron_pid=patron_pid)
     res = client.get(url)
     assert res.status_code == 200
     data = get_json(res)
     assert len(data) == 1
-    assert data[0]['type'] == 'warning'
-    assert data[0]['content'] == 'This person will be in vacations.\n' \
-        'Will be back in february.'
+    assert data[0]["type"] == "warning"
+    assert (
+        data[0]["content"] == "This person will be in vacations.\n"
+        "Will be back in february."
+    )


 def test_patron_info(app, client, patron_martigny, librarian_martigny):
@@ -474,51 +472,59 @@
     # All scopes
     scopes = [
-        'fullname', 'birthdate', 'institution', 'expiration_date',
-        'patron_type', 'patron_types'
+        "fullname",
+        "birthdate",
+        "institution",
+        "expiration_date",
+        "patron_type",
+        "patron_types",
     ]
     # create an OAuth client linked to the librarian account
     oauth_client = Client(
-        client_id='dev',
-        client_secret='dev',
-        name='Test name',
-        description='Test description',
+        client_id="dev",
+        client_secret="dev",
+        name="Test name",
+        description="Test description",
         is_confidential=False,
         user=librarian_martigny.user,
-        website='http://foo.org',
-        _redirect_uris='')
+        website="http://foo.org",
+        _redirect_uris="",
+    )
     # token with all scopes
     librarian_token = Token(
         client=oauth_client,
         user=librarian_martigny.user,
-        token_type='bearer',
-        access_token='test_librarian_access',
+        token_type="bearer",
+        access_token="test_librarian_access",
         expires=None,
         is_personal=False,
         is_internal=False,
-        _scopes=' '.join(scopes))
+        _scopes=" ".join(scopes),
+    )
     token = Token(
         client=oauth_client,
         user=patron_martigny.user,
-        token_type='bearer',
-        access_token='test_access_1',
+        token_type="bearer",
+        access_token="test_access_1",
         expires=None,
         is_personal=False,
         is_internal=False,
-        _scopes=' '.join(scopes))
+        _scopes=" ".join(scopes),
+    )
     # token without scope
     no_scope_token = Token(
         client=oauth_client,
         user=patron_martigny.user,
-        token_type='bearer',
-        access_token='test_access_2',
+        token_type="bearer",
+        access_token="test_access_2",
         expires=None,
         is_personal=False,
-        is_internal=False)
+        is_internal=False,
+    )

     db.session.add(oauth_client)
     db.session.add(librarian_token)
@@ -527,110 +533,101 @@ def test_patron_info(app, client, patron_martigny, librarian_martigny):
     db.session.commit()

     # denied with a wrong token
-    res = client.get(url_for('api_patrons.info', access_token='wrong'))
+    res = client.get(url_for("api_patrons.info", access_token="wrong"))
     assert res.status_code == 401

     # denied without token
-    res = client.get(url_for('api_patrons.info'))
+    res = client.get(url_for("api_patrons.info"))
     assert res.status_code == 401

     # minimal information without scope
     res = client.get(
-        url_for('api_patrons.info', access_token=no_scope_token.access_token))
+        url_for("api_patrons.info", access_token=no_scope_token.access_token)
+    )
     assert res.status_code == 200
-    assert res.json == {'barcode': patron_martigny['patron']['barcode'].pop()}
+    assert res.json == {"barcode": patron_martigny["patron"]["barcode"].pop()}

     # full information with all scopes
-    res = client.get(
-        url_for('api_patrons.info', access_token=token.access_token))
+    res = client.get(url_for("api_patrons.info", access_token=token.access_token))
     assert res.status_code == 200
     assert res.json == {
-        'barcode':
-        '4098124352',
-        'birthdate':
-        '1947-06-07',
-        'fullname':
-        'Roduit, Louis',
-        'patron_types': [{
-            'expiration_date':
-                patron_martigny['patron']['expiration_date']+'T00:00:00',
-            'institution': 'org1',
-            'patron_type': 'patron-code'
-        }]
+        "barcode": "4098124352",
+        "birthdate": "1947-06-07",
+        "fullname": "Roduit, Louis",
+        "patron_types": [
+            {
+                "expiration_date": patron_martigny["patron"]["expiration_date"]
+                + "T00:00:00",
+                "institution": "org1",
+                "patron_type": "patron-code",
+            }
+        ],
     }

     # librarian information with all scopes
     res = client.get(
-        url_for('api_patrons.info', access_token=librarian_token.access_token))
+        url_for("api_patrons.info", access_token=librarian_token.access_token)
+    )
     assert res.status_code == 200
-    assert res.json == {
-        'birthdate':
-        '1965-02-07',
-        'fullname':
-        'Pedronni, Marie'
-    }
+    assert res.json == {"birthdate": "1965-02-07", "fullname": "Pedronni, Marie"}


 def test_patrons_search(client, librarian_martigny):
     """Test patron search."""
     login_user_via_session(client, librarian_martigny.user)
-    birthdate = librarian_martigny.dumps()['birth_date']
+    birthdate = librarian_martigny.dumps()["birth_date"]

     # complete birthdate
-    list_url = url_for(
-        'invenio_records_rest.ptrn_list', q=f'{birthdate}', simple='1'
-    )
+    list_url = url_for("invenio_records_rest.ptrn_list", q=f"{birthdate}", simple="1")
     res = client.get(list_url)
-    hits = get_json(res)['hits']
-    assert hits['total']['value'] == 1
+    hits = get_json(res)["hits"]
+    assert hits["total"]["value"] == 1

     # birth year
     list_url = url_for(
-        'invenio_records_rest.ptrn_list',
-        q=f'{birthdate.split("-")[0]}',
-        simple='1'
+        "invenio_records_rest.ptrn_list", q=f'{birthdate.split("-")[0]}', simple="1"
     )
     res = client.get(list_url)
-    hits = get_json(res)['hits']
-    assert hits['total']['value'] == 1
+    hits = get_json(res)["hits"]
+    assert hits["total"]["value"] == 1


 def test_patrons_expired(client, librarian_martigny, patron_martigny):
     """Test patron expired filter."""
     login_user_via_session(client, librarian_martigny.user)
-    list_url = url_for('invenio_records_rest.ptrn_list', simple='1')
+    list_url = url_for("invenio_records_rest.ptrn_list", simple="1")
     res = client.get(list_url)
-    hits = get_json(res)['hits']
-    assert hits['total']['value'] == 6
+    hits = get_json(res)["hits"]
+    assert hits["total"]["value"] == 6

-    original_expiration_date = patron_martigny['patron']['expiration_date']
-    patron_martigny['patron']['barcode'] = ['4098124352']
+    original_expiration_date = patron_martigny["patron"]["expiration_date"]
+    patron_martigny["patron"]["barcode"] = ["4098124352"]
     new_expiration_date = datetime.now() - timedelta(days=10)
-    patron_martigny['patron']['expiration_date'] = new_expiration_date \
-        .strftime("%Y-%m-%d")
+    patron_martigny["patron"]["expiration_date"] = new_expiration_date.strftime(
+        "%Y-%m-%d"
+    )
     patron_martigny.update(patron_martigny, dbcommit=True, reindex=True)
-    list_url = url_for(
-        'invenio_records_rest.ptrn_list', expired='true', simple='1')
+    list_url = url_for("invenio_records_rest.ptrn_list", expired="true", simple="1")
     res = client.get(list_url)
-    hits = get_json(res)['hits']
-    assert hits['total']['value'] == 1
+    hits = get_json(res)["hits"]
+    assert hits["total"]["value"] == 1

-    patron_martigny['patron']['expiration_date'] = original_expiration_date
+    patron_martigny["patron"]["expiration_date"] = original_expiration_date
     patron_martigny.update(patron_martigny, dbcommit=True, reindex=True)


-def test_patrons_blocked(client, librarian_martigny, patron_martigny,
-                         patron3_martigny_blocked):
+def test_patrons_blocked(
+    client, librarian_martigny, patron_martigny, patron3_martigny_blocked
+):
     """Test patron blocked filter."""
     login_user_via_session(client, librarian_martigny.user)
-    list_url = url_for('invenio_records_rest.ptrn_list', simple='1')
+    list_url = url_for("invenio_records_rest.ptrn_list", simple="1")
     res = client.get(list_url)
-    hits = get_json(res)['hits']
-    assert hits['total']['value'] == 6
+    hits = get_json(res)["hits"]
+    assert hits["total"]["value"] == 6

-    list_url = url_for(
-        'invenio_records_rest.ptrn_list', blocked='true', simple='1')
+    list_url = url_for("invenio_records_rest.ptrn_list", blocked="true", simple="1")
     res = client.get(list_url)
-    hits = get_json(res)['hits']
-    assert hits['total']['value'] == 1
+    hits = get_json(res)["hits"]
+    assert hits["total"]["value"] == 1
diff --git a/tests/api/patrons/test_patrons_views.py b/tests/api/patrons/test_patrons_views.py
index 5d759ddeac..1316697473 100644
--- a/tests/api/patrons/test_patrons_views.py
+++ b/tests/api/patrons/test_patrons_views.py
@@ -27,10 +27,16 @@
 from rero_ils.modules.loans.models import LoanAction


-def test_patron_can_delete(client, librarian_martigny,
-                           patron_martigny, loc_public_martigny,
-                           item_lib_martigny, json_header, lib_martigny,
-                           circulation_policies):
+def test_patron_can_delete(
+    client,
+    librarian_martigny,
+    patron_martigny,
+    loc_public_martigny,
+    item_lib_martigny,
+    json_header,
+    lib_martigny,
+    circulation_policies,
+):
     """Test patron can delete."""
     login_user_via_session(client, librarian_martigny.user)
     item = item_lib_martigny
@@ -38,45 +44,50 @@ def test_patron_can_delete(client, librarian_martigny,
     location = loc_public_martigny

     data = deepcopy(patron_martigny)
-    del data['patron']['type']
+    del data["patron"]["type"]
     assert not data.organisation

     # request
     res, data = postdata(
         client,
-        'api_item.librarian_request',
+        "api_item.librarian_request",
         dict(
             item_pid=item.pid,
             pickup_location_pid=location.pid,
             patron_pid=patron.pid,
             transaction_library_pid=lib_martigny.pid,
-            transaction_user_pid=librarian_martigny.pid
-        )
+            transaction_user_pid=librarian_martigny.pid,
+        ),
     )
     assert res.status_code == 200
-    loan_pid = data.get('action_applied')[LoanAction.REQUEST].get('pid')
+    loan_pid = data.get("action_applied")[LoanAction.REQUEST].get("pid")

     can, reasons = patron_martigny.can_delete
     assert not can
-    assert reasons['links']['loans']
+    assert reasons["links"]["loans"]

     res, data = postdata(
         client,
-        'api_item.cancel_item_request',
+        "api_item.cancel_item_request",
         dict(
             pid=loan_pid,
             transaction_location_pid=loc_public_martigny.pid,
-            transaction_user_pid=librarian_martigny.pid
-        )
+            transaction_user_pid=librarian_martigny.pid,
+        ),
     )
     assert res.status_code == 200
     assert item.status == ItemStatus.ON_SHELF


-def test_patron_utils(client, librarian_martigny,
-                      patron_martigny, loc_public_martigny,
-                      item_lib_martigny, json_header,
-                      circulation_policies):
+def test_patron_utils(
+    client,
+    librarian_martigny,
+    patron_martigny,
+    loc_public_martigny,
+    item_lib_martigny,
+    json_header,
+    circulation_policies,
+):
     """Test patron utils."""
     login_user_via_session(client, librarian_martigny.user)
     item = item_lib_martigny
@@ -84,71 +95,71 @@ def test_patron_utils(client, librarian_martigny,
     location = loc_public_martigny

     from rero_ils.modules.patrons.views import get_location_name_from_pid
-    assert get_location_name_from_pid(loc_public_martigny.pid) == \
-        location.get('name')
+
+    assert get_location_name_from_pid(loc_public_martigny.pid) == location.get("name")

     from rero_ils.modules.patrons.views import get_patron_from_pid
+
     assert get_patron_from_pid(patron.pid) == patron

     from rero_ils.modules.patrons.views import get_checkout_loan_for_item
+
     assert not get_checkout_loan_for_item(item.pid)


-def test_patron_authenticate(client, patron_martigny, patron_martigny_data,
-                             system_librarian_martigny, default_user_password):
+def test_patron_authenticate(
+    client,
+    patron_martigny,
+    patron_martigny_data,
+    system_librarian_martigny,
+    default_user_password,
+):
     """Test for patron authenticate."""
     # parameters
-    token = 'Or7DTg1WT34cLKuSMcS7WzhdhxtKklpTizb1Hn2H0aaV5Vig6nden63VEqBE'
-    token_url_data = {'access_token': token}
-    username = patron_martigny_data['username']
+    token = "Or7DTg1WT34cLKuSMcS7WzhdhxtKklpTizb1Hn2H0aaV5Vig6nden63VEqBE"
+    token_url_data = {"access_token": token}
+    username = patron_martigny_data["username"]
     password = default_user_password
     create_personal(
-        name='token_test',
-        user_id=system_librarian_martigny['user_id'],
-        access_token=token
+        name="token_test",
+        user_id=system_librarian_martigny["user_id"],
+        access_token=token,
     )

     # Missing access_token parameter
-    res, _ = postdata(
-        client, 'api_patrons.patron_authenticate')
+    res, _ = postdata(client, "api_patrons.patron_authenticate")
     assert res.status_code == 401

     # Missing parameters (username and password)
     res, _ = postdata(
-        client, 'api_patrons.patron_authenticate', url_data=token_url_data)
+        client, "api_patrons.patron_authenticate", url_data=token_url_data
+    )
     assert res.status_code == 400

     # User not found
-    post_data = {'username': 'foo', 'password': 'bar'}
+    post_data = {"username": "foo", "password": "bar"}
     res, _ = postdata(
-        client,
-        'api_patrons.patron_authenticate',
-        post_data,
-        url_data=token_url_data
+        client, "api_patrons.patron_authenticate", post_data, url_data=token_url_data
     )
     assert res.status_code == 404

     # User found, bad password
-    post_data = {'username': username, 'password': 'bar'}
+    post_data = {"username": username, "password": "bar"}
     res, _ = postdata(
-        client,
-        'api_patrons.patron_authenticate',
-        post_data,
-        url_data=token_url_data
+        client, "api_patrons.patron_authenticate", post_data, url_data=token_url_data
    )
     assert res.status_code == 401

     # User found
-    post_data = {'username': username, 'password': password}
+    post_data = {"username": username, "password": password}
     res, output = postdata(
-        client,
-        'api_patrons.patron_authenticate',
-        post_data,
-        url_data=token_url_data
+        client, "api_patrons.patron_authenticate", post_data, url_data=token_url_data
     )
     assert res.status_code == 200
-    assert output['city'] == patron_martigny_data['city']
-    assert output['fullname'] == patron_martigny_data['first_name'] + ' ' +\
-        patron_martigny_data['last_name']
-    assert 'blocked' not in output
+    assert output["city"] == patron_martigny_data["city"]
+    assert (
+        output["fullname"]
+        == patron_martigny_data["first_name"] + " " + patron_martigny_data["last_name"]
+    )
+    assert "blocked" not in output
diff --git a/tests/api/selfcheck/test_admin.py b/tests/api/selfcheck/test_admin.py
index 0cd70f3d89..d1a8d527c7 100644
--- a/tests/api/selfcheck/test_admin.py
+++ b/tests/api/selfcheck/test_admin.py
@@ -31,68 +31,64 @@
 def test_admin_view(app):
     """Test flask-admin interface."""
     assert isinstance(selfcheck_terminal_adminview, dict)
-    assert 'model' in selfcheck_terminal_adminview
-    assert 'modelview' in selfcheck_terminal_adminview
+    assert "model" in selfcheck_terminal_adminview
+    assert "modelview" in selfcheck_terminal_adminview

     admin = Admin(app, name="Test")

     selfcheck_user_adminview_copy = dict(selfcheck_terminal_adminview)
-    selfcheck_user_model = selfcheck_user_adminview_copy.pop('model')
-    selfcheck_user_view = selfcheck_user_adminview_copy.pop('modelview')
-    admin.add_view(selfcheck_user_view(selfcheck_user_model, db.session,
-                                       **selfcheck_user_adminview_copy))
+    selfcheck_user_model = selfcheck_user_adminview_copy.pop("model")
+    selfcheck_user_view = selfcheck_user_adminview_copy.pop("modelview")
+    admin.add_view(
+        selfcheck_user_view(
+            selfcheck_user_model, db.session, **selfcheck_user_adminview_copy
+        )
+    )

     with app.test_request_context():
-        request_url = url_for('selfcheckterminal.index_view')
+        request_url = url_for("selfcheckterminal.index_view")

     with app.app_context():
         with app.test_client() as client:
-            res = client.get(
-                request_url,
-                follow_redirects=True
-            )
+            res = client.get(request_url, follow_redirects=True)
             assert res.status_code == 200
-            assert b'Name' in (res.get_data())
-            assert b'Access Token' in (res.get_data())
-            assert b'Organisation Pid' in (res.get_data())
-            assert b'Library Pid' in (res.get_data())
-            assert b'Location Pid' in (res.get_data())
+            assert b"Name" in (res.get_data())
+            assert b"Access Token" in (res.get_data())
+            assert b"Organisation Pid" in (res.get_data())
+            assert b"Library Pid" in (res.get_data())
+            assert b"Location Pid" in (res.get_data())


 def test_admin_createuser(app, client, loc_public_martigny):
     """Test flask-admin user creation."""
-    create_view_url = url_for('selfcheckterminal.create_view')
+    create_view_url = url_for("selfcheckterminal.create_view")

     # test required values
-    res = client.post(
-        create_view_url,
-        data={},
-        follow_redirects=True
-    )
-    assert b'This field is required.' in res.data
-    assert res.data.count(b'This field is required.') == 3
+    res = client.post(create_view_url, data={}, follow_redirects=True)
+    assert b"This field is required." in res.data
+    assert res.data.count(b"This field is required.") == 3

     # test create selfcheck user
     res = client.post(
         create_view_url,
         data={
-            'name': 'test_user',
-            'access_token': 'TESTACCESSTOKEN',
-            'location_pid': 'loc1',
+            "name": "test_user",
+            "access_token": "TESTACCESSTOKEN",
+            "location_pid": "loc1",
         },
-        follow_redirects=True
+        follow_redirects=True,
     )
-    assert b'Record was successfully created.' in res.data
+    assert b"Record was successfully created." in res.data

     # test create selfcheck user with the same username
     res = client.post(
         create_view_url,
         data={
-            'name': 'test_user',
-            'access_token': 'TESTACCESSTOKEN',
-            'location_pid': 'loc1',
+            "name": "test_user",
+            "access_token": "TESTACCESSTOKEN",
+            "location_pid": "loc1",
         },
-        follow_redirects=True
+        follow_redirects=True,
     )
-    assert b'Already exists.' in res.data
+    assert b"Already exists." in res.data
diff --git a/tests/api/selfcheck/test_models.py b/tests/api/selfcheck/test_models.py
index 23432d14ed..e747a80991 100644
--- a/tests/api/selfcheck/test_models.py
+++ b/tests/api/selfcheck/test_models.py
@@ -31,12 +31,12 @@
 def test_selfcheckuser(app):
     """Test SelfcheckUser model."""
     selfcheck_terminal = SelfcheckTerminal(
-        name='selfcheck_test',
-        access_token='UNACCESSTOKENDETEST',
-        organisation_pid='org1',
-        library_pid='lib1',
-        location_pid='loc1',
-        comments='a new comment',
+        name="selfcheck_test",
+        access_token="UNACCESSTOKENDETEST",
+        organisation_pid="org1",
+        library_pid="lib1",
+        location_pid="loc1",
+        comments="a new comment",
     )
     # 1. test create selfcheck user
     assert not selfcheck_terminal.active
@@ -50,24 +50,24 @@ def test_selfcheckuser(app):
     # 2. test update selfcheck user
     selfcheck_terminal_patch = SelfcheckTerminal(
         id=selfcheck_terminal_id,
-        name='selfcheck_test_modified',
-        access_token='UNACCESSTOKENDETEST',
-        organisation_pid='org1',
-        library_pid='lib1',
-        location_pid='loc1',
-        comments='an updated comment',
+        name="selfcheck_test_modified",
+        access_token="UNACCESSTOKENDETEST",
+        organisation_pid="org1",
+        library_pid="lib1",
+        location_pid="loc1",
+        comments="an updated comment",
     )
     db.session.merge(selfcheck_terminal_patch)
     db.session.commit()

     # 3. test unique name for selfcheck terminal
     selfcheck_terminal = SelfcheckTerminal(
-        name='selfcheck_test_modified',
-        access_token='UNACCESSTOKENDETEST',
-        organisation_pid='org1',
-        library_pid='lib1',
-        location_pid='loc2',
-        comments='a third comment',
+        name="selfcheck_test_modified",
+        access_token="UNACCESSTOKENDETEST",
+        organisation_pid="org1",
+        library_pid="lib1",
+        location_pid="loc2",
+        comments="a third comment",
     )
     db.session.add(selfcheck_terminal)
     pytest.raises(IntegrityError, db.session.commit)
diff --git a/tests/api/selfcheck/test_selfcheck.py b/tests/api/selfcheck/test_selfcheck.py
index 22071b1325..05c3078110 100644
--- a/tests/api/selfcheck/test_selfcheck.py
+++ b/tests/api/selfcheck/test_selfcheck.py
@@ -24,27 +24,36 @@
 import pytest
 from invenio_accounts.testutils import login_user_via_session
-from invenio_circulation.search.api import LoansSearch
-from utils import flush_index, get_json, postdata
+from utils import get_json, postdata

 from rero_ils.modules.items.api import Item
-from rero_ils.modules.loans.api import Loan
+from rero_ils.modules.loans.api import Loan, LoansSearch
 from rero_ils.modules.loans.models import LoanAction, LoanState
 from rero_ils.modules.notifications.api import NotificationsSearch
 from rero_ils.modules.notifications.dispatcher import Dispatcher
 from rero_ils.modules.notifications.models import NotificationType
 from rero_ils.modules.notifications.utils import number_of_notifications_sent
 from rero_ils.modules.patrons.api import Patron
-from rero_ils.modules.selfcheck.api import authorize_patron, enable_patron, \
-    item_information, patron_information, patron_status, selfcheck_checkin, \
-    selfcheck_checkout, selfcheck_login, selfcheck_renew, system_status, \
-    validate_patron_account
+from rero_ils.modules.selfcheck.api import (
+    authorize_patron,
+    enable_patron,
+    item_information,
+    patron_information,
+    patron_status,
+    selfcheck_checkin,
+    selfcheck_checkout,
+    selfcheck_login,
+    selfcheck_renew,
+    system_status,
+    validate_patron_account,
+)
 from rero_ils.modules.selfcheck.utils import check_sip2_module
 from rero_ils.modules.users.api import User

 # skip tests if invenio-sip2 module is not installed
-pytestmark = pytest.mark.skipif(not check_sip2_module(),
-                                reason='invenio-sip2 not installed')
+pytestmark = pytest.mark.skipif(
+    not check_sip2_module(), reason="invenio-sip2 not installed"
+)


 def test_invenio_sip2():
@@ -56,55 +65,58 @@
 def test_selfcheck_login(librarian_martigny, selfcheck_librarian_martigny):
     """Test selfcheck client login."""
     # test failed login
-    response = selfcheck_login('invalid_user',
-                               'invalid_password',
-                               terminal_ip='127.0.0.1')
+    response = selfcheck_login(
+        "invalid_user", "invalid_password", terminal_ip="127.0.0.1"
+    )
     assert not response

     # test success login
     response = selfcheck_login(
         selfcheck_librarian_martigny.name,
         selfcheck_librarian_martigny.access_token,
-        terminal_ip='127.0.0.1'
+        terminal_ip="127.0.0.1",
     )
     assert response
-    assert response.get('authenticated')
+    assert response.get("authenticated")


 def test_authorize_patron(selfcheck_patron_martigny, default_user_password):
     """Test authorize patron."""
     # try to authorize with wrong password
-    response = authorize_patron(selfcheck_patron_martigny.get(
-        'patron', {}).get('barcode')[0], 'invalid_password')
+    response = authorize_patron(
+        selfcheck_patron_martigny.get("patron", {}).get("barcode")[0],
+        "invalid_password",
+    )
     assert not response

     # try to authorize with wrong barcode
-    response = authorize_patron('invalid_barcode', 'invalid_password')
+    response = authorize_patron("invalid_barcode", "invalid_password")
     assert not response

     # authorize patron with email
     response = authorize_patron(
-        selfcheck_patron_martigny.get('patron', {}).get('barcode')[0],
-        default_user_password)
+        selfcheck_patron_martigny.get("patron", {}).get("barcode")[0],
+        default_user_password,
+    )
     assert response

     # authorize patron without email (using username for authentication)
     user = User.get_record(selfcheck_patron_martigny.user.id)
     user_metadata = user.dumps_metadata()
-    email = user_metadata.pop('email', None)
+    email = user_metadata.pop("email", None)
     user.update(user_metadata)
-    selfcheck_patron_martigny = Patron.get_record_by_pid(
-        selfcheck_patron_martigny.pid)
+    selfcheck_patron_martigny = Patron.get_record_by_pid(selfcheck_patron_martigny.pid)
     response = authorize_patron(
-        selfcheck_patron_martigny.get('patron', {}).get('barcode')[0],
-        default_user_password)
+        selfcheck_patron_martigny.get("patron", {}).get("barcode")[0],
+        default_user_password,
+    )
     assert response

     # reset user data
     user = User.get_record(selfcheck_patron_martigny.user.id)
     user_metadata = user.dumps_metadata()
-    user_metadata['email'] = email
+    user_metadata["email"] = email
     user.update(user_metadata)

@@ -112,285 +124,294 @@
 def test_validate_patron(selfcheck_patron_martigny):
     """Test validate patron."""
     # test valid patron barcode
     assert validate_patron_account(
-        selfcheck_patron_martigny.get('patron', {}).get('barcode')[0])
+        selfcheck_patron_martigny.get("patron", {}).get("barcode")[0]
+    )
     # test invalid patron barcode
-    assert not validate_patron_account('invalid_barcode')
+    assert not validate_patron_account("invalid_barcode")


 def test_system_status(selfcheck_librarian_martigny):
     """Test automated circulation system status."""
     response = system_status(selfcheck_librarian_martigny.name)
-    assert response.get('institution_id') == \
-        selfcheck_librarian_martigny.library_pid
+    assert response.get("institution_id") == selfcheck_librarian_martigny.library_pid


 def test_enable_patron(selfcheck_patron_martigny):
     """Test enable patron."""
     response = enable_patron(
-        selfcheck_patron_martigny.get('patron', {}).get('barcode')[0])
+        selfcheck_patron_martigny.get("patron", {}).get("barcode")[0]
+    )
     ptrn = selfcheck_patron_martigny
-    assert response.get('institution_id') == ptrn.organisation_pid
-    assert response.get('patron_id') == ptrn.patron['barcode']
-    assert response.get('patron_name') == ptrn.formatted_name
-    assert response.get('language') == \
-        ptrn.patron['communication_language']
+    assert response.get("institution_id") == ptrn.organisation_pid
+    assert response.get("patron_id") == ptrn.patron["barcode"]
+    assert response.get("patron_name") == ptrn.formatted_name
+    assert response.get("language") == ptrn.patron["communication_language"]

     # test with wrong patron
-    response = enable_patron('wrong_patron_barcode')
-    assert 'patron not found' in response.get('screen_messages')[0]
-
-
-def test_patron_information(client, librarian_martigny,
-                            selfcheck_patron_martigny, loc_public_martigny,
-                            item_lib_martigny, item2_lib_martigny,
-                            item3_lib_martigny, circulation_policies,
-                            lib_martigny):
+    response = enable_patron("wrong_patron_barcode")
+    assert "patron not found" in response.get("screen_messages")[0]
+
+
+def test_patron_information(
+    client,
+    librarian_martigny,
+    selfcheck_patron_martigny,
+    loc_public_martigny,
+    item_lib_martigny,
+    item2_lib_martigny,
+    item3_lib_martigny,
+    circulation_policies,
+    lib_martigny,
+):
     """Test patron information."""
     login_user_via_session(client, librarian_martigny.user)
     # checkout
     res, data = postdata(
         client,
-        'api_item.checkout',
+        "api_item.checkout",
         dict(
             item_pid=item_lib_martigny.pid,
             patron_pid=selfcheck_patron_martigny.pid,
             transaction_user_pid=librarian_martigny.pid,
-            transaction_location_pid=loc_public_martigny.pid
-        )
+            transaction_location_pid=loc_public_martigny.pid,
+        ),
     )
     assert res.status_code == 200
-    actions = data.get('action_applied')
-    loan_pid = actions[LoanAction.CHECKOUT].get('pid')
+    actions = data.get("action_applied")
+    loan_pid = actions[LoanAction.CHECKOUT].get("pid")
     loan = Loan.get_record_by_pid(loan_pid)
     assert not loan.is_loan_overdue()
     # set loan on overdue
     end_date = datetime.now(timezone.utc) - timedelta(days=7)
-    loan['end_date'] = end_date.isoformat()
-    loan.update(
-        loan,
-        dbcommit=True,
-        reindex=True
-    )
+    loan["end_date"] = end_date.isoformat()
+    loan.update(loan, dbcommit=True, reindex=True)
     loan = Loan.get_record_by_pid(loan_pid)
     assert loan.is_loan_overdue()
-    notification = loan.create_notification(
-        _type=NotificationType.OVERDUE).pop()
-    Dispatcher.dispatch_notifications([notification.get('pid')])
-    flush_index(NotificationsSearch.Meta.index)
-    flush_index(LoansSearch.Meta.index)
+    notification = loan.create_notification(_type=NotificationType.OVERDUE).pop()
+    Dispatcher.dispatch_notifications([notification.get("pid")])
+    NotificationsSearch.flush_and_refresh()
+    LoansSearch.flush_and_refresh()
     assert number_of_notifications_sent(loan) == 1

     # create pending request
     res, data = postdata(
         client,
-        'api_item.librarian_request',
+        "api_item.librarian_request",
         dict(
             item_pid=item2_lib_martigny.pid,
             patron_pid=selfcheck_patron_martigny.pid,
             pickup_location_pid=loc_public_martigny.pid,
             transaction_library_pid=lib_martigny.pid,
-            transaction_user_pid=librarian_martigny.pid
-        )
+            transaction_user_pid=librarian_martigny.pid,
+        ),
     )
-    pending_request_loan_pid = \
-        get_json(res)['action_applied']['request']['pid']
+    pending_request_loan_pid = get_json(res)["action_applied"]["request"]["pid"]
     assert res.status_code == 200

     # create validated request
     circ_params = {
-        'item_pid': item3_lib_martigny.pid,
-        'patron_pid': selfcheck_patron_martigny.pid,
-        'pickup_location_pid': loc_public_martigny.pid,
-        'transaction_library_pid': lib_martigny.pid,
-        'transaction_user_pid': librarian_martigny.pid
+        "item_pid": item3_lib_martigny.pid,
+        "patron_pid": selfcheck_patron_martigny.pid,
+        "pickup_location_pid": loc_public_martigny.pid,
+        "transaction_library_pid": lib_martigny.pid,
+        "transaction_user_pid": librarian_martigny.pid,
     }
     res, data = postdata(
         client,
-        'api_item.librarian_request',
+        "api_item.librarian_request",
         dict(
             item_pid=item3_lib_martigny.pid,
             patron_pid=selfcheck_patron_martigny.pid,
             pickup_location_pid=loc_public_martigny.pid,
             transaction_library_pid=lib_martigny.pid,
-            transaction_user_pid=librarian_martigny.pid
-        )
+            transaction_user_pid=librarian_martigny.pid,
+        ),
     )
     assert res.status_code == 200

     # validate the request
-    request_loan_pid = get_json(res)['action_applied']['request']['pid']
-    circ_params['pid'] = request_loan_pid
-    res, data = postdata(
-        client, 'api_item.validate_request', dict(circ_params))
+    request_loan_pid = get_json(res)["action_applied"]["request"]["pid"]
+    circ_params["pid"] = request_loan_pid
+    res, data = postdata(client, "api_item.validate_request", dict(circ_params))
     assert res.status_code == 200

     # get patron information
-    response = patron_information(selfcheck_patron_martigny.get(
-        'patron', {}).get('barcode')[0])
+    response = patron_information(
+        selfcheck_patron_martigny.get("patron", {}).get("barcode")[0]
+    )
     assert response

     # check required fields
     required_fields = [
-        'patron_id',
-        'patron_name',
-        'patron_status',
-        'institution_id',
-        'language',
-        'valid_patron'
+        "patron_id",
+        "patron_name",
+        "patron_status",
+        "institution_id",
+        "language",
+        "valid_patron",
     ]
     for field in required_fields:
         assert response.get(field)

     # check summary fields
     summary_fields = [
-        'charged_items',
-        'fine_items',
-        'hold_items',
-        'overdue_items',
-        'unavailable_hold_items'
+        "charged_items",
+        "fine_items",
+        "hold_items",
+        "overdue_items",
+        "unavailable_hold_items",
     ]
     for field in summary_fields:
         assert len(response.get(field)) > 0

     # get patron status
-    response = patron_status(selfcheck_patron_martigny.get(
-        'patron', {}).get('barcode')[0])
+    response = patron_status(
+        selfcheck_patron_martigny.get("patron", {}).get("barcode")[0]
+    )
     assert response

     # checkin
     res, _ = postdata(
         client,
-        'api_item.checkin',
+        "api_item.checkin",
         dict(
             item_pid=item_lib_martigny.pid,
             pid=loan_pid,
             transaction_user_pid=librarian_martigny.pid,
-            transaction_location_pid=loc_public_martigny.pid
-        )
+            transaction_location_pid=loc_public_martigny.pid,
+        ),
     )
     assert res.status_code == 200

     # cancel the first request
     circ_params = {
-        'pid': pending_request_loan_pid,
-        'transaction_library_pid': lib_martigny.pid,
-        'transaction_user_pid': librarian_martigny.pid
+        "pid": pending_request_loan_pid,
+        "transaction_library_pid": lib_martigny.pid,
+        "transaction_user_pid": librarian_martigny.pid,
     }
-    res, data = postdata(
-        client, 'api_item.cancel_item_request', dict(circ_params))
+    res, data = postdata(client, "api_item.cancel_item_request", dict(circ_params))
     assert res.status_code == 200

     # test with wrong patron
-    response = patron_information('wrong_patron_barcode')
-    assert 'patron not found' in response.get('screen_messages')[0]
+    response = patron_information("wrong_patron_barcode")
+    assert "patron not found" in response.get("screen_messages")[0]

-    assert 'patron not found' in response.get('screen_messages')[0]
+    assert "patron not found" in response.get("screen_messages")[0]


-def test_item_information(client, librarian_martigny,
-                          selfcheck_patron_martigny, loc_public_martigny,
-                          item_lib_martigny,
-                          circulation_policies):
+def test_item_information(
+    client,
+    librarian_martigny,
+    selfcheck_patron_martigny,
+    loc_public_martigny,
+    item_lib_martigny,
+    circulation_policies,
+):
     """Test item information."""
     login_user_via_session(client, librarian_martigny.user)
     # checkout
     res, data = postdata(
         client,
-        'api_item.checkout',
+        "api_item.checkout",
         dict(
             item_pid=item_lib_martigny.pid,
             patron_pid=selfcheck_patron_martigny.pid,
             transaction_user_pid=librarian_martigny.pid,
-            transaction_location_pid=loc_public_martigny.pid
-        )
+            transaction_location_pid=loc_public_martigny.pid,
+        ),
     )
     assert res.status_code == 200
-    actions = data.get('action_applied')
-    loan_pid = actions[LoanAction.CHECKOUT].get('pid')
+    actions = data.get("action_applied")
+    loan_pid = actions[LoanAction.CHECKOUT].get("pid")
     loan = Loan.get_record_by_pid(loan_pid)
     assert not loan.is_loan_overdue()
     # set loan on overdue
     end_date = datetime.now(timezone.utc) - timedelta(days=7)
-    loan['end_date'] = end_date.isoformat()
-    loan.update(
-        loan,
-        dbcommit=True,
-        reindex=True
-    )
+    loan["end_date"] = end_date.isoformat()
+    loan.update(loan, dbcommit=True, reindex=True)
     loan = Loan.get_record_by_pid(loan_pid)
-    assert loan['state'] == LoanState.ITEM_ON_LOAN
+    assert loan["state"] == LoanState.ITEM_ON_LOAN
     assert loan.is_loan_overdue()
-    notification = loan.create_notification(
-        _type=NotificationType.OVERDUE).pop()
-    Dispatcher.dispatch_notifications([notification.get('pid')])
-    flush_index(NotificationsSearch.Meta.index)
-    flush_index(LoansSearch.Meta.index)
+    notification = loan.create_notification(_type=NotificationType.OVERDUE).pop()
+    Dispatcher.dispatch_notifications([notification.get("pid")])
+    NotificationsSearch.flush_and_refresh()
+    LoansSearch.flush_and_refresh()
     assert number_of_notifications_sent(loan) == 1

-    patron_barcode = selfcheck_patron_martigny\
-        .get('patron', {}).get('barcode')[0]
-    item_barcode = item_lib_martigny.get('barcode')
+    patron_barcode = selfcheck_patron_martigny.get("patron", {}).get("barcode")[0]
+    item_barcode = item_lib_martigny.get("barcode")

     # get item information
     response = item_information(
         patron_barcode=patron_barcode,
         item_barcode=item_barcode,
-        institution_id=librarian_martigny.organisation_pid
+        institution_id=librarian_martigny.organisation_pid,
     )
     assert response
     # check required fields in response
-    assert all(key in response for key in (
-        'item_id',
-        'title_id',
-        'circulation_status',
-        'fee_type',
-        'security_marker',
-    ))
-    assert response['due_date']
-    assert response['fee_amount']
+    assert all(
+        key in response
+        for key in (
+            "item_id",
+            "title_id",
+            "circulation_status",
+            "fee_type",
+            "security_marker",
+        )
+    )
+    assert response["due_date"]
+    assert response["fee_amount"]

     # checkin
     res, _ = postdata(
         client,
-        'api_item.checkin',
+        "api_item.checkin",
         dict(
             item_pid=item_lib_martigny.pid,
             pid=loan_pid,
             transaction_user_pid=librarian_martigny.pid,
-            transaction_location_pid=loc_public_martigny.pid
-        )
+            transaction_location_pid=loc_public_martigny.pid,
+        ),
     )
     assert res.status_code == 200

     # test with wrong item barcode
     response = item_information(
         patron_barcode=patron_barcode,
-        item_barcode='wrong_item_barcode',
-        institution_id=librarian_martigny.organisation_pid)
-    assert 'item not found' in response.get('screen_messages')[0]
-
-
-def test_selfcheck_circulation(client, selfcheck_librarian_martigny, document,
-                               librarian_martigny, librarian2_martigny,
-                               loc_public_martigny, selfcheck_patron_martigny,
-                               item_lib_martigny, circulation_policies):
+        item_barcode="wrong_item_barcode",
+        institution_id=librarian_martigny.organisation_pid,
+    )
+    assert "item not found" in response.get("screen_messages")[0]
+
+
+def test_selfcheck_circulation(
+    client,
+    selfcheck_librarian_martigny,
+    document,
+    librarian_martigny,
+    librarian2_martigny,
+    loc_public_martigny,
+    selfcheck_patron_martigny,
+    item_lib_martigny,
+    circulation_policies,
+):
     """Test selfcheck circulation operation."""
-    patron_barcode = selfcheck_patron_martigny \
-        .get('patron', {}).get('barcode')[0]
-    item_barcode = item_lib_martigny.get('barcode')
+    patron_barcode = selfcheck_patron_martigny.get("patron", {}).get("barcode")[0]
+    item_barcode = item_lib_martigny.get("barcode")

     # selfcheck checkout with wrong item barcode
     checkout = selfcheck_checkout(
         transaction_user_pid=librarian_martigny.pid,
-        item_barcode='wrong_barcode', patron_barcode=patron_barcode,
-        terminal=selfcheck_librarian_martigny.name
+        item_barcode="wrong_barcode",
+        patron_barcode=patron_barcode,
+        terminal=selfcheck_librarian_martigny.name,
     )
     assert checkout
     assert not checkout.is_success
-    assert 'Error encountered: item not found' in \
-        checkout.get('screen_messages')
+    assert "Error encountered: item not found" in checkout.get("screen_messages")

     # selfcheck checkout
     checkout = selfcheck_checkout(
         transaction_user_pid=librarian_martigny.pid,
-        item_barcode=item_barcode, patron_barcode=patron_barcode,
-        terminal=selfcheck_librarian_martigny.name
+        item_barcode=item_barcode,
+        patron_barcode=patron_barcode,
+        terminal=selfcheck_librarian_martigny.name,
     )
     assert checkout
     assert checkout.is_success
@@ -399,33 +420,36 @@ def test_selfcheck_circulation(client, selfcheck_librarian_martigny, document,
     # test second checkout
     checkout = selfcheck_checkout(
         transaction_user_pid=librarian_martigny.pid,
-        item_barcode=item_barcode, patron_barcode=patron_barcode,
-        terminal=selfcheck_librarian_martigny.name
+        item_barcode=item_barcode,
+        patron_barcode=patron_barcode,
+        terminal=selfcheck_librarian_martigny.name,
     )
     assert not checkout.is_success

     # Get the loan and update end_date to allow direct renewal
     loan_pid = Item.get_loan_pid_with_item_on_loan(item_lib_martigny.pid)
     loan = Loan.get_record_by_pid(loan_pid)
-    assert 'selfcheck_terminal_id' in loan
-    loan['end_date'] = loan['start_date']
+    assert "selfcheck_terminal_id" in loan
+    loan["end_date"] = loan["start_date"]
     loan.update(loan, dbcommit=True, reindex=True)

     # selfcheck renew with wrong item barcode
     renew = selfcheck_renew(
         transaction_user_pid=librarian_martigny.pid,
-        item_barcode='wrong_barcode', patron_barcode=patron_barcode,
-        terminal=selfcheck_librarian_martigny.name
+        item_barcode="wrong_barcode",
+        patron_barcode=patron_barcode,
+        terminal=selfcheck_librarian_martigny.name,
    )
     assert renew
     assert not renew.is_success
-    assert 'Error encountered: item not found' in renew.get('screen_messages')
+    assert "Error encountered: item not found" in renew.get("screen_messages")

     # selfcheck renew
     renew = selfcheck_renew(
         transaction_user_pid=librarian_martigny.pid,
-        item_barcode=item_barcode, patron_barcode=patron_barcode,
-        terminal=selfcheck_librarian_martigny.name
+        item_barcode=item_barcode,
+        patron_barcode=patron_barcode,
+        terminal=selfcheck_librarian_martigny.name,
     )
     assert renew
     assert renew.is_success
@@ -434,21 +458,20 @@ def test_selfcheck_circulation(client, selfcheck_librarian_martigny, document,
     # selfcheck checkin wrong item barcode
     checkin = selfcheck_checkin(
         transaction_user_pid=librarian_martigny.pid,
-        item_barcode='wrong_barcode',
+        item_barcode="wrong_barcode",
         patron_barcode=patron_barcode,
-        terminal=selfcheck_librarian_martigny.name
+        terminal=selfcheck_librarian_martigny.name,
     )
     assert checkin
     assert not checkin.is_success
-    assert 'Error encountered: item not found' in \
-        checkin.get('screen_messages')
+    assert "Error encountered: item not found" in checkin.get("screen_messages")

     # selfcheck checkin
     checkin = selfcheck_checkin(
         transaction_user_pid=librarian_martigny.pid,
         item_barcode=item_barcode,
         patron_barcode=patron_barcode,
-        terminal=selfcheck_librarian_martigny.name
+        terminal=selfcheck_librarian_martigny.name,
     )
     assert checkin
     assert checkin.is_success
diff --git a/tests/api/selfcheck/test_selfcheck_utils.py b/tests/api/selfcheck/test_selfcheck_utils.py
index 7c604bc3cc..bde63d1db8 100644
--- a/tests/api/selfcheck/test_selfcheck_utils.py
+++ b/tests/api/selfcheck/test_selfcheck_utils.py
@@ -21,30 +21,28 @@
 from __future__ import absolute_import, print_function

 from rero_ils.modules.items.models import ItemIssueStatus, ItemStatus
-from rero_ils.modules.selfcheck.utils import map_item_circulation_status, \
-    map_media_type
+from rero_ils.modules.selfcheck.utils import map_item_circulation_status, map_media_type


 def test_media_type(client):
     """Test invenio-sip2 media type mapping."""
     # TODO: test all document types
-    assert 'BOOK' == map_media_type('docmaintype_book')
-    assert 'MAGAZINE' == map_media_type('docmaintype_article')
-    assert 'MAGAZINE' == map_media_type('docmaintype_serial')
-    assert 'BOUND_JOURNAL' == map_media_type('docmaintype_series')
-    assert 'AUDIO' == map_media_type('docmaintype_audio')
-    assert 'VIDEO' == map_media_type('docmaintype_movie_series')
+    assert "BOOK" == map_media_type("docmaintype_book")
+    assert "MAGAZINE" == map_media_type("docmaintype_article")
+    assert "MAGAZINE" == map_media_type("docmaintype_serial")
+    assert "BOUND_JOURNAL" == map_media_type("docmaintype_series")
+    assert "AUDIO" == map_media_type("docmaintype_audio")
+    assert "VIDEO" == map_media_type("docmaintype_movie_series")


 def test_circulation_status():
     """Test invenio-sip2 item circulation status mapping."""
-    assert 'AVAILABLE' == map_item_circulation_status(ItemStatus.ON_SHELF)
-    assert 'WAITING_ON_HOLD_SHELF' == \
-        map_item_circulation_status(ItemStatus.AT_DESK)
-    assert 'CHARGED' == map_item_circulation_status(ItemStatus.ON_LOAN)
-    assert 'IN_TRANSIT' == map_item_circulation_status(ItemStatus.IN_TRANSIT)
-    assert 'MISSING' == map_item_circulation_status(ItemStatus.MISSING)
-    assert 'OTHER' == map_item_circulation_status(ItemStatus.EXCLUDED)
-    assert 'OTHER' == map_item_circulation_status(ItemIssueStatus.RECEIVED)
-    assert 'OTHER' == map_item_circulation_status(ItemIssueStatus.DELETED)
-    assert 'OTHER' == map_item_circulation_status(ItemIssueStatus.LATE)
+    assert "AVAILABLE" == map_item_circulation_status(ItemStatus.ON_SHELF)
+    assert "WAITING_ON_HOLD_SHELF" == map_item_circulation_status(ItemStatus.AT_DESK)
+    assert "CHARGED" == map_item_circulation_status(ItemStatus.ON_LOAN)
+    assert "IN_TRANSIT" == map_item_circulation_status(ItemStatus.IN_TRANSIT)
+    assert "MISSING" == map_item_circulation_status(ItemStatus.MISSING)
+    assert "OTHER" == map_item_circulation_status(ItemStatus.EXCLUDED)
+    assert "OTHER" == map_item_circulation_status(ItemIssueStatus.RECEIVED)
+    assert "OTHER" == map_item_circulation_status(ItemIssueStatus.DELETED)
+    assert "OTHER" == map_item_circulation_status(ItemIssueStatus.LATE)
diff --git a/tests/api/sru/test_sru_rest.py b/tests/api/sru/test_sru_rest.py
index 6fca044a42..273ad9df76 100644
--- a/tests/api/sru/test_sru_rest.py
+++ b/tests/api/sru/test_sru_rest.py
@@ -25,84 +25,92 @@
 def test_sru_explain(client):
     """Test sru documents rest api."""
-    api_url = url_for('api_sru.documents')
+    api_url = url_for("api_sru.documents")
     res = client.get(api_url)
     assert res.status_code == 200
     xml_dict = get_xml_dict(res)
-    assert 'sru:explainResponse' in xml_dict
+    assert "sru:explainResponse" in xml_dict


 def test_sru_documents(client, document_ref, entity_person_data):
     """Test sru documents rest api."""
-    api_url = url_for('api_sru.documents',
-                      version='1.1', operation='searchRetrieve',
-                      query='al-Wajīz')
+    api_url = url_for(
+        "api_sru.documents", version="1.1", operation="searchRetrieve", query="al-Wajīz"
+    )
     res = client.get(api_url)
     assert res.status_code == 200
     xml_dict = get_xml_dict(res)
-    assert 'zs:searchRetrieveResponse' in xml_dict
-    search_rr = xml_dict['zs:searchRetrieveResponse']
-    assert search_rr.get('zs:echoedSearchRetrieveRequest') == {
-        'zs:maximumRecords': '100',
-        'zs:query': 'al-Wajīz',
-        'zs:query_es': 'al-Wajīz',
-        'zs:recordPacking': 'XML',
-        'zs:recordSchema': 'info:sru/schema/1/marcxml-v1.1-light',
-        'zs:resultSetTTL': '0',
-        'zs:startRecord': '1'
+    assert "zs:searchRetrieveResponse" in xml_dict
+    search_rr = xml_dict["zs:searchRetrieveResponse"]
+    assert search_rr.get("zs:echoedSearchRetrieveRequest") == {
+        "zs:maximumRecords": "100",
+        "zs:query": "al-Wajīz",
+        "zs:query_es": "al-Wajīz",
+        "zs:recordPacking": "XML",
+        "zs:recordSchema": "info:sru/schema/1/marcxml-v1.1-light",
+        "zs:resultSetTTL": "0",
+        "zs:startRecord": "1",
     }
-    assert search_rr.get('zs:numberOfRecords') == str(Document.count())
+    assert search_rr.get("zs:numberOfRecords") == str(Document.count())


 def test_sru_documents_items(client, document_sion_items):
     """Test sru documents with items."""
-    api_url = url_for('api_sru.documents',
-                      version='1.1', operation='searchRetrieve',
-                      query='"La reine Berthe et son fils"')
+    api_url = url_for(
+        "api_sru.documents",
+        version="1.1",
+        operation="searchRetrieve",
+        query='"La reine Berthe et son fils"',
+    )
     res = client.get(api_url)
     assert res.status_code == 200
     xml_dict = get_xml_dict(res)
-    assert 'zs:searchRetrieveResponse' in xml_dict
-    ech_srr = xml_dict['zs:searchRetrieveResponse'][
-        'zs:echoedSearchRetrieveRequest']
-    assert ech_srr['zs:query'] == '"La reine Berthe et son fils"'
-    assert ech_srr['zs:query_es'] == '"La reine Berthe et son fils"'
+    assert "zs:searchRetrieveResponse" in xml_dict
+    ech_srr = xml_dict["zs:searchRetrieveResponse"]["zs:echoedSearchRetrieveRequest"]
+    assert ech_srr["zs:query"] == '"La reine Berthe et son fils"'
+    assert ech_srr["zs:query_es"] == '"La reine Berthe et son fils"'

-    api_url = url_for('api_sru.documents',
-                      version='1.1', operation='searchRetrieve',
-                      query='"La reine Berthe et son fils"',
-                      format='marcxml')
+    api_url = url_for(
+        "api_sru.documents",
+        version="1.1",
+        operation="searchRetrieve",
+        query='"La reine Berthe et son fils"',
+        format="marcxml",
+    )
     res = client.get(api_url)
     assert res.status_code == 200
     xml_dict = get_xml_dict(res)
-    assert 'zs:searchRetrieveResponse' in xml_dict
-    ech_srr = xml_dict['zs:searchRetrieveResponse'][
-        'zs:echoedSearchRetrieveRequest']
-    assert ech_srr['zs:query'] == '"La reine Berthe et son fils"'
-    assert ech_srr['zs:query_es'] == '"La reine Berthe et son fils"'
+    assert "zs:searchRetrieveResponse" in xml_dict
+    ech_srr = xml_dict["zs:searchRetrieveResponse"]["zs:echoedSearchRetrieveRequest"]
+    assert ech_srr["zs:query"] == '"La reine Berthe et son fils"'
+    assert ech_srr["zs:query_es"] == '"La reine Berthe et son fils"'

-    api_url = url_for('api_sru.documents',
-                      version='1.1', operation='searchRetrieve',
-                      query='dc.title="La reine Berthe et son fils"',
-                      format='dc')
+    api_url = url_for(
+        "api_sru.documents",
+        version="1.1",
+        operation="searchRetrieve",
+        query='dc.title="La reine Berthe et son fils"',
+        format="dc",
+    )
     res = client.get(api_url)
     assert res.status_code == 200
     xml_dict = get_xml_dict(res)
-    assert 'searchRetrieveResponse' in xml_dict
-    ech_srr = xml_dict['searchRetrieveResponse']['echoedSearchRetrieveRequest']
-    assert ech_srr['query'] == 'dc.title="La reine Berthe et son fils"'
-    assert ech_srr['query_es'] == 'title.\\*:' \
-        '"La reine Berthe et son fils"'
+    assert "searchRetrieveResponse" in xml_dict
+    ech_srr = xml_dict["searchRetrieveResponse"]["echoedSearchRetrieveRequest"]
+    assert ech_srr["query"] == 'dc.title="La reine Berthe et son fils"'
+    assert ech_srr["query_es"] == "title.\\*:" '"La reine Berthe et son fils"'


 def test_sru_documents_diagnostics(client):
     """Test sru documents diagnostics."""
-    api_url = url_for('api_sru.documents',
-                      version='1.1', operation='searchRetrieve',
-                      query='(((')
+    api_url = url_for(
+        "api_sru.documents", version="1.1", operation="searchRetrieve", query="((("
+    )
     res = client.get(api_url)
     assert res.status_code == 200
     xml_dict = get_xml_dict(res)
-    assert 'srw:searchRetrieveResponse' in xml_dict
-    assert xml_dict['srw:searchRetrieveResponse'][
-        'diag:diagnostics']['diag:message'] == 'Malformed Query'
+    assert "srw:searchRetrieveResponse" in xml_dict
+    assert (
+        xml_dict["srw:searchRetrieveResponse"]["diag:diagnostics"]["diag:message"]
+        == "Malformed Query"
+    )
diff --git a/tests/api/stats/conftest.py b/tests/api/stats/conftest.py
index 07914145c4..a6f8b42ff2 100644
--- a/tests/api/stats/conftest.py
+++ b/tests/api/stats/conftest.py
@@ -26,42 +26,32 @@
 from rero_ils.modules.stats.api.report import StatsReport


-@pytest.fixture(scope='module')
-def stats(item_lib_martigny, item_lib_fully, item_lib_sion,
-          ill_request_martigny):
+@pytest.fixture(scope="module")
+def stats(item_lib_martigny, item_lib_fully, item_lib_sion, ill_request_martigny):
     """Stats fixture."""
     stats = StatsForPricing(to_date=arrow.utcnow())
     yield Stat.create(
-        data=dict(
-            type='billing',
-            values=stats.collect()
-        ),
-        dbcommit=True,
-        reindex=True
+        data=dict(type="billing", values=stats.collect()), dbcommit=True, reindex=True
     )


-@pytest.fixture(scope='module')
+@pytest.fixture(scope="module")
 def stats_librarian(item_lib_martigny, item_lib_fully, item_lib_sion):
     """Stats fixture for librarian."""
     stats_librarian = StatsForLibrarian()
     date_range = {
-        'from': stats_librarian.date_range['gte'],
-        'to': stats_librarian.date_range['lte']
+        "from": stats_librarian.date_range["gte"],
+        "to": stats_librarian.date_range["lte"],
     }
     stats_values = stats_librarian.collect()
     yield Stat.create(
-        data=dict(
-            type='librarian',
-            date_range=date_range,
-            values=stats_values
-        ),
+        data=dict(type="librarian", date_range=date_range, values=stats_values),
         dbcommit=True,
-        reindex=True
+        reindex=True,
     )


-@pytest.fixture(scope='module')
+@pytest.fixture(scope="module")
 def stats_report_martigny(stats_cfg_martigny, item_lib_martigny):
     """Stats fixture for librarian."""
     stat_report = StatsReport(stats_cfg_martigny)
diff --git a/tests/api/stats/test_stats_permissions.py b/tests/api/stats/test_stats_permissions.py
index e5362b43eb..c32494b29b 100644
--- a/tests/api/stats/test_stats_permissions.py
+++ b/tests/api/stats/test_stats_permissions.py
@@ -25,8 +25,7 @@
 def test_stats_permissions(
-    patron_martigny, stats_librarian, librarian_martigny,
-    system_librarian_martigny
+    patron_martigny, stats_librarian, librarian_martigny, system_librarian_martigny
 ):
     """Test stat permissions class."""

@@ -34,47 +33,67 @@ def test_stats_permissions(
     identity_changed.send(
         current_app._get_current_object(), identity=AnonymousIdentity()
     )
-    check_permission(StatisticsPermissionPolicy, {
-        'search': False,
-        'read': False,
-        'create': False,
-        'update': False,
-        'delete': False
-    }, None)
-    check_permission(StatisticsPermissionPolicy, {
-        'search': False,
-        'read': False,
-        'create': False,
-        'update': False,
-        'delete': False
-    }, stats_librarian)
+    check_permission(
+        StatisticsPermissionPolicy,
+        {
+            "search": False,
+            "read": False,
+            "create": False,
+            "update": False,
+            "delete": False,
+        },
+        None,
+    )
+    check_permission(
+        StatisticsPermissionPolicy,
+        {
+            "search": False,
+            "read": False,
+            "create": False,
+            "update": False,
+            "delete": False,
+        },
+        stats_librarian,
+    )

     login_user(patron_martigny.user)
-    check_permission(StatisticsPermissionPolicy, {'create': False}, {})
-    check_permission(StatisticsPermissionPolicy, {
-        'search': False,
-        'read': False,
-        'create': False,
-        'update': False,
-        'delete': False
-    }, stats_librarian)
+    check_permission(StatisticsPermissionPolicy, {"create": False}, {})
+    check_permission(
+        StatisticsPermissionPolicy,
+        {
+            "search": False,
+            "read": False,
+            "create": False,
+            "update": False,
+            "delete": False,
+        },
+        stats_librarian,
+    )

     # Librarian with specific role
     #   - search/read: any items
     #   - create/update/delete: always disallowed
     login_user(librarian_martigny.user)
-    check_permission(StatisticsPermissionPolicy, {
-        'search': False,
-        'read': False,
-        'create': False,
-        'update': False,
-        'delete': False
-    }, stats_librarian)
+    check_permission(
+        StatisticsPermissionPolicy,
+        {
+            "search": False,
+            "read": False,
+            "create": False,
+            "update": False,
+            "delete": False,
+        },
+        stats_librarian,
+    )

     login_user(system_librarian_martigny.user)
-    check_permission(StatisticsPermissionPolicy, {
-        'search': True,
-        'read': True,
-        'create': False,
-        'update': False,
-        'delete': False
-    }, stats_librarian)
+    check_permission(
+        StatisticsPermissionPolicy,
+        {
+            "search": True,
+            "read": True,
+            "create": False,
+            "update": False,
+            "delete": False,
+        },
+        stats_librarian,
+    )
diff --git a/tests/api/stats/test_stats_rest.py b/tests/api/stats/test_stats_rest.py
index 1dfd4d97ed..ae8c8c6516 100644
--- a/tests/api/stats/test_stats_rest.py
+++ b/tests/api/stats/test_stats_rest.py
@@ -20,51 +20,52 @@
 import mock
 from flask import url_for
 from invenio_accounts.testutils import login_user_via_session
-from utils import VerifyRecordPermissionPatch, get_csv, get_json, \
-    to_relative_url
+from utils import VerifyRecordPermissionPatch, get_csv, get_json, to_relative_url


-@mock.patch('invenio_records_rest.views.verify_record_permission',
-            mock.MagicMock(return_value=VerifyRecordPermissionPatch))
+@mock.patch(
+    "invenio_records_rest.views.verify_record_permission",
+    mock.MagicMock(return_value=VerifyRecordPermissionPatch),
+)
 def test_stats_get(client, stats, csv_header):
     """Test record retrieval."""
-    item_url = url_for('invenio_records_rest.stat_item', pid_value=stats.pid)
+    item_url = url_for("invenio_records_rest.stat_item", pid_value=stats.pid)
     res = client.get(item_url)
     assert res.status_code == 200
-    assert res.headers['ETag']
+    assert res.headers["ETag"]

     data = get_json(res)
-    for k in ['created', 'updated', 'metadata', 'links']:
+    for k in ["created", "updated", "metadata", "links"]:
         assert k in data

     # Check self links
-    res = client.get(to_relative_url(data['links']['self']))
+    res = client.get(to_relative_url(data["links"]["self"]))
     assert res.status_code == 200

     # CSV format
-    params = {'pid_value': stats.pid, 'format': 'csv'}
-    item_url = url_for('invenio_records_rest.stat_item', **params)
+    params = {"pid_value": stats.pid, "format": "csv"}
+    item_url = url_for("invenio_records_rest.stat_item", **params)
     res = client.get(item_url, headers=csv_header)
     assert res.status_code == 200
     data = get_csv(res)
     assert data == (
-        'files_volume,library id,library name,number_of_active_patrons,'
-        'number_of_checkins,number_of_checkouts,number_of_deleted_items,'
-        'number_of_docs_with_files,number_of_documents,number_of_files,'
-        'number_of_ill_requests,number_of_items,number_of_librarians,'
-        'number_of_libraries,number_of_new_items,number_of_new_patrons,'
-        'number_of_order_lines,number_of_patrons,number_of_renewals,'
-        'number_of_requests\r\n'
-        '0.000,lib3,Library of Fully,0,0,0,0,0,1,0,0,1,0,2,1,1,0,1,0,0\r\n'
-        '0.000,lib1,Library of Martigny-ville,'
-        '0,0,0,0,0,1,0,1,1,0,2,1,1,0,1,0,0\r\n'
-        '0.000,lib4,Library of Sion,0,0,0,0,0,1,0,0,1,0,1,1,0,0,0,0,0\r\n'
+        "files_volume,library id,library name,number_of_active_patrons,"
+        "number_of_checkins,number_of_checkouts,number_of_deleted_items,"
+        "number_of_docs_with_files,number_of_documents,number_of_files,"
+        "number_of_ill_requests,number_of_items,number_of_librarians,"
+        "number_of_libraries,number_of_new_items,number_of_new_patrons,"
+        "number_of_order_lines,number_of_patrons,number_of_renewals,"
+        "number_of_requests\r\n"
+        "0.000,lib3,Library of Fully,0,0,0,0,0,1,0,0,1,0,2,1,1,0,1,0,0\r\n"
+        "0.000,lib1,Library of Martigny-ville,"
+        "0,0,0,0,0,1,0,1,1,0,2,1,1,0,1,0,0\r\n"
+        "0.000,lib4,Library of Sion,0,0,0,0,0,1,0,0,1,0,1,1,0,0,0,0,0\r\n"
     )

-    list_url = url_for('invenio_records_rest.stat_list')
+    list_url = url_for("invenio_records_rest.stat_list")
     res = client.get(list_url)
     assert res.status_code == 200
     data = get_json(res)
-    assert data['hits']['hits']
+    assert data["hits"]["hits"]


 def test_stats_librarian_data(
@@ -72,72 +73,73 @@
 ):
     """Test librarian statistics."""
     params = dict(pid_value=stats_librarian.pid)
-    item_url = url_for('invenio_records_rest.stat_item', **params)
+    item_url = url_for("invenio_records_rest.stat_item", **params)

     # system librarian could view all libraries stats for its own organisation
     login_user_via_session(client, system_librarian_martigny.user)
     res = client.get(item_url)
     data = res.get_json()
     filtered_stat_libs = {
-        value['library']['pid'] for value in data['metadata']['values']
+        value["library"]["pid"] for value in data["metadata"]["values"]
     }
     manageable_libs = set(system_librarian_martigny.manageable_library_pids)
     assert not filtered_stat_libs.difference(manageable_libs)

     # Check filtered
librarian stats by libraries - librarian_martigny['roles'].append('pro_statistic_manager') - librarian_martigny.update( - librarian_martigny, dbcommit=False, reindex=False) + librarian_martigny["roles"].append("pro_statistic_manager") + librarian_martigny.update(librarian_martigny, dbcommit=False, reindex=False) login_user_via_session(client, librarian_martigny.user) res = client.get(item_url) data = res.get_json() # Check response contains 'date_range' and 'librarian' - assert data['metadata']['date_range'] - assert data['metadata']['type'] == 'librarian' + assert data["metadata"]["date_range"] + assert data["metadata"]["type"] == "librarian" # Check that response contains only stats for the manageable libraries. # This filter is applied by the 'pre_dump' resource extension manageable_libs = set(librarian_martigny.manageable_library_pids) - initial_stat_libs = { - value['library']['pid'] for value in stats_librarian['values'] - } + initial_stat_libs = {value["library"]["pid"] for value in stats_librarian["values"]} filtered_stat_libs = { - value['library']['pid'] for value in data['metadata']['values'] + value["library"]["pid"] for value in data["metadata"]["values"] } assert initial_stat_libs.difference(manageable_libs) assert not filtered_stat_libs.difference(manageable_libs) from invenio_db import db + db.session.rollback() -@mock.patch('invenio_records_rest.views.verify_record_permission', - mock.MagicMock(return_value=VerifyRecordPermissionPatch)) +@mock.patch( + "invenio_records_rest.views.verify_record_permission", + mock.MagicMock(return_value=VerifyRecordPermissionPatch), +) def test_stats_report_get(client, stats_report_martigny, csv_header): """Test record retrieval.""" item_url = url_for( - 'invenio_records_rest.stat_item', pid_value=stats_report_martigny.pid) + "invenio_records_rest.stat_item", pid_value=stats_report_martigny.pid + ) res = client.get(item_url) assert res.status_code == 200 - assert res.headers['ETag'] + assert res.headers["ETag"] data = get_json(res) - for k in ['created', 'updated', 'metadata', 'links']: + for k in ["created", "updated", "metadata", "links"]: assert k in data # Check self links - res = client.get(to_relative_url(data['links']['self'])) + res = client.get(to_relative_url(data["links"]["self"])) assert res.status_code == 200 # CSV format - params = {'pid_value': stats_report_martigny.pid, 'format': 'csv'} - item_url = url_for('invenio_records_rest.stat_item', **params) + params = {"pid_value": stats_report_martigny.pid, "format": "csv"} + item_url = url_for("invenio_records_rest.stat_item", **params) res = client.get(item_url, headers=csv_header) assert res.status_code == 200 data = get_csv(res) assert data - list_url = url_for('invenio_records_rest.stat_list') + list_url = url_for("invenio_records_rest.stat_list") res = client.get(list_url) assert res.status_code == 200 data = get_json(res) - assert data['hits']['hits'] + assert data["hits"]["hits"] diff --git a/tests/api/stats_cfg/test_stats_cfg_permissions.py b/tests/api/stats_cfg/test_stats_cfg_permissions.py index bb9b2f68d1..cde6c7dd11 100644 --- a/tests/api/stats_cfg/test_stats_cfg_permissions.py +++ b/tests/api/stats_cfg/test_stats_cfg_permissions.py @@ -21,13 +21,19 @@ from flask_security import login_user from utils import check_permission -from rero_ils.modules.stats_cfg.permissions import \ - StatisticsConfigurationPermissionPolicy +from rero_ils.modules.stats_cfg.permissions import ( + StatisticsConfigurationPermissionPolicy, +) def test_stats_cfg_permissions( - patron_martigny, 
stats_cfg_martigny, stats_cfg_sion, - librarian_martigny, system_librarian_martigny, librarian_saxon, lib_saxon + patron_martigny, + stats_cfg_martigny, + stats_cfg_sion, + librarian_martigny, + system_librarian_martigny, + librarian_saxon, + lib_saxon, ): """Test statistics configuration permissions class.""" @@ -35,83 +41,100 @@ def test_stats_cfg_permissions( identity_changed.send( current_app._get_current_object(), identity=AnonymousIdentity() ) - check_permission(StatisticsConfigurationPermissionPolicy, { - 'search': False, - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, None) - check_permission(StatisticsConfigurationPermissionPolicy, { - 'search': False, - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, stats_cfg_martigny) + check_permission( + StatisticsConfigurationPermissionPolicy, + { + "search": False, + "read": False, + "create": False, + "update": False, + "delete": False, + }, + None, + ) + check_permission( + StatisticsConfigurationPermissionPolicy, + { + "search": False, + "read": False, + "create": False, + "update": False, + "delete": False, + }, + stats_cfg_martigny, + ) login_user(patron_martigny.user) - check_permission(StatisticsConfigurationPermissionPolicy, - {'create': False}, {}) - check_permission(StatisticsConfigurationPermissionPolicy, { - 'search': False, - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, stats_cfg_martigny) + check_permission(StatisticsConfigurationPermissionPolicy, {"create": False}, {}) + check_permission( + StatisticsConfigurationPermissionPolicy, + { + "search": False, + "read": False, + "create": False, + "update": False, + "delete": False, + }, + stats_cfg_martigny, + ) # Librarian with specific role # - search/read: any items login_user(librarian_martigny.user) - check_permission(StatisticsConfigurationPermissionPolicy, { - 'search': False, - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, stats_cfg_martigny) + check_permission( + StatisticsConfigurationPermissionPolicy, + { + "search": False, + "read": False, + "create": False, + "update": False, + "delete": False, + }, + stats_cfg_martigny, + ) # Librarian with the right role # cannot update or delete a config of an other lib login_user(librarian_saxon.user) - check_permission(StatisticsConfigurationPermissionPolicy, { - 'search': True, - 'read': True, - 'create': True, - 'update': False, - 'delete': False - }, stats_cfg_martigny) + check_permission( + StatisticsConfigurationPermissionPolicy, + { + "search": True, + "read": True, + "create": True, + "update": False, + "delete": False, + }, + stats_cfg_martigny, + ) # Librarian with the right role # can update or delete a config of this library stats_cfg_martigny.update( - dict( - library={ - '$ref': f'https://bib.test.rero.ch/libraries/{lib_saxon.pid}' - })) - check_permission(StatisticsConfigurationPermissionPolicy, { - 'search': True, - 'read': True, - 'create': True, - 'update': True, - 'delete': True - }, stats_cfg_martigny) + dict(library={"$ref": f"https://bib.test.rero.ch/libraries/{lib_saxon.pid}"}) + ) + check_permission( + StatisticsConfigurationPermissionPolicy, + {"search": True, "read": True, "create": True, "update": True, "delete": True}, + stats_cfg_martigny, + ) # System librarian with specific role # - search/read: any items login_user(system_librarian_martigny.user) - check_permission(StatisticsConfigurationPermissionPolicy, { - 'search': True, - 'read': True, - 'create': True, - 'update': True, - 
'delete': True - }, stats_cfg_martigny) + check_permission( + StatisticsConfigurationPermissionPolicy, + {"search": True, "read": True, "create": True, "update": True, "delete": True}, + stats_cfg_martigny, + ) - check_permission(StatisticsConfigurationPermissionPolicy, { - 'search': True, - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, stats_cfg_sion) + check_permission( + StatisticsConfigurationPermissionPolicy, + { + "search": True, + "read": False, + "create": False, + "update": False, + "delete": False, + }, + stats_cfg_sion, + ) diff --git a/tests/api/stats_cfg/test_stats_cfg_rest.py b/tests/api/stats_cfg/test_stats_cfg_rest.py index 997dbe90e5..e7749223e1 100644 --- a/tests/api/stats_cfg/test_stats_cfg_rest.py +++ b/tests/api/stats_cfg/test_stats_cfg_rest.py @@ -22,26 +22,29 @@ from utils import VerifyRecordPermissionPatch, get_json, to_relative_url -@mock.patch('invenio_records_rest.views.verify_record_permission', - mock.MagicMock(return_value=VerifyRecordPermissionPatch)) +@mock.patch( + "invenio_records_rest.views.verify_record_permission", + mock.MagicMock(return_value=VerifyRecordPermissionPatch), +) def test_stats_cfg_get(client, stats_cfg_martigny): """Test record retrieval.""" item_url = url_for( - 'invenio_records_rest.stacfg_item', pid_value=stats_cfg_martigny.pid) + "invenio_records_rest.stacfg_item", pid_value=stats_cfg_martigny.pid + ) res = client.get(item_url) assert res.status_code == 200 data = get_json(res) - for k in ['created', 'updated', 'metadata', 'links']: + for k in ["created", "updated", "metadata", "links"]: assert k in data # Check self links - res = client.get(to_relative_url(data['links']['self'])) + res = client.get(to_relative_url(data["links"]["self"])) assert res.status_code == 200 # search - list_url = url_for('invenio_records_rest.stacfg_list') + list_url = url_for("invenio_records_rest.stacfg_list") res = client.get(list_url) assert res.status_code == 200 data = get_json(res) - assert data['hits']['hits'] - assert data['aggregations']['category']['buckets'] + assert data["hits"]["hits"] + assert data["aggregations"]["category"]["buckets"] diff --git a/tests/api/stats_cfg/test_stats_cfg_views.py b/tests/api/stats_cfg/test_stats_cfg_views.py index 4e4ad004a5..fe4f56e9aa 100644 --- a/tests/api/stats_cfg/test_stats_cfg_views.py +++ b/tests/api/stats_cfg/test_stats_cfg_views.py @@ -25,25 +25,24 @@ def test_view_stats_cfg( - client, patron_martigny, librarian_martigny, - system_librarian_martigny + client, patron_martigny, librarian_martigny, system_librarian_martigny ): """Test view status.""" # User not logged - result = client.get(url_for('stats_cfg.live_stats_reports', pid='1')) + result = client.get(url_for("stats_cfg.live_stats_reports", pid="1")) assert result.status_code == 401 # User without access permissions login_user_via_session(client, patron_martigny.user) - result = client.get(url_for('stats_cfg.live_stats_reports', pid='1')) + result = client.get(url_for("stats_cfg.live_stats_reports", pid="1")) assert result.status_code == 403 # User with librarian permissions login_user_via_session(client, librarian_martigny.user) - result = client.get(url_for('stats_cfg.live_stats_reports', pid='1')) + result = client.get(url_for("stats_cfg.live_stats_reports", pid="1")) assert result.status_code == 403 # User with librarian permissions login_user_via_session(client, system_librarian_martigny.user) - result = client.get(url_for('stats_cfg.live_stats_reports', pid='foo')) + result = 
client.get(url_for("stats_cfg.live_stats_reports", pid="foo")) assert result.status_code == 404 diff --git a/tests/api/templates/test_templates_marshmallow.py b/tests/api/templates/test_templates_marshmallow.py index 69cef8f51d..fd8bd2cb20 100644 --- a/tests/api/templates/test_templates_marshmallow.py +++ b/tests/api/templates/test_templates_marshmallow.py @@ -28,24 +28,25 @@ from rero_ils.modules.templates.models import TemplateVisibility -@mock.patch('invenio_records_rest.views.verify_record_permission', - mock.MagicMock(return_value=VerifyRecordPermissionPatch)) +@mock.patch( + "invenio_records_rest.views.verify_record_permission", + mock.MagicMock(return_value=VerifyRecordPermissionPatch), +) def test_templates_marshmallow_loaders( - client, system_librarian_martigny, templ_doc_public_martigny_data_tmp, - json_header + client, system_librarian_martigny, templ_doc_public_martigny_data_tmp, json_header ): """Test template marshmallow loaders""" login_user_via_session(client, system_librarian_martigny.user) data = templ_doc_public_martigny_data_tmp - del data['pid'] + del data["pid"] # TEST#1 :: API vs Console mode # Through the API, a public template creation aren't allowed. # Public template must be created as `private` and updated later by # an authorized staff member. But using console mode, such restriction # aren't applicable. - assert data['visibility'] == TemplateVisibility.PUBLIC - res, res_data = postdata(client, 'invenio_records_rest.tmpl_list', data) + assert data["visibility"] == TemplateVisibility.PUBLIC + res, res_data = postdata(client, "invenio_records_rest.tmpl_list", data) assert res.status_code == 400 template = Template.create(deepcopy(data), dbcommit=True, reindex=False) @@ -55,19 +56,19 @@ def test_templates_marshmallow_loaders( # TEST#2 :: API workflow # Create a private template using API, then update it to set visibility # as 'public'. 
- data['visibility'] = TemplateVisibility.PRIVATE - res, res_data = postdata(client, 'invenio_records_rest.tmpl_list', data) + data["visibility"] = TemplateVisibility.PRIVATE + res, res_data = postdata(client, "invenio_records_rest.tmpl_list", data) assert res.status_code == 201 - data['pid'] = res_data['metadata']['pid'] - data['visibility'] = TemplateVisibility.PUBLIC + data["pid"] = res_data["metadata"]["pid"] + data["visibility"] = TemplateVisibility.PUBLIC res = client.put( - url_for('invenio_records_rest.tmpl_item', pid_value=data['pid']), + url_for("invenio_records_rest.tmpl_item", pid_value=data["pid"]), data=json.dumps(data), - headers=json_header + headers=json_header, ) assert res.status_code == 200 - template = Template.get_record_by_pid(data['pid']) + template = Template.get_record_by_pid(data["pid"]) assert template.is_public # RESET FIXTURES diff --git a/tests/api/templates/test_templates_permissions.py b/tests/api/templates/test_templates_permissions.py index 4f38cfa09d..5149701dc0 100644 --- a/tests/api/templates/test_templates_permissions.py +++ b/tests/api/templates/test_templates_permissions.py @@ -20,18 +20,24 @@ from flask import current_app from flask_principal import AnonymousIdentity, identity_changed from flask_security import login_user -from utils import check_permission, flush_index +from utils import check_permission from rero_ils.modules.patrons.api import Patron, PatronsSearch from rero_ils.modules.templates.permissions import TemplatePermissionPolicy -@mock.patch.object(Patron, '_extensions', []) +@mock.patch.object(Patron, "_extensions", []) def test_template_permissions( - patron_martigny, librarian_martigny, system_librarian_martigny, - org_martigny, templ_doc_public_martigny, templ_doc_private_martigny, - templ_doc_public_sion, templ_doc_private_saxon, templ_doc_public_saxon, - templ_doc_private_sion + patron_martigny, + librarian_martigny, + system_librarian_martigny, + org_martigny, + templ_doc_public_martigny, + templ_doc_private_martigny, + templ_doc_public_sion, + templ_doc_private_saxon, + templ_doc_public_saxon, + templ_doc_private_sion, ): """Test template permissions class.""" @@ -40,74 +46,106 @@ def test_template_permissions( identity_changed.send( current_app._get_current_object(), identity=AnonymousIdentity() ) - check_permission(TemplatePermissionPolicy, { - 'search': False, - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, None) - check_permission(TemplatePermissionPolicy, { - 'search': False, - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, templ_doc_public_martigny) + check_permission( + TemplatePermissionPolicy, + { + "search": False, + "read": False, + "create": False, + "update": False, + "delete": False, + }, + None, + ) + check_permission( + TemplatePermissionPolicy, + { + "search": False, + "read": False, + "create": False, + "update": False, + "delete": False, + }, + templ_doc_public_martigny, + ) login_user(patron_martigny.user) - check_permission(TemplatePermissionPolicy, {'create': False}, {}) - check_permission(TemplatePermissionPolicy, { - 'search': False, - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, templ_doc_public_martigny) - check_permission(TemplatePermissionPolicy, { - 'search': False, - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, templ_doc_public_sion) + check_permission(TemplatePermissionPolicy, {"create": False}, {}) + check_permission( + TemplatePermissionPolicy, + { + "search": False, + "read": 
False, + "create": False, + "update": False, + "delete": False, + }, + templ_doc_public_martigny, + ) + check_permission( + TemplatePermissionPolicy, + { + "search": False, + "read": False, + "create": False, + "update": False, + "delete": False, + }, + templ_doc_public_sion, + ) # Librarian with only 'read_only' role # - search/read: templates for its own organisation # - create/update/delete: disallowed - original_roles = librarian_martigny.get('roles', []) - librarian_martigny['roles'] = ['pro_read_only'] + original_roles = librarian_martigny.get("roles", []) + librarian_martigny["roles"] = ["pro_read_only"] librarian_martigny.update(librarian_martigny, dbcommit=True, reindex=True) - flush_index(PatronsSearch.Meta.index) + PatronsSearch.flush_and_refresh() login_user(librarian_martigny.user) - check_permission(TemplatePermissionPolicy, { - 'search': True, - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, templ_doc_public_martigny) - check_permission(TemplatePermissionPolicy, { - 'search': True, - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, templ_doc_public_saxon) - check_permission(TemplatePermissionPolicy, { - 'search': True, - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, templ_doc_private_saxon) - check_permission(TemplatePermissionPolicy, { - 'search': True, - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, templ_doc_public_sion) + check_permission( + TemplatePermissionPolicy, + { + "search": True, + "read": True, + "create": False, + "update": False, + "delete": False, + }, + templ_doc_public_martigny, + ) + check_permission( + TemplatePermissionPolicy, + { + "search": True, + "read": True, + "create": False, + "update": False, + "delete": False, + }, + templ_doc_public_saxon, + ) + check_permission( + TemplatePermissionPolicy, + { + "search": True, + "read": False, + "create": False, + "update": False, + "delete": False, + }, + templ_doc_private_saxon, + ) + check_permission( + TemplatePermissionPolicy, + { + "search": True, + "read": False, + "create": False, + "update": False, + "delete": False, + }, + templ_doc_public_sion, + ) # Librarian with classic 'staff-member' role: # * public template : @@ -115,53 +153,71 @@ def test_template_permissions( # - create/update/delete: disallowed operations # * private templates : # - all operations available only for its own templates. - librarian_martigny['roles'] = ['pro_circulation_manager'] + librarian_martigny["roles"] = ["pro_circulation_manager"] librarian_martigny.update(librarian_martigny, dbcommit=True, reindex=True) - flush_index(PatronsSearch.Meta.index) + PatronsSearch.flush_and_refresh() login_user(librarian_martigny.user) # to refresh identity ! 
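    # NOTE: the re-login above is what makes the new role effective:
    # Flask-Principal loads the role needs into the identity when the user
    # logs in, so a role change alone would not refresh the permissions
    # (hence the '# to refresh identity !' reminder).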
- check_permission(TemplatePermissionPolicy, { - 'search': True, - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, templ_doc_public_martigny) - check_permission(TemplatePermissionPolicy, { - 'search': True, - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, templ_doc_public_saxon) - check_permission(TemplatePermissionPolicy, { - 'search': True, - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, templ_doc_public_sion) - check_permission(TemplatePermissionPolicy, { - 'search': True, - 'read': True, - 'create': True, - 'update': True, - 'delete': True - }, templ_doc_private_martigny) - check_permission(TemplatePermissionPolicy, { - 'search': True, - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, templ_doc_private_saxon) - check_permission(TemplatePermissionPolicy, { - 'search': True, - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, templ_doc_private_sion) + check_permission( + TemplatePermissionPolicy, + { + "search": True, + "read": True, + "create": False, + "update": False, + "delete": False, + }, + templ_doc_public_martigny, + ) + check_permission( + TemplatePermissionPolicy, + { + "search": True, + "read": True, + "create": False, + "update": False, + "delete": False, + }, + templ_doc_public_saxon, + ) + check_permission( + TemplatePermissionPolicy, + { + "search": True, + "read": False, + "create": False, + "update": False, + "delete": False, + }, + templ_doc_public_sion, + ) + check_permission( + TemplatePermissionPolicy, + {"search": True, "read": True, "create": True, "update": True, "delete": True}, + templ_doc_private_martigny, + ) + check_permission( + TemplatePermissionPolicy, + { + "search": True, + "read": False, + "create": False, + "update": False, + "delete": False, + }, + templ_doc_private_saxon, + ) + check_permission( + TemplatePermissionPolicy, + { + "search": True, + "read": False, + "create": False, + "update": False, + "delete": False, + }, + templ_doc_private_sion, + ) # Librarian with classic 'library-administration' role: # * public template (same other staff-members): @@ -170,106 +226,124 @@ def test_template_permissions( # * private templates : # - read: all templates linked to its own library # - other operations available only for its own templates. - librarian_martigny['roles'] = ['pro_library_administrator'] + librarian_martigny["roles"] = ["pro_library_administrator"] librarian_martigny.update(librarian_martigny, dbcommit=True, reindex=True) - flush_index(PatronsSearch.Meta.index) + PatronsSearch.flush_and_refresh() login_user(librarian_martigny.user) # to refresh identity ! 
- check_permission(TemplatePermissionPolicy, { - 'search': True, - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, templ_doc_public_martigny) - check_permission(TemplatePermissionPolicy, { - 'search': True, - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, templ_doc_public_saxon) - check_permission(TemplatePermissionPolicy, { - 'search': True, - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, templ_doc_public_sion) - check_permission(TemplatePermissionPolicy, { - 'search': True, - 'read': True, - 'create': True, - 'update': True, - 'delete': True - }, templ_doc_private_martigny) - check_permission(TemplatePermissionPolicy, { - 'search': True, - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, templ_doc_private_saxon) - check_permission(TemplatePermissionPolicy, { - 'search': True, - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, templ_doc_private_sion) + check_permission( + TemplatePermissionPolicy, + { + "search": True, + "read": True, + "create": False, + "update": False, + "delete": False, + }, + templ_doc_public_martigny, + ) + check_permission( + TemplatePermissionPolicy, + { + "search": True, + "read": True, + "create": False, + "update": False, + "delete": False, + }, + templ_doc_public_saxon, + ) + check_permission( + TemplatePermissionPolicy, + { + "search": True, + "read": False, + "create": False, + "update": False, + "delete": False, + }, + templ_doc_public_sion, + ) + check_permission( + TemplatePermissionPolicy, + {"search": True, "read": True, "create": True, "update": True, "delete": True}, + templ_doc_private_martigny, + ) + check_permission( + TemplatePermissionPolicy, + { + "search": True, + "read": True, + "create": False, + "update": False, + "delete": False, + }, + templ_doc_private_saxon, + ) + check_permission( + TemplatePermissionPolicy, + { + "search": True, + "read": False, + "create": False, + "update": False, + "delete": False, + }, + templ_doc_private_sion, + ) # Librarian with classic 'full-permissions' role: # * public and private templates: # - all operations for templates in their own organisation - librarian_martigny['roles'] = ['pro_full_permissions'] + librarian_martigny["roles"] = ["pro_full_permissions"] librarian_martigny.update(librarian_martigny, dbcommit=True, reindex=True) - flush_index(PatronsSearch.Meta.index) + PatronsSearch.flush_and_refresh() login_user(librarian_martigny.user) # to refresh identity ! 
- check_permission(TemplatePermissionPolicy, { - 'search': True, - 'read': True, - 'create': True, - 'update': True, - 'delete': True - }, templ_doc_public_martigny) - check_permission(TemplatePermissionPolicy, { - 'search': True, - 'read': True, - 'create': True, - 'update': True, - 'delete': True - }, templ_doc_public_saxon) - check_permission(TemplatePermissionPolicy, { - 'search': True, - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, templ_doc_public_sion) - check_permission(TemplatePermissionPolicy, { - 'search': True, - 'read': True, - 'create': True, - 'update': True, - 'delete': True - }, templ_doc_private_martigny) - check_permission(TemplatePermissionPolicy, { - 'search': True, - 'read': True, - 'create': True, - 'update': True, - 'delete': True - }, templ_doc_private_saxon) - check_permission(TemplatePermissionPolicy, { - 'search': True, - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, templ_doc_private_sion) + check_permission( + TemplatePermissionPolicy, + {"search": True, "read": True, "create": True, "update": True, "delete": True}, + templ_doc_public_martigny, + ) + check_permission( + TemplatePermissionPolicy, + {"search": True, "read": True, "create": True, "update": True, "delete": True}, + templ_doc_public_saxon, + ) + check_permission( + TemplatePermissionPolicy, + { + "search": True, + "read": False, + "create": False, + "update": False, + "delete": False, + }, + templ_doc_public_sion, + ) + check_permission( + TemplatePermissionPolicy, + {"search": True, "read": True, "create": True, "update": True, "delete": True}, + templ_doc_private_martigny, + ) + check_permission( + TemplatePermissionPolicy, + {"search": True, "read": True, "create": True, "update": True, "delete": True}, + templ_doc_private_saxon, + ) + check_permission( + TemplatePermissionPolicy, + { + "search": True, + "read": False, + "create": False, + "update": False, + "delete": False, + }, + templ_doc_private_sion, + ) # reset librarian - librarian_martigny['roles'] = original_roles + librarian_martigny["roles"] = original_roles librarian_martigny.update(librarian_martigny, dbcommit=True, reindex=True) - flush_index(PatronsSearch.Meta.index) + PatronsSearch.flush_and_refresh() diff --git a/tests/api/templates/test_templates_rest.py b/tests/api/templates/test_templates_rest.py index 7ae90b50ed..f68aced809 100644 --- a/tests/api/templates/test_templates_rest.py +++ b/tests/api/templates/test_templates_rest.py @@ -23,144 +23,149 @@ import mock from flask import url_for from invenio_accounts.testutils import login_user_via_session -from utils import VerifyRecordPermissionPatch, get_json, postdata, \ - to_relative_url +from utils import VerifyRecordPermissionPatch, get_json, postdata, to_relative_url from rero_ils.modules.templates.api import Template from rero_ils.modules.templates.models import TemplateVisibility from rero_ils.modules.utils import get_ref_for_pid -@mock.patch('invenio_records_rest.views.verify_record_permission', - mock.MagicMock(return_value=VerifyRecordPermissionPatch)) +@mock.patch( + "invenio_records_rest.views.verify_record_permission", + mock.MagicMock(return_value=VerifyRecordPermissionPatch), +) def test_templates_get(client, templ_doc_public_martigny): """Test template retrieval.""" template = templ_doc_public_martigny - url = url_for('invenio_records_rest.tmpl_item', pid_value=template.pid) + url = url_for("invenio_records_rest.tmpl_item", pid_value=template.pid) res = client.get(url) assert res.status_code == 200 - 
assert res.headers['ETag'] == f'"{template.revision_id}"' + assert res.headers["ETag"] == f'"{template.revision_id}"' data = get_json(res) - assert template.dumps() == data['metadata'] + assert template.dumps() == data["metadata"] # Check metadata - for k in ['created', 'updated', 'metadata', 'links']: + for k in ["created", "updated", "metadata", "links"]: assert k in data # Check self links - res = client.get(to_relative_url(data['links']['self'])) + res = client.get(to_relative_url(data["links"]["self"])) assert res.status_code == 200 assert data == res.json - assert template.dumps() == data['metadata'] + assert template.dumps() == data["metadata"] - url = url_for('invenio_records_rest.tmpl_list', q='pid:tmpl1') + url = url_for("invenio_records_rest.tmpl_list", q="pid:tmpl1") res = client.get(url) assert res.status_code == 200 - assert res.json['hits']['total']['value'] == 1 + assert res.json["hits"]["total"]["value"] == 1 data = template.replace_refs() - data.pop('data', None) - assert res.json['hits']['hits'][0]['metadata'] == data + data.pop("data", None) + assert res.json["hits"]["hits"][0]["metadata"] == data def test_filtered_templates_get( - client, librarian_martigny, templ_doc_public_martigny, - templ_doc_private_martigny, librarian_sion, - system_librarian_martigny, librarian_fully, - system_librarian_sion): + client, + librarian_martigny, + templ_doc_public_martigny, + templ_doc_private_martigny, + librarian_sion, + system_librarian_martigny, + librarian_fully, + system_librarian_sion, +): """Test templates filter by organisation.""" # Martigny # system librarian can have access to all templates login_user_via_session(client, system_librarian_martigny.user) - list_url = url_for('invenio_records_rest.tmpl_list') + list_url = url_for("invenio_records_rest.tmpl_list") res = client.get(list_url) assert res.status_code == 200 data = get_json(res) - assert data['hits']['total']['value'] == 2 + assert data["hits"]["total"]["value"] == 2 # librarian martigny can have access to all public and his templates login_user_via_session(client, librarian_martigny.user) - list_url = url_for('invenio_records_rest.tmpl_list') + list_url = url_for("invenio_records_rest.tmpl_list") res = client.get(list_url) assert res.status_code == 200 data = get_json(res) - assert data['hits']['total']['value'] == 2 + assert data["hits"]["total"]["value"] == 2 # librarian fully can have access to all public templates only login_user_via_session(client, librarian_fully.user) - list_url = url_for('invenio_records_rest.tmpl_list') + list_url = url_for("invenio_records_rest.tmpl_list") res = client.get(list_url) assert res.status_code == 200 data = get_json(res) - assert data['hits']['total']['value'] == 1 + assert data["hits"]["total"]["value"] == 1 # Sion # librarian sion can have access to no templates login_user_via_session(client, librarian_sion.user) - list_url = url_for('invenio_records_rest.tmpl_list') + list_url = url_for("invenio_records_rest.tmpl_list") res = client.get(list_url) assert res.status_code == 200 data = get_json(res) - assert data['hits']['total']['value'] == 0 + assert data["hits"]["total"]["value"] == 0 # system librarian sion can have access to no templates login_user_via_session(client, system_librarian_sion.user) - list_url = url_for('invenio_records_rest.tmpl_list') + list_url = url_for("invenio_records_rest.tmpl_list") res = client.get(list_url) assert res.status_code == 200 data = get_json(res) - assert data['hits']['total']['value'] == 0 + assert data["hits"]["total"]["value"] == 0 
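    # NOTE: cross-organisation isolation shows up as an empty hit list rather
    # than an HTTP error; the organisation filter appears to be applied to
    # the search query itself.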
-@mock.patch('invenio_records_rest.views.verify_record_permission', - mock.MagicMock(return_value=VerifyRecordPermissionPatch)) +@mock.patch( + "invenio_records_rest.views.verify_record_permission", + mock.MagicMock(return_value=VerifyRecordPermissionPatch), +) def test_templates_post_put_delete( - client, org_martigny, system_librarian_martigny, json_header, - templ_doc_private_martigny_data_tmp + client, + org_martigny, + system_librarian_martigny, + json_header, + templ_doc_private_martigny_data_tmp, ): """Test template post.""" # Create policy / POST - item_url = url_for('invenio_records_rest.tmpl_item', pid_value='foo1') - list_url = url_for('invenio_records_rest.tmpl_list', q='pid:foo1') - templ_doc_private_martigny_data_tmp['pid'] = 'foo1' + item_url = url_for("invenio_records_rest.tmpl_item", pid_value="foo1") + list_url = url_for("invenio_records_rest.tmpl_list", q="pid:foo1") + templ_doc_private_martigny_data_tmp["pid"] = "foo1" res, data = postdata( - client, - 'invenio_records_rest.tmpl_list', - templ_doc_private_martigny_data_tmp + client, "invenio_records_rest.tmpl_list", templ_doc_private_martigny_data_tmp ) assert res.status_code == 201 # Check that the returned template matches the given data - templ_doc_private_martigny_data_tmp['pid'] = 'foo1' - assert data['metadata'] == templ_doc_private_martigny_data_tmp + templ_doc_private_martigny_data_tmp["pid"] = "foo1" + assert data["metadata"] == templ_doc_private_martigny_data_tmp res = client.get(item_url) assert res.status_code == 200 data = get_json(res) - assert templ_doc_private_martigny_data_tmp == data['metadata'] + assert templ_doc_private_martigny_data_tmp == data["metadata"] # Update template/PUT data = templ_doc_private_martigny_data_tmp - data['name'] = 'Test Name' - res = client.put( - item_url, - data=json.dumps(data), - headers=json_header - ) + data["name"] = "Test Name" + res = client.put(item_url, data=json.dumps(data), headers=json_header) assert res.status_code == 200 data = get_json(res) - assert data['metadata']['name'] == 'Test Name' + assert data["metadata"]["name"] == "Test Name" res = client.get(list_url) assert res.status_code == 200 - data = get_json(res)['hits']['hits'][0] - assert data['metadata']['name'] == 'Test Name' + data = get_json(res)["hits"]["hits"][0] + assert data["metadata"]["name"] == "Test Name" # Delete template/DELETE res = client.delete(item_url) @@ -170,140 +175,156 @@ def test_templates_post_put_delete( def test_template_secure_api_create( - client, json_header, system_librarian_martigny, - templ_doc_public_martigny, templ_item_public_martigny, - templ_hold_public_martigny, templ_patron_public_martigny + client, + json_header, + system_librarian_martigny, + templ_doc_public_martigny, + templ_item_public_martigny, + templ_hold_public_martigny, + templ_patron_public_martigny, ): """Test templates secure api create.""" - post_entrypoint = 'invenio_records_rest.tmpl_list' + post_entrypoint = "invenio_records_rest.tmpl_list" login_user_via_session(client, system_librarian_martigny.user) # test template creation for documents doc_tmpl = templ_doc_public_martigny - del doc_tmpl['pid'] - doc_tmpl['visibility'] = TemplateVisibility.PRIVATE - doc_tmpl['data']['pid'] = 'toto' + del doc_tmpl["pid"] + doc_tmpl["visibility"] = TemplateVisibility.PRIVATE + doc_tmpl["data"]["pid"] = "toto" res, _ = postdata(client, post_entrypoint, doc_tmpl) assert res.status_code == 201 # ensure that pid is removed from record - assert 'pid' not in res.json['metadata']['data'] + assert "pid" not in 
res.json["metadata"]["data"] # ensure that DB stored data is cleaned too - record = Template.get_record_by_pid(res.json['metadata']['pid']) - assert 'pid' not in record.get('data', {}) + record = Template.get_record_by_pid(res.json["metadata"]["pid"]) + assert "pid" not in record.get("data", {}) # test template creation for items # add fields that will be removed at the creation of the template. item_tmpl = templ_item_public_martigny - del item_tmpl['pid'] - item_tmpl['visibility'] = TemplateVisibility.PRIVATE - item_tmpl['data'].update({ - 'pid': 'dummy', - 'barcode': 'dummy', - 'status': 'on_loan', - 'library': {'$ref': get_ref_for_pid('lib', 'x')}, - 'document': {'$ref': get_ref_for_pid('doc', 'x')}, - 'holding': {'$ref': get_ref_for_pid('hold', 'x')}, - 'organisation': {'$ref': get_ref_for_pid('org', 'x')} - }) + del item_tmpl["pid"] + item_tmpl["visibility"] = TemplateVisibility.PRIVATE + item_tmpl["data"].update( + { + "pid": "dummy", + "barcode": "dummy", + "status": "on_loan", + "library": {"$ref": get_ref_for_pid("lib", "x")}, + "document": {"$ref": get_ref_for_pid("doc", "x")}, + "holding": {"$ref": get_ref_for_pid("hold", "x")}, + "organisation": {"$ref": get_ref_for_pid("org", "x")}, + } + ) res, _ = postdata(client, post_entrypoint, item_tmpl) assert res.status_code == 201 # ensure that added fields are removed from record. - record = Template.get_record_by_pid(res.json['metadata']['pid']) - for field in ['barcode', 'pid', 'status', 'document', 'holding', - 'organisation', 'library']: - assert field not in record['data'] + record = Template.get_record_by_pid(res.json["metadata"]["pid"]) + for field in [ + "barcode", + "pid", + "status", + "document", + "holding", + "organisation", + "library", + ]: + assert field not in record["data"] # templates now prevent the deletion of its owner - assert system_librarian_martigny.get_links_to_me().get('templates') + assert system_librarian_martigny.get_links_to_me().get("templates") # test template creation for holdings # add fields that will be removed at the creation of the template. hold_tmpl = templ_hold_public_martigny - del hold_tmpl['pid'] - hold_tmpl['visibility'] = TemplateVisibility.PRIVATE - hold_tmpl['data'].update({ - 'pid': 'dummy', - 'organisation': {'$ref': get_ref_for_pid('org', 'x')}, - 'library': {'$ref': get_ref_for_pid('lib', 'x')}, - 'document': {'$ref': get_ref_for_pid('doc', 'x')} - }) + del hold_tmpl["pid"] + hold_tmpl["visibility"] = TemplateVisibility.PRIVATE + hold_tmpl["data"].update( + { + "pid": "dummy", + "organisation": {"$ref": get_ref_for_pid("org", "x")}, + "library": {"$ref": get_ref_for_pid("lib", "x")}, + "document": {"$ref": get_ref_for_pid("doc", "x")}, + } + ) res, _ = postdata(client, post_entrypoint, hold_tmpl) assert res.status_code == 201 # ensure that added fields are removed from record. - for field in ['organisation', 'library', 'document', 'pid']: - assert field not in res.json['metadata']['data'] + for field in ["organisation", "library", "document", "pid"]: + assert field not in res.json["metadata"]["data"] # test template creation for patrons # add fields that will be removed at the creation of the template. 
ptrn_tmpl = templ_patron_public_martigny - del ptrn_tmpl['pid'] - ptrn_tmpl['visibility'] = TemplateVisibility.PRIVATE - ptrn_tmpl['data'].update({ - 'pid': 'toto', - 'user_id': 'toto' - }) - ptrn_tmpl['data']['patron'].update({ - 'subscriptions': 'toto', - 'barcode': ['toto'] - }) + del ptrn_tmpl["pid"] + ptrn_tmpl["visibility"] = TemplateVisibility.PRIVATE + ptrn_tmpl["data"].update({"pid": "toto", "user_id": "toto"}) + ptrn_tmpl["data"]["patron"].update({"subscriptions": "toto", "barcode": ["toto"]}) res, _ = postdata(client, post_entrypoint, ptrn_tmpl) assert res.status_code == 201 # ensure that added fields are removed from record. - json_data = res.json['metadata']['data'] - for field in ['user_id', 'patron.subscriptions', 'patron.barcode', 'pid']: - if '.' in field: - level_1, level_2 = field.split('.') + json_data = res.json["metadata"]["data"] + for field in ["user_id", "patron.subscriptions", "patron.barcode", "pid"]: + if "." in field: + level_1, level_2 = field.split(".") assert level_2 not in json_data.get(level_1) else: assert field not in json_data def test_template_secure_api_update( - client, templ_doc_private_martigny, templ_doc_private_martigny_data, - system_librarian_martigny, system_librarian_sion, librarian_martigny, - librarian_saxon, librarian2_martigny, json_header + client, + templ_doc_private_martigny, + templ_doc_private_martigny_data, + system_librarian_martigny, + system_librarian_sion, + librarian_martigny, + librarian_saxon, + librarian2_martigny, + json_header, ): """Test templates secure api update.""" # Martigny login_user_via_session(client, system_librarian_martigny.user) - record_url = url_for('invenio_records_rest.tmpl_item', - pid_value=templ_doc_private_martigny.pid) + record_url = url_for( + "invenio_records_rest.tmpl_item", pid_value=templ_doc_private_martigny.pid + ) original_data = deepcopy(templ_doc_private_martigny_data) data = templ_doc_private_martigny_data - data['name'] = 'Test Name' + data["name"] = "Test Name" res = client.put(record_url, data=json.dumps(data), headers=json_header) assert res.status_code == 200 login_user_via_session(client, librarian_martigny.user) data = templ_doc_private_martigny_data - data['name'] = 'Test Name' - data['data']['pid'] = 'toto' + data["name"] = "Test Name" + data["data"]["pid"] = "toto" res = client.put(record_url, data=json.dumps(data), headers=json_header) assert res.status_code == 200 # ensure that pid is removed from records - assert 'pid' not in res.json['metadata']['data'] + assert "pid" not in res.json["metadata"]["data"] login_user_via_session(client, librarian2_martigny.user) data = templ_doc_private_martigny_data - data['visibility'] = 'public' + data["visibility"] = "public" res = client.put(record_url, data=json.dumps(data), headers=json_header) assert res.status_code == 403 login_user_via_session(client, system_librarian_martigny.user) data = templ_doc_private_martigny_data - data['visibility'] = TemplateVisibility.PUBLIC + data["visibility"] = TemplateVisibility.PUBLIC res = client.put(record_url, data=json.dumps(data), headers=json_header) assert res.status_code == 200 - data['visibility'] = TemplateVisibility.PRIVATE + data["visibility"] = TemplateVisibility.PRIVATE # reverse visibility data.update(data, dbcommit=True, reindex=True) login_user_via_session(client, librarian_saxon.user) data = templ_doc_private_martigny_data - data['name'] = 'Test Name' + data["name"] = "Test Name" res = client.put(record_url, data=json.dumps(data), headers=json_header) assert res.status_code == 403 @@ 
-313,31 +334,33 @@ def test_template_secure_api_update(
     assert res.status_code == 403
 
     # Reset data
-    templ_doc_private_martigny.update(
-        original_data, dbcommit=True, reindex=True)
+    templ_doc_private_martigny.update(original_data, dbcommit=True, reindex=True)
 
 
 def test_template_update_visibility(
-    client, templ_doc_private_martigny, templ_doc_private_martigny_data_tmp,
-    librarian2_martigny, system_librarian_martigny, json_header
+    client,
+    templ_doc_private_martigny,
+    templ_doc_private_martigny_data_tmp,
+    librarian2_martigny,
+    system_librarian_martigny,
+    json_header,
 ):
     """Test template visibility attribute update through the API."""
     tmpl = templ_doc_private_martigny
-    record_url = url_for('invenio_records_rest.tmpl_item', pid_value=tmpl.pid)
-    post_entrypoint = 'invenio_records_rest.tmpl_list'
+    record_url = url_for("invenio_records_rest.tmpl_item", pid_value=tmpl.pid)
+    post_entrypoint = "invenio_records_rest.tmpl_list"
 
     # STEP#1 :: Update a template without connected user.
     # Without a connected user, visibility changes cannot be checked - any
     # changes are accepted.
     magic_mock = mock.MagicMock(return_value=None)
-    with mock.patch('flask_login.utils._get_user', magic_mock):
-        tmpl['visibility'] = TemplateVisibility.PUBLIC
-        tmpl['creator']['$ref'] = \
-            get_ref_for_pid('ptrn', librarian2_martigny.pid)
+    with mock.patch("flask_login.utils._get_user", magic_mock):
+        tmpl["visibility"] = TemplateVisibility.PUBLIC
+        tmpl["creator"]["$ref"] = get_ref_for_pid("ptrn", librarian2_martigny.pid)
         tmpl = tmpl.update(tmpl, dbcommit=True)
         assert tmpl.is_public
 
         # reset to 'private'
-        tmpl['visibility'] = TemplateVisibility.PRIVATE
+        tmpl["visibility"] = TemplateVisibility.PRIVATE
         tmpl = tmpl.update(tmpl, dbcommit=True)
         assert tmpl.is_private
@@ -345,23 +368,15 @@ def test_template_update_visibility(
     # Owner of the template can update template attributes but can't change
     # the template visibility.
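    # (the PUT below that only changes 'description' is expected to return
    # 200, while the one flipping 'visibility' is rejected with 400)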
login_user_via_session(client, librarian2_martigny.user) - description_content = 'my custom description' - tmpl['description'] = description_content - res = client.put( - record_url, - data=json.dumps(tmpl), - headers=json_header - ) + description_content = "my custom description" + tmpl["description"] = description_content + res = client.put(record_url, data=json.dumps(tmpl), headers=json_header) assert res.status_code == 200 tmpl = Template.get_record(tmpl.id) - assert tmpl.is_private and tmpl.get('description') == description_content + assert tmpl.is_private and tmpl.get("description") == description_content - tmpl['visibility'] = TemplateVisibility.PUBLIC - res = client.put( - record_url, - data=json.dumps(tmpl), - headers=json_header - ) + tmpl["visibility"] = TemplateVisibility.PUBLIC + res = client.put(record_url, data=json.dumps(tmpl), headers=json_header) assert res.status_code == 400 # STEP#2 :: System librarian @@ -370,25 +385,18 @@ def test_template_update_visibility( # template login_user_via_session(client, system_librarian_martigny.user) tmpl_data = deepcopy(templ_doc_private_martigny_data_tmp) - del tmpl_data['pid'] - tmpl_data['creator']['$ref'] = \ - get_ref_for_pid('ptrn', system_librarian_martigny.pid) - - res, res_data = postdata( - client, - post_entrypoint, - tmpl_data + del tmpl_data["pid"] + tmpl_data["creator"]["$ref"] = get_ref_for_pid( + "ptrn", system_librarian_martigny.pid ) + + res, res_data = postdata(client, post_entrypoint, tmpl_data) assert res.status_code == 201 - tmpl = Template(res_data['metadata']) - record_url = url_for('invenio_records_rest.tmpl_item', pid_value=tmpl.pid) - tmpl['visibility'] = TemplateVisibility.PUBLIC - res = client.put( - record_url, - data=json.dumps(tmpl), - headers=json_header - ) + tmpl = Template(res_data["metadata"]) + record_url = url_for("invenio_records_rest.tmpl_item", pid_value=tmpl.pid) + tmpl["visibility"] = TemplateVisibility.PUBLIC + res = client.put(record_url, data=json.dumps(tmpl), headers=json_header) assert res.status_code == 200 res = client.delete(record_url) diff --git a/tests/api/test_accounts_rest_auth.py b/tests/api/test_accounts_rest_auth.py index ceb5a24cb3..d1c3b03e0c 100644 --- a/tests/api/test_accounts_rest_auth.py +++ b/tests/api/test_accounts_rest_auth.py @@ -24,13 +24,12 @@ def test_disabled_endpoint(client, app, patron_martigny): """Test disabled endpoint.""" - ext = app.extensions['security'] - ext.registerable = ext.changeable = ext.recoverable = ext.confirmable = \ - True + ext = app.extensions["security"] + ext.registerable = ext.changeable = ext.recoverable = ext.confirmable = True - app.config['SECURITY_CONFIRMABLE'] = True - app.config['SECURITY_SEND_PASSWORD_CHANGE_EMAIL'] = True - app.config['ACCOUNTS_SESSION_ACTIVITY_ENABLED'] = True + app.config["SECURITY_CONFIRMABLE"] = True + app.config["SECURITY_SEND_PASSWORD_CHANGE_EMAIL"] = True + app.config["ACCOUNTS_SESSION_ACTIVITY_ENABLED"] = True def get(url_endpoint): res = client.get(url_endpoint) @@ -40,17 +39,17 @@ def post(url_endpoint): res = client.post(url_endpoint) assert res.status_code == 404 - post(url_for('invenio_accounts_rest_auth.register')) - post(url_for('invenio_accounts_rest_auth.forgot_password')) - post(url_for('invenio_accounts_rest_auth.reset_password')) + post(url_for("invenio_accounts_rest_auth.register")) + post(url_for("invenio_accounts_rest_auth.forgot_password")) + post(url_for("invenio_accounts_rest_auth.reset_password")) - post('invenio_accounts_rest_auth.send_confirmation') - 
post('invenio_accounts_rest_auth.confirm_email') + post("invenio_accounts_rest_auth.send_confirmation") + post("invenio_accounts_rest_auth.confirm_email") # Logged as user login_user_via_session(client, patron_martigny.user) - post(url_for('invenio_accounts_rest_auth.logout')) - get(url_for('invenio_accounts_rest_auth.user_info')) - get(url_for('invenio_accounts_rest_auth.sessions_list')) - get(url_for('invenio_accounts_rest_auth.sessions_item', sid_s='1')) + post(url_for("invenio_accounts_rest_auth.logout")) + get(url_for("invenio_accounts_rest_auth.user_info")) + get(url_for("invenio_accounts_rest_auth.sessions_list")) + get(url_for("invenio_accounts_rest_auth.sessions_item", sid_s="1")) diff --git a/tests/api/test_availability.py b/tests/api/test_availability.py index c7a446e9d6..dfcd3291a5 100644 --- a/tests/api/test_availability.py +++ b/tests/api/test_availability.py @@ -35,22 +35,30 @@ def test_item_can_request( - client, document, holding_lib_martigny, item_lib_martigny, - librarian_martigny, lib_martigny, loc_public_martigny, - patron_martigny, circulation_policies, - patron_type_children_martigny, loc_public_martigny_data, - system_librarian_martigny, item_lib_martigny_data, - yesterday, tomorrow + client, + document, + holding_lib_martigny, + item_lib_martigny, + librarian_martigny, + lib_martigny, + loc_public_martigny, + patron_martigny, + circulation_policies, + patron_type_children_martigny, + loc_public_martigny_data, + system_librarian_martigny, + item_lib_martigny_data, + yesterday, + tomorrow, ): """Test item can request API.""" # test no logged user res = client.get( url_for( - 'api_item.can_request', + "api_item.can_request", item_pid=item_lib_martigny.pid, library_pid=lib_martigny.pid, - patron_barcode=patron_martigny.get( - 'patron', {}).get('barcode')[0] + patron_barcode=patron_martigny.get("patron", {}).get("barcode")[0], ) ) assert res.status_code == 401 @@ -62,25 +70,23 @@ def test_item_can_request( # valid test res = client.get( url_for( - 'api_item.can_request', + "api_item.can_request", item_pid=item_lib_martigny.pid, library_pid=lib_martigny.pid, - patron_barcode=patron_martigny.get( - 'patron', {}).get('barcode')[0] + patron_barcode=patron_martigny.get("patron", {}).get("barcode")[0], ) ) assert res.status_code == 200 data = get_json(res) - assert data.get('can') + assert data.get("can") # test no valid item res = client.get( url_for( - 'api_item.can_request', - item_pid='no_item', + "api_item.can_request", + item_pid="no_item", library_pid=lib_martigny.pid, - patron_barcode=patron_martigny.get( - 'patron', {}).get('barcode')[0] + patron_barcode=patron_martigny.get("patron", {}).get("barcode")[0], ) ) assert res.status_code == 404 @@ -88,11 +94,10 @@ def test_item_can_request( # test no valid library res = client.get( url_for( - 'api_item.can_request', + "api_item.can_request", item_pid=item_lib_martigny.pid, - library_pid='no_library', - patron_barcode=patron_martigny.get( - 'patron', {}).get('barcode')[0] + library_pid="no_library", + patron_barcode=patron_martigny.get("patron", {}).get("barcode")[0], ) ) assert res.status_code == 404 @@ -100,30 +105,29 @@ def test_item_can_request( # test no valid patron res = client.get( url_for( - 'api_item.can_request', + "api_item.can_request", item_pid=item_lib_martigny.pid, library_pid=lib_martigny.pid, - patron_barcode='no_barcode' + patron_barcode="no_barcode", ) ) assert res.status_code == 404 # test no valid item status - item_lib_martigny['status'] = ItemStatus.MISSING + item_lib_martigny["status"] = 
ItemStatus.MISSING item_lib_martigny.update(item_lib_martigny, dbcommit=True, reindex=True) res = client.get( url_for( - 'api_item.can_request', + "api_item.can_request", item_pid=item_lib_martigny.pid, library_pid=lib_martigny.pid, - patron_barcode=patron_martigny.get( - 'patron', {}).get('barcode')[0] + patron_barcode=patron_martigny.get("patron", {}).get("barcode")[0], ) ) assert res.status_code == 200 data = get_json(res) - assert not data.get('can') - item_lib_martigny['status'] = ItemStatus.ON_SHELF + assert not data.get("can") + item_lib_martigny["status"] = ItemStatus.ON_SHELF item_lib_martigny.update(item_lib_martigny, dbcommit=True, reindex=True) # Location :: allow_request == false @@ -131,20 +135,20 @@ def test_item_can_request( # item to this location. Check if the item can be requested : it couldn't # with 'Item location doesn't allow request' reason. new_location = deepcopy(loc_public_martigny_data) - del new_location['pid'] - new_location['allow_request'] = False + del new_location["pid"] + new_location["allow_request"] = False new_location = Location.create(new_location, dbcommit=True, reindex=True) assert new_location new_item = deepcopy(item_lib_martigny_data) - del new_item['pid'] - new_item['barcode'] = 'dummy_barcode_allow_request' - new_item['location']['$ref'] = get_ref_for_pid(Location, new_location.pid) + del new_item["pid"] + new_item["barcode"] = "dummy_barcode_allow_request" + new_item["location"]["$ref"] = get_ref_for_pid(Location, new_location.pid) new_item = Item.create(new_item, dbcommit=True, reindex=True) assert new_item - res = client.get(url_for('api_item.can_request', item_pid=new_item.pid)) + res = client.get(url_for("api_item.can_request", item_pid=new_item.pid)) assert res.status_code == 200 - assert not res.json.get('can') + assert not res.json.get("can") # Same test with temporary_location disallowing request. 
# * Main location of the new item allow request @@ -153,191 +157,183 @@ def test_item_can_request( # --> the request is disallowed # * with an obsolete temporary location # --> the request is allowed - new_item['location']['$ref'] = get_ref_for_pid( - Location, loc_public_martigny.pid) - assert loc_public_martigny.get('allow_request') + new_item["location"]["$ref"] = get_ref_for_pid(Location, loc_public_martigny.pid) + assert loc_public_martigny.get("allow_request") new_item.update(new_item, dbcommit=True, reindex=True) - res = client.get(url_for('api_item.can_request', item_pid=new_item.pid)) + res = client.get(url_for("api_item.can_request", item_pid=new_item.pid)) assert res.status_code == 200 - assert res.json.get('can') + assert res.json.get("can") - new_item['temporary_location'] = { - '$ref': get_ref_for_pid(Location, new_location.pid), - 'end_date': tomorrow.strftime('%Y-%m-%d') + new_item["temporary_location"] = { + "$ref": get_ref_for_pid(Location, new_location.pid), + "end_date": tomorrow.strftime("%Y-%m-%d"), } new_item.update(new_item, dbcommit=True, reindex=True) - res = client.get(url_for('api_item.can_request', item_pid=new_item.pid)) + res = client.get(url_for("api_item.can_request", item_pid=new_item.pid)) assert res.status_code == 200 - assert not res.json.get('can') + assert not res.json.get("can") - new_item['temporary_location']['end_date'] = yesterday.strftime('%Y-%m-%d') + new_item["temporary_location"]["end_date"] = yesterday.strftime("%Y-%m-%d") new_item.update(new_item, dbcommit=True, reindex=True) - res = client.get(url_for('api_item.can_request', item_pid=new_item.pid)) + res = client.get(url_for("api_item.can_request", item_pid=new_item.pid)) assert res.status_code == 200 - assert res.json.get('can') + assert res.json.get("can") # remove created data - client.delete(url_for( - 'invenio_records_rest.item_item', - pid_value=new_item.pid - )) - client.delete(url_for( - 'invenio_records_rest.hold_item', - pid_value=new_item.holding_pid - )) - client.delete(url_for( - 'invenio_records_rest.loc_item', - pid_value=new_location.pid - )) + client.delete(url_for("invenio_records_rest.item_item", pid_value=new_item.pid)) + client.delete( + url_for("invenio_records_rest.hold_item", pid_value=new_item.holding_pid) + ) + client.delete(url_for("invenio_records_rest.loc_item", pid_value=new_location.pid)) def test_item_holding_document_availability( - client, document, lib_martigny, - holding_lib_martigny, - item_lib_martigny, item2_lib_martigny, - librarian_martigny, librarian_saxon, - patron_martigny, patron2_martigny, - loc_public_saxon, circulation_policies, ebook_1_data, - item_lib_martigny_data): + client, + document, + lib_martigny, + holding_lib_martigny, + item_lib_martigny, + item2_lib_martigny, + librarian_martigny, + librarian_saxon, + patron_martigny, + patron2_martigny, + loc_public_saxon, + circulation_policies, + ebook_1_data, + item_lib_martigny_data, +): """Test item, holding and document availability.""" assert item_availablity_status( - client, item_lib_martigny.pid, librarian_martigny.user) + client, item_lib_martigny.pid, librarian_martigny.user + ) assert item_lib_martigny.is_available() assert holding_lib_martigny.is_available() - assert holding_lib_martigny.get_holding_loan_conditions() == 'standard' - assert Document.is_available(document.pid, view_code='global') - assert document_availability_status( - client, document.pid, librarian_martigny.user) + assert holding_lib_martigny.get_holding_loan_conditions() == "standard" + assert 
Document.is_available(document.pid, view_code="global") + assert document_availability_status(client, document.pid, librarian_martigny.user) # login as patron - with mock.patch( - 'rero_ils.modules.patrons.api.current_patrons', - [patron_martigny] - ): + with mock.patch("rero_ils.modules.patrons.api.current_patrons", [patron_martigny]): login_user_via_session(client, patron_martigny.user) - assert holding_lib_martigny.get_holding_loan_conditions() \ - == 'short 15 days' + assert holding_lib_martigny.get_holding_loan_conditions() == "short 15 days" # request login_user_via_session(client, librarian_martigny.user) res, data = postdata( client, - 'api_item.librarian_request', + "api_item.librarian_request", dict( item_pid=item_lib_martigny.pid, patron_pid=patron_martigny.pid, pickup_location_pid=loc_public_saxon.pid, transaction_library_pid=lib_martigny.pid, - transaction_user_pid=librarian_martigny.pid - ) + transaction_user_pid=librarian_martigny.pid, + ), ) assert res.status_code == 200 - actions = data.get('action_applied') - loan_pid = actions[LoanAction.REQUEST].get('pid') + actions = data.get("action_applied") + loan_pid = actions[LoanAction.REQUEST].get("pid") assert not item_lib_martigny.is_available() assert not item_availablity_status( - client, item_lib_martigny.pid, librarian_martigny.user) + client, item_lib_martigny.pid, librarian_martigny.user + ) holding = Holding.get_record_by_pid(holding_lib_martigny.pid) assert holding.is_available() - assert holding_lib_martigny.get_holding_loan_conditions() == 'standard' - assert Document.is_available(document.pid, 'global') - assert document_availability_status( - client, document.pid, librarian_martigny.user) + assert holding_lib_martigny.get_holding_loan_conditions() == "standard" + assert Document.is_available(document.pid, "global") + assert document_availability_status(client, document.pid, librarian_martigny.user) # validate request res, _ = postdata( client, - 'api_item.validate_request', + "api_item.validate_request", dict( item_pid=item_lib_martigny.pid, pid=loan_pid, transaction_library_pid=lib_martigny.pid, - transaction_user_pid=librarian_martigny.pid - ) + transaction_user_pid=librarian_martigny.pid, + ), ) assert res.status_code == 200 assert not item_lib_martigny.is_available() assert not item_availablity_status( - client, item_lib_martigny.pid, librarian_martigny.user) + client, item_lib_martigny.pid, librarian_martigny.user + ) assert not item_lib_martigny.is_available() holding = Holding.get_record_by_pid(holding_lib_martigny.pid) assert holding.is_available() - assert holding_lib_martigny.get_holding_loan_conditions() == 'standard' - assert Document.is_available(document.pid, 'global') - assert document_availability_status( - client, document.pid, librarian_martigny.user) + assert holding_lib_martigny.get_holding_loan_conditions() == "standard" + assert Document.is_available(document.pid, "global") + assert document_availability_status(client, document.pid, librarian_martigny.user) login_user_via_session(client, librarian_saxon.user) # receive res, _ = postdata( client, - 'api_item.receive', + "api_item.receive", dict( item_pid=item_lib_martigny.pid, pid=loan_pid, transaction_library_pid=lib_martigny.pid, - transaction_user_pid=librarian_martigny.pid - ) + transaction_user_pid=librarian_martigny.pid, + ), ) assert res.status_code == 200 assert not item_lib_martigny.is_available() assert not item_availablity_status( - client, item_lib_martigny.pid, librarian_saxon.user) + client, item_lib_martigny.pid, 
librarian_saxon.user + ) item = Item.get_record_by_pid(item_lib_martigny.pid) assert not item.is_available() holding = Holding.get_record_by_pid(holding_lib_martigny.pid) assert holding.is_available() - assert holding_lib_martigny.get_holding_loan_conditions() == 'standard' - assert Document.is_available(document.pid, 'global') - assert document_availability_status( - client, document.pid, librarian_martigny.user) + assert holding_lib_martigny.get_holding_loan_conditions() == "standard" + assert Document.is_available(document.pid, "global") + assert document_availability_status(client, document.pid, librarian_martigny.user) # checkout res, _ = postdata( client, - 'api_item.checkout', + "api_item.checkout", dict( item_pid=item_lib_martigny.pid, patron_pid=patron_martigny.pid, transaction_library_pid=lib_martigny.pid, - transaction_user_pid=librarian_martigny.pid - ) + transaction_user_pid=librarian_martigny.pid, + ), ) assert res.status_code == 200 item = Item.get_record_by_pid(item_lib_martigny.pid) assert not item.is_available() - assert not item_availablity_status( - client, item.pid, librarian_martigny.user) + assert not item_availablity_status(client, item.pid, librarian_martigny.user) holding = Holding.get_record_by_pid(holding_lib_martigny.pid) assert holding.is_available() - assert holding_lib_martigny.get_holding_loan_conditions() == 'standard' - assert Document.is_available(document.pid, 'global') - assert document_availability_status( - client, document.pid, librarian_martigny.user) + assert holding_lib_martigny.get_holding_loan_conditions() == "standard" + assert Document.is_available(document.pid, "global") + assert document_availability_status(client, document.pid, librarian_martigny.user) # masked item isn't available - item['_masked'] = True + item["_masked"] = True item = item.update(item, dbcommit=True, reindex=True) assert not item.is_available() - del item['_masked'] + del item["_masked"] item.update(item, dbcommit=True, reindex=True) # test cannot request item already checked out to patron res = client.get( url_for( - 'api_item.can_request', + "api_item.can_request", item_pid=item_lib_martigny.pid, library_pid=lib_martigny.pid, - patron_barcode=patron_martigny.get( - 'patron', {}).get('barcode')[0] + patron_barcode=patron_martigny.get("patron", {}).get("barcode")[0], ) ) assert res.status_code == 200 data = get_json(res) - assert not data.get('can_request') + assert not data.get("can_request") - end_date = item.get_item_end_date(time_format=None, - language='en') + end_date = item.get_item_end_date(time_format=None, language="en") """ request second item with another patron and test document and holding @@ -345,171 +341,143 @@ def test_item_holding_document_availability( """ # login as patron - with mock.patch( - 'rero_ils.modules.patrons.api.current_patrons', - [patron_martigny] - ): + with mock.patch("rero_ils.modules.patrons.api.current_patrons", [patron_martigny]): login_user_via_session(client, patron2_martigny.user) - assert holding_lib_martigny.get_holding_loan_conditions() \ - == 'short 15 days' + assert holding_lib_martigny.get_holding_loan_conditions() == "short 15 days" # request second item login_user_via_session(client, librarian_martigny.user) res, data = postdata( client, - 'api_item.librarian_request', + "api_item.librarian_request", dict( item_pid=item2_lib_martigny.pid, patron_pid=patron2_martigny.pid, pickup_location_pid=loc_public_saxon.pid, transaction_library_pid=lib_martigny.pid, - transaction_user_pid=librarian_martigny.pid - ) + 
transaction_user_pid=librarian_martigny.pid, + ), ) assert res.status_code == 200 assert not item2_lib_martigny.is_available() assert not item_availablity_status( - client, item2_lib_martigny.pid, librarian_martigny.user) + client, item2_lib_martigny.pid, librarian_martigny.user + ) holding = Holding.get_record_by_pid(holding_lib_martigny.pid) assert not holding.is_available() - assert holding_lib_martigny.get_holding_loan_conditions() == 'standard' - assert not Document.is_available(document.pid, 'global') + assert holding_lib_martigny.get_holding_loan_conditions() == "standard" + assert not Document.is_available(document.pid, "global") assert not document_availability_status( - client, document.pid, librarian_martigny.user) + client, document.pid, librarian_martigny.user + ) def item_availablity_status(client, pid, user): """Returns item availability.""" res = client.get( url_for( - 'api_item.item_availability', + "api_item.item_availability", pid=pid, ) ) assert res.status_code == 200 data = get_json(res) - return data.get('available') + return data.get("available") def document_availability_status(client, pid, user): """Returns document availability.""" res = client.get( - url_for( - 'api_documents.document_availability', - pid=pid, - view_code='global' - ) + url_for("api_documents.document_availability", pid=pid, view_code="global") ) assert res.status_code == 200 data = get_json(res) - return data.get('available') + return data.get("available") def test_availability_cipo_allow_request( - client, librarian_martigny, item_lib_martigny, - item_type_standard_martigny, patron_martigny, - circ_policy_short_martigny): + client, + librarian_martigny, + item_lib_martigny, + item_type_standard_martigny, + patron_martigny, + circ_policy_short_martigny, +): """Test availability when the cipo disallows requests.""" login_user_via_session(client, librarian_martigny.user) # update the cipo to disallow request cipo = circ_policy_short_martigny - cipo['allow_requests'] = False + cipo["allow_requests"] = False cipo.update(cipo.dumps(), dbcommit=True, reindex=True) res = client.get( url_for( - 'api_item.can_request', + "api_item.can_request", item_pid=item_lib_martigny.pid, - patron_barcode=patron_martigny.get( - 'patron', {}).get('barcode')[0] + patron_barcode=patron_martigny.get("patron", {}).get("barcode")[0], ) ) assert res.status_code == 200 data = get_json(res) - assert not data.get('can') + assert not data.get("can") # reset the cipo - cipo['allow_requests'] = True + cipo["allow_requests"] = True cipo.update(cipo.dumps(), dbcommit=True, reindex=True) def test_document_availability_failed( - client, item_lib_martigny, document_with_issn, org_martigny): + client, item_lib_martigny, document_with_issn, org_martigny +): """Test document availability with dummy data should fail.""" - res = client.get( - url_for( - 'api_documents.document_availability', - pid='dummy_pid' - ) - ) + res = client.get(url_for("api_documents.document_availability", pid="dummy_pid")) assert res.status_code == 404 res = client.get( - url_for( - 'api_documents.document_availability', - pid=document_with_issn.pid - ) + url_for("api_documents.document_availability", pid=document_with_issn.pid) ) assert res.status_code == 200 - assert not res.json.get('available') + assert not res.json.get("available") res = client.get( url_for( - 'api_documents.document_availability', + "api_documents.document_availability", pid=document_with_issn.pid, - view_code=org_martigny['code'] + view_code=org_martigny["code"], ) ) assert res.status_code == 
200 - assert not res.json.get('available') + assert not res.json.get("available") def test_item_availability_failed(client, librarian2_martigny): """Test item availability with dummy data should fail.""" - res = client.get( - url_for( - 'api_item.item_availability', - pid='dummy_pid' - ) - ) + res = client.get(url_for("api_item.item_availability", pid="dummy_pid")) assert res.status_code == 404 def test_item_availability_extra(client, item_lib_sion): """Test item availability with extra parameters.""" - res = client.get( - url_for( - 'api_item.item_availability', - pid=item_lib_sion.pid - ) - ) - assert list(res.json.keys()) == ['available'] + res = client.get(url_for("api_item.item_availability", pid=item_lib_sion.pid)) + assert list(res.json.keys()) == ["available"] res = client.get( - url_for( - 'api_item.item_availability', - pid=item_lib_sion.pid, - more_info=1 - ) + url_for("api_item.item_availability", pid=item_lib_sion.pid, more_info=1) ) - assert list(res.json.keys()) == \ - ['available', 'circulation_message', 'number_of_request', 'status'] + assert list(res.json.keys()) == [ + "available", + "circulation_message", + "number_of_request", + "status", + ] def test_holding_availability(client, holding_lib_martigny): """Test holding availability endpoint.""" - res = client.get( - url_for( - 'api_holding.holding_availability', - pid='dummy_pid' - ) - ) + res = client.get(url_for("api_holding.holding_availability", pid="dummy_pid")) assert res.status_code == 404 res = client.get( - url_for( - 'api_holding.holding_availability', - pid=holding_lib_martigny.pid - ) + url_for("api_holding.holding_availability", pid=holding_lib_martigny.pid) ) assert res.status_code == 200 - assert 'available' in res.json + assert "available" in res.json diff --git a/tests/api/test_circ_bug.py b/tests/api/test_circ_bug.py index 57522218e6..28bd8eff07 100644 --- a/tests/api/test_circ_bug.py +++ b/tests/api/test_circ_bug.py @@ -25,139 +25,151 @@ def test_document_with_one_item_attached_bug( - client, librarian_martigny, patron_martigny, - patron2_martigny, loc_public_martigny, - item_type_standard_martigny, item_lib_martigny, json_header, - circulation_policies, lib_martigny): + client, + librarian_martigny, + patron_martigny, + patron2_martigny, + loc_public_martigny, + item_type_standard_martigny, + item_lib_martigny, + json_header, + circulation_policies, + lib_martigny, +): """Test document with one item.""" login_user_via_session(client, librarian_martigny.user) # checkout first item1 to patron res, data = postdata( client, - 'api_item.checkout', + "api_item.checkout", dict( item_pid=item_lib_martigny.pid, patron_pid=patron_martigny.pid, transaction_library_pid=lib_martigny.pid, - transaction_user_pid=librarian_martigny.pid - ) + transaction_user_pid=librarian_martigny.pid, + ), ) assert res.status_code == 200 - actions = data.get('action_applied') - loan_pid = actions[LoanAction.CHECKOUT].get('pid') + actions = data.get("action_applied") + loan_pid = actions[LoanAction.CHECKOUT].get("pid") # request first item by patron2 res, data = postdata( client, - 'api_item.librarian_request', + "api_item.librarian_request", dict( item_pid=item_lib_martigny.pid, pickup_location_pid=loc_public_martigny.pid, patron_pid=patron2_martigny.pid, transaction_library_pid=lib_martigny.pid, - transaction_user_pid=librarian_martigny.pid - ) + transaction_user_pid=librarian_martigny.pid, + ), ) assert res.status_code == 200 - actions = data.get('action_applied') - loan2_pid = actions[LoanAction.REQUEST].get('pid') + 
actions = data.get("action_applied") + loan2_pid = actions[LoanAction.REQUEST].get("pid") # checkin the first item res, data = postdata( client, - 'api_item.checkin', + "api_item.checkin", dict( item_pid=item_lib_martigny.pid, pid=loan_pid, transaction_location_pid=loc_public_martigny.pid, - transaction_user_pid=librarian_martigny.pid - ) + transaction_user_pid=librarian_martigny.pid, + ), ) assert res.status_code == 200 assert item_lib_martigny.number_of_requests() == 1 res, data = postdata( client, - 'api_item.cancel_item_request', + "api_item.cancel_item_request", dict( pid=loan2_pid, transaction_location_pid=loc_public_martigny.pid, - transaction_user_pid=librarian_martigny.pid - ) + transaction_user_pid=librarian_martigny.pid, + ), ) assert res.status_code == 200 assert item_lib_martigny.number_of_requests() == 0 -def test_document_with_items_attached_bug(client, librarian_martigny, - patron_martigny, - patron2_martigny, - item2_lib_martigny, - loc_public_martigny, - item_type_standard_martigny, - item_lib_martigny, json_header, - circulation_policies, lib_martigny): +def test_document_with_items_attached_bug( + client, + librarian_martigny, + patron_martigny, + patron2_martigny, + item2_lib_martigny, + loc_public_martigny, + item_type_standard_martigny, + item_lib_martigny, + json_header, + circulation_policies, + lib_martigny, +): """Test document with multiple items.""" login_user_via_session(client, librarian_martigny.user) # checkout first item1 to patron res, data = postdata( client, - 'api_item.checkout', + "api_item.checkout", dict( item_pid=item_lib_martigny.pid, patron_pid=patron_martigny.pid, transaction_library_pid=lib_martigny.pid, - transaction_user_pid=librarian_martigny.pid - ) + transaction_user_pid=librarian_martigny.pid, + ), ) assert res.status_code == 200 - actions = data.get('action_applied') - loan_pid = actions[LoanAction.CHECKOUT].get('pid') + actions = data.get("action_applied") + loan_pid = actions[LoanAction.CHECKOUT].get("pid") # checkout second item2 to patron res, data = postdata( client, - 'api_item.checkout', + "api_item.checkout", dict( item_pid=item2_lib_martigny.pid, patron_pid=patron_martigny.pid, transaction_library_pid=lib_martigny.pid, - transaction_user_pid=librarian_martigny.pid - ) + transaction_user_pid=librarian_martigny.pid, + ), ) assert res.status_code == 200 - actions = data.get('action_applied') - loan2_pid = actions[LoanAction.CHECKOUT].get('pid') + actions = data.get("action_applied") + loan2_pid = actions[LoanAction.CHECKOUT].get("pid") # request first item by patron2 res, data = postdata( client, - 'api_item.librarian_request', + "api_item.librarian_request", dict( item_pid=item_lib_martigny.pid, pickup_location_pid=loc_public_martigny.pid, patron_pid=patron2_martigny.pid, transaction_library_pid=lib_martigny.pid, - transaction_user_pid=librarian_martigny.pid - ) + transaction_user_pid=librarian_martigny.pid, + ), ) assert res.status_code == 200 # request second item by patron2 res, data = postdata( client, - 'api_item.librarian_request', + "api_item.librarian_request", dict( item_pid=item2_lib_martigny.pid, pickup_location_pid=loc_public_martigny.pid, patron_pid=patron2_martigny.pid, transaction_library_pid=lib_martigny.pid, - transaction_user_pid=librarian_martigny.pid - ) + transaction_user_pid=librarian_martigny.pid, + ), ) assert res.status_code == 200 assert item_lib_martigny.number_of_requests() == 1 @@ -166,13 +178,13 @@ def test_document_with_items_attached_bug(client, librarian_martigny, # checkin the first item res, data 
= postdata( client, - 'api_item.checkin', + "api_item.checkin", dict( item_pid=item_lib_martigny.pid, pid=loan_pid, transaction_location_pid=loc_public_martigny.pid, - transaction_user_pid=librarian_martigny.pid - ) + transaction_user_pid=librarian_martigny.pid, + ), ) assert res.status_code == 200 @@ -182,13 +194,13 @@ def test_document_with_items_attached_bug(client, librarian_martigny, # checkin the second item res, data = postdata( client, - 'api_item.checkin', + "api_item.checkin", dict( item_pid=item2_lib_martigny.pid, pid=loan2_pid, transaction_location_pid=loc_public_martigny.pid, - transaction_user_pid=librarian_martigny.pid - ) + transaction_user_pid=librarian_martigny.pid, + ), ) assert res.status_code == 200 diff --git a/tests/api/test_commons_api.py b/tests/api/test_commons_api.py index b3f35af910..72917bd3cb 100644 --- a/tests/api/test_commons_api.py +++ b/tests/api/test_commons_api.py @@ -24,8 +24,9 @@ from invenio_accounts.testutils import login_user_via_session from utils import get_json, mock_response, postdata -from rero_ils.modules.acquisition.budgets.permissions import \ - search_action as budget_search_action +from rero_ils.modules.acquisition.budgets.permissions import ( + search_action as budget_search_action, +) from rero_ils.modules.permissions import PermissionContext, can_use_debug_mode from rero_ils.modules.receivers import process_boosting from rero_ils.modules.users.models import UserRole @@ -33,30 +34,35 @@ def test_librarian_delete_permission_factory( - client, librarian_fully, org_martigny, lib_martigny): - """Test librarian_delete_permission_factory """ + client, librarian_fully, org_martigny, lib_martigny +): + """Test librarian_delete_permission_factory""" login_user_via_session(client, librarian_fully.user) - assert type(librarian_delete_permission_factory( - None, - credentials_only=True - )) == Permission + assert ( + type(librarian_delete_permission_factory(None, credentials_only=True)) + == Permission + ) assert librarian_delete_permission_factory(org_martigny) is not None def test_system_librarian_permissions( - client, json_header, system_librarian_martigny, - patron_martigny, patron_type_adults_martigny, - librarian_fully): + client, + json_header, + system_librarian_martigny, + patron_martigny, + patron_type_adults_martigny, + librarian_fully, +): """Test system_librarian permissions.""" # Login as system_librarian login_user_via_session(client, system_librarian_martigny.user) # can manage all types of patron roles - role_url = url_for('api_patrons.get_roles_management_permissions') + role_url = url_for("api_patrons.get_roles_management_permissions") res = client.get(role_url) assert res.status_code == 200 data = get_json(res) - assert UserRole.FULL_PERMISSIONS in data['allowed_roles'] + assert UserRole.FULL_PERMISSIONS in data["allowed_roles"] def test_permission_exposition(app, db, client, system_librarian_martigny): @@ -64,61 +70,60 @@ def test_permission_exposition(app, db, client, system_librarian_martigny): login_user_via_session(client, system_librarian_martigny.user) # test exposition by role ================================================= - res = client.get(url_for( - 'api_blueprint.permissions_by_role', - role='dummy-role' - )) + res = client.get(url_for("api_blueprint.permissions_by_role", role="dummy-role")) data = get_json(res) assert res.status_code == 200 assert not data - res = client.get(url_for( - 'api_blueprint.permissions_by_role', - role=UserRole.PROFESSIONAL_READ_ONLY - )) + res = client.get( + url_for( + 
"api_blueprint.permissions_by_role", role=UserRole.PROFESSIONAL_READ_ONLY + ) + ) data = get_json(res) assert res.status_code == 200 assert UserRole.PROFESSIONAL_READ_ONLY in data - res = client.get(url_for( - 'api_blueprint.permissions_by_role', - role=UserRole.PROFESSIONAL_ROLES - )) + res = client.get( + url_for("api_blueprint.permissions_by_role", role=UserRole.PROFESSIONAL_ROLES) + ) data = get_json(res) assert res.status_code == 200 assert all(role in data for role in UserRole.PROFESSIONAL_ROLES) # test exposition by patron =============================================== - res = client.get(url_for( - 'api_blueprint.permissions_by_patron', - patron_pid=system_librarian_martigny.pid - )) + res = client.get( + url_for( + "api_blueprint.permissions_by_patron", + patron_pid=system_librarian_martigny.pid, + ) + ) data = get_json(res) assert res.status_code == 200 - assert len(data) == len(app.extensions['invenio-access'].actions) + assert len(data) == len(app.extensions["invenio-access"].actions) # system librarian should access to 'can-use-debug-mode' - perm = [p for p in data if p['name'] == can_use_debug_mode.value][0] - assert perm['can'] + perm = [p for p in data if p["name"] == can_use_debug_mode.value][0] + assert perm["can"] # add a restriction specific for this user - db.session.add(ActionUsers.deny( - can_use_debug_mode, - user_id=system_librarian_martigny.user.id - )) + db.session.add( + ActionUsers.deny(can_use_debug_mode, user_id=system_librarian_martigny.user.id) + ) db.session.commit() - res = client.get(url_for( - 'api_blueprint.permissions_by_patron', - patron_pid=system_librarian_martigny.pid - )) + res = client.get( + url_for( + "api_blueprint.permissions_by_patron", + patron_pid=system_librarian_martigny.pid, + ) + ) data = get_json(res) assert res.status_code == 200 - perm = [p for p in data if p['name'] == can_use_debug_mode.value][0] - assert not perm['can'] + perm = [p for p in data if p["name"] == can_use_debug_mode.value][0] + assert not perm["can"] # reset DB - ActionUsers\ - .query_by_action(can_use_debug_mode)\ - .filter(ActionUsers.user_id == system_librarian_martigny.user.id)\ - .delete(synchronize_session=False) + ActionUsers.query_by_action(can_use_debug_mode).filter( + ActionUsers.user_id == system_librarian_martigny.user.id + ).delete(synchronize_session=False) db.session.commit() @@ -130,40 +135,53 @@ def test_permission_management(client, system_librarian_martigny): # 2) try with bad payload data # 3) try with not implemented context # 4) try with bad parameters - res, _ = postdata(client, 'api_blueprint.permission_management', {}) + res, _ = postdata(client, "api_blueprint.permission_management", {}) assert res.status_code == 401 login_user_via_session(client, system_librarian_martigny.user) - res, data = postdata(client, 'api_blueprint.permission_management', {}) + res, data = postdata(client, "api_blueprint.permission_management", {}) assert res.status_code == 400 - assert 'context' in data['message'] - res, data = postdata(client, 'api_blueprint.permission_management', dict( - context=PermissionContext.BY_ROLE, - permission=budget_search_action.value, - )) + assert "context" in data["message"] + res, data = postdata( + client, + "api_blueprint.permission_management", + dict( + context=PermissionContext.BY_ROLE, + permission=budget_search_action.value, + ), + ) assert res.status_code == 400 - assert 'role_name' in data['message'] + assert "role_name" in data["message"] - res, data = postdata(client, 'api_blueprint.permission_management', dict( - 
context=PermissionContext.BY_USER, - permission=budget_search_action.value - )) + res, data = postdata( + client, + "api_blueprint.permission_management", + dict(context=PermissionContext.BY_USER, permission=budget_search_action.value), + ) assert res.status_code == 501 - res, data = postdata(client, 'api_blueprint.permission_management', dict( - context=PermissionContext.BY_ROLE, - permission='unknown-permission', - role_name=UserRole.PROFESSIONAL_READ_ONLY - )) + res, data = postdata( + client, + "api_blueprint.permission_management", + dict( + context=PermissionContext.BY_ROLE, + permission="unknown-permission", + role_name=UserRole.PROFESSIONAL_READ_ONLY, + ), + ) assert res.status_code == 400 - assert 'not found' in data['message'] - res, data = postdata(client, 'api_blueprint.permission_management', dict( - context=PermissionContext.BY_ROLE, - permission=budget_search_action.value, - role_name='dummy-role' - )) + assert "not found" in data["message"] + res, data = postdata( + client, + "api_blueprint.permission_management", + dict( + context=PermissionContext.BY_ROLE, + permission=budget_search_action.value, + role_name="dummy-role", + ), + ) assert res.status_code == 400 - assert 'not found' in data['message'] + assert "not found" in data["message"] # Real test begins now # 1) test user has permission @@ -174,11 +192,11 @@ def test_permission_management(client, system_librarian_martigny): permission = Permission(budget_search_action) assert fake_identity.can(permission) - perm_url = url_for('api_blueprint.permission_management') + perm_url = url_for("api_blueprint.permission_management") perm_data = dict( context=PermissionContext.BY_ROLE, permission=budget_search_action.value, - role_name=UserRole.PROFESSIONAL_READ_ONLY + role_name=UserRole.PROFESSIONAL_READ_ONLY, ) res = client.delete(perm_url, json=perm_data) assert res.status_code == 204 @@ -189,32 +207,28 @@ def test_permission_management(client, system_librarian_martigny): assert fake_identity.can(permission) -@mock.patch('rero_ils.modules.decorators.login_and_librarian', - mock.MagicMock()) -@mock.patch('requests.get') +@mock.patch("rero_ils.modules.decorators.login_and_librarian", mock.MagicMock()) +@mock.patch("requests.get") def test_proxy(mock_get, client): """Test proxy.""" - response = client.get(url_for('api_blueprint.proxy')) + response = client.get(url_for("api_blueprint.proxy")) assert response.status_code == 400 - assert response.json['message'] == 'Missing `url` parameter' + assert response.json["message"] == "Missing `url` parameter" mock_get.return_value = mock_response(status=418) - response = client.get(url_for( - 'api_blueprint.proxy', - url='http://mocked.url') - ) + response = client.get(url_for("api_blueprint.proxy", url="http://mocked.url")) assert response.status_code == 418 def test_boosting_fields(app): """Test the boosting configuration.""" # the configuration should exist - assert app.config.get('RERO_ILS_QUERY_BOOSTING') + assert app.config.get("RERO_ILS_QUERY_BOOSTING") # several cases of configurations - assert process_boosting('documents', ['title.*']) == ['title.*'] - assert 'title.*' in process_boosting('documents', ['*']) - assert 'title.*^2' in process_boosting('documents', ['title.*^2', '*']) + assert process_boosting("documents", ["title.*"]) == ["title.*"] + assert "title.*" in process_boosting("documents", ["*"]) + assert "title.*^2" in process_boosting("documents", ["title.*^2", "*"]) # test fields - assert 'fulltext' in process_boosting('documents', ['*']) - assert 'fulltext.*' in 
process_boosting('documents', ['*']) + assert "fulltext" in process_boosting("documents", ["*"]) + assert "fulltext.*" in process_boosting("documents", ["*"]) diff --git a/tests/api/test_exports.py b/tests/api/test_exports.py index 4807d73e4a..de4eea17ec 100644 --- a/tests/api/test_exports.py +++ b/tests/api/test_exports.py @@ -26,12 +26,13 @@ from rero_ils.modules.utils import get_ref_for_pid -def test_loans_exports(app, client, librarian_martigny, - loan_pending_martigny, loan2_validated_martigny): +def test_loans_exports( + app, client, librarian_martigny, loan_pending_martigny, loan2_validated_martigny +): """Test loans streamed exportation.""" # STEP#1 :: CHECK EXPORT PERMISSION # Only authenticated users can export loans. - url = url_for('api_exports.loan_export') + url = url_for("api_exports.loan_export") res = client.get(url) assert res.status_code == 401 @@ -44,28 +45,40 @@ def test_loans_exports(app, client, librarian_martigny, header = data.pop(0) header_columns = [ - 'pid', 'document_title', 'item_barcode', 'item_call_numbers', - 'patron_name', 'patron_barcode', 'patron_email', 'patron_type', - 'owning_library', 'transaction_library', 'pickup_library', - 'state', 'end_date', 'request_expire_date' + "pid", + "document_title", + "item_barcode", + "item_call_numbers", + "patron_name", + "patron_barcode", + "patron_email", + "patron_type", + "owning_library", + "transaction_library", + "pickup_library", + "state", + "end_date", + "request_expire_date", ] assert all(field in header for field in header_columns) assert len(data) == 2 def test_patron_transaction_events_exports( - app, client, librarian_martigny, + app, + client, + librarian_martigny, patron_transaction_overdue_event_martigny, patron_martigny, item4_lib_martigny, patron_type_children_martigny, - document + document, ): """Test patron transaction events exportation.""" ptre = patron_transaction_overdue_event_martigny # STEP#1 :: CHECK EXPORT PERMISSION # Only authenticated users can export loans. - url = url_for('api_exports.patron_transaction_events_export') + url = url_for("api_exports.patron_transaction_events_export") res = client.get(url) assert res.status_code == 401 @@ -73,9 +86,7 @@ def test_patron_transaction_events_exports( # Log in as librarian and test the export endpoint. 
# DEV NOTE :: update `operator` to maximize the code coverage login_user_via_session(client, librarian_martigny.user) - ptre['operator'] = { - '$ref': get_ref_for_pid('ptrn', librarian_martigny.pid) - } + ptre["operator"] = {"$ref": get_ref_for_pid("ptrn", librarian_martigny.pid)} ptre.update(ptre, dbcommit=False, reindex=True) # If some related resources are missing, this will not cause any @@ -84,7 +95,7 @@ def test_patron_transaction_events_exports( (patron_martigny, False), (item4_lib_martigny, False), (document, False), - (patron_type_children_martigny, True) + (patron_type_children_martigny, True), ]: resource.delete(force=True, dbcommit=False, delindex=delindex) res = client.get(url) @@ -100,10 +111,21 @@ def test_patron_transaction_events_exports( header = data.pop(0) header_columns = [ - 'category', 'type', 'subtype', 'transaction_date', 'amount', - 'patron_name', 'patron_barcode', 'patron_email', 'patron_type', - 'document_pid', 'document_title', 'item_barcode', - 'item_owning_library', 'transaction_library', 'operator_name' + "category", + "type", + "subtype", + "transaction_date", + "amount", + "patron_name", + "patron_barcode", + "patron_email", + "patron_type", + "document_pid", + "document_title", + "item_barcode", + "item_owning_library", + "transaction_library", + "operator_name", ] assert all(field in header for field in header_columns) assert len(data) == 1 diff --git a/tests/api/test_external_services.py b/tests/api/test_external_services.py index ef0f0c2da5..749cfa8098 100644 --- a/tests/api/test_external_services.py +++ b/tests/api/test_external_services.py @@ -33,814 +33,635 @@ def test_imports_get_config(client, librarian_martigny): """Get the configuration for the external import services.""" login_user_via_session(client, librarian_martigny.user) - res = client.get(url_for('api_import.get_config')) + res = client.get(url_for("api_import.get_config")) assert res.status_code == 200 data = get_json(res) assert data - assert all('weight' in source for source in data) - - -@mock.patch('requests.get') -@mock.patch('rero_ils.permissions.login_and_librarian', mock.MagicMock()) -def test_documents_import_bnf_ean(mock_get, client, bnf_ean_any_123, - bnf_ean_any_9782070541270, - bnf_ean_any_9782072862014, - bnf_anywhere_all_peter, - bnf_recordid_all_FRBNF370903960000006): + assert all("weight" in source for source in data) + + +@mock.patch("requests.get") +@mock.patch("rero_ils.permissions.login_and_librarian", mock.MagicMock()) +def test_documents_import_bnf_ean( + mock_get, + client, + bnf_ean_any_123, + bnf_ean_any_9782070541270, + bnf_ean_any_9782072862014, + bnf_anywhere_all_peter, + bnf_recordid_all_FRBNF370903960000006, +): """Test document import from bnf.""" - mock_get.return_value = mock_response( - content=bnf_ean_any_123 - ) - res = client.get(url_for( - 'api_imports.import_bnf', - q='ean:any:123', - no_cache=1 - )) + mock_get.return_value = mock_response(content=bnf_ean_any_123) + res = client.get(url_for("api_imports.import_bnf", q="ean:any:123", no_cache=1)) assert res.status_code == 200 data = get_json(res) - assert not data.get('metadata') + assert not data.get("metadata") - mock_get.return_value = mock_response( - content=bnf_ean_any_9782070541270 + mock_get.return_value = mock_response(content=bnf_ean_any_9782070541270) + res = client.get( + url_for("api_imports.import_bnf", q="ean:any:9782070541270", no_cache=1) ) - res = client.get(url_for( - 'api_imports.import_bnf', - q='ean:any:9782070541270', - no_cache=1 - )) assert res.status_code == 200 - data 
= get_json(res).get('hits').get('hits')[0].get('metadata') - assert data['pid'] == 'FRBNF370903960000006' + data = get_json(res).get("hits").get("hits")[0].get("metadata") + assert data["pid"] == "FRBNF370903960000006" assert Document.create(data) - mock_get.return_value = mock_response( - content=bnf_ean_any_9782072862014 + mock_get.return_value = mock_response(content=bnf_ean_any_9782072862014) + res = client.get( + url_for("api_imports.import_bnf", q="ean:any:9782072862014", no_cache=1) ) - res = client.get(url_for( - 'api_imports.import_bnf', - q='ean:any:9782072862014', - no_cache=1 - )) assert res.status_code == 200 res_j = get_json(res) - data = res_j.get('hits').get('hits')[0].get('metadata') - data.update({ - "$schema": "https://bib.rero.ch/schemas/documents/document-v0.0.1.json" - }) + data = res_j.get("hits").get("hits")[0].get("metadata") + data.update( + {"$schema": "https://bib.rero.ch/schemas/documents/document-v0.0.1.json"} + ) assert Document.create(data) - marc21_link = res_j.get('hits').get('hits')[0].get('links').get('marc21') + marc21_link = res_j.get("hits").get("hits")[0].get("links").get("marc21") res = client.get(marc21_link) data = get_json(res) - assert data[0][0] == 'leader' + assert data[0][0] == "leader" - res = client.get(url_for( - 'api_imports.import_bnf', - q='', - no_cache=1 - )) + res = client.get(url_for("api_imports.import_bnf", q="", no_cache=1)) assert res.status_code == 200 assert get_json(res) == { - 'aggregations': {}, - 'hits': { - 'hits': [], - 'remote_total': 0, - 'total': 0 - } + "aggregations": {}, + "hits": {"hits": [], "remote_total": 0, "total": 0}, } - mock_get.return_value = mock_response( - content=bnf_anywhere_all_peter - ) - res = client.get(url_for( - 'api_imports.import_bnf', - q='peter', - no_cache=1 - )) + mock_get.return_value = mock_response(content=bnf_anywhere_all_peter) + res = client.get(url_for("api_imports.import_bnf", q="peter", no_cache=1)) assert res.status_code == 200 - unfiltered_total = get_json(res)['hits']['remote_total'] + unfiltered_total = get_json(res)["hits"]["remote_total"] assert get_json(res) - res = client.get(url_for( - 'api_imports.import_bnf', - q='peter', - year=2000, - format='rerojson' - )) + res = client.get( + url_for("api_imports.import_bnf", q="peter", year=2000, format="rerojson") + ) assert res.status_code == 200 - assert get_json(res)['hits']['total'] < unfiltered_total + assert get_json(res)["hits"]["total"] < unfiltered_total - res = client.get(url_for( - 'api_imports.import_bnf', - q='peter', - author='Peter Owen', - format='rerojson' - )) + res = client.get( + url_for( + "api_imports.import_bnf", q="peter", author="Peter Owen", format="rerojson" + ) + ) assert res.status_code == 200 - assert get_json(res)['hits']['total'] < unfiltered_total + assert get_json(res)["hits"]["total"] < unfiltered_total - res = client.get(url_for( - 'api_imports.import_bnf', - q='peter', - document_type='docmaintype_book', - document_subtype='docsubtype_other_book', - format='rerojson' - )) + res = client.get( + url_for( + "api_imports.import_bnf", + q="peter", + document_type="docmaintype_book", + document_subtype="docsubtype_other_book", + format="rerojson", + ) + ) assert res.status_code == 200 - assert get_json(res)['hits']['total'] < unfiltered_total + assert get_json(res)["hits"]["total"] < unfiltered_total - mock_get.return_value = mock_response( - content=bnf_recordid_all_FRBNF370903960000006 + mock_get.return_value = mock_response(content=bnf_recordid_all_FRBNF370903960000006) + res = client.get( + 
url_for("api_imports.import_bnf_record", id="FRBNF370903960000006", no_cache=1) ) - res = client.get(url_for( - 'api_imports.import_bnf_record', - id='FRBNF370903960000006', - no_cache=1 - )) assert res.status_code == 200 - assert get_json(res).get('metadata', {}).get('identifiedBy') + assert get_json(res).get("metadata", {}).get("identifiedBy") - res = client.get(url_for( - 'api_imports.import_bnf_record', - id='FRBNF370903960000006', - format='rerojson' - )) + res = client.get( + url_for( + "api_imports.import_bnf_record", + id="FRBNF370903960000006", + format="rerojson", + ) + ) assert res.status_code == 200 - assert get_json(res).get('metadata', {}).get('ui_title_text') + assert get_json(res).get("metadata", {}).get("ui_title_text") - res = client.get(url_for( - 'api_imports.import_bnf_record', - id='FRBNF370903960000006', - format='marc' - )) + res = client.get( + url_for( + "api_imports.import_bnf_record", id="FRBNF370903960000006", format="marc" + ) + ) assert res.status_code == 200 - assert get_json(res)[1][1] == 'FRBNF370903960000006' + assert get_json(res)[1][1] == "FRBNF370903960000006" -@mock.patch('requests.get') -@mock.patch('rero_ils.permissions.login_and_librarian', mock.MagicMock()) -def test_documents_import_loc_isbn(mock_get, client, loc_isbn_all_123, - loc_isbn_all_9781604689808, - loc_isbn_all_9780821417478, - loc_anywhere_all_samuelson, - loc_recordid_all_2014043016): +@mock.patch("requests.get") +@mock.patch("rero_ils.permissions.login_and_librarian", mock.MagicMock()) +def test_documents_import_loc_isbn( + mock_get, + client, + loc_isbn_all_123, + loc_isbn_all_9781604689808, + loc_isbn_all_9780821417478, + loc_anywhere_all_samuelson, + loc_recordid_all_2014043016, +): """Test document import from LoC.""" - mock_get.return_value = mock_response( - content=loc_isbn_all_123 - ) - res = client.get(url_for( - 'api_imports.import_loc', - q='isbn:all:123', - no_cache=1 - )) + mock_get.return_value = mock_response(content=loc_isbn_all_123) + res = client.get(url_for("api_imports.import_loc", q="isbn:all:123", no_cache=1)) assert res.status_code == 200 data = get_json(res) - assert not data.get('metadata') + assert not data.get("metadata") - mock_get.return_value = mock_response( - content=loc_isbn_all_9781604689808 + mock_get.return_value = mock_response(content=loc_isbn_all_9781604689808) + res = client.get( + url_for("api_imports.import_loc", q="isbn:all:9781604689808", no_cache=1) ) - res = client.get(url_for( - 'api_imports.import_loc', - q='isbn:all:9781604689808', - no_cache=1 - )) assert res.status_code == 200 - data = get_json(res).get('hits').get('hits')[0].get('metadata') - assert data['pid'] == '2018032710' + data = get_json(res).get("hits").get("hits")[0].get("metadata") + assert data["pid"] == "2018032710" assert Document.create(data) - mock_get.return_value = mock_response( - content=loc_isbn_all_9780821417478 + mock_get.return_value = mock_response(content=loc_isbn_all_9780821417478) + res = client.get( + url_for("api_imports.import_loc", q="isbn:all:9780821417478", no_cache=1) ) - res = client.get(url_for( - 'api_imports.import_loc', - q='isbn:all:9780821417478', - no_cache=1 - )) assert res.status_code == 200 res_j = get_json(res) - data = res_j.get('hits').get('hits')[0].get('metadata') - data.update({ - "$schema": "https://bib.rero.ch/schemas/documents/document-v0.0.1.json" - }) + data = res_j.get("hits").get("hits")[0].get("metadata") + data.update( + {"$schema": "https://bib.rero.ch/schemas/documents/document-v0.0.1.json"} + ) assert 
Document.create(data) - marc21_link = res_j.get('hits').get('hits')[0].get('links').get('marc21') + marc21_link = res_j.get("hits").get("hits")[0].get("links").get("marc21") res = client.get(marc21_link) data = get_json(res) - assert data[0][0] == 'leader' + assert data[0][0] == "leader" - res = client.get(url_for( - 'api_imports.import_loc', - q='', - no_cache=1 - )) + res = client.get(url_for("api_imports.import_loc", q="", no_cache=1)) assert res.status_code == 200 assert get_json(res) == { - 'aggregations': {}, - 'hits': { - 'hits': [], - 'remote_total': 0, - 'total': 0 - } + "aggregations": {}, + "hits": {"hits": [], "remote_total": 0, "total": 0}, } - mock_get.return_value = mock_response( - content=loc_anywhere_all_samuelson - ) - res = client.get(url_for( - 'api_imports.import_loc', - q='samuelson', - no_cache=1 - )) + mock_get.return_value = mock_response(content=loc_anywhere_all_samuelson) + res = client.get(url_for("api_imports.import_loc", q="samuelson", no_cache=1)) assert res.status_code == 200 - unfiltered_total = get_json(res)['hits']['remote_total'] + unfiltered_total = get_json(res)["hits"]["remote_total"] assert get_json(res) - res = client.get(url_for( - 'api_imports.import_loc', - q='samuelson', - year=2019, - format='rerojson' - )) + res = client.get( + url_for("api_imports.import_loc", q="samuelson", year=2019, format="rerojson") + ) assert res.status_code == 200 - assert get_json(res)['hits']['total'] < unfiltered_total + assert get_json(res)["hits"]["total"] < unfiltered_total - res = client.get(url_for( - 'api_imports.import_loc', - q='samuelson', - author='Samuelson, Paul', - format='rerojson' - )) + res = client.get( + url_for( + "api_imports.import_loc", + q="samuelson", + author="Samuelson, Paul", + format="rerojson", + ) + ) assert res.status_code == 200 - assert get_json(res)['hits']['total'] < unfiltered_total + assert get_json(res)["hits"]["total"] < unfiltered_total - res = client.get(url_for( - 'api_imports.import_loc', - q='samuelson', - document_type='docmaintype_book', - format='rerojson' - )) + res = client.get( + url_for( + "api_imports.import_loc", + q="samuelson", + document_type="docmaintype_book", + format="rerojson", + ) + ) assert res.status_code == 200 - assert get_json(res)['hits']['total'] < unfiltered_total + assert get_json(res)["hits"]["total"] < unfiltered_total - mock_get.return_value = mock_response( - content=loc_recordid_all_2014043016 + mock_get.return_value = mock_response(content=loc_recordid_all_2014043016) + res = client.get( + url_for("api_imports.import_loc_record", id="2014043016", no_cache=1) ) - res = client.get(url_for( - 'api_imports.import_loc_record', - id='2014043016', - no_cache=1 - )) assert res.status_code == 200 - assert get_json(res).get('metadata', {}).get('identifiedBy') + assert get_json(res).get("metadata", {}).get("identifiedBy") - res = client.get(url_for( - 'api_imports.import_loc_record', - id='2014043016', - format='rerojson' - )) + res = client.get( + url_for("api_imports.import_loc_record", id="2014043016", format="rerojson") + ) assert res.status_code == 200 - assert get_json(res).get('metadata', {}).get('ui_title_text') + assert get_json(res).get("metadata", {}).get("ui_title_text") -@mock.patch('requests.get') +@mock.patch("requests.get") def test_documents_import_loc_missing_id(mock_get, client, loc_without_010): """Test document import from LoC.""" - mock_get.return_value = mock_response( - content=loc_without_010 - ) + mock_get.return_value = mock_response(content=loc_without_010) results, 
status_code = LoCImport().search_records( - what='test', - relation='all', - where='anywhere', - max_results=100, - no_cache=True + what="test", relation="all", where="anywhere", max_results=100, no_cache=True ) assert status_code == 200 - assert results['hits']['total']['value'] == 9 - assert len(results['hits']['hits']) == 9 - - -@mock.patch('requests.get') -@mock.patch('rero_ils.permissions.login_and_librarian', mock.MagicMock()) -def test_documents_import_dnb_isbn(mock_get, client, dnb_isbn_123, - dnb_isbn_9783862729852, - dnb_isbn_3858818526, - dnb_samuelson, - dnb_recordid_1214325203): + assert results["hits"]["total"]["value"] == 9 + assert len(results["hits"]["hits"]) == 9 + + +@mock.patch("requests.get") +@mock.patch("rero_ils.permissions.login_and_librarian", mock.MagicMock()) +def test_documents_import_dnb_isbn( + mock_get, + client, + dnb_isbn_123, + dnb_isbn_9783862729852, + dnb_isbn_3858818526, + dnb_samuelson, + dnb_recordid_1214325203, +): """Test document import from DNB.""" - mock_get.return_value = mock_response( - content=dnb_isbn_123 - ) - res = client.get(url_for( - 'api_imports.import_dnb', - q='123', - no_cache=1 - )) + mock_get.return_value = mock_response(content=dnb_isbn_123) + res = client.get(url_for("api_imports.import_dnb", q="123", no_cache=1)) assert res.status_code == 200 data = get_json(res) - assert not data.get('metadata') + assert not data.get("metadata") - mock_get.return_value = mock_response( - content=dnb_isbn_3858818526 - ) - res = client.get(url_for( - 'api_imports.import_dnb', - q='3858818526', - no_cache=1 - )) + mock_get.return_value = mock_response(content=dnb_isbn_3858818526) + res = client.get(url_for("api_imports.import_dnb", q="3858818526", no_cache=1)) assert res.status_code == 200 res_j = get_json(res) - data = res_j.get('hits').get('hits')[0].get('metadata') - data.update({ - "$schema": "https://bib.rero.ch/schemas/documents/document-v0.0.1.json" - }) + data = res_j.get("hits").get("hits")[0].get("metadata") + data.update( + {"$schema": "https://bib.rero.ch/schemas/documents/document-v0.0.1.json"} + ) data = clean_text(data) assert Document.create(data) - marc21_link = res_j.get('hits').get('hits')[0].get('links').get('marc21') + marc21_link = res_j.get("hits").get("hits")[0].get("links").get("marc21") res = client.get(marc21_link) data = get_json(res) - assert data[0][0] == 'leader' + assert data[0][0] == "leader" - res = client.get(url_for( - 'api_imports.import_dnb', - q='', - no_cache=1 - )) + res = client.get(url_for("api_imports.import_dnb", q="", no_cache=1)) assert res.status_code == 200 assert get_json(res) == { - 'aggregations': {}, - 'hits': { - 'hits': [], - 'remote_total': 0, - 'total': 0 - } + "aggregations": {}, + "hits": {"hits": [], "remote_total": 0, "total": 0}, } - mock_get.return_value = mock_response( - content=dnb_samuelson - ) - res = client.get(url_for( - 'api_imports.import_dnb', - q='samuelson, paul', - no_cache=1 - )) + mock_get.return_value = mock_response(content=dnb_samuelson) + res = client.get(url_for("api_imports.import_dnb", q="samuelson, paul", no_cache=1)) assert res.status_code == 200 - unfiltered_total = get_json(res)['hits']['remote_total'] + unfiltered_total = get_json(res)["hits"]["remote_total"] assert get_json(res) - res = client.get(url_for( - 'api_imports.import_dnb', - q='samuelson, paul', - year=2019 - )) + res = client.get(url_for("api_imports.import_dnb", q="samuelson, paul", year=2019)) assert res.status_code == 200 - assert get_json(res)['hits']['total'] < unfiltered_total + assert 
get_json(res)["hits"]["total"] < unfiltered_total - res = client.get(url_for( - 'api_imports.import_dnb', - q='samuelson, paul', - author='Samuelson, Paul A.' - )) + res = client.get( + url_for( + "api_imports.import_dnb", q="samuelson, paul", author="Samuelson, Paul A." + ) + ) assert res.status_code == 200 - assert get_json(res)['hits']['total'] < unfiltered_total + assert get_json(res)["hits"]["total"] < unfiltered_total - res = client.get(url_for( - 'api_imports.import_dnb', - q='samuelson, paul', - document_type='docmaintype_book' - )) + res = client.get( + url_for( + "api_imports.import_dnb", + q="samuelson, paul", + document_type="docmaintype_book", + ) + ) assert res.status_code == 200 - assert get_json(res)['hits']['total'] < unfiltered_total + assert get_json(res)["hits"]["total"] < unfiltered_total - mock_get.return_value = mock_response( - content=dnb_recordid_1214325203 + mock_get.return_value = mock_response(content=dnb_recordid_1214325203) + res = client.get( + url_for("api_imports.import_dnb_record", id="1214325203", no_cache=1) ) - res = client.get(url_for( - 'api_imports.import_dnb_record', - id='1214325203', - no_cache=1 - )) assert res.status_code == 200 - assert get_json(res).get('metadata', {}).get('identifiedBy') + assert get_json(res).get("metadata", {}).get("identifiedBy") -@mock.patch('requests.get') -@mock.patch('rero_ils.permissions.login_and_librarian', mock.MagicMock()) -def test_documents_import_slsp_isbn(mock_get, client, slsp_anywhere_123, - slsp_isbn_9782296076648, - slsp_isbn_3908497272, - slsp_samuelson, - slsp_recordid_9910137): +@mock.patch("requests.get") +@mock.patch("rero_ils.permissions.login_and_librarian", mock.MagicMock()) +def test_documents_import_slsp_isbn( + mock_get, + client, + slsp_anywhere_123, + slsp_isbn_9782296076648, + slsp_isbn_3908497272, + slsp_samuelson, + slsp_recordid_9910137, +): """Test document import from slsp.""" - mock_get.return_value = mock_response( - content=slsp_anywhere_123 - ) - res = client.get(url_for( - 'api_imports.import_slsp', - q='123', - no_cache=1 - )) + mock_get.return_value = mock_response(content=slsp_anywhere_123) + res = client.get(url_for("api_imports.import_slsp", q="123", no_cache=1)) assert res.status_code == 200 data = get_json(res) - assert not data.get('metadata') + assert not data.get("metadata") - mock_get.return_value = mock_response( - content=slsp_isbn_9782296076648 + mock_get.return_value = mock_response(content=slsp_isbn_9782296076648) + res = client.get( + url_for("api_imports.import_slsp", q="isbn:all:9782296076648", no_cache=1) ) - res = client.get(url_for( - 'api_imports.import_slsp', - q='isbn:all:9782296076648', - no_cache=1 - )) assert res.status_code == 200 - data = get_json(res).get('hits').get('hits')[0].get('metadata') - assert data['pid'] == '991079993319705501' + data = get_json(res).get("hits").get("hits")[0].get("metadata") + assert data["pid"] == "991079993319705501" assert Document.create(data) - mock_get.return_value = mock_response( - content=slsp_isbn_3908497272 + mock_get.return_value = mock_response(content=slsp_isbn_3908497272) + res = client.get( + url_for("api_imports.import_slsp", q="isbn:all:3908497272", no_cache=1) ) - res = client.get(url_for( - 'api_imports.import_slsp', - q='isbn:all:3908497272', - no_cache=1 - )) assert res.status_code == 200 res_j = get_json(res) - data = res_j.get('hits').get('hits')[0].get('metadata') - data.update({ - "$schema": "https://bib.rero.ch/schemas/documents/document-v0.0.1.json" - }) + data = 
res_j.get("hits").get("hits")[0].get("metadata") + data.update( + {"$schema": "https://bib.rero.ch/schemas/documents/document-v0.0.1.json"} + ) assert Document.create(data) - marc21_link = res_j.get('hits').get('hits')[0].get('links').get('marc21') + marc21_link = res_j.get("hits").get("hits")[0].get("links").get("marc21") res = client.get(marc21_link) data = get_json(res) - assert data[0][0] == 'leader' + assert data[0][0] == "leader" - res = client.get(url_for( - 'api_imports.import_slsp', - q='', - no_cache=1 - )) + res = client.get(url_for("api_imports.import_slsp", q="", no_cache=1)) assert res.status_code == 200 assert get_json(res) == { - 'aggregations': {}, - 'hits': { - 'hits': [], - 'remote_total': 0, - 'total': 0 - } + "aggregations": {}, + "hits": {"hits": [], "remote_total": 0, "total": 0}, } - mock_get.return_value = mock_response( - content=slsp_samuelson - ) - res = client.get(url_for( - 'api_imports.import_slsp', - q='samuelson', - no_cache=1 - )) + mock_get.return_value = mock_response(content=slsp_samuelson) + res = client.get(url_for("api_imports.import_slsp", q="samuelson", no_cache=1)) assert res.status_code == 200 - unfiltered_total = get_json(res)['hits']['remote_total'] + unfiltered_total = get_json(res)["hits"]["remote_total"] assert get_json(res) - res = client.get(url_for( - 'api_imports.import_slsp', - q='samuelson', - year=2019, - format='rerojson' - )) + res = client.get( + url_for("api_imports.import_slsp", q="samuelson", year=2019, format="rerojson") + ) assert res.status_code == 200 - assert get_json(res)['hits']['total'] < unfiltered_total + assert get_json(res)["hits"]["total"] < unfiltered_total - res = client.get(url_for( - 'api_imports.import_slsp', - q='samuelson', - author='samuelson', - format='rerojson' - )) + res = client.get( + url_for( + "api_imports.import_slsp", + q="samuelson", + author="samuelson", + format="rerojson", + ) + ) assert res.status_code == 200 - assert get_json(res)['hits']['total'] < unfiltered_total + assert get_json(res)["hits"]["total"] < unfiltered_total - res = client.get(url_for( - 'api_imports.import_slsp', - q='samuelson', - document_type='docmaintype_book', - format='rerojson' - )) + res = client.get( + url_for( + "api_imports.import_slsp", + q="samuelson", + document_type="docmaintype_book", + format="rerojson", + ) + ) assert res.status_code == 200 - assert get_json(res)['hits']['total'] < unfiltered_total + assert get_json(res)["hits"]["total"] < unfiltered_total - mock_get.return_value = mock_response( - content=slsp_recordid_9910137 + mock_get.return_value = mock_response(content=slsp_recordid_9910137) + res = client.get( + url_for( + "api_imports.import_slsp_record", + id="recordid:all:991013724759705501", + no_cache=1, + ) ) - res = client.get(url_for( - 'api_imports.import_slsp_record', - id='recordid:all:991013724759705501', - no_cache=1 - )) assert res.status_code == 200 - assert get_json(res).get('metadata', {}).get('identifiedBy') + assert get_json(res).get("metadata", {}).get("identifiedBy") - res = client.get(url_for( - 'api_imports.import_slsp_record', - id='recordid:all:991013724759705501', - format='rerojson' - )) + res = client.get( + url_for( + "api_imports.import_slsp_record", + id="recordid:all:991013724759705501", + format="rerojson", + ) + ) assert res.status_code == 200 - assert get_json(res).get('metadata', {}).get('ui_title_text') + assert get_json(res).get("metadata", {}).get("ui_title_text") -@mock.patch('requests.get') -@mock.patch('rero_ils.permissions.login_and_librarian', 
mock.MagicMock()) -def test_documents_import_ugent_isbn(mock_get, client, ugent_anywhere_123, - ugent_isbn_9781108422925, - ugent_book_without_26X, - ugent_isbn_9780415773867, - ugent_samuelson, - ugent_recordid_001247835): +@mock.patch("requests.get") +@mock.patch("rero_ils.permissions.login_and_librarian", mock.MagicMock()) +def test_documents_import_ugent_isbn( + mock_get, + client, + ugent_anywhere_123, + ugent_isbn_9781108422925, + ugent_book_without_26X, + ugent_isbn_9780415773867, + ugent_samuelson, + ugent_recordid_001247835, +): """Test document import from ugent.""" - mock_get.return_value = mock_response( - content=ugent_anywhere_123 - ) - res = client.get(url_for( - 'api_imports.import_ugent', - q='123', - no_cache=1 - )) + mock_get.return_value = mock_response(content=ugent_anywhere_123) + res = client.get(url_for("api_imports.import_ugent", q="123", no_cache=1)) assert res.status_code == 200 data = get_json(res) - assert not data.get('metadata') + assert not data.get("metadata") - mock_get.return_value = mock_response( - content=ugent_isbn_9781108422925 + mock_get.return_value = mock_response(content=ugent_isbn_9781108422925) + res = client.get( + url_for("api_imports.import_ugent", q="isbn:all:9781108422925", no_cache=1) ) - res = client.get(url_for( - 'api_imports.import_ugent', - q='isbn:all:9781108422925', - no_cache=1 - )) assert res.status_code == 200 - data = get_json(res).get('hits').get('hits')[0].get('metadata') - assert data['pid'] == '002487518' + data = get_json(res).get("hits").get("hits")[0].get("metadata") + assert data["pid"] == "002487518" assert Document.create(data) - mock_get.return_value = mock_response( - content=ugent_book_without_26X + mock_get.return_value = mock_response(content=ugent_book_without_26X) + res = client.get( + url_for("api_imports.import_ugent", q="isbn:all:9782717725650", no_cache=1) ) - res = client.get(url_for( - 'api_imports.import_ugent', - q='isbn:all:9782717725650', - no_cache=1 - )) assert res.status_code == 200 - data = get_json(res).get('hits').get('hits')[0].get('metadata') - assert data['pid'] == '002762516' + data = get_json(res).get("hits").get("hits")[0].get("metadata") + assert data["pid"] == "002762516" assert Document.create(data) - res = client.get(url_for( - 'api_imports.import_ugent', - q='', - no_cache=1 - )) + res = client.get(url_for("api_imports.import_ugent", q="", no_cache=1)) assert res.status_code == 200 assert get_json(res) == { - 'aggregations': {}, - 'hits': { - 'hits': [], - 'remote_total': 0, - 'total': 0 - } + "aggregations": {}, + "hits": {"hits": [], "remote_total": 0, "total": 0}, } - mock_get.return_value = mock_response( - content=ugent_samuelson - ) - res = client.get(url_for( - 'api_imports.import_ugent', - q='samuelson', - no_cache=1 - )) + mock_get.return_value = mock_response(content=ugent_samuelson) + res = client.get(url_for("api_imports.import_ugent", q="samuelson", no_cache=1)) assert res.status_code == 200 - unfiltered_total = get_json(res)['hits']['remote_total'] + unfiltered_total = get_json(res)["hits"]["remote_total"] assert get_json(res) - res = client.get(url_for( - 'api_imports.import_ugent', - q='samuelson', - year=2019, - format='rerojson' - )) + res = client.get( + url_for("api_imports.import_ugent", q="samuelson", year=2019, format="rerojson") + ) assert res.status_code == 200 - assert get_json(res)['hits']['total'] < unfiltered_total + assert get_json(res)["hits"]["total"] < unfiltered_total - res = client.get(url_for( - 'api_imports.import_ugent', - q='samuelson', - 
author='samuelson', - format='rerojson' - )) + res = client.get( + url_for( + "api_imports.import_ugent", + q="samuelson", + author="samuelson", + format="rerojson", + ) + ) assert res.status_code == 200 - assert get_json(res)['hits']['total'] < unfiltered_total + assert get_json(res)["hits"]["total"] < unfiltered_total - mock_get.return_value = mock_response( - content=ugent_recordid_001247835 + mock_get.return_value = mock_response(content=ugent_recordid_001247835) + res = client.get( + url_for( + "api_imports.import_ugent_record", id="recordid:all:001247835", no_cache=1 + ) ) - res = client.get(url_for( - 'api_imports.import_ugent_record', - id='recordid:all:001247835', - no_cache=1 - )) assert res.status_code == 200 - assert get_json(res).get('metadata', {}).get('identifiedBy') + assert get_json(res).get("metadata", {}).get("identifiedBy") -@mock.patch('requests.get') -@mock.patch('rero_ils.permissions.login_and_librarian', mock.MagicMock()) -def test_documents_import_kul_isbn(mock_get, client, kul_anywhere_123, - kul_isbn_9782265089419, - kul_book_without_26X, - kul_isbn_2804600068, - kul_samuelson, - kul_recordid_9992876296301471): +@mock.patch("requests.get") +@mock.patch("rero_ils.permissions.login_and_librarian", mock.MagicMock()) +def test_documents_import_kul_isbn( + mock_get, + client, + kul_anywhere_123, + kul_isbn_9782265089419, + kul_book_without_26X, + kul_isbn_2804600068, + kul_samuelson, + kul_recordid_9992876296301471, +): """Test document import from kul.""" - mock_get.return_value = mock_response( - content=kul_anywhere_123 - ) - res = client.get(url_for( - 'api_imports.import_kul', - q='123', - no_cache=1 - )) + mock_get.return_value = mock_response(content=kul_anywhere_123) + res = client.get(url_for("api_imports.import_kul", q="123", no_cache=1)) assert res.status_code == 200 data = get_json(res) - assert not data.get('metadata') + assert not data.get("metadata") - mock_get.return_value = mock_response( - content=kul_isbn_9782265089419 + mock_get.return_value = mock_response(content=kul_isbn_9782265089419) + res = client.get( + url_for("api_imports.import_kul", q="isbn:all:9782265089419", no_cache=1) ) - res = client.get(url_for( - 'api_imports.import_kul', - q='isbn:all:9782265089419', - no_cache=1 - )) assert res.status_code == 200 - data = get_json(res).get('hits').get('hits')[0].get('metadata') - assert data['pid'] == '9983115060101471' + data = get_json(res).get("hits").get("hits")[0].get("metadata") + assert data["pid"] == "9983115060101471" - mock_get.return_value = mock_response( - content=kul_isbn_2804600068 + mock_get.return_value = mock_response(content=kul_isbn_2804600068) + res = client.get( + url_for("api_imports.import_kul", q="isbn:all:2804600068", no_cache=1) ) - res = client.get(url_for( - 'api_imports.import_kul', - q='isbn:all:2804600068', - no_cache=1 - )) assert res.status_code == 200 res_j = get_json(res) - data = res_j.get('hits').get('hits')[0].get('metadata') - data.update({ - "$schema": "https://bib.rero.ch/schemas/documents/document-v0.0.1.json" - }) + data = res_j.get("hits").get("hits")[0].get("metadata") + data.update( + {"$schema": "https://bib.rero.ch/schemas/documents/document-v0.0.1.json"} + ) assert Document.create(data) - marc21_link = res_j.get('hits').get('hits')[0].get('links').get('marc21') + marc21_link = res_j.get("hits").get("hits")[0].get("links").get("marc21") res = client.get(marc21_link) data = get_json(res) - assert data[0][0] == 'leader' + assert data[0][0] == "leader" - res = client.get(url_for( - 
'api_imports.import_kul', - q='', - no_cache=1 - )) + res = client.get(url_for("api_imports.import_kul", q="", no_cache=1)) assert res.status_code == 200 assert get_json(res) == { - 'aggregations': {}, - 'hits': { - 'hits': [], - 'remote_total': 0, - 'total': 0 - } + "aggregations": {}, + "hits": {"hits": [], "remote_total": 0, "total": 0}, } - mock_get.return_value = mock_response( - content=kul_samuelson - ) - res = client.get(url_for( - 'api_imports.import_kul', - q='samuelson', - no_cache=1 - )) + mock_get.return_value = mock_response(content=kul_samuelson) + res = client.get(url_for("api_imports.import_kul", q="samuelson", no_cache=1)) assert res.status_code == 200 - unfiltered_total = get_json(res)['hits']['remote_total'] + unfiltered_total = get_json(res)["hits"]["remote_total"] assert get_json(res) - res = client.get(url_for( - 'api_imports.import_kul', - q='samuelson', - year=2019, - format='rerojson' - )) + res = client.get( + url_for("api_imports.import_kul", q="samuelson", year=2019, format="rerojson") + ) assert res.status_code == 200 - assert get_json(res)['hits']['total'] < unfiltered_total + assert get_json(res)["hits"]["total"] < unfiltered_total - res = client.get(url_for( - 'api_imports.import_kul', - q='samuelson', - author='samuelson', - format='rerojson' - )) + res = client.get( + url_for( + "api_imports.import_kul", + q="samuelson", + author="samuelson", + format="rerojson", + ) + ) assert res.status_code == 200 - assert get_json(res)['hits']['total'] < unfiltered_total + assert get_json(res)["hits"]["total"] < unfiltered_total - res = client.get(url_for( - 'api_imports.import_kul', - q='samuelson', - document_type='docmaintype_book', - format='rerojson' - )) + res = client.get( + url_for( + "api_imports.import_kul", + q="samuelson", + document_type="docmaintype_book", + format="rerojson", + ) + ) assert res.status_code == 200 - assert get_json(res)['hits']['total'] < unfiltered_total + assert get_json(res)["hits"]["total"] < unfiltered_total - mock_get.return_value = mock_response( - content=kul_recordid_9992876296301471 + mock_get.return_value = mock_response(content=kul_recordid_9992876296301471) + res = client.get( + url_for( + "api_imports.import_kul_record", + id="recordid:all:9992876296301471", + no_cache=1, + ) ) - res = client.get(url_for( - 'api_imports.import_kul_record', - id='recordid:all:9992876296301471', - no_cache=1 - )) assert res.status_code == 200 - assert get_json(res).get('metadata', {}).get('identifiedBy') + assert get_json(res).get("metadata", {}).get("identifiedBy") - res = client.get(url_for( - 'api_imports.import_kul_record', - id='recordid:all:9992876296301471', - format='rerojson' - )) + res = client.get( + url_for( + "api_imports.import_kul_record", + id="recordid:all:9992876296301471", + format="rerojson", + ) + ) assert res.status_code == 200 - assert get_json(res).get('metadata', {}).get('ui_title_text') + assert get_json(res).get("metadata", {}).get("ui_title_text") -@mock.patch('requests.get') -@mock.patch('rero_ils.permissions.login_and_librarian', mock.MagicMock()) +@mock.patch("requests.get") +@mock.patch("rero_ils.permissions.login_and_librarian", mock.MagicMock()) def test_documents_import_bnf_errors(mock_get, client): """Test document import from bnf.""" - mock_get.return_value = mock_response( - content=b'' - ) - res = client.get(url_for( - 'api_imports.import_bnf', - q='ean:any', - no_cache=1 - )) + mock_get.return_value = mock_response(content=b"") + res = client.get(url_for("api_imports.import_bnf", q="ean:any", 
no_cache=1)) assert res.status_code == 200 data = get_json(res) - assert not data.get('metadata') + assert not data.get("metadata") - mock_get.return_value = mock_response( - content=b'', - status=429 - ) - res = client.get(url_for( - 'api_imports.import_bnf', - q='ean:any:123', - no_cache=1 - )) + mock_get.return_value = mock_response(content=b"", status=429) + res = client.get(url_for("api_imports.import_bnf", q="ean:any:123", no_cache=1)) assert res.status_code == 429 data = get_json(res) - assert data.get('errors') + assert data.get("errors") - err_msg = 'error' + err_msg = "error" err_code = 555 error = requests.exceptions.HTTPError(err_msg) error.response = mock.MagicMock() error.response.status_code = err_code - error.response.content = 'Error Code' + error.response.content = "Error Code" mock_get.return_value = mock_response( - content=b'', - status=555, - raise_for_status=error - ) - res = client.get(url_for( - 'api_imports.import_bnf', - q='ean:any:123', - no_cache=1 - )) + content=b"", status=555, raise_for_status=error + ) + res = client.get(url_for("api_imports.import_bnf", q="ean:any:123", no_cache=1)) data = get_json(res) assert res.status_code == err_code - assert data['errors']['message'] == err_msg + assert data["errors"]["message"] == err_msg diff --git a/tests/api/test_monitoring_rest.py b/tests/api/test_monitoring_rest.py index f16a4d91a3..c0bffc0862 100644 --- a/tests/api/test_monitoring_rest.py +++ b/tests/api/test_monitoring_rest.py @@ -25,147 +25,139 @@ from invenio_access.permissions import superuser_access from invenio_accounts.testutils import login_user_via_session from invenio_db import db -from utils import flush_index, get_json +from utils import get_json -from rero_ils.modules.entities.remote_entities.api import \ - RemoteEntitiesSearch, RemoteEntity +from rero_ils.modules.entities.remote_entities.api import ( + RemoteEntitiesSearch, + RemoteEntity, +) from rero_ils.modules.utils import get_timestamp, set_timestamp def test_monitoring_es_db_counts(client): """Test monitoring es_db_counts.""" - res = client.get(url_for('api_monitoring.es_db_counts')) + res = client.get(url_for("api_monitoring.es_db_counts")) assert res.status_code == 200 assert get_json(res) == { - 'data': { - 'acac': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'acq_accounts'}, - 'acin': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'acq_invoices'}, - 'acol': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'acq_order_lines'}, - 'acor': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'acq_orders'}, - 'acre': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'acq_receipts'}, - 'acrl': {'db': 0, 'db-es': 0, 'es': 0, - 'index': 'acq_receipt_lines'}, - 'budg': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'budgets'}, - 'cipo': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'circ_policies'}, - 'coll': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'collections'}, - 'rement': {'db': 0, 'db-es': 0, 'es': 0, - 'index': 'remote_entities'}, - 'doc': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'documents'}, - 'hold': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'holdings'}, - 'illr': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'ill_requests'}, - 'item': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'items'}, - 'itty': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'item_types'}, - 'lib': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'libraries'}, - 'loanid': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'loans'}, - 'loc': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'locations'}, - 'locent': {'db': 0, 'db-es': 0, 'es': 0, - 'index': 'local_entities'}, - 'lofi': {'db': 0, 'db-es': 0, 'es': 0, 'index': 
'local_fields'}, - 'notif': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'notifications'}, - 'oplg': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'operation_logs'}, - 'org': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'organisations'}, - 'ptre': {'db': 0, 'db-es': 0, 'es': 0, - 'index': 'patron_transaction_events'}, - 'ptrn': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'patrons'}, - 'pttr': {'db': 0, 'db-es': 0, 'es': 0, - 'index': 'patron_transactions'}, - 'ptty': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'patron_types'}, - 'stacfg': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'stats_cfg'}, - 'stat': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'stats'}, - 'tmpl': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'templates'}, - 'ent': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'entities'}, - 'vndr': {'db': 0, 'db-es': 0, 'es': 0, 'index': 'vendors'}, + "data": { + "acac": {"db": 0, "db-es": 0, "es": 0, "index": "acq_accounts"}, + "acin": {"db": 0, "db-es": 0, "es": 0, "index": "acq_invoices"}, + "acol": {"db": 0, "db-es": 0, "es": 0, "index": "acq_order_lines"}, + "acor": {"db": 0, "db-es": 0, "es": 0, "index": "acq_orders"}, + "acre": {"db": 0, "db-es": 0, "es": 0, "index": "acq_receipts"}, + "acrl": {"db": 0, "db-es": 0, "es": 0, "index": "acq_receipt_lines"}, + "budg": {"db": 0, "db-es": 0, "es": 0, "index": "budgets"}, + "cipo": {"db": 0, "db-es": 0, "es": 0, "index": "circ_policies"}, + "coll": {"db": 0, "db-es": 0, "es": 0, "index": "collections"}, + "rement": {"db": 0, "db-es": 0, "es": 0, "index": "remote_entities"}, + "doc": {"db": 0, "db-es": 0, "es": 0, "index": "documents"}, + "hold": {"db": 0, "db-es": 0, "es": 0, "index": "holdings"}, + "illr": {"db": 0, "db-es": 0, "es": 0, "index": "ill_requests"}, + "item": {"db": 0, "db-es": 0, "es": 0, "index": "items"}, + "itty": {"db": 0, "db-es": 0, "es": 0, "index": "item_types"}, + "lib": {"db": 0, "db-es": 0, "es": 0, "index": "libraries"}, + "loanid": {"db": 0, "db-es": 0, "es": 0, "index": "loans"}, + "loc": {"db": 0, "db-es": 0, "es": 0, "index": "locations"}, + "locent": {"db": 0, "db-es": 0, "es": 0, "index": "local_entities"}, + "lofi": {"db": 0, "db-es": 0, "es": 0, "index": "local_fields"}, + "notif": {"db": 0, "db-es": 0, "es": 0, "index": "notifications"}, + "oplg": {"db": 0, "db-es": 0, "es": 0, "index": "operation_logs"}, + "org": {"db": 0, "db-es": 0, "es": 0, "index": "organisations"}, + "ptre": { + "db": 0, + "db-es": 0, + "es": 0, + "index": "patron_transaction_events", + }, + "ptrn": {"db": 0, "db-es": 0, "es": 0, "index": "patrons"}, + "pttr": {"db": 0, "db-es": 0, "es": 0, "index": "patron_transactions"}, + "ptty": {"db": 0, "db-es": 0, "es": 0, "index": "patron_types"}, + "stacfg": {"db": 0, "db-es": 0, "es": 0, "index": "stats_cfg"}, + "stat": {"db": 0, "db-es": 0, "es": 0, "index": "stats"}, + "tmpl": {"db": 0, "db-es": 0, "es": 0, "index": "templates"}, + "ent": {"db": 0, "db-es": 0, "es": 0, "index": "entities"}, + "vndr": {"db": 0, "db-es": 0, "es": 0, "index": "vendors"}, } } -def test_monitoring_check_es_db_counts(app, client, entity_person_data, - system_librarian_martigny): +def test_monitoring_check_es_db_counts( + app, client, entity_person_data, system_librarian_martigny +): """Test monitoring check_es_db_counts.""" - res = client.get(url_for('api_monitoring.check_es_db_counts', delay=0)) + res = client.get(url_for("api_monitoring.check_es_db_counts", delay=0)) assert res.status_code == 200 - assert get_json(res) == {'data': {'status': 'green'}} + assert get_json(res) == {"data": {"status": "green"}} pers = RemoteEntity.create( - 
data=entity_person_data, - delete_pid=False, - dbcommit=True, - reindex=False) - flush_index(RemoteEntitiesSearch.Meta.index) - res = client.get(url_for('api_monitoring.check_es_db_counts', delay=0)) + data=entity_person_data, delete_pid=False, dbcommit=True, reindex=False + ) + RemoteEntitiesSearch.flush_and_refresh() + res = client.get(url_for("api_monitoring.check_es_db_counts", delay=0)) assert res.status_code == 200 assert get_json(res) == { - 'data': {'status': 'red'}, - 'errors': [{ - 'code': 'DB_ES_COUNTER_MISMATCH', - 'details': 'There are 1 items from rement missing in ES.', - 'id': 'DB_ES_COUNTER_MISMATCH', - 'links': { - 'about': 'http://localhost/monitoring/check_es_db_counts', - 'rement': 'http://localhost/monitoring/missing_pids/rement' - }, - 'title': "DB items counts don't match ES items count." - }] + "data": {"status": "red"}, + "errors": [ + { + "code": "DB_ES_COUNTER_MISMATCH", + "details": "There are 1 items from rement missing in ES.", + "id": "DB_ES_COUNTER_MISMATCH", + "links": { + "about": "http://localhost/monitoring/check_es_db_counts", + "rement": "http://localhost/monitoring/missing_pids/rement", + }, + "title": "DB items counts don't match ES items count.", + } + ], } # this view is only accessible by monitoring - res = client.get(url_for('api_monitoring.missing_pids', doc_type='rement')) + res = client.get(url_for("api_monitoring.missing_pids", doc_type="rement")) assert res.status_code == 401 login_user_via_session(client, system_librarian_martigny.user) - res = client.get(url_for('api_monitoring.missing_pids', doc_type='rement')) + res = client.get(url_for("api_monitoring.missing_pids", doc_type="rement")) assert res.status_code == 403 # give user superuser admin rights db.session.add( - ActionUsers.allow( - superuser_access, - user=system_librarian_martigny.user - ) + ActionUsers.allow(superuser_access, user=system_librarian_martigny.user) ) db.session.commit() - res = client.get(url_for( - 'api_monitoring.missing_pids', doc_type='rement', delay=0)) + res = client.get(url_for("api_monitoring.missing_pids", doc_type="rement", delay=0)) assert res.status_code == 200 assert get_json(res) == { - 'data': { - 'DB': [], - 'ES': ['http://localhost/remote_entities/ent_pers'], - 'ES duplicate': [] + "data": { + "DB": [], + "ES": ["http://localhost/remote_entities/ent_pers"], + "ES duplicate": [], } } def test_timestamps(app, client): """Test timestamps.""" - time_stamp = set_timestamp('test', msg='test msg') - assert get_timestamp('test') == { - 'time': time_stamp, - 'msg': 'test msg' - } - res = client.get(url_for('api_monitoring.timestamps')) + time_stamp = set_timestamp("test", msg="test msg") + assert get_timestamp("test") == {"time": time_stamp, "msg": "test msg"} + res = client.get(url_for("api_monitoring.timestamps")) assert res.status_code == 401 - ds = app.extensions['invenio-accounts'].datastore - user = ds.create_user( - email='monitoring@rero.ch', - password='1234', - active=True - ) - role = ds.create_role(name='monitoring', description='Monitoring Group') + ds = app.extensions["invenio-accounts"].datastore + user = ds.create_user(email="monitoring@rero.ch", password="1234", active=True) + role = ds.create_role(name="monitoring", description="Monitoring Group") ds.add_role_to_user(user, role) ds.commit() - user = ds.get_user('monitoring@rero.ch') + user = ds.get_user("monitoring@rero.ch") login_user_via_session(client, user) - res = client.get(url_for('api_monitoring.timestamps')) + res = client.get(url_for("api_monitoring.timestamps")) assert 
res.status_code == 200 assert get_json(res) == { - 'data': { - 'test': { - 'msg': 'test msg', - 'name': 'test', - 'unixtime': time.mktime(time_stamp.timetuple()), - 'utctime': time_stamp.strftime("%Y-%m-%d %H:%M:%S") + "data": { + "test": { + "msg": "test msg", + "name": "test", + "unixtime": time.mktime(time_stamp.timetuple()), + "utctime": time_stamp.strftime("%Y-%m-%d %H:%M:%S"), } } } diff --git a/tests/api/test_pid_rest.py b/tests/api/test_pid_rest.py index 3a0e1314c0..6b586e6ee5 100644 --- a/tests/api/test_pid_rest.py +++ b/tests/api/test_pid_rest.py @@ -24,7 +24,8 @@ def test_ilsrecord_pid_after_validationerror( - client, loc_online_martigny_data, librarian_martigny): + client, loc_online_martigny_data, librarian_martigny +): """Check PID before and after a ValidationError: it should be the same""" loc = Location.create(loc_online_martigny_data, delete_pid=True) next_pid = str(int(loc.pid) + 1) @@ -33,13 +34,12 @@ def test_ilsrecord_pid_after_validationerror( login_user_via_session(client, librarian_martigny.user) res, _ = postdata( client, - 'invenio_records_rest.loc_list', + "invenio_records_rest.loc_list", { - '$schema': - 'https://bib.rero.ch/schemas/locations/location-v0.0.1.json', - 'library': {'$ref': 'https://bib.rero.ch/api/libraries/lib1'}, - 'name': 'Library of Foo' - } + "$schema": "https://bib.rero.ch/schemas/locations/location-v0.0.1.json", + "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"}, + "name": "Library of Foo", + }, ) # check http status for invalid record diff --git a/tests/api/test_record_permissions.py b/tests/api/test_record_permissions.py index 76ef09e19d..f9fb15aea1 100644 --- a/tests/api/test_record_permissions.py +++ b/tests/api/test_record_permissions.py @@ -22,24 +22,25 @@ def test_document_permissions( - client, document, librarian_martigny, - patron_martigny, ebook_1, circ_policy_short_martigny): + client, + document, + librarian_martigny, + patron_martigny, + ebook_1, + circ_policy_short_martigny, +): """Test document permissions.""" # failed: invalid document pid is given res = client.get( url_for( - 'api_blueprint.permissions', - route_name='documents', - record_pid='no_pid' + "api_blueprint.permissions", route_name="documents", record_pid="no_pid" ) ) assert res.status_code == 401 # failed: no logged user res = client.get( url_for( - 'api_blueprint.permissions', - route_name='documents', - record_pid=document.pid + "api_blueprint.permissions", route_name="documents", record_pid=document.pid ) ) assert res.status_code == 401 @@ -48,41 +49,36 @@ def test_document_permissions( login_user_via_session(client, patron_martigny.user) res = client.get( url_for( - 'api_blueprint.permissions', - route_name='documents', - record_pid=document.pid + "api_blueprint.permissions", route_name="documents", record_pid=document.pid ) ) assert res.status_code == 403 # success: logged user and a valid document pid is given login_user_via_session(client, librarian_martigny.user) - data = call_api_permissions(client, 'documents', document.pid) - assert 'update' in data - assert 'delete' in data + data = call_api_permissions(client, "documents", document.pid) + assert "update" in data + assert "delete" in data # success: logged user and a valid document pid is given login_user_via_session(client, librarian_martigny.user) - data = call_api_permissions(client, 'documents', ebook_1.pid) - assert 'update' in data - assert 'delete' in data + data = call_api_permissions(client, "documents", ebook_1.pid) + assert "update" in data + assert "delete" in data # failed: 
invalid route name res = client.get( url_for( - 'api_blueprint.permissions', - route_name='no_route', - record_pid=document.pid + "api_blueprint.permissions", route_name="no_route", record_pid=document.pid ) ) assert res.status_code == 400 # failed: permission denied - data = call_api_permissions(client, 'circ_policies', - circ_policy_short_martigny.pid) - assert data.get('delete', {}).get('can') is False - reasons = data.get('delete', {}).get('reasons', {}) - assert 'others' in reasons and 'permission' in reasons['others'] + data = call_api_permissions(client, "circ_policies", circ_policy_short_martigny.pid) + assert data.get("delete", {}).get("can") is False + reasons = data.get("delete", {}).get("reasons", {}) + assert "others" in reasons and "permission" in reasons["others"] def test_patrons_permissions( @@ -94,74 +90,67 @@ def test_patrons_permissions( system_librarian_martigny, system_librarian2_martigny, system_librarian_sion, - librarian_sion + librarian_sion, ): """Test permissions for patrons.""" # simple librarian ----------------------------------------------- login_user(client, librarian_martigny) # 1) should update and delete a librarian of the same library - data = call_api_permissions(client, 'patrons', librarian2_martigny.pid) - assert data['delete']['can'] - assert data['update']['can'] + data = call_api_permissions(client, "patrons", librarian2_martigny.pid) + assert data["delete"]["can"] + assert data["update"]["can"] # 2) should not update and delete a librarian of an other library - data = call_api_permissions(client, 'patrons', librarian_saxon.pid) - assert not data['delete']['can'] - assert not data['update']['can'] + data = call_api_permissions(client, "patrons", librarian_saxon.pid) + assert not data["delete"]["can"] + assert not data["update"]["can"] # 3) should not delete a system librarian # but can update it (except some roles management) - data = call_api_permissions(client, 'patrons', - system_librarian_martigny.pid) - assert not data['delete']['can'] - assert data['update']['can'] + data = call_api_permissions(client, "patrons", system_librarian_martigny.pid) + assert not data["delete"]["can"] + assert data["update"]["can"] # system librarian ---------------------------------------------- login_user(client, system_librarian_martigny) # should update and delete a librarian of the same library - data = call_api_permissions(client, 'patrons', librarian2_martigny.pid) - assert data['delete']['can'] - assert data['update']['can'] + data = call_api_permissions(client, "patrons", librarian2_martigny.pid) + assert data["delete"]["can"] + assert data["update"]["can"] # should update and delete a librarian of an other library - data = call_api_permissions(client, 'patrons', librarian_saxon.pid) - assert data['delete']['can'] - assert data['update']['can'] + data = call_api_permissions(client, "patrons", librarian_saxon.pid) + assert data["delete"]["can"] + assert data["update"]["can"] # should update and delete a system librarian of the same organisation # but not itself - data = call_api_permissions(client, 'patrons', - system_librarian2_martigny.pid) - assert data['delete']['can'] - assert data['update']['can'] + data = call_api_permissions(client, "patrons", system_librarian2_martigny.pid) + assert data["delete"]["can"] + assert data["update"]["can"] # should not update and delete a system librarian of another organisation - data = call_api_permissions(client, 'patrons', system_librarian_sion.pid) - assert not data['delete']['can'] - assert not 
data['update']['can'] + data = call_api_permissions(client, "patrons", system_librarian_sion.pid) + assert not data["delete"]["can"] + assert not data["update"]["can"] def test_items_permissions( - client, - item_lib_martigny, # on shelf - item_lib_fully, # on loan - librarian_martigny + client, + item_lib_martigny, # on shelf + item_lib_fully, # on loan + librarian_martigny, ): """Test items permissions.""" login_user(client, librarian_martigny) - data = call_api_permissions(client, 'items', item_lib_fully.pid) - assert not data['delete']['can'] - assert not data['update']['can'] + data = call_api_permissions(client, "items", item_lib_fully.pid) + assert not data["delete"]["can"] + assert not data["update"]["can"] - data = call_api_permissions(client, 'items', item_lib_martigny.pid) - assert data['delete']['can'] - assert data['update']['can'] + data = call_api_permissions(client, "items", item_lib_martigny.pid) + assert data["delete"]["can"] + assert data["update"]["can"] response = client.get( url_for( - 'api_blueprint.permissions', - route_name='items', - record_pid='dummy_item_pid' + "api_blueprint.permissions", route_name="items", record_pid="dummy_item_pid" ) ) assert response.status_code == 404 @@ -170,11 +159,7 @@ def call_api_permissions(client, route_name, pid): """Get permissions from permissions API.""" response = client.get( - url_for( - 'api_blueprint.permissions', - route_name=route_name, - record_pid=pid - ) + url_for("api_blueprint.permissions", route_name=route_name, record_pid=pid) ) assert response.status_code == 200 return get_json(response) diff --git a/tests/api/test_search.py b/tests/api/test_search.py index d9e3e7b766..290c35937b 100644 --- a/tests/api/test_search.py +++ b/tests/api/test_search.py @@ -21,330 +21,232 @@ from utils import VerifyRecordPermissionPatch, get_json -@mock.patch('invenio_records_rest.views.verify_record_permission', - mock.MagicMock(return_value=VerifyRecordPermissionPatch)) -def test_documents_search( - client, - doc_title_travailleurs, - doc_title_travailleuses -): +@mock.patch( + "invenio_records_rest.views.verify_record_permission", + mock.MagicMock(return_value=VerifyRecordPermissionPatch), +) +def test_documents_search(client, doc_title_travailleurs, doc_title_travailleuses): """Test document search queries.""" # phrase search list_url = url_for( - 'invenio_records_rest.doc_list', + "invenio_records_rest.doc_list", q='"Les travailleurs assidus sont de retours"', - simple='1' + simple="1", ) res = client.get(list_url) - hits = get_json(res)['hits'] - assert hits['total']['value'] == 1 + hits = get_json(res)["hits"] + assert hits["total"]["value"] == 1 # phrase search with punctuations list_url = url_for( - 'invenio_records_rest.doc_list', + "invenio_records_rest.doc_list", q='"Les travailleurs assidus sont de retours."', - simple='1' + simple="1", ) res = client.get(list_url) - hits = get_json(res)['hits'] - assert hits['total']['value'] == 1 + hits = get_json(res)["hits"] + assert hits["total"]["value"] == 1 # word search - list_url = url_for( - 'invenio_records_rest.doc_list', - q='travailleurs', - simple='1' - ) + list_url = url_for("invenio_records_rest.doc_list", q="travailleurs", simple="1") res = client.get(list_url) - hits = get_json(res)['hits'] - assert hits['total']['value'] == 2 + hits = get_json(res)["hits"] + assert hits["total"]["value"] == 2 # travailleurs == travailleur == travailleuses - list_url = url_for( - 'invenio_records_rest.doc_list', - q='travailleur', - simple='1' - ) + list_url =
url_for("invenio_records_rest.doc_list", q="travailleur", simple="1") res = client.get(list_url) - hits = get_json(res)['hits'] - assert hits['total']['value'] == 2 + hits = get_json(res)["hits"] + assert hits["total"]["value"] == 2 # ecole == école - list_url = url_for( - 'invenio_records_rest.doc_list', - q='ecole', - simple='1' - ) + list_url = url_for("invenio_records_rest.doc_list", q="ecole", simple="1") res = client.get(list_url) - hits = get_json(res)['hits'] - assert hits['total']['value'] == 1 + hits = get_json(res)["hits"] + assert hits["total"]["value"] == 1 # Ecole == école - list_url = url_for( - 'invenio_records_rest.doc_list', - q='Ecole', - simple='1' - ) + list_url = url_for("invenio_records_rest.doc_list", q="Ecole", simple="1") res = client.get(list_url) - hits = get_json(res)['hits'] - assert hits['total']['value'] == 1 + hits = get_json(res)["hits"] + assert hits["total"]["value"] == 1 # ECOLE == école - list_url = url_for( - 'invenio_records_rest.doc_list', - q='Ecole', - simple='1' - ) + list_url = url_for("invenio_records_rest.doc_list", q="Ecole", simple="1") res = client.get(list_url) - hits = get_json(res)['hits'] - assert hits['total']['value'] == 1 + hits = get_json(res)["hits"] + assert hits["total"]["value"] == 1 # _école_ == école - list_url = url_for( - 'invenio_records_rest.doc_list', - q=' école ', - simple='1' - ) + list_url = url_for("invenio_records_rest.doc_list", q=" école ", simple="1") res = client.get(list_url) - hits = get_json(res)['hits'] - assert hits['total']['value'] == 1 + hits = get_json(res)["hits"] + assert hits["total"]["value"] == 1 # Müller - list_url = url_for( - 'invenio_records_rest.doc_list', - q='Müller', - simple='1' - ) + list_url = url_for("invenio_records_rest.doc_list", q="Müller", simple="1") res = client.get(list_url) - hits = get_json(res)['hits'] - assert hits['total']['value'] == 1 + hits = get_json(res)["hits"] + assert hits["total"]["value"] == 1 # Müller == Muller - list_url = url_for( - 'invenio_records_rest.doc_list', - q='Muller', - simple='1' - ) + list_url = url_for("invenio_records_rest.doc_list", q="Muller", simple="1") res = client.get(list_url) - hits = get_json(res)['hits'] - assert hits['total']['value'] == 1 + hits = get_json(res)["hits"] + assert hits["total"]["value"] == 1 # Müller == Mueller - list_url = url_for( - 'invenio_records_rest.doc_list', - q='Mueller', - simple='1' - ) + list_url = url_for("invenio_records_rest.doc_list", q="Mueller", simple="1") res = client.get(list_url) - hits = get_json(res)['hits'] - assert hits['total']['value'] == 1 + hits = get_json(res)["hits"] + assert hits["total"]["value"] == 1 # test AND list_url = url_for( - 'invenio_records_rest.doc_list', - q='travailleuse école', - simple='1' + "invenio_records_rest.doc_list", q="travailleuse école", simple="1" ) res = client.get(list_url) - hits = get_json(res)['hits'] - assert hits['total']['value'] == 1 + hits = get_json(res)["hits"] + assert hits["total"]["value"] == 1 # test OR in two docs - list_url = url_for( - 'invenio_records_rest.doc_list', - q='retours | école', - simple='1' - ) + list_url = url_for("invenio_records_rest.doc_list", q="retours | école", simple="1") res = client.get(list_url) - hits = get_json(res)['hits'] - assert hits['total']['value'] == 2 + hits = get_json(res)["hits"] + assert hits["total"]["value"] == 2 # test AND in two fields (travailleuses == travailleur) list_url = url_for( - 'invenio_records_rest.doc_list', - q='travailleuses bientôt', - simple='1' + "invenio_records_rest.doc_list", 
q="travailleuses bientôt", simple="1" ) res = client.get(list_url) - hits = get_json(res)['hits'] - assert hits['total']['value'] == 1 + hits = get_json(res)["hits"] + assert hits["total"]["value"] == 1 list_url = url_for( - 'invenio_records_rest.doc_list', - q='travailleuses +bientôt', - simple='1' + "invenio_records_rest.doc_list", q="travailleuses +bientôt", simple="1" ) res = client.get(list_url) - hits = get_json(res)['hits'] - assert hits['total']['value'] == 1 + hits = get_json(res)["hits"] + assert hits["total"]["value"] == 1 # test NOT list_url = url_for( - 'invenio_records_rest.doc_list', - q='travailleur -école', - simple='1' + "invenio_records_rest.doc_list", q="travailleur -école", simple="1" ) res = client.get(list_url) - hits = get_json(res)['hits'] - assert hits['total']['value'] == 1 + hits = get_json(res)["hits"] + assert hits["total"]["value"] == 1 # test OR in two docs (each match only one term) - list_url = url_for( - 'invenio_records_rest.doc_list', - q='retours | école', - simple='1' - ) + list_url = url_for("invenio_records_rest.doc_list", q="retours | école", simple="1") res = client.get(list_url) - hits = get_json(res)['hits'] - assert hits['total']['value'] == 2 + hits = get_json(res)["hits"] + assert hits["total"]["value"] == 2 # test AND in two docs (each match only one term) => no result - list_url = url_for( - 'invenio_records_rest.doc_list', - q='retours école', - simple='1' - ) + list_url = url_for("invenio_records_rest.doc_list", q="retours école", simple="1") res = client.get(list_url) - hits = get_json(res)['hits'] - assert hits['total']['value'] == 0 + hits = get_json(res)["hits"] + assert hits["total"]["value"] == 0 - list_url = url_for( - 'invenio_records_rest.doc_list', - q='retours + école', - simple='1' - ) + list_url = url_for("invenio_records_rest.doc_list", q="retours + école", simple="1") res = client.get(list_url) - hits = get_json(res)['hits'] - assert hits['total']['value'] == 0 + hits = get_json(res)["hits"] + assert hits["total"]["value"] == 0 # title + subtitle list_url = url_for( - 'invenio_records_rest.doc_list', - q='Les travailleurs assidus sont de retours : ' - 'les jeunes arrivent bientôt ?', - simple='1' + "invenio_records_rest.doc_list", + q="Les travailleurs assidus sont de retours : " "les jeunes arrivent bientôt ?", + simple="1", ) res = client.get(list_url) - hits = get_json(res)['hits'] - assert hits['total']['value'] == 1 + hits = get_json(res)["hits"] + assert hits["total"]["value"] == 1 # punctuation list_url = url_for( - 'invenio_records_rest.doc_list', - q=r'école : . ... , ; ? \ ! = == - --', - simple='1' + "invenio_records_rest.doc_list", + q=r"école : . ... , ; ? \ ! 
= == - --", + simple="1", ) res = client.get(list_url) - hits = get_json(res)['hits'] - assert hits['total']['value'] == 1 + hits = get_json(res)["hits"] + assert hits["total"]["value"] == 1 list_url = url_for( - 'invenio_records_rest.doc_list', - q=r'école:.,;?\!...=-==--', - simple='1' + "invenio_records_rest.doc_list", q=r"école:.,;?\!...=-==--", simple="1" ) res = client.get(list_url) - hits = get_json(res)['hits'] - assert hits['total']['value'] == 1 + hits = get_json(res)["hits"] + assert hits["total"]["value"] == 1 # special chars # œ in title - list_url = url_for( - 'invenio_records_rest.doc_list', - q=r'bœuf', - simple='1' - ) + list_url = url_for("invenio_records_rest.doc_list", q=r"bœuf", simple="1") res = client.get(list_url) - hits = get_json(res)['hits'] - assert hits['total']['value'] == 1 + hits = get_json(res)["hits"] + assert hits["total"]["value"] == 1 # æ in title - list_url = url_for( - 'invenio_records_rest.doc_list', - q=r'ex aequo', - simple='1' - ) + list_url = url_for("invenio_records_rest.doc_list", q=r"ex aequo", simple="1") res = client.get(list_url) - hits = get_json(res)['hits'] - assert hits['total']['value'] == 1 + hits = get_json(res)["hits"] + assert hits["total"]["value"] == 1 # æ in title - list_url = url_for( - 'invenio_records_rest.doc_list', - q=r'ÆQUO', - simple='1' - ) + list_url = url_for("invenio_records_rest.doc_list", q=r"ÆQUO", simple="1") res = client.get(list_url) - hits = get_json(res)['hits'] - assert hits['total']['value'] == 1 + hits = get_json(res)["hits"] + assert hits["total"]["value"] == 1 # œ in author - list_url = url_for( - 'invenio_records_rest.doc_list', - q=r'Corminbœuf', - simple='1' - ) + list_url = url_for("invenio_records_rest.doc_list", q=r"Corminbœuf", simple="1") res = client.get(list_url) - hits = get_json(res)['hits'] - assert hits['total']['value'] == 1 + hits = get_json(res)["hits"] + assert hits["total"]["value"] == 1 # auto complete should not use ngram - list_url = url_for( - 'invenio_records_rest.doc_list', - q=r'1000091', - simple='1' - ) + list_url = url_for("invenio_records_rest.doc_list", q=r"1000091", simple="1") res = client.get(list_url) - hits = get_json(res)['hits'] - assert hits['total']['value'] == 0 + hits = get_json(res)["hits"] + assert hits["total"]["value"] == 0 # wildcard - list_url = url_for( - 'invenio_records_rest.doc_list', - q='histoire*', - simple='1' - ) + list_url = url_for("invenio_records_rest.doc_list", q="histoire*", simple="1") res = client.get(list_url) - hits = get_json(res)['hits'] - assert hits['total']['value'] == 1 + hits = get_json(res)["hits"] + assert hits["total"]["value"] == 1 # & char query - list_url = url_for( - 'invenio_records_rest.doc_list', - q='Boy & Girl', - simple='1' - ) + list_url = url_for("invenio_records_rest.doc_list", q="Boy & Girl", simple="1") res = client.get(list_url) - hits = get_json(res)['hits'] - assert hits['total']['value'] == 1 + hits = get_json(res)["hits"] + assert hits["total"]["value"] == 1 # test wildcard query with boolean sub property # See: elasticsearch query_string lenient property # for more details - list_url = url_for( - 'invenio_records_rest.doc_list', - q=r'subjects.\*:test' - ) + list_url = url_for("invenio_records_rest.doc_list", q=r"subjects.\*:test") res = client.get(list_url) - hits = get_json(res)['hits'] + hits = get_json(res)["hits"] assert hits # test wildcard query with boolean sub property # See: elasticsearch query_string lenient property # for more details list_url = url_for( - 'invenio_records_rest.doc_list', - 
q=r'autocomplete_title:travailleu' + "invenio_records_rest.doc_list", q=r"autocomplete_title:travailleu" ) res = client.get(list_url) - hits = get_json(res)['hits'] - assert hits['total']['value'] != 0 + hits = get_json(res)["hits"] + assert hits["total"]["value"] != 0 list_url = url_for( - 'invenio_records_rest.doc_list', - q=r'autocomplete_title:travailleur' + "invenio_records_rest.doc_list", q=r"autocomplete_title:travailleur" ) res = client.get(list_url) - hits = get_json(res)['hits'] - assert hits['total']['value'] != 0 + hits = get_json(res)["hits"] + assert hits["total"]["value"] != 0 diff --git a/tests/api/test_serializers.py b/tests/api/test_serializers.py index ae8fec2b66..c7cbabd95e 100644 --- a/tests/api/test_serializers.py +++ b/tests/api/test_serializers.py @@ -19,8 +19,12 @@ import mock from flask import url_for -from utils import VerifyRecordPermissionPatch, flush_index, get_json, \ - item_record_to_a_specific_loan_state, login_user +from utils import ( + VerifyRecordPermissionPatch, + get_json, + item_record_to_a_specific_loan_state, + login_user, +) from rero_ils.modules.loans.models import LoanState from rero_ils.modules.locations.api import LocationsSearch @@ -35,61 +39,59 @@ def test_operation_logs_serializers( item_lib_martigny, loc_public_martigny, circulation_policies, - lib_martigny_data + lib_martigny_data, ): """Test serializers for operation logs.""" params = { - 'patron_pid': patron_martigny.pid, - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid, - 'pickup_location_pid': loc_public_martigny.pid, + "patron_pid": patron_martigny.pid, + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, + "pickup_location_pid": loc_public_martigny.pid, } item_record_to_a_specific_loan_state( - item=item_lib_martigny, loan_state=LoanState.ITEM_AT_DESK, - params=params, copy_item=True) + item=item_lib_martigny, + loan_state=LoanState.ITEM_AT_DESK, + params=params, + copy_item=True, + ) # Force update ES index - flush_index(OperationLogsSearch.Meta.index) - list_url = url_for('invenio_records_rest.oplg_list') + OperationLogsSearch.flush_and_refresh() + list_url = url_for("invenio_records_rest.oplg_list") login_user(client, patron_martigny) response = client.get(list_url, headers=rero_json_header) assert response.status_code == 200 data = get_json(response) - loan = data.get('hits', {}).get('hits', [])[0].get('metadata', {})\ .get('loan', {}) - libary_name = lib_martigny_data['name'] + loan = data.get("hits", {}).get("hits", [])[0].get("metadata", {}).get("loan", {}) + library_name = lib_martigny_data["name"] # Check that the library data is injected into the section - assert libary_name == loan.get('transaction_location', {})\ .get('library').get('name') - assert libary_name == loan.get('pickup_location', {})\ .get('library').get('name') + assert library_name == loan.get("transaction_location", {}).get("library").get( + "name" + ) + assert library_name == loan.get("pickup_location", {}).get("library").get("name") def test_patrons_serializers( - client, - json_header, - librarian_martigny, - librarian2_martigny, - rero_json_header + client, json_header, librarian_martigny, librarian2_martigny, rero_json_header ): """Test serializers for patrons.""" login_user(client, librarian_martigny) - list_url = url_for('invenio_records_rest.ptrn_list') + list_url = url_for("invenio_records_rest.ptrn_list") response = client.get(list_url, headers=json_header) assert response.status_code == 200 # Get the first result and check if it contains all desired keys.
data = get_json(response) - hit = data['hits']['hits'][0] - for key in ['created', 'updated', 'id', 'links', 'metadata']: + hit = data["hits"]["hits"][0] + for key in ["created", "updated", "id", "links", "metadata"]: assert key in hit assert hit[key] response = client.get(list_url, headers=rero_json_header) assert response.status_code == 200 data = get_json(response) - ptty_aggr = data.get('aggregations', {}).get('patron_type', {}) - assert all('name' in term for term in ptty_aggr.get('buckets', [])) + ptty_aggr = data.get("aggregations", {}).get("patron_type", {}) + assert all("name" in term for term in ptty_aggr.get("buckets", [])) def test_document_and_holdings_serializers( @@ -98,26 +100,26 @@ document, librarian_martigny, lib_martigny, - holding_lib_martigny + holding_lib_martigny, ): """Test serializers for holdings.""" login_user(client, librarian_martigny) - doc_url = url_for('invenio_records_rest.doc_list') + doc_url = url_for("invenio_records_rest.doc_list") response = client.get(doc_url, headers=rero_json_header) assert response.status_code == 200 - doc_url = url_for('invenio_records_rest.doc_item', pid_value=document.pid) + doc_url = url_for("invenio_records_rest.doc_item", pid_value=document.pid) response = client.get(doc_url, headers=rero_json_header) assert response.status_code == 200 - holding_url = url_for('invenio_records_rest.hold_list') + holding_url = url_for("invenio_records_rest.hold_list") response = client.get(holding_url, headers=rero_json_header) assert response.status_code == 200 data = get_json(response) - record = data['hits']['hits'][0]['metadata'] - assert record.get('location', {}).get('name') - assert record.get('library', {}).get('name') - assert record.get('circulation_category', {}).get('name') + record = data["hits"]["hits"][0]["metadata"] + assert record.get("location", {}).get("name") + assert record.get("library", {}).get("name") + assert record.get("circulation_category", {}).get("name") def test_loans_serializers( @@ -129,210 +131,242 @@ librarian_martigny, item_lib_martigny, item_lib_fully, - circulation_policies + circulation_policies, ): """Test serializers for loans.""" # create some loans on the same item with different states params = { - 'patron_pid': patron_martigny.pid, - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid, - 'pickup_location_pid': loc_public_martigny.pid, + "patron_pid": patron_martigny.pid, + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, + "pickup_location_pid": loc_public_martigny.pid, } item_record_to_a_specific_loan_state( - item=item_lib_martigny, loan_state=LoanState.PENDING, - params=params, copy_item=True) + item=item_lib_martigny, + loan_state=LoanState.PENDING, + params=params, + copy_item=True, + ) item_record_to_a_specific_loan_state( - item=item_lib_martigny, loan_state=LoanState.ITEM_AT_DESK, - params=params, copy_item=True) + item=item_lib_martigny, + loan_state=LoanState.ITEM_AT_DESK, + params=params, + copy_item=True, + ) item_record_to_a_specific_loan_state( - item=item_lib_martigny, loan_state=LoanState.ITEM_ON_LOAN, - params=params, copy_item=True) + item=item_lib_martigny, + loan_state=LoanState.ITEM_ON_LOAN, + params=params, + copy_item=True, + ) params = { - 'patron_pid': patron_martigny.pid, - 'transaction_location_pid': loc_public_fully.pid, -
'transaction_user_pid': librarian_martigny.pid, - 'pickup_location_pid': loc_public_fully.pid, - 'checkin_transaction_location_pid': loc_public_martigny.pid + "patron_pid": patron_martigny.pid, + "transaction_location_pid": loc_public_fully.pid, + "transaction_user_pid": librarian_martigny.pid, + "pickup_location_pid": loc_public_fully.pid, + "checkin_transaction_location_pid": loc_public_martigny.pid, } item_record_to_a_specific_loan_state( item=item_lib_fully, loan_state=LoanState.ITEM_IN_TRANSIT_TO_HOUSE, - params=params, copy_item=True) + params=params, + copy_item=True, + ) - list_url = url_for('invenio_records_rest.loanid_list') + list_url = url_for("invenio_records_rest.loanid_list") login_user(client, patron_martigny) response = client.get(list_url, headers=rero_json_header) assert response.status_code == 200 data = get_json(response) - records = data.get('hits', {}).get('hits', []) + records = data.get("hits", {}).get("hits", []) for record in records: - data = record.get('metadata', {}) - if data.get('state') == 'PENDING': - assert data.get('pickup_name') - elif data.get('state') == 'ITEM_AT_DESK': - assert data.get('rank') == 0 - elif data.get('state') == 'ITEM_ON_LOAN': - assert data.get('overdue') is False - elif data.get('state') == 'ITEM_IN_TRANSIT_TO_HOUSE': - assert data.get('pickup_library_name') - assert data.get('transaction_library_name') + data = record.get("metadata", {}) + if data.get("state") == "PENDING": + assert data.get("pickup_name") + elif data.get("state") == "ITEM_AT_DESK": + assert data.get("rank") == 0 + elif data.get("state") == "ITEM_ON_LOAN": + assert data.get("overdue") is False + elif data.get("state") == "ITEM_IN_TRANSIT_TO_HOUSE": + assert data.get("pickup_library_name") + assert data.get("transaction_library_name") def test_patron_transaction_events_serializers( - client, - rero_json_header, - librarian_saxon, - patron_transaction_overdue_event_saxon + client, rero_json_header, librarian_saxon, patron_transaction_overdue_event_saxon ): """Test serializers for patron transaction events.""" login_user(client, librarian_saxon) - list_url = url_for('invenio_records_rest.ptre_list') + list_url = url_for("invenio_records_rest.ptre_list") response = client.get(list_url, headers=rero_json_header) assert response.status_code == 200 data = get_json(response) - record = data.get('hits', {}).get('hits', [])[0] - assert record.get('metadata', {}).get('library', {}).get('name') + record = data.get("hits", {}).get("hits", [])[0] + assert record.get("metadata", {}).get("library", {}).get("name") def test_ill_requests_serializers( - client, - rero_json_header, - patron_martigny, - ill_request_martigny + client, rero_json_header, patron_martigny, ill_request_martigny ): """Test serializers for ILL requests.""" login_user(client, patron_martigny) - list_url = url_for('invenio_records_rest.illr_list') + list_url = url_for("invenio_records_rest.illr_list") response = client.get(list_url, headers=rero_json_header) assert response.status_code == 200 data = get_json(response) - record = data.get('hits', {}).get('hits', [])[0] - assert record.get('metadata', {}).get('pickup_location', {}).get('name') + record = data.get("hits", {}).get("hits", [])[0] + assert record.get("metadata", {}).get("pickup_location", {}).get("name") # ACQUISITIONS MODULES ======================================================== -@mock.patch('invenio_records_rest.views.verify_record_permission', - mock.MagicMock(return_value=VerifyRecordPermissionPatch)) +@mock.patch( +
"invenio_records_rest.views.verify_record_permission", + mock.MagicMock(return_value=VerifyRecordPermissionPatch), +) def test_budgets_serializers( client, rero_json_header, lib_martigny, budget_2020_martigny ): """Test serializers for budgets requests.""" budget = budget_2020_martigny - item_url = url_for('invenio_records_rest.budg_item', pid_value=budget.pid) + item_url = url_for("invenio_records_rest.budg_item", pid_value=budget.pid) response = client.get(item_url, headers=rero_json_header) assert response.status_code == 200 data = get_json(response) - for key in ['is_current_budget']: - assert key in data['metadata'] + for key in ["is_current_budget"]: + assert key in data["metadata"] -@mock.patch('invenio_records_rest.views.verify_record_permission', - mock.MagicMock(return_value=VerifyRecordPermissionPatch)) +@mock.patch( + "invenio_records_rest.views.verify_record_permission", + mock.MagicMock(return_value=VerifyRecordPermissionPatch), +) def test_acq_accounts_serializers( - client, rero_json_header, lib_martigny, budget_2020_martigny, - acq_account_fiction_martigny + client, + rero_json_header, + lib_martigny, + budget_2020_martigny, + acq_account_fiction_martigny, ): """Test serializers for acq_accounts requests.""" account = acq_account_fiction_martigny - item_url = url_for('invenio_records_rest.acac_item', pid_value=account.pid) + item_url = url_for("invenio_records_rest.acac_item", pid_value=account.pid) response = client.get(item_url, headers=rero_json_header) assert response.status_code == 200 data = get_json(response) - for key in ['depth', 'distribution', 'is_active', 'encumbrance_amount', - 'expenditure_amount', 'remaining_balance', - 'is_current_budget']: - assert key in data['metadata'] - - -@mock.patch('invenio_records_rest.views.verify_record_permission', - mock.MagicMock(return_value=VerifyRecordPermissionPatch)) + for key in [ + "depth", + "distribution", + "is_active", + "encumbrance_amount", + "expenditure_amount", + "remaining_balance", + "is_current_budget", + ]: + assert key in data["metadata"] + + +@mock.patch( + "invenio_records_rest.views.verify_record_permission", + mock.MagicMock(return_value=VerifyRecordPermissionPatch), +) def test_acq_orders_serializers( client, rero_json_header, acq_order_fiction_martigny, acq_account_fiction_martigny, acq_order_line_fiction_martigny, - lib_martigny + lib_martigny, ): """Test serializers for acq_orders/acq_order_lines requests.""" order = acq_order_fiction_martigny - item_url = url_for('invenio_records_rest.acor_item', pid_value=order.pid) + item_url = url_for("invenio_records_rest.acor_item", pid_value=order.pid) response = client.get(item_url, headers=rero_json_header) assert response.status_code == 200 data = get_json(response) - for attr in ['is_current_budget']: - assert attr in data['metadata'] + for attr in ["is_current_budget"]: + assert attr in data["metadata"] - url = url_for( - 'invenio_records_rest.acol_list', - q=f'acq_order.pid:{order.pid}' - ) + url = url_for("invenio_records_rest.acol_list", q=f"acq_order.pid:{order.pid}") response = client.get(url) assert response.status_code == 200 data = get_json(response) - acol_pid = data['hits']['hits'][0]['metadata']['pid'] - item_url = url_for('invenio_records_rest.acol_item', pid_value=acol_pid) + acol_pid = data["hits"]["hits"][0]["metadata"]["pid"] + item_url = url_for("invenio_records_rest.acol_item", pid_value=acol_pid) response = client.get(item_url, headers=rero_json_header) assert response.status_code == 200 data = get_json(response) - for attr in 
['is_current_budget']: - assert attr in data['metadata'] + for attr in ["is_current_budget"]: + assert attr in data["metadata"] -@mock.patch('invenio_records_rest.views.verify_record_permission', - mock.MagicMock(return_value=VerifyRecordPermissionPatch)) +@mock.patch( + "invenio_records_rest.views.verify_record_permission", + mock.MagicMock(return_value=VerifyRecordPermissionPatch), +) def test_acq_receipts_serializers( client, rero_json_header, acq_order_fiction_martigny, - acq_account_fiction_martigny, acq_receipt_fiction_martigny, - acq_receipt_line_1_fiction_martigny, acq_receipt_line_2_fiction_martigny, - lib_martigny + acq_account_fiction_martigny, + acq_receipt_fiction_martigny, + acq_receipt_line_1_fiction_martigny, + acq_receipt_line_2_fiction_martigny, + lib_martigny, ): """Test serializers for acq_receipts/acq_receipt_lines requests.""" acre = acq_receipt_fiction_martigny - item_url = url_for('invenio_records_rest.acre_item', pid_value=acre.pid) + item_url = url_for("invenio_records_rest.acre_item", pid_value=acre.pid) response = client.get(item_url, headers=rero_json_header) assert response.status_code == 200 data = get_json(response) - for attr in ['currency', 'quantity', 'total_amount', 'receipt_lines', - 'is_current_budget']: - assert attr in data['metadata'] + for attr in [ + "currency", + "quantity", + "total_amount", + "receipt_lines", + "is_current_budget", + ]: + assert attr in data["metadata"] list_url = url_for( - 'invenio_records_rest.acrl_list', - q=f'acq_receipt.pid:{acre.pid}' + "invenio_records_rest.acrl_list", q=f"acq_receipt.pid:{acre.pid}" ) response = client.get(list_url, headers=rero_json_header) assert response.status_code == 200 data = get_json(response) - for hit in data['hits']['hits']: - assert 'document' in hit['metadata'] + for hit in data["hits"]["hits"]: + assert "document" in hit["metadata"] - acrl_pid = data['hits']['hits'][0]['metadata']['pid'] - item_url = url_for('invenio_records_rest.acrl_item', pid_value=acrl_pid) + acrl_pid = data["hits"]["hits"][0]["metadata"]["pid"] + item_url = url_for("invenio_records_rest.acrl_item", pid_value=acrl_pid) response = client.get(item_url, headers=rero_json_header) assert response.status_code == 200 data = get_json(response) - for attr in ['is_current_budget']: - assert attr in data['metadata'] + for attr in ["is_current_budget"]: + assert attr in data["metadata"] acor = acq_order_fiction_martigny - url = url_for('invenio_records_rest.acor_list', q=f'pid:{acor.pid}') + url = url_for("invenio_records_rest.acor_list", q=f"pid:{acor.pid}") response = client.get(url, headers=rero_json_header) assert response.status_code == 200 data = get_json(response) - receipt_data_aggr = data.get('aggregations', {}).get('receipt_date', {}) - assert len(receipt_data_aggr.get('buckets', [])) - assert receipt_data_aggr.get('config', {}) + receipt_data_aggr = data.get("aggregations", {}).get("receipt_date", {}) + assert len(receipt_data_aggr.get("buckets", [])) + assert receipt_data_aggr.get("config", {}) -@mock.patch('invenio_records_rest.views.verify_record_permission', - mock.MagicMock(return_value=VerifyRecordPermissionPatch)) -def test_cached_serializers(client, rero_json_header, item_lib_martigny, - loc_public_martigny, loc_public_martigny_data): +@mock.patch( + "invenio_records_rest.views.verify_record_permission", + mock.MagicMock(return_value=VerifyRecordPermissionPatch), +) +def test_cached_serializers( + client, + rero_json_header, + item_lib_martigny, + loc_public_martigny, + loc_public_martigny_data, +): """Test 
cached serializers.""" # Ensure than cache used in some serializer is reset each time we request @@ -346,24 +380,25 @@ def test_cached_serializers(client, rero_json_header, item_lib_martigny, # STEP#1 : first items search serialization item = item_lib_martigny - list_url = url_for('invenio_records_rest.item_list', q=item.pid) + list_url = url_for("invenio_records_rest.item_list", q=item.pid) response = client.get(list_url, headers=rero_json_header) assert response.status_code == 200 # STEP#2 : update item location name location = loc_public_martigny - location['name'] = 'new location name' + location["name"] = "new location name" location = location.update(location, dbcommit=True, reindex=True) - flush_index(LocationsSearch.Meta.index) - assert LocationsSearch().get_record_by_pid(location.pid)['name'] == \ - location.get('name') + LocationsSearch.flush_and_refresh() + assert LocationsSearch().get_record_by_pid(location.pid)["name"] == location.get( + "name" + ) # STEP#3 : second items search serialization response = client.get(list_url, headers=rero_json_header) assert response.status_code == 200 data = get_json(response) - hit_metadata = data['hits']['hits'][0]['metadata'] - assert hit_metadata['location']['name'] == location.get('name') + hit_metadata = data["hits"]["hits"][0]["metadata"] + assert hit_metadata["location"]["name"] == location.get("name") # reset location to initial values location.update(loc_public_martigny_data, dbcommit=True, reindex=True) diff --git a/tests/api/test_tasks.py b/tests/api/test_tasks.py index d1fe2664ea..e392851e11 100644 --- a/tests/api/test_tasks.py +++ b/tests/api/test_tasks.py @@ -23,36 +23,49 @@ from freezegun import freeze_time from invenio_accounts.testutils import login_user_via_session from invenio_records.signals import after_record_update -from utils import flush_index, postdata +from utils import postdata from rero_ils.modules.items.api import Item -from rero_ils.modules.items.tasks import \ - clean_obsolete_temporary_item_types_and_locations +from rero_ils.modules.items.tasks import ( + clean_obsolete_temporary_item_types_and_locations, +) from rero_ils.modules.libraries.api import Library -from rero_ils.modules.loans.api import Loan, LoansSearch, get_due_soon_loans, \ - get_overdue_loans +from rero_ils.modules.loans.api import ( + Loan, + LoansSearch, + get_due_soon_loans, + get_overdue_loans, +) from rero_ils.modules.loans.models import LoanAction, LoanState from rero_ils.modules.loans.tasks import cancel_expired_request_task from rero_ils.modules.notifications.api import NotificationsSearch from rero_ils.modules.notifications.models import NotificationType from rero_ils.modules.notifications.tasks import create_notifications -from rero_ils.modules.notifications.utils import get_notification, \ - number_of_notifications_sent +from rero_ils.modules.notifications.utils import ( + get_notification, + number_of_notifications_sent, +) from rero_ils.modules.patrons.api import Patron -from rero_ils.modules.patrons.listener import \ - create_subscription_patron_transaction -from rero_ils.modules.patrons.tasks import \ - check_patron_types_and_add_subscriptions as \ - check_patron_types_and_add_subscriptions -from rero_ils.modules.patrons.tasks import clean_obsolete_subscriptions, \ - task_clear_and_renew_subscriptions +from rero_ils.modules.patrons.listener import create_subscription_patron_transaction +from rero_ils.modules.patrons.tasks import ( + check_patron_types_and_add_subscriptions as check_patron_types_and_add_subscriptions, +) +from 
rero_ils.modules.patrons.tasks import ( + clean_obsolete_subscriptions, + task_clear_and_renew_subscriptions, +) from rero_ils.modules.utils import add_years, get_ref_for_pid def test_notifications_task( - client, librarian_martigny, patron_martigny, - item_lib_martigny, circ_policy_short_martigny, - loc_public_martigny, lib_martigny): + client, + librarian_martigny, + patron_martigny, + item_lib_martigny, + circ_policy_short_martigny, + loc_public_martigny, + lib_martigny, +): """Test overdue and due_soon loans.""" login_user_via_session(client, librarian_martigny.user) item = item_lib_martigny @@ -61,16 +74,16 @@ def test_notifications_task( # First we need to create a checkout res, data = postdata( client, - 'api_item.checkout', + "api_item.checkout", dict( item_pid=item_pid, patron_pid=patron_pid, transaction_location_pid=loc_public_martigny.pid, transaction_user_pid=librarian_martigny.pid, - ) + ), ) assert res.status_code == 200 - loan_pid = data.get('action_applied')[LoanAction.CHECKOUT].get('pid') + loan_pid = data.get("action_applied")[LoanAction.CHECKOUT].get("pid") loan = Loan.get_record_by_pid(loan_pid) # test due_soon notification @@ -78,22 +91,19 @@ def test_notifications_task( # the task to create notification this loan should be considered as # due_soon and a notification should be created. end_date = datetime.now(timezone.utc) + timedelta(days=5) - loan['end_date'] = end_date.isoformat() + loan["end_date"] = end_date.isoformat() loan.update(loan, dbcommit=True, reindex=True) - flush_index(LoansSearch.Meta.index) + LoansSearch.flush_and_refresh() due_soon_loans = list(get_due_soon_loans()) - assert due_soon_loans[0].get('pid') == loan_pid - - create_notifications(types=[ - NotificationType.DUE_SOON, - NotificationType.OVERDUE - ]) - flush_index(NotificationsSearch.Meta.index) - flush_index(LoansSearch.Meta.index) + assert due_soon_loans[0].get("pid") == loan_pid + + create_notifications(types=[NotificationType.DUE_SOON, NotificationType.OVERDUE]) + NotificationsSearch.flush_and_refresh() + LoansSearch.flush_and_refresh() assert loan.is_notified(NotificationType.DUE_SOON) notif = get_notification(loan, NotificationType.DUE_SOON) - notif_date = ciso8601.parse_datetime(notif.get('creation_date')) + notif_date = ciso8601.parse_datetime(notif.get("creation_date")) assert notif_date.date() == datetime.today().date() # -- test overdue notification -- @@ -103,23 +113,23 @@ def test_notifications_task( # Friday end_date = datetime(year=2021, month=1, day=22, tzinfo=timezone.utc) - loan['end_date'] = end_date.isoformat() + loan["end_date"] = end_date.isoformat() loan.update(loan, dbcommit=True, reindex=True) # Process the notification during the weekend (Saturday) process_date = datetime(year=2021, month=1, day=23, tzinfo=timezone.utc) overdue_loans = list(get_overdue_loans(tstamp=process_date)) - assert overdue_loans[0].get('pid') == loan_pid - create_notifications(types=[ - NotificationType.OVERDUE - ], tstamp=process_date) - flush_index(NotificationsSearch.Meta.index) - flush_index(LoansSearch.Meta.index) + assert overdue_loans[0].get("pid") == loan_pid + create_notifications(types=[NotificationType.OVERDUE], tstamp=process_date) + NotificationsSearch.flush_and_refresh() + LoansSearch.flush_and_refresh() # Should not be created assert not loan.is_notified(NotificationType.OVERDUE, 1) # Should not be sent - assert number_of_notifications_sent( - loan, notification_type=NotificationType.OVERDUE) == 0 + assert ( + number_of_notifications_sent(loan, 
notification_type=NotificationType.OVERDUE) + == 0 + ) # For this test, we will update the loan to simulate an overdue of 12 # days. With this delay, regarding the cipo configuration, only the first @@ -136,67 +146,66 @@ def test_notifications_task( open_days = loan_lib.get_open_days(end_date) add_days += 1 - loan['end_date'] = end_date.isoformat() + loan["end_date"] = end_date.isoformat() loan.update(loan, dbcommit=True, reindex=True) overdue_loans = list(get_overdue_loans()) - assert overdue_loans[0].get('pid') == loan_pid - - create_notifications(types=[ - NotificationType.DUE_SOON, - NotificationType.OVERDUE - ]) - flush_index(NotificationsSearch.Meta.index) - flush_index(LoansSearch.Meta.index) + assert overdue_loans[0].get("pid") == loan_pid + + create_notifications(types=[NotificationType.DUE_SOON, NotificationType.OVERDUE]) + NotificationsSearch.flush_and_refresh() + LoansSearch.flush_and_refresh() assert loan.is_notified(NotificationType.OVERDUE, 0) - assert number_of_notifications_sent( - loan, notification_type=NotificationType.OVERDUE) == 1 + assert ( + number_of_notifications_sent(loan, notification_type=NotificationType.OVERDUE) + == 1 + ) # test overdue notification#2 # Now simulate that the previous call crashed. So call the task with a # fixed date. In our test, no new notifications should be sent - create_notifications(types=[ - NotificationType.DUE_SOON, - NotificationType.OVERDUE - ], tstamp=datetime.now(timezone.utc)) - assert number_of_notifications_sent( - loan, notification_type=NotificationType.OVERDUE) == 1 + create_notifications( + types=[NotificationType.DUE_SOON, NotificationType.OVERDUE], + tstamp=datetime.now(timezone.utc), + ) + assert ( + number_of_notifications_sent(loan, notification_type=NotificationType.OVERDUE) + == 1 + ) # test overdue notification#3 # For this test, we will update the loan to simulate an overdue of 40 # days. With this delay, regarding the cipo configuration, the second # (and last) overdue reminder should be sent.
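# --- Editor's aside (not part of the patch) --------------------------------
# The 12-day / 40-day scenarios in this test hinge on the overdue reminders
# configured by the circulation policy ("cipo") fixture. A hypothetical
# policy excerpt that would yield this behaviour -- the field names and
# values below are assumptions for illustration, not the actual
# `circ_policy_short_martigny` fixture:
sketch_cipo_reminders = [
    {"type": "due_soon", "days_delay": 5},  # sent shortly before the due date
    {"type": "overdue", "days_delay": 10},  # first reminder: a 12-day overdue reaches it
    {"type": "overdue", "days_delay": 30},  # second and last: only a 40-day overdue reaches it
]
# With such a setup, a 12-day overdue triggers only the first reminder and a
# 40-day overdue also triggers the second one, matching the assertions
# `loan.is_notified(NotificationType.OVERDUE, 0)` and `(..., 1)` around here.
# ----------------------------------------------------------------------------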
end_date = datetime.now(timezone.utc) - timedelta(days=40) - loan['end_date'] = end_date.isoformat() + loan["end_date"] = end_date.isoformat() loan.update(loan, dbcommit=True, reindex=True) overdue_loans = list(get_overdue_loans()) - assert overdue_loans[0].get('pid') == loan_pid - - create_notifications(types=[ - NotificationType.DUE_SOON, - NotificationType.OVERDUE - ]) - flush_index(NotificationsSearch.Meta.index) - flush_index(LoansSearch.Meta.index) + assert overdue_loans[0].get("pid") == loan_pid + + create_notifications(types=[NotificationType.DUE_SOON, NotificationType.OVERDUE]) + NotificationsSearch.flush_and_refresh() + LoansSearch.flush_and_refresh() assert loan.is_notified(NotificationType.OVERDUE, 1) - assert number_of_notifications_sent( - loan, notification_type=NotificationType.OVERDUE) == 2 + assert ( + number_of_notifications_sent(loan, notification_type=NotificationType.OVERDUE) + == 2 + ) # checkin the item to put it back to its original state res, _ = postdata( client, - 'api_item.checkin', + "api_item.checkin", dict( item_pid=item_pid, pid=loan_pid, transaction_location_pid=loc_public_martigny.pid, - transaction_user_pid=librarian_martigny.pid - ) + transaction_user_pid=librarian_martigny.pid, + ), ) assert res.status_code == 200 -def test_clear_and_renew_subscription(patron_type_grown_sion, - patron_sion): +def test_clear_and_renew_subscription(patron_type_grown_sion, patron_sion): """Test the task `patrons.tasks.clear_and_renew_subscription`.""" patron_sion = patron_sion @@ -209,20 +218,21 @@ def test_clear_and_renew_subscription(patron_type_grown_sion, # first step : clear all subscriptions for the patron and create a new # obsolete subscription. - if 'subscriptions' in patron_sion.get('patron', {}): - del patron_sion['patron']['subscriptions'] + if "subscriptions" in patron_sion.get("patron", {}): + del patron_sion["patron"]["subscriptions"] start = datetime.now() - timedelta(days=200) end = start + timedelta(days=100) patron_sion.add_subscription(patron_type_grown_sion, start, end) - assert len(patron_sion.get('patron', {}).get('subscriptions', [])) == 1 - assert patron_sion.get('patron', {})['subscriptions'][0]['end_date'] == \ - end.strftime('%Y-%m-%d') + assert len(patron_sion.get("patron", {}).get("subscriptions", [])) == 1 + assert patron_sion.get("patron", {})["subscriptions"][0][ + "end_date" + ] == end.strftime("%Y-%m-%d") # clean old subscriptions - Reload the patron and check there are no more # subscriptions clean_obsolete_subscriptions() patron_sion = Patron.get_record_by_pid(patron_sion.pid) - assert len(patron_sion.get('patron', {}).get('subscriptions', [])) == 0 + assert len(patron_sion.get("patron", {}).get("subscriptions", [])) == 0 # check for patron needed subscriptions and create a new subscription if # needed.
As our patron has no subscription and is still connected to @@ -230,22 +240,24 @@ def test_clear_and_renew_subscription(patron_type_grown_sion, # new subscription for this patron check_patron_types_and_add_subscriptions() patron_sion = Patron.get_record_by_pid(patron_sion.pid) - assert len(patron_sion.get('patron', {}).get('subscriptions', [])) == 1 - assert patron_sion.get('patron', {})['subscriptions'][0]['end_date'] == \ - add_years(datetime.now(), 1).strftime('%Y-%m-%d') + assert len(patron_sion.get("patron", {}).get("subscriptions", [])) == 1 + assert patron_sion.get("patron", {})["subscriptions"][0]["end_date"] == add_years( + datetime.now(), 1 + ).strftime("%Y-%m-%d") # run both operations using `task_clear_and_renew_subscriptions` and check # the result. The patron should still have one subscription but end_date # must be today. - del patron_sion['patron']['subscriptions'] + del patron_sion["patron"]["subscriptions"] start = datetime.now() - timedelta(days=200) end = start + timedelta(days=100) patron_sion.add_subscription(patron_type_grown_sion, start, end) task_clear_and_renew_subscriptions() patron_sion = Patron.get_record_by_pid(patron_sion.pid) - assert len(patron_sion.get('patron', {}).get('subscriptions', [])) == 1 - assert patron_sion.get('patron', {})['subscriptions'][0]['end_date'] != \ - end.strftime('%Y-%m-%d') + assert len(patron_sion.get("patron", {}).get("subscriptions", [])) == 1 + assert patron_sion.get("patron", {})["subscriptions"][0][ + "end_date" + ] != end.strftime("%Y-%m-%d") # as we disconnect the `create_subscription_patron_transaction` listener # at the beginning, we need to reconnect it now. @@ -253,55 +265,62 @@ def test_clear_and_renew_subscription(patron_type_grown_sion, def test_clear_obsolete_temporary_item_type_and_location( - item_lib_martigny, item_type_on_site_martigny, - loc_restricted_martigny, item2_lib_martigny): + item_lib_martigny, + item_type_on_site_martigny, + loc_restricted_martigny, + item2_lib_martigny, +): """Test task clean_obsolete_temporary_item_types_and_locations.""" item = item_lib_martigny end_date = datetime.now() + timedelta(days=2) - item['temporary_item_type'] = { - '$ref': get_ref_for_pid('itty', item_type_on_site_martigny.pid), - 'end_date': end_date.strftime('%Y-%m-%d') + item["temporary_item_type"] = { + "$ref": get_ref_for_pid("itty", item_type_on_site_martigny.pid), + "end_date": end_date.strftime("%Y-%m-%d"), } - item['temporary_location'] = { - '$ref': get_ref_for_pid('loc', loc_restricted_martigny.pid), - 'end_date': end_date.strftime('%Y-%m-%d') + item["temporary_location"] = { + "$ref": get_ref_for_pid("loc", loc_restricted_martigny.pid), + "end_date": end_date.strftime("%Y-%m-%d"), } item = item.update(item, dbcommit=True, reindex=True) - assert item.get('temporary_item_type', {}).get('end_date') - assert item.get('temporary_location', {}).get('end_date') + assert item.get("temporary_item_type", {}).get("end_date") + assert item.get("temporary_location", {}).get("end_date") end_date = datetime.now() + timedelta(days=25) - item2_lib_martigny['temporary_item_type'] = { - '$ref': get_ref_for_pid('itty', item_type_on_site_martigny.pid), - 'end_date': end_date.strftime('%Y-%m-%d') + item2_lib_martigny["temporary_item_type"] = { + "$ref": get_ref_for_pid("itty", item_type_on_site_martigny.pid), + "end_date": end_date.strftime("%Y-%m-%d"), } - item2_lib_martigny['temporary_location'] = { - '$ref': get_ref_for_pid('loc', loc_restricted_martigny.pid), - 'end_date': end_date.strftime('%Y-%m-%d') +
item2_lib_martigny["temporary_location"] = { + "$ref": get_ref_for_pid("loc", loc_restricted_martigny.pid), + "end_date": end_date.strftime("%Y-%m-%d"), } item2_lib_martigny = item2_lib_martigny.update( - item2_lib_martigny, dbcommit=True, reindex=True) - assert item2_lib_martigny.get('temporary_item_type', {}).get('end_date') - assert item2_lib_martigny.get('temporary_location', {}).get('end_date') + item2_lib_martigny, dbcommit=True, reindex=True + ) + assert item2_lib_martigny.get("temporary_item_type", {}).get("end_date") + assert item2_lib_martigny.get("temporary_location", {}).get("end_date") over_4_days = datetime.now() + timedelta(days=4) - with freeze_time(over_4_days.strftime('%Y-%m-%d')): + with freeze_time(over_4_days.strftime("%Y-%m-%d")): items = Item.get_items_with_obsolete_temporary_item_type_or_location() assert len(list(items)) # run the tasks msg = clean_obsolete_temporary_item_types_and_locations() - assert msg['deleted fields'] == 2 + assert msg["deleted fields"] == 2 # check after task was ran items = Item.get_items_with_obsolete_temporary_item_type_or_location() assert len(list(items)) == 0 item = Item.get_record_by_pid(item.pid) - assert not item.get('temporary_item_type') - assert not item.get('temporary_location') + assert not item.get("temporary_item_type") + assert not item.get("temporary_location") def test_expired_request_task( - item_on_shelf_martigny_patron_and_loan_pending, yesterday, - loc_public_martigny, patron2_martigny, librarian_martigny + item_on_shelf_martigny_patron_and_loan_pending, + yesterday, + loc_public_martigny, + patron2_martigny, + librarian_martigny, ): """Test the task cancelling the expired request.""" # STEP#0 :: CREATE TWO REQUEST @@ -309,9 +328,9 @@ def test_expired_request_task( # * Create a second request for an other patron. item, patron, loan = item_on_shelf_martigny_patron_and_loan_pending params = { - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid, - 'pid': loan.pid + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, + "pid": loan.pid, } item, _ = item.validate_request(**params) loan = Loan.get_record_by_pid(loan.pid) @@ -319,17 +338,17 @@ def test_expired_request_task( pickup_location_pid=loc_public_martigny.pid, patron_pid=patron2_martigny.pid, transaction_location_pid=loc_public_martigny.pid, - transaction_user_pid=librarian_martigny.pid + transaction_user_pid=librarian_martigny.pid, ) - assert 'request' in actions - loan2 = Loan.get_record_by_pid(actions['request']['pid']) + assert "request" in actions + loan2 = Loan.get_record_by_pid(actions["request"]["pid"]) # STEP#1 :: SET FIRST REQUEST AS EXPIRED # Update the first request (Loan) to set it as expired updating its # `request_expired_date` attribute. 
- loan['request_expire_date'] = yesterday.isoformat() + loan["request_expire_date"] = yesterday.isoformat() loan.update(loan, dbcommit=True, reindex=True) - flush_index(LoansSearch.Meta.index) + LoansSearch.flush_and_refresh() loan = LoansSearch().get_record_by_pid(loan.pid) # STEP#2 :: RUN THE TASK @@ -340,5 +359,5 @@ assert task_result == (1, 1) loan = Loan.get_record_by_pid(loan.pid) loan2 = Loan.get_record_by_pid(loan2.pid) - assert loan['state'] == LoanState.CANCELLED - assert loan2['state'] == LoanState.ITEM_AT_DESK + assert loan["state"] == LoanState.CANCELLED + assert loan2["state"] == LoanState.ITEM_AT_DESK diff --git a/tests/api/test_translations.py b/tests/api/test_translations.py index bf0c57b091..ce02800d4c 100644 --- a/tests/api/test_translations.py +++ b/tests/api/test_translations.py @@ -23,13 +23,8 @@ def test_translations(client, app): """Test translations API.""" - for ln in app.extensions.get('invenio-i18n').get_languages(): - res = client.get( - url_for( - 'api_blueprint.translations', - ln=ln[0] - ) - ) + for ln in app.extensions.get("invenio-i18n").get_languages(): + res = client.get(url_for("api_blueprint.translations", ln=ln[0])) assert res.status_code == 200 assert len(get_json(res)) > 0 @@ -45,18 +40,8 @@ def test_translations_exceptions(client, app): # ... class FakeDomain(object): # ... paths = [] # - res = client.get( - url_for( - 'api_blueprint.translations', - ln='dummy_language' - ) - ) + res = client.get(url_for("api_blueprint.translations", ln="dummy_language")) assert res.status_code == 404 - res = client.get( - url_for( - 'api_blueprint.translations', - ln='doesnotexists' - ) - ) + res = client.get(url_for("api_blueprint.translations", ln="doesnotexists")) assert res.status_code == 404 diff --git a/tests/api/test_user_authentication.py b/tests/api/test_user_authentication.py index b6cbb5d2f4..9bb3743c08 100644 --- a/tests/api/test_user_authentication.py +++ b/tests/api/test_user_authentication.py @@ -31,87 +31,78 @@ def test_login(client, patron_sion, default_user_password): # user does not exist res, _ = postdata( client, - 'invenio_accounts_rest_auth.login', - { - 'email': 'not@exist.com', - 'password': '' - } + "invenio_accounts_rest_auth.login", + {"email": "not@exist.com", "password": ""}, ) assert res.status_code == 400 data = get_json(res) - assert data['message'] == 'Invalid user or password' + assert data["message"] == "Invalid user or password" # wrong password res, _ = postdata( client, - 'invenio_accounts_rest_auth.login', - { - 'email': patron_sion.get('email'), - 'password': 'bad' - } + "invenio_accounts_rest_auth.login", + {"email": patron_sion.get("email"), "password": "bad"}, ) assert res.status_code == 400 data = get_json(res) - assert data['message'] == gettext('INVALID_USER_OR_PASSWORD') + assert data["message"] == gettext("INVALID_USER_OR_PASSWORD") # login by email res, _ = postdata( client, - 'invenio_accounts_rest_auth.login', - { - 'email': patron_sion.get('email'), - 'password': default_user_password - } + "invenio_accounts_rest_auth.login", + {"email": patron_sion.get("email"), "password": default_user_password}, ) data = get_json(res) assert res.status_code == 200 - assert data.get('id') + assert data.get("id") # logout for the next test - client.post(url_for('invenio_accounts_rest_auth.logout')) + client.post(url_for("invenio_accounts_rest_auth.logout")) # login by username res, _ = postdata( client, - 'invenio_accounts_rest_auth.login', - { - 'email': patron_sion.get('username'), -
'password': default_user_password - } + "invenio_accounts_rest_auth.login", + {"email": patron_sion.get("username"), "password": default_user_password}, ) data = get_json(res) assert res.status_code == 200 - assert data.get('id') + assert data.get("id") # logout for the next test - client.post(url_for('invenio_accounts_rest_auth.logout')) + client.post(url_for("invenio_accounts_rest_auth.logout")) -def test_login_without_email(client, patron_sion_without_email1, - default_user_password): +def test_login_without_email(client, patron_sion_without_email1, default_user_password): """Test login without email.""" patron_sion_without_email1 = patron_sion_without_email1.dumps() # login by username without email res, _ = postdata( client, - 'invenio_accounts_rest_auth.login', + "invenio_accounts_rest_auth.login", { - 'email': patron_sion_without_email1.get('username'), - 'password': default_user_password - } + "email": patron_sion_without_email1.get("username"), + "password": default_user_password, + }, ) assert res.status_code == 200 data = get_json(res) - assert data.get('id') + assert data.get("id") # logout for the next test - client.post(url_for('invenio_accounts_rest_auth.logout')) + client.post(url_for("invenio_accounts_rest_auth.logout")) -def test_change_password(client, app, patron_martigny, - librarian_sion, - librarian_martigny, - default_user_password): +def test_change_password( + client, + app, + patron_martigny, + librarian_sion, + librarian_martigny, + default_user_password, +): """Test password change with several scenarios.""" # Fix the minimum password length - app.config['RERO_ILS_PASSWORD_MIN_LENGTH'] = 8 + app.config["RERO_ILS_PASSWORD_MIN_LENGTH"] = 8 p_martigny = patron_martigny patron_martigny = patron_martigny.dumps() l_sion = librarian_sion @@ -119,12 +110,12 @@ def test_change_password(client, app, patron_martigny, # try to change password with an anonymous user res, _ = postdata( client, - 'invenio_accounts_rest_auth.change_password', + "invenio_accounts_rest_auth.change_password", { - 'password': default_user_password, - 'new_password': default_user_password, - 'new_password_confirm': default_user_password - } + "password": default_user_password, + "new_password": default_user_password, + "new_password_confirm": default_user_password, + }, ) data = get_json(res) assert res.status_code == 401 @@ -133,58 +124,60 @@ def test_change_password(client, app, patron_martigny, login_user_via_session(client, p_martigny.user) res, _ = postdata( client, - 'invenio_accounts_rest_auth.change_password', + "invenio_accounts_rest_auth.change_password", { - 'password': default_user_password, - 'new_password': '123456', - 'new_password_confirm': '123456' - } + "password": default_user_password, + "new_password": "123456", + "new_password_confirm": "123456", + }, ) data = get_json(res) assert res.status_code == 400 - assert data.get('message') == 'Validation error.' - assert data.get('errors')[0].get('message') == \ - gettext('Field must be at least 8 characters long.') + assert data.get("message") == "Validation error." + assert data.get("errors")[0].get("message") == gettext( + "Field must be at least 8 characters long."
+ ) # with a logged user res, _ = postdata( client, - 'invenio_accounts_rest_auth.change_password', + "invenio_accounts_rest_auth.change_password", { - 'password': default_user_password, - 'new_password': 'Pw123458', - 'new_password_confirm': 'Pw123455' - } + "password": default_user_password, + "new_password": "Pw123458", + "new_password_confirm": "Pw123455", + }, ) data = get_json(res) assert res.status_code == 400 - assert data.get('message') == 'Validation error.' - assert data.get('errors')[0].get('message') == \ - gettext('The 2 passwords are not identical.') + assert data.get("message") == "Validation error." + assert data.get("errors")[0].get("message") == gettext( + "The 2 passwords are not identical." + ) res, _ = postdata( client, - 'invenio_accounts_rest_auth.change_password', + "invenio_accounts_rest_auth.change_password", { - 'password': default_user_password, - 'new_password': 'Pw123458', - 'new_password_confirm': 'Pw123458' - } + "password": default_user_password, + "new_password": "Pw123458", + "new_password_confirm": "Pw123458", + }, ) data = get_json(res) assert res.status_code == 200 - assert data.get('message') == 'You successfully changed your password.' + assert data.get("message") == "You successfully changed your password." # with a librarian of a different organisation login_user_via_session(client, l_sion.user) res, _ = postdata( client, - 'invenio_accounts_rest_auth.change_password', + "invenio_accounts_rest_auth.change_password", { - 'username': patron_martigny.get('username'), - 'new_password': 'Pw123458', - 'new_password_confirm': 'Pw123458' - } + "username": patron_martigny.get("username"), + "new_password": "Pw123458", + "new_password_confirm": "Pw123458", + }, ) data = get_json(res) assert res.status_code == 401 @@ -194,43 +187,40 @@ def test_change_password(client, app, patron_martigny, res, _ = postdata( client, - 'invenio_accounts_rest_auth.change_password', + "invenio_accounts_rest_auth.change_password", { - 'username': patron_martigny.get('username'), - 'new_password': 'Pw123458', - 'new_password_confirm': 'Pw123455' - } + "username": patron_martigny.get("username"), + "new_password": "Pw123458", + "new_password_confirm": "Pw123455", + }, ) data = get_json(res) assert res.status_code == 400 - assert data.get('message') == 'Validation error.' - assert data.get('errors')[0].get('message') == \ - gettext('The 2 passwords are not identical.') + assert data.get("message") == "Validation error." + assert data.get("errors")[0].get("message") == gettext( + "The 2 passwords are not identical." + ) res, _ = postdata( client, - 'invenio_accounts_rest_auth.change_password', + "invenio_accounts_rest_auth.change_password", { - 'username': patron_martigny.get('username'), - 'new_password': 'Pw123458', - 'new_password_confirm': 'Pw123458' - } + "username": patron_martigny.get("username"), + "new_password": "Pw123458", + "new_password_confirm": "Pw123458", + }, ) data = get_json(res) assert res.status_code == 200 - assert data.get('message') == 'You successfully changed your password.' + assert data.get("message") == "You successfully changed your password." 
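# --- Editor's aside (not part of the patch) --------------------------------
# `postdata(client, endpoint, payload)`, used in every call above, is the
# local test helper imported from tests/utils.py: it resolves the endpoint
# with `url_for`, POSTs the payload as JSON and returns the response together
# with its decoded body. A sketch consistent with its use here (the actual
# helper may differ in detail):

import json

from flask import url_for


def sketch_postdata(client, endpoint, data=None, **url_kwargs):
    """POST `data` as JSON to `endpoint`; return (response, decoded body)."""
    res = client.post(
        url_for(endpoint, **url_kwargs),
        data=json.dumps(data or {}),
        content_type="application/json",
    )
    return res, json.loads(res.get_data(as_text=True))
# ----------------------------------------------------------------------------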
# logout for the next test - client.post(url_for('invenio_accounts_rest_auth.logout')) + client.post(url_for("invenio_accounts_rest_auth.logout")) def test_patron_reset_notice(patron_martigny, mailbox): """Test password reset notice template.""" send_password_reset_notice(patron_martigny.user) assert len(mailbox) == 1 - assert re.search( - r'Your password has been successfully reset.', mailbox[0].body - ) - assert re.search( - r'Best regards', mailbox[0].body - ) + assert re.search(r"Your password has been successfully reset.", mailbox[0].body) + assert re.search(r"Best regards", mailbox[0].body) diff --git a/tests/api/users/test_user_api.py b/tests/api/users/test_user_api.py index 46b92f8dbf..0c3b406323 100644 --- a/tests/api/users/test_user_api.py +++ b/tests/api/users/test_user_api.py @@ -26,82 +26,100 @@ def test_generate_password(client, app, librarian_martigny): """Test entrypoint generate password.""" - app.config['RERO_ILS_PASSWORD_MIN_LENGTH'] = 8 - app.config['RERO_ILS_PASSWORD_SPECIAL_CHAR'] = False + app.config["RERO_ILS_PASSWORD_MIN_LENGTH"] = 8 + app.config["RERO_ILS_PASSWORD_SPECIAL_CHAR"] = False # Not Logged - res = client.get(url_for('api_user.password_generate')) + res = client.get(url_for("api_user.password_generate")) assert res.status_code == 401 # Logged as librarian login_user_via_session(client, librarian_martigny.user) - res = client.get(url_for('api_user.password_generate', length=6)) + res = client.get(url_for("api_user.password_generate", length=6)) assert res.status_code == 400 - assert get_json(res)['message'] \ - .find('The password must be at least 8 characters long.') != -1 - - res = client.get(url_for('api_user.password_generate')) + assert ( + get_json(res)["message"].find( + "The password must be at least 8 characters long." 
+ ) + != -1 + ) + + res = client.get(url_for("api_user.password_generate")) assert res.status_code == 200 assert len(res.get_data(as_text=True)) == 8 - res = client.get(url_for('api_user.password_generate', length=12)) + res = client.get(url_for("api_user.password_generate", length=12)) assert res.status_code == 200 assert len(res.get_data(as_text=True)) == 12 - app.config['RERO_ILS_PASSWORD_SPECIAL_CHAR'] = True - res = client.get(url_for('api_user.password_generate')) + app.config["RERO_ILS_PASSWORD_SPECIAL_CHAR"] = True + res = client.get(url_for("api_user.password_generate")) assert res.status_code == 200 assert set(string.punctuation).intersection(res.get_data(as_text=True)) - res = client.get(url_for('api_user.password_generate', length=2)) + res = client.get(url_for("api_user.password_generate", length=2)) assert res.status_code == 400 def test_validate_password(client, app): """Test entrypoint validate password.""" - app.config['RERO_ILS_PASSWORD_MIN_LENGTH'] = 8 - app.config['RERO_ILS_PASSWORD_SPECIAL_CHAR'] = False + app.config["RERO_ILS_PASSWORD_MIN_LENGTH"] = 8 + app.config["RERO_ILS_PASSWORD_SPECIAL_CHAR"] = False - res = client.post(url_for('api_user.password_validate'), json={}) - assert get_json(res)['message'] \ - .find('The password must be filled in') != -1 + res = client.post(url_for("api_user.password_validate"), json={}) + assert get_json(res)["message"].find("The password must be filled in") != -1 assert res.status_code == 400 - res = client.post( - url_for('api_user.password_validate'), json={'password': 'foo'}) - assert get_json(res)['message'] \ - .find('Field must be at least 8 characters long.') != -1 + res = client.post(url_for("api_user.password_validate"), json={"password": "foo"}) + assert ( + get_json(res)["message"].find("Field must be at least 8 characters long.") != -1 + ) assert res.status_code == 400 res = client.post( - url_for('api_user.password_validate'), json={'password': '12345678'}) - assert get_json(res)['message'] \ - .find('The password must contain a lower case character.') != -1 + url_for("api_user.password_validate"), json={"password": "12345678"} + ) + assert ( + get_json(res)["message"].find( + "The password must contain a lower case character." + ) + != -1 + ) assert res.status_code == 400 res = client.post( - url_for('api_user.password_validate'), json={'password': 'a2345678'}) - assert get_json(res)['message'] \ - .find('The password must contain a upper case character.') != -1 + url_for("api_user.password_validate"), json={"password": "a2345678"} + ) + assert ( + get_json(res)["message"].find( + "The password must contain a upper case character." 
+ ) + != -1 + ) assert res.status_code == 400 res = client.post( - url_for('api_user.password_validate'), json={'password': 'aaaaPPPP'}) - assert get_json(res)['message'] \ - .find('The password must contain a number.') != -1 + url_for("api_user.password_validate"), json={"password": "aaaaPPPP"} + ) + assert get_json(res)["message"].find("The password must contain a number.") != -1 assert res.status_code == 400 res = client.post( - url_for('api_user.password_validate'), json={'password': 'FooBar123'}) + url_for("api_user.password_validate"), json={"password": "FooBar123"} + ) assert res.status_code == 200 - app.config['RERO_ILS_PASSWORD_SPECIAL_CHAR'] = True + app.config["RERO_ILS_PASSWORD_SPECIAL_CHAR"] = True res = client.post( - url_for('api_user.password_validate'), json={'password': 'FooBar123'}) - assert get_json(res)['message'] \ - .find('The password must contain a special character.') != -1 + url_for("api_user.password_validate"), json={"password": "FooBar123"} + ) + assert ( + get_json(res)["message"].find("The password must contain a special character.") + != -1 + ) assert res.status_code == 400 res = client.post( - url_for('api_user.password_validate'), json={'password': 'FooBar123$'}) + url_for("api_user.password_validate"), json={"password": "FooBar123$"} + ) assert res.status_code == 200 diff --git a/tests/api/users/test_users_profile_updates.py b/tests/api/users/test_users_profile_updates.py index a9724c654c..5b50d78c17 100644 --- a/tests/api/users/test_users_profile_updates.py +++ b/tests/api/users/test_users_profile_updates.py @@ -30,54 +30,53 @@ def test_user_profile_updates( - client, patron_martigny, system_librarian_martigny, json_header, - mailbox): + client, patron_martigny, system_librarian_martigny, json_header, mailbox +): """Test users profile updates.""" # if neither email nor username is provided, the request should fail login_user_via_session(client, patron_martigny.user) user_metadata = User.get_record(patron_martigny.user.id).dumps_metadata() - user_metadata.pop('email', None) - user_metadata.pop('username', None) + user_metadata.pop("email", None) + user_metadata.pop("username", None) res = client.put( - url_for('api_users.users_item', id=patron_martigny.user.id), + url_for("api_users.users_item", id=patron_martigny.user.id), data=json.dumps(user_metadata), - headers=json_header + headers=json_header, ) assert res.status_code == 400 assert not (len(mailbox)) # login with a patron that has only the patron role; this means we are logging # into the public interface - assert patron_martigny.patron['communication_channel'] == \ - CommunicationChannel.MAIL + assert patron_martigny.patron["communication_channel"] == CommunicationChannel.MAIL login_user_via_session(client, patron_martigny.user) # mailbox is empty assert not (len(mailbox)) user_metadata = User.get_record(patron_martigny.user.id).dumps_metadata() # changing the email to another one does not send any reset_password # notification - user_metadata['email'] = 'toto@toto.com' + user_metadata["email"] = "toto@toto.com" res = client.put( - url_for('api_users.users_item', id=patron_martigny.user.id), + url_for("api_users.users_item", id=patron_martigny.user.id), data=json.dumps(user_metadata), - headers=json_header + headers=json_header, ) assert res.status_code == 200 assert not (len(mailbox)) patron_martigny = Patron.get_record_by_pid(patron_martigny.pid) # an email was added to the patron, communication_channel will change # automatically to email - assert patron_martigny.patron.get('communication_channel') == \ -
CommunicationChannel.EMAIL + assert ( + patron_martigny.patron.get("communication_channel") + == CommunicationChannel.EMAIL + ) # removing the email from profile does not send any reset_password # notification - user_metadata.pop('email', None) + user_metadata.pop("email", None) res = client.put( - url_for( - 'api_users.users_item', - id=patron_martigny.user.id), + url_for("api_users.users_item", id=patron_martigny.user.id), data=json.dumps(user_metadata), - headers=json_header + headers=json_header, ) assert res.status_code == 200 assert not (len(mailbox)) @@ -85,69 +84,68 @@ def test_user_profile_updates( # automatically if the user has no email configured and the patron has no # additional_communication_email configured patron_martigny = Patron.get_record_by_pid(patron_martigny.pid) - assert patron_martigny.patron.get('communication_channel') == \ - CommunicationChannel.MAIL + assert ( + patron_martigny.patron.get("communication_channel") == CommunicationChannel.MAIL + ) # login as a system_librarian; this means we are logging into the # professional interface login_user_via_session(client, system_librarian_martigny.user) # adding an email to a profile does not send any reset_password # notification - user_metadata['email'] = 'toto@toto.com' + user_metadata["email"] = "toto@toto.com" res = client.put( - url_for( - 'api_users.users_item', - id=patron_martigny.user.id), + url_for("api_users.users_item", id=patron_martigny.user.id), data=json.dumps(user_metadata), - headers=json_header + headers=json_header, ) assert res.status_code == 200 assert not (len(mailbox)) # removing the email from profile does not send any reset_password # notification - user_metadata.pop('email', None) + user_metadata.pop("email", None) res = client.put( - url_for( - 'api_users.users_item', - id=patron_martigny.user.id), + url_for("api_users.users_item", id=patron_martigny.user.id), data=json.dumps(user_metadata), - headers=json_header + headers=json_header, ) assert res.status_code == 200 assert not (len(mailbox)) patron_martigny = Patron.get_record_by_pid(patron_martigny.pid) - assert patron_martigny.patron.get('communication_channel') == \ - CommunicationChannel.MAIL + assert ( + patron_martigny.patron.get("communication_channel") == CommunicationChannel.MAIL + ) def test_user_birthdate( - client, patron_martigny, system_librarian_martigny, json_header): + client, patron_martigny, system_librarian_martigny, json_header +): """Test user birth_date.""" login_user_via_session(client, system_librarian_martigny.user) user_metadata = User.get_record(patron_martigny.user.id).dumps_metadata() # Invalid date of birth - user_metadata['birth_date'] = '0070-01-01' + user_metadata["birth_date"] = "0070-01-01" res = client.put( - url_for('api_users.users_item', id=patron_martigny.user.id), + url_for("api_users.users_item", id=patron_martigny.user.id), data=json.dumps(user_metadata), - headers=json_header + headers=json_header, ) assert res.status_code == 400 # Valid date of birth - user_metadata['birth_date'] = '1970-01-01' + user_metadata["birth_date"] = "1970-01-01" res = client.put( - url_for('api_users.users_item', id=patron_martigny.user.id), + url_for("api_users.users_item", id=patron_martigny.user.id), data=json.dumps(user_metadata), - headers=json_header + headers=json_header, ) assert res.status_code == 200 - user_metadata['birth_date'] = '2001-01-01' + user_metadata["birth_date"] = "2001-01-01" res = client.put( - url_for('api_users.users_item', id=patron_martigny.user.id), + url_for("api_users.users_item",
id=patron_martigny.user.id), data=json.dumps(user_metadata), - headers=json_header + headers=json_header, ) assert res.status_code == 200 diff --git a/tests/api/users/test_users_rest.py b/tests/api/users/test_users_rest.py index 99e4165cf0..febdbb81f9 100644 --- a/tests/api/users/test_users_rest.py +++ b/tests/api/users/test_users_rest.py @@ -26,265 +26,191 @@ from utils import get_json, postdata -def test_users_post_put(client, user_data_tmp, librarian_martigny, - json_header, default_user_password): +def test_users_post_put( + client, user_data_tmp, librarian_martigny, json_header, default_user_password +): """Test users REST api for retrieve, create and update.""" - first_name = user_data_tmp.get('first_name') + first_name = user_data_tmp.get("first_name") # test unauthorized create - user_data_tmp['toto'] = 'toto' - res, data = postdata( - client, - 'api_users.users_list', - user_data_tmp - ) + user_data_tmp["toto"] = "toto" + res, data = postdata(client, "api_users.users_list", user_data_tmp) assert res.status_code == 401 login_user_via_session(client, librarian_martigny.user) # test with invalid password - user_data_tmp['first_name'] = 1 - user_data_tmp['password'] = '12345' - res, data = postdata( - client, - 'api_users.users_list', - user_data_tmp - ) + user_data_tmp["first_name"] = 1 + user_data_tmp["password"] = "12345" + res, data = postdata(client, "api_users.users_list", user_data_tmp) assert res.status_code == 400 # test with invalid first_name - user_data_tmp['first_name'] = 1 - user_data_tmp['password'] = default_user_password - res, data = postdata( - client, - 'api_users.users_list', - user_data_tmp - ) + user_data_tmp["first_name"] = 1 + user_data_tmp["password"] = default_user_password + res, data = postdata(client, "api_users.users_list", user_data_tmp) assert res.status_code == 400 # test valid create - user_data_tmp['first_name'] = first_name - res, data = postdata( - client, - 'api_users.users_list', - user_data_tmp - ) + user_data_tmp["first_name"] = first_name + res, data = postdata(client, "api_users.users_list", user_data_tmp) assert res.status_code == 200 user = get_json(res) - assert user['id'] == 2 - assert user['metadata']['first_name'] == user_data_tmp.get('first_name') + assert user["id"] == 2 + assert user["metadata"]["first_name"] == user_data_tmp.get("first_name") # test get - res = client.get( - url_for( - 'api_users.users_item', - id=2 - ) - ) + res = client.get(url_for("api_users.users_item", id=2)) assert res.status_code == 200 user = get_json(res) - assert user['id'] == 2 - assert user['metadata']['first_name'] == user_data_tmp.get('first_name') + assert user["id"] == 2 + assert user["metadata"]["first_name"] == user_data_tmp.get("first_name") # test valid update - user_data_tmp['first_name'] = 'Johnny' + user_data_tmp["first_name"] = "Johnny" res = client.put( - url_for( - 'api_users.users_item', - id=2), + url_for("api_users.users_item", id=2), data=json.dumps(user_data_tmp), - headers=json_header + headers=json_header, ) assert res.status_code == 200 user = get_json(res) - assert user['id'] == 2 - assert user['metadata']['first_name'] == 'Johnny' + assert user["id"] == 2 + assert user["metadata"]["first_name"] == "Johnny" # test invalid update - user_data_tmp['first_name'] = 1 + user_data_tmp["first_name"] = 1 res = client.put( - url_for( - 'api_users.users_item', - id=2), + url_for("api_users.users_item", id=2), data=json.dumps(user_data_tmp), - headers=json_header + headers=json_header, ) assert res.status_code == 400 # test invalid password - 
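# --- Editor's aside (not part of the patch) --------------------------------
# The password rules exercised by these user API tests (minimum length from
# RERO_ILS_PASSWORD_MIN_LENGTH, at least one lower-case letter, one
# upper-case letter and one digit, plus a special character when
# RERO_ILS_PASSWORD_SPECIAL_CHAR is set) can be summarised by a validator
# along these lines. This is an illustrative re-implementation reusing the
# messages asserted earlier, not the rero_ils code:

import re
import string


def sketch_validate_password(password, min_length=8, special_char=False):
    """Raise ValueError when `password` violates the rules listed above."""
    if len(password) < min_length:
        raise ValueError(f"Field must be at least {min_length} characters long.")
    if not re.search(r"[a-z]", password):
        raise ValueError("The password must contain a lower case character.")
    if not re.search(r"[A-Z]", password):
        raise ValueError("The password must contain a upper case character.")
    if not re.search(r"\d", password):
        raise ValueError("The password must contain a number.")
    if special_char and not set(string.punctuation) & set(password):
        raise ValueError("The password must contain a special character.")
# ----------------------------------------------------------------------------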
user_data_tmp['first_name'] = 'Johnny' - user_data_tmp['password'] = '1234' + user_data_tmp["first_name"] = "Johnny" + user_data_tmp["password"] = "1234" res = client.put( - url_for( - 'api_users.users_item', - id=2), + url_for("api_users.users_item", id=2), data=json.dumps(user_data_tmp), - headers=json_header + headers=json_header, ) assert res.status_code == 400 # test valid password - user_data_tmp['password'] = 'Pw123456' + user_data_tmp["password"] = "Pw123456" res = client.put( - url_for( - 'api_users.users_item', - id=2), + url_for("api_users.users_item", id=2), data=json.dumps(user_data_tmp), - headers=json_header + headers=json_header, ) assert res.status_code == 200 def test_users_search_api( - client, librarian_martigny, patron_martigny, user_without_profile): + client, librarian_martigny, patron_martigny, user_without_profile +): """Test users search REST API.""" l_martigny = librarian_martigny librarian_martigny = librarian_martigny.dumps() p_martigny = patron_martigny patron_martigny = patron_martigny.dumps() - res = client.get( - url_for( - 'api_users.users_list', - q='' - ) - ) + res = client.get(url_for("api_users.users_list", q="")) assert res.status_code == 401 login_user_via_session(client, l_martigny.user) # empty query => no result - res = client.get( - url_for( - 'api_users.users_list' - ) - ) + res = client.get(url_for("api_users.users_list")) assert res.status_code == 200 hits = get_json(res) - assert hits['hits']['hits'] == [] - assert hits['hits']['total']['value'] == 0 + assert hits["hits"]["hits"] == [] + assert hits["hits"]["total"]["value"] == 0 - res = client.get( - url_for( - 'api_users.users_list', - q='' - ) - ) + res = client.get(url_for("api_users.users_list", q="")) assert res.status_code == 200 hits = get_json(res) - assert hits['hits']['hits'] == [] - assert hits['hits']['total']['value'] == 0 + assert hits["hits"]["hits"] == [] + assert hits["hits"]["total"]["value"] == 0 # all by username - res = client.get( - url_for( - 'api_users.users_list', - q=patron_martigny['username'] - ) - ) + res = client.get(url_for("api_users.users_list", q=patron_martigny["username"])) assert res.status_code == 200 hits = get_json(res) - assert hits['hits']['hits'][0]['metadata']['username'] == \ - patron_martigny['username'] - assert hits['hits']['total']['value'] == 1 + assert ( + hits["hits"]["hits"][0]["metadata"]["username"] == patron_martigny["username"] + ) + assert hits["hits"]["total"]["value"] == 1 # all by email - res = client.get( - url_for( - 'api_users.users_list', - q=patron_martigny['email'] - ) - ) + res = client.get(url_for("api_users.users_list", q=patron_martigny["email"])) assert res.status_code == 200 hits = get_json(res) - assert hits['hits']['hits'][0]['metadata']['username'] == \ - patron_martigny['username'] - assert hits['hits']['total']['value'] == 1 + assert ( + hits["hits"]["hits"][0]["metadata"]["username"] == patron_martigny["username"] + ) + assert hits["hits"]["total"]["value"] == 1 # by username res = client.get( - url_for( - 'api_users.users_list', - q='username:' + patron_martigny['username'] - ) + url_for("api_users.users_list", q="username:" + patron_martigny["username"]) ) assert res.status_code == 200 hits = get_json(res) - assert hits['hits']['hits'][0]['metadata']['username'] == \ - patron_martigny['username'] - assert hits['hits']['total']['value'] == 1 + assert ( + hits["hits"]["hits"][0]["metadata"]["username"] == patron_martigny["username"] + ) + assert hits["hits"]["total"]["value"] == 1 # by email res = client.get( - 
url_for( - 'api_users.users_list', - q='email:' + patron_martigny['email'] - ) + url_for("api_users.users_list", q="email:" + patron_martigny["email"]) ) assert res.status_code == 200 hits = get_json(res) - assert hits['hits']['hits'][0]['metadata']['username'] == \ - patron_martigny['username'] - assert hits['hits']['total']['value'] == 1 + assert ( + hits["hits"]["hits"][0]["metadata"]["username"] == patron_martigny["username"] + ) + assert hits["hits"]["total"]["value"] == 1 # non patron by email res = client.get( - url_for( - 'api_users.users_list', - q='email:' + user_without_profile.email - ) + url_for("api_users.users_list", q="email:" + user_without_profile.email) ) assert res.status_code == 200 hits = get_json(res) - assert hits['hits']['hits'][0]['metadata']['email'] == \ - user_without_profile.email - assert hits['hits']['total']['value'] == 1 + assert hits["hits"]["hits"][0]["metadata"]["email"] == user_without_profile.email + assert hits["hits"]["total"]["value"] == 1 # by uppercase email res = client.get( - url_for( - 'api_users.users_list', - q='email:' + patron_martigny['email'].upper() - ) + url_for("api_users.users_list", q="email:" + patron_martigny["email"].upper()) ) assert res.status_code == 200 hits = get_json(res) - assert hits['hits']['hits'][0]['metadata']['username'] == \ - patron_martigny['username'] - assert hits['hits']['total']['value'] == 1 + assert ( + hits["hits"]["hits"][0]["metadata"]["username"] == patron_martigny["username"] + ) + assert hits["hits"]["total"]["value"] == 1 # Login with patron role login_user_via_session(client, p_martigny.user) - res = client.get( - url_for( - 'api_users.users_list', - q=patron_martigny['username'] - ) - ) + res = client.get(url_for("api_users.users_list", q=patron_martigny["username"])) assert res.status_code == 200 hits = get_json(res) - assert 'metadata' not in hits['hits']['hits'][0] - assert hits['hits']['hits'][0]['id'] == patron_martigny['user_id'] + assert "metadata" not in hits["hits"]["hits"][0] + assert hits["hits"]["hits"][0]["id"] == patron_martigny["user_id"] - res = client.get( - url_for( - 'api_users.users_item', - id=p_martigny.user.id - ) - ) + res = client.get(url_for("api_users.users_item", id=p_martigny.user.id)) assert res.status_code == 200 record = get_json(res) - assert patron_martigny['username'] == record.get('metadata', [])\ - .get('username') + assert patron_martigny["username"] == record.get("metadata", []).get("username") # Login with librarian role login_user_via_session(client, l_martigny.user) - res = client.get( - url_for( - 'api_users.users_list', - q=patron_martigny['username'] - ) - ) + res = client.get(url_for("api_users.users_list", q=patron_martigny["username"])) assert res.status_code == 200 hits = get_json(res) - assert 'metadata' in hits['hits']['hits'][0] - assert hits['hits']['hits'][0]['metadata']['username'] == \ - patron_martigny['username'] + assert "metadata" in hits["hits"]["hits"][0] + assert ( + hits["hits"]["hits"][0]["metadata"]["username"] == patron_martigny["username"] + ) diff --git a/tests/api/vendors/test_vendors.py b/tests/api/vendors/test_vendors.py index 7b7a427671..ee21e418e1 100644 --- a/tests/api/vendors/test_vendors.py +++ b/tests/api/vendors/test_vendors.py @@ -28,73 +28,77 @@ def test_vendors_get(client, librarian_martigny, vendor_martigny): """Test vendor record retrieval.""" # Martigny login_user_via_session(client, librarian_martigny.user) - item_url = url_for( - 'invenio_records_rest.vndr_item', - pid_value=vendor_martigny.pid) - list_url = 
url_for( - 'invenio_records_rest.vndr_list', - q=f'pid:{vendor_martigny.pid}' - ) + item_url = url_for("invenio_records_rest.vndr_item", pid_value=vendor_martigny.pid) + list_url = url_for("invenio_records_rest.vndr_list", q=f"pid:{vendor_martigny.pid}") res = client.get(item_url) assert res.status_code == 200 - assert res.headers['ETag'] == f'"{vendor_martigny.revision_id}"' + assert res.headers["ETag"] == f'"{vendor_martigny.revision_id}"' data = get_json(res) - assert vendor_martigny.dumps() == data['metadata'] + assert vendor_martigny.dumps() == data["metadata"] # Check metadata - for k in ['created', 'updated', 'metadata', 'links']: + for k in ["created", "updated", "metadata", "links"]: assert k in data -def test_filtered_vendors_get(client, librarian_martigny, - librarian_sion, vendor_martigny, - vendor2_martigny, vendor_sion, vendor2_sion): +def test_filtered_vendors_get( + client, + librarian_martigny, + librarian_sion, + vendor_martigny, + vendor2_martigny, + vendor_sion, + vendor2_sion, +): """Test vendors filter by organisation.""" # Martigny login_user_via_session(client, librarian_martigny.user) - list_url = url_for('invenio_records_rest.vndr_list') + list_url = url_for("invenio_records_rest.vndr_list") res = client.get(list_url) assert res.status_code == 200 data = get_json(res) - assert data['hits']['total']['value'] == 2 + assert data["hits"]["total"]["value"] == 2 # Sion login_user_via_session(client, librarian_sion.user) - list_url = url_for('invenio_records_rest.vndr_list') + list_url = url_for("invenio_records_rest.vndr_list") res = client.get(list_url) assert res.status_code == 200 data = get_json(res) - assert data['hits']['total']['value'] == 2 + assert data["hits"]["total"]["value"] == 2 def test_vendors_can_delete( - client, vendor_martigny, acq_order_fiction_martigny, - acq_invoice_fiction_martigny, holding_lib_martigny_w_patterns): + client, + vendor_martigny, + acq_order_fiction_martigny, + acq_invoice_fiction_martigny, + holding_lib_martigny_w_patterns, +): """Test can delete a vendor with a linked acquisition order.""" can, reasons = vendor_martigny.can_delete assert not can - assert reasons['links']['acq_orders'] - assert reasons['links']['acq_invoices'] - assert reasons['links']['holdings'] + assert reasons["links"]["acq_orders"] + assert reasons["links"]["acq_invoices"] + assert reasons["links"]["holdings"] -def test_vendor_post_update_delete(client, librarian_martigny, - vendor3_martigny_data, json_header): +def test_vendor_post_update_delete( + client, librarian_martigny, vendor3_martigny_data, json_header +): """Test CRUD on vendor.""" login_user_via_session(client, librarian_martigny.user) - item_url = url_for('invenio_records_rest.vndr_item', pid_value='vndr3') + item_url = url_for("invenio_records_rest.vndr_item", pid_value="vndr3") # create - vendor3_martigny_data['pid'] = 'vndr3' + vendor3_martigny_data["pid"] = "vndr3" res, data = postdata( - client, - 'invenio_records_rest.vndr_list', - vendor3_martigny_data + client, "invenio_records_rest.vndr_list", vendor3_martigny_data ) assert res.status_code == 201 @@ -102,21 +106,17 @@ def test_vendor_post_update_delete(client, librarian_martigny, res = client.get(item_url) assert res.status_code == 200 data = get_json(res) - assert data['metadata'] == vendor3_martigny_data + assert data["metadata"] == vendor3_martigny_data # update data = vendor3_martigny_data - data['name'] = 'Test update Name' - res = client.put( - item_url, - data=json.dumps(data), - headers=json_header - ) + data["name"] = "Test 
update Name" + res = client.put(item_url, data=json.dumps(data), headers=json_header) assert res.status_code == 200 # Check that the returned record matches the given data data = get_json(res) - assert data['metadata']['name'] == 'Test update Name' + assert data["metadata"]["name"] == "Test update Name" # delete res = client.delete(item_url) diff --git a/tests/api/vendors/test_vendors_permissions.py b/tests/api/vendors/test_vendors_permissions.py index d668606e98..05db046679 100644 --- a/tests/api/vendors/test_vendors_permissions.py +++ b/tests/api/vendors/test_vendors_permissions.py @@ -25,23 +25,23 @@ from rero_ils.modules.vendors.permissions import VendorPermissionPolicy -def test_vendor_permissions_api(client, org_sion, patron_martigny, - system_librarian_martigny, - vendor_martigny, vendor_sion): +def test_vendor_permissions_api( + client, + org_sion, + patron_martigny, + system_librarian_martigny, + vendor_martigny, + vendor_sion, +): """Test vendors permissions api.""" - vendor_permissions_url = url_for( - 'api_blueprint.permissions', - route_name='vendors' - ) + vendor_permissions_url = url_for("api_blueprint.permissions", route_name="vendors") vendor_martigny_permission_url = url_for( - 'api_blueprint.permissions', - route_name='vendors', - record_pid=vendor_martigny.pid + "api_blueprint.permissions", + route_name="vendors", + record_pid=vendor_martigny.pid, ) vendor_sion_permission_url = url_for( - 'api_blueprint.permissions', - route_name='vendors', - record_pid=vendor_sion.pid + "api_blueprint.permissions", route_name="vendors", record_pid=vendor_sion.pid ) # Not logged @@ -60,21 +60,26 @@ def test_vendor_permissions_api(client, org_sion, patron_martigny, res = client.get(vendor_martigny_permission_url) assert res.status_code == 200 data = get_json(res) - for action in ['list', 'read', 'create', 'update', 'delete']: - assert data[action]['can'] + for action in ["list", "read", "create", "update", "delete"]: + assert data[action]["can"] res = client.get(vendor_sion_permission_url) assert res.status_code == 200 data = get_json(res) - for action in ['read', 'update', 'delete']: - assert not data[action]['can'] + for action in ["read", "update", "delete"]: + assert not data[action]["can"] -def test_vendor_permissions(patron_martigny, - librarian_martigny, librarian2_martigny, - system_librarian_martigny, - org_martigny, org_sion, - vendor_martigny, vendor_sion): +def test_vendor_permissions( + patron_martigny, + librarian_martigny, + librarian2_martigny, + system_librarian_martigny, + org_martigny, + org_sion, + vendor_martigny, + vendor_sion, +): """Test vendor permissions class.""" # Anonymous user @@ -82,64 +87,70 @@ def test_vendor_permissions(patron_martigny, identity_changed.send( current_app._get_current_object(), identity=AnonymousIdentity() ) - check_permission(VendorPermissionPolicy, { - 'search': False, - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, {}) + check_permission( + VendorPermissionPolicy, + { + "search": False, + "read": False, + "create": False, + "update": False, + "delete": False, + }, + {}, + ) # Patron user # - all actions are denied login_user(patron_martigny.user) - check_permission(VendorPermissionPolicy, { - 'search': False, - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, org_martigny) + check_permission( + VendorPermissionPolicy, + { + "search": False, + "read": False, + "create": False, + "update": False, + "delete": False, + }, + org_martigny, + )
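Each `check_permission` call above asserts a whole map of expected outcomes for one identity at once. As a minimal sketch only, assuming the invenio-records-permissions convention that a policy class is instantiated per action and exposes `allows(identity)` (the real helper lives in the shared test utils and may differ):

from flask import g

def check_permission(policy, actions, record):
    """Assert the expected outcome of each action (illustrative sketch only)."""
    for action, expected in actions.items():
        # g.identity is the identity set by login_user() / identity_changed above
        assert policy(action, record=record).allows(g.identity) == expected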
# Full permission user # - Allow all actions on any vendor of their own organisation login_user(system_librarian_martigny.user) - check_permission(VendorPermissionPolicy, { - 'search': True, - 'read': True, - 'create': True, - 'update': True, - 'delete': True - }, org_martigny) + check_permission( + VendorPermissionPolicy, + {"search": True, "read": True, "create": True, "update": True, "delete": True}, + org_martigny, + ) # check permissions on other organisation - check_permission(VendorPermissionPolicy, { - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, org_sion) + check_permission( + VendorPermissionPolicy, + {"read": False, "create": False, "update": False, "delete": False}, + org_sion, + ) # Librarian with acquisition manager role # - Allow all actions on any vendor of their own organisation login_user(librarian_martigny.user) - check_permission(VendorPermissionPolicy, { - 'search': True, - 'read': True, - 'create': True, - 'update': True, - 'delete': True - }, org_martigny) + check_permission( + VendorPermissionPolicy, + {"search": True, "read": True, "create": True, "update": True, "delete": True}, + org_martigny, + ) # check permissions on other organisation - check_permission(VendorPermissionPolicy, { - 'read': False, - 'create': False, - 'update': False, - 'delete': False - }, org_sion) + check_permission( + VendorPermissionPolicy, + {"read": False, "create": False, "update": False, "delete": False}, + org_sion, + ) # Librarian without acquisition manager role # - can read vendors login_user(librarian2_martigny.user) - check_permission(VendorPermissionPolicy, { - 'search': True, - 'read': True, - 'create': False, - 'update': False, - 'delete': False - }, org_martigny) + check_permission( + VendorPermissionPolicy, + { + "search": True, + "read": True, + "create": False, + "update": False, + "delete": False, + }, + org_martigny, + ) diff --git a/tests/conftest.py b/tests/conftest.py index e32c6266ae..bf0ac2294c 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -27,25 +27,25 @@ from dotenv import load_dotenv pytest_plugins = ( - 'celery.contrib.pytest', - 'fixtures.circulation', - 'fixtures.metadata', - 'fixtures.organisations', - 'fixtures.acquisition', - 'fixtures.sip2', - 'fixtures.basics', - 'fixtures.mef' + "celery.contrib.pytest", + "fixtures.circulation", + "fixtures.metadata", + "fixtures.organisations", + "fixtures.acquisition", + "fixtures.sip2", + "fixtures.basics", + "fixtures.mef", ) -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def search(appctx): """Setup and teardown all registered Elasticsearch indices. Scope: module This fixture will create all registered indexes in Elasticsearch and remove once done. Fixtures that perform changes (e.g. index or remove documents), - should used the function-scoped :py:data:`es_clear` fixture to leave the + should use the function-scoped :py:data:`search_clear` fixture to leave the indexes clean for the following tests. 
""" from invenio_search import current_search, current_search_client @@ -54,7 +54,7 @@ def search(appctx): try: list(current_search.put_templates()) except IndexAlreadyExistsError: - current_search_client.indices.delete_template('*') + current_search_client.indices.delete_template("*") list(current_search.put_templates()) try: @@ -67,21 +67,21 @@ def search(appctx): try: yield current_search_client finally: - current_search_client.indices.delete(index='*') - current_search_client.indices.delete_template('*') + current_search_client.indices.delete(index="*") + current_search_client.indices.delete_template("*") @pytest.fixture(scope="module") def data(): """Load fixture data file.""" - with open(join(dirname(__file__), 'data/data.json')) as f: + with open(join(dirname(__file__), "data/data.json")) as f: return json.load(f) @pytest.fixture(scope="module") def role_policies_data(): """Load fixture role policies data file.""" - path = 'data/policies/role_policies.json' + path = "data/policies/role_policies.json" with open(join(dirname(__file__), path)) as f: return json.load(f) @@ -89,7 +89,7 @@ def role_policies_data(): @pytest.fixture(scope="module") def system_role_policies_data(): """Load fixture role policies data file.""" - path = 'data/policies/system_role_policies.json' + path = "data/policies/system_role_policies.json" with open(join(dirname(__file__), path)) as f: return json.load(f) @@ -97,145 +97,126 @@ def system_role_policies_data(): @pytest.fixture(scope="module") def acquisition(): """Load fixture acquisition file.""" - with open(join(dirname(__file__), 'data/acquisition.json')) as f: + with open(join(dirname(__file__), "data/acquisition.json")) as f: return json.load(f) @pytest.fixture(scope="module") def holdings(): """Load fixture holdings file.""" - with open(join(dirname(__file__), 'data/holdings.json')) as f: + with open(join(dirname(__file__), "data/holdings.json")) as f: return json.load(f) @pytest.fixture(scope="module") def local_fields(): """Load local fields file.""" - with open(join(dirname(__file__), 'data/local_fields.json')) as f: + with open(join(dirname(__file__), "data/local_fields.json")) as f: return json.load(f) @pytest.fixture(scope="module") def mef_entities(): """Load MEF entities file.""" - with open(join(dirname(__file__), 'data/mef.json')) as f: + with open(join(dirname(__file__), "data/mef.json")) as f: return json.load(f) @pytest.fixture(scope="session") def csv_header(): """Load json headers.""" - return [ - ('Accept', 'text/csv'), - ('Content-Type', 'application/json') - ] + return [("Accept", "text/csv"), ("Content-Type", "application/json")] @pytest.fixture(scope="session") def ris_header(): """Load json headers.""" return [ - ('Accept', 'application/x-research-info-systems'), - ('Content-Type', 'application/json') + ("Accept", "application/x-research-info-systems"), + ("Content-Type", "application/json"), ] @pytest.fixture(scope="session") def json_header(): """Load json headers.""" - return [ - ('Accept', 'application/json'), - ('Content-Type', 'application/json') - ] + return [("Accept", "application/json"), ("Content-Type", "application/json")] @pytest.fixture(scope="session") def rero_marcxml_header(): """Load marcxml headers.""" - return [ - ('Accept', 'application/json'), - ('Content-Type', 'application/marcxml+xml') - ] + return [("Accept", "application/json"), ("Content-Type", "application/marcxml+xml")] @pytest.fixture(scope="session") def rero_json_header(): """Load json headers.""" - return [ - ('Accept', 'application/rero+json'), - 
('Content-Type', 'application/json') - ] + return [("Accept", "application/rero+json"), ("Content-Type", "application/json")] @pytest.fixture(scope="session") def export_json_header(): """Load json headers.""" - return [ - ('Accept', 'application/export+json'), - ('Content-Type', 'application/json') - ] + return [("Accept", "application/export+json"), ("Content-Type", "application/json")] @pytest.fixture(scope="session") def can_delete_json_header(): """Load can_delete json headers.""" return [ - ('Accept', 'application/can-delete+json'), - ('Content-Type', 'application/json') + ("Accept", "application/can-delete+json"), + ("Content-Type", "application/json"), ] -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def app_config(app_config): """Create temporary instance dir for each test.""" - app_config['CELERY_BROKER_URL'] = 'memory://' - app_config['RATELIMIT_STORAGE_URL'] = 'memory://' - app_config['CACHE_TYPE'] = 'simple' - app_config['SEARCH_ELASTIC_HOSTS'] = None - app_config['SQLALCHEMY_DATABASE_URI'] = \ - 'postgresql+psycopg2://rero-ils:rero-ils@localhost/rero-ils' - app_config['DB_VERSIONING'] = True - app_config['CELERY_CACHE_BACKEND'] = "memory" - app_config['CELERY_RESULT_BACKEND'] = "cache" - app_config['CELERY_TASK_ALWAYS_EAGER'] = True - app_config['CELERY_TASK_EAGER_PROPAGATES'] = True - help_test_dir = join(dirname(__file__), 'data', 'help') - app_config['WIKI_CONTENT_DIR'] = help_test_dir - app_config['WIKI_UPLOAD_FOLDER'] = join(help_test_dir, 'files') - app_config['CACHE_REDIS_URL'] = 'redis://localhost:6379/0' - app_config['ACCOUNTS_SESSION_REDIS_URL'] = 'redis://localhost:6379/1' - app_config['CELERY_RESULT_BACKEND'] = 'redis://localhost:6379/2' - app_config['RATELIMIT_STORAGE_URL'] = 'redis://localhost:6379/3' - app_config['CELERY_REDIS_SCHEDULER_URL'] = 'redis://localhost:6379/4' - app_config['RERO_IMPORT_CACHE'] = 'redis://localhost:6379/5' - app_config['WTF_CSRF_ENABLED'] = False + app_config["CELERY_BROKER_URL"] = "memory://" + app_config["RATELIMIT_STORAGE_URL"] = "memory://" + app_config["CACHE_TYPE"] = "simple" + app_config["SEARCH_ELASTIC_HOSTS"] = None + app_config["SQLALCHEMY_DATABASE_URI"] = ( + "postgresql+psycopg2://rero-ils:rero-ils@localhost/rero-ils" + ) + app_config["DB_VERSIONING"] = True + app_config["CELERY_CACHE_BACKEND"] = "memory" + app_config["CELERY_RESULT_BACKEND"] = "cache" + app_config["CELERY_TASK_ALWAYS_EAGER"] = True + app_config["CELERY_TASK_EAGER_PROPAGATES"] = True + help_test_dir = join(dirname(__file__), "data", "help") + app_config["WIKI_CONTENT_DIR"] = help_test_dir + app_config["WIKI_UPLOAD_FOLDER"] = join(help_test_dir, "files") + app_config["CACHE_REDIS_URL"] = "redis://localhost:6379/0" + app_config["ACCOUNTS_SESSION_REDIS_URL"] = "redis://localhost:6379/1" + app_config["CELERY_RESULT_BACKEND"] = "redis://localhost:6379/2" + app_config["RATELIMIT_STORAGE_URL"] = "redis://localhost:6379/3" + app_config["CELERY_REDIS_SCHEDULER_URL"] = "redis://localhost:6379/4" + app_config["RERO_IMPORT_CACHE"] = "redis://localhost:6379/5" + app_config["WTF_CSRF_ENABLED"] = False # enable operation logs validation for the tests - app_config['RERO_ILS_ENABLE_OPERATION_LOG_VALIDATION'] = True - app_config['RERO_ILS_MEF_CONFIG'] = { - 'agents': { - 'base_url': 'https://mef.rero.ch/api/agents', - 'sources': ['idref', 'gnd'] + app_config["RERO_ILS_ENABLE_OPERATION_LOG_VALIDATION"] = True + app_config["RERO_ILS_MEF_CONFIG"] = { + "agents": { + "base_url": "https://mef.rero.ch/api/agents", + "sources": ["idref", "gnd"], }, - 'concepts': 
{ - 'base_url': 'https://mef.rero.ch/api/concepts', - 'sources': ['idref'] + "concepts": { + "base_url": "https://mef.rero.ch/api/concepts", + "sources": ["idref"], }, - 'concepts-genreForm': { - 'base_url': 'https://mef.rero.ch/api/concepts', - 'sources': ['idref'], - 'filters': [ - {'idref.bnf_type': 'sujet Rameau'} - ] - }, - 'places': { - 'base_url': 'https://mef.rero.ch/api/places', - 'sources': ['idref'] + "concepts-genreForm": { + "base_url": "https://mef.rero.ch/api/concepts", + "sources": ["idref"], + "filters": [{"idref.bnf_type": "sujet Rameau"}], }, + "places": {"base_url": "https://mef.rero.ch/api/places", "sources": ["idref"]}, } return app_config -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def instance_path(): """Temporary instance path. @@ -247,72 +228,66 @@ def instance_path(): """ # load .env, .flaskenv load_dotenv() - invenio_instance_path = os.environ.get('INVENIO_INSTANCE_PATH') - invenio_static_folder = os.environ.get('INVENIO_STATIC_FOLDER') + invenio_instance_path = os.environ.get("INVENIO_INSTANCE_PATH") + invenio_static_folder = os.environ.get("INVENIO_STATIC_FOLDER") path = invenio_instance_path # static folder if not invenio_static_folder: if invenio_instance_path: - os.environ['INVENIO_STATIC_FOLDER'] = os.path.join( - invenio_instance_path, 'static') + os.environ["INVENIO_STATIC_FOLDER"] = os.path.join( + invenio_instance_path, "static" + ) else: - os.environ['INVENIO_STATIC_FOLDER'] = os.path.join( - sys.prefix, 'var/instance/static') + os.environ["INVENIO_STATIC_FOLDER"] = os.path.join( + sys.prefix, "var/instance/static" + ) # instance path if not path: path = tempfile.mkdtemp() - os.environ['INVENIO_INSTANCE_PATH'] = path + os.environ["INVENIO_INSTANCE_PATH"] = path yield path # clean static folder variable if not invenio_static_folder: - os.environ.pop('INVENIO_STATIC_FOLDER', None) + os.environ.pop("INVENIO_STATIC_FOLDER", None) # clean instance path variable and remove temp dir if not invenio_instance_path: - os.environ.pop('INVENIO_INSTANCE_PATH', None) + os.environ.pop("INVENIO_INSTANCE_PATH", None) shutil.rmtree(path) -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def mef_agents_url(app): """Get MEF agent URL from config.""" - return app.config\ - .get('RERO_ILS_MEF_CONFIG', {})\ - .get('agents', {})\ - .get('base_url') + return app.config.get("RERO_ILS_MEF_CONFIG", {}).get("agents", {}).get("base_url") -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def mef_concepts_url(app): """Get MEF agent URL from config.""" - return app.config\ - .get('RERO_ILS_MEF_CONFIG', {})\ - .get('concepts', {})\ - .get('base_url') + return app.config.get("RERO_ILS_MEF_CONFIG", {}).get("concepts", {}).get("base_url") @pytest.fixture(scope="module") def bnf_ean_any_123(): """Load bnf ean any 123 xml file.""" - file_name = join(dirname(__file__), 'data/xml/bnf/bnf_ean_any_123.xml') - with open(file_name, 'rb') as file: + file_name = join(dirname(__file__), "data/xml/bnf/bnf_ean_any_123.xml") + with open(file_name, "rb") as file: return file.read() @pytest.fixture(scope="module") def bnf_ean_any_9782070541270(): """Load bnf ean any 9782070541270 xml file.""" - file_name = join( - dirname(__file__), 'data/xml/bnf/bnf_ean_any_9782070541270.xml') - with open(file_name, 'rb') as file: + file_name = join(dirname(__file__), "data/xml/bnf/bnf_ean_any_9782070541270.xml") + with open(file_name, "rb") as file: return file.read() @pytest.fixture(scope="module") def bnf_ean_any_9782072862014(): """Load bnf ean any 9782072862014 
xml file.""" - file_name = join( - dirname(__file__), 'data/xml/bnf/bnf_ean_any_9782072862014.xml') - with open(file_name, 'rb') as file: + file_name = join(dirname(__file__), "data/xml/bnf/bnf_ean_any_9782072862014.xml") + with open(file_name, "rb") as file: return file.read() @@ -320,261 +295,239 @@ def bnf_ean_any_9782072862014(): def bnf_recordid_all_FRBNF370903960000006(): """Load bnf recordid all FRBNF370903960000006 xml file.""" file_name = join( - dirname(__file__), - 'data/xml/bnf/bnf_recordid_all_FRBNF370903960000006.xml' + dirname(__file__), "data/xml/bnf/bnf_recordid_all_FRBNF370903960000006.xml" ) - with open(file_name, 'rb') as file: + with open(file_name, "rb") as file: return file.read() @pytest.fixture(scope="module") def bnf_anywhere_all_peter(): """Load bnf anywhere all peter xml file.""" - file_name = join( - dirname(__file__), - 'data/xml/bnf/bnf_anywhere_all_peter.xml' - ) - with open(file_name, 'rb') as file: + file_name = join(dirname(__file__), "data/xml/bnf/bnf_anywhere_all_peter.xml") + with open(file_name, "rb") as file: return file.read() @pytest.fixture(scope="module") def loc_isbn_all_123(): """Load LoC isbn all 123 xml file.""" - file_name = join(dirname(__file__), 'data/xml/loc/loc_isbn_all_123.xml') - with open(file_name, 'rb') as file: + file_name = join(dirname(__file__), "data/xml/loc/loc_isbn_all_123.xml") + with open(file_name, "rb") as file: return file.read() @pytest.fixture(scope="module") def loc_isbn_all_9781604689808(): """Load LoC isbn all 9781604689808 xml file.""" - file_name = join( - dirname(__file__), 'data/xml/loc/loc_isbn_all_9781604689808.xml') - with open(file_name, 'rb') as file: + file_name = join(dirname(__file__), "data/xml/loc/loc_isbn_all_9781604689808.xml") + with open(file_name, "rb") as file: return file.read() @pytest.fixture(scope="module") def loc_isbn_all_9780821417478(): """Load LoC isbn all 9780821417478 xml file.""" - file_name = join( - dirname(__file__), 'data/xml/loc/loc_isbn_all_9780821417478.xml') - with open(file_name, 'rb') as file: + file_name = join(dirname(__file__), "data/xml/loc/loc_isbn_all_9780821417478.xml") + with open(file_name, "rb") as file: return file.read() @pytest.fixture(scope="module") def loc_anywhere_all_samuelson(): """Load LoC anywhere_all samuelson xml file.""" - file_name = join( - dirname(__file__), 'data/xml/loc/loc_anywhere_all_samuelson.xml') - with open(file_name, 'rb') as file: + file_name = join(dirname(__file__), "data/xml/loc/loc_anywhere_all_samuelson.xml") + with open(file_name, "rb") as file: return file.read() @pytest.fixture(scope="module") def loc_recordid_all_2014043016(): """Load LoC recordid 2014043016 xml file.""" - file_name = join( - dirname(__file__), 'data/xml/loc/loc_recordid_all_2014043016.xml') - with open(file_name, 'rb') as file: + file_name = join(dirname(__file__), "data/xml/loc/loc_recordid_all_2014043016.xml") + with open(file_name, "rb") as file: return file.read() @pytest.fixture(scope="module") def loc_without_010(): """Load LoC without 010.""" - file_name = join(dirname(__file__), 'data/xml/loc/loc_without_010.xml') - with open(file_name, 'rb') as file: + file_name = join(dirname(__file__), "data/xml/loc/loc_without_010.xml") + with open(file_name, "rb") as file: return file.read() @pytest.fixture(scope="module") def dnb_isbn_123(): """Load DNB isbn 123 xml file.""" - file_name = join(dirname(__file__), 'data/xml/dnb/dnb_isbn_123.xml') - with open(file_name, 'rb') as file: + file_name = join(dirname(__file__), "data/xml/dnb/dnb_isbn_123.xml") + with 
open(file_name, "rb") as file: return file.read() @pytest.fixture(scope="module") def dnb_isbn_9783862729852(): """Load DNB isbn 9783862729852 file.""" - file_name = join( - dirname(__file__), 'data/xml/dnb/dnb_isbn_9783862729852.xml') - with open(file_name, 'rb') as file: + file_name = join(dirname(__file__), "data/xml/dnb/dnb_isbn_9783862729852.xml") + with open(file_name, "rb") as file: return file.read() @pytest.fixture(scope="module") def dnb_isbn_3858818526(): """Load DNB isbn 3858818526 file.""" - file_name = join(dirname(__file__), 'data/xml/dnb/dnb_isbn_3858818526.xml') - with open(file_name, 'rb') as file: + file_name = join(dirname(__file__), "data/xml/dnb/dnb_isbn_3858818526.xml") + with open(file_name, "rb") as file: return file.read() @pytest.fixture(scope="module") def dnb_samuelson(): """Load DNB samuelson file.""" - file_name = join(dirname(__file__), 'data/xml/dnb/dnb_samuelson.xml') - with open(file_name, 'rb') as file: + file_name = join(dirname(__file__), "data/xml/dnb/dnb_samuelson.xml") + with open(file_name, "rb") as file: return file.read() @pytest.fixture(scope="module") def dnb_recordid_1214325203(): """Load dnb recordid 1214325203 file.""" - file_name = join( - dirname(__file__), 'data/xml/dnb/dnb_recordid_1214325203.xml') - with open(file_name, 'rb') as file: + file_name = join(dirname(__file__), "data/xml/dnb/dnb_recordid_1214325203.xml") + with open(file_name, "rb") as file: return file.read() @pytest.fixture(scope="module") def slsp_anywhere_123(): """Load slsp anywhere 123 file.""" - file_name = join(dirname(__file__), 'data/xml/slsp/slsp_anywhere_123.xml') - with open(file_name, 'rb') as file: + file_name = join(dirname(__file__), "data/xml/slsp/slsp_anywhere_123.xml") + with open(file_name, "rb") as file: return file.read() @pytest.fixture(scope="module") def slsp_isbn_9782296076648(): """Load slsp isbn 9782296076648 file.""" - file_name = join( - dirname(__file__), 'data/xml/slsp/slsp_isbn_9782296076648.xml') - with open(file_name, 'rb') as file: + file_name = join(dirname(__file__), "data/xml/slsp/slsp_isbn_9782296076648.xml") + with open(file_name, "rb") as file: return file.read() @pytest.fixture(scope="module") def slsp_isbn_3908497272(): """Load slsp isbn 3908497272 file.""" - file_name = join( - dirname(__file__), 'data/xml/slsp/slsp_isbn_3908497272.xml') - with open(file_name, 'rb') as file: + file_name = join(dirname(__file__), "data/xml/slsp/slsp_isbn_3908497272.xml") + with open(file_name, "rb") as file: return file.read() @pytest.fixture(scope="module") def slsp_samuelson(): """Load slsp samuelson file.""" - file_name = join(dirname(__file__), 'data/xml/slsp/slsp_samuelson.xml') - with open(file_name, 'rb') as file: + file_name = join(dirname(__file__), "data/xml/slsp/slsp_samuelson.xml") + with open(file_name, "rb") as file: return file.read() @pytest.fixture(scope="module") def slsp_recordid_9910137(): """Load slsp recordid 991013724759705501 file.""" - file_name = join( - dirname(__file__), 'data/xml/slsp/slsp_recordid_9910137.xml') - with open(file_name, 'rb') as file: + file_name = join(dirname(__file__), "data/xml/slsp/slsp_recordid_9910137.xml") + with open(file_name, "rb") as file: return file.read() @pytest.fixture(scope="module") def ugent_anywhere_123(): """Load ugent anywhere 123 file.""" - file_name = join( - dirname(__file__), 'data/xml/ugent/ugent_anywhere_123.xml') - with open(file_name, 'rb') as file: + file_name = join(dirname(__file__), "data/xml/ugent/ugent_anywhere_123.xml") + with open(file_name, "rb") as file: return 
file.read() @pytest.fixture(scope="module") def ugent_isbn_9781108422925(): """Load ugent isbn 9781108422925 file.""" - file_name = join( - dirname(__file__), 'data/xml/ugent/ugent_isbn_9781108422925.xml') - with open(file_name, 'rb') as file: + file_name = join(dirname(__file__), "data/xml/ugent/ugent_isbn_9781108422925.xml") + with open(file_name, "rb") as file: return file.read() @pytest.fixture(scope="module") def ugent_isbn_9780415773867(): """Load ugent isbn 9780415773867 file.""" - file_name = join( - dirname(__file__), 'data/xml/ugent/ugent_isbn_9780415773867.xml') - with open(file_name, 'rb') as file: + file_name = join(dirname(__file__), "data/xml/ugent/ugent_isbn_9780415773867.xml") + with open(file_name, "rb") as file: return file.read() @pytest.fixture(scope="module") def ugent_samuelson(): """Load ugent samuelson file.""" - file_name = join( - dirname(__file__), 'data/xml/ugent/ugent_samuelson.xml') - with open(file_name, 'rb') as file: + file_name = join(dirname(__file__), "data/xml/ugent/ugent_samuelson.xml") + with open(file_name, "rb") as file: return file.read() @pytest.fixture(scope="module") def ugent_recordid_001247835(): """Load ugent recordid 001247835 file.""" - file_name = join( - dirname(__file__), 'data/xml/ugent/ugent_recordid_001247835.xml') - with open(file_name, 'rb') as file: + file_name = join(dirname(__file__), "data/xml/ugent/ugent_recordid_001247835.xml") + with open(file_name, "rb") as file: return file.read() @pytest.fixture(scope="module") def ugent_book_without_26X(): """Load ugent book without 26X file.""" - file_name = join( - dirname(__file__), 'data/xml/ugent/ugent_book_without_26X.xml') - with open(file_name, 'rb') as file: + file_name = join(dirname(__file__), "data/xml/ugent/ugent_book_without_26X.xml") + with open(file_name, "rb") as file: return file.read() @pytest.fixture(scope="module") def kul_anywhere_123(): """Load kul anywhere 123 file.""" - file_name = join(dirname(__file__), 'data/xml/kul/kul_anywhere_123.xml') - with open(file_name, 'rb') as file: + file_name = join(dirname(__file__), "data/xml/kul/kul_anywhere_123.xml") + with open(file_name, "rb") as file: return file.read() @pytest.fixture(scope="module") def kul_isbn_9782265089419(): """Load kul isbn 9782265089419 file.""" - file_name = join( - dirname(__file__), 'data/xml/kul/kul_isbn_9782265089419.xml') - with open(file_name, 'rb') as file: + file_name = join(dirname(__file__), "data/xml/kul/kul_isbn_9782265089419.xml") + with open(file_name, "rb") as file: return file.read() @pytest.fixture(scope="module") def kul_isbn_2804600068(): """Load kul isbn 2804600068 file.""" - file_name = join(dirname(__file__), 'data/xml/kul/kul_isbn_2804600068.xml') - with open(file_name, 'rb') as file: + file_name = join(dirname(__file__), "data/xml/kul/kul_isbn_2804600068.xml") + with open(file_name, "rb") as file: return file.read() @pytest.fixture(scope="module") def kul_samuelson(): """Load kul samuelson file.""" - file_name = join(dirname(__file__), 'data/xml/kul/kul_samuelson.xml') - with open(file_name, 'rb') as file: + file_name = join(dirname(__file__), "data/xml/kul/kul_samuelson.xml") + with open(file_name, "rb") as file: return file.read() @pytest.fixture(scope="module") def kul_recordid_9992876296301471(): """Load kul recordid 9992876296301471 file.""" - file_name = join( - dirname(__file__), 'data/xml/kul/kul_recordid_99928762.xml') - with open(file_name, 'rb') as file: + file_name = join(dirname(__file__), "data/xml/kul/kul_recordid_99928762.xml") + with open(file_name, "rb") as 
file: return file.read() @pytest.fixture(scope="module") def kul_book_without_26X(): """Load kul book without 26X file.""" - file_name = join( - dirname(__file__), 'data/xml/kul/kul_book_without_26X.xml') - with open(file_name, 'rb') as file: + file_name = join(dirname(__file__), "data/xml/kul/kul_book_without_26X.xml") + with open(file_name, "rb") as file: return file.read() diff --git a/tests/e2e/test_front_page.py b/tests/e2e/test_front_page.py index 0b6811e68c..9261e6f6b9 100644 --- a/tests/e2e/test_front_page.py +++ b/tests/e2e/test_front_page.py @@ -22,9 +22,5 @@ def test_frontpage(live_server, browser): """Test retrieval of front page.""" - browser.get(url_for('rero_ils.index', _external=True)) - assert ( - 'Get into your library' == - browser.find_element_by_tag_name('h1') - .text - ) + browser.get(url_for("rero_ils.index", _external=True)) + assert "Get into your library" == browser.find_element_by_tag_name("h1").text diff --git a/tests/fixtures/acquisition.py b/tests/fixtures/acquisition.py index 4bf2899877..552ad033f9 100644 --- a/tests/fixtures/acquisition.py +++ b/tests/fixtures/acquisition.py @@ -22,20 +22,22 @@ import pytest from api.acquisition.acq_utils import _make_resource -from utils import flush_index - -from rero_ils.modules.acquisition.acq_accounts.api import AcqAccount, \ - AcqAccountsSearch -from rero_ils.modules.acquisition.acq_invoices.api import AcquisitionInvoice, \ - AcquisitionInvoicesSearch -from rero_ils.modules.acquisition.acq_order_lines.api import AcqOrderLine, \ - AcqOrderLinesSearch -from rero_ils.modules.acquisition.acq_orders.api import AcqOrder, \ - AcqOrdersSearch -from rero_ils.modules.acquisition.acq_receipt_lines.api import \ - AcqReceiptLine, AcqReceiptLinesSearch -from rero_ils.modules.acquisition.acq_receipts.api import AcqReceipt, \ - AcqReceiptsSearch + +from rero_ils.modules.acquisition.acq_accounts.api import AcqAccount, AcqAccountsSearch +from rero_ils.modules.acquisition.acq_invoices.api import ( + AcquisitionInvoice, + AcquisitionInvoicesSearch, +) +from rero_ils.modules.acquisition.acq_order_lines.api import ( + AcqOrderLine, + AcqOrderLinesSearch, +) +from rero_ils.modules.acquisition.acq_orders.api import AcqOrder, AcqOrdersSearch +from rero_ils.modules.acquisition.acq_receipt_lines.api import ( + AcqReceiptLine, + AcqReceiptLinesSearch, +) +from rero_ils.modules.acquisition.acq_receipts.api import AcqReceipt, AcqReceiptsSearch from rero_ils.modules.acquisition.budgets.api import Budget, BudgetsSearch from rero_ils.modules.utils import get_ref_for_pid as get_ref from rero_ils.modules.vendors.api import Vendor, VendorsSearch @@ -44,18 +46,16 @@ @pytest.fixture(scope="module") def vendor_martigny_data(acquisition): """Load vendor data.""" - return deepcopy(acquisition.get('vndr1')) + return deepcopy(acquisition.get("vndr1")) @pytest.fixture(scope="module") def vendor_martigny(app, org_martigny, vendor_martigny_data): """Load vendor record.""" vendor = Vendor.create( - data=vendor_martigny_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(VendorsSearch.Meta.index) + data=vendor_martigny_data, delete_pid=False, dbcommit=True, reindex=True + ) + VendorsSearch.flush_and_refresh() return vendor @@ -63,694 +63,749 @@ def vendor_martigny(app, org_martigny, vendor_martigny_data): def vendor_martigny_tmp(app, org_martigny, vendor_martigny): """Load vendor record.""" vendor = Vendor.create( - data=vendor_martigny, - delete_pid=True, - dbcommit=True, - reindex=True) - flush_index(VendorsSearch.Meta.index) + 
data=vendor_martigny, delete_pid=True, dbcommit=True, reindex=True + ) + VendorsSearch.flush_and_refresh() return vendor @pytest.fixture(scope="module") def vendor2_martigny_data(acquisition): """Load vendor data.""" - return deepcopy(acquisition.get('vndr2')) + return deepcopy(acquisition.get("vndr2")) @pytest.fixture(scope="module") def vendor2_martigny(app, org_martigny, vendor2_martigny_data): """Load vendor record.""" vendor = Vendor.create( - data=vendor2_martigny_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(VendorsSearch.Meta.index) + data=vendor2_martigny_data, delete_pid=False, dbcommit=True, reindex=True + ) + VendorsSearch.flush_and_refresh() return vendor @pytest.fixture(scope="module") def vendor3_martigny_data(acquisition): """Load vendor 3 data.""" - return deepcopy(acquisition.get('vndr3')) + return deepcopy(acquisition.get("vndr3")) @pytest.fixture(scope="module") def vendor3_martigny(app, org_martigny, vendor3_martigny_data): """Load vendor record.""" vendor = Vendor.create( - data=vendor3_martigny_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(VendorsSearch.Meta.index) + data=vendor3_martigny_data, delete_pid=False, dbcommit=True, reindex=True + ) + VendorsSearch.flush_and_refresh() return vendor @pytest.fixture(scope="module") def vendor_sion_data(acquisition): """Load vendor data.""" - return deepcopy(acquisition.get('vndr4')) + return deepcopy(acquisition.get("vndr4")) @pytest.fixture(scope="module") def vendor_sion(app, org_sion, vendor_sion_data): """Load vendor record.""" vendor = Vendor.create( - data=vendor_sion_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(VendorsSearch.Meta.index) + data=vendor_sion_data, delete_pid=False, dbcommit=True, reindex=True + ) + VendorsSearch.flush_and_refresh() return vendor @pytest.fixture(scope="module") def vendor2_sion_data(acquisition): """Load vendor data.""" - return deepcopy(acquisition.get('vndr5')) + return deepcopy(acquisition.get("vndr5")) @pytest.fixture(scope="module") def vendor2_sion(app, org_sion, vendor2_sion_data): """Load vendor record.""" vendor = Vendor.create( - data=vendor2_sion_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(VendorsSearch.Meta.index) + data=vendor2_sion_data, delete_pid=False, dbcommit=True, reindex=True + ) + VendorsSearch.flush_and_refresh() return vendor @pytest.fixture(scope="function") def budget_2020_martigny_data_tmp(acquisition): """Load standard budget 2020 of martigny.""" - return deepcopy(acquisition.get('budg1')) + return deepcopy(acquisition.get("budg1")) @pytest.fixture(scope="module") def budget_2020_sion_data(acquisition): """Load budget 2020 sion.""" - return deepcopy(acquisition.get('budg2')) + return deepcopy(acquisition.get("budg2")) @pytest.fixture(scope="module") def budget_2020_martigny_data(acquisition): """Load budget 2020 martigny.""" - return deepcopy(acquisition.get('budg1')) + return deepcopy(acquisition.get("budg1")) @pytest.fixture(scope="module") def budget_2019_martigny_data(acquisition): """Load budget 2019 martigny.""" - return deepcopy(acquisition.get('budg3')) + return deepcopy(acquisition.get("budg3")) @pytest.fixture(scope="module") def budget_2018_martigny_data(acquisition): """Load budget 2018 martigny.""" - return deepcopy(acquisition.get('budg4')) + return deepcopy(acquisition.get("budg4")) @pytest.fixture(scope="module") def budget_2017_martigny_data(acquisition): """Load budget 2017 martigny.""" - return deepcopy(acquisition.get('budg5')) + 
return deepcopy(acquisition.get("budg5")) @pytest.fixture(scope="module") -def budget_2017_martigny( - app, org_martigny, budget_2017_martigny_data): +def budget_2017_martigny(app, org_martigny, budget_2017_martigny_data): """Load budget 2017 martigny record.""" budget = Budget.create( - data=budget_2017_martigny_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(BudgetsSearch.Meta.index) + data=budget_2017_martigny_data, delete_pid=False, dbcommit=True, reindex=True + ) + BudgetsSearch.flush_and_refresh() return budget @pytest.fixture(scope="module") -def budget_2018_martigny( - app, org_martigny, budget_2018_martigny_data): +def budget_2018_martigny(app, org_martigny, budget_2018_martigny_data): """Load budget 2018 martigny record.""" budget = Budget.create( - data=budget_2018_martigny_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(BudgetsSearch.Meta.index) + data=budget_2018_martigny_data, delete_pid=False, dbcommit=True, reindex=True + ) + BudgetsSearch.flush_and_refresh() return budget @pytest.fixture(scope="module") -def budget_2020_martigny( - app, org_martigny, budget_2020_martigny_data): +def budget_2020_martigny(app, org_martigny, budget_2020_martigny_data): """Load budget 2020 martigny record.""" budget = Budget.create( - data=budget_2020_martigny_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(BudgetsSearch.Meta.index) + data=budget_2020_martigny_data, delete_pid=False, dbcommit=True, reindex=True + ) + BudgetsSearch.flush_and_refresh() return budget @pytest.fixture(scope="module") -def budget_2019_martigny( - app, org_martigny, budget_2019_martigny_data): +def budget_2019_martigny(app, org_martigny, budget_2019_martigny_data): """Load budget 2019 martigny record.""" budget = Budget.create( - data=budget_2019_martigny_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(BudgetsSearch.Meta.index) + data=budget_2019_martigny_data, delete_pid=False, dbcommit=True, reindex=True + ) + BudgetsSearch.flush_and_refresh() return budget @pytest.fixture(scope="module") -def budget_2020_sion( - app, org_sion, budget_2020_sion_data): +def budget_2020_sion(app, org_sion, budget_2020_sion_data): """Load budget 2020 sion record.""" budget = Budget.create( - data=budget_2020_sion_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(BudgetsSearch.Meta.index) + data=budget_2020_sion_data, delete_pid=False, dbcommit=True, reindex=True + ) + BudgetsSearch.flush_and_refresh() return budget @pytest.fixture(scope="function") def acq_account_fiction_martigny_data_tmp(acquisition): """Load standard acq account of martigny.""" - return deepcopy(acquisition.get('acac1')) + return deepcopy(acquisition.get("acac1")) @pytest.fixture(scope="module") def acq_account_fiction_martigny_data(acquisition): """Load acq_account lib martigny fiction data.""" - return deepcopy(acquisition.get('acac1')) + return deepcopy(acquisition.get("acac1")) @pytest.fixture(scope="module") def acq_account_fiction_martigny( - app, lib_martigny, acq_account_fiction_martigny_data, - budget_2020_martigny): + app, lib_martigny, acq_account_fiction_martigny_data, budget_2020_martigny +): """Load acq_account lib martigny fiction record.""" acac = AcqAccount.create( data=acq_account_fiction_martigny_data, delete_pid=False, dbcommit=True, - reindex=True) - flush_index(AcqAccountsSearch.Meta.index) + reindex=True, + ) + AcqAccountsSearch.flush_and_refresh() return acac @pytest.fixture(scope="module") def 
acq_account_books_martigny_data(acquisition): """Load acq_account lib martigny books data.""" - return deepcopy(acquisition.get('acac6')) + return deepcopy(acquisition.get("acac6")) @pytest.fixture(scope="module") def acq_account_books_martigny( - app, lib_martigny, acq_account_books_martigny_data, - budget_2020_martigny): + app, lib_martigny, acq_account_books_martigny_data, budget_2020_martigny +): """Load acq_account lib martigny books record.""" acac = AcqAccount.create( data=acq_account_books_martigny_data, delete_pid=False, dbcommit=True, - reindex=True) - flush_index(AcqAccountsSearch.Meta.index) + reindex=True, + ) + AcqAccountsSearch.flush_and_refresh() return acac @pytest.fixture(scope="module") def acq_account_books_saxon_data(acquisition): """Load acq_account lib saxon books data.""" - return deepcopy(acquisition.get('acac2')) + return deepcopy(acquisition.get("acac2")) @pytest.fixture(scope="module") def acq_account_books_saxon( - app, lib_saxon, acq_account_books_saxon_data, budget_2020_martigny): + app, lib_saxon, acq_account_books_saxon_data, budget_2020_martigny +): """Load acq_account lib saxon books record.""" acac = AcqAccount.create( - data=acq_account_books_saxon_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(AcqAccountsSearch.Meta.index) + data=acq_account_books_saxon_data, delete_pid=False, dbcommit=True, reindex=True + ) + AcqAccountsSearch.flush_and_refresh() return acac @pytest.fixture(scope="module") def acq_account_general_fully_data(acquisition): """Load acq_account lib fully general data.""" - return deepcopy(acquisition.get('acac3')) + return deepcopy(acquisition.get("acac3")) @pytest.fixture(scope="module") def acq_account_general_fully( - app, lib_fully, acq_account_general_fully_data, budget_2020_martigny): + app, lib_fully, acq_account_general_fully_data, budget_2020_martigny +): """Load acq_account lib fully general record.""" acac = AcqAccount.create( data=acq_account_general_fully_data, delete_pid=False, dbcommit=True, - reindex=True) - flush_index(AcqAccountsSearch.Meta.index) + reindex=True, + ) + AcqAccountsSearch.flush_and_refresh() return acac @pytest.fixture(scope="module") def acq_account_fiction_sion_data(acquisition): """Load acq_account lib sion fiction data.""" - return deepcopy(acquisition.get('acac4')) + return deepcopy(acquisition.get("acac4")) @pytest.fixture(scope="module") def acq_account_fiction_sion( - app, lib_saxon, acq_account_fiction_sion_data, budget_2020_sion): + app, lib_saxon, acq_account_fiction_sion_data, budget_2020_sion +): """Load acq_account lib sion fiction record.""" acac = AcqAccount.create( data=acq_account_fiction_sion_data, delete_pid=False, dbcommit=True, - reindex=True) - flush_index(AcqAccountsSearch.Meta.index) + reindex=True, + ) + AcqAccountsSearch.flush_and_refresh() return acac @pytest.fixture(scope="module") def acq_account_general_aproz_data(acquisition): """Load acq_account lib aproz general data.""" - return deepcopy(acquisition.get('acac5')) + return deepcopy(acquisition.get("acac5")) @pytest.fixture(scope="module") def acq_account_general_aproz( - app, lib_saxon, acq_account_general_aproz_data, budget_2020_sion): + app, lib_saxon, acq_account_general_aproz_data, budget_2020_sion +): """Load acq_account lib aproz general record.""" acac = AcqAccount.create( data=acq_account_general_aproz_data, delete_pid=False, dbcommit=True, - reindex=True) - flush_index(AcqAccountsSearch.Meta.index) + reindex=True, + ) + AcqAccountsSearch.flush_and_refresh() return acac 
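Every fixture in this module follows the same create-then-flush pattern seen above. Purely as an illustration, relying only on the `create()` and `flush_and_refresh()` signatures used throughout this patch, the repetition could be folded into one helper (the `create_and_index` name is hypothetical, not part of rero_ils):

def create_and_index(record_cls, search_cls, data, delete_pid=False):
    """Create and index a record, then wait for the index refresh (sketch)."""
    record = record_cls.create(
        data=data, delete_pid=delete_pid, dbcommit=True, reindex=True
    )
    # the call this patch introduces in place of the old flush_index()
    search_cls.flush_and_refresh()
    return record

# e.g. acac = create_and_index(
#     AcqAccount, AcqAccountsSearch, acq_account_general_aproz_data
# )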
@pytest.fixture(scope="module") def acq_order_fiction_martigny_data(acquisition): """Load acq_order lib martigny fiction data.""" - return deepcopy(acquisition.get('acor1')) + return deepcopy(acquisition.get("acor1")) @pytest.fixture(scope="function") def acq_order_fiction_martigny_data_tmp(acquisition): """Load acq_order lib martigny fiction data.""" - return deepcopy(acquisition.get('acor1')) + return deepcopy(acquisition.get("acor1")) @pytest.fixture(scope="module") def acq_order_fiction_martigny( - app, lib_martigny, vendor_martigny, acq_order_fiction_martigny_data): + app, lib_martigny, vendor_martigny, acq_order_fiction_martigny_data +): """Load acq_order lib martigny fiction record.""" acor = AcqOrder.create( data=acq_order_fiction_martigny_data, delete_pid=False, dbcommit=True, - reindex=True) - flush_index(AcqOrdersSearch.Meta.index) + reindex=True, + ) + AcqOrdersSearch.flush_and_refresh() return acor @pytest.fixture(scope="module") def acq_receipt_fiction_martigny_data(acquisition): """Load acq_receipt lib martigny fiction data.""" - return deepcopy(acquisition.get('acre1')) + return deepcopy(acquisition.get("acre1")) @pytest.fixture(scope="module") def acq_receipt_line_1_fiction_martigny_data(acquisition): """Load acq_receipt_line_1 lib martigny fiction data.""" - return deepcopy(acquisition.get('acrl1')) + return deepcopy(acquisition.get("acrl1")) @pytest.fixture(scope="module") def acq_receipt_line_2_fiction_martigny_data(acquisition): """Load acq_receipt_line_2 lib martigny fiction data.""" - return deepcopy(acquisition.get('acrl2')) + return deepcopy(acquisition.get("acrl2")) @pytest.fixture(scope="function") def acq_receipt_fiction_martigny_data_tmp(acquisition): """Load acq_receipt lib martigny fiction data.""" - return deepcopy(acquisition.get('acre1')) + return deepcopy(acquisition.get("acre1")) @pytest.fixture(scope="function") def acq_receipt_line_1_fiction_martigny_data_tmp(acquisition): """Load acq_receipt_line_1 lib martigny fiction data.""" - return deepcopy(acquisition.get('acrl1')) + return deepcopy(acquisition.get("acrl1")) @pytest.fixture(scope="module") def acq_receipt_fiction_martigny( - app, lib_martigny, acq_order_fiction_martigny, - acq_receipt_fiction_martigny_data, acq_account_fiction_martigny): + app, + lib_martigny, + acq_order_fiction_martigny, + acq_receipt_fiction_martigny_data, + acq_account_fiction_martigny, +): """Load acq_receipt lib martigny fiction record.""" acor = AcqReceipt.create( data=acq_receipt_fiction_martigny_data, delete_pid=False, dbcommit=True, - reindex=True) - flush_index(AcqReceiptsSearch.Meta.index) + reindex=True, + ) + AcqReceiptsSearch.flush_and_refresh() return acor @pytest.fixture(scope="module") def acq_receipt_line_1_fiction_martigny( - app, acq_receipt_fiction_martigny, acq_order_line_fiction_martigny, - acq_receipt_line_1_fiction_martigny_data): + app, + acq_receipt_fiction_martigny, + acq_order_line_fiction_martigny, + acq_receipt_line_1_fiction_martigny_data, +): """Load acq_receipt_line_1 lib martigny fiction record.""" acrl = AcqReceiptLine.create( data=acq_receipt_line_1_fiction_martigny_data, delete_pid=False, dbcommit=True, - reindex=True) - flush_index(AcqReceiptLinesSearch.Meta.index) + reindex=True, + ) + AcqReceiptLinesSearch.flush_and_refresh() return acrl @pytest.fixture(scope="module") def acq_receipt_line_2_fiction_martigny( - app, acq_receipt_fiction_martigny, acq_order_line2_fiction_martigny, - acq_receipt_line_2_fiction_martigny_data): + app, + acq_receipt_fiction_martigny, + 
acq_order_line2_fiction_martigny, + acq_receipt_line_2_fiction_martigny_data, +): """Load acq_receipt_line_2 lib martigny fiction record.""" acrl = AcqReceiptLine.create( data=acq_receipt_line_2_fiction_martigny_data, delete_pid=False, dbcommit=True, - reindex=True) - flush_index(AcqReceiptLinesSearch.Meta.index) + reindex=True, + ) + AcqReceiptLinesSearch.flush_and_refresh() return acrl @pytest.fixture(scope="module") def acq_order_fiction_saxon_data(acquisition): """Load acq_order lib saxon fiction data.""" - return deepcopy(acquisition.get('acor2')) + return deepcopy(acquisition.get("acor2")) @pytest.fixture(scope="module") def acq_order_fiction_saxon( - app, lib_saxon, vendor2_martigny, acq_order_fiction_saxon_data): + app, lib_saxon, vendor2_martigny, acq_order_fiction_saxon_data +): """Load acq_order lib saxon fiction record.""" acor = AcqOrder.create( - data=acq_order_fiction_saxon_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(AcqOrdersSearch.Meta.index) + data=acq_order_fiction_saxon_data, delete_pid=False, dbcommit=True, reindex=True + ) + AcqOrdersSearch.flush_and_refresh() return acor @pytest.fixture(scope="module") def acq_receipt_fiction_saxon_data(acquisition): """Load acq_receipt lib saxon fiction data.""" - return deepcopy(acquisition.get('acre2')) + return deepcopy(acquisition.get("acre2")) @pytest.fixture(scope="module") def acq_receipt_line_fiction_saxon_data(acquisition): """Load acq_receipt_line lib saxon fiction data.""" - return deepcopy(acquisition.get('acrl3')) + return deepcopy(acquisition.get("acrl3")) @pytest.fixture(scope="module") def acq_receipt_fiction_saxon( - app, lib_saxon, vendor_martigny, acq_order_fiction_saxon, - acq_receipt_fiction_saxon_data, acq_account_books_saxon): + app, + lib_saxon, + vendor_martigny, + acq_order_fiction_saxon, + acq_receipt_fiction_saxon_data, + acq_account_books_saxon, +): """Load acq_receipt lib saxon fiction record.""" acre = AcqReceipt.create( data=acq_receipt_fiction_saxon_data, delete_pid=False, dbcommit=True, - reindex=True) - flush_index(AcqReceiptsSearch.Meta.index) + reindex=True, + ) + AcqReceiptsSearch.flush_and_refresh() return acre @pytest.fixture(scope="module") def acq_receipt_line_fiction_saxon( - app, acq_receipt_fiction_saxon, acq_order_line_fiction_saxon, - acq_receipt_line_fiction_saxon_data): + app, + acq_receipt_fiction_saxon, + acq_order_line_fiction_saxon, + acq_receipt_line_fiction_saxon_data, +): """Load acq_receipt_line lib saxon fiction record.""" acrl = AcqReceiptLine.create( data=acq_receipt_line_fiction_saxon_data, delete_pid=False, dbcommit=True, - reindex=True) - flush_index(AcqReceiptLinesSearch.Meta.index) + reindex=True, + ) + AcqReceiptLinesSearch.flush_and_refresh() return acrl @pytest.fixture(scope="module") def acq_order_fiction_sion_data(acquisition): """Load acq_order lib sion fiction data.""" - return deepcopy(acquisition.get('acor3')) + return deepcopy(acquisition.get("acor3")) @pytest.fixture(scope="module") -def acq_order_fiction_sion( - app, lib_sion, vendor_sion, acq_order_fiction_sion_data): +def acq_order_fiction_sion(app, lib_sion, vendor_sion, acq_order_fiction_sion_data): """Load acq_order lib sion fiction record.""" acor = AcqOrder.create( - data=acq_order_fiction_sion_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(AcqOrdersSearch.Meta.index) + data=acq_order_fiction_sion_data, delete_pid=False, dbcommit=True, reindex=True + ) + AcqOrdersSearch.flush_and_refresh() return acor @pytest.fixture(scope="module") def 
acq_receipt_fiction_sion_data(acquisition): """Load acq_receipt lib sion fiction data.""" - return deepcopy(acquisition.get('acre3')) + return deepcopy(acquisition.get("acre3")) @pytest.fixture(scope="module") def acq_receipt_line_fiction_sion_data(acquisition): """Load acq_receipt_line lib sion fiction data.""" - return deepcopy(acquisition.get('acrl4')) + return deepcopy(acquisition.get("acrl4")) @pytest.fixture(scope="module") def acq_receipt_fiction_sion( - app, lib_sion, vendor_sion, acq_order_fiction_sion, - acq_receipt_fiction_sion_data, acq_account_fiction_sion): + app, + lib_sion, + vendor_sion, + acq_order_fiction_sion, + acq_receipt_fiction_sion_data, + acq_account_fiction_sion, +): """Load acq_receipt lib sion fiction record.""" acor = AcqReceipt.create( data=acq_receipt_fiction_sion_data, delete_pid=False, dbcommit=True, - reindex=True) - flush_index(AcqReceiptsSearch.Meta.index) + reindex=True, + ) + AcqReceiptsSearch.flush_and_refresh() return acor @pytest.fixture(scope="module") def acq_receipt_line_fiction_sion( - app, acq_receipt_fiction_sion, acq_order_line_fiction_sion, - acq_receipt_line_fiction_sion_data): + app, + acq_receipt_fiction_sion, + acq_order_line_fiction_sion, + acq_receipt_line_fiction_sion_data, +): """Load acq_receipt_line lib sion fiction record.""" acrl = AcqReceiptLine.create( data=acq_receipt_line_fiction_sion_data, delete_pid=False, dbcommit=True, - reindex=True) - flush_index(AcqReceiptLinesSearch.Meta.index) + reindex=True, + ) + AcqReceiptLinesSearch.flush_and_refresh() return acrl @pytest.fixture(scope="module") def acq_order_line_fiction_martigny_data(acquisition): """Load acq_order_line lib martigny fiction data.""" - return deepcopy(acquisition.get('acol1')) + return deepcopy(acquisition.get("acol1")) @pytest.fixture(scope="function") def acq_order_line_fiction_martigny_data_tmp(acquisition): """Load acq_order_line lib martigny fiction data.""" - return deepcopy(acquisition.get('acol1')) + return deepcopy(acquisition.get("acol1")) @pytest.fixture(scope="module") def acq_order_line_fiction_martigny( - app, acq_account_fiction_martigny, document, - acq_order_fiction_martigny, acq_order_line_fiction_martigny_data): + app, + acq_account_fiction_martigny, + document, + acq_order_fiction_martigny, + acq_order_line_fiction_martigny_data, +): """Load acq_order_line lib martigny fiction record.""" acol = AcqOrderLine.create( data=acq_order_line_fiction_martigny_data, delete_pid=False, dbcommit=True, - reindex=True) - flush_index(AcqOrderLinesSearch.Meta.index) + reindex=True, + ) + AcqOrderLinesSearch.flush_and_refresh() return acol @pytest.fixture(scope="module") def acq_order_line2_fiction_martigny_data(acquisition): """Load acq_order_line lib martigny fiction data.""" - return deepcopy(acquisition.get('acol2')) + return deepcopy(acquisition.get("acol2")) @pytest.fixture(scope="module") def acq_order_line2_fiction_martigny( - app, acq_account_fiction_martigny, document, - acq_order_fiction_martigny, acq_order_line2_fiction_martigny_data): + app, + acq_account_fiction_martigny, + document, + acq_order_fiction_martigny, + acq_order_line2_fiction_martigny_data, +): """Load acq_order_line lib martigny fiction record.""" acol = AcqOrderLine.create( data=acq_order_line2_fiction_martigny_data, delete_pid=False, dbcommit=True, - reindex=True) - flush_index(AcqOrderLinesSearch.Meta.index) + reindex=True, + ) + AcqOrderLinesSearch.flush_and_refresh() return acol @pytest.fixture(scope="module") def acq_order_line3_fiction_martigny_data(acquisition): """Load 
acq_order_line lib martigny fiction data.""" - return deepcopy(acquisition.get('acol5')) + return deepcopy(acquisition.get("acol5")) @pytest.fixture(scope="module") def acq_order_line3_fiction_martigny( - app, acq_account_fiction_martigny, document, - acq_order_fiction_martigny, acq_order_line3_fiction_martigny_data): + app, + acq_account_fiction_martigny, + document, + acq_order_fiction_martigny, + acq_order_line3_fiction_martigny_data, +): """Load acq_order_line lib martigny fiction record.""" acol = AcqOrderLine.create( data=acq_order_line3_fiction_martigny_data, delete_pid=False, dbcommit=True, - reindex=True) - flush_index(AcqOrderLinesSearch.Meta.index) + reindex=True, + ) + AcqOrderLinesSearch.flush_and_refresh() return acol @pytest.fixture(scope="module") def acq_order_line_fiction_saxon_data(acquisition): """Load acq_order_line lib saxon fiction data.""" - return deepcopy(acquisition.get('acol3')) + return deepcopy(acquisition.get("acol3")) @pytest.fixture(scope="module") def acq_order_line_fiction_saxon( - app, acq_account_books_saxon, - acq_order_fiction_saxon, acq_order_line_fiction_saxon_data): + app, + acq_account_books_saxon, + acq_order_fiction_saxon, + acq_order_line_fiction_saxon_data, +): """Load acq_order_line lib saxon fiction record.""" acol = AcqOrderLine.create( data=acq_order_line_fiction_saxon_data, delete_pid=False, dbcommit=True, - reindex=True) - flush_index(AcqOrderLinesSearch.Meta.index) + reindex=True, + ) + AcqOrderLinesSearch.flush_and_refresh() return acol @pytest.fixture(scope="module") def acq_order_line_fiction_sion_data(acquisition): """Load acq_order_line lib sion fiction data.""" - return deepcopy(acquisition.get('acol4')) + return deepcopy(acquisition.get("acol4")) @pytest.fixture(scope="module") def acq_order_line_fiction_sion( - app, acq_account_fiction_sion, - acq_order_fiction_sion, acq_order_line_fiction_sion_data): + app, + acq_account_fiction_sion, + acq_order_fiction_sion, + acq_order_line_fiction_sion_data, +): """Load acq_order_line lib sion fiction record.""" acol = AcqOrderLine.create( data=acq_order_line_fiction_sion_data, delete_pid=False, dbcommit=True, - reindex=True) - flush_index(AcqOrderLinesSearch.Meta.index) + reindex=True, + ) + AcqOrderLinesSearch.flush_and_refresh() return acol @pytest.fixture(scope="module") def acq_invoice_fiction_martigny_data(acquisition): """Load acq_invoice lib martigny fiction data.""" - return deepcopy(acquisition.get('acin1')) + return deepcopy(acquisition.get("acin1")) @pytest.fixture(scope="module") def acq_invoice_fiction_martigny( - app, lib_martigny, vendor_martigny, acq_invoice_fiction_martigny_data, - document, document_ref, acq_order_fiction_martigny, - acq_order_line_fiction_martigny, acq_order_line2_fiction_martigny): + app, + lib_martigny, + vendor_martigny, + acq_invoice_fiction_martigny_data, + document, + document_ref, + acq_order_fiction_martigny, + acq_order_line_fiction_martigny, + acq_order_line2_fiction_martigny, +): """Load acq_invoice lib martigny fiction record.""" acin = AcquisitionInvoice.create( data=acq_invoice_fiction_martigny_data, delete_pid=False, dbcommit=True, - reindex=True) - flush_index(AcquisitionInvoicesSearch.Meta.index) + reindex=True, + ) + AcquisitionInvoicesSearch.flush_and_refresh() return acin @pytest.fixture(scope="module") def acq_invoice_fiction_saxon_data(acquisition): """Load acq_invoice lib martigny fiction data.""" - return deepcopy(acquisition.get('acin2')) + return deepcopy(acquisition.get("acin2")) @pytest.fixture(scope="module") def 
acq_invoice_fiction_saxon( - app, lib_saxon, vendor2_martigny, acq_invoice_fiction_saxon_data, - acq_order_fiction_saxon, acq_order_line_fiction_saxon): + app, + lib_saxon, + vendor2_martigny, + acq_invoice_fiction_saxon_data, + acq_order_fiction_saxon, + acq_order_line_fiction_saxon, +): """Load acq_invoice lib saxon fiction record.""" acin = AcquisitionInvoice.create( data=acq_invoice_fiction_saxon_data, delete_pid=False, dbcommit=True, - reindex=True) - flush_index(AcquisitionInvoicesSearch.Meta.index) + reindex=True, + ) + AcquisitionInvoicesSearch.flush_and_refresh() return acin @pytest.fixture(scope="module") def acq_invoice_fiction_sion_data(acquisition): """Load acq_invoice lib sion fiction data.""" - return deepcopy(acquisition.get('acin3')) + return deepcopy(acquisition.get("acin3")) @pytest.fixture(scope="module") def acq_invoice_fiction_sion( - app, lib_sion, vendor_sion, acq_invoice_fiction_sion_data, - acq_order_fiction_sion, acq_order_line_fiction_sion): + app, + lib_sion, + vendor_sion, + acq_invoice_fiction_sion_data, + acq_order_fiction_sion, + acq_order_line_fiction_sion, +): """Load acq_invoice lib sion fiction record.""" acin = AcquisitionInvoice.create( data=acq_invoice_fiction_sion_data, delete_pid=False, dbcommit=True, - reindex=True) - flush_index(AcquisitionInvoicesSearch.Meta.index) + reindex=True, + ) + AcquisitionInvoicesSearch.flush_and_refresh() return acin @pytest.fixture(scope="function") -def acq_full_structure_a(client, lib_martigny, vendor_martigny, document, - org_martigny): +def acq_full_structure_a(client, lib_martigny, vendor_martigny, document, org_martigny): """Create a full acquisition structure. Budget_A @@ -769,118 +824,180 @@ def acq_full_structure_a(client, lib_martigny, vendor_martigny, document, +--> Reception_30_1 + ReceptionLine_30_1_1 (ref to OrderLine_30_1) """ - org_ref = get_ref('org', org_martigny.pid) - lib_ref = get_ref('lib', lib_martigny.pid) - vendor_ref = get_ref('vndr', vendor_martigny.pid) + org_ref = get_ref("org", org_martigny.pid) + lib_ref = get_ref("lib", lib_martigny.pid) + vendor_ref = get_ref("vndr", vendor_martigny.pid) # Budget ============================================== - budget = _make_resource(client, 'budg', { - 'name': 'Budget A', - 'start_date': '2022-01-01', - 'end_date': '2022-12-31', - 'is_active': True, - 'organisation': {'$ref': org_ref} - }) - budget_ref = get_ref('budg', budget.pid) + budget = _make_resource( + client, + "budg", + { + "name": "Budget A", + "start_date": "2022-01-01", + "end_date": "2022-12-31", + "is_active": True, + "organisation": {"$ref": org_ref}, + }, + ) + budget_ref = get_ref("budg", budget.pid) # Accounts ============================================ - acac1 = _make_resource(client, 'acac', { - 'name': 'account_1', - 'number': '000.0000.01', - 'allocated_amount': 1000, - 'budget': {'$ref': budget_ref}, - 'library': {'$ref': lib_ref} - }) - acac2 = _make_resource(client, 'acac', { - 'name': 'account_2', - 'number': '000.0000.02', - 'allocated_amount': 2000, - 'budget': {'$ref': budget_ref}, - 'library': {'$ref': lib_ref} - }) - acac3 = _make_resource(client, 'acac', { - 'name': 'account_3', - 'number': '000.0000.03', - 'allocated_amount': 3000, - 'budget': {'$ref': budget_ref}, - 'library': {'$ref': lib_ref} - }) - acac31 = _make_resource(client, 'acac', { - 'name': 'account_3.1', - 'number': '000.0000.03', - 'allocated_amount': 300, - 'budget': {'$ref': budget_ref}, - 'library': {'$ref': lib_ref}, - 'parent': {'$ref': get_ref('acac', acac3.pid)}, - }) + acac1 = _make_resource( 
+ client, + "acac", + { + "name": "account_1", + "number": "000.0000.01", + "allocated_amount": 1000, + "budget": {"$ref": budget_ref}, + "library": {"$ref": lib_ref}, + }, + ) + acac2 = _make_resource( + client, + "acac", + { + "name": "account_2", + "number": "000.0000.02", + "allocated_amount": 2000, + "budget": {"$ref": budget_ref}, + "library": {"$ref": lib_ref}, + }, + ) + acac3 = _make_resource( + client, + "acac", + { + "name": "account_3", + "number": "000.0000.03", + "allocated_amount": 3000, + "budget": {"$ref": budget_ref}, + "library": {"$ref": lib_ref}, + }, + ) + acac31 = _make_resource( + client, + "acac", + { + "name": "account_3.1", + "number": "000.0000.03", + "allocated_amount": 300, + "budget": {"$ref": budget_ref}, + "library": {"$ref": lib_ref}, + "parent": {"$ref": get_ref("acac", acac3.pid)}, + }, + ) # Orders ============================================== - order_10 = _make_resource(client, 'acor', { - 'vendor': {'$ref': vendor_ref}, - 'library': {'$ref': lib_ref}, - 'type': 'monograph', - }) - order_20 = _make_resource(client, 'acor', { - 'vendor': {'$ref': vendor_ref}, - 'library': {'$ref': lib_ref}, - 'type': 'monograph', - }) - order_30 = _make_resource(client, 'acor', { - 'vendor': {'$ref': vendor_ref}, - 'library': {'$ref': lib_ref}, - 'type': 'monograph', - }) + order_10 = _make_resource( + client, + "acor", + { + "vendor": {"$ref": vendor_ref}, + "library": {"$ref": lib_ref}, + "type": "monograph", + }, + ) + order_20 = _make_resource( + client, + "acor", + { + "vendor": {"$ref": vendor_ref}, + "library": {"$ref": lib_ref}, + "type": "monograph", + }, + ) + order_30 = _make_resource( + client, + "acor", + { + "vendor": {"$ref": vendor_ref}, + "library": {"$ref": lib_ref}, + "type": "monograph", + }, + ) # OrderLines ========================================== - orderline_10_1 = _make_resource(client, 'acol', { - 'acq_account': {'$ref': get_ref('acac', acac1.pid)}, - 'acq_order': {'$ref': get_ref('acor', order_10.pid)}, - 'document': {'$ref': get_ref('doc', document.pid)}, - 'quantity': 4, - 'amount': 25 - }) - orderline_10_2 = _make_resource(client, 'acol', { - 'acq_account': {'$ref': get_ref('acac', acac1.pid)}, - 'acq_order': {'$ref': get_ref('acor', order_10.pid)}, - 'document': {'$ref': get_ref('doc', document.pid)}, - 'quantity': 2, - 'amount': 15 - }) - orderline_30_1 = _make_resource(client, 'acol', { - 'acq_account': {'$ref': get_ref('acac', acac31.pid)}, - 'acq_order': {'$ref': get_ref('acor', order_30.pid)}, - 'document': {'$ref': get_ref('doc', document.pid)}, - 'quantity': 3, - 'amount': 33 - }) + orderline_10_1 = _make_resource( + client, + "acol", + { + "acq_account": {"$ref": get_ref("acac", acac1.pid)}, + "acq_order": {"$ref": get_ref("acor", order_10.pid)}, + "document": {"$ref": get_ref("doc", document.pid)}, + "quantity": 4, + "amount": 25, + }, + ) + orderline_10_2 = _make_resource( + client, + "acol", + { + "acq_account": {"$ref": get_ref("acac", acac1.pid)}, + "acq_order": {"$ref": get_ref("acor", order_10.pid)}, + "document": {"$ref": get_ref("doc", document.pid)}, + "quantity": 2, + "amount": 15, + }, + ) + orderline_30_1 = _make_resource( + client, + "acol", + { + "acq_account": {"$ref": get_ref("acac", acac31.pid)}, + "acq_order": {"$ref": get_ref("acor", order_30.pid)}, + "document": {"$ref": get_ref("doc", document.pid)}, + "quantity": 3, + "amount": 33, + }, + ) # Reception =========================================== - reception_10_1 = _make_resource(client, 'acre', { - 'acq_order': {'$ref': get_ref('acor', order_10.pid)}, - 
'exchange_rate': 1, - 'amount_adjustments': [{ - 'label': 'handling fees', - 'amount': 2.0, - 'acq_account': {'$ref': get_ref('acac', acac1.pid)} - }], - 'library': {'$ref': lib_ref} - }) - reception_30_1 = _make_resource(client, 'acre', { - 'acq_order': {'$ref': get_ref('acor', order_30.pid)}, - 'exchange_rate': 1, - 'library': {'$ref': lib_ref} - }) + reception_10_1 = _make_resource( + client, + "acre", + { + "acq_order": {"$ref": get_ref("acor", order_10.pid)}, + "exchange_rate": 1, + "amount_adjustments": [ + { + "label": "handling fees", + "amount": 2.0, + "acq_account": {"$ref": get_ref("acac", acac1.pid)}, + } + ], + "library": {"$ref": lib_ref}, + }, + ) + reception_30_1 = _make_resource( + client, + "acre", + { + "acq_order": {"$ref": get_ref("acor", order_30.pid)}, + "exchange_rate": 1, + "library": {"$ref": lib_ref}, + }, + ) # ReceptionLine ======================================= - receptionLine_10_1_1 = _make_resource(client, 'acrl', { - 'acq_receipt': {'$ref': get_ref('acre', reception_10_1.pid)}, - 'acq_order_line': {'$ref': get_ref('acol', orderline_10_1.pid)}, - 'quantity': 2, - 'amount': 25, - 'receipt_date': '2022-06-01', - 'library': {'$ref': lib_ref} - }) - receptionLine_30_1_1 = _make_resource(client, 'acrl', { - 'acq_receipt': {'$ref': get_ref('acre', reception_30_1.pid)}, - 'acq_order_line': {'$ref': get_ref('acol', orderline_30_1.pid)}, - 'quantity': 1, - 'amount': 30, - 'receipt_date': '2022-07-01', - 'library': {'$ref': lib_ref} - }) + receptionLine_10_1_1 = _make_resource( + client, + "acrl", + { + "acq_receipt": {"$ref": get_ref("acre", reception_10_1.pid)}, + "acq_order_line": {"$ref": get_ref("acol", orderline_10_1.pid)}, + "quantity": 2, + "amount": 25, + "receipt_date": "2022-06-01", + "library": {"$ref": lib_ref}, + }, + ) + receptionLine_30_1_1 = _make_resource( + client, + "acrl", + { + "acq_receipt": {"$ref": get_ref("acre", reception_30_1.pid)}, + "acq_order_line": {"$ref": get_ref("acol", orderline_30_1.pid)}, + "quantity": 1, + "amount": 30, + "receipt_date": "2022-07-01", + "library": {"$ref": lib_ref}, + }, + ) return budget diff --git a/tests/fixtures/circulation.py b/tests/fixtures/circulation.py index 09de67e46a..2ffbb075ef 100644 --- a/tests/fixtures/circulation.py +++ b/tests/fixtures/circulation.py @@ -22,17 +22,18 @@ import mock import pytest -from invenio_circulation.search.api import LoansSearch from invenio_db import db -from utils import create_patron, flush_index, \ - item_record_to_a_specific_loan_state, patch_expiration_date +from utils import ( + create_patron, + item_record_to_a_specific_loan_state, + patch_expiration_date, +) -from rero_ils.modules.cli.fixtures import load_role_policies, \ - load_system_role_policies +from rero_ils.modules.cli.fixtures import load_role_policies, load_system_role_policies from rero_ils.modules.ill_requests.api import ILLRequest, ILLRequestsSearch from rero_ils.modules.items.api import ItemsSearch -from rero_ils.modules.loans.api import Loan -from rero_ils.modules.loans.logs.api import LoanOperationLog +from rero_ils.modules.loans.api import Loan, LoansSearch +from rero_ils.modules.loans.logs.api import LoanOperationLogsSearch from rero_ils.modules.loans.models import LoanState from rero_ils.modules.notifications.api import NotificationsSearch from rero_ils.modules.notifications.models import NotificationType @@ -47,7 +48,7 @@ @pytest.fixture(scope="module") def roles(base_app, database, role_policies_data, system_role_policies_data): """Create user roles.""" - ds = 
base_app.extensions['invenio-accounts'].datastore + ds = base_app.extensions["invenio-accounts"].datastore for role_name in UserRole.ALL_ROLES: ds.create_role(name=role_name) ds.commit() @@ -63,22 +64,23 @@ def roles(base_app, database, role_policies_data, system_role_policies_data): @pytest.fixture(scope="module") def system_librarian_martigny_data(data): """Load Martigny system librarian data.""" - return deepcopy(data.get('ptrn1')) + return deepcopy(data.get("ptrn1")) @pytest.fixture(scope="function") def system_librarian_martigny_data_tmp(data): """Load Martigny system librarian data scope function.""" - return deepcopy(data.get('ptrn1')) + return deepcopy(data.get("ptrn1")) @pytest.fixture(scope="module") def system_librarian_martigny( - app, - roles, - lib_martigny, - patron_type_children_martigny, - system_librarian_martigny_data): + app, + roles, + lib_martigny, + patron_type_children_martigny, + system_librarian_martigny_data, +): """Create Martigny system librarian record.""" data = system_librarian_martigny_data yield create_patron(data) @@ -87,21 +89,19 @@ def system_librarian_martigny( @pytest.fixture(scope="module") def system_librarian2_martigny_data(data): """Load Martigny system librarian data.""" - return deepcopy(data.get('ptrn12')) + return deepcopy(data.get("ptrn12")) @pytest.fixture(scope="function") def system_librarian2_martigny_data_tmp(data): """Load Martigny system librarian data scope function.""" - return deepcopy(data.get('ptrn12')) + return deepcopy(data.get("ptrn12")) @pytest.fixture(scope="module") def system_librarian2_martigny( - app, - roles, - lib_martigny, - system_librarian2_martigny_data): + app, roles, lib_martigny, system_librarian2_martigny_data +): """Create Martigny system librarian record.""" data = system_librarian2_martigny_data yield create_patron(data) @@ -111,21 +111,17 @@ def system_librarian2_martigny( @pytest.fixture(scope="module") def librarian_martigny_data(data): """Load Martigny librarian data.""" - return deepcopy(data.get('ptrn2')) + return deepcopy(data.get("ptrn2")) @pytest.fixture(scope="function") def librarian_martigny_data_tmp(data): """Load Martigny librarian data scope function.""" - return deepcopy(data.get('ptrn2')) + return deepcopy(data.get("ptrn2")) @pytest.fixture(scope="module") -def librarian_martigny( - app, - roles, - lib_martigny, - librarian_martigny_data): +def librarian_martigny(app, roles, lib_martigny, librarian_martigny_data): """Create Martigny librarian record.""" data = librarian_martigny_data yield create_patron(data) @@ -135,21 +131,19 @@ def librarian_martigny( @pytest.fixture(scope="module") def librarian_martigny_bourg_data(data): """Load Martigny librarian data.""" - return deepcopy(data.get('ptrn13')) + return deepcopy(data.get("ptrn13")) @pytest.fixture(scope="function") def librarian_martigny_bourg_data_tmp(data): """Load Martigny librarian data scope function.""" - return deepcopy(data.get('ptrn13')) + return deepcopy(data.get("ptrn13")) @pytest.fixture(scope="module") def librarian_martigny_bourg( - app, - roles, - lib_martigny_bourg, - librarian_martigny_bourg_data): + app, roles, lib_martigny_bourg, librarian_martigny_bourg_data +): """Create Martigny bourg librarian record.""" data = librarian_martigny_bourg_data yield create_patron(data) @@ -159,21 +153,17 @@ def librarian_martigny_bourg( @pytest.fixture(scope="module") def librarian2_martigny_data(data): """Load Martigny librarian data.""" - return deepcopy(data.get('ptrn3')) + return deepcopy(data.get("ptrn3")) 
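Alongside the black reflow, the patch systematically swaps the old test helper flush_index(XSearch.Meta.index) for a flush_and_refresh() classmethod on each search class (the import hunk at the top of this file drops flush_index from utils accordingly). A minimal sketch of what such a classmethod can look like, assuming invenio_search's current_search proxy and a search class that binds its index in Meta; the real implementation lives elsewhere in this patch:

    from invenio_search import RecordsSearch, current_search

    class ExampleRecordsSearch(RecordsSearch):
        """Hypothetical search class with its index bound in Meta."""

        class Meta:
            index = "examples"

        @classmethod
        def flush_and_refresh(cls):
            """Flush and refresh the bound index so freshly indexed
            documents are visible to the next search query."""
            current_search.flush_and_refresh(index=cls.Meta.index)

Calling ExampleRecordsSearch.flush_and_refresh() then replaces flush_index(ExampleRecordsSearch.Meta.index) one for one, without the caller reaching into Meta.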
@pytest.fixture(scope="function") def librarian2_martigny_data_tmp(data): """Load Martigny librarian data scope function.""" - return deepcopy(data.get('ptrn3')) + return deepcopy(data.get("ptrn3")) @pytest.fixture(scope="module") -def librarian2_martigny( - app, - roles, - lib_martigny, - librarian2_martigny_data): +def librarian2_martigny(app, roles, lib_martigny, librarian2_martigny_data): """Create Martigny librarian record.""" data = librarian2_martigny_data yield create_patron(data) @@ -183,21 +173,17 @@ def librarian2_martigny( @pytest.fixture(scope="module") def librarian_saxon_data(data): """Load Saxon librarian data.""" - return deepcopy(data.get('ptrn4')) + return deepcopy(data.get("ptrn4")) @pytest.fixture(scope="function") def librarian_saxon_data_tmp(data): """Load Saxon librarian data scope function.""" - return deepcopy(data.get('ptrn4')) + return deepcopy(data.get("ptrn4")) @pytest.fixture(scope="module") -def librarian_saxon( - app, - roles, - lib_saxon, - librarian_saxon_data): +def librarian_saxon(app, roles, lib_saxon, librarian_saxon_data): """Create Saxon librarian record.""" data = librarian_saxon_data yield create_patron(data) @@ -207,21 +193,17 @@ def librarian_saxon( @pytest.fixture(scope="module") def librarian_fully_data(data): """Load Fully librarian data.""" - return deepcopy(data.get('ptrn5')) + return deepcopy(data.get("ptrn5")) @pytest.fixture(scope="function") def librarian_fully_data_tmp(data): """Load Fully librarian data scope function.""" - return deepcopy(data.get('ptrn5')) + return deepcopy(data.get("ptrn5")) @pytest.fixture(scope="module") -def librarian_fully( - app, - roles, - lib_fully, - librarian_fully_data): +def librarian_fully(app, roles, lib_fully, librarian_fully_data): """Create Fully librarian record.""" data = librarian_fully_data yield create_patron(data) @@ -231,22 +213,19 @@ def librarian_fully( @pytest.fixture(scope="module") def patron_martigny_data(data): """Load Martigny patron data.""" - return deepcopy(patch_expiration_date(data.get('ptrn6'))) + return deepcopy(patch_expiration_date(data.get("ptrn6"))) @pytest.fixture(scope="function") def patron_martigny_data_tmp(data): """Load Martigny patron data scope function.""" - return deepcopy(patch_expiration_date(data.get('ptrn6'))) + return deepcopy(patch_expiration_date(data.get("ptrn6"))) @pytest.fixture(scope="module") def patron_martigny( - app, - roles, - lib_martigny, - patron_type_children_martigny, - patron_martigny_data): + app, roles, lib_martigny, patron_type_children_martigny, patron_martigny_data +): """Create Martigny patron record.""" data = patron_martigny_data yield create_patron(data) @@ -255,16 +234,17 @@ def patron_martigny( @pytest.fixture(scope="module") def librarian_patron_martigny_data(data): """Load Martigny librarian patron data.""" - return deepcopy(patch_expiration_date(data.get('ptrn14'))) + return deepcopy(patch_expiration_date(data.get("ptrn14"))) @pytest.fixture(scope="module") def librarian_patron_martigny( - app, - roles, - lib_martigny, - patron_type_children_martigny, - librarian_patron_martigny_data): + app, + roles, + lib_martigny, + patron_type_children_martigny, + librarian_patron_martigny_data, +): """Create Martigny librarian patron record.""" data = librarian_patron_martigny_data yield create_patron(data) @@ -274,16 +254,13 @@ def librarian_patron_martigny( @pytest.fixture(scope="module") def patron2_martigny_data(data): """Load Martigny patron data.""" - return deepcopy(patch_expiration_date(data.get('ptrn7'))) + return 
deepcopy(patch_expiration_date(data.get("ptrn7"))) @pytest.fixture(scope="module") def patron2_martigny( - app, - roles, - lib_martigny, - patron_type_adults_martigny, - patron2_martigny_data): + app, roles, lib_martigny, patron_type_adults_martigny, patron2_martigny_data +): """Create Martigny patron record.""" data = patron2_martigny_data yield create_patron(data) @@ -293,17 +270,18 @@ def patron2_martigny( @pytest.fixture(scope="module") def patron3_martigny_blocked_data(data): """Load Martigny blocked patron data.""" - return deepcopy(patch_expiration_date(data.get('ptrn11'))) + return deepcopy(patch_expiration_date(data.get("ptrn11"))) @pytest.fixture(scope="module") def patron3_martigny_blocked( - app, - roles, - lib_martigny, - lib_saxon, - patron_type_adults_martigny, - patron3_martigny_blocked_data): + app, + roles, + lib_martigny, + lib_saxon, + patron_type_adults_martigny, + patron3_martigny_blocked_data, +): """Create Martigny patron record.""" data = patron3_martigny_blocked_data yield create_patron(data) @@ -312,16 +290,13 @@ def patron3_martigny_blocked( @pytest.fixture(scope="module") def patron4_martigny_data(data): """Load Martigny patron data.""" - return deepcopy(patch_expiration_date((data.get('ptrn12')))) + return deepcopy(patch_expiration_date((data.get("ptrn12")))) @pytest.fixture(scope="module") def patron4_martigny( - app, - roles, - lib_martigny, - patron_type_adults_martigny, - patron4_martigny_data): + app, roles, lib_martigny, patron_type_adults_martigny, patron4_martigny_data +): """Create Martigny patron record.""" data = patron4_martigny_data yield create_patron(data) @@ -331,21 +306,17 @@ def patron4_martigny( @pytest.fixture(scope="module") def system_librarian_sion_data(data): """Load Sion system librarian data.""" - return deepcopy(data.get('ptrn8')) + return deepcopy(data.get("ptrn8")) @pytest.fixture(scope="function") def system_librarian_sion_data_tmp(data): """Load Sion system librarian data scope function.""" - return deepcopy(data.get('ptrn8')) + return deepcopy(data.get("ptrn8")) @pytest.fixture(scope="module") -def system_librarian_sion( - app, - roles, - lib_sion, - system_librarian_sion_data): +def system_librarian_sion(app, roles, lib_sion, system_librarian_sion_data): """Create Sion system librarian record.""" data = system_librarian_sion_data yield create_patron(data) @@ -355,15 +326,11 @@ def system_librarian_sion( @pytest.fixture(scope="module") def librarian_sion_data(data): """Load sion librarian data.""" - return deepcopy(data.get('ptrn9')) + return deepcopy(data.get("ptrn9")) @pytest.fixture(scope="module") -def librarian_sion( - app, - roles, - lib_sion, - librarian_sion_data): +def librarian_sion(app, roles, lib_sion, librarian_sion_data): """Create sion librarian record.""" data = librarian_sion_data yield create_patron(data) @@ -373,22 +340,17 @@ def librarian_sion( @pytest.fixture(scope="module") def patron_sion_data(data): """Load Sion patron data.""" - return deepcopy(patch_expiration_date(data.get('ptrn10'))) + return deepcopy(patch_expiration_date(data.get("ptrn10"))) @pytest.fixture(scope="function") def patron_sion_data_tmp(data): """Load Sion patron data scope function.""" - return deepcopy(patch_expiration_date(data.get('ptrn10'))) + return deepcopy(patch_expiration_date(data.get("ptrn10"))) @pytest.fixture(scope="module") -def patron_sion( - app, - roles, - lib_sion, - patron_type_grown_sion, - patron_sion_data): +def patron_sion(app, roles, lib_sion, patron_type_grown_sion, patron_sion_data): """Create Sion patron 
record.""" data = patron_sion_data yield create_patron(data) @@ -396,74 +358,64 @@ def patron_sion( @pytest.fixture(scope="module") def patron_sion_multiple( - app, - roles, - lib_sion, - patron_type_grown_sion, - patron2_martigny_data): + app, roles, lib_sion, patron_type_grown_sion, patron2_martigny_data +): """Create a Sion patron with the same user as Martigny patron.""" data = deepcopy(patron2_martigny_data) - data['pid'] = 'ptrn13' - data['patron']['barcode'] = ['42421313123'] - data['roles'] = [ - 'patron', - 'pro_read_only', - 'pro_catalog_manager', - 'pro_circulation_manager', - 'pro_user_manager' + data["pid"] = "ptrn13" + data["patron"]["barcode"] = ["42421313123"] + data["roles"] = [ + "patron", + "pro_read_only", + "pro_catalog_manager", + "pro_circulation_manager", + "pro_user_manager", ] pid = lib_sion.pid - data['libraries'] = [{'$ref': f'https://bib.rero.ch/api/libraries/{pid}'}] + data["libraries"] = [{"$ref": f"https://bib.rero.ch/api/libraries/{pid}"}] pid = patron_type_grown_sion.pid - data['patron']['type'] = { - '$ref': f'https://bib.rero.ch/api/patron_types/{pid}'} + data["patron"]["type"] = {"$ref": f"https://bib.rero.ch/api/patron_types/{pid}"} yield create_patron(data) @pytest.fixture(scope="module") def patron_sion_without_email1( - app, - roles, - lib_sion, - patron_type_grown_sion, - patron_sion_data): + app, roles, lib_sion, patron_type_grown_sion, patron_sion_data +): """Create Sion patron without sending reset password instruction.""" data = deepcopy(patron_sion_data) - del data['email'] - data['pid'] = 'ptrn10wthoutemail' - data['username'] = 'withoutemail' - data['patron']['barcode'] = ['18936287'] - data['patron']['communication_channel'] = CommunicationChannel.MAIL + del data["email"] + data["pid"] = "ptrn10wthoutemail" + data["username"] = "withoutemail" + data["patron"]["barcode"] = ["18936287"] + data["patron"]["communication_channel"] = CommunicationChannel.MAIL yield create_patron(data) @pytest.fixture(scope="module") def patron_sion_with_additional_email( - app, - roles, - lib_sion, - patron_type_grown_sion, - patron_sion_data): + app, roles, lib_sion, patron_type_grown_sion, patron_sion_data +): """Create Sion patron with an additional email only.""" data = deepcopy(patron_sion_data) - del data['email'] - data['pid'] = 'ptrn10additionalemail' - data['username'] = 'additionalemail' - data['patron']['barcode'] = ['additionalemail'] - data['patron']['additional_communication_email'] = \ - 'additional+jules@gmail.com' + del data["email"] + data["pid"] = "ptrn10additionalemail" + data["username"] = "additionalemail" + data["patron"]["barcode"] = ["additionalemail"] + data["patron"]["additional_communication_email"] = "additional+jules@gmail.com" yield create_patron(data) # ------------ Loans: pending loan ---------- @pytest.fixture(scope="module") def loan_pending_martigny( - app, - item_lib_fully, - loc_public_martigny, - librarian_martigny, - patron2_martigny, - circulation_policies): + app, + item_lib_fully, + loc_public_martigny, + librarian_martigny, + patron2_martigny, + circulation_policies, +): """Create loan record with state pending. item_lib_fully is requested by patron2_martigny. 
@@ -475,12 +427,11 @@ def loan_pending_martigny( transaction_user_pid=librarian_martigny.pid, transaction_date=transaction_date, pickup_location_pid=loc_public_martigny.pid, - document_pid=extracted_data_from_ref(item_lib_fully.get('document')) + document_pid=extracted_data_from_ref(item_lib_fully.get("document")), ) - flush_index(ItemsSearch.Meta.index) - flush_index(LoansSearch.Meta.index) - return list(item_lib_fully.get_loans_by_item_pid( - item_pid=item_lib_fully.pid))[0] + ItemsSearch.flush_and_refresh() + LoansSearch.flush_and_refresh() + return list(item_lib_fully.get_loans_by_item_pid(item_pid=item_lib_fully.pid))[0] @pytest.fixture(scope="module") @@ -498,15 +449,16 @@ def loan_pending_martigny_data_tmp(loan_pending_martigny): # ------------ Loans: validated loan ---------- @pytest.fixture(scope="module") def loan_validated_martigny( - app, - document, - item2_lib_martigny, - loc_public_martigny, - lib_martigny, - item_type_standard_martigny, - librarian_martigny, - patron_martigny, - circulation_policies): + app, + document, + item2_lib_martigny, + loc_public_martigny, + lib_martigny, + item_type_standard_martigny, + librarian_martigny, + patron_martigny, + circulation_policies, +): """Request and validate item to a patron. item2_lib_martigny is requested and validated to patron_martigny. @@ -519,15 +471,15 @@ def loan_validated_martigny( transaction_user_pid=librarian_martigny.pid, transaction_date=transaction_date, pickup_location_pid=loc_public_martigny.pid, - document_pid=extracted_data_from_ref( - item2_lib_martigny.get('document')) + document_pid=extracted_data_from_ref(item2_lib_martigny.get("document")), ) - flush_index(ItemsSearch.Meta.index) - flush_index(LoansSearch.Meta.index) - flush_index(NotificationsSearch.Meta.index) + ItemsSearch.flush_and_refresh() + LoansSearch.flush_and_refresh() + NotificationsSearch.flush_and_refresh() - loan = list(item2_lib_martigny.get_loans_by_item_pid( - item_pid=item2_lib_martigny.pid))[0] + loan = list( + item2_lib_martigny.get_loans_by_item_pid(item_pid=item2_lib_martigny.pid) + )[0] item2_lib_martigny.validate_request( pid=loan.pid, patron_pid=patron_martigny.pid, @@ -535,28 +487,29 @@ def loan_validated_martigny( transaction_user_pid=librarian_martigny.pid, transaction_date=transaction_date, pickup_location_pid=loc_public_martigny.pid, - document_pid=extracted_data_from_ref( - item2_lib_martigny.get('document')) + document_pid=extracted_data_from_ref(item2_lib_martigny.get("document")), ) - flush_index(ItemsSearch.Meta.index) - flush_index(LoansSearch.Meta.index) - flush_index(NotificationsSearch.Meta.index) - loan = list(item2_lib_martigny.get_loans_by_item_pid( - item_pid=item2_lib_martigny.pid))[0] + ItemsSearch.flush_and_refresh() + LoansSearch.flush_and_refresh() + NotificationsSearch.flush_and_refresh() + loan = list( + item2_lib_martigny.get_loans_by_item_pid(item_pid=item2_lib_martigny.pid) + )[0] return loan @pytest.fixture(scope="module") def loan2_validated_martigny( - app, - document, - item3_lib_martigny, - loc_public_martigny, - loc_restricted_martigny, - item_type_standard_martigny, - librarian_martigny, - patron_martigny, - circulation_policies): + app, + document, + item3_lib_martigny, + loc_public_martigny, + loc_restricted_martigny, + item_type_standard_martigny, + librarian_martigny, + patron_martigny, + circulation_policies, +): """Request and validate item to a patron. item3_lib_martigny is requested and validated to patron_martigny. 
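Several loan fixtures above and below pass document_pid=extracted_data_from_ref(item.get("document")). The helper is imported from rero_ils elsewhere in the suite; as used here it resolves a JSON reference object to the pid it points at. A rough sketch of the assumed behaviour, for orientation only (the real helper is more general):

    def extracted_data_from_ref_sketch(ref_field):
        """Hypothetical stand-in: pull the trailing pid out of a
        {"$ref": "https://bib.rero.ch/api/documents/<pid>"} object."""
        return ref_field["$ref"].rsplit("/", 1)[-1]

    # e.g. {"$ref": "https://bib.rero.ch/api/documents/doc42"} -> "doc42"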
@@ -565,7 +518,8 @@ def loan2_validated_martigny( # delete old loans for loan in item3_lib_martigny.get_loans_by_item_pid( - item_pid=item3_lib_martigny.pid): + item_pid=item3_lib_martigny.pid + ): loan.delete(dbcommit=True, delindex=True) item3_lib_martigny.request( @@ -574,15 +528,15 @@ def loan2_validated_martigny( transaction_user_pid=librarian_martigny.pid, transaction_date=transaction_date, pickup_location_pid=loc_restricted_martigny.pid, - document_pid=extracted_data_from_ref( - item3_lib_martigny.get('document')) + document_pid=extracted_data_from_ref(item3_lib_martigny.get("document")), ) - flush_index(ItemsSearch.Meta.index) - flush_index(LoansSearch.Meta.index) - flush_index(NotificationsSearch.Meta.index) + ItemsSearch.flush_and_refresh() + LoansSearch.flush_and_refresh() + NotificationsSearch.flush_and_refresh() - loan = list(item3_lib_martigny.get_loans_by_item_pid( - item_pid=item3_lib_martigny.pid))[0] + loan = list( + item3_lib_martigny.get_loans_by_item_pid(item_pid=item3_lib_martigny.pid) + )[0] item3_lib_martigny.validate_request( pid=loan.pid, @@ -591,27 +545,28 @@ def loan2_validated_martigny( transaction_user_pid=librarian_martigny.pid, transaction_date=transaction_date, pickup_location_pid=loc_restricted_martigny.pid, - document_pid=extracted_data_from_ref( - item3_lib_martigny.get('document')) + document_pid=extracted_data_from_ref(item3_lib_martigny.get("document")), ) - flush_index(ItemsSearch.Meta.index) - flush_index(LoansSearch.Meta.index) - flush_index(NotificationsSearch.Meta.index) - loan = list(item3_lib_martigny.get_loans_by_item_pid( - item_pid=item3_lib_martigny.pid))[0] + ItemsSearch.flush_and_refresh() + LoansSearch.flush_and_refresh() + NotificationsSearch.flush_and_refresh() + loan = list( + item3_lib_martigny.get_loans_by_item_pid(item_pid=item3_lib_martigny.pid) + )[0] return loan @pytest.fixture(scope="module") def loan_validated_sion( - app, - document, - item2_lib_sion, - loc_public_sion, - item_type_regular_sion, - librarian_sion, - patron_sion, - circulation_policies): + app, + document, + item2_lib_sion, + loc_public_sion, + item_type_regular_sion, + librarian_sion, + patron_sion, + circulation_policies, +): """Request and validate item to a patron.""" transaction_date = datetime.now(timezone.utc).isoformat() @@ -621,17 +576,15 @@ def loan_validated_sion( transaction_user_pid=librarian_sion.pid, transaction_date=transaction_date, pickup_location_pid=loc_public_sion.pid, - document_pid=item2_lib_sion.replace_refs()['document']['pid'] + document_pid=item2_lib_sion.replace_refs()["document"]["pid"], ) - flush_index(ItemsSearch.Meta.index) - flush_index(LoansSearch.Meta.index) - flush_index(NotificationsSearch.Meta.index) + ItemsSearch.flush_and_refresh() + LoansSearch.flush_and_refresh() + NotificationsSearch.flush_and_refresh() - loan = list(item2_lib_sion.get_loans_by_item_pid( - item_pid=item2_lib_sion.pid))[0] + loan = list(item2_lib_sion.get_loans_by_item_pid(item_pid=item2_lib_sion.pid))[0] with mock.patch( - 'rero_ils.modules.loans.logs.api.current_librarian', - librarian_sion + "rero_ils.modules.loans.logs.api.current_librarian", librarian_sion ): item2_lib_sion.validate_request( pid=loan.pid, @@ -640,14 +593,13 @@ def loan_validated_sion( transaction_user_pid=librarian_sion.pid, transaction_date=transaction_date, pickup_location_pid=loc_public_sion.pid, - document_pid=item2_lib_sion.replace_refs()['document']['pid'] + document_pid=item2_lib_sion.replace_refs()["document"]["pid"], ) - flush_index(ItemsSearch.Meta.index) - 
flush_index(LoansSearch.Meta.index) - flush_index(NotificationsSearch.Meta.index) - flush_index(LoanOperationLog.index_name) - loan = list(item2_lib_sion.get_loans_by_item_pid( - item_pid=item2_lib_sion.pid))[0] + ItemsSearch.flush_and_refresh() + LoansSearch.flush_and_refresh() + NotificationsSearch.flush_and_refresh() + LoanOperationLogsSearch.flush_and_refresh() + loan = list(item2_lib_sion.get_loans_by_item_pid(item_pid=item2_lib_sion.pid))[0] return loan @@ -656,8 +608,7 @@ def loan_validated_sion( def notification_availability_martigny(loan_validated_martigny): """Availability notification of martigny.""" return get_notification( - loan_validated_martigny, - notification_type=NotificationType.AVAILABILITY + loan_validated_martigny, notification_type=NotificationType.AVAILABILITY ) @@ -665,8 +616,7 @@ def notification_availability_martigny(loan_validated_martigny): def notification2_availability_martigny(loan2_validated_martigny): """Availability notification of martigny.""" return get_notification( - loan2_validated_martigny, - notification_type=NotificationType.AVAILABILITY + loan2_validated_martigny, notification_type=NotificationType.AVAILABILITY ) @@ -674,8 +624,7 @@ def notification2_availability_martigny(loan2_validated_martigny): def notification_availability_sion(loan_validated_sion): """Availability notification of sion.""" return get_notification( - loan_validated_sion, - notification_type=NotificationType.AVAILABILITY + loan_validated_sion, notification_type=NotificationType.AVAILABILITY ) @@ -683,8 +632,7 @@ def notification_availability_sion(loan_validated_sion): def notification_availability_sion2(loan_validated_sion2): """Availability notification of sion.""" return get_notification( - loan_validated_sion2, - notification_type=NotificationType.AVAILABILITY + loan_validated_sion2, notification_type=NotificationType.AVAILABILITY ) @@ -692,7 +640,7 @@ def notification_availability_sion2(loan_validated_sion2): @pytest.fixture(scope="function") def dummy_notification(data): """Notification data scope function.""" - return deepcopy(data.get('dummy_notif')) + return deepcopy(data.get("dummy_notif")) # ------------ Patron Transactions: Lib Martigny overdue scenario ---------- @@ -706,7 +654,7 @@ def loan_due_soon_martigny( librarian_martigny, patron_martigny, circulation_policies, - tomorrow + tomorrow, ): """Checkout an item to a patron; item4_lib_martigny is due_soon.""" item = item4_lib_martigny @@ -715,17 +663,17 @@ transaction_location_pid=loc_public_martigny.pid, transaction_user_pid=librarian_martigny.pid, transaction_date=datetime.now(timezone.utc).isoformat(), - document_pid=extracted_data_from_ref(item.get('document')) + document_pid=extracted_data_from_ref(item.get("document")), ) - flush_index(ItemsSearch.Meta.index) - flush_index(LoansSearch.Meta.index) - flush_index(LoanOperationLog.index_name) + ItemsSearch.flush_and_refresh() + LoansSearch.flush_and_refresh() + LoanOperationLogsSearch.flush_and_refresh() loan_pid = item.get_loan_pid_with_item_on_loan(item.pid) loan = Loan.get_record_by_pid(loan_pid) # Updating the end_date to a very soon date will fire the extension hook # to compute a valid due_soon date - loan['end_date'] = tomorrow.isoformat() + loan["end_date"] = tomorrow.isoformat() return loan.update(loan, dbcommit=True, reindex=True) @@ -735,23 +683,24 @@ def notification_due_soon_martigny(app, loan_due_soon_martigny): """Create due soon notification of martigny.""" notification = loan_due_soon_martigny.create_notification( _type=NotificationType.DUE_SOON ).pop() - 
flush_index(NotificationsSearch.Meta.index) - flush_index(LoansSearch.Meta.index) - flush_index(PatronTransactionsSearch.Meta.index) + NotificationsSearch.flush_and_refresh() + LoansSearch.flush_and_refresh() + PatronTransactionsSearch.flush_and_refresh() return notification # ------------ Patron Transactions: Lib Martigny overdue scenario ---------- @pytest.fixture(scope="module") def loan_overdue_martigny( - app, - document, - item4_lib_martigny, - loc_public_martigny, - item_type_standard_martigny, - librarian_martigny, - patron_martigny, - circulation_policies): + app, + document, + item4_lib_martigny, + loc_public_martigny, + item_type_standard_martigny, + librarian_martigny, + patron_martigny, + circulation_policies, +): """Checkout an item to a patron. item4_lib_martigny is overdue. @@ -763,16 +712,15 @@ def loan_overdue_martigny( transaction_location_pid=loc_public_martigny.pid, transaction_user_pid=librarian_martigny.pid, transaction_date=transaction_date, - document_pid=extracted_data_from_ref( - item4_lib_martigny.get('document')) + document_pid=extracted_data_from_ref(item4_lib_martigny.get("document")), ) - flush_index(ItemsSearch.Meta.index) - flush_index(LoansSearch.Meta.index) + ItemsSearch.flush_and_refresh() + LoansSearch.flush_and_refresh() loan = Loan.get_record_by_pid( - item4_lib_martigny.get_loan_pid_with_item_on_loan( - item4_lib_martigny.pid)) + item4_lib_martigny.get_loan_pid_with_item_on_loan(item4_lib_martigny.pid) + ) end_date = datetime.now(timezone.utc) - timedelta(days=25) - loan['end_date'] = end_date.isoformat() + loan["end_date"] = end_date.isoformat() return loan.update(loan, dbcommit=True, reindex=True) @@ -782,9 +730,9 @@ def notification_late_martigny(app, loan_overdue_martigny): notification = loan_overdue_martigny.create_notification( _type=NotificationType.OVERDUE ).pop() - flush_index(NotificationsSearch.Meta.index) - flush_index(LoansSearch.Meta.index) - flush_index(PatronTransactionsSearch.Meta.index) + NotificationsSearch.flush_and_refresh() + LoansSearch.flush_and_refresh() + PatronTransactionsSearch.flush_and_refresh() return notification @@ -796,9 +744,7 @@ def patron_transaction_overdue_martigny(app, notification_late_martigny): @pytest.fixture(scope="module") -def patron_transaction_overdue_event_martigny( - app, - patron_transaction_overdue_martigny): +def patron_transaction_overdue_event_martigny(app, patron_transaction_overdue_martigny): """Return overdue events for patron transaction for a notification.""" for event in patron_transaction_overdue_martigny.events: return event @@ -806,8 +752,8 @@ def patron_transaction_overdue_event_martigny( @pytest.fixture(scope="module") def patron_transaction_overdue_events_martigny( - app, - patron_transaction_overdue_martigny): + app, patron_transaction_overdue_martigny +): """Return overdue events for patron transaction for a notification.""" return patron_transaction_overdue_martigny.events @@ -815,14 +761,15 @@ def patron_transaction_overdue_events_martigny( # ------------ Patron Transactions: Lib Saxon overdue scenario ---------- @pytest.fixture(scope="module") def loan_overdue_saxon( - app, - document, - item2_lib_saxon, - loc_public_martigny, - item_type_standard_martigny, - librarian_martigny, - patron_martigny, - circulation_policies): + app, + document, + item2_lib_saxon, + loc_public_martigny, + item_type_standard_martigny, + librarian_martigny, + patron_martigny, + circulation_policies, +): """Checkout an item to a patron. item2_lib_saxon is overdue. 
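The overdue scenarios here (Martigny above, then Saxon and Sion) repeat the same three-step recipe, which reads more clearly once condensed; the following only summarizes the code in these hunks, with item, patron, location and librarian standing in for the fixture objects of each scenario:

    from datetime import datetime, timedelta, timezone

    # 1. Check the item out to the patron.
    item.checkout(
        patron_pid=patron.pid,
        transaction_location_pid=location.pid,
        transaction_user_pid=librarian.pid,
        transaction_date=datetime.now(timezone.utc).isoformat(),
        document_pid=extracted_data_from_ref(item.get("document")),
    )
    # 2. Make the indices consistent before querying them.
    ItemsSearch.flush_and_refresh()
    LoansSearch.flush_and_refresh()
    # 3. Backdate the due date so the loan is 25 days overdue, then persist.
    loan = Loan.get_record_by_pid(item.get_loan_pid_with_item_on_loan(item.pid))
    loan["end_date"] = (datetime.now(timezone.utc) - timedelta(days=25)).isoformat()
    loan = loan.update(loan, dbcommit=True, reindex=True)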
@@ -834,16 +781,15 @@ def loan_overdue_saxon( transaction_location_pid=loc_public_martigny.pid, transaction_user_pid=librarian_martigny.pid, transaction_date=transaction_date, - document_pid=extracted_data_from_ref( - item2_lib_saxon.get('document')) + document_pid=extracted_data_from_ref(item2_lib_saxon.get("document")), ) - flush_index(ItemsSearch.Meta.index) - flush_index(LoansSearch.Meta.index) + ItemsSearch.flush_and_refresh() + LoansSearch.flush_and_refresh() loan = Loan.get_record_by_pid( - item2_lib_saxon.get_loan_pid_with_item_on_loan( - item2_lib_saxon.pid)) + item2_lib_saxon.get_loan_pid_with_item_on_loan(item2_lib_saxon.pid) + ) end_date = datetime.now(timezone.utc) - timedelta(days=25) - loan['end_date'] = end_date.isoformat() + loan["end_date"] = end_date.isoformat() loan = loan.update(loan, dbcommit=True, reindex=True) return loan @@ -854,9 +800,9 @@ def notification_late_saxon(app, loan_overdue_saxon): notification = loan_overdue_saxon.create_notification( _type=NotificationType.OVERDUE ).pop() - flush_index(NotificationsSearch.Meta.index) - flush_index(LoansSearch.Meta.index) - flush_index(PatronTransactionsSearch.Meta.index) + NotificationsSearch.flush_and_refresh() + LoansSearch.flush_and_refresh() + PatronTransactionsSearch.flush_and_refresh() return notification @@ -868,9 +814,7 @@ def patron_transaction_overdue_saxon(app, notification_late_saxon): @pytest.fixture(scope="module") -def patron_transaction_overdue_event_saxon( - app, - patron_transaction_overdue_saxon): +def patron_transaction_overdue_event_saxon(app, patron_transaction_overdue_saxon): """Return overdue events for patron transaction for a notification.""" for event in patron_transaction_overdue_saxon.events: return event @@ -879,32 +823,33 @@ def patron_transaction_overdue_event_saxon( @pytest.fixture(scope="module") def patron_transaction_overdue_saxon_data(data): """Load Martigny patron transaction martigny data.""" - return deepcopy(data.get('dummy_patron_transaction')) + return deepcopy(data.get("dummy_patron_transaction")) @pytest.fixture(scope="module") def patron_transaction_overdue_event_saxon_data(data): """Load Martigny patron transaction martigny data.""" - return deepcopy(data.get('dummy_patron_transaction_event')) + return deepcopy(data.get("dummy_patron_transaction_event")) @pytest.fixture(scope="module") def patron_transaction_photocopy_martigny_data(data): """Load photocopy patron transaction data.""" - return deepcopy(data.get('pttr3')) + return deepcopy(data.get("pttr3")) # ------------ Patron Transactions: Lib Sion overdue scenario ---------- @pytest.fixture(scope="module") def loan_overdue_sion( - app, - document, - item_lib_sion, - loc_public_sion, - item_type_regular_sion, - librarian_sion, - patron_sion, - circulation_policies): + app, + document, + item_lib_sion, + loc_public_sion, + item_type_regular_sion, + librarian_sion, + patron_sion, + circulation_policies, +): """Checkout an item to a patron. item_lib_sion is overdue. 
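After the Sion overdue scenario below, the remainder of the file rewrites a long series of circulation-action fixtures that all delegate to the item_record_to_a_specific_loan_state helper imported at the top of the file. Their shared shape, extracted from the hunks that follow (nothing here goes beyond what those hunks show):

    params = {
        "patron_pid": patron_martigny.pid,
        "transaction_location_pid": loc_public_martigny.pid,
        "transaction_user_pid": librarian_martigny.pid,
        "pickup_location_pid": loc_public_martigny.pid,
    }
    # Drive a copy of the item into the requested loan state and get back
    # the item/loan pair; copy_item=True presumably leaves the shared,
    # module-scoped source item untouched for other tests.
    item, loan = item_record_to_a_specific_loan_state(
        item=item_lib_martigny,
        loan_state=LoanState.PENDING,
        params=params,
        copy_item=True,
    )
    return item, patron_martigny, loan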
@@ -916,15 +861,15 @@ def loan_overdue_sion( transaction_location_pid=loc_public_sion.pid, transaction_user_pid=librarian_sion.pid, transaction_date=transaction_date, - document_pid=extracted_data_from_ref(item_lib_sion.get('document')) + document_pid=extracted_data_from_ref(item_lib_sion.get("document")), ) - flush_index(ItemsSearch.Meta.index) - flush_index(LoansSearch.Meta.index) + ItemsSearch.flush_and_refresh() + LoansSearch.flush_and_refresh() loan = Loan.get_record_by_pid( item_lib_sion.get_loan_pid_with_item_on_loan(item_lib_sion.pid) ) end_date = datetime.now(timezone.utc) - timedelta(days=25) - loan['end_date'] = end_date.isoformat() + loan["end_date"] = end_date.isoformat() loan = loan.update(loan, dbcommit=True, reindex=True) return loan @@ -935,9 +880,9 @@ def notification_late_sion(app, loan_overdue_sion): notification = loan_overdue_sion.create_notification( _type=NotificationType.OVERDUE ).pop() - flush_index(NotificationsSearch.Meta.index) - flush_index(LoansSearch.Meta.index) - flush_index(PatronTransactionsSearch.Meta.index) + NotificationsSearch.flush_and_refresh() + LoansSearch.flush_and_refresh() + PatronTransactionsSearch.flush_and_refresh() return notification @@ -949,9 +894,7 @@ def patron_transaction_overdue_sion(app, notification_late_sion): @pytest.fixture(scope="module") -def patron_transaction_overdue_event_sion( - app, - patron_transaction_overdue_sion): +def patron_transaction_overdue_event_sion(app, patron_transaction_overdue_sion): """Return overdue events for patron transaction for a notification.""" for event in patron_transaction_overdue_sion.events: return event @@ -960,22 +903,25 @@ def patron_transaction_overdue_event_sion( @pytest.fixture(scope="module") def patron_transaction_overdue_sion_data(data): """Load Sion patron transaction martigny data.""" - return deepcopy(data.get('dummy_patron_transaction_sion')) + return deepcopy(data.get("dummy_patron_transaction_sion")) @pytest.fixture(scope="module") def patron_transaction_overdue_event_sion_data(data): """Load Sion patron transaction martigny data.""" - return deepcopy(data.get('dummy_patron_transaction_event_sion')) + return deepcopy(data.get("dummy_patron_transaction_event_sion")) # ------------ Loans and items for circulation actions ---------- @pytest.fixture(scope="module") def item_on_shelf_martigny_patron_and_loan_pending( - app, - librarian_martigny, - item_lib_martigny, loc_public_martigny, - patron_martigny, circulation_policies,): + app, + librarian_martigny, + item_lib_martigny, + loc_public_martigny, + patron_martigny, + circulation_policies, +): """Creates an item on_shelf requested by a patron. :return item: the created or copied item. @@ -983,24 +929,29 @@ def item_on_shelf_martigny_patron_and_loan_pending( :return loan: the pending loan. 
""" params = { - 'patron_pid': patron_martigny.pid, - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid, - 'pickup_location_pid': loc_public_martigny.pid + "patron_pid": patron_martigny.pid, + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, + "pickup_location_pid": loc_public_martigny.pid, } item, loan = item_record_to_a_specific_loan_state( item=item_lib_martigny, loan_state=LoanState.PENDING, - params=params, copy_item=True) + params=params, + copy_item=True, + ) return item, patron_martigny, loan @pytest.fixture(scope="module") def item2_on_shelf_martigny_patron_and_loan_pending( - app, - librarian_martigny, - item_lib_martigny, loc_public_martigny, - patron_martigny, circulation_policies,): + app, + librarian_martigny, + item_lib_martigny, + loc_public_martigny, + patron_martigny, + circulation_policies, +): """Creates an item on_shelf requested by a patron. :return item: the created or copied item. @@ -1008,24 +959,29 @@ def item2_on_shelf_martigny_patron_and_loan_pending( :return loan: the pending loan. """ params = { - 'patron_pid': patron_martigny.pid, - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid, - 'pickup_location_pid': loc_public_martigny.pid + "patron_pid": patron_martigny.pid, + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, + "pickup_location_pid": loc_public_martigny.pid, } item, loan = item_record_to_a_specific_loan_state( item=item_lib_martigny, loan_state=LoanState.PENDING, - params=params, copy_item=True) + params=params, + copy_item=True, + ) return item, patron_martigny, loan @pytest.fixture(scope="module") def item_at_desk_martigny_patron_and_loan_at_desk( - app, - librarian_martigny, - item_lib_martigny, loc_public_martigny, - patron_martigny, circulation_policies): + app, + librarian_martigny, + item_lib_martigny, + loc_public_martigny, + patron_martigny, + circulation_policies, +): """Creates an item with a validated pending request. :return item: the created or copied item. @@ -1033,24 +989,29 @@ def item_at_desk_martigny_patron_and_loan_at_desk( :return loan: the validated pending loan. """ params = { - 'patron_pid': patron_martigny.pid, - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid, - 'pickup_location_pid': loc_public_martigny.pid + "patron_pid": patron_martigny.pid, + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, + "pickup_location_pid": loc_public_martigny.pid, } item, loan = item_record_to_a_specific_loan_state( item=item_lib_martigny, loan_state=LoanState.ITEM_AT_DESK, - params=params, copy_item=True) + params=params, + copy_item=True, + ) return item, patron_martigny, loan @pytest.fixture(scope="module") def item2_at_desk_martigny_patron_and_loan_at_desk( - app, - librarian_martigny, - item_lib_martigny, loc_public_martigny, - patron_martigny, circulation_policies): + app, + librarian_martigny, + item_lib_martigny, + loc_public_martigny, + patron_martigny, + circulation_policies, +): """Creates an item with a validated pending request. :return item: the created or copied item. @@ -1058,24 +1019,29 @@ def item2_at_desk_martigny_patron_and_loan_at_desk( :return loan: the validated pending loan. 
""" params = { - 'patron_pid': patron_martigny.pid, - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid, - 'pickup_location_pid': loc_public_martigny.pid + "patron_pid": patron_martigny.pid, + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, + "pickup_location_pid": loc_public_martigny.pid, } item, loan = item_record_to_a_specific_loan_state( item=item_lib_martigny, loan_state=LoanState.ITEM_AT_DESK, - params=params, copy_item=True) + params=params, + copy_item=True, + ) return item, patron_martigny, loan @pytest.fixture(scope="module") def item3_at_desk_martigny_patron_and_loan_at_desk( - app, - librarian_martigny, - item_lib_martigny, loc_public_martigny, - patron_martigny, circulation_policies): + app, + librarian_martigny, + item_lib_martigny, + loc_public_martigny, + patron_martigny, + circulation_policies, +): """Creates an item with a validated pending request. :return item: the created or copied item. @@ -1083,24 +1049,29 @@ def item3_at_desk_martigny_patron_and_loan_at_desk( :return loan: the validated pending loan. """ params = { - 'patron_pid': patron_martigny.pid, - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid, - 'pickup_location_pid': loc_public_martigny.pid + "patron_pid": patron_martigny.pid, + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, + "pickup_location_pid": loc_public_martigny.pid, } item, loan = item_record_to_a_specific_loan_state( item=item_lib_martigny, loan_state=LoanState.ITEM_AT_DESK, - params=params, copy_item=True) + params=params, + copy_item=True, + ) return item, patron_martigny, loan @pytest.fixture(scope="module") def item4_at_desk_martigny_patron_and_loan_at_desk( - app, - librarian_martigny, - item_lib_martigny, loc_public_martigny, - patron_martigny, circulation_policies): + app, + librarian_martigny, + item_lib_martigny, + loc_public_martigny, + patron_martigny, + circulation_policies, +): """Creates an item with a validated pending request. :return item: the created or copied item. @@ -1108,24 +1079,29 @@ def item4_at_desk_martigny_patron_and_loan_at_desk( :return loan: the validated pending loan. """ params = { - 'patron_pid': patron_martigny.pid, - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid, - 'pickup_location_pid': loc_public_martigny.pid + "patron_pid": patron_martigny.pid, + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, + "pickup_location_pid": loc_public_martigny.pid, } item, loan = item_record_to_a_specific_loan_state( item=item_lib_martigny, loan_state=LoanState.ITEM_AT_DESK, - params=params, copy_item=True) + params=params, + copy_item=True, + ) return item, patron_martigny, loan @pytest.fixture(scope="module") def item5_at_desk_martigny_patron_and_loan_at_desk( - app, - librarian_martigny, - item_lib_martigny, loc_public_martigny, - patron_martigny, circulation_policies): + app, + librarian_martigny, + item_lib_martigny, + loc_public_martigny, + patron_martigny, + circulation_policies, +): """Creates an item with a validated pending request. :return item: the created or copied item. @@ -1133,24 +1109,29 @@ def item5_at_desk_martigny_patron_and_loan_at_desk( :return loan: the validated pending loan. 
""" params = { - 'patron_pid': patron_martigny.pid, - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid, - 'pickup_location_pid': loc_public_martigny.pid + "patron_pid": patron_martigny.pid, + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, + "pickup_location_pid": loc_public_martigny.pid, } item, loan = item_record_to_a_specific_loan_state( item=item_lib_martigny, loan_state=LoanState.ITEM_AT_DESK, - params=params, copy_item=True) + params=params, + copy_item=True, + ) return item, patron_martigny, loan @pytest.fixture(scope="module") def item_on_shelf_fully_patron_and_loan_pending( - app, - librarian_martigny, - item_lib_fully, loc_public_fully, - patron_martigny, circulation_policies,): + app, + librarian_martigny, + item_lib_fully, + loc_public_fully, + patron_martigny, + circulation_policies, +): """Creates an item on_shelf requested by a patron. :return item: the created or copied item. @@ -1158,24 +1139,26 @@ def item_on_shelf_fully_patron_and_loan_pending( :return loan: the pending loan. """ params = { - 'patron_pid': patron_martigny.pid, - 'transaction_location_pid': loc_public_fully.pid, - 'transaction_user_pid': librarian_martigny.pid, - 'pickup_location_pid': loc_public_fully.pid + "patron_pid": patron_martigny.pid, + "transaction_location_pid": loc_public_fully.pid, + "transaction_user_pid": librarian_martigny.pid, + "pickup_location_pid": loc_public_fully.pid, } item, loan = item_record_to_a_specific_loan_state( - item=item_lib_fully, - loan_state=LoanState.PENDING, - params=params, copy_item=True) + item=item_lib_fully, loan_state=LoanState.PENDING, params=params, copy_item=True + ) return item, patron_martigny, loan @pytest.fixture(scope="module") def item_on_loan_martigny_patron_and_loan_on_loan( - app, - librarian_martigny, - item_lib_martigny, loc_public_martigny, - patron_martigny, circulation_policies): + app, + librarian_martigny, + item_lib_martigny, + loc_public_martigny, + patron_martigny, + circulation_policies, +): """Creates an item on_loan. :return item: the created or copied item. @@ -1183,24 +1166,29 @@ def item_on_loan_martigny_patron_and_loan_on_loan( :return loan: the ITEM_ON_LOAN loan. """ params = { - 'patron_pid': patron_martigny.pid, - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid, - 'pickup_location_pid': loc_public_martigny.pid + "patron_pid": patron_martigny.pid, + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, + "pickup_location_pid": loc_public_martigny.pid, } item, loan = item_record_to_a_specific_loan_state( item=item_lib_martigny, loan_state=LoanState.ITEM_ON_LOAN, - params=params, copy_item=True) + params=params, + copy_item=True, + ) return item, patron_martigny, loan @pytest.fixture(scope="module") def item2_on_loan_martigny_patron_and_loan_on_loan( - app, - librarian_martigny, - item_lib_martigny, loc_public_martigny, - patron_martigny, circulation_policies): + app, + librarian_martigny, + item_lib_martigny, + loc_public_martigny, + patron_martigny, + circulation_policies, +): """Creates an item on_loan. :return item: the created or copied item. @@ -1208,24 +1196,29 @@ def item2_on_loan_martigny_patron_and_loan_on_loan( :return loan: the ITEM_ON_LOAN loan. 
""" params = { - 'patron_pid': patron_martigny.pid, - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid, - 'pickup_location_pid': loc_public_martigny.pid + "patron_pid": patron_martigny.pid, + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, + "pickup_location_pid": loc_public_martigny.pid, } item, loan = item_record_to_a_specific_loan_state( item=item_lib_martigny, loan_state=LoanState.ITEM_ON_LOAN, - params=params, copy_item=True) + params=params, + copy_item=True, + ) return item, patron_martigny, loan @pytest.fixture(scope="module") def item3_on_loan_martigny_patron_and_loan_on_loan( - app, - librarian_martigny, - item_lib_martigny, loc_public_martigny, - patron_martigny, circulation_policies): + app, + librarian_martigny, + item_lib_martigny, + loc_public_martigny, + patron_martigny, + circulation_policies, +): """Creates an item on_loan. :return item: the created or copied item. @@ -1233,24 +1226,29 @@ def item3_on_loan_martigny_patron_and_loan_on_loan( :return loan: the ITEM_ON_LOAN loan. """ params = { - 'patron_pid': patron_martigny.pid, - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid, - 'pickup_location_pid': loc_public_martigny.pid + "patron_pid": patron_martigny.pid, + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, + "pickup_location_pid": loc_public_martigny.pid, } item, loan = item_record_to_a_specific_loan_state( item=item_lib_martigny, loan_state=LoanState.ITEM_ON_LOAN, - params=params, copy_item=True) + params=params, + copy_item=True, + ) return item, patron_martigny, loan @pytest.fixture(scope="module") def item4_on_loan_martigny_patron_and_loan_on_loan( - app, - librarian_martigny, - item_lib_martigny, loc_public_martigny, - patron_martigny, circulation_policies): + app, + librarian_martigny, + item_lib_martigny, + loc_public_martigny, + patron_martigny, + circulation_policies, +): """Creates an item on_loan. :return item: the created or copied item. @@ -1258,24 +1256,29 @@ def item4_on_loan_martigny_patron_and_loan_on_loan( :return loan: the ITEM_ON_LOAN loan. """ params = { - 'patron_pid': patron_martigny.pid, - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid, - 'pickup_location_pid': loc_public_martigny.pid + "patron_pid": patron_martigny.pid, + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, + "pickup_location_pid": loc_public_martigny.pid, } item, loan = item_record_to_a_specific_loan_state( item=item_lib_martigny, loan_state=LoanState.ITEM_ON_LOAN, - params=params, copy_item=True) + params=params, + copy_item=True, + ) return item, patron_martigny, loan @pytest.fixture(scope="module") def item5_on_loan_martigny_patron_and_loan_on_loan( - app, - librarian_martigny, - item_lib_martigny, loc_public_martigny, - patron_martigny, circulation_policies): + app, + librarian_martigny, + item_lib_martigny, + loc_public_martigny, + patron_martigny, + circulation_policies, +): """Creates an item on_loan. :return item: the created or copied item. @@ -1283,24 +1286,29 @@ def item5_on_loan_martigny_patron_and_loan_on_loan( :return loan: the ITEM_ON_LOAN loan. 
""" params = { - 'patron_pid': patron_martigny.pid, - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid, - 'pickup_location_pid': loc_public_martigny.pid + "patron_pid": patron_martigny.pid, + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, + "pickup_location_pid": loc_public_martigny.pid, } item, loan = item_record_to_a_specific_loan_state( item=item_lib_martigny, loan_state=LoanState.ITEM_ON_LOAN, - params=params, copy_item=True) + params=params, + copy_item=True, + ) return item, patron_martigny, loan @pytest.fixture(scope="module") def item_on_loan_fully_patron_and_loan_on_loan( - app, - librarian_martigny, - item_lib_fully, loc_public_fully, - patron_martigny, circulation_policies): + app, + librarian_martigny, + item_lib_fully, + loc_public_fully, + patron_martigny, + circulation_policies, +): """Creates an item on_loan. :return item: the created or copied item. @@ -1308,24 +1316,30 @@ def item_on_loan_fully_patron_and_loan_on_loan( :return loan: the ITEM_ON_LOAN loan. """ params = { - 'patron_pid': patron_martigny.pid, - 'transaction_location_pid': loc_public_fully.pid, - 'transaction_user_pid': librarian_martigny.pid, - 'pickup_location_pid': loc_public_fully.pid + "patron_pid": patron_martigny.pid, + "transaction_location_pid": loc_public_fully.pid, + "transaction_user_pid": librarian_martigny.pid, + "pickup_location_pid": loc_public_fully.pid, } item, loan = item_record_to_a_specific_loan_state( item=item_lib_fully, loan_state=LoanState.ITEM_ON_LOAN, - params=params, copy_item=True) + params=params, + copy_item=True, + ) return item, patron_martigny, loan @pytest.fixture(scope="module") def item_in_transit_martigny_patron_and_loan_for_pickup( - app, - librarian_martigny, loc_public_fully, - item_lib_martigny, loc_public_martigny, - patron_martigny, circulation_policies): + app, + librarian_martigny, + loc_public_fully, + item_lib_martigny, + loc_public_martigny, + patron_martigny, + circulation_policies, +): """Creates an item in_transit for pickup. :return item: the created or copied item. @@ -1333,24 +1347,30 @@ def item_in_transit_martigny_patron_and_loan_for_pickup( :return loan: the ITEM_IN_TRANSIT_FOR_PICKUP loan. """ params = { - 'patron_pid': patron_martigny.pid, - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid, - 'pickup_location_pid': loc_public_fully.pid + "patron_pid": patron_martigny.pid, + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, + "pickup_location_pid": loc_public_fully.pid, } item, loan = item_record_to_a_specific_loan_state( item=item_lib_martigny, loan_state=LoanState.ITEM_IN_TRANSIT_FOR_PICKUP, - params=params, copy_item=True) + params=params, + copy_item=True, + ) return item, patron_martigny, loan @pytest.fixture(scope="module") def item2_in_transit_martigny_patron_and_loan_for_pickup( - app, - librarian_martigny, loc_public_fully, - item_lib_martigny, loc_public_martigny, - patron_martigny, circulation_policies): + app, + librarian_martigny, + loc_public_fully, + item_lib_martigny, + loc_public_martigny, + patron_martigny, + circulation_policies, +): """Creates an item in_transit for pickup. :return item: the created or copied item. @@ -1358,24 +1378,30 @@ def item2_in_transit_martigny_patron_and_loan_for_pickup( :return loan: the ITEM_IN_TRANSIT_FOR_PICKUP loan. 
""" params = { - 'patron_pid': patron_martigny.pid, - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid, - 'pickup_location_pid': loc_public_fully.pid + "patron_pid": patron_martigny.pid, + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, + "pickup_location_pid": loc_public_fully.pid, } item, loan = item_record_to_a_specific_loan_state( item=item_lib_martigny, loan_state=LoanState.ITEM_IN_TRANSIT_FOR_PICKUP, - params=params, copy_item=True) + params=params, + copy_item=True, + ) return item, patron_martigny, loan @pytest.fixture(scope="module") def item3_in_transit_martigny_patron_and_loan_for_pickup( - app, - librarian_martigny, loc_public_fully, - item_lib_martigny, loc_public_martigny, - patron_martigny, circulation_policies): + app, + librarian_martigny, + loc_public_fully, + item_lib_martigny, + loc_public_martigny, + patron_martigny, + circulation_policies, +): """Creates an item in_transit for pickup. :return item: the created or copied item. @@ -1383,24 +1409,30 @@ def item3_in_transit_martigny_patron_and_loan_for_pickup( :return loan: the ITEM_IN_TRANSIT_FOR_PICKUP loan. """ params = { - 'patron_pid': patron_martigny.pid, - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid, - 'pickup_location_pid': loc_public_fully.pid + "patron_pid": patron_martigny.pid, + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, + "pickup_location_pid": loc_public_fully.pid, } item, loan = item_record_to_a_specific_loan_state( item=item_lib_martigny, loan_state=LoanState.ITEM_IN_TRANSIT_FOR_PICKUP, - params=params, copy_item=True) + params=params, + copy_item=True, + ) return item, patron_martigny, loan @pytest.fixture(scope="module") def item_in_transit_martigny_patron_and_loan_to_house( - app, - librarian_martigny, loc_public_fully, - item_lib_martigny, loc_public_martigny, - patron_martigny, circulation_policies): + app, + librarian_martigny, + loc_public_fully, + item_lib_martigny, + loc_public_martigny, + patron_martigny, + circulation_policies, +): """Creates an item in_transit to house. :return item: the created or copied item. @@ -1408,25 +1440,31 @@ def item_in_transit_martigny_patron_and_loan_to_house( :return loan: the ITEM_IN_TRANSIT_TO_HOUSE loan. """ params = { - 'patron_pid': patron_martigny.pid, - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid, - 'pickup_location_pid': loc_public_martigny.pid, - 'checkin_transaction_location_pid': loc_public_fully.pid, + "patron_pid": patron_martigny.pid, + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, + "pickup_location_pid": loc_public_martigny.pid, + "checkin_transaction_location_pid": loc_public_fully.pid, } item, loan = item_record_to_a_specific_loan_state( item=item_lib_martigny, loan_state=LoanState.ITEM_IN_TRANSIT_TO_HOUSE, - params=params, copy_item=True) + params=params, + copy_item=True, + ) return item, patron_martigny, loan @pytest.fixture(scope="module") def item2_in_transit_martigny_patron_and_loan_to_house( - app, - librarian_martigny, loc_public_fully, - item_lib_martigny, loc_public_martigny, - patron_martigny, circulation_policies): + app, + librarian_martigny, + loc_public_fully, + item_lib_martigny, + loc_public_martigny, + patron_martigny, + circulation_policies, +): """Creates an item in_transit to house. 
:return item: the created or copied item. @@ -1434,25 +1472,31 @@ def item2_in_transit_martigny_patron_and_loan_to_house( :return loan: the ITEM_IN_TRANSIT_TO_HOUSE loan. """ params = { - 'patron_pid': patron_martigny.pid, - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid, - 'pickup_location_pid': loc_public_martigny.pid, - 'checkin_transaction_location_pid': loc_public_fully.pid, + "patron_pid": patron_martigny.pid, + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, + "pickup_location_pid": loc_public_martigny.pid, + "checkin_transaction_location_pid": loc_public_fully.pid, } item, loan = item_record_to_a_specific_loan_state( item=item_lib_martigny, loan_state=LoanState.ITEM_IN_TRANSIT_TO_HOUSE, - params=params, copy_item=True) + params=params, + copy_item=True, + ) return item, patron_martigny, loan @pytest.fixture(scope="module") def item3_in_transit_martigny_patron_and_loan_to_house( - app, - librarian_martigny, loc_public_fully, - item_lib_martigny, loc_public_martigny, - patron_martigny, circulation_policies): + app, + librarian_martigny, + loc_public_fully, + item_lib_martigny, + loc_public_martigny, + patron_martigny, + circulation_policies, +): """Creates an item in_transit to house. :return item: the created or copied item. @@ -1460,25 +1504,31 @@ def item3_in_transit_martigny_patron_and_loan_to_house( :return loan: the ITEM_IN_TRANSIT_TO_HOUSE loan. """ params = { - 'patron_pid': patron_martigny.pid, - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid, - 'pickup_location_pid': loc_public_martigny.pid, - 'checkin_transaction_location_pid': loc_public_fully.pid, + "patron_pid": patron_martigny.pid, + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, + "pickup_location_pid": loc_public_martigny.pid, + "checkin_transaction_location_pid": loc_public_fully.pid, } item, loan = item_record_to_a_specific_loan_state( item=item_lib_martigny, loan_state=LoanState.ITEM_IN_TRANSIT_TO_HOUSE, - params=params, copy_item=True) + params=params, + copy_item=True, + ) return item, patron_martigny, loan @pytest.fixture(scope="module") def item4_in_transit_martigny_patron_and_loan_to_house( - app, - librarian_martigny, loc_public_fully, - item_lib_martigny, loc_public_martigny, - patron_martigny, circulation_policies): + app, + librarian_martigny, + loc_public_fully, + item_lib_martigny, + loc_public_martigny, + patron_martigny, + circulation_policies, +): """Creates an item in_transit to house. :return item: the created or copied item. @@ -1486,25 +1536,31 @@ def item4_in_transit_martigny_patron_and_loan_to_house( :return loan: the ITEM_IN_TRANSIT_TO_HOUSE loan. 
""" params = { - 'patron_pid': patron_martigny.pid, - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid, - 'pickup_location_pid': loc_public_martigny.pid, - 'checkin_transaction_location_pid': loc_public_fully.pid, + "patron_pid": patron_martigny.pid, + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, + "pickup_location_pid": loc_public_martigny.pid, + "checkin_transaction_location_pid": loc_public_fully.pid, } item, loan = item_record_to_a_specific_loan_state( item=item_lib_martigny, loan_state=LoanState.ITEM_IN_TRANSIT_TO_HOUSE, - params=params, copy_item=True) + params=params, + copy_item=True, + ) return item, patron_martigny, loan @pytest.fixture(scope="module") def item5_in_transit_martigny_patron_and_loan_to_house( - app, - librarian_martigny, loc_public_fully, - item_lib_martigny, loc_public_martigny, - patron_martigny, circulation_policies): + app, + librarian_martigny, + loc_public_fully, + item_lib_martigny, + loc_public_martigny, + patron_martigny, + circulation_policies, +): """Creates an item in_transit to house. :return item: the created or copied item. @@ -1512,25 +1568,31 @@ def item5_in_transit_martigny_patron_and_loan_to_house( :return loan: the ITEM_IN_TRANSIT_TO_HOUSE loan. """ params = { - 'patron_pid': patron_martigny.pid, - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid, - 'pickup_location_pid': loc_public_martigny.pid, - 'checkin_transaction_location_pid': loc_public_fully.pid, + "patron_pid": patron_martigny.pid, + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, + "pickup_location_pid": loc_public_martigny.pid, + "checkin_transaction_location_pid": loc_public_fully.pid, } item, loan = item_record_to_a_specific_loan_state( item=item_lib_martigny, loan_state=LoanState.ITEM_IN_TRANSIT_TO_HOUSE, - params=params, copy_item=True) + params=params, + copy_item=True, + ) return item, patron_martigny, loan @pytest.fixture(scope="module") def item6_in_transit_martigny_patron_and_loan_to_house( - app, - librarian_martigny, loc_public_fully, - item_lib_martigny, loc_public_martigny, - patron_martigny, circulation_policies): + app, + librarian_martigny, + loc_public_fully, + item_lib_martigny, + loc_public_martigny, + patron_martigny, + circulation_policies, +): """Creates an item in_transit to house. :return item: the created or copied item. @@ -1538,16 +1600,18 @@ def item6_in_transit_martigny_patron_and_loan_to_house( :return loan: the ITEM_IN_TRANSIT_TO_HOUSE loan. 
""" params = { - 'patron_pid': patron_martigny.pid, - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid, - 'pickup_location_pid': loc_public_martigny.pid, - 'checkin_transaction_location_pid': loc_public_fully.pid, + "patron_pid": patron_martigny.pid, + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, + "pickup_location_pid": loc_public_martigny.pid, + "checkin_transaction_location_pid": loc_public_fully.pid, } item, loan = item_record_to_a_specific_loan_state( item=item_lib_martigny, loan_state=LoanState.ITEM_IN_TRANSIT_TO_HOUSE, - params=params, copy_item=True) + params=params, + copy_item=True, + ) return item, patron_martigny, loan @@ -1555,64 +1619,59 @@ def item6_in_transit_martigny_patron_and_loan_to_house( @pytest.fixture(scope="module") def ill_request_martigny_data(data): """Load ill request for Martigny location.""" - return deepcopy(data.get('illr1')) + return deepcopy(data.get("illr1")) @pytest.fixture(scope="function") def ill_request_martigny_data_tmp(data): """Load ill request for Martigny location.""" - return deepcopy(data.get('illr1')) + return deepcopy(data.get("illr1")) @pytest.fixture(scope="module") -def ill_request_martigny(app, loc_public_martigny, patron_martigny, - ill_request_martigny_data): +def ill_request_martigny( + app, loc_public_martigny, patron_martigny, ill_request_martigny_data +): """Create ill request for Martigny location.""" illr = ILLRequest.create( - data=ill_request_martigny_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(ILLRequestsSearch.Meta.index) - flush_index(OperationLogsSearch.Meta.index) + data=ill_request_martigny_data, delete_pid=False, dbcommit=True, reindex=True + ) + ILLRequestsSearch.flush_and_refresh() + OperationLogsSearch.flush_and_refresh() return illr @pytest.fixture(scope="module") def ill_request_martigny2_data(data): """Load ill request for martigny2 location.""" - return deepcopy(data.get('illr3')) + return deepcopy(data.get("illr3")) @pytest.fixture(scope="module") -def ill_request_martigny2(app, loc_public_martigny, patron_martigny_no_email, - ill_request_martigny2_data): +def ill_request_martigny2( + app, loc_public_martigny, patron_martigny_no_email, ill_request_martigny2_data +): """Create ill request for Martigny2 location.""" illr = ILLRequest.create( - data=ill_request_martigny2_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(ILLRequestsSearch.Meta.index) + data=ill_request_martigny2_data, delete_pid=False, dbcommit=True, reindex=True + ) + ILLRequestsSearch.flush_and_refresh() return illr @pytest.fixture(scope="module") def ill_request_sion_data(data): """Load ill request for Sion location.""" - return deepcopy(data.get('illr2')) + return deepcopy(data.get("illr2")) @pytest.fixture(scope="module") -def ill_request_sion(app, loc_public_sion, patron_sion, - ill_request_sion_data): +def ill_request_sion(app, loc_public_sion, patron_sion, ill_request_sion_data): """Create ill request for Sion location.""" illr = ILLRequest.create( - data=ill_request_sion_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(ILLRequestsSearch.Meta.index) + data=ill_request_sion_data, delete_pid=False, dbcommit=True, reindex=True + ) + ILLRequestsSearch.flush_and_refresh() return illr @@ -1620,10 +1679,10 @@ def ill_request_sion(app, loc_public_sion, patron_sion, @pytest.fixture(scope="module") def user_data_tmp(data): """Load user data.""" - return 
deepcopy(data.get('user1')) + return deepcopy(data.get("user1")) -@pytest.fixture(scope='function') +@pytest.fixture(scope="function") def default_user_password(): """Default user password.""" - return 'Pw123456' + return "Pw123456" diff --git a/tests/fixtures/mef.py b/tests/fixtures/mef.py index 91e6e8b460..132a4f9680 100644 --- a/tests/fixtures/mef.py +++ b/tests/fixtures/mef.py @@ -21,24 +21,23 @@ import pytest -from rero_ils.modules.entities.remote_entities.api import \ - RemoteEntitiesSearch, RemoteEntity +from rero_ils.modules.entities.remote_entities.api import ( + RemoteEntitiesSearch, + RemoteEntity, +) @pytest.fixture(scope="module") def mef_concept1_data(mef_entities): """Load MEF concept_1 data.""" - return deepcopy(mef_entities.get('concept_1')) + return deepcopy(mef_entities.get("concept_1")) @pytest.fixture(scope="module") def mef_concept1(mef_concept1_data): """Load MEF concept_1 data.""" entity = RemoteEntity.create( - data=mef_concept1_data, - dbcommit=True, - reindex=True, - delete_pid=False + data=mef_concept1_data, dbcommit=True, reindex=True, delete_pid=False ) RemoteEntitiesSearch.flush_and_refresh() return entity @@ -47,7 +46,7 @@ def mef_concept1(mef_concept1_data): @pytest.fixture(scope="module") def mef_concept1_data_tmp(mef_entities): """Load MEF concept_1 data.""" - return deepcopy(mef_entities.get('concept_1')) + return deepcopy(mef_entities.get("concept_1")) @pytest.fixture(scope="module") @@ -55,29 +54,25 @@ def mef_concept1_es_response(mef_concept1_data_tmp): """Get MEF ES response for `concept_1` entities.""" # transform data to a valid MEF ES hit response data = deepcopy(mef_concept1_data_tmp) - data['$schema'] = \ - 'https://mef.rero.ch/schemas/concepts_mef/mef-concept-v0.0.1.json' - data.pop('type', None) + data["$schema"] = "https://mef.rero.ch/schemas/concepts_mef/mef-concept-v0.0.1.json" + data.pop("type", None) - return {'hits': {'hits': [{ - 'id': data['idref']['pid'], - 'metadata': data - }]}} + return {"hits": {"hits": [{"id": data["idref"]["pid"], "metadata": data}]}} @pytest.fixture(scope="module") def mef_concept2_es_response(mef_entities): """Load MEF es_concept_1 data.""" - return deepcopy(mef_entities.get('es_concepts_1')) + return deepcopy(mef_entities.get("es_concepts_1")) @pytest.fixture(scope="module") def mef_agents1_es_response(mef_entities): """Load MEF es_agents_1 data.""" - return deepcopy(mef_entities.get('es_agents_1')) + return deepcopy(mef_entities.get("es_agents_1")) @pytest.fixture(scope="module") def mef_places1_es_response(mef_entities): """Load MEF es_places_1 data.""" - return deepcopy(mef_entities.get('es_places_1')) + return deepcopy(mef_entities.get("es_places_1")) diff --git a/tests/fixtures/metadata.py b/tests/fixtures/metadata.py index fed7616f87..10b022bc5e 100644 --- a/tests/fixtures/metadata.py +++ b/tests/fixtures/metadata.py @@ -26,21 +26,23 @@ import mock import pytest from invenio_files_rest.models import Location -from utils import flush_index, mock_response +from utils import mock_response from rero_ils.modules.documents.api import Document, DocumentsSearch -from rero_ils.modules.entities.local_entities.api import LocalEntitiesSearch, \ - LocalEntity -from rero_ils.modules.entities.remote_entities.api import \ - RemoteEntitiesSearch, RemoteEntity -from rero_ils.modules.files.cli import create_pdf_record_files, \ - load_files_for_document +from rero_ils.modules.entities.local_entities.api import ( + LocalEntitiesSearch, + LocalEntity, +) +from rero_ils.modules.entities.remote_entities.api import ( + 
RemoteEntitiesSearch, + RemoteEntity, +) +from rero_ils.modules.files.cli import create_pdf_record_files, load_files_for_document from rero_ils.modules.holdings.api import Holding, HoldingsSearch from rero_ils.modules.items.api import Item, ItemsSearch from rero_ils.modules.local_fields.api import LocalField, LocalFieldsSearch from rero_ils.modules.operation_logs.api import OperationLog -from rero_ils.modules.stats_cfg.api import StatConfiguration, \ - StatsConfigurationSearch +from rero_ils.modules.stats_cfg.api import StatConfiguration, StatsConfigurationSearch from rero_ils.modules.templates.api import Template, TemplatesSearch from rero_ils.modules.utils import get_ref_for_pid @@ -70,140 +72,127 @@ def file_location(database): @pytest.fixture(scope="module") def ebook_1_data(data): """Load ebook 1 data.""" - return deepcopy(data.get('ebook1')) + return deepcopy(data.get("ebook1")) @pytest.fixture(scope="module") def ebook_1(app, ebook_1_data): """Load ebook 1 record.""" - del ebook_1_data['electronicLocator'] + del ebook_1_data["electronicLocator"] doc = Document.create( - data=ebook_1_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(DocumentsSearch.Meta.index) + data=ebook_1_data, delete_pid=False, dbcommit=True, reindex=True + ) + DocumentsSearch.flush_and_refresh() return doc @pytest.fixture(scope="module") def ebook_2_data(data): """Load ebook 2 data.""" - return deepcopy(data.get('ebook2')) + return deepcopy(data.get("ebook2")) @pytest.fixture(scope="module") def ebook_2(app, ebook_2_data): """Load ebook 2 record.""" - del ebook_2_data['electronicLocator'] + del ebook_2_data["electronicLocator"] doc = Document.create( - data=ebook_2_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(DocumentsSearch.Meta.index) + data=ebook_2_data, delete_pid=False, dbcommit=True, reindex=True + ) + DocumentsSearch.flush_and_refresh() return doc @pytest.fixture(scope="module") def ebook_3_data(data): """Load ebook 3 data.""" - return deepcopy(data.get('ebook3')) + return deepcopy(data.get("ebook3")) @pytest.fixture(scope="module") def ebook_3(app, ebook_3_data): """Load ebook 3 record.""" - del ebook_3_data['electronicLocator'] + del ebook_3_data["electronicLocator"] doc = Document.create( - data=ebook_3_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(DocumentsSearch.Meta.index) + data=ebook_3_data, delete_pid=False, dbcommit=True, reindex=True + ) + DocumentsSearch.flush_and_refresh() return doc @pytest.fixture(scope="module") def ebook_4_data(data): """Load ebook 4 data.""" - return deepcopy(data.get('ebook4')) + return deepcopy(data.get("ebook4")) @pytest.fixture(scope="module") def ebook_4(app, ebook_4_data): """Load ebook 4 record.""" - del ebook_4_data['electronicLocator'] + del ebook_4_data["electronicLocator"] doc = Document.create( - data=ebook_4_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(DocumentsSearch.Meta.index) + data=ebook_4_data, delete_pid=False, dbcommit=True, reindex=True + ) + DocumentsSearch.flush_and_refresh() return doc @pytest.fixture(scope="module") def ebook_5_data(data): """Load ebook 5 data.""" - return deepcopy(data.get('ebook5')) + return deepcopy(data.get("ebook5")) @pytest.fixture(scope="module") def ebook_5(app, ebook_5_data): """Load ebook 5 record.""" - del ebook_5_data['electronicLocator'] + del ebook_5_data["electronicLocator"] doc = Document.create( - data=ebook_5_data, - delete_pid=False, - dbcommit=True, - reindex=True) - 
flush_index(DocumentsSearch.Meta.index) + data=ebook_5_data, delete_pid=False, dbcommit=True, reindex=True + ) + DocumentsSearch.flush_and_refresh() return doc @pytest.fixture(scope="module") def document_data(data): """Load document data.""" - return deepcopy(data.get('doc1')) + return deepcopy(data.get("doc1")) @pytest.fixture(scope="module") def document_chinese_data(data): """Load chinese document data.""" - return deepcopy(data.get('doc4')) + return deepcopy(data.get("doc4")) @pytest.fixture(scope="function") def document_data_tmp(data): """Load document data scope function.""" - return deepcopy(data.get('doc1')) + return deepcopy(data.get("doc1")) @pytest.fixture(scope="module") def document(app, document_data): """Load document record.""" doc = Document.create( - data=document_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(DocumentsSearch.Meta.index) + data=document_data, delete_pid=False, dbcommit=True, reindex=True + ) + DocumentsSearch.flush_and_refresh() return doc -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def document_with_files(document, lib_martigny, file_location): """Create a document with a pdf file attached.""" metadata = dict( - library={'$ref': get_ref_for_pid('lib', lib_martigny.pid)}, - collections=['col1', 'col2'] + library={"$ref": get_ref_for_pid("lib", lib_martigny.pid)}, + collections=["col1", "col2"], ) create_pdf_record_files(document, metadata, flush=True) - file_path = join(dirname(__file__), '../data/help/files/logo_rero_ils.png') - load_files_for_document( - document=document, metadata=metadata, files=[file_path]) - flush_index(DocumentsSearch.Meta.index) + file_path = join(dirname(__file__), "../data/help/files/logo_rero_ils.png") + load_files_for_document(document=document, metadata=metadata, files=[file_path]) + DocumentsSearch.flush_and_refresh() yield document @@ -211,11 +200,9 @@ def document_with_files(document, lib_martigny, file_location): def document_with_issn(app, journal_data_with_issn): """Load document record.""" doc = Document.create( - data=journal_data_with_issn, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(DocumentsSearch.Meta.index) + data=journal_data_with_issn, delete_pid=False, dbcommit=True, reindex=True + ) + DocumentsSearch.flush_and_refresh() return doc @@ -223,125 +210,114 @@ def document_with_issn(app, journal_data_with_issn): def document2_with_issn(app, journal2_data_with_issn): """Load document record.""" doc = Document.create( - data=journal2_data_with_issn, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(DocumentsSearch.Meta.index) + data=journal2_data_with_issn, delete_pid=False, dbcommit=True, reindex=True + ) + DocumentsSearch.flush_and_refresh() return doc @pytest.fixture(scope="module") def document_data_ref(data): """Load document ref data.""" - return deepcopy(data.get('doc2')) + return deepcopy(data.get("doc2")) @pytest.fixture(scope="module") def document_data_subject_ref(data): """Load document ref data.""" - return deepcopy(data.get('doc9')) + return deepcopy(data.get("doc9")) @pytest.fixture(scope="module") def document2_data_ref(data): """Load document ref data.""" - return deepcopy(data.get('doc7')) + return deepcopy(data.get("doc7")) @pytest.fixture(scope="module") def export_document_data(data): """Load document data.""" - return deepcopy(data.get('doc8')) + return deepcopy(data.get("doc8")) @pytest.fixture(scope="module") def export_document(app, export_document_data): """Load document record.""" doc = 
Document.create( - data=export_document_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(DocumentsSearch.Meta.index) + data=export_document_data, delete_pid=False, dbcommit=True, reindex=True + ) + DocumentsSearch.flush_and_refresh() return doc @pytest.fixture(scope="module") def journal_data_with_issn(data): """Load journal document with issn data.""" - return deepcopy(data.get('doc5')) + return deepcopy(data.get("doc5")) @pytest.fixture(scope="module") def journal2_data_with_issn(data): """Load journal document with issn data and periodical subtype.""" - return deepcopy(data.get('doc6')) + return deepcopy(data.get("doc6")) @pytest.fixture(scope="module") def journal_data(holdings): """Load journal data.""" - return deepcopy(holdings.get('doc4')) + return deepcopy(holdings.get("doc4")) @pytest.fixture(scope="module") def journal(app, journal_data): """Load journal record.""" doc = Document.create( - data=journal_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(DocumentsSearch.Meta.index) + data=journal_data, delete_pid=False, dbcommit=True, reindex=True + ) + DocumentsSearch.flush_and_refresh() return doc @pytest.fixture(scope="module") def entity_topic_data(data): """Load mef concept topic data.""" - return deepcopy(data.get('ent_topic')) + return deepcopy(data.get("ent_topic")) @pytest.fixture(scope="function") def entity_topic_data_tmp(app, data): """Load mef concept topic data scope function.""" - entity_topic = deepcopy(data.get('ent_topic')) - for source in app.config.get('RERO_ILS_AGENTS_SOURCES', []): + entity_topic = deepcopy(data.get("ent_topic")) + for source in app.config.get("RERO_ILS_AGENTS_SOURCES", []): if source in entity_topic: - entity_topic[source].pop('$schema', None) + entity_topic[source].pop("$schema", None) return entity_topic @pytest.fixture(scope="module") def entity_topic_data_2(data): """Load mef concept topic data.""" - return deepcopy(data.get('ent_topic2')) + return deepcopy(data.get("ent_topic2")) @pytest.fixture(scope="module") def entity_topic_data_temporal(data): """Load mef concept topic temporal data.""" - return deepcopy(data.get('ent_topic_temporal')) + return deepcopy(data.get("ent_topic_temporal")) @pytest.fixture(scope="module") def entity_place_data(data): """Load mef place data.""" - return deepcopy(data.get('ent_place')) + return deepcopy(data.get("ent_place")) @pytest.fixture(scope="module") def entity_person_response_data(entity_topic_data): """Load mef concept topic response data.""" return { - 'hits': { - 'hits': [ - { - 'id': entity_topic_data['pid'], - 'metadata': entity_topic_data - } - ] + "hits": { + "hits": [{"id": entity_topic_data["pid"], "metadata": entity_topic_data}] } } @@ -350,27 +326,25 @@ def entity_person_response_data(entity_topic_data): def entity_topic(app, entity_topic_data): """Load mef concept topic record.""" cont = RemoteEntity.create( - data=entity_topic_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(RemoteEntitiesSearch.Meta.index) + data=entity_topic_data, delete_pid=False, dbcommit=True, reindex=True + ) + RemoteEntitiesSearch.flush_and_refresh() return cont @pytest.fixture(scope="module") def entity_person_data(data): """Load mef contribution person data.""" - return deepcopy(data.get('ent_pers')) + return deepcopy(data.get("ent_pers")) @pytest.fixture(scope="function") def entity_person_data_tmp(app, data): """Load mef contribution person data scope function.""" - entity_person = deepcopy(data.get('ent_pers')) - for source in 
app.config.get('RERO_ILS_AGENTS_SOURCES', []): + entity_person = deepcopy(data.get("ent_pers")) + for source in app.config.get("RERO_ILS_AGENTS_SOURCES", []): if source in entity_person: - entity_person[source].pop('$schema', None) + entity_person[source].pop("$schema", None) return entity_person @@ -378,13 +352,8 @@ def entity_person_data_tmp(app, data): def entity_person_response_data(entity_person_data): """Load mef contribution person response data.""" return { - 'hits': { - 'hits': [ - { - 'id': entity_person_data['pid'], - 'metadata': entity_person_data - } - ] + "hits": { + "hits": [{"id": entity_person_data["pid"], "metadata": entity_person_data}] } } @@ -393,71 +362,65 @@ def entity_person_response_data(entity_person_data): def entity_person(app, entity_person_data): """Load contribution person record.""" cont = RemoteEntity.create( - data=entity_person_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(RemoteEntitiesSearch.Meta.index) + data=entity_person_data, delete_pid=False, dbcommit=True, reindex=True + ) + RemoteEntitiesSearch.flush_and_refresh() return cont @pytest.fixture(scope="module") def entity_person_data_all(data): """Load mef contribution person data.""" - return deepcopy(data.get('ent_pers_all')) + return deepcopy(data.get("ent_pers_all")) @pytest.fixture(scope="module") def entity_person_all(app, entity_person_data_all): """Load contribution person record.""" cont = RemoteEntity.create( - data=entity_person_data_all, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(RemoteEntitiesSearch.Meta.index) + data=entity_person_data_all, delete_pid=False, dbcommit=True, reindex=True + ) + RemoteEntitiesSearch.flush_and_refresh() return cont @pytest.fixture(scope="module") def entity_person_rero_data(data): """Load mef person data.""" - return deepcopy(data.get('ent_pers_rero')) + return deepcopy(data.get("ent_pers_rero")) @pytest.fixture(scope="module") def entity_person_rero(app, entity_person_rero_data): """Create mef person record.""" pers = RemoteEntity.create( - data=entity_person_rero_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(RemoteEntitiesSearch.Meta.index) + data=entity_person_rero_data, delete_pid=False, dbcommit=True, reindex=True + ) + RemoteEntitiesSearch.flush_and_refresh() return pers @pytest.fixture(scope="module") def entity_organisation_data(data): """Load mef contribution organisation data.""" - return deepcopy(data.get('ent_org')) + return deepcopy(data.get("ent_org")) @pytest.fixture(scope="function") def entity_organisation_data_tmp(data): """Load mef contribution organisation data scope function.""" - return deepcopy(data.get('cont_oeg')) + return deepcopy(data.get("cont_oeg")) @pytest.fixture(scope="module") def entity_organisation_response_data(entity_organisation_data): """Load mef contribution organisation response data.""" return { - 'hits': { - 'hits': [ + "hits": { + "hits": [ { - 'id': entity_organisation_data['pid'], - 'metadata': entity_organisation_data + "id": entity_organisation_data["pid"], + "metadata": entity_organisation_data, } ] } @@ -468,98 +431,83 @@ def entity_organisation_response_data(entity_organisation_data): def entity_organisation(app, entity_organisation_data): """Create mef contribution organisation record.""" org = RemoteEntity.create( - data=entity_organisation_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(RemoteEntitiesSearch.Meta.index) + data=entity_organisation_data, delete_pid=False, dbcommit=True, reindex=True + ) + 
RemoteEntitiesSearch.flush_and_refresh() return org @pytest.fixture(scope="module") def person2_data(data): """Load mef person data.""" - return deepcopy(data.get('ent_pers2')) + return deepcopy(data.get("ent_pers2")) @pytest.fixture(scope="function") def person2_data_tmp(data): """Load mef person data scope function.""" - return deepcopy(data.get('ent_pers2')) + return deepcopy(data.get("ent_pers2")) @pytest.fixture(scope="module") def person2_response_data(person2_data): """Load mef person response data.""" - return { - 'hits': { - 'hits': [ - { - 'id': person2_data['pid'], - 'metadata': person2_data - } - ] - } - } + return {"hits": {"hits": [{"id": person2_data["pid"], "metadata": person2_data}]}} @pytest.fixture(scope="module") def person2(app, person2_data): """Create mef person record.""" pers = RemoteEntity.create( - data=person2_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(RemoteEntitiesSearch.Meta.index) + data=person2_data, delete_pid=False, dbcommit=True, reindex=True + ) + RemoteEntitiesSearch.flush_and_refresh() return pers @pytest.fixture(scope="module") def local_entity_person_data(data): """Load local entity person data.""" - return deepcopy(data.get('locent_pers')) + return deepcopy(data.get("locent_pers")) @pytest.fixture(scope="module") def local_entity_person2_data(data): """Load local entity person 2 data.""" - return deepcopy(data.get('locent_pers2')) + return deepcopy(data.get("locent_pers2")) @pytest.fixture(scope="module") def local_entity_org_data(data): """Load local entity organisation data.""" - return deepcopy(data.get('locent_org')) + return deepcopy(data.get("locent_org")) @pytest.fixture(scope="module") def local_entity_org2_data(data): """Load local entity organisation 2 data.""" - return deepcopy(data.get('locent_org2')) + return deepcopy(data.get("locent_org2")) @pytest.fixture(scope="module") def local_entity_work_data(data): """Load local entity work data.""" - return deepcopy(data.get('locent_work')) + return deepcopy(data.get("locent_work")) @pytest.fixture(scope="module") def local_entity_genre_form_data(data): """Load mef genreForm local entity data.""" - return deepcopy(data.get('locent_genreForm')) + return deepcopy(data.get("locent_genreForm")) @pytest.fixture(scope="module") def local_entity_person(app, local_entity_person_data): """Create local entity person record.""" pers = LocalEntity.create( - data=local_entity_person_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(LocalEntitiesSearch.Meta.index) + data=local_entity_person_data, delete_pid=False, dbcommit=True, reindex=True + ) + LocalEntitiesSearch.flush_and_refresh() return pers @@ -567,11 +515,9 @@ def local_entity_person(app, local_entity_person_data): def local_entity_person2(app, local_entity_person2_data): """Create local entity person 2 record.""" pers = LocalEntity.create( - data=local_entity_person2_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(LocalEntitiesSearch.Meta.index) + data=local_entity_person2_data, delete_pid=False, dbcommit=True, reindex=True + ) + LocalEntitiesSearch.flush_and_refresh() return pers @@ -579,11 +525,9 @@ def local_entity_person2(app, local_entity_person2_data): def local_entity_org(app, local_entity_org_data): """Create local entity organisation record.""" org = LocalEntity.create( - data=local_entity_org_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(LocalEntitiesSearch.Meta.index) + data=local_entity_org_data, delete_pid=False, dbcommit=True, reindex=True + ) + 
LocalEntitiesSearch.flush_and_refresh() return org @@ -591,11 +535,9 @@ def local_entity_org(app, local_entity_org_data): def local_entity_org2(app, local_entity_org2_data): """Create local entity organisation 2 record.""" org = LocalEntity.create( - data=local_entity_org2_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(LocalEntitiesSearch.Meta.index) + data=local_entity_org2_data, delete_pid=False, dbcommit=True, reindex=True + ) + LocalEntitiesSearch.flush_and_refresh() return org @@ -603,329 +545,293 @@ def local_entity_org2(app, local_entity_org2_data): def local_entity_genre_form(app, local_entity_genre_form_data): """Create local entity genre form record.""" entity = LocalEntity.create( - data=local_entity_genre_form_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(LocalEntitiesSearch.Meta.index) + data=local_entity_genre_form_data, delete_pid=False, dbcommit=True, reindex=True + ) + LocalEntitiesSearch.flush_and_refresh() return entity @pytest.fixture(scope="module") -@mock.patch('requests.Session.get') -def document_ref(mock_contributions_mef_get, - app, document_data_ref, entity_person_response_data): +@mock.patch("requests.Session.get") +def document_ref( + mock_contributions_mef_get, app, document_data_ref, entity_person_response_data +): """Load document with mef records reference.""" mock_contributions_mef_get.return_value = mock_response( json_data=entity_person_response_data ) doc = Document.create( - data=document_data_ref, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(DocumentsSearch.Meta.index) + data=document_data_ref, delete_pid=False, dbcommit=True, reindex=True + ) + DocumentsSearch.flush_and_refresh() return doc @pytest.fixture(scope="module") -@mock.patch('requests.Session.get') -def document2_ref(mock_persons_mef_get, - app, document2_data_ref, person2_response_data): +@mock.patch("requests.Session.get") +def document2_ref(mock_persons_mef_get, app, document2_data_ref, person2_response_data): """Load document with mef records reference.""" - mock_persons_mef_get.return_value = mock_response( - json_data=person2_response_data - ) + mock_persons_mef_get.return_value = mock_response(json_data=person2_response_data) doc = Document.create( - data=document2_data_ref, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(DocumentsSearch.Meta.index) + data=document2_data_ref, delete_pid=False, dbcommit=True, reindex=True + ) + DocumentsSearch.flush_and_refresh() return doc @pytest.fixture(scope="module") def document_sion_items_data(data): """Load document data for sion items.""" - return deepcopy(data.get('doc3')) + return deepcopy(data.get("doc3")) @pytest.fixture(scope="function") def document_sion_items_data_tmp(data): """Load document data for sion items scope function.""" - return deepcopy(data.get('doc3')) + return deepcopy(data.get("doc3")) @pytest.fixture(scope="module") def document_sion_items(app, document_sion_items_data): """Create document data for sion items.""" doc = Document.create( - data=document_sion_items_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(DocumentsSearch.Meta.index) + data=document_sion_items_data, delete_pid=False, dbcommit=True, reindex=True + ) + DocumentsSearch.flush_and_refresh() return doc @pytest.fixture(scope="module") def item_lib_martigny_data(data): """Load item of martigny library.""" - return deepcopy(data.get('item1')) + return deepcopy(data.get("item1")) @pytest.fixture(scope="module") def item_lib_martigny_bourg_data(data): """Load item of 
martigny bourg library.""" - return deepcopy(data.get('item10')) + return deepcopy(data.get("item10")) @pytest.fixture(scope="module") def provisional_item_lib_martigny_data(data): """Load provisional item of martigny library.""" - return deepcopy(data.get('item11')) + return deepcopy(data.get("item11")) @pytest.fixture(scope="function") def item_lib_martigny_data_tmp(data): """Load item of martigny library scope function.""" - return deepcopy(data.get('item1')) + return deepcopy(data.get("item1")) @pytest.fixture(scope="module") def item_lib_martigny( - app, - document, - item_lib_martigny_data, - loc_public_martigny, - item_type_standard_martigny): + app, + document, + item_lib_martigny_data, + loc_public_martigny, + item_type_standard_martigny, +): """Create item of martigny library.""" item = Item.create( - data=item_lib_martigny_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(ItemsSearch.Meta.index) + data=item_lib_martigny_data, delete_pid=False, dbcommit=True, reindex=True + ) + ItemsSearch.flush_and_refresh() return item @pytest.fixture(scope="module") def item_lib_martigny_bourg( - app, - document, - item_lib_martigny_bourg_data, - loc_public_martigny_bourg, - item_type_standard_martigny): + app, + document, + item_lib_martigny_bourg_data, + loc_public_martigny_bourg, + item_type_standard_martigny, +): """Create item of martigny library bourg.""" item = Item.create( - data=item_lib_martigny_bourg_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(ItemsSearch.Meta.index) + data=item_lib_martigny_bourg_data, delete_pid=False, dbcommit=True, reindex=True + ) + ItemsSearch.flush_and_refresh() return item @pytest.fixture(scope="module") def item2_lib_martigny_data(data): """Load item of martigny library.""" - return deepcopy(data.get('item5')) + return deepcopy(data.get("item5")) @pytest.fixture(scope="function") def item2_lib_martigny_data_tmp(data): """Load item of martigny library scope function.""" - return deepcopy(data.get('item5')) + return deepcopy(data.get("item5")) @pytest.fixture(scope="module") def item2_lib_martigny( - app, - document, - item2_lib_martigny_data, - loc_public_martigny, - item_type_standard_martigny): + app, + document, + item2_lib_martigny_data, + loc_public_martigny, + item_type_standard_martigny, +): """Create item2 of martigny library.""" item = Item.create( - data=item2_lib_martigny_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(ItemsSearch.Meta.index) + data=item2_lib_martigny_data, delete_pid=False, dbcommit=True, reindex=True + ) + ItemsSearch.flush_and_refresh() return item @pytest.fixture(scope="module") def item3_lib_martigny_data(data): """Load item of martigny library.""" - return deepcopy(data.get('item7')) + return deepcopy(data.get("item7")) @pytest.fixture(scope="function") def item3_lib_martigny_data_tmp(data): """Load item of martigny library scope function.""" - return deepcopy(data.get('item7')) + return deepcopy(data.get("item7")) @pytest.fixture(scope="module") def item3_lib_martigny( - app, - document, - item3_lib_martigny_data, - loc_public_martigny, - item_type_standard_martigny): + app, + document, + item3_lib_martigny_data, + loc_public_martigny, + item_type_standard_martigny, +): """Create item3 of martigny library.""" item = Item.create( - data=item3_lib_martigny_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(ItemsSearch.Meta.index) + data=item3_lib_martigny_data, delete_pid=False, dbcommit=True, reindex=True + ) + 
ItemsSearch.flush_and_refresh() return item @pytest.fixture(scope="module") def item4_lib_martigny_data(data): """Load item of martigny library.""" - return deepcopy(data.get('item8')) + return deepcopy(data.get("item8")) @pytest.fixture(scope="function") def item4_lib_martigny_data_tmp(data): """Load item of martigny library scope function.""" - return deepcopy(data.get('item8')) + return deepcopy(data.get("item8")) @pytest.fixture(scope="module") def item4_lib_martigny( - app, - document, - item4_lib_martigny_data, - loc_public_martigny, - item_type_standard_martigny): + app, + document, + item4_lib_martigny_data, + loc_public_martigny, + item_type_standard_martigny, +): """Create item of martigny library.""" item = Item.create( - data=item4_lib_martigny_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(ItemsSearch.Meta.index) + data=item4_lib_martigny_data, delete_pid=False, dbcommit=True, reindex=True + ) + ItemsSearch.flush_and_refresh() return item @pytest.fixture(scope="module") def item_lib_saxon_data(data): """Load item of saxon library.""" - return deepcopy(data.get('item2')) + return deepcopy(data.get("item2")) @pytest.fixture(scope="module") def item_lib_saxon( - app, - document, - item_lib_saxon_data, - loc_public_saxon, - item_type_standard_martigny): + app, document, item_lib_saxon_data, loc_public_saxon, item_type_standard_martigny +): """Create item of saxon library.""" item = Item.create( - data=item_lib_saxon_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(ItemsSearch.Meta.index) + data=item_lib_saxon_data, delete_pid=False, dbcommit=True, reindex=True + ) + ItemsSearch.flush_and_refresh() return item @pytest.fixture(scope="module") def item_lib_fully_data(data): """Load item of fully library.""" - return deepcopy(data.get('item3')) + return deepcopy(data.get("item3")) @pytest.fixture(scope="module") def item_lib_fully( - app, - document, - item_lib_fully_data, - loc_public_fully, - item_type_standard_martigny): + app, document, item_lib_fully_data, loc_public_fully, item_type_standard_martigny +): """Create item of fully library.""" item = Item.create( - data=item_lib_fully_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(ItemsSearch.Meta.index) + data=item_lib_fully_data, delete_pid=False, dbcommit=True, reindex=True + ) + ItemsSearch.flush_and_refresh() return item @pytest.fixture(scope="module") def item_lib_sion_data(data): """Load item of sion library.""" - return deepcopy(data.get('item4')) + return deepcopy(data.get("item4")) @pytest.fixture(scope="module") def item_lib_sion( - app, - document_sion_items, - item_lib_sion_data, - loc_public_sion, - item_type_regular_sion): + app, + document_sion_items, + item_lib_sion_data, + loc_public_sion, + item_type_regular_sion, +): """Create item of sion library.""" item = Item.create( - data=item_lib_sion_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(ItemsSearch.Meta.index) + data=item_lib_sion_data, delete_pid=False, dbcommit=True, reindex=True + ) + ItemsSearch.flush_and_refresh() return item @pytest.fixture(scope="module") def item2_lib_sion_data(data): """Load item of sion library.""" - return deepcopy(data.get('item6')) + return deepcopy(data.get("item6")) @pytest.fixture(scope="module") def item2_lib_sion( - app, - document, - item2_lib_sion_data, - loc_restricted_sion, - item_type_regular_sion): + app, document, item2_lib_sion_data, loc_restricted_sion, item_type_regular_sion +): """Create item of sion library.""" item 
= Item.create( - data=item2_lib_sion_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(ItemsSearch.Meta.index) + data=item2_lib_sion_data, delete_pid=False, dbcommit=True, reindex=True + ) + ItemsSearch.flush_and_refresh() return item @pytest.fixture(scope="module") def item2_lib_saxon_data(data): """Load item of saxon library.""" - return deepcopy(data.get('item9')) + return deepcopy(data.get("item9")) @pytest.fixture(scope="module") def item2_lib_saxon( - app, - document, - item2_lib_saxon_data, - loc_public_saxon, - item_type_standard_martigny): + app, document, item2_lib_saxon_data, loc_public_saxon, item_type_standard_martigny +): """Create item of saxon library.""" item = Item.create( - data=item2_lib_saxon_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(ItemsSearch.Meta.index) + data=item2_lib_saxon_data, delete_pid=False, dbcommit=True, reindex=True + ) + ItemsSearch.flush_and_refresh() return item @@ -935,493 +841,511 @@ def item2_lib_saxon( @pytest.fixture(scope="module") def holding_lib_martigny_data(holdings): """Load holding of martigny library.""" - return deepcopy(holdings.get('holding1')) + return deepcopy(holdings.get("holding1")) @pytest.fixture(scope="function") def holding_lib_martigny_data_tmp(holdings): """Load holding of martigny library scope function.""" - return deepcopy(holdings.get('holding1')) + return deepcopy(holdings.get("holding1")) @pytest.fixture(scope="module") -def holding_lib_martigny(app, loc_public_martigny, item_type_standard_martigny, - document, holding_lib_martigny_data): +def holding_lib_martigny( + app, + loc_public_martigny, + item_type_standard_martigny, + document, + holding_lib_martigny_data, +): """Create holding of martigny library.""" holding = Holding.create( - data=holding_lib_martigny_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(HoldingsSearch.Meta.index) + data=holding_lib_martigny_data, delete_pid=False, dbcommit=True, reindex=True + ) + HoldingsSearch.flush_and_refresh() return holding @pytest.fixture(scope="module") def holding_lib_saxon_data(holdings): """Load holding of saxon library.""" - return deepcopy(holdings.get('holding2')) + return deepcopy(holdings.get("holding2")) @pytest.fixture(scope="module") -def holding_lib_saxon(app, document, holding_lib_saxon_data, - loc_public_saxon, item_type_standard_martigny): +def holding_lib_saxon( + app, document, holding_lib_saxon_data, loc_public_saxon, item_type_standard_martigny +): """Create holding of saxon library.""" holding = Holding.create( - data=holding_lib_saxon_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(HoldingsSearch.Meta.index) + data=holding_lib_saxon_data, delete_pid=False, dbcommit=True, reindex=True + ) + HoldingsSearch.flush_and_refresh() return holding @pytest.fixture(scope="module") def holding_lib_fully_data(holdings): """Load holding of fully library.""" - return deepcopy(holdings.get('holding3')) + return deepcopy(holdings.get("holding3")) @pytest.fixture(scope="module") -def holding_lib_fully(app, document, holding_lib_fully_data, - loc_public_fully, item_type_standard_martigny): +def holding_lib_fully( + app, document, holding_lib_fully_data, loc_public_fully, item_type_standard_martigny +): """Create holding of fully library.""" holding = Holding.create( - data=holding_lib_fully_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(HoldingsSearch.Meta.index) + data=holding_lib_fully_data, delete_pid=False, dbcommit=True, reindex=True + 
) + HoldingsSearch.flush_and_refresh() return holding @pytest.fixture(scope="module") def holding_lib_sion_data(holdings): """Load holding of sion library.""" - return deepcopy(holdings.get('holding4')) + return deepcopy(holdings.get("holding4")) @pytest.fixture(scope="module") -def holding_lib_sion(app, document, holding_lib_sion_data, - loc_public_sion, item_type_internal_sion): +def holding_lib_sion( + app, document, holding_lib_sion_data, loc_public_sion, item_type_internal_sion +): """Create holding of sion library.""" holding = Holding.create( - data=holding_lib_sion_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(HoldingsSearch.Meta.index) + data=holding_lib_sion_data, delete_pid=False, dbcommit=True, reindex=True + ) + HoldingsSearch.flush_and_refresh() return holding # --------- Holdings with patterns records ----------- + @pytest.fixture(scope="module") def holding_lib_martigny_w_patterns_data(holdings): """Load holding of martigny library.""" - return deepcopy(holdings.get('holding5')) + return deepcopy(holdings.get("holding5")) @pytest.fixture(scope="module") def holding_lib_martigny_w_patterns( - app, journal, holding_lib_martigny_w_patterns_data, - loc_public_martigny, item_type_standard_martigny, - vendor_martigny): + app, + journal, + holding_lib_martigny_w_patterns_data, + loc_public_martigny, + item_type_standard_martigny, + vendor_martigny, +): """Create holding of martigny library with patterns.""" holding = Holding.create( data=holding_lib_martigny_w_patterns_data, delete_pid=False, dbcommit=True, - reindex=True) - flush_index(HoldingsSearch.Meta.index) + reindex=True, + ) + HoldingsSearch.flush_and_refresh() return holding @pytest.fixture(scope="module") def holding_lib_saxon_w_patterns_data(holdings): """Load holding of saxon library.""" - return deepcopy(holdings.get('holding8')) + return deepcopy(holdings.get("holding8")) @pytest.fixture(scope="module") def holding_lib_saxon_w_patterns( - app, journal, holding_lib_saxon_w_patterns_data, - loc_public_saxon, item_type_standard_martigny, - vendor_martigny): + app, + journal, + holding_lib_saxon_w_patterns_data, + loc_public_saxon, + item_type_standard_martigny, + vendor_martigny, +): """Create holding of saxon library with patterns.""" holding = Holding.create( data=holding_lib_saxon_w_patterns_data, delete_pid=False, dbcommit=True, - reindex=True) - flush_index(HoldingsSearch.Meta.index) + reindex=True, + ) + HoldingsSearch.flush_and_refresh() return holding @pytest.fixture(scope="module") def holding_lib_sion_w_patterns_data(holdings): """Load holding of sion library.""" - return deepcopy(holdings.get('holding6')) + return deepcopy(holdings.get("holding6")) @pytest.fixture(scope="module") def holding_lib_sion_w_patterns( - app, journal, holding_lib_sion_w_patterns_data, - loc_public_sion, item_type_regular_sion): + app, + journal, + holding_lib_sion_w_patterns_data, + loc_public_sion, + item_type_regular_sion, +): """Create holding of sion library with patterns.""" holding = Holding.create( data=holding_lib_sion_w_patterns_data, delete_pid=False, dbcommit=True, - reindex=True) - flush_index(HoldingsSearch.Meta.index) + reindex=True, + ) + HoldingsSearch.flush_and_refresh() return holding @pytest.fixture(scope="module") def holding_lib_sion_electronic_data(holdings): """Load electronic holding of Sion library.""" - return deepcopy(holdings.get('holding7')) + return deepcopy(holdings.get("holding7")) @pytest.fixture(scope="module") def holding_lib_sion_electronic( - app, ebook_5, 
holding_lib_sion_electronic_data, - loc_public_sion, item_type_online_sion): + app, + ebook_5, + holding_lib_sion_electronic_data, + loc_public_sion, + item_type_online_sion, +): """Create electronic holding of Sion library.""" holding = Holding.create( data=holding_lib_sion_electronic_data, delete_pid=False, dbcommit=True, - reindex=True) - flush_index(HoldingsSearch.Meta.index) + reindex=True, + ) + HoldingsSearch.flush_and_refresh() return holding + # --------- Pattern records ----------- @pytest.fixture(scope="module") def pattern_quarterly_one_level_data(holdings): """Load holding with patterns of martigny library.""" - del holdings['pattern1']['template_name'] - return deepcopy(holdings.get('pattern1')) + del holdings["pattern1"]["template_name"] + return deepcopy(holdings.get("pattern1")) @pytest.fixture(scope="module") def pattern_yearly_one_level_data(holdings): """Load patterns of martigny library.""" - del holdings['pattern2']['template_name'] - return deepcopy(holdings.get('pattern2')) + del holdings["pattern2"]["template_name"] + return deepcopy(holdings.get("pattern2")) @pytest.fixture(scope="module") def pattern_yearly_one_level_with_label_data(holdings): """Load patterns of martigny library.""" - del holdings['pattern3']['template_name'] - return deepcopy(holdings.get('pattern3')) + del holdings["pattern3"]["template_name"] + return deepcopy(holdings.get("pattern3")) @pytest.fixture(scope="module") def pattern_yearly_two_times_data(holdings): """Load patterns of martigny library.""" - del holdings['pattern4']['template_name'] - return deepcopy(holdings.get('pattern4')) + del holdings["pattern4"]["template_name"] + return deepcopy(holdings.get("pattern4")) @pytest.fixture(scope="module") def pattern_quarterly_two_levels_data(holdings): """Load patterns of martigny library.""" - del holdings['pattern5']['template_name'] - return deepcopy(holdings.get('pattern5')) + del holdings["pattern5"]["template_name"] + return deepcopy(holdings.get("pattern5")) @pytest.fixture(scope="module") def pattern_quarterly_two_levels_with_season_data(holdings): """Load patterns of martigny library.""" - del holdings['pattern6']['template_name'] - return deepcopy(holdings.get('pattern6')) + del holdings["pattern6"]["template_name"] + return deepcopy(holdings.get("pattern6")) @pytest.fixture(scope="module") def pattern_half_yearly_one_level_data(holdings): """Load patterns of martigny library.""" - del holdings['pattern7']['template_name'] - return deepcopy(holdings.get('pattern7')) + del holdings["pattern7"]["template_name"] + return deepcopy(holdings.get("pattern7")) @pytest.fixture(scope="module") def pattern_bimonthly_every_two_months_one_level_data(holdings): """Load patterns of martigny library.""" - del holdings['pattern8']['template_name'] - return deepcopy(holdings.get('pattern8')) + del holdings["pattern8"]["template_name"] + return deepcopy(holdings.get("pattern8")) @pytest.fixture(scope="module") def pattern_half_yearly_two_levels_data(holdings): """Load patterns of martigny library.""" - del holdings['pattern9']['template_name'] - return deepcopy(holdings.get('pattern9')) + del holdings["pattern9"]["template_name"] + return deepcopy(holdings.get("pattern9")) @pytest.fixture(scope="module") def pattern_bimonthly_every_two_months_two_levels_data(holdings): """Load patterns of martigny library.""" - del 
holdings['pattern10']['template_name'] - return deepcopy(holdings.get('pattern10')) + del holdings["pattern10"]["template_name"] + return deepcopy(holdings.get("pattern10")) -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def ebooks_1_xml(): """Load ebook1 xml file.""" - filepath = join(dirname(__file__), '..', 'data', 'xml', 'ebook1.xml') + filepath = join(dirname(__file__), "..", "data", "xml", "ebook1.xml") with open(filepath) as fh: return fh.read() -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def ebooks_2_xml(): """Load ebook2 xml file.""" - filepath = join(dirname(__file__), '..', 'data', 'xml', 'ebook2.xml') + filepath = join(dirname(__file__), "..", "data", "xml", "ebook2.xml") with open(filepath) as fh: return fh.read() -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def babel_filehandle(): """Load babel extraction json file.""" - return open( - join(dirname(__file__), '..', 'data', 'babel_extraction.json'), - 'rb' - ) + return open(join(dirname(__file__), "..", "data", "babel_extraction.json"), "rb") -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def documents_marcxml(): """Load marc xml records in one file.""" - filepath = join(dirname(__file__), '..', 'data', 'xml', 'documents.xml') + filepath = join(dirname(__file__), "..", "data", "xml", "documents.xml") with open(filepath) as fh: return fh.read() -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def document_marcxml(): """Load one marc xml record in one file.""" - filepath = join(dirname(__file__), '..', 'data', 'xml', 'document.xml') + filepath = join(dirname(__file__), "..", "data", "xml", "document.xml") with open(filepath) as fh: return fh.read() + # --------- Template records ----------- @pytest.fixture(scope="function") def templ_doc_public_martigny_data_tmp(data): """Load template for a public document martigny data scope function.""" - return deepcopy(data.get('tmpl1')) + return deepcopy(data.get("tmpl1")) @pytest.fixture(scope="module") def templ_doc_public_martigny_data(data): """Load template for a public document martigny data.""" - return deepcopy(data.get('tmpl1')) + return deepcopy(data.get("tmpl1")) @pytest.fixture(scope="module") def templ_doc_public_martigny( - app, org_martigny, templ_doc_public_martigny_data, - system_librarian_martigny): + app, org_martigny, templ_doc_public_martigny_data, system_librarian_martigny +): """Create template for a public document martigny.""" template = Template.create( data=templ_doc_public_martigny_data, delete_pid=False, dbcommit=True, - reindex=True) - flush_index(TemplatesSearch.Meta.index) + reindex=True, + ) + TemplatesSearch.flush_and_refresh() return template @pytest.fixture(scope="module") def templ_doc_private_martigny_data(data): """Load template for a private document martigny data.""" - return deepcopy(data.get('tmpl2')) + return deepcopy(data.get("tmpl2")) @pytest.fixture(scope="function") def templ_doc_private_martigny_data_tmp(data): """Load template for a private document martigny data.""" - return deepcopy(data.get('tmpl2')) + return deepcopy(data.get("tmpl2")) @pytest.fixture(scope="module") def templ_doc_private_martigny( - app, org_martigny, templ_doc_private_martigny_data, - librarian_martigny): + app, org_martigny, templ_doc_private_martigny_data, librarian_martigny +): """Create template for a private document martigny.""" template = Template.create( data=templ_doc_private_martigny_data, delete_pid=False, dbcommit=True, - reindex=True) - 
flush_index(TemplatesSearch.Meta.index) + reindex=True, + ) + TemplatesSearch.flush_and_refresh() return template @pytest.fixture(scope="module") def templ_doc_private_saxon_data(data): """Load template for a private document saxon data.""" - return deepcopy(data.get('tmpl7')) + return deepcopy(data.get("tmpl7")) @pytest.fixture(scope="module") def templ_doc_private_saxon( - app, org_martigny, templ_doc_private_saxon_data, - librarian_saxon): + app, org_martigny, templ_doc_private_saxon_data, librarian_saxon +): """Create template for a private document saxon.""" template = Template.create( - data=templ_doc_private_saxon_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(TemplatesSearch.Meta.index) + data=templ_doc_private_saxon_data, delete_pid=False, dbcommit=True, reindex=True + ) + TemplatesSearch.flush_and_refresh() return template @pytest.fixture(scope="module") def templ_doc_public_saxon_data(data): """Load template for a public document saxon data.""" - return deepcopy(data.get('tmpl8')) + return deepcopy(data.get("tmpl8")) @pytest.fixture(scope="module") def templ_doc_public_saxon( - app, org_martigny, templ_doc_public_saxon_data, - librarian_saxon): + app, org_martigny, templ_doc_public_saxon_data, librarian_saxon +): """Create template for a public document saxon.""" template = Template.create( - data=templ_doc_public_saxon_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(TemplatesSearch.Meta.index) + data=templ_doc_public_saxon_data, delete_pid=False, dbcommit=True, reindex=True + ) + TemplatesSearch.flush_and_refresh() return template @pytest.fixture(scope="module") def templ_doc_public_sion_data(data): """Load template for a public document sion data.""" - return deepcopy(data.get('tmpl3')) + return deepcopy(data.get("tmpl3")) @pytest.fixture(scope="module") def templ_doc_public_sion( - app, org_sion, templ_doc_public_sion_data, - system_librarian_sion): + app, org_sion, templ_doc_public_sion_data, system_librarian_sion +): """Create template for a public document sion.""" template = Template.create( - data=templ_doc_public_sion_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(TemplatesSearch.Meta.index) + data=templ_doc_public_sion_data, delete_pid=False, dbcommit=True, reindex=True + ) + TemplatesSearch.flush_and_refresh() return template @pytest.fixture(scope="module") def templ_doc_private_sion_data(data): """Load template for a private document sion data.""" - return deepcopy(data.get('tmpl9')) + return deepcopy(data.get("tmpl9")) @pytest.fixture(scope="module") def templ_doc_private_sion( - app, org_sion, templ_doc_private_sion_data, - system_librarian_sion): + app, org_sion, templ_doc_private_sion_data, system_librarian_sion +): """Create template for a private document sion.""" template = Template.create( - data=templ_doc_private_sion_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(TemplatesSearch.Meta.index) + data=templ_doc_private_sion_data, delete_pid=False, dbcommit=True, reindex=True + ) + TemplatesSearch.flush_and_refresh() return template @pytest.fixture(scope="module") def templ_holdings_public_martigny_data(data): """Load template for a public holdings martigny data.""" - return deepcopy(data.get('tmpl4')) + return deepcopy(data.get("tmpl4")) @pytest.fixture(scope="module") def templ_holdings_public_martigny( - app, org_martigny, templ_holdings_public_martigny_data, - system_librarian_martigny): + app, org_martigny, templ_holdings_public_martigny_data, 
system_librarian_martigny +): """Load template for a public holdings martigny.""" template = Template.create( data=templ_holdings_public_martigny_data, delete_pid=False, dbcommit=True, - reindex=True) - flush_index(TemplatesSearch.Meta.index) + reindex=True, + ) + TemplatesSearch.flush_and_refresh() return template @pytest.fixture(scope="module") def templ_item_public_martigny_data(data): """Load template for a public item martigny data.""" - return deepcopy(data.get('tmpl5')) + return deepcopy(data.get("tmpl5")) @pytest.fixture(scope="module") def templ_item_public_martigny( - app, org_martigny, templ_item_public_martigny_data, - system_librarian_martigny): + app, org_martigny, templ_item_public_martigny_data, system_librarian_martigny +): """Load template for a public item martigny.""" template = Template.create( data=templ_item_public_martigny_data, delete_pid=False, dbcommit=True, - reindex=True) - flush_index(TemplatesSearch.Meta.index) + reindex=True, + ) + TemplatesSearch.flush_and_refresh() return template @pytest.fixture(scope="module") def templ_hold_public_martigny_data(data): """Load template for a public holding martigny data.""" - return deepcopy(data.get('tmpl4')) + return deepcopy(data.get("tmpl4")) @pytest.fixture(scope="module") def templ_hold_public_martigny( - app, org_martigny, templ_hold_public_martigny_data, - system_librarian_martigny): + app, org_martigny, templ_hold_public_martigny_data, system_librarian_martigny +): """Load template for a public holding martigny.""" template = Template.create( data=templ_hold_public_martigny_data, delete_pid=False, dbcommit=True, - reindex=True) - flush_index(TemplatesSearch.Meta.index) + reindex=True, + ) + TemplatesSearch.flush_and_refresh() return template @pytest.fixture(scope="module") def templ_patron_public_martigny_data(data): """Load template for a public patron martigny data.""" - return deepcopy(data.get('tmpl6')) + return deepcopy(data.get("tmpl6")) @pytest.fixture(scope="module") -def templ_patron_public_martigny(app, org_martigny, - templ_patron_public_martigny_data, - system_librarian_martigny): +def templ_patron_public_martigny( + app, org_martigny, templ_patron_public_martigny_data, system_librarian_martigny +): """Load template for a public patron martigny.""" template = Template.create( data=templ_patron_public_martigny_data, delete_pid=False, dbcommit=True, - reindex=True) - flush_index(TemplatesSearch.Meta.index) + reindex=True, + ) + TemplatesSearch.flush_and_refresh() return template @@ -1429,56 +1353,48 @@ def templ_patron_public_martigny(app, org_martigny, @pytest.fixture(scope="module") def local_field_martigny_data(local_fields): """Load Local field 1 data.""" - return deepcopy(local_fields.get('lofi1')) + return deepcopy(local_fields.get("lofi1")) @pytest.fixture(scope="module") -def local_field_martigny(app, org_martigny, document, - local_field_martigny_data): +def local_field_martigny(app, org_martigny, document, local_field_martigny_data): """Load local field.""" local_field = LocalField.create( - data=local_field_martigny_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(LocalFieldsSearch.Meta.index) + data=local_field_martigny_data, delete_pid=False, dbcommit=True, reindex=True + ) + LocalFieldsSearch.flush_and_refresh() return local_field @pytest.fixture(scope="module") def local_field_sion_data(local_fields): """Load Local field 2 data.""" - return deepcopy(local_fields.get('lofi2')) + return deepcopy(local_fields.get("lofi2")) @pytest.fixture(scope="module") def
local_field_sion(app, org_sion, document, local_field_sion_data): """Load local field.""" local_field = LocalField.create( - data=local_field_sion_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(LocalFieldsSearch.Meta.index) + data=local_field_sion_data, delete_pid=False, dbcommit=True, reindex=True + ) + LocalFieldsSearch.flush_and_refresh() return local_field @pytest.fixture(scope="module") def local_field_3_martigny_data(local_fields): """Load Local field 3 data.""" - return deepcopy(local_fields.get('lofi3')) + return deepcopy(local_fields.get("lofi3")) @pytest.fixture(scope="module") -def local_field_3_martigny(app, org_martigny, document, - local_field_3_martigny_data): +def local_field_3_martigny(app, org_martigny, document, local_field_3_martigny_data): """Load local field.""" local_field = LocalField.create( - data=local_field_3_martigny_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(LocalFieldsSearch.Meta.index) + data=local_field_3_martigny_data, delete_pid=False, dbcommit=True, reindex=True + ) + LocalFieldsSearch.flush_and_refresh() return local_field @@ -1487,9 +1403,9 @@ def local_field_3_martigny(app, org_martigny, document, def operation_log_data(data): """Load operation log record.""" # change the date for the right year - data = data.get('oplg1') - date = data['date'] - data['date'] = f'{datetime.now().year}{date[4:]}' + data = data.get("oplg1") + date = data["date"] + data["date"] = f"{datetime.now().year}{date[4:]}" return deepcopy(data) @@ -1503,40 +1419,30 @@ def operation_log(operation_log_data, item_lib_sion): @pytest.fixture(scope="module") def stats_cfg_martigny_data(data): """Load statistics configuration of martigny organisation.""" - return deepcopy(data.get('stats_cfg1')) + return deepcopy(data.get("stats_cfg1")) @pytest.fixture(scope="module") -def stats_cfg_martigny( - app, - stats_cfg_martigny_data, - system_librarian_martigny): +def stats_cfg_martigny(app, stats_cfg_martigny_data, system_librarian_martigny): """Create stats_cfg of martigny organisation.""" stats_cfg = StatConfiguration.create( - data=stats_cfg_martigny_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(StatsConfigurationSearch.Meta.index) + data=stats_cfg_martigny_data, delete_pid=False, dbcommit=True, reindex=True + ) + StatsConfigurationSearch.flush_and_refresh() yield stats_cfg @pytest.fixture(scope="module") def stats_cfg_sion_data(data): """Load statistics configuration of sion organisation.""" - return deepcopy(data.get('stats_cfg2')) + return deepcopy(data.get("stats_cfg2")) @pytest.fixture(scope="module") -def stats_cfg_sion( - app, - stats_cfg_sion_data, - system_librarian_sion): +def stats_cfg_sion(app, stats_cfg_sion_data, system_librarian_sion): """Create stats_cfg of sion organisation.""" stats_cfg = StatConfiguration.create( - data=stats_cfg_sion_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(StatsConfigurationSearch.Meta.index) + data=stats_cfg_sion_data, delete_pid=False, dbcommit=True, reindex=True + ) + StatsConfigurationSearch.flush_and_refresh() yield stats_cfg diff --git a/tests/fixtures/organisations.py b/tests/fixtures/organisations.py index bd6464a6d6..7c504e1773 100644 --- a/tests/fixtures/organisations.py +++ b/tests/fixtures/organisations.py @@ -20,33 +20,29 @@ from copy import deepcopy import pytest -from utils import flush_index from rero_ils.modules.circ_policies.api import CircPoliciesSearch, CircPolicy from rero_ils.modules.collections.api import Collection,
CollectionsSearch from rero_ils.modules.item_types.api import ItemType, ItemTypesSearch from rero_ils.modules.libraries.api import LibrariesSearch, Library from rero_ils.modules.locations.api import Location, LocationsSearch -from rero_ils.modules.organisations.api import Organisation, \ - OrganisationsSearch +from rero_ils.modules.organisations.api import Organisation, OrganisationsSearch from rero_ils.modules.patron_types.api import PatronType, PatronTypesSearch @pytest.fixture(scope="module") def org_martigny_data(data): """Martigny organisation data.""" - return (data.get('org1')) + return data.get("org1") @pytest.fixture(scope="module") def org_martigny(app, org_martigny_data): """Create Martigny organisation.""" org = Organisation.create( - data=org_martigny_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(OrganisationsSearch.Meta.index) + data=org_martigny_data, delete_pid=False, dbcommit=True, reindex=True + ) + OrganisationsSearch.flush_and_refresh() return org @@ -54,53 +50,47 @@ def org_martigny(app, org_martigny_data): def organisation_temp(app, org_martigny): """Scope function organisation data.""" org = Organisation.create( - data=org_martigny, - dbcommit=True, - delete_pid=True, - reindex=True) - flush_index(OrganisationsSearch.Meta.index) + data=org_martigny, dbcommit=True, delete_pid=True, reindex=True + ) + OrganisationsSearch.flush_and_refresh() return org @pytest.fixture(scope="module") def org_sion_data(data): """Sion organisation data.""" - return (data.get('org2')) + return data.get("org2") @pytest.fixture(scope="module") def org_sion(app, org_sion_data): """Create Sion organisation.""" org = Organisation.create( - data=org_sion_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(OrganisationsSearch.Meta.index) + data=org_sion_data, delete_pid=False, dbcommit=True, reindex=True + ) + OrganisationsSearch.flush_and_refresh() return org @pytest.fixture(scope="module") def lib_martigny_data(data): """Martigny-ville library data.""" - return deepcopy(data.get('lib1')) + return deepcopy(data.get("lib1")) @pytest.fixture(scope="module") def lib_martigny_bourg_data(data): """Martigny-bourg library data.""" - return deepcopy(data.get('lib7')) + return deepcopy(data.get("lib7")) @pytest.fixture(scope="module") def lib_martigny(app, org_martigny, lib_martigny_data): """Martigny-ville library.""" lib = Library.create( - data=lib_martigny_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(LibrariesSearch.Meta.index) + data=lib_martigny_data, delete_pid=False, dbcommit=True, reindex=True + ) + LibrariesSearch.flush_and_refresh() return lib @@ -108,215 +98,219 @@ def lib_martigny(app, org_martigny, lib_martigny_data): def lib_martigny_bourg(app, org_martigny, lib_martigny_bourg_data): """Martigny-bourg library.""" lib = Library.create( - data=lib_martigny_bourg_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(LibrariesSearch.Meta.index) + data=lib_martigny_bourg_data, delete_pid=False, dbcommit=True, reindex=True + ) + LibrariesSearch.flush_and_refresh() return lib @pytest.fixture(scope="module") def lib_saillon_data(data): """Saillon library data.""" - return deepcopy(data.get('lib6')) + return deepcopy(data.get("lib6")) @pytest.fixture(scope="module") def lib_saillon(app, org_martigny, lib_saillon_data): """Saillon library.""" lib = Library.create( - data=lib_saillon_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(LibrariesSearch.Meta.index) +
data=lib_saillon_data, delete_pid=False, dbcommit=True, reindex=True + ) + LibrariesSearch.flush_and_refresh() return lib @pytest.fixture(scope="module") def lib_saxon_data(data): """Saxon library data.""" - return deepcopy(data.get('lib2')) + return deepcopy(data.get("lib2")) @pytest.fixture(scope="module") def lib_saxon(app, org_martigny, lib_saxon_data): """Saxon library.""" lib = Library.create( - data=lib_saxon_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(LibrariesSearch.Meta.index) + data=lib_saxon_data, delete_pid=False, dbcommit=True, reindex=True + ) + LibrariesSearch.flush_and_refresh() return lib @pytest.fixture(scope="module") def lib_fully_data(data): """Fully library data.""" - return deepcopy(data.get('lib3')) + return deepcopy(data.get("lib3")) @pytest.fixture(scope="module") def lib_fully(app, org_martigny, lib_fully_data): """Fully library.""" lib = Library.create( - data=lib_fully_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(LibrariesSearch.Meta.index) + data=lib_fully_data, delete_pid=False, dbcommit=True, reindex=True + ) + LibrariesSearch.flush_and_refresh() return lib @pytest.fixture(scope="module") def lib_sion_data(data): """Sion library data.""" - return deepcopy(data.get('lib4')) + return deepcopy(data.get("lib4")) @pytest.fixture(scope="module") def lib_sion(app, org_sion, lib_sion_data): """Sion library.""" lib = Library.create( - data=lib_sion_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(LibrariesSearch.Meta.index) + data=lib_sion_data, delete_pid=False, dbcommit=True, reindex=True + ) + LibrariesSearch.flush_and_refresh() return lib @pytest.fixture(scope="module") def lib_aproz_data(data): """Aproz library data.""" - return deepcopy(data.get('lib5')) + return deepcopy(data.get("lib5")) @pytest.fixture(scope="module") def lib_aproz(app, org_sion, lib_aproz_data): """Aproz library.""" lib = Library.create( - data=lib_aproz_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(LibrariesSearch.Meta.index) + data=lib_aproz_data, delete_pid=False, dbcommit=True, reindex=True + ) + LibrariesSearch.flush_and_refresh() return lib @pytest.fixture(scope="module") def loc_public_martigny_data(data): """Load public space location for Martigny ville.""" - return deepcopy(data.get('loc1')) + return deepcopy(data.get("loc1")) @pytest.fixture(scope="module") def loc_public_martigny_bourg_data(data): """Load public space location for Martigny bourg.""" - return deepcopy(data.get('loc15')) + return deepcopy(data.get("loc15")) @pytest.fixture(scope="module") def loc_restricted_martigny_bourg_data(data): """Load restricted space location for Martigny bourg.""" - return deepcopy(data.get('loc16')) + return deepcopy(data.get("loc16")) @pytest.fixture(scope="module") def loc_public_saillon_data(data): """Load public space location for Saillon.""" - return deepcopy(data.get('loc14')) + return deepcopy(data.get("loc14")) @pytest.fixture(scope="module") def loc_restricted_martigny_data(data): """Load restricted space location for Martigny ville.""" - return deepcopy(data.get('loc2')) + return deepcopy(data.get("loc2")) @pytest.fixture(scope="module") def loc_public_saxon_data(data): """Load public space location for Saxon.""" - return deepcopy(data.get('loc3')) + return deepcopy(data.get("loc3")) @pytest.fixture(scope="module") def loc_restricted_saxon_data(data): """Load restricted space location for saxon.""" - return deepcopy(data.get('loc4')) + return deepcopy(data.get("loc4")) 
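
The change that repeats through every fixture in this patch, alongside the requoting and signature reflow, is the replacement of the standalone test helper `flush_index(XSearch.Meta.index)` with a `flush_and_refresh()` classmethod on each search class. A minimal sketch of what such a method could look like, assuming the search classes delegate to `invenio_search`'s `current_search_client`; the real implementation lives in rero_ils and is not shown in this diff:

    from invenio_search import current_search_client

    class LocationsSearchSketch:
        """Hypothetical stand-in for a rero_ils search class."""

        class Meta:
            index = "locations"  # assumed index name, referenced as Meta.index

        @classmethod
        def flush_and_refresh(cls):
            """Flush and refresh the index so records just written are searchable."""
            current_search_client.indices.flush(index=cls.Meta.index)
            current_search_client.indices.refresh(index=cls.Meta.index)

Moving the call onto the search class removes one `utils` import per fixture module and collapses to a single line once black reformats the surrounding code.
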
@pytest.fixture(scope="module") def loc_public_fully_data(data): """Load public space location for Fully.""" - return deepcopy(data.get('loc5')) + return deepcopy(data.get("loc5")) @pytest.fixture(scope="module") def loc_restricted_fully_data(data): """Load restricted space location for Fully.""" - return deepcopy(data.get('loc6')) + return deepcopy(data.get("loc6")) @pytest.fixture(scope="module") def loc_public_sion_data(data): """Load public space location for Sion.""" - return deepcopy(data.get('loc7')) + return deepcopy(data.get("loc7")) @pytest.fixture(scope="module") def loc_restricted_sion_data(data): """Load restricted space location for Sion.""" - return deepcopy(data.get('loc8')) + return deepcopy(data.get("loc8")) @pytest.fixture(scope="module") def loc_online_martigny_data(data): """Load online space location for Martigny.""" - return deepcopy(data.get('loc9')) + return deepcopy(data.get("loc9")) @pytest.fixture(scope="module") def loc_online_saxon_data(data): """Load online space location for Saxon.""" - return deepcopy(data.get('loc10')) + return deepcopy(data.get("loc10")) @pytest.fixture(scope="module") def loc_online_fully_data(data): """Load online space location for Fully.""" - return deepcopy(data.get('loc11')) + return deepcopy(data.get("loc11")) @pytest.fixture(scope="module") def loc_online_sion_data(data): """Load online space location for Sion.""" - return deepcopy(data.get('loc12')) + return deepcopy(data.get("loc12")) @pytest.fixture(scope="module") def loc_online_aproz_data(data): """Load online space location for Aproz.""" - return deepcopy(data.get('loc13')) - - -@pytest.fixture(scope="module") -def locations(loc_public_martigny, loc_restricted_martigny, - loc_public_saxon, loc_restricted_saxon, - loc_public_fully, loc_restricted_fully, - loc_public_sion, loc_restricted_sion, - loc_online_martigny, loc_online_saxon, - loc_online_fully, loc_online_sion, loc_online_aproz): + return deepcopy(data.get("loc13")) + + +@pytest.fixture(scope="module") +def locations( + loc_public_martigny, + loc_restricted_martigny, + loc_public_saxon, + loc_restricted_saxon, + loc_public_fully, + loc_restricted_fully, + loc_public_sion, + loc_restricted_sion, + loc_online_martigny, + loc_online_saxon, + loc_online_fully, + loc_online_sion, + loc_online_aproz, +): """Create all locations.""" return [ - loc_public_martigny, loc_restricted_martigny, - loc_public_saxon, loc_restricted_saxon, - loc_public_fully, loc_restricted_fully, - loc_public_sion, loc_restricted_sion, - loc_online_martigny, loc_online_saxon, - loc_online_fully, loc_online_sion, loc_online_aproz + loc_public_martigny, + loc_restricted_martigny, + loc_public_saxon, + loc_restricted_saxon, + loc_public_fully, + loc_restricted_fully, + loc_public_sion, + loc_restricted_sion, + loc_online_martigny, + loc_online_saxon, + loc_online_fully, + loc_online_sion, + loc_online_aproz, ] @@ -324,37 +318,37 @@ def locations(loc_public_martigny, loc_restricted_martigny, def loc_public_martigny(app, lib_martigny, loc_public_martigny_data): """Create public space location for Martigny ville.""" loc = Location.create( - data=loc_public_martigny_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(LocationsSearch.Meta.index) + data=loc_public_martigny_data, delete_pid=False, dbcommit=True, reindex=True + ) + LocationsSearch.flush_and_refresh() return loc @pytest.fixture(scope="module") -def loc_public_martigny_bourg( - app, lib_martigny_bourg, loc_public_martigny_bourg_data): +def loc_public_martigny_bourg(app, 
lib_martigny_bourg, loc_public_martigny_bourg_data): """Create public space location for Martigny bourg.""" loc = Location.create( data=loc_public_martigny_bourg_data, delete_pid=False, dbcommit=True, - reindex=True) - flush_index(LocationsSearch.Meta.index) + reindex=True, + ) + LocationsSearch.flush_and_refresh() return loc @pytest.fixture(scope="module") def loc_restricted_martigny_bourg( - app, lib_martigny_bourg, loc_restricted_martigny_bourg_data): + app, lib_martigny_bourg, loc_restricted_martigny_bourg_data +): """Create restricted space location for Martigny bourg.""" loc = Location.create( data=loc_restricted_martigny_bourg_data, delete_pid=False, dbcommit=True, - reindex=True) - flush_index(LocationsSearch.Meta.index) + reindex=True, + ) + LocationsSearch.flush_and_refresh() return loc @@ -362,11 +356,9 @@ def loc_restricted_martigny_bourg( def loc_public_saillon(app, lib_saillon, loc_public_saillon_data): """Create public space location for saillon.""" loc = Location.create( - data=loc_public_saillon_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(LocationsSearch.Meta.index) + data=loc_public_saillon_data, delete_pid=False, dbcommit=True, reindex=True + ) + LocationsSearch.flush_and_refresh() return loc @@ -374,11 +366,9 @@ def loc_public_saillon(app, lib_saillon, loc_public_saillon_data): def loc_restricted_martigny(app, lib_martigny, loc_restricted_martigny_data): """Create restricted space location for Martigny ville.""" loc = Location.create( - data=loc_restricted_martigny_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(LocationsSearch.Meta.index) + data=loc_restricted_martigny_data, delete_pid=False, dbcommit=True, reindex=True + ) + LocationsSearch.flush_and_refresh() return loc @@ -386,11 +376,9 @@ def loc_restricted_martigny(app, lib_martigny, loc_restricted_martigny_data): def loc_public_saxon(app, lib_saxon, loc_public_saxon_data): """Create public space location for saxon.""" loc = Location.create( - data=loc_public_saxon_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(LocationsSearch.Meta.index) + data=loc_public_saxon_data, delete_pid=False, dbcommit=True, reindex=True + ) + LocationsSearch.flush_and_refresh() return loc @@ -398,11 +386,9 @@ def loc_public_saxon(app, lib_saxon, loc_public_saxon_data): def loc_restricted_saxon(app, lib_saxon, loc_restricted_saxon_data): """Create restricted space location for saxon.""" loc = Location.create( - data=loc_restricted_saxon_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(LocationsSearch.Meta.index) + data=loc_restricted_saxon_data, delete_pid=False, dbcommit=True, reindex=True + ) + LocationsSearch.flush_and_refresh() return loc @@ -410,11 +396,9 @@ def loc_restricted_saxon(app, lib_saxon, loc_restricted_saxon_data): def loc_public_fully(app, lib_fully, loc_public_fully_data): """Create public space location for fully.""" loc = Location.create( - data=loc_public_fully_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(LocationsSearch.Meta.index) + data=loc_public_fully_data, delete_pid=False, dbcommit=True, reindex=True + ) + LocationsSearch.flush_and_refresh() return loc @@ -422,11 +406,9 @@ def loc_public_fully(app, lib_fully, loc_public_fully_data): def loc_restricted_fully(app, lib_fully, loc_restricted_fully_data): """Create restricted space location for fully.""" loc = Location.create( - data=loc_restricted_fully_data, - delete_pid=False, - dbcommit=True, - reindex=True) - 
flush_index(LocationsSearch.Meta.index) + data=loc_restricted_fully_data, delete_pid=False, dbcommit=True, reindex=True + ) + LocationsSearch.flush_and_refresh() return loc @@ -434,11 +416,9 @@ def loc_restricted_fully(app, lib_fully, loc_restricted_fully_data): def loc_public_sion(app, lib_sion, loc_public_sion_data): """Create public space location for sion.""" loc = Location.create( - data=loc_public_sion_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(LocationsSearch.Meta.index) + data=loc_public_sion_data, delete_pid=False, dbcommit=True, reindex=True + ) + LocationsSearch.flush_and_refresh() return loc @@ -446,11 +426,9 @@ def loc_public_sion(app, lib_sion, loc_public_sion_data): def loc_restricted_sion(app, lib_sion, loc_restricted_sion_data): """Create restricted space location for sion.""" loc = Location.create( - data=loc_restricted_sion_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(LocationsSearch.Meta.index) + data=loc_restricted_sion_data, delete_pid=False, dbcommit=True, reindex=True + ) + LocationsSearch.flush_and_refresh() return loc @@ -458,11 +436,9 @@ def loc_restricted_sion(app, lib_sion, loc_restricted_sion_data): def loc_online_martigny(app, lib_martigny, loc_online_martigny_data): """Create online space location for Martigny.""" loc = Location.create( - data=loc_online_martigny_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(LocationsSearch.Meta.index) + data=loc_online_martigny_data, delete_pid=False, dbcommit=True, reindex=True + ) + LocationsSearch.flush_and_refresh() return loc @@ -470,11 +446,9 @@ def loc_online_martigny(app, lib_martigny, loc_online_martigny_data): def loc_online_saxon(app, lib_saxon, loc_online_saxon_data): """Create online space location for Saxon.""" loc = Location.create( - data=loc_online_saxon_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(LocationsSearch.Meta.index) + data=loc_online_saxon_data, delete_pid=False, dbcommit=True, reindex=True + ) + LocationsSearch.flush_and_refresh() return loc @@ -482,11 +456,9 @@ def loc_online_saxon(app, lib_saxon, loc_online_saxon_data): def loc_online_fully(app, lib_fully, loc_online_fully_data): """Create online space location for Fully.""" loc = Location.create( - data=loc_online_fully_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(LocationsSearch.Meta.index) + data=loc_online_fully_data, delete_pid=False, dbcommit=True, reindex=True + ) + LocationsSearch.flush_and_refresh() return loc @@ -494,11 +466,9 @@ def loc_online_fully(app, lib_fully, loc_online_fully_data): def loc_online_sion(app, lib_sion, loc_online_sion_data): """Create online space location for Sion.""" loc = Location.create( - data=loc_online_sion_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(LocationsSearch.Meta.index) + data=loc_online_sion_data, delete_pid=False, dbcommit=True, reindex=True + ) + LocationsSearch.flush_and_refresh() return loc @@ -506,349 +476,339 @@ def loc_online_sion(app, lib_sion, loc_online_sion_data): def loc_online_aproz(app, lib_aproz, loc_online_aproz_data): """Create online space location for aproz.""" loc = Location.create( - data=loc_online_aproz_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(LocationsSearch.Meta.index) + data=loc_online_aproz_data, delete_pid=False, dbcommit=True, reindex=True + ) + LocationsSearch.flush_and_refresh() return loc @pytest.fixture(scope="function") def item_type_data_tmp(data): """Load standard 
item type of martigny.""" - return deepcopy(data.get('itty1')) + return deepcopy(data.get("itty1")) @pytest.fixture(scope="module") def item_type_standard_martigny_data(data): """Load standard item type of martigny.""" - return deepcopy(data.get('itty1')) + return deepcopy(data.get("itty1")) @pytest.fixture(scope="module") -def item_type_standard_martigny( - app, org_martigny, item_type_standard_martigny_data): +def item_type_standard_martigny(app, org_martigny, item_type_standard_martigny_data): """Create standard item type of martigny.""" itty = ItemType.create( data=item_type_standard_martigny_data, delete_pid=False, dbcommit=True, - reindex=True) - flush_index(ItemTypesSearch.Meta.index) + reindex=True, + ) + ItemTypesSearch.flush_and_refresh() return itty @pytest.fixture(scope="module") def item_type_on_site_martigny_data(data): """Load on-site item type of martigny.""" - return deepcopy(data.get('itty2')) + return deepcopy(data.get("itty2")) @pytest.fixture(scope="module") -def item_type_on_site_martigny( - app, org_martigny, item_type_on_site_martigny_data): +def item_type_on_site_martigny(app, org_martigny, item_type_on_site_martigny_data): """Create on_site item type of martigny.""" itty = ItemType.create( data=item_type_on_site_martigny_data, delete_pid=False, dbcommit=True, - reindex=True) - flush_index(ItemTypesSearch.Meta.index) + reindex=True, + ) + ItemTypesSearch.flush_and_refresh() return itty @pytest.fixture(scope="module") def item_type_specific_martigny_data(data): """Load specific item type of martigny.""" - return deepcopy(data.get('itty3')) + return deepcopy(data.get("itty3")) @pytest.fixture(scope="module") -def item_type_specific_martigny( - app, org_martigny, item_type_specific_martigny_data): +def item_type_specific_martigny(app, org_martigny, item_type_specific_martigny_data): """Create specific item type of martigny.""" itty = ItemType.create( data=item_type_specific_martigny_data, delete_pid=False, dbcommit=True, - reindex=True) - flush_index(ItemTypesSearch.Meta.index) + reindex=True, + ) + ItemTypesSearch.flush_and_refresh() return itty @pytest.fixture(scope="module") def item_type_regular_sion_data(data): """Load regular item type of sion.""" - return deepcopy(data.get('itty4')) + return deepcopy(data.get("itty4")) @pytest.fixture(scope="module") -def item_type_regular_sion( - app, org_sion, item_type_regular_sion_data): +def item_type_regular_sion(app, org_sion, item_type_regular_sion_data): """Create regular item type of sion.""" itty = ItemType.create( - data=item_type_regular_sion_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(ItemTypesSearch.Meta.index) + data=item_type_regular_sion_data, delete_pid=False, dbcommit=True, reindex=True + ) + ItemTypesSearch.flush_and_refresh() return itty @pytest.fixture(scope="module") def item_type_internal_sion_data(data): """Load internal item type of sion.""" - return deepcopy(data.get('itty5')) + return deepcopy(data.get("itty5")) @pytest.fixture(scope="module") -def item_type_internal_sion( - app, org_sion, item_type_internal_sion_data): +def item_type_internal_sion(app, org_sion, item_type_internal_sion_data): """Create internal item type of sion.""" itty = ItemType.create( - data=item_type_internal_sion_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(ItemTypesSearch.Meta.index) + data=item_type_internal_sion_data, delete_pid=False, dbcommit=True, reindex=True + ) + ItemTypesSearch.flush_and_refresh() return itty @pytest.fixture(scope="module") def 
item_type_particular_sion_data(data): """Load particular item type of sion.""" - return deepcopy(data.get('itty6')) + return deepcopy(data.get("itty6")) @pytest.fixture(scope="module") -def item_type_particular_sion( - app, org_sion, item_type_particular_sion_data): +def item_type_particular_sion(app, org_sion, item_type_particular_sion_data): """Create particular item type of sion.""" itty = ItemType.create( data=item_type_particular_sion_data, delete_pid=False, dbcommit=True, - reindex=True) - flush_index(ItemTypesSearch.Meta.index) + reindex=True, + ) + ItemTypesSearch.flush_and_refresh() return itty @pytest.fixture(scope="module") def item_type_online_martigny_data(data): """Load online item type of martigny.""" - return deepcopy(data.get('itty7')) + return deepcopy(data.get("itty7")) @pytest.fixture(scope="module") -def item_type_online_martigny( - app, org_martigny, item_type_online_martigny_data): +def item_type_online_martigny(app, org_martigny, item_type_online_martigny_data): """Create online item type of martigny.""" itty = ItemType.create( data=item_type_online_martigny_data, delete_pid=False, dbcommit=True, - reindex=True) - flush_index(ItemTypesSearch.Meta.index) + reindex=True, + ) + ItemTypesSearch.flush_and_refresh() return itty @pytest.fixture(scope="module") def item_type_online_sion_data(data): """Load online item type of sion.""" - return deepcopy(data.get('itty8')) + return deepcopy(data.get("itty8")) @pytest.fixture(scope="module") -def item_type_online_sion( - app, org_sion, item_type_online_sion_data): +def item_type_online_sion(app, org_sion, item_type_online_sion_data): """Create online item type of sion.""" itty = ItemType.create( - data=item_type_online_sion_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(ItemTypesSearch.Meta.index) + data=item_type_online_sion_data, delete_pid=False, dbcommit=True, reindex=True + ) + ItemTypesSearch.flush_and_refresh() return itty @pytest.fixture(scope="module") def item_type_missing_martigny_data(data): """Load missing item type of martigny.""" - return deepcopy(data.get('itty9')) + return deepcopy(data.get("itty9")) @pytest.fixture(scope="module") -def item_type_missing_martigny( - app, org_martigny, item_type_missing_martigny_data): +def item_type_missing_martigny(app, org_martigny, item_type_missing_martigny_data): """Create missing item type of martigny.""" itty = ItemType.create( data=item_type_missing_martigny_data, delete_pid=False, dbcommit=True, - reindex=True) - flush_index(ItemTypesSearch.Meta.index) + reindex=True, + ) + ItemTypesSearch.flush_and_refresh() return itty @pytest.fixture(scope="module") def patron_type_children_martigny_data(data): """Load children patron type of martigny.""" - return deepcopy(data.get('ptty1')) + return deepcopy(data.get("ptty1")) @pytest.fixture(scope="function") def patron_type_data_tmp(data): """Load children patron type of martigny scope function.""" - return deepcopy(data.get('ptty1')) + return deepcopy(data.get("ptty1")) @pytest.fixture(scope="function") def patron_type_tmp(db, org_martigny, patron_type_children_martigny_data): """Create scope function children patron type of martigny.""" ptty = PatronType.create( - data=patron_type_children_martigny_data, - dbcommit=True, - delete_pid=True) + data=patron_type_children_martigny_data, dbcommit=True, delete_pid=True + ) return ptty @pytest.fixture(scope="module") def patron_type_children_martigny( - app, org_martigny, patron_type_children_martigny_data): + app, org_martigny,
patron_type_children_martigny_data +): """Create children patron type of martigny.""" ptty = PatronType.create( data=patron_type_children_martigny_data, delete_pid=False, dbcommit=True, - reindex=True) - flush_index(PatronTypesSearch.Meta.index) + reindex=True, + ) + PatronTypesSearch.flush_and_refresh() return ptty @pytest.fixture(scope="module") def patron_type_adults_martigny_data(data): """Load adults patron type of martigny.""" - return deepcopy(data.get('ptty2')) + return deepcopy(data.get("ptty2")) @pytest.fixture(scope="module") -def patron_type_adults_martigny( - app, org_martigny, patron_type_adults_martigny_data): +def patron_type_adults_martigny(app, org_martigny, patron_type_adults_martigny_data): """Create adults patron type of martigny.""" ptty = PatronType.create( data=patron_type_adults_martigny_data, delete_pid=False, dbcommit=True, - reindex=True) - flush_index(PatronTypesSearch.Meta.index) + reindex=True, + ) + PatronTypesSearch.flush_and_refresh() return ptty @pytest.fixture(scope="module") def patron_type_youngsters_sion_data(data): """Load youngsters patron type of sion.""" - return deepcopy(data.get('ptty3')) + return deepcopy(data.get("ptty3")) @pytest.fixture(scope="module") -def patron_type_youngsters_sion( - app, org_sion, patron_type_youngsters_sion_data): +def patron_type_youngsters_sion(app, org_sion, patron_type_youngsters_sion_data): """Create youngsters patron type of sion.""" ptty = PatronType.create( data=patron_type_youngsters_sion_data, delete_pid=False, dbcommit=True, - reindex=True) - flush_index(PatronTypesSearch.Meta.index) + reindex=True, + ) + PatronTypesSearch.flush_and_refresh() return ptty @pytest.fixture(scope="module") def patron_type_grown_sion_data(data): """Load grown patron type of sion.""" - return deepcopy(data.get('ptty4')) + return deepcopy(data.get("ptty4")) @pytest.fixture(scope="module") -def patron_type_grown_sion( - app, org_sion, patron_type_grown_sion_data): +def patron_type_grown_sion(app, org_sion, patron_type_grown_sion_data): """Create grown patron type of sion.""" ptty = PatronType.create( - data=patron_type_grown_sion_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(PatronTypesSearch.Meta.index) + data=patron_type_grown_sion_data, delete_pid=False, dbcommit=True, reindex=True + ) + PatronTypesSearch.flush_and_refresh() return ptty @pytest.fixture(scope="function") def circ_policy_martigny_data_tmp(data): """Load default circ policy for organisation martigny scope function.""" - return deepcopy(data.get('cipo1')) + return deepcopy(data.get("cipo1")) @pytest.fixture(scope="module") def circ_policy_default_martigny_data(data): """Load default circ policy for organisation martigny.""" - return deepcopy(data.get('cipo1')) + return deepcopy(data.get("cipo1")) @pytest.fixture(scope="module") def circ_policy_default_martigny( - app, org_martigny, lib_martigny, lib_saxon, - circ_policy_default_martigny_data): + app, org_martigny, lib_martigny, lib_saxon, circ_policy_default_martigny_data +): """Create default circ policy for organisation martigny.""" cipo = CircPolicy.create( data=circ_policy_default_martigny_data, delete_pid=False, dbcommit=True, - reindex=True) - flush_index(CircPoliciesSearch.Meta.index) + reindex=True, + ) + CircPoliciesSearch.flush_and_refresh() return cipo @pytest.fixture(scope="module") def circ_policy_default_sion_data(data): """Load default circ policy for organisation sion.""" - return deepcopy(data.get('cipo4')) + return deepcopy(data.get("cipo4")) @pytest.fixture(scope="module")
-def circ_policy_default_sion( - app, org_sion, lib_sion, circ_policy_default_sion_data): +def circ_policy_default_sion(app, org_sion, lib_sion, circ_policy_default_sion_data): """Create default circ policy for organisation sion.""" cipo = CircPolicy.create( data=circ_policy_default_sion_data, delete_pid=False, dbcommit=True, - reindex=True) - flush_index(CircPoliciesSearch.Meta.index) + reindex=True, + ) + CircPoliciesSearch.flush_and_refresh() return cipo @pytest.fixture(scope="module") def circ_policy_short_martigny_data(data): """Load short circ policy for organisation martigny.""" - return deepcopy(data.get('cipo2')) + return deepcopy(data.get("cipo2")) @pytest.fixture(scope="module") def circ_policy_short_martigny( - app, - patron_type_children_martigny, - patron_type_adults_martigny, - item_type_standard_martigny, - item_type_specific_martigny, - circ_policy_short_martigny_data): + app, + patron_type_children_martigny, + patron_type_adults_martigny, + item_type_standard_martigny, + item_type_specific_martigny, + circ_policy_short_martigny_data, +): """Create short circ policy for organisation martigny.""" cipo = CircPolicy.create( data=circ_policy_short_martigny_data, delete_pid=False, dbcommit=True, - reindex=True) - flush_index(CircPoliciesSearch.Meta.index) + reindex=True, + ) + CircPoliciesSearch.flush_and_refresh() return cipo @@ -858,16 +818,17 @@ def circ_policy_temp_martigny_data(data): library martigny-ville. """ - return deepcopy(data.get('cipo3')) + return deepcopy(data.get("cipo3")) @pytest.fixture(scope="module") def circ_policy_temp_martigny( - app, - lib_martigny, - patron_type_adults_martigny, - item_type_on_site_martigny, - circ_policy_temp_martigny_data): + app, + lib_martigny, + patron_type_adults_martigny, + item_type_on_site_martigny, + circ_policy_temp_martigny_data, +): """Create temporary circ policy for organisation martigny. library martigny. 
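
The signature rewrites above and below all follow black's handling of long argument lists: a call or signature that fits within the 88-character line length is collapsed onto one line, while anything longer is exploded to one argument per line with a trailing "magic" comma. A hypothetical fixture showing both outcomes (the names here are invented for illustration):

    # Fits in 88 columns: black collapses the previously hand-wrapped call.
    cipo = CircPolicy.create(
        data=circ_policy_example_data, delete_pid=False, dbcommit=True, reindex=True
    )

    # Too long for one line: one argument per line, plus a trailing comma so that
    # adding a parameter later touches only a single line of any future diff.
    def circ_policy_example(
        app,
        patron_type_example,
        item_type_example,
        circ_policy_example_data,
    ):
        ...
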
@@ -876,87 +837,92 @@ def circ_policy_temp_martigny( data=circ_policy_temp_martigny_data, delete_pid=False, dbcommit=True, - reindex=True) - flush_index(CircPoliciesSearch.Meta.index) + reindex=True, + ) + CircPoliciesSearch.flush_and_refresh() return cipo @pytest.fixture(scope="module") def circ_policy_ebooks_martigny_data(data): """Load ebooks circ policy for organisation martigny.""" - return deepcopy(data.get('cipo5')) + return deepcopy(data.get("cipo5")) @pytest.fixture(scope="module") def circ_policy_ebooks_martigny( - app, - patron_type_adults_martigny, - patron_type_children_martigny, - item_type_online_martigny, - circ_policy_ebooks_martigny_data): + app, + patron_type_adults_martigny, + patron_type_children_martigny, + item_type_online_martigny, + circ_policy_ebooks_martigny_data, +): """Create ebooks circ policy for organisation martigny.""" cipo = CircPolicy.create( data=circ_policy_ebooks_martigny_data, delete_pid=False, dbcommit=True, - reindex=True) - flush_index(CircPoliciesSearch.Meta.index) + reindex=True, + ) + CircPoliciesSearch.flush_and_refresh() return cipo @pytest.fixture(scope="module") def circ_policy_ebooks_sion_data(data): """Load ebooks circ policy for organisation sion.""" - return deepcopy(data.get('cipo6')) + return deepcopy(data.get("cipo6")) @pytest.fixture(scope="module") def circ_policy_ebooks_sion( - app, - patron_type_youngsters_sion, - patron_type_grown_sion, - item_type_online_sion, - circ_policy_ebooks_sion_data): + app, + patron_type_youngsters_sion, + patron_type_grown_sion, + item_type_online_sion, + circ_policy_ebooks_sion_data, +): """Create ebooks circ policy for organisation sion.""" cipo = CircPolicy.create( - data=circ_policy_ebooks_sion_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(CircPoliciesSearch.Meta.index) + data=circ_policy_ebooks_sion_data, delete_pid=False, dbcommit=True, reindex=True + ) + CircPoliciesSearch.flush_and_refresh() return cipo @pytest.fixture(scope="module") def circ_policy_less_than_one_day_martigny_data(data): """Load short circ policy for organisation martigny.""" - return deepcopy(data.get('cipo7')) + return deepcopy(data.get("cipo7")) @pytest.fixture(scope="module") def circ_policy_less_than_one_day_martigny( - app, - patron_type_adults_martigny, - item_type_standard_martigny, - circ_policy_less_than_one_day_martigny_data): + app, + patron_type_adults_martigny, + item_type_standard_martigny, + circ_policy_less_than_one_day_martigny_data, +): """Create short circ policy for organisation martigny.""" cipo = CircPolicy.create( data=circ_policy_less_than_one_day_martigny_data, delete_pid=False, dbcommit=True, - reindex=True) - flush_index(CircPoliciesSearch.Meta.index) + reindex=True, + ) + CircPoliciesSearch.flush_and_refresh() return cipo @pytest.fixture(scope="module") def circulation_policies( - circ_policy_default_martigny, - circ_policy_default_sion, - circ_policy_short_martigny, - circ_policy_temp_martigny, - circ_policy_ebooks_martigny, - circ_policy_ebooks_sion): + circ_policy_default_martigny, + circ_policy_default_sion, + circ_policy_short_martigny, + circ_policy_temp_martigny, + circ_policy_ebooks_martigny, + circ_policy_ebooks_sion, +): """Load all circulation policies.""" return [ circ_policy_default_martigny, @@ -965,65 +931,64 @@ def circulation_policies( circ_policy_temp_martigny, circ_policy_ebooks_martigny, circ_policy_ebooks_sion, - circ_policy_less_than_one_day_martigny + circ_policy_less_than_one_day_martigny, ] @pytest.fixture(scope="module") def 
coll_martigny_1_data(data): """Load collection Martigny 1.""" - return deepcopy(data.get('coll_martigny_1')) + return deepcopy(data.get("coll_martigny_1")) @pytest.fixture(scope="module") def coll_martigny_1( - app, org_martigny, coll_martigny_1_data, - item_lib_martigny, item2_lib_martigny): + app, org_martigny, coll_martigny_1_data, item_lib_martigny, item2_lib_martigny +): """Create collection Martigny 1.""" coll = Collection.create( - data=coll_martigny_1_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(CollectionsSearch.Meta.index) + data=coll_martigny_1_data, delete_pid=False, dbcommit=True, reindex=True + ) + CollectionsSearch.flush_and_refresh() return coll @pytest.fixture(scope="module") def coll_sion_1_data(data): """Load collection Sion 1.""" - return deepcopy(data.get('coll_sion_1')) + return deepcopy(data.get("coll_sion_1")) @pytest.fixture(scope="module") def coll_sion_1( - app, org_sion, lib_sion, coll_sion_1_data, item_lib_sion, - item2_lib_sion): + app, org_sion, lib_sion, coll_sion_1_data, item_lib_sion, item2_lib_sion +): """Create collection Sion 1.""" coll = Collection.create( - data=coll_sion_1_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(CollectionsSearch.Meta.index) + data=coll_sion_1_data, delete_pid=False, dbcommit=True, reindex=True + ) + CollectionsSearch.flush_and_refresh() return coll @pytest.fixture(scope="module") def coll_saxon_1_data(data): """Load collection Saxon 1.""" - return deepcopy(data.get('coll_saxon_1')) + return deepcopy(data.get("coll_saxon_1")) @pytest.fixture(scope="module") def coll_saxon_1( - app, org_martigny, lib_saxon, coll_saxon_1_data, item2_lib_martigny, - item_lib_martigny): + app, + org_martigny, + lib_saxon, + coll_saxon_1_data, + item2_lib_martigny, + item_lib_martigny, +): """Create collection Saxon 1.""" coll = Collection.create( - data=coll_saxon_1_data, - delete_pid=False, - dbcommit=True, - reindex=True) - flush_index(CollectionsSearch.Meta.index) + data=coll_saxon_1_data, delete_pid=False, dbcommit=True, reindex=True + ) + CollectionsSearch.flush_and_refresh() return coll diff --git a/tests/fixtures/sip2.py b/tests/fixtures/sip2.py index 16c6a19c6d..34ea28a304 100644 --- a/tests/fixtures/sip2.py +++ b/tests/fixtures/sip2.py @@ -21,26 +21,30 @@ from copy import deepcopy import pytest -from utils import create_patron, create_selfcheck_terminal, \ - create_user_token, patch_expiration_date +from utils import ( + create_patron, + create_selfcheck_terminal, + create_user_token, + patch_expiration_date, +) @pytest.fixture(scope="module") def selfcheck_librarian_martigny_data(data): """Load Martigny librarian data.""" - return deepcopy(patch_expiration_date(data.get('ptrn2'))) + return deepcopy(patch_expiration_date(data.get("ptrn2"))) @pytest.fixture(scope="module") -def selfcheck_librarian_martigny(app, roles, loc_public_martigny, - librarian_martigny, - selfcheck_termial_martigny_data): +def selfcheck_librarian_martigny( + app, roles, loc_public_martigny, librarian_martigny, selfcheck_termial_martigny_data +): """Create selfcheck config and token for Martigny librarian.""" # create token for selfcheck terminal create_user_token( - client_name='selfcheck_token', + client_name="selfcheck_token", user=librarian_martigny.user, - access_token=selfcheck_termial_martigny_data.get('access_token') + access_token=selfcheck_termial_martigny_data.get("access_token"), ) # create config for selfcheck terminal @@ -52,24 +56,28 @@ def selfcheck_librarian_martigny(app, roles, 
loc_public_martigny, def selfcheck_termial_martigny_data(data): """Load Martigny librarian SIP2 account data.""" return { - 'name': 'sip2Test', - 'access_token': 'TESTACCESSTOKEN', - 'organisation_pid': 'org1', - 'library_pid': 'lib1', - 'location_pid': 'loc1', + "name": "sip2Test", + "access_token": "TESTACCESSTOKEN", + "organisation_pid": "org1", + "library_pid": "lib1", + "location_pid": "loc1", } @pytest.fixture(scope="module") def selfcheck_patron_martigny_data(data): """Load Martigny patron data.""" - return deepcopy(patch_expiration_date(data.get('ptrn6'))) + return deepcopy(patch_expiration_date(data.get("ptrn6"))) @pytest.fixture(scope="module") -def selfcheck_patron_martigny(app, roles, lib_martigny, - patron_type_children_martigny, - selfcheck_patron_martigny_data): +def selfcheck_patron_martigny( + app, + roles, + lib_martigny, + patron_type_children_martigny, + selfcheck_patron_martigny_data, +): """Create Martigny patron without sending reset password instruction.""" # create patron account data = selfcheck_patron_martigny_data diff --git a/tests/scheduler/conftest.py b/tests/scheduler/conftest.py index 68055af8cf..9a87d2ead4 100644 --- a/tests/scheduler/conftest.py +++ b/tests/scheduler/conftest.py @@ -31,28 +31,28 @@ def create_app(): return create_api -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def app_config(app_config): """Create temporary instance dir for each test.""" - app_config['CELERY_BROKER_URL'] = 'memory://' - app_config['RATELIMIT_STORAGE_URL'] = 'memory://' - app_config['CACHE_TYPE'] = 'simple' - app_config['SEARCH_ELASTIC_HOSTS'] = None - app_config['DB_VERSIONING'] = True - app_config['CELERY_CACHE_BACKEND'] = 'memory' - app_config['CELERY_RESULT_BACKEND'] = 'cache' - app_config['CELERY_TASK_ALWAYS_EAGER'] = True - app_config['CELERY_TASK_EAGER_PROPAGATES'] = True - app_config['CELERY_BEAT_SCHEDULER'] = 'rero_ils.schedulers.RedisScheduler' - app_config['CELERY_REDIS_SCHEDULER_URL'] = 'redis://localhost:6379/4' - app_config['CELERY_BEAT_SCHEDULE'] = { - 'bulk-indexer': { - 'task': 'rero_ils.modules.tasks.process_bulk_queue', - 'schedule': timedelta(minutes=60), - 'enabled': False + app_config["CELERY_BROKER_URL"] = "memory://" + app_config["RATELIMIT_STORAGE_URL"] = "memory://" + app_config["CACHE_TYPE"] = "simple" + app_config["SEARCH_ELASTIC_HOSTS"] = None + app_config["DB_VERSIONING"] = True + app_config["CELERY_CACHE_BACKEND"] = "memory" + app_config["CELERY_RESULT_BACKEND"] = "cache" + app_config["CELERY_TASK_ALWAYS_EAGER"] = True + app_config["CELERY_TASK_EAGER_PROPAGATES"] = True + app_config["CELERY_BEAT_SCHEDULER"] = "rero_ils.schedulers.RedisScheduler" + app_config["CELERY_REDIS_SCHEDULER_URL"] = "redis://localhost:6379/4" + app_config["CELERY_BEAT_SCHEDULE"] = { + "bulk-indexer": { + "task": "rero_ils.modules.tasks.process_bulk_queue", + "schedule": timedelta(minutes=60), + "enabled": False, } } - help_test_dir = join(dirname(__file__), 'data', 'help') - app_config['WIKI_CONTENT_DIR'] = help_test_dir - app_config['WIKI_UPLOAD_FOLDER'] = join(help_test_dir, 'files') + help_test_dir = join(dirname(__file__), "data", "help") + app_config["WIKI_CONTENT_DIR"] = help_test_dir + app_config["WIKI_UPLOAD_FOLDER"] = join(help_test_dir, "files") return app_config diff --git a/tests/scheduler/test_scheduler.py b/tests/scheduler/test_scheduler.py index 282e81a32b..af34d1cf01 100644 --- a/tests/scheduler/test_scheduler.py +++ b/tests/scheduler/test_scheduler.py @@ -20,98 +20,89 @@ from celery import current_app as current_celery from
click.testing import CliRunner -from rero_ils.schedulers import RedisScheduler, current_scheduler, \ - enable_tasks, info, init +from rero_ils.schedulers import ( + RedisScheduler, + current_scheduler, + enable_tasks, + info, + init, +) def test_scheduler(app): """Test scheduler.""" display_tasks = [ - ('- bulk-indexer = rero_ils.modules.tasks.process_bulk_queue ' - ' ' - 'kwargs:{} ' - 'options:{} ' - 'enabled:False') + ( + "- bulk-indexer = rero_ils.modules.tasks.process_bulk_queue " + " " + "kwargs:{} " + "options:{} " + "enabled:False" + ) ] # clean the REDIS DB current_scheduler._remove_db() # create the scheduled test tasks RedisScheduler(app=current_celery) assert current_scheduler.display_all() == display_tasks - assert not current_scheduler.get_entry_enabled('bulk-indexer') + assert not current_scheduler.get_entry_enabled("bulk-indexer") - entry = current_scheduler.get('bulk-indexer') + entry = current_scheduler.get("bulk-indexer") assert not current_scheduler.is_due(entry).is_due - current_scheduler.set_entry_enabled('bulk-indexer', True) - assert current_scheduler.get_entry_enabled('bulk-indexer') - enabled_task = display_tasks[0].replace( - 'enabled:False', - 'enabled:True' - ) + current_scheduler.set_entry_enabled("bulk-indexer", True) + assert current_scheduler.get_entry_enabled("bulk-indexer") + enabled_task = display_tasks[0].replace("enabled:False", "enabled:True") assert current_scheduler.display_all() == [enabled_task] - entry = current_scheduler.get('bulk-indexer') + entry = current_scheduler.get("bulk-indexer") assert not current_scheduler.is_due(entry).is_due - current_scheduler.remove('bulk-indexer') + current_scheduler.remove("bulk-indexer") assert current_scheduler.display_all() == [] assert current_scheduler.add_entry(entry, enable=False) assert current_scheduler.display_all() == display_tasks - entry.kwargs['test'] = 'test' + entry.kwargs["test"] = "test" current_scheduler.set(entry, enable=False) - test_task = display_tasks[0].replace( - 'kwargs:{}', - "kwargs:{'test': 'test'}" - ) + test_task = display_tasks[0].replace("kwargs:{}", "kwargs:{'test': 'test'}") assert current_scheduler.display_all() == [test_task] -def test_scheduler_cli(app, script_info): +def test_scheduler_cli(app): """Test scheduler cli.""" display_tasks = [ - ('- bulk-indexer = rero_ils.modules.tasks.process_bulk_queue ' - ' ' - 'kwargs:{} ' - 'options:{} ' - 'enabled:False') + ( + "- bulk-indexer = rero_ils.modules.tasks.process_bulk_queue " + " " + "kwargs:{} " + "options:{} " + "enabled:False" + ) ] runner = CliRunner() - res = runner.invoke(init, ['-r', '-v'], obj=script_info) - assert res.output.strip().split('\n') == [ - 'Reset REDIS scheduler!', - display_tasks[0] + res = runner.invoke(init, ["-r", "-v"]) + assert res.output.strip().split("\n") == [ + "Reset REDIS scheduler!", + display_tasks[0], ] - res = runner.invoke(init, ['-v'], obj=script_info) - assert res.output.strip().split('\n') == [ - 'Initalize REDIS scheduler!', - display_tasks[0] + res = runner.invoke(init, ["-v"]) + assert res.output.strip().split("\n") == [ + "Initalize REDIS scheduler!", + display_tasks[0], ] - res = runner.invoke(enable_tasks, ['-a', '-v'], obj=script_info) - enabled_task = display_tasks[0].replace( - 'enabled:False', - 'enabled:True' - ) - assert res.output.strip().split('\n') == [ - 'Scheduler tasks enabled:', - enabled_task - ] + res = runner.invoke(enable_tasks, ["-a", "-v"]) + enabled_task = display_tasks[0].replace("enabled:False", "enabled:True") + assert res.output.strip().split("\n") == 
["Scheduler tasks enabled:", enabled_task] - res = runner.invoke( - enable_tasks, ['-v', '-n bulk-indexer', '-n dummy', '-d'], - obj=script_info - ) - assert res.output.strip().split('\n') == [ - 'Scheduler tasks enabled:', + res = runner.invoke(enable_tasks, ["-v", "-n bulk-indexer", "-n dummy", "-d"]) + assert res.output.strip().split("\n") == [ + "Scheduler tasks enabled:", display_tasks[0], - 'Not found entry: dummy' + "Not found entry: dummy", ] - res = runner.invoke(info, [], obj=script_info) - assert res.output.strip().split('\n') == [ - 'Scheduled tasks:', - display_tasks[0] - ] + res = runner.invoke(info, []) + assert res.output.strip().split("\n") == ["Scheduled tasks:", display_tasks[0]] diff --git a/tests/ui/acq_accounts/test_acq_accounts_jsonresolver.py b/tests/ui/acq_accounts/test_acq_accounts_jsonresolver.py index 7756f42865..fd00a0b302 100644 --- a/tests/ui/acq_accounts/test_acq_accounts_jsonresolver.py +++ b/tests/ui/acq_accounts/test_acq_accounts_jsonresolver.py @@ -26,10 +26,10 @@ def test_acq_accounts_jsonresolver(acq_account_fiction_martigny): """Acquisition accounts resolver tests.""" - rec = Record.create({ - 'acq_account': {'$ref': 'https://bib.rero.ch/api/acq_accounts/acac1'} - }) - assert extracted_data_from_ref(rec.get('acq_account')) == 'acac1' + rec = Record.create( + {"acq_account": {"$ref": "https://bib.rero.ch/api/acq_accounts/acac1"}} + ) + assert extracted_data_from_ref(rec.get("acq_account")) == "acac1" # deleted record acq_account_fiction_martigny.delete() @@ -37,8 +37,8 @@ def test_acq_accounts_jsonresolver(acq_account_fiction_martigny): type(rec)(rec.replace_refs()).dumps() # non existing record - rec = Record.create({ - 'acq_account': {'$ref': 'https://bib.rero.ch/api/acq_accounts/n_e'} - }) + rec = Record.create( + {"acq_account": {"$ref": "https://bib.rero.ch/api/acq_accounts/n_e"}} + ) with pytest.raises(JsonRefError): type(rec)(rec.replace_refs()).dumps() diff --git a/tests/ui/acq_accounts/test_acq_accounts_mapping.py b/tests/ui/acq_accounts/test_acq_accounts_mapping.py index 3fd0c871f0..92029b2617 100644 --- a/tests/ui/acq_accounts/test_acq_accounts_mapping.py +++ b/tests/ui/acq_accounts/test_acq_accounts_mapping.py @@ -18,21 +18,18 @@ """Acquisition account Record mapping tests.""" from utils import get_mapping -from rero_ils.modules.acquisition.acq_accounts.api import AcqAccount, \ - AcqAccountsSearch +from rero_ils.modules.acquisition.acq_accounts.api import AcqAccount, AcqAccountsSearch -def test_acq_accounts_es_mapping(search, db, acq_account_fiction_martigny_data, - budget_2020_martigny, lib_martigny): +def test_acq_accounts_es_mapping( + search, db, acq_account_fiction_martigny_data, budget_2020_martigny, lib_martigny +): """Test acquisition account elasticsearch mapping.""" search = AcqAccountsSearch() mapping = get_mapping(search.Meta.index) assert mapping account = AcqAccount.create( - acq_account_fiction_martigny_data, - dbcommit=True, - reindex=True, - delete_pid=True + acq_account_fiction_martigny_data, dbcommit=True, reindex=True, delete_pid=True ) assert mapping == get_mapping(search.Meta.index) account.delete(force=True, dbcommit=True, delindex=True) diff --git a/tests/ui/acq_invoices/test_acq_invoices_jsonresolver.py b/tests/ui/acq_invoices/test_acq_invoices_jsonresolver.py index a1d3ad3de6..d20eb068e6 100644 --- a/tests/ui/acq_invoices/test_acq_invoices_jsonresolver.py +++ b/tests/ui/acq_invoices/test_acq_invoices_jsonresolver.py @@ -26,18 +26,18 @@ def test_acq_invoices_jsonresolver(acq_invoice_fiction_martigny): """Acquisition 
invoices resolver tests."""
-    rec = Record.create({
-        'acq_invoice': {'$ref': 'https://bib.rero.ch/api/acq_invoices/acin1'}
-    })
-    assert extracted_data_from_ref(rec.get('acq_invoice')) == 'acin1'
+    rec = Record.create(
+        {"acq_invoice": {"$ref": "https://bib.rero.ch/api/acq_invoices/acin1"}}
+    )
+    assert extracted_data_from_ref(rec.get("acq_invoice")) == "acin1"

     # deleted record
     acq_invoice_fiction_martigny.delete()
     with pytest.raises(JsonRefError):
         type(rec)(rec.replace_refs()).dumps()

     # non existing record
-    rec = Record.create({
-        'acq_invoice': {'$ref': 'https://bib.rero.ch/api/acq_invoices/n_e'}
-    })
+    rec = Record.create(
+        {"acq_invoice": {"$ref": "https://bib.rero.ch/api/acq_invoices/n_e"}}
+    )
     with pytest.raises(JsonRefError):
         type(rec)(rec.replace_refs()).dumps()
diff --git a/tests/ui/acq_invoices/test_acq_invoices_mapping.py b/tests/ui/acq_invoices/test_acq_invoices_mapping.py
index 49ad0d76ae..c5d121bc57 100644
--- a/tests/ui/acq_invoices/test_acq_invoices_mapping.py
+++ b/tests/ui/acq_invoices/test_acq_invoices_mapping.py
@@ -18,23 +18,30 @@
 """Acquisition invoice record mapping tests."""
 from utils import get_mapping

-from rero_ils.modules.acquisition.acq_invoices.api import AcquisitionInvoice, \
-    AcquisitionInvoicesSearch
+from rero_ils.modules.acquisition.acq_invoices.api import (
+    AcquisitionInvoice,
+    AcquisitionInvoicesSearch,
+)


 def test_acq_invoices_es_mapping(
-        es, db, lib_martigny, vendor_martigny, acq_invoice_fiction_martigny_data,
-        document, document_ref, acq_order_fiction_martigny,
-        acq_order_line_fiction_martigny, acq_order_line2_fiction_martigny):
+    es,
+    db,
+    lib_martigny,
+    vendor_martigny,
+    acq_invoice_fiction_martigny_data,
+    document,
+    document_ref,
+    acq_order_fiction_martigny,
+    acq_order_line_fiction_martigny,
+    acq_order_line2_fiction_martigny,
+):
     """Test acquisition invoice elasticsearch mapping."""
     search = AcquisitionInvoicesSearch()
     mapping = get_mapping(search.Meta.index)
     assert mapping
     invoice = AcquisitionInvoice.create(
-        acq_invoice_fiction_martigny_data,
-        dbcommit=True,
-        reindex=True,
-        delete_pid=True
+        acq_invoice_fiction_martigny_data, dbcommit=True, reindex=True, delete_pid=True
     )
     assert mapping == get_mapping(search.Meta.index)
     invoice.delete(force=True, dbcommit=True, delindex=True)
diff --git a/tests/ui/acq_order_lines/test_acq_order_lines_api.py b/tests/ui/acq_order_lines/test_acq_order_lines_api.py
index a69a1eff7f..da71ba8cf7 100644
--- a/tests/ui/acq_order_lines/test_acq_order_lines_api.py
+++ b/tests/ui/acq_order_lines/test_acq_order_lines_api.py
@@ -30,7 +30,7 @@ def test_order_line_properties(
     acq_order_fiction_martigny,
     acq_order_line_fiction_martigny,
     acq_account_fiction_martigny,
-    document
+    document,
 ):
     """Test order line properties."""
     order_line = acq_order_line_fiction_martigny
@@ -38,22 +38,20 @@ def test_order_line_properties(
     assert order_line.order_pid == acq_order_fiction_martigny.pid
     assert order_line.order.pid == acq_order_fiction_martigny.pid
     assert order_line.document.pid == document.pid
-    assert order_line.unreceived_quantity == order_line.get('quantity')
+    assert order_line.unreceived_quantity == order_line.get("quantity")


 def test_order_line_validation_extension(
-    acq_order_line_fiction_martigny_data,
-    acq_account_fiction_martigny,
-    ebook_1
+    acq_order_line_fiction_martigny_data, acq_account_fiction_martigny, ebook_1
 ):
     """Test order line validation extension."""
     data = deepcopy(acq_order_line_fiction_martigny_data)
-    del data['pid']
+    del data["pid"]

     # An order line cannot be linked to a harvested document
-    ebook_ref = get_ref_for_pid('doc', ebook_1.pid)
+    ebook_ref = get_ref_for_pid("doc", ebook_1.pid)
     test_data = deepcopy(data)
-    test_data['document']['$ref'] = ebook_ref
+    test_data["document"]["$ref"] = ebook_ref
     with pytest.raises(ValidationError) as error:
         AcqOrderLine.create(test_data, delete_pid=True)
-    assert 'Cannot link to an harvested document' in str(error.value)
+    assert "Cannot link to an harvested document" in str(error.value)
diff --git a/tests/ui/acq_order_lines/test_acq_order_lines_jsonresolver.py b/tests/ui/acq_order_lines/test_acq_order_lines_jsonresolver.py
index 33d78db057..77432b0f4b 100644
--- a/tests/ui/acq_order_lines/test_acq_order_lines_jsonresolver.py
+++ b/tests/ui/acq_order_lines/test_acq_order_lines_jsonresolver.py
@@ -24,25 +24,20 @@ from rero_ils.modules.utils import extracted_data_from_ref


-def test_acq_order_lines_jsonresolver(
-        document, acq_order_line_fiction_martigny):
+def test_acq_order_lines_jsonresolver(document, acq_order_line_fiction_martigny):
     """Acquisition order lines resolver tests."""
-    rec = Record.create({
-        'acq_order_line': {
-            '$ref': 'https://bib.rero.ch/api/acq_order_lines/acol1'
-        }
-    })
-    assert extracted_data_from_ref(rec.get('acq_order_line')) == 'acol1'
+    rec = Record.create(
+        {"acq_order_line": {"$ref": "https://bib.rero.ch/api/acq_order_lines/acol1"}}
+    )
+    assert extracted_data_from_ref(rec.get("acq_order_line")) == "acol1"

     # deleted record
     acq_order_line_fiction_martigny.delete()
     with pytest.raises(JsonRefError):
         type(rec)(rec.replace_refs()).dumps()

     # non existing record
-    rec = Record.create({
-        'acq_order_line': {
-            '$ref': 'https://bib.rero.ch/api/acq_order_lines/n_e'
-        }
-    })
+    rec = Record.create(
+        {"acq_order_line": {"$ref": "https://bib.rero.ch/api/acq_order_lines/n_e"}}
+    )
     with pytest.raises(JsonRefError):
         type(rec)(rec.replace_refs()).dumps()
diff --git a/tests/ui/acq_order_lines/test_acq_order_lines_mapping.py b/tests/ui/acq_order_lines/test_acq_order_lines_mapping.py
index 97ac06806d..66f69a12f9 100644
--- a/tests/ui/acq_order_lines/test_acq_order_lines_mapping.py
+++ b/tests/ui/acq_order_lines/test_acq_order_lines_mapping.py
@@ -19,13 +19,20 @@

 from utils import get_mapping

-from rero_ils.modules.acquisition.acq_order_lines.api import AcqOrderLine, \
-    AcqOrderLinesSearch
+from rero_ils.modules.acquisition.acq_order_lines.api import (
+    AcqOrderLine,
+    AcqOrderLinesSearch,
+)


 def test_acq_order_lines_es_mapping(
-    es, db, document, acq_account_fiction_martigny,
-    acq_order_fiction_martigny, acq_order_line_fiction_martigny_data):
+    es,
+    db,
+    document,
+    acq_account_fiction_martigny,
+    acq_order_fiction_martigny,
+    acq_order_line_fiction_martigny_data,
+):
     """Test acquisition order line elasticsearch mapping."""
     search = AcqOrderLinesSearch()
     mapping = get_mapping(search.Meta.index)
@@ -34,7 +41,7 @@ def test_acq_order_lines_es_mapping(
         acq_order_line_fiction_martigny_data,
         dbcommit=True,
         reindex=True,
-        delete_pid=True
+        delete_pid=True,
     )
     assert mapping == get_mapping(search.Meta.index)
     acq_line.delete(force=True, dbcommit=True, delindex=True)
diff --git a/tests/ui/acq_orders/test_acq_orders_api.py b/tests/ui/acq_orders/test_acq_orders_api.py
index a6f9d28471..b4b3dc6838 100644
--- a/tests/ui/acq_orders/test_acq_orders_api.py
+++ b/tests/ui/acq_orders/test_acq_orders_api.py
@@ -18,13 +18,15 @@

 """Acquisition orders API tests."""

-from utils import flush_index
-
-from rero_ils.modules.acquisition.acq_order_lines.models import \
-    AcqOrderLineNoteType, AcqOrderLineStatus
+from 
rero_ils.modules.acquisition.acq_order_lines.models import ( + AcqOrderLineNoteType, + AcqOrderLineStatus, +) from rero_ils.modules.acquisition.acq_orders.api import AcqOrdersSearch -from rero_ils.modules.acquisition.acq_orders.models import AcqOrderNoteType, \ - AcqOrderStatus +from rero_ils.modules.acquisition.acq_orders.models import ( + AcqOrderNoteType, + AcqOrderStatus, +) from rero_ils.modules.utils import get_ref_for_pid @@ -32,7 +34,7 @@ def test_order_properties( acq_order_fiction_martigny, acq_order_line_fiction_martigny, acq_order_line2_fiction_martigny, - yesterday + yesterday, ): """Test order properties.""" acol1 = acq_order_line_fiction_martigny @@ -44,46 +46,43 @@ def test_order_properties( assert acor.status == AcqOrderStatus.PENDING # ORDER LINES ------------------------------------------------------------- - assert len(list(acor.get_order_lines())) == \ - acor.get_order_lines(output='count') + assert len(list(acor.get_order_lines())) == acor.get_order_lines(output="count") # TOTAL AMOUNT ------------------------------------------------------------ - total_amount = acol1.get('total_amount') + acol2.get('total_amount') + total_amount = acol1.get("total_amount") + acol2.get("total_amount") assert acor.get_order_provisional_total_amount() == total_amount - acol1['is_cancelled'] = True + acol1["is_cancelled"] = True acol1.update(acol1, dbcommit=True, reindex=True) - assert acor.get_order_provisional_total_amount() == \ - acol2.get('total_amount') + assert acor.get_order_provisional_total_amount() == acol2.get("total_amount") # RESET CHANGES - acol1['is_cancelled'] = False + acol1["is_cancelled"] = False acol1.update(acol1, dbcommit=True, reindex=True) # ORDER DATE -------------------------------------------------------------- assert acor.order_date is None - acol2['order_date'] = yesterday.strftime('%Y-%m-%d') + acol2["order_date"] = yesterday.strftime("%Y-%m-%d") acol2.update(acol2, dbcommit=True, reindex=True) - assert acor.order_date == yesterday.strftime('%Y-%m-%d') + assert acor.order_date == yesterday.strftime("%Y-%m-%d") assert acor.status == AcqOrderStatus.ORDERED # reset changes - del acol2['order_date'] + del acol2["order_date"] acol2.update(acol2, dbcommit=True, reindex=True) # NOTES ------------------------------------------------------------------- - note_content = 'test note content' + note_content = "test note content" assert acor.get_note(AcqOrderNoteType.VENDOR) is None - acor.setdefault('notes', []).append({ - 'type': AcqOrderNoteType.VENDOR, - 'content': note_content - }) + acor.setdefault("notes", []).append( + {"type": AcqOrderNoteType.VENDOR, "content": note_content} + ) assert acor.get_note(AcqOrderNoteType.VENDOR) == note_content - del acor['notes'] + del acor["notes"] # Check that `related notes` content return the note from `acol1` assert any( - note[0]['type'] == AcqOrderLineNoteType.STAFF + note[0]["type"] == AcqOrderLineNoteType.STAFF and note[1] == acol1.__class__ and note[2] == acol1.pid for note in acor.get_related_notes() @@ -94,24 +93,19 @@ def test_order_properties( assert acor.item_received_quantity == 0 -def test_get_related_orders( - acq_order_fiction_martigny, - acq_order_fiction_saxon -): +def test_get_related_orders(acq_order_fiction_martigny, acq_order_fiction_saxon): """Test relations between acquisition order.""" acor_martigny = acq_order_fiction_martigny acor_saxon = acq_order_fiction_saxon - acor_saxon['previousVersion'] = { - '$ref': get_ref_for_pid('acor', acor_martigny.pid) - } + acor_saxon["previousVersion"] = {"$ref": 
get_ref_for_pid("acor", acor_martigny.pid)} # remove dynamic loaded key - acor_saxon.pop('account_statement', None) - acor_saxon.pop('status', None) - acor_saxon.pop('order_date', None) + acor_saxon.pop("account_statement", None) + acor_saxon.pop("status", None) + acor_saxon.pop("order_date", None) acor_saxon = acor_saxon.update(acor_saxon, dbcommit=True, reindex=True) - flush_index(AcqOrdersSearch.Meta.index) + AcqOrdersSearch.flush_and_refresh() related_acors = list(acor_martigny.get_related_orders()) assert related_acors == [acor_saxon] - assert acor_martigny.get_related_orders(output='count') == 1 - assert acor_martigny.get_links_to_me(True)['orders'] == [acor_saxon.pid] + assert acor_martigny.get_related_orders(output="count") == 1 + assert acor_martigny.get_links_to_me(True)["orders"] == [acor_saxon.pid] diff --git a/tests/ui/acq_orders/test_acq_orders_jsonresolver.py b/tests/ui/acq_orders/test_acq_orders_jsonresolver.py index 95951030ab..f795cf664b 100644 --- a/tests/ui/acq_orders/test_acq_orders_jsonresolver.py +++ b/tests/ui/acq_orders/test_acq_orders_jsonresolver.py @@ -26,18 +26,18 @@ def test_acq_orders_jsonresolver(acq_order_fiction_martigny): """Acquisition orders resolver tests.""" - rec = Record.create({ - 'acq_order': {'$ref': 'https://bib.rero.ch/api/acq_orders/acor1'} - }) - assert extracted_data_from_ref(rec.get('acq_order')) == 'acor1' + rec = Record.create( + {"acq_order": {"$ref": "https://bib.rero.ch/api/acq_orders/acor1"}} + ) + assert extracted_data_from_ref(rec.get("acq_order")) == "acor1" # deleted record acq_order_fiction_martigny.delete() with pytest.raises(JsonRefError): type(rec)(rec.replace_refs()).dumps() # non existing record - rec = Record.create({ - 'acq_order': {'$ref': 'https://bib.rero.ch/api/acq_orders/n_e'} - }) + rec = Record.create( + {"acq_order": {"$ref": "https://bib.rero.ch/api/acq_orders/n_e"}} + ) with pytest.raises(JsonRefError): type(rec)(rec.replace_refs()).dumps() diff --git a/tests/ui/acq_orders/test_acq_orders_mapping.py b/tests/ui/acq_orders/test_acq_orders_mapping.py index 2d6dc393f3..bfc69651c4 100644 --- a/tests/ui/acq_orders/test_acq_orders_mapping.py +++ b/tests/ui/acq_orders/test_acq_orders_mapping.py @@ -18,21 +18,18 @@ """Acquisition invoice record mapping tests.""" from utils import get_mapping -from rero_ils.modules.acquisition.acq_orders.api import AcqOrder, \ - AcqOrdersSearch +from rero_ils.modules.acquisition.acq_orders.api import AcqOrder, AcqOrdersSearch -def test_acq_orders_es_mapping(search, db, lib_martigny, vendor_martigny, - acq_order_fiction_martigny_data): +def test_acq_orders_es_mapping( + search, db, lib_martigny, vendor_martigny, acq_order_fiction_martigny_data +): """Test acquisition orders elasticsearch mapping.""" search = AcqOrdersSearch() mapping = get_mapping(search.Meta.index) assert mapping invoice = AcqOrder.create( - acq_order_fiction_martigny_data, - dbcommit=True, - reindex=True, - delete_pid=True + acq_order_fiction_martigny_data, dbcommit=True, reindex=True, delete_pid=True ) assert mapping == get_mapping(search.Meta.index) invoice.delete(force=True, dbcommit=True, delindex=True) diff --git a/tests/ui/acq_receipt_lines/test_acq_receipt_lines_api.py b/tests/ui/acq_receipt_lines/test_acq_receipt_lines_api.py index 3c89492262..2c1a357f6a 100644 --- a/tests/ui/acq_receipt_lines/test_acq_receipt_lines_api.py +++ b/tests/ui/acq_receipt_lines/test_acq_receipt_lines_api.py @@ -18,14 +18,16 @@ """Acquisition receipts line API tests.""" -from rero_ils.modules.acquisition.acq_receipt_lines.models import \ 
- AcqReceiptLineNoteType +from rero_ils.modules.acquisition.acq_receipt_lines.models import AcqReceiptLineNoteType -def test_receipt_lines_properties(acq_receipt_fiction_martigny, - acq_receipt_line_1_fiction_martigny, - acq_order_line_fiction_martigny, - lib_martigny, acq_account_fiction_martigny): +def test_receipt_lines_properties( + acq_receipt_fiction_martigny, + acq_receipt_line_1_fiction_martigny, + acq_order_line_fiction_martigny, + lib_martigny, + acq_account_fiction_martigny, +): """Test receipt line properties.""" acrl1 = acq_receipt_line_1_fiction_martigny acre = acq_receipt_fiction_martigny @@ -36,7 +38,7 @@ def test_receipt_lines_properties(acq_receipt_fiction_martigny, # ORDER LINE -------------------------------------------------------------- assert acrl1.order_line_pid == acq_order_line_fiction_martigny.pid acol = acq_order_line_fiction_martigny - assert acol.receipt_date.strftime('%Y-%m-%d') == acrl1.get('receipt_date') + assert acol.receipt_date.strftime("%Y-%m-%d") == acrl1.get("receipt_date") # NOTE -------------------------------------------------------------------- assert acrl1.get_note(AcqReceiptLineNoteType.STAFF) @@ -48,7 +50,7 @@ def test_receipt_lines_properties(acq_receipt_fiction_martigny, # amount, exchange rate and VAT rate # Starting situation is : qte=1, amount=1000, vat=0, exchange=0 assert acrl1.total_amount == 1000 - acrl1['vat_rate'] = 6.2 # 1000 * 0.062 --> 62 + acrl1["vat_rate"] = 6.2 # 1000 * 0.062 --> 62 assert acrl1.total_amount == 1062 - acrl1['vat_rate'] = 100 # 1000 * 1.00 --> 1000 + acrl1["vat_rate"] = 100 # 1000 * 1.00 --> 1000 assert acrl1.total_amount == 2000 diff --git a/tests/ui/acq_receipt_lines/test_acq_receipt_lines_jsonresolver.py b/tests/ui/acq_receipt_lines/test_acq_receipt_lines_jsonresolver.py index 6e8083675d..7b8b82fc2c 100644 --- a/tests/ui/acq_receipt_lines/test_acq_receipt_lines_jsonresolver.py +++ b/tests/ui/acq_receipt_lines/test_acq_receipt_lines_jsonresolver.py @@ -27,16 +27,16 @@ def test_acq_receipt_lines_jsonresolver(acq_receipt_line_1_fiction_martigny): """Acquisition receipt lines resolver tests.""" - data = {'$ref': 'https://bib.rero.ch/api/acq_receipt_lines/acrl1'} - rec = Record.create({'acq_receipt_line': data}) - assert extracted_data_from_ref(rec.get('acq_receipt_line')) == 'acrl1' + data = {"$ref": "https://bib.rero.ch/api/acq_receipt_lines/acrl1"} + rec = Record.create({"acq_receipt_line": data}) + assert extracted_data_from_ref(rec.get("acq_receipt_line")) == "acrl1" # deleted record acq_receipt_line_1_fiction_martigny.delete() with pytest.raises(JsonRefError): type(rec)(rec.replace_refs()).dumps() # non existing record - data = {'$ref': 'https://bib.rero.ch/api/acq_receipt_lines/n_e'} - rec = Record.create({'acq_receipt_line': data}) + data = {"$ref": "https://bib.rero.ch/api/acq_receipt_lines/n_e"} + rec = Record.create({"acq_receipt_line": data}) with pytest.raises(JsonRefError): type(rec)(rec.replace_refs()).dumps() diff --git a/tests/ui/acq_receipt_lines/test_acq_receipt_lines_mapping.py b/tests/ui/acq_receipt_lines/test_acq_receipt_lines_mapping.py index 9c0ab13f65..c54749fb42 100644 --- a/tests/ui/acq_receipt_lines/test_acq_receipt_lines_mapping.py +++ b/tests/ui/acq_receipt_lines/test_acq_receipt_lines_mapping.py @@ -19,14 +19,19 @@ """Acquisition receipt line record mapping tests.""" from utils import get_mapping -from rero_ils.modules.acquisition.acq_receipt_lines.api import \ - AcqReceiptLine, AcqReceiptLinesSearch +from rero_ils.modules.acquisition.acq_receipt_lines.api import ( + AcqReceiptLine, 
+    AcqReceiptLinesSearch,
+)


 def test_acq_receipt_lines_es_mapping(
-    search, db, lib_martigny, vendor_martigny,
+    search,
+    db,
+    lib_martigny,
+    vendor_martigny,
     acq_receipt_line_1_fiction_martigny,
-    acq_receipt_line_1_fiction_martigny_data
+    acq_receipt_line_1_fiction_martigny_data,
 ):
     """Test acquisition receipt lines elasticsearch mapping."""
     search = AcqReceiptLinesSearch()
@@ -36,7 +41,7 @@ def test_acq_receipt_lines_es_mapping(
         acq_receipt_line_1_fiction_martigny_data,
         dbcommit=True,
         reindex=True,
-        delete_pid=True
+        delete_pid=True,
     )
     assert mapping == get_mapping(search.Meta.index)
     receipt.delete(force=True, dbcommit=True, delindex=True)
diff --git a/tests/ui/acq_receipts/test_acq_receipts_api.py b/tests/ui/acq_receipts/test_acq_receipts_api.py
index 4e8cd9efb3..6b517d6b7b 100644
--- a/tests/ui/acq_receipts/test_acq_receipts_api.py
+++ b/tests/ui/acq_receipts/test_acq_receipts_api.py
@@ -25,28 +25,31 @@

 def test_receipts_custom_validation(
-    acq_order_fiction_martigny, acq_account_fiction_martigny,
-    acq_receipt_fiction_martigny, acq_receipt_fiction_martigny_data
+    acq_order_fiction_martigny,
+    acq_account_fiction_martigny,
+    acq_receipt_fiction_martigny,
+    acq_receipt_fiction_martigny_data,
 ):
     """Test receipts custom validations."""
     acre1 = acq_receipt_fiction_martigny
     # TEST ADJUSTMENT AMOUNT WITH BAD DECIMALS --------------------------------
-    acre1['amount_adjustments'][0]['amount'] = 1.000003
+    acre1["amount_adjustments"][0]["amount"] = 1.000003
     with pytest.raises(ValidationError) as err:
         acre1 = acre1.update(acre1, dbcommit=True, reindex=True)
-    assert 'must be multiple of 0.01' in str(err)
+    assert "must be multiple of 0.01" in str(err)

-    acre1['amount_adjustments'][0]['amount'] = -99999.990
+    acre1["amount_adjustments"][0]["amount"] = -99999.990
     acre1 = acre1.update(acre1, dbcommit=True, reindex=True)
-    acre1.update(
-        acq_receipt_fiction_martigny_data, dbcommit=True, reindex=True)
+    acre1.update(acq_receipt_fiction_martigny_data, dbcommit=True, reindex=True)


 def test_receipts_properties(
     acq_order_fiction_martigny,
-    acq_account_fiction_martigny, acq_receipt_fiction_martigny,
-    acq_receipt_line_1_fiction_martigny, acq_receipt_line_2_fiction_martigny,
-    lib_martigny
+    acq_account_fiction_martigny,
+    acq_receipt_fiction_martigny,
+    acq_receipt_line_1_fiction_martigny,
+    acq_receipt_line_2_fiction_martigny,
+    lib_martigny,
 ):
     """Test receipt properties."""
     acre1 = acq_receipt_fiction_martigny
@@ -63,20 +66,22 @@ def test_receipts_properties(
     # EXCHANGE_RATE -----------------------------------------------------------
     assert acre1.exchange_rate
     # AMOUNT ------------------------------------------------------------------
-    adj_amount = sum(adj.get('amount') for adj in acre1.amount_adjustments)
+    adj_amount = sum(adj.get("amount") for adj in acre1.amount_adjustments)
     wished_amount = sum([acrl1.total_amount, acrl2.total_amount, adj_amount])
     assert acre1.total_amount == wished_amount
     # QUANTITY ----------------------------------------------------------------
     assert acre1.total_item_quantity == sum([acrl1.quantity, acrl2.quantity])
     # ACQ ACCOUNT -------------------------------------------------------------
     for amount in acre1.amount_adjustments:
-        assert extracted_data_from_ref(amount.get('acq_account')) == \
-            acq_account_fiction_martigny.pid
+        assert (
+            extracted_data_from_ref(amount.get("acq_account"))
+            == acq_account_fiction_martigny.pid
+        )
     # RECEIPT LINES -----------------------------------------------------------
     lines = [acrl1, acrl2]
     assert all(line in lines for line in 
acre1.get_receipt_lines()) lines_pid = [line.pid for line in lines] - assert all(pid in lines_pid for pid in acre1.get_receipt_lines('pids')) + assert all(pid in lines_pid for pid in acre1.get_receipt_lines("pids")) - assert acre1.get_receipt_lines('count') == 2 + assert acre1.get_receipt_lines("count") == 2 diff --git a/tests/ui/acq_receipts/test_acq_receipts_jsonresolver.py b/tests/ui/acq_receipts/test_acq_receipts_jsonresolver.py index 75c1246b59..1ceaffd41c 100644 --- a/tests/ui/acq_receipts/test_acq_receipts_jsonresolver.py +++ b/tests/ui/acq_receipts/test_acq_receipts_jsonresolver.py @@ -27,16 +27,16 @@ def test_acq_receipts_jsonresolver(acq_receipt_fiction_martigny): """Acquisition receipts resolver tests.""" - data = {'$ref': 'https://bib.rero.ch/api/acq_receipts/acre1'} - rec = Record.create({'acq_receipt': data}) - assert extracted_data_from_ref(rec.get('acq_receipt')) == 'acre1' + data = {"$ref": "https://bib.rero.ch/api/acq_receipts/acre1"} + rec = Record.create({"acq_receipt": data}) + assert extracted_data_from_ref(rec.get("acq_receipt")) == "acre1" # deleted record acq_receipt_fiction_martigny.delete() with pytest.raises(JsonRefError): type(rec)(rec.replace_refs()).dumps() # non existing record - data = {'$ref': 'https://bib.rero.ch/api/acq_receipts/n_e'} - rec = Record.create({'acq_receipt': data}) + data = {"$ref": "https://bib.rero.ch/api/acq_receipts/n_e"} + rec = Record.create({"acq_receipt": data}) with pytest.raises(JsonRefError): type(rec)(rec.replace_refs()).dumps() diff --git a/tests/ui/acq_receipts/test_acq_receipts_mapping.py b/tests/ui/acq_receipts/test_acq_receipts_mapping.py index dfe1099239..e2ea556c50 100644 --- a/tests/ui/acq_receipts/test_acq_receipts_mapping.py +++ b/tests/ui/acq_receipts/test_acq_receipts_mapping.py @@ -19,23 +19,24 @@ """Acquisition receipt record mapping tests.""" from utils import get_mapping -from rero_ils.modules.acquisition.acq_receipts.api import AcqReceipt, \ - AcqReceiptsSearch +from rero_ils.modules.acquisition.acq_receipts.api import AcqReceipt, AcqReceiptsSearch -def test_acq_receipts_es_mapping(search, db, lib_martigny, vendor_martigny, - acq_order_fiction_martigny, - acq_account_fiction_martigny, - acq_receipt_fiction_martigny_data): +def test_acq_receipts_es_mapping( + search, + db, + lib_martigny, + vendor_martigny, + acq_order_fiction_martigny, + acq_account_fiction_martigny, + acq_receipt_fiction_martigny_data, +): """Test acquisition receipts elasticsearch mapping.""" search = AcqReceiptsSearch() mapping = get_mapping(search.Meta.index) assert mapping receipt = AcqReceipt.create( - acq_receipt_fiction_martigny_data, - dbcommit=True, - reindex=True, - delete_pid=True + acq_receipt_fiction_martigny_data, dbcommit=True, reindex=True, delete_pid=True ) assert mapping == get_mapping(search.Meta.index) receipt.delete(force=True, dbcommit=True, delindex=True) diff --git a/tests/ui/apiharvester/test_apiharvester_utils.py b/tests/ui/apiharvester/test_apiharvester_utils.py index 0f6ee2b747..bee9fe2590 100644 --- a/tests/ui/apiharvester/test_apiharvester_utils.py +++ b/tests/ui/apiharvester/test_apiharvester_utils.py @@ -26,85 +26,89 @@ from rero_ils.modules.apiharvester.utils import api_source, get_records -@mock.patch('requests.get') +@mock.patch("requests.get") def test_api_source(mock_get, app, capsys): """Test api source creation update.""" - msg = api_source(name='test', url='http://test.com') - assert msg == 'Added' + msg = api_source(name="test", url="http://test.com") + assert msg == "Added" - mock_get.return_value = 
mock_response(json_data={
-        'hits': {
-            'hits': [
-                {'metadata': {'pid': 'test1', 'data': 'test data 1'}},
-                {'metadata': {'pid': 'test2', 'data': 'test data 2'}}
-            ],
-            'total': {
-                'value': 2
-            },
-            'links': {
-                'self': 'http:/test.com'
+    mock_get.return_value = mock_response(
+        json_data={
+            "hits": {
+                "hits": [
+                    {"metadata": {"pid": "test1", "data": "test data 1"}},
+                    {"metadata": {"pid": "test2", "data": "test data 2"}},
+                ],
+                "total": {"value": 2},
+                "links": {"self": "http:/test.com"},
             }
         }
-    })
-    harvest_records(name='test', url='http://test.com', signals=False,
-                    size=1000, max_results=1000)
+    )
+    harvest_records(
+        name="test", url="http://test.com", signals=False, size=1000, max_results=1000
+    )
     out, err = capsys.readouterr()
-    assert out.strip() == 'API records found: 2'
+    assert out.strip() == "API records found: 2"

-    msg = api_source(name='test', url='http://test.com', size=1000)
-    assert msg == 'Not Updated'
-    msg = api_source(name='test', url='http://test.com', mimetype='mimetype',
-                     size=1000, comment='comment', update=True)
-    assert msg == ('Updated: url:http://test.com, mimetype:mimetype,'
-                   ' size:1000, comment:comment')
+    msg = api_source(name="test", url="http://test.com", size=1000)
+    assert msg == "Not Updated"
+    msg = api_source(
+        name="test",
+        url="http://test.com",
+        mimetype="mimetype",
+        size=1000,
+        comment="comment",
+        update=True,
+    )
+    assert msg == (
+        "Updated: url:http://test.com, mimetype:mimetype," " size:1000, comment:comment"
+    )


-@mock.patch('requests.get')
+@mock.patch("requests.get")
 def test_get_records(mock_get, app, capsys):
     """Test getting records."""
-    mock_get.return_value = mock_response(json_data={
-        'hits': {
-            'hits': [
-                {'metadata': {'pid': 'test1', 'data': 'test data 1'}},
-                {'metadata': {'pid': 'test2', 'data': 'test data 2'}}
-            ],
-            'total': {
-                'value': 2
-            },
-            'links': {
-                'self': 'http:/test.com'
+    mock_get.return_value = mock_response(
+        json_data={
+            "hits": {
+                "hits": [
+                    {"metadata": {"pid": "test1", "data": "test data 1"}},
+                    {"metadata": {"pid": "test2", "data": "test data 2"}},
+                ],
+                "total": {"value": 2},
+                "links": {"self": "http:/test.com"},
             }
         }
-    })
-    for next_url, data in get_records(url='http://test.com', name='test',
-                                      signals=False):
+    )
+    for next_url, data in get_records(
+        url="http://test.com", name="test", signals=False
+    ):
         assert next_url
         assert data == [
-            {'data': 'test data 1', 'pid': 'test1'},
-            {'data': 'test data 2', 'pid': 'test2'}
+            {"data": "test data 1", "pid": "test1"},
+            {"data": "test data 2", "pid": "test2"},
         ]
         out, err = capsys.readouterr()
-        assert out.strip() == 'API records found: 2'
-    mock_get.return_value = mock_response(json_data={
-        'hits': {
-            'hits': [
-                {'metadata': {'pid': 'test3', 'data': 'test data 3'}},
-                {'metadata': {'pid': 'test4', 'data': 'test data 4'}}
-            ],
-            'total': {
-                'value': 2
-            },
-            'links': {
-                'self': 'http:/test.com'
+        assert out.strip() == "API records found: 2"
+    mock_get.return_value = mock_response(
+        json_data={
+            "hits": {
+                "hits": [
+                    {"metadata": {"pid": "test3", "data": "test data 3"}},
+                    {"metadata": {"pid": "test4", "data": "test data 4"}},
+                ],
+                "total": {"value": 2},
+                "links": {"self": "http:/test.com"},
             }
         }
-    })
-    for next_url, data in get_records(url='http://test.com', name='test',
-                                      from_date='1970-01-01', signals=False):
+    )
+    for next_url, data in get_records(
+        url="http://test.com", name="test", from_date="1970-01-01", signals=False
+    ):
         assert next_url
         assert data == [
-            {'data': 'test data 3', 'pid': 'test3'},
-            {'data': 'test data 4', 'pid': 'test4'}
+            {"data": "test data 3", "pid": "test3"},
+            {"data": "test data 4", "pid": "test4"},
         ]
         out, err = capsys.readouterr()
-        assert out.strip() == 'API records found: 2'
+        assert out.strip() == "API records found: 2"
diff --git a/tests/ui/budgets/test_budgets_api.py b/tests/ui/budgets/test_budgets_api.py
index ae32b91af6..b12a0e5f54 100644
--- a/tests/ui/budgets/test_budgets_api.py
+++ b/tests/ui/budgets/test_budgets_api.py
@@ -16,7 +16,6 @@
 # along with this program. If not, see <http://www.gnu.org/licenses/>.

 """Budget API tests."""
-from utils import flush_index

 from rero_ils.modules.acquisition.acq_accounts.api import AcqAccountsSearch
 from rero_ils.modules.acquisition.budgets.api import BudgetsSearch
@@ -24,30 +23,27 @@

 def test_budget_properties(budget_2017_martigny):
     """Test budget properties."""
-    assert budget_2017_martigny.name == budget_2017_martigny.get('name')
+    assert budget_2017_martigny.name == budget_2017_martigny.get("name")


-def test_budget_cascade_reindex(
-    acq_account_fiction_martigny,
-    budget_2020_martigny
-):
+def test_budget_cascade_reindex(acq_account_fiction_martigny, budget_2020_martigny):
     """Test budget cascading reindex."""
     budg = budget_2020_martigny
     acac = acq_account_fiction_martigny
-    flush_index(BudgetsSearch.Meta.index)
-    flush_index(AcqAccountsSearch.Meta.index)
+    BudgetsSearch.flush_and_refresh()
+    AcqAccountsSearch.flush_and_refresh()

     # when the `is_active` budget field changes, the related account must be
     # reindexed too.
     es_budg = BudgetsSearch().get_record_by_pid(budg.pid)
     es_acac = AcqAccountsSearch().get_record_by_pid(acac.pid)
-    assert es_budg['is_active'] and es_acac['is_active']
+    assert es_budg["is_active"] and es_acac["is_active"]

-    budg['is_active'] = False
+    budg["is_active"] = False
     budg.update(budg, dbcommit=True, reindex=True)
-    flush_index(BudgetsSearch.Meta.index)
-    flush_index(AcqAccountsSearch.Meta.index)
+    BudgetsSearch.flush_and_refresh()
+    AcqAccountsSearch.flush_and_refresh()
     es_budg = BudgetsSearch().get_record_by_pid(budg.pid)
     es_acac = AcqAccountsSearch().get_record_by_pid(acac.pid)
-    assert not es_budg['is_active'] and not es_acac['is_active']
+    assert not es_budg["is_active"] and not es_acac["is_active"]
diff --git a/tests/ui/budgets/test_budgets_jsonresolver.py b/tests/ui/budgets/test_budgets_jsonresolver.py
index 4144d62481..cb73291b9b 100644
--- a/tests/ui/budgets/test_budgets_jsonresolver.py
+++ b/tests/ui/budgets/test_budgets_jsonresolver.py
@@ -26,10 +26,8 @@

 def test_budgets_jsonresolver(budget_2017_martigny):
     """Budgets resolver tests."""
-    rec = Record.create({
-        'budget': {'$ref': 'https://bib.rero.ch/api/budgets/budg5'}
-    })
-    assert extracted_data_from_ref(rec.get('budget')) == 'budg5'
+    rec = Record.create({"budget": {"$ref": "https://bib.rero.ch/api/budgets/budg5"}})
+    assert extracted_data_from_ref(rec.get("budget")) == "budg5"

     # deleted record
     budget_2017_martigny.delete()
@@ -37,8 +35,6 @@ def test_budgets_jsonresolver(budget_2017_martigny):
     type(rec)(rec.replace_refs()).dumps()

     # non existing record
-    rec = Record.create({
-        'budget': {'$ref': 'https://bib.rero.ch/api/budgets/n_e'}
-    })
+    rec = Record.create({"budget": {"$ref": "https://bib.rero.ch/api/budgets/n_e"}})
     with pytest.raises(JsonRefError):
         type(rec)(rec.replace_refs()).dumps()
diff --git a/tests/ui/budgets/test_budgets_mapping.py b/tests/ui/budgets/test_budgets_mapping.py
index a4ebd9acae..964de2e0d7 100644
--- a/tests/ui/budgets/test_budgets_mapping.py
+++ b/tests/ui/budgets/test_budgets_mapping.py
@@ -21,18 +21,13 @@ from 
rero_ils.modules.acquisition.budgets.api import Budget, BudgetsSearch -def test_budgets_es_mapping( - search, db, org_martigny, budget_2017_martigny_data -): +def test_budgets_es_mapping(search, db, org_martigny, budget_2017_martigny_data): """Test acquisition budget elasticsearch mapping.""" search = BudgetsSearch() mapping = get_mapping(search.Meta.index) assert mapping budget = Budget.create( - budget_2017_martigny_data, - dbcommit=True, - reindex=True, - delete_pid=True + budget_2017_martigny_data, dbcommit=True, reindex=True, delete_pid=True ) assert mapping == get_mapping(search.Meta.index) budget.delete(force=True, dbcommit=True, delindex=True) diff --git a/tests/ui/circ_policies/test_circ_policies_api.py b/tests/ui/circ_policies/test_circ_policies_api.py index 041f73137c..57cdd39dc4 100644 --- a/tests/ui/circ_policies/test_circ_policies_api.py +++ b/tests/ui/circ_policies/test_circ_policies_api.py @@ -24,70 +24,65 @@ import pytest from jsonschema.exceptions import ValidationError -from rero_ils.modules.circ_policies.api import CircPolicy, \ - circ_policy_id_fetcher +from rero_ils.modules.circ_policies.api import CircPolicy, circ_policy_id_fetcher def test_no_default_policy(app): """Test when no default circulation policy configured.""" - cipo = CircPolicy.get_default_circ_policy('org1') + cipo = CircPolicy.get_default_circ_policy("org1") assert not cipo -def test_circ_policy_create(circ_policy_martigny_data_tmp, - circ_policy_short_martigny_data, - org_martigny, - lib_martigny, lib_saxon, - patron_type_children_martigny, - item_type_standard_martigny, - patron_type_adults_martigny, - item_type_specific_martigny, - item_type_regular_sion, - patron_type_youngsters_sion): +def test_circ_policy_create( + circ_policy_martigny_data_tmp, + circ_policy_short_martigny_data, + org_martigny, + lib_martigny, + lib_saxon, + patron_type_children_martigny, + item_type_standard_martigny, + patron_type_adults_martigny, + item_type_specific_martigny, + item_type_regular_sion, + patron_type_youngsters_sion, +): """Test circulation policy creation.""" cipo = CircPolicy.create(circ_policy_martigny_data_tmp, delete_pid=True) assert cipo == circ_policy_martigny_data_tmp - assert cipo.get('pid') == '1' + assert cipo.get("pid") == "1" - cipo = CircPolicy.get_record_by_pid('1') + cipo = CircPolicy.get_record_by_pid("1") assert cipo == circ_policy_martigny_data_tmp fetched_pid = circ_policy_id_fetcher(cipo.id, cipo) - assert fetched_pid.pid_value == '1' - assert fetched_pid.pid_type == 'cipo' + assert fetched_pid.pid_value == "1" + assert fetched_pid.pid_type == "cipo" circ_policy_data = deepcopy(circ_policy_short_martigny_data) - del circ_policy_data['$schema'] + del circ_policy_data["$schema"] cipo = CircPolicy.create(circ_policy_data, delete_pid=True) - assert cipo.get('$schema') - assert cipo.get('pid') == '2' + assert cipo.get("$schema") + assert cipo.get("pid") == "2" cipo_data = { - '$schema': 'https://bib.rero.ch/schemas/' - 'circ_policies/circ_policy-v0.0.1.json', - 'pid': 'cipo_test', - 'name': 'test', - 'organisation': { - '$ref': 'https://bib.rero.ch/api/organisations/org1' - }, - 'is_default': False, - 'allow_requests': True, - 'policy_library_level': False, - 'settings': [{ - 'patron_type': { - '$ref': 'https://bib.rero.ch/api/patron_types/ptty3' + "$schema": "https://bib.rero.ch/schemas/" + "circ_policies/circ_policy-v0.0.1.json", + "pid": "cipo_test", + "name": "test", + "organisation": {"$ref": "https://bib.rero.ch/api/organisations/org1"}, + "is_default": False, + "allow_requests": True, + 
"policy_library_level": False, + "settings": [ + { + "patron_type": {"$ref": "https://bib.rero.ch/api/patron_types/ptty3"}, + "item_type": {"$ref": "https://bib.rero.ch/api/item_types/itty1"}, }, - 'item_type': { - '$ref': 'https://bib.rero.ch/api/item_types/itty1' - } - }, { - 'patron_type': { - '$ref': 'https://bib.rero.ch/api/patron_types/ptty2' + { + "patron_type": {"$ref": "https://bib.rero.ch/api/patron_types/ptty2"}, + "item_type": {"$ref": "https://bib.rero.ch/api/item_types/itty4"}, }, - 'item_type': { - '$ref': 'https://bib.rero.ch/api/item_types/itty4' - } - }] + ], } with pytest.raises(ValidationError): cipo = CircPolicy.create(cipo_data, delete_pid=False) @@ -95,22 +90,22 @@ def test_circ_policy_create(circ_policy_martigny_data_tmp, # TEST #2 : create a second defaut policy # The first created policy (pid=1) is the default policy. # Creation of a second default policy should raise a ValidationError - default_cipo = CircPolicy.get_record_by_pid('1') - assert default_cipo.get('is_default') + default_cipo = CircPolicy.get_record_by_pid("1") + assert default_cipo.get("is_default") with pytest.raises(ValidationError) as excinfo: CircPolicy.create(circ_policy_martigny_data_tmp, delete_pid=True) - assert 'CircPolicy: already a default policy for this org' \ - in str(excinfo.value) + assert "CircPolicy: already a default policy for this org" in str(excinfo.value) -def test_circ_policy_exist_name_and_organisation_pid( - circ_policy_short_martigny): +def test_circ_policy_exist_name_and_organisation_pid(circ_policy_short_martigny): """Test policy name existence.""" cipo = circ_policy_short_martigny.replace_refs() assert CircPolicy.exist_name_and_organisation_pid( - cipo.get('name'), cipo.get('organisation', {}).get('pid')) + cipo.get("name"), cipo.get("organisation", {}).get("pid") + ) assert not CircPolicy.exist_name_and_organisation_pid( - 'not exists yet', cipo.get('organisation', {}).get('pid')) + "not exists yet", cipo.get("organisation", {}).get("pid") + ) def test_circ_policy_can_not_delete(circ_policy_short_martigny): @@ -119,7 +114,7 @@ def test_circ_policy_can_not_delete(circ_policy_short_martigny): defaut_cipo = CircPolicy.get_default_circ_policy(org_pid) can, reasons = defaut_cipo.can_delete assert not can - assert reasons['others']['is_default'] + assert reasons["others"]["is_default"] can, reasons = circ_policy_short_martigny.can_delete assert can @@ -128,7 +123,7 @@ def test_circ_policy_can_not_delete(circ_policy_short_martigny): def test_circ_policy_can_delete(app, circ_policy_martigny_data_tmp): """Test can delete a policy.""" - circ_policy_martigny_data_tmp['is_default'] = False + circ_policy_martigny_data_tmp["is_default"] = False cipo = CircPolicy.create(circ_policy_martigny_data_tmp, delete_pid=True) can, reasons = cipo.can_delete @@ -140,27 +135,27 @@ def test_circ_policy_extended_validation( app, circ_policy_short_martigny, circ_policy_short_martigny_data, - circ_policy_default_sion_data + circ_policy_default_sion_data, ): """Test extended validation for circ policy""" cipo_data = deepcopy(circ_policy_short_martigny_data) - cipo_data['allow_requests'] = False - cipo_data['pickup_hold_duration'] = 10 - del cipo_data['pid'] + cipo_data["allow_requests"] = False + cipo_data["pickup_hold_duration"] = 10 + del cipo_data["pid"] cipo = CircPolicy.create(cipo_data) assert cipo - assert 'pickup_hold_duration' not in cipo + assert "pickup_hold_duration" not in cipo cipo.delete() # Check that I cannot save a CiPo without a renewal duration if # renewals are enabled. 
cipo_sion_data = deepcopy(circ_policy_default_sion_data) - assert cipo_sion_data['number_renewals'] > 0 + assert cipo_sion_data["number_renewals"] > 0 - cipo_sion_data.pop('renewal_duration') + cipo_sion_data.pop("renewal_duration") with pytest.raises(ValidationError) as err: CircPolicy.create(cipo_sion_data, delete_pid=True) - assert 'renewal duration is required' in str(err.value) + assert "renewal duration is required" in str(err.value) diff --git a/tests/ui/circ_policies/test_circ_policies_mapping.py b/tests/ui/circ_policies/test_circ_policies_mapping.py index 07e9f84951..c28dee1348 100644 --- a/tests/ui/circ_policies/test_circ_policies_mapping.py +++ b/tests/ui/circ_policies/test_circ_policies_mapping.py @@ -22,17 +22,15 @@ from rero_ils.modules.circ_policies.api import CircPoliciesSearch, CircPolicy -def test_circ_policy_es_mapping(es_clear, db, org_martigny, - circ_policy_martigny_data_tmp): +def test_circ_policy_es_mapping( + search_clear, db, org_martigny, circ_policy_martigny_data_tmp +): """Test circulation policy elasticsearch mapping.""" search = CircPoliciesSearch() mapping = get_mapping(search.Meta.index) assert mapping CircPolicy.create( - circ_policy_martigny_data_tmp, - dbcommit=True, - reindex=True, - delete_pid=True + circ_policy_martigny_data_tmp, dbcommit=True, reindex=True, delete_pid=True ) assert mapping == get_mapping(search.Meta.index) @@ -41,11 +39,11 @@ def test_circ_policies_search_mapping(app, circulation_policies): """Test circulation policy search mapping.""" search = CircPoliciesSearch() - c = search.query('query_string', query='policy').count() + c = search.query("query_string", query="policy").count() assert c == 4 - c = search.query('match', name='default').count() + c = search.query("match", name="default").count() assert c == 2 - es_query = search.query('match', name='temporary').source(['pid']).scan() + es_query = search.query("match", name="temporary").source(["pid"]).scan() pids = [hit.pid for hit in es_query] assert len(pids) == 1 - assert 'cipo3' in pids + assert "cipo3" in pids diff --git a/tests/ui/circ_policies/test_circ_policies_utils.py b/tests/ui/circ_policies/test_circ_policies_utils.py index 1d4aa64ce1..14ee1dbcd0 100644 --- a/tests/ui/circ_policies/test_circ_policies_utils.py +++ b/tests/ui/circ_policies/test_circ_policies_utils.py @@ -26,57 +26,53 @@ def test_circ_policy_search(app, circulation_policies): """Test finding a circulation policy.""" data = [ { - 'organisation_pid': 'org1', - 'library_pid': 'lib1', - 'patron_type_pid': 'ptty1', - 'item_type_pid': 'itty1', - 'cipo': 'cipo2' - + "organisation_pid": "org1", + "library_pid": "lib1", + "patron_type_pid": "ptty1", + "item_type_pid": "itty1", + "cipo": "cipo2", }, { - 'organisation_pid': 'org1', - 'library_pid': 'lib1', - 'patron_type_pid': 'ptty2', - 'item_type_pid': 'itty2', - 'cipo': 'cipo3' - + "organisation_pid": "org1", + "library_pid": "lib1", + "patron_type_pid": "ptty2", + "item_type_pid": "itty2", + "cipo": "cipo3", }, { - 'organisation_pid': 'org1', - 'library_pid': 'lib2', - 'patron_type_pid': 'ptty2', - 'item_type_pid': 'itty2', - 'cipo': 'cipo1' + "organisation_pid": "org1", + "library_pid": "lib2", + "patron_type_pid": "ptty2", + "item_type_pid": "itty2", + "cipo": "cipo1", }, { - 'organisation_pid': 'org1', - 'library_pid': 'lib1', - 'patron_type_pid': 'ptty3', - 'item_type_pid': 'itty2', - 'cipo': 'cipo1' - + "organisation_pid": "org1", + "library_pid": "lib1", + "patron_type_pid": "ptty3", + "item_type_pid": "itty2", + "cipo": "cipo1", }, { - 'organisation_pid': 
'org1', - 'library_pid': 'lib1', - 'patron_type_pid': 'ptty1', - 'item_type_pid': 'itty2', - 'cipo': 'cipo1' - + "organisation_pid": "org1", + "library_pid": "lib1", + "patron_type_pid": "ptty1", + "item_type_pid": "itty2", + "cipo": "cipo1", }, { - 'organisation_pid': 'org2', - 'library_pid': 'lib4', - 'patron_type_pid': 'ptty3', - 'item_type_pid': 'itty4', - 'cipo': 'cipo4' - } + "organisation_pid": "org2", + "library_pid": "lib4", + "patron_type_pid": "ptty3", + "item_type_pid": "itty4", + "cipo": "cipo4", + }, ] for row in data: cipo = CircPolicy.provide_circ_policy( - row['organisation_pid'], - row['library_pid'], - row['patron_type_pid'], - row['item_type_pid'] + row["organisation_pid"], + row["library_pid"], + row["patron_type_pid"], + row["item_type_pid"], ) - assert cipo.pid == row['cipo'] + assert cipo.pid == row["cipo"] diff --git a/tests/ui/circulation/test_actions_add_request.py b/tests/ui/circulation/test_actions_add_request.py index 0c66b669c1..f2242ec640 100644 --- a/tests/ui/circulation/test_actions_add_request.py +++ b/tests/ui/circulation/test_actions_add_request.py @@ -26,157 +26,171 @@ def test_add_request_on_item_on_shelf( - item_on_shelf_martigny_patron_and_loan_pending, - loc_public_martigny, librarian_martigny, - patron2_martigny): + item_on_shelf_martigny_patron_and_loan_pending, + loc_public_martigny, + librarian_martigny, + patron2_martigny, +): """Test requests on an on_shelf item.""" item, patron, loan = item_on_shelf_martigny_patron_and_loan_pending # the following tests the circulation action ADD_REQUEST_1_1 # an on_shelf item with no pending requests can have new pending requests. - assert loan['state'] == LoanState.PENDING + assert loan["state"] == LoanState.PENDING # the following tests the circulation action ADD_REQUEST_1_2_1 # for an item on_shelf with a pending loan, the patron that owns the # pending loan can not add a new pending loan on same item. params = { - 'patron_pid': patron.pid, - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid, - 'pickup_location_pid': loc_public_martigny.pid + "patron_pid": patron.pid, + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, + "pickup_location_pid": loc_public_martigny.pid, } with pytest.raises(RecordCannotBeRequestedError): item, loan = item_record_to_a_specific_loan_state( - item=item, loan_state=LoanState.PENDING, params=params, - copy_item=False) + item=item, loan_state=LoanState.PENDING, params=params, copy_item=False + ) # the following tests the circulation action ADD_REQUEST_1_2_2 # for an item on_shelf with a pending loan, a patron that does not own the # pending loan can add a new pending loan on same item. 
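    # Same `params` as the failed attempt above; only `patron_pid` changes,
    # so the new request comes from a different patron and is accepted.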
-    params['patron_pid'] = patron2_martigny.pid
+    params["patron_pid"] = patron2_martigny.pid
     item, requested_loan = item_record_to_a_specific_loan_state(
-        item=item, loan_state=LoanState.PENDING,
-        params=params, copy_item=False)
-    assert requested_loan['state'] == LoanState.PENDING
+        item=item, loan_state=LoanState.PENDING, params=params, copy_item=False
+    )
+    assert requested_loan["state"] == LoanState.PENDING


 def test_add_request_on_item_at_desk(
-        client, item_at_desk_martigny_patron_and_loan_at_desk,
-        loc_public_martigny, librarian_martigny,
-        patron2_martigny):
+    client,
+    item_at_desk_martigny_patron_and_loan_at_desk,
+    loc_public_martigny,
+    librarian_martigny,
+    patron2_martigny,
+):
     """Test requests on an at_desk item."""
     item, patron, loan = item_at_desk_martigny_patron_and_loan_at_desk

     # the following tests the circulation action ADD_REQUEST_2_1
     # the patron who owns the ITEM_AT_DESK loan may not create a new pending loan.
     params = {
-        'patron_pid': patron.pid,
-        'transaction_location_pid': loc_public_martigny.pid,
-        'transaction_user_pid': librarian_martigny.pid,
-        'pickup_location_pid': loc_public_martigny.pid
+        "patron_pid": patron.pid,
+        "transaction_location_pid": loc_public_martigny.pid,
+        "transaction_user_pid": librarian_martigny.pid,
+        "pickup_location_pid": loc_public_martigny.pid,
     }
     with pytest.raises(RecordCannotBeRequestedError):
         item, loan = item_record_to_a_specific_loan_state(
-            item=item, loan_state=LoanState.PENDING,
-            params=params, copy_item=False)
+            item=item, loan_state=LoanState.PENDING, params=params, copy_item=False
+        )

     # the following tests the circulation action ADD_REQUEST_2_2
     # a patron who doesn't own the ITEM_AT_DESK loan can add a new pending loan.
-    params['patron_pid'] = patron2_martigny.pid
+    params["patron_pid"] = patron2_martigny.pid
     item, requested_loan = item_record_to_a_specific_loan_state(
-        item=item, loan_state=LoanState.PENDING, params=params,
-        copy_item=False)
-    assert requested_loan['state'] == LoanState.PENDING
-    assert loan['state'] == LoanState.ITEM_AT_DESK
+        item=item, loan_state=LoanState.PENDING, params=params, copy_item=False
+    )
+    assert requested_loan["state"] == LoanState.PENDING
+    assert loan["state"] == LoanState.ITEM_AT_DESK


 def test_add_request_on_item_on_loan(
-        item_on_loan_martigny_patron_and_loan_on_loan,
-        loc_public_martigny, librarian_martigny,
-        patron2_martigny, patron4_martigny):
+    item_on_loan_martigny_patron_and_loan_on_loan,
+    loc_public_martigny,
+    librarian_martigny,
+    patron2_martigny,
+    patron4_martigny,
+):
     """Test requests on an on_loan item."""
     item, patron, loan = item_on_loan_martigny_patron_and_loan_on_loan

     # the following tests the circulation action ADD_REQUEST_3_1
     # the patron who owns the ITEM_ON_LOAN loan may not create a new pending loan.
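    # `params` is the circulation context handed to the loan state machine:
    # the requesting patron, the transaction location and user, and the
    # requested pickup location.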
params = { - 'patron_pid': patron.pid, - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid, - 'pickup_location_pid': loc_public_martigny.pid + "patron_pid": patron.pid, + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, + "pickup_location_pid": loc_public_martigny.pid, } with pytest.raises(RecordCannotBeRequestedError): item, loan = item_record_to_a_specific_loan_state( - item=item, loan_state=LoanState.PENDING, - params=params, copy_item=False) + item=item, loan_state=LoanState.PENDING, params=params, copy_item=False + ) # the following tests the circulation action ADD_REQUEST_3_2_1 # any patron who does not own the ITEM_ON_LOAN loan can add a new pending # loan. - params['patron_pid'] = patron2_martigny.pid + params["patron_pid"] = patron2_martigny.pid item, requested_loan = item_record_to_a_specific_loan_state( - item=item, loan_state=LoanState.PENDING, params=params, - copy_item=False) - assert requested_loan['state'] == LoanState.PENDING - assert loan['state'] == LoanState.ITEM_ON_LOAN + item=item, loan_state=LoanState.PENDING, params=params, copy_item=False + ) + assert requested_loan["state"] == LoanState.PENDING + assert loan["state"] == LoanState.ITEM_ON_LOAN # the following tests the circulation action ADD_REQUEST_3_2_2_1 # when an item on_loan has pending requests, the patron who owns the # pending loan may not add a new pending loan - params['patron_pid'] = patron2_martigny.pid + params["patron_pid"] = patron2_martigny.pid with pytest.raises(RecordCannotBeRequestedError): item, requested_loan = item_record_to_a_specific_loan_state( - item=item, loan_state=LoanState.PENDING, params=params, - copy_item=False) + item=item, loan_state=LoanState.PENDING, params=params, copy_item=False + ) # the following tests the circulation action ADD_REQUEST_3_2_2_2 # when an item on_loan has pending requests, any patron who does not own # the pending loan may add a new pending loan - params['patron_pid'] = patron4_martigny.pid + params["patron_pid"] = patron4_martigny.pid item, second_requested_loan = item_record_to_a_specific_loan_state( - item=item, loan_state=LoanState.PENDING, params=params, - copy_item=False) - assert loan['state'] == LoanState.ITEM_ON_LOAN - assert requested_loan['state'] == LoanState.PENDING - assert second_requested_loan['state'] == LoanState.PENDING + item=item, loan_state=LoanState.PENDING, params=params, copy_item=False + ) + assert loan["state"] == LoanState.ITEM_ON_LOAN + assert requested_loan["state"] == LoanState.PENDING + assert second_requested_loan["state"] == LoanState.PENDING def test_add_request_on_item_in_transit_for_pickup( - item_in_transit_martigny_patron_and_loan_for_pickup, - loc_public_martigny, librarian_martigny, - patron2_martigny, loc_public_fully): + item_in_transit_martigny_patron_and_loan_for_pickup, + loc_public_martigny, + librarian_martigny, + patron2_martigny, + loc_public_fully, +): """Test requests on an in_transit item for pickup.""" item, patron, loan = item_in_transit_martigny_patron_and_loan_for_pickup # the following tests the circulation action ADD_REQUEST_4_1 # the owner of the IN_TRANSIT_FOR_PICKUP loan can not add a pending loan params = { - 'patron_pid': patron.pid, - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid, - 'pickup_location_pid': loc_public_fully.pid + "patron_pid": patron.pid, + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": 
librarian_martigny.pid,
+        "pickup_location_pid": loc_public_fully.pid,
     }
     with pytest.raises(RecordCannotBeRequestedError):
         item, loan = item_record_to_a_specific_loan_state(
-            item=item, loan_state=LoanState.PENDING,
-            params=params, copy_item=False)
+            item=item, loan_state=LoanState.PENDING, params=params, copy_item=False
+        )

     # the following tests the circulation action ADD_REQUEST_4_2
     # a patron who does not own the IN_TRANSIT_FOR_PICKUP loan can add
     # a new pending loan.
-    params['patron_pid'] = patron2_martigny.pid
+    params["patron_pid"] = patron2_martigny.pid
     item, requested_loan = item_record_to_a_specific_loan_state(
-        item=item, loan_state=LoanState.PENDING, params=params,
-        copy_item=False)
-    assert requested_loan['state'] == LoanState.PENDING
-    assert loan['state'] == LoanState.ITEM_IN_TRANSIT_FOR_PICKUP
+        item=item, loan_state=LoanState.PENDING, params=params, copy_item=False
+    )
+    assert requested_loan["state"] == LoanState.PENDING
+    assert loan["state"] == LoanState.ITEM_IN_TRANSIT_FOR_PICKUP


 def test_add_request_on_item_in_transit_to_house(
-        item_in_transit_martigny_patron_and_loan_to_house,
-        loc_public_martigny, librarian_martigny,
-        patron2_martigny, loc_public_fully,
-        patron4_martigny):
+    item_in_transit_martigny_patron_and_loan_to_house,
+    loc_public_martigny,
+    librarian_martigny,
+    patron2_martigny,
+    loc_public_fully,
+    patron4_martigny,
+):
     """Test requests on an in_transit item to house."""
     item, patron, loan = item_in_transit_martigny_patron_and_loan_to_house

@@ -184,33 +198,33 @@ def test_add_request_on_item_in_transit_to_house(
     # any patron can add a new pending loan on an item with a loan equal to
     # ITEM_IN_TRANSIT_TO_HOUSE
     params = {
-        'patron_pid': patron2_martigny.pid,
-        'transaction_location_pid': loc_public_martigny.pid,
-        'transaction_user_pid': librarian_martigny.pid,
-        'pickup_location_pid': loc_public_martigny.pid,
-        'checkin_transaction_location_pid': loc_public_fully.pid,
+        "patron_pid": patron2_martigny.pid,
+        "transaction_location_pid": loc_public_martigny.pid,
+        "transaction_user_pid": librarian_martigny.pid,
+        "pickup_location_pid": loc_public_martigny.pid,
+        "checkin_transaction_location_pid": loc_public_fully.pid,
     }
     item, requested_loan = item_record_to_a_specific_loan_state(
-        item=item, loan_state=LoanState.PENDING, params=params,
-        copy_item=False)
-    assert loan['state'] == LoanState.ITEM_IN_TRANSIT_TO_HOUSE
-    assert requested_loan['state'] == LoanState.PENDING
+        item=item, loan_state=LoanState.PENDING, params=params, copy_item=False
+    )
+    assert loan["state"] == LoanState.ITEM_IN_TRANSIT_TO_HOUSE
+    assert requested_loan["state"] == LoanState.PENDING

     # the following tests the circulation action ADD_REQUEST_5_2_1
     # when a pending loan exists on an item with loan ITEM_IN_TRANSIT_TO_HOUSE
     # the patron who owns the pending loan can not add a new pending loan
     with pytest.raises(RecordCannotBeRequestedError):
         item, requested_loan = item_record_to_a_specific_loan_state(
-            item=item, loan_state=LoanState.PENDING, params=params,
-            copy_item=False)
+            item=item, loan_state=LoanState.PENDING, params=params, copy_item=False
+        )

     # the following tests the circulation action ADD_REQUEST_5_2_2
     # when a pending loan exists on an item with loan ITEM_IN_TRANSIT_TO_HOUSE,
     # any patron who does not own the pending loan can add a new pending loan.
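    # patron4 holds no loan on this item yet, so this second request is
    # accepted and queued as one more PENDING loan.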
- params['patron_pid'] = patron4_martigny.pid + params["patron_pid"] = patron4_martigny.pid item, second_requested_loan = item_record_to_a_specific_loan_state( - item=item, loan_state=LoanState.PENDING, params=params, - copy_item=False) - assert loan['state'] == LoanState.ITEM_IN_TRANSIT_TO_HOUSE - assert second_requested_loan['state'] == LoanState.PENDING - assert requested_loan['state'] == LoanState.PENDING + item=item, loan_state=LoanState.PENDING, params=params, copy_item=False + ) + assert loan["state"] == LoanState.ITEM_IN_TRANSIT_TO_HOUSE + assert second_requested_loan["state"] == LoanState.PENDING + assert requested_loan["state"] == LoanState.PENDING diff --git a/tests/ui/circulation/test_actions_auto_extend.py b/tests/ui/circulation/test_actions_auto_extend.py index b76da57ce8..8e4338f5ae 100644 --- a/tests/ui/circulation/test_actions_auto_extend.py +++ b/tests/ui/circulation/test_actions_auto_extend.py @@ -28,25 +28,28 @@ from rero_ils.modules.operation_logs.api import OperationLogsSearch -def test_auto_extend_task(item_on_loan_martigny_patron_and_loan_on_loan, - loc_public_martigny, patron2_martigny, - librarian_martigny, mailbox): +def test_auto_extend_task( + item_on_loan_martigny_patron_and_loan_on_loan, + loc_public_martigny, + patron2_martigny, + librarian_martigny, + mailbox, +): """Test the automatic extension of on_loan item.""" # Prepare a loan where the due date is today item, patron, loan = item_on_loan_martigny_patron_and_loan_on_loan cipo = get_circ_policy(loan) - start_date = datetime.now(timezone.utc) - timedelta( - days=cipo['checkout_duration']) + start_date = datetime.now(timezone.utc) - timedelta(days=cipo["checkout_duration"]) end_date = datetime.now(timezone.utc) - loan['start_date'] = start_date.isoformat() - loan['end_date'] = end_date.isoformat() - loan['transaction_date'] = start_date.isoformat() + loan["start_date"] = start_date.isoformat() + loan["end_date"] = end_date.isoformat() + loan["transaction_date"] = start_date.isoformat() loan = loan.update(loan, dbcommit=True, reindex=True) loan = loan.get_record_by_pid(loan.pid) # If automatic_renewal is not set in the policy, the loan is # not renewed. 
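    # Expected outcome of the task run below: (0 extended, 0 ignored),
    # the item stays ON_LOAN and no notification mail is sent.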
- cipo.pop('automatic_renewal', None) + cipo.pop("automatic_renewal", None) cipo.update(cipo, dbcommit=True, reindex=True) result = automatic_renewal() @@ -56,21 +59,21 @@ def test_auto_extend_task(item_on_loan_martigny_patron_and_loan_on_loan, # (because the unextended loan was filtered directly in the first query) assert result == (0, 0) assert item.status == ItemStatus.ON_LOAN - assert not loan.get('auto_extend') + assert not loan.get("auto_extend") assert not mailbox # If loan is not renewable (request exists), it is ignored - cipo['automatic_renewal'] = True + cipo["automatic_renewal"] = True cipo.update(cipo, dbcommit=True, reindex=True) # Add a request to the same item item, actions = item.request( - pickup_location_pid=loc_public_martigny.pid, - patron_pid=patron2_martigny.pid, - transaction_location_pid=loc_public_martigny.pid, - transaction_user_pid=librarian_martigny.pid - ) - requested_loan_pid = actions['request']['pid'] + pickup_location_pid=loc_public_martigny.pid, + patron_pid=patron2_martigny.pid, + transaction_location_pid=loc_public_martigny.pid, + transaction_user_pid=librarian_martigny.pid, + ) + requested_loan_pid = actions["request"]["pid"] result = automatic_renewal() extended_loan = Loan.get_record_by_pid(loan.pid) @@ -79,21 +82,24 @@ def test_auto_extend_task(item_on_loan_martigny_patron_and_loan_on_loan, # and one loan was ignored because not renewable assert result == (0, 1) assert item.status == ItemStatus.ON_LOAN - assert not extended_loan.get('auto_extend') + assert not extended_loan.get("auto_extend") assert not mailbox # Cancel the request made for the previous test - item.cancel_item_request(pid=requested_loan_pid, - transaction_location_pid=loc_public_martigny.pid, - transaction_user_pid=librarian_martigny.pid) + item.cancel_item_request( + pid=requested_loan_pid, + transaction_location_pid=loc_public_martigny.pid, + transaction_user_pid=librarian_martigny.pid, + ) # disallows the renewals - with mock.patch.object(ItemCirculation, 'can', - mock.MagicMock(return_value=(False, ['foo']))): + with mock.patch.object( + ItemCirculation, "can", mock.MagicMock(return_value=(False, ["foo"])) + ): # no loans has been extended assert automatic_renewal() == (0, 1) assert item.status == ItemStatus.ON_LOAN - assert not extended_loan.get('auto_extend') + assert not extended_loan.get("auto_extend") assert not mailbox # Auto extend one extendable loan @@ -101,25 +107,28 @@ def test_auto_extend_task(item_on_loan_martigny_patron_and_loan_on_loan, assert result == (1, 0) assert item.status == ItemStatus.ON_LOAN extended_loan = Loan.get_record_by_pid(loan.pid) - assert extended_loan.get('auto_extend') is True + assert extended_loan.get("auto_extend") is True # Check that the notification was correctly sent assert len(mailbox) == 1 - assert mailbox[0].recipients == [ - 'reroilstest+martigny+auto_extend@gmail.com'] + assert mailbox[0].recipients == ["reroilstest+martigny+auto_extend@gmail.com"] # Check that the operation_logs were created OperationLogsSearch.flush_and_refresh() - es_query = OperationLogsSearch()\ - .filter('term', loan__pid=loan.pid)\ - .filter('term', record__type='loan')\ - .filter('term', loan__trigger=ItemCirculationAction.EXTEND)\ - .filter('term', loan__auto_extend=True) + es_query = ( + OperationLogsSearch() + .filter("term", loan__pid=loan.pid) + .filter("term", record__type="loan") + .filter("term", loan__trigger=ItemCirculationAction.EXTEND) + .filter("term", loan__auto_extend=True) + ) assert es_query.count() == 1 - es_query = OperationLogsSearch()\ 
- .filter('term', loan__pid=loan.pid)\ - .filter('term', record__type='notif')\ - .filter('term', loan__trigger=ItemCirculationAction.EXTEND)\ - .filter('term', notification__type='auto_extend') + es_query = ( + OperationLogsSearch() + .filter("term", loan__pid=loan.pid) + .filter("term", record__type="notif") + .filter("term", loan__trigger=ItemCirculationAction.EXTEND) + .filter("term", notification__type="auto_extend") + ) assert es_query.count() == 1 diff --git a/tests/ui/circulation/test_actions_cancel_request.py b/tests/ui/circulation/test_actions_cancel_request.py index 73ce716c65..d96aafdb0d 100644 --- a/tests/ui/circulation/test_actions_cancel_request.py +++ b/tests/ui/circulation/test_actions_cancel_request.py @@ -31,9 +31,14 @@ def test_cancel_item_request_in_transit_for_pickup_with_requests_same_lib( - client, item3_in_transit_martigny_patron_and_loan_for_pickup, - loc_public_martigny, librarian_martigny, librarian_fully, - loc_public_fully, patron2_martigny): + client, + item3_in_transit_martigny_patron_and_loan_for_pickup, + loc_public_martigny, + librarian_martigny, + librarian_fully, + loc_public_fully, + patron2_martigny, +): """Test cancel requests on an in_transit for pickup item with requests.""" item, patron, loan = item3_in_transit_martigny_patron_and_loan_for_pickup origin_loan = deepcopy(loan) @@ -42,28 +47,28 @@ def test_cancel_item_request_in_transit_for_pickup_with_requests_same_lib( # other pending loans and the pickup locations are the same. params = { - 'patron_pid': patron2_martigny.pid, - 'transaction_location_pid': loc_public_fully.pid, - 'transaction_user_pid': librarian_fully.pid, - 'pickup_location_pid': loc_public_fully.pid + "patron_pid": patron2_martigny.pid, + "transaction_location_pid": loc_public_fully.pid, + "transaction_user_pid": librarian_fully.pid, + "pickup_location_pid": loc_public_fully.pid, } item, requested_loan = item_record_to_a_specific_loan_state( - item=item, loan_state=LoanState.PENDING, - params=params, copy_item=False) - assert requested_loan['state'] == LoanState.PENDING + item=item, loan_state=LoanState.PENDING, params=params, copy_item=False + ) + assert requested_loan["state"] == LoanState.PENDING params = { - 'pid': loan.pid, - 'transaction_location_pid': loc_public_fully.pid, - 'transaction_user_pid': librarian_fully.pid + "pid": loan.pid, + "transaction_location_pid": loc_public_fully.pid, + "transaction_user_pid": librarian_fully.pid, } item.cancel_item_request(**params) item = Item.get_record_by_pid(item.pid) loan = Loan.get_record_by_pid(loan.pid) requested_loan = Loan.get_record_by_pid(requested_loan.pid) assert item.status == ItemStatus.IN_TRANSIT - assert loan['state'] == LoanState.CANCELLED - assert requested_loan['state'] == LoanState.ITEM_IN_TRANSIT_FOR_PICKUP + assert loan["state"] == LoanState.CANCELLED + assert requested_loan["state"] == LoanState.ITEM_IN_TRANSIT_FOR_PICKUP # Clean created data requested_loan.delete(force=True, dbcommit=True, delindex=True) @@ -72,9 +77,12 @@ def test_cancel_item_request_in_transit_for_pickup_with_requests_same_lib( def test_cancel_request_on_item_on_shelf( - item_lib_martigny, item_on_shelf_martigny_patron_and_loan_pending, - loc_public_martigny, librarian_martigny, - patron2_martigny): + item_lib_martigny, + item_on_shelf_martigny_patron_and_loan_pending, + loc_public_martigny, + librarian_martigny, + patron2_martigny, +): """Test cancel request on an on_shelf item.""" # the following tests the circulation action CANCEL_REQUEST_1_1 # on_shelf item with no pending requests, not 
possible to cancel a request. @@ -89,31 +97,34 @@ def test_cancel_request_on_item_on_shelf( item, patron, loan = item_on_shelf_martigny_patron_and_loan_pending # add request for another patron params = { - 'patron_pid': patron2_martigny.pid, - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid, - 'pickup_location_pid': loc_public_martigny.pid + "patron_pid": patron2_martigny.pid, + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, + "pickup_location_pid": loc_public_martigny.pid, } item, requested_loan = item_record_to_a_specific_loan_state( - item=item, loan_state=LoanState.PENDING, - params=params, copy_item=False) + item=item, loan_state=LoanState.PENDING, params=params, copy_item=False + ) # cancel request params = { - 'pid': loan.pid, - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid + "pid": loan.pid, + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, } item.cancel_item_request(**params) item = Item.get_record_by_pid(item.pid) loan = Loan.get_record_by_pid(loan.pid) assert item.status == ItemStatus.ON_SHELF - assert loan['state'] == LoanState.CANCELLED + assert loan["state"] == LoanState.CANCELLED def test_cancel_request_on_item_at_desk_no_requests_externally( - client, item_at_desk_martigny_patron_and_loan_at_desk, - loc_public_martigny, librarian_martigny, - loc_public_fully): + client, + item_at_desk_martigny_patron_and_loan_at_desk, + loc_public_martigny, + librarian_martigny, + loc_public_fully, +): """Test cancel requests on an at_desk item externally.""" item, patron, loan = item_at_desk_martigny_patron_and_loan_at_desk # the following tests the circulation action CANCEL_REQUEST_2_1_1_1 @@ -121,20 +132,23 @@ def test_cancel_request_on_item_at_desk_no_requests_externally( # if the item library != pickup location, update the at_desk loan. 
# loan ITEM_IN_TRANSIT_TO_HOUSE and item is: in_transit
params = {
- 'pid': loan.pid,
- 'transaction_location_pid': loc_public_fully.pid,
- 'transaction_user_pid': librarian_martigny.pid
+ "pid": loan.pid,
+ "transaction_location_pid": loc_public_fully.pid,
+ "transaction_user_pid": librarian_martigny.pid,
}
item.cancel_item_request(**params)
item = Item.get_record_by_pid(item.pid)
loan = Loan.get_record_by_pid(loan.pid)
assert item.status == ItemStatus.IN_TRANSIT
- assert loan['state'] == LoanState.ITEM_IN_TRANSIT_TO_HOUSE
+ assert loan["state"] == LoanState.ITEM_IN_TRANSIT_TO_HOUSE
def test_cancel_request_on_item_at_desk_no_requests_at_home(
- client, item2_at_desk_martigny_patron_and_loan_at_desk,
- loc_public_martigny, librarian_martigny):
+ client,
+ item2_at_desk_martigny_patron_and_loan_at_desk,
+ loc_public_martigny,
+ librarian_martigny,
+):
"""Test cancel requests on an at_desk item at home."""
item, patron, loan = item2_at_desk_martigny_patron_and_loan_at_desk
# the following tests the circulation action CANCEL_REQUEST_2_1_1_2
@@ -142,21 +156,25 @@ def test_cancel_request_on_item_at_desk_no_requests_at_home(
# if the item library = pickup location, cancels the
# loan and item is: on_shelf
params = {
- 'pid': loan.pid,
- 'transaction_location_pid': loc_public_martigny.pid,
- 'transaction_user_pid': librarian_martigny.pid
+ "pid": loan.pid,
+ "transaction_location_pid": loc_public_martigny.pid,
+ "transaction_user_pid": librarian_martigny.pid,
}
item.cancel_item_request(**params)
item = Item.get_record_by_pid(item.pid)
loan = Loan.get_record_by_pid(loan.pid)
assert item.status == ItemStatus.ON_SHELF
- assert loan['state'] == LoanState.CANCELLED
+ assert loan["state"] == LoanState.CANCELLED
def test_cancel_request_on_item_at_desk_with_requests_externally(
- client, item3_at_desk_martigny_patron_and_loan_at_desk,
- loc_public_martigny, librarian_martigny,
- patron2_martigny, loc_public_fully):
+ client,
+ item3_at_desk_martigny_patron_and_loan_at_desk,
+ loc_public_martigny,
+ librarian_martigny,
+ patron2_martigny,
+ loc_public_fully,
+):
"""Test cancel requests on an at_desk item with requests externally."""
item, patron, loan = item3_at_desk_martigny_patron_and_loan_at_desk
# the following tests the circulation action CANCEL_REQUEST_2_1_2_1
@@ -166,34 +184,37 @@ def test_cancel_request_on_item_at_desk_with_requests_externally(
# first pending loan
params = {
- 'patron_pid': patron2_martigny.pid,
- 'transaction_location_pid': loc_public_fully.pid,
- 'transaction_user_pid': librarian_martigny.pid,
- 'pickup_location_pid': loc_public_fully.pid
+ "patron_pid": patron2_martigny.pid,
+ "transaction_location_pid": loc_public_fully.pid,
+ "transaction_user_pid": librarian_martigny.pid,
+ "pickup_location_pid": loc_public_fully.pid,
}
item, requested_loan = item_record_to_a_specific_loan_state(
- item=item, loan_state=LoanState.PENDING,
- params=params, copy_item=False)
- assert requested_loan['state'] == LoanState.PENDING
+ item=item, loan_state=LoanState.PENDING, params=params, copy_item=False
+ )
+ assert requested_loan["state"] == LoanState.PENDING
params = {
- 'pid': loan.pid,
- 'transaction_location_pid': loc_public_martigny.pid,
- 'transaction_user_pid': librarian_martigny.pid
+ "pid": loan.pid,
+ "transaction_location_pid": loc_public_martigny.pid,
+ "transaction_user_pid": librarian_martigny.pid,
}
item.cancel_item_request(**params)
item = Item.get_record_by_pid(item.pid)
loan = Loan.get_record_by_pid(loan.pid)
requested_loan = Loan.get_record_by_pid(requested_loan.pid)
assert item.status == ItemStatus.IN_TRANSIT
- assert loan['state'] == LoanState.CANCELLED
- assert requested_loan['state'] == LoanState.ITEM_IN_TRANSIT_FOR_PICKUP
+ assert loan["state"] == LoanState.CANCELLED
+ assert requested_loan["state"] == LoanState.ITEM_IN_TRANSIT_FOR_PICKUP
def test_cancel_request_on_item_at_desk_with_requests_at_home(
- client, item4_at_desk_martigny_patron_and_loan_at_desk,
- loc_public_martigny, librarian_martigny,
- patron2_martigny):
+ client,
+ item4_at_desk_martigny_patron_and_loan_at_desk,
+ loc_public_martigny,
+ librarian_martigny,
+ patron2_martigny,
+):
"""Test cancel requests on an at_desk item with requests at home."""
item, patron, loan = item4_at_desk_martigny_patron_and_loan_at_desk
# the following tests the circulation action CANCEL_REQUEST_2_1_2_2
@@ -203,117 +224,126 @@ def test_cancel_request_on_item_at_desk_with_requests_at_home(
# first pending loan
params = {
- 'patron_pid': patron2_martigny.pid,
- 'transaction_location_pid': loc_public_martigny.pid,
- 'transaction_user_pid': librarian_martigny.pid,
- 'pickup_location_pid': loc_public_martigny.pid
+ "patron_pid": patron2_martigny.pid,
+ "transaction_location_pid": loc_public_martigny.pid,
+ "transaction_user_pid": librarian_martigny.pid,
+ "pickup_location_pid": loc_public_martigny.pid,
}
item, requested_loan = item_record_to_a_specific_loan_state(
- item=item, loan_state=LoanState.PENDING,
- params=params, copy_item=False)
- assert requested_loan['state'] == LoanState.PENDING
+ item=item, loan_state=LoanState.PENDING, params=params, copy_item=False
+ )
+ assert requested_loan["state"] == LoanState.PENDING
params = {
- 'pid': loan.pid,
- 'transaction_location_pid': loc_public_martigny.pid,
- 'transaction_user_pid': librarian_martigny.pid
+ "pid": loan.pid,
+ "transaction_location_pid": loc_public_martigny.pid,
+ "transaction_user_pid": librarian_martigny.pid,
}
item.cancel_item_request(**params)
item = Item.get_record_by_pid(item.pid)
loan = Loan.get_record_by_pid(loan.pid)
requested_loan = Loan.get_record_by_pid(requested_loan.pid)
assert item.status == ItemStatus.AT_DESK
- assert loan['state'] == LoanState.CANCELLED
- assert requested_loan['state'] == LoanState.ITEM_AT_DESK
+ assert loan["state"] == LoanState.CANCELLED
+ assert requested_loan["state"] == LoanState.ITEM_AT_DESK
def test_cancel_pending_request_on_item_at_desk(
- client, item5_at_desk_martigny_patron_and_loan_at_desk,
- loc_public_martigny, librarian_martigny,
- patron2_martigny):
+ client,
+ item5_at_desk_martigny_patron_and_loan_at_desk,
+ loc_public_martigny,
+ librarian_martigny,
+ patron2_martigny,
+):
"""Test cancel requests on an at_desk item with requests at home."""
item, patron, loan = item5_at_desk_martigny_patron_and_loan_at_desk
# the following tests the circulation action CANCEL_REQUEST_2_2
# an item at_desk with other pending loans. when a librarian wants to
# cancel one of the pending loans. the item remains at_desk
params = {
- 'patron_pid': patron2_martigny.pid,
- 'transaction_location_pid': loc_public_martigny.pid,
- 'transaction_user_pid': librarian_martigny.pid,
- 'pickup_location_pid': loc_public_martigny.pid
+ "patron_pid": patron2_martigny.pid,
+ "transaction_location_pid": loc_public_martigny.pid,
+ "transaction_user_pid": librarian_martigny.pid,
+ "pickup_location_pid": loc_public_martigny.pid,
}
item, requested_loan = item_record_to_a_specific_loan_state(
- item=item, loan_state=LoanState.PENDING,
- params=params, copy_item=False)
- assert requested_loan['state'] == LoanState.PENDING
+ item=item, loan_state=LoanState.PENDING, params=params, copy_item=False
+ )
+ assert requested_loan["state"] == LoanState.PENDING
params = {
- 'pid': requested_loan.pid,
- 'transaction_location_pid': loc_public_martigny.pid,
- 'transaction_user_pid': librarian_martigny.pid
+ "pid": requested_loan.pid,
+ "transaction_location_pid": loc_public_martigny.pid,
+ "transaction_user_pid": librarian_martigny.pid,
}
item.cancel_item_request(**params)
item = Item.get_record_by_pid(item.pid)
loan = Loan.get_record_by_pid(loan.pid)
requested_loan = Loan.get_record_by_pid(requested_loan.pid)
assert item.status == ItemStatus.AT_DESK
- assert requested_loan['state'] == LoanState.CANCELLED
- assert loan['state'] == LoanState.ITEM_AT_DESK
+ assert requested_loan["state"] == LoanState.CANCELLED
+ assert loan["state"] == LoanState.ITEM_AT_DESK
def test_cancel_item_request_on_item_on_loan(
- client, item_on_loan_martigny_patron_and_loan_on_loan,
- loc_public_martigny, librarian_martigny,
- patron2_martigny):
+ client,
+ item_on_loan_martigny_patron_and_loan_on_loan,
+ loc_public_martigny,
+ librarian_martigny,
+ patron2_martigny,
+):
"""Test cancel requests on an on_loan item."""
item, patron, loan = item_on_loan_martigny_patron_and_loan_on_loan
# the following tests the circulation action CANCEL_REQUEST_3_1
# an item on_loan with no other pending loans. when a librarian wants to
# cancel the on_loan loan. action is not permitted and item remains on_loan.
params = {
- 'pid': loan.pid,
- 'transaction_location_pid': loc_public_martigny.pid,
- 'transaction_user_pid': librarian_martigny.pid
+ "pid": loan.pid,
+ "transaction_location_pid": loc_public_martigny.pid,
+ "transaction_user_pid": librarian_martigny.pid,
}
with pytest.raises(NoCirculationAction):
item.cancel_item_request(**params)
item = Item.get_record_by_pid(item.pid)
loan = Loan.get_record_by_pid(loan.pid)
assert item.status == ItemStatus.ON_LOAN
- assert loan['state'] == LoanState.ITEM_ON_LOAN
+ assert loan["state"] == LoanState.ITEM_ON_LOAN
# the following tests the circulation action CANCEL_REQUEST_3_2
# an item on_loan with other pending loans. when a librarian wants to
# cancel the pending loan. action is permitted and item remains on_loan.
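A second recurring rewrite, visible in the OperationLogsSearch hunks of test_actions_auto_extend.py above, is that black drops backslash continuations in favour of a single parenthesized expression split at the dots. Both spellings are equivalent at runtime; the following self-contained sketch uses a toy stand-in for the query object (the Search class below is illustrative, not the real elasticsearch-dsl API):

class Search:
    """Toy stand-in for an elasticsearch-dsl style fluent query."""

    def filter(self, kind, **kwargs):
        print(kind, kwargs)
        return self

# Pre-black style: explicit backslash continuations.
q = Search()\
    .filter("term", loan__pid="1")\
    .filter("term", record__type="loan")

# black style: one parenthesized expression, one call per line.
q = (
    Search()
    .filter("term", loan__pid="1")
    .filter("term", record__type="loan")
)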
params = { - 'patron_pid': patron2_martigny.pid, - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid, - 'pickup_location_pid': loc_public_martigny.pid + "patron_pid": patron2_martigny.pid, + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, + "pickup_location_pid": loc_public_martigny.pid, } item, requested_loan = item_record_to_a_specific_loan_state( - item=item, loan_state=LoanState.PENDING, - params=params, copy_item=False) - assert requested_loan['state'] == LoanState.PENDING + item=item, loan_state=LoanState.PENDING, params=params, copy_item=False + ) + assert requested_loan["state"] == LoanState.PENDING params = { - 'pid': requested_loan.pid, - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid + "pid": requested_loan.pid, + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, } item.cancel_item_request(**params) item = Item.get_record_by_pid(item.pid) loan = Loan.get_record_by_pid(loan.pid) requested_loan = Loan.get_record_by_pid(requested_loan.pid) assert item.status == ItemStatus.ON_LOAN - assert loan['state'] == LoanState.ITEM_ON_LOAN - assert requested_loan['state'] == LoanState.CANCELLED + assert loan["state"] == LoanState.ITEM_ON_LOAN + assert requested_loan["state"] == LoanState.CANCELLED def test_cancel_item_request_on_item_in_transit_for_pickup( - client, item_in_transit_martigny_patron_and_loan_for_pickup, - loc_public_martigny, librarian_martigny, - patron2_martigny): + client, + item_in_transit_martigny_patron_and_loan_for_pickup, + loc_public_martigny, + librarian_martigny, + patron2_martigny, +): """Test cancel requests on an in_transit for pickup item.""" item, patron, loan = item_in_transit_martigny_patron_and_loan_for_pickup # the following tests the circulation action CANCEL_REQUEST_4_1_1 @@ -321,21 +351,24 @@ def test_cancel_item_request_on_item_in_transit_for_pickup( # librarian wants to cancel the in_transit loan. action is permitted. # update loan, item is: in_transit (IN_TRANSIT_TO_HOUSE). params = { - 'pid': loan.pid, - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid + "pid": loan.pid, + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, } item.cancel_item_request(**params) item = Item.get_record_by_pid(item.pid) loan = Loan.get_record_by_pid(loan.pid) assert item.status == ItemStatus.IN_TRANSIT - assert loan['state'] == LoanState.ITEM_IN_TRANSIT_TO_HOUSE + assert loan["state"] == LoanState.ITEM_IN_TRANSIT_TO_HOUSE def test_cancel_item_request_on_item_in_transit_for_pickup_with_requests( - client, item2_in_transit_martigny_patron_and_loan_for_pickup, - loc_public_martigny, librarian_martigny, - patron2_martigny): + client, + item2_in_transit_martigny_patron_and_loan_for_pickup, + loc_public_martigny, + librarian_martigny, + patron2_martigny, +): """Test cancel requests on an in_transit for pickup item with requests.""" item, patron, loan = item2_in_transit_martigny_patron_and_loan_for_pickup # the following tests the circulation action CANCEL_REQUEST_4_1_2 @@ -344,34 +377,37 @@ def test_cancel_item_request_on_item_in_transit_for_pickup_with_requests( # cancel loan, next pending loan is validated. 
params = { - 'patron_pid': patron2_martigny.pid, - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid, - 'pickup_location_pid': loc_public_martigny.pid + "patron_pid": patron2_martigny.pid, + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, + "pickup_location_pid": loc_public_martigny.pid, } item, requested_loan = item_record_to_a_specific_loan_state( - item=item, loan_state=LoanState.PENDING, - params=params, copy_item=False) - assert requested_loan['state'] == LoanState.PENDING + item=item, loan_state=LoanState.PENDING, params=params, copy_item=False + ) + assert requested_loan["state"] == LoanState.PENDING params = { - 'pid': loan.pid, - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid + "pid": loan.pid, + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, } item.cancel_item_request(**params) item = Item.get_record_by_pid(item.pid) loan = Loan.get_record_by_pid(loan.pid) requested_loan = Loan.get_record_by_pid(requested_loan.pid) assert item.status == ItemStatus.IN_TRANSIT - assert loan['state'] == LoanState.CANCELLED - assert requested_loan['state'] == LoanState.ITEM_IN_TRANSIT_FOR_PICKUP + assert loan["state"] == LoanState.CANCELLED + assert requested_loan["state"] == LoanState.ITEM_IN_TRANSIT_FOR_PICKUP def test_cancel_pending_loan_on_item_in_transit_for_pickup_with_requests( - client, item3_in_transit_martigny_patron_and_loan_for_pickup, - loc_public_martigny, librarian_martigny, - patron2_martigny): + client, + item3_in_transit_martigny_patron_and_loan_for_pickup, + loc_public_martigny, + librarian_martigny, + patron2_martigny, +): """Test cancel pending loan on an in_transit for pickup item.""" item, patron, loan = item3_in_transit_martigny_patron_and_loan_for_pickup # the following tests the circulation action CANCEL_REQUEST_4_2 @@ -380,33 +416,36 @@ def test_cancel_pending_loan_on_item_in_transit_for_pickup_with_requests( # item remains in_transit params = { - 'patron_pid': patron2_martigny.pid, - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid, - 'pickup_location_pid': loc_public_martigny.pid + "patron_pid": patron2_martigny.pid, + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, + "pickup_location_pid": loc_public_martigny.pid, } item, requested_loan = item_record_to_a_specific_loan_state( - item=item, loan_state=LoanState.PENDING, - params=params, copy_item=False) - assert requested_loan['state'] == LoanState.PENDING + item=item, loan_state=LoanState.PENDING, params=params, copy_item=False + ) + assert requested_loan["state"] == LoanState.PENDING params = { - 'pid': requested_loan.pid, - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid + "pid": requested_loan.pid, + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, } item.cancel_item_request(**params) item = Item.get_record_by_pid(item.pid) loan = Loan.get_record_by_pid(loan.pid) requested_loan = Loan.get_record_by_pid(requested_loan.pid) assert item.status == ItemStatus.IN_TRANSIT - assert loan['state'] == LoanState.ITEM_IN_TRANSIT_FOR_PICKUP - assert requested_loan['state'] == LoanState.CANCELLED + assert loan["state"] == LoanState.ITEM_IN_TRANSIT_FOR_PICKUP + assert requested_loan["state"] == 
LoanState.CANCELLED def test_cancel_request_on_item_in_transit_to_house( - client, item_in_transit_martigny_patron_and_loan_to_house, - loc_public_martigny, librarian_martigny): + client, + item_in_transit_martigny_patron_and_loan_to_house, + loc_public_martigny, + librarian_martigny, +): """Test cancel request loan on an in_transit to_house item.""" item, patron, loan = item_in_transit_martigny_patron_and_loan_to_house # the following tests the circulation action CANCEL_REQUEST_5_1_1 @@ -414,21 +453,24 @@ def test_cancel_request_on_item_in_transit_to_house( # librarian wants to cancel the in_transit loan. action is permitted. # the item will be checked in. at home, will go on_shelf params = { - 'pid': loan.pid, - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid + "pid": loan.pid, + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, } item.cancel_item_request(**params) item = Item.get_record_by_pid(item.pid) loan = Loan.get_record_by_pid(loan.pid) assert item.status == ItemStatus.ON_SHELF - assert loan['state'] == LoanState.CANCELLED + assert loan["state"] == LoanState.CANCELLED def test_cancel_request_on_item_in_transit_to_house_with_requests( - client, item2_in_transit_martigny_patron_and_loan_to_house, - loc_public_martigny, librarian_martigny, - patron2_martigny): + client, + item2_in_transit_martigny_patron_and_loan_to_house, + loc_public_martigny, + librarian_martigny, + patron2_martigny, +): """Test cancel request on an in_transit to_house item with requests.""" item, patron, loan = item2_in_transit_martigny_patron_and_loan_to_house # the following tests the circulation action CANCEL_REQUEST_5_1_2 @@ -436,34 +478,37 @@ def test_cancel_request_on_item_in_transit_to_house_with_requests( # librarian wants to cancel the in_transit loan. action is permitted. # the loan will be cancelled. and first pending loan will be validated. 
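The signature rewrites that open nearly every hunk in these test files follow one rule: when a def no longer fits on a single line, black indents the parameters four spaces (instead of the old eight-space continuation), puts one parameter per line, appends a trailing comma, and dedents the closing parenthesis onto its own line. A sketch of that behaviour using black's API (the function and parameter names below are made up for illustration):

import black

src = (
    "def test_example(item_at_desk_martigny_patron_and_loan_at_desk, "
    "loc_public_martigny, librarian_martigny, patron2_martigny):\n"
    "    pass\n"
)
# Prints the def with one parameter per line, a trailing comma, and the
# closing "):" dedented to column zero, as in the hunks around this point.
print(black.format_str(src, mode=black.Mode()))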
params = { - 'patron_pid': patron2_martigny.pid, - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid, - 'pickup_location_pid': loc_public_martigny.pid + "patron_pid": patron2_martigny.pid, + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, + "pickup_location_pid": loc_public_martigny.pid, } item, requested_loan = item_record_to_a_specific_loan_state( - item=item, loan_state=LoanState.PENDING, - params=params, copy_item=False) - assert requested_loan['state'] == LoanState.PENDING + item=item, loan_state=LoanState.PENDING, params=params, copy_item=False + ) + assert requested_loan["state"] == LoanState.PENDING params = { - 'pid': loan.pid, - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid + "pid": loan.pid, + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, } item.cancel_item_request(**params) item = Item.get_record_by_pid(item.pid) loan = Loan.get_record_by_pid(loan.pid) requested_loan = Loan.get_record_by_pid(requested_loan.pid) assert item.status == ItemStatus.IN_TRANSIT - assert loan['state'] == LoanState.CANCELLED - assert requested_loan['state'] == LoanState.ITEM_IN_TRANSIT_FOR_PICKUP + assert loan["state"] == LoanState.CANCELLED + assert requested_loan["state"] == LoanState.ITEM_IN_TRANSIT_FOR_PICKUP def test_cancel_pending_on_item_in_transit_to_house( - client, item3_in_transit_martigny_patron_and_loan_to_house, - loc_public_martigny, librarian_martigny, - patron2_martigny): + client, + item3_in_transit_martigny_patron_and_loan_to_house, + loc_public_martigny, + librarian_martigny, + patron2_martigny, +): """Test cancel pending loan on an in_transit to_house item.""" item, patron, loan = item3_in_transit_martigny_patron_and_loan_to_house # the following tests the circulation action CANCEL_REQUEST_5_2 @@ -471,25 +516,25 @@ def test_cancel_pending_on_item_in_transit_to_house( # librarian wants to cancel the pending loan. action is permitted. # the loan will be cancelled. and in_transit loan remains in transit. 
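Whether a collection stays exploded or is collapsed is decided by black's "magic trailing comma". A literal that fits in 88 columns and carries no trailing comma is joined onto one line, which is what happens to the opening_hours dicts in test_actions_checkout.py further below; a trailing comma forces one element per line, and black itself adds one whenever it explodes a collection, which is why every params dict in these hunks gains a comma after its last entry. A short illustrative sketch:

import black

mode = black.Mode()  # defaults: 88-column lines, magic trailing comma honoured

# No trailing comma and it fits: black joins the dict onto a single line.
print(black.format_str('x = {\n    "a": 1,\n    "b": 2\n}\n', mode=mode))

# The trailing comma is "magic": black keeps one element per line.
print(black.format_str('x = {"a": 1, "b": 2,}\n', mode=mode))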
params = { - 'patron_pid': patron2_martigny.pid, - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid, - 'pickup_location_pid': loc_public_martigny.pid + "patron_pid": patron2_martigny.pid, + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, + "pickup_location_pid": loc_public_martigny.pid, } item, requested_loan = item_record_to_a_specific_loan_state( - item=item, loan_state=LoanState.PENDING, - params=params, copy_item=False) - assert requested_loan['state'] == LoanState.PENDING + item=item, loan_state=LoanState.PENDING, params=params, copy_item=False + ) + assert requested_loan["state"] == LoanState.PENDING params = { - 'pid': requested_loan.pid, - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid + "pid": requested_loan.pid, + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, } item.cancel_item_request(**params) item = Item.get_record_by_pid(item.pid) loan = Loan.get_record_by_pid(loan.pid) requested_loan = Loan.get_record_by_pid(requested_loan.pid) assert item.status == ItemStatus.IN_TRANSIT - assert loan['state'] == LoanState.ITEM_IN_TRANSIT_TO_HOUSE - assert requested_loan['state'] == LoanState.CANCELLED + assert loan["state"] == LoanState.ITEM_IN_TRANSIT_TO_HOUSE + assert requested_loan["state"] == LoanState.CANCELLED diff --git a/tests/ui/circulation/test_actions_checkin.py b/tests/ui/circulation/test_actions_checkin.py index 5dd6239f3a..2b66df8dcb 100644 --- a/tests/ui/circulation/test_actions_checkin.py +++ b/tests/ui/circulation/test_actions_checkin.py @@ -32,17 +32,24 @@ def test_checkin_on_item_on_shelf_no_requests( - item_lib_martigny, patron_martigny, lib_martigny, - loc_public_martigny, librarian_martigny, lib_fully, - patron2_martigny, loc_public_fully, circulation_policies): + item_lib_martigny, + patron_martigny, + lib_martigny, + loc_public_martigny, + librarian_martigny, + lib_fully, + patron2_martigny, + loc_public_fully, + circulation_policies, +): """Test checkin on an on_shelf item with no requests.""" # the following tests the circulation action CHECKIN_1_1_1 # an on_shelf item with no pending requests. when the item library equal # to the transaction library, there is no checkin action possible. # no circulation action will be performed. params = { - 'transaction_library_pid': lib_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid + "transaction_library_pid": lib_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, } with pytest.raises(NoCirculationAction): item, actions = item_lib_martigny.checkin(**params) @@ -52,12 +59,12 @@ def test_checkin_on_item_on_shelf_no_requests( # for an item on_shelf with no pending loans, the item library does not # equal to the transaction library, the item assigned the in_transit # status and no circulation action will be performed. 
- params['transaction_library_pid'] = lib_fully.pid + params["transaction_library_pid"] = lib_fully.pid with pytest.raises(NoCirculationAction): item, actions = item_lib_martigny.checkin(**params) item = Item.get_record_by_pid(item_lib_martigny.pid) assert item.status == ItemStatus.IN_TRANSIT - params['transaction_library_pid'] = lib_martigny.pid + params["transaction_library_pid"] = lib_martigny.pid with pytest.raises(NoCirculationAction): item, actions = item_lib_martigny.checkin(**params) item = Item.get_record_by_pid(item_lib_martigny.pid) @@ -65,9 +72,14 @@ def test_checkin_on_item_on_shelf_no_requests( def test_checkin_on_item_on_shelf_with_requests( - item_on_shelf_martigny_patron_and_loan_pending, - loc_public_martigny, librarian_martigny, item_lib_martigny_data, - patron2_martigny, loc_public_fully, lib_martigny): + item_on_shelf_martigny_patron_and_loan_pending, + loc_public_martigny, + librarian_martigny, + item_lib_martigny_data, + patron2_martigny, + loc_public_fully, + lib_martigny, +): """Test checkin on an on_shelf item with requests.""" item, patron, loan = item_on_shelf_martigny_patron_and_loan_pending # the following tests the circulation action CHECKIN_1_2_1 @@ -77,39 +89,40 @@ def test_checkin_on_item_on_shelf_with_requests( # validate_request circulation action will be performed. # create a second pending loan on same item - item_pid = item_lib_martigny_data.get('pid') - item_es = ItemsSearch().filter('term', pid=item_pid)\ - .execute().hits.hits[0]._source - assert item_es['current_pending_requests'] == 0 + item_pid = item_lib_martigny_data.get("pid") + item_es = ItemsSearch().filter("term", pid=item_pid).execute().hits.hits[0]._source + assert item_es["current_pending_requests"] == 0 params = { - 'patron_pid': patron2_martigny.pid, - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid, - 'pickup_location_pid': loc_public_fully.pid + "patron_pid": patron2_martigny.pid, + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, + "pickup_location_pid": loc_public_fully.pid, } item, requested_loan = item_record_to_a_specific_loan_state( - item=item, loan_state=LoanState.PENDING, - params=params, copy_item=False) - assert requested_loan['state'] == LoanState.PENDING - item_es = ItemsSearch().filter('term', pid=item.pid)\ - .execute().hits.hits[0]._source - assert item_es['current_pending_requests'] == 2 + item=item, loan_state=LoanState.PENDING, params=params, copy_item=False + ) + assert requested_loan["state"] == LoanState.PENDING + item_es = ItemsSearch().filter("term", pid=item.pid).execute().hits.hits[0]._source + assert item_es["current_pending_requests"] == 2 params = { - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, } item, actions = item.checkin(**params) assert item.status == ItemStatus.AT_DESK - assert Loan.get_record_by_pid(loan.pid)['state'] == LoanState.ITEM_AT_DESK - assert Loan.get_record_by_pid(requested_loan.pid)['state'] == \ - LoanState.PENDING + assert Loan.get_record_by_pid(loan.pid)["state"] == LoanState.ITEM_AT_DESK + assert Loan.get_record_by_pid(requested_loan.pid)["state"] == LoanState.PENDING def test_checkin_on_item_on_shelf_with_requests_external( - item_on_shelf_fully_patron_and_loan_pending, - loc_public_fully, librarian_martigny, - patron2_martigny, lib_martigny, loc_public_martigny): 
+ item_on_shelf_fully_patron_and_loan_pending, + loc_public_fully, + librarian_martigny, + patron2_martigny, + lib_martigny, + loc_public_martigny, +): """Test checkin on an on_shelf item with requests.""" item, patron, loan = item_on_shelf_fully_patron_and_loan_pending # the following tests the circulation action CHECKIN_1_2_2 @@ -120,33 +133,37 @@ def test_checkin_on_item_on_shelf_with_requests_external( # create a second pending loan on same item params = { - 'patron_pid': patron2_martigny.pid, - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid, - 'pickup_location_pid': loc_public_martigny.pid + "patron_pid": patron2_martigny.pid, + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, + "pickup_location_pid": loc_public_martigny.pid, } item, requested_loan = item_record_to_a_specific_loan_state( - item=item, loan_state=LoanState.PENDING, - params=params, copy_item=False) - assert requested_loan['state'] == LoanState.PENDING + item=item, loan_state=LoanState.PENDING, params=params, copy_item=False + ) + assert requested_loan["state"] == LoanState.PENDING params = { - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, } item, actions = item.checkin(**params) item = Item.get_record_by_pid(item.pid) assert item.status == ItemStatus.IN_TRANSIT - assert Loan.get_record_by_pid(loan.pid)['state'] == \ - LoanState.ITEM_IN_TRANSIT_FOR_PICKUP - assert Loan.get_record_by_pid(requested_loan.pid)['state'] == \ - LoanState.PENDING + assert ( + Loan.get_record_by_pid(loan.pid)["state"] + == LoanState.ITEM_IN_TRANSIT_FOR_PICKUP + ) + assert Loan.get_record_by_pid(requested_loan.pid)["state"] == LoanState.PENDING def test_checkin_on_item_at_desk( - item_at_desk_martigny_patron_and_loan_at_desk, - librarian_martigny, loc_public_fully, - lib_martigny, loc_public_martigny): + item_at_desk_martigny_patron_and_loan_at_desk, + librarian_martigny, + loc_public_fully, + lib_martigny, + loc_public_martigny, +): """Test checkin on an at_desk item.""" item, patron, loan = item_at_desk_martigny_patron_and_loan_at_desk # the following tests the circulation action CHECKIN_2_1 @@ -154,8 +171,8 @@ def test_checkin_on_item_at_desk( # item_at_desk loan does equal to the transaction library # no action is done, item remains at_desk params = { - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, } with pytest.raises(NoCirculationAction): item, actions = item.checkin(**params) @@ -168,20 +185,23 @@ def test_checkin_on_item_at_desk( # to IN_TRANSIT_FOR_PICKUP params = { - 'transaction_location_pid': loc_public_fully.pid, - 'transaction_user_pid': librarian_martigny.pid + "transaction_location_pid": loc_public_fully.pid, + "transaction_user_pid": librarian_martigny.pid, } with pytest.raises(NoCirculationAction): item, actions = item.checkin(**params) assert item.status == ItemStatus.IN_TRANSIT - assert loan['state'] == LoanState.IN_TRANSIT_FOR_PICKUP + assert loan["state"] == LoanState.IN_TRANSIT_FOR_PICKUP def test_checkin_on_item_on_loan( - item_on_loan_martigny_patron_and_loan_on_loan, - item2_on_loan_martigny_patron_and_loan_on_loan, - item_on_loan_fully_patron_and_loan_on_loan, loc_public_fully, - 
loc_public_martigny, librarian_martigny): + item_on_loan_martigny_patron_and_loan_on_loan, + item2_on_loan_martigny_patron_and_loan_on_loan, + item_on_loan_fully_patron_and_loan_on_loan, + loc_public_fully, + loc_public_martigny, + librarian_martigny, +): """Test checkin on an on_loan item.""" item, patron, loan = item_on_loan_martigny_patron_and_loan_on_loan # the following tests the circulation action CHECKIN_3_1_1 @@ -189,48 +209,50 @@ def test_checkin_on_item_on_loan( # checkin the item and item becomes on_shelf # case when the loan pid is given as a parameter params = { - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid, - 'pid': loan.pid + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, + "pid": loan.pid, } item, actions = item.checkin(**params) item = Item.get_record_by_pid(item.pid) loan = Loan.get_record_by_pid(loan.pid) assert item.status == ItemStatus.ON_SHELF - assert loan['state'] == LoanState.ITEM_RETURNED + assert loan["state"] == LoanState.ITEM_RETURNED # case when the loan pid is not given as a parameter item, patron, loan = item_on_loan_fully_patron_and_loan_on_loan params = { - 'transaction_location_pid': loc_public_fully.pid, - 'transaction_user_pid': librarian_martigny.pid + "transaction_location_pid": loc_public_fully.pid, + "transaction_user_pid": librarian_martigny.pid, } item, actions = item.checkin(**params) item = Item.get_record_by_pid(item.pid) loan = Loan.get_record_by_pid(loan.pid) assert item.status == ItemStatus.ON_SHELF - assert loan['state'] == LoanState.ITEM_RETURNED + assert loan["state"] == LoanState.ITEM_RETURNED # the following tests the circulation action CHECKIN_3_1_2 # for an item on_loan, the item library does not equal the transaction # library, checkin the item and item becomes in_transit item, patron, loan = item2_on_loan_martigny_patron_and_loan_on_loan params = { - 'transaction_location_pid': loc_public_fully.pid, - 'transaction_user_pid': librarian_martigny.pid, - 'pid': loan.pid + "transaction_location_pid": loc_public_fully.pid, + "transaction_user_pid": librarian_martigny.pid, + "pid": loan.pid, } item, actions = item.checkin(**params) item = Item.get_record_by_pid(item.pid) loan = Loan.get_record_by_pid(loan.pid) assert item.status == ItemStatus.IN_TRANSIT - assert loan['state'] == LoanState.ITEM_IN_TRANSIT_TO_HOUSE + assert loan["state"] == LoanState.ITEM_IN_TRANSIT_TO_HOUSE def test_checkin_on_item_on_loan_with_requests( - item3_on_loan_martigny_patron_and_loan_on_loan, - loc_public_martigny, librarian_martigny, - patron2_martigny): + item3_on_loan_martigny_patron_and_loan_on_loan, + loc_public_martigny, + librarian_martigny, + patron2_martigny, +): """Test checkin on an on_loan item with requests at local library.""" # the following tests the circulation action CHECKIN_3_2_1 # for an item on_loan, with pending requests. 
when the pickup library of @@ -243,15 +265,15 @@ def test_checkin_on_item_on_loan_with_requests( item, patron, loan = item3_on_loan_martigny_patron_and_loan_on_loan # create a request on the same item one day after the first loan - tomorrow = ciso8601.parse_datetime(loan['start_date']) + timedelta(days=10) + tomorrow = ciso8601.parse_datetime(loan["start_date"]) + timedelta(days=10) with freeze_time(tomorrow.isoformat()): item, actions = item.request( pickup_location_pid=loc_public_martigny.pid, patron_pid=patron2_martigny.pid, transaction_location_pid=loc_public_martigny.pid, - transaction_user_pid=librarian_martigny.pid + transaction_user_pid=librarian_martigny.pid, ) - requested_loan_pid = actions[LoanAction.REQUEST].get('pid') + requested_loan_pid = actions[LoanAction.REQUEST].get("pid") requested_loan = Loan.get_record_by_pid(requested_loan_pid) # Check-in the item @@ -268,7 +290,7 @@ def test_checkin_on_item_on_loan_with_requests( patron_pid=patron2_martigny.pid, transaction_location_pid=loc_public_martigny.pid, transaction_user_pid=librarian_martigny.pid, - pickup_location_pid=loc_public_martigny.pid + pickup_location_pid=loc_public_martigny.pid, ) item = Item.get_record_by_pid(item.pid) @@ -276,17 +298,21 @@ def test_checkin_on_item_on_loan_with_requests( requested_loan = Loan.get_record_by_pid(requested_loan.pid) assert item.status == ItemStatus.AT_DESK - assert loan['state'] == LoanState.ITEM_RETURNED - assert requested_loan['state'] == LoanState.ITEM_AT_DESK - trans_date = ciso8601.parse_datetime(requested_loan['transaction_date']) - assert trans_date.strftime('%Y%m%d') == next_day.strftime('%Y%m%d') + assert loan["state"] == LoanState.ITEM_RETURNED + assert requested_loan["state"] == LoanState.ITEM_AT_DESK + trans_date = ciso8601.parse_datetime(requested_loan["transaction_date"]) + assert trans_date.strftime("%Y%m%d") == next_day.strftime("%Y%m%d") def test_checkin_on_item_on_loan_with_requests_externally( - item4_on_loan_martigny_patron_and_loan_on_loan, - item5_on_loan_martigny_patron_and_loan_on_loan, - loc_public_martigny, librarian_martigny, - patron2_martigny, loc_public_fully, loc_public_saxon): + item4_on_loan_martigny_patron_and_loan_on_loan, + item5_on_loan_martigny_patron_and_loan_on_loan, + loc_public_martigny, + librarian_martigny, + patron2_martigny, + loc_public_fully, + loc_public_saxon, +): """Test checkin on an on_loan item with requests at an external library.""" item, patron, loan = item4_on_loan_martigny_patron_and_loan_on_loan # the following tests the circulation action CHECKIN_3_2_2_1 @@ -297,23 +323,23 @@ def test_checkin_on_item_on_loan_with_requests_externally( # the pending loan becomes ITEM_IN_TRANSIT_FOR_PICKUP params = { - 'patron_pid': patron2_martigny.pid, - 'transaction_location_pid': loc_public_fully.pid, - 'transaction_user_pid': librarian_martigny.pid, - 'pickup_location_pid': loc_public_martigny.pid + "patron_pid": patron2_martigny.pid, + "transaction_location_pid": loc_public_fully.pid, + "transaction_user_pid": librarian_martigny.pid, + "pickup_location_pid": loc_public_martigny.pid, } item, requested_loan = item_record_to_a_specific_loan_state( - item=item, loan_state=LoanState.PENDING, params=params, - copy_item=False) + item=item, loan_state=LoanState.PENDING, params=params, copy_item=False + ) item, actions = item.checkin(**params) item = Item.get_record_by_pid(item.pid) loan = Loan.get_record_by_pid(loan.pid) requested_loan = Loan.get_record_by_pid(requested_loan.pid) assert item.status == ItemStatus.IN_TRANSIT - assert loan['state'] 
== LoanState.CANCELLED - assert requested_loan['state'] == LoanState.ITEM_IN_TRANSIT_FOR_PICKUP + assert loan["state"] == LoanState.CANCELLED + assert requested_loan["state"] == LoanState.ITEM_IN_TRANSIT_FOR_PICKUP item, patron, loan = item5_on_loan_martigny_patron_and_loan_on_loan # the following tests the circulation action CHECKIN_3_2_2_2 @@ -324,29 +350,31 @@ def test_checkin_on_item_on_loan_with_requests_externally( # library, the pending loan becomes ITEM_IN_TRANSIT_FOR_PICKUP params = { - 'patron_pid': patron2_martigny.pid, - 'transaction_location_pid': loc_public_saxon.pid, - 'transaction_user_pid': librarian_martigny.pid, - 'pickup_location_pid': loc_public_fully.pid + "patron_pid": patron2_martigny.pid, + "transaction_location_pid": loc_public_saxon.pid, + "transaction_user_pid": librarian_martigny.pid, + "pickup_location_pid": loc_public_fully.pid, } item, requested_loan = item_record_to_a_specific_loan_state( - item=item, loan_state=LoanState.PENDING, params=params, - copy_item=False) + item=item, loan_state=LoanState.PENDING, params=params, copy_item=False + ) item, actions = item.checkin(**params) item = Item.get_record_by_pid(item.pid) loan = Loan.get_record_by_pid(loan.pid) requested_loan = Loan.get_record_by_pid(requested_loan.pid) assert item.status == ItemStatus.IN_TRANSIT - assert loan['state'] == LoanState.CANCELLED - assert requested_loan['state'] == LoanState.ITEM_IN_TRANSIT_FOR_PICKUP + assert loan["state"] == LoanState.CANCELLED + assert requested_loan["state"] == LoanState.ITEM_IN_TRANSIT_FOR_PICKUP def test_checkin_on_item_in_transit_for_pickup( - item_in_transit_martigny_patron_and_loan_for_pickup, - loc_public_martigny, librarian_martigny, - loc_public_fully): + item_in_transit_martigny_patron_and_loan_for_pickup, + loc_public_martigny, + librarian_martigny, + loc_public_fully, +): """Test checkin on an in_transit item for pickup.""" item, patron, loan = item_in_transit_martigny_patron_and_loan_for_pickup @@ -356,21 +384,24 @@ def test_checkin_on_item_in_transit_for_pickup( # receive of the item is done and the loan becomes ITEM_AT_DESK, the item # becomes at_desk params = { - 'patron_pid': patron.pid, - 'transaction_location_pid': loc_public_fully.pid, - 'transaction_user_pid': librarian_martigny.pid + "patron_pid": patron.pid, + "transaction_location_pid": loc_public_fully.pid, + "transaction_user_pid": librarian_martigny.pid, } item, actions = item.checkin(**params) item = Item.get_record_by_pid(item.pid) loan = Loan.get_record_by_pid(loan.pid) assert item.status == ItemStatus.AT_DESK - assert loan['state'] == LoanState.ITEM_AT_DESK + assert loan["state"] == LoanState.ITEM_AT_DESK def test_checkin_on_item_in_transit_for_pickup_externally( - item2_in_transit_martigny_patron_and_loan_for_pickup, - loc_public_martigny, librarian_martigny, - loc_public_fully, loc_public_saxon): + item2_in_transit_martigny_patron_and_loan_for_pickup, + loc_public_martigny, + librarian_martigny, + loc_public_fully, + loc_public_saxon, +): """Test checkin on an in_transit item for pickup.""" item, patron, loan = item2_in_transit_martigny_patron_and_loan_for_pickup @@ -379,21 +410,23 @@ def test_checkin_on_item_in_transit_for_pickup_externally( # library of the loan does not equal to the transaction library, no action # is done, the item remains in_transit params = { - 'patron_pid': patron.pid, - 'transaction_location_pid': loc_public_saxon.pid, - 'transaction_user_pid': librarian_martigny.pid + "patron_pid": patron.pid, + "transaction_location_pid": loc_public_saxon.pid, + 
"transaction_user_pid": librarian_martigny.pid, } with pytest.raises(NoCirculationAction): item, actions = item.checkin(**params) item = Item.get_record_by_pid(item.pid) loan = Loan.get_record_by_pid(loan.pid) assert item.status == ItemStatus.IN_TRANSIT - assert loan['state'] == LoanState.ITEM_IN_TRANSIT_FOR_PICKUP + assert loan["state"] == LoanState.ITEM_IN_TRANSIT_FOR_PICKUP def test_checkin_on_item_in_transit_to_house( - item_in_transit_martigny_patron_and_loan_to_house, - loc_public_martigny, librarian_martigny): + item_in_transit_martigny_patron_and_loan_to_house, + loc_public_martigny, + librarian_martigny, +): """Test checkin on an in_transit item to house.""" item, patron, loan = item_in_transit_martigny_patron_and_loan_to_house @@ -402,20 +435,23 @@ def test_checkin_on_item_in_transit_to_house( # library does equal to the item library, will receive the item. # the item becomes on_shelf and the loan is terminated. params = { - 'patron_pid': patron.pid, - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid + "patron_pid": patron.pid, + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, } item, actions = item.checkin(**params) item = Item.get_record_by_pid(item.pid) loan = Loan.get_record_by_pid(loan.pid) assert item.status == ItemStatus.ON_SHELF - assert loan['state'] == LoanState.ITEM_RETURNED + assert loan["state"] == LoanState.ITEM_RETURNED def test_checkin_on_item_in_transit_to_house_externally( - item2_in_transit_martigny_patron_and_loan_to_house, - loc_public_martigny, librarian_martigny, loc_public_saxon): + item2_in_transit_martigny_patron_and_loan_to_house, + loc_public_martigny, + librarian_martigny, + loc_public_saxon, +): """Test checkin on an in_transit item to house.""" item, patron, loan = item2_in_transit_martigny_patron_and_loan_to_house @@ -424,22 +460,24 @@ def test_checkin_on_item_in_transit_to_house_externally( # library does not equal to the item library, will receive the item. # the item becomes on_shelf and the loan is terminated. params = { - 'patron_pid': patron.pid, - 'transaction_location_pid': loc_public_saxon.pid, - 'transaction_user_pid': librarian_martigny.pid + "patron_pid": patron.pid, + "transaction_location_pid": loc_public_saxon.pid, + "transaction_user_pid": librarian_martigny.pid, } with pytest.raises(NoCirculationAction): item, actions = item.checkin(**params) item = Item.get_record_by_pid(item.pid) loan = Loan.get_record_by_pid(loan.pid) assert item.status == ItemStatus.IN_TRANSIT - assert loan['state'] == LoanState.ITEM_IN_TRANSIT_TO_HOUSE + assert loan["state"] == LoanState.ITEM_IN_TRANSIT_TO_HOUSE def test_checkin_on_item_in_transit_to_house_with_requests( - item3_in_transit_martigny_patron_and_loan_to_house, - loc_public_martigny, librarian_martigny, - patron2_martigny): + item3_in_transit_martigny_patron_and_loan_to_house, + loc_public_martigny, + librarian_martigny, + patron2_martigny, +): """Test checkin on an in_transit item to house.""" item, patron, loan = item3_in_transit_martigny_patron_and_loan_to_house @@ -451,19 +489,19 @@ def test_checkin_on_item_in_transit_to_house_with_requests( # the item becomes at_desk and the loan is terminated. 
# and will validate the first pending loan params = { - 'patron_pid': patron2_martigny.pid, - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid, - 'pickup_location_pid': loc_public_martigny.pid + "patron_pid": patron2_martigny.pid, + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, + "pickup_location_pid": loc_public_martigny.pid, } item, requested_loan = item_record_to_a_specific_loan_state( - item=item, loan_state=LoanState.PENDING, - params=params, copy_item=False) - assert requested_loan['state'] == LoanState.PENDING + item=item, loan_state=LoanState.PENDING, params=params, copy_item=False + ) + assert requested_loan["state"] == LoanState.PENDING params = { - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid, - 'pickup_location_pid': loc_public_martigny.pid + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, + "pickup_location_pid": loc_public_martigny.pid, } item, actions = item.checkin(**params) @@ -471,14 +509,17 @@ def test_checkin_on_item_in_transit_to_house_with_requests( loan = Loan.get_record_by_pid(loan.pid) requested_loan = Loan.get_record_by_pid(requested_loan.pid) assert item.status == ItemStatus.AT_DESK - assert loan['state'] == LoanState.ITEM_RETURNED - assert requested_loan['state'] == LoanState.ITEM_AT_DESK + assert loan["state"] == LoanState.ITEM_RETURNED + assert requested_loan["state"] == LoanState.ITEM_AT_DESK def test_checkin_on_item_in_transit_to_house_with_requests_externally( - item4_in_transit_martigny_patron_and_loan_to_house, - loc_public_martigny, librarian_martigny, - patron2_martigny, loc_public_saxon): + item4_in_transit_martigny_patron_and_loan_to_house, + loc_public_martigny, + librarian_martigny, + patron2_martigny, + loc_public_saxon, +): """Test checkin on an in_transit item to house.""" item, patron, loan = item4_in_transit_martigny_patron_and_loan_to_house @@ -490,18 +531,18 @@ def test_checkin_on_item_in_transit_to_house_with_requests_externally( # the item becomes at_desk and will validate the first pending loan params = { - 'patron_pid': patron2_martigny.pid, - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid, - 'pickup_location_pid': loc_public_saxon.pid + "patron_pid": patron2_martigny.pid, + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, + "pickup_location_pid": loc_public_saxon.pid, } item, requested_loan = item_record_to_a_specific_loan_state( - item=item, loan_state=LoanState.PENDING, - params=params, copy_item=False) - assert requested_loan['state'] == LoanState.PENDING + item=item, loan_state=LoanState.PENDING, params=params, copy_item=False + ) + assert requested_loan["state"] == LoanState.PENDING params = { - 'transaction_location_pid': loc_public_saxon.pid, - 'transaction_user_pid': librarian_martigny.pid + "transaction_location_pid": loc_public_saxon.pid, + "transaction_user_pid": librarian_martigny.pid, } item, actions = item.checkin(**params) @@ -509,14 +550,17 @@ def test_checkin_on_item_in_transit_to_house_with_requests_externally( loan = Loan.get_record_by_pid(loan.pid) requested_loan = Loan.get_record_by_pid(requested_loan.pid) assert item.status == ItemStatus.AT_DESK - assert loan['state'] == LoanState.CANCELLED - assert requested_loan['state'] == LoanState.ITEM_AT_DESK + assert loan["state"] == 
LoanState.CANCELLED + assert requested_loan["state"] == LoanState.ITEM_AT_DESK def test_checkin_on_item_in_transit_to_house_with_external_loans( - item5_in_transit_martigny_patron_and_loan_to_house, - loc_public_martigny, librarian_martigny, - patron2_martigny, loc_public_saxon): + item5_in_transit_martigny_patron_and_loan_to_house, + loc_public_martigny, + librarian_martigny, + patron2_martigny, + loc_public_saxon, +): """Test checkin on an in_transit item to house.""" item, patron, loan = item5_in_transit_martigny_patron_and_loan_to_house @@ -527,18 +571,18 @@ def test_checkin_on_item_in_transit_to_house_with_external_loans( # the to the item library, no action performed. # the item remains at_desk params = { - 'patron_pid': patron2_martigny.pid, - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid, - 'pickup_location_pid': loc_public_martigny.pid + "patron_pid": patron2_martigny.pid, + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, + "pickup_location_pid": loc_public_martigny.pid, } item, requested_loan = item_record_to_a_specific_loan_state( - item=item, loan_state=LoanState.PENDING, - params=params, copy_item=False) - assert requested_loan['state'] == LoanState.PENDING + item=item, loan_state=LoanState.PENDING, params=params, copy_item=False + ) + assert requested_loan["state"] == LoanState.PENDING params = { - 'transaction_location_pid': loc_public_saxon.pid, - 'transaction_user_pid': librarian_martigny.pid + "transaction_location_pid": loc_public_saxon.pid, + "transaction_user_pid": librarian_martigny.pid, } with pytest.raises(NoCirculationAction): @@ -547,14 +591,18 @@ def test_checkin_on_item_in_transit_to_house_with_external_loans( loan = Loan.get_record_by_pid(loan.pid) requested_loan = Loan.get_record_by_pid(requested_loan.pid) assert item.status == ItemStatus.IN_TRANSIT - assert loan['state'] == LoanState.ITEM_IN_TRANSIT_TO_HOUSE - assert requested_loan['state'] == LoanState.PENDING + assert loan["state"] == LoanState.ITEM_IN_TRANSIT_TO_HOUSE + assert requested_loan["state"] == LoanState.PENDING def test_checkin_on_item_in_transit_to_house_with_external_loans_transit( - item6_in_transit_martigny_patron_and_loan_to_house, - loc_public_martigny, librarian_martigny, - patron2_martigny, loc_public_saxon, loc_public_saillon): + item6_in_transit_martigny_patron_and_loan_to_house, + loc_public_martigny, + librarian_martigny, + patron2_martigny, + loc_public_saxon, + loc_public_saillon, +): """Test checkin on an in_transit item to house.""" item, patron, loan = item6_in_transit_martigny_patron_and_loan_to_house @@ -566,18 +614,18 @@ def test_checkin_on_item_in_transit_to_house_with_external_loans_transit( # the first pending request. 
     # ITEM_IN_TRANSIT_FOR_PICKUP
     params = {
-        'patron_pid': patron2_martigny.pid,
-        'transaction_location_pid': loc_public_saxon.pid,
-        'transaction_user_pid': librarian_martigny.pid,
-        'pickup_location_pid': loc_public_saxon.pid
+        "patron_pid": patron2_martigny.pid,
+        "transaction_location_pid": loc_public_saxon.pid,
+        "transaction_user_pid": librarian_martigny.pid,
+        "pickup_location_pid": loc_public_saxon.pid,
     }
     item, requested_loan = item_record_to_a_specific_loan_state(
-        item=item, loan_state=LoanState.PENDING,
-        params=params, copy_item=False)
-    assert requested_loan['state'] == LoanState.PENDING
+        item=item, loan_state=LoanState.PENDING, params=params, copy_item=False
+    )
+    assert requested_loan["state"] == LoanState.PENDING
     params = {
-        'transaction_location_pid': loc_public_saillon.pid,
-        'transaction_user_pid': librarian_martigny.pid
+        "transaction_location_pid": loc_public_saillon.pid,
+        "transaction_user_pid": librarian_martigny.pid,
     }
     item, actions = item.checkin(**params)
@@ -585,5 +633,5 @@ def test_checkin_on_item_in_transit_to_house_with_external_loans_transit(
     loan = Loan.get_record_by_pid(loan.pid)
     requested_loan = Loan.get_record_by_pid(requested_loan.pid)
     assert item.status == ItemStatus.IN_TRANSIT
-    assert loan['state'] == LoanState.CANCELLED
-    assert requested_loan['state'] == LoanState.ITEM_IN_TRANSIT_FOR_PICKUP
+    assert loan["state"] == LoanState.CANCELLED
+    assert requested_loan["state"] == LoanState.ITEM_IN_TRANSIT_FOR_PICKUP
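A reformatting this size is only safe if it cannot silently regress. A small gate along the following lines keeps a tree black-clean during the test run — this is our sketch, not part of the patch; `--check` and `--diff` are standard black CLI flags, and a non-zero return code means at least one file would be rewritten:

    import subprocess
    import sys

    # Fail fast if anything under tests/ is not black-formatted; print the
    # would-be diff so the offending hunks are visible in the test output.
    result = subprocess.run(
        [sys.executable, "-m", "black", "--check", "--diff", "tests/"],
        capture_output=True,
        text=True,
    )
    if result.returncode != 0:
        print(result.stdout)
        raise SystemExit("tests/ is not black-formatted")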
"is_open": False, "times": []}, ] lib_martigny.commit() data = deepcopy(item_lib_martigny) - data.pop('barcode') - data.setdefault('status', ItemStatus.ON_SHELF) - created_item = Item.create( - data=data, dbcommit=True, reindex=True, delete_pid=True) + data.pop("barcode") + data.setdefault("status", ItemStatus.ON_SHELF) + created_item = Item.create(data=data, dbcommit=True, reindex=True, delete_pid=True) params = { - 'patron_pid': patron_martigny.pid, - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid, - 'pickup_location_pid': loc_public_martigny.pid + "patron_pid": patron_martigny.pid, + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, + "pickup_location_pid": loc_public_martigny.pid, } onloan_item, actions = created_item.checkout(**params) - loan = Loan.get_record_by_pid(actions[LoanAction.CHECKOUT].get('pid')) + loan = Loan.get_record_by_pid(actions[LoanAction.CHECKOUT].get("pid")) # check that can_extend method does not raise exception assert Loan.can_extend(onloan_item)[0] in [True, False] # check loan is ITEM_ON_LOAN and item is ON_LOAN assert onloan_item.status == ItemStatus.ON_LOAN - assert loan['state'] == LoanState.ITEM_ON_LOAN + assert loan["state"] == LoanState.ITEM_ON_LOAN # test checkout if library has no open days but has exception closed day # in the future exception_date = (date.today() + timedelta(days=30)).isoformat() - lib_martigny['exception_dates'].append( - { - "title": "Closed", - "is_open": False, - "start_date": exception_date - } + lib_martigny["exception_dates"].append( + {"title": "Closed", "is_open": False, "start_date": exception_date} ) lib_martigny.commit() data = deepcopy(item_lib_martigny) - data.pop('barcode') - data.setdefault('status', ItemStatus.ON_SHELF) - created_item = Item.create( - data=data, dbcommit=True, reindex=True, delete_pid=True) + data.pop("barcode") + data.setdefault("status", ItemStatus.ON_SHELF) + created_item = Item.create(data=data, dbcommit=True, reindex=True, delete_pid=True) params = { - 'patron_pid': patron_martigny.pid, - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid, - 'pickup_location_pid': loc_public_martigny.pid + "patron_pid": patron_martigny.pid, + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, + "pickup_location_pid": loc_public_martigny.pid, } onloan_item, actions = created_item.checkout(**params) - loan = Loan.get_record_by_pid(actions[LoanAction.CHECKOUT].get('pid')) + loan = Loan.get_record_by_pid(actions[LoanAction.CHECKOUT].get("pid")) # check loan is ITEM_ON_LOAN and item is ON_LOAN assert onloan_item.status == ItemStatus.ON_LOAN - assert loan['state'] == LoanState.ITEM_ON_LOAN + assert loan["state"] == LoanState.ITEM_ON_LOAN # test checkout if library has no open days and no exception days/hours - del lib_martigny['exception_dates'] + del lib_martigny["exception_dates"] lib_martigny.commit() data = deepcopy(item_lib_martigny) - data.pop('barcode') - data.setdefault('status', ItemStatus.ON_SHELF) - created_item = Item.create( - data=data, dbcommit=True, reindex=True, delete_pid=True) + data.pop("barcode") + data.setdefault("status", ItemStatus.ON_SHELF) + created_item = Item.create(data=data, dbcommit=True, reindex=True, delete_pid=True) params = { - 'patron_pid': patron_martigny.pid, - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid, - 
-        'pickup_location_pid': loc_public_martigny.pid
+        "patron_pid": patron_martigny.pid,
+        "transaction_location_pid": loc_public_martigny.pid,
+        "transaction_user_pid": librarian_martigny.pid,
+        "pickup_location_pid": loc_public_martigny.pid,
     }
     onloan_item, actions = created_item.checkout(**params)
-    loan = Loan.get_record_by_pid(actions[LoanAction.CHECKOUT].get('pid'))
+    loan = Loan.get_record_by_pid(actions[LoanAction.CHECKOUT].get("pid"))

     # check loan is ITEM_ON_LOAN and item is ON_LOAN
     assert onloan_item.status == ItemStatus.ON_LOAN
-    assert loan['state'] == LoanState.ITEM_ON_LOAN
+    assert loan["state"] == LoanState.ITEM_ON_LOAN

     # test checkout if library has no open days and no exception days/hours
-    del lib_martigny['exception_dates']
+    del lib_martigny["exception_dates"]
    lib_martigny.commit()
     data = deepcopy(item_lib_martigny)
-    data.pop('barcode')
-    data.setdefault('status', ItemStatus.ON_SHELF)
-    created_item = Item.create(
-        data=data, dbcommit=True, reindex=True, delete_pid=True)
+    data.pop("barcode")
+    data.setdefault("status", ItemStatus.ON_SHELF)
+    created_item = Item.create(data=data, dbcommit=True, reindex=True, delete_pid=True)
     params = {
-        'patron_pid': patron_martigny.pid,
-        'transaction_location_pid': loc_public_martigny.pid,
-        'transaction_user_pid': librarian_martigny.pid,
-        'pickup_location_pid': loc_public_martigny.pid
+        "patron_pid": patron_martigny.pid,
+        "transaction_location_pid": loc_public_martigny.pid,
+        "transaction_user_pid": librarian_martigny.pid,
+        "pickup_location_pid": loc_public_martigny.pid,
     }
     onloan_item, actions = created_item.checkout(**params)
-    loan = Loan.get_record_by_pid(actions[LoanAction.CHECKOUT].get('pid'))
+    loan = Loan.get_record_by_pid(actions[LoanAction.CHECKOUT].get("pid"))

     # check loan is ITEM_ON_LOAN and item is ON_LOAN
     assert onloan_item.status == ItemStatus.ON_LOAN
-    assert loan['state'] == LoanState.ITEM_ON_LOAN
+    assert loan["state"] == LoanState.ITEM_ON_LOAN
     from invenio_db import db
+
     db.session.rollback()
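test_checkout_library_never_open repeats the same cloning dance three times: deepcopy a fixture item, drop its barcode so a fresh one is minted, default the status, create. A hypothetical helper capturing that pattern — `FakeItem` stands in for the project's Item class, and the keyword names are illustrative, not rero-ils's API surface:

    from copy import deepcopy

    class FakeItem:
        """Stand-in for an Item record class; only what the sketch needs."""
        @classmethod
        def create(cls, data, dbcommit=True, reindex=True, delete_pid=True):
            return data

    def clone_on_shelf_item(item_cls, template, **overrides):
        # Same three steps the test repeats: copy the fixture, drop the
        # barcode so a new one is minted, default the status, create.
        data = deepcopy(template)
        data.pop("barcode", None)
        data.setdefault("status", "on_shelf")
        data.update(overrides)
        return item_cls.create(data=data, dbcommit=True, reindex=True, delete_pid=True)

    item = clone_on_shelf_item(FakeItem, {"barcode": "B1", "pid": "item-1"})
    assert "barcode" not in item and item["status"] == "on_shelf"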


 def test_checkout_on_item_on_shelf(
-        circulation_policies,
-        patron_martigny,
-        patron2_martigny,
-        item_lib_martigny,
-        loc_public_martigny,
-        librarian_martigny,
-        item_on_shelf_martigny_patron_and_loan_pending):
+    circulation_policies,
+    patron_martigny,
+    patron2_martigny,
+    item_lib_martigny,
+    loc_public_martigny,
+    librarian_martigny,
+    item_on_shelf_martigny_patron_and_loan_pending,
+):
     """Test checkout on an ON_SHELF item."""
     # Create a new item in ON_SHELF (without Loan)
     data = deepcopy(item_lib_martigny)
-    data.pop('barcode')
-    data.setdefault('status', ItemStatus.ON_SHELF)
-    created_item = Item.create(
-        data=data, dbcommit=True, reindex=True, delete_pid=True)
+    data.pop("barcode")
+    data.setdefault("status", ItemStatus.ON_SHELF)
+    created_item = Item.create(data=data, dbcommit=True, reindex=True, delete_pid=True)

     # Check item is ON_SHELF and NO PENDING loan exist!
     assert created_item.number_of_requests() == 0
     assert created_item.status == ItemStatus.ON_SHELF
     assert not created_item.is_requested_by_patron(
-        patron_martigny.get('patron', {}).get('barcode')[0])
+        patron_martigny.get("patron", {}).get("barcode")[0]
+    )

     # the following tests the circulation action CHECKOUT_1_1
     # an ON_SHELF item
     # WITHOUT pending loan
     # CAN be CHECKOUT
     params = {
-        'patron_pid': patron_martigny.pid,
-        'transaction_location_pid': loc_public_martigny.pid,
-        'transaction_user_pid': librarian_martigny.pid,
-        'pickup_location_pid': loc_public_martigny.pid
+        "patron_pid": patron_martigny.pid,
+        "transaction_location_pid": loc_public_martigny.pid,
+        "transaction_user_pid": librarian_martigny.pid,
+        "pickup_location_pid": loc_public_martigny.pid,
     }
     onloan_item, actions = created_item.checkout(**params)
-    loan = Loan.get_record_by_pid(actions[LoanAction.CHECKOUT].get('pid'))
+    loan = Loan.get_record_by_pid(actions[LoanAction.CHECKOUT].get("pid"))

     # Check loan is ITEM_ON_LOAN and item is ON_LOAN
     assert onloan_item.number_of_requests() == 0
     assert onloan_item.status == ItemStatus.ON_LOAN
-    assert loan['state'] == LoanState.ITEM_ON_LOAN
+    assert loan["state"] == LoanState.ITEM_ON_LOAN

     # Fetch a PENDING item and loan
     pending_item, patron, loan = item_on_shelf_martigny_patron_and_loan_pending
@@ -210,16 +180,16 @@ def test_checkout_on_item_on_shelf(
     # WITH pending loan
     # checkout patron != patron of first PENDING loan
     # can NOT be CHECKOUT
-    params['patron_pid'] = patron2_martigny.pid
+    params["patron_pid"] = patron2_martigny.pid
     # CHECKOUT patron is DIFFERENT from 1st PENDING LOAN patron
-    assert params['patron_pid'] != patron['pid']
+    assert params["patron_pid"] != patron["pid"]
     with pytest.raises(ItemNotAvailableError):
         asked_item, actions = pending_item.checkout(**params)
-    checkout_loan = Loan.get_record_by_pid(loan['pid'])
+    checkout_loan = Loan.get_record_by_pid(loan["pid"])
     asked_item = Item.get_record_by_pid(pending_item.pid)
     # Check loan is PENDING and item is ON_SHELF
     assert asked_item.status == ItemStatus.ON_SHELF
-    assert checkout_loan['state'] == LoanState.PENDING
+    assert checkout_loan["state"] == LoanState.PENDING
     assert asked_item.number_of_requests() == 1

     # the following tests the circulation action CHECKOUT_1_2_1
@@ -227,24 +197,24 @@ def test_checkout_on_item_on_shelf(
     # WITH a pending loan
     # checkout patron = patron of first PENDING loan
     # CAN be CHECKOUT
-    assert pending_item.is_requested_by_patron(
-        patron.patron.get('barcode')[0])
+    assert pending_item.is_requested_by_patron(patron.patron.get("barcode")[0])
     # Checkout it! CHECKOUT patron == 1st PENDING LOAN patron
-    assert patron.get('pid') == loan.get('patron_pid')
-    params['patron_pid'] = patron_martigny.pid
+    assert patron.get("pid") == loan.get("patron_pid")
+    params["patron_pid"] = patron_martigny.pid
     onloan_item, actions = pending_item.checkout(**params, pid=loan.pid)
-    loan = Loan.get_record_by_pid(actions[LoanAction.CHECKOUT].get('pid'))
+    loan = Loan.get_record_by_pid(actions[LoanAction.CHECKOUT].get("pid"))

     # Check loan is ITEM_ON_LOAN and item is ON_LOAN
     assert onloan_item.number_of_requests() == 0
     assert onloan_item.status == ItemStatus.ON_LOAN
-    assert loan['state'] == LoanState.ITEM_ON_LOAN
+    assert loan["state"] == LoanState.ITEM_ON_LOAN


 def test_checkout_on_item_at_desk(
-        item_at_desk_martigny_patron_and_loan_at_desk,
-        patron2_martigny,
-        loc_public_martigny,
-        librarian_martigny):
+    item_at_desk_martigny_patron_and_loan_at_desk,
+    patron2_martigny,
+    loc_public_martigny,
+    librarian_martigny,
+):
     """Test CHECKOUT on an AT_DESK item."""
     # Prepare a new item with ITEM_AT_DESK loan
     atdesk_item, patron, loan = item_at_desk_martigny_patron_and_loan_at_desk
@@ -255,12 +225,12 @@ def test_checkout_on_item_at_desk(
     # checkout patron != patron of first PENDING loan
     # can NOT be CHECKOUT (raise ItemNotAvailableError)
     params = {
-        'patron_pid': patron2_martigny.pid,
-        'transaction_location_pid': loc_public_martigny.pid,
-        'transaction_user_pid': librarian_martigny.pid,
-        'pickup_location_pid': loc_public_martigny.pid
+        "patron_pid": patron2_martigny.pid,
+        "transaction_location_pid": loc_public_martigny.pid,
+        "transaction_user_pid": librarian_martigny.pid,
+        "pickup_location_pid": loc_public_martigny.pid,
     }
-    assert params['patron_pid'] != loan['patron_pid']
+    assert params["patron_pid"] != loan["patron_pid"]
     with pytest.raises(ItemNotAvailableError):
         asked_item, actions = atdesk_item.checkout(**params)
@@ -268,31 +238,31 @@ def test_checkout_on_item_at_desk(
     checkout_loan = Loan.get_record_by_pid(loan.pid)
     asked_item = Item.get_record_by_pid(atdesk_item.pid)
     assert asked_item.status == ItemStatus.AT_DESK
-    assert checkout_loan['state'] == LoanState.ITEM_AT_DESK
+    assert checkout_loan["state"] == LoanState.ITEM_AT_DESK
     assert asked_item.number_of_requests() == 1

     # the following tests the circulation action CHECKOUT_2_1
     # an AT_DESK item
     # checkout patron = patron of first PENDING loan
     # CAN be CHECKOUT
-    params.update({'patron_pid': patron['pid']})
+    params.update({"patron_pid": patron["pid"]})
     # Checkout it! CHECKOUT patron == 1st PENDING LOAN patron
-    assert params['patron_pid'] == loan['patron_pid']
+    assert params["patron_pid"] == loan["patron_pid"]
     asked_item, actions = atdesk_item.checkout(**params)
-    checkout_loan = Loan.get_record_by_pid(
-        actions[LoanAction.CHECKOUT].get('pid'))
+    checkout_loan = Loan.get_record_by_pid(actions[LoanAction.CHECKOUT].get("pid"))

     # Check loan is ITEM_ON_LOAN and item is ON_LOAN
     assert asked_item.number_of_requests() == 0
     assert asked_item.status == ItemStatus.ON_LOAN
-    assert checkout_loan['state'] == LoanState.ITEM_ON_LOAN
+    assert checkout_loan["state"] == LoanState.ITEM_ON_LOAN


 def test_checkout_on_item_on_loan(
-        item_on_loan_martigny_patron_and_loan_on_loan,
-        patron_martigny,
-        patron2_martigny,
-        loc_public_martigny,
-        librarian_martigny):
+    item_on_loan_martigny_patron_and_loan_on_loan,
+    patron_martigny,
+    patron2_martigny,
+    loc_public_martigny,
+    librarian_martigny,
+):
     """Test CHECKOUT on an ON_LOAN item."""
     # Prepare a new item with an ITEM_ON_LOAN loan
     onloan_item, patron, loan = item_on_loan_martigny_patron_and_loan_on_loan
@@ -303,13 +273,13 @@ def test_checkout_on_item_on_loan(
     # checkout patron = patron of current loan
     # can NOT be CHECKOUT
     params = {
-        'patron_pid': patron_martigny.pid,
-        'transaction_location_pid': loc_public_martigny.pid,
-        'transaction_user_pid': librarian_martigny.pid,
-        'pickup_location_pid': loc_public_martigny.pid
+        "patron_pid": patron_martigny.pid,
+        "transaction_location_pid": loc_public_martigny.pid,
+        "transaction_user_pid": librarian_martigny.pid,
+        "pickup_location_pid": loc_public_martigny.pid,
     }
     # Checkout it! CHECKOUT patron == current LOAN patron
-    assert params['patron_pid'] == patron['pid']
+    assert params["patron_pid"] == patron["pid"]
     with pytest.raises(NoValidTransitionAvailableError):
         asked_item, actions = onloan_item.checkout(**params, pid=loan.pid)
     # Check item is ON_SHELF (because no
@@ -317,33 +287,34 @@ def test_checkout_on_item_on_loan(
     checkout_loan = Loan.get_record_by_pid(loan.pid)
     asked_item = Item.get_record_by_pid(onloan_item.pid)
     assert asked_item.status == ItemStatus.ON_LOAN
-    assert checkout_loan['state'] == LoanState.ITEM_ON_LOAN
+    assert checkout_loan["state"] == LoanState.ITEM_ON_LOAN

     # the following tests the circulation action CHECKOUT_3_2
     # an ON_LOAN item
     # checkout patron != patron of current loan
     # can NOT be CHECKOUT
-    params['patron_pid'] = patron2_martigny.pid
-    assert params['patron_pid'] != patron['pid']
+    params["patron_pid"] = patron2_martigny.pid
+    assert params["patron_pid"] != patron["pid"]
     with pytest.raises(ItemNotAvailableError):
         asked_item, actions = onloan_item.checkout(**params)
     asked_item = Item.get_record_by_pid(onloan_item.pid)
     checkout_loan = Loan.get_record_by_pid(loan.pid)
     assert asked_item.status == ItemStatus.ON_LOAN
-    assert checkout_loan['state'] == LoanState.ITEM_ON_LOAN
+    assert checkout_loan["state"] == LoanState.ITEM_ON_LOAN
     assert asked_item.number_of_requests() == 0
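The recurring assertion pattern in these tests — raise inside `pytest.raises`, then reload the records and assert nothing changed — is worth isolating. A self-contained toy model of that guard (our own stand-in exception and checkout function, not invenio-circulation's):

    import pytest

    class ItemNotAvailableError(Exception):
        """Stand-in for invenio_circulation's exception of the same name."""

    def checkout(item, patron_pid):
        # Toy model of the guard exercised above: only the patron holding
        # the first pending request may check the item out.
        if item["pending_patron"] != patron_pid:
            raise ItemNotAvailableError(item["pid"])
        item["status"] = "on_loan"
        return item

    def test_checkout_keeps_state_on_failure():
        item = {"pid": "1", "status": "at_desk", "pending_patron": "p1"}
        with pytest.raises(ItemNotAvailableError):
            checkout(item, patron_pid="p2")
        # The failed transition must leave the record untouched.
        assert item["status"] == "at_desk"

    test_checkout_keeps_state_on_failure()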


 def test_checkout_on_item_in_transit_for_pickup(
-        item_in_transit_martigny_patron_and_loan_for_pickup,
-        patron_martigny,
-        patron2_martigny,
-        loc_public_martigny,
-        librarian_martigny, loc_public_saxon):
+    item_in_transit_martigny_patron_and_loan_for_pickup,
+    patron_martigny,
+    patron2_martigny,
+    loc_public_martigny,
+    librarian_martigny,
+    loc_public_saxon,
+):
     """Test CHECKOUT on an IN_TRANSIT (for pickup) item."""
     # Prepare a new item with an IN_TRANSIT loan
-    intransit_item, patron, loan = \
-        item_in_transit_martigny_patron_and_loan_for_pickup
+    intransit_item, patron, loan = item_in_transit_martigny_patron_and_loan_for_pickup
     assert intransit_item.number_of_requests() == 1

     # the following tests the circulation action CHECKOUT_4_2
@@ -351,45 +322,44 @@ def test_checkout_on_item_in_transit_for_pickup(
     # checkout patron != patron of current loan
     # can NOT be CHECKOUT
     params = {
-        'patron_pid': patron2_martigny.pid,
-        'transaction_location_pid': loc_public_martigny.pid,
-        'transaction_user_pid': librarian_martigny.pid,
-        'pickup_location_pid': loc_public_saxon.pid
+        "patron_pid": patron2_martigny.pid,
+        "transaction_location_pid": loc_public_martigny.pid,
+        "transaction_user_pid": librarian_martigny.pid,
+        "pickup_location_pid": loc_public_saxon.pid,
     }
-    assert params['patron_pid'] != patron['pid']
+    assert params["patron_pid"] != patron["pid"]
     with pytest.raises(ItemNotAvailableError):
         asked_item, actions = intransit_item.checkout(**params)
     # Check item is ON_LOAN and loan is ITEM_ON_LOAN
     asked_item = Item.get_record_by_pid(intransit_item.pid)
     checkout_loan = Loan.get_record_by_pid(loan.pid)
     assert asked_item.status == ItemStatus.IN_TRANSIT
-    assert checkout_loan['state'] == LoanState.ITEM_IN_TRANSIT_FOR_PICKUP
+    assert checkout_loan["state"] == LoanState.ITEM_IN_TRANSIT_FOR_PICKUP

     # the following tests the circulation action CHECKOUT_4_1
     # an IN_TRANSIT (for pickup) item
     # checkout patron = patron of current loan
     # CAN be CHECKOUT
-    params['patron_pid'] = patron_martigny.pid
+    params["patron_pid"] = patron_martigny.pid
     # Checkout it! CHECKOUT patron == current LOAN patron
-    assert params['patron_pid'] == patron['pid']
+    assert params["patron_pid"] == patron["pid"]
     asked_item, actions = intransit_item.checkout(**params, pid=loan.pid)
-    checkout_loan = Loan.get_record_by_pid(
-        actions[LoanAction.CHECKOUT].get('pid'))
+    checkout_loan = Loan.get_record_by_pid(actions[LoanAction.CHECKOUT].get("pid"))
     # Check item is ON_LOAN and loan is ITEM_ON_LOAN
     assert asked_item.status == ItemStatus.ON_LOAN
-    assert checkout_loan['state'] == LoanState.ITEM_ON_LOAN
+    assert checkout_loan["state"] == LoanState.ITEM_ON_LOAN


 def test_checkout_on_item_in_transit_to_house(
-        item_in_transit_martigny_patron_and_loan_to_house,
-        patron_martigny,
-        librarian_martigny,
-        loc_public_martigny,
-        loc_public_saxon):
+    item_in_transit_martigny_patron_and_loan_to_house,
+    patron_martigny,
+    librarian_martigny,
+    loc_public_martigny,
+    loc_public_saxon,
+):
     """Test CHECKOUT on an IN_TRANSIT (to house) item."""
     # Create a new item in IN_TRANSIT_TO_HOUSE
-    intransit_item, patron, loan = \
-        item_in_transit_martigny_patron_and_loan_to_house
+    intransit_item, patron, loan = item_in_transit_martigny_patron_and_loan_to_house
     assert intransit_item.number_of_requests() == 0

     # the following tests the circulation action CHECKOUT_5_1
@@ -397,31 +367,30 @@ def test_checkout_on_item_in_transit_to_house(
     # WITHOUT pending loan
     # CAN be CHECKOUT
     params = {
-        'patron_pid': patron_martigny.pid,
-        'transaction_location_pid': loc_public_saxon.pid,
-        'transaction_user_pid': librarian_martigny.pid,
-        'pickup_location_pid': loc_public_martigny.pid,
+        "patron_pid": patron_martigny.pid,
+        "transaction_location_pid": loc_public_saxon.pid,
+        "transaction_user_pid": librarian_martigny.pid,
+        "pickup_location_pid": loc_public_martigny.pid,
     }
     # Checkout it!
     asked_item, actions = intransit_item.checkout(**params, pid=loan.pid)
-    checkout_loan = Loan.get_record_by_pid(
-        actions[LoanAction.CHECKOUT].get('pid'))
+    checkout_loan = Loan.get_record_by_pid(actions[LoanAction.CHECKOUT].get("pid"))
     # Check loan is ITEM_ON_LOAN and item is ON_LOAN
     assert intransit_item.status == ItemStatus.ON_LOAN
-    assert checkout_loan['state'] == LoanState.ITEM_ON_LOAN
+    assert checkout_loan["state"] == LoanState.ITEM_ON_LOAN
     assert intransit_item.number_of_requests() == 0


 def test_checkout_on_item_in_transit_to_house_for_another_patron(
-        item2_in_transit_martigny_patron_and_loan_to_house,
-        patron2_martigny,
-        librarian_martigny,
-        loc_public_martigny,
-        loc_public_saxon):
+    item2_in_transit_martigny_patron_and_loan_to_house,
+    patron2_martigny,
+    librarian_martigny,
+    loc_public_martigny,
+    loc_public_saxon,
+):
     """Test CHECKOUT on an IN_TRANSIT (to house) item."""
     # Create a new item in IN_TRANSIT_TO_HOUSE
-    intransit_item, patron, loan = \
-        item2_in_transit_martigny_patron_and_loan_to_house
+    intransit_item, patron, loan = item2_in_transit_martigny_patron_and_loan_to_house
     assert intransit_item.number_of_requests() == 0

     # the following tests the circulation action CHECKOUT_5_1
@@ -429,40 +398,39 @@ def test_checkout_on_item_in_transit_to_house_for_another_patron(
     # WITHOUT pending loan
     # CAN be CHECKOUT
     params = {
-        'patron_pid': patron2_martigny.pid,
-        'transaction_location_pid': loc_public_saxon.pid,
-        'transaction_user_pid': librarian_martigny.pid,
-        'pickup_location_pid': loc_public_martigny.pid,
+        "patron_pid": patron2_martigny.pid,
+        "transaction_location_pid": loc_public_saxon.pid,
+        "transaction_user_pid": librarian_martigny.pid,
+        "pickup_location_pid": loc_public_martigny.pid,
     }
     # Checkout it!
     asked_item, actions = intransit_item.checkout(**params, pid=loan.pid)
-    checkout_loan = Loan.get_record_by_pid(
-        actions[LoanAction.CHECKOUT].get('pid'))
+    checkout_loan = Loan.get_record_by_pid(actions[LoanAction.CHECKOUT].get("pid"))
     # Check loan is ITEM_ON_LOAN and item is ON_LOAN
     assert intransit_item.status == ItemStatus.ON_LOAN
-    assert checkout_loan['state'] == LoanState.ITEM_ON_LOAN
+    assert checkout_loan["state"] == LoanState.ITEM_ON_LOAN
     assert intransit_item.number_of_requests() == 0


 def test_checkout_on_item_in_transit_to_house_with_pending_loan(
-        item_in_transit_martigny_patron_and_loan_to_house,
-        item_lib_martigny,
-        patron2_martigny,
-        loc_public_martigny,
-        librarian_martigny,
-        loc_public_fully):
+    item_in_transit_martigny_patron_and_loan_to_house,
+    item_lib_martigny,
+    patron2_martigny,
+    loc_public_martigny,
+    librarian_martigny,
+    loc_public_fully,
+):
     """Test item IN_TRANSIT (to house), WITHOUT pending loan, same patron."""
     # Create a new item in IN_TRANSIT_TO_HOUSE
-    intransit_item, patron, loan = \
-        item_in_transit_martigny_patron_and_loan_to_house
+    intransit_item, patron, loan = item_in_transit_martigny_patron_and_loan_to_house
     assert intransit_item.number_of_requests() == 0

     params = {
-        'patron_pid': patron['pid'],
-        'transaction_location_pid': loc_public_martigny.pid,
-        'transaction_user_pid': librarian_martigny.pid,
-        'pickup_location_pid': loc_public_martigny.pid,
-        'checkin_transaction_location_pid': loc_public_fully.pid
+        "patron_pid": patron["pid"],
+        "transaction_location_pid": loc_public_martigny.pid,
+        "transaction_user_pid": librarian_martigny.pid,
+        "pickup_location_pid": loc_public_martigny.pid,
+        "checkin_transaction_location_pid": loc_public_fully.pid,
     }
     # WARNING: this test works alone. But with other test, we need to check
     # if item is ON_LOAN. If yes: create a new item and loan
@@ -470,17 +438,19 @@
     intransit_item, loan = item_record_to_a_specific_loan_state(
         item=item_lib_martigny,
         loan_state=LoanState.ITEM_IN_TRANSIT_TO_HOUSE,
-        params=params)
+        params=params,
+    )

     # Create a pending loan
-    params['patron_pid'] = patron2_martigny.pid
+    params["patron_pid"] = patron2_martigny.pid
     checked_item, requested_loan = item_record_to_a_specific_loan_state(
         item=intransit_item,
         loan_state=LoanState.PENDING,
         params=params,
-        copy_item=False)
-    assert loan['state'] == LoanState.ITEM_IN_TRANSIT_TO_HOUSE
-    assert requested_loan['state'] == LoanState.PENDING
+        copy_item=False,
+    )
+    assert loan["state"] == LoanState.ITEM_IN_TRANSIT_TO_HOUSE
+    assert requested_loan["state"] == LoanState.PENDING
     assert checked_item.number_of_requests() == 1

     # the following tests the circulation action CHECKOUT_5_2_2
@@ -488,17 +458,16 @@ def test_checkout_on_item_in_transit_to_house_with_pending_loan(
     # an IN_TRANSIT (to house) item
     # WITH pending loan
     # checkout patron != patron of first pending loan
     # can NOT be CHECKOUT
-    params['patron_pid'] = patron['pid']
-    assert params['patron_pid'] != requested_loan.patron_pid
+    params["patron_pid"] = patron["pid"]
+    assert params["patron_pid"] != requested_loan.patron_pid
     assert intransit_item.status == ItemStatus.IN_TRANSIT
-    assert loan['state'] == LoanState.ITEM_IN_TRANSIT_TO_HOUSE
+    assert loan["state"] == LoanState.ITEM_IN_TRANSIT_TO_HOUSE
     with pytest.raises(TransitionConstraintsViolationError):
-        asked_item, actions = intransit_item.checkout(
-            **params, pid=requested_loan.pid)
+        asked_item, actions = intransit_item.checkout(**params, pid=requested_loan.pid)
     asked_item = Item.get_record_by_pid(intransit_item.pid)
     checkout_loan = Loan.get_record_by_pid(loan.pid)
     assert asked_item.status == ItemStatus.IN_TRANSIT
-    assert checkout_loan['state'] == LoanState.ITEM_IN_TRANSIT_TO_HOUSE
+    assert checkout_loan["state"] == LoanState.ITEM_IN_TRANSIT_TO_HOUSE

     # the following tests the circulation action CHECKOUT_5_2_1
     # an IN_TRANSIT (to house) item
@@ -506,14 +475,13 @@ def test_checkout_on_item_in_transit_to_house_with_pending_loan(
     # WITH pending loan
     # checkout patron = patron of first pending loan
     # CAN BE CHECKOUT
     # Checkout it! CHECKOUT patron = patron of first PENDING loan
-    assert params['patron_pid'] == loan.patron_pid
+    assert params["patron_pid"] == loan.patron_pid
     assert intransit_item.status == ItemStatus.IN_TRANSIT
-    assert loan['state'] == LoanState.ITEM_IN_TRANSIT_TO_HOUSE
+    assert loan["state"] == LoanState.ITEM_IN_TRANSIT_TO_HOUSE
     # What differs from CHECKOUT_5_2_2 is the given LOAN (`pid=`)
     checkout_item, actions = asked_item.checkout(**params, pid=loan.pid)
-    checkout_loan = Loan.get_record_by_pid(
-        actions[LoanAction.CHECKOUT].get('pid'))
+    checkout_loan = Loan.get_record_by_pid(actions[LoanAction.CHECKOUT].get("pid"))
     # Check loan is ITEM_ON_LOAN and item is ON_LOAN
     assert checkout_item.status == ItemStatus.ON_LOAN
-    assert checkout_loan['state'] == LoanState.ITEM_ON_LOAN
+    assert checkout_loan["state"] == LoanState.ITEM_ON_LOAN
     assert checkout_item.number_of_requests() == 1  # pending loan remains
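Several hunks in this file only remove backslash continuations: once the joined line fits within black's default limit, the continuation goes. A sketch verifying that directly, assuming black is importable — the source string mirrors the tuple unpacking rewritten above:

    import black

    # The old code wrapped long tuple unpacking with a backslash; the joined
    # line is under 88 columns, so black emits it as a single line.
    src = (
        "intransit_item, patron, loan = \\\n"
        "    item_in_transit_martigny_patron_and_loan_to_house\n"
    )
    out = black.format_str(src, mode=black.Mode())
    assert "\\" not in out and out.count("\n") == 1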
diff --git a/tests/ui/circulation/test_actions_expired_request.py b/tests/ui/circulation/test_actions_expired_request.py
index 537af51dab..9c32968461 100644
--- a/tests/ui/circulation/test_actions_expired_request.py
+++ b/tests/ui/circulation/test_actions_expired_request.py
@@ -20,22 +20,24 @@
 from datetime import datetime, timedelta, timezone

 from freezegun import freeze_time
-from utils import flush_index

 from rero_ils.modules.items.api import Item
 from rero_ils.modules.items.models import ItemStatus
 from rero_ils.modules.loans.api import Loan, LoansSearch, get_expired_request
-from rero_ils.modules.loans.logs.api import LoanOperationLog, \
-    LoanOperationLogsSearch
+from rero_ils.modules.loans.logs.api import LoanOperationLogsSearch
 from rero_ils.modules.loans.models import LoanState
 from rero_ils.modules.loans.tasks import cancel_expired_request_task


 @freeze_time("2022-03-01T14:33:22+02:00")
 def test_expired_request_with_transit(
-    item_lib_martigny, loc_public_sion, librarian_sion,
-    loc_public_martigny, patron2_martigny, librarian_martigny,
-    circulation_policies
+    item_lib_martigny,
+    loc_public_sion,
+    librarian_sion,
+    loc_public_martigny,
+    patron2_martigny,
+    librarian_martigny,
+    circulation_policies,
 ):
     """Test request expiration for item in transit."""
     item = item_lib_martigny
@@ -49,16 +51,16 @@ def test_expired_request_with_transit(
         pickup_location_pid=loc_public_sion.pid,
         patron_pid=patron2_martigny.pid,
         transaction_location_pid=loc_public_martigny.pid,
-        transaction_user_pid=librarian_martigny.pid
+        transaction_user_pid=librarian_martigny.pid,
     )
-    assert 'request' in actions
-    loan = Loan.get_record_by_pid(actions['request']['pid'])
+    assert "request" in actions
+    loan = Loan.get_record_by_pid(actions["request"]["pid"])
     assert item.location_pid != loan.pickup_location_pid

     item, _ = item.validate_request(
         transaction_location_pid=loc_public_martigny.pid,
         transaction_user_pid=librarian_martigny.pid,
-        pid=loan.pid
+        pid=loan.pid,
     )
     loan = Loan.get_record_by_pid(loan.pid)
     assert item.status == ItemStatus.IN_TRANSIT
@@ -67,20 +69,20 @@ def test_expired_request_with_transit(
     item, _ = item.receive(
         transaction_location_pid=loc_public_sion.pid,
         transaction_user_pid=librarian_sion.pid,
-        pid=loan.pid
+        pid=loan.pid,
     )
     loan = Loan.get_record_by_pid(loan.pid)
     assert item.status == ItemStatus.AT_DESK
     assert loan.state == LoanState.ITEM_AT_DESK
-    assert 'request_expire_date' in loan
+    assert "request_expire_date" in loan

     # STEP#1 :: UPDATE THE LOAN TO SIMULATE REQUEST HAS EXPIRED
     # Update the loan `request_expire_date` field to simulate than the
     # requester patron never came take this item.
     yesterday = datetime.now(timezone.utc) - timedelta(days=1)
-    loan['request_expire_date'] = yesterday.isoformat()
+    loan["request_expire_date"] = yesterday.isoformat()
     loan = loan.update(loan, dbcommit=True, reindex=True)
-    flush_index(LoansSearch.Meta.index)
+    LoansSearch.flush_and_refresh()
     assert loan.pid in [loan.pid for loan in get_expired_request()]

     # STEP#2 :: CANCEL THE EXPIRED REQUEST
@@ -97,10 +99,10 @@ def test_expired_request_with_transit(
     assert item.status == ItemStatus.IN_TRANSIT
     assert loan.state == LoanState.ITEM_IN_TRANSIT_TO_HOUSE

-    flush_index(LoanOperationLog.index_name)
+    LoanOperationLogsSearch.flush_and_refresh()
     logs = LoanOperationLogsSearch().get_logs_by_record_pid(loan.pid)
     logs_trigger = [hit.loan.trigger for hit in logs]
-    assert 'cancel' in logs_trigger
+    assert "cancel" in logs_trigger

     # STEP#3 :: RECEIVE THE ITEM AT OWNING LIBRARY
     # * Receive the item at the owning library.
@@ -109,7 +111,7 @@ def test_expired_request_with_transit(
     item, _ = item.receive(
         transaction_location_pid=loc_public_martigny.pid,
         transaction_user_pid=librarian_martigny.pid,
-        pid=loan.pid
+        pid=loan.pid,
     )
     loan = Loan.get_record_by_pid(loan.pid)
     assert item.status == ItemStatus.ON_SHELF
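Beyond formatting, the hunks above replace the old `flush_index(...)` utility with `flush_and_refresh()` classmethods on the search classes. The patch does not show that implementation; a plausible sketch built on invenio-search's real `current_search.flush_and_refresh`, with an illustrative class and index name (it needs a running Invenio application context to execute):

    from invenio_search import current_search

    class LoansSearchSketch:
        """Minimal sketch of a `flush_and_refresh` classmethod.

        The real rero_ils search classes presumably wrap invenio-search in a
        similar way; names here are assumptions, not the project's code.
        """

        class Meta:
            index = "loans"

        @classmethod
        def flush_and_refresh(cls):
            # Make pending index writes searchable before the test asserts.
            current_search.flush_and_refresh(cls.Meta.index)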
diff --git a/tests/ui/circulation/test_actions_extend.py b/tests/ui/circulation/test_actions_extend.py
index 4d5815f712..8687db8614 100644
--- a/tests/ui/circulation/test_actions_extend.py
+++ b/tests/ui/circulation/test_actions_extend.py
@@ -22,7 +22,7 @@
 import ciso8601
 import pytest
 from invenio_circulation.errors import CirculationException
-from utils import flush_index, item_record_to_a_specific_loan_state
+from utils import item_record_to_a_specific_loan_state

 from rero_ils.modules.circ_policies.api import CircPolicy
 from rero_ils.modules.errors import NoCirculationAction
@@ -32,15 +32,16 @@
 from rero_ils.modules.loans.models import LoanAction, LoanState
 from rero_ils.modules.loans.utils import get_circ_policy
 from rero_ils.modules.patron_transactions.api import PatronTransactionsSearch
-from rero_ils.modules.patron_transactions.utils import \
-    get_last_transaction_by_loan_pid
+from rero_ils.modules.patron_transactions.utils import get_last_transaction_by_loan_pid
 from rero_ils.modules.utils import get_ref_for_pid


 def test_fees_after_extend(
     item_on_loan_martigny_patron_and_loan_on_loan,
-    loc_public_martigny, loc_public_saxon, librarian_martigny,
-    circulation_policies
+    loc_public_martigny,
+    loc_public_saxon,
+    librarian_martigny,
+    circulation_policies,
 ):
     """Test fees calculation after extend on different location."""

@@ -48,16 +49,16 @@ def test_fees_after_extend(
     item, patron, loan = item_on_loan_martigny_patron_and_loan_on_loan
     checkout_cipo = get_circ_policy(loan)
     extend_cipo = deepcopy(checkout_cipo)
-    extend_cipo['policy_library_level'] = True
-    extend_cipo.update({
-        'libraries': [
-            {'$ref': get_ref_for_pid(Library, loc_public_saxon.library_pid)}
-        ],
-        'overdue_fees': {
-            'intervals': [{'from': 1, 'fee_amount': 0.01}]
+    extend_cipo["policy_library_level"] = True
+    extend_cipo.update(
+        {
+            "libraries": [
+                {"$ref": get_ref_for_pid(Library, loc_public_saxon.library_pid)}
+            ],
+            "overdue_fees": {"intervals": [{"from": 1, "fee_amount": 0.01}]},
         }
-    })
-    del extend_cipo['pid']
+    )
+    del extend_cipo["pid"]
     extend_cipo = CircPolicy.create(extend_cipo)
     assert extend_cipo

@@ -65,18 +66,17 @@ def test_fees_after_extend(
     checkout_cipo_ori = get_circ_policy(loan, checkout_location=True)
     checkout_cipo = deepcopy(checkout_cipo_ori)
     checkout_fee_amount = 10
-    checkout_cipo['overdue_fees'] = {
-        'intervals': [{'from': 1, 'fee_amount': checkout_fee_amount}]
+    checkout_cipo["overdue_fees"] = {
+        "intervals": [{"from": 1, "fee_amount": checkout_fee_amount}]
     }
-    checkout_cipo = checkout_cipo.update(
-        checkout_cipo, dbcommit=True, reindex=True)
+    checkout_cipo = checkout_cipo.update(checkout_cipo, dbcommit=True, reindex=True)

     # UPDATE LOAN TO BE OVERDUE
     # LIBRARY FIXTURES EXCEPTION: Christmas Holidays is 15 days
     interval = 20
     while not loan.is_loan_overdue():
         new_end_date = datetime.now(timezone.utc) - timedelta(days=interval)
-        loan['end_date'] = new_end_date.isoformat()
+        loan["end_date"] = new_end_date.isoformat()
         interval += 1
     loan.update(loan, dbcommit=True, reindex=True)

@@ -84,18 +84,18 @@ def test_fees_after_extend(
     # The loan should use the new created circulation policy
     # Update loan `end_date` to play with "extend" function without problem
     params = {
-        'transaction_location_pid': loc_public_saxon.pid,
-        'transaction_user_pid': librarian_martigny.pid
+        "transaction_location_pid": loc_public_saxon.pid,
+        "transaction_user_pid": librarian_martigny.pid,
     }
     item, actions = item.extend_loan(**params)
     loan = actions[LoanAction.EXTEND]
     assert item.status == ItemStatus.ON_LOAN
     assert get_circ_policy(loan).pid == extend_cipo.pid
-    assert loan.get('checkout_location_pid') == loc_public_martigny.pid
-    assert loan.get('transaction_location_pid') == loc_public_saxon.pid
+    assert loan.get("checkout_location_pid") == loc_public_martigny.pid
+    assert loan.get("transaction_location_pid") == loc_public_saxon.pid

     # The patron should have fees.
-    flush_index(PatronTransactionsSearch.Meta.index)
+    PatronTransactionsSearch.flush_and_refresh()
     pttr = get_last_transaction_by_loan_pid(loan.pid)
     assert pttr.total_amount >= checkout_fee_amount

@@ -103,7 +103,7 @@ def test_fees_after_extend(
     interval = 10
     while not loan.is_loan_overdue():
         new_end_date = datetime.now(timezone.utc) - timedelta(days=interval)
-        loan['end_date'] = new_end_date.isoformat()
+        loan["end_date"] = new_end_date.isoformat()
         interval += 1
     loan.update(loan, dbcommit=True, reindex=True)

@@ -112,13 +112,13 @@ def test_fees_after_extend(
     # Check that a fee has been created and this fees is related to the
     # checkout circulation.
     params = {
-        'transaction_location_pid': loc_public_saxon.pid,
-        'transaction_user_pid': librarian_martigny.pid
+        "transaction_location_pid": loc_public_saxon.pid,
+        "transaction_user_pid": librarian_martigny.pid,
     }
     _, actions = item.checkin(**params)
     loan = actions[LoanAction.CHECKIN]
     assert loan.state == LoanState.ITEM_IN_TRANSIT_TO_HOUSE
-    params['transaction_location_pid'] = loc_public_martigny.pid
+    params["transaction_location_pid"] = loc_public_martigny.pid
     _, actions = item.checkin(**params)
     loan = actions[LoanAction.RECEIVE]
     assert loan.state == LoanState.ITEM_RETURNED
@@ -132,16 +132,19 @@ def test_fees_after_extend(

 def test_extend_on_item_on_shelf(
-        item_lib_martigny, patron_martigny,
-        loc_public_martigny, librarian_martigny,
-        circulation_policies):
+    item_lib_martigny,
+    patron_martigny,
+    loc_public_martigny,
+    librarian_martigny,
+    circulation_policies,
+):
     """Test extend an on_shelf item."""
     # the following tests the circulation action EXTEND_1
     # for an on_shelf item, the extend action is not possible.
     params = {
-        'transaction_location_pid': loc_public_martigny.pid,
-        'transaction_user_pid': librarian_martigny.pid
+        "transaction_location_pid": loc_public_martigny.pid,
+        "transaction_user_pid": librarian_martigny.pid,
     }
     with pytest.raises(NoCirculationAction):
         item, actions = item_lib_martigny.extend_loan(**params)
@@ -149,25 +152,27 @@

 def test_extend_on_item_at_desk(
-        item_at_desk_martigny_patron_and_loan_at_desk,
-        loc_public_martigny, librarian_martigny,
-        circulation_policies):
+    item_at_desk_martigny_patron_and_loan_at_desk,
+    loc_public_martigny,
+    librarian_martigny,
+    circulation_policies,
+):
     """Test extend an at_desk item."""
     # the following tests the circulation action EXTEND_2
     # for an at_desk item, the extend action is not possible.
     item, patron, loan = item_at_desk_martigny_patron_and_loan_at_desk
     # test fails if no loan pid is given
     params = {
-        'transaction_location_pid': loc_public_martigny.pid,
-        'transaction_user_pid': librarian_martigny.pid
+        "transaction_location_pid": loc_public_martigny.pid,
+        "transaction_user_pid": librarian_martigny.pid,
     }
     with pytest.raises(NoCirculationAction):
         item, actions = item.extend_loan(**params)
     assert item.status == ItemStatus.AT_DESK
     params = {
-        'pid': loan.pid,
-        'transaction_location_pid': loc_public_martigny.pid,
-        'transaction_user_pid': librarian_martigny.pid
+        "pid": loan.pid,
+        "transaction_location_pid": loc_public_martigny.pid,
+        "transaction_user_pid": librarian_martigny.pid,
     }
     with pytest.raises(CirculationException):
         item, actions = item.extend_loan(**params)
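The extend tests that follow assert a specific piece of date arithmetic: add the policy's renewal duration, step back to the eve, then ask the library calendar for the next open day, so a due date already falling on an open day comes back unchanged. A self-contained sketch of that logic with a toy weekend-closed calendar — the real semantics live in the project's Library.next_open:

    from datetime import datetime, timedelta

    def next_open(date):
        # Toy calendar: first open day strictly after `date`; weekends closed.
        date += timedelta(days=1)
        while date.weekday() >= 5:  # 5, 6 = Saturday, Sunday
            date += timedelta(days=1)
        return date

    def expected_extension_end(end_date, renewal_duration):
        # Step back to the eve so an expected date that is already an open
        # day is returned unchanged by next_open().
        expected = end_date + timedelta(days=renewal_duration)
        return next_open(expected - timedelta(days=1))

    # 2022-03-01 + 30 days = Thursday 2022-03-31, open in this toy calendar.
    assert expected_extension_end(datetime(2022, 3, 1), 30) == datetime(2022, 3, 31)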
@@ -175,14 +180,18 @@

 def test_extend_on_item_on_loan_with_no_requests(
-        app, item_on_loan_martigny_patron_and_loan_on_loan,
-        loc_public_martigny, librarian_martigny, lib_martigny,
-        circulation_policies):
+    app,
+    item_on_loan_martigny_patron_and_loan_on_loan,
+    loc_public_martigny,
+    librarian_martigny,
+    lib_martigny,
+    circulation_policies,
+):
     """Test extend an on_loan item."""
     # the following tests the circulation action EXTEND_3_1
     # for an on_loan item with no requests, the extend action is possible.
     item, patron, loan = item_on_loan_martigny_patron_and_loan_on_loan
-    settings = deepcopy(app.config['CIRCULATION_POLICIES']['extension'])
+    settings = deepcopy(app.config["CIRCULATION_POLICIES"]["extension"])
     cipo = get_circ_policy(loan)

     # FIRST TEST :: Extends expected from loan 'end_date'
@@ -190,96 +199,97 @@ def test_extend_on_item_on_loan_with_no_requests(
     # circulation policies excepting if some closed date exist for the
     # related library. As the checkout already set the 'end_date' to the end
     # of day, no timedelta should appears on hour/min/sec new end_date
-    app.config['CIRCULATION_POLICIES']['extension']['from_end_date'] = True
+    app.config["CIRCULATION_POLICIES"]["extension"]["from_end_date"] = True

     # Update loan `end_date` to play with "extend" function without problem
-    end_date = ciso8601.parse_datetime(str(loan.get('end_date')))
-    start_date = ciso8601.parse_datetime(str(loan.get('start_date')))
+    end_date = ciso8601.parse_datetime(str(loan.get("end_date")))
+    start_date = ciso8601.parse_datetime(str(loan.get("start_date")))
     end_date = end_date.replace(
-        year=start_date.year,
-        month=start_date.month,
-        day=start_date.day
+        year=start_date.year, month=start_date.month, day=start_date.day
     )
-    loan['end_date'] = end_date.isoformat()
-    start_date = datetime.now() - timedelta(days=cipo['checkout_duration'])
-    loan['start_date'] = start_date.isoformat()
-    loan['transaction_date'] = start_date.isoformat()
+    loan["end_date"] = end_date.isoformat()
+    start_date = datetime.now() - timedelta(days=cipo["checkout_duration"])
+    loan["start_date"] = start_date.isoformat()
+    loan["transaction_date"] = start_date.isoformat()
     initial_loan_data = deepcopy(loan)
     initial_loan = loan.update(loan, dbcommit=True, reindex=True)

     # Extend the loan
     params = {
-        'transaction_location_pid': loc_public_martigny.pid,
-        'transaction_user_pid': librarian_martigny.pid
+        "transaction_location_pid": loc_public_martigny.pid,
+        "transaction_user_pid": librarian_martigny.pid,
     }
     item, actions = item.extend_loan(**params)
     assert item.status == ItemStatus.ON_LOAN
     extended_loan = Loan.get_record_by_pid(initial_loan.pid)
     init_end_date = ciso8601.parse_datetime(str(initial_loan.end_date))
-    expected_date = init_end_date + timedelta(days=cipo['renewal_duration'])
+    expected_date = init_end_date + timedelta(days=cipo["renewal_duration"])
     expected_date_eve = expected_date - timedelta(days=1)
     expected_date = lib_martigny.next_open(expected_date_eve)
     ext_end_date = ciso8601.parse_datetime(str(extended_loan.end_date))
-    assert expected_date.strftime('%Y%m%d') == ext_end_date.strftime('%Y%m%d')
+    assert expected_date.strftime("%Y%m%d") == ext_end_date.strftime("%Y%m%d")

     # SECOND TEST :: Extends expected from loan `transaction_date`
     # The loan will also be extended from 'extension_duration' days excepting
     # library possible closed dates. But new end_date time should always
     # match end_of_the_day regardless the transaction_date.
-    app.config['CIRCULATION_POLICIES']['extension']['from_end_date'] = False
+    app.config["CIRCULATION_POLICIES"]["extension"]["from_end_date"] = False
     initial_loan = loan.update(initial_loan_data, dbcommit=True, reindex=True)

     # Extend the loan
     params = {
-        'transaction_location_pid': loc_public_martigny.pid,
-        'transaction_user_pid': librarian_martigny.pid
+        "transaction_location_pid": loc_public_martigny.pid,
+        "transaction_user_pid": librarian_martigny.pid,
     }
     item, actions = item.extend_loan(**params)
     assert item.status == ItemStatus.ON_LOAN
     extended_loan = Loan.get_record_by_pid(initial_loan.pid)
-    expected_date = datetime.now() + timedelta(days=cipo['renewal_duration'])
+    expected_date = datetime.now() + timedelta(days=cipo["renewal_duration"])
     expected_date_eve = expected_date - timedelta(days=1)
     expected_date = lib_martigny.next_open(expected_date_eve)
     ext_end_date = ciso8601.parse_datetime(str(extended_loan.end_date))
-    assert expected_date.strftime('%Y%m%d') == ext_end_date.strftime('%Y%m%d')
+    assert expected_date.strftime("%Y%m%d") == ext_end_date.strftime("%Y%m%d")

     # Reset the application configuration
-    app.config['CIRCULATION_POLICIES']['extension'] = settings
+    app.config["CIRCULATION_POLICIES"]["extension"] = settings


 def test_extend_on_item_on_loan_with_requests(
-        item_on_loan_martigny_patron_and_loan_on_loan,
-        loc_public_martigny, librarian_martigny,
-        circulation_policies, patron2_martigny):
+    item_on_loan_martigny_patron_and_loan_on_loan,
+    loc_public_martigny,
+    librarian_martigny,
+    circulation_policies,
+    patron2_martigny,
+):
     """Test extend an on_loan item with requests."""
     # the following tests the circulation action EXTEND_3_2
     # for an on_loan item with requests, the extend action is not possible.
     item, patron, loan = item_on_loan_martigny_patron_and_loan_on_loan
     params = {
-        'patron_pid': patron2_martigny.pid,
-        'transaction_location_pid': loc_public_martigny.pid,
-        'transaction_user_pid': librarian_martigny.pid,
-        'pickup_location_pid': loc_public_martigny.pid
+        "patron_pid": patron2_martigny.pid,
+        "transaction_location_pid": loc_public_martigny.pid,
+        "transaction_user_pid": librarian_martigny.pid,
+        "pickup_location_pid": loc_public_martigny.pid,
     }
     item, requested_loan = item_record_to_a_specific_loan_state(
-        item=item, loan_state=LoanState.PENDING, params=params,
-        copy_item=False)
+        item=item, loan_state=LoanState.PENDING, params=params, copy_item=False
+    )
     # test fails if no loan pid is given
     params = {
-        'transaction_location_pid': loc_public_martigny.pid,
-        'transaction_user_pid': librarian_martigny.pid
+        "transaction_location_pid": loc_public_martigny.pid,
+        "transaction_user_pid": librarian_martigny.pid,
     }
     with pytest.raises(NoCirculationAction):
         item, actions = item.extend_loan(**params)
     assert item.status == ItemStatus.ON_LOAN
     # test fails if loan pid is given
     params = {
-        'pid': loan.pid,
-        'transaction_location_pid': loc_public_martigny.pid,
-        'transaction_user_pid': librarian_martigny.pid
+        "pid": loan.pid,
+        "transaction_location_pid": loc_public_martigny.pid,
+        "transaction_user_pid": librarian_martigny.pid,
     }
     with pytest.raises(NoCirculationAction):
         item, actions = item.extend_loan(**params)
@@ -287,26 +297,28 @@

 def test_extend_on_item_in_transit_for_pickup(
-        item_in_transit_martigny_patron_and_loan_for_pickup,
-        loc_public_martigny, librarian_martigny,
-        circulation_policies):
+    item_in_transit_martigny_patron_and_loan_for_pickup,
+    loc_public_martigny,
+    librarian_martigny,
+    circulation_policies,
+):
     """Test extend an in_transit for pickup item."""
     # the following tests the circulation action EXTEND_4
     # for an in_transit item, the extend action is not possible.
     item, patron, loan = item_in_transit_martigny_patron_and_loan_for_pickup
     # test fails if no loan pid is given
     params = {
-        'transaction_location_pid': loc_public_martigny.pid,
-        'transaction_user_pid': librarian_martigny.pid
+        "transaction_location_pid": loc_public_martigny.pid,
+        "transaction_user_pid": librarian_martigny.pid,
     }
     with pytest.raises(NoCirculationAction):
         item, actions = item.extend_loan(**params)
     assert item.status == ItemStatus.IN_TRANSIT
     # test fails if a loan pid is given
     params = {
-        'pid': loan.pid,
-        'transaction_location_pid': loc_public_martigny.pid,
-        'transaction_user_pid': librarian_martigny.pid
+        "pid": loan.pid,
+        "transaction_location_pid": loc_public_martigny.pid,
+        "transaction_user_pid": librarian_martigny.pid,
     }
     with pytest.raises(CirculationException):
         item, actions = item.extend_loan(**params)
@@ -314,26 +326,28 @@

 def test_extend_on_item_in_transit_to_house(
-        item_in_transit_martigny_patron_and_loan_to_house,
-        loc_public_martigny, librarian_martigny,
-        circulation_policies):
+    item_in_transit_martigny_patron_and_loan_to_house,
+    loc_public_martigny,
+    librarian_martigny,
+    circulation_policies,
+):
     """Test extend an in_transit to_house item."""
     # the following tests the circulation action EXTEND_4
     # for an in_transit item, the extend action is not possible.
     item, patron, loan = item_in_transit_martigny_patron_and_loan_to_house
     # test fails if no loan pid is given
     params = {
-        'transaction_location_pid': loc_public_martigny.pid,
-        'transaction_user_pid': librarian_martigny.pid
+        "transaction_location_pid": loc_public_martigny.pid,
+        "transaction_user_pid": librarian_martigny.pid,
     }
     with pytest.raises(NoCirculationAction):
         item, actions = item.extend_loan(**params)
     assert item.status == ItemStatus.IN_TRANSIT
     # test fails if a loan pid is given
     params = {
-        'pid': loan.pid,
-        'transaction_location_pid': loc_public_martigny.pid,
-        'transaction_user_pid': librarian_martigny.pid
+        "pid": loan.pid,
+        "transaction_location_pid": loc_public_martigny.pid,
+        "transaction_user_pid": librarian_martigny.pid,
    }
     with pytest.raises(CirculationException):
         item, actions = item.extend_loan(**params)
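test_extend_on_item_on_loan_with_no_requests mutates app.config and restores it by hand via a deepcopied `settings` snapshot. A hypothetical restore-on-exit variant of the same idea, as a context manager (our helper, not part of the patch):

    from contextlib import contextmanager
    from copy import deepcopy

    @contextmanager
    def override_config(config, key, value):
        # Snapshot the old value, install the override, and always restore,
        # even if the test body raises.
        saved = deepcopy(config[key])
        config[key] = value
        try:
            yield
        finally:
            config[key] = saved

    config = {"CIRCULATION_POLICIES": {"extension": {"from_end_date": True}}}
    with override_config(
        config["CIRCULATION_POLICIES"], "extension", {"from_end_date": False}
    ):
        assert config["CIRCULATION_POLICIES"]["extension"]["from_end_date"] is False
    assert config["CIRCULATION_POLICIES"]["extension"]["from_end_date"] is True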
diff --git a/tests/ui/circulation/test_actions_validate_request.py b/tests/ui/circulation/test_actions_validate_request.py
index df8386e0cc..904593fe24 100644
--- a/tests/ui/circulation/test_actions_validate_request.py
+++ b/tests/ui/circulation/test_actions_validate_request.py
@@ -29,17 +29,20 @@

 def test_validate_on_item_on_shelf_no_requests(
-        item_lib_martigny, patron_martigny,
-        loc_public_martigny, librarian_martigny,
-        circulation_policies):
+    item_lib_martigny,
+    patron_martigny,
+    loc_public_martigny,
+    librarian_martigny,
+    circulation_policies,
+):
     """Test validate a request on an on_shelf item with no requests."""
     # the following tests the circulation action VALIDATE_1_1
     # an on_shelf item with no pending requests.
     # no circulation action will be performed. no loan to validate.
     params = {
-        'transaction_location_pid': loc_public_martigny.pid,
-        'transaction_user_pid': librarian_martigny.pid
+        "transaction_location_pid": loc_public_martigny.pid,
+        "transaction_user_pid": librarian_martigny.pid,
     }
     with pytest.raises(NoCirculationAction):
         item, actions = item_lib_martigny.validate_request(**params)
@@ -47,9 +50,11 @@ def test_validate_on_item_on_shelf_no_requests(

 def test_validate_on_item_on_shelf_with_requests_at_home(
-        item_on_shelf_martigny_patron_and_loan_pending,
-        loc_public_martigny, librarian_martigny,
-        circulation_policies):
+    item_on_shelf_martigny_patron_and_loan_pending,
+    loc_public_martigny,
+    librarian_martigny,
+    circulation_policies,
+):
     """Test validate a request on an on_shelf item with requests at home."""
     # the following tests the circulation action VALIDATE_1_2_1
     # an on_shelf item with pending requests.
@@ -57,20 +62,23 @@ def test_validate_on_item_on_shelf_with_requests_at_home(
     # equal to the transaction library.
     item, patron, loan = item_on_shelf_martigny_patron_and_loan_pending
     params = {
-        'transaction_location_pid': loc_public_martigny.pid,
-        'transaction_user_pid': librarian_martigny.pid,
-        'pid': loan.pid
+        "transaction_location_pid": loc_public_martigny.pid,
+        "transaction_user_pid": librarian_martigny.pid,
+        "pid": loan.pid,
     }
     item, actions = item.validate_request(**params)
     assert item.status == ItemStatus.AT_DESK
     loan = Loan.get_record_by_pid(loan.pid)
-    assert loan['state'] == LoanState.ITEM_AT_DESK
+    assert loan["state"] == LoanState.ITEM_AT_DESK


 def test_validate_on_item_on_shelf_with_requests_externally(
-        item2_on_shelf_martigny_patron_and_loan_pending, loc_public_fully,
-        loc_public_martigny, librarian_martigny,
-        circulation_policies):
+    item2_on_shelf_martigny_patron_and_loan_pending,
+    loc_public_fully,
+    loc_public_martigny,
+    librarian_martigny,
+    circulation_policies,
+):
     """Test validate a request on an on_shelf item with requests externally."""
     # the following tests the circulation action VALIDATE_1_2_2
     # an on_shelf item with pending requests.
@@ -78,161 +86,173 @@ def test_validate_on_item_on_shelf_with_requests_externally(
     # the validation is possible and the item goes to in_transit when the
     # pickup library does not equal to the transaction library.
     item, patron, loan = item2_on_shelf_martigny_patron_and_loan_pending
     params = {
-        'transaction_location_pid': loc_public_fully.pid,
-        'transaction_user_pid': librarian_martigny.pid,
-        'pid': loan.pid
+        "transaction_location_pid": loc_public_fully.pid,
+        "transaction_user_pid": librarian_martigny.pid,
+        "pid": loan.pid,
     }
     item, actions = item.validate_request(**params)
     assert item.status == ItemStatus.IN_TRANSIT
     loan = Loan.get_record_by_pid(loan.pid)
-    assert loan['state'] == LoanState.ITEM_IN_TRANSIT_FOR_PICKUP
+    assert loan["state"] == LoanState.ITEM_IN_TRANSIT_FOR_PICKUP
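VALIDATE_1_2_1 and VALIDATE_1_2_2 together pin down the routing rule these tests exercise: validating a request at the pickup library puts the item at the desk, validating anywhere else sends it in transit for pickup. A toy, self-contained version of that branch (status and state strings are illustrative, not the project's constants):

    def route_validated_request(pickup_library_pid, transaction_library_pid):
        # Same decision the two tests above assert from opposite sides.
        if pickup_library_pid == transaction_library_pid:
            return "at_desk", "ITEM_AT_DESK"
        return "in_transit", "ITEM_IN_TRANSIT_FOR_PICKUP"

    assert route_validated_request("martigny", "martigny") == (
        "at_desk", "ITEM_AT_DESK")
    assert route_validated_request("martigny", "fully") == (
        "in_transit", "ITEM_IN_TRANSIT_FOR_PICKUP")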
"transaction_user_pid": librarian_martigny.pid, + "pid": loan.pid, } with pytest.raises(NoValidTransitionAvailableError): item, actions = item.validate_request(**params) assert item.status == ItemStatus.ON_LOAN loan = Loan.get_record_by_pid(loan.pid) - assert loan['state'] == LoanState.ITEM_ON_LOAN + assert loan["state"] == LoanState.ITEM_ON_LOAN # will not be able to validate any requestes for this item params = { - 'patron_pid': patron2_martigny.pid, - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid, - 'pickup_location_pid': loc_public_martigny.pid + "patron_pid": patron2_martigny.pid, + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, + "pickup_location_pid": loc_public_martigny.pid, } item, requested_loan = item_record_to_a_specific_loan_state( - item=item, loan_state=LoanState.PENDING, params=params, - copy_item=False) - params['pid'] = requested_loan.pid + item=item, loan_state=LoanState.PENDING, params=params, copy_item=False + ) + params["pid"] = requested_loan.pid with pytest.raises(NoValidTransitionAvailableError): item, actions = item.validate_request(**params) requested_loan = Loan.get_record_by_pid(requested_loan.pid) - assert requested_loan['state'] == LoanState.PENDING + assert requested_loan["state"] == LoanState.PENDING loan = Loan.get_record_by_pid(loan.pid) - assert loan['state'] == LoanState.ITEM_ON_LOAN + assert loan["state"] == LoanState.ITEM_ON_LOAN def test_validate_on_item_in_transit_for_pickup( - item_in_transit_martigny_patron_and_loan_for_pickup, - loc_public_martigny, librarian_martigny, - circulation_policies, patron2_martigny): + item_in_transit_martigny_patron_and_loan_for_pickup, + loc_public_martigny, + librarian_martigny, + circulation_policies, + patron2_martigny, +): """Test validate a request on an item in_transit for pickup.""" # the following tests the circulation action VALIDATE_4 # on on_loan item, the validation is not possible item, patron, loan = item_in_transit_martigny_patron_and_loan_for_pickup params = { - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid, - 'pid': loan.pid + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, + "pid": loan.pid, } with pytest.raises(NoValidTransitionAvailableError): item, actions = item.validate_request(**params) assert item.status == ItemStatus.IN_TRANSIT loan = Loan.get_record_by_pid(loan.pid) - assert loan['state'] == LoanState.ITEM_IN_TRANSIT_FOR_PICKUP + assert loan["state"] == LoanState.ITEM_IN_TRANSIT_FOR_PICKUP # will not be able to validate any requestes for this item params = { - 'patron_pid': patron2_martigny.pid, - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid, - 'pickup_location_pid': loc_public_martigny.pid + "patron_pid": patron2_martigny.pid, + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, + "pickup_location_pid": loc_public_martigny.pid, } item, requested_loan = item_record_to_a_specific_loan_state( - item=item, loan_state=LoanState.PENDING, params=params, - copy_item=False) - params['pid'] = requested_loan.pid + item=item, loan_state=LoanState.PENDING, params=params, copy_item=False + ) + params["pid"] = requested_loan.pid with pytest.raises(NoValidTransitionAvailableError): item, actions = item.validate_request(**params) requested_loan = 
     requested_loan = Loan.get_record_by_pid(requested_loan.pid)
-    assert requested_loan['state'] == LoanState.PENDING
+    assert requested_loan["state"] == LoanState.PENDING
     loan = Loan.get_record_by_pid(loan.pid)
-    assert loan['state'] == LoanState.ITEM_IN_TRANSIT_FOR_PICKUP
+    assert loan["state"] == LoanState.ITEM_IN_TRANSIT_FOR_PICKUP


 def test_validate_on_item_in_transit_to_house(
-        item_in_transit_martigny_patron_and_loan_to_house,
-        loc_public_martigny, librarian_martigny,
-        circulation_policies, patron2_martigny):
+    item_in_transit_martigny_patron_and_loan_to_house,
+    loc_public_martigny,
+    librarian_martigny,
+    circulation_policies,
+    patron2_martigny,
+):
     """Test validate a request on an item in_transit to house."""
     # the following tests the circulation action VALIDATE_5
     # on on_loan item, the validation is not possible
     item, patron, loan = item_in_transit_martigny_patron_and_loan_to_house
     params = {
-        'transaction_location_pid': loc_public_martigny.pid,
-        'transaction_user_pid': librarian_martigny.pid,
-        'pid': loan.pid
+        "transaction_location_pid": loc_public_martigny.pid,
+        "transaction_user_pid": librarian_martigny.pid,
+        "pid": loan.pid,
     }
     with pytest.raises(NoValidTransitionAvailableError):
         item, actions = item.validate_request(**params)
     assert item.status == ItemStatus.IN_TRANSIT
     loan = Loan.get_record_by_pid(loan.pid)
-    assert loan['state'] == LoanState.ITEM_IN_TRANSIT_TO_HOUSE
+    assert loan["state"] == LoanState.ITEM_IN_TRANSIT_TO_HOUSE

     # will not be able to validate any requestes for this item
     params = {
-        'patron_pid': patron2_martigny.pid,
-        'transaction_location_pid': loc_public_martigny.pid,
-        'transaction_user_pid': librarian_martigny.pid,
-        'pickup_location_pid': loc_public_martigny.pid
+        "patron_pid": patron2_martigny.pid,
+        "transaction_location_pid": loc_public_martigny.pid,
+        "transaction_user_pid": librarian_martigny.pid,
+        "pickup_location_pid": loc_public_martigny.pid,
     }
     item, requested_loan = item_record_to_a_specific_loan_state(
-        item=item, loan_state=LoanState.PENDING, params=params,
-        copy_item=False)
-    params['pid'] = requested_loan.pid
+        item=item, loan_state=LoanState.PENDING, params=params, copy_item=False
+    )
+    params["pid"] = requested_loan.pid
     with pytest.raises(NoValidTransitionAvailableError):
         item, actions = item.validate_request(**params)
     requested_loan = Loan.get_record_by_pid(requested_loan.pid)
-    assert requested_loan['state'] == LoanState.PENDING
+    assert requested_loan["state"] == LoanState.PENDING
     loan = Loan.get_record_by_pid(loan.pid)
-    assert loan['state'] == LoanState.ITEM_IN_TRANSIT_TO_HOUSE
+    assert loan["state"] == LoanState.ITEM_IN_TRANSIT_TO_HOUSE
diff --git a/tests/ui/circulation/test_extend_external.py b/tests/ui/circulation/test_extend_external.py
index 8d2dff5987..274d3b0ddf 100644
--- a/tests/ui/circulation/test_extend_external.py
+++ b/tests/ui/circulation/test_extend_external.py
@@ -31,102 +31,120 @@
 def test_item_loans_extend_duration(
-        item_lib_martigny, librarian_martigny, patron_martigny,
-        loc_public_martigny, circulation_policies, lib_martigny):
+    item_lib_martigny,
+    librarian_martigny,
+    patron_martigny,
+    loc_public_martigny,
+    circulation_policies,
+    lib_martigny,
+):
     """Test loan extend duration."""
     # Note: this test moved here from the file test_loans_api.py because of
     # tests fixtures conflicts.
     for now_str in [
         # winter time
-        '2021-12-13 06:00:00', '2022-12-13 20:00:00',
+        "2021-12-13 06:00:00",
+        "2022-12-13 20:00:00",
         # winter to summer time
-        '2022-03-07 06:00:00', '2022-03-07 20:00:00',
+        "2022-03-07 06:00:00",
+        "2022-03-07 20:00:00",
         # summer time
-        '2022-06-13 05:00:00', '2022-06-13 19:00:00',
+        "2022-06-13 05:00:00",
+        "2022-06-13 19:00:00",
         # summer to winter time
-        '2022-10-10 05:00:00', '2022-10-10 19:00:00'
+        "2022-10-10 05:00:00",
+        "2022-10-10 19:00:00",
     ]:
         with freeze_time(now_str, tz_offset=0):
             # do a checkout
             item, actions = item_lib_martigny.checkout(
                 patron_pid=patron_martigny.pid,
                 transaction_location_pid=loc_public_martigny.pid,
-                transaction_user_pid=librarian_martigny.pid
+                transaction_user_pid=librarian_martigny.pid,
             )
-            loan_pid = actions['checkout']['pid']
+            loan_pid = actions["checkout"]["pid"]
             # assert loan_pid
             loan = Loan.get_record_by_pid(loan_pid)
-            end_date = ciso8601.parse_datetime(loan.get('end_date'))
+            end_date = ciso8601.parse_datetime(loan.get("end_date"))
             policy = get_circ_policy(loan, checkout_location=True)

         # do the extend one day before the end date at 3pm
-        extend_action_date = (
-            end_date - timedelta(days=1)).replace(hour=15)
+        extend_action_date = (end_date - timedelta(days=1)).replace(hour=15)
         with freeze_time(extend_action_date.isoformat()):
-            duration = get_extension_params(
-                loan, parameter_name='duration_default')
+            duration = get_extension_params(loan, parameter_name="duration_default")
             now = datetime.now(timezone.utc)
             utc_end_date = now + duration
             # computed end date at the library timezone
-            end_date = utc_end_date.astimezone(
-                tz=lib_martigny.get_timezone())
-            expected_utc_end_date = now + timedelta(
-                days=policy['renewal_duration'])
+            end_date = utc_end_date.astimezone(tz=lib_martigny.get_timezone())
+            expected_utc_end_date = now + timedelta(days=policy["renewal_duration"])
             # expected end date at the library timezone
             expected_end_date = expected_utc_end_date.astimezone(
-                lib_martigny.get_timezone())
-            assert end_date.strftime('%Y-%m-%d') == \
-                expected_end_date.strftime('%Y-%m-%d')
+                lib_martigny.get_timezone()
+            )
+            assert end_date.strftime("%Y-%m-%d") == expected_end_date.strftime(
+                "%Y-%m-%d"
+            )
             assert end_date.hour == 23
             assert end_date.minute == 59
             # checkin the item for the next tests
             item_lib_martigny.checkin(
                 patron_pid=patron_martigny.pid,
                 transaction_location_pid=loc_public_martigny.pid,
-                transaction_user_pid=librarian_martigny.pid
+                transaction_user_pid=librarian_martigny.pid,
             )


 def test_extend_on_item_on_loan_with_no_requests_external_library(
-        app, item_lib_martigny, patron_martigny, item_type_on_site_martigny,
-        loc_public_martigny, librarian_martigny, lib_martigny, lib_saxon,
-        loc_public_saxon, patron_type_adults_martigny, circulation_policies):
+    app,
+    item_lib_martigny,
+    patron_martigny,
+    item_type_on_site_martigny,
+    loc_public_martigny,
+    librarian_martigny,
+    lib_martigny,
+    lib_saxon,
+    loc_public_saxon,
+    patron_type_adults_martigny,
+    circulation_policies,
+):
     """Test extend an on_loan item at an external library."""
-    patron_martigny['patron']['type']['$ref'] = get_ref_for_pid(
-        'ptty', patron_type_adults_martigny.pid)
+    patron_martigny["patron"]["type"]["$ref"] = get_ref_for_pid(
+        "ptty", patron_type_adults_martigny.pid
+    )
     patron_martigny.update(patron_martigny, dbcommit=True, reindex=True)
-    item_lib_martigny['item_type']['$ref'] = get_ref_for_pid(
-        'itty', item_type_on_site_martigny.pid)
+    item_lib_martigny["item_type"]["$ref"] = get_ref_for_pid(
+        "itty", item_type_on_site_martigny.pid
+    )
     item_lib_martigny.update(item_lib_martigny, dbcommit=True, reindex=True)
     # the library level cipo3 is used here circ_policy_temp_martigny
     params = {
-        'patron_pid': patron_martigny.pid,
-        'transaction_location_pid': loc_public_martigny.pid,
-        'transaction_user_pid': librarian_martigny.pid,
-        'pickup_location_pid': loc_public_martigny.pid
+        "patron_pid": patron_martigny.pid,
+        "transaction_location_pid": loc_public_martigny.pid,
+        "transaction_user_pid": librarian_martigny.pid,
+        "pickup_location_pid": loc_public_martigny.pid,
     }
     item, loan = item_record_to_a_specific_loan_state(
         item=item_lib_martigny,
         loan_state=LoanState.ITEM_ON_LOAN,
-        params=params, copy_item=True)
+        params=params,
+        copy_item=True,
+    )

-    settings = app.config['CIRCULATION_POLICIES']['extension']
-    app.config['CIRCULATION_POLICIES']['extension']['from_end_date'] = True
-    loan['end_date'] = loan['start_date']
+    settings = app.config["CIRCULATION_POLICIES"]["extension"]
+    app.config["CIRCULATION_POLICIES"]["extension"]["from_end_date"] = True
+    loan["end_date"] = loan["start_date"]
     initial_loan = loan.update(loan, dbcommit=True, reindex=True)

-    assert get_circ_policy(
-        loan, checkout_location=True) == get_circ_policy(loan)
+    assert get_circ_policy(loan, checkout_location=True) == get_circ_policy(loan)

     # The cipo used for the checkout or renewal is "short" which is configured
     # only for lib_martigny. For other libraries it is the default cipo to be
     # used.
     params = {
-        'transaction_location_pid': loc_public_saxon.pid,
-        'transaction_user_pid': librarian_martigny.pid
+        "transaction_location_pid": loc_public_saxon.pid,
+        "transaction_user_pid": librarian_martigny.pid,
     }
     cipo = get_circ_policy(loan)
     item, actions = item.extend_loan(**params)
     loan = Loan.get_record_by_pid(initial_loan.pid)
     # now the extend action does not take into account anymore the transaction
     # library so it continues to use the "short" policy for the extend action.
-    assert get_circ_policy(
-        loan, checkout_location=True).get('pid') == cipo.get('pid')
-    assert get_circ_policy(loan).get('pid') != cipo.get('pid')
+    assert get_circ_policy(loan, checkout_location=True).get("pid") == cipo.get("pid")
+    assert get_circ_policy(loan).get("pid") != cipo.get("pid")
diff --git a/tests/ui/circulation/test_in_transit_actions.py b/tests/ui/circulation/test_in_transit_actions.py
index 8aceeb7242..88f1cbc697 100644
--- a/tests/ui/circulation/test_in_transit_actions.py
+++ b/tests/ui/circulation/test_in_transit_actions.py
@@ -22,100 +22,103 @@
 from rero_ils.modules.loans.models import LoanAction, LoanState


-def test_in_transit_second_request_at_home(app, item_lib_martigny,
-                                           patron_martigny, patron2_martigny,
-                                           librarian_martigny,
-                                           loc_public_martigny,
-                                           circulation_policies,
-                                           loc_public_fully):
+def test_in_transit_second_request_at_home(
+    app,
+    item_lib_martigny,
+    patron_martigny,
+    patron2_martigny,
+    librarian_martigny,
+    loc_public_martigny,
+    circulation_policies,
+    loc_public_fully,
+):
     """Test cases when in-transit loan is cancelled."""
     params = {
-        'patron_pid': patron_martigny.pid,
-        'transaction_location_pid': loc_public_martigny.pid,
-        'transaction_user_pid': librarian_martigny.pid,
-        'pickup_location_pid': loc_public_fully.pid
+        "patron_pid": patron_martigny.pid,
+        "transaction_location_pid": loc_public_martigny.pid,
+        "transaction_user_pid": librarian_martigny.pid,
+        "pickup_location_pid": loc_public_fully.pid,
     }
     assert item_lib_martigny.status == ItemStatus.ON_SHELF
     item, actions = item_lib_martigny.request(**params)
-    first_loan = Loan.get_record_by_pid(actions[LoanAction.REQUEST].get('pid'))
+    first_loan = Loan.get_record_by_pid(actions[LoanAction.REQUEST].get("pid"))
     assert item_lib_martigny.status == ItemStatus.ON_SHELF
-    assert first_loan['state'] == LoanState.PENDING
+    assert first_loan["state"] == LoanState.PENDING
     item, actions = item.validate_request(**params, pid=first_loan.pid)
-    first_loan = Loan.get_record_by_pid(actions[LoanAction.VALIDATE].get(
-        'pid'))
+    first_loan = Loan.get_record_by_pid(actions[LoanAction.VALIDATE].get("pid"))
     assert item.status == ItemStatus.IN_TRANSIT
-    assert first_loan['state'] == LoanState.ITEM_IN_TRANSIT_FOR_PICKUP
+    assert first_loan["state"] == LoanState.ITEM_IN_TRANSIT_FOR_PICKUP
     params = {
-        'patron_pid': patron2_martigny.pid,
-        'transaction_location_pid': loc_public_martigny.pid,
-        'transaction_user_pid': librarian_martigny.pid,
-        'pickup_location_pid': loc_public_martigny.pid
+        "patron_pid": patron2_martigny.pid,
+        "transaction_location_pid": loc_public_martigny.pid,
+        "transaction_user_pid": librarian_martigny.pid,
+        "pickup_location_pid": loc_public_martigny.pid,
     }
     item, actions = item.request(**params)
-    second_loan = Loan.get_record_by_pid(actions[LoanAction.REQUEST].get(
-        'pid'))
+    second_loan = Loan.get_record_by_pid(actions[LoanAction.REQUEST].get("pid"))
     assert item_lib_martigny.status == ItemStatus.IN_TRANSIT
-    assert second_loan['state'] == LoanState.PENDING
+    assert second_loan["state"] == LoanState.PENDING
     params = {
-        'pid': first_loan.pid,
-        'transaction_location_pid': loc_public_martigny.pid,
-        'transaction_user_pid': librarian_martigny.pid
+        "pid": first_loan.pid,
+        "transaction_location_pid": loc_public_martigny.pid,
+        "transaction_user_pid": librarian_martigny.pid,
     }
     item, actions = item.cancel_item_request(**params)
     first_loan = Loan.get_record_by_pid(first_loan.pid)
     second_loan = Loan.get_record_by_pid(second_loan.pid)
     assert item_lib_martigny.status == ItemStatus.IN_TRANSIT
-    assert second_loan['state'] == LoanState.ITEM_IN_TRANSIT_FOR_PICKUP
-    assert first_loan['state'] == LoanState.CANCELLED
+    assert second_loan["state"] == LoanState.ITEM_IN_TRANSIT_FOR_PICKUP
+    assert first_loan["state"] == LoanState.CANCELLED


-def test_in_transit_second_request_externally(app, item2_lib_martigny,
-                                              patron_martigny,
-                                              patron2_martigny,
-                                              librarian_martigny,
-                                              loc_public_martigny,
-                                              circulation_policies,
-                                              loc_public_fully):
+def test_in_transit_second_request_externally(
+    app,
+    item2_lib_martigny,
+    patron_martigny,
+    patron2_martigny,
+    librarian_martigny,
+    loc_public_martigny,
+    circulation_policies,
+    loc_public_fully,
+):
     """Test cases when in-transit loan is cancelled."""
     params = {
-        'patron_pid': patron_martigny.pid,
-        'transaction_location_pid': loc_public_martigny.pid,
-        'transaction_user_pid': librarian_martigny.pid,
-        'pickup_location_pid': loc_public_fully.pid
+        "patron_pid": patron_martigny.pid,
+        "transaction_location_pid": loc_public_martigny.pid,
+        "transaction_user_pid": librarian_martigny.pid,
+        "pickup_location_pid": loc_public_fully.pid,
     }
     assert item2_lib_martigny.status == ItemStatus.ON_SHELF
     item, actions = item2_lib_martigny.request(**params)
-    first_loan = Loan.get_record_by_pid(actions[LoanAction.REQUEST].get('pid'))
+    first_loan = Loan.get_record_by_pid(actions[LoanAction.REQUEST].get("pid"))
     assert item2_lib_martigny.status == ItemStatus.ON_SHELF
-    assert first_loan['state'] == LoanState.PENDING
+    assert first_loan["state"] == LoanState.PENDING
     item, actions = item.validate_request(**params, pid=first_loan.pid)
-    first_loan = Loan.get_record_by_pid(actions[LoanAction.VALIDATE].get(
-        'pid'))
+    first_loan = Loan.get_record_by_pid(actions[LoanAction.VALIDATE].get("pid"))
     assert item.status == ItemStatus.IN_TRANSIT
-    assert first_loan['state'] == LoanState.ITEM_IN_TRANSIT_FOR_PICKUP
+    assert first_loan["state"] == LoanState.ITEM_IN_TRANSIT_FOR_PICKUP
     params = {
-        'patron_pid': patron2_martigny.pid,
-        'transaction_location_pid': loc_public_martigny.pid,
-        'transaction_user_pid': librarian_martigny.pid,
-        'pickup_location_pid': loc_public_fully.pid
+        "patron_pid": patron2_martigny.pid,
+        "transaction_location_pid": loc_public_martigny.pid,
+        "transaction_user_pid": librarian_martigny.pid,
+        "pickup_location_pid": loc_public_fully.pid,
     }
     item, actions = item.request(**params)
-    second_loan = Loan.get_record_by_pid(actions[LoanAction.REQUEST].get(
-        'pid'))
+    second_loan = Loan.get_record_by_pid(actions[LoanAction.REQUEST].get("pid"))
     assert item2_lib_martigny.status == ItemStatus.IN_TRANSIT
-    assert second_loan['state'] == LoanState.PENDING
+    assert second_loan["state"] == LoanState.PENDING
     params = {
-        'pid': first_loan.pid,
-        'transaction_location_pid': loc_public_martigny.pid,
-        'transaction_user_pid': librarian_martigny.pid
+        "pid": first_loan.pid,
+        "transaction_location_pid": loc_public_martigny.pid,
+        "transaction_user_pid": librarian_martigny.pid,
     }
     item, actions = item.cancel_item_request(**params)
     first_loan = Loan.get_record_by_pid(first_loan.pid)
     second_loan = Loan.get_record_by_pid(second_loan.pid)
     assert item2_lib_martigny.status == ItemStatus.IN_TRANSIT
-    assert second_loan['state'] == LoanState.ITEM_IN_TRANSIT_FOR_PICKUP
-    assert first_loan['state'] == LoanState.CANCELLED
+    assert second_loan["state"] == LoanState.ITEM_IN_TRANSIT_FOR_PICKUP
+    assert first_loan["state"] == LoanState.CANCELLED
diff --git a/tests/ui/circulation/test_loan_utils.py b/tests/ui/circulation/test_loan_utils.py
index 7a99e86596..c4ec161c78 100644
--- a/tests/ui/circulation/test_loan_utils.py
+++ b/tests/ui/circulation/test_loan_utils.py
@@ -21,47 +21,58 @@
 import pytest
 from invenio_circulation.errors import CirculationException
-from utils import flush_index, item_record_to_a_specific_loan_state
+from utils import item_record_to_a_specific_loan_state

 from rero_ils.modules.api import IlsRecordError
 from rero_ils.modules.items.utils import item_pid_to_object
-from rero_ils.modules.loans.api import Loan, \
-    get_last_transaction_loc_for_item, get_loans_by_patron_pid
+from rero_ils.modules.loans.api import (
+    Loan,
+    get_last_transaction_loc_for_item,
+    get_loans_by_patron_pid,
+)
 from rero_ils.modules.loans.models import LoanState
 from rero_ils.modules.loans.utils import can_be_requested
 from rero_ils.modules.locations.api import LocationsSearch


-def test_loan_utils(client, patron_martigny,
-                    patron2_martigny, circulation_policies,
-                    item_lib_martigny, librarian_martigny,
-                    loc_public_martigny):
+def test_loan_utils(
+    client,
+    patron_martigny,
+    patron2_martigny,
+    circulation_policies,
+    item_lib_martigny,
+    librarian_martigny,
+    loc_public_martigny,
+):
     """Test loan utils methods."""
     loan_metadata = dict(item_lib_martigny)
-    loan_metadata['item_pid'] = item_pid_to_object(item_lib_martigny.pid)
-    if 'patron_pid' not in loan_metadata:
-        loan_metadata['patron_pid'] = patron_martigny.pid
+    loan_metadata["item_pid"] = item_pid_to_object(item_lib_martigny.pid)
+    if "patron_pid" not in loan_metadata:
+        loan_metadata["patron_pid"] = patron_martigny.pid
     # Create "virtual" Loan (not registered)
     loan = Loan(loan_metadata)
     # test that loan can successfully move to the pending state
     assert can_be_requested(loan)
     # test that loan without an item may not move to the pending state
-    del loan['item_pid']
+    del loan["item_pid"]
     with pytest.raises(Exception):
         assert can_be_requested(loan)

     # test a pending loan will be attached at the right organisation and
     # will not be considered as an active loan
     params = {
-        'patron_pid': patron2_martigny.pid,
-        'transaction_location_pid': loc_public_martigny.pid,
-        'transaction_user_pid': librarian_martigny.pid,
-        'pickup_location_pid': loc_public_martigny.pid
+        "patron_pid": patron2_martigny.pid,
+        "transaction_location_pid": loc_public_martigny.pid,
+        "transaction_user_pid": librarian_martigny.pid,
+        "pickup_location_pid": loc_public_martigny.pid,
     }
     item, loan_pending_martigny = item_record_to_a_specific_loan_state(
-        item=item_lib_martigny, loan_state=LoanState.PENDING,
-        params=params, copy_item=True)
+        item=item_lib_martigny,
+        loan_state=LoanState.PENDING,
+        params=params,
+        copy_item=True,
+    )
     assert loan_pending_martigny.patron_pid == patron2_martigny.pid
     assert not loan_pending_martigny.is_active
@@ -79,38 +90,36 @@ def test_loan_utils(client, patron_martigny,
     # test the organisation of the loan is based on the item
     new_loan = deepcopy(loan_pending_martigny)
     assert new_loan.organisation_pid
-    del new_loan['item_pid']
+    del new_loan["item_pid"]
     with pytest.raises(IlsRecordError.PidDoesNotExist):
         new_loan.organisation_pid
     assert not can_be_requested(loan_pending_martigny)

     # test the allow request at the location level
-    loc_public_martigny['allow_request'] = False
-    loc_public_martigny.update(
-        loc_public_martigny,
-        dbcommit=True,
-        reindex=True
-    )
-    flush_index(LocationsSearch.Meta.index)
+    loc_public_martigny["allow_request"] = False
+    loc_public_martigny.update(loc_public_martigny, dbcommit=True, reindex=True)
+    LocationsSearch.flush_and_refresh()
     new_loan = {
-        'patron_pid': patron_martigny.pid,
-        'transaction_location_pid': loc_public_martigny.pid,
-        'transaction_user_pid': librarian_martigny.pid,
-        'pickup_location_pid': loc_public_martigny.pid
+        "patron_pid": patron_martigny.pid,
+        "transaction_location_pid": loc_public_martigny.pid,
+        "transaction_user_pid": librarian_martigny.pid,
+        "pickup_location_pid": loc_public_martigny.pid,
     }
     with pytest.raises(CirculationException):
         item, loan_pending_martigny = item_record_to_a_specific_loan_state(
-            item=item_lib_martigny, loan_state=LoanState.PENDING,
-            params=params, copy_item=True)
+            item=item_lib_martigny,
+            loan_state=LoanState.PENDING,
+            params=params,
+            copy_item=True,
+        )

-    loc_public_martigny['allow_request'] = True
-    loc_public_martigny.update(
-        loc_public_martigny,
-        dbcommit=True,
-        reindex=True
-    )
-    flush_index(LocationsSearch.Meta.index)
+    loc_public_martigny["allow_request"] = True
+    loc_public_martigny.update(loc_public_martigny, dbcommit=True, reindex=True)
+    LocationsSearch.flush_and_refresh()
     item, loan_pending_martigny = item_record_to_a_specific_loan_state(
-        item=item_lib_martigny, loan_state=LoanState.PENDING,
-        params=params, copy_item=True)
-    assert loan_pending_martigny['state'] == LoanState.PENDING
+        item=item_lib_martigny,
+        loan_state=LoanState.PENDING,
+        params=params,
+        copy_item=True,
+    )
+    assert loan_pending_martigny["state"] == LoanState.PENDING
diff --git a/tests/ui/collections/test_collections_mapping.py b/tests/ui/collections/test_collections_mapping.py
index 98a959ca29..ebc18e7507 100644
--- a/tests/ui/collections/test_collections_mapping.py
+++ b/tests/ui/collections/test_collections_mapping.py
@@ -21,17 +21,20 @@
 from rero_ils.modules.collections.api import Collection, CollectionsSearch


-def test_collections_es_mapping(search, db, org_martigny, coll_martigny_1_data,
-                                item_lib_martigny, item2_lib_martigny):
+def test_collections_es_mapping(
+    search,
+    db,
+    org_martigny,
+    coll_martigny_1_data,
+    item_lib_martigny,
+    item2_lib_martigny,
+):
     """Test collections elasticsearch mapping."""
     search = CollectionsSearch()
     mapping = get_mapping(search.Meta.index)
     assert mapping
     collection = Collection.create(
-        coll_martigny_1_data,
-        dbcommit=True,
-        reindex=True,
-        delete_pid=True
+        coll_martigny_1_data, dbcommit=True, reindex=True, delete_pid=True
     )
     assert mapping == get_mapping(search.Meta.index)
     collection.delete(force=True, dbcommit=True, delindex=True)
diff --git a/tests/ui/collections/test_collections_ui.py b/tests/ui/collections/test_collections_ui.py
index 836f14b5f1..e5fff7d3a2 100644
--- a/tests/ui/collections/test_collections_ui.py
+++ b/tests/ui/collections/test_collections_ui.py
@@ -24,16 +24,18 @@
 def test_collection_detailed_view_without_library(client, coll_saxon_1):
     """Test collection detailed view."""
     # check redirection
-    res = client.get(url_for(
-        'invenio_records_ui.coll', viewcode='org1',
-        pid_value=coll_saxon_1.pid))
+    res = client.get(
+        url_for("invenio_records_ui.coll", viewcode="org1", pid_value=coll_saxon_1.pid)
+    )
     assert res.status_code == 200


 def test_collection_detailed_view(client, coll_martigny_1):
     """Test collection detailed view."""
     # check redirection
-    res = client.get(url_for(
-        'invenio_records_ui.coll', viewcode='org1',
-        pid_value=coll_martigny_1.pid))
+    res = client.get(
+        url_for(
+            "invenio_records_ui.coll", viewcode="org1", pid_value=coll_martigny_1.pid
+        )
+    )
     assert res.status_code == 200
diff --git a/tests/ui/collections/test_collections_utils.py b/tests/ui/collections/test_collections_utils.py
index 5971e310a6..91422283de 100644
--- a/tests/ui/collections/test_collections_utils.py
+++ b/tests/ui/collections/test_collections_utils.py
@@ -24,15 +24,12 @@
 def test_get_teachers(db, coll_martigny_1_data):
     """Test get teachers."""
-    result = 'Pr. Smith, John, Pr. Nonyme, Anne'
+    result = "Pr. Smith, John, Pr. Nonyme, Anne"
     assert get_teachers(coll_martigny_1_data) == result


 def test_start_end_date(db, coll_martigny_1_data):
     """Test date format."""
-    result = '01/09/2020 - 31/12/2020'
+    result = "01/09/2020 - 31/12/2020"
     coll = Collection.create(coll_martigny_1_data, delete_pid=True)
-    assert _start_end_date(
-        coll.get('start_date'),
-        coll.get('end_date')
-    ) == result
+    assert _start_end_date(coll.get("start_date"), coll.get("end_date")) == result
diff --git a/tests/ui/conftest.py b/tests/ui/conftest.py
index 12d2517566..c28813375a 100644
--- a/tests/ui/conftest.py
+++ b/tests/ui/conftest.py
@@ -23,19 +23,19 @@
 from invenio_search import current_search_client


-@pytest.fixture(scope='function')
+@pytest.fixture(scope="function")
 def user_with_profile(db, default_user_password):
     """Create a simple invenio user with a profile."""
     with db.session.begin_nested():
         profile = dict(
-            birth_date='1990-01-01',
-            first_name='User',
-            last_name='With Profile',
-            city='Nowhere'
+            birth_date="1990-01-01",
+            first_name="User",
+            last_name="With Profile",
+            city="Nowhere",
         )
         user = User(
-            email='user_with_profile@test.com',
-            username='user_with_profile',
+            email="user_with_profile@test.com",
+            username="user_with_profile",
             password=hash_password(default_user_password),
             user_profile=profile,
             active=True,
@@ -46,20 +46,20 @@ def user_with_profile(db, default_user_password):
     return user


-@pytest.fixture(scope='function')
+@pytest.fixture(scope="function")
 def user_without_email(db, default_user_password):
     """Create a simple invenio user without email."""
     with db.session.begin_nested():
         profile = dict(
-            birth_date='1990-01-01',
-            first_name='User',
-            last_name='With Profile',
-            city='Nowhere'
+            birth_date="1990-01-01",
+            first_name="User",
+            last_name="With Profile",
+            city="Nowhere",
         )
         user = User(
             password=hash_password(default_user_password),
             user_profile=profile,
-            username='user_without_email',
+            username="user_without_email",
             active=True,
         )
         db.session.add(user)
@@ -68,7 +68,7 @@ def user_without_email(db, default_user_password):
     return user


-@pytest.fixture(scope='module')
+@pytest.fixture(scope="module")
 def create_app():
     """Create test app."""
     # from invenio_app.factory import create_ui
@@ -82,8 +82,8 @@ def create_app():
 def ils_record():
     """Ils Record test record."""
     yield {
-        'pid': 'ilsrecord_pid',
-        'name': 'IlsRecord Name',
+        "pid": "ilsrecord_pid",
+        "name": "IlsRecord Name",
     }


@@ -91,35 +91,26 @@
 def ils_record_2():
     """Ils Record test record 2."""
     yield {
-        'pid': 'ilsrecord_pid_2',
-        'name': 'IlsRecord Name 2',
+        "pid": "ilsrecord_pid_2",
+        "name": "IlsRecord Name 2",
     }


-@pytest.fixture(scope='module')
-def es_default_index(es):
+@pytest.fixture(scope="module")
+def es_default_index(search):
     """ES default index."""
     index_name = list(
-        current_search_client.indices.get_alias(
-            'records-record-v1.0.0').keys()).pop()
-    current_search_client.indices.delete(
-        index=index_name
-    )
+        current_search_client.indices.get_alias("records-record-v1.0.0").keys()
+    ).pop()
+    current_search_client.indices.delete(index=index_name)
     current_search_client.indices.create(
-        index='records-record-v1.0.0',
+        index="records-record-v1.0.0",
         body={
-            'mappings': {
-                'record-v1.0.0': {
-                    'properties': {
-                        'pid': {'type': 'keyword'}
-                    }
-                }
-            }
+            "mappings": {"record-v1.0.0": {"properties": {"pid": {"type": "keyword"}}}}
         },
-        ignore=[400]
+        ignore=[400],
     )
-    yield es
+    yield search
     current_search_client.indices.delete(
-        index='records-record-v1.0.0',
-        ignore=[400, 404]
+        index="records-record-v1.0.0", ignore=[400, 404]
     )
diff --git a/tests/ui/documents/conftest.py b/tests/ui/documents/conftest.py
index e76ff97a13..eb38dd3c2b 100644
--- a/tests/ui/documents/conftest.py
+++ b/tests/ui/documents/conftest.py
@@ -20,13 +20,8 @@
 import pytest


-@pytest.fixture(scope='module')
+@pytest.fixture(scope="module")
 def document_records(
-    document,
-    document_ref,
-    document_sion_items,
-    ebook_1,
-    ebook_2,
-    ebook_3
+    document, document_ref, document_sion_items, ebook_1, ebook_2, ebook_3
 ):
     """Documents for test mapping."""
diff --git a/tests/ui/documents/test_documents_api.py b/tests/ui/documents/test_documents_api.py
index 61c9f30e9f..a0d64812ca 100644
--- a/tests/ui/documents/test_documents_api.py
+++ b/tests/ui/documents/test_documents_api.py
@@ -25,18 +25,22 @@
 import pytest
 from invenio_db import db
 from jsonschema.exceptions import ValidationError
-from utils import flush_index, mock_response
+from utils import mock_response

 from rero_ils.modules.api import IlsRecordError
-from rero_ils.modules.documents.api import Document, DocumentsSearch, \
-    document_id_fetcher
+from rero_ils.modules.documents.api import (
+    Document,
+    DocumentsSearch,
+    document_id_fetcher,
+)
 from rero_ils.modules.documents.models import DocumentIdentifier
 from rero_ils.modules.ebooks.tasks import create_records
 from rero_ils.modules.entities.models import EntityType
-from rero_ils.modules.entities.remote_entities.api import \
-    RemoteEntitiesSearch, RemoteEntity
-from rero_ils.modules.entities.remote_entities.utils import \
-    extract_data_from_mef_uri
+from rero_ils.modules.entities.remote_entities.api import (
+    RemoteEntitiesSearch,
+    RemoteEntity,
+)
+from rero_ils.modules.entities.remote_entities.utils import extract_data_from_mef_uri
 from rero_ils.modules.tasks import process_bulk_queue


@@ -44,27 +48,32 @@
 def test_document_create(db, document_data_tmp):
     """Test document creation."""
     ptty = Document.create(document_data_tmp, delete_pid=True)
     assert ptty == document_data_tmp
-    assert ptty.get('pid') == '1'
-    assert ptty.dumps()['editionStatement'][0]['_text'] == [
-        {'language': 'chi-hani', 'value': '第3版 / 曾令良主编'},
-        {'language': 'default', 'value': 'Di 3 ban / Zeng Lingliang zhu bian'}
+    assert ptty.get("pid") == "1"
+    assert ptty.dumps()["editionStatement"][0]["_text"] == [
+        {"language": "chi-hani", "value": "第3版 / 曾令良主编"},
+        {"language": "default", "value": "Di 3 ban / Zeng Lingliang zhu bian"},
     ]

-    doc = Document.get_record_by_pid('1')
+    doc = Document.get_record_by_pid("1")
     assert doc == document_data_tmp

-    assert doc.document_type == 'docsubtype_other_book'
+    assert doc.document_type == "docsubtype_other_book"

     fetched_pid = document_id_fetcher(ptty.id, ptty)
-    assert fetched_pid.pid_value == '1'
-    assert fetched_pid.pid_type == 'doc'
+    assert fetched_pid.pid_value == "1"
+    assert fetched_pid.pid_type == "doc"
     with pytest.raises(IlsRecordError.PidAlreadyUsed):
         Document.create(doc)


-@mock.patch('requests.Session.get')
+@mock.patch("requests.Session.get")
 def test_document_create_with_mef(
-    mock_contributions_mef_get, app, document_data_ref, document_data,
-    entity_person_data, entity_person_response_data):
+    mock_contributions_mef_get,
+    app,
+    document_data_ref,
+    document_data,
+    entity_person_data,
+    entity_person_response_data,
+):
     """Load document with mef records reference."""
     mock_contributions_mef_get.return_value = mock_response(
         json_data=entity_person_response_data
@@ -72,76 +81,80 @@
     assert RemoteEntitiesSearch().count() == 0
     doc = Document.create(
         data=deepcopy(document_data_ref),
-        delete_pid=False, dbcommit=False, reindex=False)
+        delete_pid=False,
+        dbcommit=False,
+        reindex=False,
+    )
     doc.reindex()
-    flush_index(DocumentsSearch.Meta.index)
-    doc = Document.get_record_by_pid(doc.get('pid'))
-    assert doc['contribution'][0]['entity']['pid'] == entity_person_data['pid']
+    DocumentsSearch.flush_and_refresh()
+    doc = Document.get_record_by_pid(doc.get("pid"))
+    assert doc["contribution"][0]["entity"]["pid"] == entity_person_data["pid"]
     hit = DocumentsSearch().get_record_by_pid(doc.pid).to_dict()
-    assert hit['contribution'][0]['entity']['pid'] == entity_person_data['pid']
-    assert hit['contribution'][0]['entity']['primary_source'] == 'rero'
+    assert hit["contribution"][0]["entity"]["pid"] == entity_person_data["pid"]
+    assert hit["contribution"][0]["entity"]["primary_source"] == "rero"
     assert RemoteEntitiesSearch().count() == 1
-    contrib = RemoteEntity.get_record_by_pid(entity_person_data['pid'])
+    contrib = RemoteEntity.get_record_by_pid(entity_person_data["pid"])
     contrib.delete_from_index()
     doc.delete_from_index()
     db.session.rollback()
-    assert not Document.get_record_by_pid(doc.get('pid'))
-    assert not RemoteEntity.get_record_by_pid(entity_person_data['pid'])
+    assert not Document.get_record_by_pid(doc.get("pid"))
+    assert not RemoteEntity.get_record_by_pid(entity_person_data["pid"])
     assert RemoteEntitiesSearch().count() == 0

     with pytest.raises(ValidationError):
-        doc = Document.create(
-            data={},
-            delete_pid=False, dbcommit=True, reindex=True)
+        doc = Document.create(data={}, delete_pid=False, dbcommit=True, reindex=True)

-    assert not Document.get_record_by_pid(doc.get('pid'))
-    assert not RemoteEntity.get_record_by_pid(entity_person_data['pid'])
+    assert not Document.get_record_by_pid(doc.get("pid"))
+    assert not RemoteEntity.get_record_by_pid(entity_person_data["pid"])
     assert RemoteEntitiesSearch().count() == 0

     data = deepcopy(document_data_ref)
-    contrib = data.pop('contribution')
-    doc = Document.create(
-        data=data,
-        delete_pid=False, dbcommit=False, reindex=False)
+    contrib = data.pop("contribution")
+    doc = Document.create(data=data, delete_pid=False, dbcommit=False, reindex=False)
     doc.reindex()
-    flush_index(DocumentsSearch.Meta.index)
+    DocumentsSearch.flush_and_refresh()

     with pytest.raises(ValidationError):
-        doc['contribution'] = contrib
+        doc["contribution"] = contrib
         # remove required property
-        doc.pop('type')
+        doc.pop("type")
         doc.update(doc, commit=True, dbcommit=True, reindex=True)
-    assert Document.get_record_by_pid(doc.get('pid'))
-    assert not RemoteEntity.get_record_by_pid(entity_person_data['pid'])
+    assert Document.get_record_by_pid(doc.get("pid"))
+    assert not RemoteEntity.get_record_by_pid(entity_person_data["pid"])
     assert RemoteEntitiesSearch().count() == 0

     data = deepcopy(document_data_ref)
     doc.update(data, commit=True, dbcommit=False, reindex=False)
     doc.reindex()
-    assert Document.get_record_by_pid(doc.get('pid'))
-    assert RemoteEntity.get_record_by_pid(entity_person_data['pid'])
+    assert Document.get_record_by_pid(doc.get("pid"))
+    assert RemoteEntity.get_record_by_pid(entity_person_data["pid"])
     assert RemoteEntitiesSearch().count() == 1
     doc.delete_from_index()
     db.session.rollback()
-@mock.patch('requests.Session.get')
+@mock.patch("requests.Session.get")
 def test_document_linked_subject(
-    mock_subjects_mef_get, app, document_data_tmp,
-    mef_concept1_data, mef_concept1_es_response
+    mock_subjects_mef_get,
+    app,
+    document_data_tmp,
+    mef_concept1_data,
+    mef_concept1_es_response,
 ):
     """Load document with MEF reference as a subject."""
     mock_subjects_mef_get.return_value = mock_response(
-        json_data=mef_concept1_es_response)
+        json_data=mef_concept1_es_response
+    )

-    concept_pid = mef_concept1_data['idref']['pid']
-    entity_uri = f'https://mef.rero.ch/api/concepts/idref/{concept_pid}'
-    document_data_tmp['subjects'] = [{'entity': {'$ref': entity_uri}}]
+    concept_pid = mef_concept1_data["idref"]["pid"]
+    entity_uri = f"https://mef.rero.ch/api/concepts/idref/{concept_pid}"
+    document_data_tmp["subjects"] = [{"entity": {"$ref": entity_uri}}]

-    doc = Document.create(document_data_tmp,
-                          delete_pid=True, dbcommit=True, reindex=True)
-    flush_index(DocumentsSearch.Meta.index)
+    doc = Document.create(
+        document_data_tmp, delete_pid=True, dbcommit=True, reindex=True
+    )
+    DocumentsSearch.flush_and_refresh()
     doc = Document.get_record(doc.id)

     # a "bf:Concepts" entity should be created.
@@ -149,22 +162,20 @@
     # - Check if ES mapping is correct for this entity
     _, _type, _id = extract_data_from_mef_uri(entity_uri)
     entity = RemoteEntity.get_entity(_type, _id)
-    assert _type in entity.get('sources')
+    assert _type in entity.get("sources")
     es_record = RemoteEntitiesSearch().get_record_by_pid(entity.pid)
-    assert es_record['type'] == EntityType.TOPIC
-    assert es_record[_type]['pid'] == _id
+    assert es_record["type"] == EntityType.TOPIC
+    assert es_record[_type]["pid"] == _id

     # Check the document ES record
     # - check if $ref linked subject is correctly dumped
     es_record = DocumentsSearch().get_record_by_pid(doc.pid)
-    subject = es_record['subjects'][0]
-    assert subject['entity']['primary_source'] == _type
-    assert _id in subject['entity']['pids'][_type]
-    assert subject['entity']['authorized_access_point_fr'] == \
-        'Antienzymes'
-    assert 'Inhibiteurs enzymatiques' \
-        in subject['entity']['variant_access_point']
+    subject = es_record["subjects"][0]
+    assert subject["entity"]["primary_source"] == _type
+    assert _id in subject["entity"]["pids"][_type]
+    assert subject["entity"]["authorized_access_point_fr"] == "Antienzymes"
+    assert "Inhibiteurs enzymatiques" in subject["entity"]["variant_access_point"]

     # reset fixtures
     doc.delete_from_index()
@@ -174,35 +185,39 @@
 def test_document_add_cover_url(db, document):
     """Test add url."""
-    document.add_cover_url(url='http://images.rero.ch/cover.png')
-    assert document.get('electronicLocator') == [{
-        'content': 'coverImage',
-        'type': 'relatedResource',
-        'url': 'http://images.rero.ch/cover.png'
-    }]
+    document.add_cover_url(url="http://images.rero.ch/cover.png")
+    assert document.get("electronicLocator") == [
+        {
+            "content": "coverImage",
+            "type": "relatedResource",
+            "url": "http://images.rero.ch/cover.png",
+        }
+    ]
     # don't add the same url
-    document.add_cover_url(url='http://images.rero.ch/cover.png')
-    assert document.get('electronicLocator') == [{
-        'content': 'coverImage',
-        'type': 'relatedResource',
-        'url': 'http://images.rero.ch/cover.png'
-    }]
+    document.add_cover_url(url="http://images.rero.ch/cover.png")
+    assert document.get("electronicLocator") == [
+        {
+            "content": "coverImage",
+            "type": "relatedResource",
+            "url": "http://images.rero.ch/cover.png",
+        }
+    ]

 def test_document_with_item_can_not_delete(document, item_lib_martigny):
     """Test can not delete."""
     can, reasons = document.can_delete
     assert not can
-    assert reasons['links']['items']
+    assert reasons["links"]["items"]


 def test_document_with_files_can_not_delete(document_with_files):
     """Test can not delete."""
     links_to_me = document_with_files.get_links_to_me(True)
-    assert len(links_to_me['files']) > 0
+    assert len(links_to_me["files"]) > 0
     can, reasons = document_with_files.can_delete
     assert not can
-    assert reasons['links']['files']
+    assert reasons["links"]["files"]


 def test_document_can_delete(app, document_data_tmp):
@@ -213,51 +228,57 @@
     assert reasons == {}


-def test_document_create_records(app, org_martigny, org_sion, ebook_1_data,
-                                 ebook_2_data, item_type_online_martigny,
-                                 loc_online_martigny, item_type_online_sion,
-                                 loc_online_sion
-                                 ):
+def test_document_create_records(
+    app,
+    org_martigny,
+    org_sion,
+    ebook_1_data,
+    ebook_2_data,
+    item_type_online_martigny,
+    loc_online_martigny,
+    item_type_online_sion,
+    loc_online_sion,
+):
     """Test can create harvested records."""
-    ebook_1_data['electronicLocator'] = [
+    ebook_1_data["electronicLocator"] = [
         {
             "source": "ebibliomedia",
             "url": "https://www.site1.org/ebook",
-            "type": "resource"
+            "type": "resource",
         }
     ]
-    ebook_2_data['electronicLocator'] = [
+    ebook_2_data["electronicLocator"] = [
         {
             "source": "ebibliomedia",
             "url": "https://www.site2.org/ebook",
-            "type": "resource"
+            "type": "resource",
         }
     ]
     n_created, n_updated = create_records([ebook_1_data])
     assert n_created == 1
     assert n_updated == 0

-    ebook_1_data['electronicLocator'] = [
+    ebook_1_data["electronicLocator"] = [
         {
             "source": "ebibliomedia",
             "url": "https://www.site2.org/ebook",
-            "type": "resource"
+            "type": "resource",
         },
         {
             "source": "mv-cantook",
             "url": "https://www.site3.org/ebook",
-            "type": "resource"
-        }
+            "type": "resource",
+        },
     ]
     n_created, n_updated = create_records([ebook_1_data, ebook_2_data])
     assert n_created == 1
     assert n_updated == 1

-    ebook_1_data['electronicLocator'] = [
+    ebook_1_data["electronicLocator"] = [
         {
             "source": "mv-cantook",
             "url": "https://www.site3.org/ebook",
-            "type": "resource"
+            "type": "resource",
         }
     ]
     n_created, n_updated = create_records([ebook_1_data, ebook_2_data])
@@ -276,102 +297,94 @@
     can, reasons = document.can_delete
     assert document.harvested
     assert not can
-    assert reasons['others']['harvested']
+    assert reasons["others"]["harvested"]


 def test_document_can_delete_with_loans(
-        client, item_lib_martigny, loan_pending_martigny, document):
+    client, item_lib_martigny, loan_pending_martigny, document
+):
     """Test can delete a document."""
     can, reasons = document.can_delete
     assert not can
-    assert reasons['links']['items']
-    assert reasons['links']['loans']
+    assert reasons["links"]["items"]
+    assert reasons["links"]["loans"]


-def test_document_contribution_resolve_exception(es_clear, db, mef_agents_url,
-                                                 document_data_ref):
+def test_document_contribution_resolve_exception(
+    search_clear, db, mef_agents_url, document_data_ref
+):
     """Test document contribution resolve."""
-    document_data_ref['contribution'] = [{
-        '$ref': f'{mef_agents_url}/rero/XXXXXX'
-    }],
+    document_data_ref["contribution"] = ([{"$ref": f"{mef_agents_url}/rero/XXXXXX"}],)
    with pytest.raises(Exception):
         Document.create(
-            data=document_data_ref,
-            delete_pid=False,
-            dbcommit=True,
-            reindex=True
+            data=document_data_ref, delete_pid=False, dbcommit=True, reindex=True
         )


-def test_document_create_invalid_data(es_clear, db, document_data):
+def test_document_create_invalid_data(search_clear, db, document_data):
     """Test document contribution resolve."""
     data = deepcopy(document_data)
     n_pids = DocumentIdentifier.query.count()
-    data.pop('type')
-    data.pop('pid')
+    data.pop("type")
+    data.pop("pid")
     with pytest.raises(Exception):
-        Document.create(
-            data=data,
-            delete_pid=True,
-            dbcommit=True,
-            reindex=True
-        )
+        Document.create(data=data, delete_pid=True, dbcommit=True, reindex=True)
     db.session.rollback()
     assert DocumentIdentifier.query.count() == n_pids


 def test_document_get_links_to_me(document, export_document):
     """Test document links."""
-    assert document.get_links_to_me() == {'documents': 1}
+    assert document.get_links_to_me() == {"documents": 1}
     assert document.get_links_to_me(get_pids=True) == {
-        'documents': {
-            'partOf': [export_document.pid]
-        }
+        "documents": {"partOf": [export_document.pid]}
     }


 def test_document_indexing(document, export_document):
     """Test document indexing."""
     # get the export_document from the es index
-    s = DocumentsSearch().filter('term', pid=export_document.pid)
+    s = DocumentsSearch().filter("term", pid=export_document.pid)
     assert s.count() == 1
     # get the partOf field
-    record = next(s.source('partOf').scan())
+    record = next(s.source("partOf").scan())
     # get the titles from the host document
     parent_titles = [
-        v['_text'] for v in document.dumps().get('title')
-        if v.get('_text') and v.get('type') == 'bf:Title'
+        v["_text"]
+        for v in document.dumps().get("title")
+        if v.get("_text") and v.get("type") == "bf:Title"
     ]
     assert record.partOf[0].document.title == parent_titles.pop()

     # change the title of the host document
-    orig_title = document['title'][0]['mainTitle'][1]['value']
-    document['title'][0]['mainTitle'][1]['value'] = 'New title'
+    orig_title = document["title"][0]["mainTitle"][1]["value"]
+    document["title"][0]["mainTitle"][1]["value"] = "New title"
     document.update(document, dbcommit=True, reindex=True)
     # process the bulked indexed documents
     process_bulk_queue()
-    flush_index(DocumentsSearch.Meta.index)
+    DocumentsSearch.flush_and_refresh()
     # get the export_document from the es index
-    s = DocumentsSearch().filter('term', pid=export_document.pid)
+    s = DocumentsSearch().filter("term", pid=export_document.pid)
     # get the partOf field
-    record = next(s.source('partOf').scan())
+    record = next(s.source("partOf").scan())
     # get the titles from the host document
     parent_titles = [
-        v['_text'] for v in document.dumps().get('title')
-        if v.get('_text') and v.get('type') == 'bf:Title'
+        v["_text"]
+        for v in document.dumps().get("title")
+        if v.get("_text") and v.get("type") == "bf:Title"
     ]
     assert record.partOf[0].document.title == parent_titles.pop()

     # check updated created should exists
-    record = next(s.source(['_updated', '_created']).scan())
+    record = next(s.source(["_updated", "_created"]).scan())
     assert record._updated
     assert record._created

     # restore initial data
-    document['title'].pop(-1)
-    document['title'][0]['mainTitle'][1]['value'] = orig_title
+    document["title"].pop(-1)
+    document["title"][0]["mainTitle"][1]["value"] = orig_title
     document.update(document, dbcommit=True, reindex=True)
@@ -379,23 +392,22 @@ def test_document_replace_refs(document, mef_agents_url):
     """Test document replace refs."""
     orig = deepcopy(document)
     data = document.replace_refs()
-    assert len(data.get('contribution')) == 1
+    assert len(data.get("contribution")) == 1

     # add MEF contribution agent
-    document['contribution'].append({
-        'entity': {'$ref': f'{mef_agents_url}/rero/A017671081'},
-        'role': ['aut']
-    })
+    document["contribution"].append(
+        {"entity": {"$ref": f"{mef_agents_url}/rero/A017671081"}, "role": ["aut"]}
+    )
     document.update(document, dbcommit=True, reindex=True)
-    flush_index(DocumentsSearch.Meta.index)
+    DocumentsSearch.flush_and_refresh()
     es_doc = DocumentsSearch().get_record_by_pid(document.pid).to_dict()
-    assert es_doc['contribution'][1]['entity']['type'] == EntityType.PERSON
+    assert es_doc["contribution"][1]["entity"]["type"] == EntityType.PERSON

     data = document.replace_refs()
-    assert len(data.get('contribution')) == 2
-    assert 'entity' in data.get('contribution')[1]
-    assert data.get('contribution')[1].get('role') == ['aut']
+    assert len(data.get("contribution")) == 2
+    assert "entity" in data.get("contribution")[1]
+    assert data.get("contribution")[1].get("role") == ["aut"]

     # Reset fixtures
     document.update(orig, dbcommit=True, reindex=True)
diff --git a/tests/ui/documents/test_documents_filter.py b/tests/ui/documents/test_documents_filter.py
index 46d5d57cf1..fefcb132ef 100644
--- a/tests/ui/documents/test_documents_filter.py
+++ b/tests/ui/documents/test_documents_filter.py
@@ -19,54 +19,48 @@
 import mock

-from rero_ils.modules.documents.views import babeltheque_enabled_view, \
-    cartographic_attributes, contribution_format, doc_entity_label, \
-    get_first_isbn, identified_by, main_title_text, note_general, \
-    notes_except_general, part_of_format, provision_activity, \
-    provision_activity_not_publication, provision_activity_original_date, \
-    provision_activity_publication, title_variants, work_access_point
+from rero_ils.modules.documents.views import (
+    babeltheque_enabled_view,
+    cartographic_attributes,
+    contribution_format,
+    doc_entity_label,
+    get_first_isbn,
+    identified_by,
+    main_title_text,
+    note_general,
+    notes_except_general,
+    part_of_format,
+    provision_activity,
+    provision_activity_not_publication,
+    provision_activity_original_date,
+    provision_activity_publication,
+    title_variants,
+    work_access_point,
+)
 from rero_ils.modules.entities.models import EntityType


 def test_note_general():
     """Test note general."""
     notes = [
-        {
-            'noteType': 'general',
-            'label': 'Note general'
-        },
-        {
-            'noteType': 'dummy',
-            'label': 'dummy'
-        }
+        {"noteType": "general", "label": "Note general"},
+        {"noteType": "dummy", "label": "dummy"},
     ]
-    result = {'general': ['Note general']}
+    result = {"general": ["Note general"]}
     assert result == note_general(notes)


 def test_notes_except_general():
     """Test note except general."""
     notes = [
-        {
-            'noteType': 'general',
-            'label': 'Note general'
-        },
-        {
-            'noteType': 'accompanyingMaterial',
-            'label': 'Accompany'
-        },
-        {
-            'noteType': 'accompanyingMaterial',
-            'label': 'Material'
-        },
-        {
-            'noteType': 'otherPhysicalDetails',
-            'label': 'Physical'
-        }
+        {"noteType": "general", "label": "Note general"},
+        {"noteType": "accompanyingMaterial", "label": "Accompany"},
+        {"noteType": "accompanyingMaterial", "label": "Material"},
+        {"noteType": "otherPhysicalDetails", "label": "Physical"},
     ]
     result = {
-        'accompanyingMaterial': ['Accompany', 'Material'],
-        'otherPhysicalDetails': ['Physical']
+        "accompanyingMaterial": ["Accompany", "Material"],
+        "otherPhysicalDetails": ["Physical"],
     }
     assert result == notes_except_general(notes)
@@ -74,39 +68,15 @@
 def test_cartographic_attributes():
     """Test cartographic attributes."""
     attributes = [
-        {
-            'projection': 'Projection',
-            'coordinates': {
-                'label': 'coordinate label'
-            }
-        },
-        {
-            'projection': 'Projection 2'
-        },
-        {
-            'coordinates': {
-                'label': 'coordinate label 2'
-            }
-        },
-        {
-            'dummy': 'dummy'
-        }
+        {"projection": "Projection", "coordinates": {"label": "coordinate label"}},
+        {"projection": "Projection 2"},
+        {"coordinates": {"label": "coordinate label 2"}},
+        {"dummy": "dummy"},
     ]
     result = [
-        {
-            'projection': 'Projection',
-            'coordinates': {
-                'label': 'coordinate label'
-            }
-        },
-        {
-            'projection': 'Projection 2'
-        },
-        {
-            'coordinates': {
-                'label': 'coordinate label 2'
-            }
-        }
+        {"projection": "Projection", "coordinates": {"label": "coordinate label"}},
+        {"projection": "Projection 2"},
+        {"coordinates": {"label": "coordinate label 2"}},
     ]
     assert result == cartographic_attributes(attributes)
@@ -115,71 +85,58 @@
 def test_provision_activity():
     """Test preprocess provision activity."""
     provisions = [
         {
-            '_text': [
-                {
-                    'language': 'default',
-                    'value': 'Paris : Ed. de Minuit, 1988'
-                }
+            "_text": [{"language": "default", "value": "Paris : Ed. de Minuit, 1988"}],
+            "place": [{"country": "fr", "type": "bf:Place"}],
+            "startDate": 1988,
+            "statement": [
+                {"label": [{"value": "Paris"}], "type": "bf:Place"},
+                {"label": [{"value": "Ed. de Minuit"}], "type": "bf:Agent"},
+                {"label": [{"value": "1988"}], "type": "Date"},
             ],
-            'place': [{'country': 'fr', 'type': 'bf:Place'}],
-            'startDate': 1988,
-            'statement': [
-                {'label': [{'value': 'Paris'}], 'type': 'bf:Place'},
-                {'label': [{'value': 'Ed. de Minuit'}], 'type': 'bf:Agent'},
-                {'label': [{'value': '1988'}], 'type': 'Date'}
-            ],
-            'type': 'bf:Publication'
+            "type": "bf:Publication",
         },
         {
-            '_text': [
-                {
-                    'language': 'default',
-                    'value': 'Martigny : Alex Morgan, 2010'
-                }
+            "_text": [{"language": "default", "value": "Martigny : Alex Morgan, 2010"}],
+            "startDate": 1998,
+            "statement": [
+                {"label": [{"value": "Martigny"}], "type": "bf:Place"},
+                {"label": [{"value": "Alex Morgan"}], "type": "bf:Agent"},
+                {"label": [{"value": "2010"}], "type": "Date"},
             ],
-            'startDate': 1998,
-            'statement': [
-                {'label': [{'value': 'Martigny'}], 'type': 'bf:Place'},
-                {'label': [{'value': 'Alex Morgan'}], 'type': 'bf:Agent'},
-                {'label': [{'value': '2010'}], 'type': 'Date'}
-            ],
-            'type': 'bf:Distribution'
+            "type": "bf:Distribution",
         },
         {
-            '_text': [
+            "_text": [
                 {
-                    'language': 'default',
-                    'value': 'Will Edwards, 2010 ; Paris ; Martigny'
+                    "language": "default",
+                    "value": "Will Edwards, 2010 ; Paris ; Martigny",
                 }
             ],
-            'startDate': 1990,
-            'statement': [
-                {'label': [{'value': 'Will Edwards'}], 'type': 'bf:Agent'},
-                {'label': [{'value': '2010'}], 'type': 'Date'},
-                {'label': [{'value': 'Paris'}], 'type': 'bf:Place'},
-                {'label': [{'value': 'Martigny'}], 'type': 'bf:Place'}
+            "startDate": 1990,
+            "statement": [
+                {"label": [{"value": "Will Edwards"}], "type": "bf:Agent"},
+                {"label": [{"value": "2010"}], "type": "Date"},
+                {"label": [{"value": "Paris"}], "type": "bf:Place"},
+                {"label": [{"value": "Martigny"}], "type": "bf:Place"},
            ],
-            'type': 'bf:Distribution'
+            "type": "bf:Distribution",
         },
         {
-            '_text': [{'language': 'default', 'value': ''}],
-            'original_date': 2010,
-            'place': [{'country': 'xx', 'type': 'bf:Place'}],
-            'startDate': 1989,
-            'type': 'bf:Manufacture'
-        }
+            "_text": [{"language": "default", "value": ""}],
+            "original_date": 2010,
+            "place": [{"country": "xx", "type": "bf:Place"}],
+            "startDate": 1989,
+            "type": "bf:Manufacture",
+        },
     ]
     result = {
-        'bf:Publication': [
-            {'language': 'default', 'value': 'Paris : Ed. de Minuit, 1988'}
+        "bf:Publication": [
+            {"language": "default", "value": "Paris : Ed. de Minuit, 1988"}
+        ],
+        "bf:Distribution": [
+            {"language": "default", "value": "Martigny : Alex Morgan, 2010"},
+            {"language": "default", "value": "Will Edwards, 2010 ; Paris ; Martigny"},
         ],
-        'bf:Distribution': [
-            {'language': 'default', 'value': 'Martigny : Alex Morgan, 2010'},
-            {
-                'language': 'default',
-                'value': 'Will Edwards, 2010 ; Paris ; Martigny'
-            }
-        ]
     }
     assert result == provision_activity(provisions)
@@ -187,20 +144,17 @@
 def test_provision_activity_publication():
     """Test extract only publication on provision activity."""
     provisions = {
-        'bf:Publication': [
-            {'language': 'default', 'value': 'Paris : Ed. de Minuit, 1988'}
+        "bf:Publication": [
+            {"language": "default", "value": "Paris : Ed. de Minuit, 1988"}
+        ],
+        "bf:Distribution": [
+            {"language": "default", "value": "Martigny : Alex Morgan, 2010"},
+            {"language": "default", "value": "Will Edwards, 2010 ; Paris ; Martigny"},
         ],
-        'bf:Distribution': [
-            {'language': 'default', 'value': 'Martigny : Alex Morgan, 2010'},
-            {
-                'language': 'default',
-                'value': 'Will Edwards, 2010 ; Paris ; Martigny'
-            }
-        ]
     }
     result = {
-        'bf:Publication': [
-            {'language': 'default', 'value': 'Paris : Ed. de Minuit, 1988'}
+        "bf:Publication": [
+            {"language": "default", "value": "Paris : Ed. de Minuit, 1988"}
         ]
     }
     assert result == provision_activity_publication(provisions)
@@ -209,24 +163,18 @@
 def test_provision_activity_not_publication():
     """Test extract all provision activity except publication."""
     provisions = {
-        'bf:Publication': [
-            {'language': 'default', 'value': 'Paris : Ed. de Minuit, 1988'}
+        "bf:Publication": [
+            {"language": "default", "value": "Paris : Ed. de Minuit, 1988"}
+        ],
+        "bf:Distribution": [
+            {"language": "default", "value": "Martigny : Alex Morgan, 2010"},
+            {"language": "default", "value": "Will Edwards, 2010 ; Paris ; Martigny"},
         ],
-        'bf:Distribution': [
-            {'language': 'default', 'value': 'Martigny : Alex Morgan, 2010'},
-            {
-                'language': 'default',
-                'value': 'Will Edwards, 2010 ; Paris ; Martigny'
-            }
-        ]
     }
     result = {
-        'bf:Distribution': [
-            {'language': 'default', 'value': 'Martigny : Alex Morgan, 2010'},
-            {
-                'language': 'default',
-                'value': 'Will Edwards, 2010 ; Paris ; Martigny'
-            }
+        "bf:Distribution": [
+            {"language": "default", "value": "Martigny : Alex Morgan, 2010"},
+            {"language": "default", "value": "Will Edwards, 2010 ; Paris ; Martigny"},
         ]
     }
     assert result == provision_activity_not_publication(provisions)
@@ -234,69 +182,46 @@
 def test_provision_activity_original_date():
     """Test provision activity."""
-    activity = [
-        {
-            'original_date': '2021'
-        },
-        {
-            'date': '2021-07-23'
-        }
-    ]
-    result = ['2021']
+    activity = [{"original_date": "2021"}, {"date": "2021-07-23"}]
+    result = ["2021"]
     assert result == provision_activity_original_date(activity)


 def test_title_variants():
     """Test title variants."""
     titles = [
+        {"type": "bf:Title", "mainTitle": [{"value": "Title"}]},
         {
-            'type': 'bf:Title',
-            'mainTitle': [{
-                'value': 'Title'
-            }]
-        },
-        {
-            'type': 'bf:VariantTitle',
-            'mainTitle': [{
-                'value': 'Variant title 1'
-            }],
-            'part': [
+            "type": "bf:VariantTitle",
+            "mainTitle": [{"value": "Variant title 1"}],
+            "part": [
                 {
-                    'partName': [{'value': 'part1'}],
-                    'partNumber': [{'value': 'number1'}],
+                    "partName": [{"value": "part1"}],
+                    "partNumber": [{"value": "number1"}],
                 },
                 {
-                    'partNumber': [{'value': 'number2'}],
-                    'partName': [{'value': 'part2'}]
-                }
-            ]
+                    "partNumber": [{"value": "number2"}],
+                    "partName": [{"value": "part2"}],
+                },
+            ],
         },
         {
-            'type': 'bf:VariantTitle',
-            'mainTitle': [{
-                'value': 'Variant title 2'
-            }],
-            'subtitle': [{
-                'value': 'Variant 2 sub'
-            }]
+            "type": "bf:VariantTitle",
+            "mainTitle": [{"value": "Variant title 2"}],
+            "subtitle": [{"value": "Variant 2 sub"}],
        },
        {
-            'type': 'bf:ParallelTitle',
-            'mainTitle': [{
-                'value': 'Parallel title'
-            }],
-            'subtitle': [{
-                'value': 'sub parallel'
-            }]
-        }
+            "type": "bf:ParallelTitle",
+            "mainTitle": [{"value": "Parallel title"}],
+            "subtitle": [{"value": "sub parallel"}],
+        },
     ]
     result = {
-        'bf:VariantTitle':
-            [
-                'Variant title 1. number1, part1. number2, part2',
-                'Variant title 2 : Variant 2 sub'
+        "bf:VariantTitle": [
+            "Variant title 1. number1, part1. number2, part2",
+            "Variant title 2 : Variant 2 sub",
         ],
-        'bf:ParallelTitle': ['Parallel title : sub parallel']
+        "bf:ParallelTitle": ["Parallel title : sub parallel"],
     }
     assert result == title_variants(titles)
@@ -306,91 +231,78 @@
 def test_work_access_point():
     """Test work access point process."""
     wap = [
         {
-            'part': [
+            "part": [
                 {
-                    'partName': 'part section title',
-                    'partNumber': 'part section designation'
+                    "partName": "part section title",
+                    "partNumber": "part section designation",
                 }
             ],
-            'creator': {
-                'type': 'bf:Person',
-                'qualifier': 'physicien',
-                'numeration': 'XX',
-                'date_of_birth': '1955',
-                'date_of_death': '2012',
-                'preferred_name':
-                    'Müller, Hans',
-                'fuller_form_of_name':
-                    'Müller, Hans Peter'
+            "creator": {
+                "type": "bf:Person",
+                "qualifier": "physicien",
+                "numeration": "XX",
+                "date_of_birth": "1955",
+                "date_of_death": "2012",
+                "preferred_name": "Müller, Hans",
+                "fuller_form_of_name": "Müller, Hans Peter",
             },
-            'title': 'Müller, Hans (Title)',
-            'language': 'fre',
-            'date_of_work': '2000',
-            'key_for_music': 'key music',
-            'form_subdivision': ['Form sub.'],
-            'miscellaneous_information': 'Miscellaneous info',
-            'arranged_statement_for_music': 'arranged stat',
-            'medium_of_performance_for_music': ['medium perf']
+            "title": "Müller, Hans (Title)",
+            "language": "fre",
+            "date_of_work": "2000",
+            "key_for_music": "key music",
+            "form_subdivision": ["Form sub."],
+            "miscellaneous_information": "Miscellaneous info",
+            "arranged_statement_for_music": "arranged stat",
+            "medium_of_performance_for_music": ["medium perf"],
         },
         {
-            'part': [
-                {
-                    'partName': 'Title',
-                    'partNumber': 'part designation'
-                }],
-            'creator': {
-                'type': 'bf:Organisation',
-                'place': 'Lausanne',
-                'numbering': '4',
-                'conference': False,
-                'preferred_name': 'Corp body Name',
-                'conference_date': '1990',
-                'subordinate_unit': ['Office 1', 'Office 2']
+            "part": [{"partName": "Title", "partNumber": "part designation"}],
+            "creator": {
+                "type": "bf:Organisation",
+                "place": "Lausanne",
+                "numbering": "4",
+                "conference": False,
+                "preferred_name": "Corp body Name",
+                "conference_date": "1990",
+                "subordinate_unit": ["Office 1", "Office 2"],
             },
-            'title': 'Corp Title',
-            'language': 'fre',
-            'date_of_work': '1980',
-            'key_for_music': 'Corp Key music',
-            'form_subdivision': ['Form sub 1', 'Form sub 2'],
-            'miscellaneous_information': 'miscellaneous info',
-            'arranged_statement_for_music': 'Copr Arranged stat',
-            'medium_of_performance_for_music': [
-                'Corp Medium perf 1',
-                'Corp Medium perf 2'
-            ]
+            "title": "Corp Title",
+            "language": "fre",
+            "date_of_work": "1980",
+            "key_for_music": "Corp Key music",
+            "form_subdivision": ["Form sub 1", "Form sub 2"],
+            "miscellaneous_information": "miscellaneous info",
+            "arranged_statement_for_music": "Copr Arranged stat",
+            "medium_of_performance_for_music": [
+                "Corp Medium perf 1",
+                "Corp Medium perf 2",
+            ],
         },
        {
-            'creator': {
-                'type': 'bf:Person',
-                'qualifier': 'pianiste',
-                'date_of_birth': '1980',
-                'preferred_name': 'Hans, Peter'
+            "creator": {
+                "type": "bf:Person",
+                "qualifier": "pianiste",
+                "date_of_birth": "1980",
+                "preferred_name": "Hans, Peter",
             },
-            'title': 'Work title'
+            "title": "Work title",
         },
         {
-            'part': [
-                {
-                    'partNumber': 'part number'
-                }
-            ],
-            'creator': {
-                'type': 'bf:Person',
-                'qualifier': 'pianiste'
-            },
-            'title': 'title with part'
-        }
+            "part": [{"partNumber": "part number"}],
+            "creator": {"type": "bf:Person", "qualifier": "pianiste"},
+            "title": "title with part",
+        },
     ]
     results = [
physicien, 1955-2012. Müller, Hans (Title). ' - 'part section designation. part section title. Miscellaneous info. ' - 'lang_fre. medium perf. key music. arranged stat. 2000.', - 'Corp body Name. Office 1. Office 2. (4 : 1990 : Lausanne) ' - 'Corp Title. part designation. Title. miscellaneous info. ' - 'lang_fre. Corp Medium perf 1. Corp Medium perf 2. ' - 'Corp Key music. Copr Arranged stat. 1980.', - 'Hans, Peter, 1980. pianiste. Work title.', - 'pianiste. title with part. part number.' + "Müller, Hans, XX, physicien, 1955-2012. Müller, Hans (Title). " + "part section designation. part section title. Miscellaneous info. " + "lang_fre. medium perf. key music. arranged stat. 2000.", + "Corp body Name. Office 1. Office 2. (4 : 1990 : Lausanne) " + "Corp Title. part designation. Title. miscellaneous info. " + "lang_fre. Corp Medium perf 1. Corp Medium perf 2. " + "Corp Key music. Copr Arranged stat. 1980.", + "Hans, Peter, 1980. pianiste. Work title.", + "pianiste. title with part. part number.", ] assert results == work_access_point(wap) @@ -398,146 +310,114 @@ def test_work_access_point(): def test_contribution_format(db, entity_organisation): """Test contribution format.""" entity = entity_organisation - contributions = [{ - 'entity': { - 'authorized_access_point': 'author_def', - 'authorized_access_point_fr': 'author_fr' + contributions = [ + { + "entity": { + "authorized_access_point": "author_def", + "authorized_access_point_fr": "author_fr", + } } - }] + ] # ---- Textual contribution # With english language - link_part = '/global/search/documents?q=' \ - 'contribution.entity.authorized_access_point_en%3A' \ - '%22author_def%22' - assert link_part in contribution_format(contributions, 'en', 'global') + link_part = ( + "/global/search/documents?q=" + "contribution.entity.authorized_access_point_en%3A" + "%22author_def%22" + ) + assert link_part in contribution_format(contributions, "en", "global") # With french language - link_part = '/global/search/documents?q=' \ - 'contribution.entity.authorized_access_point_fr%3A' \ - '%22author_fr%22' - assert link_part in contribution_format(contributions, 'fr', 'global') + link_part = ( + "/global/search/documents?q=" + "contribution.entity.authorized_access_point_fr%3A" + "%22author_fr%22" + ) + assert link_part in contribution_format(contributions, "fr", "global") # ---- Remote contribution - contributions = [{ - 'entity': {'pid': entity.pid} - }] - link_part = f'/global/search/documents?q=' \ - f'contribution.entity.pids.{entity.resource_type}%3A' \ - f'{entity.pid}' - assert link_part in contribution_format(contributions, 'en', 'global') + contributions = [{"entity": {"pid": entity.pid}}] + link_part = ( + f"/global/search/documents?q=" + f"contribution.entity.pids.{entity.resource_type}%3A" + f"{entity.pid}" + ) + assert link_part in contribution_format(contributions, "en", "global") def test_identifiedby_format(): """Test identifiedBy format.""" identifiedby = [ + {"type": "bf:Local", "source": "RERO", "value": "R008745599"}, { - 'type': 'bf:Local', - 'source': 'RERO', - 'value': 'R008745599' - }, { - 'note': 'Lorem ipsun dolor', - 'qualifier': 'Qualifier', - 'status': 'cancelled', - 'type': 'bf:Isbn', - 'value': '9782844267788' - }, { - 'note': 'Lorem ipsun dolor', - 'type': 'bf:Local', - 'source': 'BNF', - 'value': 'FRBNF452959040000002' - }, { - 'type': 'uri', - 'value': 'http://catalogue.bnf.fr/ark:/12148/cb45295904f' - } + "note": "Lorem ipsun dolor", + "qualifier": "Qualifier", + "status": "cancelled", + "type": "bf:Isbn", + "value": 
"9782844267788", + }, + { + "note": "Lorem ipsun dolor", + "type": "bf:Local", + "source": "BNF", + "value": "FRBNF452959040000002", + }, + {"type": "uri", "value": "http://catalogue.bnf.fr/ark:/12148/cb45295904f"}, ] results = [ + {"details": "", "type": "RERO", "value": "R008745599"}, { - 'details': '', - 'type': 'RERO', - 'value': 'R008745599' + "details": "Qualifier, cancelled, Lorem ipsun dolor", + "type": "bf:Isbn", + "value": "9782844267788", }, { - 'details': 'Qualifier, cancelled, Lorem ipsun dolor', - 'type': 'bf:Isbn', - 'value': '9782844267788' + "details": "Lorem ipsun dolor", + "type": "BNF", + "value": "FRBNF452959040000002", }, { - 'details': 'Lorem ipsun dolor', - 'type': 'BNF', - 'value': 'FRBNF452959040000002' + "details": "", + "type": "uri", + "value": "http://catalogue.bnf.fr/ark:/12148/cb45295904f", }, - { - 'details': '', - 'type': 'uri', - 'value': 'http://catalogue.bnf.fr/ark:/12148/cb45295904f' - } ] assert results == identified_by(identifiedby) -def test_part_of_format( - document_with_issn, - document2_with_issn, - document_sion_items -): +def test_part_of_format(document_with_issn, document2_with_issn, document_sion_items): """Test 'part of' format.""" # Label Series with numbering part_of = { - "document": { - "$ref": "https://bib.rero.ch/api/documents/doc5" - }, - "numbering": [ - { - "year": "1818", - "volume": 2704, - "issue": "1", - "pages": "55" - } - ] + "document": {"$ref": "https://bib.rero.ch/api/documents/doc5"}, + "numbering": [{"year": "1818", "volume": 2704, "issue": "1", "pages": "55"}], } result = { "document_pid": "doc5", "label": "Series", - "numbering": [ - "1818, vol. 2704, nr. 1, p. 55" - ], - "title": "Manuales del Africa espa\u00f1ola" + "numbering": ["1818, vol. 2704, nr. 1, p. 55"], + "title": "Manuales del Africa espa\u00f1ola", } assert result == part_of_format(part_of) # Label Journal with numbering part_of = { - "document": { - "$ref": "https://bib.rero.ch/api/documents/doc6" - }, - "numbering": [ - { - "year": "1818", - "volume": 2704, - "issue": "1", - "pages": "55" - } - ] + "document": {"$ref": "https://bib.rero.ch/api/documents/doc6"}, + "numbering": [{"year": "1818", "volume": 2704, "issue": "1", "pages": "55"}], } result = { "document_pid": "doc6", "label": "Journal", - "numbering": [ - "1818, vol. 2704, nr. 1, p. 55" - ], - "title": "Nota bene" + "numbering": ["1818, vol. 2704, nr. 1, p. 55"], + "title": "Nota bene", } assert result == part_of_format(part_of) # Label Published in without numbering - part_of = { - "document": { - "$ref": "https://bib.rero.ch/api/documents/doc3" - } - } + part_of = {"document": {"$ref": "https://bib.rero.ch/api/documents/doc3"}} result = { "document_pid": "doc3", "label": "Published in", - "title": "La reine Berthe et son fils" + "title": "La reine Berthe et son fils", } assert result == part_of_format(part_of) @@ -545,113 +425,92 @@ def test_part_of_format( def test_main_title_text(): """Test extract only main title.""" title = [ - { - "mainTitle": [{"value": "J. Am. Med. Assoc."}], - "type": "bf:AbbreviatedTitle" - }, - { - "mainTitle": [{"value": "J Am Med Assoc"}], - "type": "bf:KeyTitle" - }, + {"mainTitle": [{"value": "J. Am. Med. 
Assoc."}], "type": "bf:AbbreviatedTitle"}, + {"mainTitle": [{"value": "J Am Med Assoc"}], "type": "bf:KeyTitle"}, { "_text": "Journal of the American medical association", - "mainTitle": [{ - "value": "Journal of the American medical association"}], - "type": "bf:Title" - } + "mainTitle": [{"value": "Journal of the American medical association"}], + "type": "bf:Title", + }, ] extract = main_title_text(title) assert len(extract) == 1 - assert extract[0].get('_text') is not None + assert extract[0].get("_text") is not None def test_doc_entity_label_filter(entity_person, local_entity_person): """Test entity label filter.""" # Remote entity - remote_pid = entity_person['idref']['pid'] + remote_pid = entity_person["idref"]["pid"] data = { - 'entity': { - '$ref': f'https://mef.rero.ch/api/concepts/idref/{remote_pid}', - 'pid': remote_pid + "entity": { + "$ref": f"https://mef.rero.ch/api/concepts/idref/{remote_pid}", + "pid": remote_pid, } } - entity_type, value, label = doc_entity_label(data['entity'], 'fr') - assert 'remote' == entity_type - assert 'ent_pers' == value - assert 'Loy, Georg, 1885-19..' == label + entity_type, value, label = doc_entity_label(data["entity"], "fr") + assert "remote" == entity_type + assert "ent_pers" == value + assert "Loy, Georg, 1885-19.." == label # Local entity - pid = local_entity_person['pid'] - data = { - 'entity': { - '$ref': f'https://bib.rero.ch/api/local_entities/{pid}' - } - } - entity_type, value, label = doc_entity_label(data['entity'], 'fr') - assert 'local' == entity_type - assert 'locent_pers' == value - assert 'Loy, Georg (1881-1968)' == label - - entity_type, value, label = doc_entity_label(data['entity'], 'en') - assert 'local' == entity_type - assert 'locent_pers' == value - assert 'Loy, Georg (1881-1968)' == label + pid = local_entity_person["pid"] + data = {"entity": {"$ref": f"https://bib.rero.ch/api/local_entities/{pid}"}} + entity_type, value, label = doc_entity_label(data["entity"], "fr") + assert "local" == entity_type + assert "locent_pers" == value + assert "Loy, Georg (1881-1968)" == label + + entity_type, value, label = doc_entity_label(data["entity"], "en") + assert "local" == entity_type + assert "locent_pers" == value + assert "Loy, Georg (1881-1968)" == label # Textual - data = { - 'entity': { - 'authorized_access_point': 'subject topic' - } - } - entity_type, value, label = doc_entity_label(data['entity'], None) - assert 'textual' == entity_type - assert 'subject topic' == value - assert 'subject topic' == label + data = {"entity": {"authorized_access_point": "subject topic"}} + entity_type, value, label = doc_entity_label(data["entity"], None) + assert "textual" == entity_type + assert "subject topic" == value + assert "subject topic" == label - entity_type, value, label = doc_entity_label(data['entity'], 'fr') - assert 'textual' == entity_type - assert 'subject topic' == value - assert 'subject topic' == label + entity_type, value, label = doc_entity_label(data["entity"], "fr") + assert "textual" == entity_type + assert "subject topic" == value + assert "subject topic" == label # Textual with subdivision - data['entity']['subdivisions'] = [ - { - 'entity': { - 'authorized_access_point': 'Sub 1', - 'type': EntityType.TOPIC - } - }, - { - 'entity': { - 'authorized_access_point': 'Sub 2', - 'type': EntityType.TOPIC - } - } + data["entity"]["subdivisions"] = [ + {"entity": {"authorized_access_point": "Sub 1", "type": EntityType.TOPIC}}, + {"entity": {"authorized_access_point": "Sub 2", "type": EntityType.TOPIC}}, ] - entity_type, 
value, label = doc_entity_label(data['entity'], 'fr') - assert 'textual' == entity_type - assert 'subject topic' == value - assert 'subject topic - Sub 1 - Sub 2' == label + entity_type, value, label = doc_entity_label(data["entity"], "fr") + assert "textual" == entity_type + assert "subject topic" == value + assert "subject topic - Sub 1 - Sub 2" == label def test_babeltheque_enabled_view(): """Check enabled view for babeltheque.""" + class CurrentApp: """Current app mock.""" - config = {'RERO_ILS_APP_BABELTHEQUE_ENABLED_VIEWS': ['global']} - with mock.patch( - 'rero_ils.modules.documents.views.current_app', CurrentApp): - assert babeltheque_enabled_view('global') - assert not babeltheque_enabled_view('foo') + + config = {"RERO_ILS_APP_BABELTHEQUE_ENABLED_VIEWS": ["global"]} + + with mock.patch("rero_ils.modules.documents.views.current_app", CurrentApp): + assert babeltheque_enabled_view("global") + assert not babeltheque_enabled_view("foo") def test_get_first_isbn(): """Get the first isbn on identifiedBy field.""" - record = {'identifiedBy': [ - {'type': 'bf:Isbn', 'value': '9782501053006'}, - {'type': 'bf:Isbn', 'value': '9782501033671'} - ]} - assert '9782501053006' == get_first_isbn(record) - record = {'identifiedBy': []} + record = { + "identifiedBy": [ + {"type": "bf:Isbn", "value": "9782501053006"}, + {"type": "bf:Isbn", "value": "9782501033671"}, + ] + } + assert "9782501053006" == get_first_isbn(record) + record = {"identifiedBy": []} assert None is get_first_isbn(record) diff --git a/tests/ui/documents/test_documents_jsonresolver.py b/tests/ui/documents/test_documents_jsonresolver.py index b8b28a8a83..1678aaca71 100644 --- a/tests/ui/documents/test_documents_jsonresolver.py +++ b/tests/ui/documents/test_documents_jsonresolver.py @@ -24,10 +24,10 @@ def test_documents_jsonresolver(document): """Test document json resolver.""" - rec = Record.create({ - 'document': {'$ref': 'https://bib.rero.ch/api/documents/doc1'} - }) - assert rec.replace_refs().get('document') == {'type': 'doc', 'pid': 'doc1'} + rec = Record.create( + {"document": {"$ref": "https://bib.rero.ch/api/documents/doc1"}} + ) + assert rec.replace_refs().get("document") == {"type": "doc", "pid": "doc1"} # deleted record document.delete() @@ -35,8 +35,6 @@ def test_documents_jsonresolver(document): type(rec)(rec.replace_refs()).dumps() # non existing record - rec = Record.create({ - 'document': {'$ref': 'https://bib.rero.ch/api/documents/n_e'} - }) + rec = Record.create({"document": {"$ref": "https://bib.rero.ch/api/documents/n_e"}}) with pytest.raises(JsonRefError): type(rec)(rec.replace_refs()).dumps() diff --git a/tests/ui/documents/test_documents_mapping.py b/tests/ui/documents/test_documents_mapping.py index e5ad3c458c..e945a34a55 100644 --- a/tests/ui/documents/test_documents_mapping.py +++ b/tests/ui/documents/test_documents_mapping.py @@ -26,11 +26,15 @@ from rero_ils.modules.documents.api import Document, DocumentsSearch -@mock.patch('requests.Session.get') +@mock.patch("requests.Session.get") def test_document_es_mapping( - mock_contributions_mef_get, search, db, org_martigny, - document_data_ref, item_lib_martigny, - entity_person_response_data + mock_contributions_mef_get, + search, + db, + org_martigny, + document_data_ref, + item_lib_martigny, + entity_person_response_data, ): """Test document elasticsearch mapping.""" search = DocumentsSearch() @@ -40,12 +44,7 @@ def test_document_es_mapping( mock_contributions_mef_get.return_value = mock_response( json_data=entity_person_response_data ) - Document.create( 
- data, - dbcommit=True, - reindex=True, - delete_pid=True - ) + Document.create(data, dbcommit=True, reindex=True, delete_pid=True) assert mapping == get_mapping(search.Meta.index) @@ -53,25 +52,24 @@ def test_document_search_mapping(app, document_records): """Test document search mapping.""" search = DocumentsSearch() - count = search.query('query_string', query='reine Berthe').count() + count = search.query("query_string", query="reine Berthe").count() assert count == 2 - count = search.query('query_string', query='maison').count() + count = search.query("query_string", query="maison").count() assert count == 1 - count = search.query('query_string', query='Körper').count() + count = search.query("query_string", query="Körper").count() assert count == 1 - count = search.query('query_string', query='Chamber Secrets').count() + count = search.query("query_string", query="Chamber Secrets").count() assert count == 1 - query = MultiMatch(query='Chamber of Secrets', - fields=['title.mainTitle.value']) + query = MultiMatch(query="Chamber of Secrets", fields=["title.mainTitle.value"]) count = search.query(query).count() assert count == 1 - count = search.query('query_string', query='9782823855890').count() + count = search.query("query_string", query="9782823855890").count() assert count == 1 - count = search.query('query_string', query='2823855890').count() + count = search.query("query_string", query="2823855890").count() assert count == 1 diff --git a/tests/ui/documents/test_documents_ui.py b/tests/ui/documents/test_documents_ui.py index e8eff71198..57a811a9c6 100644 --- a/tests/ui/documents/test_documents_ui.py +++ b/tests/ui/documents/test_documents_ui.py @@ -24,32 +24,29 @@ def test_documents_detailed_view(client, loc_public_martigny, document): """Test document detailed view.""" # check redirection - res = client.get(url_for( - 'invenio_records_ui.doc', - viewcode='global', - pid_value='doc1' - )) + res = client.get( + url_for("invenio_records_ui.doc", viewcode="global", pid_value="doc1") + ) assert res.status_code == 200 -def tests_document_item_filter_detailed_view( - client, loc_public_martigny, document): +def tests_document_item_filter_detailed_view(client, loc_public_martigny, document): """Test document detailed view with items filter.""" - res = client.get(url_for( - 'invenio_records_ui.doc', - viewcode='org1', - pid_value='doc1' - )) + res = client.get( + url_for("invenio_records_ui.doc", viewcode="org1", pid_value="doc1") + ) assert res.status_code == 200 def tests_document_export_formats(client, document): """Test document export view format.""" - for _format in ['json', 'ris']: - res = client.get(url_for( - 'invenio_records_ui.doc_export', - viewcode='global', - pid_value=document.pid, - format=_format - )) + for _format in ["json", "ris"]: + res = client.get( + url_for( + "invenio_records_ui.doc_export", + viewcode="global", + pid_value=document.pid, + format=_format, + ) + ) assert res.status_code == 200 diff --git a/tests/ui/ebooks/test_ebooks_receivers.py b/tests/ui/ebooks/test_ebooks_receivers.py index 26b5a056c2..efd1f6ef01 100644 --- a/tests/ui/ebooks/test_ebooks_receivers.py +++ b/tests/ui/ebooks/test_ebooks_receivers.py @@ -19,59 +19,64 @@ from collections import namedtuple -from utils import flush_index - from rero_ils.modules.documents.api import Document, DocumentsSearch from rero_ils.modules.ebooks.receivers import publish_harvested_records from rero_ils.modules.ebooks.tasks import create_records, delete_records from rero_ils.modules.holdings.api import Holding, 
HoldingsSearch -def test_publish_harvested_records(app, ebooks_1_xml, ebooks_2_xml, - org_martigny, loc_online_martigny, - item_type_online_martigny, - org_sion, loc_online_sion, - item_type_online_sion): +def test_publish_harvested_records( + app, + ebooks_1_xml, + ebooks_2_xml, + org_martigny, + loc_online_martigny, + item_type_online_martigny, + org_sion, + loc_online_sion, + item_type_online_sion, +): """Test publish harvested records.""" - Identifier = namedtuple('Identifier', 'identifier') - Record = namedtuple('Record', 'xml deleted header') - records = [Record(xml=ebooks_1_xml, deleted=False, - header=Identifier(identifier='record1'))] - records.append(Record(xml=ebooks_2_xml, deleted=False, - header=Identifier(identifier='record2'))) - records.append(Record(xml=ebooks_2_xml, deleted=True, - header=Identifier(identifier='record3'))) + Identifier = namedtuple("Identifier", "identifier") + Record = namedtuple("Record", "xml deleted header") + records = [ + Record(xml=ebooks_1_xml, deleted=False, header=Identifier(identifier="record1")) + ] + records.append( + Record(xml=ebooks_2_xml, deleted=False, header=Identifier(identifier="record2")) + ) + records.append( + Record(xml=ebooks_2_xml, deleted=True, header=Identifier(identifier="record3")) + ) - kwargs = {'max': 100} + kwargs = {"max": 100} publish_harvested_records(sender=None, records=records, kwargs=kwargs) - flush_index(DocumentsSearch.Meta.index) - flush_index(HoldingsSearch.Meta.index) + DocumentsSearch.flush_and_refresh() + HoldingsSearch.flush_and_refresh() assert Document.count() == 2 - doc1 = Document.get_record_by_pid('1') - assert doc1.get('$schema') is not None - assert doc1.get('identifiedBy') == [ - {'type': 'bf:Isbn', 'value': '9782075118842'}, - {'type': 'bf:Local', 'value': 'cantook-EDEN502344'}, - {'type': 'bf:Local', 'source': 'cantook', 'value': 'record1'} + doc1 = Document.get_record_by_pid("1") + assert doc1.get("$schema") is not None + assert doc1.get("identifiedBy") == [ + {"type": "bf:Isbn", "value": "9782075118842"}, + {"type": "bf:Local", "value": "cantook-EDEN502344"}, + {"type": "bf:Local", "source": "cantook", "value": "record1"}, + ] + assert doc1.get("type") == [ + {"main_type": "docmaintype_book", "subtype": "docsubtype_e-book"} ] - assert doc1.get('type') == [{ - 'main_type': 'docmaintype_book', - 'subtype': 'docsubtype_e-book' - }] assert len(list(Holding.get_holdings_pid_by_document_pid(doc1.pid))) == 1 - doc2 = Document.get_record_by_pid('2') - assert doc2.get('$schema') is not None - assert doc2.get('identifiedBy') == [ - {'type': 'bf:Isbn', 'value': '9782811234157'}, - {'type': 'bf:Local', 'value': 'cantook-immateriel.frO1006810'}, - {'type': 'bf:Local', 'source': 'cantook', 'value': 'record2'} + doc2 = Document.get_record_by_pid("2") + assert doc2.get("$schema") is not None + assert doc2.get("identifiedBy") == [ + {"type": "bf:Isbn", "value": "9782811234157"}, + {"type": "bf:Local", "value": "cantook-immateriel.frO1006810"}, + {"type": "bf:Local", "source": "cantook", "value": "record2"}, + ] + assert doc2.get("type") == [ + {"main_type": "docmaintype_audio", "subtype": "docsubtype_audio_book"} ] - assert doc2.get('type') == [{ - 'main_type': 'docmaintype_audio', - 'subtype': 'docsubtype_audio_book' - }] assert len(list(Holding.get_holdings_pid_by_document_pid(doc2.pid))) == 1 # test update @@ -80,7 +85,7 @@ def test_publish_harvested_records(app, ebooks_1_xml, ebooks_2_xml, hold = Holding.get_record_by_pid(hold_pid) Holding.create(data=hold, dbcommit=True, reindex=True, delete_pid=True) # 
create a holding without valid source uri - hold['electronic_location'][0]['uri'] = 'https://invalid.uri/XXXXXX' + hold["electronic_location"][0]["uri"] = "https://invalid.uri/XXXXXX" Holding.create(data=hold, dbcommit=True, reindex=True, delete_pid=True) HoldingsSearch.flush_and_refresh() publish_harvested_records(sender=None, records=records) @@ -91,13 +96,15 @@ def test_publish_harvested_records(app, ebooks_1_xml, ebooks_2_xml, # test delete records = [] - del doc1['electronicLocator'] + del doc1["electronicLocator"] records.append(doc1) - doc2['electronicLocator'] = [{ - "content": "coverImage", - "type": "relatedResource", - "url": "http://images.immateriel.fr/covers/DEQ2C5A.png" - }] + doc2["electronicLocator"] = [ + { + "content": "coverImage", + "type": "relatedResource", + "url": "http://images.immateriel.fr/covers/DEQ2C5A.png", + } + ] records.append(doc2) create_records(records=records) diff --git a/tests/ui/ebooks/test_ebooks_utils.py b/tests/ui/ebooks/test_ebooks_utils.py index 145fd5eebb..af34e865fa 100644 --- a/tests/ui/ebooks/test_ebooks_utils.py +++ b/tests/ui/ebooks/test_ebooks_utils.py @@ -22,10 +22,15 @@ def test_add_oai_source(app): """Test add oai source.""" - msg = add_oai_source(name='test', baseurl='http://test.com') - assert msg == 'Added' - msg = add_oai_source(name='test', baseurl='http://test.com') - assert msg == 'Not Updated' - msg = add_oai_source(name='test', baseurl='http://test.com', - setspecs='specs', comment='comment', update=True) - assert msg == 'Updated' + msg = add_oai_source(name="test", baseurl="http://test.com") + assert msg == "Added" + msg = add_oai_source(name="test", baseurl="http://test.com") + assert msg == "Not Updated" + msg = add_oai_source( + name="test", + baseurl="http://test.com", + setspecs="specs", + comment="comment", + update=True, + ) + assert msg == "Updated" diff --git a/tests/ui/entities/local_entities/test_local_entities_api.py b/tests/ui/entities/local_entities/test_local_entities_api.py index 296c2ea0b7..152db60104 100644 --- a/tests/ui/entities/local_entities/test_local_entities_api.py +++ b/tests/ui/entities/local_entities/test_local_entities_api.py @@ -23,16 +23,16 @@ import time from datetime import timedelta -from utils import flush_index - from rero_ils.modules.documents.api import Document, DocumentsSearch from rero_ils.modules.utils import get_ref_for_pid def test_local_entity_properties(local_entity_person): """Test local entity property""" - assert local_entity_person.get_authorized_access_point(None) == \ - local_entity_person['authorized_access_point'] + assert ( + local_entity_person.get_authorized_access_point(None) + == local_entity_person["authorized_access_point"] + ) def test_local_entity_indexing(app, local_entity_person, document_data_tmp): @@ -41,34 +41,32 @@ def test_local_entity_indexing(app, local_entity_person, document_data_tmp): # Check relations between local entity and other resources. 
data = document_data_tmp - data.setdefault('contribution', []).append({ - 'entity': {'$ref': get_ref_for_pid('locent', entity.pid)}, - 'role': ['aut'] - }) + data.setdefault("contribution", []).append( + {"entity": {"$ref": get_ref_for_pid("locent", entity.pid)}, "role": ["aut"]} + ) doc = Document.create(data, delete_pid=True, reindex=True, dbcommit=True) reasons = entity.reasons_not_to_delete() - assert reasons['links']['documents'] + assert reasons["links"]["documents"] # Update the local entity and check if related resources are updated - original_access_point = entity['authorized_access_point'] - entity['name'] = 'my_local_access_point' + original_access_point = entity["authorized_access_point"] + entity["name"] = "my_local_access_point" entity = entity.update(entity, dbcommit=True, reindex=True, commit=True) # updating related resources is an asynchronous task (to not block the app # if there are a lot of related resources). We need to wait for the end of # the task to check if related resources are up-to-date. - delay = app.config.get('RERO_ILS_INDEXER_TASK_DELAY', 0) \ + timedelta(seconds=2) + delay = app.config.get("RERO_ILS_INDEXER_TASK_DELAY", 0) + timedelta(seconds=2) time.sleep(delay.seconds) # find a better way to detect that the task is finished. - flush_index(DocumentsSearch.Meta.index) + DocumentsSearch.flush_and_refresh() hit = DocumentsSearch().get_record_by_pid(doc.pid) assert any( - contribution['entity']['authorized_access_point_fr'] == - entity.get_authorized_access_point(language='fr') + contribution["entity"]["authorized_access_point_fr"] + == entity.get_authorized_access_point(language="fr") for contribution in hit.contribution ) # reset fixtures - entity['authorized_access_point'] = original_access_point + entity["authorized_access_point"] = original_access_point entity.update(entity, dbcommit=True, reindex=True) doc.delete() diff --git a/tests/ui/entities/local_entities/test_local_entities_dumpers.py b/tests/ui/entities/local_entities/test_local_entities_dumpers.py index 3ff1395431..5a81d5b577 100644 --- a/tests/ui/entities/local_entities/test_local_entities_dumpers.py +++ b/tests/ui/entities/local_entities/test_local_entities_dumpers.py @@ -25,11 +25,11 @@ def test_local_entities_document_dumper(local_entity_person2): """Test document dumper.""" dumped_record = local_entity_person2.dumps(dumper=document_dumper) - authorized_access_point = 'William III, King of England (1650-1702)' + authorized_access_point = "William III, King of England (1650-1702)" for field in [ - 'authorized_access_point_de', - 'authorized_access_point_en', - 'authorized_access_point_fr', - 'authorized_access_point_it' + "authorized_access_point_de", + "authorized_access_point_en", + "authorized_access_point_fr", + "authorized_access_point_it", ]: assert dumped_record[field] == authorized_access_point diff --git a/tests/ui/entities/local_entities/test_local_entities_jsonresolver.py b/tests/ui/entities/local_entities/test_local_entities_jsonresolver.py index 2e272d16e4..1d49389bbf 100644 --- a/tests/ui/entities/local_entities/test_local_entities_jsonresolver.py +++ b/tests/ui/entities/local_entities/test_local_entities_jsonresolver.py @@ -24,14 +24,16 @@ def test_local_entities_jsonresolver(local_entity_person2): """Test local entity json resolver.""" - rec = Record.create({ - 'local_entity': { - '$ref': 'https://bib.rero.ch/api/local_entities/locent_pers2' + rec = Record.create( + { + "local_entity": { + "$ref": "https://bib.rero.ch/api/local_entities/locent_pers2" + } } - }) - assert 
rec.replace_refs().get('local_entity') == { - 'pid': 'locent_pers2', - 'type': 'locent' + ) + assert rec.replace_refs().get("local_entity") == { + "pid": "locent_pers2", + "type": "locent", } # deleted record @@ -40,8 +42,8 @@ def test_local_entities_jsonresolver(local_entity_person2): type(rec)(rec.replace_refs()).dumps() # non existing record - rec = Record.create({ - 'local_entity': {'$ref': 'https://bib.rero.ch/api/local_entities/n_e'} - }) + rec = Record.create( + {"local_entity": {"$ref": "https://bib.rero.ch/api/local_entities/n_e"}} + ) with pytest.raises(JsonRefError): type(rec)(rec.replace_refs()).dumps() diff --git a/tests/ui/entities/local_entities/test_local_entities_mapping.py b/tests/ui/entities/local_entities/test_local_entities_mapping.py index b25a0b9aed..a3968bf14e 100644 --- a/tests/ui/entities/local_entities/test_local_entities_mapping.py +++ b/tests/ui/entities/local_entities/test_local_entities_mapping.py @@ -18,8 +18,10 @@ """Item record mapping tests.""" from utils import get_mapping -from rero_ils.modules.entities.local_entities.api import LocalEntitiesSearch, \ - LocalEntity +from rero_ils.modules.entities.local_entities.api import ( + LocalEntitiesSearch, + LocalEntity, +) def test_local_entities_es_mapping(app, local_entity_person2_data): @@ -28,9 +30,6 @@ def test_local_entities_es_mapping(app, local_entity_person2_data): mapping = get_mapping(search.Meta.index) assert mapping LocalEntity.create( - local_entity_person2_data, - dbcommit=True, - reindex=True, - delete_pid=True + local_entity_person2_data, dbcommit=True, reindex=True, delete_pid=True ) assert mapping == get_mapping(search.Meta.index) diff --git a/tests/ui/entities/remote_entities/test_remote_entities_api.py b/tests/ui/entities/remote_entities/test_remote_entities_api.py index 57426e2c97..cb8e093457 100644 --- a/tests/ui/entities/remote_entities/test_remote_entities_api.py +++ b/tests/ui/entities/remote_entities/test_remote_entities_api.py @@ -24,53 +24,56 @@ from copy import deepcopy import mock -from utils import flush_index, mock_response +from utils import mock_response from rero_ils.modules.documents.api import Document, DocumentsSearch -from rero_ils.modules.entities.remote_entities.api import \ - RemoteEntitiesSearch, RemoteEntity, remote_entity_id_fetcher -from rero_ils.modules.entities.remote_entities.replace import \ - ReplaceIdentifiedBy +from rero_ils.modules.entities.remote_entities.api import ( + RemoteEntitiesSearch, + RemoteEntity, + remote_entity_id_fetcher, +) +from rero_ils.modules.entities.remote_entities.replace import ReplaceIdentifiedBy from rero_ils.modules.entities.remote_entities.sync import SyncEntity def test_remote_entity_create(app, entity_person_data_tmp, caplog): """Test MEF entity creation.""" - pers = RemoteEntity.get_record_by_pid('1') + pers = RemoteEntity.get_record_by_pid("1") assert not pers - pers = RemoteEntity.create( - entity_person_data_tmp, - dbcommit=True, - delete_pid=True - ) + pers = RemoteEntity.create(entity_person_data_tmp, dbcommit=True, delete_pid=True) assert pers == entity_person_data_tmp - assert pers.get('pid') == '1' + assert pers.get("pid") == "1" - pers = RemoteEntity.get_record_by_pid('1') + pers = RemoteEntity.get_record_by_pid("1") assert pers == entity_person_data_tmp fetched_pid = remote_entity_id_fetcher(pers.id, pers) - assert fetched_pid.pid_value == '1' - assert fetched_pid.pid_type == 'rement' - entity_person_data_tmp['viaf_pid'] = '1234' + assert fetched_pid.pid_value == "1" + assert fetched_pid.pid_type == "rement" + 
entity_person_data_tmp["viaf_pid"] = "1234" RemoteEntity.create(entity_person_data_tmp, dbcommit=True, delete_pid=True) - pers = RemoteEntity.get_record_by_pid('2') - assert pers.get('viaf_pid') == '1234' + pers = RemoteEntity.get_record_by_pid("2") + assert pers.get("viaf_pid") == "1234" assert pers.organisation_pids == [] pers.delete_from_index() # test the messages from current_app.logger - assert caplog.records[0].name == 'elasticsearch' + assert caplog.records[0].name == "elasticsearch" assert caplog.record_tuples[1] == ( - 'invenio', 30, 'Can not delete from index RemoteEntity: 2' + "invenio", + 30, + "Can not delete from index RemoteEntity: 2", ) -@mock.patch('requests.Session.get') +@mock.patch("requests.Session.get") def test_remote_entity_mef_create( - mock_contributions_mef_get, app, mef_agents_url, - entity_person_data_tmp, entity_person_response_data + mock_contributions_mef_get, + app, + mef_agents_url, + entity_person_data_tmp, + entity_person_response_data, ): """Test MEF contribution creation.""" count = RemoteEntity.count() @@ -78,24 +81,25 @@ def test_remote_entity_mef_create( json_data=entity_person_response_data ) pers_mef, online = RemoteEntity.get_record_by_ref( - f'{mef_agents_url}/rero/A017671081') - flush_index(RemoteEntitiesSearch.Meta.index) + f"{mef_agents_url}/rero/A017671081" + ) + RemoteEntitiesSearch.flush_and_refresh() assert pers_mef == entity_person_data_tmp assert online assert RemoteEntity.count() == count + 1 - pers_mef.pop('idref') - pers_mef['sources'] = ['gnd'] + pers_mef.pop("idref") + pers_mef["sources"] = ["gnd"] pers_mef.replace(pers_mef, dbcommit=True) - pers_db, online = RemoteEntity.get_record_by_ref( - f'{mef_agents_url}/gnd/13343771X') - assert pers_db['sources'] == ['gnd'] + pers_db, online = RemoteEntity.get_record_by_ref(f"{mef_agents_url}/gnd/13343771X") + assert pers_db["sources"] == ["gnd"] assert not online # remove created contribution - RemoteEntity.get_record_by_pid(entity_person_data_tmp['pid']).delete( - True, True, True) + RemoteEntity.get_record_by_pid(entity_person_data_tmp["pid"]).delete( + True, True, True + ) -@mock.patch('requests.Session.get') +@mock.patch("requests.Session.get") def test_sync_contribution( mock_get, app, mef_agents_url, entity_person_data_tmp, document_data_ref ): @@ -106,136 +110,130 @@ def test_sync_contribution( assert sync_entity pers = RemoteEntity.create( - entity_person_data_tmp, - dbcommit=True, - reindex=True, - delete_pid=True + entity_person_data_tmp, dbcommit=True, reindex=True, delete_pid=True ) - flush_index(RemoteEntitiesSearch.Meta.index) + RemoteEntitiesSearch.flush_and_refresh() - idref_pid = pers['idref']['pid'] - document_data_ref['contribution'][0]['entity']['$ref'] = \ - f'{mef_agents_url}/idref/{idref_pid}' + idref_pid = pers["idref"]["pid"] + document_data_ref["contribution"][0]["entity"][ + "$ref" + ] = f"{mef_agents_url}/idref/{idref_pid}" doc = Document.create( - deepcopy(document_data_ref), - dbcommit=True, - reindex=True, - delete_pid=True + deepcopy(document_data_ref), dbcommit=True, reindex=True, delete_pid=True ) - flush_index(DocumentsSearch.Meta.index) + DocumentsSearch.flush_and_refresh() # Test that entity could not be deleted - assert pers.get_links_to_me(True)['documents'] == [doc.pid] - assert pers.reasons_not_to_delete()['links']['documents'] == 1 + assert pers.get_links_to_me(True)["documents"] == [doc.pid] + assert pers.reasons_not_to_delete()["links"]["documents"] == 1 # === nothing to update - sync_entity._get_latest = mock.MagicMock( - 
return_value=entity_person_data_tmp - ) + sync_entity._get_latest = mock.MagicMock(return_value=entity_person_data_tmp) # nothing touched as it is up-to-date - assert (0, 0, set()) == sync_entity.sync(f'{pers.pid}') + assert (0, 0, set()) == sync_entity.sync(f"{pers.pid}") # nothing removed - assert (0, []) == sync_entity.remove_unused(f'{pers.pid}') + assert (0, []) == sync_entity.remove_unused(f"{pers.pid}") # === MEF metadata has been changed data = deepcopy(entity_person_data_tmp) - data['idref']['authorized_access_point'] = 'foo' + data["idref"]["authorized_access_point"] = "foo" sync_entity._get_latest = mock.MagicMock(return_value=data) - mock_resp = dict(hits=dict(hits=[dict( - id=data['pid'], - metadata=data - )])) + mock_resp = dict(hits=dict(hits=[dict(id=data["pid"], metadata=data)])) mock_get.return_value = mock_response(json_data=mock_resp) - assert DocumentsSearch().query( - 'term', - contribution__entity__authorized_access_point_fr='foo').count() == 0 + assert ( + DocumentsSearch() + .query("term", contribution__entity__authorized_access_point_fr="foo") + .count() + == 0 + ) # after synchronization, the same document has been updated 3 times and one # MEF record has been updated, with no errors - assert (1, 1, set()) == sync_entity.sync(f'{pers.pid}') - flush_index(DocumentsSearch.Meta.index) + assert (1, 1, set()) == sync_entity.sync(f"{pers.pid}") + DocumentsSearch.flush_and_refresh() # contribution and document should be changed - assert RemoteEntity.get_record_by_pid( - pers.pid)['idref']['authorized_access_point'] == 'foo' - assert DocumentsSearch().query( - 'term', contribution__entity__authorized_access_point_fr='foo').count() + assert ( + RemoteEntity.get_record_by_pid(pers.pid)["idref"]["authorized_access_point"] + == "foo" + ) + assert ( + DocumentsSearch() + .query("term", contribution__entity__authorized_access_point_fr="foo") + .count() + ) # nothing has been removed as only metadata has been changed - assert (0, []) == sync_entity.remove_unused(f'{pers.pid}') + assert (0, []) == sync_entity.remove_unused(f"{pers.pid}") # === a new MEF exists with the same content data = deepcopy(entity_person_data_tmp) # MEF pid has changed - data['pid'] = 'foo_mef' + data["pid"] = "foo_mef" # mock MEF services sync_entity._get_latest = mock.MagicMock(return_value=data) - mock_resp = dict(hits=dict(hits=[dict( - id=data['pid'], - metadata=data - )])) + mock_resp = dict(hits=dict(hits=[dict(id=data["pid"], metadata=data)])) mock_get.return_value = mock_response(json_data=mock_resp) # after synchronization, the same document has been updated 3 times and one # MEF record has been updated, with no errors - assert (1, 1, set()) == sync_entity.sync(f'{pers.pid}') - flush_index(DocumentsSearch.Meta.index) + assert (1, 1, set()) == sync_entity.sync(f"{pers.pid}") + DocumentsSearch.flush_and_refresh() # new contribution has been created - assert RemoteEntity.get_record_by_pid('foo_mef') - assert RemoteEntity.get_record_by_ref( - f'{mef_agents_url}/idref/{idref_pid}')[0] - db_agent = Document.get_record_by_pid( - doc.pid).get('contribution')[0]['entity'] - assert db_agent['pid'] == 'foo_mef' + assert RemoteEntity.get_record_by_pid("foo_mef") + assert RemoteEntity.get_record_by_ref(f"{mef_agents_url}/idref/{idref_pid}")[0] + db_agent = Document.get_record_by_pid(doc.pid).get("contribution")[0]["entity"] + assert db_agent["pid"] == "foo_mef" # the old MEF has been removed - assert (1, []) == sync_entity.remove_unused(f'{pers.pid}') + assert (1, []) == sync_entity.remove_unused(f"{pers.pid}") # should not exist 
anymore assert not RemoteEntity.get_record_by_pid(pers.pid) # === Update the MEF links content data = deepcopy(entity_person_data_tmp) # MEF pid has changed - data['pid'] = 'foo_mef' + data["pid"] = "foo_mef" # IDREF pid has changed - data['idref']['pid'] = 'foo_idref' + data["idref"]["pid"] = "foo_idref" # mock MEF services sync_entity._get_latest = mock.MagicMock(return_value=data) - mock_resp = dict(hits=dict(hits=[dict( - id=data['pid'], - metadata=data - )])) + mock_resp = dict(hits=dict(hits=[dict(id=data["pid"], metadata=data)])) mock_get.return_value = mock_response(json_data=mock_resp) # synchronization the same document has been updated 3 times, # one MEF record has been updated, no errors assert (1, 1, set()) == sync_entity.sync(f'{data["pid"]}') - flush_index(DocumentsSearch.Meta.index) + DocumentsSearch.flush_and_refresh() # new contribution has been created - assert RemoteEntity.get_record_by_pid('foo_mef') + assert RemoteEntity.get_record_by_pid("foo_mef") # document has been updated with the new MEF and IDREF pid - assert DocumentsSearch().query( - 'term', contribution__entity__pids__remote='foo_mef').count() - assert DocumentsSearch().query( - 'term', contribution__entity__pids__idref='foo_idref').count() - db_agent = Document.get_record_by_pid( - doc.pid).get('contribution')[0]['entity'] - assert db_agent['$ref'] == f'{mef_agents_url}/idref/foo_idref' - assert db_agent['pid'] == 'foo_mef' + assert ( + DocumentsSearch() + .query("term", contribution__entity__pids__remote="foo_mef") + .count() + ) + assert ( + DocumentsSearch() + .query("term", contribution__entity__pids__idref="foo_idref") + .count() + ) + db_agent = Document.get_record_by_pid(doc.pid).get("contribution")[0]["entity"] + assert db_agent["$ref"] == f"{mef_agents_url}/idref/foo_idref" + assert db_agent["pid"] == "foo_mef" # remove the document doc = Document.get_record_by_pid(doc.pid) doc.delete(True, True, True) - flush_index(DocumentsSearch.Meta.index) + DocumentsSearch.flush_and_refresh() # the MEF record can be removed assert (1, []) == sync_entity.remove_unused() # should not exists anymore - assert not RemoteEntity.get_record_by_pid('foo_mef') + assert not RemoteEntity.get_record_by_pid("foo_mef") -@mock.patch('requests.Session.get') +@mock.patch("requests.Session.get") def test_sync_concept( - mock_get, app, mef_concepts_url, entity_topic_data, - document_data_subject_ref + mock_get, app, mef_concepts_url, entity_topic_data, document_data_subject_ref ): # """Test MEF agent synchronization.""" @@ -245,54 +243,53 @@ def test_sync_concept( assert sync_entity topic = RemoteEntity.create( - entity_topic_data, - dbcommit=True, - reindex=True, - delete_pid=True + entity_topic_data, dbcommit=True, reindex=True, delete_pid=True ) - flush_index(RemoteEntitiesSearch.Meta.index) + RemoteEntitiesSearch.flush_and_refresh() entity_url = f'{mef_concepts_url}/idref/{topic["idref"]["pid"]}' - document_data_subject_ref['subjects'][0]['entity']['$ref'] = entity_url + document_data_subject_ref["subjects"][0]["entity"]["$ref"] = entity_url doc = Document.create( deepcopy(document_data_subject_ref), dbcommit=True, reindex=True, - delete_pid=True + delete_pid=True, ) - flush_index(DocumentsSearch.Meta.index) + DocumentsSearch.flush_and_refresh() # === nothing to update sync_entity._get_latest = mock.MagicMock(return_value=entity_topic_data) # nothing touched as it is up-to-date - assert (0, 0, set()) == sync_entity.sync(f'pid:{topic.pid}') + assert (0, 0, set()) == sync_entity.sync(f"pid:{topic.pid}") # nothing removed - 
assert (0, []) == sync_entity.remove_unused(f'pid:{topic.pid}') + assert (0, []) == sync_entity.remove_unused(f"pid:{topic.pid}") # === MEF metadata has been changed data = deepcopy(entity_topic_data) - data['idref']['authorized_access_point'] = 'foo' + data["idref"]["authorized_access_point"] = "foo" sync_entity._get_latest = mock.MagicMock(return_value=data) - mock_resp = dict(hits=dict(hits=[dict( - id=data['pid'], - metadata=data - )])) + mock_resp = dict(hits=dict(hits=[dict(id=data["pid"], metadata=data)])) mock_get.return_value = mock_response(json_data=mock_resp) - assert DocumentsSearch().query( - 'term', - subjects__entity__authorized_access_point_fr='foo').count() == 0 + assert ( + DocumentsSearch() + .query("term", subjects__entity__authorized_access_point_fr="foo") + .count() + == 0 + ) # synchronization the same document has been updated 3 times, one MEF # record has been updated, no errors - assert (1, 1, set()) == sync_entity.sync(f'pid:{topic.pid}') - flush_index(DocumentsSearch.Meta.index) + assert (1, 1, set()) == sync_entity.sync(f"pid:{topic.pid}") + DocumentsSearch.flush_and_refresh() # contribution and document should be changed entity = RemoteEntity.get_record_by_pid(topic.pid) - assert entity['idref']['authorized_access_point'] == 'foo' - assert DocumentsSearch()\ - .query('term', subjects__entity__authorized_access_point_fr='foo')\ + assert entity["idref"]["authorized_access_point"] == "foo" + assert ( + DocumentsSearch() + .query("term", subjects__entity__authorized_access_point_fr="foo") .count() + ) # nothing has been removed as only metadata has been changed assert (0, []) == sync_entity.remove_unused(topic.pid) @@ -301,9 +298,9 @@ def test_sync_concept( # * Entity record can be removed ; and should not exist anymore doc = Document.get_record_by_pid(doc.pid) doc.delete(True, True, True) - flush_index(DocumentsSearch.Meta.index) + DocumentsSearch.flush_and_refresh() assert (1, []) == sync_entity.remove_unused() - assert not RemoteEntity.get_record_by_pid('foo_mef') + assert not RemoteEntity.get_record_by_pid("foo_mef") def test_remote_entity_properties( @@ -315,66 +312,73 @@ def test_remote_entity_properties( assert document.pid not in entity_person.documents_pids() assert str(document.id) not in entity_person.documents_ids() assert item.organisation_pid not in entity_person.organisation_pids - document['contribution'] = [{ - 'entity': { - '$ref': 'https://mef.rero.ch/api/agents/idref/223977268', - }, - 'role': ['cre'] - }] + document["contribution"] = [ + { + "entity": { + "$ref": "https://mef.rero.ch/api/agents/idref/223977268", + }, + "role": ["cre"], + } + ] document.update(document, dbcommit=True, reindex=True) assert document.pid in entity_person.documents_pids() assert str(document.id) in entity_person.documents_ids() assert item.organisation_pid in entity_person.organisation_pids - assert entity_person == RemoteEntity.get_entity('mef', entity_person.pid) - assert entity_person == RemoteEntity.get_entity('viaf', '70119347') + assert entity_person == RemoteEntity.get_entity("mef", entity_person.pid) + assert entity_person == RemoteEntity.get_entity("viaf", "70119347") sources_pids = entity_person.source_pids() - assert sources_pids['idref'] == '223977268' - assert sources_pids['gnd'] == '13343771X' - assert sources_pids['rero'] == 'A017671081' + assert sources_pids["idref"] == "223977268" + assert sources_pids["gnd"] == "13343771X" + assert sources_pids["rero"] == "A017671081" # Test special behavior of `get_record_by_ref` :: # Simulate an exception into 
the entity creation to test the exception # catching block statement. with mock.patch( - 'rero_ils.modules.entities.remote_entities.api.RemoteEntity.create', - side_effect=Exception() + "rero_ils.modules.entities.remote_entities.api.RemoteEntity.create", + side_effect=Exception(), ): entity, _ = RemoteEntity.get_record_by_ref( - 'https://bib.rero.ch/api/documents/dummy_doc') + "https://bib.rero.ch/api/documents/dummy_doc" + ) assert entity is None # remove contribution - document.pop('contribution') + document.pop("contribution") document.update(document, dbcommit=True, reindex=True) assert document.pid not in entity_person.documents_pids() assert str(document.id) not in entity_person.documents_ids() assert item.organisation_pid not in entity_person.organisation_pids # add subjects - document['subjects'] = [{ - 'entity': { - '$ref': 'https://mef.rero.ch/api/concepts/idref/ent_concept_idref', + document["subjects"] = [ + { + "entity": { + "$ref": "https://mef.rero.ch/api/concepts/idref/ent_concept_idref", + } } - }] + ] document.update(document, dbcommit=True, reindex=True) assert document.pid in mef_concept1.documents_pids() assert str(document.id) in mef_concept1.documents_ids() assert item.organisation_pid in mef_concept1.organisation_pids # remove subjects - document.pop('subjects') + document.pop("subjects") document.update(document, dbcommit=True, reindex=True) assert document.pid not in mef_concept1.documents_pids() assert str(document.id) not in mef_concept1.documents_ids() assert item.organisation_pid not in mef_concept1.organisation_pids # add genreForm - document['genreForm'] = [{ - 'entity': { - '$ref': 'https://mef.rero.ch/api/concepts/idref/ent_concept_idref', + document["genreForm"] = [ + { + "entity": { + "$ref": "https://mef.rero.ch/api/concepts/idref/ent_concept_idref", + } } - }] + ] document.update(document, dbcommit=True, reindex=True) assert document.pid in mef_concept1.documents_pids() assert str(document.id) in mef_concept1.documents_ids() @@ -385,102 +389,98 @@ def test_remote_entity_properties( def test_replace_identified_by( - app, entity_organisation, entity_person_rero, person2_data, - entity_person_all, entity_topic_data_2, entity_topic_data_temporal, + app, + entity_organisation, + entity_person_rero, + person2_data, + entity_person_all, + entity_topic_data_2, + entity_topic_data_temporal, entity_place_data, - document, document_sion_items, export_document + document, + document_sion_items, + export_document, ): """Test replace identified by with $ref.""" # === setup log_path = tempfile.mkdtemp() replace_identified_by = ReplaceIdentifiedBy( - field='contribution', - verbose=True, - dry_run=False, - log_dir=log_path + field="contribution", verbose=True, dry_run=False, log_dir=log_path ) assert replace_identified_by assert replace_identified_by.count() == 2 # no MEF response for agents in contribution with mock.patch( - 'requests.Session.get', - side_effect=[mock_response(status=404), mock_response(status=404)] + "requests.Session.get", + side_effect=[mock_response(status=404), mock_response(status=404)], ): changed, not_found, rero_only = replace_identified_by.run() assert changed == 0 assert not_found == 2 assert rero_only == 0 assert replace_identified_by.not_found == { - 'bf:Organisation': { - 'gnd:1161956409': 'Convegno internazionale ' - 'di italianistica Craiova' + "bf:Organisation": { + "gnd:1161956409": "Convegno internazionale " "di italianistica Craiova" }, - 'bf:Person': { - 'rero:A003633163': 'Nebehay, Christian Michael' - } + "bf:Person": 
{"rero:A003633163": "Nebehay, Christian Michael"}, } replace_identified_by.set_timestamp() data = replace_identified_by.get_timestamp() - assert 'contribution' in data - assert data['contribution']['changed'] == 0 - assert data['contribution']['not found'] == 2 - assert data['contribution']['rero only'] == 0 + assert "contribution" in data + assert data["contribution"]["changed"] == 0 + assert data["contribution"]["not found"] == 2 + assert data["contribution"]["rero only"] == 0 # with MEF response for agents in contribution with mock.patch( - 'requests.Session.get', + "requests.Session.get", side_effect=[ mock_response(json_data=entity_person_rero), - mock_response(json_data=entity_organisation) - ] + mock_response(json_data=entity_organisation), + ], ): changed, not_found, rero_only = replace_identified_by.run() assert changed == 1 assert not_found == 0 assert rero_only == 1 assert replace_identified_by.rero_only == { - 'bf:Person': { - 'rero:A003633163': 'Nebehay, Christian Michael' - } + "bf:Person": {"rero:A003633163": "Nebehay, Christian Michael"} } # with MEF response for concepts in subjects replace_identified_by = ReplaceIdentifiedBy( - field='subjects', - verbose=True, - dry_run=False, - log_dir=log_path + field="subjects", verbose=True, dry_run=False, log_dir=log_path ) assert replace_identified_by assert replace_identified_by.count() == 2 with mock.patch( - 'requests.Session.get', + "requests.Session.get", side_effect=[ mock_response(json_data=entity_person_all), mock_response(json_data=entity_topic_data_temporal), mock_response(json_data=entity_place_data), mock_response(json_data=person2_data), - mock_response(json_data={'rero': { - 'authorized_access_point': 'Europe occidentale', - 'type': 'bf:Place' - }}), - mock_response(json_data=entity_topic_data_2) - ] + mock_response( + json_data={ + "rero": { + "authorized_access_point": "Europe occidentale", + "type": "bf:Place", + } + } + ), + mock_response(json_data=entity_topic_data_2), + ], ): changed, not_found, rero_only = replace_identified_by.run() assert changed == 1 assert not_found == 0 assert rero_only == 3 assert dict(sorted(replace_identified_by.rero_only.items())) == { - 'bf:Person': { - 'rero:A009963344': 'Athenagoras (patriarche oecuménique ; 1)' + "bf:Person": { + "rero:A009963344": "Athenagoras (patriarche oecuménique ; 1)" }, - 'bf:Topic': { - 'rero:A021039750': 'Bases de données déductives' - }, - 'bf:Place': { - 'rero:A009975209': 'Europe occidentale' - } + "bf:Topic": {"rero:A021039750": "Bases de données déductives"}, + "bf:Place": {"rero:A009975209": "Europe occidentale"}, } @@ -488,21 +488,21 @@ def test_entity_get_record_by_ref( mef_agents_url, entity_person, entity_person_data_tmp ): """Test remote entity: get record by ref.""" - dummy_ref = f'{mef_agents_url}/idref/dummy_idref_pid' + dummy_ref = f"{mef_agents_url}/idref/dummy_idref_pid" assert (None, False) == RemoteEntity.get_record_by_ref(dummy_ref) # Remote entity from ES index - RemoteEntitiesSearch().filter('term', pid=entity_person.pid).delete() - flush_index(RemoteEntitiesSearch.Meta.index) + RemoteEntitiesSearch().filter("term", pid=entity_person.pid).delete() + RemoteEntitiesSearch.flush_and_refresh() ent_ref = f'{mef_agents_url}/idref/{entity_person["idref"]["pid"]}' with mock.patch( - 'rero_ils.modules.entities.remote_entities.api.get_mef_data_by_type', - return_value=entity_person_data_tmp + "rero_ils.modules.entities.remote_entities.api.get_mef_data_by_type", + return_value=entity_person_data_tmp, ): entity, online = 
RemoteEntity.get_record_by_ref(ent_ref) assert entity and online - flush_index(RemoteEntitiesSearch.Meta.index) - assert RemoteEntitiesSearch().filter('term', pid=entity_person.pid).count() + RemoteEntitiesSearch.flush_and_refresh() + assert RemoteEntitiesSearch().filter("term", pid=entity_person.pid).count() def test_remote_entity_resolve(entity_person): diff --git a/tests/ui/entities/remote_entities/test_remote_entities_mapping.py b/tests/ui/entities/remote_entities/test_remote_entities_mapping.py index e048a60507..0aa25019f9 100644 --- a/tests/ui/entities/remote_entities/test_remote_entities_mapping.py +++ b/tests/ui/entities/remote_entities/test_remote_entities_mapping.py @@ -20,46 +20,49 @@ from utils import get_mapping -from rero_ils.modules.entities.remote_entities.api import \ - RemoteEntitiesSearch, RemoteEntity +from rero_ils.modules.entities.remote_entities.api import ( + RemoteEntitiesSearch, + RemoteEntity, +) -def test_remote_entity_es_mapping(es_clear, db, entity_person_data_tmp): +def test_remote_entity_es_mapping(search_clear, db, entity_person_data_tmp): """Test contribution entity elasticsearch mapping.""" search = RemoteEntitiesSearch() mapping = get_mapping(search.Meta.index) assert mapping RemoteEntity.create( - entity_person_data_tmp, - dbcommit=True, - reindex=True, - delete_pid=True + entity_person_data_tmp, dbcommit=True, reindex=True, delete_pid=True ) assert mapping == get_mapping(search.Meta.index) -def test_concept_entity_es_mapping(es_clear, db, mef_concept1_data_tmp): +def test_concept_entity_es_mapping(search_clear, db, mef_concept1_data_tmp): """Test concept entity elasticsearch mapping.""" search = RemoteEntitiesSearch() mapping = get_mapping(search.Meta.index) assert mapping RemoteEntity.create( - mef_concept1_data_tmp, - dbcommit=True, - reindex=True, - delete_pid=True + mef_concept1_data_tmp, dbcommit=True, reindex=True, delete_pid=True ) assert mapping == get_mapping(search.Meta.index) def test_entities_search_mapping(app, entity_person): """Test Mef entities search mapping.""" - assert RemoteEntitiesSearch()\ - .query('query_string', query='philosophische Fakultät')\ - .count() == 1 - assert RemoteEntitiesSearch()\ - .query('match', **{'gnd.preferred_name': 'Loy'})\ - .count() == 1 - assert RemoteEntitiesSearch()\ - .query('match', **{'gnd.variant_name': 'Madeiros'})\ - .count() == 1 + assert ( + RemoteEntitiesSearch() + .query("query_string", query="philosophische Fakultät") + .count() + == 1 + ) + assert ( + RemoteEntitiesSearch().query("match", **{"gnd.preferred_name": "Loy"}).count() + == 1 + ) + assert ( + RemoteEntitiesSearch() + .query("match", **{"gnd.variant_name": "Madeiros"}) + .count() + == 1 + ) diff --git a/tests/ui/entities/remote_entities/test_remote_entities_ui.py b/tests/ui/entities/remote_entities/test_remote_entities_ui.py index 51a0a04a91..20ad2c250a 100644 --- a/tests/ui/entities/remote_entities/test_remote_entities_ui.py +++ b/tests/ui/entities/remote_entities/test_remote_entities_ui.py @@ -23,12 +23,12 @@ def test_remote_entity_label(app, entity_person_data): """Test entity label.""" - app.config['RERO_ILS_AGENTS_LABEL_ORDER'] = { - 'fallback': 'fr', - 'fr': ['rero', 'idref', 'gnd'], - 'de': ['gnd', 'rero', 'idref'], + app.config["RERO_ILS_AGENTS_LABEL_ORDER"] = { + "fallback": "fr", + "fr": ["rero", "idref", "gnd"], + "de": ["gnd", "rero", "idref"], } - label = entity_label(entity_person_data, 'fr') - assert label == 'Loy, Georg, 1885-19..' 
- label = entity_label(entity_person_data, 'it') - assert label == 'Loy, Georg, 1885-19..' + label = entity_label(entity_person_data, "fr") + assert label == "Loy, Georg, 1885-19.." + label = entity_label(entity_person_data, "it") + assert label == "Loy, Georg, 1885-19.." diff --git a/tests/ui/entities/remote_entities/test_remote_entities_utils.py b/tests/ui/entities/remote_entities/test_remote_entities_utils.py index 5795092048..9450d01d25 100644 --- a/tests/ui/entities/remote_entities/test_remote_entities_utils.py +++ b/tests/ui/entities/remote_entities/test_remote_entities_utils.py @@ -22,22 +22,23 @@ from requests import RequestException from utils import mock_response -from rero_ils.modules.entities.remote_entities.utils import \ - get_mef_data_by_type +from rero_ils.modules.entities.remote_entities.utils import get_mef_data_by_type -@mock.patch('requests.Session.get') +@mock.patch("requests.Session.get") def test_utils_mef_data(mock_get, app): """Test getting MEF data by type.""" with pytest.raises(KeyError): - get_mef_data_by_type('idref', 'pid', 'dummy_entity', verbose=True) + get_mef_data_by_type("idref", "pid", "dummy_entity", verbose=True) mock_get.return_value = mock_response( - json_data={'hits': {'hits': [], 'toto': 'foo'}}) + json_data={"hits": {"hits": [], "toto": "foo"}} + ) with pytest.raises(ValueError): - get_mef_data_by_type('viaf', 'pid', 'agents', verbose=True) + get_mef_data_by_type("viaf", "pid", "agents", verbose=True) mock_get.return_value = mock_response( - status=400, json_data={'error': 'Bad request'}) + status=400, json_data={"error": "Bad request"} + ) with pytest.raises(RequestException): - get_mef_data_by_type('viaf', 'pid', 'agents', verbose=True) + get_mef_data_by_type("viaf", "pid", "agents", verbose=True) diff --git a/tests/ui/entities/test_entities_api.py b/tests/ui/entities/test_entities_api.py index 74fd9700d0..e3dea42dcd 100644 --- a/tests/ui/entities/test_entities_api.py +++ b/tests/ui/entities/test_entities_api.py @@ -38,9 +38,9 @@ def test_entities_properties(entity_person_data_tmp): def test_entities_helpers(local_entity_org): """Test entity helpers.""" - data = {'pid': 'dummy'} + data = {"pid": "dummy"} with pytest.raises(RecordNotFound): get_entity_record_from_data(data) - data = {'$ref': get_ref_for_pid('locent', local_entity_org.pid)} + data = {"$ref": get_ref_for_pid("locent", local_entity_org.pid)} assert get_entity_record_from_data(data) == local_entity_org diff --git a/tests/ui/entities/test_entities_ui.py b/tests/ui/entities/test_entities_ui.py index b9198bf9b9..782cffc184 100644 --- a/tests/ui/entities/test_entities_ui.py +++ b/tests/ui/entities/test_entities_ui.py @@ -22,99 +22,105 @@ from invenio_i18n.ext import current_i18n from rero_ils.modules.entities.models import EntityType -from rero_ils.modules.entities.views import entity_icon, \ - extract_data_from_remote_entity, search_link, sources_link +from rero_ils.modules.entities.views import ( + entity_icon, + extract_data_from_remote_entity, + search_link, + sources_link, +) def test_view(client, entity_person, local_entity_person): """Entity detailed view test.""" # Check unauthorized type value in url - res = client.get(url_for( - 'entities.entity_detailed_view', - viewcode='global', - type='foo', - pid='foo' - )) + res = client.get( + url_for( + "entities.entity_detailed_view", viewcode="global", type="foo", pid="foo" ) + ) assert res.status_code == 404 # Check 404 error if entity does not exist - res = client.get(url_for( - 'entities.entity_detailed_view', - viewcode='global', - type='remote', - 
pid='foo' - )) + res = client.get( + url_for( + "entities.entity_detailed_view", viewcode="global", type="remote", pid="foo" + ) + ) assert res.status_code == 404 # Remote entity - res = client.get(url_for( - 'entities.entity_detailed_view', - viewcode='global', - type='remote', - pid=entity_person.get('pid') - )) + res = client.get( + url_for( + "entities.entity_detailed_view", + viewcode="global", + type="remote", + pid=entity_person.get("pid"), + ) + ) assert res.status_code == 200 # Local entity - res = client.get(url_for( - 'entities.entity_detailed_view', - viewcode='global', - type='local', - pid=local_entity_person.get('pid') - )) + res = client.get( + url_for( + "entities.entity_detailed_view", + viewcode="global", + type="local", + pid=local_entity_person.get("pid"), + ) + ) assert res.status_code == 200 def test_entity_icon(): """Entity icon test.""" - assert 'fa-building-o' == entity_icon(EntityType.ORGANISATION) + assert "fa-building-o" == entity_icon(EntityType.ORGANISATION) # Default icon if type not found - assert 'fa-question-circle-o' == entity_icon('foo') + assert "fa-question-circle-o" == entity_icon("foo") def test_extract_data_from_record(app): """Extract data from record test.""" contrib_data = { - 'idref': {'data': 'idref'}, - 'rero': {'data': 'rero'}, - 'gnd': {'data': 'gnd'} + "idref": {"data": "idref"}, + "rero": {"data": "rero"}, + "gnd": {"data": "gnd"}, } - current_i18n.locale.language = 'fr' + current_i18n.locale.language = "fr" source, data = extract_data_from_remote_entity(contrib_data) - assert source == 'idref' + assert source == "idref" assert contrib_data.get(source) == data - current_i18n.locale.language = 'de' + current_i18n.locale.language = "de" source, data = extract_data_from_remote_entity(contrib_data) - assert source == 'gnd' + assert source == "gnd" assert contrib_data.get(source) == data # Fallback test - current_i18n.locale.language = 'it' + current_i18n.locale.language = "it" source, data = extract_data_from_remote_entity(contrib_data) - assert source == 'idref' + assert source == "idref" assert contrib_data.get(source) == data # Control the selection cascade - contrib_data.pop('idref') - contrib_data.pop('gnd') + contrib_data.pop("idref") + contrib_data.pop("gnd") source, data = extract_data_from_remote_entity(contrib_data) - assert source == 'rero' + assert source == "rero" assert contrib_data.get(source) == data def test_sources_link(app): """Sources link test.""" data = { - 'idref': {'identifier': 'http://www.idref.fr/066924502'}, - 'gnd': {'identifier': 'http://d-nb.info/gnd/118754688'}, - 'rero': {'identifier': 'http://data.rero.ch/02-A003795108'}, - 'sources': ['idref', 'gnd', 'rero'] + "idref": {"identifier": "http://www.idref.fr/066924502"}, + "gnd": {"identifier": "http://d-nb.info/gnd/118754688"}, + "rero": {"identifier": "http://data.rero.ch/02-A003795108"}, + "sources": ["idref", "gnd", "rero"], } result = { - 'idref': 'http://www.idref.fr/066924502', - 'gnd': 'http://d-nb.info/gnd/118754688' + "idref": "http://www.idref.fr/066924502", + "gnd": "http://d-nb.info/gnd/118754688", } assert result == sources_link(data) assert {} == sources_link({}) @@ -125,16 +131,22 @@ def test_search_link(app, entity_organisation, local_entity_org, entity_topic): # test remote link link = search_link(entity_organisation) - assert link == 'contribution.entity.pids.rero:A027711299 ' \ - 'OR subjects.entity.pids.rero:A027711299' \ - '&simple=0' + assert ( + link == "contribution.entity.pids.rero:A027711299 " + "OR 
subjects.entity.pids.rero:A027711299" + "&simple=0" + ) # test local link link = search_link(local_entity_org) - assert link == 'contribution.entity.pids.local:locent_org ' \ - 'OR subjects.entity.pids.local:locent_org' \ - '&simple=0' + assert ( + link == "contribution.entity.pids.local:locent_org " + "OR subjects.entity.pids.local:locent_org" + "&simple=0" + ) # test Topic link = search_link(entity_topic) - assert link == 'subjects.entity.pids.idref:030752787 ' \ - 'OR genreForm.entity.pids.idref:030752787' \ - '&simple=0' + assert ( + link == "subjects.entity.pids.idref:030752787 " + "OR genreForm.entity.pids.idref:030752787" + "&simple=0" + ) diff --git a/tests/ui/files/test_files_preview.py b/tests/ui/files/test_files_preview.py index 895e2e6447..009402138d 100644 --- a/tests/ui/files/test_files_preview.py +++ b/tests/ui/files/test_files_preview.py @@ -24,29 +24,28 @@ def test_file_preview(client, document_with_files): """Test document detailed view.""" record_file = next(document_with_files.get_records_files()) - files = [ - f for f in record_file.files - if f.endswith('.pdf') or f.endswith('.png') - ] - res = client.get(url_for( - 'invenio_records_ui.recid_preview', - pid_value='foo', - filename='foo.pdf' - )) + files = [f for f in record_file.files if f.endswith(".pdf") or f.endswith(".png")] + res = client.get( + url_for("invenio_records_ui.recid_preview", pid_value="foo", filename="foo.pdf") + ) assert res.status_code == 404 - res = client.get(url_for( - 'invenio_records_ui.recid_preview', - pid_value=record_file['id'], - filename='foo.pdf' - )) + res = client.get( + url_for( + "invenio_records_ui.recid_preview", + pid_value=record_file["id"], + filename="foo.pdf", + ) + ) assert res.status_code == 404 for fname in files: - res = client.get(url_for( - 'invenio_records_ui.recid_preview', - pid_value=record_file['id'], - filename=fname - )) + res = client.get( + url_for( + "invenio_records_ui.recid_preview", + pid_value=record_file["id"], + filename=fname, + ) + ) assert res.status_code == 200 diff --git a/tests/ui/holdings/test_holdings_api.py b/tests/ui/holdings/test_holdings_api.py index 0ddb8bc724..8091f9a320 100644 --- a/tests/ui/holdings/test_holdings_api.py +++ b/tests/ui/holdings/test_holdings_api.py @@ -28,60 +28,73 @@ from rero_ils.modules.holdings.api import Holding, HoldingsSearch from rero_ils.modules.holdings.api import holding_id_fetcher as fetcher from rero_ils.modules.holdings.models import HoldingTypes -from rero_ils.modules.holdings.tasks import \ - delete_standard_holdings_having_no_items +from rero_ils.modules.holdings.tasks import delete_standard_holdings_having_no_items -def test_holding_create(db, search, document, org_martigny, - loc_public_martigny, item_type_standard_martigny, - holding_lib_martigny_data): +def test_holding_create( + db, + search, + document, + org_martigny, + loc_public_martigny, + item_type_standard_martigny, + holding_lib_martigny_data, +): """Test holding creation.""" next_pid = Holding.provider.identifier.next() - holding = Holding.create(holding_lib_martigny_data, dbcommit=True, - reindex=True, delete_pid=True) + holding = Holding.create( + holding_lib_martigny_data, dbcommit=True, reindex=True, delete_pid=True + ) next_pid += 1 assert holding == holding_lib_martigny_data - assert holding.get('pid') == str(next_pid) + assert holding.get("pid") == str(next_pid) holding = Holding.get_record_by_pid(str(next_pid)) assert holding == holding_lib_martigny_data fetched_pid = fetcher(holding.id, holding) assert fetched_pid.pid_value == 
str(next_pid) - assert fetched_pid.pid_type == 'hold' + assert fetched_pid.pid_type == "hold" search = HoldingsSearch() - es_hit = next(search.filter('term', pid=holding.pid).source('pid').scan()) + es_hit = next(search.filter("term", pid=holding.pid).source("pid").scan()) holding_record = Holding.get_record_by_pid(es_hit.pid) - assert holding_record.organisation_pid == org_martigny.get('pid') + assert holding_record.organisation_pid == org_martigny.get("pid") # holdings does not exist - assert not Holding.get_holdings_type_by_holding_pid('toto') + assert not Holding.get_holdings_type_by_holding_pid("toto") # clean created data holding.delete(force=True, dbcommit=True, delindex=True) -def test_holding_holding_type(holding_lib_martigny_w_patterns, - holding_lib_sion_electronic): +def test_holding_holding_type( + holding_lib_martigny_w_patterns, holding_lib_sion_electronic +): """Test holdings type.""" assert holding_lib_martigny_w_patterns.is_serial assert holding_lib_sion_electronic.is_electronic -def test_holding_availability(holding_lib_sion_electronic, - holding_lib_martigny, item_lib_martigny): +def test_holding_availability( + holding_lib_sion_electronic, holding_lib_martigny, item_lib_martigny +): """Test holding availability.""" # An electronic holding is always available even if no items are linked assert holding_lib_sion_electronic.is_available() # The availability of other holdings types depends on children availability - assert holding_lib_martigny.is_available() == \ - item_lib_martigny.is_available() + assert holding_lib_martigny.is_available() == item_lib_martigny.is_available() def test_holding_extended_validation( - client, journal, ebook_5, loc_public_sion, loc_public_martigny, - item_type_standard_martigny, item_type_online_sion, - holding_lib_martigny_w_patterns_data, holding_lib_sion_electronic_data + client, + journal, + ebook_5, + loc_public_sion, + loc_public_martigny, + item_type_standard_martigny, + item_type_online_sion, + holding_lib_martigny_w_patterns_data, + holding_lib_sion_electronic_data, ): """Test holding extended validation.""" serial_holding_data = holding_lib_martigny_w_patterns_data @@ -96,30 +109,30 @@ def test_holding_extended_validation( record.validate() - expected_date = record['patterns']['next_expected_date'] - del record['patterns']['next_expected_date'] + expected_date = record["patterns"]["next_expected_date"] + del record["patterns"]["next_expected_date"] with pytest.raises(ValidationError): record.validate() # reset data with original value - record['patterns']['next_expected_date'] = expected_date + record["patterns"]["next_expected_date"] = expected_date - record.get('notes').append({'type': 'general_note', 'content': 'note'}) + record.get("notes").append({"type": "general_note", "content": "note"}) with pytest.raises(ValidationError) as err: record.validate() - assert 'Can not have multiple notes of the same type' in str(err) - del record['notes'] + assert "Can not have multiple notes of the same type" in str(err) + del record["notes"] - with mock.patch.object(Document, 'get_record_by_pid', - mock.MagicMock(return_value=None)), \ - pytest.raises(ValidationError) as err: + with mock.patch.object( + Document, "get_record_by_pid", mock.MagicMock(return_value=None) + ), pytest.raises(ValidationError) as err: record.validate() - assert 'Document does not exist' in str(err) + assert "Document does not exist" in str(err) - record['holdings_type'] = HoldingTypes.STANDARD - assert record['enumerationAndChronology'] + record["holdings_type"] =
HoldingTypes.STANDARD + assert record["enumerationAndChronology"] with pytest.raises(ValidationError) as err: record.validate() - assert 'is allowed only for serial holdings' in str(err) + assert "is allowed only for serial holdings" in str(err) # TESTING ELECTRONIC HOLDING # 1. instantiate electronic holding @@ -128,19 +141,17 @@ def test_holding_extended_validation( record = Holding.create(electronic_holding_data, delete_pid=True) record.validate() - record['enumerationAndChronology'] = 'enumerationAndChronology' + record["enumerationAndChronology"] = "enumerationAndChronology" record.validate() def test_holding_tasks( - client, holding_lib_martigny, item_lib_martigny, document, - loc_public_saxon): + client, holding_lib_martigny, item_lib_martigny, document, loc_public_saxon +): """Test delete standard holdings with no items attached.""" # move item to a new holdings record by changing its location - item_lib_martigny['location'] = \ - {'$ref': 'https://bib.rero.ch/api/locations/loc3'} - item = item_lib_martigny.update( - item_lib_martigny, dbcommit=True, reindex=True) + item_lib_martigny["location"] = {"$ref": "https://bib.rero.ch/api/locations/loc3"} + item = item_lib_martigny.update(item_lib_martigny, dbcommit=True, reindex=True) holdings_pid = holding_lib_martigny.pid # parent holding has no items and it is not automatically deleted. hold = Holding.get_record_by_pid(holdings_pid) @@ -162,5 +173,5 @@ def test_holdings_properties(holding_lib_martigny_w_patterns, vendor_martigny): assert holding.days_before_first_claim == 7 assert holding.days_before_next_claim == 7 - holding['_masked'] = True + holding["_masked"] = True assert not holding.is_available() diff --git a/tests/ui/holdings/test_holdings_item.py b/tests/ui/holdings/test_holdings_item.py index 4dacc82526..6cab94a102 100644 --- a/tests/ui/holdings/test_holdings_item.py +++ b/tests/ui/holdings/test_holdings_item.py @@ -24,41 +24,48 @@ from copy import deepcopy import pytest -from utils import flush_index -from rero_ils.modules.holdings.api import Holding, HoldingsSearch, \ - get_holdings_by_document_item_type +from rero_ils.modules.holdings.api import ( + Holding, + HoldingsSearch, + get_holdings_by_document_item_type, +) from rero_ils.modules.holdings.views import holding_loan_condition_filter from rero_ils.modules.items.api import Item, ItemsSearch -def test_holding_item_links(client, holding_lib_martigny, item_lib_martigny, - item_lib_martigny_data, document, - item_type_on_site_martigny, loc_public_martigny, - item_lib_saxon_data, loc_public_saxon, - item_type_standard_martigny): +def test_holding_item_links( + client, + holding_lib_martigny, + item_lib_martigny, + item_lib_martigny_data, + document, + item_type_on_site_martigny, + loc_public_martigny, + item_lib_saxon_data, + loc_public_saxon, + item_type_standard_martigny, +): """Test holding and item links.""" item = deepcopy(item_lib_martigny_data) - del item['pid'] - item['barcode'] = 'barcode' + del item["pid"] + item["barcode"] = "barcode" item = Item.create(item, dbcommit=True, reindex=True) - flush_index(HoldingsSearch.Meta.index) + HoldingsSearch.flush_and_refresh() assert item.holding_pid == holding_lib_martigny.pid - assert item.holding_circulation_category_pid == \ - item_type_standard_martigny.pid + assert item.holding_circulation_category_pid == item_type_standard_martigny.pid item2 = deepcopy(item_lib_saxon_data) - del item2['pid'] + del item2["pid"] item2 = Item.create(item2, dbcommit=True, reindex=True) - flush_index(HoldingsSearch.Meta.index) + 
HoldingsSearch.flush_and_refresh() assert item2.holding_pid != holding_lib_martigny.pid holding = Holding.get_record_by_pid(item2.holding_pid) assert holding.document_pid == document.pid assert holding.circulation_category_pid == item_type_standard_martigny.pid - assert Holding.get_document_pid_by_holding_pid(item2.holding_pid) == \ - document.pid + assert Holding.get_document_pid_by_holding_pid(item2.holding_pid) == document.pid holdings = list(Holding.get_holdings_pid_by_document_pid(document.pid)) assert holding_lib_martigny.pid in holdings @@ -66,17 +73,16 @@ def test_holding_item_links(client, holding_lib_martigny, item_lib_martigny, can, reasons = holding_lib_martigny.can_delete assert not can - assert reasons['links']['items'] + assert reasons["links"]["items"] # test loan conditions - assert holding_loan_condition_filter(holding_lib_martigny.pid) == \ - 'standard' + assert holding_loan_condition_filter(holding_lib_martigny.pid) == "standard" with pytest.raises(Exception): - assert holding_loan_condition_filter('no pid') + assert holding_loan_condition_filter("no pid") holdings = get_holdings_by_document_item_type( - document.pid, item_type_standard_martigny.pid) - assert holding_lib_martigny.pid == holdings[1].get('pid') - assert list(holding_lib_martigny.get_items())[1].get('pid') == \ - item_lib_martigny.pid + document.pid, item_type_standard_martigny.pid + ) + assert holding_lib_martigny.pid == holdings[1].get("pid") + assert list(holding_lib_martigny.get_items())[1].get("pid") == item_lib_martigny.pid holding_lib_martigny.delete_from_index() assert not holding_lib_martigny.delete_from_index() @@ -87,23 +93,24 @@ def test_holding_item_links(client, holding_lib_martigny, item_lib_martigny, def test_holding_delete_after_item_deletion( - client, holding_lib_martigny, item_lib_martigny): + client, holding_lib_martigny, item_lib_martigny +): """Test automatic holding delete after deleting last item.""" for pid in Item.get_all_pids(): if pid != item_lib_martigny.pid: item = Item.get_record_by_pid(pid) Item.delete(item, dbcommit=True, delindex=True) - flush_index(ItemsSearch.Meta.index) + ItemsSearch.flush_and_refresh() pid = holding_lib_martigny.pid holding = Holding.get_record_by_pid(pid) can, reasons = holding.can_delete assert not can - assert reasons['links']['items'] + assert reasons["links"]["items"] item_lib_martigny.delete(dbcommit=True, delindex=True) - flush_index(ItemsSearch.Meta.index) + ItemsSearch.flush_and_refresh() pid = holding_lib_martigny.pid holding = Holding.get_record_by_pid(pid) @@ -111,14 +118,14 @@ def test_holding_delete_after_item_deletion( def test_holding_delete_after_item_edition( - client, holding_lib_saxon, item_lib_saxon, holding_lib_fully): + client, holding_lib_saxon, item_lib_saxon, holding_lib_fully +): """Test automatic holding delete after item edition.""" - item_lib_saxon['location'] = \ - {'$ref': 'https://bib.rero.ch/api/locations/loc5'} + item_lib_saxon["location"] = {"$ref": "https://bib.rero.ch/api/locations/loc5"} item_lib_saxon.update(item_lib_saxon, dbcommit=True, reindex=True) - flush_index(ItemsSearch.Meta.index) + ItemsSearch.flush_and_refresh() item = Item.get_record_by_pid(item_lib_saxon.pid) assert item.holding_pid == holding_lib_fully.pid diff --git a/tests/ui/holdings/test_holdings_jsonresolver.py b/tests/ui/holdings/test_holdings_jsonresolver.py index 10fc5fa885..782804a7ac 100644 --- a/tests/ui/holdings/test_holdings_jsonresolver.py +++ b/tests/ui/holdings/test_holdings_jsonresolver.py @@ -25,12 +25,10 @@ def 
test_holdings_jsonresolver(holding_lib_martigny): """Test holding json resolver.""" - rec = Record.create({ - 'holding': {'$ref': 'https://bib.rero.ch/api/holdings/holding1'} - }) - assert rec.replace_refs().get('holding') == { - 'type': 'hold', 'pid': 'holding1' - } + rec = Record.create( + {"holding": {"$ref": "https://bib.rero.ch/api/holdings/holding1"}} + ) + assert rec.replace_refs().get("holding") == {"type": "hold", "pid": "holding1"} # deleted record holding_lib_martigny.delete() @@ -38,8 +36,6 @@ def test_holdings_jsonresolver(holding_lib_martigny): type(rec)(rec.replace_refs()).dumps() # non existing record - rec = Record.create({ - 'holding': {'$ref': 'https://bib.rero.ch/api/holdings/n_e'} - }) + rec = Record.create({"holding": {"$ref": "https://bib.rero.ch/api/holdings/n_e"}}) with pytest.raises(JsonRefError): type(rec)(rec.replace_refs()).dumps() diff --git a/tests/ui/holdings/test_holdings_mapping.py b/tests/ui/holdings/test_holdings_mapping.py index 7267013358..c3f671f3ec 100644 --- a/tests/ui/holdings/test_holdings_mapping.py +++ b/tests/ui/holdings/test_holdings_mapping.py @@ -21,18 +21,20 @@ from rero_ils.modules.holdings.api import Holding, HoldingsSearch -def test_holding_es_mapping(search, db, loc_public_martigny, - item_type_standard_martigny, - document, holding_lib_martigny_data): +def test_holding_es_mapping( + search, + db, + loc_public_martigny, + item_type_standard_martigny, + document, + holding_lib_martigny_data, +): """Test holding elasticsearch mapping.""" search = HoldingsSearch() mapping = get_mapping(search.Meta.index) assert mapping holding = Holding.create( - holding_lib_martigny_data, - dbcommit=True, - reindex=True, - delete_pid=True + holding_lib_martigny_data, dbcommit=True, reindex=True, delete_pid=True ) assert mapping == get_mapping(search.Meta.index) # clean created data diff --git a/tests/ui/holdings/test_holdings_patterns.py b/tests/ui/holdings/test_holdings_patterns.py index 381e04e731..00dc446cff 100644 --- a/tests/ui/holdings/test_holdings_patterns.py +++ b/tests/ui/holdings/test_holdings_patterns.py @@ -28,7 +28,6 @@ import pytest from invenio_accounts.testutils import login_user_via_session from jsonschema.exceptions import ValidationError -from utils import flush_index from rero_ils.modules.holdings.api import Holding from rero_ils.modules.holdings.models import HoldingNoteTypes @@ -36,8 +35,7 @@ from rero_ils.modules.items.models import ItemIssueStatus, ItemStatus -def test_patterns_functions(holding_lib_martigny_w_patterns, - holding_lib_martigny): +def test_patterns_functions(holding_lib_martigny_w_patterns, holding_lib_martigny): """Test holdings patterns functions.""" # test no prediction for standard holdings record assert not holding_lib_martigny.increment_next_prediction() @@ -45,38 +43,38 @@ def test_patterns_functions(holding_lib_martigny_w_patterns, assert not holding_lib_martigny.prediction_issues_preview(1) holding = holding_lib_martigny_w_patterns - old_template = holding.get('patterns').get('template') + old_template = holding.get("patterns").get("template") # test invalid syntax for pattern templates - template = 'no {{first_chronology.level_1}' - holding['patterns']['template'] = template + template = "no {{first_chronology.level_1}" + holding["patterns"]["template"] = template with pytest.raises(jinja2.exceptions.TemplateSyntaxError): assert holding.next_issue_display_text - template = 'no {{unknown_chronology.level_1}}' - holding['patterns']['template'] = template + template = "no {{unknown_chronology.level_1}}" + 
holding["patterns"]["template"] = template with pytest.raises(jinja2.exceptions.UndefinedError): assert holding.next_issue_display_text - holding['patterns']['template'] = old_template + holding["patterns"]["template"] = old_template def test_patterns_quarterly_one_level(holding_lib_martigny_w_patterns): """Test holdings patterns annual two levels.""" holding = holding_lib_martigny_w_patterns # test first issue - assert holding.next_issue_display_text == 'no 61 mars 2020' + assert holding.next_issue_display_text == "no 61 mars 2020" holding.increment_next_prediction() - assert holding.next_issue_display_text == 'no 62 juin 2020' + assert holding.next_issue_display_text == "no 62 juin 2020" for _ in range(11): holding.increment_next_prediction() - assert holding.next_issue_display_text == 'no 73 mars 2023' + assert holding.next_issue_display_text == "no 73 mars 2023" # test preview issues = holding.prediction_issues_preview(13) - assert issues[-1]['issue'] == 'no 85 mars 2026' + assert issues[-1]["issue"] == "no 85 mars 2026" # test expected date new_holding = deepcopy(holding_lib_martigny_w_patterns) - template = '{{expected_date.day}} {{expected_date.month}}' - new_holding['patterns']['template'] = template - assert new_holding.next_issue_display_text == '1 3' + template = "{{expected_date.day}} {{expected_date.month}}" + new_holding["patterns"]["template"] = template + assert new_holding.next_issue_display_text == "1 3" def test_receive_regular_issue(holding_lib_martigny_w_patterns, tomorrow): @@ -84,507 +82,475 @@ def test_receive_regular_issue(holding_lib_martigny_w_patterns, tomorrow): holding = holding_lib_martigny_w_patterns assert holding.is_serial issue = holding.create_regular_issue( - status=ItemIssueStatus.RECEIVED, - dbcommit=True, - reindex=True + status=ItemIssueStatus.RECEIVED, dbcommit=True, reindex=True ) # ItemsSearch.flush_and_refresh() # test holdings call number inheriting - assert issue.issue_inherited_first_call_number == \ - holding.get('call_number') - assert issue.issue_inherited_second_call_number == \ - holding.get('second_call_number') - assert ItemsSearch() \ - .filter('term', issue__inherited_first_call_number__raw='h00005') \ - .count() == 1 - assert ItemsSearch() \ - .filter('term', issue__inherited_second_call_number__raw='h00005_2') \ - .count() == 1 - assert ItemsSearch() \ - .filter('term', call_numbers__raw='h00005').count() == 1 - assert ItemsSearch() \ - .filter('term', call_numbers__raw='h00005_2').count() == 1 - - assert list(holding.get_items())[0].get('pid') == issue.pid + assert issue.issue_inherited_first_call_number == holding.get("call_number") + assert issue.issue_inherited_second_call_number == holding.get("second_call_number") + assert ( + ItemsSearch() + .filter("term", issue__inherited_first_call_number__raw="h00005") + .count() + == 1 + ) + assert ( + ItemsSearch() + .filter("term", issue__inherited_second_call_number__raw="h00005_2") + .count() + == 1 + ) + assert ItemsSearch().filter("term", call_numbers__raw="h00005").count() == 1 + assert ItemsSearch().filter("term", call_numbers__raw="h00005_2").count() == 1 + + assert list(holding.get_items())[0].get("pid") == issue.pid assert issue.location_pid == holding.location_pid assert issue.item_type_pid == holding.circulation_category_pid assert issue.document_pid == holding.document_pid assert issue.holding_pid == holding.pid - assert issue.get('status') == ItemStatus.ON_SHELF - assert issue.item_record_type == 'issue' + assert issue.get("status") == ItemStatus.ON_SHELF + assert 
issue.item_record_type == "issue" assert issue.organisation_pid == holding.organisation_pid - assert issue.get('issue', {}).get('regular') + assert issue.get("issue", {}).get("regular") assert issue.issue_status == ItemIssueStatus.RECEIVED - assert issue.expected_date == '2023-03-01' - assert issue.get('enumerationAndChronology') == 'no 73 mars 2023' - assert issue.received_date == datetime.now().strftime('%Y-%m-%d') + assert issue.expected_date == "2023-03-01" + assert issue.get("enumerationAndChronology") == "no 73 mars 2023" + assert issue.received_date == datetime.now().strftime("%Y-%m-%d") issue_status_date = ciso8601.parse_datetime(issue.issue_status_date) - assert issue_status_date.strftime('%Y-%m-%d') == \ - datetime.now().strftime('%Y-%m-%d') + assert issue_status_date.strftime("%Y-%m-%d") == datetime.now().strftime("%Y-%m-%d") # test change status_date with status changes - issue.expected_date = tomorrow.strftime('%Y-%m-%d') + issue.expected_date = tomorrow.strftime("%Y-%m-%d") issue.issue_status = ItemIssueStatus.LATE new_issue = issue.update(issue, dbcommit=True, reindex=True) assert not new_issue.received_date # As we choose a future expected date, the issue status should be # automatically changed to `expected` assert new_issue.issue_status == ItemIssueStatus.EXPECTED - new_issue_status_date = ciso8601.parse_datetime( - new_issue.issue_status_date) + new_issue_status_date = ciso8601.parse_datetime(new_issue.issue_status_date) assert new_issue_status_date > issue_status_date holding = Holding.get_record_by_pid(holding.pid) issue = holding.create_regular_issue( - status=ItemIssueStatus.RECEIVED, - dbcommit=True, - reindex=True + status=ItemIssueStatus.RECEIVED, dbcommit=True, reindex=True ) - assert issue.get('issue', {}).get('regular') + assert issue.get("issue", {}).get("regular") assert issue.issue_status == ItemIssueStatus.RECEIVED - assert issue.expected_date == '2020-06-01' - assert issue.get('enumerationAndChronology') == 'no 62 juin 2020' - assert issue.received_date == datetime.now().strftime('%Y-%m-%d') + assert issue.expected_date == "2020-06-01" + assert issue.get("enumerationAndChronology") == "no 62 juin 2020" + assert issue.received_date == datetime.now().strftime("%Y-%m-%d") # test create customized regular issue record = { - 'issue': { - 'regular': True, - 'status': ItemIssueStatus.RECEIVED, - 'expected_date': datetime.now().strftime('%Y-%m-%d'), - 'received_date': datetime.now().strftime('%Y-%m-%d') + "issue": { + "regular": True, + "status": ItemIssueStatus.RECEIVED, + "expected_date": datetime.now().strftime("%Y-%m-%d"), + "received_date": datetime.now().strftime("%Y-%m-%d"), }, - 'enumerationAndChronology': 'free_text' + "enumerationAndChronology": "free_text", } holding = Holding.get_record_by_pid(holding.pid) issue = holding.create_regular_issue( - status=ItemIssueStatus.RECEIVED, - item=record, - dbcommit=True, - reindex=True + status=ItemIssueStatus.RECEIVED, item=record, dbcommit=True, reindex=True ) - assert issue.get('issue', {}).get('regular') + assert issue.get("issue", {}).get("regular") assert issue.issue_status == ItemIssueStatus.RECEIVED - assert issue.expected_date == datetime.now().strftime('%Y-%m-%d') - assert issue.get('enumerationAndChronology') == 'free_text' - assert issue.received_date == datetime.now().strftime('%Y-%m-%d') + assert issue.expected_date == datetime.now().strftime("%Y-%m-%d") + assert issue.get("enumerationAndChronology") == "free_text" + assert issue.received_date == datetime.now().strftime("%Y-%m-%d") def 
test_patterns_yearly_one_level( - holding_lib_martigny_w_patterns, - pattern_yearly_one_level_data): + holding_lib_martigny_w_patterns, pattern_yearly_one_level_data +): """Test pattern yearly one level.""" holding = holding_lib_martigny_w_patterns holding = Holding.get_record_by_pid(holding.pid) - holding['patterns'] = pattern_yearly_one_level_data['patterns'] + holding["patterns"] = pattern_yearly_one_level_data["patterns"] # test first issue - assert holding.next_issue_display_text == '82 2020' + assert holding.next_issue_display_text == "82 2020" holding.increment_next_prediction() - assert holding.next_issue_display_text == '83 2021' + assert holding.next_issue_display_text == "83 2021" for _ in range(25): holding.increment_next_prediction() - assert holding.next_issue_display_text == '108 2046' + assert holding.next_issue_display_text == "108 2046" # test preview issues = holding.prediction_issues_preview(13) - assert issues[-1]['issue'] == '120 2058' + assert issues[-1]["issue"] == "120 2058" def test_patterns_yearly_one_level_with_label( - holding_lib_martigny_w_patterns, - pattern_yearly_one_level_with_label_data): + holding_lib_martigny_w_patterns, pattern_yearly_one_level_with_label_data +): """Test pattern yearly one level with label.""" holding = holding_lib_martigny_w_patterns holding = Holding.get_record_by_pid(holding.pid) - holding['patterns'] = pattern_yearly_one_level_with_label_data['patterns'] + holding["patterns"] = pattern_yearly_one_level_with_label_data["patterns"] # test first issue - assert holding.next_issue_display_text == '29 Edition 2020' + assert holding.next_issue_display_text == "29 Edition 2020" holding.increment_next_prediction() - assert holding.next_issue_display_text == '30 Edition 2021' + assert holding.next_issue_display_text == "30 Edition 2021" for _ in range(25): holding.increment_next_prediction() - assert holding.next_issue_display_text == '55 Edition 2046' + assert holding.next_issue_display_text == "55 Edition 2046" # test preview issues = holding.prediction_issues_preview(13) - assert issues[-1]['issue'] == '67 Edition 2058' + assert issues[-1]["issue"] == "67 Edition 2058" def test_patterns_yearly_two_times( - holding_lib_martigny_w_patterns, - pattern_yearly_two_times_data): + holding_lib_martigny_w_patterns, pattern_yearly_two_times_data +): """Test pattern yearly two times.""" holding = holding_lib_martigny_w_patterns holding = Holding.get_record_by_pid(holding.pid) - holding['patterns'] = pattern_yearly_two_times_data['patterns'] + holding["patterns"] = pattern_yearly_two_times_data["patterns"] # test first issue - assert holding.next_issue_display_text == 'Jg. 8 Nov. 2019' + assert holding.next_issue_display_text == "Jg. 8 Nov. 2019" holding.increment_next_prediction() - assert holding.next_issue_display_text == 'Jg. 9 März 2020' + assert holding.next_issue_display_text == "Jg. 9 März 2020" for _ in range(25): holding.increment_next_prediction() - assert holding.next_issue_display_text == 'Jg. 21 Nov. 2032' + assert holding.next_issue_display_text == "Jg. 21 Nov. 2032" # test preview issues = holding.prediction_issues_preview(13) - assert issues[-1]['issue'] == 'Jg. 27 Nov. 2038' + assert issues[-1]["issue"] == "Jg. 27 Nov. 
2038" def test_patterns_quarterly_two_levels( - holding_lib_martigny_w_patterns, - pattern_quarterly_two_levels_data): + holding_lib_martigny_w_patterns, pattern_quarterly_two_levels_data +): """Test pattern quarterly_two_levels.""" holding = holding_lib_martigny_w_patterns holding = Holding.get_record_by_pid(holding.pid) - holding['patterns'] = pattern_quarterly_two_levels_data['patterns'] + holding["patterns"] = pattern_quarterly_two_levels_data["patterns"] # test first issue - assert holding.next_issue_display_text == 'Jg. 20 Heft 1 2020' + assert holding.next_issue_display_text == "Jg. 20 Heft 1 2020" holding.increment_next_prediction() - assert holding.next_issue_display_text == 'Jg. 20 Heft 2 2020' + assert holding.next_issue_display_text == "Jg. 20 Heft 2 2020" for _ in range(25): holding.increment_next_prediction() - assert holding.next_issue_display_text == 'Jg. 26 Heft 3 2026' + assert holding.next_issue_display_text == "Jg. 26 Heft 3 2026" # test preview issues = holding.prediction_issues_preview(13) - assert issues[-1]['issue'] == 'Jg. 29 Heft 3 2029' + assert issues[-1]["issue"] == "Jg. 29 Heft 3 2029" def test_patterns_quarterly_two_levels_with_season( - holding_lib_martigny_w_patterns, - pattern_quarterly_two_levels_with_season_data): + holding_lib_martigny_w_patterns, pattern_quarterly_two_levels_with_season_data +): """Test pattern quarterly_two_levels_with_season.""" holding = holding_lib_martigny_w_patterns holding = Holding.get_record_by_pid(holding.pid) - holding['patterns'] = \ - pattern_quarterly_two_levels_with_season_data['patterns'] + holding["patterns"] = pattern_quarterly_two_levels_with_season_data["patterns"] # test first issue - assert holding.next_issue_display_text == \ - 'année 2019 no 277 printemps 2018' + assert holding.next_issue_display_text == "année 2019 no 277 printemps 2018" holding.increment_next_prediction() - assert holding.next_issue_display_text == 'année 2019 no 278 été 2018' + assert holding.next_issue_display_text == "année 2019 no 278 été 2018" for _ in range(25): holding.increment_next_prediction() - assert holding.next_issue_display_text == \ - 'année 2025 no 303 automne 2024' + assert holding.next_issue_display_text == "année 2025 no 303 automne 2024" # test preview issues = holding.prediction_issues_preview(13) - assert issues[-1]['issue'] == 'année 2028 no 315 automne 2027' + assert issues[-1]["issue"] == "année 2028 no 315 automne 2027" def test_patterns_half_yearly_one_level( - holding_lib_martigny_w_patterns, - pattern_half_yearly_one_level_data): + holding_lib_martigny_w_patterns, pattern_half_yearly_one_level_data +): """Test pattern half_yearly_one_level.""" holding = holding_lib_martigny_w_patterns holding = Holding.get_record_by_pid(holding.pid) - holding['patterns'] = \ - pattern_half_yearly_one_level_data['patterns'] + holding["patterns"] = pattern_half_yearly_one_level_data["patterns"] # test first issue - assert holding.next_issue_display_text == 'N˚ 48 printemps 2019' + assert holding.next_issue_display_text == "N˚ 48 printemps 2019" holding.increment_next_prediction() - assert holding.next_issue_display_text == 'N˚ 49 automne 2019' + assert holding.next_issue_display_text == "N˚ 49 automne 2019" for _ in range(13): holding.increment_next_prediction() - assert holding.next_issue_display_text == 'N˚ 62 printemps 2026' + assert holding.next_issue_display_text == "N˚ 62 printemps 2026" # test preview issues = holding.prediction_issues_preview(13) - assert issues[-1]['issue'] == 'N˚ 74 printemps 2032' + assert 
issues[-1]["issue"] == "N˚ 74 printemps 2032" def test_patterns_bimonthly_every_two_months_one_level( - holding_lib_martigny_w_patterns, - pattern_bimonthly_every_two_months_one_level_data): + holding_lib_martigny_w_patterns, pattern_bimonthly_every_two_months_one_level_data +): """Test pattern quarterly_two_levels.""" holding = holding_lib_martigny_w_patterns holding = Holding.get_record_by_pid(holding.pid) - holding['patterns'] = \ - pattern_bimonthly_every_two_months_one_level_data['patterns'] + holding["patterns"] = pattern_bimonthly_every_two_months_one_level_data["patterns"] # test first issue - assert holding.next_issue_display_text == '47 jan./fév. 2020' + assert holding.next_issue_display_text == "47 jan./fév. 2020" holding.increment_next_prediction() - assert holding.next_issue_display_text == '48 mars/avril 2020' + assert holding.next_issue_display_text == "48 mars/avril 2020" for _ in range(25): holding.increment_next_prediction() - assert holding.next_issue_display_text == '73 mai/juin 2024' + assert holding.next_issue_display_text == "73 mai/juin 2024" # test preview issues = holding.prediction_issues_preview(13) - assert issues[-1]['issue'] == '85 mai/juin 2026' + assert issues[-1]["issue"] == "85 mai/juin 2026" def test_patterns_half_yearly_two_levels( - holding_lib_martigny_w_patterns, - pattern_half_yearly_two_levels_data): + holding_lib_martigny_w_patterns, pattern_half_yearly_two_levels_data +): """Test pattern half_yearly_two_levels.""" holding = holding_lib_martigny_w_patterns holding = Holding.get_record_by_pid(holding.pid) - holding['patterns'] = \ - pattern_half_yearly_two_levels_data['patterns'] + holding["patterns"] = pattern_half_yearly_two_levels_data["patterns"] # test first issue - assert holding.next_issue_display_text == 'Année 30 no 84 June 2020' + assert holding.next_issue_display_text == "Année 30 no 84 June 2020" holding.increment_next_prediction() - assert holding.next_issue_display_text == 'Année 30 no 85 Dec. 2020' + assert holding.next_issue_display_text == "Année 30 no 85 Dec. 2020" for _ in range(25): holding.increment_next_prediction() - assert holding.next_issue_display_text == 'Année 43 no 110 June 2033' + assert holding.next_issue_display_text == "Année 43 no 110 June 2033" # test preview issues = holding.prediction_issues_preview(13) - assert issues[-1]['issue'] == 'Année 49 no 122 June 2039' + assert issues[-1]["issue"] == "Année 49 no 122 June 2039" def test_bimonthly_every_two_months_two_levels( - holding_lib_martigny_w_patterns, - pattern_bimonthly_every_two_months_two_levels_data): + holding_lib_martigny_w_patterns, pattern_bimonthly_every_two_months_two_levels_data +): """Test pattern bimonthly_every_two_months_two_levels.""" holding = holding_lib_martigny_w_patterns holding = Holding.get_record_by_pid(holding.pid) - holding['patterns'] = \ - pattern_bimonthly_every_two_months_two_levels_data['patterns'] + holding["patterns"] = pattern_bimonthly_every_two_months_two_levels_data["patterns"] # test first issue - assert holding.next_issue_display_text == 'Jg 51 Nr 1 Jan. 2020' + assert holding.next_issue_display_text == "Jg 51 Nr 1 Jan. 
2020" holding.increment_next_prediction() - assert holding.next_issue_display_text == 'Jg 51 Nr 2 März 2020' + assert holding.next_issue_display_text == "Jg 51 Nr 2 März 2020" for _ in range(25): holding.increment_next_prediction() - assert holding.next_issue_display_text == 'Jg 55 Nr 3 Mai 2024' + assert holding.next_issue_display_text == "Jg 55 Nr 3 Mai 2024" # test preview issues = holding.prediction_issues_preview(13) - assert issues[-1]['issue'] == 'Jg 57 Nr 3 Mai 2026' + assert issues[-1]["issue"] == "Jg 57 Nr 3 Mai 2026" def test_holding_validate_next_expected_date( - client, librarian_martigny, - journal, loc_public_sion, item_type_internal_sion, document, - pattern_yearly_two_times_data, json_header, - holding_lib_sion_w_patterns_data): + client, + librarian_martigny, + journal, + loc_public_sion, + item_type_internal_sion, + document, + pattern_yearly_two_times_data, + json_header, + holding_lib_sion_w_patterns_data, +): """Test create holding with regular frequency and missing the next_expected_date. """ login_user_via_session(client, librarian_martigny.user) holding = holding_lib_sion_w_patterns_data - holding['holdings_type'] = 'serial' - holding['patterns'] = \ - pattern_yearly_two_times_data['patterns'] - del holding['pid'] - del holding['patterns']['next_expected_date'] + holding["holdings_type"] = "serial" + holding["patterns"] = pattern_yearly_two_times_data["patterns"] + del holding["pid"] + del holding["patterns"]["next_expected_date"] # test will fail when the serial holding has no field # next_expected_date for the regular frequency with pytest.raises(ValidationError): - Holding.create( - data=holding, - delete_pid=False, - dbcommit=True, - reindex=True) + Holding.create(data=holding, delete_pid=False, dbcommit=True, reindex=True) def test_intervals_and_expected_dates(holding_lib_martigny_w_patterns): """Test expected dates and intervals for holdings patterns.""" holding = holding_lib_martigny_w_patterns patterns = { - 'template': '{{first_chronology.level_1}}', - 'next_expected_date': '2020-01-05', - 'values': [ - { - 'name': 'first_chronology', - 'levels': [ + "template": "{{first_chronology.level_1}}", + "next_expected_date": "2020-01-05", + "values": [ { - 'number_name': 'level_1', - 'starting_value': 1 + "name": "first_chronology", + "levels": [{"number_name": "level_1", "starting_value": 1}], } - ] - } - ] + ], } - holding['patterns'] = patterns + holding["patterns"] = patterns def update_pattern(holding, frequency): """update holdings patterns with a new frequency.""" - holding['patterns']['frequency'] = frequency + holding["patterns"]["frequency"] = frequency holding.update(holding, dbcommit=True, reindex=True) # test daily pattern - update_pattern(holding, 'rdafr:1001') + update_pattern(holding, "rdafr:1001") issues = holding.prediction_issues_preview(13) previous_expected_date = None for issue in issues: - expected_date = datetime.strptime( - issue.get('expected_date'), '%Y-%m-%d') + expected_date = datetime.strptime(issue.get("expected_date"), "%Y-%m-%d") if previous_expected_date: interval = expected_date - previous_expected_date assert divmod(interval.days, 1)[0] == 1 previous_expected_date = expected_date # test three times a week pattern - update_pattern(holding, 'rdafr:1002') + update_pattern(holding, "rdafr:1002") issues = holding.prediction_issues_preview(13) previous_expected_date = None for issue in issues: - expected_date = datetime.strptime( - issue.get('expected_date'), '%Y-%m-%d') + expected_date = datetime.strptime(issue.get("expected_date"), 
"%Y-%m-%d") if previous_expected_date: interval = expected_date - previous_expected_date assert divmod(interval.days, 1)[0] == 2 previous_expected_date = expected_date # test Biweekly pattern - update_pattern(holding, 'rdafr:1003') + update_pattern(holding, "rdafr:1003") issues = holding.prediction_issues_preview(13) previous_expected_date = None for issue in issues: - expected_date = datetime.strptime( - issue.get('expected_date'), '%Y-%m-%d') + expected_date = datetime.strptime(issue.get("expected_date"), "%Y-%m-%d") if previous_expected_date: interval = expected_date - previous_expected_date assert divmod(interval.days, 1)[0] == 14 previous_expected_date = expected_date # test Weekly pattern - update_pattern(holding, 'rdafr:1004') + update_pattern(holding, "rdafr:1004") issues = holding.prediction_issues_preview(13) previous_expected_date = None for issue in issues: - expected_date = datetime.strptime( - issue.get('expected_date'), '%Y-%m-%d') + expected_date = datetime.strptime(issue.get("expected_date"), "%Y-%m-%d") if previous_expected_date: interval = expected_date - previous_expected_date assert divmod(interval.days, 1)[0] == 7 previous_expected_date = expected_date # test Semiweekly pattern - update_pattern(holding, 'rdafr:1005') + update_pattern(holding, "rdafr:1005") issues = holding.prediction_issues_preview(13) previous_expected_date = None for issue in issues: - expected_date = datetime.strptime( - issue.get('expected_date'), '%Y-%m-%d') + expected_date = datetime.strptime(issue.get("expected_date"), "%Y-%m-%d") if previous_expected_date: interval = expected_date - previous_expected_date assert 3 <= divmod(interval.days, 1)[0] <= 4 previous_expected_date = expected_date # test Three times a month pattern - update_pattern(holding, 'rdafr:1006') + update_pattern(holding, "rdafr:1006") issues = holding.prediction_issues_preview(13) previous_expected_date = None for issue in issues: - expected_date = datetime.strptime( - issue.get('expected_date'), '%Y-%m-%d') + expected_date = datetime.strptime(issue.get("expected_date"), "%Y-%m-%d") if previous_expected_date: interval = expected_date - previous_expected_date assert 11 > divmod(interval.days, 1)[0] > 7 previous_expected_date = expected_date # test Bimonthly pattern - update_pattern(holding, 'rdafr:1007') + update_pattern(holding, "rdafr:1007") issues = holding.prediction_issues_preview(13) previous_expected_date = None for issue in issues: - expected_date = datetime.strptime( - issue.get('expected_date'), '%Y-%m-%d') + expected_date = datetime.strptime(issue.get("expected_date"), "%Y-%m-%d") if previous_expected_date: interval = expected_date - previous_expected_date assert 57 < divmod(interval.days, 1)[0] <= 62 previous_expected_date = expected_date # test Monthly pattern - update_pattern(holding, 'rdafr:1008') + update_pattern(holding, "rdafr:1008") issues = holding.prediction_issues_preview(13) previous_expected_date = None for issue in issues: - expected_date = datetime.strptime( - issue.get('expected_date'), '%Y-%m-%d') + expected_date = datetime.strptime(issue.get("expected_date"), "%Y-%m-%d") if previous_expected_date: interval = expected_date - previous_expected_date assert 27 < divmod(interval.days, 1)[0] < 32 previous_expected_date = expected_date # test Semimonthly pattern - update_pattern(holding, 'rdafr:1009') + update_pattern(holding, "rdafr:1009") issues = holding.prediction_issues_preview(13) previous_expected_date = None for issue in issues: - expected_date = datetime.strptime( - issue.get('expected_date'), 
'%Y-%m-%d') + expected_date = datetime.strptime(issue.get("expected_date"), "%Y-%m-%d") if previous_expected_date: interval = expected_date - previous_expected_date assert 13 < divmod(interval.days, 1)[0] < 16 previous_expected_date = expected_date # test Quarterly pattern - update_pattern(holding, 'rdafr:1010') + update_pattern(holding, "rdafr:1010") issues = holding.prediction_issues_preview(13) previous_expected_date = None for issue in issues: - expected_date = datetime.strptime( - issue.get('expected_date'), '%Y-%m-%d') + expected_date = datetime.strptime(issue.get("expected_date"), "%Y-%m-%d") if previous_expected_date: interval = expected_date - previous_expected_date assert 84 < divmod(interval.days, 1)[0] < 94 previous_expected_date = expected_date # test Three times a year pattern - update_pattern(holding, 'rdafr:1011') + update_pattern(holding, "rdafr:1011") issues = holding.prediction_issues_preview(13) previous_expected_date = None for issue in issues: - expected_date = datetime.strptime( - issue.get('expected_date'), '%Y-%m-%d') + expected_date = datetime.strptime(issue.get("expected_date"), "%Y-%m-%d") if previous_expected_date: interval = expected_date - previous_expected_date assert 112 < divmod(interval.days, 1)[0] < 125 previous_expected_date = expected_date # test Semiannual pattern - update_pattern(holding, 'rdafr:1012') + update_pattern(holding, "rdafr:1012") issues = holding.prediction_issues_preview(13) previous_expected_date = None for issue in issues: - expected_date = datetime.strptime( - issue.get('expected_date'), '%Y-%m-%d') + expected_date = datetime.strptime(issue.get("expected_date"), "%Y-%m-%d") if previous_expected_date: interval = expected_date - previous_expected_date assert 177 < divmod(interval.days, 1)[0] < 207 previous_expected_date = expected_date # test annual pattern - update_pattern(holding, 'rdafr:1013') + update_pattern(holding, "rdafr:1013") issues = holding.prediction_issues_preview(13) previous_expected_date = None for issue in issues: - expected_date = datetime.strptime( - issue.get('expected_date'), '%Y-%m-%d') + expected_date = datetime.strptime(issue.get("expected_date"), "%Y-%m-%d") if previous_expected_date: interval = expected_date - previous_expected_date assert 364 <= divmod(interval.days, 1)[0] <= 366 previous_expected_date = expected_date # test Biennial pattern - update_pattern(holding, 'rdafr:1014') + update_pattern(holding, "rdafr:1014") issues = holding.prediction_issues_preview(13) previous_expected_date = None for issue in issues: - expected_date = datetime.strptime( - issue.get('expected_date'), '%Y-%m-%d') + expected_date = datetime.strptime(issue.get("expected_date"), "%Y-%m-%d") if previous_expected_date: interval = expected_date - previous_expected_date assert 725 < divmod(interval.days, 1)[0] < 733 previous_expected_date = expected_date # test Triennial pattern - update_pattern(holding, 'rdafr:1015') + update_pattern(holding, "rdafr:1015") issues = holding.prediction_issues_preview(13) previous_expected_date = None for issue in issues: - expected_date = datetime.strptime( - issue.get('expected_date'), '%Y-%m-%d') + expected_date = datetime.strptime(issue.get("expected_date"), "%Y-%m-%d") if previous_expected_date: interval = expected_date - previous_expected_date assert 1092 < divmod(interval.days, 1)[0] < 1099 previous_expected_date = expected_date -def test_holding_notes(client, librarian_martigny, - holding_lib_martigny_w_patterns, json_header): +def test_holding_notes( + client, librarian_martigny, 
holding_lib_martigny_w_patterns, json_header +): """Test holdings notes.""" holding = holding_lib_martigny_w_patterns @@ -594,20 +560,20 @@ def test_holding_notes(client, librarian_martigny, assert len(holding.notes) == 1 # add other note types - holding['notes'] = [ - {'type': HoldingNoteTypes.STAFF, 'content': 'Staff note'}, - {'type': HoldingNoteTypes.CLAIM, 'content': 'Claim note'} + holding["notes"] = [ + {"type": HoldingNoteTypes.STAFF, "content": "Staff note"}, + {"type": HoldingNoteTypes.CLAIM, "content": "Claim note"}, ] holding.update(holding, dbcommit=True, reindex=True) assert len(holding.notes) == 2 # a validation error is raised when adding a note type that already exists - holding['notes'].append( - {'type': HoldingNoteTypes.CLAIM, 'content': 'new cliam note'} + holding["notes"].append( + {"type": HoldingNoteTypes.CLAIM, "content": "new claim note"} ) with pytest.raises(ValidationError): holding.update(holding, dbcommit=True, reindex=True) - holding['notes'] = holding.notes[:-1] + holding["notes"] = holding.notes[:-1] # get a specific type of notes # --> staff : should return a note @@ -615,23 +581,22 @@ def test_holding_notes(client, librarian_martigny, # --> dummy : should never return something ! assert holding.get_note(HoldingNoteTypes.STAFF) assert holding.get_note(HoldingNoteTypes.ROUTING) is None - assert holding.get_note('dummy') is None + assert holding.get_note("dummy") is None def test_regular_issue_creation_update_delete_api( - client, holding_lib_martigny_w_patterns, loc_public_martigny, - lib_martigny): + client, holding_lib_martigny_w_patterns, loc_public_martigny, lib_martigny +): """Test create, update and delete of a regular issue API.""" holding = holding_lib_martigny_w_patterns issue_display, expected_date = holding._get_next_issue_display_text( - holding.get('patterns')) + holding.get("patterns") + ) issue = holding.create_regular_issue( - status=ItemIssueStatus.RECEIVED, - dbcommit=True, - reindex=True + status=ItemIssueStatus.RECEIVED, dbcommit=True, reindex=True ) issue_pid = issue.pid # flush index to prevent ES conflicts on delete - flush_index(ItemsSearch.Meta.index) + ItemsSearch.flush_and_refresh() assert holding.delete(dbcommit=True, delindex=True) assert not Item.get_record_by_pid(issue_pid) diff --git a/tests/ui/holdings/test_issues_reindex.py b/tests/ui/holdings/test_issues_reindex.py index fdb813777e..2e515fab39 100644 --- a/tests/ui/holdings/test_issues_reindex.py +++ b/tests/ui/holdings/test_issues_reindex.py @@ -20,8 +20,6 @@ from __future__ import absolute_import, print_function -from utils import flush_index - from rero_ils.modules.holdings.models import HoldingTypes from rero_ils.modules.items.api import Item, ItemsSearch from rero_ils.modules.items.models import ItemIssueStatus @@ -30,114 +28,106 @@ def test_issue_location_after_holdings_update( - holding_lib_martigny_w_patterns, loc_restricted_martigny, - holding_lib_martigny_w_patterns_data): + holding_lib_martigny_w_patterns, + loc_restricted_martigny, + holding_lib_martigny_w_patterns_data, +): """Test location after holdings of type serials changes.""" initial_holding_data = holding_lib_martigny_w_patterns_data holding = holding_lib_martigny_w_patterns - assert holding.get('holdings_type') == HoldingTypes.SERIAL + assert holding.get("holdings_type") == HoldingTypes.SERIAL # create an item of type issue for this holdings item = holding.create_regular_issue( - status=ItemIssueStatus.RECEIVED, - dbcommit=True, - reindex=True + status=ItemIssueStatus.RECEIVED, dbcommit=True,
reindex=True ) - assert ItemsSearch() \ - .filter('term', holding__pid=holding.pid) \ - .count() == 1 + assert ItemsSearch().filter("term", holding__pid=holding.pid).count() == 1 assert item.location_pid == holding.location_pid # change the holdings location assert holding.location_pid != loc_restricted_martigny.pid - holding['location'] = {'$ref': get_ref_for_pid( - 'locations', loc_restricted_martigny.pid)} + holding["location"] = { + "$ref": get_ref_for_pid("locations", loc_restricted_martigny.pid) + } holding = holding.update(holding, dbcommit=True, reindex=True) assert holding.location_pid == loc_restricted_martigny.pid # process the bulked indexed items process_bulk_queue() - flush_index(ItemsSearch.Meta.index) + ItemsSearch.flush_and_refresh() # ensure that the location was correctly inherited from the holdings item = Item.get_record(item.id) assert item.location_pid == holding.location_pid - assert ItemsSearch() \ - .filter('term', location__pid=holding.location_pid) \ - .count() == 1 + assert ItemsSearch().filter("term", location__pid=holding.location_pid).count() == 1 # clean up data holding.update(initial_holding_data, dbcommit=True, reindex=True) item.delete(force=True, dbcommit=True, delindex=True) - assert ItemsSearch() \ - .filter('term', holding__pid=holding.pid) \ - .count() == 0 + assert ItemsSearch().filter("term", holding__pid=holding.pid).count() == 0 def test_issue_item_types_after_holdings_update( - holding_lib_martigny_w_patterns, item_type_on_site_martigny, - holding_lib_martigny_w_patterns_data): + holding_lib_martigny_w_patterns, + item_type_on_site_martigny, + holding_lib_martigny_w_patterns_data, +): """Test item type after holdings of type serials changes.""" initial_holding_data = holding_lib_martigny_w_patterns_data holding = holding_lib_martigny_w_patterns - assert holding.get('holdings_type') == HoldingTypes.SERIAL + assert holding.get("holdings_type") == HoldingTypes.SERIAL # create an item of type issue for this holdings item = holding.create_regular_issue( - status=ItemIssueStatus.RECEIVED, - dbcommit=True, - reindex=True + status=ItemIssueStatus.RECEIVED, dbcommit=True, reindex=True ) - assert ItemsSearch() \ - .filter('term', holding__pid=holding.pid) \ - .count() == 1 + assert ItemsSearch().filter("term", holding__pid=holding.pid).count() == 1 # change the holdings item_type assert holding.circulation_category_pid != item_type_on_site_martigny.pid - holding['circulation_category'] = {'$ref': get_ref_for_pid( - 'item_types', item_type_on_site_martigny.pid)} + holding["circulation_category"] = { + "$ref": get_ref_for_pid("item_types", item_type_on_site_martigny.pid) + } holding = holding.update(holding, dbcommit=True, reindex=True) assert holding.circulation_category_pid == item_type_on_site_martigny.pid # process the bulked indexed items process_bulk_queue() - flush_index(ItemsSearch.Meta.index) + ItemsSearch.flush_and_refresh() # ensure that the item type was correctly inherited from the holdings item = Item.get_record(item.id) assert item.item_type_pid == holding.circulation_category_pid - assert ItemsSearch() \ - .filter('term', item_type__pid=holding.circulation_category_pid) \ - .count() == 1 + assert ( + ItemsSearch() + .filter("term", item_type__pid=holding.circulation_category_pid) + .count() + == 1 + ) # clean up data holding.update(initial_holding_data, dbcommit=True, reindex=True) item.delete(force=True, dbcommit=True, delindex=True) ItemsSearch.flush_and_refresh() - assert ItemsSearch() \ - .filter('term', holding__pid=holding.pid) \ - 
.count() == 0 + assert ItemsSearch().filter("term", holding__pid=holding.pid).count() == 0 def test_inherited_call_numbers_after_holdings_update( - holding_lib_martigny_w_patterns, holding_lib_martigny_w_patterns_data): + holding_lib_martigny_w_patterns, holding_lib_martigny_w_patterns_data +): """Test call numbers after holdings of type serials changes.""" initial_holding_data = holding_lib_martigny_w_patterns_data holding = holding_lib_martigny_w_patterns - assert holding.get('holdings_type') == HoldingTypes.SERIAL + assert holding.get("holdings_type") == HoldingTypes.SERIAL # create an item of type issue for this holdings item = holding.create_regular_issue( - status=ItemIssueStatus.RECEIVED, - dbcommit=True, - reindex=True + status=ItemIssueStatus.RECEIVED, dbcommit=True, reindex=True ) - assert ItemsSearch() \ - .filter('term', holding__pid=holding.pid) \ - .count() == 1 + assert ItemsSearch().filter("term", holding__pid=holding.pid).count() == 1 # change the holdings first call_number - holding['call_number'] = 'cote1' + holding["call_number"] = "cote1" holding = holding.update(holding, dbcommit=True, reindex=True) # process the bulked indexed items @@ -146,16 +136,17 @@ def test_inherited_call_numbers_after_holdings_update( # ensure that the call number was correctly inherited from the holdings item = Item.get_record(item.id) - assert ItemsSearch() \ - .filter('term', issue__inherited_first_call_number__raw='cote1')\ - .count() == 1 - assert ItemsSearch() \ - .filter('term', call_numbers__raw='cote1')\ - .count() == 1 + assert ( + ItemsSearch() + .filter("term", issue__inherited_first_call_number__raw="cote1") + .count() + == 1 + ) + assert ItemsSearch().filter("term", call_numbers__raw="cote1").count() == 1 # delete holdings first call number and change the second call_number - holding.pop('call_number', None) - holding['second_call_number'] = 'cote2' + holding.pop("call_number", None) + holding["second_call_number"] = "cote2" holding = holding.replace(holding, dbcommit=True, reindex=True) # process the bulked indexed items @@ -164,22 +155,22 @@ def test_inherited_call_numbers_after_holdings_update( # ensure that the call numbers were correctly inherited from the holdings item = Item.get_record(item.id) - assert ItemsSearch() \ - .filter('term', issue__inherited_second_call_number__raw='cote2')\ - .count() == 1 - assert ItemsSearch() \ - .filter('term', call_numbers__raw='cote2')\ - .count() == 1 - assert ItemsSearch() \ - .filter('term', issue__inherited_first_call_number__raw='cote1')\ - .count() == 0 - assert ItemsSearch() \ - .filter('term', call_numbers__raw='cote1')\ - .count() == 0 + assert ( + ItemsSearch() + .filter("term", issue__inherited_second_call_number__raw="cote2") + .count() + == 1 + ) + assert ItemsSearch().filter("term", call_numbers__raw="cote2").count() == 1 + assert ( + ItemsSearch() + .filter("term", issue__inherited_first_call_number__raw="cote1") + .count() + == 0 + ) + assert ItemsSearch().filter("term", call_numbers__raw="cote1").count() == 0 # clean up data holding.update(initial_holding_data, dbcommit=True, reindex=True) item.delete(force=True, dbcommit=True, delindex=True) - assert ItemsSearch() \ - .filter('term', holding__pid=holding.pid) \ - .count() == 0 + assert ItemsSearch().filter("term", holding__pid=holding.pid).count() == 0 diff --git a/tests/ui/holdings/test_serial_claims.py b/tests/ui/holdings/test_serial_claims.py index 6274ae3058..fc698bb3a2 100644 --- a/tests/ui/holdings/test_serial_claims.py +++ 
b/tests/ui/holdings/test_serial_claims.py @@ -24,8 +24,6 @@ from copy import deepcopy from datetime import datetime, timedelta -from utils import flush_index - from rero_ils.modules.holdings.api import Holding from rero_ils.modules.items.api import Item, ItemsSearch from rero_ils.modules.items.models import ItemIssueStatus @@ -33,17 +31,14 @@ def test_late_expected( - holding_lib_martigny_w_patterns, holding_lib_sion_w_patterns, - yesterday, tomorrow): + holding_lib_martigny_w_patterns, holding_lib_sion_w_patterns, yesterday, tomorrow +): """Test automatic change of late expected issues status to late.""" martigny = holding_lib_martigny_w_patterns sion = holding_lib_sion_w_patterns def get_late_issues(holding): - return Item.get_issues_by_status( - ItemIssueStatus.LATE, - holdings_pid=holding.pid - ) + return Item.get_issues_by_status(ItemIssueStatus.LATE, holdings_pid=holding.pid) # these two holdings have no late issues assert not len(list(get_late_issues(martigny))) @@ -56,9 +51,9 @@ def get_late_issues(holding): assert len(list(get_late_issues(sion))) == 1 # create a second late issue for Martigny and no new one for Sion - sion['patterns']['next_expected_date'] = tomorrow.strftime('%Y-%m-%d') + sion["patterns"]["next_expected_date"] = tomorrow.strftime("%Y-%m-%d") sion.update(sion, dbcommit=True, reindex=True) - martigny['patterns']['next_expected_date'] = yesterday.strftime('%Y-%m-%d') + martigny["patterns"]["next_expected_date"] = yesterday.strftime("%Y-%m-%d") martigny.update(martigny, dbcommit=True, reindex=True) process_late_issues(dbcommit=True, reindex=True) @@ -71,8 +66,8 @@ def get_late_issues(holding): martigny = Holding.get_record_by_pid(martigny.pid) martigny_data = deepcopy(martigny) date2 = datetime.now() - timedelta(days=1) - martigny['patterns']['next_expected_date'] = date2.strftime('%Y-%m-%d') - martigny['acquisition_status'] = 'not_currently_received' + martigny["patterns"]["next_expected_date"] = date2.strftime("%Y-%m-%d") + martigny["acquisition_status"] = "not_currently_received" martigny.update(martigny, dbcommit=True, reindex=True) process_late_issues(dbcommit=True, reindex=True) @@ -86,18 +81,18 @@ def get_late_issues(holding): original_expected_date = issue.expected_date es_issue = ItemsSearch().get_record_by_pid(issue.pid) assert not issue.sort_date - assert es_issue['issue']['sort_date'] == original_expected_date + assert es_issue["issue"]["sort_date"] == original_expected_date - issue.expected_date = tomorrow.strftime('%Y-%m-%d') + issue.expected_date = tomorrow.strftime("%Y-%m-%d") issue = issue.update(issue, dbcommit=True, reindex=True) assert issue.sort_date == original_expected_date assert issue.issue_status == ItemIssueStatus.EXPECTED # Now set the issue `expected_date` to an overdue date and run the task again.
# The previous issue should be updated to `LATE` status - issue.expected_date = yesterday.strftime('%Y-%m-%d') + issue.expected_date = yesterday.strftime("%Y-%m-%d") issue.update(issue, dbcommit=True, reindex=True) - flush_index(ItemsSearch.Meta.index) + ItemsSearch.flush_and_refresh() process_late_issues(dbcommit=True, reindex=True) issue = Item.get_record_by_pid(issue.pid) diff --git a/tests/ui/ill_requests/test_ill_requests_api.py b/tests/ui/ill_requests/test_ill_requests_api.py index 5e0b05e038..75dce0ee90 100644 --- a/tests/ui/ill_requests/test_ill_requests_api.py +++ b/tests/ui/ill_requests/test_ill_requests_api.py @@ -23,41 +23,60 @@ from rero_ils.modules.ill_requests.models import ILLRequestNoteStatus -def test_ill_request_properties(ill_request_martigny, ill_request_sion, - loc_public_martigny_data, org_martigny_data, - lib_martigny): +def test_ill_request_properties( + ill_request_martigny, + ill_request_sion, + loc_public_martigny_data, + org_martigny_data, + lib_martigny, +): """Test ill request properties.""" assert not ill_request_martigny.is_copy assert ill_request_sion.is_copy - assert ill_request_martigny.get_pickup_location().pid \ - == loc_public_martigny_data['pid'] - assert ill_request_martigny.organisation_pid == org_martigny_data['pid'] + assert ( + ill_request_martigny.get_pickup_location().pid + == loc_public_martigny_data["pid"] + ) + assert ill_request_martigny.organisation_pid == org_martigny_data["pid"] # test notes assert ill_request_martigny.public_note is None - note_content = 'public note test' - ill_request_martigny['notes'] = [{ - 'type': ILLRequestNoteStatus.PUBLIC_NOTE, - 'content': note_content - }] + note_content = "public note test" + ill_request_martigny["notes"] = [ + {"type": ILLRequestNoteStatus.PUBLIC_NOTE, "content": note_content} + ] assert ill_request_martigny.public_note == note_content - ill_request_martigny['notes'] = [{ - 'type': ILLRequestNoteStatus.STAFF_NOTE, - 'content': note_content - }] + ill_request_martigny["notes"] = [ + {"type": ILLRequestNoteStatus.STAFF_NOTE, "content": note_content} + ] assert ill_request_martigny.public_note is None - del ill_request_martigny['notes'] + del ill_request_martigny["notes"] assert ill_request_martigny.get_library().pid == lib_martigny.pid -def test_ill_request_get_request(ill_request_martigny, ill_request_sion, - patron_martigny): +def test_ill_request_get_request( + ill_request_martigny, ill_request_sion, patron_martigny +): """Test ill request get_request functions.""" - assert len(list(ILLRequest.get_requests_by_patron_pid( - patron_martigny.pid, status='pending' - ))) == 1 - assert len(list(ILLRequest.get_requests_by_patron_pid( - patron_martigny.pid, status='denied' - ))) == 0 + assert ( + len( + list( + ILLRequest.get_requests_by_patron_pid( + patron_martigny.pid, status="pending" + ) + ) + ) + == 1 + ) + assert ( + len( + list( + ILLRequest.get_requests_by_patron_pid( + patron_martigny.pid, status="denied" + ) + ) + ) + == 0 + ) diff --git a/tests/ui/ill_requests/test_ill_requests_jsonresolver.py b/tests/ui/ill_requests/test_ill_requests_jsonresolver.py index 352286fed8..5f4a3de9ab 100644 --- a/tests/ui/ill_requests/test_ill_requests_jsonresolver.py +++ b/tests/ui/ill_requests/test_ill_requests_jsonresolver.py @@ -24,12 +24,10 @@ def test_ill_requests_jsonresolver(ill_request_martigny): """Ill request resolver tests.""" - rec = Record.create({ - 'ill_request': {'$ref': 'https://bib.rero.ch/api/ill_requests/illr1'} - }) - assert rec.replace_refs().get('ill_request') == { - 'type': 'illr', 
'pid': 'illr1' - } + rec = Record.create( + {"ill_request": {"$ref": "https://bib.rero.ch/api/ill_requests/illr1"}} + ) + assert rec.replace_refs().get("ill_request") == {"type": "illr", "pid": "illr1"} # deleted record ill_request_martigny.delete() @@ -37,8 +35,8 @@ def test_ill_requests_jsonresolver(ill_request_martigny): type(rec)(rec.replace_refs()).dumps() # non existing record - rec = Record.create({ - 'ill_request': {'$ref': 'https://bib.rero.ch/api/ill_requests/n_e'} - }) + rec = Record.create( + {"ill_request": {"$ref": "https://bib.rero.ch/api/ill_requests/n_e"}} + ) with pytest.raises(JsonRefError): type(rec)(rec.replace_refs()).dumps() diff --git a/tests/ui/ill_requests/test_ill_requests_mapping.py b/tests/ui/ill_requests/test_ill_requests_mapping.py index 960ebb1900..c207f1b6f0 100644 --- a/tests/ui/ill_requests/test_ill_requests_mapping.py +++ b/tests/ui/ill_requests/test_ill_requests_mapping.py @@ -21,17 +21,15 @@ from rero_ils.modules.ill_requests.api import ILLRequest, ILLRequestsSearch -def test_ill_request_es_mapping(es, db, loc_public_martigny, patron_martigny, - ill_request_martigny_data): +def test_ill_request_es_mapping( + es, db, loc_public_martigny, patron_martigny, ill_request_martigny_data +): """Test ill request elasticsearch mapping.""" search = ILLRequestsSearch() mapping = get_mapping(search.Meta.index) assert mapping request = ILLRequest.create( - ill_request_martigny_data, - dbcommit=True, - reindex=True, - delete_pid=True + ill_request_martigny_data, dbcommit=True, reindex=True, delete_pid=True ) assert mapping == get_mapping(search.Meta.index) request.delete(force=True, dbcommit=True, delindex=True) diff --git a/tests/ui/ill_requests/test_ill_requests_ui.py b/tests/ui/ill_requests/test_ill_requests_ui.py index 5e86afeb02..e679fa1e61 100644 --- a/tests/ui/ill_requests/test_ill_requests_ui.py +++ b/tests/ui/ill_requests/test_ill_requests_ui.py @@ -24,14 +24,16 @@ from utils import login_user_for_view -def test_ill_request_create_request_form(client, app, - ill_request_martigny_data_tmp, - loc_public_martigny, - patron_martigny, - default_user_password): - """ test ill request create form.""" - request_form_url = url_for( - 'ill_requests.ill_request_form', viewcode='global') +def test_ill_request_create_request_form( + client, + app, + ill_request_martigny_data_tmp, + loc_public_martigny, + patron_martigny, + default_user_password, +): + """Test ill request create form.""" + request_form_url = url_for("ill_requests.ill_request_form", viewcode="global") # A user who is not logged in has no access to request_form and is redirected to # the login form @@ -48,10 +50,10 @@ def test_ill_request_create_request_form(client, app, # the form submission will return a response status == 200 (display the # form with an error message) form_data = { - 'document-title': 'test title', - 'copy': '1', - 'document-year': '2020', - 'pickup_location': loc_public_martigny.pid + "document-title": "test title", + "copy": "1", + "document-year": "2020", + "pickup_location": loc_public_martigny.pid, } res = client.post(request_form_url, data=form_data) assert res.status_code == 200 @@ -60,21 +62,21 @@ def test_ill_request_create_request_form(client, app, # as the user requests a copy of a document part, they need to specify pages.
# the form submission will return a response status == 302 (the user is # redirected to the patron profile page) - form_data['pages'] = '12-13' + form_data["pages"] = "12-13" res = client.post(request_form_url, data=form_data) assert res.status_code == 302 -def test_ill_request_with_document(client, app, document, patron_martigny, - default_user_password): +def test_ill_request_with_document( + client, app, document, patron_martigny, default_user_password +): """Test ill request form with document data.""" - app.config['RERO_ILS_ILL_REQUEST_ON_GLOBAL_VIEW'] = True - app.config['RERO_ILS_ILL_DEFAULT_SOURCE'] = 'RERO +' + app.config["RERO_ILS_ILL_REQUEST_ON_GLOBAL_VIEW"] = True + app.config["RERO_ILS_ILL_DEFAULT_SOURCE"] = "RERO +" request_form_url = url_for( - 'ill_requests.ill_request_form', - viewcode='global', - record_pid=document.pid) + "ill_requests.ill_request_form", viewcode="global", record_pid=document.pid + ) # logged as user login_user_for_view(client, patron_martigny, default_user_password) @@ -82,23 +84,23 @@ def test_ill_request_with_document(client, app, document, patron_martigny, assert res.status_code == 200 # Check title - assert b'titre en chinois' in res.data + assert b"titre en chinois" in res.data # Check author - assert b'Zeng Lingliang zhu bian' in res.data + assert b"Zeng Lingliang zhu bian" in res.data # Check publisher - assert b'H. Mignot' in res.data + assert b"H. Mignot" in res.data # Check year - assert b'1971' in res.data + assert b"1971" in res.data # Check identifier - assert b'9782844267788 (ISBN)' in res.data + assert b"9782844267788 (ISBN)" in res.data # Check source - assert b'RERO +' in res.data + assert b"RERO +" in res.data # Check url - assert b'http://localhost/global/documents/doc1' in res.data + assert b"http://localhost/global/documents/doc1" in res.data # Check if the request with document is disabled - app.config['RERO_ILS_ILL_REQUEST_ON_GLOBAL_VIEW'] = False + app.config["RERO_ILS_ILL_REQUEST_ON_GLOBAL_VIEW"] = False res = client.get(request_form_url) - assert b'H. Mignot' not in res.data + assert b"H. 
Mignot" not in res.data diff --git a/tests/ui/ill_requests/test_ill_requests_utils.py b/tests/ui/ill_requests/test_ill_requests_utils.py index 8d067f38ca..f5fb3e6037 100644 --- a/tests/ui/ill_requests/test_ill_requests_utils.py +++ b/tests/ui/ill_requests/test_ill_requests_utils.py @@ -25,14 +25,15 @@ from rero_ils.modules.ill_requests.utils import get_pickup_location_options -def test_get_pickup_location_options(patron_martigny, - loc_public_martigny, - loc_restricted_martigny): +def test_get_pickup_location_options( + patron_martigny, loc_public_martigny, loc_restricted_martigny +): """Test pickup location options from utils.""" - with mock.patch('rero_ils.modules.ill_requests.utils.current_patrons', - [patron_martigny]): - assert loc_public_martigny.get('is_pickup', False) - assert not loc_restricted_martigny.get('is_pickup', False) + with mock.patch( + "rero_ils.modules.ill_requests.utils.current_patrons", [patron_martigny] + ): + assert loc_public_martigny.get("is_pickup", False) + assert not loc_restricted_martigny.get("is_pickup", False) options = list(get_pickup_location_options()) assert len(options) == 1 diff --git a/tests/ui/item_types/conftest.py b/tests/ui/item_types/conftest.py index 5dbee31b96..90ef11ddb1 100644 --- a/tests/ui/item_types/conftest.py +++ b/tests/ui/item_types/conftest.py @@ -20,11 +20,11 @@ import pytest -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def item_types_records( item_type_standard_martigny, item_type_on_site_martigny, item_type_specific_martigny, - item_type_regular_sion + item_type_regular_sion, ): """Item types for test mapping.""" diff --git a/tests/ui/item_types/test_item_types_api.py b/tests/ui/item_types/test_item_types_api.py index f17848071b..b6ac364d59 100644 --- a/tests/ui/item_types/test_item_types_api.py +++ b/tests/ui/item_types/test_item_types_api.py @@ -25,55 +25,56 @@ from rero_ils.modules.item_types.api import ItemType, item_type_id_fetcher -def test_item_type_create(db, item_type_data_tmp, org_martigny, - item_type_online_martigny): +def test_item_type_create( + db, item_type_data_tmp, org_martigny, item_type_online_martigny +): """Test item type record creation.""" - item_type_data_tmp['type'] = 'online' + item_type_data_tmp["type"] = "online" with pytest.raises(ValidationError): itty = ItemType.create(item_type_data_tmp, delete_pid=True) db.session.rollback() next_pid = ItemType.provider.identifier.next() - item_type_data_tmp['type'] = 'standard' + item_type_data_tmp["type"] = "standard" itty = ItemType.create(item_type_data_tmp, delete_pid=True) next_pid += 1 assert itty == item_type_data_tmp - assert itty.get('pid') == str(next_pid) + assert itty.get("pid") == str(next_pid) itty = ItemType.get_record_by_pid(str(next_pid)) assert itty == item_type_data_tmp fetched_pid = item_type_id_fetcher(itty.id, itty) assert fetched_pid.pid_value == str(next_pid) - assert fetched_pid.pid_type == 'itty' - assert not ItemType.get_pid_by_name('no exists') + assert fetched_pid.pid_type == "itty" + assert not ItemType.get_pid_by_name("no exists") -def test_item_type_exist_name_and_organisation_pid( - item_type_standard_martigny): +def test_item_type_exist_name_and_organisation_pid(item_type_standard_martigny): """Test item type name uniqueness.""" item_type = item_type_standard_martigny itty = item_type.replace_refs() assert ItemType.exist_name_and_organisation_pid( - itty.get('name'), itty.get('organisation', {}).get('pid')) + itty.get("name"), itty.get("organisation", {}).get("pid") + ) assert not 
ItemType.exist_name_and_organisation_pid( - 'not exists yet', itty.get('organisation', {}).get('pid')) + "not exists yet", itty.get("organisation", {}).get("pid") + ) def test_item_type_get_pid_by_name(item_type_standard_martigny): """Test item type retrieval by name.""" - assert not ItemType.get_pid_by_name('no exists') - assert ItemType.get_pid_by_name('standard') == 'itty1' + assert not ItemType.get_pid_by_name("no exists") + assert ItemType.get_pid_by_name("standard") == "itty1" -def test_item_type_can_not_delete(item_type_standard_martigny, - item_lib_martigny): +def test_item_type_can_not_delete(item_type_standard_martigny, item_lib_martigny): """Test item type can not delete""" can, reasons = item_type_standard_martigny.can_delete assert not can - assert reasons['links']['items'] + assert reasons["links"]["items"] def test_item_type_can_delete(app, item_type_data_tmp): @@ -89,27 +90,25 @@ def test_item_type_properties(item_type_standard_martigny): itty = item_type_standard_martigny # test 'label' - assert itty.get_label() == itty['name'] + assert itty.get_label() == itty["name"] label_strings = { - 'en': ('info_label_eng', 'disable_text_eng'), - 'fr': ('info_label_fre', 'disable_text_fre'), - 'es': (None, 'disable_text_spa') + "en": ("info_label_eng", "disable_text_eng"), + "fr": ("info_label_fre", "disable_text_fre"), + "es": (None, "disable_text_spa"), } for language, labels in label_strings.items(): - itty.setdefault('circulation_information', []).append({ - 'language': language, - 'label': labels[0] - }) - itty.setdefault('displayed_status', []).append({ - 'language': language, - 'label': labels[1] - }) - assert itty.get_label('en') == label_strings['en'][0] - assert itty.get_label('fr') == label_strings['fr'][0] - assert itty.get_label('es') == itty['name'] - assert itty.get_label('nl') == itty['name'] - itty['negative_availability'] = True - assert itty.get_label('en') == label_strings['en'][1] - assert itty.get_label('fr') == label_strings['fr'][1] - assert itty.get_label('es') == label_strings['es'][1] - assert itty.get_label('nl') == itty['name'] + itty.setdefault("circulation_information", []).append( + {"language": language, "label": labels[0]} + ) + itty.setdefault("displayed_status", []).append( + {"language": language, "label": labels[1]} + ) + assert itty.get_label("en") == label_strings["en"][0] + assert itty.get_label("fr") == label_strings["fr"][0] + assert itty.get_label("es") == itty["name"] + assert itty.get_label("nl") == itty["name"] + itty["negative_availability"] = True + assert itty.get_label("en") == label_strings["en"][1] + assert itty.get_label("fr") == label_strings["fr"][1] + assert itty.get_label("es") == label_strings["es"][1] + assert itty.get_label("nl") == itty["name"] diff --git a/tests/ui/item_types/test_item_types_jsonresolver.py b/tests/ui/item_types/test_item_types_jsonresolver.py index 93c0435106..39f6a50bec 100644 --- a/tests/ui/item_types/test_item_types_jsonresolver.py +++ b/tests/ui/item_types/test_item_types_jsonresolver.py @@ -24,12 +24,10 @@ def test_item_types_jsonresolver(item_type_standard_martigny): """Item type resolver tests.""" - rec = Record.create({ - 'item_type': {'$ref': 'https://bib.rero.ch/api/item_types/itty1'} - }) - assert rec.replace_refs().get('item_type') == { - 'type': 'itty', 'pid': 'itty1' - } + rec = Record.create( + {"item_type": {"$ref": "https://bib.rero.ch/api/item_types/itty1"}} + ) + assert rec.replace_refs().get("item_type") == {"type": "itty", "pid": "itty1"} # deleted record 
item_type_standard_martigny.delete() @@ -37,8 +35,8 @@ type(rec)(rec.replace_refs()).dumps() # non existing record - rec = Record.create({ - 'item_type': {'$ref': 'https://bib.rero.ch/api/item_types/n_e'} - }) + rec = Record.create( + {"item_type": {"$ref": "https://bib.rero.ch/api/item_types/n_e"}} + ) with pytest.raises(JsonRefError): type(rec)(rec.replace_refs()).dumps() diff --git a/tests/ui/item_types/test_item_types_mapping.py b/tests/ui/item_types/test_item_types_mapping.py index 6f6e95075f..3769144336 100644 --- a/tests/ui/item_types/test_item_types_mapping.py +++ b/tests/ui/item_types/test_item_types_mapping.py @@ -30,10 +30,7 @@ def test_item_type_es_mapping(search, db, org_martigny, item_type_data_tmp): mapping = get_mapping(search.Meta.index) assert mapping itty = ItemType.create( - item_type_data_tmp, - dbcommit=True, - reindex=True, - delete_pid=True + item_type_data_tmp, dbcommit=True, reindex=True, delete_pid=True ) assert mapping == get_mapping(search.Meta.index) itty.delete(force=True, dbcommit=True, delindex=True) @@ -43,10 +40,10 @@ def test_item_types_search_mapping(app, item_types_records): """Test item type search mapping.""" search = ItemTypesSearch() - assert search.query('query_string', query='checkout').count() == 2 - assert search.query('match', name='checkout').count() == 0 + assert search.query("query_string", query="checkout").count() == 2 + assert search.query("match", name="checkout").count() == 0 - eq_query = search.query('match', name='specific').source(['pid']).scan() + eq_query = search.query("match", name="specific").source(["pid"]).scan() pids = [hit.pid for hit in eq_query] assert len(pids) == 1 - assert 'itty3' in pids + assert "itty3" in pids diff --git a/tests/ui/items/test_items_api.py b/tests/ui/items/test_items_api.py index 225530ac85..a21c87fe27 100644 --- a/tests/ui/items/test_items_api.py +++ b/tests/ui/items/test_items_api.py @@ -27,23 +27,22 @@ from rero_ils.modules.item_types.api import ItemType from rero_ils.modules.items.api import Item, ItemsSearch, item_id_fetcher -from rero_ils.modules.items.models import ItemIssueStatus, ItemStatus, \ - TypeOfItem -from rero_ils.modules.items.utils import item_location_retriever, \ - item_pid_to_object +from rero_ils.modules.items.models import ItemIssueStatus, ItemStatus, TypeOfItem +from rero_ils.modules.items.utils import item_location_retriever, item_pid_to_object from rero_ils.modules.utils import get_ref_for_pid def test_obsolete_temporary_item_types_and_locations( - item_lib_martigny, item_type_on_site_martigny): + item_lib_martigny, item_type_on_site_martigny +): """Test obsolete temporary_item_types and temporary_locations.""" item = item_lib_martigny # First test - No item has a temporary_item_type items = Item.get_items_with_obsolete_temporary_item_type_or_location() assert not len(list(items)) # Second test - add an infinite temporary_item_type to an item - item['temporary_item_type'] = { - '$ref': get_ref_for_pid('itty', item_type_on_site_martigny.pid) + item["temporary_item_type"] = { + "$ref": get_ref_for_pid("itty", item_type_on_site_martigny.pid) } item.update(item, dbcommit=True, reindex=True) items = Item.get_items_with_obsolete_temporary_item_type_or_location() @@ -51,7 +50,7 @@ # Third test - add an expiration date in the future for the temporary # item_type over_2_days = datetime.now() + timedelta(days=2) - item['temporary_item_type']['end_date'] = 
over_2_days.strftime('%Y-%m-%d') + item["temporary_item_type"]["end_date"] = over_2_days.strftime("%Y-%m-%d") item.update(data=item, dbcommit=True, reindex=True) items = Item.get_items_with_obsolete_temporary_item_type_or_location() assert not len(list(items)) @@ -59,26 +58,30 @@ # Fourth test - check obsolete items for a specified date in the future over_3_days = datetime.now() + timedelta(days=3) items = Item.get_items_with_obsolete_temporary_item_type_or_location( - end_date=over_3_days) + end_date=over_3_days + ) assert len(list(items)) == 1 # reset the item to original values - del item['temporary_item_type'] + del item["temporary_item_type"] item.update(data=item, dbcommit=True, reindex=True) def test_item_organisation_pid(client, org_martigny, item_lib_martigny): """Test organisation pid has been added during indexing.""" search = ItemsSearch() - item = next(search.filter('term', pid=item_lib_martigny.pid).scan()) + item = next(search.filter("term", pid=item_lib_martigny.pid).scan()) assert item.organisation.pid == org_martigny.pid -def test_item_item_location_retriever(item_lib_martigny, loc_public_martigny, - loc_restricted_martigny): +def test_item_item_location_retriever( + item_lib_martigny, loc_public_martigny, loc_restricted_martigny +): """Test location retriever for invenio-circulation.""" - assert item_location_retriever(item_pid_to_object( - item_lib_martigny.pid)) == loc_public_martigny.pid + assert ( + item_location_retriever(item_pid_to_object(item_lib_martigny.pid)) + == loc_public_martigny.pid + ) def test_item_get_items_pid_by_document_pid(document, item_lib_martigny): @@ -93,40 +96,32 @@ def test_item_create(item_lib_martigny_data_tmp, item_lib_martigny): assert item.can_delete == (True, {}) item = Item.get_record_by_pid(item.pid) - item_lib_martigny_data_tmp['pid'] = '1' + item_lib_martigny_data_tmp["pid"] = "1" assert item == item_lib_martigny_data_tmp fetched_pid = item_id_fetcher(item.id, item) - assert fetched_pid.pid_value == '1' - assert fetched_pid.pid_type == 'item' + assert fetched_pid.pid_value == "1" + assert fetched_pid.pid_type == "item" def test_item_extended_validation(client, holding_lib_martigny_w_patterns): """Test item extended validation in relation with its parent holding.""" data = { - '$schema': 'https://bib.rero.ch/schemas/items/item-v0.0.1.json', - 'type': 'issue', - 'document': { - '$ref': 'https://bib.rero.ch/api/documents/doc4' + "$schema": "https://bib.rero.ch/schemas/items/item-v0.0.1.json", + "type": "issue", + "document": {"$ref": "https://bib.rero.ch/api/documents/doc4"}, + "call_number": "00001", + "location": {"$ref": "https://bib.rero.ch/api/locations/loc1"}, + "item_type": {"$ref": "https://bib.rero.ch/api/item_types/itty1"}, + "holding": {"$ref": "https://bib.rero.ch/api/holdings/holding5"}, + "status": ItemStatus.ON_SHELF, + "enumerationAndChronology": "irregular_issue", + "issue": { + "status": ItemIssueStatus.RECEIVED, + "received_date": datetime.now().strftime("%Y-%m-%d"), + "expected_date": datetime.now().strftime("%Y-%m-%d"), + "regular": False, }, - 'call_number': '00001', - 'location': { - '$ref': 'https://bib.rero.ch/api/locations/loc1' - }, - 'item_type': { - '$ref': 'https://bib.rero.ch/api/item_types/itty1' - }, - 'holding': { - '$ref': 'https://bib.rero.ch/api/holdings/holding5' - }, - 'status': ItemStatus.ON_SHELF, - 'enumerationAndChronology': 'irregular_issue', - 'issue': { - 'status': ItemIssueStatus.RECEIVED, - 'received_date': 
datetime.now().strftime('%Y-%m-%d'), - 'expected_date': datetime.now().strftime('%Y-%m-%d'), - 'regular': False - } } Item.create(data, dbcommit=True, reindex=True, delete_pid=True) @@ -142,27 +137,27 @@ def test_item_extended_validation(client, holding_lib_martigny_w_patterns): # Item.create(data, dbcommit=True, reindex=True, delete_pid=True) -def test_extended_validation_unique_barcode(item_lib_martigny, item_lib_fully, - item_lib_martigny_data_tmp, - lib_martigny): +def test_extended_validation_unique_barcode( + item_lib_martigny, item_lib_fully, item_lib_martigny_data_tmp, lib_martigny +): """Test that a barcode must be unique.""" # check that own barcode doesn't fail validation on item update assert item_lib_martigny.update(item_lib_martigny) # check that an item cannot be updated with an already existing barcode - item_lib_martigny['barcode'] = 'duplicate' + item_lib_martigny["barcode"] = "duplicate" item_lib_martigny.update(item_lib_martigny, dbcommit=True, reindex=True) - item_lib_fully['barcode'] = 'duplicate' + item_lib_fully["barcode"] = "duplicate" with pytest.raises(ValidationError) as err: item_lib_fully.update(item_lib_fully) - assert 'already taken' in str(err) + assert "already taken" in str(err) # check that an item with an already existing barcode cannot be created item_lib_martigny_data_tmp = deepcopy(item_lib_martigny_data_tmp) - item_lib_martigny_data_tmp['barcode'] = 'duplicate' + item_lib_martigny_data_tmp["barcode"] = "duplicate" with pytest.raises(ValidationError) as err: Item.create(item_lib_martigny_data_tmp, delete_pid=True) - assert 'already taken' in str(err) + assert "already taken" in str(err) def test_items_new_acquisition(item_lib_martigny): @@ -172,117 +167,133 @@ # test 'is_new_acquisition' property # --> not yet a new acquisition acq_date = datetime.now() + timedelta(days=1) - item['acquisition_date'] = acq_date.strftime('%Y-%m-%d') + item["acquisition_date"] = acq_date.strftime("%Y-%m-%d") assert not item.is_new_acquisition # --> Without acq_date, this will never be a new acq - del item['acquisition_date'] + del item["acquisition_date"] assert not item.is_new_acquisition # --> there is an acq_date and this date is now past acq_date = datetime.now() - timedelta(days=1) - item['acquisition_date'] = acq_date.strftime('%Y-%m-%d') + item["acquisition_date"] = acq_date.strftime("%Y-%m-%d") assert item.is_new_acquisition def test_replace_refs(item_lib_martigny, item_type_on_site_martigny): """Test specific replace_refs for items.""" - item_lib_martigny['temporary_item_type'] = { - '$ref': get_ref_for_pid('itty', item_type_on_site_martigny.pid), - 'end_date': '2020-12-31' + item_lib_martigny["temporary_item_type"] = { + "$ref": get_ref_for_pid("itty", item_type_on_site_martigny.pid), + "end_date": "2020-12-31", } - assert 'end_date' in item_lib_martigny.replace_refs().\ - get('temporary_item_type') + assert "end_date" in item_lib_martigny.replace_refs().get("temporary_item_type") -def test_item_type_circulation_category_pid(item_lib_martigny, - item_type_on_site_martigny): +def test_item_type_circulation_category_pid( + item_lib_martigny, item_type_on_site_martigny +): """Test item_type circulation category pid.""" - assert item_lib_martigny.item_type_pid == \ - item_lib_martigny.item_type_circulation_category_pid + assert ( + item_lib_martigny.item_type_pid + == item_lib_martigny.item_type_circulation_category_pid + ) past_2_days = datetime.now() - timedelta(days=2) over_2_days = datetime.now() + 
timedelta(days=2) # add an obsolete temporary item_type end_date :: In this case, the # circulation item_type must be the default item_type - item_lib_martigny['temporary_item_type'] = { - '$ref': get_ref_for_pid('itty', item_type_on_site_martigny.pid), - 'end_date': past_2_days.strftime('%Y-%m-%d') + item_lib_martigny["temporary_item_type"] = { + "$ref": get_ref_for_pid("itty", item_type_on_site_martigny.pid), + "end_date": past_2_days.strftime("%Y-%m-%d"), } - assert item_lib_martigny.item_type_pid == \ - item_lib_martigny.item_type_circulation_category_pid + assert ( + item_lib_martigny.item_type_pid + == item_lib_martigny.item_type_circulation_category_pid + ) # add a valid temporary item_type end_date :: In this case, the # circulation item_type must be the temporary item_type - item_lib_martigny['temporary_item_type']['end_date'] = \ - over_2_days.strftime('%Y-%m-%d') + item_lib_martigny["temporary_item_type"]["end_date"] = over_2_days.strftime( + "%Y-%m-%d" + ) - assert item_type_on_site_martigny.pid == \ - item_lib_martigny.item_type_circulation_category_pid + assert ( + item_type_on_site_martigny.pid + == item_lib_martigny.item_type_circulation_category_pid + ) # removing any temporary item_type end_date :: In this case, the # circulation item_type must be the temporary item_type - del item_lib_martigny['temporary_item_type']['end_date'] - assert item_type_on_site_martigny.pid == \ - item_lib_martigny.item_type_circulation_category_pid + del item_lib_martigny["temporary_item_type"]["end_date"] + assert ( + item_type_on_site_martigny.pid + == item_lib_martigny.item_type_circulation_category_pid + ) # reset the object with default value - del item_lib_martigny['temporary_item_type'] - - -def test_items_availability(item_type_missing_martigny, - item_type_standard_martigny, - item_lib_martigny_data_tmp, loc_public_martigny, - lib_martigny, org_martigny, document): + del item_lib_martigny["temporary_item_type"] + + +def test_items_availability( + item_type_missing_martigny, + item_type_standard_martigny, + item_lib_martigny_data_tmp, + loc_public_martigny, + lib_martigny, + org_martigny, + document, +): """Test availability for an item.""" # Create a temporary item with correct data for the test item_data = deepcopy(item_lib_martigny_data_tmp) - del item_data['pid'] - item_data['barcode'] = 'TEST_AVAILABILITY' - item_data['temporary_item_type'] = { - '$ref': get_ref_for_pid(ItemType, item_type_missing_martigny.pid) + del item_data["pid"] + item_data["barcode"] = "TEST_AVAILABILITY" + item_data["temporary_item_type"] = { + "$ref": get_ref_for_pid(ItemType, item_type_missing_martigny.pid) } item = Item.create(item_data, dbcommit=True, reindex=True) # test the availability and availability_text assert not item.is_available() - assert len(item.availability_text) == \ - len(item_type_missing_martigny.get('displayed_status', [])) + 1 + assert ( + len(item.availability_text) + == len(item_type_missing_martigny.get("displayed_status", [])) + 1 + ) - del item['temporary_item_type'] + del item["temporary_item_type"] item = item.update(item, dbcommit=True, reindex=True) assert item.is_available() assert len(item.availability_text) == 1 # only default value # test availability by item status - item['status'] = ItemStatus.IN_TRANSIT + item["status"] = ItemStatus.IN_TRANSIT item = item.update(item, dbcommit=True, reindex=True) assert not item.is_available() - assert item.availability_text[0]['label'] == ItemStatus.IN_TRANSIT - 
item['status'] = ItemStatus.ON_SHELF + item["status"] = ItemStatus.ON_SHELF item = item.update(item, dbcommit=True, reindex=True) assert item.is_available() assert len(item.availability_text) == 1 # only default value # test availability and availability_text for an issue - item['type'] = TypeOfItem.ISSUE - item['enumerationAndChronology'] = 'dummy' - item['issue'] = { - 'regular': False, - 'status': ItemIssueStatus.RECEIVED, - 'received_date': '1970-01-01', - 'expected_date': '1970-01-01' + item["type"] = TypeOfItem.ISSUE + item["enumerationAndChronology"] = "dummy" + item["issue"] = { + "regular": False, + "status": ItemIssueStatus.RECEIVED, + "received_date": "1970-01-01", + "expected_date": "1970-01-01", } item = item.update(item, dbcommit=True, reindex=True) assert item.is_available() - assert item.availability_text[0]['label'] == item.status + assert item.availability_text[0]["label"] == item.status - item['issue']['status'] = ItemIssueStatus.LATE + item["issue"]["status"] = ItemIssueStatus.LATE item = item.update(item, dbcommit=True, reindex=True) assert not item.is_available() - assert item.availability_text[0]['label'] == ItemIssueStatus.LATE + assert item.availability_text[0]["label"] == ItemIssueStatus.LATE # delete the created item item.delete() @@ -293,41 +304,38 @@ def test_get_links_to_me_with_fees(patron_transaction_overdue_saxon): pttr = patron_transaction_overdue_saxon loan = pttr.loan item = Item.get_record_by_pid(loan.item_pid) - assert item.get_links_to_me() == {'fees': 1, 'loans': 1} - assert item.get_links_to_me(get_pids=True) == { - 'fees': ['1'], 'loans': ['1']} + assert item.get_links_to_me() == {"fees": 1, "loans": 1} + assert item.get_links_to_me(get_pids=True) == {"fees": ["1"], "loans": ["1"]} - pttr['status'] = 'closed' - pttr['total_amount'] = 0 + pttr["status"] = "closed" + pttr["total_amount"] = 0 pttr = pttr.update(pttr, reindex=True, dbcommit=True) - assert item.get_links_to_me() == {'loans': 1} - assert item.get_links_to_me(get_pids=True) == {'loans': ['1']} + assert item.get_links_to_me() == {"loans": 1} + assert item.get_links_to_me(get_pids=True) == {"loans": ["1"]} def test_get_links_to_me_with_collection(coll_martigny_1, item_lib_martigny): """Test item deletion used by a collection.""" - assert item_lib_martigny.get_links_to_me() == { - 'collections': 1 - } + assert item_lib_martigny.get_links_to_me() == {"collections": 1} assert item_lib_martigny.get_links_to_me(get_pids=True) == { - 'collections': [coll_martigny_1.pid] + "collections": [coll_martigny_1.pid] } can_delete, links = item_lib_martigny.can_delete assert not can_delete - assert links == {'links': {'collections': 1}} + assert links == {"links": {"collections": 1}} def test_items_properties(item_lib_martigny): """Test some properties about item class.""" item = item_lib_martigny assert len(item.call_numbers) == 1 - assert item.get('call_number') in item.call_numbers + assert item.get("call_number") in item.call_numbers - item['second_call_number'] = 'SECOND_CALL' + item["second_call_number"] = "SECOND_CALL" item = item.update(item, dbcommit=True, reindex=True) assert len(item.call_numbers) == 2 - assert item.get('call_number') in item.call_numbers - assert item.get('second_call_number') in item.call_numbers + assert item.get("call_number") in item.call_numbers + assert item.get("second_call_number") in item.call_numbers - del item['second_call_number'] + del item["second_call_number"] item.update(item, dbcommit=True, reindex=True) diff --git a/tests/ui/items/test_items_dumpers.py 
b/tests/ui/items/test_items_dumpers.py index a4061390a0..f55380da99 100644 --- a/tests/ui/items/test_items_dumpers.py +++ b/tests/ui/items/test_items_dumpers.py @@ -27,58 +27,69 @@ from rero_ils.modules.collections.api import Collection from rero_ils.modules.commons.exceptions import MissingDataException from rero_ils.modules.holdings.api import Holding -from rero_ils.modules.items.dumpers import CirculationActionDumper, \ - ClaimIssueNotificationDumper, ItemCirculationDumper +from rero_ils.modules.items.dumpers import ( + CirculationActionDumper, + ClaimIssueNotificationDumper, + ItemCirculationDumper, +) from rero_ils.modules.items.models import TypeOfItem from rero_ils.modules.loans.models import LoanState from rero_ils.modules.utils import get_ref_for_pid def test_item_action_circulation_dumper( - item_lib_martigny, patron_martigny, loc_public_martigny, - librarian_martigny, circulation_policies, loc_public_martigny_bourg, - coll_martigny_1): + item_lib_martigny, + patron_martigny, + loc_public_martigny, + librarian_martigny, + circulation_policies, + loc_public_martigny_bourg, + coll_martigny_1, +): """Test item circulation action dumper.""" params = { - 'patron_pid': patron_martigny.pid, - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid, - 'pickup_location_pid': loc_public_martigny.pid, + "patron_pid": patron_martigny.pid, + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, + "pickup_location_pid": loc_public_martigny.pid, } item, _ = item_record_to_a_specific_loan_state( - item=item_lib_martigny, loan_state=LoanState.PENDING, - params=params, copy_item=False) + item=item_lib_martigny, + loan_state=LoanState.PENDING, + params=params, + copy_item=False, + ) data = item.dumps(CirculationActionDumper()) # $ref resolution - assert data['library']['pid'] + assert data["library"]["pid"] # document title - assert data['document']['title'] + assert data["document"]["title"] # location name - assert data['location']['name'] + assert data["location"]["name"] # organisation pid - assert data['location']['organisation']['pid'] + assert data["location"]["organisation"]["pid"] # library and location name - assert data['library_location_name'] + assert data["library_location_name"] # actions - assert data['actions'] + assert data["actions"] # pending loans - assert len(data['pending_loans']) == 1 + assert len(data["pending_loans"]) == 1 # number of pending requests - assert data['current_pending_requests'] == 1 + assert data["current_pending_requests"] == 1 # not temporary location - assert 'temporary_location' not in data + assert "temporary_location" not in data # not collections - assert 'collections' not in data + assert "collections" not in data # update end date on the collection collection = Collection.get_record_by_pid(coll_martigny_1["pid"]) @@ -87,47 +98,43 @@ def test_item_action_circulation_dumper( collection.update(collection, dbcommit=True, reindex=True) # add temporary location data - item["temporary_location"] = { - "pid": "loc15", - "type": "loc" - } + item["temporary_location"] = {"pid": "loc15", "type": "loc"} data = item.dumps(CirculationActionDumper()) # temporary location name - assert data['temporary_location']['name'] == loc_public_martigny_bourg\ - .get('name') + assert data["temporary_location"]["name"] == loc_public_martigny_bourg.get("name") # Collection title - assert data['collections'][0] == coll_martigny_1.get('title') + assert data["collections"][0] == 
coll_martigny_1.get("title") def test_item_circulation_dumper(item_lib_martigny): """Test item circulation dumper.""" item = item_lib_martigny - item['call_number'] = 'ITEM_MAIN_CN' - item['second_call_number'] = 'ITEM_SECOND_CN' + item["call_number"] = "ITEM_MAIN_CN" + item["second_call_number"] = "ITEM_SECOND_CN" holdings = Holding.get_record_by_pid(item.holding_pid) original_holding_data = deepcopy(holdings) - holdings['call_number'] = 'HOLDING_MAIN_CN' - holdings['second_call_number'] = 'HOLDING_SECOND_CN' + holdings["call_number"] = "HOLDING_MAIN_CN" + holdings["second_call_number"] = "HOLDING_SECOND_CN" holdings.update(holdings, dbcommit=True, reindex=True) # CHECK_1 :: dumped call_numbers are equivalent to item call numbers dumped_data = item.dumps(dumper=ItemCirculationDumper()) - assert dumped_data['call_number'] == item['call_number'] - item.pop('call_number', None) + assert dumped_data["call_number"] == item["call_number"] + item.pop("call_number", None) dumped_data = item.dumps(dumper=ItemCirculationDumper()) - assert 'call_number' not in dumped_data - assert dumped_data['second_call_number'] == item['second_call_number'] + assert "call_number" not in dumped_data + assert dumped_data["second_call_number"] == item["second_call_number"] # CHECK_2 :: remove all call_numbers from item, dumped date should # integrate parent holdings call_numbers - item.pop('second_call_number', None) + item.pop("second_call_number", None) dumped_data = item.dumps(dumper=ItemCirculationDumper()) - assert dumped_data['call_number'] == holdings['call_number'] - assert dumped_data['second_call_number'] == holdings['second_call_number'] + assert dumped_data["call_number"] == holdings["call_number"] + assert dumped_data["second_call_number"] == holdings["second_call_number"] # RESET HOLDING RECORD holdings.update(original_holding_data, dbcommit=True, reindex=True) @@ -138,14 +145,14 @@ def test_claim_issue_dumper(item_lib_martigny): with pytest.raises(TypeError): item_lib_martigny.dumps(dumper=ClaimIssueNotificationDumper()) - item_lib_martigny['type'] = TypeOfItem.ISSUE + item_lib_martigny["type"] = TypeOfItem.ISSUE holding = item_lib_martigny.holding - holding.pop('vendor', None) + holding.pop("vendor", None) with pytest.raises(MissingDataException) as exc: item_lib_martigny.dumps(dumper=ClaimIssueNotificationDumper()) - assert 'item.holding.vendor' in str(exc) + assert "item.holding.vendor" in str(exc) - item_lib_martigny['holding']['$ref'] = get_ref_for_pid('hold', 'dummy') + item_lib_martigny["holding"]["$ref"] = get_ref_for_pid("hold", "dummy") with pytest.raises(MissingDataException) as exc: item_lib_martigny.dumps(dumper=ClaimIssueNotificationDumper()) - assert 'item.holding' in str(exc) + assert "item.holding" in str(exc) diff --git a/tests/ui/items/test_items_jsonresolver.py b/tests/ui/items/test_items_jsonresolver.py index ee1531c0af..58d892176f 100644 --- a/tests/ui/items/test_items_jsonresolver.py +++ b/tests/ui/items/test_items_jsonresolver.py @@ -24,10 +24,8 @@ def test_items_jsonresolver(item_lib_martigny): """Test item json resolver.""" - rec = Record.create({ - 'item': {'$ref': 'https://bib.rero.ch/api/items/item1'} - }) - assert rec.replace_refs().get('item') == {'type': 'item', 'pid': 'item1'} + rec = Record.create({"item": {"$ref": "https://bib.rero.ch/api/items/item1"}}) + assert rec.replace_refs().get("item") == {"type": "item", "pid": "item1"} # deleted record item_lib_martigny.delete() @@ -35,8 +33,6 @@ def test_items_jsonresolver(item_lib_martigny): 
type(rec)(rec.replace_refs()).dumps() # non existing record - rec = Record.create({ - 'item': {'$ref': 'https://bib.rero.ch/api/items/n_e'} - }) + rec = Record.create({"item": {"$ref": "https://bib.rero.ch/api/items/n_e"}}) with pytest.raises(JsonRefError): type(rec)(rec.replace_refs()).dumps() diff --git a/tests/ui/items/test_items_mapping.py b/tests/ui/items/test_items_mapping.py index 29bb39a91a..d8c066ca84 100644 --- a/tests/ui/items/test_items_mapping.py +++ b/tests/ui/items/test_items_mapping.py @@ -21,17 +21,17 @@ from rero_ils.modules.items.api import Item, ItemsSearch -def test_item_es_mapping(document, loc_public_martigny, - item_type_standard_martigny, - item_lib_martigny_data_tmp): +def test_item_es_mapping( + document, + loc_public_martigny, + item_type_standard_martigny, + item_lib_martigny_data_tmp, +): """Test item elasticsearch mapping.""" search = ItemsSearch() mapping = get_mapping(search.Meta.index) assert mapping Item.create( - item_lib_martigny_data_tmp, - dbcommit=True, - reindex=True, - delete_pid=True + item_lib_martigny_data_tmp, dbcommit=True, reindex=True, delete_pid=True ) assert mapping == get_mapping(search.Meta.index) diff --git a/tests/ui/libraries/conftest.py b/tests/ui/libraries/conftest.py index 5446b29cb9..284a235739 100644 --- a/tests/ui/libraries/conftest.py +++ b/tests/ui/libraries/conftest.py @@ -20,12 +20,6 @@ import pytest -@pytest.fixture(scope='module') -def libraries_records( - lib_martigny, - lib_saxon, - lib_fully, - lib_sion, - lib_aproz -): +@pytest.fixture(scope="module") +def libraries_records(lib_martigny, lib_saxon, lib_fully, lib_sion, lib_aproz): """Libraries for test mapping.""" diff --git a/tests/ui/libraries/test_libraries_api.py b/tests/ui/libraries/test_libraries_api.py index 95904af86a..6c33c86c0d 100644 --- a/tests/ui/libraries/test_libraries_api.py +++ b/tests/ui/libraries/test_libraries_api.py @@ -44,20 +44,20 @@ def test_library_create(db, org_martigny, lib_martigny_data): """Test library creation.""" lib = Library.create(lib_martigny_data, delete_pid=True) assert lib == lib_martigny_data - assert lib.get('pid') == '1' + assert lib.get("pid") == "1" - lib = Library.get_record_by_pid('1') + lib = Library.get_record_by_pid("1") assert lib == lib_martigny_data fetched_pid = fetcher(lib.id, lib) - assert fetched_pid.pid_value == '1' - assert fetched_pid.pid_type == 'lib' + assert fetched_pid.pid_value == "1" + assert fetched_pid.pid_type == "lib" def test_libraries_is_open(lib_martigny): """Test library 'open' methods.""" - saturday = '2018-12-15 11:00' - monday = '2018-12-10 06:00' + saturday = "2018-12-15 11:00" + monday = "2018-12-10 06:00" library = lib_martigny def next_weekday(d, weekday): @@ -72,7 +72,7 @@ def next_weekday(d, weekday): # * monday --> friday :: 6 AM --> closed # * monday --> friday :: 12 AM --> open # * saturday & sunday :: closed all day - orginal_date = datetime.strptime('2020/08/17', '%Y/%m/%d') # random date + orginal_date = datetime.strptime("2020/08/17", "%Y/%m/%d") # random date for day_idx in range(0, 5): test_date = next_weekday(orginal_date, day_idx) test_date = test_date.replace(hour=6, minute=0) @@ -88,7 +88,7 @@ def next_weekday(d, weekday): # * According to library setting, the '2018-12-15' day is an exception # not repeatable. It's a saturday (normally closed), but defined as # open by exception. 
- exception_date = date_string_to_utc('2018-12-15') + exception_date = date_string_to_utc("2018-12-15") exception_date = exception_date.replace(hour=20, minute=0) assert exception_date.weekday() == 5 assert not library.is_open(exception_date) @@ -109,48 +109,48 @@ def next_weekday(d, weekday): # CASE 3 :: Check repeatable exception date for a single date # * According to library setting, each 1st of August is closed # (from 2019), even if '2019-08-01' is a Thursday (normally open) - exception_date = date_string_to_utc('2019-08-01') # Thursday + exception_date = date_string_to_utc("2019-08-01") # Thursday assert not library.is_open(exception_date) - exception_date = date_string_to_utc('2022-08-01') # Monday + exception_date = date_string_to_utc("2022-08-01") # Monday assert not library.is_open(exception_date) - exception_date = date_string_to_utc('2018-08-01') # Wednesday + exception_date = date_string_to_utc("2018-08-01") # Wednesday assert library.is_open(exception_date) - exception_date = date_string_to_utc('2222-8-1') # Thursday + exception_date = date_string_to_utc("2222-8-1") # Thursday assert not library.is_open(exception_date) # CASE 4 :: Check repeatable exception range date # * According to library setting, the library is closed for the Christmas # break each year (22/12 --> 06/01) - exception_date = date_string_to_utc('2018-12-24') # Monday + exception_date = date_string_to_utc("2018-12-24") # Monday assert not library.is_open(exception_date) - exception_date = date_string_to_utc('2019-01-07') # Monday + exception_date = date_string_to_utc("2019-01-07") # Monday assert library.is_open(exception_date) - exception_date = date_string_to_utc('2020-12-29') # Tuesday + exception_date = date_string_to_utc("2020-12-29") # Tuesday assert not library.is_open(exception_date) - exception_date = date_string_to_utc('2101-01-4') # Tuesday + exception_date = date_string_to_utc("2101-01-4") # Tuesday assert not library.is_open(exception_date) # CASE 5 :: Check repeatable date with interval # * According to library setting, each first day of the odd months is # a closed day. 
- exception_date = date_string_to_utc('2019-03-01') # Friday + exception_date = date_string_to_utc("2019-03-01") # Friday assert not library.is_open(exception_date) - exception_date = date_string_to_utc('2019-04-01') # Monday + exception_date = date_string_to_utc("2019-04-01") # Monday assert library.is_open(exception_date) - exception_date = date_string_to_utc('2019-05-01') # Wednesday + exception_date = date_string_to_utc("2019-05-01") # Wednesday assert not library.is_open(exception_date) # Other tests on opening day/hour - assert library.next_open(date=saturday).date() \ - == parser.parse('2018-12-17').date() - assert library.next_open(date=saturday, previous=True).date() \ - == parser.parse('2018-12-14').date() + assert library.next_open(date=saturday).date() == parser.parse("2018-12-17").date() + assert ( + library.next_open(date=saturday, previous=True).date() + == parser.parse("2018-12-14").date() + ) assert library.count_open(start_date=monday, end_date=saturday) == 6 assert library.in_working_days( - count=6, - date=date_string_to_utc('2018-12-10') - ) == date_string_to_utc('2018-12-17') + count=6, date=date_string_to_utc("2018-12-10") + ) == date_string_to_utc("2018-12-17") def test_library_can_delete(lib_martigny): @@ -163,19 +163,19 @@ def test_library_can_delete(lib_martigny): def test_library_timezone(lib_martigny): """Test library timezone.""" tz = lib_martigny.get_timezone() - assert tz == pytz.timezone('Europe/Zurich') + assert tz == pytz.timezone("Europe/Zurich") def test_library_get_address(lib_martigny, lib_saxon): """Get information about a library address.""" lib = lib_martigny address = lib.get_address(LibraryAddressType.MAIN_ADDRESS) - assert address == lib.get('address') + assert address == lib.get("address") address = lib.get_address(LibraryAddressType.SHIPPING_ADDRESS) - assert address['country'] == 'sz' # translated at 'Suisse (sz)' + assert address["country"] == "sz" # translated at 'Suisse (sz)' address = lib.get_address(LibraryAddressType.BILLING_ADDRESS) - assert address['country'] == 'be' - address = lib.get_address('dummy_type') + assert address["country"] == "be" + address = lib.get_address("dummy_type") assert address is None lib = lib_saxon @@ -187,22 +187,17 @@ def test_library_get_email(lib_martigny): """Test the get_email function about a library.""" def notification_email(library, notif_type): - for setting in library.get('notification_settings', []): - if setting.get('type') == notif_type: - return setting.get('email') + for setting in library.get("notification_settings", []): + if setting.get("type") == notif_type: + return setting.get("email") - assert lib_martigny.get_email(NotificationType.RECALL) == \ - notification_email(lib_martigny, NotificationType.RECALL) - assert not lib_martigny.get_email('dummy_notification_type') + assert lib_martigny.get_email(NotificationType.RECALL) == notification_email( + lib_martigny, NotificationType.RECALL + ) + assert not lib_martigny.get_email("dummy_notification_type") -def test_library_get_links_to_me( - lib_martigny, - loc_public_martigny, - loc_public_sion -): +def test_library_get_links_to_me(lib_martigny, loc_public_martigny, loc_public_sion): """Test library links.""" - assert lib_martigny.get_links_to_me() == {'locations': 1} - assert lib_martigny.get_links_to_me(get_pids=True) == { - 'locations': ['loc1'] - } + assert lib_martigny.get_links_to_me() == {"locations": 1} + assert lib_martigny.get_links_to_me(get_pids=True) == {"locations": ["loc1"]} diff --git 
a/tests/ui/libraries/test_libraries_dumpers.py b/tests/ui/libraries/test_libraries_dumpers.py index 4c82c84f7a..f6b3202efa 100644 --- a/tests/ui/libraries/test_libraries_dumpers.py +++ b/tests/ui/libraries/test_libraries_dumpers.py @@ -20,9 +20,10 @@ import pytest from rero_ils.modules.commons.exceptions import MissingDataException -from rero_ils.modules.libraries.dumpers import \ - LibraryAcquisitionNotificationDumper, \ - LibrarySerialClaimNotificationDumper +from rero_ils.modules.libraries.dumpers import ( + LibraryAcquisitionNotificationDumper, + LibrarySerialClaimNotificationDumper, +) def test_library_serial_dumpers(lib_martigny, lib_saxon): @@ -30,17 +31,17 @@ def test_library_serial_dumpers(lib_martigny, lib_saxon): # Acquisition dumper dump_data = lib_martigny.dumps(LibraryAcquisitionNotificationDumper()) - assert dump_data['shipping_informations'] - assert dump_data['billing_informations'] + assert dump_data["shipping_informations"] + assert dump_data["billing_informations"] dump_data = lib_saxon.dumps(LibraryAcquisitionNotificationDumper()) - assert dump_data['shipping_informations'] - assert 'billing_informations' not in dump_data + assert dump_data["shipping_informations"] + assert "billing_informations" not in dump_data # Claim issue dumper data = lib_martigny.dumps(LibrarySerialClaimNotificationDumper()) - assert data['address'] - assert data['shipping_informations'] - assert data['billing_informations'] + assert data["address"] + assert data["shipping_informations"] + assert data["billing_informations"] with pytest.raises(MissingDataException) as exc: lib_saxon.dumps(LibrarySerialClaimNotificationDumper()) - assert 'library.serial_acquisition_settings' in str(exc) + assert "library.serial_acquisition_settings" in str(exc) diff --git a/tests/ui/libraries/test_libraries_jsonresolver.py b/tests/ui/libraries/test_libraries_jsonresolver.py index b3aa884433..8d848ccee1 100644 --- a/tests/ui/libraries/test_libraries_jsonresolver.py +++ b/tests/ui/libraries/test_libraries_jsonresolver.py @@ -25,10 +25,8 @@ def test_libraries_jsonresolver(lib_martigny): """Test library json resolver.""" library = lib_martigny - rec = Record.create({ - 'library': {'$ref': 'https://bib.rero.ch/api/libraries/lib1'} - }) - assert rec.replace_refs().get('library') == {'type': 'lib', 'pid': 'lib1'} + rec = Record.create({"library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"}}) + assert rec.replace_refs().get("library") == {"type": "lib", "pid": "lib1"} # deleted record library.delete() @@ -36,8 +34,6 @@ def test_libraries_jsonresolver(lib_martigny): type(rec)(rec.replace_refs()).dumps() # non existing record - rec = Record.create({ - 'library': {'$ref': 'https://bib.rero.ch/api/libraries/n_e'} - }) + rec = Record.create({"library": {"$ref": "https://bib.rero.ch/api/libraries/n_e"}}) with pytest.raises(JsonRefError): type(rec)(rec.replace_refs()).dumps() diff --git a/tests/ui/libraries/test_libraries_mapping.py b/tests/ui/libraries/test_libraries_mapping.py index 1ecaac7146..909648c4e8 100644 --- a/tests/ui/libraries/test_libraries_mapping.py +++ b/tests/ui/libraries/test_libraries_mapping.py @@ -28,7 +28,8 @@ def test_library_es_mapping(search, db, lib_martigny_data, org_martigny): mapping = get_mapping(search.Meta.index) assert mapping lib = Library.create( - lib_martigny_data, dbcommit=True, reindex=True, delete_pid=True) + lib_martigny_data, dbcommit=True, reindex=True, delete_pid=True + ) assert mapping == get_mapping(search.Meta.index) lib.delete(force=True, dbcommit=True, delindex=True) @@ 
-37,15 +38,15 @@ def test_libraries_search_mapping(app, libraries_records): """Test library search mapping.""" search = LibrariesSearch() - assert search.query( - 'query_string', query='Fully Library Restricted Space' - ).count() == 4 - assert search.query('query_string', query='bibliothèque').count() == 1 - assert search.query('query_string', query='library AND Martigny').count() \ - == 1 - assert search.query('match', name='Aproz').count() == 1 + assert ( + search.query("query_string", query="Fully Library Restricted Space").count() + == 4 + ) + assert search.query("query_string", query="bibliothèque").count() == 1 + assert search.query("query_string", query="library AND Martigny").count() == 1 + assert search.query("match", name="Aproz").count() == 1 - es_query = search.query('match', name='Sion').source(['pid']).scan() + es_query = search.query("match", name="Sion").source(["pid"]).scan() pids = [hit.pid for hit in es_query] assert len(pids) == 1 - assert 'lib4' in pids + assert "lib4" in pids diff --git a/tests/ui/loans/test_loans_api.py b/tests/ui/loans/test_loans_api.py index 46f2ab0b7f..4e8fabf222 100644 --- a/tests/ui/loans/test_loans_api.py +++ b/tests/ui/loans/test_loans_api.py @@ -26,28 +26,31 @@ import mock from freezegun import freeze_time from invenio_circulation.proxies import current_circulation -from invenio_circulation.search.api import LoansSearch -from utils import flush_index, get_mapping +from utils import get_mapping from rero_ils.modules.circ_policies.api import DUE_SOON_REMINDER_TYPE from rero_ils.modules.items.models import ItemStatus from rero_ils.modules.libraries.api import Library -from rero_ils.modules.loans.api import Loan, get_expired_request +from rero_ils.modules.loans.api import Loan, LoansSearch, get_expired_request from rero_ils.modules.loans.models import LoanAction, LoanState from rero_ils.modules.loans.tasks import loan_anonymizer -from rero_ils.modules.loans.utils import get_circ_policy, \ - get_default_loan_duration, sum_for_fees +from rero_ils.modules.loans.utils import ( + get_circ_policy, + get_default_loan_duration, + sum_for_fees, +) from rero_ils.modules.locations.api import Location from rero_ils.modules.notifications.api import NotificationsSearch from rero_ils.modules.notifications.models import NotificationType -from rero_ils.modules.notifications.tasks import create_notifications, \ - process_notifications +from rero_ils.modules.notifications.tasks import ( + create_notifications, + process_notifications, +) from rero_ils.modules.patron_transactions.api import PatronTransaction -from rero_ils.modules.patron_transactions.utils import \ - get_transactions_pids_for_patron +from rero_ils.modules.patron_transactions.utils import get_transactions_pids_for_patron -def test_loan_es_mapping(es_clear, db): +def test_loan_es_mapping(search_clear, db): """Test loans elasticsearch mapping.""" search = current_circulation.loan_search_cls mapping = get_mapping(search.Meta.index) @@ -56,7 +59,7 @@ def test_loan_es_mapping(es_clear, db): def test_loans_create(loan_pending_martigny): """Test loan creation.""" - assert loan_pending_martigny.get('state') == LoanState.PENDING + assert loan_pending_martigny.get("state") == LoanState.PENDING def test_loans_properties(loan_pending_martigny, item_lib_fully): @@ -64,28 +67,28 @@ def test_loans_properties(loan_pending_martigny, item_lib_fully): loan = loan_pending_martigny assert loan.request_creation_date assert not loan.rank - assert loan.item_pid_object['value'] == item_lib_fully.pid + assert 
loan.item_pid_object["value"] == item_lib_fully.pid # pending transactions - pid = loan.pop('pid') + pid = loan.pop("pid") assert not loan.has_pending_transaction() - loan['pid'] = pid + loan["pid"] = pid # loan due soon assert not loan.is_loan_due_soon() # age - transaction_date = loan.pop('transaction_date') + transaction_date = loan.pop("transaction_date") assert loan.age() == 0 - loan['transaction_date'] = transaction_date + loan["transaction_date"] = transaction_date def test_loans_indexing(loan_pending_martigny, loc_public_martigny): """Test loan indexing.""" loan = loan_pending_martigny assert loan.reindex() - state = loan['state'] - loan['state'] = LoanState.CANCELLED + state = loan["state"] + loan["state"] = LoanState.CANCELLED loan.update(loan, True, True, True) - flush_index(LoansSearch.Meta.index) + LoansSearch.flush_and_refresh() loc_data = dict(loc_public_martigny) # indexing a terminated loan should work even if linked resources are @@ -96,13 +99,18 @@ def test_loans_indexing(loan_pending_martigny, loc_public_martigny): # restore original data loc_public_martigny.delete(True, True, True) Location.create(loc_data, dbcommit=True, reindex=True) - loan['state'] = state + loan["state"] = state loan.update(loan, True, True, True) def test_item_loans_default_duration( - item_lib_martigny, librarian_martigny, patron_martigny, - loc_public_martigny, circulation_policies, lib_martigny): + item_lib_martigny, + librarian_martigny, + patron_martigny, + loc_public_martigny, + circulation_policies, + lib_martigny, +): """Test default loan duration.""" # create a loan with request is easy @@ -110,31 +118,36 @@ def test_item_loans_default_duration( pickup_location_pid=loc_public_martigny.pid, patron_pid=patron_martigny.pid, transaction_location_pid=loc_public_martigny.pid, - transaction_user_pid=librarian_martigny.pid + transaction_user_pid=librarian_martigny.pid, ) - loan_pid = actions['request']['pid'] + loan_pid = actions["request"]["pid"] loan = Loan.get_record_by_pid(loan_pid) # a new loan without transaction location new_loan = deepcopy(loan) - del new_loan['transaction_location_pid'] + del new_loan["transaction_location_pid"] # should have the same duration with freeze_time(): - assert get_default_loan_duration(new_loan, None) == \ - get_default_loan_duration(loan, None) + assert get_default_loan_duration(new_loan, None) == get_default_loan_duration( + loan, None + ) policy = get_circ_policy(loan) # the checkout duration should be enougth long - assert policy.get('checkout_duration', 0) > 3 + assert policy.get("checkout_duration", 0) > 3 # now in UTC for now_str in [ # winter time - '2021-12-28 06:00:00', '2022-12-28 20:00:00', + "2021-12-28 06:00:00", + "2022-12-28 20:00:00", # winter to summer time - '2022-03-22 06:00:00', '2022-03-22 20:00:00', + "2022-03-22 06:00:00", + "2022-03-22 20:00:00", # summer time - '2022-06-28 05:00:00', '2022-06-28 19:00:00', + "2022-06-28 05:00:00", + "2022-06-28 19:00:00", # summer to winter time - '2022-10-25 05:00:00', '2022-10-25 19:00:00' + "2022-10-25 05:00:00", + "2022-10-25 19:00:00", ]: with freeze_time(now_str, tz_offset=0): # get loan duration @@ -143,20 +156,20 @@ def test_item_loans_default_duration( now = datetime.now(timezone.utc) utc_end_date = now + duration # computed end date at the library timezone - end_date = utc_end_date.astimezone( - tz=lib_martigny.get_timezone()) - expected_utc_end_date = now + timedelta( - days=policy['checkout_duration']) + end_date = utc_end_date.astimezone(tz=lib_martigny.get_timezone()) + 
expected_utc_end_date = now + timedelta(days=policy["checkout_duration"]) # expected end date at the library timezone expected_end_date = expected_utc_end_date.astimezone( - lib_martigny.get_timezone()) - assert end_date.strftime('%Y-%m-%d') == \ - expected_end_date.strftime('%Y-%m-%d') + lib_martigny.get_timezone() + ) + assert end_date.strftime("%Y-%m-%d") == expected_end_date.strftime( + "%Y-%m-%d" + ) assert end_date.hour == 23 assert end_date.minute == 59 # test library closed days - now_str = '2022-02-04 14:00:00' + now_str = "2022-02-04 14:00:00" with freeze_time(now_str, tz_offset=0): # get loan duration duration = get_default_loan_duration(loan, None) @@ -167,24 +180,22 @@ # computed end date at the library timezone end_date = utc_end_date.astimezone(tz=lib_martigny.get_timezone()) # Saturday and Sunday are closed (+2) - expected_utc_end_date = now + timedelta( - days=(policy['checkout_duration'] + 2)) + expected_utc_end_date = now + timedelta(days=(policy["checkout_duration"] + 2)) # expected end date at the library timezone expected_end_date = expected_utc_end_date.astimezone( - lib_martigny.get_timezone()) - assert end_date.strftime('%Y-%m-%d') == \ - expected_end_date.strftime('%Y-%m-%d') + lib_martigny.get_timezone() + ) + assert end_date.strftime("%Y-%m-%d") == expected_end_date.strftime("%Y-%m-%d") assert end_date.hour == 23 assert end_date.minute == 59 item_lib_martigny.cancel_item_request( pid=loan.pid, transaction_location_pid=loc_public_martigny.pid, - transaction_user_pid=librarian_martigny.pid + transaction_user_pid=librarian_martigny.pid, ) -def test_is_due_soon_is_late( - item_on_loan_martigny_patron_and_loan_on_loan): +def test_is_due_soon_is_late(item_on_loan_martigny_patron_and_loan_on_loan): """Test the 'is due soon' and 'is late' methods for a loan.""" item, patron, loan = item_on_loan_martigny_patron_and_loan_on_loan @@ -193,11 +204,11 @@ assert not loan.is_loan_due_soon() cipo = get_circ_policy(loan) reminder = cipo.get_reminder(reminder_type=DUE_SOON_REMINDER_TYPE) - assert reminder.get('days_delay') + assert reminder.get("days_delay") # mock the sysdate to just 5 days before the due_date due_date = ciso8601.parse_datetime(loan.end_date) - mock_date = due_date - timedelta(days=reminder.get('days_delay')) + mock_date = due_date - timedelta(days=reminder.get("days_delay")) with freeze_time(mock_date): assert loan.is_loan_due_soon() @@ -208,17 +219,19 @@ def test_loan_keep_and_to_anonymize( - item_on_loan_martigny_patron_and_loan_on_loan, - item2_on_loan_martigny_patron_and_loan_on_loan, - librarian_martigny, loc_public_martigny): + item_on_loan_martigny_patron_and_loan_on_loan, + item2_on_loan_martigny_patron_and_loan_on_loan, + librarian_martigny, + loc_public_martigny, +): """Test anonymize and keep loan based on open transactions.""" item, patron, loan = item_on_loan_martigny_patron_and_loan_on_loan assert not loan.is_concluded() assert not Loan.can_anonymize(loan_data=loan) params = { - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, } item.checkin(**params) loan = Loan.get_record_by_pid(loan.pid) @@ -246,7 +259,7 @@ # TODO :: Adapt the value depending on the # RERO_ILS_ANONYMISATION_MAX_TIME_LIMIT parameter four_months_ago = datetime.utcnow() - timedelta(days=4 * 31) - 
loan['transaction_date'] = four_months_ago.isoformat() + loan["transaction_date"] = four_months_ago.isoformat() assert loan.is_concluded() assert loan.can_anonymize(loan_data=loan) @@ -265,19 +278,16 @@ # possible library exceptions don't conflict with `library.open_days` # computation end_date = datetime.now(timezone.utc) - timedelta(days=365) - loan['end_date'] = end_date.isoformat() + loan["end_date"] = end_date.isoformat() loan.update(loan, dbcommit=True, reindex=True) - create_notifications(types=[ - NotificationType.DUE_SOON, - NotificationType.OVERDUE - ]) - flush_index(NotificationsSearch.Meta.index) - flush_index(LoansSearch.Meta.index) + create_notifications(types=[NotificationType.DUE_SOON, NotificationType.OVERDUE]) + NotificationsSearch.flush_and_refresh() + LoansSearch.flush_and_refresh() params = { - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, } item.checkin(**params) loan = Loan.get_record_by_pid(loan.pid) @@ -287,8 +297,10 @@ def test_anonymizer_job( - item_on_loan_martigny_patron_and_loan_on_loan, - librarian_martigny, loc_public_martigny): + item_on_loan_martigny_patron_and_loan_on_loan, + librarian_martigny, + loc_public_martigny, +): """Test loan anonymizer job.""" item, patron, loan = item_on_loan_martigny_patron_and_loan_on_loan @@ -300,14 +312,11 @@ end_date = datetime.now(timezone.utc) - timedelta(days=add_days) open_days = loan_lib.get_open_days(end_date) add_days += 1 - loan['end_date'] = end_date.isoformat() + loan["end_date"] = end_date.isoformat() loan.update(loan, dbcommit=True, reindex=True) - create_notifications(types=[ - NotificationType.DUE_SOON, - NotificationType.OVERDUE - ]) - flush_index(NotificationsSearch.Meta.index) - flush_index(LoansSearch.Meta.index) + create_notifications(types=[NotificationType.DUE_SOON, NotificationType.OVERDUE]) + NotificationsSearch.flush_and_refresh() + LoansSearch.flush_and_refresh() # ensure that this loan cannot be anonymized (it's not yet concluded and # could have open fees [depending on the related CIPO]) @@ -319,8 +328,8 @@ # anonymized. patron.set_keep_history(True) params = { - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, } item.checkin(**params) loan = Loan.get_record_by_pid(loan.pid) @@ -337,12 +346,12 @@ # all open transactions about it. patron.set_keep_history(False) one_year_ago = datetime.now() - timedelta(days=365) - loan['transaction_date'] = one_year_ago.isoformat() + loan["transaction_date"] = one_year_ago.isoformat() loan = loan.update(loan, dbcommit=True, reindex=True) # close open transactions and notifications - for pid in get_transactions_pids_for_patron(patron.get('pid'), 'open'): + for pid in get_transactions_pids_for_patron(patron.get("pid"), "open"): transaction = PatronTransaction.get_record_by_pid(pid) - transaction['status'] = 'closed' + transaction["status"] = "closed" transaction.update(transaction, dbcommit=True, reindex=True) # ensure that, after these changes, the loan can be anonymized. 
@@ -355,22 +364,24 @@ def test_anonymizer_job( assert msg == count -@mock.patch.object(Loan, 'can_anonymize', mock.MagicMock(return_value=False)) +@mock.patch.object(Loan, "can_anonymize", mock.MagicMock(return_value=False)) def test_anonymize_candidates( - item2_on_loan_martigny_patron_and_loan_on_loan, patron_martigny, - librarian_martigny, loc_public_martigny + item2_on_loan_martigny_patron_and_loan_on_loan, + patron_martigny, + librarian_martigny, + loc_public_martigny, ): """Test loan anonymize candidates.""" item, patron, loan = item2_on_loan_martigny_patron_and_loan_on_loan if item.status == ItemStatus.ON_SHELF: params = { - 'patron_pid': patron_martigny.pid, - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid, - 'pickup_location_pid': loc_public_martigny.pid + "patron_pid": patron_martigny.pid, + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, + "pickup_location_pid": loc_public_martigny.pid, } item, actions = item.checkout(**params) - loan = Loan.get_record_by_pid(actions[LoanAction.CHECKOUT].get('pid')) + loan = Loan.get_record_by_pid(actions[LoanAction.CHECKOUT].get("pid")) # The loan isn't concluded at this time, so no candidates should be returned candidates = [loan.pid for loan in Loan.get_anonymized_candidates()] @@ -381,25 +392,25 @@ # the anonymize candidate. patron.set_keep_history(True) params = { - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, } item.checkin(**params) loan = Loan.get_record_by_pid(loan.pid) one_year_ago = datetime.now(timezone.utc) - timedelta(days=365) - loan['transaction_date'] = one_year_ago.isoformat() + loan["transaction_date"] = one_year_ago.isoformat() loan = loan.update(loan, dbcommit=True, reindex=True) - flush_index(LoansSearch.Meta.index) + LoansSearch.flush_and_refresh() candidates = [loan.pid for loan in Loan.get_anonymized_candidates()] assert loan.pid in candidates # Set the transaction date to 4 months ago. As the patron wants to keep # history, the loan isn't yet an anonymization candidate. - four_month_ago = datetime.now(timezone.utc) - timedelta(days=4*30) - loan['transaction_date'] = four_month_ago.isoformat() + four_month_ago = datetime.now(timezone.utc) - timedelta(days=4 * 30) + loan["transaction_date"] = four_month_ago.isoformat() loan = loan.update(loan, dbcommit=True, reindex=True) - flush_index(LoansSearch.Meta.index) + LoansSearch.flush_and_refresh() candidates = [loan.pid for loan in Loan.get_anonymized_candidates()] assert loan.pid not in candidates @@ -428,102 +439,108 @@ def get_end_date(delta=0): # * 3 intervals with no gap between them. 
# * no limit on last interval # * no maximum overdue - cipo['overdue_fees'] = { - 'intervals': [ - {'from': 1, 'to': 1, 'fee_amount': 0.10}, - {'from': 2, 'to': 2, 'fee_amount': 0.20}, - {'from': 3, 'fee_amount': 0.50}, + cipo["overdue_fees"] = { + "intervals": [ + {"from": 1, "to": 1, "fee_amount": 0.10}, + {"from": 2, "to": 2, "fee_amount": 0.20}, + {"from": 3, "fee_amount": 0.50}, ] } cipo.update(data=cipo, dbcommit=True, reindex=True) expected_due_amount = [0.1, 0.3, 0.8, 1.3, 1.8, 2.3, 2.8, 3.3, 3.8, 4.3] for delta in range(0, len(expected_due_amount)): end = get_end_date(delta) - loan['end_date'] = end.isoformat() + loan["end_date"] = end.isoformat() loan = loan.update(loan, dbcommit=True, reindex=True) count_open = library.count_open(start_date=end + timedelta(days=1)) if count_open == 0: continue - assert sum_for_fees(loan.get_overdue_fees) == \ - expected_due_amount[count_open - 1] + assert ( + sum_for_fees(loan.get_overdue_fees) == expected_due_amount[count_open - 1] + ) # CASE#2 :: no more overdue after 3 days. # * same definition as before, but add an upper limit to the last # interval - cipo['overdue_fees'] = { - 'intervals': [ - {'from': 1, 'to': 1, 'fee_amount': 0.10}, - {'from': 2, 'to': 2, 'fee_amount': 0.20}, - {'from': 3, 'to': 3, 'fee_amount': 0.50}, + cipo["overdue_fees"] = { + "intervals": [ + {"from": 1, "to": 1, "fee_amount": 0.10}, + {"from": 2, "to": 2, "fee_amount": 0.20}, + {"from": 3, "to": 3, "fee_amount": 0.50}, ] } cipo.update(data=cipo, dbcommit=True, reindex=True) expected_due_amount = [0.1, 0.3, 0.8, 0.8, 0.8, 0.8, 0.8, 0.8, 0.8, 0.8] for delta in range(0, len(expected_due_amount)): end = get_end_date(delta) - loan['end_date'] = end.isoformat() + loan["end_date"] = end.isoformat() loan = loan.update(loan, dbcommit=True, reindex=True) count_open = library.count_open(start_date=end + timedelta(days=1)) if count_open == 0: continue - assert sum_for_fees(loan.get_overdue_fees) == \ - expected_due_amount[count_open - 1] + assert ( + sum_for_fees(loan.get_overdue_fees) == expected_due_amount[count_open - 1] + ) # CASE#3 :: classic setting + maximum overdue. # * 3 intervals with no gap between them. 
# * no limit on last interval # * maximum overdue = 2 - cipo['overdue_fees'] = { - 'intervals': [ - {'from': 1, 'to': 1, 'fee_amount': 0.10}, - {'from': 2, 'to': 2, 'fee_amount': 0.20}, - {'from': 3, 'fee_amount': 0.50}, + cipo["overdue_fees"] = { + "intervals": [ + {"from": 1, "to": 1, "fee_amount": 0.10}, + {"from": 2, "to": 2, "fee_amount": 0.20}, + {"from": 3, "fee_amount": 0.50}, ], - 'maximum_total_amount': 2 + "maximum_total_amount": 2, } cipo.update(data=cipo, dbcommit=True, reindex=True) expected_due_amount = [0.1, 0.3, 0.8, 1.3, 1.8, 2.0, 2.0, 2.0, 2.0, 2.0] for delta in range(0, len(expected_due_amount)): end = get_end_date(delta) - loan['end_date'] = end.isoformat() + loan["end_date"] = end.isoformat() loan = loan.update(loan, dbcommit=True, reindex=True) count_open = library.count_open(start_date=end + timedelta(days=1)) if count_open == 0: continue - assert sum_for_fees(loan.get_overdue_fees) == \ - expected_due_amount[count_open - 1] + assert ( + sum_for_fees(loan.get_overdue_fees) == expected_due_amount[count_open - 1] + ) # CASE#4 :: intervals with gaps # * define 2 intervals with gaps between them # * grace period for first overdue day # * maximum overdue to 1.1 (not a normal step) - cipo['overdue_fees'] = { - 'intervals': [ - {'from': 2, 'to': 3, 'fee_amount': 0.10}, - {'from': 5, 'fee_amount': 0.50} + cipo["overdue_fees"] = { + "intervals": [ + {"from": 2, "to": 3, "fee_amount": 0.10}, + {"from": 5, "fee_amount": 0.50}, ], - 'maximum_total_amount': 1.1 + "maximum_total_amount": 1.1, } cipo.update(data=cipo, dbcommit=True, reindex=True) expected_due_amount = [0, 0.1, 0.2, 0.2, 0.7, 1.1, 1.1, 1.1, 1.1, 1.1, 1.1] for delta in range(0, len(expected_due_amount)): end = get_end_date(delta) - loan['end_date'] = end.isoformat() + loan["end_date"] = end.isoformat() loan = loan.update(loan, dbcommit=True, reindex=True) count_open = library.count_open(start_date=end + timedelta(days=1)) if count_open == 0: continue - assert sum_for_fees(loan.get_overdue_fees) == \ - expected_due_amount[count_open-1] + assert ( + sum_for_fees(loan.get_overdue_fees) == expected_due_amount[count_open - 1] + ) # RESET THE CIPO - del cipo['overdue_fees'] + del cipo["overdue_fees"] cipo.update(data=cipo, dbcommit=True, reindex=True) def test_request_expire_date( item_on_shelf_martigny_patron_and_loan_pending, - librarian_martigny, loc_public_martigny, mailbox + librarian_martigny, + loc_public_martigny, + mailbox, ): """Test request expiration date consistency.""" item, patron, loan = item_on_shelf_martigny_patron_and_loan_pending @@ -535,8 +552,8 @@ cipo = get_circ_policy(loan) original_cipo = deepcopy(cipo) - cipo['allow_requests'] = True - cipo['pickup_hold_duration'] = 44 + cipo["allow_requests"] = True + cipo["pickup_hold_duration"] = 44 cipo = cipo.update(cipo, dbcommit=True, reindex=True) # STEP#1 : VALIDATE THE REQUEST @@ -544,23 +561,23 @@ # loan status becomes ITEM_AT_DESK. Additionally the request expiration # date is stored in the loan information. 
params = { - 'transaction_location_pid': loc_public_martigny.pid, - 'transaction_user_pid': librarian_martigny.pid, - 'pid': loan.pid + "transaction_location_pid": loc_public_martigny.pid, + "transaction_user_pid": librarian_martigny.pid, + "pid": loan.pid, } item, actions = item.validate_request(**params) loan = Loan.get_record_by_pid(loan.pid) assert item.status == ItemStatus.AT_DESK - assert loan['state'] == LoanState.ITEM_AT_DESK - assert 'request_expire_date' in loan - assert 'request_start_date' in loan + assert loan["state"] == LoanState.ITEM_AT_DESK + assert "request_expire_date" in loan + assert "request_start_date" in loan - trans_date = ciso8601.parse_datetime(loan['transaction_date']) - request_expire_date = ciso8601.parse_datetime(loan['request_expire_date']) + trans_date = ciso8601.parse_datetime(loan["transaction_date"]) + request_expire_date = ciso8601.parse_datetime(loan["request_expire_date"]) open_days = (request_expire_date - trans_date).days - assert open_days >= cipo['pickup_hold_duration'] + assert open_days >= cipo["pickup_hold_duration"] # NOTE : we check using '>=' because the exact day could be a closed day. - request_start_date = ciso8601.parse_datetime(loan['request_start_date']) + request_start_date = ciso8601.parse_datetime(loan["request_start_date"]) assert request_start_date.date() == datetime.today().date() # If we check for expired requests now, no result should be found @@ -572,12 +589,12 @@ process_notifications(NotificationType.AVAILABILITY) assert len(mailbox) body = mailbox[-1].body - assert request_expire_date.strftime('%d/%m/%Y') in body + assert request_expire_date.strftime("%d/%m/%Y") in body # ADDITIONAL TESTS :: # A) test exception from loan indexer listener (this is more for code # coverage than a real test). 
- loan['item_pid']['value'] = 'dummy_pid' + loan["item_pid"]["value"] = "dummy_pid" loan.update(loan, dbcommit=True, reindex=True) # RESET THE CIPO diff --git a/tests/ui/loans/test_loans_dumpers.py b/tests/ui/loans/test_loans_dumpers.py index 79595cfcd3..b74bd1d41f 100644 --- a/tests/ui/loans/test_loans_dumpers.py +++ b/tests/ui/loans/test_loans_dumpers.py @@ -24,9 +24,9 @@ def test_loan_circulation_dumper(loan_pending_martigny): """Test loan circulation action dumper.""" data = loan_pending_martigny.dumps(CirculationDumper()) - assert data['state'] - assert data['creation_date'] - assert 'name' in data['patron'] - assert 'barcode' in data['patron'] - assert 'name' in data['pickup_location'] - assert 'library_name' in data['pickup_location'] + assert data["state"] + assert data["creation_date"] + assert "name" in data["patron"] + assert "barcode" in data["patron"] + assert "name" in data["pickup_location"] + assert "library_name" in data["pickup_location"] diff --git a/tests/ui/loans/test_loans_jsonresolver.py b/tests/ui/loans/test_loans_jsonresolver.py index 8cb45f2631..350bdb7fcf 100644 --- a/tests/ui/loans/test_loans_jsonresolver.py +++ b/tests/ui/loans/test_loans_jsonresolver.py @@ -24,10 +24,8 @@ def test_loans_jsonresolver(loan_pending_martigny): """Test loan json resolver.""" - rec = Record.create({ - 'loan': {'$ref': 'https://bib.rero.ch/api/loans/1'} - }) - assert rec.replace_refs().get('loan') == {'type': 'loanid', 'pid': '1'} + rec = Record.create({"loan": {"$ref": "https://bib.rero.ch/api/loans/1"}}) + assert rec.replace_refs().get("loan") == {"type": "loanid", "pid": "1"} # deleted record loan_pending_martigny.delete() @@ -35,8 +33,6 @@ def test_loans_jsonresolver(loan_pending_martigny): type(rec)(rec.replace_refs()).dumps() # non existing record - rec = Record.create({ - 'loan': {'$ref': 'https://bib.rero.ch/api/loans/n_e'} - }) + rec = Record.create({"loan": {"$ref": "https://bib.rero.ch/api/loans/n_e"}}) with pytest.raises(JsonRefError): type(rec)(rec.replace_refs()).dumps() diff --git a/tests/ui/loans/test_loans_operation_logs.py b/tests/ui/loans/test_loans_operation_logs.py index d7efe0c077..00ae48212a 100644 --- a/tests/ui/loans/test_loans_operation_logs.py +++ b/tests/ui/loans/test_loans_operation_logs.py @@ -21,96 +21,96 @@ from copy import deepcopy from invenio_jsonschemas import current_jsonschemas -from utils import flush_index, login_user_for_view +from utils import login_user_for_view -from rero_ils.modules.loans.logs.api import LoanOperationLog, \ - LoanOperationLogsSearch +from rero_ils.modules.loans.logs.api import LoanOperationLog, LoanOperationLogsSearch from rero_ils.modules.patrons.api import Patron -def test_loan_operation_log(client, operation_log_data, - loan_validated_martigny, librarian_martigny, - default_user_password): +def test_loan_operation_log( + client, + operation_log_data, + loan_validated_martigny, + librarian_martigny, + default_user_password, +): """Test operation logs creation.""" login_user_for_view(client, librarian_martigny, default_user_password) - operation_log = LoanOperationLog.create(deepcopy(loan_validated_martigny), - index_refresh='wait_for') - operation_log['$schema'] = current_jsonschemas.path_to_url( - LoanOperationLog._schema) + operation_log = LoanOperationLog.create( + deepcopy(loan_validated_martigny), index_refresh="wait_for" + ) + operation_log["$schema"] = current_jsonschemas.path_to_url(LoanOperationLog._schema) operation_log.validate() log_data = LoanOperationLog.get_record(operation_log.id) - patron = 
Patron.get_record_by_pid(log_data['loan']['patron']['pid']) - assert log_data['operation'] == 'create' - assert log_data['user_name'] == 'Pedronni, Marie' - assert log_data['date'] == loan_validated_martigny['transaction_date'] - assert not log_data['loan']['override_flag'] - assert log_data['loan']['transaction_channel'] == 'system' - assert log_data['loan']['transaction_user']['name'] == 'Pedronni, Marie' - assert log_data['loan'][ - 'transaction_location']['name'] == 'Martigny Library Public Space' - assert log_data['loan'][ - 'pickup_location']['name'] == 'Martigny Library Public Space' - assert log_data['loan']['patron'] == { - 'pid': 'ptrn6', - 'hashed_pid': 'e11ff43bff5be4cf70350e2d15149e29', - 'name': 'Roduit, Louis', - 'type': 'children', - 'age': patron.age, - 'postal_code': '1920', - 'gender': 'male', - 'local_codes': ['code1'] + patron = Patron.get_record_by_pid(log_data["loan"]["patron"]["pid"]) + assert log_data["operation"] == "create" + assert log_data["user_name"] == "Pedronni, Marie" + assert log_data["date"] == loan_validated_martigny["transaction_date"] + assert not log_data["loan"]["override_flag"] + assert log_data["loan"]["transaction_channel"] == "system" + assert log_data["loan"]["transaction_user"]["name"] == "Pedronni, Marie" + assert ( + log_data["loan"]["transaction_location"]["name"] + == "Martigny Library Public Space" + ) + assert ( + log_data["loan"]["pickup_location"]["name"] == "Martigny Library Public Space" + ) + assert log_data["loan"]["patron"] == { + "pid": "ptrn6", + "hashed_pid": "e11ff43bff5be4cf70350e2d15149e29", + "name": "Roduit, Louis", + "type": "children", + "age": patron.age, + "postal_code": "1920", + "gender": "male", + "local_codes": ["code1"], } - assert log_data['loan']['item'] == { - 'category': 'standard', - 'call_number': '001313', - 'document': { - 'pid': 'doc1', - 'title': - 'titre en chinois. Part Number, Part Number = Titolo cinese : ' - 'sottotitolo in cinese', - 'type': - 'docsubtype_other_book' + assert log_data["loan"]["item"] == { + "category": "standard", + "call_number": "001313", + "document": { + "pid": "doc1", + "title": "titre en chinois. 
Part Number, Part Number = Titolo cinese : " + "sottotitolo in cinese", + "type": "docsubtype_other_book", }, - 'holding': { - 'pid': '1', - 'location_name': 'Martigny Library Public Space' - }, - 'library_pid': 'lib1', - 'pid': 'item5' + "holding": {"pid": "1", "location_name": "Martigny Library Public Space"}, + "library_pid": "lib1", + "pid": "item5", } # Test SIP2 loan = deepcopy(loan_validated_martigny) - loan['selfcheck_terminal_id'] = 'ABCDEF' - operation_log = LoanOperationLog.create(loan, index_refresh='wait_for') - operation_log['$schema'] = current_jsonschemas.path_to_url( - LoanOperationLog._schema) + loan["selfcheck_terminal_id"] = "ABCDEF" + operation_log = LoanOperationLog.create(loan, index_refresh="wait_for") + operation_log["$schema"] = current_jsonschemas.path_to_url(LoanOperationLog._schema) operation_log.validate() log_data = LoanOperationLog.get_record(operation_log.id) - assert log_data['loan']['transaction_channel'] == 'sip2' - assert not log_data['loan'].get('transaction_user') + assert log_data["loan"]["transaction_channel"] == "sip2" + assert not log_data["loan"].get("transaction_user") def test_anonymize_logs(item2_on_loan_martigny_patron_and_loan_on_loan): """Test anonymization for loan logs.""" item, patron, loan = item2_on_loan_martigny_patron_and_loan_on_loan - flush_index(LoanOperationLog.index_name) + LoanOperationLogsSearch.flush_and_refresh() - logs = LoanOperationLogsSearch().get_logs_by_record_pid(loan['pid']) + logs = LoanOperationLogsSearch().get_logs_by_record_pid(loan["pid"]) assert len(logs) == 3 for log in logs: - assert log['loan']['patron']['pid'] == patron['pid'] - assert log['loan']['patron']['name'] == 'Roduit, Louis' + assert log["loan"]["patron"]["pid"] == patron["pid"] + assert log["loan"]["patron"]["name"] == "Roduit, Louis" loan.anonymize(dbcommit=True, reindex=True) - logs = LoanOperationLogsSearch().get_logs_by_record_pid(loan['pid']) + logs = LoanOperationLogsSearch().get_logs_by_record_pid(loan["pid"]) assert len(logs) == 3 for log in logs: log = log.to_dict() - md5_hash = hashlib.md5(patron['pid'].encode()).hexdigest() - assert log['loan']['patron']['hashed_pid'] == f'{md5_hash}' - assert log['loan']['patron'].get('name') == 'anonymized' - assert log['loan']['patron'].get('pid') == 'anonymized' + md5_hash = hashlib.md5(patron["pid"].encode()).hexdigest() + assert log["loan"]["patron"]["hashed_pid"] == f"{md5_hash}" + assert log["loan"]["patron"].get("name") == "anonymized" + assert log["loan"]["patron"].get("pid") == "anonymized" diff --git a/tests/ui/local_fields/conftest.py b/tests/ui/local_fields/conftest.py index c289f84855..8cd89792f2 100644 --- a/tests/ui/local_fields/conftest.py +++ b/tests/ui/local_fields/conftest.py @@ -20,6 +20,6 @@ import pytest -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def local_fields_records(local_field_martigny, local_field_sion): """Local fields for test mapping.""" diff --git a/tests/ui/local_fields/test_local_fields_api.py b/tests/ui/local_fields/test_local_fields_api.py index 3d9bfc8eef..019699a744 100644 --- a/tests/ui/local_fields/test_local_fields_api.py +++ b/tests/ui/local_fields/test_local_fields_api.py @@ -23,7 +23,6 @@ import pytest from jsonschema.exceptions import ValidationError -from utils import flush_index from rero_ils.modules.documents.api import Document, DocumentsSearch from rero_ils.modules.items.api import Item @@ -32,35 +31,40 @@ def test_local_fields( - client, org_martigny, document_data, local_field_martigny_data, - item_lib_martigny, 
item_lib_martigny_data + client, + org_martigny, + document_data, + local_field_martigny_data, + item_lib_martigny, + item_lib_martigny_data, ): """Test local fields.""" lofi_data = deepcopy(local_field_martigny_data) - lofi_data.pop('pid', None) - document_data.pop('pid', None) - item_lib_martigny_data.pop('pid', None) + lofi_data.pop("pid", None) + document_data.pop("pid", None) + item_lib_martigny_data.pop("pid", None) # INIT :: Create a new Document and a new LocalField document = Document.create(document_data, dbcommit=True, reindex=True) - lofi_data['parent']['$ref'] = \ - get_ref_for_pid(document.provider.pid_type, document.pid) + lofi_data["parent"]["$ref"] = get_ref_for_pid( + document.provider.pid_type, document.pid + ) local_field = LocalField.create(lofi_data, dbcommit=True, reindex=True) # TEST#1 :: get LocalFields - fields = LocalField.get_local_fields_by_id('doc', document.pid) + fields = LocalField.get_local_fields_by_id("doc", document.pid) assert len(list(fields)) == 1 - fields = LocalField.get_local_fields(document, 'org2') + fields = LocalField.get_local_fields(document, "org2") assert not list(fields) # TEST#2 :: Delete the LocalField # Ensure the document index reflects changes local_field.delete(delindex=True) - flush_index(LocalFieldsSearch.Meta.index) - flush_index(DocumentsSearch.Meta.index) + LocalFieldsSearch.flush_and_refresh() + DocumentsSearch.flush_and_refresh() es_doc = DocumentsSearch().get_record_by_pid(document.pid).to_dict() - assert 'local_fields' not in es_doc + assert "local_fields" not in es_doc # TEST#3 :: Delete the document # - Add new LocalFields on the document @@ -68,33 +72,30 @@ def test_local_fields( # cause to block the document suppression # - Delete the document and ensure the LocalField is now deleted. # - Ensure the LocalField index is coherent - assert 'local_fields' not in document.get_links_to_me() - lofi_data.pop('pid', None) + assert "local_fields" not in document.get_links_to_me() + lofi_data.pop("pid", None) local_field = LocalField.create(lofi_data, dbcommit=True, reindex=True) - assert document.get_links_to_me()['local_fields'] == 1 - assert 'local_fields' not in \ - document.reasons_not_to_delete().get('links', {}) + assert document.get_links_to_me()["local_fields"] == 1 + assert "local_fields" not in document.reasons_not_to_delete().get("links", {}) parent_pid = document.pid document.delete(delindex=True) assert not LocalField.get_record_by_pid(local_field.pid) - fields = LocalField.get_local_fields_by_id('doc', parent_pid) + fields = LocalField.get_local_fields_by_id("doc", parent_pid) assert len(list(fields)) == 0 # TEST#4 :: Same as previous but for item. 
- del item_lib_martigny_data['barcode'] + del item_lib_martigny_data["barcode"] item = Item.create(item_lib_martigny_data, dbcommit=True, reindex=True) - assert 'local_fields' not in item.get_links_to_me() - lofi_data.pop('pid', None) - lofi_data['parent']['$ref'] = \ - get_ref_for_pid(item.provider.pid_type, item.pid) + assert "local_fields" not in item.get_links_to_me() + lofi_data.pop("pid", None) + lofi_data["parent"]["$ref"] = get_ref_for_pid(item.provider.pid_type, item.pid) local_field = LocalField.create(lofi_data, dbcommit=True, reindex=True) - assert item.get_links_to_me()['local_fields'] == 1 - assert 'local_fields' not in \ - item.reasons_not_to_delete().get('links', {}) + assert item.get_links_to_me()["local_fields"] == 1 + assert "local_fields" not in item.reasons_not_to_delete().get("links", {}) parent_pid = item.pid item.delete(delindex=True) assert not LocalField.get_record_by_pid(local_field.pid) - fields = LocalField.get_local_fields_by_id('item', parent_pid) + fields = LocalField.get_local_fields_by_id("item", parent_pid) assert len(list(fields)) == 0 @@ -103,26 +104,26 @@ def test_local_fields_extended_validation( ): """Test local fields extended validation.""" lofi_data = deepcopy(local_field_martigny_data) - lofi_data.pop('pid', None) + lofi_data.pop("pid", None) # TEST#1 :: unknown parent resource - lofi_data['parent']['$ref'] = get_ref_for_pid('doc', 'dummy') + lofi_data["parent"]["$ref"] = get_ref_for_pid("doc", "dummy") with pytest.raises(ValidationError) as err: LocalField.create(lofi_data) assert "Parent record doesn't exists." in str(err) # TEST#2 :: empty fields lofi_data = deepcopy(local_field_martigny_data) - lofi_data.pop('pid', None) - lofi_data['parent']['$ref'] = get_ref_for_pid('doc', document2_ref.pid) - lofi_data['fields'] = {} + lofi_data.pop("pid", None) + lofi_data["parent"]["$ref"] = get_ref_for_pid("doc", document2_ref.pid) + lofi_data["fields"] = {} with pytest.raises(ValidationError) as err: LocalField.create(lofi_data) - assert 'Missing fields.' in str(err) + assert "Missing fields." in str(err) # TEST#3 :: resource unicity for local fields lofi_data = deepcopy(local_field_martigny_data) - lofi_data.pop('pid', None) + lofi_data.pop("pid", None) with pytest.raises(ValidationError) as err: LocalField.create(lofi_data) - assert 'Local fields already exist for this resource.' in str(err) + assert "Local fields already exist for this resource." 
in str(err) diff --git a/tests/ui/local_fields/test_local_fields_jsonresolver.py b/tests/ui/local_fields/test_local_fields_jsonresolver.py index bda88b0eb3..8e28122322 100644 --- a/tests/ui/local_fields/test_local_fields_jsonresolver.py +++ b/tests/ui/local_fields/test_local_fields_jsonresolver.py @@ -27,10 +27,10 @@ def test_local_field_jsonresolver(local_field_martigny): """Test local fields json resolver.""" local_field = local_field_martigny - rec = Record.create({ - 'local_field': {'$ref': 'https://bib.rero.ch/api/local_fields/lofi1'} - }) - assert extracted_data_from_ref(rec.get('local_field')) == 'lofi1' + rec = Record.create( + {"local_field": {"$ref": "https://bib.rero.ch/api/local_fields/lofi1"}} + ) + assert extracted_data_from_ref(rec.get("local_field")) == "lofi1" # deleted record local_field.delete() @@ -38,8 +38,8 @@ def test_local_field_jsonresolver(local_field_martigny): type(rec)(rec.replace_refs()).dumps() # non existing record - rec = Record.create({ - 'local_fields': {'$ref': 'https://bib.rero.ch/api/local_fields/n_e'} - }) + rec = Record.create( + {"local_fields": {"$ref": "https://bib.rero.ch/api/local_fields/n_e"}} + ) with pytest.raises(JsonRefError): type(rec)(rec.replace_refs()).dumps() diff --git a/tests/ui/local_fields/test_local_fields_mapping.py b/tests/ui/local_fields/test_local_fields_mapping.py index f2fff15890..9bb1af27a5 100644 --- a/tests/ui/local_fields/test_local_fields_mapping.py +++ b/tests/ui/local_fields/test_local_fields_mapping.py @@ -23,28 +23,32 @@ def test_local_field_es_mapping( - es, db, org_martigny, document, local_field_martigny_data): + es, db, org_martigny, document, local_field_martigny_data +): """Test local field elasticsearch mapping.""" search = LocalFieldsSearch() mapping = get_mapping(search.Meta.index) assert mapping lf = LocalField.create( - local_field_martigny_data, - dbcommit=True, - reindex=True, - delete_pid=True) + local_field_martigny_data, dbcommit=True, reindex=True, delete_pid=True + ) assert mapping == get_mapping(search.Meta.index) lf.delete(force=True, dbcommit=True, delindex=True) def test_libraries_search_mapping( - app, org_martigny, org_sion, document, local_fields_records): + app, org_martigny, org_sion, document, local_fields_records +): """Test local field search mapping.""" search = LocalFieldsSearch() - assert search.query('query_string', query='Auteur').count() == 2 - assert search.query('query_string', query='Bibliographie').count() == 1 + assert search.query("query_string", query="Auteur").count() == 2 + assert search.query("query_string", query="Bibliographie").count() == 1 - pids = [r.pid for r in search.query( - 'match', fields__field_2='students').source(['pid']).scan()] - assert 'lofi2' in pids + pids = [ + r.pid + for r in search.query("match", fields__field_2="students") + .source(["pid"]) + .scan() + ] + assert "lofi2" in pids diff --git a/tests/ui/locations/conftest.py b/tests/ui/locations/conftest.py index 2833f1c525..42e529d254 100644 --- a/tests/ui/locations/conftest.py +++ b/tests/ui/locations/conftest.py @@ -20,7 +20,7 @@ import pytest -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def locations_records( loc_public_martigny, loc_restricted_martigny, @@ -34,6 +34,6 @@ def locations_records( loc_online_saxon, loc_online_fully, loc_online_sion, - loc_online_aproz + loc_online_aproz, ): """Locations for test mapping.""" diff --git a/tests/ui/locations/test_locations_api.py b/tests/ui/locations/test_locations_api.py index d36c63c900..7c510dca3e 100644 --- 
a/tests/ui/locations/test_locations_api.py +++ b/tests/ui/locations/test_locations_api.py @@ -19,8 +19,6 @@ from __future__ import absolute_import, print_function -from utils import flush_index - from rero_ils.modules.locations.api import Location, LocationsSearch from rero_ils.modules.utils import get_ref_for_pid @@ -31,8 +29,8 @@ def test_location_cannot_delete(item_lib_martigny): location = Location.get_record_by_pid(location_pid) can, reasons = location.can_delete assert not can - assert reasons['links']['holdings'] == 1 - assert reasons['links']['items'] == 1 + assert reasons["links"]["holdings"] == 1 + assert reasons["links"]["items"] == 1 def test_location_organisation_pid(org_martigny, loc_public_martigny): @@ -42,9 +40,12 @@ def test_location_restrict_pickup( - loc_public_martigny, loc_restricted_martigny, loc_public_saxon, - loc_public_martigny_data, loc_restricted_martigny_data, - loc_public_saxon_data + loc_public_martigny, + loc_restricted_martigny, + loc_public_saxon, + loc_public_martigny_data, + loc_restricted_martigny_data, + loc_public_saxon_data, ): """Test automatic modification of restrict_pickup_to field.""" loc_m1 = loc_public_martigny @@ -54,50 +55,57 @@ # STEP 1 :: Init location for test # * ensure `loc_m2` and `loc_sax` are pickup locations # * ensure `loc_m1` defines other locations as pickup restrictions - loc_m1['restrict_pickup_to'] = [ - {'$ref': get_ref_for_pid(Location, loc_m2.pid)}, - {'$ref': get_ref_for_pid(Location, loc_sax.pid)}, + loc_m1["restrict_pickup_to"] = [ + {"$ref": get_ref_for_pid(Location, loc_m2.pid)}, + {"$ref": get_ref_for_pid(Location, loc_sax.pid)}, ] loc_m1 = loc_m1.update(loc_m1, dbcommit=True, reindex=True) - loc_m2['is_pickup'] = True - loc_m2['pickup_name'] = 'loc_m2_pickup' + loc_m2["is_pickup"] = True + loc_m2["pickup_name"] = "loc_m2_pickup" loc_m2 = loc_m2.update(loc_m2, dbcommit=True, reindex=True) - loc_sax['is_pickup'] = True - loc_sax['pickup_name'] = 'loc_sax_pickup' + loc_sax["is_pickup"] = True + loc_sax["pickup_name"] = "loc_sax_pickup" loc_sax = loc_sax.update(loc_sax, dbcommit=True, reindex=True) - flush_index(LocationsSearch.Meta.index) + LocationsSearch.flush_and_refresh() - assert len(LocationsSearch() - .get_record_by_pid(loc_m1.pid) - .to_dict()['restrict_pickup_to'] - ) == 2 + assert ( + len( + LocationsSearch() + .get_record_by_pid(loc_m1.pid) + .to_dict()["restrict_pickup_to"] + ) + == 2 + ) # STEP 2 :: Define that loc_m2 is no longer a pickup location # The `loc_m1` must now only contain `loc_sax` as restriction for # pickup location. ES index should also reflect this change. - del loc_m2['is_pickup'] + del loc_m2["is_pickup"] loc_m2 = loc_m2.update(loc_m2, dbcommit=True, reindex=True) loc_m1 = Location.get_record(loc_m1.id) assert loc_m1.restrict_pickup_to == [loc_sax.pid] - flush_index(LocationsSearch.Meta.index) + LocationsSearch.flush_and_refresh() es_restrictions = [ restriction_loc.pid for restriction_loc in LocationsSearch() - .get_record_by_pid(loc_m1.pid).restrict_pickup_to + .get_record_by_pid(loc_m1.pid) + .restrict_pickup_to ] assert es_restrictions == [loc_sax.pid] # STEP 3 :: Define that loc_sax is no longer a pickup location # The `loc_m1` must not contain any restriction for pickup location. 
# ES index should reflect this change - del loc_sax['is_pickup'] + del loc_sax["is_pickup"] loc_sax = loc_sax.update(loc_sax, dbcommit=True, reindex=True) - assert 'pickup_name' not in loc_sax + assert "pickup_name" not in loc_sax loc_m1 = Location.get_record(loc_m1.id) assert not loc_m1.restrict_pickup_to - flush_index(LocationsSearch.Meta.index) - assert 'restrict_pickup_to' not in \ - LocationsSearch().get_record_by_pid(loc_sax.pid).to_dict() + LocationsSearch.flush_and_refresh() + assert ( + "restrict_pickup_to" + not in LocationsSearch().get_record_by_pid(loc_sax.pid).to_dict() + ) # Reset fixtures loc_m1.update(loc_public_martigny_data, dbcommit=True, reindex=True) diff --git a/tests/ui/locations/test_locations_jsonresolver.py b/tests/ui/locations/test_locations_jsonresolver.py index f38c76f9ac..20a160e664 100644 --- a/tests/ui/locations/test_locations_jsonresolver.py +++ b/tests/ui/locations/test_locations_jsonresolver.py @@ -24,10 +24,10 @@ def test_locations_jsonresolver(loc_public_martigny): """Test location json resolver.""" - rec = Record.create({ - 'location': {'$ref': 'https://bib.rero.ch/api/locations/loc1'} - }) - assert rec.replace_refs().get('location') == {'type': 'loc', 'pid': 'loc1'} + rec = Record.create( + {"location": {"$ref": "https://bib.rero.ch/api/locations/loc1"}} + ) + assert rec.replace_refs().get("location") == {"type": "loc", "pid": "loc1"} # deleted record loc_public_martigny.delete() @@ -35,8 +35,6 @@ def test_locations_jsonresolver(loc_public_martigny): type(rec)(rec.replace_refs()).dumps() # non existing record - rec = Record.create({ - 'location': {'$ref': 'https://bib.rero.ch/api/locations/n_e'} - }) + rec = Record.create({"location": {"$ref": "https://bib.rero.ch/api/locations/n_e"}}) with pytest.raises(JsonRefError): type(rec)(rec.replace_refs()).dumps() diff --git a/tests/ui/locations/test_locations_mapping.py b/tests/ui/locations/test_locations_mapping.py index efa5b00eb9..b5b5eb5896 100644 --- a/tests/ui/locations/test_locations_mapping.py +++ b/tests/ui/locations/test_locations_mapping.py @@ -22,14 +22,16 @@ from rero_ils.modules.locations.api import Location, LocationsSearch -def test_location_es_mapping(search, db, loc_public_martigny_data, - lib_martigny, org_martigny): +def test_location_es_mapping( + search, db, loc_public_martigny_data, lib_martigny, org_martigny +): """Test location elasticsearch mapping.""" search = LocationsSearch() mapping = get_mapping(search.Meta.index) assert mapping loc = Location.create( - loc_public_martigny_data, dbcommit=True, reindex=True, delete_pid=True) + loc_public_martigny_data, dbcommit=True, reindex=True, delete_pid=True + ) new_mapping = get_mapping(search.Meta.index) assert mapping == new_mapping loc.delete(force=True, dbcommit=True, delindex=True) @@ -39,7 +41,7 @@ def test_location_search_mapping(app, locations_records): """Test location search mapping.""" search = LocationsSearch() - c = search.query('match', code='MARTIGNY-PUBLIC').count() + c = search.query("match", code="MARTIGNY-PUBLIC").count() assert c == 1 - c = search.query('match', code='SAXON-PUBLIC').count() + c = search.query("match", code="SAXON-PUBLIC").count() assert c == 1 diff --git a/tests/ui/locations/test_locations_other.py b/tests/ui/locations/test_locations_other.py index 3900d5f08b..99c4fe0a69 100644 --- a/tests/ui/locations/test_locations_other.py +++ b/tests/ui/locations/test_locations_other.py @@ -26,7 +26,8 @@ def test_location_get_all_pickup_locations( - 
patron_martigny, loc_public_martigny, loc_public_sion +): """Test pickup locations retrieval.""" locations = Location.get_pickup_location_pids() assert set(locations) == {loc_public_martigny.pid, loc_public_sion.pid} @@ -36,24 +37,21 @@ def test_location_get_all_pickup_locations( def test_location_get_links_to_me( - loc_public_martigny, loc_public_sion, item_lib_martigny): + loc_public_martigny, loc_public_sion, item_lib_martigny +): """Test pickup locations retrieval.""" - assert loc_public_martigny.get_links_to_me() == { - 'items': 1, - 'holdings': 1 - } + assert loc_public_martigny.get_links_to_me() == {"items": 1, "holdings": 1} assert loc_public_martigny.get_links_to_me(get_pids=True) == { - 'items': ['item1'], - 'holdings': ['1'] + "items": ["item1"], + "holdings": ["1"], } assert loc_public_sion.get_links_to_me() == {} - item_lib_martigny['temporary_location'] = { - '$ref': f'https://bib.rero.ch/api/locations/{loc_public_sion.pid}'} - item_lib_martigny.update(data=item_lib_martigny, dbcommit=True, - reindex=True) + item_lib_martigny["temporary_location"] = { + "$ref": f"https://bib.rero.ch/api/locations/{loc_public_sion.pid}" + } + item_lib_martigny.update(data=item_lib_martigny, dbcommit=True, reindex=True) ItemsSearch.flush_and_refresh() - assert loc_public_sion.get_links_to_me() == {'items': 1} - assert loc_public_sion.get_links_to_me(get_pids=True) == { - 'items': ['item1']} + assert loc_public_sion.get_links_to_me() == {"items": 1} + assert loc_public_sion.get_links_to_me(get_pids=True) == {"items": ["item1"]} diff --git a/tests/ui/notifications/test_notifications_api.py b/tests/ui/notifications/test_notifications_api.py index b79d693331..1fc0ffc314 100644 --- a/tests/ui/notifications/test_notifications_api.py +++ b/tests/ui/notifications/test_notifications_api.py @@ -27,20 +27,23 @@ from rero_ils.modules.items.api import Item from rero_ils.modules.notifications.dispatcher import Dispatcher from rero_ils.modules.notifications.models import NotificationType -from rero_ils.modules.notifications.subclasses.availability import \ - AvailabilityCirculationNotification -from rero_ils.modules.notifications.subclasses.circulation import \ - CirculationNotification -from rero_ils.modules.notifications.subclasses.claim_issue import \ - ClaimSerialIssueNotification +from rero_ils.modules.notifications.subclasses.availability import ( + AvailabilityCirculationNotification, +) +from rero_ils.modules.notifications.subclasses.circulation import ( + CirculationNotification, +) +from rero_ils.modules.notifications.subclasses.claim_issue import ( + ClaimSerialIssueNotification, +) from rero_ils.modules.utils import get_ref_for_pid def test_notification_organisation_pid( - app, org_martigny, notification_availability_martigny): + app, org_martigny, notification_availability_martigny +): """Test organisation pid has been added during the indexing.""" - assert notification_availability_martigny.organisation_pid == \ - org_martigny.pid + assert notification_availability_martigny.organisation_pid == org_martigny.pid # test notification can_delete can, reasons = notification_availability_martigny.can_delete @@ -50,68 +53,80 @@ def test_notification_organisation_pid( def test_notification_mail(notification_late_martigny, lib_martigny, mailbox): """Test notification creation. - Patron communication channel is mail. + Patron communication channel is mail. 
""" mailbox.clear() - Dispatcher.dispatch_notifications(notification_late_martigny['pid']) - recipient = lib_martigny.get_email( - notification_late_martigny['notification_type']) + Dispatcher.dispatch_notifications(notification_late_martigny["pid"]) + recipient = lib_martigny.get_email(notification_late_martigny["notification_type"]) assert recipient assert mailbox[0].recipients == [recipient] def test_notification_email(notification_late_sion, patron_sion, mailbox): """Test overdue notification. - Patron communication channel is email. + Patron communication channel is email. """ mailbox.clear() - Dispatcher.dispatch_notifications(notification_late_sion['pid']) - assert mailbox[0].recipients == [patron_sion.dumps()['email']] + Dispatcher.dispatch_notifications(notification_late_sion["pid"]) + assert mailbox[0].recipients == [patron_sion.dumps()["email"]] -def test_notification_email_availability(notification_availability_sion, - lib_sion, patron_sion, mailbox): +def test_notification_email_availability( + notification_availability_sion, lib_sion, patron_sion, mailbox +): """Test availability notification. - Patron communication channel is email. + Patron communication channel is email. """ # test availability context fields context = AvailabilityCirculationNotification.get_notification_context( notifications=[notification_availability_sion] ) - for key in ['delay', 'library', 'loans', 'patron']: + for key in ["delay", "library", "loans", "patron"]: assert key in context - loan_ctx = context['loans'][0] - for key in ['document', 'pickup_name', 'pickup_until']: + loan_ctx = context["loans"][0] + for key in ["document", "pickup_name", "pickup_until"]: assert key in loan_ctx - for key in ['barcode', 'call_numbers', 'library_name', 'location_name', - 'title_text']: - assert key in loan_ctx['document'] - for key in ['address', 'barcode', 'first_name', 'last_name']: - assert key in context['patron'] + for key in [ + "barcode", + "call_numbers", + "library_name", + "location_name", + "title_text", + ]: + assert key in loan_ctx["document"] + for key in ["address", "barcode", "first_name", "last_name"]: + assert key in context["patron"] mailbox.clear() - Dispatcher.dispatch_notifications(notification_availability_sion['pid']) - assert mailbox[0].recipients == [patron_sion.dumps()['email']] + Dispatcher.dispatch_notifications(notification_availability_sion["pid"]) + assert mailbox[0].recipients == [patron_sion.dumps()["email"]] -def test_notification_email_aggregated(notification_availability_martigny, - notification2_availability_martigny, - lib_martigny, patron_martigny, mailbox): +def test_notification_email_aggregated( + notification_availability_martigny, + notification2_availability_martigny, + lib_martigny, + patron_martigny, + mailbox, +): """Test availability notification. - Patron communication channel is email. + Patron communication channel is email. """ mailbox.clear() - Dispatcher.dispatch_notifications([ - notification_availability_martigny['pid'], - notification2_availability_martigny['pid'] - ], verbose=True) + Dispatcher.dispatch_notifications( + [ + notification_availability_martigny["pid"], + notification2_availability_martigny["pid"], + ], + verbose=True, + ) assert len(mailbox) == 1 - recipient = '???' - for notification_setting in lib_martigny.get('notification_settings'): - if notification_setting['type'] == NotificationType.AVAILABILITY: - recipient = notification_setting['email'] + recipient = "???" 
+ for notification_setting in lib_martigny.get("notification_settings"): + if notification_setting["type"] == NotificationType.AVAILABILITY: + recipient = notification_setting["email"] assert mailbox[0].recipients == [recipient] @@ -120,14 +135,14 @@ def test_notification_properties(client, holding_lib_martigny_w_patterns): record = CirculationNotification({}) record.__class__ = CirculationNotification - assert record.get_recipients('cc') == [] + assert record.get_recipients("cc") == [] def test_notification_extended_validation(client, item_lib_martigny): """Test notification extended validation process.""" item = item_lib_martigny - data = {'notification_type': NotificationType.AT_DESK} + data = {"notification_type": NotificationType.AT_DESK} record = ClaimSerialIssueNotification(data) record.__class__ = ClaimSerialIssueNotification @@ -135,21 +150,22 @@ def test_notification_extended_validation(client, item_lib_martigny): record.validate() assert "isn't an ClaimSerialIssueNotification" in str(err) - record['notification_type'] = NotificationType.CLAIM_ISSUE - record['context'] = {'item': {'$ref': get_ref_for_pid('item', 'dummy')}} + record["notification_type"] = NotificationType.CLAIM_ISSUE + record["context"] = {"item": {"$ref": get_ref_for_pid("item", "dummy")}} with pytest.raises(ValidationError) as err: record.validate() - assert '`item` field must be specified into `context`' in str(err) - assert record.item_pid == 'dummy' + assert "`item` field must be specified into `context`" in str(err) + assert record.item_pid == "dummy" assert not record.get_notification_context() - record['context'] = {'item': {'$ref': get_ref_for_pid('item', item.pid)}} + record["context"] = {"item": {"$ref": get_ref_for_pid("item", item.pid)}} with pytest.raises(ValidationError) as err: record.validate() - assert '`item` field must reference an serial issue' in str(err) + assert "`item` field must reference an serial issue" in str(err) - record['context']['recipients'] = [{'type': 'to', 'address': 'cc@mail.co'}] - with mock.patch.object(Item, 'is_issue', True), \ - pytest.raises(ValidationError) as err: + record["context"]["recipients"] = [{"type": "to", "address": "cc@mail.co"}] + with mock.patch.object(Item, "is_issue", True), pytest.raises( + ValidationError + ) as err: record.validate() - assert 'Recipient type `to` and `reply_to` are required' in str(err) + assert "Recipient type `to` and `reply_to` are required" in str(err) diff --git a/tests/ui/notifications/test_notifications_mapping.py b/tests/ui/notifications/test_notifications_mapping.py index 1347044c27..46a642fa6e 100644 --- a/tests/ui/notifications/test_notifications_mapping.py +++ b/tests/ui/notifications/test_notifications_mapping.py @@ -20,12 +20,10 @@ from utils import get_mapping -from rero_ils.modules.notifications.api import Notification, \ - NotificationsSearch +from rero_ils.modules.notifications.api import Notification, NotificationsSearch -def test_notification_es_mapping( - dummy_notification, loan_validated_martigny): +def test_notification_es_mapping(dummy_notification, loan_validated_martigny): """Test notification elasticsearch mapping.""" search = NotificationsSearch() @@ -33,8 +31,8 @@ def test_notification_es_mapping( assert mapping notif = deepcopy(dummy_notification) - validated_pid = loan_validated_martigny.get('pid') - loan_ref = f'https://bib.rero.ch/api/loans/{validated_pid}' - notif['context']['loan']['$ref'] = loan_ref + validated_pid = loan_validated_martigny.get("pid") + loan_ref = 
f"https://bib.rero.ch/api/loans/{validated_pid}" + notif["context"]["loan"]["$ref"] = loan_ref Notification.create(notif, dbcommit=True, delete_pid=True, reindex=True) assert mapping == get_mapping(search.Meta.index) diff --git a/tests/ui/notifications/test_notifications_utils.py b/tests/ui/notifications/test_notifications_utils.py index 9e249643a9..5adab59c6f 100644 --- a/tests/ui/notifications/test_notifications_utils.py +++ b/tests/ui/notifications/test_notifications_utils.py @@ -28,9 +28,7 @@ from rero_ils.modules.notifications.utils import calculate_notification_amount -def test_notification_calculate_notification_amount( - notification_due_soon_martigny -): +def test_notification_calculate_notification_amount(notification_due_soon_martigny): """Test calculate amount for a notifications.""" notification = notification_due_soon_martigny loan = Loan.get_record_by_pid(notification.loan_pid) @@ -48,21 +46,21 @@ def test_notification_calculate_notification_amount( # to this new setting fee_amount = randint(1, 100) reminder = cipo.get_reminder(DUE_SOON_REMINDER_TYPE) - reminder['fee_amount'] = fee_amount - cipo['reminders'] = [reminder] + reminder["fee_amount"] = fee_amount + cipo["reminders"] = [reminder] cipo.update(cipo, dbcommit=True, reindex=True) assert calculate_notification_amount(notification) == fee_amount # STEP 2 :: Update the notification context to simulate it's the second # 'due_soon' reminders notification. Check the notification # amount accord to this new setting - counter = notification['context']['reminder_counter'] - notification['context']['reminder_counter'] = counter + 1 + counter = notification["context"]["reminder_counter"] + notification["context"]["reminder_counter"] = counter + 1 assert calculate_notification_amount(notification) == 0 # STEP 3 :: Delete the cipo reminders setting. 
As there is no setting, the # caclulted amount must be equals to 0 - del cipo['reminders'] + del cipo["reminders"] cipo.update(cipo, dbcommit=True, reindex=True) assert calculate_notification_amount(notification) == 0 diff --git a/tests/ui/operation_logs/test_operation_logs_api.py b/tests/ui/operation_logs/test_operation_logs_api.py index 1899ad0249..5d693a9c13 100644 --- a/tests/ui/operation_logs/test_operation_logs_api.py +++ b/tests/ui/operation_logs/test_operation_logs_api.py @@ -22,130 +22,146 @@ import pytest from invenio_search import current_search -from utils import flush_index -from rero_ils.modules.operation_logs.api import OperationLog, \ - OperationLogsSearch +from rero_ils.modules.operation_logs.api import OperationLog, OperationLogsSearch -def test_operation_create(client, es_clear, operation_log_data): +def test_operation_create(client, search_clear, operation_log_data): """Test operation logs creation.""" - oplg = OperationLog.create(operation_log_data, index_refresh='wait_for') + oplg = OperationLog.create(operation_log_data, index_refresh="wait_for") assert oplg assert oplg.id # need to compare with dumps as it has resolve $refs data = OperationLog.get_record(oplg.id) - del data['_created'] - del data['_updated'] + del data["_created"] + del data["_updated"] assert data == OperationLog(operation_log_data).dumps() tmp = deepcopy(operation_log_data) - tmp['date'] = '2020-01-21T09:51:52.879533+00:00' - oplg2 = OperationLog.create(tmp, index_refresh='wait_for') - assert OperationLog.get_indices() == set(( - 'operation_logs-2020', - f'operation_logs-{datetime.now().year}' - )) + tmp["date"] = "2020-01-21T09:51:52.879533+00:00" + oplg2 = OperationLog.create(tmp, index_refresh="wait_for") + assert OperationLog.get_indices() == set( + ("operation_logs-2020", f"operation_logs-{datetime.now().year}") + ) assert OperationLog.get_record(oplg.id) assert OperationLog.get_record(oplg2.id) # clean up the index assert OperationLog.delete_indices() -def test_operation_bulk_index(client, es_clear, operation_log_data): +def test_operation_bulk_index(client, search_clear, operation_log_data): """Test operation logs bulk creation.""" data = [] for date in [ - '2020-01-21T09:51:52.879533+00:00', - '2020-02-21T09:51:52.879533+00:00', - '2020-03-21T09:51:52.879533+00:00', - '2020-04-21T09:51:52.879533+00:00', - '2021-01-21T09:51:52.879533+00:00', - '2021-02-21T09:51:52.879533+00:00' + "2020-01-21T09:51:52.879533+00:00", + "2020-02-21T09:51:52.879533+00:00", + "2020-03-21T09:51:52.879533+00:00", + "2020-04-21T09:51:52.879533+00:00", + "2021-01-21T09:51:52.879533+00:00", + "2021-02-21T09:51:52.879533+00:00", ]: tmp = deepcopy(operation_log_data) - tmp['date'] = date + tmp["date"] = date data.append(tmp) OperationLog.bulk_index(data) # flush the index for the test current_search.flush_and_refresh(OperationLog.index_name) - assert OperationLog.get_indices() == set(( - 'operation_logs-2020', - 'operation_logs-2021' - )) + assert OperationLog.get_indices() == set( + ("operation_logs-2020", "operation_logs-2021") + ) with pytest.raises(Exception) as excinfo: - data[0]['operation'] = dict(name='foo') + data[0]["operation"] = dict(name="foo") OperationLog.bulk_index(data) assert "BulkIndexError" in str(excinfo.value) # clean up the index assert OperationLog.delete_indices() -def test_operation_update(app, es_clear, operation_log_data, monkeypatch): +def test_operation_update(app, search_clear, operation_log_data, monkeypatch): """Test update log.""" - operation_log = 
OperationLog.create(deepcopy(operation_log_data), - index_refresh='wait_for') + operation_log = OperationLog.create( + deepcopy(operation_log_data), index_refresh="wait_for" + ) log_data = OperationLog.get_record(operation_log.id) - assert log_data['record']['value'] == 'item4' + assert log_data["record"]["value"] == "item4" # Update OK - log_data['record']['value'] = '1234' - OperationLog.update(log_data.id, log_data['date'], log_data) + log_data["record"]["value"] = "1234" + OperationLog.update(log_data.id, log_data["date"], log_data) log_data = OperationLog.get_record(operation_log.id) - assert log_data['record']['value'] == '1234' + assert log_data["record"]["value"] == "1234" # Update KO monkeypatch.setattr( - 'elasticsearch_dsl.Document.update', lambda *args, **kwargs: 'error') + "elasticsearch_dsl.Document.update", lambda *args, **kwargs: "error" + ) with pytest.raises(Exception) as exception: - OperationLog.update(log_data.id, log_data['date'], log_data) - assert str(exception) == 'Operation log cannot be updated.' + OperationLog.update(log_data.id, log_data["date"], log_data) + assert str(exception) == "Operation log cannot be updated." -def test_operation_record_create(document, item_lib_martigny, - local_entity_person, ill_request_martigny): +def test_operation_record_create( + document, item_lib_martigny, local_entity_person, ill_request_martigny +): """Test operation log record creation.""" - flush_index(OperationLog.index_name) + OperationLogsSearch.flush_and_refresh() records = [ - ('doc', {'record': dict(type='doc', value=document.pid)}), - ('hold', { - 'record': dict( - type='hold', - value='1', - library_pid='lib1', - organisation_pid='org1')}), - ('item', { - 'record': dict( - type='item', - value=item_lib_martigny.pid, - library_pid='lib1', - organisation_pid='org1')}), - ('locent', { - 'record': dict( - type='locent', value=local_entity_person.pid)}), - ('illr', { - 'ill_request': { - 'status': 'pending', - 'library_pid': 'lib1', - 'loan_status': 'PENDING' + ("doc", {"record": dict(type="doc", value=document.pid)}), + ( + "hold", + { + "record": dict( + type="hold", value="1", library_pid="lib1", organisation_pid="org1" + ) }, - 'record': dict( - type='illr', - value=ill_request_martigny.pid, - organisation_pid='org1')}) + ), + ( + "item", + { + "record": dict( + type="item", + value=item_lib_martigny.pid, + library_pid="lib1", + organisation_pid="org1", + ) + }, + ), + ("locent", {"record": dict(type="locent", value=local_entity_person.pid)}), + ( + "illr", + { + "ill_request": { + "status": "pending", + "library_pid": "lib1", + "loan_status": "PENDING", + }, + "record": dict( + type="illr", value=ill_request_martigny.pid, organisation_pid="org1" + ), + }, + ), ] - for (rec_type, extra) in records: + for rec_type, extra in records: res = next( OperationLogsSearch() - .filter('term', record__type=rec_type) - .filter('term', operation='create') + .filter("term", record__type=rec_type) + .filter("term", operation="create") .scan() ).to_dict() - assert set(res.keys()) == set([ - 'date', 'record', 'operation', 'user_name', '_created', 'pid', - '_updated', '$schema' - ] + list(extra)) - assert res['operation'] == 'create' - assert res['user_name'] == 'system' + assert set(res.keys()) == set( + [ + "date", + "record", + "operation", + "user_name", + "_created", + "pid", + "_updated", + "$schema", + ] + + list(extra) + ) + assert res["operation"] == "create" + assert res["user_name"] == "system" for key, value in extra.items(): assert res[key] == value diff --git 
a/tests/ui/organisations/test_organisations_api.py b/tests/ui/organisations/test_organisations_api.py index c63be958b9..475921a79c 100644 --- a/tests/ui/organisations/test_organisations_api.py +++ b/tests/ui/organisations/test_organisations_api.py @@ -20,8 +20,7 @@ from __future__ import absolute_import, print_function from rero_ils.modules.organisations.api import Organisation -from rero_ils.modules.organisations.api import \ - organisation_id_fetcher as fetcher +from rero_ils.modules.organisations.api import organisation_id_fetcher as fetcher from rero_ils.modules.providers import append_fixtures_new_identifiers @@ -37,32 +36,31 @@ def test_organisation_libararies(org_martigny, vendor_martigny): def test_organisation_organisation_pid(org_martigny): """Test organisation_pid property.""" - assert org_martigny.organisation_pid == 'org1' + assert org_martigny.organisation_pid == "org1" def test_organisation_create(app, db, org_martigny_data, org_sion_data): """Test organisation creation.""" - org_martigny_data['pid'] = '1' + org_martigny_data["pid"] = "1" org = Organisation.create(org_martigny_data, dbcommit=True, reindex=True) assert org == org_martigny_data - assert org.get('pid') == '1' + assert org.get("pid") == "1" can, reasons = org.can_delete assert can assert reasons == {} - org = Organisation.get_record_by_pid('1') + org = Organisation.get_record_by_pid("1") assert org == org_martigny_data fetched_pid = fetcher(org.id, org) - assert fetched_pid.pid_value == '1' - assert fetched_pid.pid_type == 'org' + assert fetched_pid.pid_value == "1" + assert fetched_pid.pid_type == "org" - org_sion_data['pid'] = '2' - org = Organisation.create( - org_sion_data, dbcommit=True, reindex=True) - assert org.get('pid') == '2' + org_sion_data["pid"] = "2" + org = Organisation.create(org_sion_data, dbcommit=True, reindex=True) + assert org.get("pid") == "2" identifier = Organisation.provider.identifier - append_fixtures_new_identifiers(identifier, ['1', '2'], 'org', limit=1) + append_fixtures_new_identifiers(identifier, ["1", "2"], "org", limit=1) assert identifier.next() == identifier.max() == 3 diff --git a/tests/ui/organisations/test_organisations_jsonresolver.py b/tests/ui/organisations/test_organisations_jsonresolver.py index 6bbb5c62fc..41e90a04f7 100644 --- a/tests/ui/organisations/test_organisations_jsonresolver.py +++ b/tests/ui/organisations/test_organisations_jsonresolver.py @@ -24,12 +24,10 @@ def test_organisations_jsonresolver(app, organisation_temp): """Test organisation resolver.""" - rec = Record.create({ - 'organisation': {'$ref': 'https://bib.rero.ch/api/organisations/1'} - }) - assert rec.replace_refs().get('organisation') == { - 'type': 'org', 'pid': '1' - } + rec = Record.create( + {"organisation": {"$ref": "https://bib.rero.ch/api/organisations/1"}} + ) + assert rec.replace_refs().get("organisation") == {"type": "org", "pid": "1"} # deleted record organisation_temp.delete() @@ -37,8 +35,8 @@ def test_organisations_jsonresolver(app, organisation_temp): type(rec)(rec.replace_refs()).dumps() # non existing record - rec = Record.create({ - 'organisation': {'$ref': 'https://bib.rero.ch/api/organisations/n_e'} - }) + rec = Record.create( + {"organisation": {"$ref": "https://bib.rero.ch/api/organisations/n_e"}} + ) with pytest.raises(JsonRefError): type(rec)(rec.replace_refs()).dumps() diff --git a/tests/ui/patron_transaction_events/test_patron_transaction_events_api.py b/tests/ui/patron_transaction_events/test_patron_transaction_events_api.py index 56af940017..b77872fe58 100644 --- 
a/tests/ui/patron_transaction_events/test_patron_transaction_events_api.py +++ b/tests/ui/patron_transaction_events/test_patron_transaction_events_api.py @@ -24,15 +24,14 @@ import pytest from jsonschema.exceptions import ValidationError -from rero_ils.modules.patron_transaction_events.api import \ - PatronTransactionEvent -from rero_ils.modules.patron_transactions.api import \ - patron_transaction_id_fetcher as fetcher +from rero_ils.modules.patron_transaction_events.api import PatronTransactionEvent +from rero_ils.modules.patron_transactions.api import ( + patron_transaction_id_fetcher as fetcher, +) def test_patron_transaction_event_properties( - patron_transaction_overdue_event_martigny, - patron_transaction_overdue_martigny + patron_transaction_overdue_event_martigny, patron_transaction_overdue_martigny ): """Test patron transaction event properties.""" pttr = patron_transaction_overdue_martigny @@ -40,44 +39,46 @@ def test_patron_transaction_event_properties( events = PatronTransactionEvent.get_events_by_transaction_id(pttr.pid) assert ptre.pid in [hit.pid for hit in events] assert ptre.parent_pid == pttr.pid - assert PatronTransactionEvent\ - .get_initial_amount_transaction_event(pttr.pid) == pttr.total_amount + assert ( + PatronTransactionEvent.get_initial_amount_transaction_event(pttr.pid) + == pttr.total_amount + ) def test_patron_transaction_event_create( - db, es_clear, patron_transaction_overdue_event_martigny): + db, search_clear, patron_transaction_overdue_event_martigny +): """Test patron transaction event creation.""" patron_event = deepcopy(patron_transaction_overdue_event_martigny) - patron_event['type'] = 'no_type' + patron_event["type"] = "no_type" with pytest.raises(ValidationError): PatronTransactionEvent.create(patron_event, delete_pid=True) db.session.rollback() # Check amount is multiple of 0.01 - patron_event['type'] = 'fee' - patron_event['amount'] = 2.23333 + patron_event["type"] = "fee" + patron_event["amount"] = 2.23333 with pytest.raises(ValidationError) as err: PatronTransactionEvent.create(patron_event, delete_pid=True) - assert 'must be multiple of 0.01' in str(err) + assert "must be multiple of 0.01" in str(err) db.session.rollback() next_pid = PatronTransactionEvent.provider.identifier.next() - patron_event['amount'] = 2.2 + patron_event["amount"] = 2.2 record = PatronTransactionEvent.create(patron_event, delete_pid=True) next_pid += 1 assert record == patron_event - assert record.get('pid') == str(next_pid) - assert record.get('amount') == 2.20 + assert record.get("pid") == str(next_pid) + assert record.get("amount") == 2.20 pttr = PatronTransactionEvent.get_record_by_pid(str(next_pid)) assert pttr == patron_event fetched_pid = fetcher(pttr.id, pttr) assert fetched_pid.pid_value == str(next_pid) - assert fetched_pid.pid_type == 'pttr' + assert fetched_pid.pid_type == "pttr" -def test_patron_transaction_event_can_delete( - patron_transaction_overdue_event_martigny): +def test_patron_transaction_event_can_delete(patron_transaction_overdue_event_martigny): """Test can delete.""" can, reasons = patron_transaction_overdue_event_martigny.can_delete assert can diff --git a/tests/ui/patron_transaction_events/test_patron_transaction_events_jsonresolver.py b/tests/ui/patron_transaction_events/test_patron_transaction_events_jsonresolver.py index 036b14b33f..7a66a1a716 100644 --- a/tests/ui/patron_transaction_events/test_patron_transaction_events_jsonresolver.py +++ b/tests/ui/patron_transaction_events/test_patron_transaction_events_jsonresolver.py @@ -24,16 
+24,16 @@ from rero_ils.modules.utils import extracted_data_from_ref -def test_patron_transaction_event_jsonresolver( - patron_transaction_overdue_event_saxon): +def test_patron_transaction_event_jsonresolver(patron_transaction_overdue_event_saxon): """Test patron transaction event json resolver.""" rec = Record.create( - {'patron_transaction_event': { - '$ref': 'https://bib.rero.ch/api/patron_transaction_events/1' + { + "patron_transaction_event": { + "$ref": "https://bib.rero.ch/api/patron_transaction_events/1" } - } + } ) - assert extracted_data_from_ref(rec.get('patron_transaction_event')) == '1' + assert extracted_data_from_ref(rec.get("patron_transaction_event")) == "1" # deleted record patron_transaction_overdue_event_saxon.delete() @@ -42,10 +42,11 @@ def test_patron_transaction_event_jsonresolver( # non existing record rec = Record.create( - {'patron_transaction': - { - '$ref': - 'https://bib.rero.ch/api/patron_transaction_events/n_e'}} + { + "patron_transaction": { + "$ref": "https://bib.rero.ch/api/patron_transaction_events/n_e" + } + } ) with pytest.raises(JsonRefError): type(rec)(rec.replace_refs()).dumps() diff --git a/tests/ui/patron_transaction_events/test_patron_transaction_events_mapping.py b/tests/ui/patron_transaction_events/test_patron_transaction_events_mapping.py index 640a70c817..c8d8211475 100644 --- a/tests/ui/patron_transaction_events/test_patron_transaction_events_mapping.py +++ b/tests/ui/patron_transaction_events/test_patron_transaction_events_mapping.py @@ -18,12 +18,15 @@ """Patron transaction event record mapping tests.""" from utils import get_mapping -from rero_ils.modules.patron_transaction_events.api import \ - PatronTransactionEvent, PatronTransactionEventsSearch +from rero_ils.modules.patron_transaction_events.api import ( + PatronTransactionEvent, + PatronTransactionEventsSearch, +) def test_patron_transaction_event_es_mapping( - es, db, patron_transaction_overdue_event_martigny): + es, db, patron_transaction_overdue_event_martigny +): """Test patron_transaction event elasticsearch mapping.""" search = PatronTransactionEventsSearch() mapping = get_mapping(search.Meta.index) @@ -32,7 +35,7 @@ def test_patron_transaction_event_es_mapping( patron_transaction_overdue_event_martigny, dbcommit=True, reindex=True, - delete_pid=True + delete_pid=True, ) assert mapping == get_mapping(search.Meta.index) ptre.delete(force=True, dbcommit=True, delindex=True) diff --git a/tests/ui/patron_transactions/test_patron_transactions_api.py b/tests/ui/patron_transactions/test_patron_transactions_api.py index 30fa3f78b9..de879485a6 100644 --- a/tests/ui/patron_transactions/test_patron_transactions_api.py +++ b/tests/ui/patron_transactions/test_patron_transactions_api.py @@ -24,14 +24,15 @@ import pytest from rero_ils.modules.patron_transactions.api import PatronTransaction -from rero_ils.modules.patron_transactions.api import \ - patron_transaction_id_fetcher as fetcher +from rero_ils.modules.patron_transactions.api import ( + patron_transaction_id_fetcher as fetcher, +) def test_patron_transaction_properties( patron_transaction_overdue_martigny, patron_transaction_overdue_event_martigny, - lib_martigny + lib_martigny, ): """Test patron transaction properties.""" pttr = patron_transaction_overdue_martigny @@ -41,32 +42,35 @@ def test_patron_transaction_properties( def test_patron_transaction_create( - db, es_clear, patron_transaction_overdue_martigny, org_martigny): + db, search_clear, patron_transaction_overdue_martigny, org_martigny +): """Test patron transaction 
creation.""" patron_transaction = deepcopy(patron_transaction_overdue_martigny) - patron_transaction['status'] = 'no_status' + patron_transaction["status"] = "no_status" import jsonschema + with pytest.raises(jsonschema.exceptions.ValidationError): PatronTransaction.create(patron_transaction, delete_pid=True) db.session.rollback() next_pid = PatronTransaction.provider.identifier.next() - patron_transaction['status'] = 'open' + patron_transaction["status"] = "open" record = PatronTransaction.create(patron_transaction, delete_pid=True) next_pid += 1 assert record == patron_transaction - assert record.get('pid') == str(next_pid) + assert record.get("pid") == str(next_pid) pttr = PatronTransaction.get_record_by_pid(str(next_pid)) assert pttr == patron_transaction fetched_pid = fetcher(pttr.id, pttr) assert fetched_pid.pid_value == str(next_pid) - assert fetched_pid.pid_type == 'pttr' + assert fetched_pid.pid_type == "pttr" can, reasons = patron_transaction_overdue_martigny.can_delete assert not can - assert reasons['links']['events'] + assert reasons["links"]["events"] - assert patron_transaction_overdue_martigny.currency == \ - org_martigny.get('default_currency') + assert patron_transaction_overdue_martigny.currency == org_martigny.get( + "default_currency" + ) diff --git a/tests/ui/patron_transactions/test_patron_transactions_jsonresolver.py b/tests/ui/patron_transactions/test_patron_transactions_jsonresolver.py index 7be124de9b..4c76c340ec 100644 --- a/tests/ui/patron_transactions/test_patron_transactions_jsonresolver.py +++ b/tests/ui/patron_transactions/test_patron_transactions_jsonresolver.py @@ -27,12 +27,13 @@ def test_patron_transaction_jsonresolver(patron_transaction_overdue_martigny): """Test patron_transaction json resolver.""" rec = Record.create( - {'patron_transaction': { - '$ref': 'https://bib.rero.ch/api/patron_transactions/1' + { + "patron_transaction": { + "$ref": "https://bib.rero.ch/api/patron_transactions/1" } - } + } ) - assert extracted_data_from_ref(rec.get('patron_transaction')) == '1' + assert extracted_data_from_ref(rec.get("patron_transaction")) == "1" # delete attached events to patron transaction for patron_event in patron_transaction_overdue_martigny.events: @@ -44,9 +45,11 @@ def test_patron_transaction_jsonresolver(patron_transaction_overdue_martigny): # non existing record rec = Record.create( - {'patron_transaction': - { - '$ref': 'https://bib.rero.ch/api/patron_transactions/n_e'}} + { + "patron_transaction": { + "$ref": "https://bib.rero.ch/api/patron_transactions/n_e" + } + } ) with pytest.raises(JsonRefError): type(rec)(rec.replace_refs()).dumps() diff --git a/tests/ui/patron_transactions/test_patron_transactions_mapping.py b/tests/ui/patron_transactions/test_patron_transactions_mapping.py index c2da76b66f..7ff7cb294d 100644 --- a/tests/ui/patron_transactions/test_patron_transactions_mapping.py +++ b/tests/ui/patron_transactions/test_patron_transactions_mapping.py @@ -18,12 +18,13 @@ """Patron transaction Record mapping tests.""" from utils import get_mapping -from rero_ils.modules.patron_transactions.api import PatronTransaction, \ - PatronTransactionsSearch +from rero_ils.modules.patron_transactions.api import ( + PatronTransaction, + PatronTransactionsSearch, +) -def test_patron_transaction_es_mapping( - es, db, patron_transaction_overdue_martigny): +def test_patron_transaction_es_mapping(search, db, patron_transaction_overdue_martigny): """Test patron_transaction elasticsearch mapping.""" search = PatronTransactionsSearch() mapping = 
get_mapping(search.Meta.index) @@ -32,7 +33,7 @@ def test_patron_transaction_es_mapping( patron_transaction_overdue_martigny, dbcommit=True, reindex=True, - delete_pid=True + delete_pid=True, ) assert mapping == get_mapping(search.Meta.index) for event in pttr.events: diff --git a/tests/ui/patron_types/conftest.py b/tests/ui/patron_types/conftest.py index 266dca2048..2db4475e7b 100644 --- a/tests/ui/patron_types/conftest.py +++ b/tests/ui/patron_types/conftest.py @@ -20,10 +20,8 @@ import pytest -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def patron_types_records( - patron_type_adults_martigny, - patron_type_youngsters_sion, - patron_type_grown_sion + patron_type_adults_martigny, patron_type_youngsters_sion, patron_type_grown_sion ): """Patron types for test mapping.""" diff --git a/tests/ui/patron_types/test_patron_types_api.py b/tests/ui/patron_types/test_patron_types_api.py index b7838b3a1b..d241b542ad 100644 --- a/tests/ui/patron_types/test_patron_types_api.py +++ b/tests/ui/patron_types/test_patron_types_api.py @@ -19,37 +19,31 @@ from __future__ import absolute_import, print_function -from rero_ils.modules.patron_types.api import PatronType, \ - patron_type_id_fetcher +from rero_ils.modules.patron_types.api import PatronType, patron_type_id_fetcher from rero_ils.modules.utils import extracted_data_from_ref -def test_patron_type_create( - db, org_martigny, patron_type_children_martigny_data): +def test_patron_type_create(db, org_martigny, patron_type_children_martigny_data): """Test patron type creation.""" - ptty = PatronType.create( - patron_type_children_martigny_data, delete_pid=True) + ptty = PatronType.create(patron_type_children_martigny_data, delete_pid=True) assert ptty == patron_type_children_martigny_data - assert ptty.get('pid') == '1' + assert ptty.get("pid") == "1" - ptty = PatronType.get_record_by_pid('1') + ptty = PatronType.get_record_by_pid("1") assert ptty == patron_type_children_martigny_data fetched_pid = patron_type_id_fetcher(ptty.id, ptty) - assert fetched_pid.pid_value == '1' - assert fetched_pid.pid_type == 'ptty' + assert fetched_pid.pid_value == "1" + assert fetched_pid.pid_type == "ptty" -def test_patron_type_exist_name_and_organisation_pid( - patron_type_children_martigny): +def test_patron_type_exist_name_and_organisation_pid(patron_type_children_martigny): """Test patron type name uniqueness.""" - org_pid = extracted_data_from_ref( - patron_type_children_martigny.get('organisation') - ) + org_pid = extracted_data_from_ref(patron_type_children_martigny.get("organisation")) assert PatronType.exist_name_and_organisation_pid( - patron_type_children_martigny.get('name'), org_pid) - assert not PatronType.exist_name_and_organisation_pid( - 'not exists yet', org_pid) + patron_type_children_martigny.get("name"), org_pid + ) + assert not PatronType.exist_name_and_organisation_pid("not exists yet", org_pid) def test_patron_type_can_delete(patron_type_children_martigny): diff --git a/tests/ui/patron_types/test_patron_types_jsonresolver.py b/tests/ui/patron_types/test_patron_types_jsonresolver.py index 95b26c9fbe..dab38d98ce 100644 --- a/tests/ui/patron_types/test_patron_types_jsonresolver.py +++ b/tests/ui/patron_types/test_patron_types_jsonresolver.py @@ -24,12 +24,10 @@ def test_patron_types_jsonresolver(app, patron_type_tmp): """Test patron type resolver.""" - rec = Record.create({ - 'patron_type': {'$ref': 'https://bib.rero.ch/api/patron_types/1'} - }) - assert rec.replace_refs().get('patron_type') == { - 'type': 'ptty', 'pid': '1' - } + 
rec = Record.create( + {"patron_type": {"$ref": "https://bib.rero.ch/api/patron_types/1"}} + ) + assert rec.replace_refs().get("patron_type") == {"type": "ptty", "pid": "1"} # deleted record patron_type_tmp.delete() @@ -37,8 +35,8 @@ def test_patron_types_jsonresolver(app, patron_type_tmp): type(rec)(rec.replace_refs()).dumps() # non existing record - rec = Record.create({ - 'patron_type': {'$ref': 'https://bib.rero.ch/api/patron_types/n_e'} - }) + rec = Record.create( + {"patron_type": {"$ref": "https://bib.rero.ch/api/patron_types/n_e"}} + ) with pytest.raises(JsonRefError): type(rec)(rec.replace_refs()).dumps() diff --git a/tests/ui/patron_types/test_patron_types_mapping.py b/tests/ui/patron_types/test_patron_types_mapping.py index d7aa7a02f5..42bfa8d9a8 100644 --- a/tests/ui/patron_types/test_patron_types_mapping.py +++ b/tests/ui/patron_types/test_patron_types_mapping.py @@ -22,8 +22,7 @@ from rero_ils.modules.patron_types.api import PatronType, PatronTypesSearch -def test_patron_type_es_mapping( - org_martigny, patron_type_children_martigny_data): +def test_patron_type_es_mapping(org_martigny, patron_type_children_martigny_data): """Test patron types es mapping.""" search = PatronTypesSearch() mapping = get_mapping(search.Meta.index) @@ -32,7 +31,7 @@ def test_patron_type_es_mapping( patron_type_children_martigny_data, dbcommit=True, reindex=True, - delete_pid=False + delete_pid=False, ) assert mapping == get_mapping(search.Meta.index) @@ -41,17 +40,18 @@ def test_patron_types_search_mapping(app, patron_types_records): """Test patron type search mapping.""" search = PatronTypesSearch() - c = search.query('query_string', query='patrons').count() + c = search.query("query_string", query="patrons").count() # there is one more result from test_patron_type_es_mapping function assert c == 4 - c = search.query('match', name='patrons').count() + c = search.query("match", name="patrons").count() assert c == 0 - c = search.query('match', name='children').count() + c = search.query("match", name="children").count() # there is one more result from test_patron_type_es_mapping function assert c == 1 - pids = [r.pid for r in search.query( - 'match', name='children').source(['pid']).scan()] - assert 'ptty1' in pids + pids = [ + r.pid for r in search.query("match", name="children").source(["pid"]).scan() + ] + assert "ptty1" in pids diff --git a/tests/ui/patrons/conftest.py b/tests/ui/patrons/conftest.py index c8da9370a0..6a099f1076 100644 --- a/tests/ui/patrons/conftest.py +++ b/tests/ui/patrons/conftest.py @@ -20,11 +20,6 @@ import pytest -@pytest.fixture(scope='module') -def patrons_records( - patron_martigny, - patron2_martigny, - librarian_sion, - librarian_saxon -): +@pytest.fixture(scope="module") +def patrons_records(patron_martigny, patron2_martigny, librarian_sion, librarian_saxon): """Patrons for test mapping.""" diff --git a/tests/ui/patrons/test_patrons_api.py b/tests/ui/patrons/test_patrons_api.py index dac76d5134..3203d856ef 100644 --- a/tests/ui/patrons/test_patrons_api.py +++ b/tests/ui/patrons/test_patrons_api.py @@ -25,65 +25,62 @@ from invenio_accounts.models import User from jsonschema.exceptions import ValidationError -from rero_ils.modules.patrons.api import Patron, PatronsSearch, \ - patron_id_fetcher +from rero_ils.modules.patrons.api import Patron, PatronsSearch, patron_id_fetcher from rero_ils.modules.patrons.models import CommunicationChannel from rero_ils.modules.patrons.utils import create_user_from_data from rero_ils.modules.users.models import UserRole -def 
test_patron_extended_validation(app, patron_martigny, - patron_martigny_data_tmp, patron2_martigny, - patron_sion, patron_sion_data_tmp): +def test_patron_extended_validation( + app, + patron_martigny, + patron_martigny_data_tmp, + patron2_martigny, + patron_sion, + patron_sion_data_tmp, +): """Test that a patron barcode must be unique within organisation""" - ds = app.extensions['invenio-accounts'].datastore + ds = app.extensions["invenio-accounts"].datastore # check that we cannot create a patron with an existing barcode with pytest.raises(ValidationError) as err: - created_patron_martigny = create_user_from_data( - patron_martigny_data_tmp - ) - Patron.create( - created_patron_martigny, - delete_pid=True - ) - assert 'already taken' in str(err) + created_patron_martigny = create_user_from_data(patron_martigny_data_tmp) + Patron.create(created_patron_martigny, delete_pid=True) + assert "already taken" in str(err) # check if resource update doesn't trigger validation error on own barcode - patron_martigny['patron']['barcode'].append('duplicate') + patron_martigny["patron"]["barcode"].append("duplicate") assert patron_martigny.update(patron_martigny, dbcommit=True, reindex=True) # check that we cannot update a patron to an existing barcode - patron2_martigny['patron']['barcode'].append('duplicate') + patron2_martigny["patron"]["barcode"].append("duplicate") with pytest.raises(ValidationError) as err: patron2_martigny.update(patron2_martigny) - assert 'already taken' in str(err) + assert "already taken" in str(err) # check that we can create a patron even with existing barcode in another # organisation - patron_sion_barcode = patron_sion['patron']['barcode'] - created_patron_sion = \ - create_user_from_data(patron_sion_data_tmp) - created_patron_sion['patron']['barcode']\ - = [patron_martigny['patron']['barcode'][0]] - assert (created_user := Patron.create(created_patron_sion, dbcommit=True, - reindex=True, delete_pid=True)) + patron_sion_barcode = patron_sion["patron"]["barcode"] + created_patron_sion = create_user_from_data(patron_sion_data_tmp) + created_patron_sion["patron"]["barcode"] = [patron_martigny["patron"]["barcode"][0]] + assert ( + created_user := Patron.create( + created_patron_sion, dbcommit=True, reindex=True, delete_pid=True + ) + ) # check that we can update a patron with existing barcode in another # organisation - patron_sion['patron']['barcode'] = ['duplicate'] + patron_sion["patron"]["barcode"] = ["duplicate"] assert patron_sion.update(patron_sion) # clean up fixtures - patron_sion['patron']['barcode'] = patron_sion_barcode - patron_martigny['patron']['barcode'].pop() - patron2_martigny['patron']['barcode'].pop() - patron_sion.update( - patron_sion, commit=True, dbcommit=True, reindex=True) - patron_martigny.update( - patron_martigny, commit=True, dbcommit=True, reindex=True) - patron2_martigny.update( - patron2_martigny, commit=True, dbcommit=True, reindex=True) + patron_sion["patron"]["barcode"] = patron_sion_barcode + patron_martigny["patron"]["barcode"].pop() + patron2_martigny["patron"]["barcode"].pop() + patron_sion.update(patron_sion, commit=True, dbcommit=True, reindex=True) + patron_martigny.update(patron_martigny, commit=True, dbcommit=True, reindex=True) + patron2_martigny.update(patron2_martigny, commit=True, dbcommit=True, reindex=True) # clean up created user created_user.delete(True, True, True) @@ -91,15 +88,20 @@ def test_patron_extended_validation(app, patron_martigny, ds.delete_user(user) -def test_patron_create(app, roles, lib_martigny, 
librarian_martigny_data_tmp, - patron_type_adults_martigny, mailbox): +def test_patron_create( + app, + roles, + lib_martigny, + librarian_martigny_data_tmp, + patron_type_adults_martigny, + mailbox, +): """Test Patron creation.""" - ds = app.extensions['invenio-accounts'].datastore - email = librarian_martigny_data_tmp.get('email') + ds = app.extensions["invenio-accounts"].datastore + email = librarian_martigny_data_tmp.get("email") l_martigny_data_tmp = librarian_martigny_data_tmp - librarian_martigny_data_tmp = create_user_from_data( - librarian_martigny_data_tmp) + librarian_martigny_data_tmp = create_user_from_data(librarian_martigny_data_tmp) # wrong_librarian_martigny_data_tmp = deepcopy(librarian_martigny_data_tmp) # wrong_librarian_martigny_data_tmp.pop('first_name') # with pytest.raises(ValidationError): @@ -110,60 +112,56 @@ def test_patron_create(app, roles, lib_martigny, librarian_martigny_data_tmp, # ) wrong_librarian_martigny_data_tmp = deepcopy(librarian_martigny_data_tmp) - wrong_librarian_martigny_data_tmp.pop('libraries') + wrong_librarian_martigny_data_tmp.pop("libraries") with pytest.raises(ValidationError) as err: - Patron.create( - wrong_librarian_martigny_data_tmp, - dbcommit=True, - delete_pid=True - ) - assert str(err.value) == 'Missing libraries' + Patron.create(wrong_librarian_martigny_data_tmp, dbcommit=True, delete_pid=True) + assert str(err.value) == "Missing libraries" wrong_librarian_martigny_data_tmp = deepcopy(librarian_martigny_data_tmp) - wrong_librarian_martigny_data_tmp.setdefault('patron', { - 'expiration_date': '2023-10-07', - 'barcode': ['2050124311'], - 'type': { - '$ref': 'https://bib.rero.ch/api/patron_types/ptty2' + wrong_librarian_martigny_data_tmp.setdefault( + "patron", + { + "expiration_date": "2023-10-07", + "barcode": ["2050124311"], + "type": {"$ref": "https://bib.rero.ch/api/patron_types/ptty2"}, + "communication_channel": CommunicationChannel.EMAIL, + "communication_language": "ita", }, - 'communication_channel': CommunicationChannel.EMAIL, - 'communication_language': 'ita' - }) - wrong_librarian_martigny_data_tmp['patron']['subscriptions'] = [{ - 'start_date': '2000-01-01', - 'end_date': '2001-01-01', - 'patron_type': {'$ref': 'https://bib.rero.ch/api/patron_types/xxx'}, - 'patron_transaction': { - '$ref': 'https://bib.rero.ch/api/patron_transactions/xxx' - }, - }] + ) + wrong_librarian_martigny_data_tmp["patron"]["subscriptions"] = [ + { + "start_date": "2000-01-01", + "end_date": "2001-01-01", + "patron_type": {"$ref": "https://bib.rero.ch/api/patron_types/xxx"}, + "patron_transaction": { + "$ref": "https://bib.rero.ch/api/patron_transactions/xxx" + }, + } + ] with pytest.raises(ValidationError): - Patron.create( - wrong_librarian_martigny_data_tmp, - dbcommit=True, - delete_pid=True - ) + Patron.create(wrong_librarian_martigny_data_tmp, dbcommit=True, delete_pid=True) # no data has been created assert len(mailbox) == 0 # assert User.query.count() == 0 # assert UserProfile.query.count() == 0 - ptrn = Patron.create( - librarian_martigny_data_tmp, - dbcommit=True, - delete_pid=False - ) - user = User.query.filter_by(id=ptrn.get('user_id')).first() + ptrn = Patron.create(librarian_martigny_data_tmp, dbcommit=True, delete_pid=False) + user = User.query.filter_by(id=ptrn.get("user_id")).first() assert user and user.active - for field in ['first_name', 'last_name', 'street', 'postal_code', 'city', - 'home_phone']: + for field in [ + "first_name", + "last_name", + "street", + "postal_code", + "city", + "home_phone", + ]: assert 
user.user_profile.get(field) == l_martigny_data_tmp.get(field) - assert user.username == l_martigny_data_tmp.get('username') - assert user.user_profile.get('birth_date') == \ - l_martigny_data_tmp.get('birth_date') + assert user.username == l_martigny_data_tmp.get("username") + assert user.user_profile.get("birth_date") == l_martigny_data_tmp.get("birth_date") user_roles = [r.name for r in user.roles] - assert set(user_roles) == set(ptrn.get('roles')) + assert set(user_roles) == set(ptrn.get("roles")) # TODO: make these checks during the librarian POST creation # assert len(mailbox) == 1 # assert re.search(r'localhost/lost-password', mailbox[0].body) @@ -175,7 +173,7 @@ def test_patron_create(app, roles, lib_martigny, librarian_martigny_data_tmp, # r'Best regards', mailbox[0].body # ) # assert ptrn.get('email') in mailbox[0].recipients - librarian_martigny_data_tmp['user_id'] = ptrn.user.id + librarian_martigny_data_tmp["user_id"] = ptrn.user.id assert ptrn == librarian_martigny_data_tmp ptrn = Patron.get_record_by_pid(ptrn.pid) @@ -183,24 +181,22 @@ def test_patron_create(app, roles, lib_martigny, librarian_martigny_data_tmp, fetched_pid = patron_id_fetcher(ptrn.id, ptrn) assert fetched_pid.pid_value == ptrn.pid - assert fetched_pid.pid_type == 'ptrn' + assert fetched_pid.pid_type == "ptrn" # set librarian roles = UserRole.LIBRARIAN_ROLES - ptrn.update({'roles': roles}, dbcommit=True) + ptrn.update({"roles": roles}, dbcommit=True) user_roles = [r.name for r in user.roles] assert set(user_roles) == set(roles) data = { - 'roles': UserRole.ALL_ROLES, - 'patron': { - 'expiration_date': '2023-10-07', - 'barcode': ['2050124311'], - 'type': { - '$ref': 'https://bib.rero.ch/api/patron_types/ptty2' - }, - 'communication_channel': CommunicationChannel.EMAIL, - 'communication_language': 'ita' - } + "roles": UserRole.ALL_ROLES, + "patron": { + "expiration_date": "2023-10-07", + "barcode": ["2050124311"], + "type": {"$ref": "https://bib.rero.ch/api/patron_types/ptty2"}, + "communication_channel": CommunicationChannel.EMAIL, + "communication_language": "ita", + }, } ptrn.update(data, dbcommit=True) user_roles = [r.name for r in user.roles] @@ -215,11 +211,11 @@ def test_patron_create(app, roles, lib_martigny, librarian_martigny_data_tmp, assert not user.roles # assert len(mailbox) == 1 # patron does not exists anymore - ptrn = Patron.get_record_by_pid('ptrn2') + ptrn = Patron.get_record_by_pid("ptrn2") assert ptrn is None - ptrn = Patron.get_record_by_pid('ptrn2', with_deleted=True) + ptrn = Patron.get_record_by_pid("ptrn2", with_deleted=True) assert ptrn == {} - assert ptrn.persistent_identifier.pid_value == 'ptrn2' + assert ptrn.persistent_identifier.pid_value == "ptrn2" # remove patron ptrn.delete(True, True, True) # clean up the user @@ -227,39 +223,33 @@ def test_patron_create(app, roles, lib_martigny, librarian_martigny_data_tmp, @pytest.mark.skip(reason="no way of currently testing this") -def test_patron_create_without_email(app, roles, patron_type_children_martigny, - patron_martigny_data_tmp, mailbox): +def test_patron_create_without_email( + app, roles, patron_type_children_martigny, patron_martigny_data_tmp, mailbox +): """Test Patron creation without an email.""" patron_martigny_data_tmp = deepcopy(patron_martigny_data_tmp) # no data has been created mailbox.clear() - del patron_martigny_data_tmp['email'] + del patron_martigny_data_tmp["email"] - patron_martigny_data_tmp = \ - create_user_from_data(patron_martigny_data_tmp) + patron_martigny_data_tmp = 
create_user_from_data(patron_martigny_data_tmp) from rero_ils.modules.users.api import User + patron_martigny_data_tmp = User.remove_fields(patron_martigny_data_tmp) # communication channel require at least one email - patron_martigny_data_tmp['patron']['communication_channel'] = 'email' + patron_martigny_data_tmp["patron"]["communication_channel"] = "email" with pytest.raises(ValidationError): - Patron.create( - patron_martigny_data_tmp, - dbcommit=True, - delete_pid=True - ) + Patron.create(patron_martigny_data_tmp, dbcommit=True, delete_pid=True) # create a patron without email - patron_martigny_data_tmp['patron']['communication_channel'] = \ - CommunicationChannel.MAIL - ptrn = Patron.create( - patron_martigny_data_tmp, - dbcommit=True, - delete_pid=True - ) + patron_martigny_data_tmp["patron"][ + "communication_channel" + ] = CommunicationChannel.MAIL + ptrn = Patron.create(patron_martigny_data_tmp, dbcommit=True, delete_pid=True) # user has been created - user = User.query.filter_by(id=ptrn.get('user_id')).first() + user = User.query.filter_by(id=ptrn.get("user_id")).first() assert user assert not user.email assert user == ptrn.user @@ -317,51 +307,57 @@ def test_patron_create_without_email(app, roles, patron_type_children_martigny, # assert rero_id_user.profile.username == ptrn.get('username') == 'reroid' # clean up created users - ds = app.extensions['invenio-accounts'].datastore + ds = app.extensions["invenio-accounts"].datastore ds.delete_user(user) def test_patron_properties( - org_martigny, patron_martigny, librarian_martigny, patron2_martigny, - lib_martigny, system_librarian_martigny + org_martigny, + patron_martigny, + librarian_martigny, + patron2_martigny, + lib_martigny, + system_librarian_martigny, ): """Test patron properties methods.""" # TEST `organisation.pid` search = PatronsSearch() - librarian = next(search.filter('term', pid=librarian_martigny.pid).scan()) - patron = next(search.filter('term', pid=patron_martigny.pid).scan()) + librarian = next(search.filter("term", pid=librarian_martigny.pid).scan()) + patron = next(search.filter("term", pid=patron_martigny.pid).scan()) assert patron.organisation.pid == org_martigny.pid assert librarian.organisation.pid == org_martigny.pid # TEST `manageable_library_pids` assert librarian_martigny.manageable_library_pids == [lib_martigny.pid] - assert system_librarian_martigny.manageable_library_pids == \ - org_martigny.get_libraries_pids() + assert ( + system_librarian_martigny.manageable_library_pids + == org_martigny.get_libraries_pids() + ) # TEST `blocked` - patron = Patron.get_patron_by_email(patron_martigny.dumps().get('email')) - assert patron.patron.get('blocked') is False + patron = Patron.get_patron_by_email(patron_martigny.dumps().get("email")) + assert patron.patron.get("blocked") is False # TEST `blocked` is absent - patron = Patron.get_patron_by_email(patron2_martigny.dumps().get('email')) - assert 'blocked' not in patron + patron = Patron.get_patron_by_email(patron2_martigny.dumps().get("email")) + assert "blocked" not in patron # TEST `profile_url` - assert org_martigny.get('code') in patron2_martigny.profile_url + assert org_martigny.get("code") in patron2_martigny.profile_url def test_get_patron(patron_martigny): """Test patron retrieval.""" patron = patron_martigny - assert Patron.get_patron_by_email(patron.dumps().get('email')) == patron - assert not Patron.get_patron_by_email('not exists') - assert Patron.get_patron_by_barcode( - patron.patron.get('barcode')[0]) == patron - assert not 
Patron.get_patron_by_barcode('not exists') + assert Patron.get_patron_by_email(patron.dumps().get("email")) == patron + assert not Patron.get_patron_by_email("not exists") + assert Patron.get_patron_by_barcode(patron.patron.get("barcode")[0]) == patron + assert not Patron.get_patron_by_barcode("not exists") assert Patron.get_patrons_by_user(patron.user)[0] == patron class user: pass + assert not Patron.get_patrons_by_user(user) @@ -396,11 +392,13 @@ def test_patron_multiple(patron_sion_multiple, patron2_martigny, lib_martigny): patron2_roles = {r.name for r in patron2_martigny.user.roles} patron_and_librarian_roles = UserRole.LIBRARIAN_ROLES + [UserRole.PATRON] assert all(r in patron_and_librarian_roles for r in patron2_roles) - data['roles'] = [UserRole.PATRON] - del data['libraries'] + data["roles"] = [UserRole.PATRON] + del data["libraries"] patron_sion_multiple.update(data, dbcommit=True, reindex=True) assert patron2_martigny.user.roles == [UserRole.PATRON] - assert Patron.get_record_by_pid(patron_sion_multiple.pid).get('roles') == \ - [UserRole.PATRON] - assert Patron.get_record_by_pid(patron2_martigny.pid).get('roles') == \ - [UserRole.PATRON] + assert Patron.get_record_by_pid(patron_sion_multiple.pid).get("roles") == [ + UserRole.PATRON + ] + assert Patron.get_record_by_pid(patron2_martigny.pid).get("roles") == [ + UserRole.PATRON + ] diff --git a/tests/ui/patrons/test_patrons_dumpers.py b/tests/ui/patrons/test_patrons_dumpers.py index 359bf27e64..54d925f28a 100644 --- a/tests/ui/patrons/test_patrons_dumpers.py +++ b/tests/ui/patrons/test_patrons_dumpers.py @@ -22,7 +22,7 @@ def test_patron_properties_dumper(patron_martigny): """Test patron properties dumper.""" - dumper = PatronPropertiesDumper(['formatted_name', 'dummy']) + dumper = PatronPropertiesDumper(["formatted_name", "dummy"]) dumped_data = patron_martigny.dumps(dumper=dumper) - assert 'formatted_name' in dumped_data - assert 'dummy' not in dumped_data + assert "formatted_name" in dumped_data + assert "dummy" not in dumped_data diff --git a/tests/ui/patrons/test_patrons_jsonresolver.py b/tests/ui/patrons/test_patrons_jsonresolver.py index f25afe4e9d..5b8e32a28a 100644 --- a/tests/ui/patrons/test_patrons_jsonresolver.py +++ b/tests/ui/patrons/test_patrons_jsonresolver.py @@ -24,12 +24,8 @@ def test_patrons_jsonresolver(system_librarian_martigny): """Test patron json resolver.""" - rec = Record.create({ - 'patron': {'$ref': 'https://bib.rero.ch/api/patrons/ptrn1'} - }) - assert rec.replace_refs().get('patron') == { - 'type': 'ptrn', 'pid': 'ptrn1' - } + rec = Record.create({"patron": {"$ref": "https://bib.rero.ch/api/patrons/ptrn1"}}) + assert rec.replace_refs().get("patron") == {"type": "ptrn", "pid": "ptrn1"} # deleted record system_librarian_martigny.delete() @@ -37,8 +33,6 @@ def test_patrons_jsonresolver(system_librarian_martigny): type(rec)(rec.replace_refs()).dumps() # non existing record - rec = Record.create({ - 'patron': {'$ref': 'https://bib.rero.ch/api/patrons/n_e'} - }) + rec = Record.create({"patron": {"$ref": "https://bib.rero.ch/api/patrons/n_e"}}) with pytest.raises(JsonRefError): type(rec)(rec.replace_refs()).dumps() diff --git a/tests/ui/patrons/test_patrons_mapping.py b/tests/ui/patrons/test_patrons_mapping.py index ac438f3d24..11c4cdcce3 100644 --- a/tests/ui/patrons/test_patrons_mapping.py +++ b/tests/ui/patrons/test_patrons_mapping.py @@ -22,8 +22,7 @@ from rero_ils.modules.patrons.api import PatronsSearch -def test_patron_es_mapping( - roles, search, lib_martigny, librarian_martigny_data_tmp): +def 
test_patron_es_mapping(roles, search, lib_martigny, librarian_martigny_data_tmp): """Test patron elasticsearch mapping.""" search = PatronsSearch() mapping = get_mapping(search.Meta.index) @@ -35,12 +34,12 @@ def test_patron_search_mapping(app, patrons_records, librarian_saxon): """Test patron search mapping.""" search = PatronsSearch() - assert search.query('query_string', query='Roduit').count() == 1 - assert search.query('match', first_name='Eric').count() == 1 - assert search.query('match', last_name='Moret').count() == 1 - assert search.query('match', first_name='Elena').count() == 1 + assert search.query("query_string", query="Roduit").count() == 1 + assert search.query("match", first_name="Eric").count() == 1 + assert search.query("match", last_name="Moret").count() == 1 + assert search.query("match", first_name="Elena").count() == 1 - eq_query = search.query('match', first_name='Eléna').source(['pid']).scan() + eq_query = search.query("match", first_name="Eléna").source(["pid"]).scan() pids = [hit.pid for hit in eq_query] assert len(pids) == 1 assert librarian_saxon.pid in pids diff --git a/tests/ui/patrons/test_patrons_ui.py b/tests/ui/patrons/test_patrons_ui.py index b982a14e5c..2ab3fec9d9 100644 --- a/tests/ui/patrons/test_patrons_ui.py +++ b/tests/ui/patrons/test_patrons_ui.py @@ -29,43 +29,43 @@ def test_patrons_logged_user(client, librarian_martigny): """Test logged user info API.""" # No logged user (only settings are present) - res = client.get(url_for('patrons.logged_user')) + res = client.get(url_for("patrons.logged_user")) assert res.status_code == 200 data = get_json(res) - assert not data.get('metadata') - assert not data.get('patrons') - assert data.get('settings') + assert not data.get("metadata") + assert not data.get("patrons") + assert data.get("settings") # logged user login_user_via_session(client, librarian_martigny.user) - res = client.get(url_for('patrons.logged_user', resolve=1)) + res = client.get(url_for("patrons.logged_user", resolve=1)) assert res.status_code == 200 data = get_json(res) - assert data.get('first_name') - assert data.get('last_name') - assert data.get('patrons') - assert data.get('settings') - assert data.get('permissions') - assert data.get('patrons')[0].get('organisation') + assert data.get("first_name") + assert data.get("last_name") + assert data.get("patrons") + assert data.get("settings") + assert data.get("permissions") + assert data.get("patrons")[0].get("organisation") class current_i18n: class locale: - language = 'fr' - with mock.patch( - 'rero_ils.modules.patrons.views.current_i18n', - current_i18n - ): + language = "fr" + + with mock.patch("rero_ils.modules.patrons.views.current_i18n", current_i18n): login_user_via_session(client, librarian_martigny.user) - res = client.get(url_for('patrons.logged_user')) + res = client.get(url_for("patrons.logged_user")) assert res.status_code == 200 data = get_json(res) - assert data.get('patrons')[0]['libraries'][0]['pid'] == \ - librarian_martigny['libraries'][0]['$ref'].rsplit('/', 1)[-1] - assert data.get('settings').get('language') == 'fr' + assert ( + data.get("patrons")[0]["libraries"][0]["pid"] + == librarian_martigny["libraries"][0]["$ref"].rsplit("/", 1)[-1] + ) + assert data.get("settings").get("language") == "fr" def test_patron_format_currency_filter(app): """Test format currency filter.""" - assert format_currency_filter(3, 'EUR') == '€3.00' - assert format_currency_filter(4.5, 'CHF') == 'CHF4.50' - assert format_currency_filter(None, 'EUR') is None + assert 
format_currency_filter(3, "EUR") == "€3.00" + assert format_currency_filter(4.5, "CHF") == "CHF4.50" + assert format_currency_filter(None, "EUR") is None diff --git a/tests/ui/stats/conftest.py b/tests/ui/stats/conftest.py index e10dfb349b..b6a7296d76 100644 --- a/tests/ui/stats/conftest.py +++ b/tests/ui/stats/conftest.py @@ -24,13 +24,13 @@ from rero_ils.modules.stats.api.pricing import StatsForPricing -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def stat_for_pricing(document, lib_martigny): """Stats for Pricing.""" yield StatsForPricing(to_date=arrow.utcnow()) -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def stat_for_librarian(document, lib_martigny): """Stats for Librarian.""" yield StatsForLibrarian(to_date=arrow.utcnow()) diff --git a/tests/ui/stats/test_stats_librarian.py b/tests/ui/stats/test_stats_librarian.py index 73506affa5..c12c4ff515 100644 --- a/tests/ui/stats/test_stats_librarian.py +++ b/tests/ui/stats/test_stats_librarian.py @@ -20,11 +20,10 @@ import mock from invenio_db import db -from utils import flush_index from rero_ils.modules.documents.api import Document -from rero_ils.modules.loans.logs.api import LoanOperationLog -from rero_ils.modules.operation_logs.api import OperationLog +from rero_ils.modules.loans.logs.api import LoanOperationLogsSearch +from rero_ils.modules.operation_logs.api import OperationLogsSearch from rero_ils.modules.stats.api.librarian import StatsForLibrarian @@ -32,72 +31,73 @@ def test_stats_librarian_collect(stat_for_librarian): """Test the stat librarian collect keys.""" assert list(stat_for_librarian.collect()[0].keys()) == [ - 'library', 'checkouts_for_transaction_library', - 'checkouts_for_owning_library', 'active_patrons_by_postal_code', - 'new_active_patrons_by_postal_code', 'new_documents', 'new_items', - 'renewals', 'validated_requests', 'items_by_document_type_and_subtype', - 'new_items_by_location', - 'loans_of_transaction_library_by_item_location' + "library", + "checkouts_for_transaction_library", + "checkouts_for_owning_library", + "active_patrons_by_postal_code", + "new_active_patrons_by_postal_code", + "new_documents", + "new_items", + "renewals", + "validated_requests", + "items_by_document_type_and_subtype", + "new_items_by_location", + "loans_of_transaction_library_by_item_location", ] def test_stats_librarian_checkouts_for_transaction_library( - stat_for_librarian, loan_due_soon_martigny, lib_martigny, lib_sion): - """Test the number of circulation operation during the specified timeframe. - """ - assert stat_for_librarian\ - .checkouts_for_transaction_library('foo') == 0 - assert stat_for_librarian\ - .checkouts_for_transaction_library(lib_sion.pid) == 0 - assert stat_for_librarian\ - .checkouts_for_transaction_library(lib_martigny.pid) == 1 + stat_for_librarian, loan_due_soon_martigny, lib_martigny, lib_sion +): + """Test the number of circulation operation during the specified timeframe.""" + assert stat_for_librarian.checkouts_for_transaction_library("foo") == 0 + assert stat_for_librarian.checkouts_for_transaction_library(lib_sion.pid) == 0 + assert stat_for_librarian.checkouts_for_transaction_library(lib_martigny.pid) == 1 def test_stats_librarian_checkouts_for_owning_library( - stat_for_librarian, loan_due_soon_martigny, lib_martigny, lib_sion): - """Test the number of circulation operation during the specified timeframe. 
- """ - assert stat_for_librarian\ - .checkouts_for_owning_library('foo') == 0 - assert stat_for_librarian\ - .checkouts_for_owning_library(lib_sion.pid) == 0 - assert stat_for_librarian\ - .checkouts_for_owning_library(lib_martigny.pid) == 1 + stat_for_librarian, loan_due_soon_martigny, lib_martigny, lib_sion +): + """Test the number of circulation operation during the specified timeframe.""" + assert stat_for_librarian.checkouts_for_owning_library("foo") == 0 + assert stat_for_librarian.checkouts_for_owning_library(lib_sion.pid) == 0 + assert stat_for_librarian.checkouts_for_owning_library(lib_martigny.pid) == 1 def test_stats_librarian_active_patrons_by_postal_code( - stat_for_librarian, loan_due_soon_martigny, lib_martigny): - """Test the number of circulation operation during the specified timeframe. - """ - assert stat_for_librarian\ - .active_patrons_by_postal_code('foo') == {} - assert stat_for_librarian\ - .active_patrons_by_postal_code(lib_martigny.pid) == {'1920': 1} + stat_for_librarian, loan_due_soon_martigny, lib_martigny +): + """Test the number of circulation operation during the specified timeframe.""" + assert stat_for_librarian.active_patrons_by_postal_code("foo") == {} + assert stat_for_librarian.active_patrons_by_postal_code(lib_martigny.pid) == { + "1920": 1 + } # with new patrons - assert stat_for_librarian\ - .active_patrons_by_postal_code('foo', new_patrons=True) == {} - assert stat_for_librarian\ - .active_patrons_by_postal_code( - lib_martigny.pid, new_patrons=True) == {'1920': 1} + assert ( + stat_for_librarian.active_patrons_by_postal_code("foo", new_patrons=True) == {} + ) + assert stat_for_librarian.active_patrons_by_postal_code( + lib_martigny.pid, new_patrons=True + ) == {"1920": 1} stat = StatsForLibrarian() - assert stat\ - .active_patrons_by_postal_code( - lib_martigny.pid, new_patrons=True) == {} + assert stat.active_patrons_by_postal_code(lib_martigny.pid, new_patrons=True) == {} def test_stats_librarian_new_documents( - stat_for_librarian, document_data, lib_martigny, librarian_martigny): + stat_for_librarian, document_data, lib_martigny, librarian_martigny +): """Test the number of new documents per library for given time interval.""" - assert stat_for_librarian.new_documents('foo') == 0 + assert stat_for_librarian.new_documents("foo") == 0 with mock.patch( - 'rero_ils.modules.operation_logs.extensions.current_librarian', - librarian_martigny + "rero_ils.modules.operation_logs.extensions.current_librarian", + librarian_martigny, ): # needs to create a new document created by a librarian Document.create( - data=document_data, delete_pid=True, dbcommit=False, reindex=False) - flush_index(OperationLog.index_name) + data=document_data, delete_pid=True, dbcommit=False, reindex=False + ) + OperationLogsSearch.flush_and_refresh() assert stat_for_librarian.new_documents(lib_martigny.pid) == 1 stat = StatsForLibrarian() @@ -106,58 +106,78 @@ def test_stats_librarian_new_documents( def test_stats_librarian_renewals( - stat_for_librarian, lib_martigny, loan_due_soon_martigny, - loc_public_martigny, librarian_martigny): + stat_for_librarian, + lib_martigny, + loan_due_soon_martigny, + loc_public_martigny, + librarian_martigny, +): """Test the number of items with loan extended.""" - assert stat_for_librarian.renewals('foo') == 0 + assert stat_for_librarian.renewals("foo") == 0 loan_due_soon_martigny.item.extend_loan( pid=loan_due_soon_martigny.pid, transaction_location_pid=loc_public_martigny.pid, - transaction_user_pid=librarian_martigny.pid + 
transaction_user_pid=librarian_martigny.pid, ) - flush_index(LoanOperationLog.index_name) + LoanOperationLogsSearch.flush_and_refresh() assert stat_for_librarian.renewals(lib_martigny.pid) == 1 def test_stats_librarian_validated_requests( - stat_for_librarian, lib_sion, loan_validated_sion): + stat_for_librarian, lib_sion, loan_validated_sion +): """Test the number of validated requests.""" - assert stat_for_librarian.validated_requests('foo') == 0 + assert stat_for_librarian.validated_requests("foo") == 0 assert stat_for_librarian.validated_requests(lib_sion.pid) == 1 def test_stats_librarian_new_items_by_location( - stat_for_librarian, item_lib_martigny, loc_public_martigny): + stat_for_librarian, item_lib_martigny, loc_public_martigny +): """Test the number of new items per library by location.""" loc = loc_public_martigny - assert stat_for_librarian.new_items_by_location('foo') == {} - assert stat_for_librarian.new_items_by_location( - item_lib_martigny.library_pid)[f'{loc["code"]} - {loc["name"]}'] >= 1 + assert stat_for_librarian.new_items_by_location("foo") == {} + assert ( + stat_for_librarian.new_items_by_location(item_lib_martigny.library_pid)[ + f'{loc["code"]} - {loc["name"]}' + ] + >= 1 + ) stat = StatsForLibrarian() assert stat.new_items_by_location(item_lib_martigny.library_pid) == {} def test_stats_librarian_items_by_document_type_and_subtype( - stat_for_librarian, item_lib_martigny, loc_public_martigny): + stat_for_librarian, item_lib_martigny, loc_public_martigny +): """Test the number of items per library by document type and sub-type.""" loc = loc_public_martigny - assert stat_for_librarian.items_by_document_type_and_subtype('foo') == {} - assert stat_for_librarian.items_by_document_type_and_subtype( - item_lib_martigny.library_pid)['docmaintype_book'] >= 1 - assert stat_for_librarian.items_by_document_type_and_subtype( - item_lib_martigny.library_pid)['docsubtype_other_book'] >= 1 + assert stat_for_librarian.items_by_document_type_and_subtype("foo") == {} + assert ( + stat_for_librarian.items_by_document_type_and_subtype( + item_lib_martigny.library_pid + )["docmaintype_book"] + >= 1 + ) + assert ( + stat_for_librarian.items_by_document_type_and_subtype( + item_lib_martigny.library_pid + )["docsubtype_other_book"] + >= 1 + ) def test_stats_librarian_loans_of_transaction_library_by_item_location( - stat_for_librarian, loan_due_soon_martigny, lib_martigny, - loc_public_martigny): - """Test the number of circulation operation during the specified timeframe. 
-    """
-    assert stat_for_librarian\
-        .loans_of_transaction_library_by_item_location('foo') == {}
-    key = f'{lib_martigny.pid}: {lib_martigny["name"]} -'\
-        f' {loc_public_martigny["name"]}'
-    res = stat_for_librarian\
-        .loans_of_transaction_library_by_item_location(lib_martigny.pid)
-    assert res[key]['checkin'] == 0
-    assert res[key]['checkout'] >= 0
+    stat_for_librarian, loan_due_soon_martigny, lib_martigny, loc_public_martigny
+):
+    """Test the number of circulation operations during the specified timeframe."""
+    assert stat_for_librarian.loans_of_transaction_library_by_item_location("foo") == {}
+    key = (
+        f'{lib_martigny.pid}: {lib_martigny["name"]} -'
+        f' {loc_public_martigny["name"]}'
+    )
+    res = stat_for_librarian.loans_of_transaction_library_by_item_location(
+        lib_martigny.pid
+    )
+    assert res[key]["checkin"] == 0
+    assert res[key]["checkout"] >= 0
diff --git a/tests/ui/stats/test_stats_pricing.py b/tests/ui/stats/test_stats_pricing.py
index fbcd8f7294..155c3412dc 100644
--- a/tests/ui/stats/test_stats_pricing.py
+++ b/tests/ui/stats/test_stats_pricing.py
@@ -20,166 +20,180 @@
 import mock
 from invenio_db import db
-from utils import flush_index
 from rero_ils.modules.ill_requests.models import ILLRequestStatus
 from rero_ils.modules.items.models import ItemCirculationAction
-from rero_ils.modules.loans.logs.api import LoanOperationLog
+from rero_ils.modules.loans.logs.api import LoanOperationLogsSearch
 from rero_ils.modules.stats.api.pricing import StatsForPricing


 def test_stats_pricing_collect(stat_for_pricing):
     """Test the stat pricing collect keys."""
-    assert set(stat_for_pricing.collect()[0].keys()) == set([
-        'library', 'number_of_docs_with_files', 'number_of_files',
-        'files_volume', 'number_of_documents', 'number_of_libraries',
-        'number_of_librarians', 'number_of_active_patrons',
-        'number_of_order_lines', 'number_of_checkouts', 'number_of_renewals',
-        'number_of_ill_requests', 'number_of_items',
-        'number_of_new_items', 'number_of_deleted_items', 'number_of_patrons',
-        'number_of_new_patrons', 'number_of_checkins', 'number_of_requests'])
+    assert set(stat_for_pricing.collect()[0].keys()) == set(
+        [
+            "library",
+            "number_of_docs_with_files",
+            "number_of_files",
+            "files_volume",
+            "number_of_documents",
+            "number_of_libraries",
+            "number_of_librarians",
+            "number_of_active_patrons",
+            "number_of_order_lines",
+            "number_of_checkouts",
+            "number_of_renewals",
+            "number_of_ill_requests",
+            "number_of_items",
+            "number_of_new_items",
+            "number_of_deleted_items",
+            "number_of_patrons",
+            "number_of_new_patrons",
+            "number_of_checkins",
+            "number_of_requests",
+        ]
+    )


-def test_stats_pricing_number_of_documents(
-        stat_for_pricing, item_lib_martigny):
+def test_stats_pricing_number_of_documents(stat_for_pricing, item_lib_martigny):
     """Test the number of documents linked to my library."""
-    assert stat_for_pricing.number_of_documents('foo') == 0
-    assert stat_for_pricing\
-        .number_of_documents(item_lib_martigny.library_pid) == 1
+    assert stat_for_pricing.number_of_documents("foo") == 0
+    assert stat_for_pricing.number_of_documents(item_lib_martigny.library_pid) == 1


 def test_stats_pricing_number_of_libraries(stat_for_pricing, lib_martigny):
     """Test the Number of libraries of the given organisation."""
-    assert stat_for_pricing.number_of_libraries('foo') == 0
-    assert stat_for_pricing\
-        .number_of_libraries(lib_martigny.organisation_pid) == 1
+    assert stat_for_pricing.number_of_libraries("foo") == 0
+    assert
stat_for_pricing.number_of_libraries(lib_martigny.organisation_pid) == 1 -def test_stats_pricing_number_of_librarians( - stat_for_pricing, librarian_martigny): +def test_stats_pricing_number_of_librarians(stat_for_pricing, librarian_martigny): """Test the number of users with a librarian role.""" - assert stat_for_pricing.number_of_librarians('foo') == 0 - lib_pid = librarian_martigny.replace_refs()['libraries'][0]['pid'] + assert stat_for_pricing.number_of_librarians("foo") == 0 + lib_pid = librarian_martigny.replace_refs()["libraries"][0]["pid"] assert stat_for_pricing.number_of_librarians(lib_pid) == 1 def test_stats_pricing_number_of_active_patrons( - stat_for_pricing, loan_due_soon_martigny, lib_martigny): - """Test the number of patrons who did a transaction in the past 365 days. - """ - assert stat_for_pricing.number_of_active_patrons('foo') == 0 + stat_for_pricing, loan_due_soon_martigny, lib_martigny +): + """Test the number of patrons who did a transaction in the past 365 days.""" + assert stat_for_pricing.number_of_active_patrons("foo") == 0 assert stat_for_pricing.number_of_active_patrons(lib_martigny.pid) == 1 def test_stats_pricing_number_of_order_lines( - stat_for_pricing, acq_order_line_fiction_martigny): - """Test the number of order lines created during the specified timeframe. - """ - assert stat_for_pricing.number_of_order_lines('foo') == 0 + stat_for_pricing, acq_order_line_fiction_martigny +): + """Test the number of order lines created during the specified timeframe.""" + assert stat_for_pricing.number_of_order_lines("foo") == 0 lib_pid = acq_order_line_fiction_martigny.library_pid assert stat_for_pricing.number_of_order_lines(lib_pid) == 1 def test_stats_pricing_number_of_circ_operations( - stat_for_pricing, loan_due_soon_martigny, lib_martigny): + stat_for_pricing, loan_due_soon_martigny, lib_martigny +): """Test the number of circulation operation during the specified - timeframe. + timeframe. 
""" - assert stat_for_pricing\ - .number_of_circ_operations('foo', ItemCirculationAction.CHECKOUT) == 0 - assert stat_for_pricing\ - .number_of_circ_operations( - lib_martigny.pid, ItemCirculationAction.EXTEND) == 0 - assert stat_for_pricing\ - .number_of_circ_operations( - lib_martigny.pid, ItemCirculationAction.CHECKOUT) == 1 + assert ( + stat_for_pricing.number_of_circ_operations( + "foo", ItemCirculationAction.CHECKOUT + ) + == 0 + ) + assert ( + stat_for_pricing.number_of_circ_operations( + lib_martigny.pid, ItemCirculationAction.EXTEND + ) + == 0 + ) + assert ( + stat_for_pricing.number_of_circ_operations( + lib_martigny.pid, ItemCirculationAction.CHECKOUT + ) + == 1 + ) def test_stats_pricing_number_of_ill_requests( - stat_for_pricing, ill_request_martigny, lib_martigny): + stat_for_pricing, ill_request_martigny, lib_martigny +): """Test the number of ILL requests.""" - assert stat_for_pricing\ - .number_of_ill_requests( - 'foo', [ILLRequestStatus.DENIED]) == 0 + assert ( + stat_for_pricing.number_of_ill_requests("foo", [ILLRequestStatus.DENIED]) == 0 + ) lib_pid = lib_martigny.pid - assert stat_for_pricing\ - .number_of_ill_requests( - lib_pid, [ILLRequestStatus.DENIED]) == 1 - assert stat_for_pricing\ - .number_of_ill_requests( - lib_pid, [ILLRequestStatus.PENDING]) == 0 + assert ( + stat_for_pricing.number_of_ill_requests(lib_pid, [ILLRequestStatus.DENIED]) == 1 + ) + assert ( + stat_for_pricing.number_of_ill_requests(lib_pid, [ILLRequestStatus.PENDING]) + == 0 + ) -def test_stats_pricing_number_of_items( - stat_for_pricing, item_lib_martigny): +def test_stats_pricing_number_of_items(stat_for_pricing, item_lib_martigny): """Test the number of items linked to my library.""" - assert stat_for_pricing.number_of_items('foo') == 0 + assert stat_for_pricing.number_of_items("foo") == 0 # loans used in previous tests can adds some items - assert stat_for_pricing\ - .number_of_items(item_lib_martigny.library_pid) >= 1 + assert stat_for_pricing.number_of_items(item_lib_martigny.library_pid) >= 1 -def test_stats_pricing_number_of_new_items( - stat_for_pricing, item_lib_martigny): +def test_stats_pricing_number_of_new_items(stat_for_pricing, item_lib_martigny): """Test the number of new created items during the specified timeframe.""" - assert stat_for_pricing.number_of_new_items('foo') == 0 + assert stat_for_pricing.number_of_new_items("foo") == 0 # loans used in previous tests can adds some items - assert stat_for_pricing\ - .number_of_new_items(item_lib_martigny.library_pid) >= 1 + assert stat_for_pricing.number_of_new_items(item_lib_martigny.library_pid) >= 1 from rero_ils.modules.stats.api.pricing import StatsForPricing # today item creation is excluded stat = StatsForPricing() - assert stat\ - .number_of_new_items(item_lib_martigny.library_pid) == 0 + assert stat.number_of_new_items(item_lib_martigny.library_pid) == 0 def test_stats_pricing_number_of_deleted_items( - stat_for_pricing, item_lib_martigny, librarian_martigny): + stat_for_pricing, item_lib_martigny, librarian_martigny +): """Test the number of deleted items during the specified timeframe.""" - assert stat_for_pricing.number_of_deleted_items('foo') == 0 + assert stat_for_pricing.number_of_deleted_items("foo") == 0 with mock.patch( - 'rero_ils.modules.operation_logs.extensions.current_librarian', - librarian_martigny + "rero_ils.modules.operation_logs.extensions.current_librarian", + librarian_martigny, ): item_lib_martigny.delete(False, False, False) - flush_index(LoanOperationLog.index_name) - assert stat_for_pricing\ - 
.number_of_deleted_items(item_lib_martigny.library_pid) == 1 + LoanOperationLogsSearch.flush_and_refresh() + assert ( + stat_for_pricing.number_of_deleted_items(item_lib_martigny.library_pid) == 1 + ) db.session.rollback() -def test_stats_pricing_number_of_patrons( - stat_for_pricing, patron_martigny): +def test_stats_pricing_number_of_patrons(stat_for_pricing, patron_martigny): """Test the number of users with a librarian role.""" - assert stat_for_pricing.number_of_patrons('foo') == 0 + assert stat_for_pricing.number_of_patrons("foo") == 0 # loans used in previous tests can adds some items - assert stat_for_pricing\ - .number_of_patrons(patron_martigny.organisation_pid) >= 1 + assert stat_for_pricing.number_of_patrons(patron_martigny.organisation_pid) >= 1 -def test_stats_pricing_number_of_new_patrons( - stat_for_pricing, patron_martigny): +def test_stats_pricing_number_of_new_patrons(stat_for_pricing, patron_martigny): """Test the number of new patrons for an organisation during the specified - timeframe. + timeframe. """ - assert stat_for_pricing.number_of_patrons('foo') == 0 + assert stat_for_pricing.number_of_patrons("foo") == 0 # loans used in previous tests can adds some items - assert stat_for_pricing\ - .number_of_patrons(patron_martigny.organisation_pid) >= 1 + assert stat_for_pricing.number_of_patrons(patron_martigny.organisation_pid) >= 1 # today item creation is excluded stat = StatsForPricing() - assert stat\ - .number_of_new_patrons(patron_martigny.organisation_pid) == 0 + assert stat.number_of_new_patrons(patron_martigny.organisation_pid) == 0 -def test_stats_pricing_files( - stat_for_pricing, lib_martigny, document_with_files): +def test_stats_pricing_files(stat_for_pricing, lib_martigny, document_with_files): """Test the files indicators for a given library.""" - assert float(stat_for_pricing.files_volume('foo')) == 0 - assert stat_for_pricing.number_of_files('foo') == 0 - assert stat_for_pricing.number_of_docs_with_files('foo') == 0 + assert float(stat_for_pricing.files_volume("foo")) == 0 + assert stat_for_pricing.number_of_files("foo") == 0 + assert stat_for_pricing.number_of_docs_with_files("foo") == 0 assert stat_for_pricing.number_of_files(lib_martigny.pid) >= 1 assert float(stat_for_pricing.files_volume(lib_martigny.pid)) > 0 assert stat_for_pricing.number_of_docs_with_files(lib_martigny.pid) >= 1 diff --git a/tests/ui/stats/test_stats_report.py b/tests/ui/stats/test_stats_report.py index 2c9c437c81..e6fe804db2 100644 --- a/tests/ui/stats/test_stats_report.py +++ b/tests/ui/stats/test_stats_report.py @@ -29,11 +29,8 @@ def test_stats_report_create(lib_martigny, document): """Test the stat report creation.""" cfg = { - "$schema": - "https://bib.rero.ch/schemas/stats_cfg/stat_cfg-v0.0.1.json", - "library": { - "$ref": f"https://bib.rero.ch/api/libraries/{lib_martigny.pid}" - }, + "$schema": "https://bib.rero.ch/schemas/stats_cfg/stat_cfg-v0.0.1.json", + "library": {"$ref": f"https://bib.rero.ch/api/libraries/{lib_martigny.pid}"}, "is_active": True, "pid": "1", "name": "foo", @@ -42,44 +39,39 @@ def test_stats_report_create(lib_martigny, document): "type": "catalog", "indicator": { "type": "number_of_documents", - "distributions": ['owning_library'] - } - } + "distributions": ["owning_library"], + }, + }, } assert StatsReport(cfg) - cfg['is_active'] = False + cfg["is_active"] = False assert not StatsReport(cfg).collect() res = StatsReport(cfg).collect() - assert Stat.create(data=dict( - type='report', - config=cfg, - values=[dict(results=res)] - )) + assert 
Stat.create(data=dict(type="report", config=cfg, values=[dict(results=res)])) def test_stats_report_range(app, lib_martigny): """Test the report range period.""" cfg = { - "library": { - "$ref": f"https://bib.rero.ch/api/libraries/{lib_martigny.pid}" - }, + "library": {"$ref": f"https://bib.rero.ch/api/libraries/{lib_martigny.pid}"}, "category": { "indicator": { "type": "number_of_documents", - "distributions": ["owning_library"] + "distributions": ["owning_library"], } - } + }, } - with mock.patch( - 'rero_ils.modules.stats.api.report.datetime' - ) as mock_datetime: + with mock.patch("rero_ils.modules.stats.api.report.datetime") as mock_datetime: mock_datetime.now.return_value = datetime(year=2023, month=2, day=1) - assert StatsReport(cfg).get_range_period('month') == \ - dict(gte='2023-01-01T00:00:00', lte='2023-01-31T23:59:59') - assert StatsReport(cfg).get_range_period('year') == \ - dict(gte='2022-01-01T00:00:00', lte='2022-12-31T23:59:59') + assert StatsReport(cfg).get_range_period("month") == dict( + gte="2023-01-01T00:00:00", lte="2023-01-31T23:59:59" + ) + assert StatsReport(cfg).get_range_period("year") == dict( + gte="2022-01-01T00:00:00", lte="2022-12-31T23:59:59" + ) mock_datetime.now.return_value = datetime(year=2023, month=1, day=5) - assert StatsReport(cfg).get_range_period('month') == \ - dict(gte='2022-12-01T00:00:00', lte='2022-12-31T23:59:59') - assert not StatsReport(cfg).get_range_period('foo') + assert StatsReport(cfg).get_range_period("month") == dict( + gte="2022-12-01T00:00:00", lte="2022-12-31T23:59:59" + ) + assert not StatsReport(cfg).get_range_period("foo") diff --git a/tests/ui/stats/test_stats_report_n_deleted_items.py b/tests/ui/stats/test_stats_report_n_deleted_items.py index 4491445aed..77369bb13e 100644 --- a/tests/ui/stats/test_stats_report_n_deleted_items.py +++ b/tests/ui/stats/test_stats_report_n_deleted_items.py @@ -27,8 +27,8 @@ def test_stats_report_number_of_deleted_items( - org_martigny, org_sion, lib_martigny, lib_martigny_bourg, - lib_sion): + org_martigny, org_sion, lib_martigny, lib_martigny_bourg, lib_sion +): """Test the number of deleted items.""" # fixtures # created by the system user @@ -55,14 +55,8 @@ def test_stats_report_number_of_deleted_items( "date": "2023-01-01", "operation": "create", "user_name": "Doe, John", - "library": { - "type": "lib", - "value": lib_martigny.pid - }, - "organisation": { - "type": "org", - "value": org_martigny.pid - } + "library": {"type": "lib", "value": lib_martigny.pid}, + "organisation": {"type": "org", "value": org_martigny.pid}, }, refresh=True, ) @@ -149,16 +143,14 @@ def test_stats_report_number_of_deleted_items( # one distrubtions cfg = { - "library": { - "$ref": "https://bib.rero.ch/api/libraries/lib1" - }, + "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"}, "is_active": True, "category": { "indicator": { "type": "number_of_deleted_items", - "distributions": ["operator_library"] + "distributions": ["operator_library"], } - } + }, } # do not contains system assert StatsReport(cfg).collect() == [ @@ -167,86 +159,76 @@ def test_stats_report_number_of_deleted_items( # two distributions cfg = { - "library": { - "$ref": "https://bib.rero.ch/api/libraries/lib1" - }, + "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"}, "is_active": True, "category": { "indicator": { "type": "number_of_deleted_items", - "distributions": ["owning_library", "action_month"] + "distributions": ["owning_library", "action_month"], } - } + }, } assert StatsReport(cfg).collect() == [ - ['', '2023-01', 
'2024-01'], + ["", "2023-01", "2024-01"], [f'{lib_martigny_bourg.get("name")} ({lib_martigny_bourg.pid})', 0, 1], - [f'{lib_martigny.get("name")} ({lib_martigny.pid})', 1, 0] + [f'{lib_martigny.get("name")} ({lib_martigny.pid})', 1, 0], ] # reverse distrubtions cfg = { - "library": { - "$ref": "https://bib.rero.ch/api/libraries/lib1" - }, + "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"}, "is_active": True, "category": { "indicator": { "type": "number_of_deleted_items", - "distributions": ["action_month", "owning_library"] + "distributions": ["action_month", "owning_library"], } - } + }, } assert StatsReport(cfg).collect() == [ [ - '', + "", f'{lib_martigny_bourg.get("name")} ({lib_martigny_bourg.pid})', - f'{lib_martigny.get("name")} ({lib_martigny.pid})' + f'{lib_martigny.get("name")} ({lib_martigny.pid})', ], - ['2023-01', 0, 1], - ['2024-01', 1, 0] + ["2023-01", 0, 1], + ["2024-01", 1, 0], ] # year cfg = { - "library": { - "$ref": "https://bib.rero.ch/api/libraries/lib1" - }, + "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"}, "is_active": True, "category": { "indicator": { "type": "number_of_deleted_items", - "distributions": ["action_year", "owning_library"] + "distributions": ["action_year", "owning_library"], } - } + }, } assert StatsReport(cfg).collect() == [ [ - '', + "", f'{lib_martigny_bourg.get("name")} ({lib_martigny_bourg.pid})', - f'{lib_martigny.get("name")} ({lib_martigny.pid})' + f'{lib_martigny.get("name")} ({lib_martigny.pid})', ], - ['2023', 0, 1], - ['2024', 1, 0] + ["2023", 0, 1], + ["2024", 1, 0], ] # limit by period cfg = { - "library": { - "$ref": "https://bib.rero.ch/api/libraries/lib1" - }, + "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"}, "is_active": True, "category": { "indicator": { "type": "number_of_deleted_items", "period": "year", - "distributions": ["owning_library"] + "distributions": ["owning_library"], } - } + }, } - with mock.patch( - 'rero_ils.modules.stats.api.report.datetime' - ) as mock_datetime: + with mock.patch("rero_ils.modules.stats.api.report.datetime") as mock_datetime: mock_datetime.now.return_value = datetime(year=2024, month=1, day=1) assert StatsReport(cfg).collect() == [ [f'{lib_martigny.get("name")} ({lib_martigny.pid})', 1] diff --git a/tests/ui/stats/test_stats_report_n_docs.py b/tests/ui/stats/test_stats_report_n_docs.py index 3a6365de96..b4df1bb333 100644 --- a/tests/ui/stats/test_stats_report_n_docs.py +++ b/tests/ui/stats/test_stats_report_n_docs.py @@ -24,201 +24,193 @@ def test_stats_report_number_of_documents( - org_martigny, org_sion, lib_martigny, lib_martigny_bourg, - lib_sion): + org_martigny, org_sion, lib_martigny, lib_martigny_bourg, lib_sion +): """Test the number of documents.""" # fixtures - es.index(index='documents', id='1', body={ - '_created': "2023-02-01", - 'adminMetadata': {'source': 'foo'}, - 'holdings': [{ - 'organisation': { - 'organisation_pid': org_martigny.pid, - 'library_pid': lib_martigny.pid - } - } - ] - }) - es.index(index='documents', id='2', body={ - '_created': "2024-01-01", - 'holdings': [{ - 'organisation': { - 'organisation_pid': org_martigny.pid, - 'library_pid': lib_martigny_bourg.pid - } - }, { - 'organisation': { - 'organisation_pid': org_sion.pid, - 'library_pid': lib_sion.pid - } - } - ] - }) - es.index(index='documents', id='3', body={ - '_created': "2024-01-01", - 'holdings': [{ - 'organisation': { - 'organisation_pid': org_sion.pid, - 'library_pid': lib_sion.pid - } - } - ] - }) - es.indices.refresh(index='documents') + es.index( + 
index="documents", + id="1", + body={ + "_created": "2023-02-01", + "adminMetadata": {"source": "foo"}, + "holdings": [ + { + "organisation": { + "organisation_pid": org_martigny.pid, + "library_pid": lib_martigny.pid, + } + } + ], + }, + ) + es.index( + index="documents", + id="2", + body={ + "_created": "2024-01-01", + "holdings": [ + { + "organisation": { + "organisation_pid": org_martigny.pid, + "library_pid": lib_martigny_bourg.pid, + } + }, + { + "organisation": { + "organisation_pid": org_sion.pid, + "library_pid": lib_sion.pid, + } + }, + ], + }, + ) + es.index( + index="documents", + id="3", + body={ + "_created": "2024-01-01", + "holdings": [ + { + "organisation": { + "organisation_pid": org_sion.pid, + "library_pid": lib_sion.pid, + } + } + ], + }, + ) + es.indices.refresh(index="documents") # no distributions cfg = { - "library": { - "$ref": "https://bib.rero.ch/api/libraries/lib1" - }, + "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"}, "is_active": True, - "category": { - "indicator": { - "type": "number_of_documents" - } - } + "category": {"indicator": {"type": "number_of_documents"}}, } assert StatsReport(cfg).collect() == [[2]] # no distributions with filters lib_pid = lib_martigny_bourg.pid cfg = { - "library": { - "$ref": "https://bib.rero.ch/api/libraries/lib1" - }, + "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"}, "is_active": True, - "filter_by_libraries": [{ - '$ref': - f'https://bib.rero.ch/api/libraries/{lib_pid}'}], - "category": { - "indicator": { - "type": "number_of_documents" - } - } + "filter_by_libraries": [ + {"$ref": f"https://bib.rero.ch/api/libraries/{lib_pid}"} + ], + "category": {"indicator": {"type": "number_of_documents"}}, } assert StatsReport(cfg).collect() == [[1]] # one distrubtions cfg = { - "library": { - "$ref": "https://bib.rero.ch/api/libraries/lib1" - }, + "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"}, "is_active": True, "category": { "indicator": { "type": "number_of_documents", - "distributions": ["owning_library"] + "distributions": ["owning_library"], } - } + }, } assert StatsReport(cfg).collect() == [ [f'{lib_martigny_bourg.get("name")} ({lib_martigny_bourg.pid})', 1], - [f'{lib_martigny.get("name")} ({lib_martigny.pid})', 1] + [f'{lib_martigny.get("name")} ({lib_martigny.pid})', 1], ] # two distributions cfg = { - "library": { - "$ref": "https://bib.rero.ch/api/libraries/lib1" - }, + "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"}, "is_active": True, "category": { "indicator": { "type": "number_of_documents", - "distributions": ["owning_library", "created_month"] + "distributions": ["owning_library", "created_month"], } - } + }, } assert StatsReport(cfg).collect() == [ - ['', '2023-02', '2024-01'], + ["", "2023-02", "2024-01"], [f'{lib_martigny_bourg.get("name")} ({lib_martigny_bourg.pid})', 0, 1], - [f'{lib_martigny.get("name")} ({lib_martigny.pid})', 1, 0] + [f'{lib_martigny.get("name")} ({lib_martigny.pid})', 1, 0], ] # reverse distrubtions cfg = { - "library": { - "$ref": "https://bib.rero.ch/api/libraries/lib1" - }, + "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"}, "is_active": True, "category": { "indicator": { "type": "number_of_documents", - "distributions": ["created_month", "owning_library"] + "distributions": ["created_month", "owning_library"], } - } + }, } assert StatsReport(cfg).collect() == [ [ - '', + "", f'{lib_martigny_bourg.get("name")} ({lib_martigny_bourg.pid})', - f'{lib_martigny.get("name")} ({lib_martigny.pid})' + 
f'{lib_martigny.get("name")} ({lib_martigny.pid})', ], - ['2023-02', 0, 1], - ['2024-01', 1, 0] + ["2023-02", 0, 1], + ["2024-01", 1, 0], ] # by year cfg = { - "library": { - "$ref": "https://bib.rero.ch/api/libraries/lib1" - }, + "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"}, "is_active": True, "category": { "indicator": { "type": "number_of_documents", - "distributions": ["created_year", "owning_library"] + "distributions": ["created_year", "owning_library"], } - } + }, } assert StatsReport(cfg).collect() == [ [ - '', + "", f'{lib_martigny_bourg.get("name")} ({lib_martigny_bourg.pid})', - f'{lib_martigny.get("name")} ({lib_martigny.pid})' + f'{lib_martigny.get("name")} ({lib_martigny.pid})', ], - ['2023', 0, 1], - ['2024', 1, 0] + ["2023", 0, 1], + ["2024", 1, 0], ] # imported cfg = { - "library": { - "$ref": "https://bib.rero.ch/api/libraries/lib1" - }, + "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"}, "is_active": True, "category": { "indicator": { "type": "number_of_documents", - "distributions": ["owning_library", "imported"] + "distributions": ["owning_library", "imported"], } - } + }, } assert StatsReport(cfg).collect() == [ - ['', 'imported', 'not imported'], + ["", "imported", "not imported"], [f'{lib_martigny_bourg.get("name")} ({lib_martigny_bourg.pid})', 0, 1], - [f'{lib_martigny.get("name")} ({lib_martigny.pid})', 1, 0] + [f'{lib_martigny.get("name")} ({lib_martigny.pid})', 1, 0], ] # reverse imported cfg = { - "library": { - "$ref": "https://bib.rero.ch/api/libraries/lib1" - }, + "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"}, "is_active": True, "category": { "indicator": { "type": "number_of_documents", - "distributions": ["imported", "owning_library"] + "distributions": ["imported", "owning_library"], } - } + }, } assert StatsReport(cfg).collect() == [ [ - '', + "", f'{lib_martigny_bourg.get("name")} ({lib_martigny_bourg.pid})', - f'{lib_martigny.get("name")} ({lib_martigny.pid})' + f'{lib_martigny.get("name")} ({lib_martigny.pid})', ], - ['imported', 0, 1], - ['not imported', 1, 0] + ["imported", 0, 1], + ["not imported", 1, 0], ] diff --git a/tests/ui/stats/test_stats_report_n_ill_requests.py b/tests/ui/stats/test_stats_report_n_ill_requests.py index 06dc1b639e..da3fcfc7a1 100644 --- a/tests/ui/stats/test_stats_report_n_ill_requests.py +++ b/tests/ui/stats/test_stats_report_n_ill_requests.py @@ -25,202 +25,203 @@ def test_stats_report_number_of_ill_requests( - org_martigny, org_sion, lib_martigny, lib_martigny_bourg, lib_sion, - loc_public_martigny, loc_restricted_martigny, - loc_public_martigny_bourg, loc_public_sion): + org_martigny, + org_sion, + lib_martigny, + lib_martigny_bourg, + lib_sion, + loc_public_martigny, + loc_restricted_martigny, + loc_public_martigny_bourg, + loc_public_sion, +): """Test the number of ill requests.""" - label_loc_pub_martigny = f'{lib_martigny["name"]} / '\ + label_loc_pub_martigny = ( + f'{lib_martigny["name"]} / ' f'{loc_public_martigny["name"]} ({loc_public_martigny.pid})' - label_loc_rest_martigny = f'{lib_martigny["name"]} / '\ + ) + label_loc_rest_martigny = ( + f'{lib_martigny["name"]} / ' f'{loc_restricted_martigny["name"]} ({loc_restricted_martigny.pid})' - label_loc_pub_martigny_bourg = f'{lib_martigny_bourg["name"]} / '\ - f'{loc_public_martigny_bourg["name"]} '\ - f'({loc_public_martigny_bourg.pid})' + ) + label_loc_pub_martigny_bourg = ( + f'{lib_martigny_bourg["name"]} / ' + f'{loc_public_martigny_bourg["name"]} ' + f"({loc_public_martigny_bourg.pid})" + ) # no data cfg = { - 
"library": { - "$ref": "https://bib.rero.ch/api/libraries/lib1" - }, + "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"}, "is_active": True, - "category": { - "indicator": { - "type": "number_of_ill_requests" - } - } + "category": {"indicator": {"type": "number_of_ill_requests"}}, } assert StatsReport(cfg).collect() == [[0]] # fixtures - es.index(index='ill_requests', id='1', body={ - '_created': "2023-02-01", - 'status': 'pending', - 'organisation': {'pid': org_martigny.pid}, - 'library': {'pid': lib_martigny.pid}, - 'pickup_location': {'pid': loc_public_martigny.pid} - }) - es.index(index='ill_requests', id='2', body={ - '_created': "2023-02-01", - 'status': 'validated', - 'organisation': {'pid': org_martigny.pid}, - 'library': {'pid': lib_martigny.pid}, - 'pickup_location': {'pid': loc_restricted_martigny.pid} - }) - es.index(index='ill_requests', id='3', body={ - '_created': "2024-01-01", - 'status': 'closed', - 'organisation': {'pid': org_martigny.pid}, - 'library': {'pid': lib_martigny_bourg.pid}, - 'pickup_location': {'pid': loc_public_martigny_bourg.pid} - }) - es.index(index='ill_requests', id='4', body={ - '_created': "2024-01-01", - 'status': 'denied', - 'organisation': {'pid': org_sion.pid}, - 'library': {'pid': lib_sion.pid}, - 'pickup_location': {'pid': loc_public_sion.pid} - }) - es.indices.refresh(index='ill_requests') + es.index( + index="ill_requests", + id="1", + body={ + "_created": "2023-02-01", + "status": "pending", + "organisation": {"pid": org_martigny.pid}, + "library": {"pid": lib_martigny.pid}, + "pickup_location": {"pid": loc_public_martigny.pid}, + }, + ) + es.index( + index="ill_requests", + id="2", + body={ + "_created": "2023-02-01", + "status": "validated", + "organisation": {"pid": org_martigny.pid}, + "library": {"pid": lib_martigny.pid}, + "pickup_location": {"pid": loc_restricted_martigny.pid}, + }, + ) + es.index( + index="ill_requests", + id="3", + body={ + "_created": "2024-01-01", + "status": "closed", + "organisation": {"pid": org_martigny.pid}, + "library": {"pid": lib_martigny_bourg.pid}, + "pickup_location": {"pid": loc_public_martigny_bourg.pid}, + }, + ) + es.index( + index="ill_requests", + id="4", + body={ + "_created": "2024-01-01", + "status": "denied", + "organisation": {"pid": org_sion.pid}, + "library": {"pid": lib_sion.pid}, + "pickup_location": {"pid": loc_public_sion.pid}, + }, + ) + es.indices.refresh(index="ill_requests") # no distributions cfg = { - "library": { - "$ref": "https://bib.rero.ch/api/libraries/lib1" - }, + "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"}, "is_active": True, - "category": { - "indicator": { - "type": "number_of_ill_requests" - } - } + "category": {"indicator": {"type": "number_of_ill_requests"}}, } assert StatsReport(cfg).collect() == [[3]] # no distributions with filters lib_pid = lib_martigny_bourg.pid cfg = { - "library": { - "$ref": "https://bib.rero.ch/api/libraries/lib1" - }, + "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"}, "is_active": True, - "filter_by_libraries": [{ - '$ref': - f'https://bib.rero.ch/api/libraries/{lib_pid}'}], - "category": { - "indicator": { - "type": "number_of_ill_requests" - } - } + "filter_by_libraries": [ + {"$ref": f"https://bib.rero.ch/api/libraries/{lib_pid}"} + ], + "category": {"indicator": {"type": "number_of_ill_requests"}}, } assert StatsReport(cfg).collect() == [[1]] # one distrubtions cfg = { - "library": { - "$ref": "https://bib.rero.ch/api/libraries/lib1" - }, + "library": {"$ref": 
"https://bib.rero.ch/api/libraries/lib1"}, "is_active": True, "category": { "indicator": { "type": "number_of_ill_requests", - "distributions": ["pickup_location"] + "distributions": ["pickup_location"], } - } + }, } assert StatsReport(cfg).collect() == [ [label_loc_pub_martigny_bourg, 1], [label_loc_pub_martigny, 1], - [label_loc_rest_martigny, 1] + [label_loc_rest_martigny, 1], ] # two distributions cfg = { - "library": { - "$ref": "https://bib.rero.ch/api/libraries/lib1" - }, + "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"}, "is_active": True, "category": { "indicator": { "type": "number_of_ill_requests", - "distributions": ["pickup_location", "created_month"] + "distributions": ["pickup_location", "created_month"], } - } + }, } assert StatsReport(cfg).collect() == [ - ['', '2023-02', '2024-01'], + ["", "2023-02", "2024-01"], [label_loc_pub_martigny_bourg, 0, 1], [label_loc_pub_martigny, 1, 0], - [label_loc_rest_martigny, 1, 0] + [label_loc_rest_martigny, 1, 0], ] # reverse distrubtions cfg = { - "library": { - "$ref": "https://bib.rero.ch/api/libraries/lib1" - }, + "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"}, "is_active": True, "category": { "indicator": { "type": "number_of_ill_requests", - "distributions": ["created_month", "pickup_location"] + "distributions": ["created_month", "pickup_location"], } - } + }, } assert StatsReport(cfg).collect() == [ [ - '', + "", label_loc_pub_martigny_bourg, label_loc_pub_martigny, - label_loc_rest_martigny + label_loc_rest_martigny, ], - ['2023-02', 0, 1, 1], - ['2024-01', 1, 0, 0] + ["2023-02", 0, 1, 1], + ["2024-01", 1, 0, 0], ] # year cfg = { - "library": { - "$ref": "https://bib.rero.ch/api/libraries/lib1" - }, + "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"}, "is_active": True, "category": { "indicator": { "type": "number_of_ill_requests", - "distributions": ["created_year", "pickup_location"] + "distributions": ["created_year", "pickup_location"], } - } + }, } assert StatsReport(cfg).collect() == [ [ - '', + "", label_loc_pub_martigny_bourg, label_loc_pub_martigny, - label_loc_rest_martigny + label_loc_rest_martigny, ], - ['2023', 0, 1, 1], - ['2024', 1, 0, 0] + ["2023", 0, 1, 1], + ["2024", 1, 0, 0], ] # type cfg = { - "library": { - "$ref": "https://bib.rero.ch/api/libraries/lib1" - }, + "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"}, "is_active": True, "category": { "indicator": { "type": "number_of_ill_requests", - "distributions": ["status", "pickup_location"] + "distributions": ["status", "pickup_location"], } - } + }, } assert StatsReport(cfg).collect() == [ [ - '', + "", label_loc_pub_martigny_bourg, label_loc_pub_martigny, - label_loc_rest_martigny + label_loc_rest_martigny, ], - ['closed', 1, 0, 0], - ['pending', 0, 1, 0], - ['validated', 0, 0, 1] + ["closed", 1, 0, 0], + ["pending", 0, 1, 0], + ["validated", 0, 0, 1], ] diff --git a/tests/ui/stats/test_stats_report_n_items.py b/tests/ui/stats/test_stats_report_n_items.py index 0d8e230785..2462797ea6 100644 --- a/tests/ui/stats/test_stats_report_n_items.py +++ b/tests/ui/stats/test_stats_report_n_items.py @@ -24,283 +24,276 @@ def test_stats_report_number_of_items( - org_martigny, org_sion, lib_martigny, lib_martigny_bourg, - lib_sion, loc_public_martigny, loc_restricted_martigny, - loc_public_martigny_bourg, loc_public_sion): + org_martigny, + org_sion, + lib_martigny, + lib_martigny_bourg, + lib_sion, + loc_public_martigny, + loc_restricted_martigny, + loc_public_martigny_bourg, + loc_public_sion, +): """Test the 
number of items.""" # no data cfg = { - "library": { - "$ref": "https://bib.rero.ch/api/libraries/lib1" - }, + "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"}, "is_active": True, - "category": { - "indicator": { - "type": "number_of_items" - } - } + "category": {"indicator": {"type": "number_of_items"}}, } assert StatsReport(cfg).collect() == [[0]] # fixtures - es.index(index='items', id='1', body={ - '_created': "2023-02-01", - 'type': 'standard', - 'organisation': {'pid': org_martigny.pid}, - 'library': {'pid': lib_martigny.pid}, - 'location': {'pid': loc_public_martigny.pid}, - 'document': { - 'document_type': [{ - 'main_type': 'docmaintype_book', - 'subtype': 'docsubtype_other_book' - }] - } - }) - es.index(index='items', id='2', body={ - '_created': "2023-02-01", - 'type': 'issue', - 'organisation': {'pid': org_martigny.pid}, - 'library': {'pid': lib_martigny.pid}, - 'location': {'pid': loc_restricted_martigny.pid}, - 'document': { - 'document_type': [{ - 'main_type': 'docmaintype_book', - 'subtype': 'docsubtype_other_book' - }] - } - - }) - es.index(index='items', id='3', body={ - '_created': "2024-01-01", - 'type': 'provisional', - 'organisation': {'pid': org_martigny.pid}, - 'library': {'pid': lib_martigny_bourg.pid}, - 'location': {'pid': loc_public_martigny_bourg.pid}, - 'document': { - 'document_type': [{ - 'main_type': 'docmaintype_book', - 'subtype': 'docsubtype_other_book' - }] - } - - }) - es.index(index='items', id='4', body={ - '_created': "2024-01-01", - 'type': 'standard', - 'organisation': {'pid': org_sion.pid}, - 'library': {'pid': lib_sion.pid}, - 'location': {'pid': loc_public_sion.pid}, - 'document': { - 'document_type': [{ - 'main_type': 'docmaintype_book', - 'subtype': 'docsubtype_other_book' - }] - } - - }) - es.indices.refresh(index='items') + es.index( + index="items", + id="1", + body={ + "_created": "2023-02-01", + "type": "standard", + "organisation": {"pid": org_martigny.pid}, + "library": {"pid": lib_martigny.pid}, + "location": {"pid": loc_public_martigny.pid}, + "document": { + "document_type": [ + { + "main_type": "docmaintype_book", + "subtype": "docsubtype_other_book", + } + ] + }, + }, + ) + es.index( + index="items", + id="2", + body={ + "_created": "2023-02-01", + "type": "issue", + "organisation": {"pid": org_martigny.pid}, + "library": {"pid": lib_martigny.pid}, + "location": {"pid": loc_restricted_martigny.pid}, + "document": { + "document_type": [ + { + "main_type": "docmaintype_book", + "subtype": "docsubtype_other_book", + } + ] + }, + }, + ) + es.index( + index="items", + id="3", + body={ + "_created": "2024-01-01", + "type": "provisional", + "organisation": {"pid": org_martigny.pid}, + "library": {"pid": lib_martigny_bourg.pid}, + "location": {"pid": loc_public_martigny_bourg.pid}, + "document": { + "document_type": [ + { + "main_type": "docmaintype_book", + "subtype": "docsubtype_other_book", + } + ] + }, + }, + ) + es.index( + index="items", + id="4", + body={ + "_created": "2024-01-01", + "type": "standard", + "organisation": {"pid": org_sion.pid}, + "library": {"pid": lib_sion.pid}, + "location": {"pid": loc_public_sion.pid}, + "document": { + "document_type": [ + { + "main_type": "docmaintype_book", + "subtype": "docsubtype_other_book", + } + ] + }, + }, + ) + es.indices.refresh(index="items") # no distributions cfg = { - "library": { - "$ref": "https://bib.rero.ch/api/libraries/lib1" - }, + "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"}, "is_active": True, - "category": { - "indicator": { - "type": 
"number_of_items" - } - } + "category": {"indicator": {"type": "number_of_items"}}, } assert StatsReport(cfg).collect() == [[3]] # no distributions with filters lib_pid = lib_martigny_bourg.pid cfg = { - "library": { - "$ref": "https://bib.rero.ch/api/libraries/lib1" - }, + "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"}, "is_active": True, - "filter_by_libraries": [{ - '$ref': - f'https://bib.rero.ch/api/libraries/{lib_pid}'}], - "category": { - "indicator": { - "type": "number_of_items" - } - } + "filter_by_libraries": [ + {"$ref": f"https://bib.rero.ch/api/libraries/{lib_pid}"} + ], + "category": {"indicator": {"type": "number_of_items"}}, } assert StatsReport(cfg).collect() == [[1]] # one distrubtions cfg = { - "library": { - "$ref": "https://bib.rero.ch/api/libraries/lib1" - }, + "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"}, "is_active": True, "category": { "indicator": { "type": "number_of_items", - "distributions": ["owning_library"] + "distributions": ["owning_library"], } - } + }, } assert StatsReport(cfg).collect() == [ [f'{lib_martigny_bourg.get("name")} ({lib_martigny_bourg.pid})', 1], - [f'{lib_martigny.get("name")} ({lib_martigny.pid})', 2] + [f'{lib_martigny.get("name")} ({lib_martigny.pid})', 2], ] # two distributions cfg = { - "library": { - "$ref": "https://bib.rero.ch/api/libraries/lib1" - }, + "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"}, "is_active": True, "category": { "indicator": { "type": "number_of_items", - "distributions": ["owning_library", "created_month"] + "distributions": ["owning_library", "created_month"], } - } + }, } assert StatsReport(cfg).collect() == [ - ['', '2023-02', '2024-01'], + ["", "2023-02", "2024-01"], [f'{lib_martigny_bourg.get("name")} ({lib_martigny_bourg.pid})', 0, 1], - [f'{lib_martigny.get("name")} ({lib_martigny.pid})', 2, 0] + [f'{lib_martigny.get("name")} ({lib_martigny.pid})', 2, 0], ] # reverse distrubtions cfg = { - "library": { - "$ref": "https://bib.rero.ch/api/libraries/lib1" - }, + "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"}, "is_active": True, "category": { "indicator": { "type": "number_of_items", - "distributions": ["created_month", "owning_library"] + "distributions": ["created_month", "owning_library"], } - } + }, } assert StatsReport(cfg).collect() == [ [ - '', + "", f'{lib_martigny_bourg.get("name")} ({lib_martigny_bourg.pid})', - f'{lib_martigny.get("name")} ({lib_martigny.pid})' + f'{lib_martigny.get("name")} ({lib_martigny.pid})', ], - ['2023-02', 0, 2], - ['2024-01', 1, 0] + ["2023-02", 0, 2], + ["2024-01", 1, 0], ] # year cfg = { - "library": { - "$ref": "https://bib.rero.ch/api/libraries/lib1" - }, + "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"}, "is_active": True, "category": { "indicator": { "type": "number_of_items", - "distributions": ["created_year", "owning_library"] + "distributions": ["created_year", "owning_library"], } - } + }, } assert StatsReport(cfg).collect() == [ [ - '', + "", f'{lib_martigny_bourg.get("name")} ({lib_martigny_bourg.pid})', - f'{lib_martigny.get("name")} ({lib_martigny.pid})' + f'{lib_martigny.get("name")} ({lib_martigny.pid})', ], - ['2023', 0, 2], - ['2024', 1, 0] + ["2023", 0, 2], + ["2024", 1, 0], ] # type cfg = { - "library": { - "$ref": "https://bib.rero.ch/api/libraries/lib1" - }, + "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"}, "is_active": True, "category": { "indicator": { "type": "number_of_items", - "distributions": ["type", "owning_library"] + "distributions": ["type", 
"owning_library"], } - } + }, } assert StatsReport(cfg).collect() == [ [ - '', + "", f'{lib_martigny_bourg.get("name")} ({lib_martigny_bourg.pid})', - f'{lib_martigny.get("name")} ({lib_martigny.pid})' + f'{lib_martigny.get("name")} ({lib_martigny.pid})', ], - ['issue', 0, 1], - ['provisional', 1, 0], - ['standard', 0, 1] + ["issue", 0, 1], + ["provisional", 1, 0], + ["standard", 0, 1], ] # location/type cfg = { - "library": { - "$ref": "https://bib.rero.ch/api/libraries/lib1" - }, + "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"}, "is_active": True, "category": { "indicator": { "type": "number_of_items", - "distributions": ["type", "owning_location"] + "distributions": ["type", "owning_location"], } - } + }, } - label_loc_pub_martigny = f'{lib_martigny["name"]} / '\ + label_loc_pub_martigny = ( + f'{lib_martigny["name"]} / ' f'{loc_public_martigny["name"]} ({loc_public_martigny.pid})' - label_loc_rest_martigny = f'{lib_martigny["name"]} / '\ + ) + label_loc_rest_martigny = ( + f'{lib_martigny["name"]} / ' f'{loc_restricted_martigny["name"]} ({loc_restricted_martigny.pid})' - label_loc_pub_martigny_bourg = f'{lib_martigny_bourg["name"]} / '\ - f'{loc_public_martigny_bourg["name"]} '\ - f'({loc_public_martigny_bourg.pid})' + ) + label_loc_pub_martigny_bourg = ( + f'{lib_martigny_bourg["name"]} / ' + f'{loc_public_martigny_bourg["name"]} ' + f"({loc_public_martigny_bourg.pid})" + ) assert StatsReport(cfg).collect() == [ [ - '', + "", label_loc_pub_martigny_bourg, label_loc_pub_martigny, - label_loc_rest_martigny + label_loc_rest_martigny, ], - ['issue', 0, 0, 1], - ['provisional', 1, 0, 0], - ['standard', 0, 1, 0] + ["issue", 0, 0, 1], + ["provisional", 1, 0, 0], + ["standard", 0, 1, 0], ] # doc types cfg = { - "library": { - "$ref": "https://bib.rero.ch/api/libraries/lib1" - }, + "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"}, "is_active": True, "category": { - "indicator": { - "type": "number_of_items", - "distributions": ["document_type"] - } - } + "indicator": {"type": "number_of_items", "distributions": ["document_type"]} + }, } - assert StatsReport(cfg).collect() == [ - ['docmaintype_book', 3] - ] + assert StatsReport(cfg).collect() == [["docmaintype_book", 3]] # doc subtypes cfg = { - "library": { - "$ref": "https://bib.rero.ch/api/libraries/lib1" - }, + "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"}, "is_active": True, "category": { "indicator": { "type": "number_of_items", - "distributions": ["document_subtype"] + "distributions": ["document_subtype"], } - } + }, } - assert StatsReport(cfg).collect() == [ - ['docsubtype_other_book', 3] - ] + assert StatsReport(cfg).collect() == [["docsubtype_other_book", 3]] diff --git a/tests/ui/stats/test_stats_report_n_patrons.py b/tests/ui/stats/test_stats_report_n_patrons.py index c8b8cb9df1..1620764693 100644 --- a/tests/ui/stats/test_stats_report_n_patrons.py +++ b/tests/ui/stats/test_stats_report_n_patrons.py @@ -28,14 +28,23 @@ def test_stats_report_number_of_patrons( - db, org_martigny, lib_martigny, org_sion, lib_martigny_bourg, - patron_type_children_martigny, patron_type_adults_martigny, - patron_type_grown_sion, loc_public_martigny, - loc_public_martigny_bourg, - patron_martigny_data, - patron2_martigny_data, patron_sion_data, roles + db, + org_martigny, + lib_martigny, + org_sion, + lib_martigny_bourg, + patron_type_children_martigny, + patron_type_adults_martigny, + patron_type_grown_sion, + loc_public_martigny, + loc_public_martigny_bourg, + patron_martigny_data, + patron2_martigny_data, + 
patron_sion_data, + roles, ): """Test the number of patrons and active patrons.""" + def patch_creation_date(patron, date): patron.model.created = date db.session.merge(patron.model) @@ -43,235 +52,181 @@ def patch_creation_date(patron, date): # no data cfg = { - "library": { - "$ref": "https://bib.rero.ch/api/libraries/lib1" - }, + "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"}, "is_active": True, - "category": { - "indicator": { - "type": "number_of_patrons" - } - } + "category": {"indicator": {"type": "number_of_patrons"}}, } assert StatsReport(cfg).collect() == [[0]] from rero_ils.modules.patrons.utils import create_patron_from_data patron_martigny = create_patron_from_data( - data={k: v for k, v in patron_martigny_data.items() if k != 'pid'}, + data={k: v for k, v in patron_martigny_data.items() if k != "pid"}, dbcommit=True, - reindex=True) + reindex=True, + ) patron2_martigny = create_patron_from_data( - data={k: v for k, v in patron2_martigny_data.items() if k != 'pid'}, + data={k: v for k, v in patron2_martigny_data.items() if k != "pid"}, dbcommit=True, - reindex=True) + reindex=True, + ) patron_sion = create_patron_from_data( - data={k: v for k, v in patron_sion_data.items() if k != 'pid'}, + data={k: v for k, v in patron_sion_data.items() if k != "pid"}, dbcommit=True, - reindex=True) + reindex=True, + ) date = datetime(year=2023, month=11, day=1) patch_creation_date(patron_martigny, date) patch_creation_date(patron2_martigny, date) patch_creation_date(patron_sion, date) - es.indices.refresh(index='patrons') + es.indices.refresh(index="patrons") # no distributions cfg = { - "library": { - "$ref": "https://bib.rero.ch/api/libraries/lib1" - }, + "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"}, "is_active": True, - "category": { - "indicator": { - "type": "number_of_patrons" - } - } + "category": {"indicator": {"type": "number_of_patrons"}}, } assert StatsReport(cfg).collect() == [[2]] # gender cfg = { - "library": { - "$ref": "https://bib.rero.ch/api/libraries/lib1" - }, + "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"}, "is_active": True, "category": { - "indicator": { - "type": "number_of_patrons", - "distributions": ["gender"] - } - } + "indicator": {"type": "number_of_patrons", "distributions": ["gender"]} + }, } - assert StatsReport(cfg).collect() == [ - ['female', 1], - ['male', 1] - ] + assert StatsReport(cfg).collect() == [["female", 1], ["male", 1]] # birth year cfg = { - "library": { - "$ref": "https://bib.rero.ch/api/libraries/lib1" - }, + "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"}, "is_active": True, "category": { - "indicator": { - "type": "number_of_patrons", - "distributions": ["birth_year"] - } - } + "indicator": {"type": "number_of_patrons", "distributions": ["birth_year"]} + }, } - assert StatsReport(cfg).collect() == [ - ['1947', 1], - ['1967', 1] - ] + assert StatsReport(cfg).collect() == [["1947", 1], ["1967", 1]] # patron type cfg = { - "library": { - "$ref": "https://bib.rero.ch/api/libraries/lib1" - }, + "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"}, "is_active": True, "category": { - "indicator": { - "type": "number_of_patrons", - "distributions": ["type"] - } - } + "indicator": {"type": "number_of_patrons", "distributions": ["type"]} + }, } - label_ptrn_type_children = f'{patron_type_children_martigny["name"]} '\ - f'({patron_type_children_martigny.pid})' - label_ptrn_type_adult = f'{patron_type_adults_martigny["name"]} '\ - f'({patron_type_adults_martigny.pid})' + 
label_ptrn_type_children = ( + f'{patron_type_children_martigny["name"]} ' + f"({patron_type_children_martigny.pid})" + ) + label_ptrn_type_adult = ( + f'{patron_type_adults_martigny["name"]} ' f"({patron_type_adults_martigny.pid})" + ) assert StatsReport(cfg).collect() == [ [label_ptrn_type_adult, 1], - [label_ptrn_type_children, 1] + [label_ptrn_type_children, 1], ] # postal code cfg = { - "library": { - "$ref": "https://bib.rero.ch/api/libraries/lib1" - }, + "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"}, "is_active": True, "category": { - "indicator": { - "type": "number_of_patrons", - "distributions": ["postal_code"] - } - } + "indicator": {"type": "number_of_patrons", "distributions": ["postal_code"]} + }, } - assert StatsReport(cfg).collect() == [ - ['1920', 2] - ] + assert StatsReport(cfg).collect() == [["1920", 2]] # role cfg = { - "library": { - "$ref": "https://bib.rero.ch/api/libraries/lib1" - }, + "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"}, "is_active": True, "category": { - "indicator": { - "type": "number_of_patrons", - "distributions": ["role"] - } - } + "indicator": {"type": "number_of_patrons", "distributions": ["role"]} + }, } - assert StatsReport(cfg).collect() == [ - ['patron', 2] - ] + assert StatsReport(cfg).collect() == [["patron", 2]] # gender month cfg = { - "library": { - "$ref": "https://bib.rero.ch/api/libraries/lib1" - }, + "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"}, "is_active": True, "category": { "indicator": { "type": "number_of_patrons", - "distributions": ["gender", "created_month"] + "distributions": ["gender", "created_month"], } - } + }, } - assert StatsReport(cfg).collect() == [ - ['', '2023-11'], - ['female', 1], - ['male', 1] - ] + assert StatsReport(cfg).collect() == [["", "2023-11"], ["female", 1], ["male", 1]] # gender year cfg = { - "library": { - "$ref": "https://bib.rero.ch/api/libraries/lib1" - }, + "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"}, "is_active": True, "category": { "indicator": { "type": "number_of_patrons", "period": "year", - "distributions": ["created_year", "gender"] + "distributions": ["created_year", "gender"], } - } + }, } - assert StatsReport(cfg).collect() == [ - ['', 'female', 'male'], - ['2023', 1, 1] - ] - - es.index(index='operation_logs-2020', id='1', body={ - "date": "2023-01-01", - "loan": { - "trigger": "checkin", - "patron": { - "pid": patron_martigny.pid, - "hashed_pid": hashlib.md5( - patron_martigny.pid.encode()).hexdigest() + assert StatsReport(cfg).collect() == [["", "female", "male"], ["2023", 1, 1]] + + es.index( + index="operation_logs-2020", + id="1", + body={ + "date": "2023-01-01", + "loan": { + "trigger": "checkin", + "patron": { + "pid": patron_martigny.pid, + "hashed_pid": hashlib.md5(patron_martigny.pid.encode()).hexdigest(), + }, + "item": {"library_pid": lib_martigny.pid}, + "transaction_location": {"pid": loc_public_martigny.pid}, }, - "item": { - "library_pid": lib_martigny.pid + "record": { + "type": "loan", }, - "transaction_location": { - "pid": loc_public_martigny.pid - } }, - "record": { - "type": "loan", - } - }, refresh=True) - - es.index(index='operation_logs-2020', id='2', body={ - "date": "2023-01-01", - "loan": { - "trigger": "checkin", - "patron": { - "pid": patron2_martigny.pid, - "hashed_pid": hashlib.md5( - patron2_martigny.pid.encode()).hexdigest() + refresh=True, + ) + + es.index( + index="operation_logs-2020", + id="2", + body={ + "date": "2023-01-01", + "loan": { + "trigger": "checkin", + "patron": { + 
"pid": patron2_martigny.pid, + "hashed_pid": hashlib.md5( + patron2_martigny.pid.encode() + ).hexdigest(), + }, + "item": {"library_pid": lib_martigny_bourg.pid}, + "transaction_location": {"pid": loc_public_martigny_bourg.pid}, }, - "item": { - "library_pid": lib_martigny_bourg.pid + "record": { + "type": "loan", }, - "transaction_location": { - "pid": loc_public_martigny_bourg.pid - } }, - "record": { - "type": "loan", - } - }, refresh=True) + refresh=True, + ) # active patrons cfg = { - "library": { - "$ref": "https://bib.rero.ch/api/libraries/lib1" - }, + "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"}, "is_active": True, "category": { "indicator": { "type": "number_of_active_patrons", "period": "year", } - } + }, } - with mock.patch( - 'rero_ils.modules.stats.api.report.datetime' - ) as mock_datetime: + with mock.patch("rero_ils.modules.stats.api.report.datetime") as mock_datetime: mock_datetime.now.return_value = datetime(year=2024, month=1, day=1) assert StatsReport(cfg).collect() == [[2]] @@ -279,23 +234,19 @@ def patch_creation_date(patron, date): # active patrons lib_pid = lib_martigny_bourg.pid cfg = { - "library": { - "$ref": "https://bib.rero.ch/api/libraries/lib1" - }, + "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"}, "is_active": True, - "filter_by_libraries": [{ - '$ref': - f'https://bib.rero.ch/api/libraries/{lib_pid}'}], + "filter_by_libraries": [ + {"$ref": f"https://bib.rero.ch/api/libraries/{lib_pid}"} + ], "category": { "indicator": { "type": "number_of_active_patrons", "period": "year", } - } + }, } - with mock.patch( - 'rero_ils.modules.stats.api.report.datetime' - ) as mock_datetime: + with mock.patch("rero_ils.modules.stats.api.report.datetime") as mock_datetime: mock_datetime.now.return_value = datetime(year=2024, month=1, day=1) assert StatsReport(cfg).collect() == [[1]] diff --git a/tests/ui/stats/test_stats_report_n_serial_holdings.py b/tests/ui/stats/test_stats_report_n_serial_holdings.py index d233ac751d..36230aad9b 100644 --- a/tests/ui/stats/test_stats_report_n_serial_holdings.py +++ b/tests/ui/stats/test_stats_report_n_serial_holdings.py @@ -24,161 +24,151 @@ def test_stats_report_number_of_serial_holdings( - org_martigny, org_sion, lib_martigny, lib_martigny_bourg, - lib_sion): + org_martigny, org_sion, lib_martigny, lib_martigny_bourg, lib_sion +): """Test the number of serials.""" # no data cfg = { - "library": { - "$ref": "https://bib.rero.ch/api/libraries/lib1" - }, + "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"}, "is_active": True, - "category": { - "indicator": { - "type": "number_of_serial_holdings" - } - } + "category": {"indicator": {"type": "number_of_serial_holdings"}}, } assert StatsReport(cfg).collect() == [[0]] # fixtures - es.index(index='holdings', id='1', body={ - '_created': "2023-02-01", - 'holdings_type': 'serial', - 'organisation': {'pid': org_martigny.pid}, - 'library': {'pid': lib_martigny.pid} - }) - es.index(index='holdings', id='2', body={ - '_created': "2024-01-01", - 'holdings_type': 'serial', - 'organisation': {'pid': org_martigny.pid}, - 'library': {'pid': lib_martigny_bourg.pid} - }) - es.index(index='holdings', id='3', body={ - '_created': "2024-01-01", - 'holdings_type': 'standard', - 'organisation': {'pid': org_martigny.pid}, - 'library': {'pid': lib_martigny_bourg.pid} - }) - es.index(index='holdings', id='4', body={ - '_created': "2024-01-01", - 'holdings_type': 'serial', - 'organisation': {'pid': org_sion.pid}, - 'library': {'pid': lib_sion.pid} - }) - 
es.indices.refresh(index='holdings')
+    es.index(
+        index="holdings",
+        id="1",
+        body={
+            "_created": "2023-02-01",
+            "holdings_type": "serial",
+            "organisation": {"pid": org_martigny.pid},
+            "library": {"pid": lib_martigny.pid},
+        },
+    )
+    es.index(
+        index="holdings",
+        id="2",
+        body={
+            "_created": "2024-01-01",
+            "holdings_type": "serial",
+            "organisation": {"pid": org_martigny.pid},
+            "library": {"pid": lib_martigny_bourg.pid},
+        },
+    )
+    es.index(
+        index="holdings",
+        id="3",
+        body={
+            "_created": "2024-01-01",
+            "holdings_type": "standard",
+            "organisation": {"pid": org_martigny.pid},
+            "library": {"pid": lib_martigny_bourg.pid},
+        },
+    )
+    es.index(
+        index="holdings",
+        id="4",
+        body={
+            "_created": "2024-01-01",
+            "holdings_type": "serial",
+            "organisation": {"pid": org_sion.pid},
+            "library": {"pid": lib_sion.pid},
+        },
+    )
+    es.indices.refresh(index="holdings")

     # no distributions
     cfg = {
-        "library": {
-            "$ref": "https://bib.rero.ch/api/libraries/lib1"
-        },
+        "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"},
         "is_active": True,
-        "category": {
-            "indicator": {
-                "type": "number_of_serial_holdings"
-            }
-        }
+        "category": {"indicator": {"type": "number_of_serial_holdings"}},
     }
     assert StatsReport(cfg).collect() == [[2]]

     # no distributions with filters
     lib_pid = lib_martigny_bourg.pid
     cfg = {
-        "library": {
-            "$ref": "https://bib.rero.ch/api/libraries/lib1"
-        },
+        "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"},
         "is_active": True,
-        "filter_by_libraries": [{
-            '$ref':
-                f'https://bib.rero.ch/api/libraries/{lib_pid}'}],
+        "filter_by_libraries": [
+            {"$ref": f"https://bib.rero.ch/api/libraries/{lib_pid}"}
+        ],
-        "category": {
-            "indicator": {
-                "type": "number_of_serial_holdings"
-            }
-        }
+        "category": {"indicator": {"type": "number_of_serial_holdings"}},
     }
     assert StatsReport(cfg).collect() == [[1]]

     # one distribution
     cfg = {
-        "library": {
-            "$ref": "https://bib.rero.ch/api/libraries/lib1"
-        },
+        "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"},
         "is_active": True,
         "category": {
             "indicator": {
                 "type": "number_of_serial_holdings",
-                "distributions": ["owning_library"]
+                "distributions": ["owning_library"],
             }
-        }
+        },
     }
     assert StatsReport(cfg).collect() == [
         [f'{lib_martigny_bourg.get("name")} ({lib_martigny_bourg.pid})', 1],
-        [f'{lib_martigny.get("name")} ({lib_martigny.pid})', 1]
+        [f'{lib_martigny.get("name")} ({lib_martigny.pid})', 1],
     ]

     # two distributions
     cfg = {
-        "library": {
-            "$ref": "https://bib.rero.ch/api/libraries/lib1"
-        },
+        "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"},
         "is_active": True,
         "category": {
             "indicator": {
                 "type": "number_of_serial_holdings",
-                "distributions": ["owning_library", "created_month"]
+                "distributions": ["owning_library", "created_month"],
             }
-        }
+        },
     }
     assert StatsReport(cfg).collect() == [
-        ['', '2023-02', '2024-01'],
+        ["", "2023-02", "2024-01"],
         [f'{lib_martigny_bourg.get("name")} ({lib_martigny_bourg.pid})', 0, 1],
-        [f'{lib_martigny.get("name")} ({lib_martigny.pid})', 1, 0]
+        [f'{lib_martigny.get("name")} ({lib_martigny.pid})', 1, 0],
     ]

     # reverse distributions
     cfg = {
-        "library": {
-            "$ref": "https://bib.rero.ch/api/libraries/lib1"
-        },
+        "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"},
         "is_active": True,
         "category": {
             "indicator": {
                 "type": "number_of_serial_holdings",
-                "distributions": ["created_month", "owning_library"]
+                "distributions": ["created_month", "owning_library"],
             }
-        }
+        },
     }
     assert StatsReport(cfg).collect() == [
         [
-            '',
+            "",
             f'{lib_martigny_bourg.get("name")} ({lib_martigny_bourg.pid})',
-            f'{lib_martigny.get("name")} ({lib_martigny.pid})'
+            f'{lib_martigny.get("name")} ({lib_martigny.pid})',
         ],
-        ['2023-02', 0, 1],
-        ['2024-01', 1, 0]
+        ["2023-02", 0, 1],
+        ["2024-01", 1, 0],
     ]

     # reverse distributions by year
     cfg = {
-        "library": {
-            "$ref": "https://bib.rero.ch/api/libraries/lib1"
-        },
+        "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"},
         "is_active": True,
         "category": {
             "indicator": {
                 "type": "number_of_serial_holdings",
-                "distributions": ["created_year", "owning_library"]
+                "distributions": ["created_year", "owning_library"],
             }
-        }
+        },
     }
     assert StatsReport(cfg).collect() == [
         [
-            '',
+            "",
             f'{lib_martigny_bourg.get("name")} ({lib_martigny_bourg.pid})',
-            f'{lib_martigny.get("name")} ({lib_martigny.pid})'
+            f'{lib_martigny.get("name")} ({lib_martigny.pid})',
         ],
-        ['2023', 0, 1],
-        ['2024', 1, 0]
+        ["2023", 0, 1],
+        ["2024", 1, 0],
     ]
diff --git a/tests/ui/stats/test_stats_report_number_of_ciculation.py b/tests/ui/stats/test_stats_report_number_of_ciculation.py
index 67e5229191..6464ba7be7 100644
--- a/tests/ui/stats/test_stats_report_number_of_ciculation.py
+++ b/tests/ui/stats/test_stats_report_number_of_ciculation.py
@@ -27,567 +27,518 @@

 def test_stats_report_circulation_trigger(
-    org_martigny, lib_martigny, lib_martigny_bourg, loc_public_martigny,
-    loc_public_martigny_bourg):
+    org_martigny,
+    lib_martigny,
+    lib_martigny_bourg,
+    loc_public_martigny,
+    loc_public_martigny_bourg,
+):
     """Test the circulation indicators."""
     # fixtures
-    for trigger in [
-        'checkin', 'checkout', 'extend', 'request', 'validate_request'
-    ]:
-        es.index(index='operation_logs-2020', id='1', body={
-            "date": "2023-01-01",
-            "loan": {
-                "trigger": trigger,
-                "item": {
-                    "document": {
-                        "type": "docsubtype_other_book"
+    for trigger in ["checkin", "checkout", "extend", "request", "validate_request"]:
+        es.index(
+            index="operation_logs-2020",
+            id="1",
+            body={
+                "date": "2023-01-01",
+                "loan": {
+                    "trigger": trigger,
+                    "item": {
+                        "document": {"type": "docsubtype_other_book"},
+                        "library_pid": lib_martigny.pid,
+                        "holding": {"location_name": loc_public_martigny["name"]},
+                    },
+                    "transaction_location": {"pid": loc_public_martigny.pid},
+                    "transaction_channel": "sip2",
+                    "patron": {
+                        "age": 13,
+                        "type": "Usager.ère moins de 14 ans",
+                        "postal_code": "1920",
                     },
-                    "library_pid": lib_martigny.pid,
-                    "holding": {
-                        "location_name": loc_public_martigny["name"]
-                    }
                 },
-                "transaction_location": {"pid": loc_public_martigny.pid},
-                "transaction_channel": "sip2",
-                "patron": {
-                    "age": 13,
-                    "type": "Usager.ère moins de 14 ans",
-                    "postal_code": "1920"
-                }
+                "record": {
+                    "type": "loan",
+                },
             },
-            "record": {
-                "type": "loan",
-            }
-        }, refresh=True)
-        es.index(index='operation_logs-2020', id='2', body={
-            "date": "2023-01-01",
-            "loan": {
-                "trigger": trigger,
-                "item": {
-                    "document": {
-                        "type": "docsubtype_other_book"
+            refresh=True,
+        )
+        es.index(
+            index="operation_logs-2020",
+            id="2",
+            body={
+                "date": "2023-01-01",
+                "loan": {
+                    "trigger": trigger,
+                    "item": {
+                        "document": {"type": "docsubtype_other_book"},
+                        "library_pid": lib_martigny.pid,
+                        "holding": {"location_name": loc_public_martigny["name"]},
+                    },
+                    "transaction_location": {"pid": loc_public_martigny_bourg.pid},
+                    "transaction_channel": "sip2",
+                    "patron": {
+                        "age": 13,
+                        "type": "Usager.ère moins de 14 ans",
+                        "postal_code": "1920",
                     },
-                    "library_pid": lib_martigny.pid,
-                    "holding": {
-                        "location_name": loc_public_martigny["name"]
-                    }
                 },
-
"transaction_location": {"pid": loc_public_martigny_bourg.pid}, - "transaction_channel": "sip2", - "patron": { - "age": 13, - "type": "Usager.ère moins de 14 ans", - "postal_code": "1920" - } + "record": { + "type": "loan", + }, }, - "record": { - "type": "loan", - } - }, refresh=True) + refresh=True, + ) cfg = { - "library": { - "$ref": "https://bib.rero.ch/api/libraries/lib1" - }, + "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"}, "is_active": True, - "category": { - "indicator": { - "type": f"number_of_{trigger}s" - } - } + "category": {"indicator": {"type": f"number_of_{trigger}s"}}, } assert StatsReport(cfg).collect() == [[2]] lib_pid = lib_martigny_bourg.pid cfg = { - "library": { - "$ref": "https://bib.rero.ch/api/libraries/lib1" - }, + "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"}, "is_active": True, - "filter_by_libraries": [{ - '$ref': - f'https://bib.rero.ch/api/libraries/{lib_pid}'}], - "category": { - "indicator": { - "type": f"number_of_{trigger}s" - } - } + "filter_by_libraries": [ + {"$ref": f"https://bib.rero.ch/api/libraries/{lib_pid}"} + ], + "category": {"indicator": {"type": f"number_of_{trigger}s"}}, } assert StatsReport(cfg).collect() == [[1]] def test_stats_report_number_of_checkins( - org_martigny, org_sion, lib_martigny, lib_martigny_bourg, - lib_sion, loc_public_martigny, - loc_public_martigny_bourg, loc_public_sion): + org_martigny, + org_sion, + lib_martigny, + lib_martigny_bourg, + lib_sion, + loc_public_martigny, + loc_public_martigny_bourg, + loc_public_sion, +): """Test the number of circulation checkins operations.""" - label_loc_pub_martigny = f'{lib_martigny["name"]} / '\ + label_loc_pub_martigny = ( + f'{lib_martigny["name"]} / ' f'{loc_public_martigny["name"]} ({loc_public_martigny.pid})' - label_loc_pub_martigny_bourg = f'{lib_martigny_bourg["name"]} / '\ - f'{loc_public_martigny_bourg["name"]} '\ - f'({loc_public_martigny_bourg.pid})' + ) + label_loc_pub_martigny_bourg = ( + f'{lib_martigny_bourg["name"]} / ' + f'{loc_public_martigny_bourg["name"]} ' + f"({loc_public_martigny_bourg.pid})" + ) # fixtures - es.index(index='operation_logs-2020', id='1', body={ - "date": "2023-01-01", - "loan": { - "trigger": "checkin", - "item": { - "document": { - "type": "docsubtype_other_book" + es.index( + index="operation_logs-2020", + id="1", + body={ + "date": "2023-01-01", + "loan": { + "trigger": "checkin", + "item": { + "document": {"type": "docsubtype_other_book"}, + "library_pid": lib_martigny.pid, + "holding": {"location_name": loc_public_martigny["name"]}, + }, + "transaction_location": {"pid": loc_public_martigny.pid}, + "transaction_channel": "sip2", + "patron": { + "age": 13, + "type": "Usager.ère moins de 14 ans", + "postal_code": "1920", }, - "library_pid": lib_martigny.pid, - "holding": { - "location_name": loc_public_martigny["name"] - } }, - "transaction_location": {"pid": loc_public_martigny.pid}, - "transaction_channel": "sip2", - "patron": { - "age": 13, - "type": "Usager.ère moins de 14 ans", - "postal_code": "1920" - } + "record": { + "type": "loan", + }, }, - "record": { - "type": "loan", - } - }, refresh=True) - - es.index(index='operation_logs-2020', id='2', body={ - "date": "2024-01-01", - "loan": { - "trigger": "checkin", - "item": { - "document": { - "type": "ebook" + refresh=True, + ) + + es.index( + index="operation_logs-2020", + id="2", + body={ + "date": "2024-01-01", + "loan": { + "trigger": "checkin", + "item": { + "document": {"type": "ebook"}, + "library_pid": lib_martigny_bourg.pid, + "holding": 
{"location_name": loc_public_martigny_bourg["name"]}, + }, + "transaction_location": {"pid": loc_public_martigny_bourg.pid}, + "transaction_channel": "system", + "patron": { + "age": 30, + "type": "Usager.ère plus de 18 ans", + "postal_code": "1930", }, - "library_pid": lib_martigny_bourg.pid, - "holding": { - "location_name": loc_public_martigny_bourg["name"] - } }, - "transaction_location": {"pid": loc_public_martigny_bourg.pid}, - "transaction_channel": "system", - "patron": { - "age": 30, - "type": "Usager.ère plus de 18 ans", - "postal_code": "1930" - } + "record": { + "type": "loan", + }, }, - "record": { - "type": "loan", - } - }, refresh=True) - - es.index(index='operation_logs-2020', id='3', body={ - "date": "2023-01-01", - "loan": { - "trigger": "checkin", - "item": { - "document": { - "type": "ebook" + refresh=True, + ) + + es.index( + index="operation_logs-2020", + id="3", + body={ + "date": "2023-01-01", + "loan": { + "trigger": "checkin", + "item": {"document": {"type": "ebook"}, "library_pid": lib_sion.pid}, + "transaction_location": {"pid": loc_public_sion.pid}, + "transaction_channel": "sip2", + "patron": { + "age": 13, + "type": "Usager.ère moins de 14 ans", + "postal_code": "1920", }, - "library_pid": lib_sion.pid }, - "transaction_location": {"pid": loc_public_sion.pid}, - "transaction_channel": "sip2", - "patron": { - "age": 13, - "type": "Usager.ère moins de 14 ans", - "postal_code": "1920" - } + "record": { + "type": "loan", + }, }, - "record": { - "type": "loan", - } - }, refresh=True) - - es.index(index='operation_logs-2020', id='4', body={ - "date": "2023-01-01", - "loan": { - "trigger": "checkin", - "item": { - "document": { - "type": "docsubtype_other_book" - }, - "library_pid": lib_martigny_bourg.pid + refresh=True, + ) + es.index( + index="operation_logs-2020", + id="4", + body={ + "date": "2023-01-01", + "loan": { + "trigger": "checkin", + "item": { + "document": {"type": "docsubtype_other_book"}, + "library_pid": lib_martigny_bourg.pid, + }, + "transaction_location": {"pid": loc_public_martigny_bourg.pid}, + "transaction_channel": "sip2", + "patron": { + "age": 13, + "type": "Usager.ère moins de 14 ans", + "postal_code": "1920", + }, + }, + "record": { + "type": "item", }, - "transaction_location": {"pid": loc_public_martigny_bourg.pid}, - "transaction_channel": "sip2", - "patron": { - "age": 13, - "type": "Usager.ère moins de 14 ans", - "postal_code": "1920" - } }, - "record": { - "type": "item", - } - }, refresh=True) - - es.index(index='operation_logs-2020', id='5', body={ - "date": "2023-01-01", - "loan": { - "trigger": "checkout", - "item": { - "document": { - "type": "docsubtype_other_book" + refresh=True, + ) + + es.index( + index="operation_logs-2020", + id="5", + body={ + "date": "2023-01-01", + "loan": { + "trigger": "checkout", + "item": { + "document": {"type": "docsubtype_other_book"}, + "library_pid": lib_martigny_bourg.pid, + }, + "transaction_location": {"pid": loc_public_martigny_bourg.pid}, + "transaction_channel": "sip2", + "patron": { + "age": 10, + "type": "Usager.ère moins de 14 ans", + "postal_code": "1920", }, - "library_pid": lib_martigny_bourg.pid }, - "transaction_location": {"pid": loc_public_martigny_bourg.pid}, - "transaction_channel": "sip2", - "patron": { - "age": 10, - "type": "Usager.ère moins de 14 ans", - "postal_code": "1920" - } + "record": { + "type": "loan", + }, }, - "record": { - "type": "loan", - } - }, refresh=True) + refresh=True, + ) # no distributions cfg = { - "library": { - "$ref": 
"https://bib.rero.ch/api/libraries/lib1" - }, + "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"}, "is_active": True, - "category": { - "indicator": { - "type": "number_of_checkins" - } - } + "category": {"indicator": {"type": "number_of_checkins"}}, } assert StatsReport(cfg).collect() == [[2]] # limit by period cfg = { - "library": { - "$ref": "https://bib.rero.ch/api/libraries/lib1" - }, + "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"}, "is_active": True, - "category": { - "indicator": { - "type": "number_of_checkins", - "period": "year" - } - } + "category": {"indicator": {"type": "number_of_checkins", "period": "year"}}, } - with mock.patch( - 'rero_ils.modules.stats.api.report.datetime' - ) as mock_datetime: + with mock.patch("rero_ils.modules.stats.api.report.datetime") as mock_datetime: mock_datetime.now.return_value = datetime(year=2024, month=1, day=1) assert StatsReport(cfg).collect() == [[1]] # one distrubtions cfg = { - "library": { - "$ref": "https://bib.rero.ch/api/libraries/lib1" - }, + "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"}, "is_active": True, "category": { "indicator": { "type": "number_of_checkins", - "distributions": ["transaction_location"] + "distributions": ["transaction_location"], } - } + }, } assert StatsReport(cfg).collect() == [ [label_loc_pub_martigny_bourg, 1], - [label_loc_pub_martigny, 1] + [label_loc_pub_martigny, 1], ] # two distributions cfg = { - "library": { - "$ref": "https://bib.rero.ch/api/libraries/lib1" - }, + "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"}, "is_active": True, "category": { "indicator": { "type": "number_of_checkins", - "distributions": ["transaction_location", "transaction_month"] + "distributions": ["transaction_location", "transaction_month"], } - } + }, } assert StatsReport(cfg).collect() == [ - ['', '2023-01', '2024-01'], + ["", "2023-01", "2024-01"], [label_loc_pub_martigny_bourg, 0, 1], - [label_loc_pub_martigny, 1, 0] + [label_loc_pub_martigny, 1, 0], ] # reverse distrubtions cfg = { - "library": { - "$ref": "https://bib.rero.ch/api/libraries/lib1" - }, + "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"}, "is_active": True, "category": { "indicator": { "type": "number_of_checkins", - "distributions": ["transaction_month", "transaction_location"] + "distributions": ["transaction_month", "transaction_location"], } - } + }, } assert StatsReport(cfg).collect() == [ - [ - '', - label_loc_pub_martigny_bourg, - label_loc_pub_martigny - ], - ['2023-01', 0, 1], - ['2024-01', 1, 0] + ["", label_loc_pub_martigny_bourg, label_loc_pub_martigny], + ["2023-01", 0, 1], + ["2024-01", 1, 0], ] # year cfg = { - "library": { - "$ref": "https://bib.rero.ch/api/libraries/lib1" - }, + "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"}, "is_active": True, "category": { "indicator": { "type": "number_of_checkins", - "distributions": ["transaction_year", "transaction_location"] + "distributions": ["transaction_year", "transaction_location"], } - } + }, } assert StatsReport(cfg).collect() == [ - [ - '', - label_loc_pub_martigny_bourg, - label_loc_pub_martigny - ], - ['2023', 0, 1], - ['2024', 1, 0] + ["", label_loc_pub_martigny_bourg, label_loc_pub_martigny], + ["2023", 0, 1], + ["2024", 1, 0], ] # patron type cfg = { - "library": { - "$ref": "https://bib.rero.ch/api/libraries/lib1" - }, + "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"}, "is_active": True, "category": { "indicator": { "type": "number_of_checkins", - "distributions": ["patron_type"] + 
"distributions": ["patron_type"], } - } + }, } assert StatsReport(cfg).collect() == [ - ['Usager.ère moins de 14 ans', 1], - ['Usager.ère plus de 18 ans', 1] + ["Usager.ère moins de 14 ans", 1], + ["Usager.ère plus de 18 ans", 1], ] # patron age cfg = { - "library": { - "$ref": "https://bib.rero.ch/api/libraries/lib1" - }, + "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"}, "is_active": True, "category": { - "indicator": { - "type": "number_of_checkins", - "distributions": ["patron_age"] - } - } + "indicator": {"type": "number_of_checkins", "distributions": ["patron_age"]} + }, } - assert StatsReport(cfg).collect() == [ - [13, 1], - [30, 1] - ] + assert StatsReport(cfg).collect() == [[13, 1], [30, 1]] # postal code cfg = { - "library": { - "$ref": "https://bib.rero.ch/api/libraries/lib1" - }, + "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"}, "is_active": True, "category": { "indicator": { "type": "number_of_checkins", - "distributions": ["patron_postal_code"] + "distributions": ["patron_postal_code"], } - } + }, } - assert StatsReport(cfg).collect() == [ - ['1920', 1], - ['1930', 1] - ] + assert StatsReport(cfg).collect() == [["1920", 1], ["1930", 1]] # patron type cfg = { - "library": { - "$ref": "https://bib.rero.ch/api/libraries/lib1" - }, + "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"}, "is_active": True, "category": { "indicator": { "type": "number_of_checkins", - "distributions": ["patron_type"] + "distributions": ["patron_type"], } - } + }, } assert StatsReport(cfg).collect() == [ - ['Usager.ère moins de 14 ans', 1], - ['Usager.ère plus de 18 ans', 1] + ["Usager.ère moins de 14 ans", 1], + ["Usager.ère plus de 18 ans", 1], ] # patron type cfg = { - "library": { - "$ref": "https://bib.rero.ch/api/libraries/lib1" - }, + "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"}, "is_active": True, "category": { "indicator": { "type": "number_of_checkins", - "distributions": ["document_type"] + "distributions": ["document_type"], } - } + }, } - assert StatsReport(cfg).collect() == [ - ['docsubtype_other_book', 1], - ['ebook', 1] - ] + assert StatsReport(cfg).collect() == [["docsubtype_other_book", 1], ["ebook", 1]] # transaction channel cfg = { - "library": { - "$ref": "https://bib.rero.ch/api/libraries/lib1" - }, + "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"}, "is_active": True, "category": { "indicator": { "type": "number_of_checkins", - "distributions": ["transaction_channel"] + "distributions": ["transaction_channel"], } - } + }, } - assert StatsReport(cfg).collect() == [ - ['sip2', 1], - ['system', 1] - ] + assert StatsReport(cfg).collect() == [["sip2", 1], ["system", 1]] # owning library cfg = { - "library": { - "$ref": "https://bib.rero.ch/api/libraries/lib1" - }, + "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"}, "is_active": True, "category": { "indicator": { "type": "number_of_checkins", - "distributions": ["owning_library"] + "distributions": ["owning_library"], } - } + }, } assert StatsReport(cfg).collect() == [ [f'{lib_martigny_bourg.get("name")} ({lib_martigny_bourg.pid})', 1], - [f'{lib_martigny.get("name")} ({lib_martigny.pid})', 1] + [f'{lib_martigny.get("name")} ({lib_martigny.pid})', 1], ] # owning location cfg = { - "library": { - "$ref": "https://bib.rero.ch/api/libraries/lib1" - }, + "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"}, "is_active": True, "category": { "indicator": { "type": "number_of_checkins", - "distributions": ["owning_location"] + "distributions": 
["owning_location"], } - } + }, } assert StatsReport(cfg).collect() == [ - [loc_public_martigny_bourg['name'], 1], - [loc_public_martigny['name'], 1] + [loc_public_martigny_bourg["name"], 1], + [loc_public_martigny["name"], 1], ] def test_stats_report_number_of_requests( - lib_martigny, lib_martigny_bourg, loc_public_martigny, - loc_public_martigny_bourg): + lib_martigny, lib_martigny_bourg, loc_public_martigny, loc_public_martigny_bourg +): """Test the number of circulation checkins operations.""" - label_loc_pub_martigny = f'{lib_martigny["name"]} / '\ + label_loc_pub_martigny = ( + f'{lib_martigny["name"]} / ' f'{loc_public_martigny["name"]} ({loc_public_martigny.pid})' - label_loc_pub_martigny_bourg = f'{lib_martigny_bourg["name"]} / '\ - f'{loc_public_martigny_bourg["name"]} '\ - f'({loc_public_martigny_bourg.pid})' + ) + label_loc_pub_martigny_bourg = ( + f'{lib_martigny_bourg["name"]} / ' + f'{loc_public_martigny_bourg["name"]} ' + f"({loc_public_martigny_bourg.pid})" + ) # fixtures - es.index(index='operation_logs-2020', id='1', body={ - "date": "2023-01-01", - "loan": { - "trigger": "request", - "item": { - "document": { - "type": "docsubtype_other_book" + es.index( + index="operation_logs-2020", + id="1", + body={ + "date": "2023-01-01", + "loan": { + "trigger": "request", + "item": { + "document": {"type": "docsubtype_other_book"}, + "library_pid": lib_martigny.pid, + "holding": {"location_name": loc_public_martigny["name"]}, + }, + "transaction_location": {"pid": loc_public_martigny.pid}, + "pickup_location": {"pid": loc_public_martigny.pid}, + "transaction_channel": "sip2", + "patron": { + "age": 13, + "type": "Usager.ère moins de 14 ans", + "postal_code": "1920", }, - "library_pid": lib_martigny.pid, - "holding": { - "location_name": loc_public_martigny["name"] - } }, - "transaction_location": {"pid": loc_public_martigny.pid}, - "pickup_location": {"pid": loc_public_martigny.pid}, - "transaction_channel": "sip2", - "patron": { - "age": 13, - "type": "Usager.ère moins de 14 ans", - "postal_code": "1920" - } + "record": { + "type": "loan", + }, }, - "record": { - "type": "loan", - } - }, refresh=True) - - es.index(index='operation_logs-2020', id='2', body={ - "date": "2024-01-01", - "loan": { - "trigger": "request", - "item": { - "document": { - "type": "ebook" + refresh=True, + ) + + es.index( + index="operation_logs-2020", + id="2", + body={ + "date": "2024-01-01", + "loan": { + "trigger": "request", + "item": { + "document": {"type": "ebook"}, + "library_pid": lib_martigny_bourg.pid, + "holding": {"location_name": loc_public_martigny_bourg["name"]}, + }, + "transaction_location": {"pid": loc_public_martigny_bourg.pid}, + "pickup_location": {"pid": loc_public_martigny_bourg.pid}, + "transaction_channel": "system", + "patron": { + "age": 30, + "type": "Usager.ère plus de 18 ans", + "postal_code": "1930", }, - "library_pid": lib_martigny_bourg.pid, - "holding": { - "location_name": loc_public_martigny_bourg["name"] - } }, - "transaction_location": {"pid": loc_public_martigny_bourg.pid}, - "pickup_location": {"pid": loc_public_martigny_bourg.pid}, - "transaction_channel": "system", - "patron": { - "age": 30, - "type": "Usager.ère plus de 18 ans", - "postal_code": "1930" - } + "record": { + "type": "loan", + }, }, - "record": { - "type": "loan", - } - }, refresh=True) + refresh=True, + ) # pickup location cfg = { - "library": { - "$ref": "https://bib.rero.ch/api/libraries/lib1" - }, + "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"}, "is_active": True, "category": { 
"indicator": { "type": "number_of_requests", - "distributions": ["pickup_location"] + "distributions": ["pickup_location"], } - } + }, } assert StatsReport(cfg).collect() == [ [label_loc_pub_martigny_bourg, 1], - [label_loc_pub_martigny, 1] + [label_loc_pub_martigny, 1], ] diff --git a/tests/ui/stats/test_stats_tasks.py b/tests/ui/stats/test_stats_tasks.py index e4c21f7d6f..fb642f8fbe 100644 --- a/tests/ui/stats/test_stats_tasks.py +++ b/tests/ui/stats/test_stats_tasks.py @@ -23,8 +23,8 @@ def test_stats_task_report(stats_cfg_martigny): """Test stat task reports generation.""" - res = collect_stats_reports('year') + res = collect_stats_reports("year") assert not res - res = collect_stats_reports('month') + res = collect_stats_reports("month") assert len(res) > 0 diff --git a/tests/ui/stats/test_stats_views.py b/tests/ui/stats/test_stats_views.py index 0afb969054..0ea6f2361c 100644 --- a/tests/ui/stats/test_stats_views.py +++ b/tests/ui/stats/test_stats_views.py @@ -26,53 +26,51 @@ def test_view_status( - client, patron_martigny, librarian_martigny, - system_librarian_martigny + client, patron_martigny, librarian_martigny, system_librarian_martigny ): """Test view status.""" # User not logged - result = client.get(url_for('stats.stats_billing')) + result = client.get(url_for("stats.stats_billing")) assert result.status_code == 401 # User without access permissions login_user_via_session(client, patron_martigny.user) - result = client.get(url_for('stats.stats_billing')) + result = client.get(url_for("stats.stats_billing")) assert result.status_code == 403 - result = client.get(url_for('stats.live_stats_billing')) + result = client.get(url_for("stats.live_stats_billing")) assert result.status_code == 403 # User with librarian permissions login_user_via_session(client, librarian_martigny.user) - result = client.get(url_for('stats.stats_billing')) + result = client.get(url_for("stats.stats_billing")) assert result.status_code == 403 - result = client.get(url_for('stats.live_stats_billing')) + result = client.get(url_for("stats.live_stats_billing")) assert result.status_code == 403 - result = client.get(url_for('stats.stats_librarian')) + result = client.get(url_for("stats.stats_librarian")) assert result.status_code == 403 - result = client.get(url_for('stats.stats_librarian', record_pid=1)) + result = client.get(url_for("stats.stats_librarian", record_pid=1)) assert result.status_code == 403 # User with system librarian permissions login_user_via_session(client, system_librarian_martigny.user) - result = client.get(url_for('stats.stats_billing')) + result = client.get(url_for("stats.stats_billing")) assert result.status_code == 403 - result = client.get(url_for('stats.live_stats_billing')) + result = client.get(url_for("stats.live_stats_billing")) assert result.status_code == 403 - result = client.get(url_for('stats.stats_librarian')) + result = client.get(url_for("stats.stats_librarian")) assert result.status_code == 200 - result = client.get(url_for('stats.stats_librarian', record_pid=1)) + result = client.get(url_for("stats.stats_librarian", record_pid=1)) assert result.status_code == 200 with mock.patch( - 'rero_ils.modules.stats.permissions.admin_permission', - mock.MagicMock() + "rero_ils.modules.stats.permissions.admin_permission", mock.MagicMock() ): - result = client.get(url_for('stats.stats_billing')) + result = client.get(url_for("stats.stats_billing")) assert result.status_code == 200 diff --git a/tests/ui/stats_cfg/test_stats_cfg_api.py b/tests/ui/stats_cfg/test_stats_cfg_api.py 
index b34ff203b1..8fa5998a93 100644 --- a/tests/ui/stats_cfg/test_stats_cfg_api.py +++ b/tests/ui/stats_cfg/test_stats_cfg_api.py @@ -25,22 +25,26 @@ from rero_ils.modules.stats_cfg.api import stat_cfg_id_fetcher as fetcher -def test_stats_cfg_create(db, client, stats_cfg_martigny_data, - patron_martigny, librarian_martigny, - system_librarian_martigny): +def test_stats_cfg_create( + db, + client, + stats_cfg_martigny_data, + patron_martigny, + librarian_martigny, + system_librarian_martigny, +): """Test statistics configuration creation.""" login_user_via_session(client, system_librarian_martigny.user) - stats_cfg = StatConfiguration.create(stats_cfg_martigny_data, - delete_pid=True) + stats_cfg = StatConfiguration.create(stats_cfg_martigny_data, delete_pid=True) assert stats_cfg == stats_cfg_martigny_data - assert stats_cfg.get('pid') == '1' + assert stats_cfg.get("pid") == "1" - stats_cfg = StatConfiguration.get_record_by_pid('1') + stats_cfg = StatConfiguration.get_record_by_pid("1") assert stats_cfg == stats_cfg_martigny_data fetched_pid = fetcher(stats_cfg.id, stats_cfg) - assert fetched_pid.pid_value == '1' - assert fetched_pid.pid_type == 'stacfg' + assert fetched_pid.pid_value == "1" + assert fetched_pid.pid_type == "stacfg" stats_cfg.delete() @@ -48,7 +52,7 @@ def test_stats_cfg_create(db, client, stats_cfg_martigny_data, def test_stats_cfg_can_delete(stats_cfg_martigny): """Test statistics configuration can delete.""" - assert stats_cfg_martigny.get_links_to_me('stats_cfg1') == {} + assert stats_cfg_martigny.get_links_to_me("stats_cfg1") == {} can, reasons = stats_cfg_martigny.can_delete assert can diff --git a/tests/ui/stats_cfg/test_stats_cfg_jsonresolver.py b/tests/ui/stats_cfg/test_stats_cfg_jsonresolver.py index 2e4fa4c324..de26b8bb5c 100644 --- a/tests/ui/stats_cfg/test_stats_cfg_jsonresolver.py +++ b/tests/ui/stats_cfg/test_stats_cfg_jsonresolver.py @@ -24,11 +24,12 @@ def test_stats_cfg_jsonresolver(stats_cfg_martigny): """Test statistics configuration json resolver.""" - rec = Record.create({ - 'stats_cfg': {'$ref': 'https://bib.rero.ch/api/stats_cfg/stats_cfg1'} - }) - assert rec.replace_refs().get('stats_cfg') == { - 'type': 'stacfg', 'pid': 'stats_cfg1' + rec = Record.create( + {"stats_cfg": {"$ref": "https://bib.rero.ch/api/stats_cfg/stats_cfg1"}} + ) + assert rec.replace_refs().get("stats_cfg") == { + "type": "stacfg", + "pid": "stats_cfg1", } # deleted record @@ -37,8 +38,8 @@ def test_stats_cfg_jsonresolver(stats_cfg_martigny): type(rec)(rec.replace_refs()).dumps() # non existing record - rec = Record.create({ - 'stats_cfg': {'$ref': 'https://bib.rero.ch/api/stats_cfg/n_e'} - }) + rec = Record.create( + {"stats_cfg": {"$ref": "https://bib.rero.ch/api/stats_cfg/n_e"}} + ) with pytest.raises(JsonRefError): type(rec)(rec.replace_refs()).dumps() diff --git a/tests/ui/stats_cfg/test_stats_cfg_mapping.py b/tests/ui/stats_cfg/test_stats_cfg_mapping.py index 495e9f46af..833fa419bd 100644 --- a/tests/ui/stats_cfg/test_stats_cfg_mapping.py +++ b/tests/ui/stats_cfg/test_stats_cfg_mapping.py @@ -20,12 +20,12 @@ from invenio_accounts.testutils import login_user_via_session from utils import get_mapping -from rero_ils.modules.stats_cfg.api import StatConfiguration, \ - StatsConfigurationSearch +from rero_ils.modules.stats_cfg.api import StatConfiguration, StatsConfigurationSearch -def test_stats_cfg_es_mapping(client, stats_cfg_martigny_data, - system_librarian_martigny): +def test_stats_cfg_es_mapping( + client, stats_cfg_martigny_data, system_librarian_martigny +): """Test 
statistics configuration elasticsearch mapping.""" search = StatsConfigurationSearch() mapping = get_mapping(search.Meta.index) @@ -33,7 +33,8 @@ def test_stats_cfg_es_mapping(client, stats_cfg_martigny_data, login_user_via_session(client, system_librarian_martigny.user) stats_cfg = StatConfiguration.create( - stats_cfg_martigny_data, dbcommit=True, reindex=True, delete_pid=True) + stats_cfg_martigny_data, dbcommit=True, reindex=True, delete_pid=True + ) assert mapping == get_mapping(search.Meta.index) stats_cfg.delete(force=True, dbcommit=True, delindex=True) @@ -42,7 +43,7 @@ def test_stats_cfg_search_mapping(app, stats_cfg_martigny, stats_cfg_sion): """Test statistics configuration search mapping.""" search = StatsConfigurationSearch() - es_query = search.source(['pid']).scan() + es_query = search.source(["pid"]).scan() pids = [hit.pid for hit in es_query] assert len(pids) == 2 - assert 'stats_cfg2' in pids + assert "stats_cfg2" in pids diff --git a/tests/ui/templates/test_templates_api.py b/tests/ui/templates/test_templates_api.py index c118af7ba8..7a244ccd9e 100644 --- a/tests/ui/templates/test_templates_api.py +++ b/tests/ui/templates/test_templates_api.py @@ -27,27 +27,28 @@ from rero_ils.modules.utils import get_ref_for_pid -def test_template_create(db, search, templ_doc_public_martigny_data, - org_martigny, system_librarian_martigny): +def test_template_create( + db, search, templ_doc_public_martigny_data, org_martigny, system_librarian_martigny +): """Test template creation.""" - templ_doc_public_martigny_data['toto'] = 'toto' + templ_doc_public_martigny_data["toto"] = "toto" with pytest.raises(ValidationError): Template.create(templ_doc_public_martigny_data, delete_pid=True) db.session.rollback() next_pid = Template.provider.identifier.next() - del templ_doc_public_martigny_data['toto'] + del templ_doc_public_martigny_data["toto"] temp = Template.create(templ_doc_public_martigny_data, delete_pid=True) next_pid += 1 assert temp == templ_doc_public_martigny_data - assert temp.get('pid') == str(next_pid) + assert temp.get("pid") == str(next_pid) temp = Template.get_record_by_pid(str(next_pid)) assert temp == templ_doc_public_martigny_data fetched_pid = fetcher(temp.id, temp) assert fetched_pid.pid_value == str(next_pid) - assert fetched_pid.pid_type == 'tmpl' + assert fetched_pid.pid_type == "tmpl" def test_template_can_delete(templ_doc_public_martigny): @@ -60,16 +61,16 @@ def test_template_can_delete(templ_doc_public_martigny): def test_template_replace_refs(templ_doc_public_martigny): """Test template replace_refs method.""" tmpl = templ_doc_public_martigny - tmpl.setdefault('data', {})['document'] = { - '$ref': get_ref_for_pid('doc', 'dummy_pid') + tmpl.setdefault("data", {})["document"] = { + "$ref": get_ref_for_pid("doc", "dummy_pid") } tmpl = tmpl.update(tmpl, dbcommit=True, reindex=True) - assert '$ref' in tmpl['data']['document'] - assert '$ref' in tmpl['creator'] + assert "$ref" in tmpl["data"]["document"] + assert "$ref" in tmpl["creator"] replace_data = tmpl.replace_refs() - assert '$ref' in replace_data['data']['document'] - assert '$ref' not in replace_data['creator'] + assert "$ref" in replace_data["data"]["document"] + assert "$ref" not in replace_data["creator"] # reset changes - del tmpl['data']['document'] + del tmpl["data"]["document"] tmpl.update(tmpl, dbcommit=True, reindex=True) diff --git a/tests/ui/templates/test_templates_mapping.py b/tests/ui/templates/test_templates_mapping.py index aee9699473..2618456bac 100644 --- 
a/tests/ui/templates/test_templates_mapping.py +++ b/tests/ui/templates/test_templates_mapping.py @@ -22,16 +22,20 @@ from rero_ils.modules.templates.api import Template, TemplatesSearch -def test_template_es_mapping(search, db, templ_doc_public_martigny_data, - org_martigny, system_librarian_martigny, - librarian_martigny): +def test_template_es_mapping( + search, + db, + templ_doc_public_martigny_data, + org_martigny, + system_librarian_martigny, + librarian_martigny, +): """Test template elasticsearch mapping.""" search = TemplatesSearch() mapping = get_mapping(search.Meta.index) assert mapping tmpl = Template.create( - templ_doc_public_martigny_data, - dbcommit=True, reindex=True, delete_pid=True + templ_doc_public_martigny_data, dbcommit=True, reindex=True, delete_pid=True ) new_mapping = get_mapping(search.Meta.index) assert mapping == new_mapping @@ -39,11 +43,12 @@ def test_template_es_mapping(search, db, templ_doc_public_martigny_data, def test_template_search_mapping( - app, templ_doc_public_martigny, templ_doc_private_martigny): + app, templ_doc_public_martigny, templ_doc_private_martigny +): """Test template search mapping.""" search = TemplatesSearch() - c = search.query('match', template_type='documents').count() + c = search.query("match", template_type="documents").count() assert c == 2 - c = search.query('match', organisation__pid='org1').count() + c = search.query("match", organisation__pid="org1").count() assert c == 2 diff --git a/tests/ui/test_api.py b/tests/ui/test_api.py index 4f416a0825..a407a7fa27 100644 --- a/tests/ui/test_api.py +++ b/tests/ui/test_api.py @@ -26,7 +26,6 @@ from invenio_pidstore.providers.base import BaseProvider from invenio_records.models import RecordMetadataBase from jsonschema.exceptions import ValidationError -from utils import flush_index from rero_ils.modules.api import IlsRecord, IlsRecordError, IlsRecordsSearch from rero_ils.modules.fetchers import id_fetcher @@ -37,12 +36,13 @@ class IdentifierTest(RecordIdentifier): """Sequence generator for Test identifiers.""" - __tablename__ = 'test_id' - __mapper_args__ = {'concrete': True} + __tablename__ = "test_id" + __mapper_args__ = {"concrete": True} recid = db.Column( - db.BigInteger().with_variant(db.Integer, 'sqlite'), - primary_key=True, autoincrement=True, + db.BigInteger().with_variant(db.Integer, "sqlite"), + primary_key=True, + autoincrement=True, ) @@ -52,9 +52,9 @@ class SearchTest(IlsRecordsSearch): class Meta: """Search only on test index.""" - index = 'records-record-v1.0.0' + index = "records-record-v1.0.0" doc_types = None - fields = ('*', ) + fields = ("*",) facets = {} default_filter = None @@ -63,7 +63,7 @@ class Meta: class ProviderTest(BaseProvider): """Test identifier provider.""" - pid_type = 'test' + pid_type = "test" """Type of persistent identifier.""" pid_identifier = IdentifierTest.__tablename__ @@ -82,11 +82,11 @@ class ProviderTest(BaseProvider): @classmethod def create(cls, object_type=None, object_uuid=None, **kwargs): """Create a new Test identifier.""" - if not kwargs.get('pid_value'): - kwargs['pid_value'] = str(IdentifierTest.next()) - kwargs.setdefault('status', cls.default_status) + if not kwargs.get("pid_value"): + kwargs["pid_value"] = str(IdentifierTest.next()) + kwargs.setdefault("status", cls.default_status) if object_type and object_uuid: - kwargs['status'] = PIDStatus.REGISTERED + kwargs["status"] = PIDStatus.REGISTERED return super().create( object_type=object_type, object_uuid=object_uuid, **kwargs ) @@ -108,7 +108,7 @@ class 
TestRecordMetadata(db.Model, RecordMetadataBase):
 class RecordTest(IlsRecord):
     """Test record class."""

-    name = 'records'
+    name = "records"
     minter = id_minter_test
     fetcher = id_fetcher_test
     provider = ProviderTest
@@ -119,12 +119,8 @@
 def test_ilsrecord(app, es_default_index, ils_record, ils_record_2):
     """Test IlsRecord update."""
     # the created records will be accessible in all functions of this test file
-    record_1 = RecordTest.create(
-        data=ils_record,
-        dbcommit=True,
-        reindex=True
-    )
-    assert record_1.pid == 'ilsrecord_pid'
+    record_1 = RecordTest.create(data=ils_record, dbcommit=True, reindex=True)
+    assert record_1.pid == "ilsrecord_pid"
     assert record_1.id == RecordTest.get_id_by_pid(record_1.pid)
     record_alias = record_1.db_record()
     assert record_1.pid == record_alias.pid
@@ -134,79 +130,73 @@ def test_ilsrecord(app, es_default_index, ils_record, ils_record_2):
         dbcommit=True,
         reindex=True,
     )
-    assert record_2.pid == 'ilsrecord_pid_2'
+    assert record_2.pid == "ilsrecord_pid_2"
     record_created_pid = RecordTest.create(
-        data=ils_record,
-        reindex=True,
-        dbcommit=True,
-        delete_pid=True
+        data=ils_record, reindex=True, dbcommit=True, delete_pid=True
     )
-    assert record_created_pid.pid == '1'
+    assert record_created_pid.pid == "1"
     with pytest.raises(IlsRecordError.PidAlreadyUsed):
-        RecordTest.create(
-            data=ils_record,
-            dbcommit=True,
-            reindex=True
-        )
-    flush_index(SearchTest.Meta.index)
+        RecordTest.create(data=ils_record, dbcommit=True, reindex=True)
+    SearchTest.flush_and_refresh()

     """Test IlsRecord."""
     assert sorted(RecordTest.get_all_pids()) == [
-        '1', 'ilsrecord_pid', 'ilsrecord_pid_2'
+        "1",
+        "ilsrecord_pid",
+        "ilsrecord_pid_2",
     ]
     assert len(list(RecordTest.get_all_pids(limit=None))) == 3
     assert len(list(RecordTest.get_all_ids(limit=None))) == 3
-    assert RecordTest.get_id_by_pid(record_created_pid.pid) == \
-        record_created_pid.id
-    assert not RecordTest.get_record_by_pid('dummy')
+    assert RecordTest.get_id_by_pid(record_created_pid.pid) == record_created_pid.id
+    assert not RecordTest.get_record_by_pid("dummy")

     """Test IlsRecord update."""
-    record = RecordTest.get_record_by_pid('ilsrecord_pid')
-    record['name'] = 'name changed'
+    record = RecordTest.get_record_by_pid("ilsrecord_pid")
+    record["name"] = "name changed"
     record = record.update(record, dbcommit=True)
-    assert record['name'] == 'name changed'
+    assert record["name"] == "name changed"
     with pytest.raises(IlsRecordError.PidChange):
-        record['pid'] = 'pid changed'
+        record["pid"] = "pid changed"
         record.update(record, dbcommit=True)

     """Test IlsRecord replace."""
-    record = RecordTest.get_record_by_pid('ilsrecord_pid')
+    record = RecordTest.get_record_by_pid("ilsrecord_pid")

-    del record['name']
+    del record["name"]
     record = record.replace(record, dbcommit=True)
     assert record.get_links_to_me() == {}
-    assert not record.get('name')
+    assert not record.get("name")
     with pytest.raises(IlsRecordError.PidMissing):
-        del record['pid']
+        del record["pid"]
         record.replace(record, dbcommit=True)

     """Test IlsRecord get pid by id."""
-    record = RecordTest.get_record_by_pid('ilsrecord_pid')
+    record = RecordTest.get_record_by_pid("ilsrecord_pid")
     pid = RecordTest.get_pid_by_id(record.id)
     assert pid == record.pid

     """Test IlsRecord record pid exists."""
-    assert RecordTest.record_pid_exists('ilsrecord_pid')
-    assert not RecordTest.record_pid_exists('unknown')
+    assert RecordTest.record_pid_exists("ilsrecord_pid")
+    assert not RecordTest.record_pid_exists("unknown")

     """Test IlsRecord revert."""
-    record = RecordTest.get_record_by_pid('ilsrecord_pid')
+    record = RecordTest.get_record_by_pid("ilsrecord_pid")
     record = record.revert(record.revision_id - 1)
-    assert record.get('name') == 'name changed'
+    assert record.get("name") == "name changed"
     record.delete()
     with pytest.raises(IlsRecordError.Deleted):
         record = record.revert(record.revision_id - 1)
     record = record.undelete()
-    assert record.get('pid') == 'ilsrecord_pid'
+    assert record.get("pid") == "ilsrecord_pid"
     with pytest.raises(IlsRecordError.NotDeleted):
         record = record.undelete()

     """Test IlsRecord es search."""
     search = SearchTest()
-    count = search.filter('match_all').source().count()
+    count = search.filter("match_all").source().count()
     assert count == 3
     # TODO: do we need a mapping for this to work?
     # search_one = list(search.filter('term', pid='ilsrecord_pid')
@@ -214,13 +204,13 @@ def test_ilsrecord(app, es_default_index, ils_record, ils_record_2):
     # assert search_one[0]['pid'] == 'ilsrecord_pid_2'

     """Test IlsRecord delete."""
-    record = RecordTest.get_record_by_pid('ilsrecord_pid')
+    record = RecordTest.get_record_by_pid("ilsrecord_pid")
     record.delete(delindex=True)
     assert RecordTest.count() == 2
-    record = RecordTest.get_record_by_pid('ilsrecord_pid_2')
+    record = RecordTest.get_record_by_pid("ilsrecord_pid_2")
     record.delete(delindex=True)
     assert RecordTest.count() == 1
-    record = RecordTest.get_record_by_pid('1')
+    record = RecordTest.get_record_by_pid("1")
     record.delete(delindex=True)
     assert RecordTest.count() == 0

@@ -228,22 +218,20 @@
 class FailedPidIdentifier(RecordIdentifier):
     """Sequence generator for Test identifiers."""

-    __tablename__ = 'failed_id'
-    __mapper_args__ = {'concrete': True}
+    __tablename__ = "failed_id"
+    __mapper_args__ = {"concrete": True}

     recid = db.Column(
-        db.BigInteger().with_variant(db.Integer, 'sqlite'),
-        primary_key=True, autoincrement=True,
+        db.BigInteger().with_variant(db.Integer, "sqlite"),
+        primary_key=True,
+        autoincrement=True,
     )


 FailedPidProvider = type(
-    'FailedPidProvider',
+    "FailedPidProvider",
     (Provider,),
-    dict(
-        identifier=FailedPidIdentifier,
-        pid_type='failed'
-    )
+    dict(identifier=FailedPidIdentifier, pid_type="failed"),
 )

 # failedPID minter
@@ -261,22 +249,22 @@ class FailedIlsRecord(IlsRecord):

 def test_ilsrecord_failed_pid(app, es_default_index, ils_record, ils_record_2):
     """Test IlsRecord PID after validation failed."""
     schema = {
-        'type': 'object',
-        'properties': {
-            'name': {
-                'type': 'string',
+        "type": "object",
+        "properties": {
+            "name": {
+                "type": "string",
+            },
+            "noideaforafield": {
+                "type": "string",
+            },
-            'noideaforafield': {
-                'type': 'string',
-            }
         },
-        'required': ['name', 'noideaforafield']
+        "required": ["name", "noideaforafield"],
     }
     with pytest.raises(ValidationError):
         FailedIlsRecord.create(
             data={
-                '$schema': schema,
-                'name': 'Bad IlsRecord',
+                "$schema": schema,
+                "name": "Bad IlsRecord",
             },
             delete_pid=False,
         )
@@ -295,8 +283,8 @@ def test_ilsrecord_failed_pid(app, es_default_index, ils_record, ils_record_2):
     assert record2.pid == str(next_pid)

     ils_record_3 = {
-        'pid': '3',
-        'name': 'IlsRecord Name 3',
+        "pid": "3",
+        "name": "IlsRecord Name 3",
     }
     with pytest.raises(IlsRecordError.PidAlreadyUsed):
         record3 = FailedIlsRecord.create(data=ils_record_3, delete_pid=False)
diff --git a/tests/ui/test_api_schema.py b/tests/ui/test_api_schema.py
index cbcac1bf0f..aa06c217b0 100644
--- a/tests/ui/test_api_schema.py
+++ b/tests/ui/test_api_schema.py
@@ -28,12 +28,7 @@ def test_replace_refs(app):
         "$schema": "http://json-schema.org/draft-07/schema#",
         "title": "TEST SCHEMA",
         "type": "object",
-        "required": [
-            "$schema",
-            "pid",
-            "test1",
-            "test2"
-        ],
+        "required": ["$schema", "pid", "test1", "test2"],
         "propertiesOrder": [
             "pid",
             "test1",
@@ -45,23 +40,19 @@ def test_replace_refs(app):
                 "title": "Schema",
                 "description": "Schema to validate document against.",
                 "type": "string",
-                "default": "https://bib.rero.ch/schemas/tests/test.json"
-            },
-            "pid": {
-                "title": "Document PID",
-                "type": "string",
-                "minLength": 1
-            },
-            "test1": {
-                "$ref": "https://bib.rero.ch/schemas/tests/test1.json#/1"
+                "default": "https://bib.rero.ch/schemas/tests/test.json",
             },
-            "test2": {
-                "$ref": "https://bib.rero.ch/schemas/tests/test2.json#/2"
-            }
-        }
+            "pid": {"title": "Document PID", "type": "string", "minLength": 1},
+            "test1": {"$ref": "https://bib.rero.ch/schemas/tests/test1.json#/1"},
+            "test2": {"$ref": "https://bib.rero.ch/schemas/tests/test2.json#/2"},
+        },
     }
-    schema = replace_ref_url(schema, 'test.org')
-    assert schema['properties']['test1']['$ref'] == \
-        'https://test.org/schemas/tests/test1.json#/1'
-    assert schema['properties']['test2']['$ref'] == \
-        'https://test.org/schemas/tests/test2.json#/2'
+    schema = replace_ref_url(schema, "test.org")
+    assert (
+        schema["properties"]["test1"]["$ref"]
+        == "https://test.org/schemas/tests/test1.json#/1"
+    )
+    assert (
+        schema["properties"]["test2"]["$ref"]
+        == "https://test.org/schemas/tests/test2.json#/2"
+    )
diff --git a/tests/ui/test_filters.py b/tests/ui/test_filters.py
index 26bc0caa81..0e87369602 100644
--- a/tests/ui/test_filters.py
+++ b/tests/ui/test_filters.py
@@ -17,97 +17,106 @@

 """Jinja2 filters tests."""

-from rero_ils.filter import address_block, empty_data, format_date_filter, \
-    get_record_by_ref, jsondumps, text_to_id, to_pretty_json
+from rero_ils.filter import (
+    address_block,
+    empty_data,
+    format_date_filter,
+    get_record_by_ref,
+    jsondumps,
+    text_to_id,
+    to_pretty_json,
+)


 def test_get_record_by_ref(document_data, document):
     """Test resolve."""
-    record_es = get_record_by_ref(
-        {'$ref': 'https://bib.rero.ch/api/documents/doc1'})
-    assert document_data['pid'] == record_es['pid']
+    record_es = get_record_by_ref({"$ref": "https://bib.rero.ch/api/documents/doc1"})
+    assert document_data["pid"] == record_es["pid"]


 def test_date_filter_format_timestamp_en(app):
     """Test full english date and time filter."""
-    datestring = format_date_filter('2018-06-06T09:29:55.947149+00:00')
-    assert 'Wednesday 6 June 2018, 11:29:55' in datestring
+    datestring = format_date_filter("2018-06-06T09:29:55.947149+00:00")
+    assert "Wednesday 6 June 2018, 11:29:55" in datestring

-    datestring = format_date_filter(
-        '2018-06-06T09:29:55.947149+00:00', locale='fr')
-    assert 'mercredi 6 juin 2018, 11:29:55' in datestring
+    datestring = format_date_filter("2018-06-06T09:29:55.947149+00:00", locale="fr")
+    assert "mercredi 6 juin 2018, 11:29:55" in datestring

     datestring = format_date_filter(
-        '1950-01-01', date_format='short', time_format=None, locale='fr_CH')
-    assert '01.01' in datestring
+        "1950-01-01", date_format="short", time_format=None, locale="fr_CH"
+    )
+    assert "01.01" in datestring


 def test_date_filter_format_default_en(app):
     """Test short english date filter."""
-    datestring = format_date_filter(
-        '1950-01-01', date_format='short', time_format=None)
-    assert '01/01/1950' in datestring
+    datestring = format_date_filter("1950-01-01", date_format="short", time_format=None)
+    assert "01/01/1950" in datestring

def test_date_filter_timezone(app):
     """Test date filter with timezone."""
     datestring = format_date_filter(
-        '2018-06-06T09:29:55.947149+00:00', timezone='Europe/Helsinki')
-    assert 'Wednesday 6 June 2018, 12:29:55' in datestring
+        "2018-06-06T09:29:55.947149+00:00", timezone="Europe/Helsinki"
+    )
+    assert "Wednesday 6 June 2018, 12:29:55" in datestring


 def test_date_filter_format_medium_date_en(app):
     """Test medium_date english date filter."""
     datestring = format_date_filter(
-        '1950-01-01', date_format='medium', time_format=None)
-    assert '1 Jan 1950' in datestring
+        "1950-01-01", date_format="medium", time_format=None
+    )
+    assert "1 Jan 1950" in datestring


 def test_date_filter_format_full_en(app):
     """Test full english date filter."""
-    datestring = format_date_filter(
-        '1950-01-01', date_format='full', time_format=None)
-    assert 'Sunday 1 January 1950' in datestring
+    datestring = format_date_filter("1950-01-01", date_format="full", time_format=None)
+    assert "Sunday 1 January 1950" in datestring


 def test_date_filter_format_full_fr(app):
     """Test full french date filter."""
     datestring = format_date_filter(
-        '1950-01-01', date_format='full', time_format=None, locale='fr')
-    assert 'dimanche 1 janvier 1950' in datestring
+        "1950-01-01", date_format="full", time_format=None, locale="fr"
+    )
+    assert "dimanche 1 janvier 1950" in datestring


 def test_date_filter_format_short_fr(app):
     """Test short french date filter with pernicious date."""
     datestring = format_date_filter(
-        '2006-08-14', date_format='short', time_format=None, locale='fr_CH')
-    assert datestring == '14.08.06'
+        "2006-08-14", date_format="short", time_format=None, locale="fr_CH"
+    )
+    assert datestring == "14.08.06"


 def test_time_filter_format_default(app):
     """Test default time."""
     datestring = format_date_filter(
-        '2018-06-06T09:29:55.947149+00:00', date_format=None)
-    assert datestring == '11:29:55'
+        "2018-06-06T09:29:55.947149+00:00", date_format=None
+    )
+    assert datestring == "11:29:55"


 def test_time_filter_format_fr(app):
     """Test default time."""
     datestring = format_date_filter(
-        '2018-06-06T09:29:55.947149+00:00', date_format=None, locale='fr')
-    assert datestring == '11:29:55'
+        "2018-06-06T09:29:55.947149+00:00", date_format=None, locale="fr"
+    )
+    assert datestring == "11:29:55"


 def test_time_filter_format_delimiter(app):
     """Test time with delimiter."""
-    datestring = format_date_filter(
-        '2018-06-06T09:29:55.947149+00:00', delimiter=' - ')
-    assert datestring == 'Wednesday 6 June 2018 - 11:29:55'
+    datestring = format_date_filter("2018-06-06T09:29:55.947149+00:00", delimiter=" - ")
+    assert datestring == "Wednesday 6 June 2018 - 11:29:55"


 def test_to_pretty():
     """Test json pretty."""
-    data = {'test': '1'}
+    data = {"test": "1"}
     new_data = '{\n "test": "1"\n}'
     assert to_pretty_json(data) == new_data
     assert jsondumps(data) == new_data
@@ -115,29 +124,29 @@

 def test_text_to_id():
     """Test text to id."""
-    assert 'LoremIpsum' == text_to_id('Lorem Ipsum')
+    assert "LoremIpsum" == text_to_id("Lorem Ipsum")


 def test_empty_data():
     """Test empty data."""
-    assert 'data' == empty_data('data')
-    substitution_text = 'no data available'
+    assert "data" == empty_data("data")
+    substitution_text = "no data available"
     assert substitution_text in empty_data(None, substitution_text)


 def test_address_block_filter(lib_martigny):
     """Test address block filter."""
-    address = lib_martigny\
-        .get('acquisition_settings', {})\
-        .get('shipping_informations', {})
+    address = lib_martigny.get("acquisition_settings", {}).get(
+        "shipping_informations", {}
+    )
     # ensure the fixture defines a shipping address with correct data
-    assert address and address.get('email') and address.get('phone')
+    assert address and address.get("email") and address.get("phone")
     # test the filter
-    tmp_data = address_block(address, 'fre')
-    assert address.get('email') in tmp_data
-    assert address.get('phone') in tmp_data
-    assert 'E-mail:' in tmp_data
-    assert 'Email:' in address_block(address, 'eng')
-    assert 'Email:' in address_block(address, 'dummy')
+    tmp_data = address_block(address, "fre")
+    assert address.get("email") in tmp_data
+    assert address.get("phone") in tmp_data
+    assert "E-mail:" in tmp_data
+    assert "Email:" in address_block(address, "eng")
+    assert "Email:" in address_block(address, "dummy")
diff --git a/tests/ui/test_indexer_utils.py b/tests/ui/test_indexer_utils.py
index 72c69e9958..a17bbb5b64 100644
--- a/tests/ui/test_indexer_utils.py
+++ b/tests/ui/test_indexer_utils.py
@@ -19,7 +19,6 @@

 import pytest
 from elasticsearch import NotFoundError
 from mock import mock
-from utils import flush_index

 from rero_ils.modules.documents.api import DocumentsSearch
 from rero_ils.modules.indexer_utils import record_to_index
@@ -30,16 +29,16 @@
 def test_record_indexing(app, lib_martigny):
     """Test record indexing process."""
     # TEST#1 :: Test indexing without $ref replacement
-    app.config['INDEXER_REPLACE_REFS'] = False
+    app.config["INDEXER_REPLACE_REFS"] = False
     lib_martigny.reindex()
-    flush_index(LibrariesSearch.Meta.index)
+    LibrariesSearch.flush_and_refresh()
     record = LibrariesSearch().get_record_by_pid(lib_martigny.pid)
-    assert '$ref' in record.organisation.to_dict()
+    assert "$ref" in record.organisation.to_dict()

     # TEST#2 :: Raise exception during indexing process
     with mock.patch(
-        'rero_ils.modules.api.IlsRecordsIndexer._index_action',
-        side_effect=Exception('Test!')
+        "rero_ils.modules.api.IlsRecordsIndexer._index_action",
+        side_effect=Exception("Test!"),
     ):
         indexer = LibrariesIndexer()
         indexer.bulk_index([lib_martigny.id])
@@ -47,45 +46,59 @@
         assert res[1] == (0, 0)

     # RESET INDEX
-    app.config['INDEXER_REPLACE_REFS'] = True
+    app.config["INDEXER_REPLACE_REFS"] = True
     lib_martigny.reindex()
-    flush_index(LibrariesSearch.Meta.index)
+    LibrariesSearch.flush_and_refresh()


 def test_record_to_index(app):
     """Test the index name value from the JSONSchema."""
     # for documents
-    assert record_to_index({
-        '$schema': 'https://bib.rero.ch/schemas/'
-        'documents/document-v0.0.1.json'
-    }) == 'documents-document-v0.0.1'
-    assert record_to_index({
-        '$schema': 'https://bib.rero.ch/schemas/'
-        'documents/document-v0.0.1.json'
-    }) == 'documents-document-v0.0.1'
+    assert (
+        record_to_index(
+            {"$schema": "https://bib.rero.ch/schemas/" "documents/document-v0.0.1.json"}
+        )
+        == "documents-document-v0.0.1"
+    )
+    assert (
+        record_to_index(
+            {"$schema": "https://bib.rero.ch/schemas/" "documents/document-v0.0.1.json"}
+        )
+        == "documents-document-v0.0.1"
+    )
     # for mef-mef-contributions
-    assert record_to_index({
-        '$schema': 'https://mef.rero.ch/schemas/'
-        'mef/mef-contribution-v0.0.1.json'
-    }) == 'remote_entities-remote_entity-v0.0.1'
+    assert (
+        record_to_index(
+            {
+                "$schema": "https://mef.rero.ch/schemas/"
+                "mef/mef-contribution-v0.0.1.json"
+            }
+        )
+        == "remote_entities-remote_entity-v0.0.1"
+    )
     # for others
-    assert record_to_index({
-        '$schema': 'https://bib.rero.ch/schemas/'
-        'organisations/organisation-v0.0.1.json'
-    }) == 'organisations-organisation-v0.0.1'
+    assert (
+        record_to_index(
+            {
+                "$schema": "https://bib.rero.ch/schemas/"
+                "organisations/organisation-v0.0.1.json"
+            }
+        )
+        == "organisations-organisation-v0.0.1"
+    )


 def test_get_resource_from_ES(document):
     """Test get_resource from ElasticSearch engine."""
-    metadata = DocumentsSearch().get_record_by_pid('doc1')
+    metadata = DocumentsSearch().get_record_by_pid("doc1")
     assert metadata

-    fields = ['pid', 'title']
-    metadata = DocumentsSearch().get_record_by_pid('doc1', fields=fields)
+    fields = ["pid", "title"]
+    metadata = DocumentsSearch().get_record_by_pid("doc1", fields=fields)
     assert all(term in metadata for term in fields)
-    assert 'statement' not in metadata
+    assert "statement" not in metadata

     with pytest.raises(NotFoundError):
-        DocumentsSearch().get_record_by_pid('dummy_pid')
+        DocumentsSearch().get_record_by_pid("dummy_pid")
diff --git a/tests/ui/test_invenio_celery_tasks_endpoints.py b/tests/ui/test_invenio_celery_tasks_endpoints.py
index d36e5c3162..a63d92123e 100644
--- a/tests/ui/test_invenio_celery_tasks_endpoints.py
+++ b/tests/ui/test_invenio_celery_tasks_endpoints.py
@@ -25,16 +25,15 @@

 def test_missing_invenio_celery_task_endpoints(app):
     """Test missing invenio_celery task endpoints."""
-    celery_extension = app.extensions['invenio-celery']
+    celery_extension = app.extensions["invenio-celery"]
     celery_entpoints = [
-        e.value
-        for e in entry_points(group=celery_extension.entry_point_group)
+        e.value for e in entry_points(group=celery_extension.entry_point_group)
     ]

-    for task, data in app.config['CELERY_BEAT_SCHEDULE'].items():
-        task_function = data['task']
+    for task, data in app.config["CELERY_BEAT_SCHEDULE"].items():
+        task_function = data["task"]
         # test if the function exists
         assert obj_or_import_string(task_function)
-        endpoint = '.'.join(task_function.split('.')[:-1])
+        endpoint = ".".join(task_function.split(".")[:-1])
         # test if the endpoint is defined in setup.py in invenio_celery.tasks
         assert endpoint in celery_entpoints
diff --git a/tests/ui/test_message.py b/tests/ui/test_message.py
index 4021cf9d0d..d800645508 100644
--- a/tests/ui/test_message.py
+++ b/tests/ui/test_message.py
@@ -23,11 +23,11 @@

 def test_message(app):
     """Test message."""
-    key = 'test_fr'
-    message = 'Foo bar'
-    result = {'type': 'success', 'message': message}
+    key = "test_fr"
+    message = "Foo bar"
+    result = {"type": "success", "message": message}

-    assert Message.set(key=key, type='success', value=message)
+    assert Message.set(key=key, type="success", value=message)
     assert Message.get(key) == result
     assert Message.delete(key)
     assert Message.get(key) is None
@@ -35,9 +35,9 @@

 def test_message_filter(app):
     """Test message filter."""
-    key = 'test_en'
-    message = 'Filter'
-    result = {'type': 'success', 'message': message}
+    key = "test_en"
+    message = "Filter"
+    result = {"type": "success", "message": message}

-    assert Message.set(key=key, type='success', value=message)
+    assert Message.set(key=key, type="success", value=message)
     assert message_filter(key) == result
diff --git a/tests/ui/test_patron_message.py b/tests/ui/test_patron_message.py
index 65c6214712..5e683c0c7a 100644
--- a/tests/ui/test_patron_message.py
+++ b/tests/ui/test_patron_message.py
@@ -23,46 +23,57 @@
 from invenio_accounts.testutils import login_user_via_view


-def test_info_message(app, client, patron_martigny, patron_martigny_data,
-                      org_martigny_data):
+def test_info_message(
+    app, client, patron_martigny, patron_martigny_data, org_martigny_data
+):
     """Test info message."""
-    patron_martigny['patron']['blocked'] = True
-    patron_martigny['patron']['blocked_note'] = 'This is a blocked message.'
-    patron_martigny['patron']['expiration_date'] = '2022-12-31'
+    patron_martigny["patron"]["blocked"] = True
+    patron_martigny["patron"]["blocked_note"] = "This is a blocked message."
+    patron_martigny["patron"]["expiration_date"] = "2022-12-31"
     patron_martigny.update(patron_martigny, dbcommit=True, reindex=True)
-    blocked_message = patron_martigny['patron']['blocked_note']
+    blocked_message = patron_martigny["patron"]["blocked_note"]

     # If the user is not identified, there is no user information
-    res = client.get('/')
-    soup = BeautifulSoup(res.data, 'html.parser')
-    assert soup.find('div', {"class": "patron-info-message"}) is None
+    res = client.get("/")
+    soup = BeautifulSoup(res.data, "html.parser")
+    assert soup.find("div", {"class": "patron-info-message"}) is None

     login_user_via_view(
         client,
-        email=patron_martigny_data['email'],
-        password=patron_martigny_data['password'])
+        email=patron_martigny_data["email"],
+        password=patron_martigny_data["password"],
+    )

     # If the user is identified, we see the name of the organization
     # and the message on the global view
-    res = client.get(url_for('rero_ils.index'))
-    soup = BeautifulSoup(res.data, 'html.parser')
-    li = soup.find('div', {"class": "patron-info-message"}).find('li')
+    res = client.get(url_for("rero_ils.index"))
+    soup = BeautifulSoup(res.data, "html.parser")
+    li = soup.find("div", {"class": "patron-info-message"}).find("li")

-    assert org_martigny_data['name'] == li.find('span').text
-    assert f'Your account is currently blocked. Reason: {blocked_message}' \
-        == li.find('div', {"class": "message-blocked"}).text
-    assert 'Your account has expired. Please contact your library.'\
-        == li.find('div', {"class": "message-expired"}).text
+    assert org_martigny_data["name"] == li.find("span").text
+    assert (
+        f"Your account is currently blocked. Reason: {blocked_message}"
+        == li.find("div", {"class": "message-blocked"}).text
+    )
+    assert (
+        "Your account has expired. Please contact your library."
+        == li.find("div", {"class": "message-expired"}).text
+    )

     # On the organisation view, the organisation name is not displayed
-    res = client.get(url_for(
-        'rero_ils.index_with_view_code', viewcode=org_martigny_data['code']))
-    soup = BeautifulSoup(res.data, 'html.parser')
-    div = soup.find('div', {"class": "patron-info-message"})
+    res = client.get(
+        url_for("rero_ils.index_with_view_code", viewcode=org_martigny_data["code"])
+    )
+    soup = BeautifulSoup(res.data, "html.parser")
+    div = soup.find("div", {"class": "patron-info-message"})

-    assert div.find('span') is None
-    assert f'Your account is currently blocked. Reason: {blocked_message}' \
-        == div.find('div', {"class": "message-blocked"}).text
-    assert 'Your account has expired. Please contact your library.'\
-        == div.find('div', {"class": "message-expired"}).text
+    assert div.find("span") is None
+    assert (
+        f"Your account is currently blocked. Reason: {blocked_message}"
+        == div.find("div", {"class": "message-blocked"}).text
+    )
+    assert (
+        "Your account has expired. Please contact your library."
+ == div.find("div", {"class": "message-expired"}).text
+ )
diff --git a/tests/ui/test_permissions.py b/tests/ui/test_permissions.py
index 29bc16b3b1..f4acf2578c 100644
--- a/tests/ui/test_permissions.py
+++ b/tests/ui/test_permissions.py
@@ -25,13 +25,13 @@
 def test_has_superuser_access(app):
 """Test permissions of has_superuser_access functions."""
 assert not has_superuser_access()
- app.config['RERO_ILS_APP_DISABLE_PERMISSION_CHECKS'] = True
+ app.config["RERO_ILS_APP_DISABLE_PERMISSION_CHECKS"] = True
 assert has_superuser_access()


-def test_librarian_update_permission_factory(client, document, ebook_1,
- librarian_martigny,
- default_user_password):
+def test_librarian_update_permission_factory(
+ client, document, ebook_1, librarian_martigny, default_user_password
+):
 """Test librarian_update_permission_factory function."""
 assert not librarian_update_permission_factory(ebook_1).can()
 login_user_for_view(client, librarian_martigny, default_user_password)
diff --git a/tests/ui/test_ui_schema.py b/tests/ui/test_ui_schema.py
index db20eb0bdb..d93422e06f 100644
--- a/tests/ui/test_ui_schema.py
+++ b/tests/ui/test_ui_schema.py
@@ -25,22 +25,20 @@
 def test_get_schema(client, app):
 """Test schemas api in debug mode."""
- scheams_endpoint = app.config.get('JSONSCHEMAS_ENDPOINT')
+ schemas_endpoint = app.config.get("JSONSCHEMAS_ENDPOINT")
 for schema in current_jsonschemas.list_schemas():
 # TODO: correct local://
- if '/' in schema and 'record-v1.0.0.json' not in schema:
- url = f'{scheams_endpoint}/{schema}'
+ if "/" in schema and "record-v1.0.0.json" not in schema:
+ url = f"{schemas_endpoint}/{schema}"
 res = client.get(url)
 assert res.status_code == 200
 data = get_json(res)
- if 'properties' in data:
- assert data.get(
- '$schema').startswith('http://json-schema.org/draft')
+ if "properties" in data:
+ assert data.get("$schema").startswith("http://json-schema.org/draft")
 # test resolved
- url = f'{url}?resolved=1'
+ url = f"{url}?resolved=1"
 res = client.get(url)
 assert res.status_code == 200
 data = get_json(res)
- if 'properties' in data:
- assert data.get(
- '$schema').startswith('http://json-schema.org/draft')
+ if "properties" in data:
+ assert data.get("$schema").startswith("http://json-schema.org/draft")
diff --git a/tests/ui/test_utils_app.py b/tests/ui/test_utils_app.py
index f337507e8b..757fd26049 100644
--- a/tests/ui/test_utils_app.py
+++ b/tests/ui/test_utils_app.py
@@ -19,37 +19,33 @@
 from rero_ils.modules.documents.api import Document
 from rero_ils.modules.patrons.api import Patron
-from rero_ils.modules.utils import get_record_class_from_schema_or_pid_type, \
- get_ref_for_pid, pids_exists_in_data, truncate_string
+from rero_ils.modules.utils import (
+ get_record_class_from_schema_or_pid_type,
+ get_ref_for_pid,
+ pids_exists_in_data,
+ truncate_string,
+)
 from rero_ils.utils import get_current_language, remove_empties_from_dict


 def test_truncate_string():
 """Test truncate string."""
- assert truncate_string('this is a long string', 10, '…') \
- == 'this is a…'
- assert truncate_string('not truncated string', 100, '…') \
- == 'not truncated string'
+ assert truncate_string("this is a long string", 10, "…") == "this is a…"
+ assert truncate_string("not truncated string", 100, "…") == "not truncated string"


 def test_get_ref_for_pid(app):
 """Test get $ref for pid."""
- url = 'https://bib.rero.ch/api/documents/3'
- assert get_ref_for_pid('documents', '3') == url
- assert get_ref_for_pid('doc', '3') == url
- assert get_ref_for_pid(Document, '3') == url
- assert 
get_ref_for_pid('test', '3') is None + url = "https://bib.rero.ch/api/documents/3" + assert get_ref_for_pid("documents", "3") == url + assert get_ref_for_pid("doc", "3") == url + assert get_ref_for_pid(Document, "3") == url + assert get_ref_for_pid("test", "3") is None def test_remove_empties_form_dict(): """Test remove empties data from dict.""" - data = { - 'key1': '', - 'key2': [], - 'key3': { - 'key31': None - } - } + data = {"key1": "", "key2": [], "key3": {"key31": None}} cleaned_data = remove_empties_from_dict(data) assert not cleaned_data @@ -57,154 +53,143 @@ def test_remove_empties_form_dict(): def test_pids_exists_in_data(app, org_martigny, lib_martigny): """Test pid exists.""" ok = pids_exists_in_data( - info='test', - data={ - 'organisation': { - '$ref': 'https://bib.rero.ch/api/organisations/org1' - } - }, - required={'org': 'organisation'}, - not_required={'lib': 'library'} + info="test", + data={"organisation": {"$ref": "https://bib.rero.ch/api/organisations/org1"}}, + required={"org": "organisation"}, + not_required={"lib": "library"}, ) assert ok == [] ok = pids_exists_in_data( - info='test', + info="test", data={}, - required={'org': 'organisation'}, - not_required={'lib': 'library'} + required={"org": "organisation"}, + not_required={"lib": "library"}, ) - assert ok == ['test: No data found: organisation'] + assert ok == ["test: No data found: organisation"] ok = pids_exists_in_data( - info='test', + info="test", data={ - 'organisation': { - '$ref': 'https://bib.rero.ch/api/xxxx/org2' - }, + "organisation": {"$ref": "https://bib.rero.ch/api/xxxx/org2"}, }, - required={'org': 'organisation'}, - not_required={'lib': 'library'} + required={"org": "organisation"}, + not_required={"lib": "library"}, ) assert ok == [ "test: No pid found: org {'$ref': 'https://bib.rero.ch/api/xxxx/org2'}" ] ok = pids_exists_in_data( - info='test', + info="test", data={ - 'organisation': { - '$ref': 'https://bib.rero.ch/api/organisations/org2' - }, - 'library': { - '$ref': 'https://bib.rero.ch/api/libraries/lib1' - } + "organisation": {"$ref": "https://bib.rero.ch/api/organisations/org2"}, + "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"}, }, - required={'org': 'organisation'}, - not_required={'lib': 'library'} + required={"org": "organisation"}, + not_required={"lib": "library"}, ) - assert ok == ['test: Pid does not exist: org org2'] + assert ok == ["test: Pid does not exist: org org2"] ok = pids_exists_in_data( - info='partOf', + info="partOf", data={ - 'partOf': [{ - '$ref': 'https://bib.rero.ch/api/documents/doc1' - }, { - '$ref': 'https://bib.rero.ch/api/documents/doc2' - }] + "partOf": [ + {"$ref": "https://bib.rero.ch/api/documents/doc1"}, + {"$ref": "https://bib.rero.ch/api/documents/doc2"}, + ] }, - not_required={'doc': 'partOf'} + not_required={"doc": "partOf"}, ) assert ok == [ - 'partOf: Pid does not exist: doc doc1', - 'partOf: Pid does not exist: doc doc2' + "partOf: Pid does not exist: doc doc1", + "partOf: Pid does not exist: doc doc2", ] ok = pids_exists_in_data( - info='other', + info="other", data={ - "supplement": [{ - "$ref": "https://bib.rero.ch/api/documents/supplement" - }], - "supplementTo": [{ - "$ref": "https://bib.rero.ch/api/documents/supplementTo" - }], - "otherEdition": [{ - "$ref": "https://bib.rero.ch/api/documents/otherEdition" - }], - "otherPhysicalFormat": [{ - "$ref": "https://bib.rero.ch/api/documents/otherPhysicalFormat" - }], - "issuedWith": [{ - "$ref": "https://bib.rero.ch/api/documents/issuedWith" - }], - "precededBy": [{ - "$ref": 
"https://bib.rero.ch/api/documents/precededBy" - }], - "succeededBy": [{ - "$ref": "https://bib.rero.ch/api/documents/succeededBy" - }], - "relatedTo": [{ - "$ref": "https://bib.rero.ch/api/documents/relatedTo" - }], - "hasReproduction": [{ - "label": "Ed. sur microfilm: La Chaux-de-Fonds" - }], - "reproductionOf": [{ - "label": "Reprod. de l'\u00e9d. de: Leipzig, 1834-1853" - }] + "supplement": [{"$ref": "https://bib.rero.ch/api/documents/supplement"}], + "supplementTo": [ + {"$ref": "https://bib.rero.ch/api/documents/supplementTo"} + ], + "otherEdition": [ + {"$ref": "https://bib.rero.ch/api/documents/otherEdition"} + ], + "otherPhysicalFormat": [ + {"$ref": "https://bib.rero.ch/api/documents/otherPhysicalFormat"} + ], + "issuedWith": [{"$ref": "https://bib.rero.ch/api/documents/issuedWith"}], + "precededBy": [{"$ref": "https://bib.rero.ch/api/documents/precededBy"}], + "succeededBy": [{"$ref": "https://bib.rero.ch/api/documents/succeededBy"}], + "relatedTo": [{"$ref": "https://bib.rero.ch/api/documents/relatedTo"}], + "hasReproduction": [{"label": "Ed. sur microfilm: La Chaux-de-Fonds"}], + "reproductionOf": [ + {"label": "Reprod. de l'\u00e9d. de: Leipzig, 1834-1853"} + ], + }, + not_required={ + "doc": [ + "supplement", + "supplementTo", + "otherEdition", + "otherPhysicalFormat", + "issuedWith", + "precededBy", + "succeededBy", + "relatedTo", + "hasReproduction", + "reproductionOf", + ] }, - not_required={'doc': [ - 'supplement', 'supplementTo', 'otherEdition', - 'otherPhysicalFormat', 'issuedWith', 'precededBy', - 'succeededBy', 'relatedTo', 'hasReproduction', - 'reproductionOf' - ]} ) assert ok == [ - 'other: Pid does not exist: doc supplement', - 'other: Pid does not exist: doc supplementTo', - 'other: Pid does not exist: doc otherEdition', - 'other: Pid does not exist: doc otherPhysicalFormat', - 'other: Pid does not exist: doc issuedWith', - 'other: Pid does not exist: doc precededBy', - 'other: Pid does not exist: doc succeededBy', - 'other: Pid does not exist: doc relatedTo', + "other: Pid does not exist: doc supplement", + "other: Pid does not exist: doc supplementTo", + "other: Pid does not exist: doc otherEdition", + "other: Pid does not exist: doc otherPhysicalFormat", + "other: Pid does not exist: doc issuedWith", + "other: Pid does not exist: doc precededBy", + "other: Pid does not exist: doc succeededBy", + "other: Pid does not exist: doc relatedTo", ] def test_get_language(app): """Test get the current language of the application.""" - assert get_current_language() == 'en' + assert get_current_language() == "en" def test_get_record_class_from_schema_or_pid_type(app): """Test get record class from schema or pid_type.""" - schema = 'https://bib.rero.ch/schemas/documents/document-v0.0.1.json' + schema = "https://bib.rero.ch/schemas/documents/document-v0.0.1.json" assert get_record_class_from_schema_or_pid_type(schema=schema) == Document - assert get_record_class_from_schema_or_pid_type(pid_type='doc') == Document - assert get_record_class_from_schema_or_pid_type( - schema=schema, pid_type='doc') == Document - assert get_record_class_from_schema_or_pid_type( - schema=schema, pid_type='ptrn') == Document - - schema = 'https://bib.rero.ch/schemas/patrons/patron-v0.0.1.json' - assert get_record_class_from_schema_or_pid_type( - schema=schema, pid_type='doc') == Patron - assert get_record_class_from_schema_or_pid_type( - schema=schema, pid_type='ptrn') == Patron - assert get_record_class_from_schema_or_pid_type(pid_type='ptrn') == Patron - - assert not 
get_record_class_from_schema_or_pid_type(pid_type='toto')
- assert not get_record_class_from_schema_or_pid_type(
- schema='toto', pid_type=None)
- assert not get_record_class_from_schema_or_pid_type(
- schema='toto', pid_type='toto')
- assert not get_record_class_from_schema_or_pid_type(
- schema=None, pid_type=None)
- assert not get_record_class_from_schema_or_pid_type(
- schema=None, pid_type='toto')
+ assert get_record_class_from_schema_or_pid_type(pid_type="doc") == Document
+ assert (
+ get_record_class_from_schema_or_pid_type(schema=schema, pid_type="doc")
+ == Document
+ )
+ assert (
+ get_record_class_from_schema_or_pid_type(schema=schema, pid_type="ptrn")
+ == Document
+ )
+
+ schema = "https://bib.rero.ch/schemas/patrons/patron-v0.0.1.json"
+ assert (
+ get_record_class_from_schema_or_pid_type(schema=schema, pid_type="doc")
+ == Patron
+ )
+ assert (
+ get_record_class_from_schema_or_pid_type(schema=schema, pid_type="ptrn")
+ == Patron
+ )
+ assert get_record_class_from_schema_or_pid_type(pid_type="ptrn") == Patron
+
+ assert not get_record_class_from_schema_or_pid_type(pid_type="toto")
+ assert not get_record_class_from_schema_or_pid_type(schema="toto", pid_type=None)
+ assert not get_record_class_from_schema_or_pid_type(schema="toto", pid_type="toto")
+ assert not get_record_class_from_schema_or_pid_type(schema=None, pid_type=None)
+ assert not get_record_class_from_schema_or_pid_type(schema=None, pid_type="toto")
 assert not get_record_class_from_schema_or_pid_type(schema=None)
 assert not get_record_class_from_schema_or_pid_type(pid_type=None)
 assert not get_record_class_from_schema_or_pid_type()
diff --git a/tests/ui/test_views.py b/tests/ui/test_views.py
index f8c3481763..83ba840209 100644
--- a/tests/ui/test_views.py
+++ b/tests/ui/test_views.py
@@ -29,88 +29,67 @@
 def test_nl2br():
 """Test nl2br function view."""
- assert 'foo<br>Bar' == nl2br('foo\nBar')
+ assert "foo<br>Bar" == nl2br("foo\nBar")


 def test_error(client):
 """Test error entrypoint."""
 with pytest.raises(Exception):
- client.get(url_for(
- 'rero_ils.error'
- ))
+ client.get(url_for("rero_ils.error"))


 def test_schemaform(client):
 """Test schema form."""
- result = client.get(url_for(
- 'rero_ils.schemaform', document_type="documents"))
+ result = client.get(url_for("rero_ils.schemaform", document_type="documents"))
 assert result.status_code == 200
- result = client.get(url_for(
- 'rero_ils.schemaform', document_type="not_exists"))
+ result = client.get(url_for("rero_ils.schemaform", document_type="not_exists"))
 assert result.status_code == 404


 def test_organisation_link_on_homepage(client):
 """Test Organisation link on homepage."""
- result = client.get(url_for(
- 'rero_ils.index'
- ))
+ result = client.get(url_for("rero_ils.index"))
 assert result.status_code == 200
- assert str(result.data).find('RERO+ catalogue') > -1
+ assert str(result.data).find("RERO+ catalogue") > -1


 def test_global_link_on_institution_homepage(client, org_martigny):
 """Test global link on institution homepage."""
- result = client.get(url_for(
- 'rero_ils.index_with_view_code',
- viewcode='org1'
- ))
+ result = client.get(url_for("rero_ils.index_with_view_code", viewcode="org1"))
 assert result.status_code == 200
- assert str(result.data).find('Global') > -1
+ assert str(result.data).find("Global") > -1


 def test_view_parameter_exists(client):
 """Test view parameter exception."""
- result = client.get(url_for(
- 'rero_ils.index_with_view_code',
- viewcode='global'
- ))
+ result = client.get(url_for("rero_ils.index_with_view_code", viewcode="global"))
 assert result.status_code == 302


 def test_view_parameter_cypress(client):
 """Test view parameter with cypress viewcode."""
- result = client.get(url_for(
- 'rero_ils.index_with_view_code',
- viewcode='cypress'
- ))
+ result = client.get(url_for("rero_ils.index_with_view_code", viewcode="cypress"))
 assert result.status_code == 404


 def test_view_parameter_notfound(client):
 """Test view parameter exception."""
- result = client.get(url_for(
- 'rero_ils.index_with_view_code',
- viewcode='foo'
- ))
+ result = client.get(url_for("rero_ils.index_with_view_code", viewcode="foo"))
 assert result.status_code == 404


 def test_external_endpoint_on_institution_homepage(client, org_martigny, app):
 """Test external endpoint on institution homepage."""
- result = client.get(url_for(
- 'rero_ils.index_with_view_code',
- viewcode='org1'
- ))
- endpoint = app.config['RERO_ILS_THEME_ORGANISATION_CSS_ENDPOINT']
+ result = client.get(url_for("rero_ils.index_with_view_code", viewcode="org1"))
+ endpoint = app.config["RERO_ILS_THEME_ORGANISATION_CSS_ENDPOINT"]
 assert endpoint == "https://resources.rero.ch/bib/test/css/"
 assert str(result.data).find(endpoint) > 1


 def test_help(client):
 """Test help entrypoint."""
- result = client.get(url_for('wiki.index'))
+ result = client.get(url_for("wiki.index"))
 assert result.status_code == 302
@@ -137,83 +116,59 @@ def test_help(client):
 def test_language(client, app):
 """Test the language endpoint."""
- res, data = postdata(
- client,
- 'rero_ils.set_language',
- dict(
- lang='fr'
- )
- )
- assert session[app.config['I18N_SESSION_KEY']] == 'fr'
- assert data == dict(lang='fr')
+ res, data = postdata(client, "rero_ils.set_language", dict(lang="fr"))
+ assert session[app.config["I18N_SESSION_KEY"]] == "fr"
+ assert data == dict(lang="fr")
 assert res.status_code == 200
- res, data = postdata(
- client,
- 'rero_ils.set_language',
- dict(
- lang='it'
- )
- )
- 
assert session[app.config['I18N_SESSION_KEY']] == 'it' - - res, data = postdata( - client, - 'rero_ils.set_language', - dict( - language='fr' - ) - ) + res, data = postdata(client, "rero_ils.set_language", dict(lang="it")) + assert session[app.config["I18N_SESSION_KEY"]] == "it" + + res, data = postdata(client, "rero_ils.set_language", dict(language="fr")) assert res.status_code == 400 - res, data = postdata( - client, - 'rero_ils.set_language', - dict( - lang='foo' - ) - ) + res, data = postdata(client, "rero_ils.set_language", dict(lang="foo")) assert res.status_code == 400 # session is unchanged - assert session[app.config['I18N_SESSION_KEY']] == 'it' + assert session[app.config["I18N_SESSION_KEY"]] == "it" def test_set_user_name( - app, librarian_martigny, patron_martigny, user_with_profile, - user_without_email): + app, librarian_martigny, patron_martigny, user_with_profile, user_without_email +): """Test the user_name in the flask session.""" # should be the email address login_user(user=user_with_profile) - assert 'user_name' in session - assert session['user_name'] == user_with_profile.email + assert "user_name" in session + assert session["user_name"] == user_with_profile.email # should be removed logout_user() - assert 'user_name' not in session + assert "user_name" not in session # should not be set login_user(user=user_without_email) - assert 'user_name' not in session + assert "user_name" not in session logout_user() # should be the formatted name login_user(user=patron_martigny.user) - assert session['user_name'] == patron_martigny.formatted_name + assert session["user_name"] == patron_martigny.formatted_name logout_user() # should be the formatted name login_user(user=librarian_martigny.user) - assert session['user_name'] == librarian_martigny.formatted_name + assert session["user_name"] == librarian_martigny.formatted_name logout_user() def test_google_analytics(client, app): """Testing the insertion of the google analytics code in the html page.""" # The Google Analytics code must not be present on the page. - result = client.get(url_for('rero_ils.index')) - assert 'gtag' not in result.text + result = client.get(url_for("rero_ils.index")) + assert "gtag" not in result.text # The Google Analytics code must be present on the page. - app.config['RERO_ILS_GOOGLE_ANALYTICS_TAG_ID'] = 'GA-Foo' - result = client.get(url_for('rero_ils.index')) - assert 'gtag' in result.text + app.config["RERO_ILS_GOOGLE_ANALYTICS_TAG_ID"] = "GA-Foo" + result = client.get(url_for("rero_ils.index")) + assert "gtag" in result.text diff --git a/tests/ui/users/test_forms.py b/tests/ui/users/test_forms.py index 46919d41f2..686d756f7b 100644 --- a/tests/ui/users/test_forms.py +++ b/tests/ui/users/test_forms.py @@ -24,96 +24,88 @@ def test_register_form(client, app): """Test register form.""" + form_data = {"email": "foo@bar.com", "password": "123", "password_confirm": "123"} + res = client.post(url_for("security.register"), data=form_data) + soup = BeautifulSoup(res.data, "html.parser") + el = soup.find("div", {"class": "alert-danger"}).find("p") + assert "Field must be at least 8 characters long." == el.text + + app.config["RERO_ILS_PASSWORD_MIN_LENGTH"] = 10 + form_data = {"email": "foo@bar.com", "password": "123", "password_confirm": "123"} + res = client.post(url_for("security.register"), data=form_data) + soup = BeautifulSoup(res.data, "html.parser") + el = soup.find("div", {"class": "alert-danger"}).find("p") + assert "Field must be at least 10 characters long." 
== el.text + + app.config["RERO_ILS_PASSWORD_MIN_LENGTH"] = 8 form_data = { - 'email': 'foo@bar.com', - 'password': '123', - 'password_confirm': '123' + "email": "foo@bar.com", + "password": "12345678", + "password_confirm": "12345678", } - res = client.post(url_for('security.register'), data=form_data) - soup = BeautifulSoup(res.data, 'html.parser') - el = soup.find('div', {"class": "alert-danger"}).find('p') - assert 'Field must be at least 8 characters long.' == el.text + res = client.post(url_for("security.register"), data=form_data) + soup = BeautifulSoup(res.data, "html.parser") + el = soup.find("div", {"class": "alert-danger"}).find("p") + assert el.text == "The password must contain a lower case character." - app.config['RERO_ILS_PASSWORD_MIN_LENGTH'] = 10 form_data = { - 'email': 'foo@bar.com', - 'password': '123', - 'password_confirm': '123' + "email": "foo@bar.com", + "password": "a12345678", + "password_confirm": "a12345678", } - res = client.post(url_for('security.register'), data=form_data) - soup = BeautifulSoup(res.data, 'html.parser') - el = soup.find('div', {"class": "alert-danger"}).find('p') - assert 'Field must be at least 10 characters long.' == el.text + res = client.post(url_for("security.register"), data=form_data) + soup = BeautifulSoup(res.data, "html.parser") + el = soup.find("div", {"class": "alert-danger"}).find("p") + assert el.text == "The password must contain a upper case character." - app.config['RERO_ILS_PASSWORD_MIN_LENGTH'] = 8 form_data = { - 'email': 'foo@bar.com', - 'password': '12345678', - 'password_confirm': '12345678' + "email": "foo@bar.com", + "password": "NewHouse", + "password_confirm": "NewHouse", } - res = client.post(url_for('security.register'), data=form_data) - soup = BeautifulSoup(res.data, 'html.parser') - el = soup.find('div', {"class": "alert-danger"}).find('p') - assert el.text == 'The password must contain a lower case character.' - - form_data = { - 'email': 'foo@bar.com', - 'password': 'a12345678', - 'password_confirm': 'a12345678' - } - res = client.post(url_for('security.register'), data=form_data) - soup = BeautifulSoup(res.data, 'html.parser') - el = soup.find('div', {"class": "alert-danger"}).find('p') - assert el.text == 'The password must contain a upper case character.' - - form_data = { - 'email': 'foo@bar.com', - 'password': 'NewHouse', - 'password_confirm': 'NewHouse' - } - res = client.post(url_for('security.register'), data=form_data) - soup = BeautifulSoup(res.data, 'html.parser') - el = soup.find('div', {"class": "alert-danger"}).find('p') - assert el.text == 'The password must contain a number.' + res = client.post(url_for("security.register"), data=form_data) + soup = BeautifulSoup(res.data, "html.parser") + el = soup.find("div", {"class": "alert-danger"}).find("p") + assert el.text == "The password must contain a number." # Check special char - app.config['RERO_ILS_PASSWORD_SPECIAL_CHAR'] = True + app.config["RERO_ILS_PASSWORD_SPECIAL_CHAR"] = True form_data = { - 'email': 'foo@bar.com', - 'password': 'House1234', - 'password_confirm': 'House1234' + "email": "foo@bar.com", + "password": "House1234", + "password_confirm": "House1234", } - res = client.post(url_for('security.register'), data=form_data) - soup = BeautifulSoup(res.data, 'html.parser') - el = soup.find('div', {"class": "alert-danger"}).find('p') - assert el.text == 'The password must contain a special character.' 
+ res = client.post(url_for("security.register"), data=form_data) + soup = BeautifulSoup(res.data, "html.parser") + el = soup.find("div", {"class": "alert-danger"}).find("p") + assert el.text == "The password must contain a special character." # Valid password - app.config['RERO_ILS_PASSWORD_SPECIAL_CHAR'] = False + app.config["RERO_ILS_PASSWORD_SPECIAL_CHAR"] = False form_data = { - 'email': 'foo@bar.com', - 'password': 'Pw123456', - 'password_confirm': 'Pw123456' + "email": "foo@bar.com", + "password": "Pw123456", + "password_confirm": "Pw123456", } - res = client.post(url_for('security.register'), data=form_data) + res = client.post(url_for("security.register"), data=form_data) assert res.status_code == 302 - assert res.location == '/' + assert res.location == "/" form_data = { - 'email': 'foo@bar.com', - 'password': 'Eléphant$07_', - 'password_confirm': 'Eléphant$07_' + "email": "foo@bar.com", + "password": "Eléphant$07_", + "password_confirm": "Eléphant$07_", } - res = client.post(url_for('security.register'), data=form_data) + res = client.post(url_for("security.register"), data=form_data) assert res.status_code == 302 - assert res.location == '/' + assert res.location == "/" -@mock.patch('flask_security.views.reset_password_token_status', - mock.MagicMock( - return_value=[False, False, {'email': 'foo@foo.com'}])) -@mock.patch('flask_security.views.update_password', - mock.MagicMock()) +@mock.patch( + "flask_security.views.reset_password_token_status", + mock.MagicMock(return_value=[False, False, {"email": "foo@foo.com"}]), +) +@mock.patch("flask_security.views.update_password", mock.MagicMock()) def test_reset_password_form(client, app): """Test reset password form. @@ -121,24 +113,18 @@ def test_reset_password_form(client, app): Here we only test that the validator is active on the field. """ - form_data = { - 'email': 'foo@bar.com', - 'password': '123', - 'password_confirm': '123' - } - res = client.post(url_for('security.reset_password', token='123ab'), - data=form_data) - soup = BeautifulSoup(res.data, 'html.parser') - el = soup.find('div', {"class": "text-danger"}).find('p') - assert 'Field must be at least 8 characters long.' == el.text + form_data = {"email": "foo@bar.com", "password": "123", "password_confirm": "123"} + res = client.post(url_for("security.reset_password", token="123ab"), data=form_data) + soup = BeautifulSoup(res.data, "html.parser") + el = soup.find("div", {"class": "text-danger"}).find("p") + assert "Field must be at least 8 characters long." == el.text form_data = { - 'email': 'foo@bar.com', - 'password': '12345678', - 'password_confirm': '12345678' + "email": "foo@bar.com", + "password": "12345678", + "password_confirm": "12345678", } - res = client.post(url_for('security.reset_password', token='123ab'), - data=form_data) - soup = BeautifulSoup(res.data, 'html.parser') - el = soup.find('div', {"class": "text-danger"}).find('p') - assert el.text == 'The password must contain a lower case character.' + res = client.post(url_for("security.reset_password", token="123ab"), data=form_data) + soup = BeautifulSoup(res.data, "html.parser") + el = soup.find("div", {"class": "text-danger"}).find("p") + assert el.text == "The password must contain a lower case character." 
diff --git a/tests/ui/users/test_users_ui.py b/tests/ui/users/test_users_ui.py index 00aa6c41ed..9a4d9b8ab9 100644 --- a/tests/ui/users/test_users_ui.py +++ b/tests/ui/users/test_users_ui.py @@ -24,10 +24,10 @@ def test_users_not_authorized_access(client): """Test profile or change password if the user is not logged.""" - res = client.get(url_for('users.profile', viewcode='global')) + res = client.get(url_for("users.profile", viewcode="global")) assert res.status_code == 401 - res = client.get(url_for('users.password', viewcode='global')) + res = client.get(url_for("users.password", viewcode="global")) assert res.status_code == 401 @@ -35,20 +35,20 @@ def test_users_authorized_access(client, patron_martigny): """Test profile and change password if the user is logged.""" login_user_via_session(client, patron_martigny.user) - res = client.get(url_for('users.profile', viewcode='global')) + res = client.get(url_for("users.profile", viewcode="global")) assert res.status_code == 200 - res = client.get(url_for('users.password', viewcode='global')) + res = client.get(url_for("users.password", viewcode="global")) assert res.status_code == 200 def test_users_readonly_not_authorized_access(app, client, patron_martigny): """Test profile and change password with readonly config.""" - app.config['RERO_PUBLIC_USERPROFILES_READONLY'] = True + app.config["RERO_PUBLIC_USERPROFILES_READONLY"] = True login_user_via_session(client, patron_martigny.user) - res = client.get(url_for('users.profile', viewcode='global')) + res = client.get(url_for("users.profile", viewcode="global")) assert res.status_code == 401 - res = client.get(url_for('users.password', viewcode='global')) + res = client.get(url_for("users.password", viewcode="global")) assert res.status_code == 401 diff --git a/tests/ui/vendors/test_vendors_api.py b/tests/ui/vendors/test_vendors_api.py index 31354b2710..474cdd1127 100644 --- a/tests/ui/vendors/test_vendors_api.py +++ b/tests/ui/vendors/test_vendors_api.py @@ -30,24 +30,23 @@ def test_vendors_properties(vendor_martigny, vendor_sion): # CONTACTS ---------------------------------------------------------------- serial_info = { - 'type': VendorContactType.SERIAL, - 'city': 'Berne', - 'email': 'serial@berne.ch' + "type": VendorContactType.SERIAL, + "city": "Berne", + "email": "serial@berne.ch", } - vendor_martigny['contacts'].append(serial_info) + vendor_martigny["contacts"].append(serial_info) assert not vendor_martigny.get_contact(VendorContactType.ORDER) assert vendor_martigny.get_contact(VendorContactType.SERIAL) == serial_info # ORDER EMAIL ------------------------------------------------------------- # With no specific ORDER contact type, the default contact email field # should be returned - assert vendor_martigny.order_email == \ - vendor_martigny.get_contact(VendorContactType.DEFAULT).get('email') + assert vendor_martigny.order_email == vendor_martigny.get_contact( + VendorContactType.DEFAULT + ).get("email") -def test_vendors_get_links_to_me( - vendor_martigny, acq_invoice_fiction_martigny -): +def test_vendors_get_links_to_me(vendor_martigny, acq_invoice_fiction_martigny): """Test vendors relations.""" links = vendor_martigny.get_links_to_me(True) - assert acq_invoice_fiction_martigny.pid in links['acq_invoices'] + assert acq_invoice_fiction_martigny.pid in links["acq_invoices"] diff --git a/tests/ui/vendors/test_vendors_jsonresolver.py b/tests/ui/vendors/test_vendors_jsonresolver.py index 8e327636db..480e113969 100644 --- a/tests/ui/vendors/test_vendors_jsonresolver.py +++ 
b/tests/ui/vendors/test_vendors_jsonresolver.py @@ -26,10 +26,8 @@ def test_vendors_jsonresolver(app, vendor_martigny): """Test vendor resolver.""" - rec = Record.create({ - 'vendor': {'$ref': 'https://bib.rero.ch/api/vendors/vndr1'} - }) - assert extracted_data_from_ref(rec.get('vendor')) == 'vndr1' + rec = Record.create({"vendor": {"$ref": "https://bib.rero.ch/api/vendors/vndr1"}}) + assert extracted_data_from_ref(rec.get("vendor")) == "vndr1" # deleted record vendor_martigny.delete() @@ -37,10 +35,8 @@ def test_vendors_jsonresolver(app, vendor_martigny): type(rec)(rec.replace_refs()).dumps() # non existing record - rec = Record.create({ - 'vendor': {'$ref': 'https://bib.rero.ch/api/vendors/n_e'} - }) + rec = Record.create({"vendor": {"$ref": "https://bib.rero.ch/api/vendors/n_e"}}) with pytest.raises(JsonRefError) as error: type(rec)(rec.replace_refs()).dumps() - assert 'PIDDoesNotExistError' in str(error) + assert "PIDDoesNotExistError" in str(error) diff --git a/tests/ui/vendors/test_vendors_mapping.py b/tests/ui/vendors/test_vendors_mapping.py index 94f564e21f..850e91cbf9 100644 --- a/tests/ui/vendors/test_vendors_mapping.py +++ b/tests/ui/vendors/test_vendors_mapping.py @@ -27,10 +27,7 @@ def test_budgets_es_mapping(search, db, org_martigny, vendor_martigny_data): mapping = get_mapping(search.Meta.index) assert mapping vendor = Vendor.create( - vendor_martigny_data, - dbcommit=True, - reindex=True, - delete_pid=True + vendor_martigny_data, dbcommit=True, reindex=True, delete_pid=True ) assert mapping == get_mapping(search.Meta.index) vendor.delete(force=True, dbcommit=True, delindex=True) diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py index c8ec78fcc3..c8f99a7fb3 100644 --- a/tests/unit/conftest.py +++ b/tests/unit/conftest.py @@ -26,16 +26,19 @@ from pkg_resources import resource_string from utils import get_schema -from rero_ils.modules.entities.remote_entities.api import \ - RemoteEntitiesSearch, RemoteEntity +from rero_ils.modules.entities.remote_entities.api import ( + RemoteEntitiesSearch, + RemoteEntity, +) from rero_ils.modules.patrons.api import Patron from rero_ils.modules.utils import date_string_to_utc -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def create_app(): """Create test app.""" from invenio_app.factory import create_ui + return create_ui @@ -43,8 +46,8 @@ def create_app(): def circ_policy_schema(): """Circ policy Jsonschema for records.""" schema_in_bytes = resource_string( - 'rero_ils.modules.circ_policies.jsonschemas', - 'circ_policies/circ_policy-v0.0.1.json', + "rero_ils.modules.circ_policies.jsonschemas", + "circ_policies/circ_policy-v0.0.1.json", ) return get_schema(schema_in_bytes) @@ -53,8 +56,8 @@ def circ_policy_schema(): def template_schema(): """Template Jsonschema for records.""" schema_in_bytes = resource_string( - 'rero_ils.modules.templates.jsonschemas', - 'templates/template-v0.0.1.json', + "rero_ils.modules.templates.jsonschemas", + "templates/template-v0.0.1.json", ) return get_schema(schema_in_bytes) @@ -63,8 +66,8 @@ def template_schema(): def notification_schema(): """Notifications Jsonschema for records.""" schema_in_bytes = resource_string( - 'rero_ils.modules.notifications.jsonschemas', - '/notifications/notification-v0.0.1.json' + "rero_ils.modules.notifications.jsonschemas", + "notifications/notification-v0.0.1.json", ) return get_schema(schema_in_bytes) @@ -73,8 +76,7 @@ def notification_schema(): def item_type_schema(): """Item type Jsonschema for records.""" schema_in_bytes = resource_string( - 
'rero_ils.modules.item_types.jsonschemas', - '/item_types/item_type-v0.0.1.json' + "rero_ils.modules.item_types.jsonschemas", "item_types/item_type-v0.0.1.json" ) return get_schema(schema_in_bytes) @@ -83,8 +85,8 @@ def item_type_schema(): def acq_account_schema(): """Acq account Jsonschema for records.""" schema_in_bytes = resource_string( - 'rero_ils.modules.acquisition.acq_accounts.jsonschemas', - '/acq_accounts/acq_account-v0.0.1.json' + "rero_ils.modules.acquisition.acq_accounts.jsonschemas", + "acq_accounts/acq_account-v0.0.1.json", ) return get_schema(schema_in_bytes) @@ -93,8 +95,8 @@ def acq_account_schema(): def acq_order_schema(): """Acq order Jsonschema for records.""" schema_in_bytes = resource_string( - 'rero_ils.modules.acquisition.acq_orders.jsonschemas', - '/acq_orders/acq_order-v0.0.1.json' + "rero_ils.modules.acquisition.acq_orders.jsonschemas", + "acq_orders/acq_order-v0.0.1.json", ) return get_schema(schema_in_bytes) @@ -103,8 +105,8 @@ def acq_order_schema(): def acq_order_line_schema(): """Acq order line Jsonschema for records.""" schema_in_bytes = resource_string( - 'rero_ils.modules.acquisition.acq_order_lines.jsonschemas', - '/acq_order_lines/acq_order_line-v0.0.1.json' + "rero_ils.modules.acquisition.acq_order_lines.jsonschemas", + "acq_order_lines/acq_order_line-v0.0.1.json", ) return get_schema(schema_in_bytes) @@ -113,8 +115,8 @@ def acq_order_line_schema(): def acq_receipt_line_schema(): """Acq receipt line Jsonschema for records.""" schema_in_bytes = resource_string( - 'rero_ils.modules.acquisition.acq_receipt_lines.jsonschemas', - '/acq_receipt_lines/acq_receipt_line-v0.0.1.json' + "rero_ils.modules.acquisition.acq_receipt_lines.jsonschemas", + "acq_receipt_lines/acq_receipt_line-v0.0.1.json", ) return get_schema(schema_in_bytes) @@ -123,8 +125,8 @@ def acq_receipt_line_schema(): def budget_schema(): """Budget Jsonschema for records.""" schema_in_bytes = resource_string( - 'rero_ils.modules.acquisition.budgets.jsonschemas', - '/budgets/budget-v0.0.1.json' + "rero_ils.modules.acquisition.budgets.jsonschemas", + "budgets/budget-v0.0.1.json", ) return get_schema(schema_in_bytes) @@ -133,8 +135,7 @@ def budget_schema(): def library_schema(): """Library Jsonschema for records.""" schema_in_bytes = resource_string( - 'rero_ils.modules.libraries.jsonschemas', - 'libraries/library-v0.0.1.json' + "rero_ils.modules.libraries.jsonschemas", "libraries/library-v0.0.1.json" ) return get_schema(schema_in_bytes) @@ -143,8 +144,9 @@ def library_schema(): def local_fields_schema(): """Local fields Jsonschema for records.""" schema_in_bytes = resource_string( - 'rero_ils.modules.local_fields.jsonschemas', - 'local_fields/local_field-v0.0.1.json') + "rero_ils.modules.local_fields.jsonschemas", + "local_fields/local_field-v0.0.1.json", + ) return get_schema(schema_in_bytes) @@ -152,8 +154,8 @@ def local_fields_schema(): def location_schema(): """Location Jsonschema for records.""" schema_in_bytes = resource_string( - 'rero_ils.modules.locations.jsonschemas', - 'locations/location-v0.0.1.json') + "rero_ils.modules.locations.jsonschemas", "locations/location-v0.0.1.json" + ) return get_schema(schema_in_bytes) @@ -161,8 +163,9 @@ def location_schema(): def patron_transaction_schema(): """Patron transaction Jsonschema for records.""" schema_in_bytes = resource_string( - 'rero_ils.modules.patron_transactions.jsonschemas', - 'patron_transactions/patron_transaction-v0.0.1.json') + "rero_ils.modules.patron_transactions.jsonschemas", + 
"patron_transactions/patron_transaction-v0.0.1.json", + ) return get_schema(schema_in_bytes) @@ -170,8 +173,9 @@ def patron_transaction_schema(): def patron_transaction_event_schema(): """Patron transaction event Jsonschema for records.""" schema_in_bytes = resource_string( - 'rero_ils.modules.patron_transaction_events.jsonschemas', - 'patron_transaction_events/patron_transaction_event-v0.0.1.json') + "rero_ils.modules.patron_transaction_events.jsonschemas", + "patron_transaction_events/patron_transaction_event-v0.0.1.json", + ) return get_schema(schema_in_bytes) @@ -179,8 +183,8 @@ def patron_transaction_event_schema(): def organisation_schema(): """Organisation Jsonschema for records.""" schema_in_bytes = resource_string( - 'rero_ils.modules.organisations.jsonschemas', - 'organisations/organisation-v0.0.1.json', + "rero_ils.modules.organisations.jsonschemas", + "organisations/organisation-v0.0.1.json", ) return get_schema(schema_in_bytes) @@ -189,8 +193,8 @@ def organisation_schema(): def patron_type_schema(): """Patron type Jsonschema for records.""" schema_in_bytes = resource_string( - 'rero_ils.modules.patron_types.jsonschemas', - '/patron_types/patron_type-v0.0.1.json', + "rero_ils.modules.patron_types.jsonschemas", + "patron_types/patron_type-v0.0.1.json", ) return get_schema(schema_in_bytes) @@ -199,8 +203,7 @@ def patron_type_schema(): def patron_schema(): """Patron Jsonschema for records.""" schema_in_bytes = resource_string( - 'rero_ils.modules.patrons.jsonschemas', - '/patrons/patron-v0.0.1.json' + "rero_ils.modules.patrons.jsonschemas", "patrons/patron-v0.0.1.json" ) return get_schema(schema_in_bytes) @@ -210,7 +213,7 @@ def patron_martigny_data_tmp_with_id(patron_martigny_data_tmp): """Load Martigny patron data scope function with a mocked user_id.""" patron = Patron.remove_user_data(deepcopy(patron_martigny_data_tmp)) # mock the user_id which is add by the Patron API. 
- patron['user_id'] = 100 + patron["user_id"] = 100 return patron @@ -218,8 +221,8 @@ def patron_martigny_data_tmp_with_id(patron_martigny_data_tmp): def remote_entities_schema(): """Remote entity Jsonschema for records.""" schema_in_bytes = resource_string( - 'rero_ils.modules.entities.remote_entities.jsonschemas', - '/remote_entities/remote_entity-v0.0.1.json' + "rero_ils.modules.entities.remote_entities.jsonschemas", + "remote_entities/remote_entity-v0.0.1.json", ) return get_schema(schema_in_bytes) @@ -228,8 +231,8 @@ def remote_entities_schema(): def local_entities_schema(): """Local entity Jsonschema for records.""" schema_in_bytes = resource_string( - 'rero_ils.modules.entities.local_entities.jsonschemas', - '/local_entities/local_entity-v0.0.1.json' + "rero_ils.modules.entities.local_entities.jsonschemas", + "local_entities/local_entity-v0.0.1.json", ) return get_schema(schema_in_bytes) @@ -238,8 +241,7 @@ def local_entities_schema(): def document_schema(): """Jsonschema for documents.""" schema_in_bytes = resource_string( - 'rero_ils.modules.documents.jsonschemas', - 'documents/document-v0.0.1.json' + "rero_ils.modules.documents.jsonschemas", "documents/document-v0.0.1.json" ) return get_schema(schema_in_bytes) @@ -248,8 +250,7 @@ def document_schema(): def item_schema(): """Item Jsonschema for records.""" schema_in_bytes = resource_string( - 'rero_ils.modules.items.jsonschemas', - 'items/item-v0.0.1.json' + "rero_ils.modules.items.jsonschemas", "items/item-v0.0.1.json" ) return get_schema(schema_in_bytes) @@ -258,8 +259,7 @@ def item_schema(): def user_schema(): """User Jsonschema for records.""" schema_in_bytes = resource_string( - 'rero_ils.modules.users.jsonschemas', - 'users/user-v0.0.1.json' + "rero_ils.modules.users.jsonschemas", "users/user-v0.0.1.json" ) return get_schema(schema_in_bytes) @@ -268,8 +268,8 @@ def user_schema(): def holding_schema(): """Holdings Jsonschema for records.""" schema_in_bytes = resource_string( - 'rero_ils.modules.holdings.jsonschemas', - '/holdings/holding-v0.0.1.json') + "rero_ils.modules.holdings.jsonschemas", "holdings/holding-v0.0.1.json" + ) return get_schema(schema_in_bytes) @@ -277,8 +277,9 @@ def holding_schema(): def ill_request_schema(): """ILL requests JSONSchema for records.""" schema_in_bytes = resource_string( - 'rero_ils.modules.ill_requests.jsonschemas', - '/ill_requests/ill_request-v0.0.1.json') + "rero_ils.modules.ill_requests.jsonschemas", + "ill_requests/ill_request-v0.0.1.json", + ) return get_schema(schema_in_bytes) @@ -286,8 +287,8 @@ def ill_request_schema(): def operation_log_schema(): """Operation log Jsonschema for records.""" schema_in_bytes = resource_string( - 'rero_ils.modules.operation_logs.jsonschemas', - 'operation_logs/operation_log-v0.0.1.json' + "rero_ils.modules.operation_logs.jsonschemas", + "operation_logs/operation_log-v0.0.1.json", ) return get_schema(schema_in_bytes) @@ -296,8 +297,8 @@ def operation_log_schema(): def vendors_schema(): """Local fields Jsonschema for records.""" schema_in_bytes = resource_string( - 'rero_ils.modules.vendors.jsonschemas', - 'vendors/vendor-v0.0.1.json') + "rero_ils.modules.vendors.jsonschemas", "vendors/vendor-v0.0.1.json" + ) return get_schema(schema_in_bytes) @@ -305,11 +306,11 @@ def vendors_schema(): def marc21_record(): """Marc21 record.""" date = datetime.now(timezone.utc).isoformat() - created = date_string_to_utc(date).strftime('%y%m%d') + created = date_string_to_utc(date).strftime("%y%m%d") return { - 'leader': '00000cam a2200000zu 4500', - '005': 
'20270707070707.0', - '008': f'{created}|||||||||xx#|||||||||||||||||||||c' + "leader": "00000cam a2200000zu 4500", + "005": "20270707070707.0", + "008": f"{created}|||||||||xx#|||||||||||||||||||||c", } @@ -317,50 +318,50 @@ def marc21_record(): def mef_record_with_idref_rero_data(): """Mef record with idref rero.""" return { - '$schema': 'https://bib.rero.ch/schemas/' - 'remote_entities/remote_entity-v0.0.1.json', - 'idref': { - '$schema': 'https://mef.rero.ch/schemas/' - 'agents_idref/idref-agent-v0.0.1.json', - 'authorized_access_point': 'Honnoré, Patrick', - 'type': 'bf:Person', - 'biographical_information': ['Traduit du japonais en français'], - 'country_associated': 'fr', - 'identifier': 'http://www.idref.fr/072277742', - 'language': ['fre', 'jpn'], - 'md5': '8f1dda5f37239c65d3b3d0d2252ceffb', - 'pid': '072277742', - 'preferred_name': 'Honnoré, Patrick', - 'relation_pid': {'type': 'redirect_from', 'value': '193601680'} + "$schema": "https://bib.rero.ch/schemas/" + "remote_entities/remote_entity-v0.0.1.json", + "idref": { + "$schema": "https://mef.rero.ch/schemas/" + "agents_idref/idref-agent-v0.0.1.json", + "authorized_access_point": "Honnoré, Patrick", + "type": "bf:Person", + "biographical_information": ["Traduit du japonais en français"], + "country_associated": "fr", + "identifier": "http://www.idref.fr/072277742", + "language": ["fre", "jpn"], + "md5": "8f1dda5f37239c65d3b3d0d2252ceffb", + "pid": "072277742", + "preferred_name": "Honnoré, Patrick", + "relation_pid": {"type": "redirect_from", "value": "193601680"}, }, - 'pid': '6627670', - 'rero': { - '$schema': 'https://mef.rero.ch/schemas/' - 'agents_rero/rero-agent-v0.0.1.json', - 'authorized_access_point': 'Honnoré, Patrick', - 'type': 'bf:Person', - 'identifier': 'http://data.rero.ch/02-A009220673', - 'md5': 'c90fa0c93eac4346910734badb77bdce', - 'pid': 'A009220673', - 'preferred_name': 'Honnoré, Patrick'}, - 'sources': ['rero', 'idref'], - 'type': 'bf:Person', - 'viaf_pid': '37141584', - 'type': 'bf:Person' + "pid": "6627670", + "rero": { + "$schema": "https://mef.rero.ch/schemas/" + "agents_rero/rero-agent-v0.0.1.json", + "authorized_access_point": "Honnoré, Patrick", + "type": "bf:Person", + "identifier": "http://data.rero.ch/02-A009220673", + "md5": "c90fa0c93eac4346910734badb77bdce", + "pid": "A009220673", + "preferred_name": "Honnoré, Patrick", + }, + "sources": ["rero", "idref"], + "type": "bf:Person", + "viaf_pid": "37141584", + "type": "bf:Person", } @pytest.fixture() def mef_record_with_idref_rero(mef_record_with_idref_rero_data): """Mef record with idref rero.""" - if entity := RemoteEntity.get_record_by_pid( - mef_record_with_idref_rero_data['pid']): + if entity := RemoteEntity.get_record_by_pid(mef_record_with_idref_rero_data["pid"]): return entity entity = RemoteEntity.create( data=mef_record_with_idref_rero_data, dbcommit=True, reindex=True, - delete_pid=False + delete_pid=False, ) RemoteEntitiesSearch.flush_and_refresh() return entity @@ -370,99 +371,95 @@ def mef_record_with_idref_rero(mef_record_with_idref_rero_data): def mef_record_with_idref_gnd_data(): """Mef record with idref gnd.""" return { - '$schema': 'https://bib.rero.ch/schemas/' - 'remote_entities/remote_entity-v0.0.1.json', - 'gnd': { - '$schema': 'https://mef.rero.ch/schemas/' - 'agents_gnd/gnd-agent-v0.0.1.json', - 'authorized_access_point': 'Université de Genève', - 'type': 'bf:Organisation', - 'conference': False, - 'country_associated': 'sz', - 'date_of_establishment': '1873', - 'identifier': 'http://d-nb.info/gnd/1010450-1', - 'md5': 
'291d3a468f69af08fa4a0d352ce71ab4', - 'pid': '004058518', - 'preferred_name': 'Université de Genève', - 'variant_access_point': [ - 'Schola Genevensis', - 'University of Geneva', - 'Ženevskij Universitet', - 'Universitet. Genf', - 'Universität Genf', - 'Università di Ginevra', - 'Universidad de Ginebra', - 'UNIGE. Abkuerzung' + "$schema": "https://bib.rero.ch/schemas/" + "remote_entities/remote_entity-v0.0.1.json", + "gnd": { + "$schema": "https://mef.rero.ch/schemas/" + "agents_gnd/gnd-agent-v0.0.1.json", + "authorized_access_point": "Université de Genève", + "type": "bf:Organisation", + "conference": False, + "country_associated": "sz", + "date_of_establishment": "1873", + "identifier": "http://d-nb.info/gnd/1010450-1", + "md5": "291d3a468f69af08fa4a0d352ce71ab4", + "pid": "004058518", + "preferred_name": "Université de Genève", + "variant_access_point": [ + "Schola Genevensis", + "University of Geneva", + "Ženevskij Universitet", + "Universitet. Genf", + "Universität Genf", + "Università di Ginevra", + "Universidad de Ginebra", + "UNIGE. Abkuerzung", + ], + "variant_name": [ + "Schola Genevensis", + "University of Geneva", + "Ženevskij Universitet", + "Universitet", + "Universität Genf", + "Università di Ginevra", + "Universidad de Ginebra", + "UNIGE", ], - 'variant_name': [ - 'Schola Genevensis', - 'University of Geneva', - 'Ženevskij Universitet', - 'Universitet', - 'Universität Genf', - 'Università di Ginevra', - 'Universidad de Ginebra', - 'UNIGE' - ] }, - 'idref': { - '$schema': 'https://mef.rero.ch/schemas/' - 'agents_idref/idref-agent-v0.0.1.json', - 'authorized_access_point': 'Université de Genève', - 'type': 'bf:Organisation', - 'biographical_information': [ + "idref": { + "$schema": "https://mef.rero.ch/schemas/" + "agents_idref/idref-agent-v0.0.1.json", + "authorized_access_point": "Université de Genève", + "type": "bf:Organisation", + "biographical_information": [ "Fondée en 1559, l'académie devint Université en 1872", - "3 pl. de l'Université, Genève (Suisse)" + "3 pl. 
de l'Université, Genève (Suisse)", + ], + "conference": False, + "country_associated": "sz", + "date_of_establishment": "1559", + "identifier": "http://www.idref.fr/02643136X", + "language": ["fre"], + "md5": "96a27be2a6ee9741dab983c3f403c3ff", + "pid": "02643136X", + "preferred_name": "Université de Genève", + "relation_pid": {"type": "redirect_from", "value": "126899959"}, + "variant_access_point": [ + "UNIGE", + "Academia Genevensis", + "Académie de Genève", + "Académie théologique de Genève", + "Académie de Calvin ( Genève )", + "Schola Genevensis", + "Università di Ginevra", ], - 'conference': False, - 'country_associated': 'sz', - 'date_of_establishment': '1559', - 'identifier': 'http://www.idref.fr/02643136X', - 'language': ['fre'], - 'md5': '96a27be2a6ee9741dab983c3f403c3ff', - 'pid': '02643136X', - 'preferred_name': 'Université de Genève', - 'relation_pid': { - 'type': 'redirect_from', - 'value': '126899959' - }, - 'variant_access_point': [ - 'UNIGE', - 'Academia Genevensis', - 'Académie de Genève', - 'Académie théologique de Genève', - 'Académie de Calvin ( Genève )', - 'Schola Genevensis', - 'Università di Ginevra' + "variant_name": [ + "UNIGE", + "Academia Genevensis", + "Académie de Genève", + "Académie théologique de Genève", + "Académie de Calvin ( Genève )", + "Schola Genevensis", + "Università di Ginevra", ], - 'variant_name': [ - 'UNIGE', - 'Academia Genevensis', - 'Académie de Genève', - 'Académie théologique de Genève', - 'Académie de Calvin ( Genève )', - 'Schola Genevensis', - 'Università di Ginevra' - ] }, - 'pid': '5890765', - 'viaf_pid': '143949988', - 'sources': ['gnd', 'idref'], - 'type': 'bf:Organisation' + "pid": "5890765", + "viaf_pid": "143949988", + "sources": ["gnd", "idref"], + "type": "bf:Organisation", } @pytest.fixture() def mef_record_with_idref_gnd(mef_record_with_idref_gnd_data): """Mef record with idref rero.""" - if entity := RemoteEntity.get_record_by_pid( - mef_record_with_idref_gnd_data['pid']): + if entity := RemoteEntity.get_record_by_pid(mef_record_with_idref_gnd_data["pid"]): return entity entity = RemoteEntity.create( data=mef_record_with_idref_gnd_data, dbcommit=True, reindex=True, - delete_pid=False + delete_pid=False, ) RemoteEntitiesSearch.flush_and_refresh() return entity @@ -472,83 +469,83 @@ def mef_record_with_idref_gnd(mef_record_with_idref_gnd_data): def mef_record_with_idref_gnd_rero_data(): """Mef record with idref gnd rero is conference.""" return { - '$schema': 'https://bib.rero.ch/schemas/' - 'remote_entities/remote_entity-v0.0.1.json', - 'gnd': { - '$schema': 'https://mef.rero.ch/schemas/' - 'agents_gnd/gnd-agent-v0.0.1.json', - 'authorized_access_point': 'Congrès Ouvrier de France', - 'type': 'bf:Organisation', - 'conference': True, - 'identifier': 'http://d-nb.info/gnd/5034321-X', - 'md5': '21ea03e240e10011305acac0cd731813', - 'pid': '050343211', - 'preferred_name': 'Congrès Ouvrier de France' + "$schema": "https://bib.rero.ch/schemas/" + "remote_entities/remote_entity-v0.0.1.json", + "gnd": { + "$schema": "https://mef.rero.ch/schemas/" + "agents_gnd/gnd-agent-v0.0.1.json", + "authorized_access_point": "Congrès Ouvrier de France", + "type": "bf:Organisation", + "conference": True, + "identifier": "http://d-nb.info/gnd/5034321-X", + "md5": "21ea03e240e10011305acac0cd731813", + "pid": "050343211", + "preferred_name": "Congrès Ouvrier de France", }, - 'idref': { - '$schema': 'https://mef.rero.ch/schemas/' - 'agents_idref/idref-agent-v0.0.1.json', - 'authorized_access_point': 'Congrès ouvrier français', - 'type': 
'bf:Organisation', - 'biographical_information': [ - 'L\'ordre des formes exclues suit l\'ordre chronologique des ' - 'publications et correspond à l\'évolution historique ' - '(Cf. la notice des congrès particuliers) On a gardé ' - 'volontairement la forme \'Congrès ouvrier français\' ' - 'pour toute la série' + "idref": { + "$schema": "https://mef.rero.ch/schemas/" + "agents_idref/idref-agent-v0.0.1.json", + "authorized_access_point": "Congrès ouvrier français", + "type": "bf:Organisation", + "biographical_information": [ + "L'ordre des formes exclues suit l'ordre chronologique des " + "publications et correspond à l'évolution historique " + "(Cf. la notice des congrès particuliers) On a gardé " + "volontairement la forme 'Congrès ouvrier français' " + "pour toute la série" + ], + "conference": False, + "country_associated": "fr", + "date_of_establishment": "1876", + "identifier": "http://www.idref.fr/03255608X", + "language": ["fre"], + "md5": "4f838b25c1281bc96aa14b9a4ee49572", + "pid": "03255608X", + "preferred_name": "Congrès ouvrier français", + "variant_access_point": [ + "Congrès ouvrier de France", + "Congrès socialiste ouvrier de France", + "Congrès national ouvrier socialiste", + "Congrès socialiste national ouvrier", ], - 'conference': False, - 'country_associated': 'fr', - 'date_of_establishment': '1876', - 'identifier': 'http://www.idref.fr/03255608X', - 'language': ['fre'], - 'md5': '4f838b25c1281bc96aa14b9a4ee49572', - 'pid': '03255608X', - 'preferred_name': 'Congrès ouvrier français', - 'variant_access_point': [ - 'Congrès ouvrier de France', - 'Congrès socialiste ouvrier de France', - 'Congrès national ouvrier socialiste', - 'Congrès socialiste national ouvrier' + "variant_name": [ + "Congrès ouvrier de France", + "Congrès socialiste ouvrier de France", + "Congrès national ouvrier socialiste", + "Congrès socialiste national ouvrier", ], - 'variant_name': [ - 'Congrès ouvrier de France', - 'Congrès socialiste ouvrier de France', - 'Congrès national ouvrier socialiste', - 'Congrès socialiste national ouvrier' - ] }, - 'pid': '5777972', - 'rero': { - '$schema': 'https://mef.rero.ch/schemas/' - 'agents_rero/rero-agent-v0.0.1.json', - 'authorized_access_point': 'Congrès ouvrier de France', - 'type': 'bf:Organisation', - 'conference': True, - 'identifier': 'http://data.rero.ch/02-A005462931', - 'md5': 'e94636af02fbfabca711ec87a103f1b3', - 'pid': 'A005462931', - 'preferred_name': 'Congrès ouvrier de France', - 'variant_access_point': [ - 'Congrès ouvrier socialiste de France', - 'Congrès national ouvrier socialiste (France)', - 'Congrès socialiste ouvrier de France', - 'Congrès national socialiste ouvrier (France)', - 'Congrès socialiste national ouvrier (France)', - 'Congrès ouvrier français' + "pid": "5777972", + "rero": { + "$schema": "https://mef.rero.ch/schemas/" + "agents_rero/rero-agent-v0.0.1.json", + "authorized_access_point": "Congrès ouvrier de France", + "type": "bf:Organisation", + "conference": True, + "identifier": "http://data.rero.ch/02-A005462931", + "md5": "e94636af02fbfabca711ec87a103f1b3", + "pid": "A005462931", + "preferred_name": "Congrès ouvrier de France", + "variant_access_point": [ + "Congrès ouvrier socialiste de France", + "Congrès national ouvrier socialiste (France)", + "Congrès socialiste ouvrier de France", + "Congrès national socialiste ouvrier (France)", + "Congrès socialiste national ouvrier (France)", + "Congrès ouvrier français", + ], + "variant_name": [ + "Congrès ouvrier socialiste de France", + "Congrès national ouvrier socialiste 
(France)", + "Congrès socialiste ouvrier de France", + "Congrès national socialiste ouvrier (France)", + "Congrès socialiste national ouvrier (France)", + "Congrès ouvrier français", ], - 'variant_name': [ - 'Congrès ouvrier socialiste de France', - 'Congrès national ouvrier socialiste (France)', - 'Congrès socialiste ouvrier de France', - 'Congrès national socialiste ouvrier (France)', - 'Congrès socialiste national ouvrier (France)', - 'Congrès ouvrier français' - ] }, - 'viaf_pid': '134406719', - 'sources': ['gnd', 'idref', 'rero'], - 'type': 'bf:Organisation' + "viaf_pid": "134406719", + "sources": ["gnd", "idref", "rero"], + "type": "bf:Organisation", } @@ -556,13 +553,14 @@ def mef_record_with_idref_gnd_rero_data(): def mef_record_with_idref_gnd_rero(mef_record_with_idref_gnd_rero_data): """Mef record with idref rero.""" if entity := RemoteEntity.get_record_by_pid( - mef_record_with_idref_gnd_rero_data['pid']): + mef_record_with_idref_gnd_rero_data["pid"] + ): return entity entity = RemoteEntity.create( data=mef_record_with_idref_gnd_rero_data, dbcommit=True, reindex=True, - delete_pid=False + delete_pid=False, ) RemoteEntitiesSearch.flush_and_refresh() return entity @@ -572,7 +570,7 @@ def mef_record_with_idref_gnd_rero(mef_record_with_idref_gnd_rero_data): def stats_cfg_schema(): """Template Jsonschema for records.""" schema_in_bytes = resource_string( - 'rero_ils.modules.stats_cfg.jsonschemas', - 'stats_cfg/stat_cfg-v0.0.1.json', + "rero_ils.modules.stats_cfg.jsonschemas", + "stats_cfg/stat_cfg-v0.0.1.json", ) return get_schema(schema_in_bytes) diff --git a/tests/unit/documents/test_documents_dojson.py b/tests/unit/documents/test_documents_dojson.py index ad70fd05c9..ec70b413ac 100644 --- a/tests/unit/documents/test_documents_dojson.py +++ b/tests/unit/documents/test_documents_dojson.py @@ -25,40 +25,36 @@ from rero_ils.dojson.utils import not_repetitive from rero_ils.modules.documents.dojson.contrib.marc21tojson.rero import marc21 -from rero_ils.modules.documents.dojson.contrib.marc21tojson.rero.model import \ - get_mef_link +from rero_ils.modules.documents.dojson.contrib.marc21tojson.rero.model import ( + get_mef_link, +) from rero_ils.modules.documents.models import DocumentFictionType -from rero_ils.modules.documents.views import create_publication_statement, \ - get_cover_art, get_other_accesses +from rero_ils.modules.documents.views import ( + create_publication_statement, + get_cover_art, + get_other_accesses, +) from rero_ils.modules.entities.models import EntityType def test_not_repetetive(capsys): """Test the function not_repetetive.""" - data_dict = {'sub': ('first', 'second')} + data_dict = {"sub": ("first", "second")} data = not_repetitive( - bibid='pid1', - reroid='rero1', - key='key', - value=data_dict, - subfield='sub' + bibid="pid1", reroid="rero1", key="key", value=data_dict, subfield="sub" ) - assert data == 'first' + assert data == "first" out, err = capsys.readouterr() - assert out == \ - f'WARNING NOT REPETITIVE:\tpid1\trero1\tkey\tsub\t{str(data_dict)}\t\n' - data = {'sub': 'only'} + assert ( + out == f"WARNING NOT REPETITIVE:\tpid1\trero1\tkey\tsub\t{str(data_dict)}\t\n" + ) + data = {"sub": "only"} data = not_repetitive( - bibid='pid1', - reroid='rero1', - key='key', - value=data, - subfield='sub', - default='' + bibid="pid1", reroid="rero1", key="key", value=data, subfield="sub", default="" ) - assert data == 'only' + assert data == "only" out, err = capsys.readouterr() - assert out == '' + assert out == "" # type: leader @@ -82,10 +78,9 @@ def 
test_marc21_to_type(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('type') == [{ - 'main_type': 'docmaintype_book', - 'subtype': 'docsubtype_other_book' - }] + assert data.get("type") == [ + {"main_type": "docmaintype_book", "subtype": "docsubtype_other_book"} + ] marc21xml = """ @@ -103,15 +98,9 @@ def test_marc21_to_type(): marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('type') == [ - { - 'main_type': 'docmaintype_book', - 'subtype': 'docsubtype_other_book' - }, - { - 'main_type': 'docmaintype_score', - 'subtype': 'docsubtype_printed_score' - } + assert data.get("type") == [ + {"main_type": "docmaintype_book", "subtype": "docsubtype_other_book"}, + {"main_type": "docmaintype_score", "subtype": "docsubtype_printed_score"}, ] @@ -129,9 +118,7 @@ def test_marc21_to_admin_metadata(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('adminMetadata') == { - 'encodingLevel': 'Full level' - } + assert data.get("adminMetadata") == {"encodingLevel": "Full level"} marc21xml = """ @@ -146,9 +133,9 @@ def test_marc21_to_admin_metadata(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('adminMetadata') == { - 'encodingLevel': 'Less-than-full level, material not examined', - 'note': ['Société de publications romanes (pf/08.05.1985)'], + assert data.get("adminMetadata") == { + "encodingLevel": "Less-than-full level, material not examined", + "note": ["Société de publications romanes (pf/08.05.1985)"], } marc21xml = """ @@ -176,14 +163,14 @@ def test_marc21_to_admin_metadata(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('adminMetadata') == { - 'encodingLevel': 'Less-than-full level, material not examined', - 'note': [ - "Catalogué d'après la couverture (nebpun/12.2019)", - 'BPUN: Sandoz, Pellet, Rosselet, Bähler (nebpun/12.2019)', - '!!!Bibliographie neuchâteloise!!! (necfbv/12.2019/3546)', - '!!! Discographie neuchâteloise!!! (necfbv/02.2021/3502)' - ] + assert data.get("adminMetadata") == { + "encodingLevel": "Less-than-full level, material not examined", + "note": [ + "Catalogué d'après la couverture (nebpun/12.2019)", + "BPUN: Sandoz, Pellet, Rosselet, Bähler (nebpun/12.2019)", + "!!!Bibliographie neuchâteloise!!! (necfbv/12.2019/3546)", + "!!! Discographie neuchâteloise!!! 
(necfbv/02.2021/3502)", + ], } marc21xml = """ @@ -202,12 +189,9 @@ def test_marc21_to_admin_metadata(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('adminMetadata') == { - 'encodingLevel': 'Less-than-full level, material not examined', - 'note': [ - 'Notice privée (vsbcce/02.2013)', - 'Fonds' - ], + assert data.get("adminMetadata") == { + "encodingLevel": "Less-than-full level, material not examined", + "note": ["Notice privée (vsbcce/02.2013)", "Fonds"], } # field 351 with missing $c @@ -227,11 +211,9 @@ def test_marc21_to_admin_metadata(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('adminMetadata') == { - 'encodingLevel': 'Less-than-full level, material not examined', - 'note': [ - 'Notice privée (vsbcce/02.2013)' - ], + assert data.get("adminMetadata") == { + "encodingLevel": "Less-than-full level, material not examined", + "note": ["Notice privée (vsbcce/02.2013)"], } marc21xml = """ @@ -250,12 +232,12 @@ def test_marc21_to_admin_metadata(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('adminMetadata') == { - 'encodingLevel': 'Unknown', - 'source': 'DLC', - 'descriptionModifier': ['SzZuIDS NEBIS ZBZ', 'RERO vsbcvs'], - 'descriptionLanguage': 'ger', - 'descriptionConventions': ['rda'], + assert data.get("adminMetadata") == { + "encodingLevel": "Unknown", + "source": "DLC", + "descriptionModifier": ["SzZuIDS NEBIS ZBZ", "RERO vsbcvs"], + "descriptionLanguage": "ger", + "descriptionConventions": ["rda"], } @@ -273,10 +255,7 @@ def test_marc21_to_mode_of_issuance(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('issuance') == { - 'main_type': 'rdami:1001', - 'subtype': 'article' - } + assert data.get("issuance") == {"main_type": "rdami:1001", "subtype": "article"} marc21xml = """ @@ -287,9 +266,9 @@ def test_marc21_to_mode_of_issuance(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('issuance') == { - 'main_type': 'rdami:1003', - 'subtype': 'serialInSerial' + assert data.get("issuance") == { + "main_type": "rdami:1003", + "subtype": "serialInSerial", } marc21xml = """ @@ -301,10 +280,7 @@ def test_marc21_to_mode_of_issuance(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('issuance') == { - 'main_type': 'rdami:1001', - 'subtype': 'privateFile' - } + assert data.get("issuance") == {"main_type": "rdami:1001", "subtype": "privateFile"} marc21xml = """ @@ -315,9 +291,9 @@ def test_marc21_to_mode_of_issuance(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('issuance') == { - 'main_type': 'rdami:1001', - 'subtype': 'privateSubfile' + assert data.get("issuance") == { + "main_type": "rdami:1001", + "subtype": "privateSubfile", } marc21xml = """ @@ -329,9 +305,9 @@ def test_marc21_to_mode_of_issuance(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('issuance') == { - 'main_type': 'rdami:1001', - 'subtype': 'materialUnit' + assert data.get("issuance") == { + "main_type": "rdami:1001", + "subtype": "materialUnit", } marc21xml = """ @@ -349,10 +325,7 @@ def test_marc21_to_mode_of_issuance(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('issuance') == { - 'main_type': 'rdami:1002', - 'subtype': 'set' - } + assert data.get("issuance") == {"main_type": "rdami:1002", "subtype": "set"} marc21xml = """ @@ -363,9 +336,9 
@@ def test_marc21_to_mode_of_issuance(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('issuance') == { - 'main_type': 'rdami:1004', - 'subtype': 'updatingWebsite' + assert data.get("issuance") == { + "main_type": "rdami:1004", + "subtype": "updatingWebsite", } marc21xml = """ @@ -377,9 +350,9 @@ def test_marc21_to_mode_of_issuance(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('issuance') == { - 'main_type': 'rdami:1004', - 'subtype': 'updatingLoose-leaf' + assert data.get("issuance") == { + "main_type": "rdami:1004", + "subtype": "updatingLoose-leaf", } marc21xml = """ @@ -391,9 +364,9 @@ def test_marc21_to_mode_of_issuance(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('issuance') == { - 'main_type': 'rdami:1004', - 'subtype': 'updatingWebsite' + assert data.get("issuance") == { + "main_type": "rdami:1004", + "subtype": "updatingWebsite", } marc21xml = """ @@ -405,9 +378,9 @@ def test_marc21_to_mode_of_issuance(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('issuance') == { - 'main_type': 'rdami:1003', - 'subtype': 'monographicSeries' + assert data.get("issuance") == { + "main_type": "rdami:1003", + "subtype": "monographicSeries", } marc21xml = """ @@ -419,10 +392,7 @@ def test_marc21_to_mode_of_issuance(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('issuance') == { - 'main_type': 'rdami:1003', - 'subtype': 'periodical' - } + assert data.get("issuance") == {"main_type": "rdami:1003", "subtype": "periodical"} marc21xml = """ @@ -433,9 +403,9 @@ def test_marc21_to_mode_of_issuance(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('issuance') == { - 'main_type': 'rdami:1001', - 'subtype': 'materialUnit' + assert data.get("issuance") == { + "main_type": "rdami:1001", + "subtype": "materialUnit", } @@ -452,7 +422,7 @@ def test_marc21_to_pid(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('pid') == '123456789' + assert data.get("pid") == "123456789" marc21xml = """ @@ -462,7 +432,7 @@ def test_marc21_to_pid(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('pid') is None + assert data.get("pid") is None def test_marc21_to_title_245_with_sufield_c_having_square_bracket(): @@ -490,24 +460,16 @@ def test_marc21_to_title_245_with_sufield_c_having_square_bracket(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('title') == [ + assert data.get("title") == [ { - 'type': 'bf:Title', - 'mainTitle': [ - {'value': 'Ma ville en vert'} - ], - 'subtitle': [ - {'value': 'pour un retour de la nature'} - ], + "type": "bf:Title", + "mainTitle": [{"value": "Ma ville en vert"}], + "subtitle": [{"value": "pour un retour de la nature"}], } ] - assert data.get('responsibilityStatement') == [ - [ - {'value': '[Robert Klant ... [et al.]'} - ], - [ - {'value': '[Kitty B.]'} - ] + assert data.get("responsibilityStatement") == [ + [{"value": "[Robert Klant ... 
[et al.]"}], + [{"value": "[Kitty B.]"}], ] @@ -550,41 +512,27 @@ def test_marc21_to_title_245_with_two_246(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('title') == [ + assert data.get("title") == [ { - 'type': 'bf:Title', - 'mainTitle': [ - {'value': 'Guo ji fa'}, - { - 'value': '国际法', - 'language': 'chi-hani' - } - ] + "type": "bf:Title", + "mainTitle": [ + {"value": "Guo ji fa"}, + {"value": "国际法", "language": "chi-hani"}, + ], }, { - 'type': 'bf:ParallelTitle', - 'mainTitle': [ - {'value': 'International law'}, - { - 'value': 'International law', - 'language': 'chi-hani' - } - ] + "type": "bf:ParallelTitle", + "mainTitle": [ + {"value": "International law"}, + {"value": "International law", "language": "chi-hani"}, + ], }, - { - 'type': 'bf:VariantTitle', - 'mainTitle': [{'value': 'Guojifa'}] - } + {"type": "bf:VariantTitle", "mainTitle": [{"value": "Guojifa"}]}, ] - assert data.get('responsibilityStatement') == [ + assert data.get("responsibilityStatement") == [ [ - { - 'value': 'Liang Xi yuan zhu zhu bian, Wang Xianshu fu zhu bian' - }, - { - 'value': '梁西原著主编, 王献枢副主编', - 'language': 'chi-hani' - } + {"value": "Liang Xi yuan zhu zhu bian, Wang Xianshu fu zhu bian"}, + {"value": "梁西原著主编, 王献枢副主编", "language": "chi-hani"}, ] ] @@ -621,33 +569,21 @@ def test_marc21_to_title_245_without_246(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('title') == [ + assert data.get("title") == [ { - 'type': 'bf:Title', - 'mainTitle': [ - {'value': 'L.N. Tolstoĭ'}, - { - 'value': 'Л.Н. Толстой', - 'language': 'rus-cyrl' - } + "type": "bf:Title", + "mainTitle": [ + {"value": "L.N. Tolstoĭ"}, + {"value": "Л.Н. Толстой", "language": "rus-cyrl"}, ], - 'subtitle': [ - {'value': 'seminariĭ'}, - { - 'value': 'семинарий', - 'language': 'rus-cyrl' - } + "subtitle": [ + {"value": "seminariĭ"}, + {"value": "семинарий", "language": "rus-cyrl"}, ], } ] - assert data.get('responsibilityStatement') == [ - [ - {'value': 'B.I. Bursov'}, - { - 'value': 'Б.И. Бурсов', - 'language': 'rus-cyrl' - } - ] + assert data.get("responsibilityStatement") == [ + [{"value": "B.I. Bursov"}, {"value": "Б.И. Бурсов", "language": "rus-cyrl"}] ] @@ -689,59 +625,39 @@ def test_marc21_to_title_245_with_part_without_246(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('title') == [ + assert data.get("title") == [ { - 'mainTitle': [ - {'value': 'L.N. Tolstoĭ'}, - { - 'value': 'Л.Н. Толстой', - 'language': 'rus-cyrl' - } + "mainTitle": [ + {"value": "L.N. Tolstoĭ"}, + {"value": "Л.Н. 
Толстой", "language": "rus-cyrl"}, ], - 'subtitle': [ - {'value': 'seminariĭ'}, - { - 'value': 'семинарий', - 'language': 'rus-cyrl' - } + "subtitle": [ + {"value": "seminariĭ"}, + {"value": "семинарий", "language": "rus-cyrl"}, ], - 'type': 'bf:Title', - 'part': [{ - 'partNumber': [ - {'value': 'part number'}, - { - 'value': 'Part Number', - 'language': 'rus-cyrl' - } + "type": "bf:Title", + "part": [ + { + "partNumber": [ + {"value": "part number"}, + {"value": "Part Number", "language": "rus-cyrl"}, + ], + "partName": [ + {"value": "part name"}, + {"value": "Part Name", "language": "rus-cyrl"}, ], - 'partName': [ - {'value': 'part name'}, - { - 'value': 'Part Name', - 'language': 'rus-cyrl' - } - ] }, { - 'partNumber': [ - {'value': 'part number 2'}, - { - 'value': 'Part Number 2', - 'language': 'rus-cyrl' - } + "partNumber": [ + {"value": "part number 2"}, + {"value": "Part Number 2", "language": "rus-cyrl"}, ] - } - ] + }, + ], } ] - assert data.get('responsibilityStatement') == [ - [ - {'value': 'B.I. Bursov'}, - { - 'value': 'Б.И. Бурсов', - 'language': 'rus-cyrl' - } - ] + assert data.get("responsibilityStatement") == [ + [{"value": "B.I. Bursov"}, {"value": "Б.И. Бурсов", "language": "rus-cyrl"}] ] @@ -776,38 +692,21 @@ def test_marc21_to_title_with_multiple_parts(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('title') == [ + assert data.get("title") == [ { - 'type': 'bf:Title', - 'mainTitle': [{'value': 'Statistique'}], - 'subtitle': [{'value': 'exercices corrigés'}], - 'part': [ - { - 'partNumber': [{'value': 'T. 1'}], - 'partName': [{'value': 'Tome 1'}] - }, - { - 'partNumber': [{'value': 'T. 2'}], - 'partName': [{'value': 'Tome 2'}] - }, - { - 'partName': [{'value': 'Tome 3'}] - - }, - { - 'partNumber': [{'value': 'T. 4'}] - }, - { - 'partNumber': [{'value': 'T. 5'}] - } - ] + "type": "bf:Title", + "mainTitle": [{"value": "Statistique"}], + "subtitle": [{"value": "exercices corrigés"}], + "part": [ + {"partNumber": [{"value": "T. 1"}], "partName": [{"value": "Tome 1"}]}, + {"partNumber": [{"value": "T. 2"}], "partName": [{"value": "Tome 2"}]}, + {"partName": [{"value": "Tome 3"}]}, + {"partNumber": [{"value": "T. 4"}]}, + {"partNumber": [{"value": "T. 5"}]}, + ], } ] - assert data.get('responsibilityStatement') == [ - [ - {'value': 'Christian Labrousse'} - ] - ] + assert data.get("responsibilityStatement") == [[{"value": "Christian Labrousse"}]] def test_marc21_to_title_245_and_246(): @@ -845,38 +744,26 @@ def test_marc21_to_title_245_and_246(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('title') == [ + assert data.get("title") == [ { - 'type': 'bf:Title', - 'mainTitle': [ - {'value': 'L.N. Tolstoĭ'}, - { - 'value': 'Л.Н. Толстой', - 'language': 'rus-cyrl' - } + "type": "bf:Title", + "mainTitle": [ + {"value": "L.N. Tolstoĭ"}, + {"value": "Л.Н. Толстой", "language": "rus-cyrl"}, + ], + "subtitle": [ + {"value": "seminariĭ"}, + {"value": "семинарий", "language": "rus-cyrl"}, ], - 'subtitle': [ - {'value': 'seminariĭ'}, - { - 'value': 'семинарий', - 'language': 'rus-cyrl' - } - ] }, { - 'type': 'bf:VariantTitle', - 'mainTitle': [{'value': 'L.N. Tolstoj'}], - 'subtitle': [{'value': 'seminarij / B.I. Bursov'}] - } + "type": "bf:VariantTitle", + "mainTitle": [{"value": "L.N. Tolstoj"}], + "subtitle": [{"value": "seminarij / B.I. Bursov"}], + }, ] - assert data.get('responsibilityStatement') == [ - [ - {'value': 'B.I. Bursov'}, - { - 'value': 'Б.И. 
Бурсов', - 'language': 'rus-cyrl' - } - ] + assert data.get("responsibilityStatement") == [ + [{"value": "B.I. Bursov"}, {"value": "Б.И. Бурсов", "language": "rus-cyrl"}] ] @@ -915,45 +802,27 @@ def test_marc21_to_title_245_and_246_with_multiple_responsibilities(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('title') == [ + assert data.get("title") == [ { - 'type': 'bf:Title', - 'mainTitle': [ - {'value': 'L.N. Tolstoĭ'}, - { - 'value': 'Л.Н. Толстой', - 'language': 'rus-cyrl' - } + "type": "bf:Title", + "mainTitle": [ + {"value": "L.N. Tolstoĭ"}, + {"value": "Л.Н. Толстой", "language": "rus-cyrl"}, + ], + "subtitle": [ + {"value": "seminariĭ"}, + {"value": "семинарий", "language": "rus-cyrl"}, ], - 'subtitle': [ - {'value': 'seminariĭ'}, - { - 'value': 'семинарий', - 'language': 'rus-cyrl' - } - ] }, { - 'type': 'bf:VariantTitle', - 'mainTitle': [{'value': 'L.N. Tolstoj'}], - 'subtitle': [{'value': 'seminarij / B.I. Bursov'}] - } + "type": "bf:VariantTitle", + "mainTitle": [{"value": "L.N. Tolstoj"}], + "subtitle": [{"value": "seminarij / B.I. Bursov"}], + }, ] - assert data.get('responsibilityStatement') == [ - [ - {'value': 'B.I. Bursov'}, - { - 'value': 'Б.И. Бурсов', - 'language': 'rus-cyrl' - } - ], - [ - {'value': 'Tolstoĭ'}, - { - 'value': 'Толстой', - 'language': 'rus-cyrl' - } - ] + assert data.get("responsibilityStatement") == [ + [{"value": "B.I. Bursov"}, {"value": "Б.И. Бурсов", "language": "rus-cyrl"}], + [{"value": "Tolstoĭ"}, {"value": "Толстой", "language": "rus-cyrl"}], ] @@ -986,29 +855,24 @@ def test_marc21_to_title_with_variant_without_subtitle(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('title') == [ + assert data.get("title") == [ { - 'type': 'bf:Title', - 'mainTitle': [ - {'value': 'TRANEL'} + "type": "bf:Title", + "mainTitle": [{"value": "TRANEL"}], + "subtitle": [ + {"value": "travaux neuchâtelois de linguistique"}, ], - 'subtitle': [ - {'value': 'travaux neuchâtelois de linguistique'}, - ] }, { - 'type': 'bf:VariantTitle', - 'mainTitle': [ - {'value': 'Travaux neuchâtelois de linguistique'} + "type": "bf:VariantTitle", + "mainTitle": [{"value": "Travaux neuchâtelois de linguistique"}], + "part": [ + {"partNumber": [{"value": "T. 1"}], "partName": [{"value": "Tome 1"}]} ], - 'part': [{ - 'partNumber': [{'value': 'T. 
1'}], - 'partName': [{'value': 'Tome 1'}] - }] - } + }, ] - assert data.get('responsibilityStatement') == [ - [{'value': 'Institut de linguistique, UNINE'}] + assert data.get("responsibilityStatement") == [ + [{"value": "Institut de linguistique, UNINE"}] ] @@ -1038,19 +902,14 @@ def test_marc21_to_title_with_variant_both_without_subtitle(): marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('title') == [ + assert data.get("title") == [ + {"type": "bf:Title", "mainTitle": [{"value": "3 filles et 10 kilos en trop"}]}, { - 'type': 'bf:Title', - 'mainTitle': [{'value': '3 filles et 10 kilos en trop'}] + "type": "bf:VariantTitle", + "mainTitle": [{"value": "Trois filles et dix kilos en trop"}], }, - { - 'type': 'bf:VariantTitle', - 'mainTitle': [{'value': 'Trois filles et dix kilos en trop'}] - } - ] - assert data.get('responsibilityStatement') == [ - [{'value': 'Jacqueline Wilson'}] ] + assert data.get("responsibilityStatement") == [[{"value": "Jacqueline Wilson"}]] def test_marc21_to_title_with_parallel_title(): @@ -1060,7 +919,8 @@ def test_marc21_to_title_with_parallel_title(): - subfield 245 $a did not end with '=' - field 246 with subfield $a only """ - marc21xml = """ + marc21xml = ( + """ 001224s1980 sz |||||| ||||00|| |ger d @@ -1069,8 +929,8 @@ def test_marc21_to_title_with_parallel_title(): Schatzkammer der Schweiz : - Landesmuseums = Le Patrimoine Suisse : joyaux """ \ - """= Patrimonio : oggetti preziosi / + Landesmuseums = Le Patrimoine Suisse : joyaux """ + """= Patrimonio : oggetti preziosi / Redaktion J. Schneider ; Texte R. Degan @@ -1080,34 +940,34 @@ def test_marc21_to_title_with_parallel_title(): Patrimonio culturale della Svizzera """ + ) marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('title') == [ + assert data.get("title") == [ { - 'type': 'bf:Title', - 'mainTitle': [{'value': 'Schatzkammer der Schweiz'}], - 'subtitle': [{'value': 'Landesmuseums'}] + "type": "bf:Title", + "mainTitle": [{"value": "Schatzkammer der Schweiz"}], + "subtitle": [{"value": "Landesmuseums"}], }, { - 'type': 'bf:ParallelTitle', - 'mainTitle': [{'value': 'Le Patrimoine Suisse'}], - 'subtitle': [{'value': 'joyaux'}] + "type": "bf:ParallelTitle", + "mainTitle": [{"value": "Le Patrimoine Suisse"}], + "subtitle": [{"value": "joyaux"}], }, { - 'type': 'bf:ParallelTitle', - 'mainTitle': [{'value': 'Patrimonio'}], - 'subtitle': [{'value': 'oggetti preziosi'}] - + "type": "bf:ParallelTitle", + "mainTitle": [{"value": "Patrimonio"}], + "subtitle": [{"value": "oggetti preziosi"}], }, { - 'type': 'bf:VariantTitle', - 'mainTitle': [{'value': 'Patrimonio culturale della Svizzera'}] - } + "type": "bf:VariantTitle", + "mainTitle": [{"value": "Patrimonio culturale della Svizzera"}], + }, ] - assert data.get('responsibilityStatement') == [ - [{'value': 'Redaktion J. Schneider'}], - [{'value': 'Texte R. Degan'}] + assert data.get("responsibilityStatement") == [ + [{"value": "Redaktion J. Schneider"}], + [{"value": "Texte R. Degan"}], ] @@ -1146,62 +1006,38 @@ def test_marc21_to_title_245_with_parallel_title_and_246(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('title') == [ + assert data.get("title") == [ { - 'type': 'bf:Title', - 'mainTitle': [ - {'value': 'L.N. Tolstoĭ'}, - { - 'value': 'Л.Н. Толстой', - 'language': 'rus-cyrl' - } + "type": "bf:Title", + "mainTitle": [ + {"value": "L.N. Tolstoĭ"}, + {"value": "Л.Н. 
Толстой", "language": "rus-cyrl"}, + ], + "subtitle": [ + {"value": "seminariĭ"}, + {"value": "семинарий", "language": "rus-cyrl"}, ], - 'subtitle': [ - {'value': 'seminariĭ'}, - { - 'value': 'семинарий', - 'language': 'rus-cyrl' - } - ] }, { - 'type': 'bf:ParallelTitle', - 'mainTitle': [ - {'value': 'TOTO'}, - { - 'value': 'toto', - 'language': 'rus-cyrl' - } - ], - 'subtitle': [ - {'value': 'TITI'}, - { - 'value': 'titi', - 'language': 'rus-cyrl' - } - ] + "type": "bf:ParallelTitle", + "mainTitle": [{"value": "TOTO"}, {"value": "toto", "language": "rus-cyrl"}], + "subtitle": [{"value": "TITI"}, {"value": "titi", "language": "rus-cyrl"}], }, { - 'type': 'bf:VariantTitle', - 'mainTitle': [{'value': 'L.N. Tolstoj'}], - 'subtitle': [{'value': 'seminarij / B.I. Bursov'}] - } + "type": "bf:VariantTitle", + "mainTitle": [{"value": "L.N. Tolstoj"}], + "subtitle": [{"value": "seminarij / B.I. Bursov"}], + }, ] - assert data.get('responsibilityStatement') == [ - [ - {'value': 'B.I. Bursov'}, - { - 'value': 'Б.И. Бурсов', - 'language': 'rus-cyrl' - } - ] + assert data.get("responsibilityStatement") == [ + [{"value": "B.I. Bursov"}, {"value": "Б.И. Бурсов", "language": "rus-cyrl"}] ] # languages: 008 and 041 [$a, repetitive] def test_marc21_to_language(): """Test dojson marc21languages.""" - field_008 = '881005s1984 xxu|||||| ||||00|| |ara d' + field_008 = "881005s1984 xxu|||||| ||||00|| |ara d" marc21xml = f""" {field_008} @@ -1214,17 +1050,11 @@ def test_marc21_to_language(): marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('language') == [ - { - 'type': 'bf:Language', - 'value': 'ara' - }, - { - 'type': 'bf:Language', - 'value': 'eng' - } + assert data.get("language") == [ + {"type": "bf:Language", "value": "ara"}, + {"type": "bf:Language", "value": "eng"}, ] - field_008 = '881005s1984 xxu|||||| ||||00|| |ara d' + field_008 = "881005s1984 xxu|||||| ||||00|| |ara d" marc21xml = f""" {field_008} @@ -1238,20 +1068,12 @@ def test_marc21_to_language(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('language') == [ - { - 'type': 'bf:Language', - 'value': 'ara' - }, { - 'type': 'bf:Language', - 'value': 'eng' - }, - { - 'type': 'bf:Language', - 'value': 'fre' - } + assert data.get("language") == [ + {"type": "bf:Language", "value": "ara"}, + {"type": "bf:Language", "value": "eng"}, + {"type": "bf:Language", "value": "fre"}, ] - field_008 = '881005s1984 xxu|||||| ||||00|| |ara d' + field_008 = "881005s1984 xxu|||||| ||||00|| |ara d" marc21xml = f""" @@ -1263,17 +1085,11 @@ def test_marc21_to_language(): marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('language') == [ - { - 'type': 'bf:Language', - 'value': 'ara' - }, - { - 'type': 'bf:Language', - 'value': 'eng' - } + assert data.get("language") == [ + {"type": "bf:Language", "value": "ara"}, + {"type": "bf:Language", "value": "eng"}, ] - field_008 = '881005s1984 xxu|||||| ||||00|| |ara d' + field_008 = "881005s1984 xxu|||||| ||||00|| |ara d" marc21xml = f""" {field_008} @@ -1285,21 +1101,12 @@ def test_marc21_to_language(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('language') == [ - { - 'type': 'bf:Language', - 'value': 'ara' - }, - { - 'type': 'bf:Language', - 'value': 'eng' - }, - { - 'type': 'bf:Language', - 'value': 'rus' - } + assert data.get("language") == [ + {"type": "bf:Language", "value": "ara"}, + {"type": "bf:Language", "value": "eng"}, + {"type": "bf:Language", "value": "rus"}, ] 
- field_008 = '881005s1984 xxu|||||| ||||00|| |ara d' + field_008 = "881005s1984 xxu|||||| ||||00|| |ara d" marc21xml = f""" {field_008} @@ -1310,16 +1117,12 @@ def test_marc21_to_language(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('language') == [ - { - 'type': 'bf:Language', - 'value': 'ara', - 'note': 'LANGUAGE NOTE' - } + assert data.get("language") == [ + {"type": "bf:Language", "value": "ara", "note": "LANGUAGE NOTE"} ] -@mock.patch('requests.Session.get') +@mock.patch("requests.Session.get") def test_marc21_to_contribution(mock_get, mef_agents_url): """Test dojson marc21_to_contribution.""" marc21xml = """ @@ -1356,40 +1159,34 @@ def test_marc21_to_contribution(mock_get, mef_agents_url): marc21json = create_record(marc21xml) data = marc21.do(marc21json) - contribution = data.get('contribution') + contribution = data.get("contribution") assert contribution == [ { - 'entity': { - 'authorized_access_point': 'Jean-Paul II, Pape, 1954', - 'type': 'bf:Person' + "entity": { + "authorized_access_point": "Jean-Paul II, Pape, 1954", + "type": "bf:Person", }, - 'role': ['aut'] + "role": ["aut"], }, { - 'entity': { - 'authorized_access_point': - 'Dumont, Jean, 1921-2014, Historien', - 'type': 'bf:Person' + "entity": { + "authorized_access_point": "Dumont, Jean, 1921-2014, Historien", + "type": "bf:Person", }, - 'role': ['edt'] + "role": ["edt"], }, { - 'entity': { - 'type': 'bf:Organisation', - 'authorized_access_point': 'RERO' - }, - 'role': ['ctb'] + "entity": {"type": "bf:Organisation", "authorized_access_point": "RERO"}, + "role": ["ctb"], }, { - 'entity': { - 'authorized_access_point': - 'Biennale de céramique contemporaine (17 : 2003 : ' - 'Châteauroux)', - 'type': 'bf:Organisation' + "entity": { + "authorized_access_point": "Biennale de céramique contemporaine (17 : 2003 : " + "Châteauroux)", + "type": "bf:Organisation", }, - 'role': ['aut'] - } - + "role": ["aut"], + }, ] marc21xml = """ @@ -1399,20 +1196,15 @@ def test_marc21_to_contribution(mock_get, mef_agents_url): """ - mock_get.return_value = mock_response(json_data={ - 'pid': 'test', - 'type': 'bf:Person', - 'idref': {'pid': 'XXXXXXXX'} - }) + mock_get.return_value = mock_response( + json_data={"pid": "test", "type": "bf:Person", "idref": {"pid": "XXXXXXXX"}} + ) marc21json = create_record(marc21xml) data = marc21.do(marc21json) - contribution = data.get('contribution') - assert contribution == [{ - 'entity': { - '$ref': f'{mef_agents_url}/idref/XXXXXXXX' - }, - 'role': ['cre'] - }] + contribution = data.get("contribution") + assert contribution == [ + {"entity": {"$ref": f"{mef_agents_url}/idref/XXXXXXXX"}, "role": ["cre"]} + ] marc21xml = """ @@ -1425,18 +1217,17 @@ def test_marc21_to_contribution(mock_get, mef_agents_url): mock_get.return_value = mock_response(status=400) marc21json = create_record(marc21xml) data = marc21.do(marc21json) - contribution = data.get('contribution') - assert contribution == [{ - 'entity': { - 'authorized_access_point': 'Jean-Paul', - 'type': 'bf:Person', - 'identifiedBy': { - 'type': 'IdRef', - 'value': 'YYYYYYYY' - } - }, - 'role': ['cre'] - }] + contribution = data.get("contribution") + assert contribution == [ + { + "entity": { + "authorized_access_point": "Jean-Paul", + "type": "bf:Person", + "identifiedBy": {"type": "IdRef", "value": "YYYYYYYY"}, + }, + "role": ["cre"], + } + ] # Copyright Date: [264 _4 $c non repetitive] @@ -1452,7 +1243,7 @@ def test_marc21copyrightdate(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) 
- assert data.get('copyrightDate') == ['© 1971'] + assert data.get("copyrightDate") == ["© 1971"] marc21xml = """ @@ -1463,7 +1254,7 @@ def test_marc21copyrightdate(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('copyrightDate') == ['© 1971 [extra 1973]'] + assert data.get("copyrightDate") == ["© 1971 [extra 1973]"] def test_marc21_to_provision_activity_manufacture_date(): @@ -1491,23 +1282,14 @@ def test_marc21_to_provision_activity_manufacture_date(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('provisionActivity') == [ + assert data.get("provisionActivity") == [ { - 'type': 'bf:Manufacture', - 'statement': [ - { - 'label': [{'value': 'Bienne'}], - 'type': 'bf:Place' - }, - { - 'label': [{'value': 'Impr. Weber'}], - 'type': 'bf:Agent' - }, - { - 'label': [{'value': '[2006]'}], - 'type': 'Date' - } - ] + "type": "bf:Manufacture", + "statement": [ + {"label": [{"value": "Bienne"}], "type": "bf:Place"}, + {"label": [{"value": "Impr. Weber"}], "type": "bf:Agent"}, + {"label": [{"value": "[2006]"}], "type": "Date"}, + ], } ] @@ -1524,14 +1306,14 @@ def test_marc21_provisionActivity_without_264(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('provisionActivity') == [{ - 'type': 'bf:Publication', - 'place': [{ - 'country': 'sz' - }], - 'startDate': 2006, - 'endDate': 2010 - }] + assert data.get("provisionActivity") == [ + { + "type": "bf:Publication", + "place": [{"country": "sz"}], + "startDate": 2006, + "endDate": 2010, + } + ] def test_marc21_provisionActivity_without_264_with_752(): @@ -1551,18 +1333,19 @@ def test_marc21_provisionActivity_without_264_with_752(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('provisionActivity') == [{ - 'type': 'bf:Publication', - 'place': [{ - 'country': 'sz', - 'identifiedBy': { - 'type': 'IdRef', - 'value': '027401421' + assert data.get("provisionActivity") == [ + { + "type": "bf:Publication", + "place": [ + { + "country": "sz", + "identifiedBy": {"type": "IdRef", "value": "027401421"}, } - }], - 'startDate': 2006, - 'endDate': 2010 - }] + ], + "startDate": 2006, + "endDate": 2010, + } + ] def test_marc21_provisionActivity_with_original_date(): @@ -1576,15 +1359,15 @@ def test_marc21_provisionActivity_with_original_date(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('provisionActivity') == [{ - 'type': 'bf:Publication', - 'place': [{ - 'country': 'sz' - }], - 'startDate': 1997, - 'original_date': 1849, - 'endDate': 1849 - }] + assert data.get("provisionActivity") == [ + { + "type": "bf:Publication", + "place": [{"country": "sz"}], + "startDate": 1997, + "original_date": 1849, + "endDate": 1849, + } + ] def test_marc21_to_provision_activity_canton(): @@ -1625,64 +1408,32 @@ def test_marc21_to_provision_activity_canton(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('provisionActivity') == [ + assert data.get("provisionActivity") == [ { - 'type': 'bf:Publication', - 'place': [{ - 'canton': 'be', - 'country': 'sz' - }], - 'statement': [ - { - 'label': [{'value': 'Biel/Bienne'}], - 'type': 'bf:Place' - }, - { - 'label': [{'value': 'Centre PasquArt'}], - 'type': 'bf:Agent' - }, - { - 'label': [{'value': 'Nürnberg'}], - 'type': 'bf:Place' - }, - { - 'label': [{'value': 'Verlag für Moderne Kunst'}], - 'type': 'bf:Agent' - }, - { - 'label': [{'value': 'Manchester'}], - 'type': 'bf:Place' - }, + 
"type": "bf:Publication", + "place": [{"canton": "be", "country": "sz"}], + "statement": [ + {"label": [{"value": "Biel/Bienne"}], "type": "bf:Place"}, + {"label": [{"value": "Centre PasquArt"}], "type": "bf:Agent"}, + {"label": [{"value": "Nürnberg"}], "type": "bf:Place"}, + {"label": [{"value": "Verlag für Moderne Kunst"}], "type": "bf:Agent"}, + {"label": [{"value": "Manchester"}], "type": "bf:Place"}, { - 'label': [{ - 'value': 'distrib. in the United Kingdom [etc.]' - }], - 'type': 'bf:Agent' + "label": [{"value": "distrib. in the United Kingdom [etc.]"}], + "type": "bf:Agent", }, - { - 'label': [{'value': '[2006-2010]'}], - 'type': 'Date' - } + {"label": [{"value": "[2006-2010]"}], "type": "Date"}, ], - 'startDate': 2006, - 'endDate': 2010 - }, { - 'type': 'bf:Manufacture', - 'statement': [ - { - 'label': [ - {'value': 'Bienne'} - ], - 'type': 'bf:Place' - }, - { - 'label': [ - {'value': 'Impr. Weber'} - ], - 'type': 'bf:Agent' - } - ] - } + "startDate": 2006, + "endDate": 2010, + }, + { + "type": "bf:Manufacture", + "statement": [ + {"label": [{"value": "Bienne"}], "type": "bf:Place"}, + {"label": [{"value": "Impr. Weber"}], "type": "bf:Agent"}, + ], + }, ] marc21xml = """ @@ -1697,14 +1448,11 @@ def test_marc21_to_provision_activity_canton(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('provisionActivity') == [ + assert data.get("provisionActivity") == [ { - 'type': 'bf:Publication', - 'place': [{ - 'canton': 'vd', - 'country': 'sz' - }], - 'startDate': 1998 + "type": "bf:Publication", + "place": [{"canton": "vd", "country": "sz"}], + "startDate": 1998, } ] @@ -1725,14 +1473,8 @@ def test_marc21_to_provision_activity_obsolete_countries(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('provisionActivity') == [ - { - 'type': 'bf:Publication', - 'place': [{ - 'country': 'xxc' - }], - 'startDate': 1998 - } + assert data.get("provisionActivity") == [ + {"type": "bf:Publication", "place": [{"country": "xxc"}], "startDate": 1998} ] marc21xml = """ @@ -1746,13 +1488,15 @@ def test_marc21_to_provision_activity_obsolete_countries(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('provisionActivity') == [ + assert data.get("provisionActivity") == [ { - 'type': 'bf:Publication', - 'place': [{ - 'country': 'er', - }], - 'startDate': 1998 + "type": "bf:Publication", + "place": [ + { + "country": "er", + } + ], + "startDate": 1998, } ] @@ -1767,13 +1511,15 @@ def test_marc21_to_provision_activity_obsolete_countries(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('provisionActivity') == [ + assert data.get("provisionActivity") == [ { - 'type': 'bf:Publication', - 'place': [{ - 'country': 'li', - }], - 'startDate': 1998 + "type": "bf:Publication", + "place": [ + { + "country": "li", + } + ], + "startDate": 1998, } ] @@ -1788,13 +1534,15 @@ def test_marc21_to_provision_activity_obsolete_countries(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('provisionActivity') == [ + assert data.get("provisionActivity") == [ { - 'type': 'bf:Publication', - 'place': [{ - 'country': 'lv', - }], - 'startDate': 1998 + "type": "bf:Publication", + "place": [ + { + "country": "lv", + } + ], + "startDate": 1998, } ] @@ -1809,13 +1557,15 @@ def test_marc21_to_provision_activity_obsolete_countries(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('provisionActivity') 
== [ + assert data.get("provisionActivity") == [ { - 'type': 'bf:Publication', - 'place': [{ - 'country': 'xxk', - }], - 'startDate': 1998 + "type": "bf:Publication", + "place": [ + { + "country": "xxk", + } + ], + "startDate": 1998, } ] @@ -1830,13 +1580,15 @@ def test_marc21_to_provision_activity_obsolete_countries(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('provisionActivity') == [ + assert data.get("provisionActivity") == [ { - 'type': 'bf:Publication', - 'place': [{ - 'country': 'un', - }], - 'startDate': 1998 + "type": "bf:Publication", + "place": [ + { + "country": "un", + } + ], + "startDate": 1998, } ] @@ -1851,13 +1603,15 @@ def test_marc21_to_provision_activity_obsolete_countries(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('provisionActivity') == [ + assert data.get("provisionActivity") == [ { - 'type': 'bf:Publication', - 'place': [{ - 'country': 'xxu', - }], - 'startDate': 1998 + "type": "bf:Publication", + "place": [ + { + "country": "xxu", + } + ], + "startDate": 1998, } ] @@ -1872,13 +1626,15 @@ def test_marc21_to_provision_activity_obsolete_countries(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('provisionActivity') == [ + assert data.get("provisionActivity") == [ { - 'type': 'bf:Publication', - 'place': [{ - 'country': 'xxr', - }], - 'startDate': 1998 + "type": "bf:Publication", + "place": [ + { + "country": "xxr", + } + ], + "startDate": 1998, } ] @@ -1893,13 +1649,15 @@ def test_marc21_to_provision_activity_obsolete_countries(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('provisionActivity') == [ + assert data.get("provisionActivity") == [ { - 'type': 'bf:Publication', - 'place': [{ - 'country': 'ye', - }], - 'startDate': 1998 + "type": "bf:Publication", + "place": [ + { + "country": "ye", + } + ], + "startDate": 1998, } ] @@ -1923,31 +1681,17 @@ def test_marc21_to_provision_activity_1_place_2_agents(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('provisionActivity') == [ + assert data.get("provisionActivity") == [ { - 'type': 'bf:Publication', - 'place': [{ - 'country': 'fr' - }], - 'statement': [ - { - 'label': [{'value': '[Paris]'}], - 'type': 'bf:Place' - }, - { - 'label': [{'value': 'Desclée de Brouwer [puis]'}], - 'type': 'bf:Agent' - }, - { - 'label': [{'value': 'Etudes augustiniennes'}], - 'type': 'bf:Agent' - }, - { - 'label': [{'value': '1969-'}], - 'type': 'Date' - } + "type": "bf:Publication", + "place": [{"country": "fr"}], + "statement": [ + {"label": [{"value": "[Paris]"}], "type": "bf:Place"}, + {"label": [{"value": "Desclée de Brouwer [puis]"}], "type": "bf:Agent"}, + {"label": [{"value": "Etudes augustiniennes"}], "type": "bf:Agent"}, + {"label": [{"value": "1969-"}], "type": "Date"}, ], - 'startDate': 1969 + "startDate": 1969, } ] @@ -1977,35 +1721,22 @@ def test_marc21_to_provision_activity_1_place_2_agents_with_one_752(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('provisionActivity') == [ + assert data.get("provisionActivity") == [ { - 'type': 'bf:Publication', - 'place': [{ - 'country': 'fr', - 'identifiedBy': { - 'type': 'IdRef', - 'value': '027401421' - } - }], - 'statement': [ + "type": "bf:Publication", + "place": [ { - 'label': [{'value': '[Paris]'}], - 'type': 'bf:Place' - }, - { - 'label': [{'value': 'Desclée de Brouwer [puis]'}], - 'type': 'bf:Agent' - }, - { - 
'label': [{'value': 'Etudes augustiniennes'}], - 'type': 'bf:Agent' - }, - { - 'label': [{'value': '1969-'}], - 'type': 'Date' + "country": "fr", + "identifiedBy": {"type": "IdRef", "value": "027401421"}, } ], - 'startDate': 1969 + "statement": [ + {"label": [{"value": "[Paris]"}], "type": "bf:Place"}, + {"label": [{"value": "Desclée de Brouwer [puis]"}], "type": "bf:Agent"}, + {"label": [{"value": "Etudes augustiniennes"}], "type": "bf:Agent"}, + {"label": [{"value": "1969-"}], "type": "Date"}, + ], + "startDate": 1969, } ] @@ -2040,42 +1771,26 @@ def test_marc21_to_provision_activity_1_place_2_agents_with_two_752(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('provisionActivity') == [ - { - 'type': 'bf:Publication', - 'place': [{ - 'country': 'fr', - 'identifiedBy': { - 'type': 'IdRef', - 'value': '027401421' - } - }, { - 'country': 'xx', - 'identifiedBy': { - 'type': 'RERO', - 'value': 'A000000001' - } - } - ], - 'statement': [ - { - 'label': [{'value': '[Paris]'}], - 'type': 'bf:Place' - }, + assert data.get("provisionActivity") == [ + { + "type": "bf:Publication", + "place": [ { - 'label': [{'value': 'Desclée de Brouwer [puis]'}], - 'type': 'bf:Agent' + "country": "fr", + "identifiedBy": {"type": "IdRef", "value": "027401421"}, }, { - 'label': [{'value': 'Etudes augustiniennes'}], - 'type': 'bf:Agent' + "country": "xx", + "identifiedBy": {"type": "RERO", "value": "A000000001"}, }, - { - 'label': [{'value': '1969-'}], - 'type': 'Date' - } ], - 'startDate': 1969 + "statement": [ + {"label": [{"value": "[Paris]"}], "type": "bf:Place"}, + {"label": [{"value": "Desclée de Brouwer [puis]"}], "type": "bf:Agent"}, + {"label": [{"value": "Etudes augustiniennes"}], "type": "bf:Agent"}, + {"label": [{"value": "1969-"}], "type": "Date"}, + ], + "startDate": 1969, } ] @@ -2098,37 +1813,24 @@ def test_marc21_to_provision_activity_unknown_place_2_agents(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('provisionActivity') == [ + assert data.get("provisionActivity") == [ { - 'type': 'bf:Publication', - 'place': [{ - 'country': 'be' - }], - 'statement': [ + "type": "bf:Publication", + "place": [{"country": "be"}], + "statement": [ { - 'label': [ - {'value': '[Lieu de publication non identifié]'} - ], - 'type': 'bf:Place' - }, - { - 'label': [{'value': 'Labor'}], - 'type': 'bf:Agent' + "label": [{"value": "[Lieu de publication non identifié]"}], + "type": "bf:Place", }, - { - 'label': [{'value': 'Nathan'}], - 'type': 'bf:Agent' - }, - { - 'label': [{'value': '1968'}], - 'type': 'Date' - } + {"label": [{"value": "Labor"}], "type": "bf:Agent"}, + {"label": [{"value": "Nathan"}], "type": "bf:Agent"}, + {"label": [{"value": "1968"}], "type": "Date"}, ], - 'startDate': 1968 + "startDate": 1968, } ] - assert create_publication_statement(data.get('provisionActivity')[0]) == [ - '[Lieu de publication non identifié] : Labor ; Nathan, 1968' + assert create_publication_statement(data.get("provisionActivity")[0]) == [ + "[Lieu de publication non identifié] : Labor ; Nathan, 1968" ] @@ -2152,39 +1854,22 @@ def test_marc21_to_provision_activity_3_places_dann_2_agents(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('provisionActivity') == [ + assert data.get("provisionActivity") == [ { - 'type': 'bf:Publication', - 'place': [{ - 'country': 'gw' - }], - 'statement': [ - { - 'label': [{'value': 'Hamm (Westf.)'}], - 'type': 'bf:Place' - }, - { - 'label': [{'value': '[dann] Herzberg'}], - 
'type': 'bf:Place' - }, - { - 'label': [{'value': '[dann] Nordhausen'}], - 'type': 'bf:Place' - }, - { - 'label': [{'value': 'T. Bautz'}], - 'type': 'bf:Agent' - }, - { - 'label': [{'value': '1975-'}], - 'type': 'Date' - } + "type": "bf:Publication", + "place": [{"country": "gw"}], + "statement": [ + {"label": [{"value": "Hamm (Westf.)"}], "type": "bf:Place"}, + {"label": [{"value": "[dann] Herzberg"}], "type": "bf:Place"}, + {"label": [{"value": "[dann] Nordhausen"}], "type": "bf:Place"}, + {"label": [{"value": "T. Bautz"}], "type": "bf:Agent"}, + {"label": [{"value": "1975-"}], "type": "Date"}, ], - 'startDate': 1975 + "startDate": 1975, } ] - assert create_publication_statement(data.get('provisionActivity')[0]) == [ - 'Hamm (Westf.) ; [dann] Herzberg ; [dann] Nordhausen : T. Bautz, 1975-' + assert create_publication_statement(data.get("provisionActivity")[0]) == [ + "Hamm (Westf.) ; [dann] Herzberg ; [dann] Nordhausen : T. Bautz, 1975-" ] @@ -2207,35 +1892,21 @@ def test_marc21_to_provision_activity_2_places_1_agent(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('provisionActivity') == [ + assert data.get("provisionActivity") == [ { - 'type': 'bf:Publication', - 'place': [{ - 'country': 'sz' - }], - 'statement': [ - { - 'label': [{'value': '[Louvain]'}], - 'type': 'bf:Place' - }, - { - 'label': [{'value': '[Paris]'}], - 'type': 'bf:Place' - }, - { - 'label': [{'value': '[éditeur non identifié]'}], - 'type': 'bf:Agent' - }, - { - 'label': [{'value': '[1966]'}], - 'type': 'Date' - } + "type": "bf:Publication", + "place": [{"country": "sz"}], + "statement": [ + {"label": [{"value": "[Louvain]"}], "type": "bf:Place"}, + {"label": [{"value": "[Paris]"}], "type": "bf:Place"}, + {"label": [{"value": "[éditeur non identifié]"}], "type": "bf:Agent"}, + {"label": [{"value": "[1966]"}], "type": "Date"}, ], - 'startDate': 1966 + "startDate": 1966, } ] - assert create_publication_statement(data.get('provisionActivity')[0]) == [ - '[Louvain] ; [Paris] : [éditeur non identifié], [1966]' + assert create_publication_statement(data.get("provisionActivity")[0]) == [ + "[Louvain] ; [Paris] : [éditeur non identifié], [1966]" ] @@ -2261,28 +1932,20 @@ def test_marc21_to_provision_activity_1_place_1_agent_reprint_date(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('provisionActivity') == [ + assert data.get("provisionActivity") == [ { - 'type': 'bf:Publication', - 'place': [{ - 'country': 'xxu' - }], - 'statement': [ + "type": "bf:Publication", + "place": [{"country": "xxu"}], + "statement": [ + {"label": [{"value": "Washington"}], "type": "bf:Place"}, { - 'label': [{'value': 'Washington'}], - 'type': 'bf:Place' + "label": [{"value": "Carnegie Institution of Washington"}], + "type": "bf:Agent", }, - { - 'label': [{'value': 'Carnegie Institution of Washington'}], - 'type': 'bf:Agent' - }, - { - 'label': [{'value': '1916'}], - 'type': 'Date' - } + {"label": [{"value": "1916"}], "type": "Date"}, ], - 'startDate': 1758, - 'endDate': 1916 + "startDate": 1758, + "endDate": 1916, } ] @@ -2305,32 +1968,21 @@ def test_marc21_to_provision_activity_1_place_1_agent_uncertain_date(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('provisionActivity') == [ + assert data.get("provisionActivity") == [ { - 'type': 'bf:Publication', - 'place': [{ - 'country': 'fr' - }], - 'statement': [ - { - 'label': [{'value': 'Aurillac'}], - 'type': 'bf:Place' - }, - { - 'label': [{'value': 'Impr. 
moderne'}], - 'type': 'bf:Agent' - }, - { - 'label': [{'value': '[1941?]'}], - 'type': 'Date' - } + "type": "bf:Publication", + "place": [{"country": "fr"}], + "statement": [ + {"label": [{"value": "Aurillac"}], "type": "bf:Place"}, + {"label": [{"value": "Impr. moderne"}], "type": "bf:Agent"}, + {"label": [{"value": "[1941?]"}], "type": "Date"}, ], - 'note': 'Date(s) uncertain or unknown', - 'startDate': 1941 + "note": "Date(s) uncertain or unknown", + "startDate": 1941, } ] - assert create_publication_statement(data.get('provisionActivity')[0]) == [ - 'Aurillac : Impr. moderne, [1941?]' + assert create_publication_statement(data.get("provisionActivity")[0]) == [ + "Aurillac : Impr. moderne, [1941?]" ] @@ -2368,41 +2020,39 @@ def test_marc21_to_provision_activity_1_place_1_agent_chi_hani(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('provisionActivity') == [ + assert data.get("provisionActivity") == [ { - 'type': 'bf:Publication', - 'place': [{ - 'country': 'cc' - }], - 'statement': [ + "type": "bf:Publication", + "place": [{"country": "cc"}], + "statement": [ { - 'label': [ - {'value': 'Beijing'}, - {'value': '北京', 'language': 'chi-hani'} + "label": [ + {"value": "Beijing"}, + {"value": "北京", "language": "chi-hani"}, ], - 'type': 'bf:Place' + "type": "bf:Place", }, { - 'label': [ - {'value': 'Beijing da xue chu ban she'}, - {'value': '北京大学出版社', 'language': 'chi-hani'} + "label": [ + {"value": "Beijing da xue chu ban she"}, + {"value": "北京大学出版社", "language": "chi-hani"}, ], - 'type': 'bf:Agent' + "type": "bf:Agent", }, { - 'label': [ - {'value': '2017'}, - {'language': 'chi-hani', 'value': '2017'} + "label": [ + {"value": "2017"}, + {"language": "chi-hani", "value": "2017"}, ], - 'type': 'Date' - } + "type": "Date", + }, ], - 'startDate': 2017 + "startDate": 2017, } ] - assert create_publication_statement(data.get('provisionActivity')[0]) == [ - '北京 : 北京大学出版社, 2017', - 'Beijing : Beijing da xue chu ban she, 2017' + assert create_publication_statement(data.get("provisionActivity")[0]) == [ + "北京 : 北京大学出版社, 2017", + "Beijing : Beijing da xue chu ban she, 2017", ] marc21xml = """ @@ -2433,40 +2083,39 @@ def test_marc21_to_provision_activity_1_place_1_agent_chi_hani(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('provisionActivity') == [{ - 'type': 'bf:Publication', - 'place': [{ - 'country': 'cc' - }], - 'statement': [ - { - 'label': [ - {'value': 'Beijing'}, - {'value': '北京', 'language': 'chi-hani'} - ], - 'type': 'bf:Place' - }, - { - 'label': [ - {'value': 'Beijing da xue chu ban she'}, - {'value': '北京大学出版社', - 'language': 'chi-hani'} - ], - 'type': 'bf:Agent' - }, - { - 'label': [ - {'value': '2017'}, - {'language': 'chi-hani', 'value': '2017'} - ], - 'type': 'Date' - } - ], - 'startDate': 2017 - }] - assert create_publication_statement(data.get('provisionActivity')[0]) == [ - '北京 : 北京大学出版社, 2017', - 'Beijing : Beijing da xue chu ban she, 2017' + assert data.get("provisionActivity") == [ + { + "type": "bf:Publication", + "place": [{"country": "cc"}], + "statement": [ + { + "label": [ + {"value": "Beijing"}, + {"value": "北京", "language": "chi-hani"}, + ], + "type": "bf:Place", + }, + { + "label": [ + {"value": "Beijing da xue chu ban she"}, + {"value": "北京大学出版社", "language": "chi-hani"}, + ], + "type": "bf:Agent", + }, + { + "label": [ + {"value": "2017"}, + {"language": "chi-hani", "value": "2017"}, + ], + "type": "Date", + }, + ], + "startDate": 2017, + } + ] + assert 
create_publication_statement(data.get("provisionActivity")[0]) == [ + "北京 : 北京大学出版社, 2017", + "Beijing : Beijing da xue chu ban she, 2017", ] @@ -2493,26 +2142,18 @@ def test_marc21_to_edition_statement_one_field_250(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('editionStatement') == [{ - 'editionDesignation': [ - { - 'value': 'Di 3 ban' - }, - { - 'value': '第3版', - 'language': 'chi-hani' - } - ], - 'responsibility': [ - { - 'value': 'Zeng Lingliang zhu bian' - }, - { - 'value': '曾令良主编', - 'language': 'chi-hani' - } - ] - }] + assert data.get("editionStatement") == [ + { + "editionDesignation": [ + {"value": "Di 3 ban"}, + {"value": "第3版", "language": "chi-hani"}, + ], + "responsibility": [ + {"value": "Zeng Lingliang zhu bian"}, + {"value": "曾令良主编", "language": "chi-hani"}, + ], + } + ] def test_marc21_to_edition_statement_two_fields_250(): @@ -2542,37 +2183,22 @@ def test_marc21_to_edition_statement_two_fields_250(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('editionStatement') == [{ - 'editionDesignation': [ - { - 'value': 'Di 3 ban' - }, - { - 'value': '第3版', - 'language': 'chi-hani' - } - ], - 'responsibility': [ - { - 'value': 'Zeng Lingliang zhu bian' - }, - { - 'value': '曾令良主编', - 'language': 'chi-hani' - } - ] - }, { - 'editionDesignation': [ - { - 'value': 'Edition' - } - ], - 'responsibility': [ - { - 'value': 'Responsibility' - } - ] - }] + assert data.get("editionStatement") == [ + { + "editionDesignation": [ + {"value": "Di 3 ban"}, + {"value": "第3版", "language": "chi-hani"}, + ], + "responsibility": [ + {"value": "Zeng Lingliang zhu bian"}, + {"value": "曾令良主编", "language": "chi-hani"}, + ], + }, + { + "editionDesignation": [{"value": "Edition"}], + "responsibility": [{"value": "Responsibility"}], + }, + ] def test_marc21_to_edition_statement_with_two_subfield_a(): @@ -2601,26 +2227,18 @@ def test_marc21_to_edition_statement_with_two_subfield_a(): marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('editionStatement') == [{ - 'editionDesignation': [ - { - 'value': 'Di 3 ban' - }, - { - 'value': '第3版', - 'language': 'chi-hani' - } - ], - 'responsibility': [ - { - 'value': 'Zeng Lingliang zhu bian' - }, - { - 'value': '曾令良主编', - 'language': 'chi-hani' - } - ] - }] + assert data.get("editionStatement") == [ + { + "editionDesignation": [ + {"value": "Di 3 ban"}, + {"value": "第3版", "language": "chi-hani"}, + ], + "responsibility": [ + {"value": "Zeng Lingliang zhu bian"}, + {"value": "曾令良主编", "language": "chi-hani"}, + ], + } + ] def test_marc21_to_edition_statement_with_one_bad_field_250(): @@ -2655,32 +2273,19 @@ def test_marc21_to_edition_statement_with_one_bad_field_250(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('editionStatement') == [{ - 'editionDesignation': [ - { - 'value': 'Di 3 ban' - }, - { - 'value': '第3版', - 'language': 'chi-hani' - } - ], - 'responsibility': [ - { - 'value': 'Zeng Lingliang zhu bian' - }, - { - 'value': '曾令良主编', - 'language': 'chi-hani' - } - ] - }, { - 'editionDesignation': [ - { - 'value': 'Edition' - } - ] - }] + assert data.get("editionStatement") == [ + { + "editionDesignation": [ + {"value": "Di 3 ban"}, + {"value": "第3版", "language": "chi-hani"}, + ], + "responsibility": [ + {"value": "Zeng Lingliang zhu bian"}, + {"value": "曾令良主编", "language": "chi-hani"}, + ], + }, + {"editionDesignation": [{"value": "Edition"}]}, + ] def 
test_marc21_to_provision_activity_1_place_1_agent_ara_arab(): @@ -2708,43 +2313,42 @@ def test_marc21_to_provision_activity_1_place_1_agent_ara_arab(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('provisionActivity') == [ + assert data.get("provisionActivity") == [ { - 'type': 'bf:Publication', - 'place': [{ - 'country': 'ua' - }], - 'statement': [ + "type": "bf:Publication", + "place": [{"country": "ua"}], + "statement": [ { - 'label': [ - {'value': 'al-Qāhirah'}, - {'value': 'القاهرة', - 'language': 'ara-arab'} + "label": [ + {"value": "al-Qāhirah"}, + {"value": "القاهرة", "language": "ara-arab"}, ], - 'type': 'bf:Place' + "type": "bf:Place", }, { - 'label': [ - {'value': 'Al-Hayʾat al-ʿāmmah li quṣūr al-thaqāfah'}, - {'value': 'الهيئة العامة لقصور الثقافة', - 'language': 'ara-arab'} + "label": [ + {"value": "Al-Hayʾat al-ʿāmmah li quṣūr al-thaqāfah"}, + { + "value": "الهيئة العامة لقصور الثقافة", + "language": "ara-arab", + }, ], - 'type': 'bf:Agent' + "type": "bf:Agent", }, { - 'label': [ - {'value': '2014'}, - {'value': '2014', 'language': 'ara-arab'} + "label": [ + {"value": "2014"}, + {"value": "2014", "language": "ara-arab"}, ], - 'type': 'Date' - } + "type": "Date", + }, ], - 'startDate': 2014 + "startDate": 2014, } ] - assert create_publication_statement(data.get('provisionActivity')[0]) == [ - 'القاهرة : الهيئة العامة لقصور الثقافة, 2014', - 'al-Qāhirah : Al-Hayʾat al-ʿāmmah li quṣūr al-thaqāfah, 2014' + assert create_publication_statement(data.get("provisionActivity")[0]) == [ + "القاهرة : الهيئة العامة لقصور الثقافة, 2014", + "al-Qāhirah : Al-Hayʾat al-ʿāmmah li quṣūr al-thaqāfah, 2014", ] @@ -2791,59 +2395,53 @@ def test_marc21_to_provision_activity_2_places_2_agents_rus_cyrl(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('provisionActivity') == [ + assert data.get("provisionActivity") == [ { - 'type': 'bf:Publication', - 'place': [{ - 'country': 'ru' - }], - 'statement': [ + "type": "bf:Publication", + "place": [{"country": "ru"}], + "statement": [ { - 'label': [ - {'value': 'Ierusalim'}, - {'value': 'Иерусалим', - 'language': 'rus-cyrl'} + "label": [ + {"value": "Ierusalim"}, + {"value": "Иерусалим", "language": "rus-cyrl"}, ], - 'type': 'bf:Place' + "type": "bf:Place", }, { - 'label': [ - {'value': 'Gesharim'}, - {'value': 'Гешарим', - 'language': 'rus-cyrl'} + "label": [ + {"value": "Gesharim"}, + {"value": "Гешарим", "language": "rus-cyrl"}, ], - 'type': 'bf:Agent' + "type": "bf:Agent", }, { - 'label': [ - {'value': 'Moskva'}, - {'value': 'Москва', - 'language': 'rus-cyrl'} + "label": [ + {"value": "Moskva"}, + {"value": "Москва", "language": "rus-cyrl"}, ], - 'type': 'bf:Place' + "type": "bf:Place", }, { - 'label': [ - {'value': 'Mosty Kulʹtury'}, - {'value': 'Мосты Культуры', - 'language': 'rus-cyrl'} + "label": [ + {"value": "Mosty Kulʹtury"}, + {"value": "Мосты Культуры", "language": "rus-cyrl"}, ], - 'type': 'bf:Agent' + "type": "bf:Agent", }, { - 'label': [ - {'value': '2017'}, - {'language': 'rus-cyrl', 'value': '2017'} + "label": [ + {"value": "2017"}, + {"language": "rus-cyrl", "value": "2017"}, ], - 'type': 'Date' - } + "type": "Date", + }, ], - 'startDate': 2017 + "startDate": 2017, } ] - assert create_publication_statement(data.get('provisionActivity')[0]) == [ - 'Иерусалим : Гешарим ; Москва : Мосты Культуры, 2017', - 'Ierusalim : Gesharim ; Moskva : Mosty Kulʹtury, 2017' + assert create_publication_statement(data.get("provisionActivity")[0]) == [ + "Иерусалим : Гешарим 
; Москва : Мосты Культуры, 2017", + "Ierusalim : Gesharim ; Moskva : Mosty Kulʹtury, 2017", ] @@ -2868,29 +2466,26 @@ def test_marc21_to_provision_activity_exceptions(capsys): marc21json = create_record(marc21xml) data = marc21.do(marc21json) out, err = capsys.readouterr() - assert data.get('provisionActivity') == [ + assert data.get("provisionActivity") == [ { - 'type': 'bf:Publication', - 'place': [{ - 'country': 'ru' - }], - 'statement': [ + "type": "bf:Publication", + "place": [{"country": "ru"}], + "statement": [ { - 'label': [ - {'value': 'Ierusalim'}, - {'value': 'Иерусалим', - 'language': 'und-cyrl'} + "label": [ + {"value": "Ierusalim"}, + {"value": "Иерусалим", "language": "und-cyrl"}, ], - 'type': 'bf:Place' + "type": "bf:Place", }, ], - 'startDate': 2017 + "startDate": 2017, } ] - assert out.strip().replace('\n', '') == ( - 'WARNING NOT A LANGUAGE 008:\t???\t???\t\t' - 'WARNING LANGUAGE SCRIPTS:' - '\t???\t???\tcyrl\t008:\tund\t041$a:\t[]\t041$h:\t[]' + assert out.strip().replace("\n", "") == ( + "WARNING NOT A LANGUAGE 008:\t???\t???\t\t" + "WARNING LANGUAGE SCRIPTS:" + "\t???\t???\tcyrl\t008:\tund\t041$a:\t[]\t041$h:\t[]" ) marc21xml = """ @@ -2903,12 +2498,14 @@ def test_marc21_to_provision_activity_exceptions(capsys): marc21json = create_record(marc21xml) data = marc21.do(marc21json) out, err = capsys.readouterr() - assert out.strip() == ('WARNING NOT A LANGUAGE 008:\t???\t???\t\t\n' - 'WARNING INIT CANTONS:\t???\t???\tchbe\t\n' - 'WARNING NOT A COUNTRY:\t???\t???\t\t\n' - 'WARNING START DATE 264:\t???\t???\tNone\t\n' - 'WARNING START DATE 008:\t???\t???\tNone\t\n' - 'WARNING PROVISION ACTIVITY:\t???\t???') + assert out.strip() == ( + "WARNING NOT A LANGUAGE 008:\t???\t???\t\t\n" + "WARNING INIT CANTONS:\t???\t???\tchbe\t\n" + "WARNING NOT A COUNTRY:\t???\t???\t\t\n" + "WARNING START DATE 264:\t???\t???\tNone\t\n" + "WARNING START DATE 008:\t???\t???\tNone\t\n" + "WARNING PROVISION ACTIVITY:\t???\t???" + ) # 300 [$a repetitive]: extent, duration: @@ -2917,6 +2514,7 @@ def test_marc21_to_provision_activity_exceptions(capsys): # 300 [$c repetitive]: format # 300 [$e non epetitive]: accompanying material note + def test_marc21_to_physical_description_plano(): """Test dojson extent, productionMethod.""" @@ -2932,15 +2530,13 @@ def test_marc21_to_physical_description_plano(): marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('productionMethod') == \ - ['rdapm:1007', 'rdapm:1009'] - assert data.get('extent') == '116 p.' - assert data.get('bookFormat') == ['in-plano'] - assert data.get('dimensions') == ['plano 22 cm'] - assert data.get('note') == [{ - 'noteType': 'otherPhysicalDetails', - 'label': 'litho photogravure gravure' - }] + assert data.get("productionMethod") == ["rdapm:1007", "rdapm:1009"] + assert data.get("extent") == "116 p." + assert data.get("bookFormat") == ["in-plano"] + assert data.get("dimensions") == ["plano 22 cm"] + assert data.get("note") == [ + {"noteType": "otherPhysicalDetails", "label": "litho photogravure gravure"} + ] def test_marc21_to_physical_description_with_material_note(): @@ -2959,18 +2555,13 @@ def test_marc21_to_physical_description_with_material_note(): marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('productionMethod') == \ - ['rdapm:1007', 'rdapm:1009'] - assert data.get('extent') == '116 p.' 
- assert data.get('bookFormat') == ['in-plano'] - assert data.get('dimensions') == ['plano 22 cm'] - assert data.get('note') == [{ - 'noteType': 'otherPhysicalDetails', - 'label': 'litho photogravure gravure' - }, { - 'noteType': 'accompanyingMaterial', - 'label': '1 atlas' - } + assert data.get("productionMethod") == ["rdapm:1007", "rdapm:1009"] + assert data.get("extent") == "116 p." + assert data.get("bookFormat") == ["in-plano"] + assert data.get("dimensions") == ["plano 22 cm"] + assert data.get("note") == [ + {"noteType": "otherPhysicalDetails", "label": "litho photogravure gravure"}, + {"noteType": "accompanyingMaterial", "label": "1 atlas"}, ] @@ -2990,24 +2581,15 @@ def test_marc21_to_physical_description_with_material_note_plus(): marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('productionMethod') == \ - ['rdapm:1007', 'rdapm:1009'] - assert data.get('extent') == '116 p.' - assert data.get('bookFormat') == ['in-plano'] - assert data.get('dimensions') == ['plano 22 cm'] - assert data.get('note') == [{ - 'noteType': 'otherPhysicalDetails', - 'label': 'litho photogravure gravure' - }, { - 'noteType': 'accompanyingMaterial', - 'label': '1 atlas' - }, { - 'noteType': 'accompanyingMaterial', - 'label': '3 cartes' - }, { - 'noteType': 'accompanyingMaterial', - 'label': 'XXIX f. de pl.' - } + assert data.get("productionMethod") == ["rdapm:1007", "rdapm:1009"] + assert data.get("extent") == "116 p." + assert data.get("bookFormat") == ["in-plano"] + assert data.get("dimensions") == ["plano 22 cm"] + assert data.get("note") == [ + {"noteType": "otherPhysicalDetails", "label": "litho photogravure gravure"}, + {"noteType": "accompanyingMaterial", "label": "1 atlas"}, + {"noteType": "accompanyingMaterial", "label": "3 cartes"}, + {"noteType": "accompanyingMaterial", "label": "XXIX f. de pl."}, ] @@ -3024,9 +2606,9 @@ def test_marc21_to_physical_description_300_without_b(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('extent') == '191 p.' - assert data.get('dimensions') == ['21 cm'] - assert data.get('note') is None + assert data.get("extent") == "191 p." + assert data.get("dimensions") == ["21 cm"] + assert data.get("note") is None def test_marc21_to_physical_description_ill_in_8(): @@ -3043,17 +2625,16 @@ def test_marc21_to_physical_description_ill_in_8(): marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('productionMethod') == ['rdapm:1007'] - assert data.get('extent') == '1 DVD-R (50 min.)' - assert data.get('duration') == ['50 min.'] - assert data.get('illustrativeContent') == ['illustrations'] - assert data.get('colorContent') == ['rdacc:1002'] - assert data.get('bookFormat') == ['8ᵒ'] - assert data.get('dimensions') == ['in-8, 22 cm'] - assert data.get('note') == [{ - 'noteType': 'otherPhysicalDetails', - 'label': 'litho Ill.en n. et bl.' - }] + assert data.get("productionMethod") == ["rdapm:1007"] + assert data.get("extent") == "1 DVD-R (50 min.)" + assert data.get("duration") == ["50 min."] + assert data.get("illustrativeContent") == ["illustrations"] + assert data.get("colorContent") == ["rdacc:1002"] + assert data.get("bookFormat") == ["8ᵒ"] + assert data.get("dimensions") == ["in-8, 22 cm"] + assert data.get("note") == [ + {"noteType": "otherPhysicalDetails", "label": "litho Ill.en n. 
et bl."} + ] def test_marc21_to_physical_description_multiple_300(): @@ -3075,21 +2656,19 @@ def test_marc21_to_physical_description_multiple_300(): marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('productionMethod') == \ - ['rdapm:1007', 'rdapm:1009'] - assert data.get('extent') == '1 DVD-R (50 min.)' - assert data.get('duration') == ['50 min.'] - assert data.get('illustrativeContent') == ['illustrations', 'photographs'] - assert data.get('colorContent') == ['rdacc:1002'] - assert data.get('bookFormat') == ['8ᵒ', 'in-plano'] - assert data.get('dimensions') == ['in-8, 22 cm', 'plano 22 cm'] - assert data.get('note') == [{ - 'noteType': 'otherPhysicalDetails', - 'label': 'litho photogravure gravure n. et bl.' - }, { - 'noteType': 'otherPhysicalDetails', - 'label': 'litho Ill.en n. et bl.' - } + assert data.get("productionMethod") == ["rdapm:1007", "rdapm:1009"] + assert data.get("extent") == "1 DVD-R (50 min.)" + assert data.get("duration") == ["50 min."] + assert data.get("illustrativeContent") == ["illustrations", "photographs"] + assert data.get("colorContent") == ["rdacc:1002"] + assert data.get("bookFormat") == ["8ᵒ", "in-plano"] + assert data.get("dimensions") == ["in-8, 22 cm", "plano 22 cm"] + assert data.get("note") == [ + { + "noteType": "otherPhysicalDetails", + "label": "litho photogravure gravure n. et bl.", + }, + {"noteType": "otherPhysicalDetails", "label": "litho Ill.en n. et bl."}, ] @@ -3112,18 +2691,22 @@ def test_marc21_to_series_statement(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('seriesStatement') == [{ - 'seriesTitle': [{'value': 'Handbuch der Orientalistik'}], - 'seriesEnumeration': [{'value': 'Abt. 7'}], - 'subseriesStatement': [{ - 'subseriesTitle': [{'value': 'Kunst und Archäologie'}], - 'subseriesEnumeration': [{'value': 'Bd. 6'}] - }, { - 'subseriesTitle': [{'value': 'Südostasien'}], - 'subseriesEnumeration': [{'value': 'Abschnitt 6'}] - } - ] - }] + assert data.get("seriesStatement") == [ + { + "seriesTitle": [{"value": "Handbuch der Orientalistik"}], + "seriesEnumeration": [{"value": "Abt. 7"}], + "subseriesStatement": [ + { + "subseriesTitle": [{"value": "Kunst und Archäologie"}], + "subseriesEnumeration": [{"value": "Bd. 6"}], + }, + { + "subseriesTitle": [{"value": "Südostasien"}], + "subseriesEnumeration": [{"value": "Abschnitt 6"}], + }, + ], + } + ] def test_marc21_to_series_statement_mutiple_490(): @@ -3151,29 +2734,36 @@ def test_marc21_to_series_statement_mutiple_490(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('seriesStatement') == [{ - 'seriesTitle': [{'value': 'Handbuch der Orientalistik 1'}], - 'seriesEnumeration': [{'value': 'Abt. 7'}], - 'subseriesStatement': [{ - 'subseriesTitle': [{'value': 'Kunst und Archäologie'}], - 'subseriesEnumeration': [{'value': 'Bd. 6'}] - }, { - 'subseriesTitle': [{'value': 'Südostasien'}], - 'subseriesEnumeration': [{'value': 'Abschnitt 6'}] - } - ] - }, { - 'seriesTitle': [{'value': 'Handbuch der Orientalistik 2'}], - 'seriesEnumeration': [{'value': 'Abt. 7'}], - 'subseriesStatement': [{ - 'subseriesTitle': [{'value': 'Kunst und Archäologie'}], - 'subseriesEnumeration': [{'value': 'Bd. 6'}] - }, { - 'subseriesTitle': [{'value': 'Südostasien'}], - 'subseriesEnumeration': [{'value': 'Abschnitt 6'}] - } - ] - }] + assert data.get("seriesStatement") == [ + { + "seriesTitle": [{"value": "Handbuch der Orientalistik 1"}], + "seriesEnumeration": [{"value": "Abt. 
7"}], + "subseriesStatement": [ + { + "subseriesTitle": [{"value": "Kunst und Archäologie"}], + "subseriesEnumeration": [{"value": "Bd. 6"}], + }, + { + "subseriesTitle": [{"value": "Südostasien"}], + "subseriesEnumeration": [{"value": "Abschnitt 6"}], + }, + ], + }, + { + "seriesTitle": [{"value": "Handbuch der Orientalistik 2"}], + "seriesEnumeration": [{"value": "Abt. 7"}], + "subseriesStatement": [ + { + "subseriesTitle": [{"value": "Kunst und Archäologie"}], + "subseriesEnumeration": [{"value": "Bd. 6"}], + }, + { + "subseriesTitle": [{"value": "Südostasien"}], + "subseriesEnumeration": [{"value": "Abschnitt 6"}], + }, + ], + }, + ] # series.name: [490$a repetitive] @@ -3199,16 +2789,18 @@ def test_marc21_to_series_statement_with_alt_graphic(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('seriesStatement') == [{ - 'seriesTitle': [ - {'value': "Žizn' zamečatel'nych ljudej"}, - {'value': 'Жизнь замечательных людей', 'language': 'rus-cyrl'} - ], - 'seriesEnumeration': [ - {'value': 'vypusk 4, 357'}, - {'value': 'выпуск 4, 357', 'language': 'rus-cyrl'} - ] - }] + assert data.get("seriesStatement") == [ + { + "seriesTitle": [ + {"value": "Žizn' zamečatel'nych ljudej"}, + {"value": "Жизнь замечательных людей", "language": "rus-cyrl"}, + ], + "seriesEnumeration": [ + {"value": "vypusk 4, 357"}, + {"value": "выпуск 4, 357", "language": "rus-cyrl"}, + ], + } + ] # series.name: [490$a repetitive] @@ -3228,16 +2820,18 @@ def test_marc21_to_series_statement_with_missig_subfield_v(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('seriesStatement') == [{ - 'seriesTitle': [{'value': 'Handbuch der Orientalistik'}], - 'subseriesStatement': [{ - 'subseriesTitle': [{'value': 'Kunst und Archäologie'}] - }, { - 'subseriesTitle': [{'value': 'Südostasien'}], - 'subseriesEnumeration': [{'value': 'Abschnitt 6'}] - } - ] - }] + assert data.get("seriesStatement") == [ + { + "seriesTitle": [{"value": "Handbuch der Orientalistik"}], + "subseriesStatement": [ + {"subseriesTitle": [{"value": "Kunst und Archäologie"}]}, + { + "subseriesTitle": [{"value": "Südostasien"}], + "subseriesEnumeration": [{"value": "Abschnitt 6"}], + }, + ], + } + ] # series.name: [490$a repetitive] @@ -3257,7 +2851,7 @@ def test_marc21_to_series_statement_with_missig_subfield_a(): marc21json = create_record(marc21xml) data = marc21.do(marc21json) # should return None because of the bad formating of field 490 - assert data.get('seriesStatement') is None + assert data.get("seriesStatement") is None # series.name: [490$a repetitive] @@ -3280,15 +2874,18 @@ def test_marc21_to_series_statement_with_succesive_subfield_v(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('seriesStatement') == [{ - 'seriesTitle': [{'value': 'Handbuch der Orientalistik'}], - 'seriesEnumeration': [{'value': 'Abt. 7, Bd. 7'}], - 'subseriesStatement': [{ - 'subseriesTitle': [{'value': 'Südostasien'}], - 'subseriesEnumeration': [{'value': 'Abschnitt 6, Bd. 6'}] - } - ] - }] + assert data.get("seriesStatement") == [ + { + "seriesTitle": [{"value": "Handbuch der Orientalistik"}], + "seriesEnumeration": [{"value": "Abt. 7, Bd. 7"}], + "subseriesStatement": [ + { + "subseriesTitle": [{"value": "Südostasien"}], + "subseriesEnumeration": [{"value": "Abschnitt 6, Bd. 
6"}], + } + ], + } + ] # summary: [520$a repetitive] @@ -3305,10 +2902,9 @@ def test_marc21_to_summary(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('summary') == [{ - "label": [{"value": "This book is about"}], - "source": "source" - }] + assert data.get("summary") == [ + {"label": [{"value": "This book is about"}], "source": "source"} + ] marc21xml = """ @@ -3324,14 +2920,16 @@ def test_marc21_to_summary(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('summary') == [{ - 'label': [{ - 'value': 'Za wen fen wei si bu fen lu ru', - }, { - 'value': '杂文分为四部分录入', - 'language': 'und-hani' - }] - }] + assert data.get("summary") == [ + { + "label": [ + { + "value": "Za wen fen wei si bu fen lu ru", + }, + {"value": "杂文分为四部分录入", "language": "und-hani"}, + ] + } + ] def test_marc21_to_intended_audience(): @@ -3351,13 +2949,13 @@ def test_marc21_to_intended_audience(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('intendedAudience') == [{ - 'audienceType': 'understanding_level', - 'value': 'target_understanding_teenagers_12_15' - }, { - 'audienceType': 'filmage_ch', - 'value': 'from the age of 12' - }] + assert data.get("intendedAudience") == [ + { + "audienceType": "understanding_level", + "value": "target_understanding_teenagers_12_15", + }, + {"audienceType": "filmage_ch", "value": "from the age of 12"}, + ] marc21xml = """ @@ -3369,10 +2967,9 @@ def test_marc21_to_intended_audience(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('intendedAudience') == [{ - 'audienceType': 'undefined', - 'value': 'Ado (12-15 ans)' - }] + assert data.get("intendedAudience") == [ + {"audienceType": "undefined", "value": "Ado (12-15 ans)"} + ] def test_marc21_to_original_title_from_500(): @@ -3387,7 +2984,7 @@ def test_marc21_to_original_title_from_500(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('originalTitle') == ['Harry Potter secrets'] + assert data.get("originalTitle") == ["Harry Potter secrets"] def test_marc21_to_notes_from_500(): @@ -3405,13 +3002,9 @@ def test_marc21_to_notes_from_500(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('note') == [{ - 'noteType': 'general', - 'label': 'note 1' - }, { - 'noteType': 'general', - 'label': 'note 2' - } + assert data.get("note") == [ + {"noteType": "general", "label": "note 1"}, + {"noteType": "general", "label": "note 2"}, ] @@ -3433,13 +3026,9 @@ def test_marc21_to_notes_from_510(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('note') == [{ - 'noteType': 'cited_by', - 'label': 'note 1 1c' - }, { - 'noteType': 'cited_by', - 'label': 'note 2 2c 2x' - } + assert data.get("note") == [ + {"noteType": "cited_by", "label": "note 1 1c"}, + {"noteType": "cited_by", "label": "note 2 2c 2x"}, ] @@ -3464,19 +3053,11 @@ def test_marc21_to_notes_from_530_545_555_580(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('note') == [{ - 'noteType': 'general', - 'label': 'note 530' - }, { - 'noteType': 'general', - 'label': 'note 545' - }, { - 'noteType': 'general', - 'label': 'note 555' - }, { - 'noteType': 'general', - 'label': 'note 580' - } + assert data.get("note") == [ + {"noteType": "general", "label": "note 530"}, + {"noteType": "general", "label": "note 545"}, + {"noteType": "general", "label": "note 555"}, + {"noteType": 
"general", "label": "note 580"}, ] @@ -3500,14 +3081,13 @@ def test_marc21_to_classification_from_050(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('classification') == [{ - 'type': 'bf:ClassificationLcc', - 'classificationPortion': 'JK468.I6', - 'assigner': 'LOC' - }, { - 'type': 'bf:ClassificationLcc', - 'classificationPortion': 'JK500.I8' - } + assert data.get("classification") == [ + { + "type": "bf:ClassificationLcc", + "classificationPortion": "JK468.I6", + "assigner": "LOC", + }, + {"type": "bf:ClassificationLcc", "classificationPortion": "JK500.I8"}, ] @@ -3529,14 +3109,13 @@ def test_marc21_to_classification_from_060(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('classification') == [{ - 'type': 'bf:ClassificationNlm', - 'classificationPortion': 'WM 460' - }, { - 'type': 'bf:ClassificationNlm', - 'classificationPortion': 'WM 800', - 'assigner': 'NLM' - } + assert data.get("classification") == [ + {"type": "bf:ClassificationNlm", "classificationPortion": "WM 460"}, + { + "type": "bf:ClassificationNlm", + "classificationPortion": "WM 800", + "assigner": "NLM", + }, ] @@ -3566,26 +3145,31 @@ def test_marc21_to_classification_from_080(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('classification') == [{ - 'type': 'bf:ClassificationUdc', - 'classificationPortion': '821.134.2-31', - 'edition': "Full edition" - }, { - 'type': 'bf:ClassificationUdc', - 'classificationPortion': '900.135.3-32', - }, { - 'type': 'bf:ClassificationUdc', - 'classificationPortion': '700.138.1-45', - 'edition': "Full edition, dollar_2" - }, { - 'type': 'bf:ClassificationUdc', - 'classificationPortion': '600.139.1-46', - 'edition': "Abridged edition, dollar_2" - }, { - 'type': 'bf:ClassificationUdc', - 'classificationPortion': '500.156.1-47', - 'edition': "Abridged edition" - } + assert data.get("classification") == [ + { + "type": "bf:ClassificationUdc", + "classificationPortion": "821.134.2-31", + "edition": "Full edition", + }, + { + "type": "bf:ClassificationUdc", + "classificationPortion": "900.135.3-32", + }, + { + "type": "bf:ClassificationUdc", + "classificationPortion": "700.138.1-45", + "edition": "Full edition, dollar_2", + }, + { + "type": "bf:ClassificationUdc", + "classificationPortion": "600.139.1-46", + "edition": "Abridged edition, dollar_2", + }, + { + "type": "bf:ClassificationUdc", + "classificationPortion": "500.156.1-47", + "edition": "Abridged edition", + }, ] @@ -3616,24 +3200,28 @@ def test_marc21_to_classification_from_082(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('classification') == [{ - 'type': 'bf:ClassificationDdc', - 'classificationPortion': '820', - 'edition': "Abridged edition, 15" - }, { - 'type': 'bf:ClassificationDdc', - 'classificationPortion': '821', - 'edition': "Abridged edition" - }, { - 'type': 'bf:ClassificationDdc', - 'classificationPortion': '822', - 'edition': "15", - 'assigner': 'LOC' - }, { - 'type': 'bf:ClassificationDdc', - 'classificationPortion': '823', - 'edition': "Abridged edition, 15" - } + assert data.get("classification") == [ + { + "type": "bf:ClassificationDdc", + "classificationPortion": "820", + "edition": "Abridged edition, 15", + }, + { + "type": "bf:ClassificationDdc", + "classificationPortion": "821", + "edition": "Abridged edition", + }, + { + "type": "bf:ClassificationDdc", + "classificationPortion": "822", + "edition": "15", + "assigner": "LOC", + }, + { + 
"type": "bf:ClassificationDdc", + "classificationPortion": "823", + "edition": "Abridged edition, 15", + }, ] @@ -3656,22 +3244,22 @@ def test_marc21_to_subjects_from_980_2_factum(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('classification') is None - assert data.get('subjects') == [{ - 'entity': { - 'type': 'bf:Person', - 'authorized_access_point': - 'Conti, Louis de Bourbon, prince de', - 'source': 'Factum', + assert data.get("classification") is None + assert data.get("subjects") == [ + { + "entity": { + "type": "bf:Person", + "authorized_access_point": "Conti, Louis de Bourbon, prince de", + "source": "Factum", } - }, { - 'entity': { - 'type': 'bf:Person', - 'authorized_access_point': - 'Lesdiguières, Marie-Françoise de Gondi', - 'source': 'Factum', + }, + { + "entity": { + "type": "bf:Person", + "authorized_access_point": "Lesdiguières, Marie-Françoise de Gondi", + "source": "Factum", } - } + }, ] @@ -3697,14 +3285,16 @@ def test_marc21_to_classification_from_980_2_musg_musi(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('classification') == [{ - 'type': 'classification_musicale_genres', - 'classificationPortion': 'Opéra', - 'subdivision': ['soli, choeur, orchestre', '1851-1900'] - }, { - 'type': 'classification_musicale_instruments', - 'classificationPortion': 'soli, choeur, piano (adaptation)' - } + assert data.get("classification") == [ + { + "type": "classification_musicale_genres", + "classificationPortion": "Opéra", + "subdivision": ["soli, choeur, orchestre", "1851-1900"], + }, + { + "type": "classification_musicale_instruments", + "classificationPortion": "soli, choeur, piano (adaptation)", + }, ] @@ -3730,14 +3320,13 @@ def test_marc21_to_classification_from_980_2_brp_and_dr_sys(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('classification') == [{ - 'type': 'classification_brunetparguez', - 'classificationPortion': 'brp_value', - 'subdivision': ['brp_subdivision'] - }, { - 'type': 'classification_droit', - 'classificationPortion': 'loi' - } + assert data.get("classification") == [ + { + "type": "classification_brunetparguez", + "classificationPortion": "brp_value", + "subdivision": ["brp_subdivision"], + }, + {"type": "classification_droit", "classificationPortion": "loi"}, ] @@ -3759,13 +3348,9 @@ def test_marc21_to_frequency(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('frequency') == [{ - 'label': 'Annuel', - 'date': '1982-' - }, { - 'label': 'Irrégulier', - 'date': '1953-1981' - } + assert data.get("frequency") == [ + {"label": "Annuel", "date": "1982-"}, + {"label": "Irrégulier", "date": "1953-1981"}, ] # field 310 $a with trailing coma and missing $b, 321 ok @@ -3782,12 +3367,9 @@ def test_marc21_to_frequency(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('frequency') == [{ - 'label': 'Annuel' - }, { - 'label': 'Irrégulier', - 'date': '1953-1981' - } + assert data.get("frequency") == [ + {"label": "Annuel"}, + {"label": "Irrégulier", "date": "1953-1981"}, ] # field 310 ok, field 321 without $a @@ -3804,13 +3386,9 @@ def test_marc21_to_frequency(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('frequency') == [{ - 'label': 'Annuel', - 'date': '1982-' - }, { - 'label': 'missing_label', - 'date': '1953-1981' - } + assert data.get("frequency") == [ + {"label": "Annuel", "date": "1982-"}, + {"label": 
"missing_label", "date": "1953-1981"}, ] @@ -3826,8 +3404,7 @@ def test_marc21_to_sequence_numbering_from_one_362(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('sequence_numbering') == \ - '1890-1891 ; 1892/1893 ; 1894-1896/1897' + assert data.get("sequence_numbering") == "1890-1891 ; 1892/1893 ; 1894-1896/1897" def test_marc21_to_sequence_numbering_from_two_362(): @@ -3845,8 +3422,10 @@ def test_marc21_to_sequence_numbering_from_two_362(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('sequence_numbering') == \ - '1890-1891 ; 1892/1893 ; 1894-1896/1897 ; 1915/1917-1918/1921 ; 1929' + assert ( + data.get("sequence_numbering") + == "1890-1891 ; 1892/1893 ; 1894-1896/1897 ; 1915/1917-1918/1921 ; 1929" + ) def test_marc21_to_table_of_contents_from_505(): @@ -3866,9 +3445,9 @@ def test_marc21_to_table_of_contents_from_505(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('tableOfContents') == [ + assert data.get("tableOfContents") == [ "Vol. 1: Le prisme noir trad. de l'anglais", - 'Vol. 2 : Le couteau aveuglant' + "Vol. 2 : Le couteau aveuglant", ] @@ -3884,10 +3463,8 @@ def test_marc21_to_usage_and_access_policy(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('usageAndAccessPolicy') == [{ - 'type': 'bf:UsageAndAccessPolicy', - 'label': 'Les archives de C. Roussopoulos' - } + assert data.get("usageAndAccessPolicy") == [ + {"type": "bf:UsageAndAccessPolicy", "label": "Les archives de C. Roussopoulos"} ] marc21xml = """ @@ -3899,10 +3476,11 @@ def test_marc21_to_usage_and_access_policy(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('usageAndAccessPolicy') == [{ - 'type': 'bf:UsageAndAccessPolicy', - 'label': 'Les archives de Carole Roussopoulos' - } + assert data.get("usageAndAccessPolicy") == [ + { + "type": "bf:UsageAndAccessPolicy", + "label": "Les archives de Carole Roussopoulos", + } ] marc21xml = """ @@ -3917,13 +3495,12 @@ def test_marc21_to_usage_and_access_policy(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('usageAndAccessPolicy') == [{ - 'type': 'bf:UsageAndAccessPolicy', - 'label': 'Les archives de C. Roussopoulos' - }, { - 'type': 'bf:UsageAndAccessPolicy', - 'label': 'Les archives de Carole Roussopoulos' - } + assert data.get("usageAndAccessPolicy") == [ + {"type": "bf:UsageAndAccessPolicy", "label": "Les archives de C. Roussopoulos"}, + { + "type": "bf:UsageAndAccessPolicy", + "label": "Les archives de Carole Roussopoulos", + }, ] @@ -3943,7 +3520,7 @@ def test_marc21_to_credits_from_508(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('credits') == ['Ont également collaboré: Marco Praz'] + assert data.get("credits") == ["Ont également collaboré: Marco Praz"] def test_marc21_to_credits_from_511(): @@ -3961,7 +3538,7 @@ def test_marc21_to_credits_from_511(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('credits') == ["Participants ou interprètes: A. Kurmann"] + assert data.get("credits") == ["Participants ou interprètes: A. 
Kurmann"] # dissertation: [502$a repetitive] @@ -3981,14 +3558,16 @@ def test_marc21_to_dissertation(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('dissertation') == [{ - 'label': [{ - 'value': 'Za wen fen wei si bu fen lu ru', - }, { - 'value': '杂文分为四部分录入', - 'language': 'und-hani' - }] - }] + assert data.get("dissertation") == [ + { + "label": [ + { + "value": "Za wen fen wei si bu fen lu ru", + }, + {"value": "杂文分为四部分录入", "language": "und-hani"}, + ] + } + ] def test_marc21_to_supplementary_content_from_504(): @@ -4003,7 +3582,7 @@ def test_marc21_to_supplementary_content_from_504(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('supplementaryContent') == ['Bibliographie: p. 238-239'] + assert data.get("supplementaryContent") == ["Bibliographie: p. 238-239"] # part_of 773, 800, 830 @@ -4021,9 +3600,10 @@ def test_marc21_to_part_of(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('partOf') == [{ - 'document': {'$ref': 'https://bib.rero.ch/api/documents/123456'}, - 'numbering': [{'pages': '411'}] + assert data.get("partOf") == [ + { + "document": {"$ref": "https://bib.rero.ch/api/documents/123456"}, + "numbering": [{"pages": "411"}], } ] @@ -4039,18 +3619,15 @@ def test_marc21_to_part_of(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('partOf') == [{ - 'document': {'$ref': 'https://bib.rero.ch/api/documents/123456'}, - 'numbering': [{ - 'pages': '411' - }, - { - 'year': '2020', - 'volume': "1", - 'issue': "2", - 'pages': '300' - }] - }] + assert data.get("partOf") == [ + { + "document": {"$ref": "https://bib.rero.ch/api/documents/123456"}, + "numbering": [ + {"pages": "411"}, + {"year": "2020", "volume": "1", "issue": "2", "pages": "300"}, + ], + } + ] marc21xml = """ @@ -4063,15 +3640,12 @@ def test_marc21_to_part_of(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('partOf') == [{ - 'document': {'$ref': 'https://bib.rero.ch/api/documents/123456'}, - 'numbering': [ - { - 'volume': "1", - 'issue': "2", - 'pages': '300' - }] - }] + assert data.get("partOf") == [ + { + "document": {"$ref": "https://bib.rero.ch/api/documents/123456"}, + "numbering": [{"volume": "1", "issue": "2", "pages": "300"}], + } + ] marc21xml = """ @@ -4084,9 +3658,9 @@ def test_marc21_to_part_of(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('partOf') == [{ - 'document': {'$ref': 'https://bib.rero.ch/api/documents/123456'} - }] + assert data.get("partOf") == [ + {"document": {"$ref": "https://bib.rero.ch/api/documents/123456"}} + ] marc21xml = """ @@ -4100,17 +3674,15 @@ def test_marc21_to_part_of(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('partOf') == [{ - 'document': {'$ref': 'https://bib.rero.ch/api/documents/123456'}, - 'numbering': [{ - 'year': '2020', - 'pages': '411' - }, - { - 'volume': "1", - 'issue': "2" - }] - }] + assert data.get("partOf") == [ + { + "document": {"$ref": "https://bib.rero.ch/api/documents/123456"}, + "numbering": [ + {"year": "2020", "pages": "411"}, + {"volume": "1", "issue": "2"}, + ], + } + ] marc21xml = """ @@ -4123,9 +3695,9 @@ def test_marc21_to_part_of(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('partOf') == [{ - 'document': {'$ref': 'https://bib.rero.ch/api/documents/123456'} - }] + assert data.get("partOf") == [ + {"document": 
{"$ref": "https://bib.rero.ch/api/documents/123456"}} + ] marc21xml = """ @@ -4138,12 +3710,12 @@ def test_marc21_to_part_of(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('partOf') == [{ - 'document': {'$ref': 'https://bib.rero.ch/api/documents/123456'}, - 'numbering': [{ - 'volume': "256" - }] - }] + assert data.get("partOf") == [ + { + "document": {"$ref": "https://bib.rero.ch/api/documents/123456"}, + "numbering": [{"volume": "256"}], + } + ] def test_marc21_to_specific_document_relation(): @@ -4163,8 +3735,9 @@ def test_marc21_to_specific_document_relation(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('supplement') == [{ - '$ref': 'https://bib.rero.ch/api/documents/2000055', + assert data.get("supplement") == [ + { + "$ref": "https://bib.rero.ch/api/documents/2000055", } ] # two 770 with link @@ -4182,11 +3755,13 @@ def test_marc21_to_specific_document_relation(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('supplement') == [{ - '$ref': 'https://bib.rero.ch/api/documents/2000055', - }, { - '$ref': 'https://bib.rero.ch/api/documents/2000056', - } + assert data.get("supplement") == [ + { + "$ref": "https://bib.rero.ch/api/documents/2000055", + }, + { + "$ref": "https://bib.rero.ch/api/documents/2000056", + }, ] marc21xml = """ @@ -4198,7 +3773,7 @@ def test_marc21_to_specific_document_relation(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('supplement') == [{'label': 'Télé-top-Matin 2000055'}] + assert data.get("supplement") == [{"label": "Télé-top-Matin 2000055"}] marc21xml = """ @@ -4212,8 +3787,9 @@ def test_marc21_to_specific_document_relation(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('hasReproduction') == [{ - 'label': 'Master microfilm. Lausanne : BCU 1998 1 bobine ; 35 mm', + assert data.get("hasReproduction") == [ + { + "label": "Master microfilm. Lausanne : BCU 1998 1 bobine ; 35 mm", } ] @@ -4232,11 +3808,11 @@ def test_marc21_to_specific_document_relation(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('reproductionOf') == [{ - 'label': "Reproduction de l'édition de: Paris : H. Champion, 1931", - }, { - 'label': "Repro. sur microfilm: Ed. de Minuit, 1968. - 189 pages" - } + assert data.get("reproductionOf") == [ + { + "label": "Reproduction de l'édition de: Paris : H. Champion, 1931", + }, + {"label": "Repro. sur microfilm: Ed. de Minuit, 1968. 
- 189 pages"}, ] @@ -4260,10 +3836,11 @@ def test_marc21_to_part_of_without_link(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('partOf') is None - assert data.get('seriesStatement') == [{ - 'seriesTitle': [{'value': 'Stuart Hall : critical dialogues'}], - 'seriesEnumeration': [{'value': '411'}], + assert data.get("partOf") is None + assert data.get("seriesStatement") == [ + { + "seriesTitle": [{"value": "Stuart Hall : critical dialogues"}], + "seriesEnumeration": [{"value": "411"}], } ] @@ -4279,10 +3856,11 @@ def test_marc21_to_part_of_without_link(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('partOf') is None - assert data.get('seriesStatement') == [{ - 'seriesTitle': [{'value': 'Neuchâtel disparu / Walter Wehinger'}], - 'seriesEnumeration': [{'value': '8'}], + assert data.get("partOf") is None + assert data.get("seriesStatement") == [ + { + "seriesTitle": [{"value": "Neuchâtel disparu / Walter Wehinger"}], + "seriesEnumeration": [{"value": "8"}], } ] @@ -4298,10 +3876,11 @@ def test_marc21_to_part_of_without_link(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('partOf') is None - assert data.get('seriesStatement') == [{ - 'seriesTitle': [{'value': 'Neuchâtel disparu / Wehinger'}], - 'seriesEnumeration': [{'value': '8'}], + assert data.get("partOf") is None + assert data.get("seriesStatement") == [ + { + "seriesTitle": [{"value": "Neuchâtel disparu / Wehinger"}], + "seriesEnumeration": [{"value": "8"}], } ] @@ -4317,19 +3896,19 @@ def test_marc21_to_part_of_without_link(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('partOf') is None - assert data.get('seriesStatement') == [{ - 'seriesTitle': [{'value': 'Ramsès / Christian Jacq'}], - 'seriesEnumeration': [{'value': '1'}], + assert data.get("partOf") is None + assert data.get("seriesStatement") == [ + { + "seriesTitle": [{"value": "Ramsès / Christian Jacq"}], + "seriesEnumeration": [{"value": "1"}], + } + ] + assert data.get("work_access_point") == [ + { + "creator": {"preferred_name": "Jacq, Christian", "type": "bf:Person"}, + "title": "Ramsès", } ] - assert data.get('work_access_point') == [{ - 'creator': { - 'preferred_name': 'Jacq, Christian', - 'type': 'bf:Person' - }, - 'title': 'Ramsès' - }] marc21xml = """ @@ -4341,15 +3920,16 @@ def test_marc21_to_part_of_without_link(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('partOf') is None - assert data.get('seriesStatement') == [{ - 'seriesTitle': [{'value': 'Stuart Hall : critical dialogues'}], - 'seriesEnumeration': [{'value': '411'}], + assert data.get("partOf") is None + assert data.get("seriesStatement") == [ + { + "seriesTitle": [{"value": "Stuart Hall : critical dialogues"}], + "seriesEnumeration": [{"value": "411"}], } ] - assert data.get('work_access_point') == [{ - 'title': 'Stuart Hall : critical dialogues' - }] + assert data.get("work_access_point") == [ + {"title": "Stuart Hall : critical dialogues"} + ] marc21xml = """ @@ -4361,10 +3941,11 @@ def test_marc21_to_part_of_without_link(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('partOf') is None - assert data.get('seriesStatement') == [{ - 'seriesTitle': [{'value': 'Stuart Hall : critical dialogues'}], - 'seriesEnumeration': [{'value': '411'}], + assert data.get("partOf") is None + assert data.get("seriesStatement") == [ + { + "seriesTitle": [{"value": 
"Stuart Hall : critical dialogues"}], + "seriesEnumeration": [{"value": "411"}], } ] @@ -4383,8 +3964,8 @@ def test_marc21_to_part_of_without_link(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('partOf') is None - assert data.get('seriesStatement') is None + assert data.get("partOf") is None + assert data.get("seriesStatement") is None marc21xml = """ @@ -4400,11 +3981,12 @@ def test_marc21_to_part_of_without_link(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('partOf') is None + assert data.get("partOf") is None # the seriesStatement is generated form 490 and not from the 800 - assert data.get('seriesStatement') == [{ - 'seriesTitle': [{'value': 'Stuart Hall : all critical dialogues'}], - 'seriesEnumeration': [{'value': '512'}], + assert data.get("seriesStatement") == [ + { + "seriesTitle": [{"value": "Stuart Hall : all critical dialogues"}], + "seriesEnumeration": [{"value": "512"}], } ] @@ -4422,11 +4004,12 @@ def test_marc21_to_part_of_without_link(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('partOf') is None + assert data.get("partOf") is None # the seriesStatement is generated form 490 and not from the 800 - assert data.get('seriesStatement') == [{ - 'seriesTitle': [{'value': 'Stuart Hall : all critical dialogues'}], - 'seriesEnumeration': [{'value': '512'}], + assert data.get("seriesStatement") == [ + { + "seriesTitle": [{"value": "Stuart Hall : all critical dialogues"}], + "seriesEnumeration": [{"value": "512"}], } ] @@ -4469,35 +4052,32 @@ def test_marc21_to_part_of_with_multiple_800(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('partOf') == [{ - 'document': {'$ref': 'https://bib.rero.ch/api/documents/780067'}, - 'numbering': [{ - 'volume': "3" - }] - }] + assert data.get("partOf") == [ + { + "document": {"$ref": "https://bib.rero.ch/api/documents/780067"}, + "numbering": [{"volume": "3"}], + } + ] # the seriesStatement is generated form 490 and not from the 800 - assert data.get('seriesStatement') == [{ - 'seriesTitle': [{ - 'value': 'A la recherche de la Licorne / Mirallès' - }], - 'seriesEnumeration': [{'value': '3'}], - }, { - 'seriesTitle': [{'value': 'Collection "Vécu"'}], - } - ] - assert data.get('work_access_point') == [{ - 'creator': { - 'preferred_name': 'Mirallés, Ana', - 'type': 'bf:Person' + assert data.get("seriesStatement") == [ + { + "seriesTitle": [{"value": "A la recherche de la Licorne / Mirallès"}], + "seriesEnumeration": [{"value": "3"}], + }, + { + "seriesTitle": [{"value": 'Collection "Vécu"'}], + }, + ] + assert data.get("work_access_point") == [ + { + "creator": {"preferred_name": "Mirallés, Ana", "type": "bf:Person"}, + "title": "A la recherche de la Licorne", }, - 'title': 'A la recherche de la Licorne' - }, { - 'creator': { - 'preferred_name': 'Ruiz, Emilio', - 'type': 'bf:Person' + { + "creator": {"preferred_name": "Ruiz, Emilio", "type": "bf:Person"}, + "title": "A la recherche de la Licorne", }, - 'title': 'A la recherche de la Licorne' - }] + ] def test_marc21_to_identified_by_from_020(): @@ -4520,22 +4100,10 @@ def test_marc21_to_identified_by_from_020(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('identifiedBy') == [ - { - 'type': 'bf:Isbn', - 'status': 'invalid or cancelled', - 'value': '8124605254' - }, - { - 'type': 'bf:Isbn', - 'qualifier': 'broché', - 'value': '9788124605257' - }, - { - 'type': 'bf:Isbn', - 
'qualifier': 'hbk.', - 'value': '9788189997212' - } + assert data.get("identifiedBy") == [ + {"type": "bf:Isbn", "status": "invalid or cancelled", "value": "8124605254"}, + {"type": "bf:Isbn", "qualifier": "broché", "value": "9788124605257"}, + {"type": "bf:Isbn", "qualifier": "hbk.", "value": "9788189997212"}, ] @@ -4559,29 +4127,12 @@ def test_marc21_to_identified_by_from_022(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('identifiedBy') == [ - { - 'type': 'bf:Issn', - 'value': '0264-2875' - }, - { - 'type': 'bf:IssnL', - 'value': '0264-2875' - }, - { - 'type': 'bf:Issn', - 'value': '0264-2875' - }, - { - 'type': 'bf:Issn', - 'status': 'invalid', - 'value': '0080-4649' - }, - { - 'type': 'bf:IssnL', - 'status': 'cancelled', - 'value': '0080-4650' - } + assert data.get("identifiedBy") == [ + {"type": "bf:Issn", "value": "0264-2875"}, + {"type": "bf:IssnL", "value": "0264-2875"}, + {"type": "bf:Issn", "value": "0264-2875"}, + {"type": "bf:Issn", "status": "invalid", "value": "0080-4649"}, + {"type": "bf:IssnL", "status": "cancelled", "value": "0080-4650"}, ] @@ -4601,17 +4152,17 @@ def test_marc21_to_identified_by_from_024_snl_bnf(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('identifiedBy') == [ + assert data.get("identifiedBy") == [ { - 'type': 'uri', - 'source': 'SNL', - 'value': 'http://permalink.snl.ch/bib/chccsa86779' + "type": "uri", + "source": "SNL", + "value": "http://permalink.snl.ch/bib/chccsa86779", }, { - 'type': 'uri', - 'source': 'BNF', - 'value': 'http://catalogue.bnf.fr/ark:/12148/cb312v' - } + "type": "uri", + "source": "BNF", + "value": "http://catalogue.bnf.fr/ark:/12148/cb312v", + }, ] marc21xml = """ @@ -4624,11 +4175,8 @@ def test_marc21_to_identified_by_from_024_snl_bnf(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('identifiedBy') == [ - { - 'type': 'bf:Identifier', - 'value': 'http://slsp.ch/12345' - } + assert data.get("identifiedBy") == [ + {"type": "bf:Identifier", "value": "http://slsp.ch/12345"} ] @@ -4667,35 +4215,13 @@ def test_marc21_to_identified_by_from_024_with_subfield_2(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('identifiedBy') == [ - { - 'type': 'bf:Doi', - 'value': '10.1007/978-3-540-37973-7', - 'note': 'note' - }, - { - 'type': 'bf:Urn', - 'value': 'urn:nbn:de:101:1-201609052530' - }, - { - 'type': 'bf:Local', - 'source': 'NIPO', - 'value': 'NIPO 035-16-060-7' - }, - { - 'type': 'bf:Local', - 'source': 'danacode', - 'value': '7290105422026' - }, - { - 'type': 'bf:Local', - 'source': 'vd18', - 'value': 'VD18 10153438' - }, - { - 'type': 'bf:Gtin14Number', - 'value': '00028947969525' - } + assert data.get("identifiedBy") == [ + {"type": "bf:Doi", "value": "10.1007/978-3-540-37973-7", "note": "note"}, + {"type": "bf:Urn", "value": "urn:nbn:de:101:1-201609052530"}, + {"type": "bf:Local", "source": "NIPO", "value": "NIPO 035-16-060-7"}, + {"type": "bf:Local", "source": "danacode", "value": "7290105422026"}, + {"type": "bf:Local", "source": "vd18", "value": "VD18 10153438"}, + {"type": "bf:Gtin14Number", "value": "00028947969525"}, ] @@ -4755,70 +4281,33 @@ def test_marc21_to_identified_by_from_024_without_subfield_2(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('identifiedBy') == [ - { - 'type': 'bf:Identifier', - 'value': '9782100745463' - }, - { - 'type': 'bf:Isrc', - 'qualifier': 'vol. 
2', - 'value': '702391010582' - }, - { - 'type': 'bf:Isrc', - 'value': 'Erato ECD 88030' - }, - { - 'type': 'bf:Upc', - 'qualifier': 'vol. 5', - 'value': '604907014223' - }, - { - 'type': 'bf:Upc', - 'value': 'EMI Classics 5 55585 2' - }, - { - 'type': 'bf:Ismn', - 'qualifier': 'kritischer B., kartoniert, vol. 1', - 'value': 'M006546565' - }, - { - 'type': 'bf:Ismn', - 'qualifier': 'Kritischer Bericht', - 'value': '9790201858135' - }, - { - 'type': 'bf:Identifier', - 'qualifier': 'Bd. 1', - 'value': '4018262101065' - }, - { - 'type': 'bf:Identifier', - 'qualifier': 'CD audio classe', - 'value': '309-5-56-196162-1' - }, - { - 'type': 'bf:Ean', - 'qualifier': 'Bd 1, pbk.', - 'value': '9783737407427' - }, + assert data.get("identifiedBy") == [ + {"type": "bf:Identifier", "value": "9782100745463"}, + {"type": "bf:Isrc", "qualifier": "vol. 2", "value": "702391010582"}, + {"type": "bf:Isrc", "value": "Erato ECD 88030"}, + {"type": "bf:Upc", "qualifier": "vol. 5", "value": "604907014223"}, + {"type": "bf:Upc", "value": "EMI Classics 5 55585 2"}, { - 'type': 'bf:Identifier', - 'value': 'EP 2305' + "type": "bf:Ismn", + "qualifier": "kritischer B., kartoniert, vol. 1", + "value": "M006546565", }, { - 'type': 'bf:Ean', - 'value': '97 EP 1234' + "type": "bf:Ismn", + "qualifier": "Kritischer Bericht", + "value": "9790201858135", }, + {"type": "bf:Identifier", "qualifier": "Bd. 1", "value": "4018262101065"}, { - 'type': 'bf:Identifier', - 'value': 'ELC1283925' + "type": "bf:Identifier", + "qualifier": "CD audio classe", + "value": "309-5-56-196162-1", }, - { - 'type': 'bf:Isan', - 'value': '0000-0002-A3B1-0000-0-0000-0000-2' - } + {"type": "bf:Ean", "qualifier": "Bd 1, pbk.", "value": "9783737407427"}, + {"type": "bf:Identifier", "value": "EP 2305"}, + {"type": "bf:Ean", "value": "97 EP 1234"}, + {"type": "bf:Identifier", "value": "ELC1283925"}, + {"type": "bf:Isan", "value": "0000-0002-A3B1-0000-0-0000-0000-2"}, ] @@ -4837,12 +4326,12 @@ def test_marc21_to_identified_by_from_028(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('identifiedBy') == [ + assert data.get("identifiedBy") == [ { - 'type': 'bf:MusicPublisherNumber', - 'source': 'SRC', - 'qualifier': 'Qualif1, Qualif2', - 'value': '1234' + "type": "bf:MusicPublisherNumber", + "source": "SRC", + "qualifier": "Qualif1, Qualif2", + "value": "1234", } ] @@ -4858,12 +4347,12 @@ def test_marc21_to_identified_by_from_028(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('identifiedBy') == [ + assert data.get("identifiedBy") == [ { - 'type': 'bf:Identifier', - 'source': 'SRC', - 'qualifier': 'Qualif1, Qualif2', - 'value': '1234' + "type": "bf:Identifier", + "source": "SRC", + "qualifier": "Qualif1, Qualif2", + "value": "1234", } ] @@ -4893,16 +4382,16 @@ def test_marc21_to_acquisition_terms(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('acquisitionTerms') == [ - 'CHF 68', - '£125.00', - 'Fr. 147.20', - '€133.14', - 'gratuit' + assert data.get("acquisitionTerms") == [ + "CHF 68", + "£125.00", + "Fr. 
147.20", + "€133.14", + "gratuit", ] -@mock.patch('requests.Session.get') +@mock.patch("requests.Session.get") def test_marc21_to_subjects(mock_get, mef_agents_url): """Test dojson subjects from 6xx (L49, L50).""" # field 600 without $t with ref @@ -4917,18 +4406,14 @@ def test_marc21_to_subjects(mock_get, mef_agents_url): """ - mock_get.return_value = mock_response(json_data={ - 'pid': 'tets', - 'type': 'bf:Person', - 'idref': {'pid': 'XXXXXXXX'} - }) + mock_get.return_value = mock_response( + json_data={"pid": "tets", "type": "bf:Person", "idref": {"pid": "XXXXXXXX"}} + ) marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('subjects') == [{ - 'entity': { - '$ref': f'{mef_agents_url}/idref/XXXXXXXX' - } - }] + assert data.get("subjects") == [ + {"entity": {"$ref": f"{mef_agents_url}/idref/XXXXXXXX"}} + ] # field 600 without $t marc21xml = """ @@ -4944,18 +4429,16 @@ def test_marc21_to_subjects(mock_get, mef_agents_url): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('subjects') == [{ - 'entity': { - 'type': 'bf:Person', - 'authorized_access_point': - 'Athenagoras (patriarche oecuménique ; 1)', - 'source': 'rero', - 'identifiedBy': { - 'value': 'A009963344', - 'type': 'RERO' + assert data.get("subjects") == [ + { + "entity": { + "type": "bf:Person", + "authorized_access_point": "Athenagoras (patriarche oecuménique ; 1)", + "source": "rero", + "identifiedBy": {"value": "A009963344", "type": "RERO"}, } } - }] + ] # field 611 without $t marc21xml = """ @@ -4970,17 +4453,16 @@ def test_marc21_to_subjects(mock_get, mef_agents_url): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('subjects') == [{ - 'entity': { - 'type': 'bf:Organisation', - 'authorized_access_point': 'Belalp Hexe (Blatten)', - 'source': 'rero', - 'identifiedBy': { - 'value': 'A017827554', - 'type': 'RERO' + assert data.get("subjects") == [ + { + "entity": { + "type": "bf:Organisation", + "authorized_access_point": "Belalp Hexe (Blatten)", + "source": "rero", + "identifiedBy": {"value": "A017827554", "type": "RERO"}, } } - }] + ] # field 600 with $t marc21xml = """ @@ -4995,17 +4477,16 @@ def test_marc21_to_subjects(mock_get, mef_agents_url): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('subjects') == [{ - 'entity': { - 'type': 'bf:Work', - 'authorized_access_point': 'Giraudoux, Jean. Electre', - 'source': 'rero', - 'identifiedBy': { - 'value': '027538303', - 'type': 'IdRef' + assert data.get("subjects") == [ + { + "entity": { + "type": "bf:Work", + "authorized_access_point": "Giraudoux, Jean. Electre", + "source": "rero", + "identifiedBy": {"value": "027538303", "type": "IdRef"}, } } - }] + ] # field 611 with $t marc21xml = """ @@ -5020,17 +4501,16 @@ def test_marc21_to_subjects(mock_get, mef_agents_url): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('subjects') == [{ - 'entity': { - 'type': 'bf:Work', - 'source': 'rero', - 'authorized_access_point': 'Concile de Vatican 2. Influence reçue', - 'identifiedBy': { - 'value': 'A010067471', - 'type': 'RERO' + assert data.get("subjects") == [ + { + "entity": { + "type": "bf:Work", + "source": "rero", + "authorized_access_point": "Concile de Vatican 2. 
Influence reçue", + "identifiedBy": {"value": "A010067471", "type": "RERO"}, } } - }] + ] # field 650 topic marc21xml = """ @@ -5044,17 +4524,16 @@ def test_marc21_to_subjects(mock_get, mef_agents_url): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('subjects') == [{ - 'entity': { - 'type': 'bf:Topic', - 'authorized_access_point': 'Vie', - 'source': 'rero', - 'identifiedBy': { - 'value': 'A021002965', - 'type': 'RERO' + assert data.get("subjects") == [ + { + "entity": { + "type": "bf:Topic", + "authorized_access_point": "Vie", + "source": "rero", + "identifiedBy": {"value": "A021002965", "type": "RERO"}, } } - }] + ] # field 650 temporal marc21xml = """ @@ -5068,17 +4547,16 @@ def test_marc21_to_subjects(mock_get, mef_agents_url): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('subjects') == [{ - 'entity': { - 'type': 'bf:Temporal', - 'authorized_access_point': '1961', - 'source': 'rero', - 'identifiedBy': { - 'value': 'G021002965', - 'type': 'RERO' + assert data.get("subjects") == [ + { + "entity": { + "type": "bf:Temporal", + "authorized_access_point": "1961", + "source": "rero", + "identifiedBy": {"value": "G021002965", "type": "RERO"}, } } - }] + ] # field 651 marc21xml = """ @@ -5092,17 +4570,16 @@ def test_marc21_to_subjects(mock_get, mef_agents_url): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('subjects') == [{ - 'entity': { - 'type': 'bf:Place', - 'authorized_access_point': 'Europe occidentale', - 'source': 'rero', - 'identifiedBy': { - 'value': 'A009975209', - 'type': 'RERO' + assert data.get("subjects") == [ + { + "entity": { + "type": "bf:Place", + "authorized_access_point": "Europe occidentale", + "source": "rero", + "identifiedBy": {"value": "A009975209", "type": "RERO"}, } } - }] + ] # field 655 with $0 marc21xml = """ @@ -5116,17 +4593,16 @@ def test_marc21_to_subjects(mock_get, mef_agents_url): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('genreForm') == [{ - 'entity': { - 'type': 'bf:Topic', - 'authorized_access_point': 'Bases de données', - 'source': 'rero', - 'identifiedBy': { - 'value': 'A001234567', - 'type': 'RERO' - } + assert data.get("genreForm") == [ + { + "entity": { + "type": "bf:Topic", + "authorized_access_point": "Bases de données", + "source": "rero", + "identifiedBy": {"value": "A001234567", "type": "RERO"}, + } } - }] + ] # field 655 without $0 marc21xml = """ @@ -5145,13 +4621,15 @@ def test_marc21_to_subjects(mock_get, mef_agents_url): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('genreForm') == [{ - 'entity': { - 'type': 'bf:Topic', - 'authorized_access_point': 'Bases de données', - 'source': 'rero' + assert data.get("genreForm") == [ + { + "entity": { + "type": "bf:Topic", + "authorized_access_point": "Bases de données", + "source": "rero", + } } - }] + ] def test_marc21_to_subjects_imported(): @@ -5168,13 +4646,14 @@ def test_marc21_to_subjects_imported(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('subjects_imported') == [{ - 'entity': { - 'type': 'bf:Topic', - 'authorized_access_point': - 'Pollution - Government policy - Germany (West)' + assert data.get("subjects_imported") == [ + { + "entity": { + "type": "bf:Topic", + "authorized_access_point": "Pollution - Government policy - Germany (West)", + } } - }] + ] # field 919 with $2 chrero and $v marc21xml = """ @@ -5190,14 +4669,15 @@ def 
test_marc21_to_subjects_imported():
     """
     marc21json = create_record(marc21xml)
     data = marc21.do(marc21json)
-    assert data.get('subjects_imported') == [{
-        'entity': {
-            'type': 'bf:Topic',
-            'authorized_access_point':
-                'Zermatt (Suisse, VS) - 19e s. (fin) - [carte postale]',
-            'source': 'chrero'
+    assert data.get("subjects_imported") == [
+        {
+            "entity": {
+                "type": "bf:Topic",
+                "authorized_access_point": "Zermatt (Suisse, VS) - 19e s. (fin) - [carte postale]",
+                "source": "chrero",
+            }
         }
-    }]
+    ]

     # field 919 with $2 chrero and without $v
     marc21xml = """
@@ -5212,12 +4692,14 @@ def test_marc21_to_subjects_imported():
     """
     marc21json = create_record(marc21xml)
     data = marc21.do(marc21json)
-    assert data.get('provisionActivity') == [{
-        'note': 'Date not available and automatically set to 2050',
-        'place': [{'country': 'xx'}],
-        'startDate': 2050,
-        'type': 'bf:Publication'
-    }]
+    assert data.get("provisionActivity") == [
+        {
+            "note": "Date not available and automatically set to 2050",
+            "place": [{"country": "xx"}],
+            "startDate": 2050,
+            "type": "bf:Publication",
+        }
+    ]

     # field 919 with $2 chrero and without $v
     marc21xml = """
@@ -5232,12 +4714,14 @@ def test_marc21_to_subjects_imported():
     """
     marc21json = create_record(marc21xml)
     data = marc21.do(marc21json)
-    assert data.get('provisionActivity') == [{
-        'note': 'Date not available and automatically set to 2050',
-        'place': [{'country': 'xx'}],
-        'startDate': 2050,
-        'type': 'bf:Publication'
-    }]
+    assert data.get("provisionActivity") == [
+        {
+            "note": "Date not available and automatically set to 2050",
+            "place": [{"country": "xx"}],
+            "startDate": 2050,
+            "type": "bf:Publication",
+        }
+    ]

     # field 919 with $2 ram|rameau|gnd|rerovoc
     marc21xml = """
@@ -5252,13 +4736,15 @@ def test_marc21_to_subjects_imported():
     """
     marc21json = create_record(marc21xml)
     data = marc21.do(marc21json)
-    assert data.get('subjects_imported') == [{
-        'entity': {
-            'type': 'bf:Topic',
-            'authorized_access_point': 'Sekundarstufe',
-            'source': 'gnd'
+    assert data.get("subjects_imported") == [
+        {
+            "entity": {
+                "type": "bf:Topic",
+                "authorized_access_point": "Sekundarstufe",
+                "source": "gnd",
+            }
         }
-    }]
+    ]

     # field 650 _0
     marc21xml = """
@@ -5270,13 +4756,15 @@ def test_marc21_to_subjects_imported():
     """
     marc21json = create_record(marc21xml)
     data = marc21.do(marc21json)
-    assert data.get('subjects_imported') == [{
-        'entity': {
-            'type': 'bf:Organisation',
-            'authorized_access_point': 'Conference of European Churches',
-            'source': 'LCSH'
+    assert data.get("subjects_imported") == [
+        {
+            "entity": {
+                "type": "bf:Organisation",
+                "authorized_access_point": "Conference of European Churches",
+                "source": "LCSH",
+            }
         }
-    }]
+    ]

     # field 650 _2
     marc21xml = """
@@ -5288,13 +4776,15 @@ def test_marc21_to_subjects_imported():
     """
     marc21json = create_record(marc21xml)
    data = marc21.do(marc21json)
-    assert data.get('subjects_imported') == [{
-        'entity': {
-            'type': 'bf:Topic',
-            'authorized_access_point': 'Philosophy, Medical',
-            'source': 'MeSH'
+    assert data.get("subjects_imported") == [
+        {
+            "entity": {
+                "type": "bf:Topic",
+                "authorized_access_point": "Philosophy, Medical",
+                "source": "MeSH",
+            }
         }
-    }]
+    ]

     # field 650 with $2 rerovoc
     marc21xml = """
@@ -5307,13 +4797,15 @@ def test_marc21_to_subjects_imported():
     """
     marc21json = create_record(marc21xml)
     data = marc21.do(marc21json)
-    assert data.get('subjects_imported') == [{
-        'entity': {
-            'type': 'bf:Topic',
-            'authorized_access_point': 'société (milieu humain)',
-            'source': 'rerovoc'
+    assert data.get("subjects_imported") == [
+        {
+            "entity": {
+                "type": "bf:Topic",
+                "authorized_access_point": "société (milieu humain)",
+                "source": "rerovoc",
+            }
         }
-    }]
+    ]

     # field 650 with $2 rerovoc
     marc21xml = """
@@ -5327,24 +4819,29 @@ def test_marc21_to_subjects_imported():
     """
     marc21json = create_record(marc21xml)
     data = marc21.do(marc21json)
-    assert data.get('subjects_imported') == [{
-        'entity': {
-            'type': 'bf:Organisation',
-            'authorized_access_point': 'Catholic Church',
-            'source': 'LCSH',
-            'subdivisions': [{
-                'entity': {
-                    'type': 'bf:Topic',
-                    'authorized_access_point': 'Relations'
-                }
-            }, {
-                'entity': {
-                    'type': 'bf:Topic',
-                    'authorized_access_point': 'Eastern churches'
-                }
-            }]
+    assert data.get("subjects_imported") == [
+        {
+            "entity": {
+                "type": "bf:Organisation",
+                "authorized_access_point": "Catholic Church",
+                "source": "LCSH",
+                "subdivisions": [
+                    {
+                        "entity": {
+                            "type": "bf:Topic",
+                            "authorized_access_point": "Relations",
+                        }
+                    },
+                    {
+                        "entity": {
+                            "type": "bf:Topic",
+                            "authorized_access_point": "Eastern churches",
+                        }
+                    },
+                ],
+            }
         }
-    }]
+    ]


 def test_marc21_to_genreForm_imported():
@@ -5362,13 +4859,15 @@ def test_marc21_to_genreForm_imported():
     """
     marc21json = create_record(marc21xml)
     data = marc21.do(marc21json)
-    assert data.get('genreForm_imported') == [{
-        'entity': {
-            'type': 'bf:Topic',
-            'authorized_access_point': 'Erlebnisbericht',
-            'source': 'gnd-content'
+    assert data.get("genreForm_imported") == [
+        {
+            "entity": {
+                "type": "bf:Topic",
+                "authorized_access_point": "Erlebnisbericht",
+                "source": "gnd-content",
+            }
         }
-    }]
+    ]

     # field 919 with $2 chrero and $v
     marc21xml = """
@@ -5384,14 +4883,15 @@ def test_marc21_to_genreForm_imported():
     """
     marc21json = create_record(marc21xml)
     data = marc21.do(marc21json)
-    assert data.get('genreForm_imported') == [{
-        'entity': {
-            'type': 'bf:Topic',
-            'authorized_access_point':
-                'Zermatt (Suisse, VS) - 19e s. (fin) - [carte postale]',
-            'source': 'gnd-content'
+    assert data.get("genreForm_imported") == [
+        {
+            "entity": {
+                "type": "bf:Topic",
+                "authorized_access_point": "Zermatt (Suisse, VS) - 19e s. (fin) - [carte postale]",
+                "source": "gnd-content",
+            }
         }
-    }]
+    ]


 def test_marc21_to_identified_by_from_035():
@@ -5406,12 +4906,8 @@ def test_marc21_to_identified_by_from_035():
     """
     marc21json = create_record(marc21xml)
     data = marc21.do(marc21json)
-    assert data.get('identifiedBy') == [
-        {
-            'type': 'bf:Local',
-            'source': 'RERO',
-            'value': 'R008945501'
-        }
+    assert data.get("identifiedBy") == [
+        {"type": "bf:Local", "source": "RERO", "value": "R008945501"}
     ]

     marc21xml = """
@@ -5423,16 +4919,12 @@ def test_marc21_to_identified_by_from_035():
     """
     marc21json = create_record(marc21xml)
     data = marc21.do(marc21json)
-    assert data.get('identifiedBy') == [
-        {
-            'type': 'bf:Local',
-            'source': 'OCoLC',
-            'value': 'ocm72868858'
-        }
+    assert data.get("identifiedBy") == [
+        {"type": "bf:Local", "source": "OCoLC", "value": "ocm72868858"}
     ]


-@mock.patch('requests.Session.get')
+@mock.patch("requests.Session.get")
 def test_marc21_to_electronicLocator_from_856(mock_cover_get, app):
     """Test dojson electronicLocator from 856."""
     marc21xml = """
@@ -5447,21 +4939,21 @@ def test_marc21_to_electronicLocator_from_856(mock_cover_get, app):
     """
     marc21json = create_record(marc21xml)
     data = marc21.do(marc21json)
-    assert data.get('electronicLocator') == [
+    assert data.get("electronicLocator") == [
         {
-            'url': 'http://reader.digitale-s.de/r/d/XXX.html',
-            'type': 'versionOfResource',
-            'content': 'fullText',
-            'publicNote': ['Vol. 1']
+            "url": "http://reader.digitale-s.de/r/d/XXX.html",
+            "type": "versionOfResource",
+            "content": "fullText",
+            "publicNote": ["Vol. 1"],
         }
     ]
     assert get_cover_art(data) is None
     assert get_other_accesses(data) == [
         {
-            'url': 'http://reader.digitale-s.de/r/d/XXX.html',
-            'type': 'versionOfResource',
-            'content': 'full text',
-            'public_note': 'Vol. 1'
+            "url": "http://reader.digitale-s.de/r/d/XXX.html",
+            "type": "versionOfResource",
+            "content": "full text",
+            "public_note": "Vol. 1",
         }
     ]

@@ -5477,20 +4969,20 @@ def test_marc21_to_electronicLocator_from_856(mock_cover_get, app):
     """
     marc21json = create_record(marc21xml)
     data = marc21.do(marc21json)
-    assert data.get('electronicLocator') == [
+    assert data.get("electronicLocator") == [
         {
-            'url': 'http://d-nb.info/1071856731/04',
-            'type': 'relatedResource',
-            'publicNote': ['Inhaltsverzeichnis', 'Bd. 1']
+            "url": "http://d-nb.info/1071856731/04",
+            "type": "relatedResource",
+            "publicNote": ["Inhaltsverzeichnis", "Bd. 1"],
         }
     ]
     assert get_cover_art(data) is None
     assert get_other_accesses(data) == [
         {
-            'content': None,
-            'public_note': 'Inhaltsverzeichnis, Bd. 1',
-            'type': 'relatedResource',
-            'url': 'http://d-nb.info/1071856731/04'
+            "content": None,
+            "public_note": "Inhaltsverzeichnis, Bd. 1",
+            "type": "relatedResource",
+            "url": "http://d-nb.info/1071856731/04",
        }
     ]

@@ -5514,31 +5006,31 @@ def test_marc21_to_electronicLocator_from_856(mock_cover_get, app):
     """
     marc21json = create_record(marc21xml)
     data = marc21.do(marc21json)
-    assert data.get('electronicLocator') == [
+    assert data.get("electronicLocator") == [
         {
-            'url': 'http://d-nb.info/1071856731/04',
-            'type': 'relatedResource',
-            'publicNote': ['Inhaltsverzeichnis', 'Bd. 1']
+            "url": "http://d-nb.info/1071856731/04",
+            "type": "relatedResource",
+            "publicNote": ["Inhaltsverzeichnis", "Bd. 1"],
         },
         {
-            'content': 'coverImage',
-            'type': 'relatedResource',
-            'url': 'http://d-nb.info/image.png'
+            "content": "coverImage",
+            "type": "relatedResource",
+            "url": "http://d-nb.info/image.png",
         },
         {
-            'content': 'coverImage',
-            'type': 'versionOfResource',
-            'url': 'http://d-nb.info/image2.png'
-        }
+            "content": "coverImage",
+            "type": "versionOfResource",
+            "url": "http://d-nb.info/image2.png",
+        },
     ]
     mock_cover_get.return_value = mock_response(json_data={})
-    assert get_cover_art(data) == 'http://d-nb.info/image.png'
+    assert get_cover_art(data) == "http://d-nb.info/image.png"
     assert get_other_accesses(data) == [
         {
-            'content': None,
-            'public_note': 'Inhaltsverzeichnis, Bd. 1',
-            'type': 'relatedResource',
-            'url': 'http://d-nb.info/1071856731/04'
+            "content": None,
+            "public_note": "Inhaltsverzeichnis, Bd. 1",
+            "type": "relatedResource",
+            "url": "http://d-nb.info/1071856731/04",
         }
     ]

@@ -5556,12 +5048,8 @@ def test_marc21_to_identified_by_from_930():
     """
     marc21json = create_record(marc21xml)
     data = marc21.do(marc21json)
-    assert data.get('identifiedBy') == [
-        {
-            'type': 'bf:Local',
-            'source': 'OCoLC',
-            'value': 'ocm11113722'
-        }
+    assert data.get("identifiedBy") == [
+        {"type": "bf:Local", "source": "OCoLC", "value": "ocm11113722"}
     ]
     # identifier without source in parenthesis
     marc21xml = """
@@ -5573,54 +5061,48 @@ def test_marc21_to_identified_by_from_930():
     """
     marc21json = create_record(marc21xml)
     data = marc21.do(marc21json)
-    assert data.get('identifiedBy') == [
-        {
-            'type': 'bf:Local',
-            'value': 'ocm11113722'
-        }
-    ]
+    assert data.get("identifiedBy") == [{"type": "bf:Local", "value": "ocm11113722"}]


-@mock.patch('requests.Session.get')
+@mock.patch("requests.Session.get")
 def test_get_mef_link(mock_get, capsys, app):
     """Test get MEF contribution link."""
-    mock_get.return_value = mock_response(json_data={
-        'pid': 'test',
-        'idref': {'pid': '003945843'}
-    })
+    mock_get.return_value = mock_response(
+        json_data={"pid": "test", "idref": {"pid": "003945843"}}
+    )
     mef_url = get_mef_link(
-        bibid='1',
-        reroid='1',
+        bibid="1",
+        reroid="1",
         entity_type=EntityType.PERSON,
-        ids=['(IdRef)003945843'],
-        key='100..'
+        ids=["(IdRef)003945843"],
+        key="100..",
     )
-    assert mef_url == 'https://mef.rero.ch/api/agents/idref/003945843'
+    assert mef_url == "https://mef.rero.ch/api/agents/idref/003945843"

     mock_get.return_value = mock_response(status=404)
     mef_url = get_mef_link(
-        bibid='1',
-        reroid='1',
+        bibid="1",
+        reroid="1",
         entity_type=EntityType.PERSON,
-        ids=['(IdRef)123456789'],
-        key='100..'
+        ids=["(IdRef)123456789"],
+        key="100..",
     )
     assert not mef_url
     out, err = capsys.readouterr()
     assert out == (
-        'WARNING GET MEF CONTRIBUTION:\t1\t1\t100..\t(IdRef)123456789\t'
-        'https://mef.rero.ch/api/agents/mef/latest/'
-        'idref:123456789\t404\t0\t\n'
+        "WARNING GET MEF CONTRIBUTION:\t1\t1\t100..\t(IdRef)123456789\t"
+        "https://mef.rero.ch/api/agents/mef/latest/"
+        "idref:123456789\t404\t0\t\n"
     )

     mock_get.return_value = mock_response(status=400)
     mef_url = get_mef_link(
-        bibid='1',
-        reroid='1',
+        bibid="1",
+        reroid="1",
         entity_type=EntityType.PERSON,
-        ids=['X123456789'],
-        key='100..'
+ ids=["X123456789"], + key="100..", ) assert not mef_url @@ -5640,7 +5122,7 @@ def test_marc21_to_masked(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('_masked') + assert data.get("_masked") marc21xml = """ @@ -5652,7 +5134,7 @@ def test_marc21_to_masked(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert not data.get('_masked') + assert not data.get("_masked") marc21xml = """ @@ -5661,7 +5143,7 @@ def test_marc21_to_masked(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert not data.get('_masked') + assert not data.get("_masked") def test_marc21_to_content_media_carrier(): @@ -5693,11 +5175,13 @@ def test_marc21_to_content_media_carrier(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('contentMediaCarrier') == [{ - "contentType": ["rdaco:1020", "rdaco:1014"], - "mediaType": "rdamt:1007", - "carrierType": "rdact:1049" - }] + assert data.get("contentMediaCarrier") == [ + { + "contentType": ["rdaco:1020", "rdaco:1014"], + "mediaType": "rdamt:1007", + "carrierType": "rdact:1049", + } + ] # missing 338 marc21xml = """ @@ -5718,10 +5202,9 @@ def test_marc21_to_content_media_carrier(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('contentMediaCarrier') == [{ - "contentType": ["rdaco:1020"], - "mediaType": "rdamt:1002" - }] + assert data.get("contentMediaCarrier") == [ + {"contentType": ["rdaco:1020"], "mediaType": "rdamt:1002"} + ] def test_marc21_to_content_media_carrier_with_linked_fields(): @@ -5767,15 +5250,18 @@ def test_marc21_to_content_media_carrier_with_linked_fields(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('contentMediaCarrier') == [{ - "contentType": ["rdaco:1020", "rdaco:1014"], - "mediaType": "rdamt:1007", - "carrierType": "rdact:1049" - }, { - "contentType": ["rdaco:1015"], - "mediaType": "rdamt:1003", - "carrierType": "rdact:1011" - }] + assert data.get("contentMediaCarrier") == [ + { + "contentType": ["rdaco:1020", "rdaco:1014"], + "mediaType": "rdamt:1007", + "carrierType": "rdact:1049", + }, + { + "contentType": ["rdaco:1015"], + "mediaType": "rdamt:1003", + "carrierType": "rdact:1011", + }, + ] # unlinked 337 marc21xml = """ @@ -5804,14 +5290,14 @@ def test_marc21_to_content_media_carrier_with_linked_fields(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('contentMediaCarrier') == [{ + assert data.get("contentMediaCarrier") == [ + { "contentType": ["rdaco:1020"], "mediaType": "rdamt:1007", - "carrierType": "rdact:1049" - }, { - "contentType": ["rdaco:1019"], - "mediaType": "rdamt:1007" - }] + "carrierType": "rdact:1049", + }, + {"contentType": ["rdaco:1019"], "mediaType": "rdamt:1007"}, + ] def test_marc21_to_original_language(): @@ -5833,7 +5319,7 @@ def test_marc21_to_original_language(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('originalLanguage') == ['eng'] + assert data.get("originalLanguage") == ["eng"] def test_abbreviated_title(app, marc21_record): @@ -5850,13 +5336,16 @@ def test_abbreviated_title(app, marc21_record): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('title') == [{ - 'type': 'bf:AbbreviatedTitle', - 'mainTitle': [{'value': 'Günter Gianni Piontek Skulpt.'}] - }, { - 'type': 'bf:KeyTitle', - 'mainTitle': [{'value': 'Günter Gianni Piontek, Skulpturen'}] - }] + assert data.get("title") 
== [ + { + "type": "bf:AbbreviatedTitle", + "mainTitle": [{"value": "Günter Gianni Piontek Skulpt."}], + }, + { + "type": "bf:KeyTitle", + "mainTitle": [{"value": "Günter Gianni Piontek, Skulpturen"}], + }, + ] def test_scale_and_cartographic(app, marc21_record): @@ -5873,12 +5362,10 @@ def test_scale_and_cartographic(app, marc21_record): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('scale') == [{ - 'label': '1:25 000', - 'ratio_linear_horizontal': 25000, - 'type': 'Linear scale' - }] - assert data.get('cartographicAttributes') is None + assert data.get("scale") == [ + {"label": "1:25 000", "ratio_linear_horizontal": 25000, "type": "Linear scale"} + ] + assert data.get("cartographicAttributes") is None marc21xml = """ @@ -5909,28 +5396,30 @@ def test_scale_and_cartographic(app, marc21_record): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('scale') == [{ - 'label': 'Echelle 1:50 000', - 'ratio_linear_horizontal': 1000000, - 'type': 'Linear scale' - }, { - 'label': '[Echelles diverses]', - 'ratio_linear_horizontal': 500000, - 'ratio_linear_vertical': 70000, - 'type': 'Linear scale' - }] - assert data.get('cartographicAttributes') == [{ - 'coordinates': { - 'label': "(E 6º50'-E 7º15'/N 46º10'-N 46º20')", - 'latitude': 'N0251500 N0221000', - 'longitude': 'E1103000 E1203000' + assert data.get("scale") == [ + { + "label": "Echelle 1:50 000", + "ratio_linear_horizontal": 1000000, + "type": "Linear scale", }, - 'projection': 'projection conforme cylindrique' - }, { - 'coordinates': { - 'longitude': 'E0033800 E0080300' - } - }] + { + "label": "[Echelles diverses]", + "ratio_linear_horizontal": 500000, + "ratio_linear_vertical": 70000, + "type": "Linear scale", + }, + ] + assert data.get("cartographicAttributes") == [ + { + "coordinates": { + "label": "(E 6º50'-E 7º15'/N 46º10'-N 46º20')", + "latitude": "N0251500 N0221000", + "longitude": "E1103000 E1203000", + }, + "projection": "projection conforme cylindrique", + }, + {"coordinates": {"longitude": "E0033800 E0080300"}}, + ] def test_temporal_coverage(app, marc21_record): @@ -5973,37 +5462,16 @@ def test_temporal_coverage(app, marc21_record): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('temporalCoverage') == [{ - 'end_date': '-130000000', - 'start_date': '-205000000', - 'type': 'period' - }, { - 'date': '-0044-03-15', - 'period_code': ['d9d9'], - 'type': 'time' - }, { - 'date': '+1767', - 'period_code': ['v6w3'], - 'type': 'time' - }, { - 'date': '+1798-08-26', - 'period_code': ['v9w0'], - 'type': 'time' - }, { - 'date': None, - 'period_code': ['w1w1'], - 'type': 'time' - }, { - 'date': None, - 'period_code': ['x1x1'], - 'type': 'time' - }, { - 'period_code': ['x6x6'], - 'type': 'period' - }, { - 'date': '+1972', - 'type': 'time' - }] + assert data.get("temporalCoverage") == [ + {"end_date": "-130000000", "start_date": "-205000000", "type": "period"}, + {"date": "-0044-03-15", "period_code": ["d9d9"], "type": "time"}, + {"date": "+1767", "period_code": ["v6w3"], "type": "time"}, + {"date": "+1798-08-26", "period_code": ["v9w0"], "type": "time"}, + {"date": None, "period_code": ["w1w1"], "type": "time"}, + {"date": None, "period_code": ["x1x1"], "type": "time"}, + {"period_code": ["x6x6"], "type": "period"}, + {"date": "+1972", "type": "time"}, + ] def test_marc21_to_fiction_statement(): @@ -6017,7 +5485,7 @@ def test_marc21_to_fiction_statement(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) 
- assert data['fiction_statement'] == DocumentFictionType.Unspecified.value + assert data["fiction_statement"] == DocumentFictionType.Unspecified.value marc21xml = """ @@ -51,7 +46,7 @@ def test_marc21_to_isbn_ebooks(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('identifiedBy') is None + assert data.get("identifiedBy") is None marc21xml = """ @@ -62,7 +57,7 @@ def test_marc21_to_isbn_ebooks(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert not data.get('identifiedBy') + assert not data.get("identifiedBy") def test_marc21_to_languages_ebooks_from_008(): @@ -76,7 +71,7 @@ def test_marc21_to_languages_ebooks_from_008(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('language') == [{'type': 'bf:Language', 'value': 'fre'}] + assert data.get("language") == [{"type": "bf:Language", "value": "fre"}] def test_marc21_to_languages_ebooks(): @@ -96,7 +91,7 @@ def test_marc21_to_languages_ebooks(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('language') == [{'type': 'bf:Language', 'value': 'fre'}] + assert data.get("language") == [{"type": "bf:Language", "value": "fre"}] def test_marc21_to_type_ebooks(): @@ -110,10 +105,9 @@ def test_marc21_to_type_ebooks(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('type') == [{ - 'main_type': 'docmaintype_book', - 'subtype': 'docsubtype_e-book' - }] + assert data.get("type") == [ + {"main_type": "docmaintype_book", "subtype": "docsubtype_e-book"} + ] def test_marc21_to_identifier_rero_id(): @@ -127,11 +121,8 @@ def test_marc21_to_identifier_rero_id(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - identifiers = data.get('identifiedBy', []) - assert identifiers[0] == { - 'type': 'bf:Local', - 'value': 'cantook-EDEN496624' - } + identifiers = data.get("identifiedBy", []) + assert identifiers[0] == {"type": "bf:Local", "value": "cantook-EDEN496624"} def test_marc21_to_title(): @@ -145,10 +136,9 @@ def test_marc21_to_title(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('title') == [{ - 'mainTitle': [{'value': 'Elena et les joueuses'}], - 'type': 'bf:Title' - }] + assert data.get("title") == [ + {"mainTitle": [{"value": "Elena et les joueuses"}], "type": "bf:Title"} + ] def test_marc21_to_extent(): @@ -165,7 +155,7 @@ def test_marc21_to_extent(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('extent') == '1234' + assert data.get("extent") == "1234" def test_marc21_to_description(): @@ -187,7 +177,7 @@ def test_marc21_to_description(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('extent') == '116 p.' + assert data.get("extent") == "116 p." marc21xml = """ @@ -206,7 +196,7 @@ def test_marc21_to_description(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('extent') == '116 p.' + assert data.get("extent") == "116 p." marc21xml = """ @@ -219,7 +209,7 @@ def test_marc21_to_description(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('extent') == '116 p.' + assert data.get("extent") == "116 p." 
 def test_marc21_to_notes():
@@ -240,13 +230,9 @@ def test_marc21_to_notes():
     """
     marc21json = create_record(marc21xml)
     data = marc21.do(marc21json)
-    assert data.get('note') == [{
-        'noteType': 'general',
-        'label': 'note 1'
-    }, {
-        'noteType': 'general',
-        'label': 'note 2'
-    }
+    assert data.get("note") == [
+        {"noteType": "general", "label": "note 1"},
+        {"noteType": "general", "label": "note 2"},
     ]

@@ -264,18 +250,12 @@ def test_marc21_to_edition_statement_one_field_250():
     """
     marc21json = create_record(marc21xml)
     data = marc21.do(marc21json)
-    assert data.get('editionStatement') == [{
-        'editionDesignation': [
-            {
-                'value': '2e ed.'
-            }
-        ],
-        'responsibility': [
-            {
-                'value': 'avec un avant-propos par Jean Faret'
-            }
-        ]
-    }]
+    assert data.get("editionStatement") == [
+        {
+            "editionDesignation": [{"value": "2e ed."}],
+            "responsibility": [{"value": "avec un avant-propos par Jean Faret"}],
+        }
+    ]


 def test_marc21_to_provision_activity_ebooks_from_field_260():
@@ -291,25 +271,14 @@
     """
     marc21json = create_record(marc21xml)
     data = marc21.do(marc21json)
-    assert data.get('provisionActivity') == [
+    assert data.get("provisionActivity") == [
         {
-            'type': 'bf:Publication',
-            'statement': [
-                {
-                    'label': [
-                        {'value': 'Lausanne'}
-                    ],
-                    'type': 'bf:Place'
-                },
-                {
-                    'label': [
-                        {'value': '[2006]'}
-                    ],
-                    'type': 'Date'
-                }
-
+            "type": "bf:Publication",
+            "statement": [
+                {"label": [{"value": "Lausanne"}], "type": "bf:Place"},
+                {"label": [{"value": "[2006]"}], "type": "Date"},
             ],
-            'startDate': 2006
+            "startDate": 2006,
         }
     ]

@@ -327,7 +296,7 @@ def test_marc21copyrightdate_ebooks_from_field_264_04():
     """
     marc21json = create_record(marc21xml)
     data = marc21.do(marc21json)
-    assert data.get('copyrightDate') == ['© 1971']
+    assert data.get("copyrightDate") == ["© 1971"]

     marc21xml = """
@@ -338,7 +307,7 @@ def test_marc21copyrightdate_ebooks_from_field_264_04():
     """
     marc21json = create_record(marc21xml)
     data = marc21.do(marc21json)
-    assert data.get('copyrightDate') == ['© 1971 [extra 1973]']
+    assert data.get("copyrightDate") == ["© 1971 [extra 1973]"]


 def test_marc21_to_provision_activity_ebooks_from_field_264_1():
@@ -354,31 +323,16 @@
     """
     marc21json = create_record(marc21xml)
     data = marc21.do(marc21json)
-    assert data.get('provisionActivity') == [
+    assert data.get("provisionActivity") == [
         {
-            'type': 'bf:Publication',
-            'statement': [
-                {
-                    'label': [
-                        {'value': 'Lausanne'}
-                    ],
-                    'type': 'bf:Place'
-                },
-                {
-                    'label': [
-                        {'value': 'Payot'}
-                    ],
-                    'type': 'bf:Agent'
-                },
-                {
-                    'label': [
-                        {'value': '[2006-2010]'}
-                    ],
-                    'type': 'Date'
-                }
+            "type": "bf:Publication",
+            "statement": [
+                {"label": [{"value": "Lausanne"}], "type": "bf:Place"},
+                {"label": [{"value": "Payot"}], "type": "bf:Agent"},
+                {"label": [{"value": "[2006-2010]"}], "type": "Date"},
             ],
-            'startDate': 2006,
-            'endDate': 2010
+            "startDate": 2006,
+            "endDate": 2010,
         }
     ]


 def test_marc21_to_provision_activity_ebooks_from_field_264_2():
@@ -396,30 +350,14 @@
     """
     marc21json = create_record(marc21xml)
     data = marc21.do(marc21json)
-    assert data.get('provisionActivity') == [
+    assert data.get("provisionActivity") == [
         {
-            'type': 'bf:Distribution',
-            'statement': [
-                {
-                    'label': [
-                        {'value': 'Lausanne'}
-                    ],
-                    'type': 'bf:Place'
-                },
-                {
-                    'label': [
-                        {'value': 'Payot'}
-                    ],
-                    'type': 'bf:Agent'
-                },
-                {
-                    'label': [
-                        {'value': '[2006-2010]'}
-                    ],
-                    'type': 'Date'
-                }
-
-            ]
+            "type":
"bf:Distribution", + "statement": [ + {"label": [{"value": "Lausanne"}], "type": "bf:Place"}, + {"label": [{"value": "Payot"}], "type": "bf:Agent"}, + {"label": [{"value": "[2006-2010]"}], "type": "Date"}, + ], } ] @@ -450,37 +388,24 @@ def test_marc21_to_subjects(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('subjects') == [{ - 'entity': { - 'authorized_access_point': 'Croissance personnelle', - 'type': 'bf:Topic' - } - }, { - 'entity': { - 'authorized_access_point': 'Self-Help', - 'type': 'bf:Topic' - } - }, { - 'entity': { - 'authorized_access_point': 'Santé', - 'type': 'bf:Topic' - } - }, { - 'entity': { - 'authorized_access_point': 'Health', - 'type': 'bf:Topic' - } - }, { - 'entity': { - 'authorized_access_point': 'Développement Personnel', - 'type': 'bf:Topic' + assert data.get("subjects") == [ + { + "entity": { + "authorized_access_point": "Croissance personnelle", + "type": "bf:Topic", } - }, { - 'entity': { - 'authorized_access_point': 'Self-Help', - 'type': 'bf:Topic' + }, + {"entity": {"authorized_access_point": "Self-Help", "type": "bf:Topic"}}, + {"entity": {"authorized_access_point": "Santé", "type": "bf:Topic"}}, + {"entity": {"authorized_access_point": "Health", "type": "bf:Topic"}}, + { + "entity": { + "authorized_access_point": "Développement Personnel", + "type": "bf:Topic", } - }] + }, + {"entity": {"authorized_access_point": "Self-Help", "type": "bf:Topic"}}, + ] def test_marc21_to_contribution(): @@ -499,13 +424,10 @@ def test_marc21_to_contribution(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('contribution') == [ + assert data.get("contribution") == [ { - 'entity': { - 'type': 'bf:Person', - 'authorized_access_point': 'Collectif' - }, - 'role': ['aut'] + "entity": {"type": "bf:Person", "authorized_access_point": "Collectif"}, + "role": ["aut"], } ] @@ -538,40 +460,34 @@ def test_marc21_to_contribution(): marc21json = create_record(marc21xml) data = marc21.do(marc21json) - contribution = data.get('contribution') + contribution = data.get("contribution") assert contribution == [ { - 'entity': { - 'type': 'bf:Person', - 'authorized_access_point': 'Jean-Paul II, Pape, 1954' + "entity": { + "type": "bf:Person", + "authorized_access_point": "Jean-Paul II, Pape, 1954", }, - 'role': ['aut'] + "role": ["aut"], }, { - 'entity': { - 'authorized_access_point': - 'Dumont, Jean, 1921-2014, Historien', - 'type': 'bf:Person' + "entity": { + "authorized_access_point": "Dumont, Jean, 1921-2014, Historien", + "type": "bf:Person", }, - 'role': ['edt'] + "role": ["edt"], }, { - 'entity': { - 'type': 'bf:Organisation', - 'authorized_access_point': 'RERO' - }, - 'role': ['ctb'] + "entity": {"type": "bf:Organisation", "authorized_access_point": "RERO"}, + "role": ["ctb"], }, { - 'entity': { - 'type': 'bf:Organisation', - 'authorized_access_point': - 'Biennale de céramique contemporaine (17 : 2003 : ' - 'Châteauroux)' + "entity": { + "type": "bf:Organisation", + "authorized_access_point": "Biennale de céramique contemporaine (17 : 2003 : " + "Châteauroux)", }, - 'role': ['aut'] - } - + "role": ["aut"], + }, ] @@ -595,21 +511,21 @@ def test_marc21_to_contribution_and_translator(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('contribution') == [ + assert data.get("contribution") == [ { - 'entity': { - 'type': 'bf:Person', - 'authorized_access_point': 'Peeters, Hagar' + "entity": { + "type": "bf:Person", + "authorized_access_point": "Peeters, Hagar", }, - 
'role': ['aut'] + "role": ["aut"], }, { - 'entity': { - 'type': 'bf:Person', - 'authorized_access_point': 'Maufroy, Sandrine' + "entity": { + "type": "bf:Person", + "authorized_access_point": "Maufroy, Sandrine", }, - 'role': ['trl'] - } + "role": ["trl"], + }, ] @@ -637,22 +553,22 @@ def test_marc21_electronicLocator_ebooks(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('electronicLocator') == [ + assert data.get("electronicLocator") == [ { - 'url': 'http://site1.org/resources/1', - 'type': 'resource', - 'source': 'ebibliomedia' + "url": "http://site1.org/resources/1", + "type": "resource", + "source": "ebibliomedia", }, { - 'url': 'http://site5.org/resources/1', - 'type': 'resource', - 'source': 'mv-cantook' + "url": "http://site5.org/resources/1", + "type": "resource", + "source": "mv-cantook", }, { - 'url': 'http://site2.org/resources/2', - 'type': 'relatedResource', - 'content': 'coverImage' - } + "url": "http://site2.org/resources/2", + "type": "relatedResource", + "content": "coverImage", + }, ] @@ -672,10 +588,10 @@ def test_marc21_cover_art_ebooks(): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('electronicLocator') == [ + assert data.get("electronicLocator") == [ { - 'url': 'http://site2.org/resources/2', - 'type': 'relatedResource', - 'content': 'coverImage' + "url": "http://site2.org/resources/2", + "type": "relatedResource", + "content": "coverImage", } ] diff --git a/tests/unit/documents/test_documents_dojson_marc21.py b/tests/unit/documents/test_documents_dojson_marc21.py index 34c70e7298..ec8aefefdb 100644 --- a/tests/unit/documents/test_documents_dojson_marc21.py +++ b/tests/unit/documents/test_documents_dojson_marc21.py @@ -31,324 +31,300 @@ def add_created_updated(record, updated=False): """Adds _created and _updated to record.""" date = datetime.now(timezone.utc) - record['_created'] = date.isoformat() + record["_created"] = date.isoformat() if updated: - record['_updated'] = date.isoformat() + record["_updated"] = date.isoformat() else: - record['_updated'] = '2027-07-07T07:07:07.000000+00:00' + record["_updated"] = "2027-07-07T07:07:07.000000+00:00" return date, record def test_pid_to_marc21(app, marc21_record): """Test PID to MARC21 transformation.""" record = { - 'pid': '12345678', - 'language': [{ - "type": "bf:Language", - "value": "fre" - }], - 'fiction_statement': 'fiction', - 'provisionActivity': [{ - '_text': [{ - 'language': 'default', - 'value': 'Paris : Ed. Cornélius, 2007-' - }], - 'place': [{ - 'country': 'fr', - 'type': 'bf:Place' - }], - 'startDate': 2007, - 'endDate': 2020, - 'statement': [{ - 'label': [{'value': 'Paris'}], - 'type': 'bf:Place' - }, { - 'label': [{'value': 'Ed. Cornélius'}], - 'type': 'bf:Agent' - }, { - 'label': [{'value': '2007-2020'}], - 'type': 'Date' - }], - 'type': 'bf:Publication' - }] + "pid": "12345678", + "language": [{"type": "bf:Language", "value": "fre"}], + "fiction_statement": "fiction", + "provisionActivity": [ + { + "_text": [ + {"language": "default", "value": "Paris : Ed. Cornélius, 2007-"} + ], + "place": [{"country": "fr", "type": "bf:Place"}], + "startDate": 2007, + "endDate": 2020, + "statement": [ + {"label": [{"value": "Paris"}], "type": "bf:Place"}, + {"label": [{"value": "Ed. 
Cornélius"}], "type": "bf:Agent"}, + {"label": [{"value": "2007-2020"}], "type": "Date"}, + ], + "type": "bf:Publication", + } + ], } date, record = add_created_updated(record, True) result = to_marc21.do(record) marc21 = deepcopy(marc21_record) - updated = date.strftime('%Y%m%d%H%M%S.0') - created = date.strftime('%y%m%d') - marc21.update({ - '__order__': ('leader', '001', '005', '008', '264_1'), - '001': '12345678', - '005': updated, - '008': f'{created}m20072020xx#|||||||||||||||1|fre|c', - '264_1': { - '__order__': ('a', 'b', 'c'), - 'a': 'Paris', - 'b': 'Ed. Cornélius', - 'c': '2007-2020' + updated = date.strftime("%Y%m%d%H%M%S.0") + created = date.strftime("%y%m%d") + marc21.update( + { + "__order__": ("leader", "001", "005", "008", "264_1"), + "001": "12345678", + "005": updated, + "008": f"{created}m20072020xx#|||||||||||||||1|fre|c", + "264_1": { + "__order__": ("a", "b", "c"), + "a": "Paris", + "b": "Ed. Cornélius", + "c": "2007-2020", + }, } - }) + ) assert result == marc21 # test fiction - assert result['008'][33] == '1' + assert result["008"][33] == "1" def test_identified_by_to_marc21(app, marc21_record): """Test identifiedBy to MARC21 transformation.""" record = { - "identifiedBy": [{ - "type": "bf:Isbn", - "value": "9782824606835" - }, { - "type": "bf:Isbn", - "value": "12345678901??", - "status": "status", - "qualifier": "qualifier" - }] + "identifiedBy": [ + {"type": "bf:Isbn", "value": "9782824606835"}, + { + "type": "bf:Isbn", + "value": "12345678901??", + "status": "status", + "qualifier": "qualifier", + }, + ] } date, record = add_created_updated(record) result = to_marc21.do(record) record = deepcopy(marc21_record) # updated = date.strftime('%Y%m%d%H%M%S.0') - record.update({ - '__order__': ('leader', '005', '008', '020__', '020__'), - '' - '020__': ({ - '__order__': ('a', ), - 'a': '9782824606835' - }, { - '__order__': ('z', 'q'), - 'z': '12345678901??', - 'q': 'qualifier' - }) - }) + record.update( + { + "__order__": ("leader", "005", "008", "020__", "020__"), + "" + "020__": ( + {"__order__": ("a",), "a": "9782824606835"}, + {"__order__": ("z", "q"), "z": "12345678901??", "q": "qualifier"}, + ), + } + ) assert result == record def test_title_to_marc21(app, marc21_record): """Test title to MARC21 transformation.""" record = { - 'title': [{ - 'type': 'bf:Title', - 'mainTitle': [{'value': 'Kunst der Farbe'}], - 'subtitle': [{'value': 'Studienausgabe'}] - }], - 'responsibilityStatement': [ - [{'value': 'Johannes Itten'}], - [{'value': "traduit de l'allemand par Valérie Bourgeois"}] - ] + "title": [ + { + "type": "bf:Title", + "mainTitle": [{"value": "Kunst der Farbe"}], + "subtitle": [{"value": "Studienausgabe"}], + } + ], + "responsibilityStatement": [ + [{"value": "Johannes Itten"}], + [{"value": "traduit de l'allemand par Valérie Bourgeois"}], + ], } date, record = add_created_updated(record) result = to_marc21.do(record) record = deepcopy(marc21_record) - record.update({ - '__order__': ('leader', '005', '008', '2450_'), - '2450_': { - '__order__': ('a', 'b', 'c'), - 'a': 'Kunst der Farbe', - 'b': 'Studienausgabe', - 'c': "Johannes Itten ; traduit de l'allemand par Valérie Bourgeois" + record.update( + { + "__order__": ("leader", "005", "008", "2450_"), + "2450_": { + "__order__": ("a", "b", "c"), + "a": "Kunst der Farbe", + "b": "Studienausgabe", + "c": "Johannes Itten ; traduit de l'allemand par Valérie Bourgeois", + }, } - }) + ) assert result == record record = { - 'title': [{ - 'type': 'bf:Title', - 'mainTitle': [{'value': 'Statistique'}], - 'subtitle': [{ - 
'value': 'exercices corrigés avec rappels de cours'}], - 'part': [{ - 'partNumber': [{'value': 'T. 1'}], - 'partName': [{ - 'value': 'Licence ès sciences économiques, 1ère année, ' - 'étudiants de Grandes écoles' - }] - }, { - 'partNumber': [{'value': 'Section 2'}], - 'partName': [{'value': 'Grandes écoles'}] - }] - }], - 'responsibilityStatement': [ - [{'value': 'Edmond Berrebi'}] - ] + "title": [ + { + "type": "bf:Title", + "mainTitle": [{"value": "Statistique"}], + "subtitle": [{"value": "exercices corrigés avec rappels de cours"}], + "part": [ + { + "partNumber": [{"value": "T. 1"}], + "partName": [ + { + "value": "Licence ès sciences économiques, 1ère année, " + "étudiants de Grandes écoles" + } + ], + }, + { + "partNumber": [{"value": "Section 2"}], + "partName": [{"value": "Grandes écoles"}], + }, + ], + } + ], + "responsibilityStatement": [[{"value": "Edmond Berrebi"}]], } date, record = add_created_updated(record) result = to_marc21.do(record) record = deepcopy(marc21_record) - record.update({ - '__order__': ('leader', '005', '008', '2450_'), - '2450_': { - '__order__': ('a', 'b', 'c', 'n', 'p', 'n', 'p'), - 'a': 'Statistique', - 'b': 'exercices corrigés avec rappels de cours', - 'c': 'Edmond Berrebi', - 'n': ('T. 1', 'Section 2'), - 'p': ('Licence ès sciences économiques, 1ère année, étudiants de ' - 'Grandes écoles', - 'Grandes écoles') + record.update( + { + "__order__": ("leader", "005", "008", "2450_"), + "2450_": { + "__order__": ("a", "b", "c", "n", "p", "n", "p"), + "a": "Statistique", + "b": "exercices corrigés avec rappels de cours", + "c": "Edmond Berrebi", + "n": ("T. 1", "Section 2"), + "p": ( + "Licence ès sciences économiques, 1ère année, étudiants de " + "Grandes écoles", + "Grandes écoles", + ), + }, } - }) + ) assert result == record record = { - 'title': [{ - 'mainTitle': [{'value': 'Suisse'}], - 'type': 'bf:Title' - }, { - 'mainTitle': [{'value': 'Schweiz'}], - 'type': 'bf:ParallelTitle' - }, { - 'mainTitle': [{'value': 'Svizzera'}], - 'subtitle': [{'value': 'Le guide Michelin 2020'}], - 'type': 'bf:ParallelTitle' - }] + "title": [ + {"mainTitle": [{"value": "Suisse"}], "type": "bf:Title"}, + {"mainTitle": [{"value": "Schweiz"}], "type": "bf:ParallelTitle"}, + { + "mainTitle": [{"value": "Svizzera"}], + "subtitle": [{"value": "Le guide Michelin 2020"}], + "type": "bf:ParallelTitle", + }, + ] } date, record = add_created_updated(record) result = to_marc21.do(record) record = deepcopy(marc21_record) - record.update({ - '__order__': ('leader', '005', '008', '2450_'), - '2450_': { - '__order__': ('a', 'b'), - 'a': 'Suisse', - 'b': 'Schweiz. Svizzera : Le guide Michelin 2020' + record.update( + { + "__order__": ("leader", "005", "008", "2450_"), + "2450_": { + "__order__": ("a", "b"), + "a": "Suisse", + "b": "Schweiz. 
Svizzera : Le guide Michelin 2020", + }, } - }) + ) assert result == record def test_provision_activity_copyright_date_to_marc21(app, marc21_record): """Test provisionActivity and copyrightDate to MARC21 transformation.""" record = { - 'fiction_statement': 'non_fiction', - "provisionActivity": [{ - "place": [{ - "canton": "vd", - "country": "sz", - "type": "bf:Place" - }], - "startDate": 1980, - "statement": [{ - "label": [{ - "value": "Lausanne" - }], - "type": "bf:Place" - }, { - "label": [{ - "value": "Institut Benjamin Constant" - }], - "type": "bf:Agent" - }, { - "label": [{ - "value": "Genève" - }], - "type": "bf:Place" - }, { - "label": [{ - "value": "Slatkine" - }], - "type": "bf:Agent" - }, { - "label": [{ - "value": "Paris" - }], - "type": "bf:Place" - }, { - "label": [{ - "value": "diff. France : H. Champion" - }], - "type": "bf:Agent" - }, { - "label": [{ - "value": "1980-" - }], - "type": "Date" - }], - "type": "bf:Publication" - }] + "fiction_statement": "non_fiction", + "provisionActivity": [ + { + "place": [{"canton": "vd", "country": "sz", "type": "bf:Place"}], + "startDate": 1980, + "statement": [ + {"label": [{"value": "Lausanne"}], "type": "bf:Place"}, + { + "label": [{"value": "Institut Benjamin Constant"}], + "type": "bf:Agent", + }, + {"label": [{"value": "Genève"}], "type": "bf:Place"}, + {"label": [{"value": "Slatkine"}], "type": "bf:Agent"}, + {"label": [{"value": "Paris"}], "type": "bf:Place"}, + { + "label": [{"value": "diff. France : H. Champion"}], + "type": "bf:Agent", + }, + {"label": [{"value": "1980-"}], "type": "Date"}, + ], + "type": "bf:Publication", + } + ], } date, record = add_created_updated(record) result = to_marc21.do(record) record = deepcopy(marc21_record) - created = date.strftime('%y%m%d') - record.update({ - '__order__': ('leader', '005', '008', '264_1'), - '008': f'{created}s1980||||xx#|||||||||||||||0|||||c', - '264_1': { - '__order__': ('a', 'b', 'a', 'b', 'a', 'b', 'c'), - 'a': ('Lausanne', 'Genève', 'Paris'), - 'b': ('Institut Benjamin Constant', 'Slatkine', - 'diff. France : H. Champion'), - 'c': '1980-' + created = date.strftime("%y%m%d") + record.update( + { + "__order__": ("leader", "005", "008", "264_1"), + "008": f"{created}s1980||||xx#|||||||||||||||0|||||c", + "264_1": { + "__order__": ("a", "b", "a", "b", "a", "b", "c"), + "a": ("Lausanne", "Genève", "Paris"), + "b": ( + "Institut Benjamin Constant", + "Slatkine", + "diff. France : H. Champion", + ), + "c": "1980-", + }, } - }) + ) assert result == record record = { - "provisionActivity": [{ - "endDate": 1975, - "place": [{ - "canton": "ne", - "country": "sz", - "type": "bf:Place" - }], - "startDate": 1907, - "statement": [{ - "label": [{ - "value": "La Chaux-de-Fonds" - } - ], - "type": "bf:Place" - }, { - "label": [{ - "value": "Union Chrétienne de Jeunes Gens" - }], - "type": "bf:Agent" - }, { - "label": [{ - "value": "1907-1975" - }], - "type": "Date" - }], - "type": "bf:Publication" - }, { - "statement": [{ - "label": [{ - "value": "La Chaux-de-Fonds" - }], - "type": "bf:Place" - }, { - "label": [{ - "value": "[successivement] Impr. C. & J. " - "Robert-Tissot, Imp. 
Robert-Tissot & Fils" - }], - "type": "bf:Agent" - }], - "type": "bf:Manufacture" - }] + "provisionActivity": [ + { + "endDate": 1975, + "place": [{"canton": "ne", "country": "sz", "type": "bf:Place"}], + "startDate": 1907, + "statement": [ + {"label": [{"value": "La Chaux-de-Fonds"}], "type": "bf:Place"}, + { + "label": [{"value": "Union Chrétienne de Jeunes Gens"}], + "type": "bf:Agent", + }, + {"label": [{"value": "1907-1975"}], "type": "Date"}, + ], + "type": "bf:Publication", + }, + { + "statement": [ + {"label": [{"value": "La Chaux-de-Fonds"}], "type": "bf:Place"}, + { + "label": [ + { + "value": "[successivement] Impr. C. & J. " + "Robert-Tissot, Imp. Robert-Tissot & Fils" + } + ], + "type": "bf:Agent", + }, + ], + "type": "bf:Manufacture", + }, + ] } date, record = add_created_updated(record) result = to_marc21.do(record) record = deepcopy(marc21_record) - created = date.strftime('%y%m%d') - record.update({ - '__order__': ('leader', '005', '008', '264_1', '264_3'), - '008': f'{created}m19071975xx#|||||||||||||||||||||c', - '264_1': { - '__order__': ('a', 'b', 'c'), - 'a': 'La Chaux-de-Fonds', - 'b': 'Union Chrétienne de Jeunes Gens', - 'c': '1907-1975' - }, - '264_3': { - '__order__': ('a', 'b'), - 'a': 'La Chaux-de-Fonds', - 'b': '[successivement] Impr. C. & J. ' - 'Robert-Tissot, Imp. Robert-Tissot & Fils' + created = date.strftime("%y%m%d") + record.update( + { + "__order__": ("leader", "005", "008", "264_1", "264_3"), + "008": f"{created}m19071975xx#|||||||||||||||||||||c", + "264_1": { + "__order__": ("a", "b", "c"), + "a": "La Chaux-de-Fonds", + "b": "Union Chrétienne de Jeunes Gens", + "c": "1907-1975", + }, + "264_3": { + "__order__": ("a", "b"), + "a": "La Chaux-de-Fonds", + "b": "[successivement] Impr. C. & J. " + "Robert-Tissot, Imp. 
Robert-Tissot & Fils", + }, } - }) + ) assert result == record @@ -356,24 +332,23 @@ def test_physical_description_to_marc21(app, marc21_record): """Test physical_description to MARC21 transformation.""" record = { "extent": "159 p.", - "note": [{ - "label": "fig.", - "noteType": "otherPhysicalDetails" - }], - "dimensions": ["33 cm"] + "note": [{"label": "fig.", "noteType": "otherPhysicalDetails"}], + "dimensions": ["33 cm"], } date, record = add_created_updated(record) result = to_marc21.do(record) record = deepcopy(marc21_record) - record.update({ - '__order__': ('leader', '005', '008', '300__'), - '300__': { - '__order__': ('a', 'b', 'c'), - 'a': '159 p.', - 'b': 'fig.', - 'c': '33 cm' + record.update( + { + "__order__": ("leader", "005", "008", "300__"), + "300__": { + "__order__": ("a", "b", "c"), + "a": "159 p.", + "b": "fig.", + "c": "33 cm", + }, } - }) + ) assert result == record record = { @@ -381,23 +356,24 @@ def test_physical_description_to_marc21(app, marc21_record): "duration": ["1h42"], "dimensions": ["In-plano", "128ᵒ"], "bookFormat": ["128ᵒ", "in-plano"], - "note": [{ - "label": "accompanying material", - "noteType": "accompanyingMaterial" - }], + "note": [ + {"label": "accompanying material", "noteType": "accompanyingMaterial"} + ], } date, record = add_created_updated(record) result = to_marc21.do(record) record = deepcopy(marc21_record) - record.update({ - '__order__': ('leader', '005', '008', '300__'), - '300__': { - '__order__': ('a', 'c', 'e'), - 'a': '1 DVD-vidéo (1h42)', - 'c': 'in-plano ; 128ᵒ', - 'e': 'accompanying material' + record.update( + { + "__order__": ("leader", "005", "008", "300__"), + "300__": { + "__order__": ("a", "c", "e"), + "a": "1 DVD-vidéo (1h42)", + "c": "in-plano ; 128ᵒ", + "e": "accompanying material", + }, } - }) + ) assert result == record record = { @@ -405,394 +381,442 @@ def test_physical_description_to_marc21(app, marc21_record): "duration": ["1h42"], "productionMethod": ["rdapm:1001"], "illustrativeContent": ["illustrations"], - "colorContent": ["rdacc:1002"] + "colorContent": ["rdacc:1002"], } date, record = add_created_updated(record) result = to_marc21.do(record) record = deepcopy(marc21_record) - record.update({ - '__order__': ('leader', '005', '008', '300__'), - '300__': { - '__order__': ('a', 'b'), - 'a': '1 DVD-vidéo (1h42)', - 'b': 'blueline process ; illustrations ; black and white' + record.update( + { + "__order__": ("leader", "005", "008", "300__"), + "300__": { + "__order__": ("a", "b"), + "a": "1 DVD-vidéo (1h42)", + "b": "blueline process ; illustrations ; black and white", + }, } - }) + ) assert result == record -def test_subjects_to_marc21(app, mef_agents_url, mef_concepts_url, - marc21_record, mef_record_with_idref_gnd, - mef_concept1): +def test_subjects_to_marc21( + app, + mef_agents_url, + mef_concepts_url, + marc21_record, + mef_record_with_idref_gnd, + mef_concept1, +): """Test subjects to MARC21 transformation.""" record = { - 'subjects': [{ - 'entity': { - 'type': 'bf:Topic', - 'source': 'rero', - 'authorized_access_point': 'Roman pour la jeunesse' - } - }, { - 'entity': { - '$ref': - f'{mef_concepts_url}/api/concepts/idref/ent_concept_idref', - 'pid': 'ent_concept' - } - }, { - 'entity': { - 'date_of_birth': '1923', - 'date_of_death': '1999', - 'preferred_name': 'Fujimoto, Satoko', - 'type': 'bf:Person' - }, - 'role': ['ctb', 'aut'] - }, { - 'entity': { - 'conference': False, - 'preferred_name': 'Université de Genève', - 'type': 'bf:Organisation' - }, - 'role': ['ctb'] - }, { - 'entity': { - '$ref': 
f'{mef_agents_url}/api/agents/gnd/004058518', - 'pid': '5890765', - 'type': 'bf:Organisation' - }, - 'role': ['aut'] - }, { - 'entity': { - 'conference': True, - 'conference_date': '1989', - 'numbering': '4', - 'place': 'Lausanne', - 'preferred_name': 'Congrès des animaux volants', - 'type': 'bf:Organisation' - }, - 'role': ['aut'] - }, { - 'entity': { - 'authorized_access_point': - 'Bases de donn\u00e9esi (Voltenauer, Marc)', - 'type': 'bf:Work', - 'identifiedBy': { - 'type': 'RERO', - 'value': 'A001234567', - 'source': 'rero' + "subjects": [ + { + "entity": { + "type": "bf:Topic", + "source": "rero", + "authorized_access_point": "Roman pour la jeunesse", } - } - }, { - 'entity': { - 'authorized_access_point': 'Suisse', - 'identifiedBy': { - 'type': 'IdRef', - 'value': '027249654' + }, + { + "entity": { + "$ref": f"{mef_concepts_url}/api/concepts/idref/ent_concept_idref", + "pid": "ent_concept", + } + }, + { + "entity": { + "date_of_birth": "1923", + "date_of_death": "1999", + "preferred_name": "Fujimoto, Satoko", + "type": "bf:Person", }, - 'source': 'rero', - 'type': 'bf:Place' - } - }, { - 'entity': { - 'authorized_access_point': '2500 av. J.-C.-20e siècle', - 'type': 'bf:Temporal', - 'identifiedBy': { - 'type': 'RERO', 'value': 'A026984216' + "role": ["ctb", "aut"], + }, + { + "entity": { + "conference": False, + "preferred_name": "Université de Genève", + "type": "bf:Organisation", + }, + "role": ["ctb"], + }, + { + "entity": { + "$ref": f"{mef_agents_url}/api/agents/gnd/004058518", + "pid": "5890765", + "type": "bf:Organisation", + }, + "role": ["aut"], + }, + { + "entity": { + "conference": True, + "conference_date": "1989", + "numbering": "4", + "place": "Lausanne", + "preferred_name": "Congrès des animaux volants", + "type": "bf:Organisation", + }, + "role": ["aut"], + }, + { + "entity": { + "authorized_access_point": "Bases de donn\u00e9esi (Voltenauer, Marc)", + "type": "bf:Work", + "identifiedBy": { + "type": "RERO", + "value": "A001234567", + "source": "rero", + }, } - } - }] + }, + { + "entity": { + "authorized_access_point": "Suisse", + "identifiedBy": {"type": "IdRef", "value": "027249654"}, + "source": "rero", + "type": "bf:Place", + } + }, + { + "entity": { + "authorized_access_point": "2500 av. J.-C.-20e siècle", + "type": "bf:Temporal", + "identifiedBy": {"type": "RERO", "value": "A026984216"}, + } + }, + ] } date, record = add_created_updated(record) result = to_marc21.do(record) record = deepcopy(marc21_record) - record.update({ - '__order__': ('leader', '005', '008', '650__', '650__', '6001_', - '610__', '610__', '611__', '600__', '651__', '648_7'), - '650__': ( - GroupableOrderedDict({'a': 'Roman pour la jeunesse'}), - GroupableOrderedDict({'a': 'Antienzymes'}) - ), - '6001_': ( - GroupableOrderedDict({ - 'a': 'Fujimoto, Satoko', - 'd': '1923 - 1999' - }) - ), - '600__': ( - GroupableOrderedDict({ - 't': 'Bases de donnéesi (Voltenauer, Marc)', - '2': 'rero', - '0': 'A001234567' - }) - ), - '610__': ( - GroupableOrderedDict({'a': 'Université de Genève'}), - { - '__order__': ('a', '0', '0'), - 'a': 'Université de Genève', - '0': ('(idref)02643136X', '(gnd)004058518') - } - ), - '611__': ( - GroupableOrderedDict({ - 'a': 'Congrès des animaux volants', - 'd': '1989' - }) - ), - '651__': ( - GroupableOrderedDict({ - 'a': 'Suisse', - '2': 'idref', - '0': '027249654' - }) - ), - '648_7': ( - GroupableOrderedDict({ - 'a': '2500 av. 
J.-C.-20e siècle',
-                '2': 'rero',
-                '0': 'A026984216'
-            })
-        )
-    })
+    record.update(
+        {
+            "__order__": (
+                "leader",
+                "005",
+                "008",
+                "650__",
+                "650__",
+                "6001_",
+                "610__",
+                "610__",
+                "611__",
+                "600__",
+                "651__",
+                "648_7",
+            ),
+            "650__": (
+                GroupableOrderedDict({"a": "Roman pour la jeunesse"}),
+                GroupableOrderedDict({"a": "Antienzymes"}),
+            ),
+            "6001_": (
+                GroupableOrderedDict({"a": "Fujimoto, Satoko", "d": "1923 - 1999"})
+            ),
+            "600__": (
+                GroupableOrderedDict(
+                    {
+                        "t": "Bases de donnéesi (Voltenauer, Marc)",
+                        "2": "rero",
+                        "0": "A001234567",
+                    }
+                )
+            ),
+            "610__": (
+                GroupableOrderedDict({"a": "Université de Genève"}),
+                {
+                    "__order__": ("a", "0", "0"),
+                    "a": "Université de Genève",
+                    "0": ("(idref)02643136X", "(gnd)004058518"),
+                },
+            ),
+            "611__": (
+                GroupableOrderedDict({"a": "Congrès des animaux volants", "d": "1989"})
+            ),
+            "651__": (
+                GroupableOrderedDict({"a": "Suisse", "2": "idref", "0": "027249654"})
+            ),
+            "648_7": (
+                GroupableOrderedDict(
+                    {"a": "2500 av. J.-C.-20e siècle", "2": "rero", "0": "A026984216"}
+                )
+            ),
+        }
+    )
-    assert result['__order__'] == record['__order__']
-    assert result['650__'] == record['650__']
-    assert result['600__'] == record['600__']
-    assert result['6001_'] == record['6001_']
-    assert result['610__'] == record['610__']
-    assert result['611__'] == record['611__']
-    assert result['651__'] == record['651__']
-    assert result['648_7'] == record['648_7']
-
-
-def test_genre_form_to_marc21(app, mef_concepts_url, marc21_record,
-                              mef_concept1):
+    assert result["__order__"] == record["__order__"]
+    assert result["650__"] == record["650__"]
+    assert result["600__"] == record["600__"]
+    assert result["6001_"] == record["6001_"]
+    assert result["610__"] == record["610__"]
+    assert result["611__"] == record["611__"]
+    assert result["651__"] == record["651__"]
+    assert result["648_7"] == record["648_7"]
+
+
+def test_genre_form_to_marc21(app, mef_concepts_url, marc21_record, mef_concept1):
     """Test genreForm to MARC21 transformation."""
     record = {
-        'genreForm': [{
-            'entity': {
-                'type': 'bf:Topic',
-                'source': 'rero',
-                'authorized_access_point': 'Roman pour la jeunesse'
-            }
-        }, {
-            'entity': {
-                '$ref':
-                    f'{mef_concepts_url}/api/concepts/idref/ent_concept_idref',
-                'pid': 'ent_concept'
-            }
-        }]
+        "genreForm": [
+            {
+                "entity": {
+                    "type": "bf:Topic",
+                    "source": "rero",
+                    "authorized_access_point": "Roman pour la jeunesse",
+                }
+            },
+            {
+                "entity": {
+                    "$ref": f"{mef_concepts_url}/api/concepts/idref/ent_concept_idref",
+                    "pid": "ent_concept",
+                }
+            },
+        ]
     }
     date, record = add_created_updated(record)
     result = to_marc21.do(record)
     record = deepcopy(marc21_record)
-    record.update({
-        '__order__': ('leader', '005', '008', '655__', '655__'),
-        '655__': (
-            GroupableOrderedDict({'a': 'Roman pour la jeunesse'}),
-            GroupableOrderedDict({'a': 'Antienzymes'})
-        )
-    })
+    record.update(
+        {
+            "__order__": ("leader", "005", "008", "655__", "655__"),
+            "655__": (
+                GroupableOrderedDict({"a": "Roman pour la jeunesse"}),
+                GroupableOrderedDict({"a": "Antienzymes"}),
+            ),
+        }
+    )
-    assert result['__order__'] == record['__order__']
-    assert result['655__'] == record['655__']
-
-
-def test_contribution_to_marc21(app, mef_agents_url, marc21_record,
-                                mef_record_with_idref_rero,
-                                mef_record_with_idref_gnd,
-                                mef_record_with_idref_gnd_rero):
+    assert result["__order__"] == record["__order__"]
+    assert result["655__"] == record["655__"]
+
+
+def test_contribution_to_marc21(
+    app,
+    mef_agents_url,
+    marc21_record,
+    mef_record_with_idref_rero,
+
mef_record_with_idref_gnd, + mef_record_with_idref_gnd_rero, +): """Test contribution to MARC21 transformation.""" record = { - 'contribution': [{ - 'entity': { - 'date_of_birth': '1923', - 'date_of_death': '1999', - 'preferred_name': 'Fujimoto, Satoko', - 'type': 'bf:Person' - }, - 'role': ['ctb', 'aut'] - }, { - 'entity': { - '$ref': f'{mef_agents_url}/idref/' - 'mef_record_with_idref_rero', - 'pid': '6627670', - 'type': 'bf:Person' - }, - 'role': ['trl'] - }, { - 'entity': { - 'conference': False, - 'preferred_name': 'Université de Genève', - 'type': 'bf:Organisation' - }, - 'role': ['ctb'] - }, { - 'entity': { - '$ref': f'{mef_agents_url}/api/agents/gnd/' - 'mef_record_with_idref_gnd', - 'pid': '5890765', - 'type': 'bf:Organisation' - }, - 'role': ['aut'] - }, { - 'entity': { - 'conference': True, - 'conference_date': '1989', - 'numbering': '4', - 'place': 'Lausanne', - 'preferred_name': 'Congrès des animaux volants', - 'type': 'bf:Organisation' - }, - 'role': ['aut'] - }, { - 'entity': { - '$ref': f'{mef_agents_url}/idref/' - 'mef_record_with_idref_gnd_rero', - 'pid': '5777972', - 'type': 'bf:Organisation' - }, - 'role': ['aut'] - }] + "contribution": [ + { + "entity": { + "date_of_birth": "1923", + "date_of_death": "1999", + "preferred_name": "Fujimoto, Satoko", + "type": "bf:Person", + }, + "role": ["ctb", "aut"], + }, + { + "entity": { + "$ref": f"{mef_agents_url}/idref/" "mef_record_with_idref_rero", + "pid": "6627670", + "type": "bf:Person", + }, + "role": ["trl"], + }, + { + "entity": { + "conference": False, + "preferred_name": "Université de Genève", + "type": "bf:Organisation", + }, + "role": ["ctb"], + }, + { + "entity": { + "$ref": f"{mef_agents_url}/api/agents/gnd/" + "mef_record_with_idref_gnd", + "pid": "5890765", + "type": "bf:Organisation", + }, + "role": ["aut"], + }, + { + "entity": { + "conference": True, + "conference_date": "1989", + "numbering": "4", + "place": "Lausanne", + "preferred_name": "Congrès des animaux volants", + "type": "bf:Organisation", + }, + "role": ["aut"], + }, + { + "entity": { + "$ref": f"{mef_agents_url}/idref/" "mef_record_with_idref_gnd_rero", + "pid": "5777972", + "type": "bf:Organisation", + }, + "role": ["aut"], + }, + ] } date, record = add_created_updated(record) with mock.patch( - 'rero_ils.modules.entities.remote_entities.api.' - 'RemoteEntity.get_entity', - side_effect=[mef_record_with_idref_rero, mef_record_with_idref_gnd, - mef_record_with_idref_gnd_rero] + "rero_ils.modules.entities.remote_entities.api." 
"RemoteEntity.get_entity", + side_effect=[ + mef_record_with_idref_rero, + mef_record_with_idref_gnd, + mef_record_with_idref_gnd_rero, + ], ): result = to_marc21.do(record) record = deepcopy(marc21_record) - record.update({ - '__order__': ('leader', '005', '008', '7001_', '7001_', '710__', - '710__', '711__', '711__'), - '7001_': ({ - '__order__': ('a', 'd', '4', '4'), - 'a': 'Fujimoto, Satoko', - 'd': '1923 - 1999', - '4': ('ctb', 'aut') - }, { - '__order__': ('a', '4', '0', '0'), - 'a': 'Honnoré, Patrick', - '4': 'trl', - '0': ('(idref)072277742', '(rero)A009220673'), - }), - '710__': ({ - '__order__': ('a', '4'), - 'a': 'Université de Genève', - '4': 'ctb' - }, { - '__order__': ('a', '4', '0', '0'), - 'a': 'Université de Genève', - '4': 'aut', - '0': ('(idref)02643136X', '(gnd)004058518'), - }), - '711__': ({ - '__order__': ('a', 'd', '4'), - 'a': 'Congrès des animaux volants', - 'd': '1989', - '4': 'aut', - }, { - '__order__': ('a', '4', '0', '0', '0'), - 'a': 'Congrès ouvrier français', - '4': 'aut', - '0': ('(idref)03255608X', '(rero)A005462931', '(gnd)050343211') - }) - }) - assert result['__order__'] == record['__order__'] - assert result['7001_'] == record['7001_'] - assert result['710__'] == record['710__'] - assert result['711__'] == record['711__'] + record.update( + { + "__order__": ( + "leader", + "005", + "008", + "7001_", + "7001_", + "710__", + "710__", + "711__", + "711__", + ), + "7001_": ( + { + "__order__": ("a", "d", "4", "4"), + "a": "Fujimoto, Satoko", + "d": "1923 - 1999", + "4": ("ctb", "aut"), + }, + { + "__order__": ("a", "4", "0", "0"), + "a": "Honnoré, Patrick", + "4": "trl", + "0": ("(idref)072277742", "(rero)A009220673"), + }, + ), + "710__": ( + {"__order__": ("a", "4"), "a": "Université de Genève", "4": "ctb"}, + { + "__order__": ("a", "4", "0", "0"), + "a": "Université de Genève", + "4": "aut", + "0": ("(idref)02643136X", "(gnd)004058518"), + }, + ), + "711__": ( + { + "__order__": ("a", "d", "4"), + "a": "Congrès des animaux volants", + "d": "1989", + "4": "aut", + }, + { + "__order__": ("a", "4", "0", "0", "0"), + "a": "Congrès ouvrier français", + "4": "aut", + "0": ("(idref)03255608X", "(rero)A005462931", "(gnd)050343211"), + }, + ), + } + ) + assert result["__order__"] == record["__order__"] + assert result["7001_"] == record["7001_"] + assert result["710__"] == record["710__"] + assert result["711__"] == record["711__"] def test_type_to_marc21(app, marc21_record): """Test type to MARC21 transformation.""" record = { - 'type': [{ - 'main_type': 'docmaintype_comic', - 'subtype': 'docsubtype_manga' - }, { - 'main_type': 'docmaintype_map', - 'subtype': 'docsubtype_atlas' - }] + "type": [ + {"main_type": "docmaintype_comic", "subtype": "docsubtype_manga"}, + {"main_type": "docmaintype_map", "subtype": "docsubtype_atlas"}, + ] } date, record = add_created_updated(record) result = to_marc21.do(record) record = deepcopy(marc21_record) - record.update({ - '__order__': ('leader', '005', '008', '900__', '900__'), - '900__': ({ - '__order__': ('a', 'b'), - 'a': 'docmaintype_comic', - 'b': 'docsubtype_manga' - }, { - '__order__': ('a', 'b'), - 'a': 'docmaintype_map', - 'b': 'docsubtype_atlas' - }) - }) + record.update( + { + "__order__": ("leader", "005", "008", "900__", "900__"), + "900__": ( + { + "__order__": ("a", "b"), + "a": "docmaintype_comic", + "b": "docsubtype_manga", + }, + { + "__order__": ("a", "b"), + "a": "docmaintype_map", + "b": "docsubtype_atlas", + }, + ), + } + ) assert result == record -def test_holdings_items_to_marc21(app, marc21_record, 
document, - item2_lib_sion, - ebook_5, holding_lib_sion_electronic): +def test_holdings_items_to_marc21( + app, marc21_record, document, item2_lib_sion, ebook_5, holding_lib_sion_electronic +): """Test holding items to MARC21 transformation.""" - record = {'pid': document.pid} + record = {"pid": document.pid} date, record = add_created_updated(record) result = to_marc21.do(record, with_holdings_items=False) marc21 = deepcopy(marc21_record) - marc21.update({ - '__order__': ('leader', '001', '005', '008'), - '001': document.pid - }) + marc21.update({"__order__": ("leader", "001", "005", "008"), "001": document.pid}) assert result == marc21 - record = {'pid': document.pid} + record = {"pid": document.pid} _, record = add_created_updated(record) - item2_lib_sion_save_barcode = item2_lib_sion['barcode'] - item2_lib_sion['barcode'] = '87121336' + item2_lib_sion_save_barcode = item2_lib_sion["barcode"] + item2_lib_sion["barcode"] = "87121336" item2_lib_sion.update(item2_lib_sion, dbcommit=True, reindex=True) result = to_marc21.do(record, with_holdings_items=True) marc21 = deepcopy(marc21_record) - marc21.update({ - '__order__': ('leader', '001', '005', '008', '949__'), - '001': 'doc1', - '949__': ({ - '__order__': ('0', '1', '2', '3', '4', '5', 'a'), - '0': 'org2', - '1': 'The district of Sion Libraries', - '2': 'lib4', - '3': 'Library of Sion', - '4': 'loc8', - '5': 'Sion Library Restricted Space', - 'a': '87121336' - }) - }) + marc21.update( + { + "__order__": ("leader", "001", "005", "008", "949__"), + "001": "doc1", + "949__": ( + { + "__order__": ("0", "1", "2", "3", "4", "5", "a"), + "0": "org2", + "1": "The district of Sion Libraries", + "2": "lib4", + "3": "Library of Sion", + "4": "loc8", + "5": "Sion Library Restricted Space", + "a": "87121336", + } + ), + } + ) assert result == marc21 # clean up modified data - item2_lib_sion['barcode'] = item2_lib_sion_save_barcode + item2_lib_sion["barcode"] = item2_lib_sion_save_barcode item2_lib_sion.update(item2_lib_sion, dbcommit=True, reindex=True) - record = {'pid': ebook_5.pid} + record = {"pid": ebook_5.pid} _, record = add_created_updated(record) result = to_marc21.do(record, with_holdings_items=True) marc21 = deepcopy(marc21_record) - marc21.update({ - '__order__': ('leader', '001', '005', '008', '949__'), - '001': 'ebook5', - '949__': { - '__order__': ('0', '1', '2', '3', '4', '5', 'E'), - '0': 'org2', - '1': 'The district of Sion Libraries', - '2': 'lib4', - '3': 'Library of Sion', - '4': 'loc7', - '5': 'Sion Library Public Space', - 'E': - 'https://bm.ebibliomedia.ch/resources/5f780fc22357943b9a83ca3d' + marc21.update( + { + "__order__": ("leader", "001", "005", "008", "949__"), + "001": "ebook5", + "949__": { + "__order__": ("0", "1", "2", "3", "4", "5", "E"), + "0": "org2", + "1": "The district of Sion Libraries", + "2": "lib4", + "3": "Library of Sion", + "4": "loc7", + "5": "Sion Library Public Space", + "E": "https://bm.ebibliomedia.ch/resources/5f780fc22357943b9a83ca3d", + }, } - }) + ) assert result == marc21 diff --git a/tests/unit/documents/test_documents_dojson_slsp.py b/tests/unit/documents/test_documents_dojson_slsp.py index 69bab0228e..7f8440cce3 100644 --- a/tests/unit/documents/test_documents_dojson_slsp.py +++ b/tests/unit/documents/test_documents_dojson_slsp.py @@ -25,7 +25,7 @@ from rero_ils.modules.documents.dojson.contrib.marc21tojson.slsp import marc21 -@mock.patch('requests.Session.get') +@mock.patch("requests.Session.get") def test_marc21_to_contribution(mock_get): """Test dojson marc21_to_contribution.""" 
marc21xml = """ @@ -70,56 +70,55 @@ def test_marc21_to_contribution(mock_get): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('contribution') == [{ - 'entity': { - 'type': 'bf:Person', - 'authorized_access_point': 'Jean-Paul II, Pape, 1954' + assert data.get("contribution") == [ + { + "entity": { + "type": "bf:Person", + "authorized_access_point": "Jean-Paul II, Pape, 1954", + }, + "role": ["aut"], }, - 'role': ['aut'] - }, { - 'entity': { - 'type': 'bf:Person', - 'authorized_access_point': 'Dumont, Jean, 1921-2014, Historien' + { + "entity": { + "type": "bf:Person", + "authorized_access_point": "Dumont, Jean, 1921-2014, Historien", + }, + "role": ["edt"], }, - 'role': ['edt'] - }, { - 'entity': { - 'type': 'bf:Organisation', - 'authorized_access_point': 'RERO' + { + "entity": {"type": "bf:Organisation", "authorized_access_point": "RERO"}, + "role": ["ctb"], }, - 'role': ['ctb'] - }, { - 'entity': { - 'type': 'bf:Organisation', - 'authorized_access_point': - 'Biennale de céramique contemporaine (17 : 2003 : Châteauroux)' + { + "entity": { + "type": "bf:Organisation", + "authorized_access_point": "Biennale de céramique contemporaine (17 : 2003 : Châteauroux)", + }, + "role": ["aut"], }, - 'role': ['aut'] - }] - assert data.get('work_access_point') == [{ - 'creator': { - 'date_of_birth': '1954', - 'numeration': 'II', - 'preferred_name': 'Jean-Paul', - 'qualifier': 'Pape', - 'type': 'bf:Person' + ] + assert data.get("work_access_point") == [ + { + "creator": { + "date_of_birth": "1954", + "numeration": "II", + "preferred_name": "Jean-Paul", + "qualifier": "Pape", + "type": "bf:Person", + }, + "title": "Treaties, etc.", }, - 'title': 'Treaties, etc.' - }, { - 'creator': { - 'preferred_name': 'Santamaría, Germán', - 'type': 'bf:Person' + { + "creator": {"preferred_name": "Santamaría, Germán", "type": "bf:Person"}, + "language": "fre", + "title": "No morirás", }, - 'language': 'fre', - 'title': 'No morirás' - }, { - 'miscellaneous_information': 'language: Coréen', - 'part': [{ - 'partName': 'A.T. et N.T.', - 'partNumber': '000' - }], - 'title': 'Bible' - }] + { + "miscellaneous_information": "language: Coréen", + "part": [{"partName": "A.T. 
et N.T.", "partNumber": "000"}], + "title": "Bible", + }, + ] marc21xml = """ @@ -137,19 +136,22 @@ def test_marc21_to_contribution(mock_get): """ marc21json = create_record(marc21xml) data = marc21.do(marc21json) - assert data.get('work_access_point') == [{ - 'creator': { - 'date_of_birth': '1919', - 'date_of_death': '1990', - 'preferred_name': 'Santamaría, Germán', - 'type': 'bf:Person' + assert data.get("work_access_point") == [ + { + "creator": { + "date_of_birth": "1919", + "date_of_death": "1990", + "preferred_name": "Santamaría, Germán", + "type": "bf:Person", + }, + "title": "No morirás", }, - 'title': 'No morirás' - }, { - 'creator': { - 'date_of_birth': '1919', - 'preferred_name': 'Santamaría, Germán', - 'type': 'bf:Person' + { + "creator": { + "date_of_birth": "1919", + "preferred_name": "Santamaría, Germán", + "type": "bf:Person", + }, + "title": "No morirás", }, - 'title': 'No morirás' - }] + ] diff --git a/tests/unit/documents/test_documents_dojson_unimarc.py b/tests/unit/documents/test_documents_dojson_unimarc.py index 6acd850799..6b3d6c0325 100644 --- a/tests/unit/documents/test_documents_dojson_unimarc.py +++ b/tests/unit/documents/test_documents_dojson_unimarc.py @@ -46,10 +46,9 @@ def test_unimarc_to_type(): """ unimarcjson = create_record(unimarcxml) data = unimarc.do(unimarcjson) - assert data.get('type') == [{ - "main_type": "docmaintype_book", - "subtype": "docsubtype_other_book" - }] + assert data.get("type") == [ + {"main_type": "docmaintype_book", "subtype": "docsubtype_other_book"} + ] unimarcxml = """ @@ -58,9 +57,7 @@ def test_unimarc_to_type(): """ unimarcjson = create_record(unimarcxml) data = unimarc.do(unimarcjson) - assert data.get('type') == [{ - "main_type": "docmaintype_serial" - }] + assert data.get("type") == [{"main_type": "docmaintype_serial"}] unimarcxml = """ @@ -69,9 +66,11 @@ def test_unimarc_to_type(): """ unimarcjson = create_record(unimarcxml) data = unimarc.do(unimarcjson) - assert data.get('type') == [{ - "main_type": "docmaintype_article", - }] + assert data.get("type") == [ + { + "main_type": "docmaintype_article", + } + ] unimarcxml = """ @@ -80,10 +79,9 @@ def test_unimarc_to_type(): """ unimarcjson = create_record(unimarcxml) data = unimarc.do(unimarcjson) - assert data.get('type') == [{ - "main_type": "docmaintype_score", - "subtype": "docsubtype_printed_score" - }] + assert data.get("type") == [ + {"main_type": "docmaintype_score", "subtype": "docsubtype_printed_score"} + ] unimarcxml = """ @@ -92,10 +90,9 @@ def test_unimarc_to_type(): """ unimarcjson = create_record(unimarcxml) data = unimarc.do(unimarcjson) - assert data.get('type') == [{ - "main_type": "docmaintype_score", - "subtype": "docsubtype_printed_score" - }] + assert data.get("type") == [ + {"main_type": "docmaintype_score", "subtype": "docsubtype_printed_score"} + ] unimarcxml = """ @@ -104,10 +101,9 @@ def test_unimarc_to_type(): """ unimarcjson = create_record(unimarcxml) data = unimarc.do(unimarcjson) - assert data.get('type') == [{ - "main_type": "docmaintype_audio", - "subtype": "docsubtype_music" - }] + assert data.get("type") == [ + {"main_type": "docmaintype_audio", "subtype": "docsubtype_music"} + ] unimarcxml = """ @@ -116,10 +112,9 @@ def test_unimarc_to_type(): """ unimarcjson = create_record(unimarcxml) data = unimarc.do(unimarcjson) - assert data.get('type') == [{ - "main_type": "docmaintype_audio", - "subtype": "docsubtype_music" - }] + assert data.get("type") == [ + {"main_type": "docmaintype_audio", "subtype": "docsubtype_music"} + ] unimarcxml = """ @@ 
-128,10 +123,9 @@ def test_unimarc_to_type(): """ unimarcjson = create_record(unimarcxml) data = unimarc.do(unimarcjson) - assert data.get('type') == [{ - "main_type": "docmaintype_movie_series", - "subtype": "docsubtype_movie" - }] + assert data.get("type") == [ + {"main_type": "docmaintype_movie_series", "subtype": "docsubtype_movie"} + ] def test_marc21_to_mode_of_issuance(): @@ -151,10 +145,7 @@ def test_marc21_to_mode_of_issuance(): """ unimarcjson = create_record(unimarcxml) data = unimarc.do(unimarcjson) - assert data.get('issuance') == { - 'main_type': 'rdami:1001', - 'subtype': 'article' - } + assert data.get("issuance") == {"main_type": "rdami:1001", "subtype": "article"} unimarcxml = """ @@ -163,9 +154,9 @@ def test_marc21_to_mode_of_issuance(): """ unimarcjson = create_record(unimarcxml) data = unimarc.do(unimarcjson) - assert data.get('issuance') == { - 'main_type': 'rdami:1001', - 'subtype': 'materialUnit' + assert data.get("issuance") == { + "main_type": "rdami:1001", + "subtype": "materialUnit", } unimarcxml = """ @@ -175,10 +166,7 @@ def test_marc21_to_mode_of_issuance(): """ unimarcjson = create_record(unimarcxml) data = unimarc.do(unimarcjson) - assert data.get('issuance') == { - 'main_type': 'rdami:1001', - 'subtype': 'privateFile' - } + assert data.get("issuance") == {"main_type": "rdami:1001", "subtype": "privateFile"} # rdami:1003 (serial) # serialInSerial > no equivalence @@ -194,10 +182,7 @@ def test_marc21_to_mode_of_issuance(): """ unimarcjson = create_record(unimarcxml) data = unimarc.do(unimarcjson) - assert data.get('issuance') == { - 'main_type': 'rdami:1003', - 'subtype': 'periodical' - } + assert data.get("issuance") == {"main_type": "rdami:1003", "subtype": "periodical"} unimarcxml = """ @@ -209,9 +194,9 @@ def test_marc21_to_mode_of_issuance(): """ unimarcjson = create_record(unimarcxml) data = unimarc.do(unimarcjson) - assert data.get('issuance') == { - 'main_type': 'rdami:1003', - 'subtype': 'monographicSeries' + assert data.get("issuance") == { + "main_type": "rdami:1003", + "subtype": "monographicSeries", } # rdami:1004 (integrating resource) @@ -227,9 +212,9 @@ def test_marc21_to_mode_of_issuance(): """ unimarcjson = create_record(unimarcxml) data = unimarc.do(unimarcjson) - assert data.get('issuance') == { - 'main_type': 'rdami:1004', - 'subtype': 'updatingWebsite' + assert data.get("issuance") == { + "main_type": "rdami:1004", + "subtype": "updatingWebsite", } unimarcxml = """ @@ -242,9 +227,9 @@ def test_marc21_to_mode_of_issuance(): """ unimarcjson = create_record(unimarcxml) data = unimarc.do(unimarcjson) - assert data.get('issuance') == { - 'main_type': 'rdami:1004', - 'subtype': 'updatingWebsite' + assert data.get("issuance") == { + "main_type": "rdami:1004", + "subtype": "updatingWebsite", } unimarcxml = """ @@ -257,9 +242,9 @@ def test_marc21_to_mode_of_issuance(): """ unimarcjson = create_record(unimarcxml) data = unimarc.do(unimarcjson) - assert data.get('issuance') == { - 'main_type': 'rdami:1004', - 'subtype': 'updatingWebsite' + assert data.get("issuance") == { + "main_type": "rdami:1004", + "subtype": "updatingWebsite", } unimarcxml = """ @@ -272,9 +257,9 @@ def test_marc21_to_mode_of_issuance(): """ unimarcjson = create_record(unimarcxml) data = unimarc.do(unimarcjson) - assert data.get('issuance') == { - 'main_type': 'rdami:1004', - 'subtype': 'updatingLoose-leaf' + assert data.get("issuance") == { + "main_type": "rdami:1004", + "subtype": "updatingLoose-leaf", } @@ -297,50 +282,21 @@ def test_unimarc_to_title(): """ unimarcjson = 
create_record(unimarcxml) data = unimarc.do(unimarcjson) - assert data.get('title') == [ + assert data.get("title") == [ { - 'mainTitle': [ + "mainTitle": [{"value": "main title"}], + "subtitle": [{"value": "subtitle"}], + "part": [ { - 'value': 'main title' - } - ], - 'subtitle': [ - { - 'value': 'subtitle' - } - ], - 'part': [ - { - 'partNumber': [ - { - 'value': 'Part Number' - } - ], - 'partName': [ - { - 'value': 'Part Name' - } - ] + "partNumber": [{"value": "Part Number"}], + "partName": [{"value": "Part Name"}], }, - { - 'partName': [ - { - 'value': 'Part Name 2' - } - ] - } - + {"partName": [{"value": "Part Name 2"}]}, ], - 'type': 'bf:Title' + "type": "bf:Title", } ] - assert data.get('responsibilityStatement') == [ - [ - { - 'value': 'responsibility' - } - ] - ] + assert data.get("responsibilityStatement") == [[{"value": "responsibility"}]] # field 200 to bf:Title # field 200 with $a, $f @@ -354,23 +310,10 @@ def test_unimarc_to_title(): """ unimarcjson = create_record(unimarcxml) data = unimarc.do(unimarcjson) - assert data.get('title') == [ - { - 'mainTitle': [ - { - 'value': 'main title' - } - ], - 'type': 'bf:Title' - } - ] - assert data.get('responsibilityStatement') == [ - [ - { - 'value': 'responsibility' - } - ] + assert data.get("title") == [ + {"mainTitle": [{"value": "main title"}], "type": "bf:Title"} ] + assert data.get("responsibilityStatement") == [[{"value": "responsibility"}]] # field 200 to bf:Title # field 200 with $a @@ -383,17 +326,10 @@ def test_unimarc_to_title(): """ unimarcjson = create_record(unimarcxml) data = unimarc.do(unimarcjson) - assert data.get('title') == [ - { - 'mainTitle': [ - { - 'value': 'main title' - } - ], - 'type': 'bf:Title' - } + assert data.get("title") == [ + {"mainTitle": [{"value": "main title"}], "type": "bf:Title"} ] - assert data.get('responsibilityStatement') is None + assert data.get("responsibilityStatement") is None def test_unimarc_to_title_with_alt_graphic_with_bad_lang(): @@ -423,32 +359,18 @@ def test_unimarc_to_title_with_alt_graphic_with_bad_lang(): unimarcjson = create_record(unimarcxml) data = unimarc.do(unimarcjson) - assert data.get('title') == [ + assert data.get("title") == [ { - 'mainTitle': [ - { - 'value': 'Aẖbār min Marrākuš' - }, - { - 'value': 'أخبار من مراكش', - 'language': 'und-arab' - } - + "mainTitle": [ + {"value": "Aẖbār min Marrākuš"}, + {"value": "أخبار من مراكش", "language": "und-arab"}, ], - 'type': 'bf:Title' + "type": "bf:Title", } ] - assert data.get('responsibilityStatement') == [ - [ - { - 'value': 'al-Ṭāhir ibn Ǧullūn' - }, - { - 'value': 'لمبرون', - 'language': 'und-arab' - } - ] + assert data.get("responsibilityStatement") == [ + [{"value": "al-Ṭāhir ibn Ǧullūn"}, {"value": "لمبرون", "language": "und-arab"}] ] @@ -479,31 +401,18 @@ def test_unimarc_to_title_with_alt_graphic(): unimarcjson = create_record(unimarcxml) data = unimarc.do(unimarcjson) - assert data.get('title') == [ + assert data.get("title") == [ { - 'mainTitle': [ - { - 'value': 'Aẖbār min Marrākuš' - }, - { - 'value': 'أخبار من مراكش', - 'language': 'ara-arab' - } + "mainTitle": [ + {"value": "Aẖbār min Marrākuš"}, + {"value": "أخبار من مراكش", "language": "ara-arab"}, ], - 'type': 'bf:Title' + "type": "bf:Title", } ] - assert data.get('responsibilityStatement') == [ - [ - { - 'value': 'al-Ṭāhir ibn Ǧullūn' - }, - { - 'value': 'لمبرون', - 'language': 'ara-arab' - } - ] + assert data.get("responsibilityStatement") == [ + [{"value": "al-Ṭāhir ibn Ǧullūn"}, {"value": "لمبرون", "language": "ara-arab"}] ] @@ -535,73 +444,33 @@ 
def test_unimarctotitle_with_parallel_title(): """ unimarcjson = create_record(unimarcxml) data = unimarc.do(unimarcjson) - assert data.get('title') == [ + assert data.get("title") == [ { - 'mainTitle': [ - { - 'value': 'main title' - } - ], - 'subtitle': [ - { - 'value': 'subtitle' - } - ], - 'part': [ + "mainTitle": [{"value": "main title"}], + "subtitle": [{"value": "subtitle"}], + "part": [ { - 'partNumber': [ - { - 'value': 'Part Number' - } - ], - 'partName': [ - { - 'value': 'Part Name' - } - ] + "partNumber": [{"value": "Part Number"}], + "partName": [{"value": "Part Name"}], } ], - 'type': 'bf:Title' + "type": "bf:Title", }, { - 'mainTitle': [ - { - 'value': 'main parallel title' - } - ], - 'subtitle': [ - { - 'value': 'parallel subtitle' - } - ], - 'part': [ + "mainTitle": [{"value": "main parallel title"}], + "subtitle": [{"value": "parallel subtitle"}], + "part": [ { - 'partNumber': [ - { - 'value': 'Part Number parallel' - } - ], - 'partName': [ - { - 'value': 'Part Name parallel' - } - ] + "partNumber": [{"value": "Part Number parallel"}], + "partName": [{"value": "Part Name parallel"}], } ], - 'type': 'bf:ParallelTitle' - } + "type": "bf:ParallelTitle", + }, ] - assert data.get('responsibilityStatement') == [ - [ - { - 'value': 'responsibility f' - } - ], - [ - { - 'value': 'responsibility g' - } - ] + assert data.get("responsibilityStatement") == [ + [{"value": "responsibility f"}], + [{"value": "responsibility g"}], ] @@ -651,127 +520,55 @@ def test_unimarctotitle_with_parallel_and_variant_title(): """ unimarcjson = create_record(unimarcxml) data = unimarc.do(unimarcjson) - assert data.get('title') == [ + assert data.get("title") == [ { - 'mainTitle': [ - { - 'value': 'main title' - } - ], - 'subtitle': [ - { - 'value': 'subtitle' - } - ], - 'part': [ + "mainTitle": [{"value": "main title"}], + "subtitle": [{"value": "subtitle"}], + "part": [ { - 'partNumber': [ - { - 'value': 'Part Number' - } - ], - 'partName': [ - { - 'value': 'Part Name' - } - ] + "partNumber": [{"value": "Part Number"}], + "partName": [{"value": "Part Name"}], } ], - 'type': 'bf:Title' + "type": "bf:Title", }, { - 'mainTitle': [ + "mainTitle": [{"value": "main parallel title"}], + "subtitle": [{"value": "parallel subtitle"}], + "part": [ { - 'value': 'main parallel title' + "partNumber": [{"value": "Part Number parallel"}], + "partName": [{"value": "Part Name parallel"}], } ], - 'subtitle': [ - { - 'value': 'parallel subtitle' - } - ], - 'part': [ - { - 'partNumber': [ - { - 'value': 'Part Number parallel' - } - ], - 'partName': [ - { - 'value': 'Part Name parallel' - } - ] - } - ], - 'type': 'bf:ParallelTitle' + "type": "bf:ParallelTitle", }, { - 'mainTitle': [ - { - 'value': 'main variant title 512' - } - ], - 'subtitle': [ - { - 'value': 'variant subtitle 512' - } - ], - 'part': [ + "mainTitle": [{"value": "main variant title 512"}], + "subtitle": [{"value": "variant subtitle 512"}], + "part": [ { - 'partNumber': [ - { - 'value': 'Part Number variant 512' - } - ], - 'partName': [ - { - 'value': 'Part Name variant 512' - } - ] + "partNumber": [{"value": "Part Number variant 512"}], + "partName": [{"value": "Part Name variant 512"}], } ], - 'type': 'bf:VariantTitle' + "type": "bf:VariantTitle", }, { - 'mainTitle': [ + "mainTitle": [{"value": "main variant title 514"}], + "subtitle": [{"value": "variant subtitle 514"}], + "part": [ { - 'value': 'main variant title 514' + "partNumber": [{"value": "Part Number variant 514"}], + "partName": [{"value": "Part Name variant 514"}], } ], - 'subtitle': [ - { - 
'value': 'variant subtitle 514' - } - ], - 'part': [ - { - 'partNumber': [ - { - 'value': 'Part Number variant 514' - } - ], - 'partName': [ - { - 'value': 'Part Name variant 514' - } - ] - } - ], - 'type': 'bf:VariantTitle' - } + "type": "bf:VariantTitle", + }, ] - assert data.get('responsibilityStatement') == [ - [ - { - 'value': 'responsibility f' - } - ], - [ - { - 'value': 'responsibility g' - } - ] + assert data.get("responsibilityStatement") == [ + [{"value": "responsibility f"}], + [{"value": "responsibility g"}], ] @@ -788,7 +585,7 @@ def test_unimarc_languages(): """ unimarcjson = create_record(unimarcxml) data = unimarc.do(unimarcjson) - assert data.get('language') == [{'value': 'eng', 'type': 'bf:Language'}] + assert data.get("language") == [{"value": "eng", "type": "bf:Language"}] unimarcxml = """ @@ -801,9 +598,9 @@ def test_unimarc_languages(): """ unimarcjson = create_record(unimarcxml) data = unimarc.do(unimarcjson) - assert data.get('language') == [ - {'value': 'eng', 'type': 'bf:Language'}, - {'value': 'fre', 'type': 'bf:Language'} + assert data.get("language") == [ + {"value": "eng", "type": "bf:Language"}, + {"value": "fre", "type": "bf:Language"}, ] unimarcxml = """ @@ -817,8 +614,8 @@ def test_unimarc_languages(): """ unimarcjson = create_record(unimarcxml) data = unimarc.do(unimarcjson) - assert data.get('language') == [ - {'value': 'eng', 'type': 'bf:Language'}, + assert data.get("language") == [ + {"value": "eng", "type": "bf:Language"}, ] @@ -863,50 +660,44 @@ def test_unimarc_contribution(): """ unimarcjson = create_record(unimarcxml) data = unimarc.do(unimarcjson) - contribution = data.get('contribution') + contribution = data.get("contribution") assert contribution == [ { - 'entity': { - 'type': 'bf:Person', - 'authorized_access_point': 'Jean-Paul II, Pape, 1954' + "entity": { + "type": "bf:Person", + "authorized_access_point": "Jean-Paul II, Pape, 1954", }, - 'role': ['aut'] + "role": ["aut"], }, { - 'entity': { - 'type': 'bf:Person', - 'authorized_access_point': 'Dumont, Jean, 1921-2014, Historien' + "entity": { + "type": "bf:Person", + "authorized_access_point": "Dumont, Jean, 1921-2014, Historien", }, - 'role': ['aut'] + "role": ["aut"], }, { - 'entity': { - 'type': 'bf:Person', - 'authorized_access_point': 'Dicker, J., 1921' + "entity": { + "type": "bf:Person", + "authorized_access_point": "Dicker, J., 1921", }, - 'role': ['aut'] + "role": ["aut"], }, { - 'entity': { - 'type': 'bf:Organisation', - 'authorized_access_point': 'RERO' - }, - 'role': ['aut'] + "entity": {"type": "bf:Organisation", "authorized_access_point": "RERO"}, + "role": ["aut"], }, { - 'entity': { - 'type': 'bf:Organisation', - 'authorized_access_point': 'LOC (1 : 2020-02-02 : London)' + "entity": { + "type": "bf:Organisation", + "authorized_access_point": "LOC (1 : 2020-02-02 : London)", }, - 'role': ['aut'] + "role": ["aut"], }, { - 'entity': { - 'type': 'bf:Organisation', - 'authorized_access_point': 'BNF' - }, - 'role': ['aut'] - } + "entity": {"type": "bf:Organisation", "authorized_access_point": "BNF"}, + "role": ["aut"], + }, ] @@ -924,19 +715,11 @@ def test_unimarc_edition(): """ unimarcjson = create_record(unimarcxml) data = unimarc.do(unimarcjson) - assert data.get('editionStatement') == [ - { - 'editionDesignation': [ - { - 'value': '2e ed.' 
- } - ], - 'responsibility': [ - { - 'value': 'avec un avant-propos par Jean Faret' - } - ] - } + assert data.get("editionStatement") == [ + { + "editionDesignation": [{"value": "2e ed."}], + "responsibility": [{"value": "avec un avant-propos par Jean Faret"}], + } ] @@ -957,30 +740,17 @@ def test_unimarc_publishers_provision_activity(): """ unimarcjson = create_record(unimarcxml) data = unimarc.do(unimarcjson) - assert data.get('provisionActivity') == [{ - 'type': 'bf:Publication', - 'statement': [ - { - 'label': [ - {'value': 'Lausanne'} - ], - 'type': 'bf:Place' - }, - { - 'label': [ - {'value': 'Payot'} - ], - 'type': 'bf:Agent' - }, - { - 'label': [ - {'value': '2015'} - ], - 'type': 'Date' - } - ], - 'startDate': 2015, - }] + assert data.get("provisionActivity") == [ + { + "type": "bf:Publication", + "statement": [ + {"label": [{"value": "Lausanne"}], "type": "bf:Place"}, + {"label": [{"value": "Payot"}], "type": "bf:Agent"}, + {"label": [{"value": "2015"}], "type": "Date"}, + ], + "startDate": 2015, + } + ] unimarcxml = """ @@ -999,42 +769,20 @@ def test_unimarc_publishers_provision_activity(): """ unimarcjson = create_record(unimarcxml) data = unimarc.do(unimarcjson) - assert data.get('provisionActivity') == [ + assert data.get("provisionActivity") == [ { - 'type': 'bf:Publication', - 'statement': [ - { - 'label': [ - {'value': 'Lausanne'} - ], - 'type': 'bf:Place' - }, - { - 'label': [ - {'value': 'Payot'} - ], - 'type': 'bf:Agent' - }, - { - 'label': [ - {'value': '2015'} - ], - 'type': 'Date' - } + "type": "bf:Publication", + "statement": [ + {"label": [{"value": "Lausanne"}], "type": "bf:Place"}, + {"label": [{"value": "Payot"}], "type": "bf:Agent"}, + {"label": [{"value": "2015"}], "type": "Date"}, ], - 'startDate': 2015, + "startDate": 2015, }, { - 'statement': [ - { - 'label': [ - {'value': 'Lausanne'} - ], - 'type': 'bf:Place' - } - ], - 'type': 'bf:Manufacture' - } + "statement": [{"label": [{"value": "Lausanne"}], "type": "bf:Place"}], + "type": "bf:Manufacture", + }, ] unimarcxml = """ @@ -1052,40 +800,20 @@ def test_unimarc_publishers_provision_activity(): """ unimarcjson = create_record(unimarcxml) data = unimarc.do(unimarcjson) - assert data.get('provisionActivity') == [{ - 'type': 'bf:Publication', - 'place': [ - { - 'country': 'fr' - }, - ], - 'statement': [ - { - 'label': [ - {'value': '[Paris]'} - ], - 'type': 'bf:Place' - }, - { - 'label': [ - {'value': 'Desclée de Brouwer [puis]'} - ], - 'type': 'bf:Agent' - }, - { - 'label': [ - {'value': 'Etudes augustiniennes'} - ], - 'type': 'bf:Agent' - }, - { - 'label': [ - {'value': '[1969-1999]'} - ], - 'type': 'Date' - } - ] - }] + assert data.get("provisionActivity") == [ + { + "type": "bf:Publication", + "place": [ + {"country": "fr"}, + ], + "statement": [ + {"label": [{"value": "[Paris]"}], "type": "bf:Place"}, + {"label": [{"value": "Desclée de Brouwer [puis]"}], "type": "bf:Agent"}, + {"label": [{"value": "Etudes augustiniennes"}], "type": "bf:Agent"}, + {"label": [{"value": "[1969-1999]"}], "type": "Date"}, + ], + } + ] unimarcxml = """ @@ -1100,41 +828,18 @@ def test_unimarc_publishers_provision_activity(): """ unimarcjson = create_record(unimarcxml) data = unimarc.do(unimarcjson) - assert data.get('provisionActivity') == [{ - 'type': 'bf:Publication', - 'statement': [ - { - 'label': [ - {'value': 'Paris'} - ], - 'type': 'bf:Place' - }, - { - 'label': [ - {'value': 'Champion'} - ], - 'type': 'bf:Agent' - }, - { - 'label': [ - {'value': 'Genève'} - ], - 'type': 'bf:Place' - }, - { - 'label': [ - {'value': 'Droz'} 
- ], - 'type': 'bf:Agent' - }, - { - 'label': [ - {'value': '1912-1955'} - ], - 'type': 'Date' - } - ] - }] + assert data.get("provisionActivity") == [ + { + "type": "bf:Publication", + "statement": [ + {"label": [{"value": "Paris"}], "type": "bf:Place"}, + {"label": [{"value": "Champion"}], "type": "bf:Agent"}, + {"label": [{"value": "Genève"}], "type": "bf:Place"}, + {"label": [{"value": "Droz"}], "type": "bf:Agent"}, + {"label": [{"value": "1912-1955"}], "type": "Date"}, + ], + } + ] unimarcxml = """ @@ -1145,17 +850,12 @@ def test_unimarc_publishers_provision_activity(): """ unimarcjson = create_record(unimarcxml) data = unimarc.do(unimarcjson) - assert data.get('provisionActivity') == [{ - 'type': 'bf:Production', - 'statement': [ - { - 'label': [ - {'value': 'Lausanne'} - ], - 'type': 'bf:Place' - } - ], - }] + assert data.get("provisionActivity") == [ + { + "type": "bf:Production", + "statement": [{"label": [{"value": "Lausanne"}], "type": "bf:Place"}], + } + ] unimarcxml = """ @@ -1166,17 +866,12 @@ def test_unimarc_publishers_provision_activity(): """ unimarcjson = create_record(unimarcxml) data = unimarc.do(unimarcjson) - assert data.get('provisionActivity') == [{ - 'type': 'bf:Distribution', - 'statement': [ - { - 'label': [ - {'value': 'Lausanne'} - ], - 'type': 'bf:Place' - } - ], - }] + assert data.get("provisionActivity") == [ + { + "type": "bf:Distribution", + "statement": [{"label": [{"value": "Lausanne"}], "type": "bf:Place"}], + } + ] unimarcxml = """ @@ -1187,17 +882,12 @@ def test_unimarc_publishers_provision_activity(): """ unimarcjson = create_record(unimarcxml) data = unimarc.do(unimarcjson) - assert data.get('provisionActivity') == [{ - 'type': 'bf:Manufacture', - 'statement': [ - { - 'label': [ - {'value': 'Lausanne'} - ], - 'type': 'bf:Place' - } - ], - }] + assert data.get("provisionActivity") == [ + { + "type": "bf:Manufacture", + "statement": [{"label": [{"value": "Lausanne"}], "type": "bf:Place"}], + } + ] def test_unimarc_copyright_date(): @@ -1211,7 +901,7 @@ def test_unimarc_copyright_date(): """ unimarcjson = create_record(unimarcxml) data = unimarc.do(unimarcjson) - assert data.get('copyrightDate') == ['© 1919'] + assert data.get("copyrightDate") == ["© 1919"] unimarcxml = """ @@ -1222,7 +912,7 @@ def test_unimarc_copyright_date(): """ unimarcjson = create_record(unimarcxml) data = unimarc.do(unimarcjson) - assert data.get('copyrightDate') == ['℗ 1919'] + assert data.get("copyrightDate") == ["℗ 1919"] def test_unimarc_description(): @@ -1246,16 +936,13 @@ def test_unimarc_description(): """ unimarcjson = create_record(unimarcxml) data = unimarc.do(unimarcjson) - assert data.get('extent') == '116 p.' - assert data.get('illustrativeContent') == ['illustrations'] - assert data.get('dimensions') == ['22 cm'] - assert data.get('note') == [{ - 'noteType': 'otherPhysicalDetails', - 'label': 'ill.' - }, { - 'noteType': 'accompanyingMaterial', - 'label': '1 volume (200 pages)' - }] + assert data.get("extent") == "116 p." + assert data.get("illustrativeContent") == ["illustrations"] + assert data.get("dimensions") == ["22 cm"] + assert data.get("note") == [ + {"noteType": "otherPhysicalDetails", "label": "ill."}, + {"noteType": "accompanyingMaterial", "label": "1 volume (200 pages)"}, + ] unimarcxml = """ @@ -1271,22 +958,15 @@ def test_unimarc_description(): """ unimarcjson = create_record(unimarcxml) data = unimarc.do(unimarcjson) - assert data.get('extent') == '116 p.' 
- assert data.get('illustrativeContent') == ['illustrations'] - assert data.get('dimensions') == ['22 cm'] - assert data.get('note') == [{ - 'noteType': 'otherPhysicalDetails', - 'label': 'ill.' - }, { - 'noteType': 'accompanyingMaterial', - 'label': '1 volume (200 pages)' - }, { - 'noteType': 'accompanyingMaterial', - 'label': 'une brochure (12 pages)' - }, { - 'noteType': 'accompanyingMaterial', - 'label': 'une disquette' - }] + assert data.get("extent") == "116 p." + assert data.get("illustrativeContent") == ["illustrations"] + assert data.get("dimensions") == ["22 cm"] + assert data.get("note") == [ + {"noteType": "otherPhysicalDetails", "label": "ill."}, + {"noteType": "accompanyingMaterial", "label": "1 volume (200 pages)"}, + {"noteType": "accompanyingMaterial", "label": "une brochure (12 pages)"}, + {"noteType": "accompanyingMaterial", "label": "une disquette"}, + ] unimarcxml = """ @@ -1305,12 +985,9 @@ def test_unimarc_description(): """ unimarcjson = create_record(unimarcxml) data = unimarc.do(unimarcjson) - assert data.get('illustrativeContent') == ['illustrations'] - assert data.get('dimensions') == ['12 x 15', '19 cm', '22 cm'] - assert data.get('note') == [{ - 'noteType': 'otherPhysicalDetails', - 'label': 'ill.' - }] + assert data.get("illustrativeContent") == ["illustrations"] + assert data.get("dimensions") == ["12 x 15", "19 cm", "22 cm"] + assert data.get("note") == [{"noteType": "otherPhysicalDetails", "label": "ill."}] unimarcxml = """ @@ -1322,12 +999,11 @@ def test_unimarc_description(): """ unimarcjson = create_record(unimarcxml) data = unimarc.do(unimarcjson) - assert data.get('dimensions') == ['24 cm'] - assert data.get('colorContent') == ['rdacc:1003'] - assert data.get('note') == [{ - 'noteType': 'otherPhysicalDetails', - 'label': 'couv. ill. en coul.' - }] + assert data.get("dimensions") == ["24 cm"] + assert data.get("colorContent") == ["rdacc:1003"] + assert data.get("note") == [ + {"noteType": "otherPhysicalDetails", "label": "couv. ill. 
en coul."} + ] # seriesStatement: [225 repetitive] @@ -1351,17 +1027,21 @@ def test_unimarc_series_statement(): """ unimarcjson = create_record(unimarcxml) data = unimarc.do(unimarcjson) - assert data.get('seriesStatement') == [{ - 'seriesTitle': [{'value': 'Collection formation: Mucchielli'}], - 'seriesEnumeration': [{'value': '5'}], - 'subseriesStatement': [{ - 'subseriesTitle': [{'value': 'Développement personnel'}], - 'subseriesEnumeration': [{'value': '6'}] - }] - }, { - 'seriesTitle': [{'value': 'Collection Two'}], - 'seriesEnumeration': [{'value': '123'}], - } + assert data.get("seriesStatement") == [ + { + "seriesTitle": [{"value": "Collection formation: Mucchielli"}], + "seriesEnumeration": [{"value": "5"}], + "subseriesStatement": [ + { + "subseriesTitle": [{"value": "Développement personnel"}], + "subseriesEnumeration": [{"value": "6"}], + } + ], + }, + { + "seriesTitle": [{"value": "Collection Two"}], + "seriesEnumeration": [{"value": "123"}], + }, ] @@ -1410,13 +1090,12 @@ def test_unimarc_partOf_with_link(document_with_issn): """ unimarcjson = create_record(unimarcxml) data = unimarc.do(unimarcjson) - assert data['partOf'] == [{ - 'document': {'$ref': 'https://bib.rero.ch/api/documents/doc5'}, - 'numbering': [{ - 'volume': '24', - 'year': '2024' - }] - }] + assert data["partOf"] == [ + { + "document": {"$ref": "https://bib.rero.ch/api/documents/doc5"}, + "numbering": [{"volume": "24", "year": "2024"}], + } + ] unimarcxml = """ @@ -1430,13 +1109,12 @@ def test_unimarc_partOf_with_link(document_with_issn): """ unimarcjson = create_record(unimarcxml) data = unimarc.do(unimarcjson) - assert data['partOf'] == [{ - 'document': {'$ref': 'https://bib.rero.ch/api/documents/doc5'}, - 'numbering': [{ - 'volume': 'No 770, 15 mai 2024, pp. 31-41', - 'year': '2024' - }] - }] + assert data["partOf"] == [ + { + "document": {"$ref": "https://bib.rero.ch/api/documents/doc5"}, + "numbering": [{"volume": "No 770, 15 mai 2024, pp. 
31-41", "year": "2024"}], + } + ] unimarcxml = """ @@ -1450,19 +1128,16 @@ def test_unimarc_partOf_with_link(document_with_issn): """ unimarcjson = create_record(unimarcxml) data = unimarc.do(unimarcjson) - assert data['partOf'] == [{ - 'document': {'$ref': 'https://bib.rero.ch/api/documents/doc5'}, - 'numbering': [{ - 'volume': '3-4,11-15', - 'year': '1867' - }, { - 'volume': '3-4,11-15', - 'year': '1868' - }, { - 'volume': '3-4,11-15', - 'year': '1869' - }] - }] + assert data["partOf"] == [ + { + "document": {"$ref": "https://bib.rero.ch/api/documents/doc5"}, + "numbering": [ + {"volume": "3-4,11-15", "year": "1867"}, + {"volume": "3-4,11-15", "year": "1868"}, + {"volume": "3-4,11-15", "year": "1869"}, + ], + } + ] unimarcxml = """ @@ -1475,16 +1150,12 @@ def test_unimarc_partOf_with_link(document_with_issn): """ unimarcjson = create_record(unimarcxml) data = unimarc.do(unimarcjson) - assert data['partOf'] == [{ - 'document': {'$ref': 'https://bib.rero.ch/api/documents/doc5'}, - 'numbering': [{ - 'year': '1867' - }, { - 'year': '1868' - }, { - 'year': '1869' - }] - }] + assert data["partOf"] == [ + { + "document": {"$ref": "https://bib.rero.ch/api/documents/doc5"}, + "numbering": [{"year": "1867"}, {"year": "1868"}, {"year": "1869"}], + } + ] unimarcxml = """ @@ -1497,10 +1168,12 @@ def test_unimarc_partOf_with_link(document_with_issn): """ unimarcjson = create_record(unimarcxml) data = unimarc.do(unimarcjson) - assert data['partOf'] == [{ - 'document': {'$ref': 'https://bib.rero.ch/api/documents/doc5'}, - 'numbering': [{'year': '1869'}] - }] + assert data["partOf"] == [ + { + "document": {"$ref": "https://bib.rero.ch/api/documents/doc5"}, + "numbering": [{"year": "1869"}], + } + ] # abstract: [330$a repetitive] @@ -1516,8 +1189,7 @@ def test_unimarc_summary(): """ unimarcjson = create_record(unimarcxml) data = unimarc.do(unimarcjson) - assert data.get('summary') == [ - {'label': [{'value': 'This book is about'}]}] + assert data.get("summary") == [{"label": [{"value": "This book is about"}]}] # identifiers:isbn: 010$a @@ -1535,16 +1207,9 @@ def test_unimarc_identifiers(): """ unimarcjson = create_record(unimarcxml) data = unimarc.do(unimarcjson) - assert data.get('identifiedBy') == [ - { - "type": "bf:Local", - "value": "ark:/12148/cb350330441", - "source": "BNF" - }, - { - "type": "bf:Ean", - "value": "9782370550163" - } + assert data.get("identifiedBy") == [ + {"type": "bf:Local", "value": "ark:/12148/cb350330441", "source": "BNF"}, + {"type": "bf:Ean", "value": "9782370550163"}, ] unimarcxml = """ @@ -1556,12 +1221,8 @@ def test_unimarc_identifiers(): """ unimarcjson = create_record(unimarcxml) data = unimarc.do(unimarcjson) - assert data.get('identifiedBy') == [ - { - "type": "bf:Ean", - "value": "978237055016x", - "status": "invalid" - } + assert data.get("identifiedBy") == [ + {"type": "bf:Ean", "value": "978237055016x", "status": "invalid"} ] @@ -1578,10 +1239,7 @@ def test_unimarc_notes(): """ unimarcjson = create_record(unimarcxml) data = unimarc.do(unimarcjson) - assert data.get('note') == [{ - 'noteType': 'general', - 'label': 'note' - }] + assert data.get("note") == [{"noteType": "general", "label": "note"}] unimarcxml = """ @@ -1595,13 +1253,9 @@ def test_unimarc_notes(): """ unimarcjson = create_record(unimarcxml) data = unimarc.do(unimarcjson) - assert data.get('note') == [{ - 'noteType': 'general', - 'label': 'note 1' - }, { - 'noteType': 'general', - 'label': 'note 2' - } + assert data.get("note") == [ + {"noteType": "general", "label": "note 1"}, + {"noteType": 
"general", "label": "note 2"}, ] @@ -1629,19 +1283,21 @@ def test_unimarc_subjects(): """ unimarcjson = create_record(unimarcxml) data = unimarc.do(unimarcjson) - assert data.get('subjects_imported') == [{ - 'entity': { - 'authorized_access_point': 'subjects 600', - 'type': 'bf:Topic', - 'source': 'rameau' - } - }, { - 'entity': { - 'authorized_access_point': - 'Capet, Louis III, Jr., 1700-1780 -- France', - 'type': 'bf:Topic' - } - }] + assert data.get("subjects_imported") == [ + { + "entity": { + "authorized_access_point": "subjects 600", + "type": "bf:Topic", + "source": "rameau", + } + }, + { + "entity": { + "authorized_access_point": "Capet, Louis III, Jr., 1700-1780 -- France", + "type": "bf:Topic", + } + }, + ] def test_unimarc_to_electronicLocator_from_856(): @@ -1657,10 +1313,12 @@ def test_unimarc_to_electronicLocator_from_856(): """ unimarcjson = create_record(unimarcxml) data = unimarc.do(unimarcjson) - assert data.get('electronicLocator') == [{ - 'url': 'http://gallica.bnf.fr/ark:/12148/btv1b550017355', - 'type': 'resource', - }] + assert data.get("electronicLocator") == [ + { + "url": "http://gallica.bnf.fr/ark:/12148/btv1b550017355", + "type": "resource", + } + ] def test_unimarc_to_isFiktion_from_105(): @@ -1676,7 +1334,7 @@ def test_unimarc_to_isFiktion_from_105(): """ unimarcjson = create_record(unimarcxml) data = unimarc.do(unimarcjson) - assert data['fiction_statement'] == DocumentFictionType.Unspecified.value + assert data["fiction_statement"] == DocumentFictionType.Unspecified.value unimarcxml = """ @@ -1688,4 +1346,4 @@ def test_unimarc_to_isFiktion_from_105(): """ unimarcjson = create_record(unimarcxml) data = unimarc.do(unimarcjson) - assert data['fiction_statement'] == DocumentFictionType.Fiction.value + assert data["fiction_statement"] == DocumentFictionType.Fiction.value diff --git a/tests/unit/documents/test_documents_jsonschema.py b/tests/unit/documents/test_documents_jsonschema.py index 56fdd9702d..5c68815f67 100644 --- a/tests/unit/documents/test_documents_jsonschema.py +++ b/tests/unit/documents/test_documents_jsonschema.py @@ -37,7 +37,7 @@ def test_pid(document_schema, document_data_tmp): validate(document_data_tmp, document_schema) with pytest.raises(ValidationError): - document_data_tmp['pid'] = 25 + document_data_tmp["pid"] = 25 validate(document_data_tmp, document_schema) @@ -46,7 +46,7 @@ def test_title(document_schema, document_data_tmp): validate(document_data_tmp, document_schema) with pytest.raises(ValidationError): - document_data_tmp['title'] = 2 + document_data_tmp["title"] = 2 validate(document_data_tmp, document_schema) @@ -55,11 +55,11 @@ def test_type(document_schema, document_data_tmp): validate(document_data_tmp, document_schema) with pytest.raises(ValidationError): - document_data_tmp['type'] = 2 + document_data_tmp["type"] = 2 validate(document_data_tmp, document_schema) with pytest.raises(ValidationError): - document_data_tmp['type'] = 'CD-ROM' + document_data_tmp["type"] = "CD-ROM" validate(document_data_tmp, document_schema) @@ -68,7 +68,7 @@ def test_part_of(document_schema, document_data_tmp): validate(document_data_tmp, document_schema) with pytest.raises(ValidationError): - document_data_tmp['partOf'] = 2 + document_data_tmp["partOf"] = 2 validate(document_data_tmp, document_schema) @@ -77,324 +77,269 @@ def test_languages(document_schema, document_data_tmp): validate(document_data_tmp, document_schema) with pytest.raises(ValidationError): - document_data_tmp['language'][0]['value'] = [2] + 
document_data_tmp["language"][0]["value"] = [2] validate(document_data_tmp, document_schema) with pytest.raises(ValidationError): - document_data_tmp['language'][0]['value'] = ['gre'] + document_data_tmp["language"][0]["value"] = ["gre"] validate(document_data_tmp, document_schema) def test_contribution(mef_agents_url, document_schema, document_data_tmp): """Test contribution for jsonschemas.""" - document_data_tmp['contribution'] = [{ - 'entity': { - 'type': 'bf:Person', - 'authorized_access_point': 'dumont, Jean (1954)' - }, - 'role': ['aut'] - }, { - 'entity': { - 'type': 'bf:Organisation', - 'authorized_access_point': 'RERO' - }, - 'role': ['aut'] - }, { - 'entity': { - '$ref': f'{mef_agents_url}/gnd/XXXXXXX' + document_data_tmp["contribution"] = [ + { + "entity": { + "type": "bf:Person", + "authorized_access_point": "dumont, Jean (1954)", + }, + "role": ["aut"], }, - 'role': ['aut'] - }, { - 'entity': { - '$ref': f'{mef_agents_url}/gnd/XXXXXXX' + { + "entity": {"type": "bf:Organisation", "authorized_access_point": "RERO"}, + "role": ["aut"], }, - 'role': ['aut'] - }] + {"entity": {"$ref": f"{mef_agents_url}/gnd/XXXXXXX"}, "role": ["aut"]}, + {"entity": {"$ref": f"{mef_agents_url}/gnd/XXXXXXX"}, "role": ["aut"]}, + ] validate(document_data_tmp, document_schema) with pytest.raises(ValidationError): - document_data_tmp['contribution'][0]['entity']['type'] = [2] + document_data_tmp["contribution"][0]["entity"]["type"] = [2] validate(document_data_tmp, document_schema) with pytest.raises(ValidationError): - document_data_tmp[ - 'contribution'][0]['entity']['authorized_access_point'] = [2] + document_data_tmp["contribution"][0]["entity"]["authorized_access_point"] = [2] validate(document_data_tmp, document_schema) with pytest.raises(ValidationError): - document_data_tmp['contribution'][1]['entity']['type'] = [2] + document_data_tmp["contribution"][1]["entity"]["type"] = [2] validate(document_data_tmp, document_schema) with pytest.raises(ValidationError): - document_data_tmp[ - 'contribution'][1]['entity']['authorized_access_point'] = [2] + document_data_tmp["contribution"][1]["entity"]["authorized_access_point"] = [2] validate(document_data_tmp, document_schema) def test_copyrightDate(document_schema, document_data_tmp): """Test copyright date for jsonschemas.""" - document_data_tmp['copyrightDate'] = ['© 1971'] + document_data_tmp["copyrightDate"] = ["© 1971"] validate(document_data_tmp, document_schema) with pytest.raises(ValidationError): - document_data_tmp['copyrightDate'] = 1971 + document_data_tmp["copyrightDate"] = 1971 validate(document_data_tmp, document_schema) def test_edition_statement(document_schema, document_data_tmp): """Test edition statement for jsonschemas.""" - document_data_tmp['editionStatement'] = [{ - 'editionDesignation': [{ - 'value': 'Di 3 ban' - }, { - 'value': '第3版', - 'language': 'chi-hani' - }], - 'responsibility': [{ - 'value': 'Zeng Lingliang zhu bian' - }, { - 'value': '曾令良主编', - 'language': 'chi-hani' - }] - }] + document_data_tmp["editionStatement"] = [ + { + "editionDesignation": [ + {"value": "Di 3 ban"}, + {"value": "第3版", "language": "chi-hani"}, + ], + "responsibility": [ + {"value": "Zeng Lingliang zhu bian"}, + {"value": "曾令良主编", "language": "chi-hani"}, + ], + } + ] validate(document_data_tmp, document_schema) with pytest.raises(ValidationError): - document_data_tmp['editionStatement'] = [{'bad_key': 'bad_value'}] + document_data_tmp["editionStatement"] = [{"bad_key": "bad_value"}] validate(document_data_tmp, document_schema) with 
pytest.raises(ValidationError): - document_data_tmp['editionStatement'] = 'string is a bad type' + document_data_tmp["editionStatement"] = "string is a bad type" validate(document_data_tmp, document_schema) def test_provisionActivity(document_schema, document_data_tmp): """Test publishers for jsonschemas.""" - document_data_tmp['provisionActivity'] = [{ - 'type': 'bf:Publication', - 'place': [ - { - 'country': 'fr' - } - ], - 'statement': [ - { - 'label': [ - {'value': 'Paris'} - ], - 'type': 'bf:Place' - }, - { - 'label': [ - {'value': 'Desclée de Brouwer'} - ], - 'type': 'bf:Agent' - }, - { - 'label': [ - {'value': 'Etudes augustiniennes'} - ], - 'type': 'bf:Agent' - }, - { - 'label': [ - {'value': '1969'} - ], - 'type': 'Date' - } - - ], - 'startDate': 1969 - }] + document_data_tmp["provisionActivity"] = [ + { + "type": "bf:Publication", + "place": [{"country": "fr"}], + "statement": [ + {"label": [{"value": "Paris"}], "type": "bf:Place"}, + {"label": [{"value": "Desclée de Brouwer"}], "type": "bf:Agent"}, + {"label": [{"value": "Etudes augustiniennes"}], "type": "bf:Agent"}, + {"label": [{"value": "1969"}], "type": "Date"}, + ], + "startDate": 1969, + } + ] validate(document_data_tmp, document_schema) with pytest.raises(ValidationError): - document_data_tmp['provisionActivity'][0]['type'] = [2] + document_data_tmp["provisionActivity"][0]["type"] = [2] validate(document_data_tmp, document_schema) with pytest.raises(ValidationError): - document_data_tmp['provisionActivity'][0]['startDate'] = [2] + document_data_tmp["provisionActivity"][0]["startDate"] = [2] validate(document_data_tmp, document_schema) def test_extent(document_schema, document_data_tmp): """Test extent for jsonschemas.""" - document_data_tmp['extent'] = '1 DVD-R (50 min.)' + document_data_tmp["extent"] = "1 DVD-R (50 min.)" validate(document_data_tmp, document_schema) with pytest.raises(ValidationError): - document_data_tmp['extent'] = [2] + document_data_tmp["extent"] = [2] validate(document_data_tmp, document_schema) def test_duration(document_schema, document_data_tmp): """Test duration for jsonschemas.""" - document_data_tmp['duration'] = ['(50 min.)'] + document_data_tmp["duration"] = ["(50 min.)"] validate(document_data_tmp, document_schema) with pytest.raises(ValidationError): - document_data_tmp['duration'] = [2] + document_data_tmp["duration"] = [2] validate(document_data_tmp, document_schema) def test_production_method(document_schema, document_data_tmp): """Test productionMethod for jsonschemas.""" - document_data_tmp['productionMethod'] = ['rdapm:1007'] + document_data_tmp["productionMethod"] = ["rdapm:1007"] validate(document_data_tmp, document_schema) with pytest.raises(ValidationError): - document_data_tmp['productionMethod'] = [2] + document_data_tmp["productionMethod"] = [2] validate(document_data_tmp, document_schema) def test_illustrative_content(document_schema, document_data_tmp): """Test illustrativeContent for jsonschemas.""" - document_data_tmp['illustrativeContent'] = ['illustrations'] + document_data_tmp["illustrativeContent"] = ["illustrations"] validate(document_data_tmp, document_schema) with pytest.raises(ValidationError): - document_data_tmp['illustrativeContent'] = [2] + document_data_tmp["illustrativeContent"] = [2] validate(document_data_tmp, document_schema) def test_color_content(document_schema, document_data_tmp): """Test colorContent for jsonschemas.""" - document_data_tmp['colorContent'] = ['rdacc:1002'] + document_data_tmp["colorContent"] = ["rdacc:1002"] 
validate(document_data_tmp, document_schema) with pytest.raises(ValidationError): - document_data_tmp['colorContent'] = [2] + document_data_tmp["colorContent"] = [2] validate(document_data_tmp, document_schema) def test_book_format(document_schema, document_data_tmp): """Test bookFormat for jsonschemas.""" - document_data_tmp['bookFormat'] = ['8ᵒ'] + document_data_tmp["bookFormat"] = ["8ᵒ"] validate(document_data_tmp, document_schema) with pytest.raises(ValidationError): - document_data_tmp['bookFormat'] = [2] + document_data_tmp["bookFormat"] = [2] validate(document_data_tmp, document_schema) def test_dimensions(document_schema, document_data_tmp): """Test dimensions for jsonschemas.""" - document_data_tmp['dimensions'] = ['in-8, 22 cm'] + document_data_tmp["dimensions"] = ["in-8, 22 cm"] validate(document_data_tmp, document_schema) with pytest.raises(ValidationError): - document_data_tmp['dimensions'] = [2] + document_data_tmp["dimensions"] = [2] validate(document_data_tmp, document_schema) def test_series(document_schema, document_data_tmp): """Test series for jsonschemas.""" - document_data_tmp['seriesStatement'] = [{ - 'seriesTitle': [{'value': 'Handbuch der Orientalistik'}], - 'seriesEnumeration': [{'value': 'Abt. 7'}], - 'subseriesStatement': [{ - 'subseriesTitle': [{'value': 'Kunst und Archäologie'}], - 'subseriesEnumeration': [{'value': 'Bd. 6'}] - }, { - 'subseriesTitle': [{'value': 'Südostasien'}], - 'subseriesEnumeration': [{'value': 'Abschnitt 6'}] - } - ] - }] + document_data_tmp["seriesStatement"] = [ + { + "seriesTitle": [{"value": "Handbuch der Orientalistik"}], + "seriesEnumeration": [{"value": "Abt. 7"}], + "subseriesStatement": [ + { + "subseriesTitle": [{"value": "Kunst und Archäologie"}], + "subseriesEnumeration": [{"value": "Bd. 6"}], + }, + { + "subseriesTitle": [{"value": "Südostasien"}], + "subseriesEnumeration": [{"value": "Abschnitt 6"}], + }, + ], + } + ] validate(document_data_tmp, document_schema) with pytest.raises(ValidationError): - document_data_tmp['seriesStatement'][0]['seriesTitle'] = 2 + document_data_tmp["seriesStatement"][0]["seriesTitle"] = 2 validate(document_data_tmp, document_schema) with pytest.raises(ValidationError): - document_data_tmp['seriesStatement'][0]['seriesEnumeration'] = 2 + document_data_tmp["seriesStatement"][0]["seriesEnumeration"] = 2 validate(document_data_tmp, document_schema) with pytest.raises(ValidationError): - document_data_tmp['seriesStatement'][0]['subseriesStatement'] is None + document_data_tmp["seriesStatement"][0]["subseriesStatement"] is None validate(document_data_tmp, document_schema) def test_note(document_schema, document_data_tmp): """Test note for jsonschemas.""" - document_data_tmp['note'] = [{ - 'noteType': 'otherPhysicalDetails', - 'label': 'litho Ill.en n. et bl.' - }] + document_data_tmp["note"] = [ + {"noteType": "otherPhysicalDetails", "label": "litho Ill.en n. 
et bl."} + ] validate(document_data_tmp, document_schema) with pytest.raises(ValidationError): - document_data_tmp['note'][0] = 2 + document_data_tmp["note"][0] = 2 validate(document_data_tmp, document_schema) def test_identifiedby(document_schema, document_data_tmp): """Test identifiers for jsonschemas.""" - document_data_tmp['identifiedBy'] = [ - { - "type": "bf:Local", - "source": "RERO", - "value": "R008745599" - }, - { - "type": "bf:Isbn", - "value": "9782844267788" - }, - { - "type": "bf:Local", - "source": "BNF", - "value": "FRBNF452959040000002" - }, - { - "type": "uri", - "value": "http://catalogue.bnf.fr/ark:/12148/cb45295904f" - } + document_data_tmp["identifiedBy"] = [ + {"type": "bf:Local", "source": "RERO", "value": "R008745599"}, + {"type": "bf:Isbn", "value": "9782844267788"}, + {"type": "bf:Local", "source": "BNF", "value": "FRBNF452959040000002"}, + {"type": "uri", "value": "http://catalogue.bnf.fr/ark:/12148/cb45295904f"}, ] validate(document_data_tmp, document_schema) with pytest.raises(ValidationError): - for identifier in document_data_tmp['identifiedBy']: - identifier['value'] = 2 + for identifier in document_data_tmp["identifiedBy"]: + identifier["value"] = 2 validate(document_data_tmp, document_schema) with pytest.raises(ValidationError): - document_data_tmp['identifiedBy'] = {} + document_data_tmp["identifiedBy"] = {} validate(document_data_tmp, document_schema) def test_subjects(document_schema, document_data_tmp): """Test subjects for jsonschemas.""" - document_data_tmp['subjects'] = [{ - 'entity': { - 'type': "bf:Topic", - 'authorized_access_point': 'ILS' - }}, { - 'entity': { - 'type': "bf:Topic", - 'authorized_access_point': 'informatique' - }}, { - 'entity': { - 'type': "bf:Topic", - 'authorized_access_point': 'bibliothèque' - }} + document_data_tmp["subjects"] = [ + {"entity": {"type": "bf:Topic", "authorized_access_point": "ILS"}}, + {"entity": {"type": "bf:Topic", "authorized_access_point": "informatique"}}, + {"entity": {"type": "bf:Topic", "authorized_access_point": "bibliothèque"}}, ] validate(document_data_tmp, document_schema) with pytest.raises(ValidationError): - document_data_tmp['subjects'] = 2 + document_data_tmp["subjects"] = 2 validate(document_data_tmp, document_schema) @@ -403,7 +348,7 @@ def test_harvested(document_schema, document_data_tmp): validate(document_data_tmp, document_schema) with pytest.raises(ValidationError): - document_data_tmp['harvested'] = 2 + document_data_tmp["harvested"] = 2 validate(document_data_tmp, document_schema) @@ -412,5 +357,5 @@ def test_draft(document_schema, document_data_tmp): validate(document_data_tmp, document_schema) with pytest.raises(ValidationError): - document_data_tmp['_draft'] = 2 + document_data_tmp["_draft"] = 2 validate(document_data_tmp, document_schema) diff --git a/tests/unit/documents/test_documents_utils.py b/tests/unit/documents/test_documents_utils.py index 9be66a4f57..abdc701220 100644 --- a/tests/unit/documents/test_documents_utils.py +++ b/tests/unit/documents/test_documents_utils.py @@ -24,89 +24,88 @@ def test_format_text(): """Test title format text head.""" - data = [{ - 'mainTitle': [{ - 'value': 'Dingding lixianji' - }, { - 'value': '\u4e01\u4e01\u5386\u9669\u8bb0', - 'language': 'und-hani' - }], - 'type': 'bf:Title' - }] - assert '\u4e01\u4e01\u5386\u9669\u8bb0' == TitleExtension.format_text(data) + data = [ + { + "mainTitle": [ + {"value": "Dingding lixianji"}, + {"value": "\u4e01\u4e01\u5386\u9669\u8bb0", "language": "und-hani"}, + ], + "type": "bf:Title", + } + ] + assert 
"\u4e01\u4e01\u5386\u9669\u8bb0" == TitleExtension.format_text(data) - data = [{ - 'mainTitle': [{ - 'value': 'Die russischen orthodoxen Bischöfe von 1893', - }], - 'subtitle': [{ - 'value': 'Bio-Bibliographie' - }], - 'type': 'bf:Title' - }] - assert 'Die russischen orthodoxen Bischöfe von 1893 ' \ - ': Bio-Bibliographie' == TitleExtension.format_text(data) + data = [ + { + "mainTitle": [ + { + "value": "Die russischen orthodoxen Bischöfe von 1893", + } + ], + "subtitle": [{"value": "Bio-Bibliographie"}], + "type": "bf:Title", + } + ] + assert ( + "Die russischen orthodoxen Bischöfe von 1893 " + ": Bio-Bibliographie" == TitleExtension.format_text(data) + ) - data = [{ - 'mainTitle': [{ - 'value': 'Die russischen orthodoxen Bischöfe von 1893', - }, { - 'value': 'The Russian Orthodox Bishops of 1893', - 'language': 'eng' - }], - 'subtitle': [{ - 'value': 'Bio-Bibliographie' - }], - 'type': 'bf:Title' - }] - assert 'The Russian Orthodox Bishops of 1893' == \ - TitleExtension.format_text(data) + data = [ + { + "mainTitle": [ + { + "value": "Die russischen orthodoxen Bischöfe von 1893", + }, + {"value": "The Russian Orthodox Bishops of 1893", "language": "eng"}, + ], + "subtitle": [{"value": "Bio-Bibliographie"}], + "type": "bf:Title", + } + ] + assert "The Russian Orthodox Bishops of 1893" == TitleExtension.format_text(data) - data = [{ - 'mainTitle': [{ - 'value': 'main_title_text', - }], - 'subtitle': [{ - 'value': 'subtitle_text' - }], - 'part': [ - { - 'partName': [{'value': 'part1'}, {'value': 'part1.1'}], - 'partNumber': [{'value': 'number1'}, {'value': 'number1.1'}], - }, - { - 'partNumber': [{'value': 'number2'}, {'value': 'number2.2'}], - 'partName': [{'value': 'part2'}] - } - ], - 'type': 'bf:Title' - }] - assert 'main_title_text : subtitle_text. '\ - 'number1, number1.1, part1, part1.1. number2, number2.2, part2' == \ - TitleExtension.format_text(data) + data = [ + { + "mainTitle": [ + { + "value": "main_title_text", + } + ], + "subtitle": [{"value": "subtitle_text"}], + "part": [ + { + "partName": [{"value": "part1"}, {"value": "part1.1"}], + "partNumber": [{"value": "number1"}, {"value": "number1.1"}], + }, + { + "partNumber": [{"value": "number2"}, {"value": "number2.2"}], + "partName": [{"value": "part2"}], + }, + ], + "type": "bf:Title", + } + ] + assert ( + "main_title_text : subtitle_text. " + "number1, number1.1, part1, part1.1. 
number2, number2.2, part2" + == TitleExtension.format_text(data) + ) - data = [{ - "mainTitle": [ - { - "language": "rus-latn", - "value": "Frant︠s︡uzsko-russkiĭ slovarʹ" - }, - { - "language": "rus-cyrl", - "value": "Французско-русский словарь" - } - ], - "subtitle": [ - { - "language": "rus-latn", - "value": "okolo 25 000 slov" - }, - { - "language": "rus-cyrl", - "value": "около 25 000 слов" - } - ], - "type": "bf:Title" - }] - assert 'Французско-русский словарь : около 25 000 слов' == \ - TitleExtension.format_text(data) + data = [ + { + "mainTitle": [ + {"language": "rus-latn", "value": "Frant︠s︡uzsko-russkiĭ slovarʹ"}, + {"language": "rus-cyrl", "value": "Французско-русский словарь"}, + ], + "subtitle": [ + {"language": "rus-latn", "value": "okolo 25 000 slov"}, + {"language": "rus-cyrl", "value": "около 25 000 слов"}, + ], + "type": "bf:Title", + } + ] + assert ( + "Французско-русский словарь : около 25 000 слов" + == TitleExtension.format_text(data) + ) diff --git a/tests/unit/documents/test_identifiers.py b/tests/unit/documents/test_identifiers.py index 649145dc88..acf76e372f 100644 --- a/tests/unit/documents/test_identifiers.py +++ b/tests/unit/documents/test_identifiers.py @@ -19,18 +19,23 @@ """Tests identifier classes for documents.""" import pytest -from rero_ils.modules.commons.identifiers import Identifier, \ - IdentifierFactory, IdentifierStatus, IdentifierType, \ - InvalidIdentifierException, QualifierIdentifierRenderer +from rero_ils.modules.commons.identifiers import ( + Identifier, + IdentifierFactory, + IdentifierStatus, + IdentifierType, + InvalidIdentifierException, + QualifierIdentifierRenderer, +) def test_identifiers_creation(): """Test identifiers creation using factory or direct object creation.""" - data = {'type': IdentifierType.URI, 'value': 'http://valid.url'} + data = {"type": IdentifierType.URI, "value": "http://valid.url"} assert IdentifierFactory.create_identifier(data) assert Identifier(**data) - data = {'value': 'http://valid.url.but.no.type'} + data = {"value": "http://valid.url.but.no.type"} with pytest.raises(AttributeError): IdentifierFactory.create_identifier(data) with pytest.raises(InvalidIdentifierException): @@ -40,23 +45,23 @@ def test_identifiers_creation(): def test_isbn_identifiers(): """Test ISBN identifiers.""" # VALID ISBN IDENTIFIER - data = {'type': IdentifierType.ISBN, 'value': '978-284426778-8'} + data = {"type": IdentifierType.ISBN, "value": "978-284426778-8"} identifier = IdentifierFactory.create_identifier(data) assert identifier.is_valid() - assert str(identifier) == '978-2-84426-778-8' - assert identifier.normalize() == '9782844267788' + assert str(identifier) == "978-2-84426-778-8" + assert identifier.normalize() == "9782844267788" assert len(identifier.get_alternatives()) == 2 # ISBN-10 and EAN assert hash(identifier) # INVALID ISBN IDENTIFIER - data = {'type': IdentifierType.ISBN, 'value': '978-284426778-X'} + data = {"type": IdentifierType.ISBN, "value": "978-284426778-X"} identifier = IdentifierFactory.create_identifier(data) assert not identifier.is_valid() assert identifier.status == IdentifierStatus.INVALID # unable to normalize an invalid ISBN --> normalize() and str() will return # the original value - assert identifier.normalize() == '978-284426778-X' - assert str(identifier) == '978-284426778-X' + assert identifier.normalize() == "978-284426778-X" + assert str(identifier) == "978-284426778-X" with pytest.raises(InvalidIdentifierException): identifier.validate() assert len(identifier.get_alternatives()) == 0 @@ 
-64,22 +69,24 @@ def test_isbn_identifiers(): def test_ean_identifiers(): """Test EAN identifiers.""" - data = {'type': IdentifierType.EAN, 'value': '9782844267788'} + data = {"type": IdentifierType.EAN, "value": "9782844267788"} identifier = IdentifierFactory.create_identifier(data) assert identifier.is_valid() - assert str(identifier) == '9782844267788' - assert identifier.normalize() == '9782844267788' + assert str(identifier) == "9782844267788" + assert identifier.normalize() == "9782844267788" assert len(identifier.get_alternatives()) == 2 # ISBN-10 and ISBN-13 def test_identifiers_renderer(): """Test identifiers renderer.""" data = { - 'type': IdentifierType.ISBN, - 'value': '978-284426778-8', - 'qualifier': 'tome 2' + "type": IdentifierType.ISBN, + "value": "978-284426778-8", + "qualifier": "tome 2", } identifier = IdentifierFactory.create_identifier(data) - assert identifier.render() == '978-2-84426-778-8' - assert identifier.render(render_class=QualifierIdentifierRenderer()) == \ - '978-2-84426-778-8, tome 2' + assert identifier.render() == "978-2-84426-778-8" + assert ( + identifier.render(render_class=QualifierIdentifierRenderer()) + == "978-2-84426-778-8, tome 2" + ) diff --git a/tests/unit/test_acq_accounts_jsonschema.py b/tests/unit/test_acq_accounts_jsonschema.py index ab8c6ebac3..027a57e5b2 100644 --- a/tests/unit/test_acq_accounts_jsonschema.py +++ b/tests/unit/test_acq_accounts_jsonschema.py @@ -38,7 +38,7 @@ def test_pid(acq_account_schema, acq_account_fiction_martigny_data_tmp): validate(acq_account_fiction_martigny_data_tmp, acq_account_schema) with pytest.raises(ValidationError): - acq_account_fiction_martigny_data_tmp['pid'] = 25 + acq_account_fiction_martigny_data_tmp["pid"] = 25 validate(acq_account_fiction_martigny_data_tmp, acq_account_schema) @@ -47,45 +47,41 @@ def test_name(acq_account_schema, acq_account_fiction_martigny_data_tmp): validate(acq_account_fiction_martigny_data_tmp, acq_account_schema) with pytest.raises(ValidationError): - acq_account_fiction_martigny_data_tmp['name'] = 25 + acq_account_fiction_martigny_data_tmp["name"] = 25 validate(acq_account_fiction_martigny_data_tmp, acq_account_schema) -def test_description( - acq_account_schema, acq_account_fiction_martigny_data_tmp): +def test_description(acq_account_schema, acq_account_fiction_martigny_data_tmp): """Test description for acq accounts jsonschemas.""" validate(acq_account_fiction_martigny_data_tmp, acq_account_schema) with pytest.raises(ValidationError): - acq_account_fiction_martigny_data_tmp['description'] = 25 + acq_account_fiction_martigny_data_tmp["description"] = 25 validate(acq_account_fiction_martigny_data_tmp, acq_account_schema) -def test_organisation_pid(acq_account_schema, - acq_account_fiction_martigny_data_tmp): +def test_organisation_pid(acq_account_schema, acq_account_fiction_martigny_data_tmp): """Test organisation_pid for acq accounts jsonschemas.""" validate(acq_account_fiction_martigny_data_tmp, acq_account_schema) with pytest.raises(ValidationError): - acq_account_fiction_martigny_data_tmp['organisation_pid'] = 25 + acq_account_fiction_martigny_data_tmp["organisation_pid"] = 25 validate(acq_account_fiction_martigny_data_tmp, acq_account_schema) -def test_budget( - acq_account_schema, acq_account_fiction_martigny_data_tmp): +def test_budget(acq_account_schema, acq_account_fiction_martigny_data_tmp): """Test budget for acq accounts jsonschemas.""" validate(acq_account_fiction_martigny_data_tmp, acq_account_schema) with pytest.raises(ValidationError): - 
acq_account_fiction_martigny_data_tmp['budget'] = 25 + acq_account_fiction_martigny_data_tmp["budget"] = 25 validate(acq_account_fiction_martigny_data_tmp, acq_account_schema) -def test_allocated_amount( - acq_account_schema, acq_account_fiction_martigny_data_tmp): +def test_allocated_amount(acq_account_schema, acq_account_fiction_martigny_data_tmp): """Test allocated_amount for acq accounts jsonschemas.""" validate(acq_account_fiction_martigny_data_tmp, acq_account_schema) with pytest.raises(ValidationError): - acq_account_fiction_martigny_data_tmp['allocated_amount'] = 'test' + acq_account_fiction_martigny_data_tmp["allocated_amount"] = "test" validate(acq_account_fiction_martigny_data_tmp, acq_account_schema) diff --git a/tests/unit/test_acq_order_lines_jsonschema.py b/tests/unit/test_acq_order_lines_jsonschema.py index e0898fafef..4feab8c1b2 100644 --- a/tests/unit/test_acq_order_lines_jsonschema.py +++ b/tests/unit/test_acq_order_lines_jsonschema.py @@ -25,24 +25,22 @@ from jsonschema.exceptions import ValidationError from rero_ils.modules.acquisition.acq_order_lines.api import AcqOrderLine -from rero_ils.modules.acquisition.acq_order_lines.models import \ - AcqOrderLineNoteType +from rero_ils.modules.acquisition.acq_order_lines.models import AcqOrderLineNoteType -def test_notes(app, acq_order_line_schema, - acq_order_line_fiction_martigny_data_tmp): +def test_notes(app, acq_order_line_schema, acq_order_line_fiction_martigny_data_tmp): """Test notes acq order lines jsonschemas.""" order_line_data = acq_order_line_fiction_martigny_data_tmp - order_line_data['notes'] = [ - {'type': AcqOrderLineNoteType.STAFF, 'content': 'note content'}, - {'type': AcqOrderLineNoteType.VENDOR, 'content': 'note content 2'}, + order_line_data["notes"] = [ + {"type": AcqOrderLineNoteType.STAFF, "content": "note content"}, + {"type": AcqOrderLineNoteType.VENDOR, "content": "note content 2"}, ] validate(order_line_data, acq_order_line_schema) with pytest.raises(ValidationError): - order_line_data['notes'] = [ - {'type': AcqOrderLineNoteType.STAFF, 'content': 'note content'}, - {'type': AcqOrderLineNoteType.STAFF, 'content': 'note content 2'}, + order_line_data["notes"] = [ + {"type": AcqOrderLineNoteType.STAFF, "content": "note content"}, + {"type": AcqOrderLineNoteType.STAFF, "content": "note content 2"}, ] AcqOrderLine.validate(AcqOrderLine(order_line_data)) diff --git a/tests/unit/test_acq_orders_jsonschema.py b/tests/unit/test_acq_orders_jsonschema.py index 52f84d78ac..28b43f2644 100644 --- a/tests/unit/test_acq_orders_jsonschema.py +++ b/tests/unit/test_acq_orders_jsonschema.py @@ -32,15 +32,15 @@ def test_notes(app, acq_order_schema, acq_order_fiction_martigny_data_tmp): """Test notes acq orders jsonschemas.""" order_data = acq_order_fiction_martigny_data_tmp - order_data['notes'] = [ - {'type': AcqOrderNoteType.STAFF, 'content': 'note content'}, - {'type': AcqOrderNoteType.VENDOR, 'content': 'note content 2'}, + order_data["notes"] = [ + {"type": AcqOrderNoteType.STAFF, "content": "note content"}, + {"type": AcqOrderNoteType.VENDOR, "content": "note content 2"}, ] validate(order_data, acq_order_schema) with pytest.raises(ValidationError): - order_data['notes'] = [ - {'type': AcqOrderNoteType.STAFF, 'content': 'note content'}, - {'type': AcqOrderNoteType.STAFF, 'content': 'note content 2'}, + order_data["notes"] = [ + {"type": AcqOrderNoteType.STAFF, "content": "note content"}, + {"type": AcqOrderNoteType.STAFF, "content": "note content 2"}, ] AcqOrder.validate(AcqOrder(order_data)) diff --git 
a/tests/unit/test_acq_receipt_lines_jsonschema.py b/tests/unit/test_acq_receipt_lines_jsonschema.py index 719340b93d..546c236115 100644 --- a/tests/unit/test_acq_receipt_lines_jsonschema.py +++ b/tests/unit/test_acq_receipt_lines_jsonschema.py @@ -25,18 +25,16 @@ from jsonschema.exceptions import ValidationError -def test_vat_rate( - acq_receipt_line_1_fiction_martigny, acq_receipt_line_schema -): +def test_vat_rate(acq_receipt_line_1_fiction_martigny, acq_receipt_line_schema): """Test VAT rate for acq receipt lines jsonschemas.""" receipt_line_data = acq_receipt_line_1_fiction_martigny validate(receipt_line_data, acq_receipt_line_schema) with pytest.raises(ValidationError): - receipt_line_data['vat_rate'] = -1 + receipt_line_data["vat_rate"] = -1 validate(receipt_line_data, acq_receipt_line_schema) with pytest.raises(ValidationError): - receipt_line_data['vat_rate'] = 101 + receipt_line_data["vat_rate"] = 101 validate(receipt_line_data, acq_receipt_line_schema) diff --git a/tests/unit/test_babel_extractors.py b/tests/unit/test_babel_extractors.py index 519500775a..4523fe50cb 100644 --- a/tests/unit/test_babel_extractors.py +++ b/tests/unit/test_babel_extractors.py @@ -26,11 +26,11 @@ def test_babel_extractors_extract_json(babel_filehandle): fileobj=babel_filehandle, keywords=None, comment_tags=None, - options={'keys_to_translate': "['title']"} + options={"keys_to_translate": "['title']"}, ) assert translations == [ - (4, 'gettext', 'Organisation', []), - (14, 'gettext', 'Schema', []), - (21, 'gettext', 'Organisation ID', []), - (25, 'gettext', 'Name', []) + (4, "gettext", "Organisation", []), + (14, "gettext", "Schema", []), + (21, "gettext", "Organisation ID", []), + (25, "gettext", "Name", []), ] diff --git a/tests/unit/test_budgets_jsonschema.py b/tests/unit/test_budgets_jsonschema.py index 3400a514bb..4a903d8d48 100644 --- a/tests/unit/test_budgets_jsonschema.py +++ b/tests/unit/test_budgets_jsonschema.py @@ -38,7 +38,7 @@ def test_pid(budget_schema, budget_2020_martigny_data_tmp): validate(budget_2020_martigny_data_tmp, budget_schema) with pytest.raises(ValidationError): - budget_2020_martigny_data_tmp['pid'] = 25 + budget_2020_martigny_data_tmp["pid"] = 25 validate(budget_2020_martigny_data_tmp, budget_schema) @@ -47,55 +47,50 @@ def test_name(budget_schema, budget_2020_martigny_data_tmp): validate(budget_2020_martigny_data_tmp, budget_schema) with pytest.raises(ValidationError): - budget_2020_martigny_data_tmp['name'] = 25 + budget_2020_martigny_data_tmp["name"] = 25 validate(budget_2020_martigny_data_tmp, budget_schema) -def test_organisation_pid(budget_schema, - budget_2020_martigny_data_tmp): +def test_organisation_pid(budget_schema, budget_2020_martigny_data_tmp): """Test organisation_pid for budgets jsonschemas.""" validate(budget_2020_martigny_data_tmp, budget_schema) with pytest.raises(ValidationError): - budget_2020_martigny_data_tmp['organisation_pid'] = 25 + budget_2020_martigny_data_tmp["organisation_pid"] = 25 validate(budget_2020_martigny_data_tmp, budget_schema) -def test_library( - budget_schema, budget_2020_martigny_data_tmp): +def test_library(budget_schema, budget_2020_martigny_data_tmp): """Test library for budgets jsonschemas.""" validate(budget_2020_martigny_data_tmp, budget_schema) with pytest.raises(ValidationError): - budget_2020_martigny_data_tmp['library'] = 25 + budget_2020_martigny_data_tmp["library"] = 25 validate(budget_2020_martigny_data_tmp, budget_schema) -def test_start_date( - budget_schema, budget_2020_martigny_data_tmp): +def 
test_start_date(budget_schema, budget_2020_martigny_data_tmp): """Test start date for budgets jsonschemas.""" validate(budget_2020_martigny_data_tmp, budget_schema) with pytest.raises(ValidationError): - budget_2020_martigny_data_tmp['start_date'] = 25 + budget_2020_martigny_data_tmp["start_date"] = 25 validate(budget_2020_martigny_data_tmp, budget_schema) -def test_end_date( - budget_schema, budget_2020_martigny_data_tmp): +def test_end_date(budget_schema, budget_2020_martigny_data_tmp): """Test end date for budgets jsonschemas.""" validate(budget_2020_martigny_data_tmp, budget_schema) with pytest.raises(ValidationError): - budget_2020_martigny_data_tmp['end_date'] = 25 + budget_2020_martigny_data_tmp["end_date"] = 25 validate(budget_2020_martigny_data_tmp, budget_schema) -def test_is_active( - budget_schema, budget_2020_martigny_data_tmp): +def test_is_active(budget_schema, budget_2020_martigny_data_tmp): """Test is_active for budgets jsonschemas.""" validate(budget_2020_martigny_data_tmp, budget_schema) with pytest.raises(ValidationError): - budget_2020_martigny_data_tmp['is_active'] = 25 + budget_2020_martigny_data_tmp["is_active"] = 25 validate(budget_2020_martigny_data_tmp, budget_schema) diff --git a/tests/unit/test_circ_policies_jsonschema.py b/tests/unit/test_circ_policies_jsonschema.py index eddf4403c4..ed01925a91 100644 --- a/tests/unit/test_circ_policies_jsonschema.py +++ b/tests/unit/test_circ_policies_jsonschema.py @@ -26,8 +26,10 @@ from jsonschema import validate from jsonschema.exceptions import ValidationError -from rero_ils.modules.circ_policies.api import DUE_SOON_REMINDER_TYPE, \ - OVERDUE_REMINDER_TYPE +from rero_ils.modules.circ_policies.api import ( + DUE_SOON_REMINDER_TYPE, + OVERDUE_REMINDER_TYPE, +) def test_required(circ_policy_schema, circ_policy_martigny_data_tmp): @@ -44,7 +46,7 @@ def test_pid(circ_policy_schema, circ_policy_martigny_data_tmp): validate(circ_policy_martigny_data_tmp, circ_policy_schema) with pytest.raises(ValidationError): - circ_policy_martigny_data_tmp['pid'] = 25 + circ_policy_martigny_data_tmp["pid"] = 25 validate(circ_policy_martigny_data_tmp, circ_policy_schema) @@ -53,7 +55,7 @@ def test_circ_policy_name(circ_policy_schema, circ_policy_martigny_data_tmp): validate(circ_policy_martigny_data_tmp, circ_policy_schema) with pytest.raises(ValidationError): - circ_policy_martigny_data_tmp['name'] = 25 + circ_policy_martigny_data_tmp["name"] = 25 validate(circ_policy_martigny_data_tmp, circ_policy_schema) @@ -62,7 +64,7 @@ def test_circ_policy_desc(circ_policy_schema, circ_policy_martigny_data_tmp): validate(circ_policy_martigny_data_tmp, circ_policy_schema) with pytest.raises(ValidationError): - circ_policy_martigny_data_tmp['description'] = 25 + circ_policy_martigny_data_tmp["description"] = 25 validate(circ_policy_martigny_data_tmp, circ_policy_schema) @@ -71,7 +73,7 @@ def test_circ_policy_org(circ_policy_schema, circ_policy_martigny_data_tmp): validate(circ_policy_martigny_data_tmp, circ_policy_schema) with pytest.raises(ValidationError): - circ_policy_martigny_data_tmp['organisation_pid'] = 25 + circ_policy_martigny_data_tmp["organisation_pid"] = 25 validate(circ_policy_martigny_data_tmp, circ_policy_schema) @@ -82,7 +84,7 @@ def test_circ_policy_renewal_duration( validate(circ_policy_martigny_data_tmp, circ_policy_schema) with pytest.raises(ValidationError): - circ_policy_martigny_data_tmp['renewal_duration'] = '25' + circ_policy_martigny_data_tmp["renewal_duration"] = "25" validate(circ_policy_martigny_data_tmp, circ_policy_schema)
@@ -93,122 +95,114 @@ def test_circ_policy_checkout_duration( validate(circ_policy_martigny_data_tmp, circ_policy_schema) with pytest.raises(ValidationError): - circ_policy_martigny_data_tmp['checkout_duration'] = '25' + circ_policy_martigny_data_tmp["checkout_duration"] = "25" validate(circ_policy_martigny_data_tmp, circ_policy_schema) -def test_circ_policy_allow_requests( - circ_policy_schema, circ_policy_martigny_data_tmp -): +def test_circ_policy_allow_requests(circ_policy_schema, circ_policy_martigny_data_tmp): """Test allow_requests for circulation policy jsonschema.""" validate(circ_policy_martigny_data_tmp, circ_policy_schema) with pytest.raises(ValidationError): - circ_policy_martigny_data_tmp['allow_requests'] = 25 + circ_policy_martigny_data_tmp["allow_requests"] = 25 validate(circ_policy_martigny_data_tmp, circ_policy_schema) -def test_circ_policy_number_renewals( - circ_policy_schema, circ_policy_martigny_data_tmp -): +def test_circ_policy_number_renewals(circ_policy_schema, circ_policy_martigny_data_tmp): """Test number_renewals for circulation policy jsonschema.""" validate(circ_policy_martigny_data_tmp, circ_policy_schema) with pytest.raises(ValidationError): - circ_policy_martigny_data_tmp['number_renewals'] = '25' + circ_policy_martigny_data_tmp["number_renewals"] = "25" validate(circ_policy_martigny_data_tmp, circ_policy_schema) -def test_circ_policy_is_default( - circ_policy_schema, circ_policy_martigny_data_tmp -): +def test_circ_policy_is_default(circ_policy_schema, circ_policy_martigny_data_tmp): """Test is_default for circulation policy jsonschema.""" validate(circ_policy_martigny_data_tmp, circ_policy_schema) with pytest.raises(ValidationError): - circ_policy_martigny_data_tmp['is_default'] = 25 + circ_policy_martigny_data_tmp["is_default"] = 25 validate(circ_policy_martigny_data_tmp, circ_policy_schema) -def test_circ_policy_reminders(circ_policy_schema, - circ_policy_short_martigny): +def test_circ_policy_reminders(circ_policy_schema, circ_policy_short_martigny): """Test reminders section for circulation policy jsonschemas.""" cipo = deepcopy(circ_policy_short_martigny) validate(cipo, circ_policy_schema) # Empty reminders array is invalid with pytest.raises(ValidationError): - cipo['reminders'] = [] + cipo["reminders"] = [] validate(cipo, circ_policy_schema) due_soon_reminder = { - 'type': DUE_SOON_REMINDER_TYPE, - 'days_delay': 3, - 'communication_channel': _('patron_setting'), - 'template': 'email/due_soon/' + "type": DUE_SOON_REMINDER_TYPE, + "days_delay": 3, + "communication_channel": _("patron_setting"), + "template": "email/due_soon/", } - cipo['reminders'].append(due_soon_reminder) + cipo["reminders"].append(due_soon_reminder) validate(cipo, circ_policy_schema) # Two "DUE_SOON" reminders are disallowed with pytest.raises(ValidationError): due_soon_reminder_2 = deepcopy(due_soon_reminder) - due_soon_reminder_2['days_delay'] = 5 - cipo['reminders'].append(due_soon_reminder_2) + due_soon_reminder_2["days_delay"] = 5 + cipo["reminders"].append(due_soon_reminder_2) validate(cipo, circ_policy_schema) # valid for JSON schema cipo.validate() # invalid against extended_validation rules - del cipo['reminders'][1] + del cipo["reminders"][1] # Two "OVERDUE" reminders with the same delay are disallowed overdue_reminder = { - 'type': OVERDUE_REMINDER_TYPE, - 'days_delay': 2, - 'communication_channel': _('mail'), - 'template': 'email/overdue' + "type": OVERDUE_REMINDER_TYPE, + "days_delay": 2, + "communication_channel": _("mail"), + "template": "email/overdue", } with
pytest.raises(ValidationError): overdue_reminder1 = deepcopy(overdue_reminder) overdue_reminder2 = deepcopy(overdue_reminder) - overdue_reminder2['template'] = 'email/overdue' - cipo['reminders'].extend([overdue_reminder1, overdue_reminder2]) + overdue_reminder2["template"] = "email/overdue" + cipo["reminders"].extend([overdue_reminder1, overdue_reminder2]) validate(cipo, circ_policy_schema) # valid for JSON schema cipo.validate() # invalid against extended_validation rules - del cipo['reminders'] + del cipo["reminders"] -def test_circ_policy_overdue_fees(circ_policy_schema, - circ_policy_short_martigny): +def test_circ_policy_overdue_fees(circ_policy_schema, circ_policy_short_martigny): """Test overdue fees section for circulation policy jsonschemas.""" cipo = deepcopy(circ_policy_short_martigny) validate(cipo, circ_policy_schema) overdue_data = { - 'maximum_total_amount': 100, - 'intervals': [ - {'from': 1, 'to': 5, 'fee_amount': 0.1}, - {'from': 11, 'fee_amount': 0.5}, - {'from': 6, 'to': 10, 'fee_amount': 0.1} - ] + "maximum_total_amount": 100, + "intervals": [ + {"from": 1, "to": 5, "fee_amount": 0.1}, + {"from": 11, "fee_amount": 0.5}, + {"from": 6, "to": 10, "fee_amount": 0.1}, + ], } - cipo['overdue_fees'] = overdue_data + cipo["overdue_fees"] = overdue_data validate(cipo, circ_policy_schema) cipo.validate() # two intervals with no upper limit with pytest.raises(ValidationError): invalid_overdue_data = deepcopy(overdue_data) - del invalid_overdue_data['intervals'][2]['to'] - cipo['overdue_fees'] = invalid_overdue_data + del invalid_overdue_data["intervals"][2]["to"] + cipo["overdue_fees"] = invalid_overdue_data cipo.validate() # two intervals with conflict on lower interval limit with pytest.raises(ValidationError): invalid_overdue_data = deepcopy(overdue_data) - invalid_overdue_data['intervals'][2]['from'] = 4 - cipo['overdue_fees'] = invalid_overdue_data + invalid_overdue_data["intervals"][2]["from"] = 4 + cipo["overdue_fees"] = invalid_overdue_data cipo.validate() # two intervals with conflict on upper interval limit with pytest.raises(ValidationError): invalid_overdue_data = deepcopy(overdue_data) - invalid_overdue_data['intervals'][0]['to'] = 7 - cipo['overdue_fees'] = invalid_overdue_data + invalid_overdue_data["intervals"][0]["to"] = 7 + cipo["overdue_fees"] = invalid_overdue_data cipo.validate() diff --git a/tests/unit/test_cli_fixtures.py b/tests/unit/test_cli_fixtures.py index b32b81f5fb..b7a2d190a1 100644 --- a/tests/unit/test_cli_fixtures.py +++ b/tests/unit/test_cli_fixtures.py @@ -26,34 +26,25 @@ from rero_ils.modules.cli.fixtures import count_cli, create -def test_count(app, script_info): +def test_count(app): """Test count cli.""" - json_file_name = join(dirname(__file__), '../data/documents.json') + json_file_name = join(dirname(__file__), "../data/documents.json") runner = CliRunner() - result = runner.invoke( - count_cli, - [json_file_name], - obj=script_info - ) + result = runner.invoke(count_cli, [json_file_name]) assert result.exit_code == 0 - assert result.output.strip().split('\n')[1] == 'Count: 2' + assert result.output.strip().split("\n")[1] == "Count: 2" runner = CliRunner() - result = runner.invoke( - count_cli, - [json_file_name, '-l'], - obj=script_info - ) + result = runner.invoke(count_cli, [json_file_name, "-l"]) assert result.exit_code == 0 - assert result.output.strip().split('\n')[1] == 'Count: 2' + assert result.output.strip().split("\n")[1] == "Count: 2" -@mock.patch('requests.Session.get') -def test_create(mock_contributions_mef_get, app, 
script_info, - entity_person_response_data): +@mock.patch("requests.Session.get") +def test_create(mock_contributions_mef_get, app, entity_person_response_data): """Test create cli.""" - json_file_name = join(dirname(__file__), '../data/documents.json') + json_file_name = join(dirname(__file__), "../data/documents.json") mock_contributions_mef_get.return_value = mock_response( json_data=entity_person_response_data ) @@ -61,28 +52,46 @@ def test_create(mock_contributions_mef_get, app, script_info, runner = CliRunner() result = runner.invoke( create, - [json_file_name, '--pid_type', 'doc', '--append', '--reindex', - '--dbcommit', '--verbose', '--debug', '--lazy', '--dont-stop'], - obj=script_info + [ + json_file_name, + "--pid_type", + "doc", + "--append", + "--reindex", + "--dbcommit", + "--verbose", + "--debug", + "--lazy", + "--dont-stop", + ], ) # assert result.exit_code == 0 - assert result.output.strip().split('\n')[3:] == [ - 'DB commit: 2', - 'Append fixtures new identifiers: 2', - 'DB commit append: 2' + assert result.output.strip().split("\n")[3:] == [ + "DB commit: 2", + "Append fixtures new identifiers: 2", + "DB commit append: 2", ] runner = CliRunner() result = runner.invoke( create, - [json_file_name, '--pid_type', 'doc', '--append', '--reindex', - '--dbcommit', '--verbose', '--debug', '--lazy', '--dont-stop', - '--create_or_update'], - obj=script_info + [ + json_file_name, + "--pid_type", + "doc", + "--append", + "--reindex", + "--dbcommit", + "--verbose", + "--debug", + "--lazy", + "--dont-stop", + "--create_or_update", + ], ) # assert result.exit_code == 0 - assert result.output.strip().split('\n')[3:] == [ - 'DB commit: 2', - 'Append fixtures new identifiers: 0', - 'DB commit append: 0' + assert result.output.strip().split("\n")[3:] == [ + "DB commit: 2", + "Append fixtures new identifiers: 0", + "DB commit append: 0", ] diff --git a/tests/unit/test_cli_index.py b/tests/unit/test_cli_index.py index 8b1da7f5e6..c69292407e 100644 --- a/tests/unit/test_cli_index.py +++ b/tests/unit/test_cli_index.py @@ -19,12 +19,18 @@ from click.testing import CliRunner -from rero_ils.modules.cli.index import delete_queue, init_queue, purge_queue, \ reindex, reindex_missing, run +from rero_ils.modules.cli.index import ( + delete_queue, + init_queue, + purge_queue, + reindex, + reindex_missing, + run, +) from rero_ils.modules.organisations.api import Organisation -def test_cli_reindex_missing(app, script_info, org_sion_data): +def test_cli_reindex_missing(app, org_sion_data): """Test reindex missing cli.""" org = Organisation.create( data=org_sion_data, @@ -33,117 +39,69 @@ ) runner = CliRunner() - res = runner.invoke( - reindex_missing, - ['-t', 'xxx', '-t', 'org', '-v'], - obj=script_info - ) - assert res.output.strip().split('\n') == [ - 'Indexing missing xxx: ERROR pid type does not exist!', - 'Indexing missing org: 1', - '1\torg\torg2' + res = runner.invoke(reindex_missing, ["-t", "xxx", "-t", "org", "-v"]) + assert res.output.strip().split("\n") == [ + "Indexing missing xxx: ERROR pid type does not exist!", + "Indexing missing org: 1", + "1\torg\torg2", ] # test reindex with integrated queue # - we have to initialize the default indexer queue runner = CliRunner() - res = runner.invoke( - init_queue, - [], - obj=script_info - ) - assert res.output.strip().split('\n') == [ - 'Queue has been initialized: indexer' - ] + res = runner.invoke(init_queue, []) + assert res.output.strip().split("\n") == ["Queue has been initialized: 
indexer"] runner = CliRunner() - res = runner.invoke( - reindex, - ['-t', 'org', '--yes-i-know'], - obj=script_info - ) - assert res.output.strip().split('\n') == [ - 'Sending org to indexing queue (indexer): 1', - 'Execute "invenio reroils index run" command to process the queue!' + res = runner.invoke(reindex, ["-t", "org", "--yes-i-know"]) + assert res.output.strip().split("\n") == [ + "Sending org to indexing queue (indexer): 1", + 'Execute "invenio reroils index run" command to process the queue!', ] runner = CliRunner() - res = runner.invoke( - run, - [], - obj=script_info - ) - assert res.output.strip().split('\n') == [ - 'Indexing records ...', - '"indexer" indexed: 1 error: 0' + res = runner.invoke(run, []) + assert res.output.strip().split("\n") == [ + "Indexing records ...", + '"indexer" indexed: 1 error: 0', ] # - test direct indexing: runner = CliRunner() - res = runner.invoke( - reindex, - ['-t', 'org', '--yes-i-know', '-d'], - obj=script_info - ) - assert res.output.strip().split('\n') == [ - 'Indexing org: 1', - f'1\t{org.id}\t{org.pid}' + res = runner.invoke(reindex, ["-t", "org", "--yes-i-know", "-d"]) + assert res.output.strip().split("\n") == [ + "Indexing org: 1", + f"1\t{org.id}\t{org.pid}", ] # test reindex with dynamically created queue `test_queue` # - initialize a new indexer queue - queue_name = 'test_queue' + queue_name = "test_queue" runner = CliRunner() - res = runner.invoke( - init_queue, - ['-n', queue_name], - obj=script_info - ) - assert res.output.strip().split('\n') == [ - 'Queue has been initialized: test_queue' - ] + res = runner.invoke(init_queue, ["-n", queue_name]) + assert res.output.strip().split("\n") == ["Queue has been initialized: test_queue"] runner = CliRunner() - res = runner.invoke( - reindex, - ['-t', 'org', '-q', queue_name, '--yes-i-know'], - obj=script_info - ) - assert res.output.strip().split('\n') == [ - 'Sending org to indexing queue (test_queue): 1', + res = runner.invoke(reindex, ["-t", "org", "-q", queue_name, "--yes-i-know"]) + assert res.output.strip().split("\n") == [ + "Sending org to indexing queue (test_queue): 1", f'Execute "invenio reroils index run -q {queue_name}" ' - 'command to process the queue!'
+ "command to process the queue!", ] runner = CliRunner() - res = runner.invoke( - run, - ['-q', queue_name], - obj=script_info - ) - assert res.output.strip().split('\n') == [ - f'Indexing records ({queue_name})...', - f'"{queue_name}" indexed: 1 error: 0' + res = runner.invoke(run, ["-q", queue_name]) + assert res.output.strip().split("\n") == [ + f"Indexing records ({queue_name})...", + f'"{queue_name}" indexed: 1 error: 0', ] # - purge the new indexer queue - queue_name = 'test_queue' + queue_name = "test_queue" runner = CliRunner() - res = runner.invoke( - purge_queue, - ['-n', queue_name], - obj=script_info - ) - assert res.output.strip().split('\n') == [ - f'Queue has been purged: {queue_name} 0' - ] + res = runner.invoke(purge_queue, ["-n", queue_name]) + assert res.output.strip().split("\n") == [f"Queue has been purged: {queue_name} 0"] # - delete the new indexer queue - queue_name = 'test_queue' + queue_name = "test_queue" runner = CliRunner() - res = runner.invoke( - delete_queue, - ['-n', queue_name], - obj=script_info - ) - assert res.output.strip().split('\n') == [ - f'Queue has been deleted: {queue_name}' - ] + res = runner.invoke(delete_queue, ["-n", queue_name]) + assert res.output.strip().split("\n") == [f"Queue has been deleted: {queue_name}"] diff --git a/tests/unit/test_cli_utils.py b/tests/unit/test_cli_utils.py index a1ec17d877..bf3903870f 100644 --- a/tests/unit/test_cli_utils.py +++ b/tests/unit/test_cli_utils.py @@ -21,51 +21,42 @@ from click.testing import CliRunner -from rero_ils.modules.cli.utils import check_validate, extract_from_xml, \ - token_create +from rero_ils.modules.cli.utils import check_validate, extract_from_xml, token_create -def test_cli_validate(app, script_info): +def test_cli_validate(app): """Test validate cli.""" runner = CliRunner() - file_name = join(dirname(__file__), '../data/documents.json') + file_name = join(dirname(__file__), "../data/documents.json") - res = runner.invoke( - check_validate, - [file_name, 'doc', '-v'], - obj=script_info - ) - assert res.output.strip().split('\n') == [ - f'Testing json schema for file: {file_name} type: doc', - '\tTest record: 1', - '\tTest record: 2' + res = runner.invoke(check_validate, [file_name, "doc", "-v"]) + assert res.output.strip().split("\n") == [ + f"Testing json schema for file: {file_name} type: doc", + "\tTest record: 1", + "\tTest record: 2", ] -def test_cli_access_token(app, script_info, patron_martigny): +def test_cli_access_token(app, patron_martigny): """Test access token cli.""" runner = CliRunner() res = runner.invoke( token_create, - ['-n', 'test', '-u', patron_martigny.dumps().get('email'), - '-t', 'my_token'], - obj=script_info + ["-n", "test", "-u", patron_martigny.dumps().get("email"), "-t", "my_token"], ) - assert res.output.strip().split('\n') == ['my_token'] + assert res.output.strip().split("\n") == ["my_token"] -def test_cli_extract_from_xml(app, tmpdir, document_marcxml, script_info): +def test_cli_extract_from_xml(app, tmpdir, document_marcxml): """Test extract from xml cli.""" - pids_path = join(dirname(__file__), '..', 'data', '001.pids') - xml_path = join(dirname(__file__), '..', 'data', 'xml', 'documents.xml') - temp_file_name = join(tmpdir, 'temp.xml') + pids_path = join(dirname(__file__), "..", "data", "001.pids") + xml_path = join(dirname(__file__), "..", "data", "xml", "documents.xml") + temp_file_name = join(tmpdir, "temp.xml") runner = CliRunner() result = runner.invoke( - extract_from_xml, - [pids_path, xml_path, temp_file_name, '-v'], - obj=script_info + 
extract_from_xml, [pids_path, xml_path, temp_file_name, "-v"] ) assert result.exit_code == 0 - results_output = result.output.split('\n') - assert results_output[0] == 'Extract pids from xml: ' - assert results_output[4] == 'Search pids count: 1' + results_output = result.output.split("\n") + assert results_output[0] == "Extract pids from xml: " + assert results_output[4] == "Search pids count: 1" diff --git a/tests/unit/test_contributions_jsonschema.py b/tests/unit/test_contributions_jsonschema.py index 32b200cf41..afd0d9edd6 100644 --- a/tests/unit/test_contributions_jsonschema.py +++ b/tests/unit/test_contributions_jsonschema.py @@ -15,7 +15,7 @@ # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . -'''patron JSON schema tests.''' +"""patron JSON schema tests.""" from __future__ import absolute_import, print_function @@ -25,7 +25,7 @@ def test_required(remote_entities_schema, entity_person_data_tmp): - '''Test required for patron jsonschemas.''' + """Test required for patron jsonschemas.""" validate(entity_person_data_tmp, remote_entities_schema) with pytest.raises(ValidationError): @@ -33,31 +33,32 @@ def test_required(remote_entities_schema, entity_person_data_tmp): validate(entity_person_data_tmp, remote_entities_schema) with pytest.raises(ValidationError): - validate({ - 'pid': 'ent_pers', - 'viaf_pid': '56597999', - 'sources': [ - 'rero', - 'gnd' - ]}, remote_entities_schema) + validate( + {"pid": "ent_pers", "viaf_pid": "56597999", "sources": ["rero", "gnd"]}, + remote_entities_schema, + ) validate(entity_person_data_tmp, remote_entities_schema) with pytest.raises(ValidationError): - validate({ - '$schema': 'https://bib.rero.ch/schemas/remote_entities/' - 'remote_entity-v0.0.1.json', - 'viaf_pid': '56597999', - 'sources': [ - 'rero', - 'gnd' - ]}, remote_entities_schema) + validate( + { + "$schema": "https://bib.rero.ch/schemas/remote_entities/" + "remote_entity-v0.0.1.json", + "viaf_pid": "56597999", + "sources": ["rero", "gnd"], + }, + remote_entities_schema, + ) validate(entity_person_data_tmp, remote_entities_schema) with pytest.raises(ValidationError): - validate({ - '$schema': 'https://bib.rero.ch/schemas/remote_entities/' - 'remote_entity-v0.0.1.json', - 'pid': 'ent_pers', - 'viaf_pid': '56597999' - }, remote_entities_schema) + validate( + { + "$schema": "https://bib.rero.ch/schemas/remote_entities/" + "remote_entity-v0.0.1.json", + "pid": "ent_pers", + "viaf_pid": "56597999", + }, + remote_entities_schema, + ) validate(entity_person_data_tmp, remote_entities_schema) diff --git a/tests/unit/test_cql_parser.py b/tests/unit/test_cql_parser.py index 72c86c875a..29c131dfb7 100644 --- a/tests/unit/test_cql_parser.py +++ b/tests/unit/test_cql_parser.py @@ -19,17 +19,29 @@ import pytest -from rero_ils.modules.sru.cql_parser import RESERVED_PREFIXES, Boolean, \ - Diagnostic, Index, ModifiableObject, ModifierClause, PrefixableObject, \ - PrefixedObject, Relation, SearchClause, Term, Triple, parse +from rero_ils.modules.sru.cql_parser import ( + RESERVED_PREFIXES, + Boolean, + Diagnostic, + Index, + ModifiableObject, + ModifierClause, + PrefixableObject, + PrefixedObject, + Relation, + SearchClause, + Term, + Triple, + parse, +) def test_diagnostic(): """Test Diagnostic class.""" diag = Diagnostic() diag.code = 45 - diag.details = 'test' - assert str(diag) == 'info:srw/diagnostic/1/45 [Malformed Query]: test' + diag.details = "test" + assert str(diag) == "info:srw/diagnostic/1/45 [Malformed Query]: test" def 
test_get_query_clause(app): @@ -39,21 +51,21 @@ def test_get_query_clause(app): assert isinstance(query, SearchClause) # Check Index assert isinstance(query.index, Index) - assert query.index.prefix == 'dc' - assert query.index.value == 'anywhere' + assert query.index.prefix == "dc" + assert query.index.value == "anywhere" # Check Relation assert isinstance(query.relation, Relation) - assert query.relation.value == 'all' + assert query.relation.value == "all" # Check Value assert isinstance(query.term, Term) assert query.term.value == '"spam hamm"' es_string = query.to_es() - assert es_string == '(spam AND hamm)' + assert es_string == "(spam AND hamm)" - query = parse('((title=spam) or (subtitle=hamm)) or eggs') + query = parse("((title=spam) or (subtitle=hamm)) or eggs") es_string = query.to_es() - assert es_string == '((title:spam OR subtitle:hamm) OR eggs)' - assert query.get_result_set_id() == '' + assert es_string == "((title:spam OR subtitle:hamm) OR eggs)" + assert query.get_result_set_id() == "" def test_get_query_clause_no_xml_character(app): @@ -67,7 +79,7 @@ def test_get_query_clause_no_xml_character(app): assert isinstance(query, SearchClause) assert query.term.value == '">><<"' es_string = query.to_es() - assert es_string == '(>><<)' + assert es_string == "(>><<)" def test_get_query_clause_utf8(app): @@ -77,16 +89,16 @@ def test_get_query_clause_utf8(app): assert isinstance(query, SearchClause) # Check Index assert isinstance(query.index, Index) - assert query.index.prefix == 'dc' - assert query.index.value == 'anywhere' + assert query.index.prefix == "dc" + assert query.index.value == "anywhere" # Check Relation assert isinstance(query.relation, Relation) - assert query.relation.value == 'any' + assert query.relation.value == "any" # Check Value assert isinstance(query.term, Term) assert query.term.value == '"späm hämm"' es_string = query.to_es() - assert es_string == '(späm OR hämm)' + assert es_string == "(späm OR hämm)" def test_get_query_clause_modifiers(): @@ -95,10 +107,10 @@ def test_get_query_clause_modifiers(): assert len(query.relation.modifiers) > 0 for mod in query.relation.modifiers: assert isinstance(mod, ModifierClause) - assert str(query.relation.modifiers[0].type) == 'cql.stem' - assert str(query.relation.modifiers[1].type) == 'rel.algorithm' - assert str(query.relation.modifiers[1].comparison) == '=' - assert str(query.relation.modifiers[1].value) == 'okapi' + assert str(query.relation.modifiers[0].type) == "cql.stem" + assert str(query.relation.modifiers[1].type) == "rel.algorithm" + assert str(query.relation.modifiers[1].comparison) == "=" + assert str(query.relation.modifiers[1].value) == "okapi" with pytest.raises(Diagnostic): query.to_es() @@ -106,46 +118,42 @@ def test_get_query_clause_modifiers(): def test_get_query_clause_with_prefix(app): """Check that simple clause with prefix is parsed correctly.""" query = parse( - '>cql="info:srw/cql-context-set/1/cql-v1.1" cql.anywhere ' - 'cql.all "spam"' + '>cql="info:srw/cql-context-set/1/cql-v1.1" cql.anywhere ' 'cql.all "spam"' ) # Check query instance assert isinstance(query, SearchClause) # Check Index assert isinstance(query.index, Index) - assert query.index.prefix == 'cql' - assert query.index.value == 'anywhere' + assert query.index.prefix == "cql" + assert query.index.value == "anywhere" # Check Relation assert isinstance(query.relation, Relation) - assert query.relation.value == 'all' + assert query.relation.value == "all" # Check Value assert isinstance(query.term, Term) assert query.term.value == 
'"spam"' es_string = query.to_es() - assert es_string == '(spam)' + assert es_string == "(spam)" def test_get_query_clause_with_relation_modifier(): """Check that simple clause with relation modifier is parsed correctly.""" - query = parse( - 'anywhere all/relevant "spam"' - ) + query = parse('anywhere all/relevant "spam"') # Check query instance assert isinstance(query, SearchClause) # Check Index assert isinstance(query.index, Index) - assert query.index.value == 'anywhere' + assert query.index.value == "anywhere" # Check Relation assert isinstance(query.relation, Relation) - assert query.relation.value == 'all' + assert query.relation.value == "all" # Check Value assert isinstance(query.term, Term) assert query.term.value == '"spam"' with pytest.raises(Diagnostic) as err: query.to_es() assert str(err.value).startswith( - 'info:srw/diagnostic/1/21 ' - '[Unsupported combination of relation modifers]' + "info:srw/diagnostic/1/21 " "[Unsupported combination of relation modifers]" ) @@ -156,126 +164,124 @@ def test_get_query_clause_with_sorting(app): assert isinstance(query, SearchClause) # Check Index assert isinstance(query.index, Index) - assert query.index.value == 'serverchoice' + assert query.index.value == "serverchoice" # Check Relation assert isinstance(query.relation, Relation) - assert query.relation.value == '=' + assert query.relation.value == "=" # Check Value assert isinstance(query.term, Term) assert query.term.value == '"cat"' with pytest.raises(Diagnostic) as err: query.to_es() - assert str(err.value) == \ - 'info:srw/diagnostic/1/80 [Sort not supported]: ' + assert str(err.value) == "info:srw/diagnostic/1/80 [Sort not supported]: " def test_get_query_clause_with_relation(app): """Check that relation clause is parsed correctly.""" - query = parse('year > 1999') + query = parse("year > 1999") # Check query instance assert isinstance(query, SearchClause) # Check Relation assert isinstance(query.relation, Relation) - assert query.relation.value == '>' + assert query.relation.value == ">" # Check Value assert isinstance(query.term, Term) - assert query.term.value == '1999' + assert query.term.value == "1999" es_string = query.to_es() - assert es_string == 'year:>1999' + assert es_string == "year:>1999" query = parse( - 'ind1 = 1 AND ind2 > 2 AND ind3 >= 3 AND ' + - 'ind4 < 4 AND ind5 <= 5 AND ind6 <> 6' + "ind1 = 1 AND ind2 > 2 AND ind3 >= 3 AND " + "ind4 < 4 AND ind5 <= 5 AND ind6 <> 6" ) es_string = query.to_es() assert es_string == ( - '(((((ind1:1 AND ind2:>2) AND ind3:>=3) ' + "(((((ind1:1 AND ind2:>2) AND ind3:>=3) " 'AND ind4:<4) AND ind5:<=5) AND ind6:-"6")' ) def test_get_query_triple(app): """Check that query with boolean is parsed correctly.""" - query = parse('dc.anywhere all spam and dc.anywhere all eggs') + query = parse("dc.anywhere all spam and dc.anywhere all eggs") # Check query instance assert isinstance(query, Triple) # Check left clause assert isinstance(query.left_operand, SearchClause) # remember terms get quoted during parsing - assert query.left_operand.to_es() == '(spam)' + assert query.left_operand.to_es() == "(spam)" # Check boolean assert isinstance(query.boolean, Boolean) - assert query.boolean.value == 'and' + assert query.boolean.value == "and" # Check right clause assert isinstance(query.right_operand, SearchClause) # Remember terms get quoted during parsing - assert query.right_operand.to_es() == '(eggs)' + assert query.right_operand.to_es() == "(eggs)" es_string = query.to_es() - assert es_string == '((spam) 
AND (eggs))' + assert es_string == "((spam) AND (eggs))" query = parse("dc.anywhere prox spam") with pytest.raises(Diagnostic) as err: query.to_es() assert str(err.value) == ( - 'info:srw/diagnostic/1/37 [Unsupported boolean operator]: prox' + "info:srw/diagnostic/1/37 [Unsupported boolean operator]: prox" ) - assert query.get_result_set_id() == '' + assert query.get_result_set_id() == "" def test_get_query_triple_with_sort(app): """Check that query with boolean is parsed correctly.""" - query = parse( - 'dc.anywhere all spam and dc.anywhere all eggs sortBy subtitle' - ) + query = parse("dc.anywhere all spam and dc.anywhere all eggs sortBy subtitle") # Check query instance assert isinstance(query, Triple) with pytest.raises(Diagnostic) as err: query.to_es() - assert str(err.value) == 'info:srw/diagnostic/1/80 [Sort not supported]: ' + assert str(err.value) == "info:srw/diagnostic/1/80 [Sort not supported]: " def test_get_query_with_modifiers(): """Check that query with modifiers is parsed correctly.""" # Relation Modifiers - q_string = 'dc.anywhere any/relevant spam' + q_string = "dc.anywhere any/relevant spam" query = parse(q_string) assert len(query.relation.modifiers) > 0 for mod in query.relation.modifiers: assert isinstance(mod, ModifierClause) - assert str(query.relation.modifiers[0].type) == 'cql.relevant' + assert str(query.relation.modifiers[0].type) == "cql.relevant" assert not str(query.relation.modifiers[0].comparison) assert not str(query.relation.modifiers[0].value) with pytest.raises(Diagnostic): query.to_es() # Boolean modifiers - q_string = 'dc.anywhere all spam and/rel.combine=sum dc.anywhere all eggs' + q_string = "dc.anywhere all spam and/rel.combine=sum dc.anywhere all eggs" query = parse(q_string) assert len(query.boolean.modifiers) > 0 for mod in query.boolean.modifiers: assert isinstance(mod, ModifierClause) - assert str(query.boolean.modifiers[0].type) == 'rel.combine' - assert str(query.boolean.modifiers[0].comparison) == '=' - assert str(query.boolean.modifiers[0].value) == 'sum' + assert str(query.boolean.modifiers[0].type) == "rel.combine" + assert str(query.boolean.modifiers[0].comparison) == "=" + assert str(query.boolean.modifiers[0].value) == "sum" with pytest.raises(Diagnostic): query.to_es() def test_errors(): """Check errors are thrown correctly.""" - q_string = '' + q_string = "" with pytest.raises(Diagnostic) as err: parse(q_string) assert str(err.value) == ( - 'info:srw/diagnostic/1/10 [Malformed Query]: ' - 'Expected Boolean or Relation but got: ' + "info:srw/diagnostic/1/10 [Malformed Query]: " + "Expected Boolean or Relation but got: " ) - q_string = '123 456' + q_string = "123 456" with pytest.raises(Diagnostic) as err: parse(q_string) assert str(err.value) == ( - 'info:srw/diagnostic/1/10 [Malformed Query]: ' - 'Expected Term, got end of query.' + "info:srw/diagnostic/1/10 [Malformed Query]: " + "Expected Term, got end of query."
) - q_string = '123 any 789 abc' + q_string = "123 any 789 abc" with pytest.raises(Diagnostic) as err: parse(q_string) assert str(err.value) == ( @@ -286,94 +292,84 @@ def test_errors(): def test_prefixable_object(): """Test PrefixableObject.""" - prefix_object = PrefixableObject(query='query') - assert not prefix_object.resolve_prefix('unknown') - assert prefix_object.resolve_prefix('cql') == RESERVED_PREFIXES.get('cql') - prefix_object.add_prefix('name', 'identifier') + prefix_object = PrefixableObject(query="query") + assert not prefix_object.resolve_prefix("unknown") + assert prefix_object.resolve_prefix("cql") == RESERVED_PREFIXES.get("cql") + prefix_object.add_prefix("name", "identifier") prefix_object.error_on_duplicate_prefix = True with pytest.raises(Diagnostic) as err: - prefix_object.add_prefix('name', 'identifier') - assert str(err.value) == ( - 'info:srw/diagnostic/1/45 [Malformed Query]: name' - ) - assert prefix_object.resolve_prefix('name') == 'identifier' + prefix_object.add_prefix("name", "identifier") + assert str(err.value) == ("info:srw/diagnostic/1/45 [Malformed Query]: name") + assert prefix_object.resolve_prefix("name") == "identifier" - parent_object = PrefixableObject(query='query') - parent_object.add_prefix('parent_name', 'parent_identifier') + parent_object = PrefixableObject(query="query") + parent_object.add_prefix("parent_name", "parent_identifier") prefix_object.parent = parent_object - assert prefix_object.resolve_prefix('parent_name') == 'parent_identifier' + assert prefix_object.resolve_prefix("parent_name") == "parent_identifier" prefix_object.parent = None - config_object = PrefixableObject(query='query') - config_object.add_prefix('config_name', 'config_identifier') + config_object = PrefixableObject(query="query") + config_object.add_prefix("config_name", "config_identifier") prefix_object.config = config_object - assert prefix_object.resolve_prefix('config_name') == 'config_identifier' + assert prefix_object.resolve_prefix("config_name") == "config_identifier" def test_prefixed_object(): """Test PrefixedObject.""" - prefixed_object = PrefixedObject('"TICK.TACK"', query='query') - assert prefixed_object.prefix == 'tick' - assert prefixed_object.value == 'tack' + prefixed_object = PrefixedObject('"TICK.TACK"', query="query") + assert prefixed_object.prefix == "tick" + assert prefixed_object.value == "tack" with pytest.raises(Diagnostic) as err: - PrefixedObject(".TICK", query='query') + PrefixedObject(".TICK", query="query") assert str(err.value) == ( - 'info:srw/diagnostic/1/15 [Malformed Query]: Null indexset' + "info:srw/diagnostic/1/15 [Malformed Query]: Null indexset" ) with pytest.raises(Diagnostic) as err: - PrefixedObject("TICK.TACK.TOCK", query='query') + PrefixedObject("TICK.TACK.TOCK", query="query") assert str(err.value) == ( - 'info:srw/diagnostic/1/15 [Malformed Query]: ' + "info:srw/diagnostic/1/15 [Malformed Query]: " 'Multiple "." 
characters: tick.tack.tock' ) with pytest.raises(Diagnostic) as err: - PrefixedObject('"TICK"', query='query', - error_on_quoted_identifier=True) - assert str(err.value) == ( - 'info:srw/diagnostic/1/14 [Malformed Query]: "tick"' - ) + PrefixedObject('"TICK"', query="query", error_on_quoted_identifier=True) + assert str(err.value) == ('info:srw/diagnostic/1/14 [Malformed Query]: "tick"') - parent = PrefixedObject('TUTU', query='query') - parent.prefix_uri = 'prefix_tutu' + parent = PrefixedObject("TUTU", query="query") + parent.prefix_uri = "prefix_tutu" prefixed_object.parent = parent - assert prefixed_object.resolve_prefix() == 'prefix_tutu' - prefixed_object.prefix_uri = 'prefix_url' - assert prefixed_object.resolve_prefix() == 'prefix_url' + assert prefixed_object.resolve_prefix() == "prefix_tutu" + prefixed_object.prefix_uri = "prefix_url" + assert prefixed_object.resolve_prefix() == "prefix_url" def test_modifiable_object(): """Test ModifiableObject.""" modifiable_object = ModifiableObject() assert modifiable_object[0] is None - assert modifiable_object['type'] is None - modifier_clause = ModifierClause('type', comp='comparison', val='value', - query='query') + assert modifiable_object["type"] is None + modifier_clause = ModifierClause( + "type", comp="comparison", val="value", query="query" + ) modifiable_object.modifiers.append(modifier_clause) assert modifiable_object[0] == modifier_clause - assert modifiable_object['type'] == modifier_clause + assert modifiable_object["type"] == modifier_clause def test_term(): """Test Term.""" with pytest.raises(Diagnostic) as err: - Term('', query='query', error_on_empty_term=True) - assert str(err.value) == ( - 'info:srw/diagnostic/1/27 [Malformed Query]: ' - ) + Term("", query="query", error_on_empty_term=True) + assert str(err.value) == ("info:srw/diagnostic/1/27 [Malformed Query]: ") with pytest.raises(Diagnostic) as err: - Term('>=', query='query') - assert str(err.value) == ( - 'info:srw/diagnostic/1/25 [Malformed Query]: >=' - ) + Term(">=", query="query") + assert str(err.value) == ("info:srw/diagnostic/1/25 [Malformed Query]: >=") with pytest.raises(Diagnostic) as err: - Term('^', query='query') + Term("^", query="query") assert str(err.value) == ( - 'info:srw/diagnostic/1/32 [Malformed Query]: ' - 'Only anchoring charater(s) in term: ^' + "info:srw/diagnostic/1/32 [Malformed Query]: " + "Only anchoring charater(s) in term: ^" ) with pytest.raises(Diagnostic) as err: - Term('\\\\x\\yz\\', query='query') - assert str(err.value) == ( - 'info:srw/diagnostic/1/26 [Malformed Query]: \\\\x\\yz\\' - ) + Term("\\\\x\\yz\\", query="query") + assert str(err.value) == ("info:srw/diagnostic/1/26 [Malformed Query]: \\\\x\\yz\\") diff --git a/tests/unit/test_csv.py b/tests/unit/test_csv.py index 46f91d81b8..c373724c71 100644 --- a/tests/unit/test_csv.py +++ b/tests/unit/test_csv.py @@ -26,76 +26,65 @@ from rero_ils.modules.cli.fixtures import bulk_load, bulk_save, create_csv -def test_create_csv(app, tmpdir, script_info): +def test_create_csv(app, tmpdir): """Test create_csv cli.""" tmp_dir_name = tmpdir.dirname - json_file_name = join(dirname(__file__), '../data/documents.json') + json_file_name = join(dirname(__file__), "../data/documents.json") runner = CliRunner() result = runner.invoke( - create_csv, - ['doc', json_file_name, tmp_dir_name, '-l', '-v'], - obj=script_info + create_csv, ["doc", json_file_name, tmp_dir_name, "-l", "-v"] ) assert result.exit_code == 0 - file_name_pidstore = join(tmp_dir_name, 'doc_pidstore.csv') - file_name_metadata 
= join(tmp_dir_name, 'doc_metadata.csv') - file_name_pids = join(tmp_dir_name, 'doc_pids.csv') - output = result.output.split('\n') - assert output[0] == f'Create CSV files for: doc from: {json_file_name}' - assert output[1] == f'\t{file_name_pidstore}' - assert output[2] == f'\t{file_name_metadata}' - assert output[3] == f'\t{file_name_pids}' - assert output[4].split(':')[0] == '1\tdoc\t1' - assert output[5].split(':')[0] == '2\tdoc\t2' + file_name_pidstore = join(tmp_dir_name, "doc_pidstore.csv") + file_name_metadata = join(tmp_dir_name, "doc_metadata.csv") + file_name_pids = join(tmp_dir_name, "doc_pids.csv") + output = result.output.split("\n") + assert output[0] == f"Create CSV files for: doc from: {json_file_name}" + assert output[1] == f"\t{file_name_pidstore}" + assert output[2] == f"\t{file_name_metadata}" + assert output[3] == f"\t{file_name_pids}" + assert output[4].split(":")[0] == "1\tdoc\t1" + assert output[5].split(":")[0] == "2\tdoc\t2" - result = runner.invoke( - bulk_load, - ['doc', file_name_metadata, '-r'], - obj=script_info - ) + result = runner.invoke(bulk_load, ["doc", file_name_metadata, "-r"]) assert result.exit_code == 0 - assert result.output.split('\n') == [ - 'Load doc CSV files into database.', - ' Number of records to load: 2', - f' Load pids: {file_name_pids}', - f' Load pidstore: {file_name_pidstore}', - f' Load metatada: {file_name_metadata}', - '' + assert result.output.split("\n") == [ + "Load doc CSV files into database.", + " Number of records to load: 2", + f" Load pids: {file_name_pids}", + f" Load pidstore: {file_name_pidstore}", + f" Load metatada: {file_name_metadata}", + "", ] - result = runner.invoke( - bulk_save, - [tmp_dir_name, '-t', 'xxx', '-t', 'doc'], - obj=script_info - ) + result = runner.invoke(bulk_save, [tmp_dir_name, "-t", "xxx", "-t", "doc"]) assert result.exit_code == 0 - assert result.output.split('\n') == [ - 'Error xxx does not exist!', - f'Save doc CSV files to directory: {tmp_dir_name}', - 'Saved records: 2', - '' + assert result.output.split("\n") == [ + "Error xxx does not exist!", + f"Save doc CSV files to directory: {tmp_dir_name}", + "Saved records: 2", + "", ] - saved_name_meta = join(tmp_dir_name, 'documents_small_metadata.csv') + saved_name_meta = join(tmp_dir_name, "documents_small_metadata.csv") with open(file_name_metadata) as meta, open(saved_name_meta) as saved_meta: for line1, line2 in zip(meta, saved_meta): - line1 = line1.strip().split('\t')[2:] + line1 = line1.strip().split("\t")[2:] json1 = json.loads(line1[1]) del line1[1] - line2 = line2.strip().split('\t')[2:] + line2 = line2.strip().split("\t")[2:] json2 = json.loads(line2[1]) del line2[1] assert line1 == line2 assert json1 == json2 - saved_name_pidstore = join(tmp_dir_name, 'documents_small_pidstore.csv') - with open(file_name_pidstore) as pids, \ - open(saved_name_pidstore) as saved_pidstore: + saved_name_pidstore = join(tmp_dir_name, "documents_small_pidstore.csv") + with open(file_name_pidstore) as pids, open(saved_name_pidstore) as saved_pidstore: for line1, line2 in zip(pids, saved_pidstore): - line1 = line1.strip().split('\t')[2:] - line2 = line2.strip().split('\t')[2:] + line1 = line1.strip().split("\t")[2:] + line2 = line2.strip().split("\t")[2:] assert line1 == line2 - saved_name_pids = join(tmp_dir_name, 'documents_small_pids.csv') + saved_name_pids = join(tmp_dir_name, "documents_small_pids.csv") with open(file_name_pids) as pids, open(saved_name_pids) as saved_pids: for line1, line2 in zip(pids, saved_pids): line1 = line1.strip() diff --git 
a/tests/unit/test_holdings_jsonschema.py b/tests/unit/test_holdings_jsonschema.py index 6bf4b0d12c..d1c0fc3c25 100644 --- a/tests/unit/test_holdings_jsonschema.py +++ b/tests/unit/test_holdings_jsonschema.py @@ -34,64 +34,62 @@ def test_required(holding_schema, holding_lib_martigny_data): with pytest.raises(ValidationError): validate({}, holding_schema) - validate( - holding_lib_martigny_data, holding_schema) + validate(holding_lib_martigny_data, holding_schema) -def test_required_patterns( - holding_schema, holding_lib_martigny_w_patterns_data): +def test_required_patterns(holding_schema, holding_lib_martigny_w_patterns_data): """Test required for holdings jsonschemas.""" validate(holding_lib_martigny_w_patterns_data, holding_schema) with pytest.raises(ValidationError): validate({}, holding_schema) - validate( - holding_lib_martigny_w_patterns_data, holding_schema) + validate(holding_lib_martigny_w_patterns_data, holding_schema) def test_required_patterns_frequency( - holding_schema, holding_lib_martigny_w_patterns_data): + holding_schema, holding_lib_martigny_w_patterns_data +): """Test required for frequency in the patterns.""" holding = copy.deepcopy(holding_lib_martigny_w_patterns_data) - del holding['patterns']['frequency'] + del holding["patterns"]["frequency"] with pytest.raises(ValidationError): - validate( - holding, holding_schema) + validate(holding, holding_schema) def test_holdings_all_jsonschema_keys_values( - holding_schema, holding_lib_martigny_w_patterns_data): + holding_schema, holding_lib_martigny_w_patterns_data +): """Test all keys and values for holdings jsonschema.""" record = holding_lib_martigny_w_patterns_data validate(record, holding_schema) validator = [ - {'key': 'pid', 'value': 25}, - {'key': 'call_number', 'value': 25}, - {'key': 'second_call_number', 'value': 25}, - {'key': 'document', 'value': 25}, - {'key': 'circulation_category', 'value': 25}, - {'key': 'organisation', 'value': 25}, - {'key': 'library', 'value': 25}, - {'key': 'location', 'value': 25}, - {'key': 'holdings_type', 'value': 25}, - {'key': 'patterns', 'value': 25}, - {'key': 'enumerationAndChronology', 'value': 25}, - {'key': 'supplementaryContent', 'value': 25}, - {'key': 'index', 'value': 25}, - {'key': 'missing_issues', 'value': 25}, - {'key': 'notes', 'value': 25}, - {'key': 'vendor', 'value': 25}, - {'key': 'issue_binding', 'value': 25}, - {'key': 'acquisition_status', 'value': 25}, - {'key': 'acquisition_method', 'value': 25}, - {'key': 'acquisition_expected_end_date', 'value': 25}, - {'key': 'general_retention_policy', 'value': 25}, - {'key': 'completeness', 'value': 25}, - {'key': 'composite_copy_report', 'value': 25}, - {'key': '_masked', 'value': 25} + {"key": "pid", "value": 25}, + {"key": "call_number", "value": 25}, + {"key": "second_call_number", "value": 25}, + {"key": "document", "value": 25}, + {"key": "circulation_category", "value": 25}, + {"key": "organisation", "value": 25}, + {"key": "library", "value": 25}, + {"key": "location", "value": 25}, + {"key": "holdings_type", "value": 25}, + {"key": "patterns", "value": 25}, + {"key": "enumerationAndChronology", "value": 25}, + {"key": "supplementaryContent", "value": 25}, + {"key": "index", "value": 25}, + {"key": "missing_issues", "value": 25}, + {"key": "notes", "value": 25}, + {"key": "vendor", "value": 25}, + {"key": "issue_binding", "value": 25}, + {"key": "acquisition_status", "value": 25}, + {"key": "acquisition_method", "value": 25}, + {"key": "acquisition_expected_end_date", "value": 25}, + {"key": 
"general_retention_policy", "value": 25}, + {"key": "completeness", "value": 25}, + {"key": "composite_copy_report", "value": 25}, + {"key": "_masked", "value": 25}, ] for element in validator: with pytest.raises(ValidationError): - record[element['key']] = element['value'] + record[element["key"]] = element["value"] validate(record, holding_schema) diff --git a/tests/unit/test_ill_requests_jsonschema.py b/tests/unit/test_ill_requests_jsonschema.py index 629c103531..eb5236f563 100644 --- a/tests/unit/test_ill_requests_jsonschema.py +++ b/tests/unit/test_ill_requests_jsonschema.py @@ -38,22 +38,22 @@ def test_required(ill_request_schema, ill_request_martigny_data_tmp): # check PID - pid should be a string with pytest.raises(ValidationError): - ill_request_martigny_data_tmp['pid'] = 25 + ill_request_martigny_data_tmp["pid"] = 25 validate(ill_request_martigny_data_tmp, ill_request_schema) # status - check allowed values with pytest.raises(ValidationError): - ill_request_martigny_data_tmp['status'] = 'fake' + ill_request_martigny_data_tmp["status"] = "fake" validate(ill_request_martigny_data_tmp, ill_request_schema) # check document title - length > 2 with pytest.raises(ValidationError): - ill_request_martigny_data_tmp['document']['title'] = 'no' + ill_request_martigny_data_tmp["document"]["title"] = "no" validate(ill_request_martigny_data_tmp, ill_request_schema) # check document year - length > 0 with pytest.raises(ValidationError): - ill_request_martigny_data_tmp['document']['year'] = '' + ill_request_martigny_data_tmp["document"]["year"] = "" validate(ill_request_martigny_data_tmp, ill_request_schema) @@ -62,22 +62,20 @@ def test_extended_validation(app, ill_request_martigny_data_tmp): data = copy.deepcopy(ill_request_martigny_data_tmp) # pages are required if request is a request copy - data['copy'] = True - if 'pages' in data: - del data['pages'] + data["copy"] = True + if "pages" in data: + del data["pages"] with pytest.raises(ValidationError): ILLRequest.validate(ILLRequest(data)) # test on 'notes' field :: have 2 note of the same type is disallowed data = copy.deepcopy(ill_request_martigny_data_tmp) - data['notes'] = [{ - 'type': ILLRequestNoteStatus.PUBLIC_NOTE, - 'content': 'dummy content' - }] + data["notes"] = [ + {"type": ILLRequestNoteStatus.PUBLIC_NOTE, "content": "dummy content"} + ] ILLRequest.validate(ILLRequest(data)) with pytest.raises(ValidationError): - data['notes'].append({ - 'type': ILLRequestNoteStatus.PUBLIC_NOTE, - 'content': 'second dummy note' - }) + data["notes"].append( + {"type": ILLRequestNoteStatus.PUBLIC_NOTE, "content": "second dummy note"} + ) ILLRequest.validate(ILLRequest(data)) diff --git a/tests/unit/test_item_types_jsonschema.py b/tests/unit/test_item_types_jsonschema.py index 5d33a4c6ec..253fe078a9 100644 --- a/tests/unit/test_item_types_jsonschema.py +++ b/tests/unit/test_item_types_jsonschema.py @@ -38,7 +38,7 @@ def test_pid(item_type_schema, item_type_data_tmp): validate(item_type_data_tmp, item_type_schema) with pytest.raises(ValidationError): - item_type_data_tmp['pid'] = 25 + item_type_data_tmp["pid"] = 25 validate(item_type_data_tmp, item_type_schema) @@ -47,7 +47,7 @@ def test_name(item_type_schema, item_type_data_tmp): validate(item_type_data_tmp, item_type_schema) with pytest.raises(ValidationError): - item_type_data_tmp['name'] = 25 + item_type_data_tmp["name"] = 25 validate(item_type_data_tmp, item_type_schema) @@ -56,7 +56,7 @@ def test_description(item_type_schema, item_type_data_tmp): validate(item_type_data_tmp, item_type_schema) 
with pytest.raises(ValidationError): - item_type_data_tmp['description'] = 25 + item_type_data_tmp["description"] = 25 validate(item_type_data_tmp, item_type_schema) @@ -65,5 +65,5 @@ def test_organisation_pid(item_type_schema, item_type_data_tmp): validate(item_type_data_tmp, item_type_schema) with pytest.raises(ValidationError): - item_type_data_tmp['organisation_pid'] = 25 + item_type_data_tmp["organisation_pid"] = 25 validate(item_type_data_tmp, item_type_schema) diff --git a/tests/unit/test_items_jsonschema.py b/tests/unit/test_items_jsonschema.py index e052aa61dd..4bd7e0c777 100644 --- a/tests/unit/test_items_jsonschema.py +++ b/tests/unit/test_items_jsonschema.py @@ -37,64 +37,63 @@ def test_required(item_schema, item_lib_martigny_data_tmp): validate({}, item_schema) -def test_item_all_jsonschema_keys_values( - item_schema, item_lib_martigny_data_tmp): +def test_item_all_jsonschema_keys_values(item_schema, item_lib_martigny_data_tmp): """Test all keys and values for item jsonschema.""" record = item_lib_martigny_data_tmp validate(record, item_schema) validator = [ - {'key': 'pid', 'value': 25}, - {'key': 'type', 'value': 25}, - {'key': 'barcode', 'value': 25}, - {'key': 'call_number', 'value': 25}, - {'key': 'second_call_number', 'value': 25}, - {'key': 'item_type', 'value': 25}, - {'key': 'location', 'value': 25}, - {'key': 'temporary_location', 'value': 25}, - {'key': 'enumerationAndChronology', 'value': 25}, - {'key': 'document', 'value': 25}, - {'key': 'type', 'value': 25}, - {'key': 'issue', 'value': 25}, - {'key': 'status', 'value': 25}, - {'key': 'holding', 'value': 25}, - {'key': 'organisation', 'value': 25}, - {'key': 'library', 'value': 25}, - {'key': 'ur', 'value': 25}, - {'key': 'pac_code', 'value': 25}, - {'key': 'price', 'value': '25'}, - {'key': '_masked', 'value': 25}, - {'key': 'legacy_checkout_count', 'value': '25'} + {"key": "pid", "value": 25}, + {"key": "type", "value": 25}, + {"key": "barcode", "value": 25}, + {"key": "call_number", "value": 25}, + {"key": "second_call_number", "value": 25}, + {"key": "item_type", "value": 25}, + {"key": "location", "value": 25}, + {"key": "temporary_location", "value": 25}, + {"key": "enumerationAndChronology", "value": 25}, + {"key": "document", "value": 25}, + {"key": "type", "value": 25}, + {"key": "issue", "value": 25}, + {"key": "status", "value": 25}, + {"key": "holding", "value": 25}, + {"key": "organisation", "value": 25}, + {"key": "library", "value": 25}, + {"key": "ur", "value": 25}, + {"key": "pac_code", "value": 25}, + {"key": "price", "value": "25"}, + {"key": "_masked", "value": 25}, + {"key": "legacy_checkout_count", "value": "25"}, ] for element in validator: with pytest.raises(ValidationError): - record[element['key']] = element['value'] + record[element["key"]] = element["value"] validate(record, item_schema) def test_item_notes(item_schema, item_lib_martigny_data_tmp): """Test notes for item jsonschemas.""" validate(item_lib_martigny_data_tmp, item_schema) - item_lib_martigny_data_tmp['notes'] = [] + item_lib_martigny_data_tmp["notes"] = [] - public_note = dict(type=ItemNoteTypes.GENERAL, content='public note') - staff_note = dict(type=ItemNoteTypes.STAFF, content='staff note') - dummy_note = dict(type='dummy', content='dummy note') - long_note = dict(type=ItemNoteTypes.CHECKIN, content='note' * 501) + public_note = dict(type=ItemNoteTypes.GENERAL, content="public note") + staff_note = dict(type=ItemNoteTypes.STAFF, content="staff note") + dummy_note = dict(type="dummy", content="dummy note") + long_note = 
dict(type=ItemNoteTypes.CHECKIN, content="note" * 501) - item_lib_martigny_data_tmp['notes'] = [public_note] + item_lib_martigny_data_tmp["notes"] = [public_note] validate(item_lib_martigny_data_tmp, item_schema) - item_lib_martigny_data_tmp['notes'] = [public_note, staff_note] + item_lib_martigny_data_tmp["notes"] = [public_note, staff_note] validate(item_lib_martigny_data_tmp, item_schema) # adding an invalid note type should raise a validation error with pytest.raises(ValidationError): - item_lib_martigny_data_tmp['notes'] = [dummy_note] + item_lib_martigny_data_tmp["notes"] = [dummy_note] validate(item_lib_martigny_data_tmp, item_schema) # adding a too long note content should raise a validation error with pytest.raises(ValidationError): - item_lib_martigny_data_tmp['notes'] = [long_note] + item_lib_martigny_data_tmp["notes"] = [long_note] validate(item_lib_martigny_data_tmp, item_schema) @@ -103,8 +102,8 @@ def test_new_acquisition(item_schema, item_lib_martigny_data_tmp): validate(item_lib_martigny_data_tmp, item_schema) with pytest.raises(ValidationError): - acq_date = datetime.date.today().strftime('%Y/%m/%d') - item_lib_martigny_data_tmp['acquisition_date'] = acq_date + acq_date = datetime.date.today().strftime("%Y/%m/%d") + item_lib_martigny_data_tmp["acquisition_date"] = acq_date validate(item_lib_martigny_data_tmp, item_schema) @@ -114,18 +113,16 @@ def test_temporary_item_type(item_schema, item_lib_martigny): # tmp_itty cannot be the same as main_itty with pytest.raises(ValidationError): - data['temporary_item_type'] = { - '$ref': data['item_type']['$ref'] - } + data["temporary_item_type"] = {"$ref": data["item_type"]["$ref"]} validate(data, item_schema) data.validate() # check extended_validation # tmp_itty end_date must be later than the current date with pytest.raises(ValidationError): - current_date = datetime.datetime.now().strftime('%Y-%m-%d') - data['temporary_item_type'] = { - '$ref': get_ref_for_pid('itty', 'sample'), - 'end_date': current_date + current_date = datetime.datetime.now().strftime("%Y-%m-%d") + data["temporary_item_type"] = { + "$ref": get_ref_for_pid("itty", "sample"), + "end_date": current_date, } validate(data, item_schema) data.validate() # check extended_validation diff --git a/tests/unit/test_libraries_jsonschema.py b/tests/unit/test_libraries_jsonschema.py index 421ac4167c..6eb2a7913b 100644 --- a/tests/unit/test_libraries_jsonschema.py +++ b/tests/unit/test_libraries_jsonschema.py @@ -41,7 +41,7 @@ def test_pid(library_schema, lib_martigny_data): with pytest.raises(ValidationError): data = copy.deepcopy(lib_martigny_data) - data['pid'] = 25 + data["pid"] = 25 validate(data, library_schema) @@ -51,7 +51,7 @@ def test_name(library_schema, lib_martigny_data): with pytest.raises(ValidationError): data = copy.deepcopy(lib_martigny_data) - data['name'] = 25 + data["name"] = 25 validate(data, library_schema) @@ -61,7 +61,7 @@ def test_address(library_schema, lib_martigny_data): with pytest.raises(ValidationError): data = copy.deepcopy(lib_martigny_data) - data['address'] = 25 + data["address"] = 25 validate(data, library_schema) @@ -71,32 +71,32 @@ def test_acquisition_settings(library_schema, lib_martigny_data): with pytest.raises(ValidationError): copied_data = copy.deepcopy(lib_martigny_data) - acq_data = copied_data['acquisition_settings'] - del acq_data['billing_informations']['address'] + acq_data = copied_data["acquisition_settings"] + del acq_data["billing_informations"]["address"] validate(copied_data, library_schema) with pytest.raises(ValidationError): copied_data =
copy.deepcopy(lib_martigny_data) - acq_data = copied_data['acquisition_settings'] - del acq_data['billing_informations']['name'] + acq_data = copied_data["acquisition_settings"] + del acq_data["billing_informations"]["name"] validate(copied_data, library_schema) with pytest.raises(ValidationError): copied_data = copy.deepcopy(lib_martigny_data) - acq_data = copied_data['acquisition_settings'] - adr_data = acq_data['billing_informations']['address'] - del adr_data['street'] + acq_data = copied_data["acquisition_settings"] + adr_data = acq_data["billing_informations"]["address"] + del adr_data["street"] validate(copied_data, library_schema) with pytest.raises(ValidationError): copied_data = copy.deepcopy(lib_martigny_data) - acq_data = copied_data['acquisition_settings'] - acq_data['dummy'] = 'some data' + acq_data = copied_data["acquisition_settings"] + acq_data["dummy"] = "some data" validate(copied_data, library_schema) with pytest.raises(ValidationError): copied_data = copy.deepcopy(lib_martigny_data) - acq_data = copied_data['acquisition_settings'] - adr_data = acq_data['billing_informations']['address'] - adr_data['dummy'] = 'some data' + acq_data = copied_data["acquisition_settings"] + adr_data = acq_data["billing_informations"]["address"] + adr_data["dummy"] = "some data" validate(copied_data, library_schema) diff --git a/tests/unit/test_local_fields_jsonschema.py b/tests/unit/test_local_fields_jsonschema.py index a9befd9327..0e23cc857e 100644 --- a/tests/unit/test_local_fields_jsonschema.py +++ b/tests/unit/test_local_fields_jsonschema.py @@ -26,20 +26,19 @@ from jsonschema.exceptions import ValidationError -def test_local_fields_fields_required( - local_fields_schema, local_field_martigny_data): +def test_local_fields_fields_required(local_fields_schema, local_field_martigny_data): """Test required for local fields jsonschemas.""" record = copy.deepcopy(local_field_martigny_data) validate(record, local_fields_schema) # Check minlength with pytest.raises(ValidationError): - record['fields'] = ['12'] + record["fields"] = ["12"] validate(record, local_fields_schema) # Check missing fields with pytest.raises(ValidationError): - del record['fields'] + del record["fields"] validate(record, local_fields_schema) # Check empty schema @@ -48,17 +47,17 @@ def test_local_fields_fields_required( def test_local_fields_all_jsonschema_keys_values( - local_fields_schema, local_field_martigny_data): + local_fields_schema, local_field_martigny_data +): """Test all keys and values for local fields jsonschema.""" record = copy.deepcopy(local_field_martigny_data) validate(record, local_fields_schema) validator = [ - {'key': 'pid', 'value': 25}, - {'key': 'organisation', 'value': 25}, - {'key': 'parent', 'value': 25}, - + {"key": "pid", "value": 25}, + {"key": "organisation", "value": 25}, + {"key": "parent", "value": 25}, ] for element in validator: with pytest.raises(ValidationError): - record[element['key']] = element['value'] + record[element["key"]] = element["value"] validate(record, local_fields_schema) diff --git a/tests/unit/test_locations_jsonschema.py b/tests/unit/test_locations_jsonschema.py index 555df137a2..81118b4895 100644 --- a/tests/unit/test_locations_jsonschema.py +++ b/tests/unit/test_locations_jsonschema.py @@ -42,7 +42,7 @@ def test_locations_pid(location_schema, loc_public_martigny_data): with pytest.raises(ValidationError): data = copy.deepcopy(loc_public_martigny_data) - data['pid'] = 25 + data["pid"] = 25 validate(data, location_schema) @@ -52,7 +52,7 @@ def 
test_locations_name(location_schema, loc_public_martigny_data): with pytest.raises(ValidationError): data = copy.deepcopy(loc_public_martigny_data) - data['name'] = 25 + data["name"] = 25 validate(data, location_schema) @@ -60,7 +60,6 @@ def test_locations_email(location_schema, loc_public_martigny_data): """Test email for location jsonschemas.""" data = loc_public_martigny_data - data['notification_email'] = 'test@test.@be' + data["notification_email"] = "test@test.@be" with pytest.raises(ValidationError): - validate(data, location_schema, - format_checker=ils_record_format_checker) + validate(data, location_schema, format_checker=ils_record_format_checker) diff --git a/tests/unit/test_normalizer_stop_words.py b/tests/unit/test_normalizer_stop_words.py index f75d205a67..07f8104bea 100644 --- a/tests/unit/test_normalizer_stop_words.py +++ b/tests/unit/test_normalizer_stop_words.py @@ -23,15 +23,25 @@ def test_normalize(app): """Test stop words normalize.""" # ---- The string is not analyzed - app.config['RERO_ILS_STOP_WORDS_ACTIVATE'] = False + app.config["RERO_ILS_STOP_WORDS_ACTIVATE"] = False normalizer = NormalizerStopWords(app) text = "L'été a été très chaud." assert text == normalizer.normalize(text) # ---- The string is analyzed - app.config['RERO_ILS_STOP_WORDS_ACTIVATE'] = True - app.config['RERO_ILS_STOP_WORDS_PUNCTUATION'] = [ - '"', ',', ';', ':', r'\.', '_', r'\?', r'\!', r'\*', r'\+', '\n' + app.config["RERO_ILS_STOP_WORDS_ACTIVATE"] = True + app.config["RERO_ILS_STOP_WORDS_PUNCTUATION"] = [ + '"', + ",", + ";", + ":", + r"\.", + "_", + r"\?", + r"\!", + r"\*", + r"\+", + "\n", ] normalizer = NormalizerStopWords(app) text = "L'été a été très chaud." @@ -41,27 +51,28 @@ def test_normalize(app): # Deleting words for the defined language. text_norm = "été a été très chaud" - app.config['RERO_ILS_STOP_WORDS'] = { - 'fre': ["de", "des", "du", "l'", "la", "le", "les", "un", "une"] + app.config["RERO_ILS_STOP_WORDS"] = { + "fre": ["de", "des", "du", "l'", "la", "le", "les", "un", "une"] } - assert text_norm == normalizer.normalize(text, 'fre') + assert text_norm == normalizer.normalize(text, "fre") - text = 'Journal des tribunaux : jurisprudence fédérale. ' \ - '4, Droit pénal et procédure pénale' - text_norm = 'Journal tribunaux jurisprudence fédérale ' \ - '4 Droit pénal et procédure pénale' - assert text_norm == normalizer.normalize(text, 'fre') + text = ( + "Journal des tribunaux : jurisprudence fédérale. " + "4, Droit pénal et procédure pénale" + ) + text_norm = ( + "Journal tribunaux jurisprudence fédérale " "4 Droit pénal et procédure pénale" + ) + assert text_norm == normalizer.normalize(text, "fre") # The language was not found in the definition of stop words. text = "He plays this musical phrase quite well." text_norm = "He plays this musical phrase quite well" - assert text_norm == normalizer.normalize(text, 'eng') + assert text_norm == normalizer.normalize(text, "eng") # Deleting words with the default definition. text = "L'été a été très chaud." 
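# A rough sketch of the behaviour NormalizerStopWords is expected to show in
# this test, written with plain `re` under the configuration set above (an
# illustration only, not the actual implementation):
import re

PUNCTUATION = ['"', ",", ";", ":", r"\.", "_", r"\?", r"\!", r"\*", r"\+", "\n"]
STOP_WORDS = {"fre": ["de", "des", "du", "l'", "la", "le", "les", "un", "une"]}

def normalize(text, language="default"):
    # replace every configured punctuation pattern by a space
    text = re.sub("|".join(PUNCTUATION), " ", text)
    if words := STOP_WORDS.get(language):
        # drop the stop words configured for the requested language
        text = re.sub(
            r"\b(" + "|".join(words) + r")\b\s*", "", text, flags=re.IGNORECASE
        )
    return re.sub(r"\s+", " ", text).strip()  # collapse runs of whitespace

# normalize("L'été a été très chaud.", "fre") -> "été a été très chaud"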
text_norm = "été a été chaud" - app.config['RERO_ILS_STOP_WORDS'] = { - 'default': ["l'", "très"] - } + app.config["RERO_ILS_STOP_WORDS"] = {"default": ["l'", "très"]} normalizer = NormalizerStopWords(app) - assert text_norm == normalizer.normalize(text, 'und') + assert text_norm == normalizer.normalize(text, "und") diff --git a/tests/unit/test_notifications_jsonschema.py b/tests/unit/test_notifications_jsonschema.py index 1243cec62e..7f6e9e17f4 100644 --- a/tests/unit/test_notifications_jsonschema.py +++ b/tests/unit/test_notifications_jsonschema.py @@ -39,17 +39,16 @@ def test_pid(notification_schema, dummy_notification): validate(dummy_notification, notification_schema) with pytest.raises(ValidationError): - dummy_notification['pid'] = 25 + dummy_notification["pid"] = 25 validate(dummy_notification, notification_schema) -def test_notification_type( - notification_schema, dummy_notification): +def test_notification_type(notification_schema, dummy_notification): """Test type for notification jsonschemas.""" validate(dummy_notification, notification_schema) with pytest.raises(ValidationError): - dummy_notification['notification_type'] = 25 + dummy_notification["notification_type"] = 25 validate(dummy_notification, notification_schema) @@ -58,10 +57,10 @@ def test_loan(app, notification_schema, dummy_notification): validate(dummy_notification, notification_schema) with pytest.raises(ValidationError): - dummy_notification['context']['loan'] = 25 + dummy_notification["context"]["loan"] = 25 validate(dummy_notification, notification_schema) with pytest.raises(ValidationError): notif = Notification(dummy_notification) - del notif['context']['loan'] + del notif["context"]["loan"] notif.validate() diff --git a/tests/unit/test_operation_logs_jsonschema.py b/tests/unit/test_operation_logs_jsonschema.py index 9b99fe6e6c..7a0074211e 100644 --- a/tests/unit/test_operation_logs_jsonschema.py +++ b/tests/unit/test_operation_logs_jsonschema.py @@ -33,20 +33,21 @@ def test_required(operation_log_schema, operation_log_data): def test_operation_log_all_jsonschema_keys_values( - operation_log_schema, operation_log_data): + operation_log_schema, operation_log_data +): """Test all keys and values for operation log jsonschema.""" record = operation_log_data validate(record, operation_log_schema) validator = [ - {'key': 'pid', 'value': 25}, - {'key': 'operation', 'value': 25}, - {'key': 'record', 'value': 25}, - {'key': 'date', 'value': 25}, - {'key': 'organisation', 'value': 25}, - {'key': 'user', 'value': 25}, - {'key': 'user_name', 'value': 25} + {"key": "pid", "value": 25}, + {"key": "operation", "value": 25}, + {"key": "record", "value": 25}, + {"key": "date", "value": 25}, + {"key": "organisation", "value": 25}, + {"key": "user", "value": 25}, + {"key": "user_name", "value": 25}, ] for element in validator: with pytest.raises(ValidationError): - record[element['key']] = element['value'] + record[element["key"]] = element["value"] validate(record, operation_log_schema) diff --git a/tests/unit/test_organisations_jsonschema.py b/tests/unit/test_organisations_jsonschema.py index 7a2aa4626a..a5f33a7632 100644 --- a/tests/unit/test_organisations_jsonschema.py +++ b/tests/unit/test_organisations_jsonschema.py @@ -41,7 +41,7 @@ def test_pid(organisation_schema, org_martigny_data): with pytest.raises(ValidationError): data = copy.deepcopy(org_martigny_data) - data['pid'] = 25 + data["pid"] = 25 validate(data, organisation_schema) @@ -51,7 +51,7 @@ def test_name(organisation_schema, org_martigny_data): with 
pytest.raises(ValidationError): data = copy.deepcopy(org_martigny_data) - data['name'] = 25 + data["name"] = 25 validate(data, organisation_schema) @@ -61,7 +61,7 @@ def test_address(organisation_schema, org_martigny_data): with pytest.raises(ValidationError): data = copy.deepcopy(org_martigny_data) - data['address'] = 25 + data["address"] = 25 validate(data, organisation_schema) @@ -71,15 +71,15 @@ def test_default_currency(organisation_schema, org_martigny_data): with pytest.raises(ValidationError): data = copy.deepcopy(org_martigny_data) - data['default_currency'] = 25 # bad type + data["default_currency"] = 25 # bad type validate(data, organisation_schema) with pytest.raises(ValidationError): data = copy.deepcopy(org_martigny_data) - data['default_currency'] = "dummy" # bad string length + data["default_currency"] = "dummy" # bad string length validate(data, organisation_schema) with pytest.raises(ValidationError): data = copy.deepcopy(org_martigny_data) - data['default_currency'] = "chf" # bad string case + data["default_currency"] = "chf" # bad string case validate(data, organisation_schema) diff --git a/tests/unit/test_patron_transaction_events_jsonschema.py b/tests/unit/test_patron_transaction_events_jsonschema.py index 7cdd0eecc3..eb7d8a6c17 100644 --- a/tests/unit/test_patron_transaction_events_jsonschema.py +++ b/tests/unit/test_patron_transaction_events_jsonschema.py @@ -27,142 +27,145 @@ def test_patron_transaction_events_required( - patron_transaction_event_schema, - patron_transaction_overdue_event_saxon_data): + patron_transaction_event_schema, patron_transaction_overdue_event_saxon_data +): """Test required for patron transaction event jsonschemas.""" - validate(patron_transaction_overdue_event_saxon_data, - patron_transaction_event_schema) + validate( + patron_transaction_overdue_event_saxon_data, patron_transaction_event_schema + ) with pytest.raises(ValidationError): validate({}, patron_transaction_event_schema) def test_patron_transaction_events_pid( - patron_transaction_event_schema, - patron_transaction_overdue_event_saxon_data): + patron_transaction_event_schema, patron_transaction_overdue_event_saxon_data +): """Test pid for patron transaction event jsonschemas.""" - validate(patron_transaction_overdue_event_saxon_data, - patron_transaction_event_schema) + validate( + patron_transaction_overdue_event_saxon_data, patron_transaction_event_schema + ) with pytest.raises(ValidationError): data = copy.deepcopy(patron_transaction_overdue_event_saxon_data) - data['pid'] = 25 + data["pid"] = 25 validate(data, patron_transaction_event_schema) def test_patron_transaction_events_note( - patron_transaction_event_schema, - patron_transaction_overdue_event_saxon_data): + patron_transaction_event_schema, patron_transaction_overdue_event_saxon_data +): """Test note for patron transaction event jsonschemas.""" - validate(patron_transaction_overdue_event_saxon_data, - patron_transaction_event_schema) + validate( + patron_transaction_overdue_event_saxon_data, patron_transaction_event_schema + ) with pytest.raises(ValidationError): data = copy.deepcopy(patron_transaction_overdue_event_saxon_data) - data['note'] = 25 + data["note"] = 25 validate(data, patron_transaction_event_schema) def test_patron_transaction_events_type( - patron_transaction_event_schema, - patron_transaction_overdue_event_saxon_data): + patron_transaction_event_schema, patron_transaction_overdue_event_saxon_data +): """Test type for patron transaction event jsonschemas.""" - 
validate(patron_transaction_overdue_event_saxon_data, - patron_transaction_event_schema) + validate( + patron_transaction_overdue_event_saxon_data, patron_transaction_event_schema + ) with pytest.raises(ValidationError): data = copy.deepcopy(patron_transaction_overdue_event_saxon_data) - data['type'] = 25 + data["type"] = 25 validate(data, patron_transaction_event_schema) def test_patron_transaction_events_subtype( - patron_transaction_event_schema, - patron_transaction_overdue_event_saxon_data): + patron_transaction_event_schema, patron_transaction_overdue_event_saxon_data +): """Test subtype for patron transaction event jsonschemas.""" - validate(patron_transaction_overdue_event_saxon_data, - patron_transaction_event_schema) + validate( + patron_transaction_overdue_event_saxon_data, patron_transaction_event_schema + ) with pytest.raises(ValidationError): data = copy.deepcopy(patron_transaction_overdue_event_saxon_data) - data['subtype'] = 25 + data["subtype"] = 25 validate(data, patron_transaction_event_schema) def test_patron_transaction_events_operator( - patron_transaction_event_schema, - patron_transaction_overdue_event_saxon_data): + patron_transaction_event_schema, patron_transaction_overdue_event_saxon_data +): """Test operator for patron transaction event jsonschemas.""" - validate(patron_transaction_overdue_event_saxon_data, - patron_transaction_event_schema) + validate( + patron_transaction_overdue_event_saxon_data, patron_transaction_event_schema + ) with pytest.raises(ValidationError): data = copy.deepcopy(patron_transaction_overdue_event_saxon_data) - data['operator'] = 25 + data["operator"] = 25 validate(data, patron_transaction_event_schema) def test_patron_transaction_events_library( - patron_transaction_event_schema, - patron_transaction_overdue_event_saxon_data): + patron_transaction_event_schema, patron_transaction_overdue_event_saxon_data +): """Test library for patron transaction event jsonschemas.""" - validate(patron_transaction_overdue_event_saxon_data, - patron_transaction_event_schema) + validate( + patron_transaction_overdue_event_saxon_data, patron_transaction_event_schema + ) with pytest.raises(ValidationError): data = copy.deepcopy(patron_transaction_overdue_event_saxon_data) - data['library'] = 25 + data["library"] = 25 validate(data, patron_transaction_event_schema) def test_patron_transaction_events_creation_date( - patron_transaction_event_schema, - patron_transaction_overdue_event_saxon_data): + patron_transaction_event_schema, patron_transaction_overdue_event_saxon_data +): """Test creation_date for patron transaction event jsonschemas.""" - validate(patron_transaction_overdue_event_saxon_data, - patron_transaction_event_schema) + validate( + patron_transaction_overdue_event_saxon_data, patron_transaction_event_schema + ) with pytest.raises(ValidationError): data = copy.deepcopy(patron_transaction_overdue_event_saxon_data) - data['creation_date'] = 25 + data["creation_date"] = 25 validate(data, patron_transaction_event_schema) def test_patron_transaction_events_amount( - patron_transaction_event_schema, - patron_transaction_overdue_event_saxon_data): + patron_transaction_event_schema, patron_transaction_overdue_event_saxon_data +): """Test amount for patron transaction event jsonschemas.""" - validate(patron_transaction_overdue_event_saxon_data, - patron_transaction_event_schema) + validate( + patron_transaction_overdue_event_saxon_data, patron_transaction_event_schema + ) with pytest.raises(ValidationError): data = 
copy.deepcopy(patron_transaction_overdue_event_saxon_data) - data['amount'] = '25' + data["amount"] = "25" validate(data, patron_transaction_event_schema) def test_patron_transaction_steps( - patron_transaction_event_schema, - patron_transaction_overdue_event_saxon_data): + patron_transaction_event_schema, patron_transaction_overdue_event_saxon_data +): """Test steps for patron transaction event jsonschemas.""" with pytest.raises(ValidationError): data = copy.deepcopy(patron_transaction_overdue_event_saxon_data) - data['steps'] = [] + data["steps"] = [] validate(data, patron_transaction_event_schema) with pytest.raises(ValidationError): data = copy.deepcopy(patron_transaction_overdue_event_saxon_data) - data['steps'] = [{ - 'timestamp': '2020-12-31', - 'amount': '2' - }] + data["steps"] = [{"timestamp": "2020-12-31", "amount": "2"}] validate(data, patron_transaction_event_schema) with pytest.raises(ValidationError): data = copy.deepcopy(patron_transaction_overdue_event_saxon_data) - data['steps'] = [{ - 'dummy': '2020-12-31', - 'amount': 2 - }] + data["steps"] = [{"dummy": "2020-12-31", "amount": 2}] validate(data, patron_transaction_event_schema) diff --git a/tests/unit/test_patron_transactions_jsonschema.py b/tests/unit/test_patron_transactions_jsonschema.py index a1da6264d4..0cb8d1c97f 100644 --- a/tests/unit/test_patron_transactions_jsonschema.py +++ b/tests/unit/test_patron_transactions_jsonschema.py @@ -27,7 +27,8 @@ def test_patron_transactions_required( - patron_transaction_schema, patron_transaction_overdue_saxon_data): + patron_transaction_schema, patron_transaction_overdue_saxon_data +): """Test required for patron transaction jsonschemas.""" validate(patron_transaction_overdue_saxon_data, patron_transaction_schema) @@ -36,99 +37,108 @@ def test_patron_transactions_required( def test_patron_transactions_pid( - patron_transaction_schema, patron_transaction_overdue_saxon_data): + patron_transaction_schema, patron_transaction_overdue_saxon_data +): """Test pid for patron transaction jsonschemas.""" validate(patron_transaction_overdue_saxon_data, patron_transaction_schema) with pytest.raises(ValidationError): data = copy.deepcopy(patron_transaction_overdue_saxon_data) - data['pid'] = 25 + data["pid"] = 25 validate(data, patron_transaction_schema) def test_patron_transactions_note( - patron_transaction_schema, patron_transaction_overdue_saxon_data): + patron_transaction_schema, patron_transaction_overdue_saxon_data +): """Test note for patron transaction jsonschemas.""" validate(patron_transaction_overdue_saxon_data, patron_transaction_schema) with pytest.raises(ValidationError): data = copy.deepcopy(patron_transaction_overdue_saxon_data) - data['note'] = 25 + data["note"] = 25 validate(data, patron_transaction_schema) def test_patron_transactions_status( - patron_transaction_schema, patron_transaction_overdue_saxon_data): + patron_transaction_schema, patron_transaction_overdue_saxon_data +): """Test status for patron transaction jsonschemas.""" validate(patron_transaction_overdue_saxon_data, patron_transaction_schema) with pytest.raises(ValidationError): data = copy.deepcopy(patron_transaction_overdue_saxon_data) - data['status'] = 25 + data["status"] = 25 validate(data, patron_transaction_schema) def test_patron_transactions_type( - patron_transaction_schema, patron_transaction_overdue_saxon_data): + patron_transaction_schema, patron_transaction_overdue_saxon_data +): """Test type for patron transaction jsonschemas.""" validate(patron_transaction_overdue_saxon_data,
patron_transaction_schema) with pytest.raises(ValidationError): data = copy.deepcopy(patron_transaction_overdue_saxon_data) - data['type'] = 25 + data["type"] = 25 validate(data, patron_transaction_schema) def test_patron_transactions_patron( - patron_transaction_schema, patron_transaction_overdue_saxon_data): + patron_transaction_schema, patron_transaction_overdue_saxon_data +): """Test patron for patron transaction jsonschemas.""" validate(patron_transaction_overdue_saxon_data, patron_transaction_schema) with pytest.raises(ValidationError): data = copy.deepcopy(patron_transaction_overdue_saxon_data) - data['patron'] = 25 + data["patron"] = 25 validate(data, patron_transaction_schema) def test_patron_transactions_notification( - patron_transaction_schema, patron_transaction_overdue_saxon_data): + patron_transaction_schema, patron_transaction_overdue_saxon_data +): """Test notification for patron transaction jsonschemas.""" validate(patron_transaction_overdue_saxon_data, patron_transaction_schema) with pytest.raises(ValidationError): data = copy.deepcopy(patron_transaction_overdue_saxon_data) - data['notification'] = 25 + data["notification"] = 25 validate(data, patron_transaction_schema) def test_patron_transactions_organisation( - patron_transaction_schema, patron_transaction_overdue_saxon_data): + patron_transaction_schema, patron_transaction_overdue_saxon_data +): """Test organisation for patron transaction jsonschemas.""" validate(patron_transaction_overdue_saxon_data, patron_transaction_schema) with pytest.raises(ValidationError): data = copy.deepcopy(patron_transaction_overdue_saxon_data) - data['organisation'] = 25 + data["organisation"] = 25 validate(data, patron_transaction_schema) def test_patron_transactions_creation_date( - patron_transaction_schema, patron_transaction_overdue_saxon_data): + patron_transaction_schema, patron_transaction_overdue_saxon_data +): """Test creation_date for patron transaction jsonschemas.""" validate(patron_transaction_overdue_saxon_data, patron_transaction_schema) with pytest.raises(ValidationError): data = copy.deepcopy(patron_transaction_overdue_saxon_data) - data['creation_date'] = 25 + data["creation_date"] = 25 validate(data, patron_transaction_schema) def test_patron_transactions_total_amount( - patron_transaction_schema, patron_transaction_overdue_saxon_data): + patron_transaction_schema, patron_transaction_overdue_saxon_data +): """Test total_amount for patron transaction jsonschemas.""" validate(patron_transaction_overdue_saxon_data, patron_transaction_schema) with pytest.raises(ValidationError): data = copy.deepcopy(patron_transaction_overdue_saxon_data) - data['total_amount'] = '25' + data["total_amount"] = "25" validate(data, patron_transaction_schema) diff --git a/tests/unit/test_patron_types_jsonschema.py b/tests/unit/test_patron_types_jsonschema.py index 0a57c2151a..ce1371d0ee 100644 --- a/tests/unit/test_patron_types_jsonschema.py +++ b/tests/unit/test_patron_types_jsonschema.py @@ -40,7 +40,7 @@ def test_pid(patron_type_schema, patron_type_data_tmp): validate(patron_type_data_tmp, patron_type_schema) with pytest.raises(ValidationError): - patron_type_data_tmp['pid'] = 25 + patron_type_data_tmp["pid"] = 25 validate(patron_type_data_tmp, patron_type_schema) @@ -49,7 +49,7 @@ def test_name(patron_type_schema, patron_type_data_tmp): validate(patron_type_data_tmp, patron_type_schema) with pytest.raises(ValidationError): - patron_type_data_tmp['name'] = 25 + patron_type_data_tmp["name"] = 25 validate(patron_type_data_tmp, 
patron_type_schema) @@ -58,32 +58,30 @@ def test_description(patron_type_schema, patron_type_data_tmp): validate(patron_type_data_tmp, patron_type_schema) with pytest.raises(ValidationError): - patron_type_data_tmp['description'] = 25 + patron_type_data_tmp["description"] = 25 validate(patron_type_data_tmp, patron_type_schema) -def test_organisation_pid( - patron_type_schema, patron_type_data_tmp): +def test_organisation_pid(patron_type_schema, patron_type_data_tmp): """Test organisation_pid for patron type jsonschemas.""" validate(patron_type_data_tmp, patron_type_schema) with pytest.raises(ValidationError): - patron_type_data_tmp['organisation_pid'] = 25 + patron_type_data_tmp["organisation_pid"] = 25 validate(patron_type_data_tmp, patron_type_schema) -def test_subscription_amount( - patron_type_schema, patron_type_data_tmp): +def test_subscription_amount(patron_type_schema, patron_type_data_tmp): """Test subscription amount for patron type jsonschemas.""" - patron_type_data_tmp['subscription_amount'] = 25 + patron_type_data_tmp["subscription_amount"] = 25 validate(patron_type_data_tmp, patron_type_schema) with pytest.raises(ValidationError): - patron_type_data_tmp['organisation_pid'] = -25 + patron_type_data_tmp["subscription_amount"] = -25 validate(patron_type_data_tmp, patron_type_schema) with pytest.raises(ValidationError): - patron_type_data_tmp['organisation_pid'] = '35' + patron_type_data_tmp["subscription_amount"] = "35" validate(patron_type_data_tmp, patron_type_schema) @@ -92,31 +90,26 @@ def test_limits(patron_type_schema, patron_type_tmp): data = patron_type_tmp # checkout limits :: library limit > general limit - data['limits'] = { - 'checkout_limits': { - 'global_limit': 20, - 'library_limit': 15 - } - } + data["limits"] = {"checkout_limits": {"global_limit": 20, "library_limit": 15}} validate(data, patron_type_schema) with pytest.raises(ValidationError): - data['limits']['checkout_limits']['library_limit'] = 40 + data["limits"]["checkout_limits"]["library_limit"] = 40 validate(data, patron_type_schema) # valid for JSON schema data.validate() # invalid against extended_validation rules - data['limits']['checkout_limits']['library_limit'] = 15 + data["limits"]["checkout_limits"]["library_limit"] = 15 with pytest.raises(ValidationError): - lib_ref = get_ref_for_pid('lib', 'dummy') - data['limits']['checkout_limits']['library_exceptions'] = [ - {'library': {'$ref': lib_ref}, 'value': 15} + lib_ref = get_ref_for_pid("lib", "dummy") + data["limits"]["checkout_limits"]["library_exceptions"] = [ + {"library": {"$ref": lib_ref}, "value": 15} ] validate(data, patron_type_schema) # valid for JSON schema data.validate() # invalid against extended_validation rules with pytest.raises(ValidationError): - data['limits']['checkout_limits']['library_exceptions'] = [ - {'library': {'$ref': lib_ref}, 'value': 5}, - {'library': {'$ref': lib_ref}, 'value': 7} + data["limits"]["checkout_limits"]["library_exceptions"] = [ + {"library": {"$ref": lib_ref}, "value": 5}, + {"library": {"$ref": lib_ref}, "value": 7}, ] validate(data, patron_type_schema) # valid for JSON schema data.validate() # invalid against extended_validation rules diff --git a/tests/unit/test_patrons_jsonschema.py b/tests/unit/test_patrons_jsonschema.py index dfad8ea750..8f6e6d2b81 100644 --- a/tests/unit/test_patrons_jsonschema.py +++ b/tests/unit/test_patrons_jsonschema.py @@ -40,7 +40,7 @@ def test_pid(patron_schema, patron_martigny_data_tmp_with_id): validate(patron_martigny_data_tmp_with_id, patron_schema) with
pytest.raises(ValidationError): - patron_martigny_data_tmp_with_id['pid'] = 25 + patron_martigny_data_tmp_with_id["pid"] = 25 validate(patron_martigny_data_tmp_with_id, patron_schema) @@ -49,7 +49,7 @@ def test_first_name(patron_schema, patron_martigny_data_tmp_with_id): validate(patron_martigny_data_tmp_with_id, patron_schema) with pytest.raises(ValidationError): - patron_martigny_data_tmp_with_id['first_name'] = 25 + patron_martigny_data_tmp_with_id["first_name"] = 25 validate(patron_martigny_data_tmp_with_id, patron_schema) @@ -58,7 +58,7 @@ def test_last_name(patron_schema, patron_martigny_data_tmp_with_id): validate(patron_martigny_data_tmp_with_id, patron_schema) with pytest.raises(ValidationError): - patron_martigny_data_tmp_with_id['last_name'] = 25 + patron_martigny_data_tmp_with_id["last_name"] = 25 validate(patron_martigny_data_tmp_with_id, patron_schema) @@ -67,7 +67,7 @@ def test_street(patron_schema, patron_martigny_data_tmp_with_id): validate(patron_martigny_data_tmp_with_id, patron_schema) with pytest.raises(ValidationError): - patron_martigny_data_tmp_with_id['street'] = 25 + patron_martigny_data_tmp_with_id["street"] = 25 validate(patron_martigny_data_tmp_with_id, patron_schema) @@ -76,7 +76,7 @@ def test_postal_code(patron_schema, patron_martigny_data_tmp_with_id): validate(patron_martigny_data_tmp_with_id, patron_schema) with pytest.raises(ValidationError): - patron_martigny_data_tmp_with_id['postal_code'] = 25 + patron_martigny_data_tmp_with_id["postal_code"] = 25 validate(patron_martigny_data_tmp_with_id, patron_schema) @@ -85,7 +85,7 @@ def test_city(patron_schema, patron_martigny_data_tmp_with_id): validate(patron_martigny_data_tmp_with_id, patron_schema) with pytest.raises(ValidationError): - patron_martigny_data_tmp_with_id['city'] = 25 + patron_martigny_data_tmp_with_id["city"] = 25 validate(patron_martigny_data_tmp_with_id, patron_schema) @@ -96,12 +96,12 @@ def test_barcode(patron_schema, patron_martigny_data_tmp_with_id): # bad type for barcode (int instead of string) with pytest.raises(ValidationError): - data['patron']['barcode'][0] = 25 + data["patron"]["barcode"][0] = 25 validate(data, patron_schema) # try to validate a patron without barcode with pytest.raises(ValidationError) as e: - del data['patron']['barcode'] + del data["patron"]["barcode"] validate(data, patron_schema) assert "'barcode' is a required property" in str(e) @@ -111,7 +111,7 @@ def test_birth_date(patron_schema, patron_martigny_data_tmp_with_id): validate(patron_martigny_data_tmp_with_id, patron_schema) with pytest.raises(ValidationError): - patron_martigny_data_tmp_with_id['birth_date'] = 25 + patron_martigny_data_tmp_with_id["birth_date"] = 25 validate(patron_martigny_data_tmp_with_id, patron_schema) @@ -122,11 +122,13 @@ def test_additional_email(app, patron_martigny): original_user_email = user.email user._email = None - patron_martigny['patron']['communication_channel'] = 'email' + patron_martigny["patron"]["communication_channel"] = "email" with pytest.raises(ValidationError) as e: Patron.validate(patron_martigny) - assert 'At least one email should be defined for an email ' \ - 'communication channel' in str(e) + assert ( + "At least one email should be defined for an email " + "communication channel" in str(e) + ) user._email = original_user_email @@ -136,7 +138,7 @@ def test_phone(patron_schema, patron_martigny_data_tmp_with_id): validate(patron_martigny_data_tmp_with_id, patron_schema) with pytest.raises(ValidationError): - patron_martigny_data_tmp_with_id['home_phone'] = 25 + 
patron_martigny_data_tmp_with_id["home_phone"] = 25 validate(patron_martigny_data_tmp_with_id, patron_schema) @@ -145,7 +147,7 @@ def test_patron_type(patron_schema, patron_martigny_data_tmp_with_id): validate(patron_martigny_data_tmp_with_id, patron_schema) with pytest.raises(ValidationError): - patron_martigny_data_tmp_with_id['patron_type_pid'] = 25 + patron_martigny_data_tmp_with_id["patron_type_pid"] = 25 validate(patron_martigny_data_tmp_with_id, patron_schema) @@ -154,7 +156,7 @@ def test_roles(patron_schema, patron_martigny_data_tmp_with_id): validate(patron_martigny_data_tmp_with_id, patron_schema) with pytest.raises(ValidationError): - patron_martigny_data_tmp_with_id['roles'] = 'text' + patron_martigny_data_tmp_with_id["roles"] = "text" validate(patron_martigny_data_tmp_with_id, patron_schema) @@ -164,15 +166,15 @@ def test_blocked(patron_schema, patron_martigny_data_tmp_with_id): # blocked is a boolean field, should fail with everything except boolean with pytest.raises(ValidationError): - patron_martigny_data_tmp_with_id['patron']['blocked'] = 25 + patron_martigny_data_tmp_with_id["patron"]["blocked"] = 25 validate(patron_martigny_data_tmp_with_id, patron_schema) with pytest.raises(ValidationError): - patron_martigny_data_tmp_with_id['patron']['blocked'] = 'text' + patron_martigny_data_tmp_with_id["patron"]["blocked"] = "text" validate(patron_martigny_data_tmp_with_id, patron_schema) # Should pass with boolean - patron_martigny_data_tmp_with_id['patron']['blocked'] = False + patron_martigny_data_tmp_with_id["patron"]["blocked"] = False validate(patron_martigny_data_tmp_with_id, patron_schema) @@ -182,14 +184,14 @@ def test_blocked_note(patron_schema, patron_martigny_data_tmp_with_id): # blocked_note is text field. Should fail except with text. 
with pytest.raises(ValidationError): - patron_martigny_data_tmp_with_id['patron']['blocked_note'] = 25 + patron_martigny_data_tmp_with_id["patron"]["blocked_note"] = 25 validate(patron_martigny_data_tmp_with_id, patron_schema) with pytest.raises(ValidationError): - patron_martigny_data_tmp_with_id['patron']['blocked_note'] = True + patron_martigny_data_tmp_with_id["patron"]["blocked_note"] = True validate(patron_martigny_data_tmp_with_id, patron_schema) - patron_martigny_data_tmp_with_id['patron']['blocked_note'] = 'Lost card' + patron_martigny_data_tmp_with_id["patron"]["blocked_note"] = "Lost card" validate(patron_martigny_data_tmp_with_id, patron_schema) @@ -197,13 +199,13 @@ def test_local_codes(patron_schema, patron_martigny_data_tmp_with_id): """Test local codes for patron jsonschemas.""" with pytest.raises(ValidationError): - patron_martigny_data_tmp_with_id['local_codes'] = 'data' + patron_martigny_data_tmp_with_id["local_codes"] = "data" validate(patron_martigny_data_tmp_with_id, patron_schema) with pytest.raises(ValidationError): - patron_martigny_data_tmp_with_id['local_codes'] = ['data', 12] + patron_martigny_data_tmp_with_id["local_codes"] = ["data", 12] validate(patron_martigny_data_tmp_with_id, patron_schema) with pytest.raises(ValidationError): - patron_martigny_data_tmp_with_id['local_codes'] = ['data', 'data'] + patron_martigny_data_tmp_with_id["local_codes"] = ["data", "data"] validate(patron_martigny_data_tmp_with_id, patron_schema) diff --git a/tests/unit/test_sru_explain.py b/tests/unit/test_sru_explain.py index ec7898cb3e..ea447295e3 100644 --- a/tests/unit/test_sru_explain.py +++ b/tests/unit/test_sru_explain.py @@ -22,15 +22,16 @@ def test_explain(app): """Test Explain.""" - explain = Explain('api/sru') - explain_strings = str(explain).split('\n') - assert explain_strings[0] == \ - '' - assert explain.database == 'api/sru' - assert explain.number_of_records == app.config.get( - 'RERO_SRU_NUMBER_OF_RECORDS') - assert explain.maximum_records == app.config.get( - 'RERO_SRU_MAXIMUM_RECORDS') - assert explain.doc_type == 'doc' - assert explain.index == app.config.get( - 'RECORDS_REST_ENDPOINTS', {}).get('doc', {}).get('search_index') + explain = Explain("api/sru") + explain_strings = str(explain).split("\n") + assert ( + explain_strings[0] + == '' + ) + assert explain.database == "api/sru" + assert explain.number_of_records == app.config.get("RERO_SRU_NUMBER_OF_RECORDS") + assert explain.maximum_records == app.config.get("RERO_SRU_MAXIMUM_RECORDS") + assert explain.doc_type == "doc" + assert explain.index == app.config.get("RECORDS_REST_ENDPOINTS", {}).get( + "doc", {} + ).get("search_index") diff --git a/tests/unit/test_stats_cfg_jsonschema.py b/tests/unit/test_stats_cfg_jsonschema.py index 96b4efc656..d76efa123a 100644 --- a/tests/unit/test_stats_cfg_jsonschema.py +++ b/tests/unit/test_stats_cfg_jsonschema.py @@ -39,29 +39,20 @@ def test_valid_configuration(stats_cfg_schema, stats_cfg_martigny_data): def test_valid_circulation_n_docs(stats_cfg_schema): """Test number of documents.""" data = { - '$schema': - 'https://bib.rero.ch/schemas/stats_cfg/stat_cfg-v0.0.1.json', - 'pid': 'statcfg1', - 'name': 'foo', - 'description': 'bar', - 'frequency': 'month', - "library": { - "$ref": "https://bib.rero.ch/api/libraries/lib1" - }, - 'category': { - 'type': 'catalog', - 'indicator': { - 'type': 'number_of_documents' - } - }, - 'is_active': True + "$schema": "https://bib.rero.ch/schemas/stats_cfg/stat_cfg-v0.0.1.json", + "pid": "statcfg1", + "name": "foo", + "description": 
"bar", + "frequency": "month", + "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"}, + "category": {"type": "catalog", "indicator": {"type": "number_of_documents"}}, + "is_active": True, } - for dist in ['created_month', 'created_year', 'imported', - 'owning_library']: - data['category']['indicator']['distributions'] = [dist] + for dist in ["created_month", "created_year", "imported", "owning_library"]: + data["category"]["indicator"]["distributions"] = [dist] validate(data, stats_cfg_schema) - data['category']['indicator']['distributions'] = ["foo"] + data["category"]["indicator"]["distributions"] = ["foo"] with pytest.raises(ValidationError): validate(data, stats_cfg_schema) @@ -69,28 +60,23 @@ def test_valid_circulation_n_docs(stats_cfg_schema): def test_valid_circulation_n_serial_holdings(stats_cfg_schema): """Test number of serial holdings.""" data = { - '$schema': - 'https://bib.rero.ch/schemas/stats_cfg/stat_cfg-v0.0.1.json', - 'pid': 'statcfg1', - 'name': 'foo', - 'description': 'bar', - 'frequency': 'month', - "library": { - "$ref": "https://bib.rero.ch/api/libraries/lib1" - }, - 'category': { - 'type': 'catalog', - 'indicator': { - 'type': 'number_of_serial_holdings' - } + "$schema": "https://bib.rero.ch/schemas/stats_cfg/stat_cfg-v0.0.1.json", + "pid": "statcfg1", + "name": "foo", + "description": "bar", + "frequency": "month", + "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"}, + "category": { + "type": "catalog", + "indicator": {"type": "number_of_serial_holdings"}, }, - 'is_active': True + "is_active": True, } - for dist in ['created_month', 'created_year', 'owning_library']: - data['category']['indicator']['distributions'] = [dist] + for dist in ["created_month", "created_year", "owning_library"]: + data["category"]["indicator"]["distributions"] = [dist] validate(data, stats_cfg_schema) - data['category']['indicator']['distributions'] = ["foo"] + data["category"]["indicator"]["distributions"] = ["foo"] with pytest.raises(ValidationError): validate(data, stats_cfg_schema) @@ -98,30 +84,28 @@ def test_valid_circulation_n_serial_holdings(stats_cfg_schema): def test_valid_circulation_n_items(stats_cfg_schema): """Test number of items.""" data = { - '$schema': - 'https://bib.rero.ch/schemas/stats_cfg/stat_cfg-v0.0.1.json', - 'pid': 'statcfg1', - 'name': 'foo', - 'description': 'bar', - 'frequency': 'month', - "library": { - "$ref": "https://bib.rero.ch/api/libraries/lib1" - }, - 'category': { - 'type': 'catalog', - 'indicator': { - 'type': 'number_of_items' - } - }, - 'is_active': True + "$schema": "https://bib.rero.ch/schemas/stats_cfg/stat_cfg-v0.0.1.json", + "pid": "statcfg1", + "name": "foo", + "description": "bar", + "frequency": "month", + "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"}, + "category": {"type": "catalog", "indicator": {"type": "number_of_items"}}, + "is_active": True, } - for dist in ['created_month', 'created_year', 'owning_library', - 'owning_location', 'document_type', 'document_subtype', - 'type']: - data['category']['indicator']['distributions'] = [dist] + for dist in [ + "created_month", + "created_year", + "owning_library", + "owning_location", + "document_type", + "document_subtype", + "type", + ]: + data["category"]["indicator"]["distributions"] = [dist] validate(data, stats_cfg_schema) - data['category']['indicator']['distributions'] = ["foo"] + data["category"]["indicator"]["distributions"] = ["foo"] with pytest.raises(ValidationError): validate(data, stats_cfg_schema) @@ -129,31 +113,31 @@ def 
test_valid_circulation_n_items(stats_cfg_schema): def test_valid_circulation_n_patrons(stats_cfg_schema): """Test number of patrons.""" data = { - '$schema': - 'https://bib.rero.ch/schemas/stats_cfg/stat_cfg-v0.0.1.json', - 'pid': 'statcfg1', - 'name': 'foo', - 'description': 'bar', - 'frequency': 'month', - "library": { - "$ref": "https://bib.rero.ch/api/libraries/lib1" - }, - 'category': { - 'type': 'user_management', - 'indicator': { - 'type': 'number_of_patrons' - } + "$schema": "https://bib.rero.ch/schemas/stats_cfg/stat_cfg-v0.0.1.json", + "pid": "statcfg1", + "name": "foo", + "description": "bar", + "frequency": "month", + "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"}, + "category": { + "type": "user_management", + "indicator": {"type": "number_of_patrons"}, }, - 'is_active': True + "is_active": True, } for dist in [ - 'created_month', 'created_year', 'postal_code', 'type', 'gender', - 'birth_year', 'role' + "created_month", + "created_year", + "postal_code", + "type", + "gender", + "birth_year", + "role", ]: - data['category']['indicator']['distributions'] = [dist] + data["category"]["indicator"]["distributions"] = [dist] validate(data, stats_cfg_schema) - data['category']['indicator']['distributions'] = ["foo"] + data["category"]["indicator"]["distributions"] = ["foo"] with pytest.raises(ValidationError): validate(data, stats_cfg_schema) @@ -161,33 +145,33 @@ def test_valid_circulation_n_patrons(stats_cfg_schema): def test_valid_circulation_n_active_patrons(stats_cfg_schema): """Test number of active patrons.""" data = { - '$schema': - 'https://bib.rero.ch/schemas/stats_cfg/stat_cfg-v0.0.1.json', - 'pid': 'statcfg1', - 'name': 'foo', - 'description': 'bar', - 'frequency': 'month', - "library": { - "$ref": "https://bib.rero.ch/api/libraries/lib1" - }, - 'category': { - 'type': 'user_management', - 'indicator': { - 'type': 'number_of_active_patrons' - } + "$schema": "https://bib.rero.ch/schemas/stats_cfg/stat_cfg-v0.0.1.json", + "pid": "statcfg1", + "name": "foo", + "description": "bar", + "frequency": "month", + "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"}, + "category": { + "type": "user_management", + "indicator": {"type": "number_of_active_patrons"}, }, - 'is_active': True + "is_active": True, } - for period in ['year', 'month']: - data['category']['indicator']['period'] = period + for period in ["year", "month"]: + data["category"]["indicator"]["period"] = period for dist in [ - 'created_month', 'created_year', 'postal_code', 'type', 'gender', - 'birth_year', 'role' + "created_month", + "created_year", + "postal_code", + "type", + "gender", + "birth_year", + "role", ]: - data['category']['indicator']['distributions'] = [dist] + data["category"]["indicator"]["distributions"] = [dist] validate(data, stats_cfg_schema) - data['category']['indicator']['distributions'] = ["foo"] + data["category"]["indicator"]["distributions"] = ["foo"] with pytest.raises(ValidationError): validate(data, stats_cfg_schema) @@ -195,35 +179,34 @@ def test_valid_circulation_n_active_patrons(stats_cfg_schema): def test_valid_circulation_n_deleted_items(stats_cfg_schema): """Test number of deleted items.""" data = { - '$schema': - 'https://bib.rero.ch/schemas/stats_cfg/stat_cfg-v0.0.1.json', - 'pid': 'statcfg1', - 'name': 'foo', - 'description': 'bar', - 'frequency': 'month', - "library": { - "$ref": "https://bib.rero.ch/api/libraries/lib1" - }, - 'category': { - 'type': 'catalog', - 'indicator': { - 'type': 'number_of_deleted_items' - } + "$schema": 
"https://bib.rero.ch/schemas/stats_cfg/stat_cfg-v0.0.1.json", + "pid": "statcfg1", + "name": "foo", + "description": "bar", + "frequency": "month", + "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"}, + "category": { + "type": "catalog", + "indicator": {"type": "number_of_deleted_items"}, }, - 'is_active': True + "is_active": True, } - for period in ['year', 'month']: - data['category']['indicator']['period'] = period - for dist in ['action_month', 'action_year', 'owning_library', - 'operator_library']: - data['category']['indicator']['distributions'] = [dist] + for period in ["year", "month"]: + data["category"]["indicator"]["period"] = period + for dist in [ + "action_month", + "action_year", + "owning_library", + "operator_library", + ]: + data["category"]["indicator"]["distributions"] = [dist] validate(data, stats_cfg_schema) - data['category']['indicator']['distributions'] = ["foo"] + data["category"]["indicator"]["distributions"] = ["foo"] with pytest.raises(ValidationError): validate(data, stats_cfg_schema) - data['category']['indicator']['period'] = 'day' + data["category"]["indicator"]["period"] = "day" with pytest.raises(ValidationError): validate(data, stats_cfg_schema) @@ -231,30 +214,23 @@ def test_valid_circulation_n_deleted_items(stats_cfg_schema): def test_valid_circulation_n_ill_requests(stats_cfg_schema): """Test number of ill requests.""" data = { - '$schema': - 'https://bib.rero.ch/schemas/stats_cfg/stat_cfg-v0.0.1.json', - 'pid': 'statcfg1', - 'name': 'foo', - 'description': 'bar', + "$schema": "https://bib.rero.ch/schemas/stats_cfg/stat_cfg-v0.0.1.json", + "pid": "statcfg1", + "name": "foo", + "description": "bar", "frequency": "month", - "library": { - "$ref": "https://bib.rero.ch/api/libraries/lib1" - }, - 'category': { - 'type': 'circulation', - 'indicator': { - 'type': 'number_of_ill_requests' - } + "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"}, + "category": { + "type": "circulation", + "indicator": {"type": "number_of_ill_requests"}, }, - 'is_active': True + "is_active": True, } - for dist in [ - 'created_month', 'created_year', 'pickup_location', 'status' - ]: - data['category']['indicator']['distributions'] = [dist] + for dist in ["created_month", "created_year", "pickup_location", "status"]: + data["category"]["indicator"]["distributions"] = [dist] validate(data, stats_cfg_schema) - data['category']['indicator']['distributions'] = ["foo"] + data["category"]["indicator"]["distributions"] = ["foo"] with pytest.raises(ValidationError): validate(data, stats_cfg_schema) @@ -262,42 +238,39 @@ def test_valid_circulation_n_ill_requests(stats_cfg_schema): def test_valid_circulation_n_circulations(stats_cfg_schema): """Test number of ill requests.""" data = { - '$schema': - 'https://bib.rero.ch/schemas/stats_cfg/stat_cfg-v0.0.1.json', - 'pid': 'statcfg1', - 'name': 'foo', - 'description': 'bar', - 'frequency': 'month', - "library": { - "$ref": "https://bib.rero.ch/api/libraries/lib1" - }, - 'category': { - 'type': 'circulation', - 'indicator': { - } - }, - 'is_active': True + "$schema": "https://bib.rero.ch/schemas/stats_cfg/stat_cfg-v0.0.1.json", + "pid": "statcfg1", + "name": "foo", + "description": "bar", + "frequency": "month", + "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"}, + "category": {"type": "circulation", "indicator": {}}, + "is_active": True, } - for trigger in [ - 'checkin' - ]: - data['category']['indicator']['type'] = f'number_of_{trigger}s' - for period in ['year', 'month']: - 
+    for trigger in ["checkin"]:
+        data["category"]["indicator"]["type"] = f"number_of_{trigger}s"
+        for period in ["year", "month"]:
+            data["category"]["indicator"]["period"] = period
             for dist in [
-                'transaction_month', 'transaction_year',
-                'transaction_location', 'patron_type', 'patron_age',
-                'patron_postal_code', 'document_type', 'transaction_channel',
-                'owning_library', 'owning_location'
+                "transaction_month",
+                "transaction_year",
+                "transaction_location",
+                "patron_type",
+                "patron_age",
+                "patron_postal_code",
+                "document_type",
+                "transaction_channel",
+                "owning_library",
+                "owning_location",
             ]:
-                data['category']['indicator']['distributions'] = [dist]
+                data["category"]["indicator"]["distributions"] = [dist]
                 validate(data, stats_cfg_schema)
-    data['category']['indicator']['distributions'] = ["foo"]
+    data["category"]["indicator"]["distributions"] = ["foo"]
     with pytest.raises(ValidationError):
         validate(data, stats_cfg_schema)
-    data['category']['indicator']['period'] = 'day'
+    data["category"]["indicator"]["period"] = "day"
     with pytest.raises(ValidationError):
         validate(data, stats_cfg_schema)
@@ -305,41 +278,38 @@ def test_valid_circulation_n_circulations(stats_cfg_schema):
 def test_valid_circulation_n_requests(stats_cfg_schema):
     """Test number of requests."""
     data = {
-        '$schema':
-            'https://bib.rero.ch/schemas/stats_cfg/stat_cfg-v0.0.1.json',
-        'pid': 'statcfg1',
-        'name': 'foo',
-        'description': 'bar',
-        'frequency': 'month',
-        "library": {
-            "$ref": "https://bib.rero.ch/api/libraries/lib1"
-        },
-        'category': {
-            'type': 'circulation',
-            'indicator': {
-            }
-        },
-        'is_active': True
+        "$schema": "https://bib.rero.ch/schemas/stats_cfg/stat_cfg-v0.0.1.json",
+        "pid": "statcfg1",
+        "name": "foo",
+        "description": "bar",
+        "frequency": "month",
+        "library": {"$ref": "https://bib.rero.ch/api/libraries/lib1"},
+        "category": {"type": "circulation", "indicator": {}},
+        "is_active": True,
     }
-    for trigger in [
-        'request'
-    ]:
-        data['category']['indicator']['type'] = f'number_of_{trigger}s'
-        for period in ['year', 'month']:
-            data['category']['indicator']['period'] = period
+    for trigger in ["request"]:
+        data["category"]["indicator"]["type"] = f"number_of_{trigger}s"
+        for period in ["year", "month"]:
+            data["category"]["indicator"]["period"] = period
             for dist in [
-                'transaction_month', 'transaction_year',
-                'patron_type', 'patron_age',
-                'patron_postal_code', 'document_type', 'transaction_channel',
-                'owning_library', 'pickup_location', 'owning_location'
+                "transaction_month",
+                "transaction_year",
+                "patron_type",
+                "patron_age",
+                "patron_postal_code",
+                "document_type",
+                "transaction_channel",
+                "owning_library",
+                "pickup_location",
+                "owning_location",
             ]:
-                data['category']['indicator']['distributions'] = [dist]
+                data["category"]["indicator"]["distributions"] = [dist]
                 validate(data, stats_cfg_schema)
-    data['category']['indicator']['distributions'] = ["foo"]
+    data["category"]["indicator"]["distributions"] = ["foo"]
     with pytest.raises(ValidationError):
         validate(data, stats_cfg_schema)
-    data['category']['indicator']['period'] = 'day'
+    data["category"]["indicator"]["period"] = "day"
     with pytest.raises(ValidationError):
         validate(data, stats_cfg_schema)
diff --git a/tests/unit/test_templates_jsonschema.py b/tests/unit/test_templates_jsonschema.py
index 3c4324d852..d99711d756 100644
--- a/tests/unit/test_templates_jsonschema.py
+++ b/tests/unit/test_templates_jsonschema.py
@@ -34,21 +34,22 @@ def test_required(template_schema,
                   templ_doc_public_martigny_data_tmp):
 
 
 def test_template_all_jsonschema_keys_values(
-        template_schema, templ_doc_public_martigny_data_tmp):
+    template_schema, templ_doc_public_martigny_data_tmp
+):
     """Test all keys and values for template jsonschema."""
     record = templ_doc_public_martigny_data_tmp
     validate(record, template_schema)
     validator = [
-        {'key': 'pid', 'value': 25},
-        {'key': 'name', 'value': 25},
-        {'key': 'description', 'value': 25},
-        {'key': 'organistion', 'value': 25},
-        {'key': 'template_type', 'value': 25},
-        {'key': 'creator', 'value': 25},
-        {'key': 'visibility', 'value': 25},
-        {'key': 'data', 'value': 25}
+        {"key": "pid", "value": 25},
+        {"key": "name", "value": 25},
+        {"key": "description", "value": 25},
+        {"key": "organisation", "value": 25},
+        {"key": "template_type", "value": 25},
+        {"key": "creator", "value": 25},
+        {"key": "visibility", "value": 25},
+        {"key": "data", "value": 25},
     ]
     for element in validator:
         with pytest.raises(ValidationError):
-            record[element['key']] = element['value']
+            record[element["key"]] = element["value"]
             validate(record, template_schema)
diff --git a/tests/unit/test_users_jsonschema.py b/tests/unit/test_users_jsonschema.py
index bbded3746d..c7476c9244 100644
--- a/tests/unit/test_users_jsonschema.py
+++ b/tests/unit/test_users_jsonschema.py
@@ -30,28 +30,26 @@ def test_required(user_schema, user_data_tmp):
     validate({}, user_schema)
 
 
-def test_user_all_jsonschema_keys_values(
-        user_schema, user_data_tmp):
+def test_user_all_jsonschema_keys_values(user_schema, user_data_tmp):
     """Test all keys and values for user jsonschema."""
     record = user_data_tmp
     validate(record, user_schema)
     validator = [
-        {'key': 'first_name', 'value': 25},
-        {'key': 'last_name', 'value': 25},
-        {'key': 'birth_date', 'value': 25},
-        {'key': 'gender', 'value': 25},
-        {'key': 'street', 'value': 25},
-        {'key': 'postal_code', 'value': 25},
-        {'key': 'city', 'value': 25},
-        {'key': 'country', 'value': 25},
-        {'key': 'mobile_phone', 'value': 25},
-        {'key': 'business_phone', 'value': 25},
-        {'key': 'mobile_phone', 'value': 25},
-        {'key': 'other_phone', 'value': 25},
-        {'key': 'keep_history', 'value': 25},
-        {'key': 'user_id', 'value': '25'}
+        {"key": "first_name", "value": 25},
+        {"key": "last_name", "value": 25},
+        {"key": "birth_date", "value": 25},
+        {"key": "gender", "value": 25},
+        {"key": "street", "value": 25},
+        {"key": "postal_code", "value": 25},
+        {"key": "city", "value": 25},
+        {"key": "country", "value": 25},
+        {"key": "mobile_phone", "value": 25},
+        {"key": "business_phone", "value": 25},
+        {"key": "other_phone", "value": 25},
+        {"key": "keep_history", "value": 25},
+        {"key": "user_id", "value": "25"},
     ]
     for element in validator:
         with pytest.raises(ValidationError):
-            record[element['key']] = element['value']
+            record[element["key"]] = element["value"]
             validate(record, user_schema)
diff --git a/tests/unit/test_utils.py b/tests/unit/test_utils.py
index d3468f903e..4d3773aa78 100644
--- a/tests/unit/test_utils.py
+++ b/tests/unit/test_utils.py
@@ -24,29 +24,38 @@ from rero_ils.modules.patron_types.api import PatronType
 from rero_ils.modules.patrons.api import Patron
-from rero_ils.modules.utils import PasswordValidatorException, add_years, \
-    extracted_data_from_ref, get_endpoint_configuration, \
-    get_schema_for_resource, password_generator, password_validator, \
-    read_json_record
-from rero_ils.utils import get_current_language, language_iso639_2to1, \
-    language_mapping, unique_list
+from rero_ils.modules.utils import (
+    PasswordValidatorException,
+    add_years,
+    extracted_data_from_ref,
+    get_endpoint_configuration,
+    get_schema_for_resource,
+    password_generator,
+    password_validator,
+    read_json_record,
+)
+from rero_ils.utils import (
+    get_current_language,
+    language_iso639_2to1,
+    language_mapping,
+    unique_list,
+)
 
 
 def test_unique_list():
     """Test unicity of list."""
-    list = ['python', 'snail', 'python', 'snail']
-    assert ['python', 'snail'] == unique_list(list)
+    lst = ["python", "snail", "python", "snail"]
+    assert ["python", "snail"] == unique_list(lst)
 
 
 def test_read_json_record(request):
     """Test IlsRecord PID after validation failed."""
-    file_name = os.path.join(request.fspath.dirname, '..', 'data',
-                             'documents.json')
+    file_name = os.path.join(request.fspath.dirname, "..", "data", "documents.json")
     with open(file_name) as json_file:
         count = 0
         for record in read_json_record(json_file):
             count += 1
-            assert record.get('pid') == str(count)
+            assert record.get("pid") == str(count)
         assert count == 2
@@ -56,48 +65,49 @@ def test_add_years():
     one_year_later = add_years(initial_date, 1)
     assert initial_date.year == one_year_later.year - 1
-    initial_date = datetime.strptime('2020-02-29', '%Y-%m-%d')
+    initial_date = datetime.strptime("2020-02-29", "%Y-%m-%d")
     two_years_later = add_years(initial_date, 2)
     four_years_later = add_years(initial_date, 4)
     assert two_years_later.month == 3 and two_years_later.day == 1
-    assert four_years_later.month == initial_date.month and \
-        four_years_later.day == initial_date.day
+    assert (
+        four_years_later.month == initial_date.month
+        and four_years_later.day == initial_date.day
+    )
 
 
 def test_get_schema_for_resources(app):
     """Test get_schema_for_resource function."""
-    json_schema = 'https://bib.rero.ch/schemas/patrons/patron-v0.0.1.json'
+    json_schema = "https://bib.rero.ch/schemas/patrons/patron-v0.0.1.json"
     assert get_schema_for_resource(Patron) == json_schema
-    assert get_schema_for_resource('ptrn') == json_schema
+    assert get_schema_for_resource("ptrn") == json_schema
 
 
 def test_get_endpoint_configuration(app):
     """Test get_endpoint_configuration."""
-    assert get_endpoint_configuration('loc')['pid_type'] == 'loc'
-    assert get_endpoint_configuration('locations')['pid_type'] == 'loc'
-    assert get_endpoint_configuration(PatronType)['pid_type'] == 'ptty'
-    assert get_endpoint_configuration('dummy') is None
+    assert get_endpoint_configuration("loc")["pid_type"] == "loc"
+    assert get_endpoint_configuration("locations")["pid_type"] == "loc"
+    assert get_endpoint_configuration(PatronType)["pid_type"] == "ptty"
+    assert get_endpoint_configuration("dummy") is None
 
 
-def test_extract_data_from_ref(app, patron_sion_data,
-                               patron_type_grown_sion):
+def test_extract_data_from_ref(app, patron_sion_data, patron_type_grown_sion):
     """Test extract_data_from_ref."""
     # Check real data
-    ptty = patron_sion_data['patron']['type']
-    assert extracted_data_from_ref(ptty, data='pid') == 'ptty4'
-    assert extracted_data_from_ref(ptty, data='resource') == 'patron_types'
-    assert extracted_data_from_ref(ptty, data='record_class') == PatronType
-    ptty_record = extracted_data_from_ref(ptty, data='record')
+    ptty = patron_sion_data["patron"]["type"]
+    assert extracted_data_from_ref(ptty, data="pid") == "ptty4"
+    assert extracted_data_from_ref(ptty, data="resource") == "patron_types"
+    assert extracted_data_from_ref(ptty, data="record_class") == PatronType
+    ptty_record = extracted_data_from_ref(ptty, data="record")
     assert ptty_record.pid == patron_type_grown_sion.pid
-    assert extracted_data_from_ref(ptty, data='es_record')['pid'] == 'ptty4'
+    assert extracted_data_from_ref(ptty, data="es_record")["pid"] == "ptty4"
     # check dummy data
-    assert extracted_data_from_ref('dummy_data', data='pid') is None
-    assert extracted_data_from_ref('dummy_data', data='resource') is None
-    assert extracted_data_from_ref('dummy_data', data='record_class') is None
-    assert extracted_data_from_ref('dummy_data', data='record') is None
-    assert extracted_data_from_ref(ptty, data='dummy') is None
-    assert extracted_data_from_ref('dummy_data', data='es_record') is None
+    assert extracted_data_from_ref("dummy_data", data="pid") is None
+    assert extracted_data_from_ref("dummy_data", data="resource") is None
+    assert extracted_data_from_ref("dummy_data", data="record_class") is None
+    assert extracted_data_from_ref("dummy_data", data="record") is None
+    assert extracted_data_from_ref(ptty, data="dummy") is None
+    assert extracted_data_from_ref("dummy_data", data="es_record") is None
 
 
 def test_current_language(app):
@@ -108,33 +118,38 @@ def test_language_iso639_2to1(app):
     """Test convert MARC language code to language."""
-    assert language_iso639_2to1('eng') == 'en'
-    assert language_iso639_2to1('fre') == 'fr'
-    assert language_iso639_2to1('ger') == 'de'
-    assert language_iso639_2to1('ita') == 'it'
+    assert language_iso639_2to1("eng") == "en"
+    assert language_iso639_2to1("fre") == "fr"
+    assert language_iso639_2to1("ger") == "de"
+    assert language_iso639_2to1("ita") == "it"
     # default language
-    assert language_iso639_2to1('rus') == 'en'
+    assert language_iso639_2to1("rus") == "en"
 
 
 def test_language_mapping(app):
     """Test language mapping."""
-    assert 'fre' == language_mapping('fre')
-    assert 'dut' == language_mapping('dum')
+    assert "fre" == language_mapping("fre")
+    assert "dut" == language_mapping("dum")
 
 
 def test_password_validator():
     """Test password validator."""
-    with pytest.raises(PasswordValidatorException):
-        password_validator('foo')
-        password_validator('foobarbar')
-        password_validator('1244567899')
-        password_validator('foobar123')
-        password_validator('FooBar123', length=12)
-
-    assert password_validator('FooBar12')
-    assert password_validator('FooBar123')
-    assert password_validator('Foo Bar 123')
-    assert password_validator('FooBar123$', special_char=True)
+    # Each invalid password must raise on its own; statements placed after
+    # the first raise inside a single `pytest.raises` block never execute.
+    for pwd, kwargs in [
+        ("foo", {}),
+        ("foobarbar", {}),
+        ("1244567899", {}),
+        ("foobar123", {}),
+        ("FooBar123", {"length": 12}),
+    ]:
+        with pytest.raises(PasswordValidatorException):
+            password_validator(pwd, **kwargs)
+
+    assert password_validator("FooBar12")
+    assert password_validator("FooBar123")
+    assert password_validator("Foo Bar 123")
+    assert password_validator("FooBar123$", special_char=True)
 
 
 def test_password_generator():
diff --git a/tests/unit/test_vendors_jsonschema.py b/tests/unit/test_vendors_jsonschema.py
index 2006fd3862..5e70665a82 100644
--- a/tests/unit/test_vendors_jsonschema.py
+++ b/tests/unit/test_vendors_jsonschema.py
@@ -29,20 +29,18 @@
 from rero_ils.modules.vendors.api import Vendor
 
 
-def test_vendors_special_rero_validation(
-    app, vendor_martigny_data, vendors_schema
-):
+def test_vendors_special_rero_validation(app, vendor_martigny_data, vendors_schema):
     """Test RERO special validation data."""
     record = copy.deepcopy(vendor_martigny_data)
     validate(record, vendors_schema)
-    record['contacts'].append(record['contacts'][0])
+    record["contacts"].append(record["contacts"][0])
     with pytest.raises(ValidationError) as err:
         Vendor.validate(Vendor(record))
-    assert 'Can not have multiple contacts with the same type' in str(err)
+    assert "Can not have multiple contacts with the same type" in str(err)
 
-    record['contacts'] = vendor_martigny_data['contacts']
-    record['notes'].append(record['notes'][0])
+    record["contacts"] = vendor_martigny_data["contacts"]
+    record["notes"].append(record["notes"][0])
     with pytest.raises(ValidationError) as err:
         Vendor.validate(Vendor(record))
-    assert 'Can not have multiple notes with the same type' in str(err)
+    assert "Can not have multiple notes with the same type" in str(err)
diff --git a/tests/utils.py b/tests/utils.py
index af48eee219..c86ab7b72c 100644
--- a/tests/utils.py
+++ b/tests/utils.py
@@ -24,8 +24,7 @@
 import jsonref
 import xmltodict
 from flask import url_for
-from invenio_accounts.testutils import login_user_via_session, \
-    login_user_via_view
+from invenio_accounts.testutils import login_user_via_session, login_user_via_view
 from invenio_circulation.api import get_loan_for_item
 from invenio_db import db
 from invenio_oauth2server.models import Client, Token
@@ -68,9 +67,9 @@ def check_permission(permission_policy, actions, record):
     """
     for action_name, action_result in actions.items():
         result = permission_policy(action_name, record=record).can()
-        assert \
-            result == action_result, \
-            f'{action_name} :: return {result} but should {action_result}'
+        assert (
+            result == action_result
+        ), f"{action_name} :: return {result} but should {action_result}"
 
 
 def login_user(client, user):
@@ -94,9 +93,7 @@ def get_xml_dict(response, ordered=False):
     """Get XML from response."""
     if ordered:
         return xmltodict.parse(response.get_data(as_text=True))
-    return json.loads(json.dumps(
-        xmltodict.parse(response.get_data(as_text=True))
-    ))
+    return json.loads(json.dumps(xmltodict.parse(response.get_data(as_text=True))))
 
 
 def get_csv(response):
@@ -111,8 +108,8 @@ def parse_csv(raw_data):
 
 
 def postdata(
-        client, endpoint, data=None, headers=None, url_data=None,
-        force_data_as_json=True):
+    client, endpoint, data=None, headers=None, url_data=None, force_data_as_json=True
+):
     """Build URL from given endpoint and send given data to it.
 
     :param force_data_as_json: if True, send the data as JSON.
@@ -121,19 +118,12 @@
     if data is None:
         data = {}
     if headers is None:
-        headers = [
-            ('Accept', 'application/json'),
-            ('Content-Type', 'application/json')
-        ]
+        headers = [("Accept", "application/json"), ("Content-Type", "application/json")]
     if url_data is None:
         url_data = {}
     if force_data_as_json:
         data = json.dumps(data)
-    res = client.post(
-        url_for(endpoint, **url_data),
-        data=data,
-        headers=headers
-    )
+    res = client.post(url_for(endpoint, **url_data), data=data, headers=headers)
     output = get_json(res)
     return res, output
 
 
@@ -145,9 +135,13 @@ def to_relative_url(url):
     external urls.
     """
     parsed = urlparse(url)
-    return parsed.path + '?' + '&'.join([
-        f'{param}={val[0]}' for param, val in parse_qs(parsed.query).items()
-    ])
+    return (
+        parsed.path
+        + "?"
+ + "&".join( + [f"{param}={val[0]}" for param, val in parse_qs(parsed.query).items()] + ) + ) def get_mapping(name): @@ -155,50 +149,46 @@ def get_mapping(name): return current_search.client.indices.get_mapping(name) -def flush_index(name): - """Flush index.""" - return current_search.flush_and_refresh(name) - - def loaded_resources_report(): """For debug only: returns a list or count of loaded objects.""" objects = { - 'organisations': Organisation, - 'libraries': Library, - 'locations': Location, - 'circ_policies': CircPolicy, - 'item_types': ItemType, - 'patron_types': PatronType, - 'patrons': Patron, - 'documents': Document, - 'items': Item, - 'holdings': Holding + "organisations": Organisation, + "libraries": Library, + "locations": Location, + "circ_policies": CircPolicy, + "item_types": ItemType, + "patron_types": PatronType, + "patrons": Patron, + "documents": Document, + "items": Item, + "holdings": Holding, } report = {} for object in objects: object_pids = objects[object].get_all_pids() report[object] = len(list(object_pids)) item_details = [] - if object == 'items': + if object == "items": for item in object_pids: item_details.append( { - 'item_pid': item, - 'item_status': objects[object].get_record_by_pid( - item).status, - 'requests': objects[object].get_record_by_pid( - item).number_of_requests(), - 'loans': get_loan_for_item(item_pid_to_object(item)) + "item_pid": item, + "item_status": objects[object].get_record_by_pid(item).status, + "requests": objects[object] + .get_record_by_pid(item) + .number_of_requests(), + "loans": get_loan_for_item(item_pid_to_object(item)), } ) - report['item_details'] = item_details + report["item_details"] = item_details return report -def mock_response(status=200, content="CONTENT", headers=None, json_data=None, - raise_for_status=None): +def mock_response( + status=200, content="CONTENT", headers=None, json_data=None, raise_for_status=None +): """Mock a request response.""" - headers = headers or {'Content-Type': 'text/plain'} + headers = headers or {"Content-Type": "text/plain"} mock_resp = Mock() # mock raise_for_status call w/optional error mock_resp.raise_for_status = Mock() @@ -211,7 +201,7 @@ def mock_response(status=200, content="CONTENT", headers=None, json_data=None, mock_resp.text = content # add json data if provided if json_data: - mock_resp.headers['Content-Type'] = 'application/json' + mock_resp.headers["Content-Type"] = "application/json" mock_resp.json = MagicMock(return_value=json_data) mock_resp.text = json.dumps(json_data) mock_resp.content = json.dumps(json_data) @@ -222,7 +212,7 @@ def get_timezone_difference(timezone, date): """Get timezone offset difference, in hours.""" if date.tzinfo is not None: date = date.replace(tzinfo=None) - return int(timezone.utcoffset(date).total_seconds()/3600) + return int(timezone.utcoffset(date).total_seconds() / 3600) def check_timezone_date(timezone, date, expected=None): @@ -235,8 +225,7 @@ def check_timezone_date(timezone, date, expected=None): hour = (date.hour + difference) % 24 # Prepare date tocheck_date = date.astimezone(timezone) - error_msg = "Date: %s. Expected: %s. Minutes should be: %s. Hour: %s" % ( - tocheck_date, date, date.minute, hour) + error_msg = f"Date: {tocheck_date}. Expected: {date}. Minutes should be: {date.minute}. 
Hour: {hour}" # Expected list defines accepted hours for tests if expected: assert hour in expected, error_msg @@ -246,22 +235,20 @@ def check_timezone_date(timezone, date, expected=None): def jsonloader(uri, **kwargs): """This method will be used by the mock to replace requests.get.""" - ref_split = uri.split('/') + ref_split = uri.split("/") # TODO: find a better way to determine name and path. - if ref_split[-2] == 'common': - path = 'rero_ils.jsonschemas' - name = f'common/{ref_split[-1]}' + if ref_split[-2] == "common": + path = "rero_ils.jsonschemas" + name = f"common/{ref_split[-1]}" else: - if ref_split[-2] in ['remote_entities', 'local_entities']: - path = f'rero_ils.modules.entities.{ref_split[-2]}.jsonschemas' + if ref_split[-2] in ["remote_entities", "local_entities"]: + path = f"rero_ils.modules.entities.{ref_split[-2]}.jsonschemas" else: - path = f'rero_ils.modules.{ref_split[-2]}.jsonschemas' - name = f'{ref_split[-2]}/{ref_split[-1]}' + path = f"rero_ils.modules.{ref_split[-2]}.jsonschemas" + name = f"{ref_split[-2]}/{ref_split[-1]}" schema_in_bytes = resource_string(path, name) - schema = json.loads(schema_in_bytes.decode('utf8')) - - return schema + return json.loads(schema_in_bytes.decode("utf8")) def get_schema(schema_in_bytes): @@ -273,7 +260,7 @@ def get_schema(schema_in_bytes): :schema_in_bytes: schema in bytes. :returns: resolved json schema. """ - schema = jsonref.loads(schema_in_bytes.decode('utf8'), loader=jsonloader) + schema = jsonref.loads(schema_in_bytes.decode("utf8"), loader=jsonloader) # Replace all remaining $refs while schema != jsonref.loads(jsonref.dumps(schema), loader=jsonloader): @@ -289,18 +276,18 @@ def create_new_item_from_existing_item(item=None): :return: the newly created item """ data = deepcopy(item) - data.pop('barcode') - data['status'] = ItemStatus.ON_SHELF - new_item = Item.create(data=data, dbcommit=True, - reindex=True, delete_pid=True) - flush_index(ItemsSearch.Meta.index) + data.pop("barcode") + data["status"] = ItemStatus.ON_SHELF + new_item = Item.create(data=data, dbcommit=True, reindex=True, delete_pid=True) + ItemsSearch.flush_and_refresh() assert new_item.status == ItemStatus.ON_SHELF assert new_item.number_of_requests() == 0 return new_item def item_record_to_a_specific_loan_state( - item=None, loan_state=None, params=None, copy_item=True): + item=None, loan_state=None, params=None, copy_item=True +): """Put an item into a specific circulation loan state. 
 
     :param item: the item record
@@ -314,50 +301,46 @@ def item_record_to_a_specific_loan_state(
         item = create_new_item_from_existing_item(item=item)
 
     # complete missing parameters
-    params.setdefault('transaction_date',
-                      datetime.now(timezone.utc).isoformat())
-    params.setdefault('document_pid', item.document_pid)
+    params.setdefault("transaction_date", datetime.now(timezone.utc).isoformat())
+    params.setdefault("document_pid", item.document_pid)
     # a parameter to allow in_transit returns
-    checkin_transaction_location_pid = \
-        params.pop('checkin_transaction_location_pid', None)
-    patron = Patron.get_record_by_pid(params.get('patron_pid'))
+    checkin_transaction_location_pid = params.pop(
+        "checkin_transaction_location_pid", None
+    )
+    patron = Patron.get_record_by_pid(params.get("patron_pid"))
     # perform circulation actions
     if loan_state in [
-            LoanState.PENDING, LoanState.ITEM_AT_DESK,
-            LoanState.ITEM_ON_LOAN,
-            LoanState.ITEM_IN_TRANSIT_FOR_PICKUP,
-            LoanState.ITEM_IN_TRANSIT_TO_HOUSE
+        LoanState.PENDING,
+        LoanState.ITEM_AT_DESK,
+        LoanState.ITEM_ON_LOAN,
+        LoanState.ITEM_IN_TRANSIT_FOR_PICKUP,
+        LoanState.ITEM_IN_TRANSIT_TO_HOUSE,
     ]:
         item, actions = item.request(**params)
-        loan = Loan.get_record_by_pid(actions[LoanAction.REQUEST].get('pid'))
+        loan = Loan.get_record_by_pid(actions[LoanAction.REQUEST].get("pid"))
         assert item.number_of_requests() >= 1
-        assert item.is_requested_by_patron(patron.get(
-            'patron', {}).get('barcode')[0])
+        assert item.is_requested_by_patron(patron.get("patron", {}).get("barcode")[0])
     if loan_state in [
-            LoanState.ITEM_AT_DESK,
-            LoanState.ITEM_IN_TRANSIT_FOR_PICKUP,
-            LoanState.ITEM_IN_TRANSIT_TO_HOUSE
+        LoanState.ITEM_AT_DESK,
+        LoanState.ITEM_IN_TRANSIT_FOR_PICKUP,
+        LoanState.ITEM_IN_TRANSIT_TO_HOUSE,
     ]:
         item, actions = item.validate_request(**params, pid=loan.pid)
-        loan = Loan.get_record_by_pid(actions[LoanAction.VALIDATE].get('pid'))
-    if loan_state in [
-            LoanState.ITEM_ON_LOAN,
-            LoanState.ITEM_IN_TRANSIT_TO_HOUSE
-    ]:
+        loan = Loan.get_record_by_pid(actions[LoanAction.VALIDATE].get("pid"))
+    if loan_state in [LoanState.ITEM_ON_LOAN, LoanState.ITEM_IN_TRANSIT_TO_HOUSE]:
         item, actions = item.checkout(**params, pid=loan.pid)
-        loan = Loan.get_record_by_pid(actions[LoanAction.CHECKOUT].get('pid'))
+        loan = Loan.get_record_by_pid(actions[LoanAction.CHECKOUT].get("pid"))
     if loan_state == LoanState.ITEM_IN_TRANSIT_TO_HOUSE:
         if checkin_transaction_location_pid:
-            params['transaction_location_pid'] = \
-                checkin_transaction_location_pid
+            params["transaction_location_pid"] = checkin_transaction_location_pid
         item, actions = item.checkin(**params, pid=loan.pid)
-        loan = Loan.get_record_by_pid(actions[LoanAction.CHECKIN].get('pid'))
+        loan = Loan.get_record_by_pid(actions[LoanAction.CHECKIN].get("pid"))
 
-    flush_index(ItemsSearch.Meta.index)
-    flush_index(LoansSearch.Meta.index)
+    ItemsSearch.flush_and_refresh()
+    LoansSearch.flush_and_refresh()
 
-    assert loan['state'] == loan_state
+    assert loan["state"] == loan_state
     return item, loan
@@ -368,7 +351,7 @@ def create_patron(data):
     """Create a patron with its related user.
 
     :returns: - A freshly created Patron instance.
""" ptrn = create_patron_from_data(data=data) - flush_index(PatronsSearch.Meta.index) + PatronsSearch.flush_and_refresh() return ptrn @@ -381,7 +364,7 @@ def create_user_token(client_name, user, access_token): user_id=user.id, is_internal=True, is_confidential=False, - _default_scopes='' + _default_scopes="", ) client.gen_salt() token = Token( @@ -391,7 +374,7 @@ def create_user_token(client_name, user, access_token): expires=None, is_personal=True, is_internal=True, - _scopes='' + _scopes="", ) db.session.add(client) db.session.add(token) @@ -411,10 +394,11 @@ def create_selfcheck_terminal(data): def patch_expiration_date(data): """Patch expiration date for patrons.""" - if data.get('patron', {}).get('expiration_date'): + if data.get("patron", {}).get("expiration_date"): # expiration date in one year - data['patron']['expiration_date'] = \ - (datetime.now() + timedelta(days=365)).strftime('%Y-%m-%d') + data["patron"]["expiration_date"] = ( + datetime.now() + timedelta(days=365) + ).strftime("%Y-%m-%d") return data @@ -423,7 +407,7 @@ def clean_text(data): if isinstance(data, list): data = [clean_text(val) for val in data] elif isinstance(data, dict): - if '_text' in data: - del data['_text'] + if "_text" in data: + del data["_text"] data = {key: clean_text(val) for key, val in data.items()} return data