diff --git a/.github/workflows/build-base-image.yml b/.github/workflows/build-base-image.yml index a20571d62d..55a0fcc22a 100644 --- a/.github/workflows/build-base-image.yml +++ b/.github/workflows/build-base-image.yml @@ -20,7 +20,7 @@ jobs: packages: write steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: persist-credentials: false diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index 578e3a4003..7ff3741fa9 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -20,7 +20,7 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: submodules: recursive diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index c06bb784e2..9af0a6d96c 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -11,7 +11,7 @@ jobs: contents: read steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up Python uses: actions/setup-python@v4 diff --git a/.github/workflows/mypy.yml b/.github/workflows/mypy.yml index beca9347b7..232d36124a 100644 --- a/.github/workflows/mypy.yml +++ b/.github/workflows/mypy.yml @@ -10,7 +10,7 @@ jobs: contents: read steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up Python 🐍 uses: actions/setup-python@v4 diff --git a/.github/workflows/test-build.yml b/.github/workflows/test-build.yml index 9c53a76fab..d840653ec5 100644 --- a/.github/workflows/test-build.yml +++ b/.github/workflows/test-build.yml @@ -25,7 +25,7 @@ jobs: if: github.event_name == 'push' || github.event.pull_request.head.repo.full_name != github.repository steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 # See comment here: https://github.com/actions/runner-images/issues/1187#issuecomment-686735760 - name: Disable network offload @@ -74,7 +74,7 @@ jobs: if: github.event_name == 'push' || github.event.pull_request.head.repo.full_name != github.repository steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 # See comment here: https://github.com/actions/runner-images/issues/1187#issuecomment-686735760 - name: Disable network offload @@ -124,7 +124,7 @@ jobs: baseimage: ${{ steps.baseimage.outputs.tag }} steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: persist-credentials: false @@ -253,7 +253,7 @@ jobs: POSTGRES_DB: ${{ env.POSTGRES_DB }} steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: persist-credentials: false @@ -311,7 +311,7 @@ jobs: image: ["scripts", "webapp", "exec"] steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: persist-credentials: false fetch-depth: 0 diff --git a/alembic/versions/20230905_2b672c6fb2b9_type_coerce_collection_settings.py b/alembic/versions/20230905_2b672c6fb2b9_type_coerce_collection_settings.py new file mode 100644 index 0000000000..0f11714085 --- /dev/null +++ b/alembic/versions/20230905_2b672c6fb2b9_type_coerce_collection_settings.py @@ -0,0 +1,111 @@ +"""Type coerce collection settings + +Revision ID: 2b672c6fb2b9 +Revises: 0df58829fc1a +Create Date: 2023-09-05 06:40:35.739869+00:00 + +""" +import json +import logging +from copy import deepcopy +from typing import Any, Dict, Optional, Tuple + +from pydantic import PositiveInt, ValidationError, parse_obj_as + +from alembic import op + +# revision identifiers, used by Alembic. 
+revision = "2b672c6fb2b9" +down_revision = "0df58829fc1a" +branch_labels = None +depends_on = None + + +log = logging.getLogger(f"palace.migration.{revision}") +log.setLevel(logging.INFO) +log.disabled = False + + +# All the settings types that have non-str types +ALL_SETTING_TYPES: Dict[str, Any] = { + "verify_certificate": Optional[bool], + "default_reservation_period": Optional[PositiveInt], + "loan_limit": Optional[PositiveInt], + "hold_limit": Optional[PositiveInt], + "max_retry_count": Optional[PositiveInt], + "ebook_loan_duration": Optional[PositiveInt], + "default_loan_duration": Optional[PositiveInt], +} + + +def _coerce_types(original_settings: Dict[str, Any]) -> Tuple[bool, Dict[str, Any]]: + """Coerce the types, in-place""" + modified = False + modified_settings = deepcopy(original_settings) + for setting_name, setting_type in ALL_SETTING_TYPES.items(): + if setting_name in original_settings: + # If the setting is an empty string, we set it to None + if original_settings[setting_name] == "": + setting = None + else: + setting = original_settings[setting_name] + + try: + modified = True + modified_settings[setting_name] = parse_obj_as(setting_type, setting) + except ValidationError as e: + log.error( + f"Error while parsing setting {setting_name}. Settings: {original_settings}." + ) + raise e + + return modified, modified_settings + + +def upgrade() -> None: + connection = op.get_bind() + # Fetch all integration settings with the 'licenses' goal + results = connection.execute( + "SELECT id, settings from integration_configurations where goal='LICENSE_GOAL';" + ).fetchall() + + # For each integration setting, we check id any of the non-str + # keys are present in the DB + # We then type-coerce that value + for settings_id, settings in results: + modified, updated_settings = _coerce_types(settings) + if modified: + log.info( + f"Updating settings for integration_configuration (id:{settings_id}). " + f"Original settings: {settings}. New settings: {updated_settings}." + ) + # If any of the values were modified, we update the DB + connection.execute( + "UPDATE integration_configurations SET settings=%s where id=%s", + json.dumps(updated_settings), + settings_id, + ) + + # Do the same for any Library settings + results = connection.execute( + "SELECT ilc.parent_id, ilc.library_id, ilc.settings from integration_library_configurations ilc " + "join integration_configurations ic on ilc.parent_id = ic.id where ic.goal='LICENSE_GOAL';" + ).fetchall() + + for parent_id, library_id, settings in results: + modified, updated_settings = _coerce_types(settings) + if modified: + log.info( + f"Updating settings for integration_library_configuration (parent_id:{parent_id}/library_id:{library_id}). " + f"Original settings: {settings}. New settings: {updated_settings}." 
+ ) + connection.execute( + "UPDATE integration_library_configurations SET settings=%s where parent_id=%s and library_id=%s", + json.dumps(updated_settings), + parent_id, + library_id, + ) + + +def downgrade() -> None: + """There is no need to revert the types back to strings""" diff --git a/api/admin/controller/collection_settings.py b/api/admin/controller/collection_settings.py index 8ead576df3..4b1508f1bd 100644 --- a/api/admin/controller/collection_settings.py +++ b/api/admin/controller/collection_settings.py @@ -356,10 +356,10 @@ def process_settings( # validate then apply try: - settings_class(**collection_settings) + validated_settings = settings_class(**collection_settings) except ProblemError as ex: return ex.problem_detail - collection.integration_configuration.settings_dict = collection_settings + collection.integration_configuration.settings_dict = validated_settings.dict() return None def _set_external_integration_link( diff --git a/api/admin/controller/settings.py b/api/admin/controller/settings.py index e70070d4ab..7f05d7d73d 100644 --- a/api/admin/controller/settings.py +++ b/api/admin/controller/settings.py @@ -382,10 +382,10 @@ def _set_configuration_library( config = None # Validate first - protocol_class.library_settings_class()(**info_copy) + validated_data = protocol_class.library_settings_class()(**info_copy) # Attach the configuration config = configuration.for_library(cast(int, library.id), create=True) - config.settings_dict = info_copy + config.settings_dict = validated_data.dict() return config def _set_integration_library(self, integration, library_info, protocol): diff --git a/core/configuration/library.py b/core/configuration/library.py index f5330e27de..410417885d 100644 --- a/core/configuration/library.py +++ b/core/configuration/library.py @@ -638,12 +638,17 @@ def validate_language_codes( ) -> Optional[List[str]]: """Verify that collection languages are valid.""" if value is not None: + languages = [] for language in value: - if not LanguageCodes.string_to_alpha_3(language): + validated_language = LanguageCodes.string_to_alpha_3(language) + if validated_language is None: field_label = cls.get_form_field_label(field.name) raise SettingsValidationError( problem_detail=UNKNOWN_LANGUAGE.detailed( f'"{field_label}": "{language}" is not a valid language code.' ) ) + if validated_language not in languages: + languages.append(validated_language) + return languages return value diff --git a/core/model/collection.py b/core/model/collection.py index 1d4cc7e5cf..df102b004d 100644 --- a/core/model/collection.py +++ b/core/model/collection.py @@ -325,10 +325,11 @@ def default_loan_period(self, library, medium=EditionConstants.BOOK_MEDIUM): that someone who borrows a non-open-access item from this collection has it for this number of days. """ - return ( + value = ( self.default_loan_period_setting(library, medium) or self.STANDARD_DEFAULT_LOAN_PERIOD ) + return value @classmethod def loan_period_key(cls, medium=EditionConstants.BOOK_MEDIUM): diff --git a/core/model/edition.py b/core/model/edition.py index 3e0589d100..214e87a5f8 100644 --- a/core/model/edition.py +++ b/core/model/edition.py @@ -49,6 +49,21 @@ class Edition(Base, EditionConstants): # in a pinch. MAX_FALLBACK_THUMBNAIL_HEIGHT = 500 + # Postgresql doesn't allow indices to exceed 1/3 of a buffer page. 
+    # We saw the following error here: https://ebce-lyrasis.atlassian.net/browse/PP-188:
+    #
+    # Index row size 3208 exceeds btree version 4 maximum 2704 for index "ix_editions_author"
+    # DETAIL: Index row references tuple (48187,9) in relation "editions".
+    # HINT: Values larger than 1/3 of a buffer page cannot be indexed.
+    #
+    # On rare occasions the author (and sort_author) fields contain a concatenated list of a large
+    # number of authors, which exceeds this limit and breaks the index update. The exact character
+    # threshold is unclear, since it isn't obvious how the index row size is calculated: in testing,
+    # author values longer than 2700 characters produced the error above (index row size 2800),
+    # while values below 2650 characters were fine.
+    SAFE_AUTHOR_FIELD_LENGTH_TO_AVOID_PG_INDEX_ERROR = 2650
+
     # This Edition is associated with one particular
     # identifier--the one used by its data source to identify
     # it. Through the Equivalency class, it is associated with a
@@ -724,6 +739,19 @@ def calculate_author(self):
             sort_author = " ; ".join(sorted(sort_names))
         else:
             sort_author = self.UNKNOWN_AUTHOR
+
+        def truncate_string(mystr: str) -> str:
+            if len(mystr) > self.SAFE_AUTHOR_FIELD_LENGTH_TO_AVOID_PG_INDEX_ERROR:
+                return (
+                    mystr[: (self.SAFE_AUTHOR_FIELD_LENGTH_TO_AVOID_PG_INDEX_ERROR - 3)]
+                    + "..."
+                )
+            return mystr
+
+        # Very long author and sort_author strings can cause issues for Postgres indices. See the
+        # comment above the SAFE_AUTHOR_FIELD_LENGTH_TO_AVOID_PG_INDEX_ERROR constant for details.
+        author = truncate_string(author)
+        sort_author = truncate_string(sort_author)
         return author, sort_author
 
     def choose_cover(self, policy=None):
diff --git a/core/util/notifications.py b/core/util/notifications.py
index 4a42b1d903..5641c0d15c 100644
--- a/core/util/notifications.py
+++ b/core/util/notifications.py
@@ -65,12 +65,15 @@ def send_loan_expiry_message(
         edition: Edition = loan.license_pool.presentation_edition
         identifier: Identifier = loan.license_pool.identifier
         library_short_name = loan.library and loan.library.short_name
+        title = f"Only {days_to_expiry} {'days' if days_to_expiry != 1 else 'day'} left on your loan!"
+        body = f"Your loan on {edition.title} is expiring soon"
         for token in tokens:
             msg = messaging.Message(
                 token=token.device_token,
+                notification=messaging.Notification(title=title, body=body),
                 data=dict(
-                    title=f"Only {days_to_expiry} {'days' if days_to_expiry != 1 else 'day'} left on your loan!",
-                    body=f"Your loan on {edition.title} is expiring soon",
+                    title=title,
+                    body=body,
                     event_type=NotificationConstants.LOAN_EXPIRY_TYPE,
                     loans_endpoint=f"{url}/{loan.library.short_name}/loans",
                     external_identifier=loan.patron.external_identifier,
@@ -129,11 +132,13 @@ def send_holds_notifications(cls, holds: list[Hold]) -> list[str]:
             loans_api = f"{url}/{hold.patron.library.short_name}/loans"
             work: Work = hold.work
             identifier: Identifier = hold.license_pool.identifier
+            title = f'Your hold on "{work.title}" is available!'
for token in tokens: msg = messaging.Message( token=token.device_token, + notification=messaging.Notification(title=title), data=dict( - title=f'Your hold on "{work.title}" is available!', + title=title, event_type=NotificationConstants.HOLD_AVAILABLE_TYPE, loans_endpoint=loans_api, external_identifier=hold.patron.external_identifier, diff --git a/docker/ci/check_service_status.sh b/docker/ci/check_service_status.sh index 487a7b4bcc..a76ff6d02d 100644 --- a/docker/ci/check_service_status.sh +++ b/docker/ci/check_service_status.sh @@ -29,3 +29,31 @@ function check_service_status() echo " OK" fi } + +function check_crontab() { + container="$1" + + # Installing the crontab will reveal any errors and exit with an error code + $(docker exec "$container" /bin/bash -c "crontab /etc/cron.d/circulation") + validate_status=$? + if [[ "$validate_status" != 0 ]]; then + echo " FAIL: crontab is incorrect" + exit 1 + else + echo " OK" + fi +} + +function run_script() { + container="$1" + script="$2" + + output=$(docker exec "$container" /bin/bash -c "$script") + script_status=$? + if [[ "$script_status" != 0 ]]; then + echo " FAIL: script run failed" + exit 1 + else + echo " OK" + fi +} diff --git a/docker/ci/test_scripts.sh b/docker/ci/test_scripts.sh index 2463a046a5..d283e87093 100755 --- a/docker/ci/test_scripts.sh +++ b/docker/ci/test_scripts.sh @@ -14,4 +14,11 @@ wait_for_runit "$container" # Make sure that cron is running in the scripts container check_service_status "$container" /etc/service/cron + +# Ensure the installed crontab has no problems +check_crontab "$container" + +# Run a single script to ensure basic settings are correct +# The opds2 import script will only test the DB configuration +run_script "$container" "source ../env/bin/activate && ./opds2_import_monitor" exit 0 diff --git a/poetry.lock b/poetry.lock index 0ef4e4503c..42710ed461 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2,13 +2,13 @@ [[package]] name = "alembic" -version = "1.11.3" +version = "1.12.0" description = "A database migration tool for SQLAlchemy." optional = false python-versions = ">=3.7" files = [ - {file = "alembic-1.11.3-py3-none-any.whl", hash = "sha256:d6c96c2482740592777c400550a523bc7a9aada4e210cae2e733354ddae6f6f8"}, - {file = "alembic-1.11.3.tar.gz", hash = "sha256:3db4ce81a9072e1b5aa44c2d202add24553182672a12daf21608d6f62a8f9cf9"}, + {file = "alembic-1.12.0-py3-none-any.whl", hash = "sha256:03226222f1cf943deee6c85d9464261a6c710cd19b4fe867a3ad1f25afda610f"}, + {file = "alembic-1.12.0.tar.gz", hash = "sha256:8e7645c32e4f200675e69f0745415335eb59a3663f5feb487abfa0b30c45888b"}, ] [package.dependencies] @@ -2572,13 +2572,13 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "pre-commit" -version = "3.3.3" +version = "3.4.0" description = "A framework for managing and maintaining multi-language pre-commit hooks." 
optional = false python-versions = ">=3.8" files = [ - {file = "pre_commit-3.3.3-py2.py3-none-any.whl", hash = "sha256:10badb65d6a38caff29703362271d7dca483d01da88f9d7e05d0b97171c136cb"}, - {file = "pre_commit-3.3.3.tar.gz", hash = "sha256:a2256f489cd913d575c145132ae196fe335da32d91a8294b7afe6622335dd023"}, + {file = "pre_commit-3.4.0-py2.py3-none-any.whl", hash = "sha256:96d529a951f8b677f730a7212442027e8ba53f9b04d217c4c67dc56c393ad945"}, + {file = "pre_commit-3.4.0.tar.gz", hash = "sha256:6bbd5129a64cad4c0dfaeeb12cd8f7ea7e15b77028d985341478c8af3c759522"}, ] [package.dependencies] @@ -2859,65 +2859,79 @@ files = [ [[package]] name = "pyinstrument" -version = "4.5.1" +version = "4.5.3" description = "Call stack profiler for Python. Shows you why your code is slow!" optional = false python-versions = ">=3.7" files = [ - {file = "pyinstrument-4.5.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8f334250b158010d1e2c70d9d10b880f848e03a917079b366b1e2d8890348d41"}, - {file = "pyinstrument-4.5.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:55537cd763aee8bce65a201d5ec1aef74677d9ff3ab3391316604ca68740d92a"}, - {file = "pyinstrument-4.5.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b3d7933bd83e913e21c4031d5c1aeeb2483147e4037363f43475df9ad962c748"}, - {file = "pyinstrument-4.5.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f0d8f6b6df7ce338af35b213cd89b685b2a7c15569f482476c4e0942700b3e71"}, - {file = "pyinstrument-4.5.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98101d064b7af008189dd6f0bdd01f9be39bc6a4630505dfb13ff6ef51a0c67c"}, - {file = "pyinstrument-4.5.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:46f1607e29f93da16d38be41ad2062a56731ff4efa24e561ac848719e8b8ca41"}, - {file = "pyinstrument-4.5.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e287ebc1a8b00d3a767829c03f210df0824ab2e0f6340e8f63bab6fcef1b3546"}, - {file = "pyinstrument-4.5.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d15613b8d5d509c29001f2edfadd73d418c2814262433fd1225c4f7893e4010a"}, - {file = "pyinstrument-4.5.1-cp310-cp310-win32.whl", hash = "sha256:04c67f08bac41173bc6b44396c60bf1a1879864d0684a7717b1bb8be27793bd9"}, - {file = "pyinstrument-4.5.1-cp310-cp310-win_amd64.whl", hash = "sha256:dc07267447935d28ee914f955613b04d621e5bb44995f793508d6f0eb3ec2818"}, - {file = "pyinstrument-4.5.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8285cfb25b9ee72766bdac8db8c276755115a6e729cda4571005d1ba58c99dda"}, - {file = "pyinstrument-4.5.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b58239f4a0fe64f688260be0e5b4a1d19a23b890b284cf6c1c8bd0ead4616f41"}, - {file = "pyinstrument-4.5.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4039210a80015ae0ad2016a3b3311b068f5b334d5f5ce3c54d473f8624db0d35"}, - {file = "pyinstrument-4.5.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9b28a4c5926036155062c83e15ca93437dbe2d41dd5feeac96f72d4d16b3431c"}, - {file = "pyinstrument-4.5.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89d2c2a9de60712abd2228033e4ac63cdee86783af5288f2d7f8efc365e33425"}, - {file = "pyinstrument-4.5.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bf0fdb17cb245c53826c77e2b95095a8fb5053e49ae8ef18aecbbd184028f9e7"}, - {file = 
"pyinstrument-4.5.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:65ac43f8a1b74a331b5a4f60985531654a8d71a7698e6be5ac7e8493e7a37f37"}, - {file = "pyinstrument-4.5.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:61632d287f70d850a517533b9e1bf8da41527ffc4d781d4b65106f64ee33cb98"}, - {file = "pyinstrument-4.5.1-cp311-cp311-win32.whl", hash = "sha256:22ae739152ed2366c654f80aa073579f9d5a93caffa74dcb839a62640ffe429f"}, - {file = "pyinstrument-4.5.1-cp311-cp311-win_amd64.whl", hash = "sha256:c72a33168485172a7c2dbd6c4aa3262c8d2a6154bc0792403d8e0689c6ff5304"}, - {file = "pyinstrument-4.5.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8c3dabcb70b705d1342f52f0c3a00647c8a244d1e6ffe46459c05d4533ffabfc"}, - {file = "pyinstrument-4.5.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:17d469572d48ee0b78d4ff7ed3972ff40abc70c7dab4777897c843cb03a6ab7b"}, - {file = "pyinstrument-4.5.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f66416fa4b3413bc60e6b499e60e8d009384c85cd03535f82337dce55801c43f"}, - {file = "pyinstrument-4.5.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c888fca16c3ae04a6d7b5a29ee0c12f9fa23792fab695117160c48c3113428f"}, - {file = "pyinstrument-4.5.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:861fe8c41ac7e54a57ed6ef63268c2843fbc695012427a3d19b2eb1307d9bc61"}, - {file = "pyinstrument-4.5.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:0bf91cd5d6c80ff25fd1a136545a5cf752522190b6e6f3806559c352f18d0e73"}, - {file = "pyinstrument-4.5.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:b16afb5e67d4d901ef702160e85e04001183b7cdea7e38c8dfb37e491986ccff"}, - {file = "pyinstrument-4.5.1-cp37-cp37m-win32.whl", hash = "sha256:f12312341c505e7441e5503b7c77974cff4156d072f0e7f9f822a6b5fdafbc20"}, - {file = "pyinstrument-4.5.1-cp37-cp37m-win_amd64.whl", hash = "sha256:06d96b442a1ae7c267aa34450b028d80559c4f968b10e4d3ce631b0a6ccea6ef"}, - {file = "pyinstrument-4.5.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:c6234094ff0ea7d51e7d4699f192019359bf12d5bbe9e1c9c5d1983562162d58"}, - {file = "pyinstrument-4.5.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f025522edc35831af34bcdbe300b272b432d2afd9811eb780e326116096cbff5"}, - {file = "pyinstrument-4.5.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0a091c575367af427e80829ec414f69a8398acdd68ddfaeb335598071329b44"}, - {file = "pyinstrument-4.5.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4ec169cd288f230cbc6a1773384f20481b0a14d2d7cceecf1fb65e56835eaa9a"}, - {file = "pyinstrument-4.5.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:004745e83c79d0db7ea8787aba476f13d8bb6d00d75b00d8dbd933a9c7ee1685"}, - {file = "pyinstrument-4.5.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:54be442df5039bc7c73e3e86de0093ca82f3e446392bebab29e51a1512c796cb"}, - {file = "pyinstrument-4.5.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:35e5be8621b3381cf10b1f16bbae527cb7902e87b64e0c9706bc244f6fee51b1"}, - {file = "pyinstrument-4.5.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:50e93fac7e42dba8b3c630ed00808e7664d0d6c6b0c477462e7b061a31be23dc"}, - {file = "pyinstrument-4.5.1-cp38-cp38-win32.whl", hash = "sha256:b0a88bfe24d4efb129ef2ae7e2d50fa29908634e893bf154e29f91655c558692"}, - {file = 
"pyinstrument-4.5.1-cp38-cp38-win_amd64.whl", hash = "sha256:b8a71ef9c2ad81e5f3d5f92e1d21a0c9b5f9992e94d0bfcfa9020ea88df4e69f"}, - {file = "pyinstrument-4.5.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:9882827e681466d1aff931479387ed77e29674c179bc10fc67f1fa96f724dd20"}, - {file = "pyinstrument-4.5.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:427228a011d5be21ff009dc05fcd512cee86ea2a51687a3300b8b822bad6815b"}, - {file = "pyinstrument-4.5.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50501756570352e78aaf2aee509b5eb6c68706a2f2701dc3a84b066e570c61ca"}, - {file = "pyinstrument-4.5.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6471f47860f1a5807c182be7184839d747e2702625d44ec19a8f652380541020"}, - {file = "pyinstrument-4.5.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59727936e862677e9716b9317e209e5e31aa1da7eb03c65083d9dee8b5fbe0f8"}, - {file = "pyinstrument-4.5.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9341a07885cba57c2a134847aacb629f27b4ce06a4950a4619629d35a6d8619c"}, - {file = "pyinstrument-4.5.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:63c27f2ae8f0501dca4d52b42285be36095f4461dd9e340d32104c2b2df3a731"}, - {file = "pyinstrument-4.5.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1bda9b73dde7df63d7606e37340ba0a63ad59053e59eff318f3b67d5a7ea5579"}, - {file = "pyinstrument-4.5.1-cp39-cp39-win32.whl", hash = "sha256:300ed27714c43ae2feb7572e9b3ca39660fb89b3b298e94ad24b64609f823d3c"}, - {file = "pyinstrument-4.5.1-cp39-cp39-win_amd64.whl", hash = "sha256:f2d8e4a9a8167c2a47874d72d6ab0a4266ed484e9ae30f35a515f8594b224b51"}, - {file = "pyinstrument-4.5.1.tar.gz", hash = "sha256:b55a93be883c65650515319455636d32ab32692b097faa1e07f8cd9d4e0eeaa9"}, + {file = "pyinstrument-4.5.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:94f82899486441f0b31c53c4250cb65a9f20036cacb6fb75315069a7b1e3703b"}, + {file = "pyinstrument-4.5.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e295571bec2bfc1cfbb1ddd66aa5d06c54cf67179c46f0bbdcf709e8130533fd"}, + {file = "pyinstrument-4.5.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d2b2e9c0e6b6cf444716829a00855796a7f80b5bcabe07ddb29dd5c238e5014"}, + {file = "pyinstrument-4.5.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3741e001a2b06be9dc435329f14507b571b273aca8b243b8d2cffd786de1b205"}, + {file = "pyinstrument-4.5.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:92d450301957fa328391ab3da13a26249268233ea0fd1542613c148b8a635950"}, + {file = "pyinstrument-4.5.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4d6bccf4da8c13065c4096e4669ce483d1614698a279419090b9374f0b96328f"}, + {file = "pyinstrument-4.5.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:612c99a6fbad1bcabae0fe7571f5ede0ecd577d1d4a975d19fcfa281997f7075"}, + {file = "pyinstrument-4.5.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bb637628274e819faec00532cada45d0da8ae4f4033baa84f9cdce559911a4a4"}, + {file = "pyinstrument-4.5.3-cp310-cp310-win32.whl", hash = "sha256:5490c4ddd0f946de2c503c22e1099b34b241d9f4ac80f27b3dc7e484818b734b"}, + {file = "pyinstrument-4.5.3-cp310-cp310-win_amd64.whl", hash = "sha256:48372e82347281c843f9cd710fc848cb5869634e225d5bffcc627673e7554ac9"}, + {file = 
"pyinstrument-4.5.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5739550f6a631250aac0b01778882d3e77b3e4ed5c01f4112769ec023cac345d"}, + {file = "pyinstrument-4.5.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7f83b475f90764beb9a44505539f19c005ca31526f35358cde0a02b140c09c4e"}, + {file = "pyinstrument-4.5.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:118440b4c6a925f811d97fba02e99066fca8090710fa51c6873834dd37b39040"}, + {file = "pyinstrument-4.5.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ce3adea93d6f4ff54893428b49f1b771f9aa7294a79d812a207c7dd9cbe8161"}, + {file = "pyinstrument-4.5.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a5851bf8c253d37b415388a1511239a3486249d87a0436d47317480d1e9557b"}, + {file = "pyinstrument-4.5.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:3bb877bd2bc9bf492257891e585287f65c6374a1511e64f888a1ad112c18103b"}, + {file = "pyinstrument-4.5.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4b9d7254b729571151070a61c7f6c86d02320d62145b9f664a96258fcc26ad1a"}, + {file = "pyinstrument-4.5.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f294980f636193fdb70f671d17fc98fd4f3624aef1ce061b36be14f53bbe84b4"}, + {file = "pyinstrument-4.5.3-cp311-cp311-win32.whl", hash = "sha256:c04e101c32102091280ac759578d991a3a71a41fe357c651cd78b8bbe9879daf"}, + {file = "pyinstrument-4.5.3-cp311-cp311-win_amd64.whl", hash = "sha256:d4cf26f0f813db178eb36db8fa0ae48cd600b7e3c0447beddd8e7e7cec26e992"}, + {file = "pyinstrument-4.5.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:82d49865f6aef776ab914b9f09c26ad6279397d8fd26a79a3008c1becab4d88c"}, + {file = "pyinstrument-4.5.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d4efe0cdccdd44514a6ae7c061dd88d221dd77ae7d7bfd2d743c1f51f90fa3e1"}, + {file = "pyinstrument-4.5.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:33ec7d794ad206a2ad905160308cc27ad3a985691e99c31e79cfd8de53b75455"}, + {file = "pyinstrument-4.5.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:342923b5c7654c73bcd263733b1e9d2b990c2af60d429badcc7cfd5a21bb384b"}, + {file = "pyinstrument-4.5.3-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01c0d73fd1c7de4b8fca509b7c292709dbe1990527601c7d2307d4f9aca110df"}, + {file = "pyinstrument-4.5.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:2be28ec4efa59dd9539bd803381c768a2f2453b6de201e102bf02e17a3efd3f2"}, + {file = "pyinstrument-4.5.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:5ae906be229fa5ce649016206baa5d20f6a49bb7b6c7643d019f8024e2d11d66"}, + {file = "pyinstrument-4.5.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:d06ef692650f24feb3817869e6519ac117c3358bfe6474c0ded2cbca53c69a5f"}, + {file = "pyinstrument-4.5.3-cp312-cp312-win32.whl", hash = "sha256:f27742fa4b40c2fde105c24b190fa7d54e76195bc4c8d8a4fc5fa1af663468d3"}, + {file = "pyinstrument-4.5.3-cp312-cp312-win_amd64.whl", hash = "sha256:ba6b864a8234f3faf1a3a52587368975d9aca6944a06a68114eb1153501679b4"}, + {file = "pyinstrument-4.5.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:efa3140c8813056c5af939f39d750461bb917a0ba96b76cd0171c033939ae0bc"}, + {file = "pyinstrument-4.5.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:70c7542c8edfbaee7d2263b07997e668daf6c73e8386abdd1b1a243e88c29da3"}, + {file = "pyinstrument-4.5.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:df3e9fdea7f4a2a39a4403044c06efd5d00674807b9f8c104d24f5bf1412e33f"}, + {file = "pyinstrument-4.5.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64aefe67e6ad5a8254f36e0cadaa06f873539d34a3e18b883b8fa7278752f541"}, + {file = "pyinstrument-4.5.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d1949f4c4f92ea674415c74a6e5d2105b92175019b03b4808bb61d9a777baffc"}, + {file = "pyinstrument-4.5.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:376924b603278f9df034a8b4a4826ef708abb99acd161b65b66e8b62d596b7c9"}, + {file = "pyinstrument-4.5.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:59caa57aa868098cbe81c842aeac24efef861a9fb1a1f34aa227b6d57b497e57"}, + {file = "pyinstrument-4.5.3-cp37-cp37m-win32.whl", hash = "sha256:2b9da8eb4f947aba804f61cc311f466105161deebbe49b0a651c20cc0bd804b9"}, + {file = "pyinstrument-4.5.3-cp37-cp37m-win_amd64.whl", hash = "sha256:9c08df4e0b3615df56affdb0898f89c3a964779b344b11f9edae4b5b7ac6d033"}, + {file = "pyinstrument-4.5.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:944c8a1e8451b9114cff42a0d7d59e482bbf060ccc3ef927d351f8d383f52678"}, + {file = "pyinstrument-4.5.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:80f89f8f296005eb1f8616cd602ffbdf9efcc069e145a35f35654270c2b7641f"}, + {file = "pyinstrument-4.5.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:520c67144da37e93dc03445f8138ef5a9af6f68f89baacb658731d886763f018"}, + {file = "pyinstrument-4.5.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ad9677beb345b3a3fe9967e90dfbbcf458f73ae8fc522fdbfda5bab75a1e5014"}, + {file = "pyinstrument-4.5.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e77f1a708a895f25300f7dc9b4fd5b34218ecc9c7084733d5ebb849e3ff5af99"}, + {file = "pyinstrument-4.5.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:057feb33230caead5bfe25d488060d07065a1bf7f19f5b2004e661a38dddc9e3"}, + {file = "pyinstrument-4.5.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:31e4a23672cfb8f9864bebea6246182d9398a9131606dc53bce124955258705f"}, + {file = "pyinstrument-4.5.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:087ff4e3faca326da071bc73214d73b98c9d7ebea53e70fbe1c033bb6c75f847"}, + {file = "pyinstrument-4.5.3-cp38-cp38-win32.whl", hash = "sha256:e7ab85c0090fd21b5c7910ef01da37be25b574db2cbdc7584e4e2371cb1f13b0"}, + {file = "pyinstrument-4.5.3-cp38-cp38-win_amd64.whl", hash = "sha256:2ef3c856d0ab98372e08e444f6a81efc93dc160d867e3aee1bf4702bd779535d"}, + {file = "pyinstrument-4.5.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f168dfa4328c25c0c3444b62cc8445ac7c0dbbb6cdaf79022267571e12d78d3c"}, + {file = "pyinstrument-4.5.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7241f588e770bfe642cd19e2c8b7560a9cf9e0c2998c0a70ee0ea6333d7404b3"}, + {file = "pyinstrument-4.5.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:093d1119e20fc68a9f991a1de0bc046fb29e996298d0442c928415738b2546ae"}, + {file = "pyinstrument-4.5.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:afa5fdcd65ae4d2c11871da01576c3c2c19f70135f6b107cb7550a334441b4f8"}, + {file = 
"pyinstrument-4.5.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb5f5d219b5f52b33462179ecf33ad8651672bc9410f6f6dfd3edf2095acae42"}, + {file = "pyinstrument-4.5.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c9dc5c501ca01c8649a967442d52eedaee63c52fcdc0fd4fb69974bc4d678978"}, + {file = "pyinstrument-4.5.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:37d25f3aafb4f24080dd4b0966d9a022f660735f8136b7234ec2c7b8ceab14c4"}, + {file = "pyinstrument-4.5.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:33ee99f15ac13d02a0319c2d7671dd2ccc19c615c167a9f5fbba43b50c225102"}, + {file = "pyinstrument-4.5.3-cp39-cp39-win32.whl", hash = "sha256:f467f9308a613fec0be43fa49469ad2f2c99e62e801802e8d59d938acc4acda9"}, + {file = "pyinstrument-4.5.3-cp39-cp39-win_amd64.whl", hash = "sha256:b027951df515c896243145239e91aeb63b19b642d0f4d5ff702a9393dd4736fa"}, + {file = "pyinstrument-4.5.3.tar.gz", hash = "sha256:0885b01a901231d071cb182de33012e9b8cbd958fb048236ee2a6e760c6c6e21"}, ] [package.extras] -jupyter = ["ipython"] +bin = ["click", "nox"] +docs = ["furo (==2021.6.18b36)", "myst-parser (==0.15.1)", "sphinx (==4.2.0)", "sphinxcontrib-programoutput (==0.17)"] +examples = ["django", "numpy"] +test = ["flaky", "greenlet (>=3.0.0a1)", "ipython", "pytest", "pytest-asyncio (==0.12.0)", "sphinx-autobuild (==2021.3.14)", "trio"] +types = ["typing-extensions"] [[package]] name = "pyjwt" @@ -3079,13 +3093,13 @@ files = [ [[package]] name = "pytest" -version = "7.4.0" +version = "7.4.2" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-7.4.0-py3-none-any.whl", hash = "sha256:78bf16451a2eb8c7a2ea98e32dc119fd2aa758f1d5d66dbf0a59d69a3969df32"}, - {file = "pytest-7.4.0.tar.gz", hash = "sha256:b4bf8c45bd59934ed84001ad51e11b4ee40d40a1229d2c79f9c592b0a3f6bd8a"}, + {file = "pytest-7.4.2-py3-none-any.whl", hash = "sha256:1d881c6124e08ff0a1bb75ba3ec0bfd8b5354a01c194ddd5a0a870a48d99b002"}, + {file = "pytest-7.4.2.tar.gz", hash = "sha256:a766259cfab564a2ad52cb1aae1b881a75c3eb7e34ca3779697c23ed47c47069"}, ] [package.dependencies] @@ -3733,13 +3747,13 @@ files = [ [[package]] name = "tox" -version = "4.11.0" +version = "4.11.3" description = "tox is a generic virtualenv management and test command line tool" optional = false python-versions = ">=3.8" files = [ - {file = "tox-4.11.0-py3-none-any.whl", hash = "sha256:7f7e5f1b20115560e610b9a11143bbcf48270ec3293f36c0a18be7b287c3b41f"}, - {file = "tox-4.11.0.tar.gz", hash = "sha256:cc665e1e6b095f843b952ea5696f7a64bb64982aff62b62547ef171fa60e21eb"}, + {file = "tox-4.11.3-py3-none-any.whl", hash = "sha256:599af5e5bb0cad0148ac1558a0b66f8fff219ef88363483b8d92a81e4246f28f"}, + {file = "tox-4.11.3.tar.gz", hash = "sha256:5039f68276461fae6a9452a3b2c7295798f00a0e92edcd9a3b78ba1a73577951"}, ] [package.dependencies] @@ -3860,13 +3874,13 @@ files = [ [[package]] name = "types-pillow" -version = "10.0.0.2" +version = "10.0.0.3" description = "Typing stubs for Pillow" optional = false python-versions = "*" files = [ - {file = "types-Pillow-10.0.0.2.tar.gz", hash = "sha256:fe09380ab22d412ced989a067e9ee4af719fa3a47ba1b53b232b46514a871042"}, - {file = "types_Pillow-10.0.0.2-py3-none-any.whl", hash = "sha256:29d51a3ce6ef51fabf728a504d33b4836187ff14256b2e86996d55c91ab214b1"}, + {file = "types-Pillow-10.0.0.3.tar.gz", hash = "sha256:ae0c877d363da349bbb82c5463c9e78037290cc07d3714cb0ceaf5d2f7f5c825"}, + {file = "types_Pillow-10.0.0.3-py3-none-any.whl", 
hash = "sha256:54a49f3c6a3f5e95ebeee396d7773dde22ce2515d594f9c0596c0a983558f0d4"}, ] [[package]] diff --git a/tests/api/admin/controller/test_collections.py b/tests/api/admin/controller/test_collections.py index 812f63e710..c22d98b754 100644 --- a/tests/api/admin/controller/test_collections.py +++ b/tests/api/admin/controller/test_collections.py @@ -178,7 +178,6 @@ def test_collections_get_collection_protocols( def test_collections_get_collections_with_multiple_collections( self, settings_ctrl_fixture: SettingsControllerFixture ): - old_prior_test_results = HasSelfTests.prior_test_results setattr( HasCollectionSelfTests, @@ -666,6 +665,7 @@ def test_collections_post_edit( ("overdrive_client_key", "user2"), ("overdrive_client_secret", "password"), ("overdrive_website_id", "1234"), + ("max_retry_count", "10"), ( "libraries", json.dumps([{"short_name": "L1", "ils_name": "the_ils"}]), @@ -684,6 +684,11 @@ def test_collections_post_edit( "overdrive_client_key" ) + # Type coercion stays intact + assert 10 == collection.integration_configuration.settings_dict.get( + "max_retry_count" + ) + # A library now has access to the collection. assert [collection] == l1.collections @@ -754,6 +759,50 @@ def test_collections_post_edit( # The collection now has a parent. assert parent == collection.parent + library = settings_ctrl_fixture.ctrl.db.default_library() + collection2 = settings_ctrl_fixture.ctrl.db.collection( + name="Collection 2", protocol=ExternalIntegration.ODL + ) + with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): + flask.request.form = ImmutableMultiDict( + [ + ("id", str(collection2.id)), + ("name", "Collection 2"), + ("protocol", ExternalIntegration.ODL), + ("external_account_id", "1234"), + ("username", "user"), + ("password", "password"), + ("data_source", "datasource"), + ("passphrase_hint", "passphrase_hint"), + ("passphrase_hint_url", "http://passphrase_hint_url.com"), + ( + "libraries", + json.dumps( + [ + { + "short_name": library.short_name, + "ebook_loan_duration": "200", + } + ] + ), + ), + ] + ) + response = ( + settings_ctrl_fixture.manager.admin_collection_settings_controller.process_collections() + ) + assert response.status_code == 200 + + settings_ctrl_fixture.ctrl.db.session.refresh(collection2) + assert len(collection2.integration_configuration.library_configurations) == 1 + # The library configuration value was correctly coerced to int + assert ( + collection2.integration_configuration.library_configurations[ + 0 + ].settings_dict.get("ebook_loan_duration") + == 200 + ) + def _base_collections_post_request(self, collection): """A template for POST requests to the collections controller.""" return [ diff --git a/tests/core/configuration/test_library.py b/tests/core/configuration/test_library.py new file mode 100644 index 0000000000..ad00aa2860 --- /dev/null +++ b/tests/core/configuration/test_library.py @@ -0,0 +1,55 @@ +from functools import partial +from typing import Callable, List, Optional + +import pytest + +from core.configuration.library import LibrarySettings +from core.util.problem_detail import ProblemError + +LibrarySettingsFixture = Callable[..., LibrarySettings] + + +@pytest.fixture +def library_settings() -> LibrarySettingsFixture: + # Provide a default library settings object for tests, it just gives + # default values for required fields, so we can construct the settings + # without worrying about the defaults. 
+ return partial( + LibrarySettings, + website="http://library.com", + help_web="http://library.com/help", + ) + + +@pytest.mark.parametrize( + "languages,expected", + [ + (None, None), + ([], []), + (["English"], ["eng"]), + (["English", "eng", "fr", "fre", "french"], ["eng", "fre"]), + ], +) +def test_validate_language_codes( + languages: Optional[List[str]], + expected: Optional[List[str]], + library_settings: LibrarySettingsFixture, +) -> None: + settings = library_settings(large_collection_languages=languages) + assert settings.large_collection_languages == expected + + settings = library_settings(small_collection_languages=languages) + assert settings.small_collection_languages == expected + + settings = library_settings(tiny_collection_languages=languages) + assert settings.tiny_collection_languages == expected + + +def test_validate_language_codes_error( + library_settings: LibrarySettingsFixture, +) -> None: + with pytest.raises(ProblemError) as excinfo: + library_settings(large_collection_languages=["eng", "xyz"]) + + assert excinfo.value.problem_detail.detail is not None + assert '"xyz" is not a valid language code' in excinfo.value.problem_detail.detail diff --git a/tests/core/models/test_edition.py b/tests/core/models/test_edition.py index 38abdfd4f3..2d107220a8 100644 --- a/tests/core/models/test_edition.py +++ b/tests/core/models/test_edition.py @@ -1,3 +1,6 @@ +import random +import string + from core.model import PresentationCalculationPolicy, get_one_or_create from core.model.constants import MediaTypes from core.model.contributor import Contributor @@ -325,6 +328,78 @@ def test_calculate_presentation_author(self, db: DatabaseTransactionFixture): assert "Kelly Accumulator, Bob A. Bitshifter" == wr.author assert "Accumulator, Kelly ; Bitshifter, Bob" == wr.sort_author + def test_calculate_presentation_very_long_author( + self, db: DatabaseTransactionFixture + ): + authors = [] + + # author names should be unique and not similar to ensure that the + # test mirrors the types of long author lists we'd expect in real data. + def generate_random_author(): + return "".join( + random.choices( + string.ascii_uppercase + string.ascii_lowercase + string.digits, + k=25, + ) + ) + + for i in range(0, 500): + author, ignore = db.contributor( + sort_name=", ".join( + [ + generate_random_author(), + generate_random_author(), + ] + ) + ) + authors.append(author.sort_name) + + untruncated_sort_authors = ", ".join([x for x in sorted(authors)]) + wr = db.edition(authors=authors) + wr.calculate_presentation() + db.session.commit() + + def do_check(original_str: str, truncated_str: str): + assert ( + len(truncated_str) + == Edition.SAFE_AUTHOR_FIELD_LENGTH_TO_AVOID_PG_INDEX_ERROR + ) + assert truncated_str.endswith("...") + assert not original_str.endswith("...") + assert ( + len(original_str) + > Edition.SAFE_AUTHOR_FIELD_LENGTH_TO_AVOID_PG_INDEX_ERROR + ) + + do_check(untruncated_sort_authors, wr.sort_author) + # Since we'd expect the sort_author and auth to be equal (since sort_author is assigned to the + # author field by default if no author is specified) we should verify that the author field also + # passes the check. 
+ do_check(untruncated_sort_authors, wr.author) + + def test_calculate_presentation_shortish_author( + self, db: DatabaseTransactionFixture + ): + authors = [] + author, ignore = db.contributor(sort_name=f"AuthorLast, AuthorFirst") + authors.append(author.sort_name) + wr = db.edition(authors=authors) + author, sort_author = wr.calculate_author() + wr.calculate_presentation() + db.session.commit() + + def do_check(original_str: str, calculated_str: str): + assert calculated_str == original_str + assert not calculated_str.endswith("...") + assert ( + len(original_str) + <= Edition.SAFE_AUTHOR_FIELD_LENGTH_TO_AVOID_PG_INDEX_ERROR + ) + assert not original_str.endswith("...") + + do_check(author, wr.author) + do_check(sort_author, wr.sort_author) + def test_set_summary(self, db: DatabaseTransactionFixture): e, pool = db.edition(with_license_pool=True) work = db.work(presentation_edition=e) diff --git a/tests/core/util/test_notifications.py b/tests/core/util/test_notifications.py index f5a9b52593..c47914ecc4 100644 --- a/tests/core/util/test_notifications.py +++ b/tests/core/util/test_notifications.py @@ -52,6 +52,10 @@ def test_send_loan_notification(self, push_notf_fixture: PushNotificationsFixtur (), { "token": "atoken", + "notification": messaging.Notification( + title="Only 1 day left on your loan!", + body=f"Your loan on {work.presentation_edition.title} is expiring soon", + ), "data": dict( title="Only 1 day left on your loan!", body=f"Your loan on {work.presentation_edition.title} is expiring soon", @@ -176,6 +180,9 @@ def test_holds_notification(self, push_notf_fixture: PushNotificationsFixture): assert messaging.Message.call_args_list == [ mock.call( token="test-token-1", + notification=messaging.Notification( + title=f'Your hold on "{work1.title}" is available!', + ), data=dict( title=f'Your hold on "{work1.title}" is available!', event_type=NotificationConstants.HOLD_AVAILABLE_TYPE, @@ -189,6 +196,9 @@ def test_holds_notification(self, push_notf_fixture: PushNotificationsFixture): ), mock.call( token="test-token-2", + notification=messaging.Notification( + title=f'Your hold on "{work1.title}" is available!', + ), data=dict( title=f'Your hold on "{work1.title}" is available!', event_type=NotificationConstants.HOLD_AVAILABLE_TYPE, @@ -202,6 +212,9 @@ def test_holds_notification(self, push_notf_fixture: PushNotificationsFixture): ), mock.call( token="test-token-3", + notification=messaging.Notification( + title=f'Your hold on "{work2.title}" is available!', + ), data=dict( title=f'Your hold on "{work2.title}" is available!', event_type=NotificationConstants.HOLD_AVAILABLE_TYPE, diff --git a/tests/migration/test_20230905_2b672c6fb2b9.py b/tests/migration/test_20230905_2b672c6fb2b9.py new file mode 100644 index 0000000000..fa3e94a605 --- /dev/null +++ b/tests/migration/test_20230905_2b672c6fb2b9.py @@ -0,0 +1,167 @@ +import json +from typing import Any, Dict + +import pytest +from pytest_alembic import MigrationContext +from sqlalchemy.engine import Connection, Engine + +from tests.migration.conftest import CreateLibrary + + +class CreateConfiguration: + def __call__( + self, + connection: Connection, + goal: str, + protocol: str, + name: str, + settings: Dict[str, Any], + ) -> int: + integration_configuration = connection.execute( + "INSERT INTO integration_configurations (goal, protocol, name, settings, self_test_results) VALUES (%s, %s, %s, %s, '{}') returning id", + goal, + protocol, + name, + json.dumps(settings), + ).fetchone() + assert integration_configuration is not None + 
assert isinstance(integration_configuration.id, int) + return integration_configuration.id + + +@pytest.fixture +def create_integration_configuration() -> CreateConfiguration: + return CreateConfiguration() + + +def fetch_config(connection: Connection, _id: int) -> Dict[str, Any]: + integration_config = connection.execute( + "SELECT settings FROM integration_configurations where id=%s", _id + ).fetchone() + assert integration_config is not None + assert isinstance(integration_config.settings, dict) + return integration_config.settings + + +def fetch_library_config( + connection: Connection, parent_id: int, library_id: int +) -> Dict[str, Any]: + integration_lib_config = connection.execute( + "SELECT parent_id, settings FROM integration_library_configurations where parent_id=%s and library_id=%s", + parent_id, + library_id, + ).fetchone() + assert integration_lib_config is not None + assert isinstance(integration_lib_config.settings, dict) + return integration_lib_config.settings + + +MIGRATION_UID = "2b672c6fb2b9" + + +def test_settings_coersion( + alembic_runner: MigrationContext, + alembic_engine: Engine, + create_library: CreateLibrary, + create_integration_configuration: CreateConfiguration, +) -> None: + alembic_runner.migrate_down_to(MIGRATION_UID) + alembic_runner.migrate_down_one() + + with alembic_engine.connect() as connection: + config_id = create_integration_configuration( + connection, + "LICENSE_GOAL", + "Axis 360", + "axis-test-1", + dict( + verify_certificate="true", + loan_limit="20", + default_reservation_period="12", + key="value", + ), + ) + + # Test 2 library configs, to the same parent + library_id = create_library(connection) + library_id2 = create_library(connection) + + library_settings = dict( + hold_limit="30", + max_retry_count="2", + ebook_loan_duration="10", + default_loan_duration="11", + unchanged="value", + ) + connection.execute( + "INSERT INTO integration_library_configurations (library_id, parent_id, settings) VALUES (%s, %s, %s)", + library_id, + config_id, + json.dumps(library_settings), + ) + library_settings = dict( + hold_limit="31", + max_retry_count="3", + ebook_loan_duration="", + default_loan_duration="12", + unchanged="value1", + ) + connection.execute( + "INSERT INTO integration_library_configurations (library_id, parent_id, settings) VALUES (%s, %s, %s)", + library_id2, + config_id, + json.dumps(library_settings), + ) + + other_config_settings = dict( + verify_certificate="true", + loan_limit="20", + default_reservation_period="12", + key="value", + ) + other_config_id = create_integration_configuration( + connection, "PATRON_AUTH_GOAL", "Other", "other-test", other_config_settings + ) + connection.execute( + "INSERT INTO integration_library_configurations (library_id, parent_id, settings) VALUES (%s, %s, %s)", + library_id2, + other_config_id, + json.dumps(other_config_settings), + ) + + alembic_runner.migrate_up_one() + + axis_config = fetch_config(connection, config_id) + assert axis_config["verify_certificate"] == True + assert axis_config["loan_limit"] == 20 + assert axis_config["default_reservation_period"] == 12 + # Unknown settings remain as-is + assert axis_config["key"] == "value" + + odl_config = fetch_library_config( + connection, parent_id=config_id, library_id=library_id + ) + assert odl_config["hold_limit"] == 30 + assert odl_config["max_retry_count"] == 2 + assert odl_config["ebook_loan_duration"] == 10 + assert odl_config["default_loan_duration"] == 11 + # Unknown settings remain as-is + assert odl_config["unchanged"] == 
"value" + + odl_config2 = fetch_library_config( + connection, parent_id=config_id, library_id=library_id2 + ) + assert odl_config2["hold_limit"] == 31 + assert odl_config2["max_retry_count"] == 3 + assert odl_config2["ebook_loan_duration"] is None + assert odl_config2["default_loan_duration"] == 12 + # Unknown settings remain as-is + assert odl_config2["unchanged"] == "value1" + + # Other integration is unchanged + other_config = fetch_config(connection, other_config_id) + assert other_config == other_config_settings + other_library_config = fetch_library_config( + connection, parent_id=other_config_id, library_id=library_id2 + ) + assert other_library_config == other_config_settings