From 08b030406a1312fa3ed40463645febad749d07b8 Mon Sep 17 00:00:00 2001 From: petechd <53475968+petechd@users.noreply.github.com> Date: Thu, 18 May 2023 10:52:36 +0100 Subject: [PATCH 01/21] Make response expiry date mandatory (#1104) --- app/data_models/metadata_proxy.py | 2 +- app/data_models/questionnaire_store.py | 6 ++---- app/utilities/metadata_parser.py | 2 +- app/utilities/metadata_parser_v2.py | 2 +- tests/app/data_model/conftest.py | 6 +++++- tests/app/data_model/test_metadata_proxy.py | 6 ++++++ tests/app/parser/conftest.py | 10 ++++++++++ tests/app/submitter/conftest.py | 3 +++ tests/app/views/handlers/conftest.py | 6 ++++++ .../test_individual_response_fulfilment_request.py | 2 ++ .../handlers/test_question_with_dynamic_answers.py | 2 ++ tests/functional/jwt_helper.js | 5 +++++ tests/integration/create_token.py | 2 ++ tests/integration/routes/test_errors.py | 2 ++ tests/integration/routes/test_jwt_authentication.py | 2 ++ tests/integration/test_flush_data.py | 2 ++ 16 files changed, 52 insertions(+), 8 deletions(-) diff --git a/app/data_models/metadata_proxy.py b/app/data_models/metadata_proxy.py index 3eed9a85f5..28e12dee3d 100644 --- a/app/data_models/metadata_proxy.py +++ b/app/data_models/metadata_proxy.py @@ -49,11 +49,11 @@ class MetadataProxy: case_id: str collection_exercise_sid: str response_id: str + response_expires_at: datetime survey_metadata: Optional[SurveyMetadata] = None schema_url: Optional[str] = None schema_name: Optional[str] = None language_code: Optional[str] = None - response_expires_at: Optional[datetime] = None channel: Optional[str] = None region_code: Optional[str] = None version: Optional[AuthPayloadVersion] = None diff --git a/app/data_models/questionnaire_store.py b/app/data_models/questionnaire_store.py index f4352112c7..1848a3d4b2 100644 --- a/app/data_models/questionnaire_store.py +++ b/app/data_models/questionnaire_store.py @@ -93,12 +93,10 @@ def save(self) -> None: collection_exercise_sid = ( 
self.collection_exercise_sid or self._metadata["collection_exercise_sid"] ) - response_expires_at = self._metadata.get("response_expires_at") + response_expires_at = self._metadata["response_expires_at"] self._storage.save( data=data, collection_exercise_sid=collection_exercise_sid, submitted_at=self.submitted_at, - expires_at=parse_iso_8601_datetime(response_expires_at) - if response_expires_at - else None, + expires_at=parse_iso_8601_datetime(response_expires_at), ) diff --git a/app/utilities/metadata_parser.py b/app/utilities/metadata_parser.py index 537adae896..f0d2681aa1 100644 --- a/app/utilities/metadata_parser.py +++ b/app/utilities/metadata_parser.py @@ -65,7 +65,7 @@ class RunnerMetadataSchema(Schema, StripWhitespaceMixin): ) # type:ignore case_type = VALIDATORS["string"](required=False) # type:ignore response_expires_at = VALIDATORS["iso_8601_date_string"]( - required=False, + required=True, validate=lambda x: parse_iso_8601_datetime(x) > datetime.now(tz=timezone.utc), ) # type:ignore diff --git a/app/utilities/metadata_parser_v2.py b/app/utilities/metadata_parser_v2.py index 366f997435..51fa8eaa8d 100644 --- a/app/utilities/metadata_parser_v2.py +++ b/app/utilities/metadata_parser_v2.py @@ -89,7 +89,7 @@ class RunnerMetadataSchema(Schema, StripWhitespaceMixin): required=False, validate=validate.Length(min=1) ) # type:ignore response_expires_at = VALIDATORS["iso_8601_date_string"]( - required=False, + required=True, validate=lambda x: parse_iso_8601_datetime(x) > datetime.now(tz=timezone.utc), ) # type:ignore region_code = VALIDATORS["string"]( diff --git a/tests/app/data_model/conftest.py b/tests/app/data_model/conftest.py index 50905ab2f7..4d71f6623d 100644 --- a/tests/app/data_model/conftest.py +++ b/tests/app/data_model/conftest.py @@ -6,6 +6,7 @@ from app.data_models.progress_store import CompletionStatus from app.data_models.session_store import SessionStore from app.storage import storage_encryption +from tests.app.parser.conftest import 
get_response_expires_at @pytest.fixture @@ -81,7 +82,10 @@ def store_to_serialize(answer_store): @pytest.fixture def basic_input(): return { - "METADATA": {"test": True}, + "METADATA": { + "test": True, + "response_expires_at": get_response_expires_at(), + }, "ANSWERS": [{"answer_id": "test", "value": "test"}], "LISTS": [], "PROGRESS": [ diff --git a/tests/app/data_model/test_metadata_proxy.py b/tests/app/data_model/test_metadata_proxy.py index 73804f5860..79dce605c7 100644 --- a/tests/app/data_model/test_metadata_proxy.py +++ b/tests/app/data_model/test_metadata_proxy.py @@ -12,6 +12,7 @@ "tx_id": "tx_id", "collection_exercise_sid": "collection_exercise_sid", "case_id": "case_id", + "response_expires_at": "2023-04-24T10:46:32+00:00", } METADATA_V2 = { @@ -22,6 +23,7 @@ "tx_id": "tx_id", "collection_exercise_sid": "collection_exercise_sid", "case_id": "case_id", + "response_expires_at": "2023-04-24T10:46:32+00:00", "survey_metadata": { "data": { "ru_ref": "432423423423", @@ -46,6 +48,10 @@ MetadataProxy.from_dict(METADATA_V2)["schema_name"], METADATA_V2["schema_name"], ), + ( + MetadataProxy.from_dict(METADATA_V2)["response_expires_at"], + METADATA_V2["response_expires_at"], + ), (MetadataProxy.from_dict(METADATA_V1)["non_existing"], None), (MetadataProxy.from_dict(METADATA_V2)["non_existing"], None), ), diff --git a/tests/app/parser/conftest.py b/tests/app/parser/conftest.py index d408e09103..ab7bbc6e26 100644 --- a/tests/app/parser/conftest.py +++ b/tests/app/parser/conftest.py @@ -1,5 +1,6 @@ # pylint: disable=redefined-outer-name import uuid +from datetime import datetime, timedelta, timezone import pytest @@ -37,6 +38,7 @@ def fake_metadata_runner(): "response_id": str(uuid.uuid4()), "account_service_url": "https://ras.ons.gov.uk", "case_id": str(uuid.uuid4()), + "response_expires_at": get_response_expires_at(), } @@ -48,6 +50,7 @@ def fake_business_metadata_runner(): metadata["eq_id"] = "mbs" metadata["form_type"] = "0253" + metadata["response_expires_at"] = 
get_response_expires_at() return metadata @@ -67,6 +70,7 @@ def fake_metadata_full(): "return_by": "2016-07-07", "case_ref": "1000000000000001", "case_id": str(uuid.uuid4()), + "response_expires_at": get_response_expires_at(), } return dict(fake_metadata_runner(), **fake_questionnaire_claims) @@ -84,6 +88,7 @@ def fake_metadata_runner_v2(): "case_id": str(uuid.uuid4()), "version": AuthPayloadVersion.V2.value, "survey_metadata": {"data": {"key": "value"}}, + "response_expires_at": get_response_expires_at(), } @@ -103,6 +108,7 @@ def fake_metadata_full_v2_business(): "case_ref": "1000000000000001", "ru_ref": "123456789", "form_type": "I", + "response_expires_at": get_response_expires_at(), } metadata = fake_metadata_runner_v2() @@ -140,3 +146,7 @@ def fake_questionnaire_metadata_requirements_full(): {"name": "ref_p_end_date", "type": "string"}, {"name": "account_service_url", "type": "url", "optional": True}, ] + + +def get_response_expires_at() -> str: + return (datetime.now(tz=timezone.utc) + timedelta(days=1)).isoformat() diff --git a/tests/app/submitter/conftest.py b/tests/app/submitter/conftest.py index 67a15ef3e1..c92d9da656 100644 --- a/tests/app/submitter/conftest.py +++ b/tests/app/submitter/conftest.py @@ -15,6 +15,7 @@ from app.questionnaire.questionnaire_schema import QuestionnaireSchema from app.settings import ACCOUNT_SERVICE_BASE_URL_SOCIAL from app.submitter import RabbitMQSubmitter +from tests.app.parser.conftest import get_response_expires_at METADATA_V1 = MetadataProxy.from_dict( { @@ -39,6 +40,7 @@ "display_address": "68 Abingdon Road, Goathill", "case_ref": "1000000000000001", "jti": str(uuid.uuid4()), + "response_expires_at": get_response_expires_at(), } ) @@ -69,6 +71,7 @@ "region_code": "GB-ENG", "channel": "RH", "jti": str(uuid.uuid4()), + "response_expires_at": get_response_expires_at(), } ) diff --git a/tests/app/views/handlers/conftest.py b/tests/app/views/handlers/conftest.py index 003f9e973a..6fde601054 100644 --- 
a/tests/app/views/handlers/conftest.py +++ b/tests/app/views/handlers/conftest.py @@ -11,6 +11,7 @@ from app.data_models.session_data import SessionData from app.data_models.session_store import SessionStore from app.questionnaire import QuestionnaireSchema +from tests.app.parser.conftest import get_response_expires_at time_to_freeze = datetime.now(timezone.utc).replace(second=0, microsecond=0) tx_id = str(uuid.uuid4()) @@ -39,6 +40,7 @@ channel = "H" case_ref = "1000000000000001" region_code = "GB_WLS" +response_expires_at = get_response_expires_at() @pytest.fixture @@ -126,6 +128,7 @@ def metadata(): "region_code": region_code, "case_id": case_id, "language_code": language_code, + "response_expires_at": response_expires_at, } ) @@ -143,6 +146,7 @@ def metadata_v2(): "channel": channel, "region_code": region_code, "account_service_url": "account_service_url", + "response_expires_at": get_response_expires_at(), "survey_metadata": { "data": { "period_id": period_id, @@ -210,6 +214,7 @@ def mock_questionnaire_store(mocker): "schema_name": schema_name, "account_service_url": "account_service_url", "response_id": "response_id", + "response_expires_at": get_response_expires_at(), } ) return questionnaire_store @@ -231,6 +236,7 @@ def mock_questionnaire_store_v2(mocker): "channel": channel, "region_code": region_code, "account_service_url": "account_service_url", + "response_expires_at": get_response_expires_at(), "survey_metadata": { "data": { "period_id": period_id, diff --git a/tests/app/views/handlers/test_individual_response_fulfilment_request.py b/tests/app/views/handlers/test_individual_response_fulfilment_request.py index ae295f8d3f..ff9f6c8007 100644 --- a/tests/app/views/handlers/test_individual_response_fulfilment_request.py +++ b/tests/app/views/handlers/test_individual_response_fulfilment_request.py @@ -14,6 +14,7 @@ GB_WLS_REGION_CODE, IndividualResponseFulfilmentRequest, ) +from tests.app.parser.conftest import get_response_expires_at DUMMY_MOBILE_NUMBER = 
"07700900258" @@ -27,6 +28,7 @@ def test_sms_fulfilment_request_payload(): response_id="response_id", account_service_url="account_service_url", collection_exercise_sid="collection_exercise_sid", + response_expires_at=get_response_expires_at(), ) fulfilment_request = IndividualResponseFulfilmentRequest( diff --git a/tests/app/views/handlers/test_question_with_dynamic_answers.py b/tests/app/views/handlers/test_question_with_dynamic_answers.py index 4e771b7788..22beb7ffbc 100644 --- a/tests/app/views/handlers/test_question_with_dynamic_answers.py +++ b/tests/app/views/handlers/test_question_with_dynamic_answers.py @@ -8,6 +8,7 @@ from app.utilities.schema import load_schema_from_name from app.views.handlers.question import Question +from ...parser.conftest import get_response_expires_at from .conftest import set_storage_data @@ -35,6 +36,7 @@ def test_question_with_dynamic_answers(storage, language, mocker): questionnaire_store.list_store = ListStore( [{"items": ["tUJzGV", "vhECeh"], "name": "supermarkets"}] ) + questionnaire_store.set_metadata({"response_expires_at": get_response_expires_at()}) schema = load_schema_from_name("test_dynamic_answers_list_source") mocker.patch( diff --git a/tests/functional/jwt_helper.js b/tests/functional/jwt_helper.js index 678b7d8d8d..75f458fda9 100644 --- a/tests/functional/jwt_helper.js +++ b/tests/functional/jwt_helper.js @@ -90,6 +90,9 @@ export function generateToken( const iat = KJUR.jws.IntDate.get("now"); const exp = KJUR.jws.IntDate.get("now") + 1800; const caseId = uuidv4(); + const currentDate = new Date(); + currentDate.setUTCDate(currentDate.getUTCDate() + 1); + const isoDate = currentDate.toISOString(); if (version === "v2") { payload = { @@ -106,6 +109,7 @@ export function generateToken( account_service_url: "http://localhost:8000", survey_metadata: getSurveyMetadata(theme, userId, displayAddress, periodId, periodStr), version: "v2", + response_expires_at: isoDate, }; } else { payload = { @@ -131,6 +135,7 @@ export 
function generateToken( region_code: regionCode, language_code: languageCode, account_service_url: "http://localhost:8000", + response_expires_at: isoDate, }; } diff --git a/tests/integration/create_token.py b/tests/integration/create_token.py index 1c88bf92bb..c611ad9918 100644 --- a/tests/integration/create_token.py +++ b/tests/integration/create_token.py @@ -5,6 +5,7 @@ from app.authentication.auth_payload_version import AuthPayloadVersion from app.keys import KEY_PURPOSE_AUTHENTICATION +from tests.app.parser.conftest import get_response_expires_at ACCOUNT_SERVICE_URL = "http://upstream.url" @@ -88,6 +89,7 @@ def _get_payload_with_params( payload_vars["exp"] = payload_vars["iat"] + float(3600) # one hour from now payload_vars["jti"] = str(uuid4()) payload_vars["case_id"] = str(uuid4()) + payload_vars["response_expires_at"] = get_response_expires_at() for key, value in extra_payload.items(): payload_vars[key] = value diff --git a/tests/integration/routes/test_errors.py b/tests/integration/routes/test_errors.py index db4414e8cb..4d5d2502ac 100644 --- a/tests/integration/routes/test_errors.py +++ b/tests/integration/routes/test_errors.py @@ -6,6 +6,7 @@ ACCOUNT_SERVICE_BASE_URL_SOCIAL, ONS_URL, ) +from tests.app.parser.conftest import get_response_expires_at from tests.integration.create_token import ACCOUNT_SERVICE_URL from tests.integration.integration_test_case import IntegrationTestCase @@ -31,6 +32,7 @@ class TestErrors(IntegrationTestCase): # pylint: disable=too-many-public-method "language_code": "en", "account_service_url": "http://correct.place", "roles": [], + "response_expires_at": get_response_expires_at(), } def test_errors_404(self): diff --git a/tests/integration/routes/test_jwt_authentication.py b/tests/integration/routes/test_jwt_authentication.py index ff15a28325..0d1a9eff39 100644 --- a/tests/integration/routes/test_jwt_authentication.py +++ b/tests/integration/routes/test_jwt_authentication.py @@ -13,6 +13,7 @@ TEST_DO_NOT_USE_SR_PUBLIC_KEY, 
TEST_DO_NOT_USE_UPSTREAM_PRIVATE_KEY, ) +from tests.app.parser.conftest import get_response_expires_at from tests.integration.app_context_test_case import AppContextTestCase from tests.integration.integration_test_case import ( EQ_USER_AUTHENTICATION_RRM_PRIVATE_KEY_KID, @@ -85,6 +86,7 @@ def create_payload(): "ru_name": "Test", "return_by": "2016-09-09", "account_service_url": "http://upstream.url/", + "response_expires_at": get_response_expires_at(), } diff --git a/tests/integration/test_flush_data.py b/tests/integration/test_flush_data.py index 3ca7de5d95..9fa232c81e 100644 --- a/tests/integration/test_flush_data.py +++ b/tests/integration/test_flush_data.py @@ -3,6 +3,7 @@ from mock import patch +from tests.app.parser.conftest import get_response_expires_at from tests.integration.integration_test_case import IntegrationTestCase @@ -179,6 +180,7 @@ def test_flush_data_successful_v2( "lists": [], }, "started_at": "2023-02-07T11:42:32.380784+00:00", + "response_expires_at": get_response_expires_at(), } self.launchSurveyV2("test_textfield") form_data = {"name-answer": "Joe Bloggs"} From 18d09f497485e5f5e56e5ab3f528b80a8974dc3f Mon Sep 17 00:00:00 2001 From: petechd <53475968+petechd@users.noreply.github.com> Date: Thu, 18 May 2023 15:40:32 +0100 Subject: [PATCH 02/21] Bind additional contexts to flush requests (#1108) --- app/routes/flush.py | 9 +++++- tests/integration/test_flush_data.py | 47 ++++++++++++++++++++++++++++ 2 files changed, 55 insertions(+), 1 deletion(-) diff --git a/app/routes/flush.py b/app/routes/flush.py index 2ff8897a86..28cb2093b2 100644 --- a/app/routes/flush.py +++ b/app/routes/flush.py @@ -55,7 +55,14 @@ def flush_data() -> Response: user = _get_user(decrypted_token["response_id"]) if metadata := get_metadata(user): - contextvars.bind_contextvars(tx_id=metadata.tx_id) + contextvars.bind_contextvars( + tx_id=metadata.tx_id, + ce_id=metadata.collection_exercise_sid, + ) + if schema_name := metadata.schema_name: + 
contextvars.bind_contextvars(schema_name=schema_name) + if schema_url := metadata.schema_url: + contextvars.bind_contextvars(schema_url=schema_url) if _submit_data(user): return Response(status=200) return Response(status=404) diff --git a/tests/integration/test_flush_data.py b/tests/integration/test_flush_data.py index 9fa232c81e..121c9ad9e1 100644 --- a/tests/integration/test_flush_data.py +++ b/tests/integration/test_flush_data.py @@ -1,11 +1,15 @@ import time import uuid +from httmock import HTTMock, urlmatch from mock import patch +from app.utilities.schema import get_schema_path_map from tests.app.parser.conftest import get_response_expires_at from tests.integration.integration_test_case import IntegrationTestCase +SCHEMA_PATH_MAP = get_schema_path_map(include_test_schemas=True) + class TestFlushData(IntegrationTestCase): def setUp(self): @@ -30,6 +34,14 @@ def tearDown(self): super().tearDown() + @staticmethod + @urlmatch(netloc=r"eq-survey-register", path=r"\/my-test-schema") + def schema_url_mock(_url, _request): + schema_path = SCHEMA_PATH_MAP["test"]["en"]["test_textfield"] + + with open(schema_path, encoding="utf8") as json_data: + return json_data.read() + def test_flush_data_successful(self): self.post( url="/flush?token=" @@ -193,3 +205,38 @@ def test_flush_data_successful_v2( self.assertStatusOK() mock_convert_answers_v2.assert_called_once() mock_convert_answers.assert_not_called() + + def test_flush_logs_output(self): + with self.assertLogs() as logs: + self.post( + url=f"/flush?token={self.token_generator.create_token(schema_name='test_textfield', payload=self.get_payload())}" + ) + + flush_log = logs.output[5] + + self.assertIn("successfully flushed answers", flush_log) + self.assertIn("tx_id", flush_log) + self.assertIn("ce_id", flush_log) + self.assertIn("schema_name", flush_log) + self.assertNotIn("schema_url", flush_log) + + def test_flush_logs_output_schema_url(self): + schema_url = "http://eq-survey-register.url/my-test-schema" + token = 
self.token_generator.create_token_with_schema_url( + "test_textfield", schema_url + ) + with HTTMock(self.schema_url_mock): + self.get(url=f"/session?token={token}") + self.assertStatusOK() + with self.assertLogs() as logs: + self.post( + url=f"/flush?token={self.token_generator.create_token_with_schema_url('test_textfield', schema_url, payload=self.get_payload())}" + ) + + flush_log = logs.output[6] + + self.assertIn("successfully flushed answers", flush_log) + self.assertIn("tx_id", flush_log) + self.assertIn("ce_id", flush_log) + self.assertIn("schema_name", flush_log) + self.assertIn("schema_url", flush_log) From cee23dc0833e88ab6d85eb46ef9974878db09f7f Mon Sep 17 00:00:00 2001 From: Mebin Abraham <35296336+MebinAbraham@users.noreply.github.com> Date: Tue, 23 May 2023 10:16:52 +0100 Subject: [PATCH 03/21] Schemas v3.56.0 (#1110) --- .schemas-version | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.schemas-version b/.schemas-version index 304e303931..436ebfe052 100644 --- a/.schemas-version +++ b/.schemas-version @@ -1 +1 @@ -v3.55.0 +v3.56.0 From 36fb6c7bb45d04c9cdf363a6169d4c6d258e4039 Mon Sep 17 00:00:00 2001 From: Mebin Abraham <35296336+MebinAbraham@users.noreply.github.com> Date: Thu, 25 May 2023 13:35:41 +0100 Subject: [PATCH 04/21] Schemas v3.57.0 (#1113) --- .schemas-version | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.schemas-version b/.schemas-version index 436ebfe052..5af1c91d2a 100644 --- a/.schemas-version +++ b/.schemas-version @@ -1 +1 @@ -v3.56.0 +v3.57.0 From 566a5262646b821932729917aff5dbacf5a4c991 Mon Sep 17 00:00:00 2001 From: Rhys Berrow Date: Tue, 23 May 2023 09:50:08 +0100 Subject: [PATCH 05/21] Add support for SDS integration --- ...ad_version.py => auth_payload_versions.py} | 4 + app/data_models/metadata_proxy.py | 2 +- app/routes/flush.py | 2 +- app/routes/session.py | 79 +++++++- app/submitter/converter_v2.py | 2 +- app/utilities/metadata_parser_v2.py | 2 +- app/utilities/prepop_parser.py | 72 
+++++++ app/views/handlers/feedback.py | 2 +- app/views/handlers/submission.py | 2 +- schemas/test/en/test_prepop.json | 61 ++++++ scripts/mock_sds_endpoint.py | 135 ++++++++++++++ tests/app/conftest.py | 34 ++++ tests/app/data_model/test_metadata_proxy.py | 2 +- tests/app/parser/conftest.py | 2 +- tests/app/parser/test_metadata_parser.py | 2 +- tests/app/parser/test_prepop_parser.py | 175 ++++++++++++++++++ .../test_value_source_resolver.py | 2 +- tests/app/submitter/conftest.py | 2 +- .../submitter/test_convert_payload_0_0_1.py | 2 +- .../submitter/test_convert_payload_0_0_3.py | 2 +- tests/app/submitter/test_converter.py | 2 +- tests/app/test_request_prepop_data.py | 141 ++++++++++++++ tests/app/utilities/test_schema.py | 31 ---- tests/app/views/handlers/conftest.py | 2 +- .../views/handlers/test_feedback_upload.py | 2 +- .../views/handlers/test_submission_handler.py | 2 +- tests/integration/create_token.py | 35 +++- tests/integration/integration_test_case.py | 11 ++ tests/integration/routes/test_session.py | 6 + 29 files changed, 768 insertions(+), 50 deletions(-) rename app/authentication/{auth_payload_version.py => auth_payload_versions.py} (58%) create mode 100644 app/utilities/prepop_parser.py create mode 100644 schemas/test/en/test_prepop.json create mode 100644 scripts/mock_sds_endpoint.py create mode 100644 tests/app/parser/test_prepop_parser.py create mode 100644 tests/app/test_request_prepop_data.py diff --git a/app/authentication/auth_payload_version.py b/app/authentication/auth_payload_versions.py similarity index 58% rename from app/authentication/auth_payload_version.py rename to app/authentication/auth_payload_versions.py index fe4d338d37..f357ee6050 100644 --- a/app/authentication/auth_payload_version.py +++ b/app/authentication/auth_payload_versions.py @@ -3,3 +3,7 @@ class AuthPayloadVersion(Enum): V2 = "v2" + + +class PrepopSchemaVersion(Enum): + V1 = "v1" diff --git a/app/data_models/metadata_proxy.py b/app/data_models/metadata_proxy.py index 
28e12dee3d..a3b2414674 100644 --- a/app/data_models/metadata_proxy.py +++ b/app/data_models/metadata_proxy.py @@ -7,7 +7,7 @@ from werkzeug.datastructures import ImmutableDict -from app.authentication.auth_payload_version import AuthPayloadVersion +from app.authentication.auth_payload_versions import AuthPayloadVersion from app.utilities.make_immutable import make_immutable diff --git a/app/routes/flush.py b/app/routes/flush.py index 28cb2093b2..e99cbb6b89 100644 --- a/app/routes/flush.py +++ b/app/routes/flush.py @@ -7,7 +7,7 @@ from sdc.crypto.key_store import KeyStore from structlog import contextvars, get_logger -from app.authentication.auth_payload_version import AuthPayloadVersion +from app.authentication.auth_payload_versions import AuthPayloadVersion from app.authentication.user import User from app.authentication.user_id_generator import UserIDGenerator from app.data_models import QuestionnaireStore diff --git a/app/routes/session.py b/app/routes/session.py index 56c2653a69..0ca3eaa9bc 100644 --- a/app/routes/session.py +++ b/app/routes/session.py @@ -1,17 +1,21 @@ +import json from datetime import datetime, timezone from typing import Any, Iterable, Mapping +import requests from flask import Blueprint, g, jsonify, redirect, request from flask import session as cookie_session from flask import url_for from flask_login import login_required, logout_user from marshmallow import INCLUDE, ValidationError +from requests import RequestException +from requests.adapters import HTTPAdapter, Retry from sdc.crypto.exceptions import InvalidTokenException from structlog import contextvars, get_logger from werkzeug.exceptions import Unauthorized from werkzeug.wrappers.response import Response -from app.authentication.auth_payload_version import AuthPayloadVersion +from app.authentication.auth_payload_versions import AuthPayloadVersion from app.authentication.authenticator import decrypt_token, store_session from app.authentication.jti_claim_storage import JtiTokenUsed, 
use_jti_claim from app.data_models.metadata_proxy import MetadataProxy @@ -28,12 +32,31 @@ validate_questionnaire_claims, validate_runner_claims_v2, ) +from app.utilities.prepop_parser import validate_prepop_data_v1 from app.utilities.schema import load_schema_from_metadata logger = get_logger() session_blueprint = Blueprint("session", __name__) +PREPOP_URL = "" +PREPOP_REQUEST_MAX_BACKOFF = 0.2 +PREPOP_REQUEST_MAX_RETRIES = 2 # Totals no. of request should be 3. The initial request + PREPOP_REQUEST_MAX_RETRIES +PREPOP_REQUEST_TIMEOUT = 3 +PREPOP_REQUEST_RETRY_STATUS_CODES = [ + 408, + 429, + 500, + 502, + 503, + 504, +] + + +class PrepopRequestFailed(Exception): + def __str__(self) -> str: + return "Prepop request failed" + @session_blueprint.after_request def add_cache_control(response: Response) -> Response: @@ -128,9 +151,63 @@ def login() -> Response: cookie_session["language_code"] = metadata.language_code + if (dataset_id := metadata["sds_dataset_id"]) and ru_ref: + get_prepop_data(prepop_url=PREPOP_URL, dataset_id=dataset_id, ru_ref=ru_ref) + return redirect(url_for("questionnaire.get_questionnaire")) +def get_prepop_data(prepop_url: str, dataset_id: str, ru_ref: str) -> dict: + constructed_prepop_url = f"{prepop_url}?dataset_id={dataset_id}&unit_id={ru_ref}" + + session = requests.Session() + + retries = Retry( + total=PREPOP_REQUEST_MAX_RETRIES, + status_forcelist=PREPOP_REQUEST_RETRY_STATUS_CODES, + ) # Codes to retry according to Google Docs https://cloud.google.com/storage/docs/retry-strategy#client-libraries + + # Type ignore: MyPy does not recognise BACKOFF_MAX however it is a property, albeit deprecated + retries.BACKOFF_MAX = PREPOP_REQUEST_MAX_BACKOFF # type: ignore + + session.mount("http://", HTTPAdapter(max_retries=retries)) + session.mount("https://", HTTPAdapter(max_retries=retries)) + + try: + response = session.get(constructed_prepop_url, timeout=PREPOP_REQUEST_TIMEOUT) + except RequestException as exc: + logger.exception( + "Error 
requesting prepopulated data", + prepop_url=constructed_prepop_url, + ) + raise PrepopRequestFailed from exc + + if response.status_code == 200: + prepop_response_content = response.content.decode() + prepop_data = json.loads(prepop_response_content) + + return validate_prepop_data( + prepop_data=prepop_data, dataset_id=dataset_id, ru_ref=ru_ref + ) + + logger.error( + "got a non-200 response for prepop data request", + status_code=response.status_code, + schema_url=constructed_prepop_url, + ) + + raise PrepopRequestFailed + + +def validate_prepop_data(prepop_data: Mapping, dataset_id: str, ru_ref: str) -> dict: + try: + return validate_prepop_data_v1( + prepop_data=prepop_data, dataset_id=dataset_id, ru_ref=ru_ref + ) + except ValidationError as e: + raise ValidationError("Invalid prepopulation data") from e + + def validate_jti(decrypted_token: dict[str, str | list | int]) -> None: # Type ignore: decrypted_token["exp"] will return a valid timestamp with compatible typing expires_at = datetime.fromtimestamp(decrypted_token["exp"], tz=timezone.utc) # type: ignore diff --git a/app/submitter/converter_v2.py b/app/submitter/converter_v2.py index c2e88745b2..e393890b61 100644 --- a/app/submitter/converter_v2.py +++ b/app/submitter/converter_v2.py @@ -3,7 +3,7 @@ from structlog import get_logger -from app.authentication.auth_payload_version import AuthPayloadVersion +from app.authentication.auth_payload_versions import AuthPayloadVersion from app.data_models import AnswerStore, ListStore, ProgressStore, QuestionnaireStore from app.data_models.metadata_proxy import MetadataProxy, NoMetadataException from app.questionnaire.questionnaire_schema import ( diff --git a/app/utilities/metadata_parser_v2.py b/app/utilities/metadata_parser_v2.py index 51fa8eaa8d..41ff03a10c 100644 --- a/app/utilities/metadata_parser_v2.py +++ b/app/utilities/metadata_parser_v2.py @@ -14,7 +14,7 @@ ) from structlog import get_logger -from app.authentication.auth_payload_version import 
AuthPayloadVersion +from app.authentication.auth_payload_versions import AuthPayloadVersion from app.questionnaire.rules.utils import parse_iso_8601_datetime from app.utilities.metadata_validators import DateString, RegionCode, UUIDString diff --git a/app/utilities/prepop_parser.py b/app/utilities/prepop_parser.py new file mode 100644 index 0000000000..925840b224 --- /dev/null +++ b/app/utilities/prepop_parser.py @@ -0,0 +1,72 @@ +from typing import Mapping + +from marshmallow import ( + EXCLUDE, + INCLUDE, + Schema, + ValidationError, + fields, + validate, + validates_schema, +) + +from app.authentication.auth_payload_versions import PrepopSchemaVersion +from app.utilities.metadata_parser_v2 import VALIDATORS, StripWhitespaceMixin + + +class ItemsSchema(Schema): + identifier = VALIDATORS["string"](validate=validate.Length(min=1)) + + +class ItemsData(Schema, StripWhitespaceMixin): + pass + + +class PrepopData(Schema, StripWhitespaceMixin): + identifier = VALIDATORS["string"](validate=validate.Length(min=1)) + schema_version = VALIDATORS["string"]( + validate=validate.OneOf([PrepopSchemaVersion.V1.value]) + ) + items = fields.Nested(ItemsData, required=False, unknown=EXCLUDE) + + @validates_schema() + def validate_unit_id(self, data, **kwargs): + # pylint: disable=no-self-use, unused-argument + if ( + data + and data.get("identifier") + and data["identifier"] != self.context["ru_ref"] + ): + raise ValidationError("Prepop data did not return the specified Unit ID") + + +class PrepopMetadataSchema(Schema, StripWhitespaceMixin): + dataset_id = VALIDATORS["string"](validate=validate.Length(min=1)) + survey_id = VALIDATORS["string"](validate=validate.Length(min=1)) + data = fields.Nested( + PrepopData, required=True, unknown=EXCLUDE, validate=validate.Length(min=1) + ) + + @validates_schema() + def validate_dataset_id(self, data, **kwargs): + # pylint: disable=no-self-use, unused-argument + if ( + data + and data.get("dataset_id") + and data["dataset_id"] != 
self.context["dataset_id"] + ): + raise ValidationError("Prepop data did not return the specified Dataset ID") + + +def validate_prepop_data_v1(prepop_data: Mapping, dataset_id: str, ru_ref: str) -> dict: + """Validate claims required for runner to function""" + prepop_metadata_schema = PrepopMetadataSchema(unknown=INCLUDE) + prepop_metadata_schema.context = {"dataset_id": dataset_id, "ru_ref": ru_ref} + validated_prepop_data = prepop_metadata_schema.load(prepop_data) + + if prepop_items := prepop_data.get("data", {}).get("items"): + for key, values in prepop_items.items(): + items = [ItemsSchema(unknown=INCLUDE).load(value) for value in values] + validated_prepop_data["data"]["items"][key] = items + + return validated_prepop_data diff --git a/app/views/handlers/feedback.py b/app/views/handlers/feedback.py index b13b163754..8f34d0bbf1 100644 --- a/app/views/handlers/feedback.py +++ b/app/views/handlers/feedback.py @@ -7,7 +7,7 @@ from sdc.crypto.encrypter import encrypt from werkzeug.datastructures import MultiDict -from app.authentication.auth_payload_version import AuthPayloadVersion +from app.authentication.auth_payload_versions import AuthPayloadVersion from app.data_models import QuestionnaireStore from app.data_models.metadata_proxy import MetadataProxy, NoMetadataException from app.data_models.session_data import SessionData diff --git a/app/views/handlers/submission.py b/app/views/handlers/submission.py index 97857f6578..8fd24d4a38 100644 --- a/app/views/handlers/submission.py +++ b/app/views/handlers/submission.py @@ -5,7 +5,7 @@ from flask import session as cookie_session from sdc.crypto.encrypter import encrypt -from app.authentication.auth_payload_version import AuthPayloadVersion +from app.authentication.auth_payload_versions import AuthPayloadVersion from app.data_models.metadata_proxy import MetadataProxy from app.globals import get_session_store from app.keys import KEY_PURPOSE_SUBMISSION diff --git a/schemas/test/en/test_prepop.json 
b/schemas/test/en/test_prepop.json new file mode 100644 index 0000000000..88d1df69ff --- /dev/null +++ b/schemas/test/en/test_prepop.json @@ -0,0 +1,61 @@ +{ + "mime_type": "application/json/ons/eq", + "language": "en", + "schema_version": "0.0.1", + "data_version": "0.0.3", + "survey_id": "0", + "title": "Test Prepop Data", + "theme": "default", + "description": "A questionnaire to demo loading prepop data.", + "metadata": [ + { + "name": "user_id", + "type": "string" + }, + { + "name": "period_id", + "type": "string" + }, + { + "name": "ru_name", + "type": "string" + }, + { + "name": "sds_dataset_id", + "type": "string" + } + ], + "questionnaire_flow": { + "type": "Linear", + "options": { + "summary": { + "collapsible": false + } + } + }, + "sections": [ + { + "id": "interstitial-section", + "groups": [ + { + "blocks": [ + { + "id": "interstitial-definition", + "content": { + "title": "Prepop", + "contents": [ + { + "description": "You have successfully loaded Prepop data" + } + ] + }, + "type": "Interstitial" + } + ], + "id": "interstitial", + "title": "Interstitial Definition" + } + ] + } + ] +} diff --git a/scripts/mock_sds_endpoint.py b/scripts/mock_sds_endpoint.py new file mode 100644 index 0000000000..9cdff6cf32 --- /dev/null +++ b/scripts/mock_sds_endpoint.py @@ -0,0 +1,135 @@ +from flask import Flask, request + +app = Flask(__name__) + +PREPOP_TILES_AND_SLATE_PAYLOAD = { + "dataset_id": "001", + "survey_id": "123", + "data": { + "schema_version": "v1", + "identifier": "12346789012A", + "items": { + "local_units": [ + { + "identifier": "0001", + "lu_name": "TEST NAME. 
1", + "lu_address": [ + "FIRST ADDRESS 1", + "FIRST ADDRESS 2", + "TOWN", + "COUNTY", + "POST CODE", + ], + }, + { + "identifier": "0002", + "lu_name": "TEST NAME 2", + "lu_address": [ + "SECOND ADDRESS 1", + "SECOND ADDRESS 1", + "TOWN", + "COUNTY", + "POSTCODE", + ], + }, + ] + }, + }, +} + +PREPOP_PRODCOM_PAYLOAD = { + "dataset_id": "002", + "survey_id": "123", + "data": { + "schema_version": "v1", + "identifier": "12346789012A", + "note": { + "title": "Volume of total production", + "description": "Figures should cover the total quantity of the goods produced during the period of the return", + }, + "items": { + "products": [ + { + "identifier": "89929001", + "name": "Articles and equipment for sports or outdoor games", + "cn_codes": "2504 + 250610 + 2512 + 2519 + 2524", + "guidance_include": { + "title": "Include", + "list": [ + "for children's playgrounds", + "swimming pools and paddling pools", + ], + }, + "guidance_exclude": { + "title": "Exclude", + "list": [ + "sports holdalls, gloves, clothing of textile materials, footwear, protective eyewear, rackets, balls, skates", + "for skiing, water sports, golf, fishing', for skiing, water sports, golf, fishing, table tennis, PE, gymnastics, athletics", + ], + }, + "value_sales": { + "answer_code": "89929001", + "label": "Value of sales", + }, + "volume_sales": { + "answer_code": "89929002", + "label": "Volume of sales", + "unit_label": "Tonnes", + }, + "total_volume": { + "answer_code": "89929005", + "label": "Total volume produced", + "unit_label": "Tonnes", + }, + }, + { + "identifier": "201630601", + "name": "Other Minerals", + "cn_codes": "5908 + 5910 + 591110 + 591120 + 591140", + "guidance_include": { + "title": "Include", + "list": [ + "natural graphite", + "quartz for industrial use", + "diatomite; magnesia; feldspar", + "magnesite; natural magnesium carbonate", + "talc including steatite and chlorite", + "unexpanded vermiculite and perlite", + ], + }, + "guidance_exclude": { + "title": "Exclude", + 
"list": ["natural quartz sands"], + }, + "value_sales": { + "answer_code": "201630601", + "label": "Value of sales", + }, + "volume_sales": { + "answer_code": "201630602", + "label": "Volume of sales", + "unit_label": "Kilogram", + }, + "total_volume": { + "answer_code": "201630605", + "label": "Total volume produced", + "unit_label": "Kilogram", + }, + }, + ] + }, + }, +} + + +@app.route("/v1/unit_data") +def get_sds_data(): + dataset_id = request.args.get("dataset_id") + + if dataset_id == "002": + return PREPOP_PRODCOM_PAYLOAD + return PREPOP_TILES_AND_SLATE_PAYLOAD + + +if __name__ == "__main__": + app.run(host="localhost", port=5003) diff --git a/tests/app/conftest.py b/tests/app/conftest.py index 6c6a2cff45..fd6a04c60d 100644 --- a/tests/app/conftest.py +++ b/tests/app/conftest.py @@ -1,10 +1,14 @@ # pylint: disable=redefined-outer-name from datetime import datetime, timedelta, timezone +from http.client import HTTPMessage import fakeredis import pytest from mock import MagicMock +from mock.mock import Mock +from requests.adapters import ConnectTimeoutError, ReadTimeoutError +from urllib3.connectionpool import HTTPConnectionPool, HTTPResponse from app.data_models import QuestionnaireStore from app.data_models.answer_store import AnswerStore @@ -189,3 +193,33 @@ def current_location(): @pytest.fixture def mock_autoescape_context(mocker): return mocker.Mock(autoescape=True) + + +@pytest.fixture(name="mocked_response_content") +def mocked_response_content_fixture(mocker): + decodable_content = Mock() + decodable_content.decode.return_value = b"{}" + mocker.patch("requests.models.Response.content", decodable_content) + + +@pytest.fixture(name="mocked_make_request_with_timeout") +def mocked_make_request_with_timeout_fixture( + mocker, mocked_response_content # pylint: disable=unused-argument +): + connect_timeout_error = ConnectTimeoutError("connect timed out") + read_timeout_error = ReadTimeoutError( + pool=None, message="read timed out", url="test-url" + ) + + 
response_not_timed_out = HTTPResponse(status=200, headers={}, msg=HTTPMessage()) + response_not_timed_out.drain_conn = Mock(return_value=None) + + return mocker.patch.object( + HTTPConnectionPool, + "_make_request", + side_effect=[ + connect_timeout_error, + read_timeout_error, + response_not_timed_out, + ], + ) diff --git a/tests/app/data_model/test_metadata_proxy.py b/tests/app/data_model/test_metadata_proxy.py index 79dce605c7..93c5cd603c 100644 --- a/tests/app/data_model/test_metadata_proxy.py +++ b/tests/app/data_model/test_metadata_proxy.py @@ -1,7 +1,7 @@ import pytest from werkzeug.datastructures import ImmutableDict -from app.authentication.auth_payload_version import AuthPayloadVersion +from app.authentication.auth_payload_versions import AuthPayloadVersion from app.data_models.metadata_proxy import MetadataProxy, SurveyMetadata METADATA_V1 = { diff --git a/tests/app/parser/conftest.py b/tests/app/parser/conftest.py index ab7bbc6e26..e1c13e6e85 100644 --- a/tests/app/parser/conftest.py +++ b/tests/app/parser/conftest.py @@ -4,7 +4,7 @@ import pytest -from app.authentication.auth_payload_version import AuthPayloadVersion +from app.authentication.auth_payload_versions import AuthPayloadVersion def get_metadata(version): diff --git a/tests/app/parser/test_metadata_parser.py b/tests/app/parser/test_metadata_parser.py index 74121bfc2c..10b44e1350 100644 --- a/tests/app/parser/test_metadata_parser.py +++ b/tests/app/parser/test_metadata_parser.py @@ -4,7 +4,7 @@ from freezegun import freeze_time from marshmallow import ValidationError -from app.authentication.auth_payload_version import AuthPayloadVersion +from app.authentication.auth_payload_versions import AuthPayloadVersion from app.utilities.metadata_parser import validate_runner_claims from app.utilities.metadata_parser_v2 import ( validate_questionnaire_claims, diff --git a/tests/app/parser/test_prepop_parser.py b/tests/app/parser/test_prepop_parser.py new file mode 100644 index 0000000000..210798a55e --- 
/dev/null +++ b/tests/app/parser/test_prepop_parser.py @@ -0,0 +1,175 @@ +import pytest +from marshmallow import ValidationError + +from app.routes.session import validate_prepop_data +from app.utilities.prepop_parser import validate_prepop_data_v1 + +PREPOP_PAYLOAD = { + "dataset_id": "001", + "survey_id": "123", + "data": { + "schema_version": "v1", + "identifier": "12346789012A", + "items": { + "local_units": [ + { + "identifier": "0001", + "lu_name": "TEST NAME. 1", + "lu_address": [ + "FIRST ADDRESS 1", + "FIRST ADDRESS 2", + "TOWN", + "COUNTY", + "POST CODE", + ], + }, + { + "identifier": "0002", + "lu_name": "TEST NAME 2", + "lu_address": [ + "SECOND ADDRESS 1", + "SECOND ADDRESS 1", + "TOWN", + "COUNTY", + "POSTCODE", + ], + }, + ] + }, + }, +} + + +def test_invalid_prepop_data_payload_raises_error(): + with pytest.raises(ValidationError): + validate_prepop_data(prepop_data={}, dataset_id="001", ru_ref="12346789012A") + + +def test_validate_prepop_payload(): + validated_payload = validate_prepop_data_v1( + prepop_data=PREPOP_PAYLOAD, dataset_id="001", ru_ref="12346789012A" + ) + + assert validated_payload == PREPOP_PAYLOAD + + +def test_validate_prepop_payload_incorrect_dataset_id(): + with pytest.raises(ValidationError): + validate_prepop_data_v1( + prepop_data=PREPOP_PAYLOAD, dataset_id="002", ru_ref="12346789012A" + ) + + +def test_validate_prepop_payload_incorrect_ru_ref(): + with pytest.raises(ValidationError): + validate_prepop_data_v1( + prepop_data=PREPOP_PAYLOAD, dataset_id="001", ru_ref="000000000001" + ) + + +def test_prepop_payload_with_no_items_is_validated(): + payload = { + "dataset_id": "001", + "survey_id": "123", + "data": { + "schema_version": "v1", + "identifier": "12346789012A", + }, + } + + validated_payload = validate_prepop_data_v1( + prepop_data=payload, dataset_id="001", ru_ref="12346789012A" + ) + + assert validated_payload == payload + + +def test_validate_prepop_payload_missing_survey_id(): + payload = { + "dataset_id": "001", + 
"data": { + "schema_version": "v1", + "identifier": "12346789012A", + }, + } + + with pytest.raises(ValidationError): + validate_prepop_data_v1( + prepop_data=payload, dataset_id="001", ru_ref="12346789012A" + ) + + +def test_validate_prepop_payload_with_unknown_field(): + payload = { + "dataset_id": "001", + "survey_id": "123", + "some_field": "value", + "data": { + "schema_version": "v1", + "identifier": "12346789012A", + }, + } + + validated_payload = validate_prepop_data_v1( + prepop_data=payload, dataset_id="001", ru_ref="12346789012A" + ) + + assert validated_payload == payload + + +def test_validate_prepop_invalid_schema_version(): + payload = { + "dataset_id": "001", + "survey_id": "123", + "some_field": "value", + "data": { + "schema_version": "v2", + "identifier": "12346789012A", + }, + } + + with pytest.raises(ValidationError): + validate_prepop_data_v1( + prepop_data=payload, dataset_id="001", ru_ref="12346789012A" + ) + + +def test_validate_prepop_payload_missing_identifier_in_items(): + payload = { + "dataset_id": "001", + "survey_id": "123", + "data": { + "schema_version": "v1", + "identifier": "12346789012A", + "items": { + "local_units": [ + { + "identifier": "0001", + "lu_name": "TEST NAME. 
1", + "lu_address": [ + "FIRST ADDRESS 1", + "FIRST ADDRESS 2", + "TOWN", + "COUNTY", + "POST CODE", + ], + }, + { + "lu_name": "TEST NAME 2", + "lu_address": [ + "SECOND ADDRESS 1", + "SECOND ADDRESS 1", + "TOWN", + "COUNTY", + "POSTCODE", + ], + }, + ] + }, + }, + } + + with pytest.raises(ValidationError): + validate_prepop_data_v1( + prepop_data=payload, dataset_id="001", ru_ref="12346789012A" + ) diff --git a/tests/app/questionnaire/test_value_source_resolver.py b/tests/app/questionnaire/test_value_source_resolver.py index 38ff197d53..3da939f1f1 100644 --- a/tests/app/questionnaire/test_value_source_resolver.py +++ b/tests/app/questionnaire/test_value_source_resolver.py @@ -3,7 +3,7 @@ import pytest from mock import Mock -from app.authentication.auth_payload_version import AuthPayloadVersion +from app.authentication.auth_payload_versions import AuthPayloadVersion from app.data_models import AnswerStore, ListStore, ProgressStore from app.data_models.answer import Answer, AnswerDict from app.data_models.metadata_proxy import MetadataProxy, NoMetadataException diff --git a/tests/app/submitter/conftest.py b/tests/app/submitter/conftest.py index c92d9da656..08116ee25e 100644 --- a/tests/app/submitter/conftest.py +++ b/tests/app/submitter/conftest.py @@ -7,7 +7,7 @@ from mock import MagicMock from requests import Response -from app.authentication.auth_payload_version import AuthPayloadVersion +from app.authentication.auth_payload_versions import AuthPayloadVersion from app.data_models import QuestionnaireStore from app.data_models.answer import Answer from app.data_models.answer_store import AnswerStore diff --git a/tests/app/submitter/test_convert_payload_0_0_1.py b/tests/app/submitter/test_convert_payload_0_0_1.py index 6c14a3a4a2..52a1315376 100644 --- a/tests/app/submitter/test_convert_payload_0_0_1.py +++ b/tests/app/submitter/test_convert_payload_0_0_1.py @@ -2,7 +2,7 @@ import pytest -from app.authentication.auth_payload_version import AuthPayloadVersion +from 
app.authentication.auth_payload_versions import AuthPayloadVersion from app.data_models.answer import Answer from app.data_models.answer_store import AnswerStore from app.questionnaire.questionnaire_schema import QuestionnaireSchema diff --git a/tests/app/submitter/test_convert_payload_0_0_3.py b/tests/app/submitter/test_convert_payload_0_0_3.py index aadf9d8862..bb7ac7fddc 100644 --- a/tests/app/submitter/test_convert_payload_0_0_3.py +++ b/tests/app/submitter/test_convert_payload_0_0_3.py @@ -3,7 +3,7 @@ import pytest -from app.authentication.auth_payload_version import AuthPayloadVersion +from app.authentication.auth_payload_versions import AuthPayloadVersion from app.data_models.answer import Answer from app.data_models.answer_store import AnswerStore from app.data_models.list_store import ListStore diff --git a/tests/app/submitter/test_converter.py b/tests/app/submitter/test_converter.py index 6dde299be1..d9634a4593 100644 --- a/tests/app/submitter/test_converter.py +++ b/tests/app/submitter/test_converter.py @@ -2,7 +2,7 @@ import pytest -from app.authentication.auth_payload_version import AuthPayloadVersion +from app.authentication.auth_payload_versions import AuthPayloadVersion from app.questionnaire.questionnaire_schema import QuestionnaireSchema from app.submitter.converter import convert_answers from app.submitter.converter_v2 import ( diff --git a/tests/app/test_request_prepop_data.py b/tests/app/test_request_prepop_data.py new file mode 100644 index 0000000000..0937301796 --- /dev/null +++ b/tests/app/test_request_prepop_data.py @@ -0,0 +1,141 @@ +import pytest +import responses +from requests import RequestException + +from app.routes.session import ( + PREPOP_REQUEST_MAX_RETRIES, + PrepopRequestFailed, + get_prepop_data, +) +from tests.app.utilities.test_schema import get_mocked_make_request + +TEST_SDS_URL = "http://test.domain/v1/unit_data" + +mock_prepop_payload = { + "dataset_id": "001", + "survey_id": "123", + "data": { + "schema_version": "v1", 
+ "identifier": "12346789012A", + "items": { + "local_units": [ + { + "identifier": "0001", + "lu_name": "TEST NAME. 1", + "lu_address": [ + "FIRST ADDRESS 1", + "FIRST ADDRESS 2", + "TOWN", + "COUNTY", + "POST CODE", + ], + }, + { + "identifier": "0002", + "lu_name": "TEST NAME 2", + "lu_address": [ + "SECOND ADDRESS 1", + "SECOND ADDRESS 1", + "TOWN", + "COUNTY", + "POSTCODE", + ], + }, + ] + }, + }, +} + + +@responses.activate +def test_get_prepop_data_200(): + responses.add(responses.GET, TEST_SDS_URL, json=mock_prepop_payload, status=200) + loaded_prepop_data = get_prepop_data( + prepop_url=TEST_SDS_URL, dataset_id="001", ru_ref="12346789012A" + ) + + assert loaded_prepop_data == mock_prepop_payload + + +@pytest.mark.parametrize( + "status_code", + [401, 403, 404, 501, 511], +) +@responses.activate +def test_get_prepop_data_non_200(status_code): + responses.add( + responses.GET, TEST_SDS_URL, json=mock_prepop_payload, status=status_code + ) + + with pytest.raises(PrepopRequestFailed) as exc: + get_prepop_data( + prepop_url=TEST_SDS_URL, dataset_id="001", ru_ref="12346789012A" + ) + + assert str(exc.value) == "Prepop request failed" + + +@responses.activate +def test_get_prepop_data_request_failed(): + responses.add(responses.GET, TEST_SDS_URL, body=RequestException()) + with pytest.raises(PrepopRequestFailed) as exc: + get_prepop_data( + prepop_url=TEST_SDS_URL, dataset_id="001", ru_ref="12346789012A" + ) + + assert str(exc.value) == "Prepop request failed" + + +def test_get_prepop_data_retries_timeout_error( + mocker, mocked_make_request_with_timeout +): + mocker.patch( + "app.routes.session.validate_prepop_data", return_value=mock_prepop_payload + ) + + try: + prepop_data = get_prepop_data( + prepop_url=TEST_SDS_URL, dataset_id="001", ru_ref="12346789012A" + ) + except PrepopRequestFailed: + return pytest.fail("Prepop request unexpectedly failed") + + assert prepop_data == mock_prepop_payload + + expected_call = PREPOP_REQUEST_MAX_RETRIES + 1 # Max retries + 
the initial request + assert mocked_make_request_with_timeout.call_count == expected_call + + +@pytest.mark.usefixtures("mocked_response_content") +def test_get_prepop_data_retries_transient_error(mocker): + mocked_make_request = get_mocked_make_request(mocker, status_codes=[500, 500, 200]) + + mocker.patch( + "app.routes.session.validate_prepop_data", return_value=mock_prepop_payload + ) + + try: + prepop_data = get_prepop_data( + prepop_url=TEST_SDS_URL, dataset_id="001", ru_ref="12346789012A" + ) + except PrepopRequestFailed: + return pytest.fail("Prepop request unexpectedly failed") + + assert prepop_data == mock_prepop_payload + + expected_call = PREPOP_REQUEST_MAX_RETRIES + 1 # Max retries + the initial request + assert mocked_make_request.call_count == expected_call + + +def test_get_prepop_data_max_retries(mocker): + mocked_make_request = get_mocked_make_request( + mocker, status_codes=[500, 500, 500, 500] + ) + + with pytest.raises(PrepopRequestFailed) as exc: + get_prepop_data( + prepop_url=TEST_SDS_URL, dataset_id="001", ru_ref="12346789012A" + ) + + assert str(exc.value) == "Prepop request failed" + assert mocked_make_request.call_count == 3 diff --git a/tests/app/utilities/test_schema.py b/tests/app/utilities/test_schema.py index c93689431a..2df35289c2 100644 --- a/tests/app/utilities/test_schema.py +++ b/tests/app/utilities/test_schema.py @@ -5,7 +5,6 @@ import responses from mock import Mock, patch from requests import RequestException -from requests.adapters import ConnectTimeoutError, ReadTimeoutError from urllib3.connectionpool import HTTPConnectionPool, HTTPResponse from app.questionnaire import QuestionnaireSchema @@ -250,13 +249,6 @@ def test_load_schema_from_metadata_with_schema_url_and_override_language_code(): assert loaded_schema.language_code == language_code -@pytest.fixture(name="mocked_response_content") -def mocked_response_content_fixture(mocker): - decodable_content = Mock() - decodable_content.decode.return_value = b"{}" - 
mocker.patch("requests.models.Response.content", decodable_content) - - def get_mocked_make_request(mocker, status_codes): mocked_responses = [] for status_code in status_codes: @@ -279,29 +271,6 @@ def get_mocked_make_request(mocker, status_codes): return patched_make_request -@pytest.fixture(name="mocked_make_request_with_timeout") -def mocked_make_request_with_timeout_fixture( - mocker, mocked_response_content # pylint: disable=unused-argument -): - connect_timeout_error = ConnectTimeoutError("connect timed out") - read_timeout_error = ReadTimeoutError( - pool=None, message="read timed out", url="test-url" - ) - - response_not_timed_out = HTTPResponse(status=200, headers={}, msg=HTTPMessage()) - response_not_timed_out.drain_conn = Mock(return_value=None) - - return mocker.patch.object( - HTTPConnectionPool, - "_make_request", - side_effect=[ - connect_timeout_error, - read_timeout_error, - response_not_timed_out, - ], - ) - - def test_load_schema_from_url_retries_timeout_error(mocked_make_request_with_timeout): load_schema_from_url.cache_clear() diff --git a/tests/app/views/handlers/conftest.py b/tests/app/views/handlers/conftest.py index 6fde601054..8ac724a725 100644 --- a/tests/app/views/handlers/conftest.py +++ b/tests/app/views/handlers/conftest.py @@ -5,7 +5,7 @@ from freezegun import freeze_time from mock import Mock -from app.authentication.auth_payload_version import AuthPayloadVersion +from app.authentication.auth_payload_versions import AuthPayloadVersion from app.data_models import QuestionnaireStore from app.data_models.metadata_proxy import MetadataProxy from app.data_models.session_data import SessionData diff --git a/tests/app/views/handlers/test_feedback_upload.py b/tests/app/views/handlers/test_feedback_upload.py index 3f4415d265..89904b4119 100644 --- a/tests/app/views/handlers/test_feedback_upload.py +++ b/tests/app/views/handlers/test_feedback_upload.py @@ -2,7 +2,7 @@ from freezegun import freeze_time -from 
app.authentication.auth_payload_version import AuthPayloadVersion +from app.authentication.auth_payload_versions import AuthPayloadVersion from app.questionnaire.questionnaire_schema import DEFAULT_LANGUAGE_CODE from app.views.handlers.feedback import ( FeedbackMetadata, diff --git a/tests/app/views/handlers/test_submission_handler.py b/tests/app/views/handlers/test_submission_handler.py index c4466ce2a3..6ce4431ffa 100644 --- a/tests/app/views/handlers/test_submission_handler.py +++ b/tests/app/views/handlers/test_submission_handler.py @@ -3,7 +3,7 @@ import pytest from freezegun import freeze_time -from app.authentication.auth_payload_version import AuthPayloadVersion +from app.authentication.auth_payload_versions import AuthPayloadVersion from app.data_models.session_store import SessionStore from app.questionnaire.questionnaire_schema import QuestionnaireSchema from app.utilities.schema import load_schema_from_name diff --git a/tests/integration/create_token.py b/tests/integration/create_token.py index c611ad9918..603346dc56 100644 --- a/tests/integration/create_token.py +++ b/tests/integration/create_token.py @@ -3,7 +3,7 @@ from sdc.crypto.encrypter import encrypt -from app.authentication.auth_payload_version import AuthPayloadVersion +from app.authentication.auth_payload_versions import AuthPayloadVersion from app.keys import KEY_PURPOSE_AUTHENTICATION from tests.app.parser.conftest import get_response_expires_at @@ -51,6 +51,30 @@ "account_service_url": ACCOUNT_SERVICE_URL, } +PAYLOAD_V2_PREPOP = { + "version": AuthPayloadVersion.V2.value, + "survey_metadata": { + "data": { + "user_id": "integration-test", + "period_str": "April 2016", + "period_id": "201604", + "ru_ref": "123456789012A", + "ru_name": "Integration Testing", + "ref_p_start_date": "2016-04-01", + "ref_p_end_date": "2016-04-30", + "trad_as": "Integration Tests", + "employment_date": "1983-06-02", + "display_address": "68 Abingdon Road, Goathill", + "sds_dataset_id": "001", + } + }, + 
"collection_exercise_sid": "789", + "response_id": "1234567890123456", + "language_code": "en", + "roles": [], + "account_service_url": ACCOUNT_SERVICE_URL, +} + PAYLOAD_V2_SOCIAL = { "version": AuthPayloadVersion.V2.value, "survey_metadata": { @@ -113,6 +137,15 @@ def create_token_v2(self, schema_name, theme="default", **extra_payload): return self.generate_token(payload) + def create_prepop_token(self, schema_name, **extra_payload): + payload = PAYLOAD_V2_PREPOP + + payload = self._get_payload_with_params( + schema_name=schema_name, payload=payload, **extra_payload + ) + + return self.generate_token(payload) + def create_token_invalid_version(self, schema_name, **extra_payload): payload = self._get_payload_with_params( schema_name=schema_name, payload=PAYLOAD_V2_BUSINESS, **extra_payload diff --git a/tests/integration/integration_test_case.py b/tests/integration/integration_test_case.py index 44f0f08709..e10001a7c9 100644 --- a/tests/integration/integration_test_case.py +++ b/tests/integration/integration_test_case.py @@ -127,6 +127,17 @@ def launchSurvey(self, schema_name="test_dates", **payload_kwargs): self.get(f"/session?token={token}") + def launchPrepopSurvey(self, schema_name="test_prepop", **payload_kwargs): + """ + Launch a survey as an authenticated user and follow re-directs + :param schema_name: The name of the schema to load + """ + token = self.token_generator.create_prepop_token( + schema_name=schema_name, **payload_kwargs + ) + + self.get(f"/session?token={token}") + def launchSurveyV2( self, theme="default", schema_name="test_dates", **payload_kwargs ): diff --git a/tests/integration/routes/test_session.py b/tests/integration/routes/test_session.py index 64379e468c..6762005014 100644 --- a/tests/integration/routes/test_session.py +++ b/tests/integration/routes/test_session.py @@ -2,6 +2,7 @@ from datetime import datetime, timedelta, timezone from freezegun import freeze_time +from mock.mock import patch from app.questionnaire.questionnaire_schema 
import DEFAULT_LANGUAGE_CODE from app.settings import ACCOUNT_SERVICE_BASE_URL, ACCOUNT_SERVICE_BASE_URL_SOCIAL @@ -88,6 +89,11 @@ def test_patch_session_expiry_extends_session(self): self.assertIn("expires_at", parsed_json) self.assertEqual(parsed_json["expires_at"], expected_expires_at) + def test_prepop_data_is_loaded_when_sds_dataset_id_in_metadata(self): + with patch("app.routes.session.get_prepop_data", return_value={}): + self.launchPrepopSurvey() + self.assertStatusOK() + class TestCensusSession(IntegrationTestCase): def setUp(self): From ee92cf54e19b80f4216f4ac42d9036b91da57b29 Mon Sep 17 00:00:00 2001 From: Rhys Berrow Date: Fri, 26 May 2023 08:53:57 +0100 Subject: [PATCH 06/21] Use supplementary data instead of prepop --- app/routes/session.py | 71 ++++---- ...parser.py => supplementary_data_parser.py} | 23 ++- ...epop.json => test_supplementary_data.json} | 0 scripts/mock_sds_endpoint.py | 8 +- ...r.py => test_supplementary_data_parser.py} | 68 ++++---- tests/app/test_request_prepop_data.py | 141 ---------------- tests/app/test_request_supplementary_data.py | 152 ++++++++++++++++++ tests/integration/create_token.py | 6 +- tests/integration/integration_test_case.py | 6 +- tests/integration/routes/test_session.py | 4 +- 10 files changed, 260 insertions(+), 219 deletions(-) rename app/utilities/{prepop_parser.py => supplementary_data_parser.py} (74%) rename schemas/test/en/{test_prepop.json => test_supplementary_data.json} (100%) rename tests/app/parser/{test_prepop_parser.py => test_supplementary_data_parser.py} (62%) delete mode 100644 tests/app/test_request_prepop_data.py create mode 100644 tests/app/test_request_supplementary_data.py diff --git a/app/routes/session.py b/app/routes/session.py index 0ca3eaa9bc..ddbd1ca526 100644 --- a/app/routes/session.py +++ b/app/routes/session.py @@ -32,18 +32,18 @@ validate_questionnaire_claims, validate_runner_claims_v2, ) -from app.utilities.prepop_parser import validate_prepop_data_v1 from app.utilities.schema 
import load_schema_from_metadata +from app.utilities.supplementary_data_parser import validate_supplementary_data_v1 logger = get_logger() session_blueprint = Blueprint("session", __name__) -PREPOP_URL = "" -PREPOP_REQUEST_MAX_BACKOFF = 0.2 -PREPOP_REQUEST_MAX_RETRIES = 2 # Totals no. of request should be 3. The initial request + PREPOP_REQUEST_MAX_RETRIES -PREPOP_REQUEST_TIMEOUT = 3 -PREPOP_REQUEST_RETRY_STATUS_CODES = [ +SUPPLEMENTARY_DATA_URL = "http://localhost:5003/v1/unit_data" +SUPPLEMENTARY_DATA_REQUEST_MAX_BACKOFF = 0.2 +SUPPLEMENTARY_DATA_REQUEST_MAX_RETRIES = 2 # Totals no. of request should be 3. The initial request + SUPPLEMENTARY_DATA_REQUEST_MAX_RETRIES +SUPPLEMENTARY_DATA_REQUEST_TIMEOUT = 3 +SUPPLEMENTARY_DATA_REQUEST_RETRY_STATUS_CODES = [ 408, 429, 500, @@ -53,9 +53,9 @@ ] -class PrepopRequestFailed(Exception): +class SupplementaryDataRequestFailed(Exception): def __str__(self) -> str: - return "Prepop request failed" + return "Supplementary Data request failed" @session_blueprint.after_request @@ -152,60 +152,73 @@ def login() -> Response: cookie_session["language_code"] = metadata.language_code if (dataset_id := metadata["sds_dataset_id"]) and ru_ref: - get_prepop_data(prepop_url=PREPOP_URL, dataset_id=dataset_id, ru_ref=ru_ref) + get_supplementary_data( + supplementary_data_url=SUPPLEMENTARY_DATA_URL, + dataset_id=dataset_id, + ru_ref=ru_ref, + ) return redirect(url_for("questionnaire.get_questionnaire")) -def get_prepop_data(prepop_url: str, dataset_id: str, ru_ref: str) -> dict: - constructed_prepop_url = f"{prepop_url}?dataset_id={dataset_id}&unit_id={ru_ref}" +def get_supplementary_data( + supplementary_data_url: str, dataset_id: str, ru_ref: str +) -> dict: + constructed_supplementary_data_url = ( + f"{supplementary_data_url}?dataset_id={dataset_id}&unit_id={ru_ref}" + ) session = requests.Session() retries = Retry( - total=PREPOP_REQUEST_MAX_RETRIES, - status_forcelist=PREPOP_REQUEST_RETRY_STATUS_CODES, + 
total=SUPPLEMENTARY_DATA_REQUEST_MAX_RETRIES, + status_forcelist=SUPPLEMENTARY_DATA_REQUEST_RETRY_STATUS_CODES, ) # Codes to retry according to Google Docs https://cloud.google.com/storage/docs/retry-strategy#client-libraries # Type ignore: MyPy does not recognise BACKOFF_MAX however it is a property, albeit deprecated - retries.BACKOFF_MAX = PREPOP_REQUEST_MAX_BACKOFF # type: ignore + retries.BACKOFF_MAX = SUPPLEMENTARY_DATA_REQUEST_MAX_BACKOFF # type: ignore session.mount("http://", HTTPAdapter(max_retries=retries)) session.mount("https://", HTTPAdapter(max_retries=retries)) try: - response = session.get(constructed_prepop_url, timeout=PREPOP_REQUEST_TIMEOUT) + response = session.get( + constructed_supplementary_data_url, + timeout=SUPPLEMENTARY_DATA_REQUEST_TIMEOUT, + ) except RequestException as exc: logger.exception( - "Error requesting prepopulated data", - prepop_url=constructed_prepop_url, + "Error requesting supplementary data", + supplementary_data_url=constructed_supplementary_data_url, ) - raise PrepopRequestFailed from exc + raise SupplementaryDataRequestFailed from exc if response.status_code == 200: - prepop_response_content = response.content.decode() - prepop_data = json.loads(prepop_response_content) + supplementary_data_response_content = response.content.decode() + supplementary_data = json.loads(supplementary_data_response_content) - return validate_prepop_data( - prepop_data=prepop_data, dataset_id=dataset_id, ru_ref=ru_ref + return validate_supplementary_data( + supplementary_data=supplementary_data, dataset_id=dataset_id, ru_ref=ru_ref ) logger.error( - "got a non-200 response for prepop data request", + "got a non-200 response for supplementary data request", status_code=response.status_code, - schema_url=constructed_prepop_url, + schema_url=constructed_supplementary_data_url, ) - raise PrepopRequestFailed + raise SupplementaryDataRequestFailed -def validate_prepop_data(prepop_data: Mapping, dataset_id: str, ru_ref: str) -> dict: +def 
validate_supplementary_data( + supplementary_data: Mapping, dataset_id: str, ru_ref: str +) -> dict: try: - return validate_prepop_data_v1( - prepop_data=prepop_data, dataset_id=dataset_id, ru_ref=ru_ref + return validate_supplementary_data_v1( + supplementary_data=supplementary_data, dataset_id=dataset_id, ru_ref=ru_ref ) except ValidationError as e: - raise ValidationError("Invalid prepopulation data") from e + raise ValidationError("Invalid supplementary_dataulation data") from e def validate_jti(decrypted_token: dict[str, str | list | int]) -> None: diff --git a/app/utilities/prepop_parser.py b/app/utilities/supplementary_data_parser.py similarity index 74% rename from app/utilities/prepop_parser.py rename to app/utilities/supplementary_data_parser.py index 925840b224..82f2f587e4 100644 --- a/app/utilities/prepop_parser.py +++ b/app/utilities/supplementary_data_parser.py @@ -58,15 +58,22 @@ def validate_dataset_id(self, data, **kwargs): raise ValidationError("Prepop data did not return the specified Dataset ID") -def validate_prepop_data_v1(prepop_data: Mapping, dataset_id: str, ru_ref: str) -> dict: +def validate_supplementary_data_v1( + supplementary_data: Mapping, dataset_id: str, ru_ref: str +) -> dict: """Validate claims required for runner to function""" - prepop_metadata_schema = PrepopMetadataSchema(unknown=INCLUDE) - prepop_metadata_schema.context = {"dataset_id": dataset_id, "ru_ref": ru_ref} - validated_prepop_data = prepop_metadata_schema.load(prepop_data) + supplementary_data_metadata_schema = PrepopMetadataSchema(unknown=INCLUDE) + supplementary_data_metadata_schema.context = { + "dataset_id": dataset_id, + "ru_ref": ru_ref, + } + validated_supplementary_data = supplementary_data_metadata_schema.load( + supplementary_data + ) - if prepop_items := prepop_data.get("data", {}).get("items"): - for key, values in prepop_items.items(): + if supplementary_data_items := supplementary_data.get("data", {}).get("items"): + for key, values in 
supplementary_data_items.items(): items = [ItemsSchema(unknown=INCLUDE).load(value) for value in values] - validated_prepop_data["data"]["items"][key] = items + validated_supplementary_data["data"]["items"][key] = items - return validated_prepop_data + return validated_supplementary_data diff --git a/schemas/test/en/test_prepop.json b/schemas/test/en/test_supplementary_data.json similarity index 100% rename from schemas/test/en/test_prepop.json rename to schemas/test/en/test_supplementary_data.json diff --git a/scripts/mock_sds_endpoint.py b/scripts/mock_sds_endpoint.py index 9cdff6cf32..a5bb7fe8f3 100644 --- a/scripts/mock_sds_endpoint.py +++ b/scripts/mock_sds_endpoint.py @@ -2,7 +2,7 @@ app = Flask(__name__) -PREPOP_TILES_AND_SLATE_PAYLOAD = { +SUPPLEMENTARY_DATA_TILES_AND_SLATE_PAYLOAD = { "dataset_id": "001", "survey_id": "123", "data": { @@ -37,7 +37,7 @@ }, } -PREPOP_PRODCOM_PAYLOAD = { +SUPPLEMENTARY_DATA_PRODCOM_PAYLOAD = { "dataset_id": "002", "survey_id": "123", "data": { @@ -127,8 +127,8 @@ def get_sds_data(): dataset_id = request.args.get("dataset_id") if dataset_id == "002": - return PREPOP_PRODCOM_PAYLOAD - return PREPOP_TILES_AND_SLATE_PAYLOAD + return SUPPLEMENTARY_DATA_PRODCOM_PAYLOAD + return SUPPLEMENTARY_DATA_TILES_AND_SLATE_PAYLOAD if __name__ == "__main__": diff --git a/tests/app/parser/test_prepop_parser.py b/tests/app/parser/test_supplementary_data_parser.py similarity index 62% rename from tests/app/parser/test_prepop_parser.py rename to tests/app/parser/test_supplementary_data_parser.py index 210798a55e..cd9d8b3ae8 100644 --- a/tests/app/parser/test_prepop_parser.py +++ b/tests/app/parser/test_supplementary_data_parser.py @@ -1,10 +1,10 @@ import pytest from marshmallow import ValidationError -from app.routes.session import validate_prepop_data -from app.utilities.prepop_parser import validate_prepop_data_v1 +from app.routes.session import validate_supplementary_data +from app.utilities.supplementary_data_parser import 
validate_supplementary_data_v1 -PREPOP_PAYLOAD = { +SUPPLEMENTARY_DATA_PAYLOAD = { "dataset_id": "001", "survey_id": "123", "data": { @@ -40,34 +40,42 @@ } -def test_invalid_prepop_data_payload_raises_error(): +def test_invalid_supplementary_data_payload_raises_error(): with pytest.raises(ValidationError): - validate_prepop_data(prepop_data={}, dataset_id="001", ru_ref="12346789012A") + validate_supplementary_data( + supplementary_data={}, dataset_id="001", ru_ref="12346789012A" + ) -def test_validate_prepop_payload(): - validated_payload = validate_prepop_data_v1( - prepop_data=PREPOP_PAYLOAD, dataset_id="001", ru_ref="12346789012A" +def test_validate_supplementary_data_payload(): + validated_payload = validate_supplementary_data_v1( + supplementary_data=SUPPLEMENTARY_DATA_PAYLOAD, + dataset_id="001", + ru_ref="12346789012A", ) - assert validated_payload == PREPOP_PAYLOAD + assert validated_payload == SUPPLEMENTARY_DATA_PAYLOAD -def test_validate_prepop_payload_incorrect_dataset_id(): +def test_validate_supplementary_data_payload_incorrect_dataset_id(): with pytest.raises(ValidationError): - validate_prepop_data_v1( - prepop_data=PREPOP_PAYLOAD, dataset_id="002", ru_ref="12346789012A" + validate_supplementary_data_v1( + supplementary_data=SUPPLEMENTARY_DATA_PAYLOAD, + dataset_id="002", + ru_ref="12346789012A", ) -def test_validate_prepop_payload_incorrect_ru_ref(): +def test_validate_supplementary_data_payload_incorrect_ru_ref(): with pytest.raises(ValidationError): - validate_prepop_data_v1( - prepop_data=PREPOP_PAYLOAD, dataset_id="001", ru_ref="000000000001" + validate_supplementary_data_v1( + supplementary_data=SUPPLEMENTARY_DATA_PAYLOAD, + dataset_id="001", + ru_ref="000000000001", ) -def test_prepop_payload_with_no_items_is_validated(): +def test_supplementary_data_payload_with_no_items_is_validated(): payload = { "dataset_id": "001", "survey_id": "123", @@ -77,14 +85,14 @@ def test_prepop_payload_with_no_items_is_validated(): }, } - validated_payload = 
validate_prepop_data_v1( - prepop_data=payload, dataset_id="001", ru_ref="12346789012A" + validated_payload = validate_supplementary_data_v1( + supplementary_data=payload, dataset_id="001", ru_ref="12346789012A" ) assert validated_payload == payload -def test_validate_prepop_payload_missing_survey_id(): +def test_validate_supplementary_data_payload_missing_survey_id(): payload = { "dataset_id": "001", "data": { @@ -94,12 +102,12 @@ def test_validate_prepop_payload_missing_survey_id(): } with pytest.raises(ValidationError): - validate_prepop_data_v1( - prepop_data=payload, dataset_id="001", ru_ref="12346789012A" + validate_supplementary_data_v1( + supplementary_data=payload, dataset_id="001", ru_ref="12346789012A" ) -def test_validate_prepop_payload_with_unknown_field(): +def test_validate_supplementary_data_payload_with_unknown_field(): payload = { "dataset_id": "001", "survey_id": "123", @@ -110,14 +118,14 @@ def test_validate_prepop_payload_with_unknown_field(): }, } - validated_payload = validate_prepop_data_v1( - prepop_data=payload, dataset_id="001", ru_ref="12346789012A" + validated_payload = validate_supplementary_data_v1( + supplementary_data=payload, dataset_id="001", ru_ref="12346789012A" ) assert validated_payload == payload -def test_validate_prepop_invalid_schema_version(): +def test_validate_supplementary_data_invalid_schema_version(): payload = { "dataset_id": "001", "survey_id": "123", @@ -129,12 +137,12 @@ def test_validate_prepop_invalid_schema_version(): } with pytest.raises(ValidationError): - validate_prepop_data_v1( - prepop_data=payload, dataset_id="001", ru_ref="12346789012A" + validate_supplementary_data_v1( + supplementary_data=payload, dataset_id="001", ru_ref="12346789012A" ) -def test_validate_prepop_payload_missing_identifier_in_items(): +def test_validate_supplementary_data_payload_missing_identifier_in_items(): payload = { "dataset_id": "001", "survey_id": "123", @@ -170,6 +178,6 @@ def 
test_validate_prepop_payload_missing_identifier_in_items(): } with pytest.raises(ValidationError): - validate_prepop_data_v1( - prepop_data=payload, dataset_id="001", ru_ref="12346789012A" + validate_supplementary_data_v1( + supplementary_data=payload, dataset_id="001", ru_ref="12346789012A" ) diff --git a/tests/app/test_request_prepop_data.py b/tests/app/test_request_prepop_data.py deleted file mode 100644 index 0937301796..0000000000 --- a/tests/app/test_request_prepop_data.py +++ /dev/null @@ -1,141 +0,0 @@ -import pytest -import responses -from requests import RequestException - -from app.routes.session import ( - PREPOP_REQUEST_MAX_RETRIES, - PrepopRequestFailed, - get_prepop_data, -) -from tests.app.utilities.test_schema import get_mocked_make_request - -TEST_SDS_URL = "http://test.domain/v1/unit_data" - -mock_prepop_payload = { - "dataset_id": "001", - "survey_id": "123", - "data": { - "schema_version": "v1", - "identifier": "12346789012A", - "items": { - "local_units": [ - { - "identifier": "0001", - "lu_name": "TEST NAME. 
1", - "lu_address": [ - "FIRST ADDRESS 1", - "FIRST ADDRESS 2", - "TOWN", - "COUNTY", - "POST CODE", - ], - }, - { - "identifier": "0002", - "lu_name": "TEST NAME 2", - "lu_address": [ - "SECOND ADDRESS 1", - "SECOND ADDRESS 1", - "TOWN", - "COUNTY", - "POSTCODE", - ], - }, - ] - }, - }, -} - - -@responses.activate -def test_get_prepop_data_200(): - responses.add(responses.GET, TEST_SDS_URL, json=mock_prepop_payload, status=200) - loaded_prepop_data = get_prepop_data( - prepop_url=TEST_SDS_URL, dataset_id="001", ru_ref="12346789012A" - ) - - assert loaded_prepop_data == mock_prepop_payload - - -@pytest.mark.parametrize( - "status_code", - [401, 403, 404, 501, 511], -) -@responses.activate -def test_get_prepop_data_non_200(status_code): - responses.add( - responses.GET, TEST_SDS_URL, json=mock_prepop_payload, status=status_code - ) - - with pytest.raises(PrepopRequestFailed) as exc: - get_prepop_data( - prepop_url=TEST_SDS_URL, dataset_id="001", ru_ref="12346789012A" - ) - - assert str(exc.value) == "Prepop request failed" - - -@responses.activate -def test_get_prepop_data_request_failed(): - responses.add(responses.GET, TEST_SDS_URL, body=RequestException()) - with pytest.raises(PrepopRequestFailed) as exc: - get_prepop_data( - prepop_url=TEST_SDS_URL, dataset_id="001", ru_ref="12346789012A" - ) - - assert str(exc.value) == "Prepop request failed" - - -def test_get_prepop_data_retries_timeout_error( - mocker, mocked_make_request_with_timeout -): - mocker.patch( - "app.routes.session.validate_prepop_data", return_value=mock_prepop_payload - ) - - try: - prepop_data = get_prepop_data( - prepop_url=TEST_SDS_URL, dataset_id="001", ru_ref="12346789012A" - ) - except PrepopRequestFailed: - return pytest.fail("Prepop request unexpectedly failed") - - assert prepop_data == mock_prepop_payload - - expected_call = PREPOP_REQUEST_MAX_RETRIES + 1 # Max retries + the initial request - assert mocked_make_request_with_timeout.call_count == expected_call - - 
-@pytest.mark.usefixtures("mocked_response_content") -def test_get_prepop_data_retries_transient_error(mocker): - mocked_make_request = get_mocked_make_request(mocker, status_codes=[500, 500, 200]) - - mocker.patch( - "app.routes.session.validate_prepop_data", return_value=mock_prepop_payload - ) - - try: - prepop_data = get_prepop_data( - prepop_url=TEST_SDS_URL, dataset_id="001", ru_ref="12346789012A" - ) - except PrepopRequestFailed: - return pytest.fail("Prepop request unexpectedly failed") - - assert prepop_data == mock_prepop_payload - - expected_call = PREPOP_REQUEST_MAX_RETRIES + 1 # Max retries + the initial request - assert mocked_make_request.call_count == expected_call - - -def test_get_prepop_data_max_retries(mocker): - mocked_make_request = get_mocked_make_request( - mocker, status_codes=[500, 500, 500, 500] - ) - - with pytest.raises(PrepopRequestFailed) as exc: - get_prepop_data( - prepop_url=TEST_SDS_URL, dataset_id="001", ru_ref="12346789012A" - ) - - assert str(exc.value) == "Prepop request failed" - assert mocked_make_request.call_count == 3 diff --git a/tests/app/test_request_supplementary_data.py b/tests/app/test_request_supplementary_data.py new file mode 100644 index 0000000000..bdef68fc98 --- /dev/null +++ b/tests/app/test_request_supplementary_data.py @@ -0,0 +1,152 @@ +import pytest +import responses +from requests import RequestException + +from app.routes.session import ( + SUPPLEMENTARY_DATA_REQUEST_MAX_RETRIES, + SupplementaryDataRequestFailed, + get_supplementary_data, +) +from tests.app.utilities.test_schema import get_mocked_make_request + +TEST_SDS_URL = "http://test.domain/v1/unit_data" + +mock_supplementary_data_payload = { + "dataset_id": "001", + "survey_id": "123", + "data": { + "schema_version": "v1", + "identifier": "12346789012A", + "items": { + "local_units": [ + { + "identifier": "0001", + "lu_name": "TEST NAME. 
1", + "lu_address": [ + "FIRST ADDRESS 1", + "FIRST ADDRESS 2", + "TOWN", + "COUNTY", + "POST CODE", + ], + }, + { + "identifier": "0002", + "lu_name": "TEST NAME 2", + "lu_address": [ + "SECOND ADDRESS 1", + "SECOND ADDRESS 1", + "TOWN", + "COUNTY", + "POSTCODE", + ], + }, + ] + }, + }, +} + + +@responses.activate +def test_get_supplementary_data_200(): + responses.add( + responses.GET, TEST_SDS_URL, json=mock_supplementary_data_payload, status=200 + ) + loaded_supplementary_data = get_supplementary_data( + supplementary_data_url=TEST_SDS_URL, dataset_id="001", ru_ref="12346789012A" + ) + + assert loaded_supplementary_data == mock_supplementary_data_payload + + +@pytest.mark.parametrize( + "status_code", + [401, 403, 404, 501, 511], +) +@responses.activate +def test_get_supplementary_data_non_200(status_code): + responses.add( + responses.GET, + TEST_SDS_URL, + json=mock_supplementary_data_payload, + status=status_code, + ) + + with pytest.raises(SupplementaryDataRequestFailed) as exc: + get_supplementary_data( + supplementary_data_url=TEST_SDS_URL, dataset_id="001", ru_ref="12346789012A" + ) + + assert str(exc.value) == "Supplementary Data request failed" + + +@responses.activate +def test_get_supplementary_data_request_failed(): + responses.add(responses.GET, TEST_SDS_URL, body=RequestException()) + with pytest.raises(SupplementaryDataRequestFailed) as exc: + get_supplementary_data( + supplementary_data_url=TEST_SDS_URL, dataset_id="001", ru_ref="12346789012A" + ) + + assert str(exc.value) == "Supplementary Data request failed" + + +def test_get_supplementary_data_retries_timeout_error( + mocker, mocked_make_request_with_timeout +): + mocker.patch( + "app.routes.session.validate_supplementary_data", + return_value=mock_supplementary_data_payload, + ) + + try: + supplementary_data = get_supplementary_data( + supplementary_data_url=TEST_SDS_URL, dataset_id="001", ru_ref="12346789012A" + ) + except SupplementaryDataRequestFailed: + return pytest.fail("Supplementary 
data request unexpectedly failed") + + assert supplementary_data == mock_supplementary_data_payload + + expected_call = ( + SUPPLEMENTARY_DATA_REQUEST_MAX_RETRIES + 1 + ) # Max retries + the initial request + assert mocked_make_request_with_timeout.call_count == expected_call + + +@pytest.mark.usefixtures("mocked_response_content") +def test_get_supplementary_data_retries_transient_error(mocker): + mocked_make_request = get_mocked_make_request(mocker, status_codes=[500, 500, 200]) + + mocker.patch( + "app.routes.session.validate_supplementary_data", + return_value=mock_supplementary_data_payload, + ) + + try: + supplementary_data = get_supplementary_data( + supplementary_data_url=TEST_SDS_URL, dataset_id="001", ru_ref="12346789012A" + ) + except SupplementaryDataRequestFailed: + return pytest.fail("Supplementary data request unexpectedly failed") + + assert supplementary_data == mock_supplementary_data_payload + + expected_call = ( + SUPPLEMENTARY_DATA_REQUEST_MAX_RETRIES + 1 + ) # Max retries + the initial request + assert mocked_make_request.call_count == expected_call + + +def test_get_supplementary_data_max_retries(mocker): + mocked_make_request = get_mocked_make_request( + mocker, status_codes=[500, 500, 500, 500] + ) + + with pytest.raises(SupplementaryDataRequestFailed) as exc: + get_supplementary_data( + supplementary_data_url=TEST_SDS_URL, dataset_id="001", ru_ref="12346789012A" + ) + + assert str(exc.value) == "Supplementary Data request failed" + assert mocked_make_request.call_count == 3 diff --git a/tests/integration/create_token.py b/tests/integration/create_token.py index 603346dc56..96dbba83f8 100644 --- a/tests/integration/create_token.py +++ b/tests/integration/create_token.py @@ -51,7 +51,7 @@ "account_service_url": ACCOUNT_SERVICE_URL, } -PAYLOAD_V2_PREPOP = { +PAYLOAD_V2_SUPPLEMENTARY_DATA = { "version": AuthPayloadVersion.V2.value, "survey_metadata": { "data": { @@ -137,8 +137,8 @@ def create_token_v2(self, schema_name, theme="default", 
**extra_payload): return self.generate_token(payload) - def create_prepop_token(self, schema_name, **extra_payload): - payload = PAYLOAD_V2_PREPOP + def create_supplementary_data_token(self, schema_name, **extra_payload): + payload = PAYLOAD_V2_SUPPLEMENTARY_DATA payload = self._get_payload_with_params( schema_name=schema_name, payload=payload, **extra_payload diff --git a/tests/integration/integration_test_case.py b/tests/integration/integration_test_case.py index e10001a7c9..280fcb9e91 100644 --- a/tests/integration/integration_test_case.py +++ b/tests/integration/integration_test_case.py @@ -127,12 +127,14 @@ def launchSurvey(self, schema_name="test_dates", **payload_kwargs): self.get(f"/session?token={token}") - def launchPrepopSurvey(self, schema_name="test_prepop", **payload_kwargs): + def launchPrepopSurvey( + self, schema_name="test_supplementary_data", **payload_kwargs + ): """ Launch a survey as an authenticated user and follow re-directs :param schema_name: The name of the schema to load """ - token = self.token_generator.create_prepop_token( + token = self.token_generator.create_supplementary_data_token( schema_name=schema_name, **payload_kwargs ) diff --git a/tests/integration/routes/test_session.py b/tests/integration/routes/test_session.py index 6762005014..2157d5702e 100644 --- a/tests/integration/routes/test_session.py +++ b/tests/integration/routes/test_session.py @@ -89,8 +89,8 @@ def test_patch_session_expiry_extends_session(self): self.assertIn("expires_at", parsed_json) self.assertEqual(parsed_json["expires_at"], expected_expires_at) - def test_prepop_data_is_loaded_when_sds_dataset_id_in_metadata(self): - with patch("app.routes.session.get_prepop_data", return_value={}): + def test_supplementary_data_is_loaded_when_sds_dataset_id_in_metadata(self): + with patch("app.routes.session.get_supplementary_data", return_value={}): self.launchPrepopSurvey() self.assertStatusOK() From e1528ec1d6df647794ce61f92ca4bfb7feb88f47 Mon Sep 17 00:00:00 2001 
From: Rhys Berrow Date: Fri, 26 May 2023 09:09:39 +0100 Subject: [PATCH 07/21] Update references to prepop --- app/authentication/auth_payload_versions.py | 2 +- app/utilities/supplementary_data_parser.py | 25 ++++++++++++++------- tests/integration/integration_test_case.py | 2 +- tests/integration/routes/test_session.py | 2 +- 4 files changed, 20 insertions(+), 11 deletions(-) diff --git a/app/authentication/auth_payload_versions.py b/app/authentication/auth_payload_versions.py index f357ee6050..f467b7e118 100644 --- a/app/authentication/auth_payload_versions.py +++ b/app/authentication/auth_payload_versions.py @@ -5,5 +5,5 @@ class AuthPayloadVersion(Enum): V2 = "v2" -class PrepopSchemaVersion(Enum): +class SupplementaryDataSchemaVersion(Enum): V1 = "v1" diff --git a/app/utilities/supplementary_data_parser.py b/app/utilities/supplementary_data_parser.py index 82f2f587e4..5c3a1e38aa 100644 --- a/app/utilities/supplementary_data_parser.py +++ b/app/utilities/supplementary_data_parser.py @@ -10,7 +10,7 @@ validates_schema, ) -from app.authentication.auth_payload_versions import PrepopSchemaVersion +from app.authentication.auth_payload_versions import SupplementaryDataSchemaVersion from app.utilities.metadata_parser_v2 import VALIDATORS, StripWhitespaceMixin @@ -22,10 +22,10 @@ class ItemsData(Schema, StripWhitespaceMixin): pass -class PrepopData(Schema, StripWhitespaceMixin): +class SupplementaryData(Schema, StripWhitespaceMixin): identifier = VALIDATORS["string"](validate=validate.Length(min=1)) schema_version = VALIDATORS["string"]( - validate=validate.OneOf([PrepopSchemaVersion.V1.value]) + validate=validate.OneOf([SupplementaryDataSchemaVersion.V1.value]) ) items = fields.Nested(ItemsData, required=False, unknown=EXCLUDE) @@ -37,14 +37,19 @@ def validate_unit_id(self, data, **kwargs): and data.get("identifier") and data["identifier"] != self.context["ru_ref"] ): - raise ValidationError("Prepop data did not return the specified Unit ID") + raise ValidationError( 
+ "Supplementary data did not return the specified Unit ID" + ) -class PrepopMetadataSchema(Schema, StripWhitespaceMixin): +class SupplementaryDataMetadataSchema(Schema, StripWhitespaceMixin): dataset_id = VALIDATORS["string"](validate=validate.Length(min=1)) survey_id = VALIDATORS["string"](validate=validate.Length(min=1)) data = fields.Nested( - PrepopData, required=True, unknown=EXCLUDE, validate=validate.Length(min=1) + SupplementaryData, + required=True, + unknown=EXCLUDE, + validate=validate.Length(min=1), ) @validates_schema() @@ -55,14 +60,18 @@ def validate_dataset_id(self, data, **kwargs): and data.get("dataset_id") and data["dataset_id"] != self.context["dataset_id"] ): - raise ValidationError("Prepop data did not return the specified Dataset ID") + raise ValidationError( + "Supplementary data did not return the specified Dataset ID" + ) def validate_supplementary_data_v1( supplementary_data: Mapping, dataset_id: str, ru_ref: str ) -> dict: """Validate claims required for runner to function""" - supplementary_data_metadata_schema = PrepopMetadataSchema(unknown=INCLUDE) + supplementary_data_metadata_schema = SupplementaryDataMetadataSchema( + unknown=INCLUDE + ) supplementary_data_metadata_schema.context = { "dataset_id": dataset_id, "ru_ref": ru_ref, diff --git a/tests/integration/integration_test_case.py b/tests/integration/integration_test_case.py index 280fcb9e91..b5b92e1001 100644 --- a/tests/integration/integration_test_case.py +++ b/tests/integration/integration_test_case.py @@ -127,7 +127,7 @@ def launchSurvey(self, schema_name="test_dates", **payload_kwargs): self.get(f"/session?token={token}") - def launchPrepopSurvey( + def launchSupplementaryDataSurvey( self, schema_name="test_supplementary_data", **payload_kwargs ): """ diff --git a/tests/integration/routes/test_session.py b/tests/integration/routes/test_session.py index 2157d5702e..3a803674f4 100644 --- a/tests/integration/routes/test_session.py +++ b/tests/integration/routes/test_session.py 
@@ -91,7 +91,7 @@ def test_patch_session_expiry_extends_session(self): def test_supplementary_data_is_loaded_when_sds_dataset_id_in_metadata(self): with patch("app.routes.session.get_supplementary_data", return_value={}): - self.launchPrepopSurvey() + self.launchSupplementaryDataSurvey() self.assertStatusOK() From 90c29ffa5d0802f2761f88142254d5b61d0500f0 Mon Sep 17 00:00:00 2001 From: Rhys Berrow Date: Fri, 26 May 2023 13:46:48 +0100 Subject: [PATCH 08/21] Update mock endpoint --- scripts/mock_sds_endpoint.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/scripts/mock_sds_endpoint.py b/scripts/mock_sds_endpoint.py index a5bb7fe8f3..b8b26b7117 100644 --- a/scripts/mock_sds_endpoint.py +++ b/scripts/mock_sds_endpoint.py @@ -1,4 +1,4 @@ -from flask import Flask, request +from flask import Flask, request, Response app = Flask(__name__) @@ -128,7 +128,10 @@ def get_sds_data(): if dataset_id == "002": return SUPPLEMENTARY_DATA_PRODCOM_PAYLOAD - return SUPPLEMENTARY_DATA_TILES_AND_SLATE_PAYLOAD + elif dataset_id == "001": + return SUPPLEMENTARY_DATA_TILES_AND_SLATE_PAYLOAD + else: + return Response(status=404) if __name__ == "__main__": From 638cc0d32f20903ede231ab0b5132b68c076d0f0 Mon Sep 17 00:00:00 2001 From: Rhys Berrow Date: Fri, 26 May 2023 14:05:11 +0100 Subject: [PATCH 09/21] Lint --- scripts/mock_sds_endpoint.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/mock_sds_endpoint.py b/scripts/mock_sds_endpoint.py index b8b26b7117..9ceb6a3937 100644 --- a/scripts/mock_sds_endpoint.py +++ b/scripts/mock_sds_endpoint.py @@ -128,7 +128,7 @@ def get_sds_data(): if dataset_id == "002": return SUPPLEMENTARY_DATA_PRODCOM_PAYLOAD - elif dataset_id == "001": + elif dataset_id == "001": return SUPPLEMENTARY_DATA_TILES_AND_SLATE_PAYLOAD else: return Response(status=404) From bb97f830d718fca65eeb41cd59b914a3f0ff6d6e Mon Sep 17 00:00:00 2001 From: Rhys Berrow Date: Fri, 26 May 2023 14:27:24 +0100 Subject: [PATCH 10/21] Lint --- 
scripts/mock_sds_endpoint.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/scripts/mock_sds_endpoint.py b/scripts/mock_sds_endpoint.py index 9ceb6a3937..ea619279e0 100644 --- a/scripts/mock_sds_endpoint.py +++ b/scripts/mock_sds_endpoint.py @@ -1,4 +1,4 @@ -from flask import Flask, request, Response +from flask import Flask, Response, request app = Flask(__name__) @@ -126,12 +126,12 @@ def get_sds_data(): dataset_id = request.args.get("dataset_id") + if dataset_id == "001": + return SUPPLEMENTARY_DATA_TILES_AND_SLATE_PAYLOAD if dataset_id == "002": return SUPPLEMENTARY_DATA_PRODCOM_PAYLOAD - elif dataset_id == "001": - return SUPPLEMENTARY_DATA_TILES_AND_SLATE_PAYLOAD - else: - return Response(status=404) + + return Response(status=404) if __name__ == "__main__": From 0945489e1e5d011b2fba507fc790015c79345651 Mon Sep 17 00:00:00 2001 From: Rhys Berrow Date: Tue, 30 May 2023 12:05:50 +0100 Subject: [PATCH 11/21] PR comments --- app/routes/session.py | 91 +---------- app/settings.py | 1 + app/supplementary_data.py | 89 +++++++++++ app/utilities/request_session.py | 21 +++ app/utilities/schema.py | 22 +-- app/utilities/supplementary_data_parser.py | 31 ++-- schemas/test/en/test_supplementary_data.json | 4 + .../parser/test_supplementary_data_parser.py | 53 +++++-- tests/app/test_request_supplementary_data.py | 141 ++++++++++-------- tests/integration/create_token.py | 1 + 10 files changed, 272 insertions(+), 182 deletions(-) create mode 100644 app/supplementary_data.py create mode 100644 app/utilities/request_session.py diff --git a/app/routes/session.py b/app/routes/session.py index ddbd1ca526..d7fc277feb 100644 --- a/app/routes/session.py +++ b/app/routes/session.py @@ -1,15 +1,11 @@ -import json from datetime import datetime, timezone from typing import Any, Iterable, Mapping -import requests from flask import Blueprint, g, jsonify, redirect, request from flask import session as cookie_session from flask import url_for from flask_login 
import login_required, logout_user from marshmallow import INCLUDE, ValidationError -from requests import RequestException -from requests.adapters import HTTPAdapter, Retry from sdc.crypto.exceptions import InvalidTokenException from structlog import contextvars, get_logger from werkzeug.exceptions import Unauthorized @@ -27,36 +23,18 @@ ) from app.questionnaire import QuestionnaireSchema from app.routes.errors import _render_error_page +from app.supplementary_data import get_supplementary_data from app.utilities.metadata_parser import validate_runner_claims from app.utilities.metadata_parser_v2 import ( validate_questionnaire_claims, validate_runner_claims_v2, ) from app.utilities.schema import load_schema_from_metadata -from app.utilities.supplementary_data_parser import validate_supplementary_data_v1 logger = get_logger() session_blueprint = Blueprint("session", __name__) -SUPPLEMENTARY_DATA_URL = "http://localhost:5003/v1/unit_data" -SUPPLEMENTARY_DATA_REQUEST_MAX_BACKOFF = 0.2 -SUPPLEMENTARY_DATA_REQUEST_MAX_RETRIES = 2 # Totals no. of request should be 3. 
The initial request + SUPPLEMENTARY_DATA_REQUEST_MAX_RETRIES -SUPPLEMENTARY_DATA_REQUEST_TIMEOUT = 3 -SUPPLEMENTARY_DATA_REQUEST_RETRY_STATUS_CODES = [ - 408, - 429, - 500, - 502, - 503, - 504, -] - - -class SupplementaryDataRequestFailed(Exception): - def __str__(self) -> str: - return "Supplementary Data request failed" - @session_blueprint.after_request def add_cache_control(response: Response) -> Response: @@ -151,76 +129,17 @@ def login() -> Response: cookie_session["language_code"] = metadata.language_code - if (dataset_id := metadata["sds_dataset_id"]) and ru_ref: + # Type ignore: survey_id and either ru_ref or qid are required for schemas that use supplementary data + if dataset_id := metadata["sds_dataset_id"]: get_supplementary_data( - supplementary_data_url=SUPPLEMENTARY_DATA_URL, dataset_id=dataset_id, - ru_ref=ru_ref, + unit_id=metadata["ru_ref"] or metadata["qid"], # type: ignore + survey_id=metadata["survey_id"], # type: ignore ) return redirect(url_for("questionnaire.get_questionnaire")) -def get_supplementary_data( - supplementary_data_url: str, dataset_id: str, ru_ref: str -) -> dict: - constructed_supplementary_data_url = ( - f"{supplementary_data_url}?dataset_id={dataset_id}&unit_id={ru_ref}" - ) - - session = requests.Session() - - retries = Retry( - total=SUPPLEMENTARY_DATA_REQUEST_MAX_RETRIES, - status_forcelist=SUPPLEMENTARY_DATA_REQUEST_RETRY_STATUS_CODES, - ) # Codes to retry according to Google Docs https://cloud.google.com/storage/docs/retry-strategy#client-libraries - - # Type ignore: MyPy does not recognise BACKOFF_MAX however it is a property, albeit deprecated - retries.BACKOFF_MAX = SUPPLEMENTARY_DATA_REQUEST_MAX_BACKOFF # type: ignore - - session.mount("http://", HTTPAdapter(max_retries=retries)) - session.mount("https://", HTTPAdapter(max_retries=retries)) - - try: - response = session.get( - constructed_supplementary_data_url, - timeout=SUPPLEMENTARY_DATA_REQUEST_TIMEOUT, - ) - except RequestException as exc: - logger.exception( 
- "Error requesting supplementary data", - supplementary_data_url=constructed_supplementary_data_url, - ) - raise SupplementaryDataRequestFailed from exc - - if response.status_code == 200: - supplementary_data_response_content = response.content.decode() - supplementary_data = json.loads(supplementary_data_response_content) - - return validate_supplementary_data( - supplementary_data=supplementary_data, dataset_id=dataset_id, ru_ref=ru_ref - ) - - logger.error( - "got a non-200 response for supplementary data request", - status_code=response.status_code, - schema_url=constructed_supplementary_data_url, - ) - - raise SupplementaryDataRequestFailed - - -def validate_supplementary_data( - supplementary_data: Mapping, dataset_id: str, ru_ref: str -) -> dict: - try: - return validate_supplementary_data_v1( - supplementary_data=supplementary_data, dataset_id=dataset_id, ru_ref=ru_ref - ) - except ValidationError as e: - raise ValidationError("Invalid supplementary_dataulation data") from e - - def validate_jti(decrypted_token: dict[str, str | list | int]) -> None: # Type ignore: decrypted_token["exp"] will return a valid timestamp with compatible typing expires_at = datetime.fromtimestamp(decrypted_token["exp"], tz=timezone.utc) # type: ignore diff --git a/app/settings.py b/app/settings.py index 675c8f1bd2..813e5637b4 100644 --- a/app/settings.py +++ b/app/settings.py @@ -144,6 +144,7 @@ def utcoffset_or_fail(date_value, key): SURVEY_TYPE = os.getenv("SURVEY_TYPE", "business") +SDS_API_BASE_URL = os.getenv("SDS_API_BASE_URL") ACCOUNT_SERVICE_BASE_URL = os.getenv( "ACCOUNT_SERVICE_BASE_URL", "https://surveys.ons.gov.uk" diff --git a/app/supplementary_data.py b/app/supplementary_data.py new file mode 100644 index 0000000000..49f86dc96f --- /dev/null +++ b/app/supplementary_data.py @@ -0,0 +1,89 @@ +import json +from typing import Mapping + +from flask import current_app +from marshmallow import ValidationError +from requests import RequestException +from structlog import 
get_logger + +from app.utilities.request_session import get_retryable_session +from app.utilities.supplementary_data_parser import validate_supplementary_data_v1 + +SUPPLEMENTARY_DATA_URL = "http://localhost:5003/v1/unit_data" +SUPPLEMENTARY_DATA_REQUEST_BACKOFF_FACTOR = 0.2 +SUPPLEMENTARY_DATA_REQUEST_MAX_RETRIES = 2 # Totals no. of request should be 3. The initial request + SUPPLEMENTARY_DATA_REQUEST_MAX_RETRIES +SUPPLEMENTARY_DATA_REQUEST_TIMEOUT = 3 +SUPPLEMENTARY_DATA_REQUEST_RETRY_STATUS_CODES = [ + 408, + 429, + 500, + 502, + 503, + 504, +] + +logger = get_logger() + + +class SupplementaryDataRequestFailed(Exception): + def __str__(self) -> str: + return "Supplementary Data request failed" + + +def get_supplementary_data(dataset_id: str, unit_id: str, survey_id: str) -> dict: + supplementary_data_url = current_app.config["SDS_API_BASE_URL"] + + constructed_supplementary_data_url = ( + f"{supplementary_data_url}?dataset_id={dataset_id}&unit_id={unit_id}" + ) + + session = get_retryable_session( + max_retries=SUPPLEMENTARY_DATA_REQUEST_MAX_RETRIES, + retry_status_codes=SUPPLEMENTARY_DATA_REQUEST_RETRY_STATUS_CODES, + backoff_factor=SUPPLEMENTARY_DATA_REQUEST_BACKOFF_FACTOR, + ) + + try: + response = session.get( + constructed_supplementary_data_url, + timeout=SUPPLEMENTARY_DATA_REQUEST_TIMEOUT, + ) + except RequestException as exc: + logger.exception( + "Error requesting supplementary data", + supplementary_data_url=constructed_supplementary_data_url, + ) + raise SupplementaryDataRequestFailed from exc + + if response.status_code == 200: + supplementary_data_response_content = response.content.decode() + supplementary_data = json.loads(supplementary_data_response_content) + + return validate_supplementary_data( + supplementary_data=supplementary_data, + dataset_id=dataset_id, + unit_id=unit_id, + survey_id=survey_id, + ) + + logger.error( + "got a non-200 response for supplementary data request", + status_code=response.status_code, + 
schema_url=constructed_supplementary_data_url, + ) + + raise SupplementaryDataRequestFailed + + +def validate_supplementary_data( + supplementary_data: Mapping, dataset_id: str, unit_id: str, survey_id: str +) -> dict: + try: + return validate_supplementary_data_v1( + supplementary_data=supplementary_data, + dataset_id=dataset_id, + unit_id=unit_id, + survey_id=survey_id, + ) + except ValidationError as e: + raise ValidationError("Invalid supplementary data") from e diff --git a/app/utilities/request_session.py b/app/utilities/request_session.py new file mode 100644 index 0000000000..4835d06a7a --- /dev/null +++ b/app/utilities/request_session.py @@ -0,0 +1,21 @@ +import requests +from requests.adapters import HTTPAdapter +from urllib3 import Retry + + +def get_retryable_session( + max_retries, retry_status_codes, backoff_factor +) -> requests.Session: + session = requests.Session() + + retries = Retry( + total=max_retries, + status_forcelist=retry_status_codes, + ) # Codes to retry according to Google Docs https://cloud.google.com/storage/docs/retry-strategy#client-libraries + + retries.backoff_factor = backoff_factor + + session.mount("http://", HTTPAdapter(max_retries=retries)) + session.mount("https://", HTTPAdapter(max_retries=retries)) + + return session diff --git a/app/utilities/schema.py b/app/utilities/schema.py index 994f700423..4845a7df33 100644 --- a/app/utilities/schema.py +++ b/app/utilities/schema.py @@ -5,9 +5,7 @@ from pathlib import Path from typing import Any -import requests from requests import RequestException -from requests.adapters import HTTPAdapter, Retry from structlog import get_logger from app.data_models.metadata_proxy import MetadataProxy @@ -16,6 +14,7 @@ QuestionnaireSchema, ) from app.utilities.json import json_load, json_loads +from app.utilities.request_session import get_retryable_session logger = get_logger() @@ -28,7 +27,7 @@ "phm_0001": [["en", "cy"]], } -SCHEMA_REQUEST_MAX_BACKOFF = 0.2 +SCHEMA_REQUEST_BACKOFF_FACTOR = 0.2 
SCHEMA_REQUEST_MAX_RETRIES = 2 # Totals no. of request should be 3. The initial request + SCHEMA_REQUEST_MAX_RETRIES SCHEMA_REQUEST_TIMEOUT = 3 SCHEMA_REQUEST_RETRY_STATUS_CODES = [ @@ -210,18 +209,11 @@ def load_schema_from_url( constructed_schema_url = f"{schema_url}?language={language_code}" - session = requests.Session() - - retries = Retry( - total=SCHEMA_REQUEST_MAX_RETRIES, - status_forcelist=SCHEMA_REQUEST_RETRY_STATUS_CODES, - ) # Codes to retry according to Google Docs https://cloud.google.com/storage/docs/retry-strategy#client-libraries - - # Type ignore: MyPy does not recognise BACKOFF_MAX however it is a property, albeit deprecated - retries.BACKOFF_MAX = SCHEMA_REQUEST_MAX_BACKOFF # type: ignore - - session.mount("http://", HTTPAdapter(max_retries=retries)) - session.mount("https://", HTTPAdapter(max_retries=retries)) + session = get_retryable_session( + max_retries=SCHEMA_REQUEST_MAX_RETRIES, + retry_status_codes=SCHEMA_REQUEST_RETRY_STATUS_CODES, + backoff_factor=SCHEMA_REQUEST_BACKOFF_FACTOR, + ) try: req = session.get(constructed_schema_url, timeout=SCHEMA_REQUEST_TIMEOUT) diff --git a/app/utilities/supplementary_data_parser.py b/app/utilities/supplementary_data_parser.py index 5c3a1e38aa..cd778fc27c 100644 --- a/app/utilities/supplementary_data_parser.py +++ b/app/utilities/supplementary_data_parser.py @@ -35,7 +35,7 @@ def validate_unit_id(self, data, **kwargs): if ( data and data.get("identifier") - and data["identifier"] != self.context["ru_ref"] + and data["identifier"] != self.context["unit_id"] ): raise ValidationError( "Supplementary data did not return the specified Unit ID" @@ -55,18 +55,26 @@ class SupplementaryDataMetadataSchema(Schema, StripWhitespaceMixin): @validates_schema() def validate_dataset_id(self, data, **kwargs): # pylint: disable=no-self-use, unused-argument - if ( - data - and data.get("dataset_id") - and data["dataset_id"] != self.context["dataset_id"] - ): - raise ValidationError( - "Supplementary data did not return 
the specified Dataset ID" - ) + if data: + if ( + data.get("dataset_id") + and data["dataset_id"] != self.context["dataset_id"] + ): + raise ValidationError( + "Supplementary data did not return the specified Dataset ID" + ) + + if data.get("survey_id") and data["survey_id"] != self.context["survey_id"]: + raise ValidationError( + "Supplementary data did not return the specified Survey ID" + ) def validate_supplementary_data_v1( - supplementary_data: Mapping, dataset_id: str, ru_ref: str + supplementary_data: Mapping, + dataset_id: str, + unit_id: str, + survey_id: str, ) -> dict: """Validate claims required for runner to function""" supplementary_data_metadata_schema = SupplementaryDataMetadataSchema( @@ -74,7 +82,8 @@ def validate_supplementary_data_v1( ) supplementary_data_metadata_schema.context = { "dataset_id": dataset_id, - "ru_ref": ru_ref, + "unit_id": unit_id, + "survey_id": survey_id, } validated_supplementary_data = supplementary_data_metadata_schema.load( supplementary_data diff --git a/schemas/test/en/test_supplementary_data.json b/schemas/test/en/test_supplementary_data.json index 88d1df69ff..bfa413765a 100644 --- a/schemas/test/en/test_supplementary_data.json +++ b/schemas/test/en/test_supplementary_data.json @@ -23,6 +23,10 @@ { "name": "sds_dataset_id", "type": "string" + }, + { + "name": "survey_id", + "type": "string" } ], "questionnaire_flow": { diff --git a/tests/app/parser/test_supplementary_data_parser.py b/tests/app/parser/test_supplementary_data_parser.py index cd9d8b3ae8..866d47f2fd 100644 --- a/tests/app/parser/test_supplementary_data_parser.py +++ b/tests/app/parser/test_supplementary_data_parser.py @@ -1,7 +1,7 @@ import pytest from marshmallow import ValidationError -from app.routes.session import validate_supplementary_data +from app.supplementary_data import validate_supplementary_data from app.utilities.supplementary_data_parser import validate_supplementary_data_v1 SUPPLEMENTARY_DATA_PAYLOAD = { @@ -43,7 +43,10 @@ def 
test_invalid_supplementary_data_payload_raises_error(): with pytest.raises(ValidationError): validate_supplementary_data( - supplementary_data={}, dataset_id="001", ru_ref="12346789012A" + supplementary_data={}, + dataset_id="001", + unit_id="12346789012A", + survey_id="123", ) @@ -51,7 +54,8 @@ def test_validate_supplementary_data_payload(): validated_payload = validate_supplementary_data_v1( supplementary_data=SUPPLEMENTARY_DATA_PAYLOAD, dataset_id="001", - ru_ref="12346789012A", + unit_id="12346789012A", + survey_id="123", ) assert validated_payload == SUPPLEMENTARY_DATA_PAYLOAD @@ -62,16 +66,28 @@ def test_validate_supplementary_data_payload_incorrect_dataset_id(): validate_supplementary_data_v1( supplementary_data=SUPPLEMENTARY_DATA_PAYLOAD, dataset_id="002", - ru_ref="12346789012A", + unit_id="12346789012A", + survey_id="123", ) -def test_validate_supplementary_data_payload_incorrect_ru_ref(): +def test_validate_supplementary_data_payload_incorrect_survey_id(): with pytest.raises(ValidationError): validate_supplementary_data_v1( supplementary_data=SUPPLEMENTARY_DATA_PAYLOAD, dataset_id="001", - ru_ref="000000000001", + unit_id="12346789012A", + survey_id="234", + ) + + +def test_validate_supplementary_data_payload_incorrect_unit_id(): + with pytest.raises(ValidationError): + validate_supplementary_data_v1( + supplementary_data=SUPPLEMENTARY_DATA_PAYLOAD, + dataset_id="001", + unit_id="000000000001", + survey_id="123", ) @@ -86,7 +102,10 @@ def test_supplementary_data_payload_with_no_items_is_validated(): } validated_payload = validate_supplementary_data_v1( - supplementary_data=payload, dataset_id="001", ru_ref="12346789012A" + supplementary_data=payload, + dataset_id="001", + unit_id="12346789012A", + survey_id="123", ) assert validated_payload == payload @@ -103,7 +122,10 @@ def test_validate_supplementary_data_payload_missing_survey_id(): with pytest.raises(ValidationError): validate_supplementary_data_v1( - supplementary_data=payload, dataset_id="001", 
ru_ref="12346789012A" + supplementary_data=payload, + dataset_id="001", + unit_id="12346789012A", + survey_id="123", ) @@ -119,7 +141,10 @@ def test_validate_supplementary_data_payload_with_unknown_field(): } validated_payload = validate_supplementary_data_v1( - supplementary_data=payload, dataset_id="001", ru_ref="12346789012A" + supplementary_data=payload, + dataset_id="001", + unit_id="12346789012A", + survey_id="123", ) assert validated_payload == payload @@ -138,7 +163,10 @@ def test_validate_supplementary_data_invalid_schema_version(): with pytest.raises(ValidationError): validate_supplementary_data_v1( - supplementary_data=payload, dataset_id="001", ru_ref="12346789012A" + supplementary_data=payload, + dataset_id="001", + unit_id="12346789012A", + survey_id="123", ) @@ -179,5 +207,8 @@ def test_validate_supplementary_data_payload_missing_identifier_in_items(): with pytest.raises(ValidationError): validate_supplementary_data_v1( - supplementary_data=payload, dataset_id="001", ru_ref="12346789012A" + supplementary_data=payload, + dataset_id="001", + unit_id="12346789012A", + survey_id="123", ) diff --git a/tests/app/test_request_supplementary_data.py b/tests/app/test_request_supplementary_data.py index bdef68fc98..ba6c3cfe45 100644 --- a/tests/app/test_request_supplementary_data.py +++ b/tests/app/test_request_supplementary_data.py @@ -1,8 +1,9 @@ import pytest import responses +from flask import Flask, current_app from requests import RequestException -from app.routes.session import ( +from app.supplementary_data import ( SUPPLEMENTARY_DATA_REQUEST_MAX_RETRIES, SupplementaryDataRequestFailed, get_supplementary_data, @@ -48,13 +49,19 @@ @responses.activate -def test_get_supplementary_data_200(): - responses.add( - responses.GET, TEST_SDS_URL, json=mock_supplementary_data_payload, status=200 - ) - loaded_supplementary_data = get_supplementary_data( - supplementary_data_url=TEST_SDS_URL, dataset_id="001", ru_ref="12346789012A" - ) +def 
test_get_supplementary_data_200(app: Flask): + with app.app_context(): + current_app.config["SDS_API_BASE_URL"] = TEST_SDS_URL + + responses.add( + responses.GET, + TEST_SDS_URL, + json=mock_supplementary_data_payload, + status=200, + ) + loaded_supplementary_data = get_supplementary_data( + dataset_id="001", unit_id="12346789012A", survey_id="123" + ) assert loaded_supplementary_data == mock_supplementary_data_payload @@ -64,47 +71,55 @@ def test_get_supplementary_data_200(): [401, 403, 404, 501, 511], ) @responses.activate -def test_get_supplementary_data_non_200(status_code): - responses.add( - responses.GET, - TEST_SDS_URL, - json=mock_supplementary_data_payload, - status=status_code, - ) - - with pytest.raises(SupplementaryDataRequestFailed) as exc: - get_supplementary_data( - supplementary_data_url=TEST_SDS_URL, dataset_id="001", ru_ref="12346789012A" +def test_get_supplementary_data_non_200(app: Flask, status_code): + with app.app_context(): + current_app.config["SDS_API_BASE_URL"] = TEST_SDS_URL + + responses.add( + responses.GET, + TEST_SDS_URL, + json=mock_supplementary_data_payload, + status=status_code, ) + with pytest.raises(SupplementaryDataRequestFailed) as exc: + get_supplementary_data( + dataset_id="001", unit_id="12346789012A", survey_id="123" + ) + assert str(exc.value) == "Supplementary Data request failed" @responses.activate -def test_get_supplementary_data_request_failed(): - responses.add(responses.GET, TEST_SDS_URL, body=RequestException()) - with pytest.raises(SupplementaryDataRequestFailed) as exc: - get_supplementary_data( - supplementary_data_url=TEST_SDS_URL, dataset_id="001", ru_ref="12346789012A" - ) +def test_get_supplementary_data_request_failed(app: Flask): + with app.app_context(): + current_app.config["SDS_API_BASE_URL"] = TEST_SDS_URL + + responses.add(responses.GET, TEST_SDS_URL, body=RequestException()) + with pytest.raises(SupplementaryDataRequestFailed) as exc: + get_supplementary_data( + dataset_id="001", 
unit_id="12346789012A", survey_id="123" + ) assert str(exc.value) == "Supplementary Data request failed" def test_get_supplementary_data_retries_timeout_error( - mocker, mocked_make_request_with_timeout + app: Flask, mocker, mocked_make_request_with_timeout ): - mocker.patch( - "app.routes.session.validate_supplementary_data", - return_value=mock_supplementary_data_payload, - ) - - try: - supplementary_data = get_supplementary_data( - supplementary_data_url=TEST_SDS_URL, dataset_id="001", ru_ref="12346789012A" + with app.app_context(): + current_app.config["SDS_API_BASE_URL"] = TEST_SDS_URL + mocker.patch( + "app.supplementary_data.validate_supplementary_data", + return_value=mock_supplementary_data_payload, ) - except SupplementaryDataRequestFailed: - return pytest.fail("Supplementary data request unexpectedly failed") + + try: + supplementary_data = get_supplementary_data( + dataset_id="001", unit_id="12346789012A", survey_id="123" + ) + except SupplementaryDataRequestFailed: + return pytest.fail("Supplementary data request unexpectedly failed") assert supplementary_data == mock_supplementary_data_payload @@ -115,38 +130,46 @@ def test_get_supplementary_data_retries_timeout_error( @pytest.mark.usefixtures("mocked_response_content") -def test_get_supplementary_data_retries_transient_error(mocker): - mocked_make_request = get_mocked_make_request(mocker, status_codes=[500, 500, 200]) - - mocker.patch( - "app.routes.session.validate_supplementary_data", - return_value=mock_supplementary_data_payload, - ) +def test_get_supplementary_data_retries_transient_error(app: Flask, mocker): + with app.app_context(): + current_app.config["SDS_API_BASE_URL"] = TEST_SDS_URL + mocked_make_request = get_mocked_make_request( + mocker, status_codes=[500, 500, 200] + ) - try: - supplementary_data = get_supplementary_data( - supplementary_data_url=TEST_SDS_URL, dataset_id="001", ru_ref="12346789012A" + mocker.patch( + "app.supplementary_data.validate_supplementary_data", + 
return_value=mock_supplementary_data_payload, ) - except SupplementaryDataRequestFailed: - return pytest.fail("Supplementary data request unexpectedly failed") - assert supplementary_data == mock_supplementary_data_payload + try: + supplementary_data = get_supplementary_data( + dataset_id="001", unit_id="12346789012A", survey_id="123" + ) + except SupplementaryDataRequestFailed: + return pytest.fail("Supplementary data request unexpectedly failed") + + assert supplementary_data == mock_supplementary_data_payload + + expected_call = ( + SUPPLEMENTARY_DATA_REQUEST_MAX_RETRIES + 1 + ) # Max retries + the initial request - expected_call = ( - SUPPLEMENTARY_DATA_REQUEST_MAX_RETRIES + 1 - ) # Max retries + the initial request assert mocked_make_request.call_count == expected_call -def test_get_supplementary_data_max_retries(mocker): - mocked_make_request = get_mocked_make_request( - mocker, status_codes=[500, 500, 500, 500] - ) +def test_get_supplementary_data_max_retries(app: Flask, mocker): + with app.app_context(): + current_app.config["SDS_API_BASE_URL"] = TEST_SDS_URL - with pytest.raises(SupplementaryDataRequestFailed) as exc: - get_supplementary_data( - supplementary_data_url=TEST_SDS_URL, dataset_id="001", ru_ref="12346789012A" + mocked_make_request = get_mocked_make_request( + mocker, status_codes=[500, 500, 500, 500] ) + with pytest.raises(SupplementaryDataRequestFailed) as exc: + get_supplementary_data( + dataset_id="001", unit_id="12346789012A", survey_id="123" + ) + assert str(exc.value) == "Supplementary Data request failed" assert mocked_make_request.call_count == 3 diff --git a/tests/integration/create_token.py b/tests/integration/create_token.py index 96dbba83f8..13cf45205f 100644 --- a/tests/integration/create_token.py +++ b/tests/integration/create_token.py @@ -66,6 +66,7 @@ "employment_date": "1983-06-02", "display_address": "68 Abingdon Road, Goathill", "sds_dataset_id": "001", + "survey_id": "123", } }, "collection_exercise_sid": "789", From 
fc74b919712ac2874bd65408dd696b774672eac3 Mon Sep 17 00:00:00 2001 From: Rhys Berrow Date: Tue, 30 May 2023 14:28:05 +0100 Subject: [PATCH 12/21] Use uuid's and update the test script --- app/utilities/supplementary_data_parser.py | 2 +- .../supplementary_data_no_repeat.json | 8 ++ .../supplementary_data_with_repeat.json | 85 +++++++++++ scripts/mock_sds_endpoint.py | 134 ++---------------- .../parser/test_supplementary_data_parser.py | 30 ++-- tests/integration/create_token.py | 2 +- 6 files changed, 121 insertions(+), 140 deletions(-) create mode 100644 scripts/mock_data/supplementary_data_no_repeat.json create mode 100644 scripts/mock_data/supplementary_data_with_repeat.json diff --git a/app/utilities/supplementary_data_parser.py b/app/utilities/supplementary_data_parser.py index cd778fc27c..68a8b9c3b8 100644 --- a/app/utilities/supplementary_data_parser.py +++ b/app/utilities/supplementary_data_parser.py @@ -43,7 +43,7 @@ def validate_unit_id(self, data, **kwargs): class SupplementaryDataMetadataSchema(Schema, StripWhitespaceMixin): - dataset_id = VALIDATORS["string"](validate=validate.Length(min=1)) + dataset_id = VALIDATORS["uuid"]() survey_id = VALIDATORS["string"](validate=validate.Length(min=1)) data = fields.Nested( SupplementaryData, diff --git a/scripts/mock_data/supplementary_data_no_repeat.json b/scripts/mock_data/supplementary_data_no_repeat.json new file mode 100644 index 0000000000..275fbdb5f8 --- /dev/null +++ b/scripts/mock_data/supplementary_data_no_repeat.json @@ -0,0 +1,8 @@ +{ + "dataset_id": "c067f6de-6d64-42b1-8b02-431a3486c178", + "survey_id": "123", + "data": { + "schema_version": "v1", + "identifier": "12346789012A" + } +} diff --git a/scripts/mock_data/supplementary_data_with_repeat.json b/scripts/mock_data/supplementary_data_with_repeat.json new file mode 100644 index 0000000000..a5bc06bb3b --- /dev/null +++ b/scripts/mock_data/supplementary_data_with_repeat.json @@ -0,0 +1,85 @@ +{ + "dataset_id": 
"34a80231-c49a-44d0-91a6-8fe1fb190e64", + "survey_id": "123", + "data": { + "schema_version": "v1", + "identifier": "12346789012A", + "note": { + "title": "Volume of total production", + "description": "Figures should cover the total quantity of the goods produced during the period of the return" + }, + "items": { + "products": [ + { + "identifier": "89929001", + "name": "Articles and equipment for sports or outdoor games", + "cn_codes": "2504 + 250610 + 2512 + 2519 + 2524", + "guidance_include": { + "title": "Include", + "list": [ + "for children's playgrounds", + "swimming pools and paddling pools" + ] + }, + "guidance_exclude": { + "title": "Exclude", + "list": [ + "sports holdalls, gloves, clothing of textile materials, footwear, protective eyewear, rackets, balls, skates", + "for skiing, water sports, golf, fishing', for skiing, water sports, golf, fishing, table tennis, PE, gymnastics, athletics" + ] + }, + "value_sales": { + "answer_code": "89929001", + "label": "Value of sales" + }, + "volume_sales": { + "answer_code": "89929002", + "label": "Volume of sales", + "unit_label": "Tonnes" + }, + "total_volume": { + "answer_code": "89929005", + "label": "Total volume produced", + "unit_label": "Tonnes" + } + }, + { + "identifier": "201630601", + "name": "Other Minerals", + "cn_codes": "5908 + 5910 + 591110 + 591120 + 591140", + "guidance_include": { + "title": "Include", + "list": [ + "natural graphite", + "quartz for industrial use", + "diatomite; magnesia; feldspar", + "magnesite; natural magnesium carbonate", + "talc including steatite and chlorite", + "unexpanded vermiculite and perlite" + ] + }, + "guidance_exclude": { + "title": "Exclude", + "list": [ + "natural quartz sands" + ] + }, + "value_sales": { + "answer_code": "201630601", + "label": "Value of sales" + }, + "volume_sales": { + "answer_code": "201630602", + "label": "Volume of sales", + "unit_label": "Kilogram" + }, + "total_volume": { + "answer_code": "201630605", + "label": "Total volume 
produced", + "unit_label": "Kilogram" + } + } + ] + } + } +} diff --git a/scripts/mock_sds_endpoint.py b/scripts/mock_sds_endpoint.py index ea619279e0..176a23e79a 100644 --- a/scripts/mock_sds_endpoint.py +++ b/scripts/mock_sds_endpoint.py @@ -1,138 +1,26 @@ +import json + from flask import Flask, Response, request app = Flask(__name__) -SUPPLEMENTARY_DATA_TILES_AND_SLATE_PAYLOAD = { - "dataset_id": "001", - "survey_id": "123", - "data": { - "schema_version": "v1", - "identifier": "12346789012A", - "items": { - "local_units": [ - { - "identifier": "0001", - "lu_name": "TEST NAME. 1", - "lu_address": [ - "FIRST ADDRESS 1", - "FIRST ADDRESS 2", - "TOWN", - "COUNTY", - "POST CODE", - ], - }, - { - "identifier": "0002", - "lu_name": "TEST NAME 2", - "lu_address": [ - "SECOND ADDRESS 1", - "SECOND ADDRESS 1", - "TOWN", - "COUNTY", - "POSTCODE", - ], - }, - ] - }, - }, -} - -SUPPLEMENTARY_DATA_PRODCOM_PAYLOAD = { - "dataset_id": "002", - "survey_id": "123", - "data": { - "schema_version": "v1", - "identifier": "12346789012A", - "note": { - "title": "Volume of total production", - "description": "Figures should cover the total quantity of the goods produced during the period of the return", - }, - "items": { - "products": [ - { - "identifier": "89929001", - "name": "Articles and equipment for sports or outdoor games", - "cn_codes": "2504 + 250610 + 2512 + 2519 + 2524", - "guidance_include": { - "title": "Include", - "list": [ - "for children's playgrounds", - "swimming pools and paddling pools", - ], - }, - "guidance_exclude": { - "title": "Exclude", - "list": [ - "sports holdalls, gloves, clothing of textile materials, footwear, protective eyewear, rackets, balls, skates", - "for skiing, water sports, golf, fishing', for skiing, water sports, golf, fishing, table tennis, PE, gymnastics, athletics", - ], - }, - "value_sales": { - "answer_code": "89929001", - "label": "Value of sales", - }, - "volume_sales": { - "answer_code": "89929002", - "label": "Volume of sales", - 
"unit_label": "Tonnes", - }, - "total_volume": { - "answer_code": "89929005", - "label": "Total volume produced", - "unit_label": "Tonnes", - }, - }, - { - "identifier": "201630601", - "name": "Other Minerals", - "cn_codes": "5908 + 5910 + 591110 + 591120 + 591140", - "guidance_include": { - "title": "Include", - "list": [ - "natural graphite", - "quartz for industrial use", - "diatomite; magnesia; feldspar", - "magnesite; natural magnesium carbonate", - "talc including steatite and chlorite", - "unexpanded vermiculite and perlite", - ], - }, - "guidance_exclude": { - "title": "Exclude", - "list": ["natural quartz sands"], - }, - "value_sales": { - "answer_code": "201630601", - "label": "Value of sales", - }, - "volume_sales": { - "answer_code": "201630602", - "label": "Volume of sales", - "unit_label": "Kilogram", - }, - "total_volume": { - "answer_code": "201630605", - "label": "Total volume produced", - "unit_label": "Kilogram", - }, - }, - ] - }, - }, -} - @app.route("/v1/unit_data") def get_sds_data(): dataset_id = request.args.get("dataset_id") - if dataset_id == "001": - return SUPPLEMENTARY_DATA_TILES_AND_SLATE_PAYLOAD - if dataset_id == "002": - return SUPPLEMENTARY_DATA_PRODCOM_PAYLOAD + if dataset_id == "c067f6de-6d64-42b1-8b02-431a3486c178": + return load_mock_data("scripts/mock_data/supplementary_data_no_repeat.json") + if dataset_id == "34a80231-c49a-44d0-91a6-8fe1fb190e64": + return load_mock_data("scripts/mock_data/supplementary_data_with_repeat.json") return Response(status=404) +def load_mock_data(filename): + with open(filename, encoding="utf-8") as mock_data_file: + return json.load(mock_data_file) + + if __name__ == "__main__": app.run(host="localhost", port=5003) diff --git a/tests/app/parser/test_supplementary_data_parser.py b/tests/app/parser/test_supplementary_data_parser.py index 866d47f2fd..dd35bc8ff7 100644 --- a/tests/app/parser/test_supplementary_data_parser.py +++ b/tests/app/parser/test_supplementary_data_parser.py @@ -5,7 +5,7 @@ 
from app.utilities.supplementary_data_parser import validate_supplementary_data_v1 SUPPLEMENTARY_DATA_PAYLOAD = { - "dataset_id": "001", + "dataset_id": "44f1b432-9421-49e5-bd26-e63e18a30b69", "survey_id": "123", "data": { "schema_version": "v1", @@ -44,7 +44,7 @@ def test_invalid_supplementary_data_payload_raises_error(): with pytest.raises(ValidationError): validate_supplementary_data( supplementary_data={}, - dataset_id="001", + dataset_id="44f1b432-9421-49e5-bd26-e63e18a30b69", unit_id="12346789012A", survey_id="123", ) @@ -53,7 +53,7 @@ def test_invalid_supplementary_data_payload_raises_error(): def test_validate_supplementary_data_payload(): validated_payload = validate_supplementary_data_v1( supplementary_data=SUPPLEMENTARY_DATA_PAYLOAD, - dataset_id="001", + dataset_id="44f1b432-9421-49e5-bd26-e63e18a30b69", unit_id="12346789012A", survey_id="123", ) @@ -65,7 +65,7 @@ def test_validate_supplementary_data_payload_incorrect_dataset_id(): with pytest.raises(ValidationError): validate_supplementary_data_v1( supplementary_data=SUPPLEMENTARY_DATA_PAYLOAD, - dataset_id="002", + dataset_id="331507ca-1039-4624-a342-7cbc3630e217", unit_id="12346789012A", survey_id="123", ) @@ -75,7 +75,7 @@ def test_validate_supplementary_data_payload_incorrect_survey_id(): with pytest.raises(ValidationError): validate_supplementary_data_v1( supplementary_data=SUPPLEMENTARY_DATA_PAYLOAD, - dataset_id="001", + dataset_id="44f1b432-9421-49e5-bd26-e63e18a30b69", unit_id="12346789012A", survey_id="234", ) @@ -85,7 +85,7 @@ def test_validate_supplementary_data_payload_incorrect_unit_id(): with pytest.raises(ValidationError): validate_supplementary_data_v1( supplementary_data=SUPPLEMENTARY_DATA_PAYLOAD, - dataset_id="001", + dataset_id="44f1b432-9421-49e5-bd26-e63e18a30b69", unit_id="000000000001", survey_id="123", ) @@ -93,7 +93,7 @@ def test_validate_supplementary_data_payload_incorrect_unit_id(): def test_supplementary_data_payload_with_no_items_is_validated(): payload = { - 
"dataset_id": "001", + "dataset_id": "44f1b432-9421-49e5-bd26-e63e18a30b69", "survey_id": "123", "data": { "schema_version": "v1", @@ -103,7 +103,7 @@ def test_supplementary_data_payload_with_no_items_is_validated(): validated_payload = validate_supplementary_data_v1( supplementary_data=payload, - dataset_id="001", + dataset_id="44f1b432-9421-49e5-bd26-e63e18a30b69", unit_id="12346789012A", survey_id="123", ) @@ -113,7 +113,7 @@ def test_supplementary_data_payload_with_no_items_is_validated(): def test_validate_supplementary_data_payload_missing_survey_id(): payload = { - "dataset_id": "001", + "dataset_id": "44f1b432-9421-49e5-bd26-e63e18a30b69", "data": { "schema_version": "v1", "identifier": "12346789012A", @@ -123,7 +123,7 @@ def test_validate_supplementary_data_payload_missing_survey_id(): with pytest.raises(ValidationError): validate_supplementary_data_v1( supplementary_data=payload, - dataset_id="001", + dataset_id="44f1b432-9421-49e5-bd26-e63e18a30b69", unit_id="12346789012A", survey_id="123", ) @@ -131,7 +131,7 @@ def test_validate_supplementary_data_payload_missing_survey_id(): def test_validate_supplementary_data_payload_with_unknown_field(): payload = { - "dataset_id": "001", + "dataset_id": "44f1b432-9421-49e5-bd26-e63e18a30b69", "survey_id": "123", "some_field": "value", "data": { @@ -142,7 +142,7 @@ def test_validate_supplementary_data_payload_with_unknown_field(): validated_payload = validate_supplementary_data_v1( supplementary_data=payload, - dataset_id="001", + dataset_id="44f1b432-9421-49e5-bd26-e63e18a30b69", unit_id="12346789012A", survey_id="123", ) @@ -152,7 +152,7 @@ def test_validate_supplementary_data_payload_with_unknown_field(): def test_validate_supplementary_data_invalid_schema_version(): payload = { - "dataset_id": "001", + "dataset_id": "44f1b432-9421-49e5-bd26-e63e18a30b69", "survey_id": "123", "some_field": "value", "data": { @@ -172,7 +172,7 @@ def test_validate_supplementary_data_invalid_schema_version(): def 
test_validate_supplementary_data_payload_missing_identifier_in_items(): payload = { - "dataset_id": "001", + "dataset_id": "44f1b432-9421-49e5-bd26-e63e18a30b69", "survey_id": "123", "data": { "schema_version": "v1", @@ -208,7 +208,7 @@ def test_validate_supplementary_data_payload_missing_identifier_in_items(): with pytest.raises(ValidationError): validate_supplementary_data_v1( supplementary_data=payload, - dataset_id="001", + dataset_id="44f1b432-9421-49e5-bd26-e63e18a30b69", unit_id="12346789012A", survey_id="123", ) diff --git a/tests/integration/create_token.py b/tests/integration/create_token.py index 13cf45205f..c8bfd759ea 100644 --- a/tests/integration/create_token.py +++ b/tests/integration/create_token.py @@ -65,7 +65,7 @@ "trad_as": "Integration Tests", "employment_date": "1983-06-02", "display_address": "68 Abingdon Road, Goathill", - "sds_dataset_id": "001", + "sds_dataset_id": str(uuid4()), "survey_id": "123", } }, From 06b3b141f23130c378d4f8e9063a960ecd518d2b Mon Sep 17 00:00:00 2001 From: Rhys Berrow Date: Tue, 30 May 2023 14:49:42 +0100 Subject: [PATCH 13/21] Use uuid in test --- tests/app/test_request_supplementary_data.py | 26 ++++++++++++++------ 1 file changed, 19 insertions(+), 7 deletions(-) diff --git a/tests/app/test_request_supplementary_data.py b/tests/app/test_request_supplementary_data.py index ba6c3cfe45..5117d89702 100644 --- a/tests/app/test_request_supplementary_data.py +++ b/tests/app/test_request_supplementary_data.py @@ -13,7 +13,7 @@ TEST_SDS_URL = "http://test.domain/v1/unit_data" mock_supplementary_data_payload = { - "dataset_id": "001", + "dataset_id": "44f1b432-9421-49e5-bd26-e63e18a30b69", "survey_id": "123", "data": { "schema_version": "v1", @@ -60,7 +60,9 @@ def test_get_supplementary_data_200(app: Flask): status=200, ) loaded_supplementary_data = get_supplementary_data( - dataset_id="001", unit_id="12346789012A", survey_id="123" + dataset_id="44f1b432-9421-49e5-bd26-e63e18a30b69", + unit_id="12346789012A", + 
survey_id="123", ) assert loaded_supplementary_data == mock_supplementary_data_payload @@ -84,7 +86,9 @@ def test_get_supplementary_data_non_200(app: Flask, status_code): with pytest.raises(SupplementaryDataRequestFailed) as exc: get_supplementary_data( - dataset_id="001", unit_id="12346789012A", survey_id="123" + dataset_id="44f1b432-9421-49e5-bd26-e63e18a30b69", + unit_id="12346789012A", + survey_id="123", ) assert str(exc.value) == "Supplementary Data request failed" @@ -98,7 +102,9 @@ def test_get_supplementary_data_request_failed(app: Flask): responses.add(responses.GET, TEST_SDS_URL, body=RequestException()) with pytest.raises(SupplementaryDataRequestFailed) as exc: get_supplementary_data( - dataset_id="001", unit_id="12346789012A", survey_id="123" + dataset_id="44f1b432-9421-49e5-bd26-e63e18a30b69", + unit_id="12346789012A", + survey_id="123", ) assert str(exc.value) == "Supplementary Data request failed" @@ -116,7 +122,9 @@ def test_get_supplementary_data_retries_timeout_error( try: supplementary_data = get_supplementary_data( - dataset_id="001", unit_id="12346789012A", survey_id="123" + dataset_id="44f1b432-9421-49e5-bd26-e63e18a30b69", + unit_id="12346789012A", + survey_id="123", ) except SupplementaryDataRequestFailed: return pytest.fail("Supplementary data request unexpectedly failed") @@ -144,7 +152,9 @@ def test_get_supplementary_data_retries_transient_error(app: Flask, mocker): try: supplementary_data = get_supplementary_data( - dataset_id="001", unit_id="12346789012A", survey_id="123" + dataset_id="44f1b432-9421-49e5-bd26-e63e18a30b69", + unit_id="12346789012A", + survey_id="123", ) except SupplementaryDataRequestFailed: return pytest.fail("Supplementary data request unexpectedly failed") @@ -168,7 +178,9 @@ def test_get_supplementary_data_max_retries(app: Flask, mocker): with pytest.raises(SupplementaryDataRequestFailed) as exc: get_supplementary_data( - dataset_id="001", unit_id="12346789012A", survey_id="123" + 
dataset_id="44f1b432-9421-49e5-bd26-e63e18a30b69", + unit_id="12346789012A", + survey_id="123", ) assert str(exc.value) == "Supplementary Data request failed" From 6890fce26cf8abcaed119f968182d327c2f6f8de Mon Sep 17 00:00:00 2001 From: Rhys Berrow Date: Thu, 1 Jun 2023 10:45:04 +0100 Subject: [PATCH 14/21] PR comments --- app/routes/errors.py | 9 +++++++++ app/routes/session.py | 2 +- app/services/__init__.py | 0 app/{ => services}/supplementary_data.py | 9 ++++++--- app/utilities/supplementary_data_parser.py | 7 +++---- tests/app/parser/test_supplementary_data_parser.py | 2 +- tests/app/services/__init__.py | 0 .../{ => services}/test_request_supplementary_data.py | 6 +++--- tests/integration/routes/test_session.py | 10 ++++++++++ 9 files changed, 33 insertions(+), 12 deletions(-) create mode 100644 app/services/__init__.py rename app/{ => services}/supplementary_data.py (90%) create mode 100644 tests/app/services/__init__.py rename tests/app/{ => services}/test_request_supplementary_data.py (96%) diff --git a/app/routes/errors.py b/app/routes/errors.py index 2d8f8d2b6c..ecac804181 100644 --- a/app/routes/errors.py +++ b/app/routes/errors.py @@ -22,6 +22,7 @@ from app.globals import get_metadata from app.helpers.language_helper import handle_language from app.helpers.template_helpers import get_survey_config, render_template +from app.services.supplementary_data import SupplementaryDataRequestFailed from app.settings import ACCOUNT_SERVICE_BASE_URL_SOCIAL from app.submitter.previously_submitted_exception import PreviouslySubmittedException from app.submitter.submission_failed import SubmissionFailedException @@ -189,6 +190,14 @@ def too_many_feedback_requests( ) +@errors_blueprint.app_errorhandler(SupplementaryDataRequestFailed) +def supplementary_data_request_failed( + exception: SupplementaryDataRequestFailed, +) -> tuple[str, int]: + log_exception(exception, 500) + return _render_error_page(500, template=500) + + 
@errors_blueprint.app_errorhandler(SubmissionFailedException) def submission_failed( exception: SubmissionFailedException, diff --git a/app/routes/session.py b/app/routes/session.py index d7fc277feb..ed05508320 100644 --- a/app/routes/session.py +++ b/app/routes/session.py @@ -23,7 +23,7 @@ ) from app.questionnaire import QuestionnaireSchema from app.routes.errors import _render_error_page -from app.supplementary_data import get_supplementary_data +from app.services.supplementary_data import get_supplementary_data from app.utilities.metadata_parser import validate_runner_claims from app.utilities.metadata_parser_v2 import ( validate_questionnaire_claims, diff --git a/app/services/__init__.py b/app/services/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/app/supplementary_data.py b/app/services/supplementary_data.py similarity index 90% rename from app/supplementary_data.py rename to app/services/supplementary_data.py index 49f86dc96f..12bcea9bdb 100644 --- a/app/supplementary_data.py +++ b/app/services/supplementary_data.py @@ -1,5 +1,6 @@ import json from typing import Mapping +from urllib.parse import urlencode from flask import current_app from marshmallow import ValidationError @@ -9,7 +10,6 @@ from app.utilities.request_session import get_retryable_session from app.utilities.supplementary_data_parser import validate_supplementary_data_v1 -SUPPLEMENTARY_DATA_URL = "http://localhost:5003/v1/unit_data" SUPPLEMENTARY_DATA_REQUEST_BACKOFF_FACTOR = 0.2 SUPPLEMENTARY_DATA_REQUEST_MAX_RETRIES = 2 # Totals no. of request should be 3. 
The initial request + SUPPLEMENTARY_DATA_REQUEST_MAX_RETRIES SUPPLEMENTARY_DATA_REQUEST_TIMEOUT = 3 @@ -30,11 +30,14 @@ def __str__(self) -> str: return "Supplementary Data request failed" -def get_supplementary_data(dataset_id: str, unit_id: str, survey_id: str) -> dict: +def get_supplementary_data(*, dataset_id: str, unit_id: str, survey_id: str) -> dict: supplementary_data_url = current_app.config["SDS_API_BASE_URL"] + parameters = {"dataset_id": dataset_id, "unit_id": unit_id} + + encoded_parameters = urlencode(parameters) constructed_supplementary_data_url = ( - f"{supplementary_data_url}?dataset_id={dataset_id}&unit_id={unit_id}" + f"{supplementary_data_url}?{encoded_parameters}" ) session = get_retryable_session( diff --git a/app/utilities/supplementary_data_parser.py b/app/utilities/supplementary_data_parser.py index 68a8b9c3b8..4dd002163a 100644 --- a/app/utilities/supplementary_data_parser.py +++ b/app/utilities/supplementary_data_parser.py @@ -1,7 +1,6 @@ from typing import Mapping from marshmallow import ( - EXCLUDE, INCLUDE, Schema, ValidationError, @@ -27,7 +26,7 @@ class SupplementaryData(Schema, StripWhitespaceMixin): schema_version = VALIDATORS["string"]( validate=validate.OneOf([SupplementaryDataSchemaVersion.V1.value]) ) - items = fields.Nested(ItemsData, required=False, unknown=EXCLUDE) + items = fields.Nested(ItemsData, required=False, unknown=INCLUDE) @validates_schema() def validate_unit_id(self, data, **kwargs): @@ -48,12 +47,12 @@ class SupplementaryDataMetadataSchema(Schema, StripWhitespaceMixin): data = fields.Nested( SupplementaryData, required=True, - unknown=EXCLUDE, + unknown=INCLUDE, validate=validate.Length(min=1), ) @validates_schema() - def validate_dataset_id(self, data, **kwargs): + def validate_dataset_and_survey_id(self, data, **kwargs): # pylint: disable=no-self-use, unused-argument if data: if ( diff --git a/tests/app/parser/test_supplementary_data_parser.py b/tests/app/parser/test_supplementary_data_parser.py index 
dd35bc8ff7..58799bdef6 100644 --- a/tests/app/parser/test_supplementary_data_parser.py +++ b/tests/app/parser/test_supplementary_data_parser.py @@ -1,7 +1,7 @@ import pytest from marshmallow import ValidationError -from app.supplementary_data import validate_supplementary_data +from app.services.supplementary_data import validate_supplementary_data from app.utilities.supplementary_data_parser import validate_supplementary_data_v1 SUPPLEMENTARY_DATA_PAYLOAD = { diff --git a/tests/app/services/__init__.py b/tests/app/services/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/app/test_request_supplementary_data.py b/tests/app/services/test_request_supplementary_data.py similarity index 96% rename from tests/app/test_request_supplementary_data.py rename to tests/app/services/test_request_supplementary_data.py index 5117d89702..dc51abfa36 100644 --- a/tests/app/test_request_supplementary_data.py +++ b/tests/app/services/test_request_supplementary_data.py @@ -3,7 +3,7 @@ from flask import Flask, current_app from requests import RequestException -from app.supplementary_data import ( +from app.services.supplementary_data import ( SUPPLEMENTARY_DATA_REQUEST_MAX_RETRIES, SupplementaryDataRequestFailed, get_supplementary_data, @@ -116,7 +116,7 @@ def test_get_supplementary_data_retries_timeout_error( with app.app_context(): current_app.config["SDS_API_BASE_URL"] = TEST_SDS_URL mocker.patch( - "app.supplementary_data.validate_supplementary_data", + "app.services.supplementary_data.validate_supplementary_data", return_value=mock_supplementary_data_payload, ) @@ -146,7 +146,7 @@ def test_get_supplementary_data_retries_transient_error(app: Flask, mocker): ) mocker.patch( - "app.supplementary_data.validate_supplementary_data", + "app.services.supplementary_data.validate_supplementary_data", return_value=mock_supplementary_data_payload, ) diff --git a/tests/integration/routes/test_session.py b/tests/integration/routes/test_session.py index 
3a803674f4..119cd7050e 100644 --- a/tests/integration/routes/test_session.py +++ b/tests/integration/routes/test_session.py @@ -5,6 +5,7 @@ from mock.mock import patch from app.questionnaire.questionnaire_schema import DEFAULT_LANGUAGE_CODE +from app.services.supplementary_data import SupplementaryDataRequestFailed from app.settings import ACCOUNT_SERVICE_BASE_URL, ACCOUNT_SERVICE_BASE_URL_SOCIAL from app.utilities.json import json_loads from tests.integration.integration_test_case import IntegrationTestCase @@ -94,6 +95,15 @@ def test_supplementary_data_is_loaded_when_sds_dataset_id_in_metadata(self): self.launchSupplementaryDataSurvey() self.assertStatusOK() + def test_supplementary_data_raises_500_error_on_exception(self): + with patch( + "app.routes.session.get_supplementary_data", + side_effect=SupplementaryDataRequestFailed, + ): + self.launchSupplementaryDataSurvey() + self.assertStatusCode(500) + self.assertInBody("Sorry, there is a problem with this service") + class TestCensusSession(IntegrationTestCase): def setUp(self): From f22802d933b40bc32d910e8cc0496cd95ff00abb Mon Sep 17 00:00:00 2001 From: Rhys Berrow Date: Thu, 1 Jun 2023 14:20:11 +0100 Subject: [PATCH 15/21] Update parser --- app/utilities/supplementary_data_parser.py | 13 +++---------- tests/integration/create_token.py | 2 +- 2 files changed, 4 insertions(+), 11 deletions(-) diff --git a/app/utilities/supplementary_data_parser.py b/app/utilities/supplementary_data_parser.py index 4dd002163a..0c3c69f23d 100644 --- a/app/utilities/supplementary_data_parser.py +++ b/app/utilities/supplementary_data_parser.py @@ -31,11 +31,7 @@ class SupplementaryData(Schema, StripWhitespaceMixin): @validates_schema() def validate_unit_id(self, data, **kwargs): # pylint: disable=no-self-use, unused-argument - if ( - data - and data.get("identifier") - and data["identifier"] != self.context["unit_id"] - ): + if data and data["identifier"] != self.context["unit_id"]: raise ValidationError( "Supplementary data did not 
return the specified Unit ID" ) @@ -55,15 +51,12 @@ class SupplementaryDataMetadataSchema(Schema, StripWhitespaceMixin): def validate_dataset_and_survey_id(self, data, **kwargs): # pylint: disable=no-self-use, unused-argument if data: - if ( - data.get("dataset_id") - and data["dataset_id"] != self.context["dataset_id"] - ): + if data["dataset_id"] != self.context["dataset_id"]: raise ValidationError( "Supplementary data did not return the specified Dataset ID" ) - if data.get("survey_id") and data["survey_id"] != self.context["survey_id"]: + if data["survey_id"] != self.context["survey_id"]: raise ValidationError( "Supplementary data did not return the specified Survey ID" ) diff --git a/tests/integration/create_token.py b/tests/integration/create_token.py index c8bfd759ea..9dc97e2500 100644 --- a/tests/integration/create_token.py +++ b/tests/integration/create_token.py @@ -65,7 +65,7 @@ "trad_as": "Integration Tests", "employment_date": "1983-06-02", "display_address": "68 Abingdon Road, Goathill", - "sds_dataset_id": str(uuid4()), + "sds_dataset_id": "44f1b432-9421-49e5-bd26-e63e18a30b69", "survey_id": "123", } }, From 675b9320c25ab083100e1ac10641ddd8da0d821d Mon Sep 17 00:00:00 2001 From: Rhys Berrow Date: Mon, 5 Jun 2023 09:01:04 +0100 Subject: [PATCH 16/21] Update env vars and add readme --- .development.env | 1 + .functional-tests.env | 1 + doc/run-mock-sds-endpoint.md | 23 +++++++++++++++++++++++ 3 files changed, 25 insertions(+) create mode 100644 doc/run-mock-sds-endpoint.md diff --git a/.development.env b/.development.env index f8011aa2d3..c9546f9d6a 100644 --- a/.development.env +++ b/.development.env @@ -27,3 +27,4 @@ CDN_ASSETS_PATH=/design-system ADDRESS_LOOKUP_API_URL=https://whitelodge-ai-api.census-gcp.onsdigital.uk COOKIE_SETTINGS_URL=# EQ_SUBMISSION_CONFIRMATION_BACKEND=log +SDS_API_BASE_URL=http://localhost:5003/v1/unit_data diff --git a/.functional-tests.env b/.functional-tests.env index cd5b1c31a7..3192849c18 100644 --- 
a/.functional-tests.env +++ b/.functional-tests.env @@ -28,3 +28,4 @@ ADDRESS_LOOKUP_API_URL=https://whitelodge-ai-api.census-gcp.onsdigital.uk COOKIE_SETTINGS_URL=# EQ_SUBMISSION_CONFIRMATION_BACKEND=log VIEW_SUBMITTED_RESPONSE_EXPIRATION_IN_SECONDS=35 +SDS_API_BASE_URL=http://localhost:5003/v1/unit_data diff --git a/doc/run-mock-sds-endpoint.md b/doc/run-mock-sds-endpoint.md new file mode 100644 index 0000000000..e5d8c02ec7 --- /dev/null +++ b/doc/run-mock-sds-endpoint.md @@ -0,0 +1,23 @@ +# Running the Mock SDS endpoint + +In order to test loading supplementary data, we have a development script that creates a Mock SDS endpoint in Flask that +returns mocked supplementary data. + +Ensure the following env var is set before running the script: +```bash +SDS_API_BASE_URL=http://localhost:5003/v1/unit_data +``` + +From the home directory, run using +```python + python -m scripts.mock_sds_endpoint +``` + +The following datasets are available using the mocked endpoint. To retrieve the data, one of the following dataset IDs needs to be set using the `sds_dataset_id` +field in Launcher. 
+ +| Dataset ID | Description | +|------------------------|------------------------------------------------------------| +| `c067f6de-6d64-42b1-8b02-431a3486c178` | Basic supplementary data structure with no repeating items | +| `34a80231-c49a-44d0-91a6-8fe1fb190e64` | Supplementary data structure with repeating items | + From 066ac7a0b87b955d6f8a11236382fe0eef3fa2f3 Mon Sep 17 00:00:00 2001 From: Rhys Berrow Date: Mon, 5 Jun 2023 10:59:13 +0100 Subject: [PATCH 17/21] Check error messages --- .../parser/test_supplementary_data_parser.py | 73 ++++++++----------- 1 file changed, 32 insertions(+), 41 deletions(-) diff --git a/tests/app/parser/test_supplementary_data_parser.py b/tests/app/parser/test_supplementary_data_parser.py index 58799bdef6..3e2371a601 100644 --- a/tests/app/parser/test_supplementary_data_parser.py +++ b/tests/app/parser/test_supplementary_data_parser.py @@ -41,7 +41,7 @@ def test_invalid_supplementary_data_payload_raises_error(): - with pytest.raises(ValidationError): + with pytest.raises(ValidationError) as error: validate_supplementary_data( supplementary_data={}, dataset_id="44f1b432-9421-49e5-bd26-e63e18a30b69", @@ -49,6 +49,8 @@ def test_invalid_supplementary_data_payload_raises_error(): survey_id="123", ) + assert str(error.value) == "Invalid supplementary data" + def test_validate_supplementary_data_payload(): validated_payload = validate_supplementary_data_v1( @@ -62,7 +64,7 @@ def test_validate_supplementary_data_payload(): def test_validate_supplementary_data_payload_incorrect_dataset_id(): - with pytest.raises(ValidationError): + with pytest.raises(ValidationError) as error: validate_supplementary_data_v1( supplementary_data=SUPPLEMENTARY_DATA_PAYLOAD, dataset_id="331507ca-1039-4624-a342-7cbc3630e217", @@ -70,9 +72,14 @@ def test_validate_supplementary_data_payload_incorrect_dataset_id(): survey_id="123", ) + assert ( + str(error.value) + == "{'_schema': ['Supplementary data did not return the specified Dataset ID']}" + ) + def 
test_validate_supplementary_data_payload_incorrect_survey_id(): - with pytest.raises(ValidationError): + with pytest.raises(ValidationError) as error: validate_supplementary_data_v1( supplementary_data=SUPPLEMENTARY_DATA_PAYLOAD, dataset_id="44f1b432-9421-49e5-bd26-e63e18a30b69", @@ -80,9 +87,14 @@ def test_validate_supplementary_data_payload_incorrect_survey_id(): survey_id="234", ) + assert ( + str(error.value) + == "{'_schema': ['Supplementary data did not return the specified Survey ID']}" + ) + def test_validate_supplementary_data_payload_incorrect_unit_id(): - with pytest.raises(ValidationError): + with pytest.raises(ValidationError) as error: validate_supplementary_data_v1( supplementary_data=SUPPLEMENTARY_DATA_PAYLOAD, dataset_id="44f1b432-9421-49e5-bd26-e63e18a30b69", @@ -90,6 +102,11 @@ def test_validate_supplementary_data_payload_incorrect_unit_id(): survey_id="123", ) + assert ( + str(error.value) + == "{'data': {'_schema': ['Supplementary data did not return the specified Unit ID']}}" + ) + def test_supplementary_data_payload_with_no_items_is_validated(): payload = { @@ -120,7 +137,7 @@ def test_validate_supplementary_data_payload_missing_survey_id(): }, } - with pytest.raises(ValidationError): + with pytest.raises(ValidationError) as error: validate_supplementary_data_v1( supplementary_data=payload, dataset_id="44f1b432-9421-49e5-bd26-e63e18a30b69", @@ -128,6 +145,8 @@ def test_validate_supplementary_data_payload_missing_survey_id(): survey_id="123", ) + assert str(error.value) == "{'survey_id': ['Missing data for required field.']}" + def test_validate_supplementary_data_payload_with_unknown_field(): payload = { @@ -161,7 +180,7 @@ def test_validate_supplementary_data_invalid_schema_version(): }, } - with pytest.raises(ValidationError): + with pytest.raises(ValidationError) as error: validate_supplementary_data_v1( supplementary_data=payload, dataset_id="001", @@ -169,46 +188,18 @@ def test_validate_supplementary_data_invalid_schema_version(): 
survey_id="123", ) + assert str(error.value) == "{'data': {'schema_version': ['Must be one of: v1.']}}" + def test_validate_supplementary_data_payload_missing_identifier_in_items(): - payload = { - "dataset_id": "44f1b432-9421-49e5-bd26-e63e18a30b69", - "survey_id": "123", - "data": { - "schema_version": "v1", - "identifier": "12346789012A", - "items": { - "local_units": [ - { - "identifier": "0001", - "lu_name": "TEST NAME. 1", - "lu_address": [ - "FIRST ADDRESS 1", - "FIRST ADDRESS 2", - "TOWN", - "COUNTY", - "POST CODE", - ], - }, - { - "lu_name": "TEST NAME 2", - "lu_address": [ - "SECOND ADDRESS 1", - "SECOND ADDRESS 1", - "TOWN", - "COUNTY", - "POSTCODE", - ], - }, - ] - }, - }, - } + SUPPLEMENTARY_DATA_PAYLOAD["data"]["items"]["local_units"][0].pop("identifier") - with pytest.raises(ValidationError): + with pytest.raises(ValidationError) as error: validate_supplementary_data_v1( - supplementary_data=payload, + supplementary_data=SUPPLEMENTARY_DATA_PAYLOAD, dataset_id="44f1b432-9421-49e5-bd26-e63e18a30b69", unit_id="12346789012A", survey_id="123", ) + + assert str(error.value) == "{'identifier': ['Missing data for required field.']}" From 865fe7fd2ee3c8bdaf26945b49e25d623926b771 Mon Sep 17 00:00:00 2001 From: Rhys Berrow Date: Tue, 6 Jun 2023 14:07:06 +0100 Subject: [PATCH 18/21] Update test --- tests/app/parser/test_supplementary_data_parser.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/tests/app/parser/test_supplementary_data_parser.py b/tests/app/parser/test_supplementary_data_parser.py index 3e2371a601..1a7f67ff8c 100644 --- a/tests/app/parser/test_supplementary_data_parser.py +++ b/tests/app/parser/test_supplementary_data_parser.py @@ -1,3 +1,5 @@ +from copy import deepcopy + import pytest from marshmallow import ValidationError @@ -192,11 +194,12 @@ def test_validate_supplementary_data_invalid_schema_version(): def test_validate_supplementary_data_payload_missing_identifier_in_items(): - 
SUPPLEMENTARY_DATA_PAYLOAD["data"]["items"]["local_units"][0].pop("identifier") + payload = deepcopy(SUPPLEMENTARY_DATA_PAYLOAD) + payload["data"]["items"]["local_units"][0].pop("identifier") with pytest.raises(ValidationError) as error: validate_supplementary_data_v1( - supplementary_data=SUPPLEMENTARY_DATA_PAYLOAD, + supplementary_data=payload, dataset_id="44f1b432-9421-49e5-bd26-e63e18a30b69", unit_id="12346789012A", survey_id="123", From bcc72abaab7442eaadf81ae5a64513dd267526c3 Mon Sep 17 00:00:00 2001 From: Rhys Berrow Date: Tue, 6 Jun 2023 14:20:07 +0100 Subject: [PATCH 19/21] Update test name --- tests/app/conftest.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/app/conftest.py b/tests/app/conftest.py index fd6a04c60d..0e84312ae1 100644 --- a/tests/app/conftest.py +++ b/tests/app/conftest.py @@ -195,15 +195,15 @@ def mock_autoescape_context(mocker): return mocker.Mock(autoescape=True) -@pytest.fixture(name="mocked_response_content") -def mocked_response_content_fixture(mocker): +@pytest.fixture +def mocked_response_content(mocker): decodable_content = Mock() decodable_content.decode.return_value = b"{}" mocker.patch("requests.models.Response.content", decodable_content) -@pytest.fixture(name="mocked_make_request_with_timeout") -def mocked_make_request_with_timeout_fixture( +@pytest.fixture +def mocked_make_request_with_timeout( mocker, mocked_response_content # pylint: disable=unused-argument ): connect_timeout_error = ConnectTimeoutError("connect timed out") From 186c74710547bde077bbf9b7344b55d94f0648c4 Mon Sep 17 00:00:00 2001 From: Rhys Berrow Date: Thu, 8 Jun 2023 15:29:14 +0100 Subject: [PATCH 20/21] Fix some strings --- app/utilities/supplementary_data_parser.py | 2 +- schemas/test/en/test_supplementary_data.json | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/app/utilities/supplementary_data_parser.py b/app/utilities/supplementary_data_parser.py index 0c3c69f23d..8ffee3e3f7 100644 --- 
a/app/utilities/supplementary_data_parser.py +++ b/app/utilities/supplementary_data_parser.py @@ -68,7 +68,7 @@ def validate_supplementary_data_v1( unit_id: str, survey_id: str, ) -> dict: - """Validate claims required for runner to function""" + """Validate claims required for supplementary data""" supplementary_data_metadata_schema = SupplementaryDataMetadataSchema( unknown=INCLUDE ) diff --git a/schemas/test/en/test_supplementary_data.json b/schemas/test/en/test_supplementary_data.json index bfa413765a..e19d73474d 100644 --- a/schemas/test/en/test_supplementary_data.json +++ b/schemas/test/en/test_supplementary_data.json @@ -4,9 +4,9 @@ "schema_version": "0.0.1", "data_version": "0.0.3", "survey_id": "0", - "title": "Test Prepop Data", + "title": "Test Supplementary Data", "theme": "default", - "description": "A questionnaire to demo loading prepop data.", + "description": "A questionnaire to demo loading supplementary data.", "metadata": [ { "name": "user_id", @@ -46,10 +46,10 @@ { "id": "interstitial-definition", "content": { - "title": "Prepop", + "title": "Supplementary Data", "contents": [ { - "description": "You have successfully loaded Prepop data" + "description": "You have successfully loaded supplementary data" } ] }, From f578499b49ca695f7754a471367ab1d4d73759c8 Mon Sep 17 00:00:00 2001 From: Rhys Berrow <47635349+berroar@users.noreply.github.com> Date: Fri, 9 Jun 2023 12:04:51 +0100 Subject: [PATCH 21/21] Update doc/run-mock-sds-endpoint.md Co-authored-by: rmccar <42928680+rmccar@users.noreply.github.com> --- doc/run-mock-sds-endpoint.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/run-mock-sds-endpoint.md b/doc/run-mock-sds-endpoint.md index e5d8c02ec7..10a656d192 100644 --- a/doc/run-mock-sds-endpoint.md +++ b/doc/run-mock-sds-endpoint.md @@ -10,7 +10,7 @@ SDS_API_BASE_URL=http://localhost:5003/v1/unit_data From the home directory, run using ```python - python -m 
scripts.mock_sds_endpoint ``` The following datasets are available using the mocked endpoint. To retrieve the data, one of the following dataset IDs needs to be set using the `sds_dataset_id`