diff --git a/lambdas/enums/document_review_reason.py b/lambdas/enums/document_review_reason.py index 4cee878faa..5e11c6e56f 100644 --- a/lambdas/enums/document_review_reason.py +++ b/lambdas/enums/document_review_reason.py @@ -2,10 +2,5 @@ class DocumentReviewReason(StrEnum): - UNKNOWN_NHS_NUMBER = "Unknown NHS number" - DEMOGRAPHIC_MISMATCHES = "Demographic mismatches" - DUPLICATE_RECORD = "Duplicate records error" - FILE_COUNT_MISMATCH = "More or less files than we expected" - FILE_NAME_MISMATCH = "Filename Naming convention error" - GP2GP_ERROR = "GP2GP failure" - GENERAL_ERROR = "General error" + NEW_DOCUMENT = "New document to review" + UNSUCCESSFUL_UPLOAD = "Unsuccessful upload" diff --git a/lambdas/enums/feature_flags.py b/lambdas/enums/feature_flags.py index 72a59e44b7..e22ec70ece 100755 --- a/lambdas/enums/feature_flags.py +++ b/lambdas/enums/feature_flags.py @@ -11,3 +11,4 @@ class FeatureFlags(StrEnum): ) UPLOAD_DOCUMENT_ITERATION_2_ENABLED = "uploadDocumentIteration2Enabled" UPLOAD_DOCUMENT_ITERATION_3_ENABLED = "uploadDocumentIteration3Enabled" + BULK_UPLOAD_SEND_TO_REVIEW_ENABLED = "bulkUploadSendToReviewEnabled" diff --git a/lambdas/handlers/bulk_upload_handler.py b/lambdas/handlers/bulk_upload_handler.py index 1d2136d147..71d182a0d1 100644 --- a/lambdas/handlers/bulk_upload_handler.py +++ b/lambdas/handlers/bulk_upload_handler.py @@ -27,11 +27,22 @@ def lambda_handler(event, _context): validation_strict_mode = validation_strict_mode_flag_object[ FeatureFlags.LLOYD_GEORGE_VALIDATION_STRICT_MODE_ENABLED.value ] + + send_to_review_flag_object = feature_flag_service.get_feature_flags_by_flag( + FeatureFlags.BULK_UPLOAD_SEND_TO_REVIEW_ENABLED.value + ) + send_to_review_enabled = send_to_review_flag_object[ + FeatureFlags.BULK_UPLOAD_SEND_TO_REVIEW_ENABLED.value + ] + bypass_pds = os.getenv("BYPASS_PDS", "false").lower() == "true" if validation_strict_mode: logger.info("Lloyd George validation strict mode is enabled") + if send_to_review_enabled: + logger.info("Bulk upload send to review queue is enabled") + if "Records" not in event or len(event["Records"]) < 1: http_status_code = 400 response_body = ( @@ -43,7 +54,9 @@ def lambda_handler(event, _context): ).create_api_gateway_response() bulk_upload_service = BulkUploadService( - strict_mode=validation_strict_mode, bypass_pds=bypass_pds + strict_mode=validation_strict_mode, + bypass_pds=bypass_pds, + send_to_review_enabled=send_to_review_enabled, ) try: diff --git a/lambdas/models/document_review.py b/lambdas/models/document_review.py index 8ef7610d36..abf80ebd73 100644 --- a/lambdas/models/document_review.py +++ b/lambdas/models/document_review.py @@ -5,12 +5,22 @@ from enums.document_review_reason import DocumentReviewReason from enums.document_review_status import DocumentReviewStatus from enums.metadata_field_names import DocumentReferenceMetadataFields -from enums.upload_forbidden_file_extensions import is_file_type_allowed from enums.snomed_codes import SnomedCodes -from pydantic import BaseModel, ConfigDict, Field, field_validator, model_validator, ValidationError +from enums.upload_forbidden_file_extensions import is_file_type_allowed +from pydantic import ( + BaseModel, + ConfigDict, + Field, + field_validator, + model_validator, +) from pydantic.alias_generators import to_camel, to_pascal -from utils.exceptions import InvalidNhsNumberException, ConfigNotFoundException, InvalidFileTypeException from utils import upload_file_configs +from utils.exceptions import ( + ConfigNotFoundException, + InvalidFileTypeException, + 
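The handler wiring above reads the new bulkUploadSendToReviewEnabled flag through the same lookup pattern as the existing strict-mode flag. A minimal sketch of that pattern, assuming (as the handler code implies) that get_feature_flags_by_flag returns a dict keyed by the flag name; the helper function itself is hypothetical:

```python
from enums.feature_flags import FeatureFlags
from services.feature_flags_service import FeatureFlagService


def is_send_to_review_enabled() -> bool:
    # Mirrors the inline lookup in bulk_upload_handler.lambda_handler.
    flag_name = FeatureFlags.BULK_UPLOAD_SEND_TO_REVIEW_ENABLED.value
    flags = FeatureFlagService().get_feature_flags_by_flag(flag_name)
    return bool(flags[flag_name])
```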
InvalidNhsNumberException, +) from utils.utilities import validate_nhs_number @@ -43,7 +53,7 @@ class DocumentUploadReviewReference(BaseModel): default=DocumentReviewStatus.PENDING_REVIEW ) review_reason: DocumentReviewReason = Field( - default=DocumentReviewReason.GENERAL_ERROR + default=DocumentReviewReason.UNSUCCESSFUL_UPLOAD ) review_date: int | None = Field(default=None) reviewer: str | None = Field(default=None) @@ -165,7 +175,9 @@ def verify_nhs_number(cls, value) -> str | None: @model_validator(mode="after") def validate_file_extension(self) -> Self: try: - accepted_file_types = upload_file_configs.get_config_by_snomed_code(self.snomed_code.code).accepted_file_types + accepted_file_types = upload_file_configs.get_config_by_snomed_code( + self.snomed_code.code + ).accepted_file_types for file in self.documents: if not is_file_type_allowed(file, accepted_file_types): @@ -173,4 +185,3 @@ def validate_file_extension(self) -> Self: return self except ConfigNotFoundException: raise InvalidFileTypeException("Unable to find file configuration.") - diff --git a/lambdas/models/sqs/review_message_body.py b/lambdas/models/sqs/review_message_body.py index 2233a068de..69696fd224 100644 --- a/lambdas/models/sqs/review_message_body.py +++ b/lambdas/models/sqs/review_message_body.py @@ -3,7 +3,7 @@ class ReviewMessageFile(BaseModel): - """Model for individual file in SQS message body from the document review queue.""" + """Model for an individual file in the SQS message body from the document review queue""" file_name: str file_path: str = Field(description="Location in the staging bucket") @@ -11,7 +11,7 @@ class ReviewMessageFile(BaseModel): class ReviewMessageBody(BaseModel): - """Model for SQS message body from the document review queue.""" + """Model for SQS message body from the document review queue""" model_config = ConfigDict( use_enum_values=True, @@ -20,8 +20,6 @@ class ReviewMessageBody(BaseModel): files: list[ReviewMessageFile] nhs_number: str failure_reason: DocumentReviewReason = Field( - default=DocumentReviewReason.GENERAL_ERROR + default=DocumentReviewReason.UNSUCCESSFUL_UPLOAD ) - upload_date: str uploader_ods: str - current_gp: str diff --git a/lambdas/repositories/bulk_upload/bulk_upload_sqs_repository.py b/lambdas/repositories/bulk_upload/bulk_upload_sqs_repository.py index 2116cd11e9..78f738f411 100644 --- a/lambdas/repositories/bulk_upload/bulk_upload_sqs_repository.py +++ b/lambdas/repositories/bulk_upload/bulk_upload_sqs_repository.py @@ -1,25 +1,28 @@ import os import uuid +from datetime import datetime +from enums.document_review_reason import DocumentReviewReason from models.sqs.pdf_stitching_sqs_message import PdfStitchingSqsMessage +from models.sqs.review_message_body import ReviewMessageBody, ReviewMessageFile from models.staging_metadata import StagingSqsMetadata from services.base.sqs_service import SQSService from utils.audit_logging_setup import LoggingService from utils.request_context import request_context -_logger = LoggingService(__name__) +logger = LoggingService(__name__) class BulkUploadSqsRepository: def __init__(self): self.sqs_repository = SQSService() - self.invalid_queue_url = os.environ["INVALID_SQS_QUEUE_URL"] self.metadata_queue_url = os.environ["METADATA_SQS_QUEUE_URL"] + self.review_queue_url = os.environ["REVIEW_SQS_QUEUE_URL"] def put_staging_metadata_back_to_queue(self, staging_metadata: StagingSqsMetadata): request_context.patient_nhs_no = staging_metadata.nhs_number setattr(staging_metadata, "retries", (staging_metadata.retries + 1)) - 
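For context on the model hunks above: ReviewMessageBody now carries only what the review processor consumes; upload_date and current_gp are dropped, with the custodian resolved later via PDS. A sketch of the resulting model under two assumptions: upload_id is declared above the visible hunk (the repository populates it), and any ConfigDict options beyond use_enum_values are elided:

```python
from enums.document_review_reason import DocumentReviewReason
from pydantic import BaseModel, ConfigDict, Field


class ReviewMessageFile(BaseModel):
    file_name: str
    file_path: str = Field(description="Location in the staging bucket")


class ReviewMessageBody(BaseModel):
    model_config = ConfigDict(use_enum_values=True)  # other options elided

    upload_id: str  # assumed: defined outside the shown hunk
    files: list[ReviewMessageFile]
    nhs_number: str
    failure_reason: DocumentReviewReason = Field(
        default=DocumentReviewReason.UNSUCCESSFUL_UPLOAD
    )
    uploader_ods: str
```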
_logger.info("Returning message to sqs queue...") + logger.info("Returning message to sqs queue...") self.sqs_repository.send_message_with_nhs_number_attr_fifo( queue_url=self.metadata_queue_url, message_body=staging_metadata.model_dump_json(by_alias=True), @@ -27,6 +30,41 @@ def put_staging_metadata_back_to_queue(self, staging_metadata: StagingSqsMetadat group_id=f"back_to_queue_bulk_upload_{uuid.uuid4()}", ) + def send_message_to_review_queue( + self, + staging_metadata: StagingSqsMetadata, + uploader_ods: str, + failure_reason: DocumentReviewReason, + ): + request_context.patient_nhs_no = staging_metadata.nhs_number + review_files = [ + ReviewMessageFile( + file_name=file.stored_file_name.split("/")[-1], + file_path=file.file_path, + ) + for file in staging_metadata.files + ] + + upload_id = f"{uuid.uuid4()}" + + review_message = ReviewMessageBody( + upload_id=upload_id, + files=review_files, + nhs_number=staging_metadata.nhs_number, + failure_reason=failure_reason, + uploader_ods=uploader_ods, + ) + + logger.info( + f"Sending message to review queue for NHS number {staging_metadata.nhs_number} " + f"with failure reason: {failure_reason}" + ) + + self.sqs_repository.send_message_standard( + queue_url=self.review_queue_url, + message_body=review_message.model_dump_json(), + ) + def put_sqs_message_back_to_queue(self, sqs_message: dict): try: nhs_number = sqs_message["messageAttributes"]["NhsNumber"]["stringValue"] @@ -34,7 +72,7 @@ def put_sqs_message_back_to_queue(self, sqs_message: dict): except KeyError: nhs_number = "" - _logger.info("Returning message to sqs queue...") + logger.info("Returning message to sqs queue...") self.sqs_repository.send_message_with_nhs_number_attr_fifo( queue_url=self.metadata_queue_url, message_body=sqs_message["body"], diff --git a/lambdas/services/bulk_upload_service.py b/lambdas/services/bulk_upload_service.py index 71981301b1..21bf7eddfb 100644 --- a/lambdas/services/bulk_upload_service.py +++ b/lambdas/services/bulk_upload_service.py @@ -4,13 +4,18 @@ import pydantic from botocore.exceptions import ClientError +from enums.document_review_reason import DocumentReviewReason from enums.patient_ods_inactive_status import PatientOdsInactiveStatus from enums.snomed_codes import SnomedCodes from enums.upload_status import UploadStatus from enums.virus_scan_result import VirusScanResult from models.document_reference import DocumentReference from models.sqs.pdf_stitching_sqs_message import PdfStitchingSqsMessage -from models.staging_metadata import BulkUploadQueueMetadata, StagingSqsMetadata +from models.staging_metadata import ( + NHS_NUMBER_PLACEHOLDER, + BulkUploadQueueMetadata, + StagingSqsMetadata, +) from repositories.bulk_upload.bulk_upload_dynamo_repository import ( BulkUploadDynamoRepository, ) @@ -52,7 +57,7 @@ class BulkUploadService: - def __init__(self, strict_mode, bypass_pds=False): + def __init__(self, strict_mode, bypass_pds=False, send_to_review_enabled=False): self.dynamo_repository = BulkUploadDynamoRepository() self.sqs_repository = BulkUploadSqsRepository() self.bulk_upload_s3_repository = BulkUploadS3Repository() @@ -62,6 +67,7 @@ def __init__(self, strict_mode, bypass_pds=False): self.file_path_cache = {} self.pdf_stitching_queue_url = os.environ["PDF_STITCHING_SQS_URL"] self.bypass_pds = bypass_pds + self.send_to_review_enabled = send_to_review_enabled def process_message_queue(self, records: list): for index, message in enumerate(records, start=1): @@ -74,9 +80,7 @@ def process_message_queue(self, records: list): logger.info( "Cannot 
validate patient due to PDS responded with Too Many Requests" ) - logger.info( - "Cannot process for now due to PDS rate limit reached." - ) + logger.info("Cannot process for now due to PDS rate limit reached.") logger.info( "All remaining messages in this batch will be returned to sqs queue to retry later." ) @@ -132,6 +136,9 @@ def handle_sqs_message(self, message: dict): for file_metadata in staging_metadata.files: file_names.append(os.path.basename(file_metadata.stored_file_name)) file_metadata.scan_date = validate_scan_date(file_metadata.scan_date) + file_metadata.file_path = self.strip_leading_slash( + file_metadata.file_path + ) request_context.patient_nhs_no = staging_metadata.nhs_number validate_nhs_number(staging_metadata.nhs_number) pds_patient_details = getting_patient_info_from_pds( @@ -194,9 +201,19 @@ def handle_sqs_message(self, message: dict): logger.info("Will stop processing Lloyd George record for this patient.") reason = str(error) + uploader_ods = ( + staging_metadata.files[0].gp_practice_code + if staging_metadata.files + else "" + ) + self.dynamo_repository.write_report_upload_to_dynamo( staging_metadata, UploadStatus.FAILED, reason, patient_ods_code ) + if isinstance(error, (InvalidNhsNumberException, PatientNotFoundException)): + logger.info("Invalid NHS number detected. Will set as placeholder") + staging_metadata.nhs_number = NHS_NUMBER_PLACEHOLDER + self.send_to_review_queue_if_enabled(staging_metadata, uploader_ods) return logger.info( @@ -336,8 +353,7 @@ def resolve_source_file_path(self, staging_metadata: StagingSqsMetadata): if not contains_accent_char(sample_file_path): logger.info("No accented character detected in file path.") self.file_path_cache = { - file.file_path: self.strip_leading_slash(file.file_path) - for file in staging_metadata.files + file.file_path: file.file_path for file in staging_metadata.files } return @@ -347,11 +363,8 @@ def resolve_source_file_path(self, staging_metadata: StagingSqsMetadata): resolved_file_paths = {} for file in staging_metadata.files: file_path_in_metadata = file.file_path - file_path_without_leading_slash = self.strip_leading_slash( - file_path_in_metadata - ) - file_path_in_nfc_form = convert_to_nfc_form(file_path_without_leading_slash) - file_path_in_nfd_form = convert_to_nfd_form(file_path_without_leading_slash) + file_path_in_nfc_form = convert_to_nfc_form(file_path_in_metadata) + file_path_in_nfd_form = convert_to_nfd_form(file_path_in_metadata) if self.bulk_upload_s3_repository.file_exists_on_staging_bucket( file_path_in_nfc_form @@ -440,3 +453,29 @@ def strip_leading_slash(filepath: str) -> str: @staticmethod def concatenate_acceptance_reason(previous_reasons: str | None, new_reason: str): return previous_reasons + ", " + new_reason if previous_reasons else new_reason + + def send_to_review_queue_if_enabled( + self, + staging_metadata: StagingSqsMetadata, + uploader_ods: str, + ): + if not self.send_to_review_enabled: + return + + review_reason = DocumentReviewReason.UNSUCCESSFUL_UPLOAD + + try: + self.sqs_repository.send_message_to_review_queue( + staging_metadata=staging_metadata, + failure_reason=review_reason, + uploader_ods=uploader_ods, + ) + logger.info( + f"Sent failed record to review queue with reason: {review_reason}" + ) + except Exception as e: + logger.error( + f"Failed to send message to review queue: {e}", + {"Result": "Review queue send failed"}, + ) + raise e diff --git a/lambdas/services/document_review_processor_service.py b/lambdas/services/document_review_processor_service.py index 
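Two behavioural points in the bulk_upload_service changes above: leading-slash stripping now happens per file inside handle_sqs_message rather than in resolve_source_file_path (the test-data change later in this diff matches), and a failed record only reaches the review queue after an invalid or unknown NHS number has been masked. A condensed sketch of that failure path; the helper name is hypothetical:

```python
from models.staging_metadata import NHS_NUMBER_PLACEHOLDER
from utils.exceptions import InvalidNhsNumberException, PatientNotFoundException


def prepare_failed_record_for_review(staging_metadata, error) -> str:
    # Uploader ODS comes from the first file's GP practice code, falling back
    # to an empty string when the message carried no files.
    uploader_ods = (
        staging_metadata.files[0].gp_practice_code if staging_metadata.files else ""
    )
    # NHS numbers that could not be validated are masked before queueing.
    if isinstance(error, (InvalidNhsNumberException, PatientNotFoundException)):
        staging_metadata.nhs_number = NHS_NUMBER_PLACEHOLDER
    return uploader_ods
```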
5e2162ba98..e1ac005056 100644 --- a/lambdas/services/document_review_processor_service.py +++ b/lambdas/services/document_review_processor_service.py @@ -4,64 +4,48 @@ from botocore.exceptions import ClientError from enums.document_review_status import DocumentReviewStatus -from models.document_reference import DocumentReferenceMetadataFields from models.document_review import ( DocumentReviewFileDetails, DocumentUploadReviewReference, ) from models.sqs.review_message_body import ReviewMessageBody -from services.base.dynamo_service import DynamoDBService +from models.staging_metadata import NHS_NUMBER_PLACEHOLDER from services.base.s3_service import S3Service +from services.document_upload_review_service import DocumentUploadReviewService from utils.audit_logging_setup import LoggingService +from utils.exceptions import ( + InvalidResourceIdException, + PatientNotFoundException, + PdsErrorException, +) from utils.request_context import request_context +from utils.utilities import get_pds_service logger = LoggingService(__name__) class ReviewProcessorService: - """ - Service for processing single SQS messages from the document review queue. - """ def __init__(self): - """Initialize the review processor service with required AWS services.""" - self.dynamo_service = DynamoDBService() self.s3_service = S3Service() - + self.document_review_service = DocumentUploadReviewService() self.review_table_name = os.environ["DOCUMENT_REVIEW_DYNAMODB_NAME"] self.staging_bucket_name = os.environ["STAGING_STORE_BUCKET_NAME"] self.review_bucket_name = os.environ["PENDING_REVIEW_BUCKET_NAME"] def process_review_message(self, review_message: ReviewMessageBody) -> None: - """ - Process a single SQS message from the review queue. - - Args: - sqs_message: SQS message record containing file and failure information - - Raises: - InvalidMessageException: If message format is invalid or required fields missing - S3FileNotFoundException: If file doesn't exist in staging bucket - ClientError: For AWS service errors (DynamoDB, S3) - """ - logger.info("Processing review queue message") request_context.patient_nhs_no = review_message.nhs_number review_id = review_message.upload_id review_files = self._move_files_to_review_bucket(review_message, review_id) + custodian = self._get_patient_custodian(review_message) document_upload_review = self._build_review_record( - review_message, review_id, review_files + review_message, review_id, review_files, custodian ) try: - self.dynamo_service.create_item( - table_name=self.review_table_name, - item=document_upload_review.model_dump( - by_alias=True, exclude_none=True - ), - key_name=DocumentReferenceMetadataFields.ID.value, - ) + self.document_review_service.create_dynamo_entry(document_upload_review) logger.info(f"Created review record {document_upload_review.id}") except ClientError as e: @@ -72,11 +56,37 @@ def process_review_message(self, review_message: ReviewMessageBody) -> None: self._delete_files_from_staging(review_message) + def _get_patient_custodian(self, review_message: ReviewMessageBody) -> str: + try: + if ( + not review_message.nhs_number + or review_message.nhs_number == NHS_NUMBER_PLACEHOLDER + ): + logger.info( + "No valid NHS number found in message. 
Using uploader ODS as custodian" + ) + return review_message.uploader_ods + pds_service = get_pds_service() + patient_details = pds_service.fetch_patient_details( + review_message.nhs_number + ) + return patient_details.general_practice_ods + except (PdsErrorException, InvalidResourceIdException): + logger.info("Error when searching PDS. Using uploader ODS as custodian") + return review_message.uploader_ods + except PatientNotFoundException: + logger.info( + "Patient not found in PDS. Using uploader ODS as custodian, and nhs number placeholder" + ) + review_message.nhs_number = NHS_NUMBER_PLACEHOLDER + return review_message.uploader_ods + def _build_review_record( self, message_data: ReviewMessageBody, review_id: str, review_files: list[DocumentReviewFileDetails], + custodian: str, ) -> DocumentUploadReviewReference: return DocumentUploadReviewReference( id=review_id, @@ -84,7 +94,7 @@ def _build_review_record( review_status=DocumentReviewStatus.PENDING_REVIEW, review_reason=message_data.failure_reason, author=message_data.uploader_ods, - custodian=message_data.current_gp, + custodian=custodian, files=review_files, upload_date=int(datetime.now(tz=timezone.utc).timestamp()), ) @@ -92,16 +102,6 @@ def _build_review_record( def _move_files_to_review_bucket( self, message_data: ReviewMessageBody, review_record_id: str ) -> list[DocumentReviewFileDetails]: - """ - Move file from staging to review bucket. - - Args: - message_data: Review queue message data - review_record_id: ID of the review record being created - - Returns: - List of DocumentReviewFileDetails with new file locations in review bucket - """ new_file_keys: list[DocumentReviewFileDetails] = [] for file in message_data.files: @@ -118,7 +118,7 @@ def _move_files_to_review_bucket( source_file_key=file.file_path, dest_bucket=self.review_bucket_name, dest_file_key=new_file_key, - if_none_match="*", + if_none_match=True, ) logger.info("File successfully copied to review bucket") logger.info(f"Successfully moved file to: {new_file_key}") @@ -135,7 +135,6 @@ def _move_files_to_review_bucket( file_location=new_file_key, ) ) - return new_file_keys def _delete_files_from_staging(self, message_data: ReviewMessageBody) -> None: diff --git a/lambdas/services/post_document_review_service.py b/lambdas/services/post_document_review_service.py index 317b00524e..7d5389401e 100644 --- a/lambdas/services/post_document_review_service.py +++ b/lambdas/services/post_document_review_service.py @@ -124,7 +124,7 @@ def create_review_reference_from_event( files=document_file_details, nhs_number=event.nhs_number, document_snomed_code_type=event.snomed_code.code, - review_reason=DocumentReviewReason.GP2GP_ERROR, + review_reason=DocumentReviewReason.NEW_DOCUMENT, ) return document_review_reference diff --git a/lambdas/tests/e2e/mns/mns_helper.py b/lambdas/tests/e2e/mns/mns_helper.py index f6d8161898..c6fa44c89f 100644 --- a/lambdas/tests/e2e/mns/mns_helper.py +++ b/lambdas/tests/e2e/mns/mns_helper.py @@ -78,7 +78,7 @@ def create_document_review_record( "Author": ods_code, "Custodian": ods_code, "ReviewStatus": review_status, - "ReviewReason": DocumentReviewReason.GP2GP_ERROR, + "ReviewReason": DocumentReviewReason.NEW_DOCUMENT, "UploadDate": int(time.time()), "Files": [ { diff --git a/lambdas/tests/unit/conftest.py b/lambdas/tests/unit/conftest.py index df5d7820e4..b3afba0f82 100644 --- a/lambdas/tests/unit/conftest.py +++ b/lambdas/tests/unit/conftest.py @@ -79,7 +79,7 @@ MOCK_LG_INVALID_SQS_QUEUE = "INVALID_SQS_QUEUE_URL" MOCK_STATISTICS_TABLE = 
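The _get_patient_custodian method added to ReviewProcessorService above encodes a three-way fallback. A minimal sketch of the resolution order, with fetch_details standing in for the PDS client; the real method also rewrites the message's NHS number to the placeholder in the not-found case:

```python
from models.staging_metadata import NHS_NUMBER_PLACEHOLDER
from utils.exceptions import (
    InvalidResourceIdException,
    PatientNotFoundException,
    PdsErrorException,
)


def resolve_custodian(nhs_number: str, uploader_ods: str, fetch_details) -> str:
    if not nhs_number or nhs_number == NHS_NUMBER_PLACEHOLDER:
        return uploader_ods  # nothing to look up on PDS
    try:
        return fetch_details(nhs_number).general_practice_ods
    except (PdsErrorException, InvalidResourceIdException, PatientNotFoundException):
        return uploader_ods  # PDS could not confirm a current GP
```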
"test_statistics_table" MOCK_STATISTICS_REPORT_BUCKET_NAME = "test_statistics_report_bucket" - +REVIEW_SQS_QUEUE_URL = "test_review_queue" TEST_NHS_NUMBER = "9000000009" TEST_UUID = "1234-4567-8912-HSDF-TEST" TEST_FILE_KEY = "test_file_key" @@ -235,6 +235,7 @@ def set_env(monkeypatch): monkeypatch.setenv("STAGING_STORE_BUCKET_NAME", MOCK_STAGING_STORE_BUCKET) monkeypatch.setenv("METADATA_SQS_QUEUE_URL", MOCK_LG_METADATA_SQS_QUEUE) monkeypatch.setenv("EDGE_REFERENCE_TABLE", MOCK_EDGE_REFERENCE_TABLE) + monkeypatch.setenv("REVIEW_SQS_QUEUE_URL", REVIEW_SQS_QUEUE_URL) EXPECTED_PARSED_PATIENT_BASE_CASE = PatientDetails( diff --git a/lambdas/tests/unit/handlers/conftest.py b/lambdas/tests/unit/handlers/conftest.py index 5c4350470f..8eca1979f2 100755 --- a/lambdas/tests/unit/handlers/conftest.py +++ b/lambdas/tests/unit/handlers/conftest.py @@ -1,5 +1,4 @@ import pytest - from enums.feature_flags import FeatureFlags from services.feature_flags_service import FeatureFlagService @@ -38,7 +37,10 @@ def valid_id_post_event_with_auth_header(): def valid_id_and_both_doctype_event(): api_gateway_proxy_event = { "httpMethod": "GET", - "queryStringParameters": {"patientId": "9000000009", "docType": "16521000000101,ARF"}, + "queryStringParameters": { + "patientId": "9000000009", + "docType": "16521000000101,ARF", + }, } return api_gateway_proxy_event @@ -56,7 +58,10 @@ def valid_id_and_arf_doctype_event(): def valid_id_and_lg_doctype_event(): api_gateway_proxy_event = { "httpMethod": "GET", - "queryStringParameters": {"patientId": "9000000009", "docType": "16521000000101"}, + "queryStringParameters": { + "patientId": "9000000009", + "docType": "16521000000101", + }, } return api_gateway_proxy_event @@ -65,7 +70,10 @@ def valid_id_and_lg_doctype_event(): def valid_id_and_lg_doctype_delete_event(): api_gateway_proxy_event = { "httpMethod": "DELETE", - "queryStringParameters": {"patientId": "9000000009", "docType": "16521000000101"}, + "queryStringParameters": { + "patientId": "9000000009", + "docType": "16521000000101", + }, } return api_gateway_proxy_event @@ -114,6 +122,7 @@ def mock_upload_lambda_disabled(mocker): } yield mock_upload_lambda_feature_flag + @pytest.fixture def mock_upload_document_iteration2_enabled(mocker): mock_function = mocker.patch.object(FeatureFlagService, "get_feature_flags_by_flag") @@ -123,6 +132,7 @@ def mock_upload_document_iteration2_enabled(mocker): ] yield mock_function + @pytest.fixture def mock_upload_document_iteration2_disabled(mocker): mock_function = mocker.patch.object(FeatureFlagService, "get_feature_flags_by_flag") @@ -132,6 +142,7 @@ def mock_upload_document_iteration2_disabled(mocker): ] yield mock_function + @pytest.fixture def mock_upload_document_iteration3_enabled(mocker): mock_function = mocker.patch.object(FeatureFlagService, "get_feature_flags_by_flag") @@ -141,6 +152,7 @@ def mock_upload_document_iteration3_enabled(mocker): ] yield mock_function + @pytest.fixture def mock_upload_document_iteration3_disabled(mocker): mock_function = mocker.patch.object(FeatureFlagService, "get_feature_flags_by_flag") @@ -152,10 +164,41 @@ def mock_upload_document_iteration3_disabled(mocker): @pytest.fixture -def mock_validation_strict_disabled(mocker): +def mock_validation_strict_and_bulk_upload_send_to_review_disabled(mocker): + mock_function = mocker.patch.object(FeatureFlagService, "get_feature_flags_by_flag") + mock_upload_lambda_feature_flag = mock_function.return_value = { + "lloydGeorgeValidationStrictModeEnabled": False, + "bulkUploadSendToReviewEnabled": False, + } 
+ yield mock_upload_lambda_feature_flag + + +@pytest.fixture +def mock_validation_strict_and_bulk_upload_send_to_review_enabled(mocker): mock_function = mocker.patch.object(FeatureFlagService, "get_feature_flags_by_flag") mock_upload_lambda_feature_flag = mock_function.return_value = { - "lloydGeorgeValidationStrictModeEnabled": False + "lloydGeorgeValidationStrictModeEnabled": True, + "bulkUploadSendToReviewEnabled": True, + } + yield mock_upload_lambda_feature_flag + + +@pytest.fixture +def mock_validation_strict_enabled_send_to_review_disabled(mocker): + mock_function = mocker.patch.object(FeatureFlagService, "get_feature_flags_by_flag") + mock_upload_lambda_feature_flag = mock_function.return_value = { + "lloydGeorgeValidationStrictModeEnabled": True, + "bulkUploadSendToReviewEnabled": False, + } + yield mock_upload_lambda_feature_flag + + +@pytest.fixture +def mock_validation_strict_disabled_send_to_review_enabled(mocker): + mock_function = mocker.patch.object(FeatureFlagService, "get_feature_flags_by_flag") + mock_upload_lambda_feature_flag = mock_function.return_value = { + "lloydGeorgeValidationStrictModeEnabled": False, + "bulkUploadSendToReviewEnabled": True, } yield mock_upload_lambda_feature_flag @@ -175,4 +218,4 @@ def mock_upload_document_iteration_3_disabled(mocker): mock_feature_flag = mock_function.return_value = { FeatureFlags.UPLOAD_DOCUMENT_ITERATION_3_ENABLED: False } - yield mock_feature_flag \ No newline at end of file + yield mock_feature_flag diff --git a/lambdas/tests/unit/handlers/test_bulk_upload_handler.py b/lambdas/tests/unit/handlers/test_bulk_upload_handler.py index 9a6419600d..005525f75e 100644 --- a/lambdas/tests/unit/handlers/test_bulk_upload_handler.py +++ b/lambdas/tests/unit/handlers/test_bulk_upload_handler.py @@ -1,6 +1,5 @@ import pytest from handlers.bulk_upload_handler import lambda_handler -from services.bulk_upload_service import BulkUploadService from tests.unit.helpers.data.bulk_upload.test_data import ( TEST_EVENT_WITH_NO_SQS_MESSAGES, TEST_EVENT_WITH_ONE_SQS_MESSAGE, @@ -11,52 +10,148 @@ @pytest.fixture -def mock_service(mocker): - yield mocker.patch.object(BulkUploadService, "process_message_queue") +def mock_bulk_upload_service_class(mocker): + mock_class = mocker.patch("handlers.bulk_upload_handler.BulkUploadService") + return mock_class -def test_can_process_event_with_one_message( - mock_service, context, set_env, mock_validation_strict_disabled +@pytest.fixture +def mock_bulk_upload_service_instance(mock_bulk_upload_service_class): + return mock_bulk_upload_service_class.return_value + + +def test_lambda_handler_processes_single_message_successfully( + mock_bulk_upload_service_instance, + context, + set_env, + mock_validation_strict_and_bulk_upload_send_to_review_disabled, ): + response = lambda_handler(TEST_EVENT_WITH_ONE_SQS_MESSAGE, context) + expected = ApiGatewayResponse( 200, "Finished processing all 1 messages", "GET" ).create_api_gateway_response() - actual = lambda_handler(TEST_EVENT_WITH_ONE_SQS_MESSAGE, context) + assert response == expected + mock_bulk_upload_service_instance.process_message_queue.assert_called_once_with( + TEST_EVENT_WITH_ONE_SQS_MESSAGE["Records"] + ) - assert expected == actual - -def test_can_process_event_with_several_messages( - mock_service, context, set_env, mock_validation_strict_disabled +def test_lambda_handler_processes_multiple_messages_successfully( + mock_bulk_upload_service_instance, + context, + set_env, + mock_validation_strict_and_bulk_upload_send_to_review_disabled, ): + response = 
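The four flag fixtures above differ only in two booleans. A single factory fixture would keep the combinations in one place; this is a suggestion only, and the fixture name is hypothetical:

```python
import pytest
from services.feature_flags_service import FeatureFlagService


@pytest.fixture
def set_bulk_upload_flags(mocker):
    def _set(strict_mode: bool, send_to_review: bool) -> dict:
        mock_function = mocker.patch.object(
            FeatureFlagService, "get_feature_flags_by_flag"
        )
        mock_function.return_value = {
            "lloydGeorgeValidationStrictModeEnabled": strict_mode,
            "bulkUploadSendToReviewEnabled": send_to_review,
        }
        return mock_function.return_value

    return _set
```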
lambda_handler(TEST_EVENT_WITH_SQS_MESSAGES, context) + expected = ApiGatewayResponse( 200, "Finished processing all 3 messages", "GET" ).create_api_gateway_response() - actual = lambda_handler(TEST_EVENT_WITH_SQS_MESSAGES, context) + assert response == expected + mock_bulk_upload_service_instance.process_message_queue.assert_called_once_with( + TEST_EVENT_WITH_SQS_MESSAGES["Records"] + ) - assert actual == expected - -def test_receive_correct_response_when_service_returns_error( - mock_service, context, set_env, mock_validation_strict_disabled +def test_lambda_handler_handles_bulk_upload_exception( + mock_bulk_upload_service_instance, + context, + set_env, + mock_validation_strict_and_bulk_upload_send_to_review_disabled, ): + mock_bulk_upload_service_instance.process_message_queue.side_effect = BulkUploadException() + + response = lambda_handler(TEST_EVENT_WITH_SQS_MESSAGES, context) + expected = ApiGatewayResponse( 500, "Bulk upload failed with error: ", "GET" ).create_api_gateway_response() - mock_service.side_effect = BulkUploadException() - actual = lambda_handler(TEST_EVENT_WITH_SQS_MESSAGES, context) - - assert actual == expected + assert response == expected + mock_bulk_upload_service_instance.process_message_queue.assert_called_once() -def test_receive_correct_response_when_no_records_in_event( - mock_service, context, set_env, mock_validation_strict_disabled +def test_lambda_handler_handles_empty_records_list( + mock_bulk_upload_service_instance, + context, + set_env, + mock_validation_strict_and_bulk_upload_send_to_review_disabled, ): + response = lambda_handler(TEST_EVENT_WITH_NO_SQS_MESSAGES, context) + expected = ApiGatewayResponse( 400, "No sqs messages found in event: {'Records': []}. Will ignore this event", "GET", ).create_api_gateway_response() - actual = lambda_handler(TEST_EVENT_WITH_NO_SQS_MESSAGES, context) + assert response == expected + mock_bulk_upload_service_instance.process_message_queue.assert_not_called() + + +def test_lambda_handler_instantiates_service_with_both_flags_disabled( + mock_bulk_upload_service_class, + context, + set_env, + mock_validation_strict_and_bulk_upload_send_to_review_disabled, +): + lambda_handler(TEST_EVENT_WITH_ONE_SQS_MESSAGE, context) + + mock_bulk_upload_service_class.assert_called_once_with( + strict_mode=False, bypass_pds=False, send_to_review_enabled=False + ) + + +def test_lambda_handler_instantiates_service_with_both_flags_enabled( + mock_bulk_upload_service_class, + context, + set_env, + mock_validation_strict_and_bulk_upload_send_to_review_enabled, +): + lambda_handler(TEST_EVENT_WITH_ONE_SQS_MESSAGE, context) + + mock_bulk_upload_service_class.assert_called_once_with( + strict_mode=True, bypass_pds=False, send_to_review_enabled=True + ) + + +def test_lambda_handler_instantiates_service_with_validation_strict_enabled_only( + mock_bulk_upload_service_class, + context, + set_env, + mock_validation_strict_enabled_send_to_review_disabled, +): + lambda_handler(TEST_EVENT_WITH_ONE_SQS_MESSAGE, context) + + mock_bulk_upload_service_class.assert_called_once_with( + strict_mode=True, bypass_pds=False, send_to_review_enabled=False + ) + + +def test_lambda_handler_instantiates_service_with_send_to_review_enabled_only( + mock_bulk_upload_service_class, + context, + set_env, + mock_validation_strict_disabled_send_to_review_enabled, +): + lambda_handler(TEST_EVENT_WITH_ONE_SQS_MESSAGE, context) + + mock_bulk_upload_service_class.assert_called_once_with( + strict_mode=False, bypass_pds=False, send_to_review_enabled=True + ) + + +def 
test_lambda_handler_instantiates_service_with_bypass_pds_enabled( + mock_bulk_upload_service_class, + context, + set_env, + mock_validation_strict_and_bulk_upload_send_to_review_disabled, + monkeypatch, +): + monkeypatch.setenv("BYPASS_PDS", "true") + + lambda_handler(TEST_EVENT_WITH_ONE_SQS_MESSAGE, context) + + mock_bulk_upload_service_class.assert_called_once_with( + strict_mode=False, bypass_pds=True, send_to_review_enabled=False + ) - assert expected == actual diff --git a/lambdas/tests/unit/handlers/test_document_review_processor_handler.py b/lambdas/tests/unit/handlers/test_document_review_processor_handler.py index 1b1f388781..c80a080c65 100644 --- a/lambdas/tests/unit/handlers/test_document_review_processor_handler.py +++ b/lambdas/tests/unit/handlers/test_document_review_processor_handler.py @@ -1,7 +1,6 @@ import json import pytest - from enums.document_review_reason import DocumentReviewReason from handlers.document_review_processor_handler import lambda_handler from models.sqs.review_message_body import ReviewMessageBody, ReviewMessageFile @@ -29,10 +28,8 @@ def sample_review_message_body(): ) ], nhs_number="9000000009", - failure_reason=DocumentReviewReason.GENERAL_ERROR, - upload_date="2024-01-15T10:30:00Z", + failure_reason=DocumentReviewReason.UNSUCCESSFUL_UPLOAD, uploader_ods="Y12345", - current_gp="Y12345", ) @@ -64,10 +61,8 @@ def sample_sqs_event_multiple_messages(sample_review_message_body): ) ], nhs_number="9000000009", - failure_reason=DocumentReviewReason.UNKNOWN_NHS_NUMBER, - upload_date="2024-01-15T10:30:00Z", + failure_reason=DocumentReviewReason.UNSUCCESSFUL_UPLOAD, uploader_ods="Y12345", - current_gp="Y12345", ) message_2 = ReviewMessageBody( @@ -79,10 +74,8 @@ def sample_sqs_event_multiple_messages(sample_review_message_body): ) ], nhs_number="9000000010", - failure_reason=DocumentReviewReason.FILE_NAME_MISMATCH, - upload_date="2024-01-15T10:35:00Z", + failure_reason=DocumentReviewReason.UNSUCCESSFUL_UPLOAD, uploader_ods="Y12345", - current_gp="Y12345", ) message_3 = ReviewMessageBody( @@ -94,10 +87,8 @@ def sample_sqs_event_multiple_messages(sample_review_message_body): ) ], nhs_number="9000000011", - failure_reason=DocumentReviewReason.FILE_COUNT_MISMATCH, - upload_date="2024-01-15T10:40:00Z", + failure_reason=DocumentReviewReason.UNSUCCESSFUL_UPLOAD, uploader_ods="Y67890", - current_gp="Y67890", ) return { @@ -196,7 +187,7 @@ def test_lambda_handler_parses_json_body_correctly( {"file_name": "test.pdf", "file_path": "staging/test.pdf"} ], "nhs_number": "9000000009", - "failure_reason": "General error", + "failure_reason": "Unsuccessful upload", "upload_date": "2024-01-15T10:30:00Z", "uploader_ods": "Y12345", "current_gp": "Y12345", diff --git a/lambdas/tests/unit/handlers/test_patch_document_review_handler.py b/lambdas/tests/unit/handlers/test_patch_document_review_handler.py index cf8279bc63..7c40ebf666 100644 --- a/lambdas/tests/unit/handlers/test_patch_document_review_handler.py +++ b/lambdas/tests/unit/handlers/test_patch_document_review_handler.py @@ -139,12 +139,13 @@ def mock_authorization(mocker): @pytest.fixture def mock_missing_authorization(mocker): - mock_auth = mocker.patch( + mock_auth = mocker.patch( "handlers.patch_document_review_handler.extract_ods_code_from_request_context" ) mock_auth.side_effect = OdsErrorException() yield mock_auth + def test_lambda_handler_returns_200_when_document_review_approved( mocked_service, valid_put_document_review_event_approved, diff --git a/lambdas/tests/unit/helpers/data/bulk_upload/test_data.py 
b/lambdas/tests/unit/helpers/data/bulk_upload/test_data.py index df2cec6b08..3e5642bd2f 100644 --- a/lambdas/tests/unit/helpers/data/bulk_upload/test_data.py +++ b/lambdas/tests/unit/helpers/data/bulk_upload/test_data.py @@ -246,7 +246,7 @@ def build_test_staging_metadata_from_patient_name( def build_test_staging_metadata(file_names: list[str], nhs_number: str = "9000000009"): files = [] for file_name in file_names: - source_file_path = f"/{nhs_number}/{file_name}" + source_file_path = f"{nhs_number}/{file_name}" files.append( sample_sqs_metadata_model.model_copy( update={ @@ -310,7 +310,7 @@ def build_test_document_reference(file_name: str, nhs_number: str = "9000000009" custodian=TEST_CURRENT_GP_ODS, doc_status="preliminary", document_scan_creation="2022-09-03", - doc_type=SupportedDocumentTypes.LG + doc_type=SupportedDocumentTypes.LG, ) doc_ref.virus_scanner_result = VirusScanResult.CLEAN return doc_ref diff --git a/lambdas/tests/unit/helpers/data/search_document_review/dynamo_response.py b/lambdas/tests/unit/helpers/data/search_document_review/dynamo_response.py index ddb99b5a84..87319d4964 100644 --- a/lambdas/tests/unit/helpers/data/search_document_review/dynamo_response.py +++ b/lambdas/tests/unit/helpers/data/search_document_review/dynamo_response.py @@ -29,7 +29,7 @@ "Custodian": TEST_CURRENT_GP_ODS, "UploadDate": 1704110400, "NhsNumber": TEST_NHS_NUMBER, - "ReviewReason": DocumentReviewReason.FILE_COUNT_MISMATCH, + "ReviewReason": DocumentReviewReason.UNSUCCESSFUL_UPLOAD, "ReviewStatus": DocumentReviewStatus.PENDING_REVIEW.value, "LastUpdated": 1704110400, # Timestamp: 2024-01-01T12:00:00 "DocumentSnomedCodeType": SnomedCodes.LLOYD_GEORGE.value.code, @@ -51,7 +51,7 @@ "Custodian": TEST_CURRENT_GP_ODS, "UploadDate": 1704110400, "NhsNumber": TEST_NHS_NUMBER, - "ReviewReason": DocumentReviewReason.FILE_COUNT_MISMATCH, + "ReviewReason": DocumentReviewReason.UNSUCCESSFUL_UPLOAD, "ReviewStatus": DocumentReviewStatus.PENDING_REVIEW.value, "LastUpdated": 1704110400, # Timestamp: 2024-01-01T12:00:00 "DocumentSnomedCodeType": SnomedCodes.LLOYD_GEORGE.value.code, @@ -73,7 +73,7 @@ "Custodian": TEST_CURRENT_GP_ODS, "UploadDate": 1704110400, "NhsNumber": TEST_NHS_NUMBER, - "ReviewReason": DocumentReviewReason.FILE_COUNT_MISMATCH, + "ReviewReason": DocumentReviewReason.UNSUCCESSFUL_UPLOAD, "ReviewStatus": DocumentReviewStatus.PENDING_REVIEW.value, "LastUpdated": 1704110400, # Timestamp: 2024-01-01T12:00:00 "DocumentSnomedCodeType": SnomedCodes.LLOYD_GEORGE.value.code, @@ -127,7 +127,7 @@ "Custodian": {"S": TEST_CURRENT_GP_ODS}, "UploadDate": {"N": "1704110400"}, "NhsNumber": {"S": TEST_NHS_NUMBER}, - "ReviewReason": {"S": DocumentReviewReason.FILE_COUNT_MISMATCH}, + "ReviewReason": {"S": DocumentReviewReason.UNSUCCESSFUL_UPLOAD}, "ReviewStatus": {"S": DocumentReviewStatus.PENDING_REVIEW.value}, "LastUpdated": {"N": "1704110400"}, "DocumentSnomedCodeType": {"S": SnomedCodes.LLOYD_GEORGE.value.code}, @@ -159,7 +159,7 @@ "Custodian": {"S": TEST_CURRENT_GP_ODS}, "UploadDate": {"N": "1704110400"}, "NhsNumber": {"S": TEST_NHS_NUMBER}, - "ReviewReason": {"S": DocumentReviewReason.FILE_COUNT_MISMATCH}, + "ReviewReason": {"S": DocumentReviewReason.UNSUCCESSFUL_UPLOAD}, "ReviewStatus": {"S": DocumentReviewStatus.PENDING_REVIEW.value}, "LastUpdated": {"N": "1704110400"}, "DocumentSnomedCodeType": {"S": SnomedCodes.LLOYD_GEORGE.value.code}, @@ -191,7 +191,7 @@ "Custodian": {"S": TEST_CURRENT_GP_ODS}, "UploadDate": {"N": "1704110400"}, "NhsNumber": {"S": TEST_NHS_NUMBER}, - "ReviewReason": {"S": 
DocumentReviewReason.FILE_COUNT_MISMATCH}, + "ReviewReason": {"S": DocumentReviewReason.UNSUCCESSFUL_UPLOAD}, "ReviewStatus": {"S": DocumentReviewStatus.PENDING_REVIEW.value}, "LastUpdated": {"N": "1704110400"}, "DocumentSnomedCodeType": {"S": SnomedCodes.LLOYD_GEORGE.value.code}, diff --git a/lambdas/tests/unit/repositories/bulk_upload/test_bulk_upload_sqs_repository.py b/lambdas/tests/unit/repositories/bulk_upload/test_bulk_upload_sqs_repository.py index a849bb8bf6..fa76981406 100644 --- a/lambdas/tests/unit/repositories/bulk_upload/test_bulk_upload_sqs_repository.py +++ b/lambdas/tests/unit/repositories/bulk_upload/test_bulk_upload_sqs_repository.py @@ -1,6 +1,9 @@ import copy +import json import pytest +from enums.document_review_reason import DocumentReviewReason +from models.staging_metadata import BulkUploadQueueMetadata, StagingSqsMetadata from repositories.bulk_upload.bulk_upload_sqs_repository import BulkUploadSqsRepository from tests.unit.conftest import MOCK_LG_METADATA_SQS_QUEUE, PDF_STITCHING_SQS_URL from tests.unit.helpers.data.bulk_upload.test_data import ( @@ -21,6 +24,29 @@ def repo_under_test(mocker, set_env): yield repo +@pytest.fixture +def sample_staging_metadata(): + """Create sample staging metadata for tests""" + return StagingSqsMetadata( + nhs_number="9000000009", + files=[ + BulkUploadQueueMetadata( + file_path="staging/9000000009/test1.pdf", + stored_file_name="test1.pdf", + gp_practice_code="Y12345", + scan_date="2024-01-01", + ), + BulkUploadQueueMetadata( + file_path="staging/9000000009/test2.pdf", + stored_file_name="test2.pdf", + gp_practice_code="Y12345", + scan_date="2024-01-01", + ), + ], + retries=0, + ) + + def test_put_staging_metadata_back_to_queue_and_increases_retries( set_env, mock_uuid, repo_under_test ): @@ -59,3 +85,39 @@ def test_send_message_to_pdf_stitching_queue(set_env, repo_under_test): queue_url=PDF_STITCHING_SQS_URL, message_body=message_body.model_dump_json(), ) + + +def test_sends_message_to_review_queue_with_correct_structure_and_fields( + set_env, repo_under_test, mock_uuid +): + repo_under_test.send_message_to_review_queue( + staging_metadata=TEST_STAGING_METADATA, + failure_reason=DocumentReviewReason.UNSUCCESSFUL_UPLOAD, + uploader_ods="Y12345", + ) + + expected_message_body = { + "upload_id": mock_uuid, + "files": [ + { + "file_name": "1of3_Lloyd_George_Record_[Jane Smith]_[9000000009]_[22-10-2010].pdf", + "file_path": "9000000009/1of3_Lloyd_George_Record_[Jane Smith]_[9000000009]_[22-10-2010].pdf", + }, + { + "file_name": "2of3_Lloyd_George_Record_[Jane Smith]_[9000000009]_[22-10-2010].pdf", + "file_path": "9000000009/2of3_Lloyd_George_Record_[Jane Smith]_[9000000009]_[22-10-2010].pdf", + }, + { + "file_name": "3of3_Lloyd_George_Record_[Jane Smith]_[9000000009]_[22-10-2010].pdf", + "file_path": "9000000009/3of3_Lloyd_George_Record_[Jane Smith]_[9000000009]_[22-10-2010].pdf", + }, + ], + "nhs_number": "9000000009", + "failure_reason": "Unsuccessful upload", + "uploader_ods": "Y12345", + } + + repo_under_test.sqs_repository.send_message_standard.assert_called_once_with( + queue_url=repo_under_test.review_queue_url, + message_body=json.dumps(expected_message_body, separators=(",", ":")), + ) diff --git a/lambdas/tests/unit/services/test_bulk_upload_service.py b/lambdas/tests/unit/services/test_bulk_upload_service.py index fda9d32d99..9dab8d2714 100644 --- a/lambdas/tests/unit/services/test_bulk_upload_service.py +++ b/lambdas/tests/unit/services/test_bulk_upload_service.py @@ -3,6 +3,7 @@ import pytest from botocore.exceptions 
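On the string comparison in the review-queue repository test above: pydantic v2's model_dump_json() emits compact JSON, which is why the expected dict is dumped with separators=(",", ":"). Comparing parsed JSON would be less brittle if the serializer ever changes; a sketch using the same names as the test:

```python
import json

sent = repo_under_test.sqs_repository.send_message_standard.call_args.kwargs
assert sent["queue_url"] == repo_under_test.review_queue_url
assert json.loads(sent["message_body"]) == expected_message_body
```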
import ClientError +from enums.document_review_reason import DocumentReviewReason from enums.patient_ods_inactive_status import PatientOdsInactiveStatus from enums.upload_status import UploadStatus from enums.virus_scan_result import SCAN_RESULT_TAG_KEY, VirusScanResult @@ -67,6 +68,18 @@ def repo_under_test(set_env, mocker): yield service +@pytest.fixture +def repo_with_review_enabled(set_env, mocker): + mocker.patch("services.bulk_upload_service.BulkUploadDynamoRepository") + mocker.patch("services.bulk_upload_service.BulkUploadSqsRepository") + mocker.patch("services.bulk_upload_service.BulkUploadS3Repository") + mocker.patch("services.bulk_upload_service.allowed_to_ingest_ods_code", return_value=True) + service = BulkUploadService( + strict_mode=True, bypass_pds=False, send_to_review_enabled=True + ) + yield service + + @pytest.fixture def mock_check_virus_result(mocker): yield mocker.patch.object(BulkUploadS3Repository, "check_virus_result") @@ -810,7 +823,7 @@ def test_resolve_source_file_path_when_filenames_have_accented_chars( expected_cache = {} for i in range(1, 4): file_path_in_metadata = ( - f"/9000000009/{i}of3_Lloyd_George_Record_" + f"9000000009/{i}of3_Lloyd_George_Record_" f"[{patient_name_in_metadata_file}]_[9000000009]_[22-10-2010].pdf" ) file_path_on_s3 = f"9000000009/{i}of3_Lloyd_George_Record_[{patient_name}]_[9000000009]_[22-10-2010].pdf" @@ -1106,7 +1119,8 @@ def test_patient_not_found_is_caught_and_written_to_dynamo( assert call_status == UploadStatus.FAILED assert call_reason == expected_error_message - assert call_metadata == TEST_STAGING_METADATA + assert call_metadata.files == TEST_STAGING_METADATA.files + assert call_metadata.nhs_number == "0000000000" @pytest.fixture @@ -1244,3 +1258,103 @@ def test_handle_sqs_message_report_failure_when_pdf_integrity_check_file_not_fou mock_create_lg_records_and_copy_files.assert_not_called() mock_remove_ingested_file_from_source_bucket.assert_not_called() repo_under_test.sqs_repository.send_message_to_pdf_stitching_queue.assert_not_called() + + +def test_does_not_send_when_feature_flag_disabled(repo_under_test): + repo_under_test.send_to_review_queue_if_enabled(TEST_SQS_MESSAGE, "Y12345") + + repo_under_test.sqs_repository.send_message_to_review_queue.assert_not_called() + + +def test_sends_demographic_error_when_flag_enabled(set_env, repo_with_review_enabled): + + repo_with_review_enabled.send_to_review_queue_if_enabled(TEST_SQS_MESSAGE, "Y12345") + + repo_with_review_enabled.sqs_repository.send_message_to_review_queue.assert_called_once_with( + staging_metadata=TEST_SQS_MESSAGE, + failure_reason=DocumentReviewReason.UNSUCCESSFUL_UPLOAD, + uploader_ods="Y12345", + ) + + +def test_sends_to_review_queue_when_patient_not_found( + repo_with_review_enabled, mock_validate_files, mocker +): + expected_error_message = "Could not find the given patient on PDS" + mocker.patch( + "services.bulk_upload_service.getting_patient_info_from_pds", + side_effect=PatientNotFoundException(expected_error_message), + ) + + repo_with_review_enabled.handle_sqs_message(message=TEST_SQS_MESSAGE) + + call_args = repo_with_review_enabled.sqs_repository.send_message_to_review_queue.call_args + staging_metadata_arg = call_args[1]["staging_metadata"] + + assert staging_metadata_arg.nhs_number == "0000000000" + assert staging_metadata_arg.files == TEST_STAGING_METADATA.files + assert call_args[1]["failure_reason"] == DocumentReviewReason.UNSUCCESSFUL_UPLOAD + assert call_args[1]["uploader_ods"] == TEST_STAGING_METADATA.files[0].gp_practice_code + + +def 
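Formatting nit on the repo_with_review_enabled fixture above: the rest of this PR applies black's wrapping, but the allowed_to_ingest_ods_code patch call exceeds the line length. black would render it as:

```python
mocker.patch(
    "services.bulk_upload_service.allowed_to_ingest_ods_code",
    return_value=True,
)
```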
test_sends_to_review_queue_when_invalid_files( + repo_with_review_enabled, mock_validate_files, mock_pds_service, mocker +): + mocked_error = LGInvalidFilesException( + "One or more of the files do not match naming convention" + ) + mock_validate_files.side_effect = mocked_error + + repo_with_review_enabled.handle_sqs_message(message=TEST_SQS_MESSAGE) + + repo_with_review_enabled.sqs_repository.send_message_to_review_queue.assert_called_once_with( + staging_metadata=TEST_STAGING_METADATA, + failure_reason=DocumentReviewReason.UNSUCCESSFUL_UPLOAD, + uploader_ods=TEST_STAGING_METADATA.files[0].gp_practice_code, + ) + + +def test_does_not_send_to_review_queue_when_virus_scan_fails( + repo_with_review_enabled, mock_validate_files, mock_pds_service, mocker +): + mocker.patch.object( + repo_with_review_enabled.bulk_upload_s3_repository, + "check_virus_result", + side_effect=DocumentInfectedException("File is infected"), + ) + + repo_with_review_enabled.handle_sqs_message(message=TEST_SQS_MESSAGE) + + repo_with_review_enabled.sqs_repository.send_message_to_review_queue.assert_not_called() + + +def test_does_not_send_to_review_queue_when_file_corrupted( + repo_with_review_enabled, mock_validate_files, mock_pds_service, mocker +): + mocker.patch.object( + repo_with_review_enabled.bulk_upload_s3_repository, "check_virus_result" + ) + mocker.patch.object( + repo_with_review_enabled.bulk_upload_s3_repository, + "check_pdf_integrity", + side_effect=CorruptedFileException("File is corrupted"), + ) + + repo_with_review_enabled.handle_sqs_message(message=TEST_SQS_MESSAGE) + + repo_with_review_enabled.sqs_repository.send_message_to_review_queue.assert_not_called() + + +def test_does_not_send_to_review_queue_when_s3_file_not_found( + repo_with_review_enabled, mock_validate_files, mock_pds_service, mocker +): + mocker.patch.object( + repo_with_review_enabled.bulk_upload_s3_repository, + "check_virus_result", + side_effect=S3FileNotFoundException("File not found"), + ) + + repo_with_review_enabled.handle_sqs_message(message=TEST_SQS_MESSAGE) + + repo_with_review_enabled.sqs_repository.send_message_to_review_queue.assert_not_called() + diff --git a/lambdas/tests/unit/services/test_document_review_processor_service.py b/lambdas/tests/unit/services/test_document_review_processor_service.py index ffff8abab3..c1e00dfdec 100644 --- a/lambdas/tests/unit/services/test_document_review_processor_service.py +++ b/lambdas/tests/unit/services/test_document_review_processor_service.py @@ -6,32 +6,32 @@ DocumentReviewFileDetails, DocumentUploadReviewReference, ) +from models.pds_models import PatientDetails from models.sqs.review_message_body import ReviewMessageBody, ReviewMessageFile from services.document_review_processor_service import ReviewProcessorService +from utils.exceptions import PdsErrorException, InvalidResourceIdException, PatientNotFoundException @pytest.fixture -def mock_dynamo_service(mocker): - """Mock the DynamoDBService.""" - return mocker.patch("services.document_review_processor_service.DynamoDBService") +def mock_document_upload_review_service(mocker): + return mocker.patch( + "services.document_review_processor_service.DocumentUploadReviewService" + ) @pytest.fixture def mock_s3_service(mocker): - """Mock the S3Service.""" return mocker.patch("services.document_review_processor_service.S3Service") @pytest.fixture -def service_under_test(set_env, mock_dynamo_service, mock_s3_service): - """Create a ReviewProcessorService instance with mocked dependencies.""" +def service_under_test(set_env, 
mock_document_upload_review_service, mock_s3_service): service = ReviewProcessorService() return service @pytest.fixture def sample_review_message(): - """Create a sample review message.""" return ReviewMessageBody( upload_id="test-upload-id-123", files=[ @@ -41,29 +41,43 @@ def sample_review_message(): ) ], nhs_number="9000000009", - failure_reason=DocumentReviewReason.DEMOGRAPHIC_MISMATCHES, - upload_date="2024-01-15T10:30:00Z", + failure_reason=DocumentReviewReason.UNSUCCESSFUL_UPLOAD, uploader_ods="Y12345", - current_gp="Y12345", ) +@pytest.fixture +def mock_pds_service(mocker): + mock_pds = mocker.MagicMock() + mock_patient_details = PatientDetails( + nhsNumber="9000000009", + generalPracticeOds="Y67890", + superseded=False, + restricted=False, + ) + mock_pds.fetch_patient_details.return_value = mock_patient_details + mocker.patch( + "services.document_review_processor_service.get_pds_service", + return_value=mock_pds, + ) + return mock_pds + + def test_service_initializes_with_correct_environment_variables( - set_env, mock_dynamo_service, mock_s3_service + set_env, mock_document_upload_review_service, mock_s3_service ): service = ReviewProcessorService() assert service.review_table_name == "test_document_review" assert service.staging_bucket_name == "test_staging_bulk_store" assert service.review_bucket_name == "test_document_review_bucket" - mock_dynamo_service.assert_called_once() + mock_document_upload_review_service.assert_called_once() mock_s3_service.assert_called_once() def test_process_review_message_success( service_under_test, sample_review_message, mocker ): - """Test successful processing of a review message.""" mock_move = mocker.patch.object(service_under_test, "_move_files_to_review_bucket") mock_delete = mocker.patch.object(service_under_test, "_delete_files_from_staging") @@ -77,12 +91,11 @@ def test_process_review_message_success( service_under_test.process_review_message(sample_review_message) mock_move.assert_called_once() - service_under_test.dynamo_service.create_item.assert_called_once() + service_under_test.document_review_service.create_dynamo_entry.assert_called_once() mock_delete.assert_called_once_with(sample_review_message) def test_process_review_message_multiple_files(service_under_test, mocker): - """Test successful processing of a review message with multiple files.""" message = ReviewMessageBody( upload_id="test-upload-id-456", files=[ @@ -96,10 +109,8 @@ def test_process_review_message_multiple_files(service_under_test, mocker): ), ], nhs_number="9000000009", - failure_reason=DocumentReviewReason.FILE_COUNT_MISMATCH, - upload_date="2024-01-15T10:30:00Z", + failure_reason=DocumentReviewReason.UNSUCCESSFUL_UPLOAD, uploader_ods="Y12345", - current_gp="Y12345", ) mock_move = mocker.patch.object(service_under_test, "_move_files_to_review_bucket") @@ -119,14 +130,13 @@ def test_process_review_message_multiple_files(service_under_test, mocker): service_under_test.process_review_message(message) mock_move.assert_called_once() - service_under_test.dynamo_service.create_item.assert_called_once() + service_under_test.document_review_service.create_dynamo_entry.assert_called_once() mock_delete.assert_called_once_with(message) def test_process_review_message_s3_copy_error( service_under_test, sample_review_message, mocker ): - """Test processing fails when S3 copy operation fails.""" mocker.patch.object( service_under_test, "_move_files_to_review_bucket", @@ -143,7 +153,6 @@ def test_process_review_message_s3_copy_error( def 
test_process_review_message_dynamo_error_not_precondition( service_under_test, sample_review_message, mocker ): - """Test processing fails when DynamoDB put fails.""" mocker.patch.object( service_under_test, "_move_files_to_review_bucket", @@ -154,9 +163,11 @@ def test_process_review_message_dynamo_error_not_precondition( ) ], ) - service_under_test.dynamo_service.create_item.side_effect = ClientError( - {"Error": {"Code": "InternalServerError", "Message": "DynamoDB error"}}, - "PutItem", + service_under_test.document_review_service.create_dynamo_entry.side_effect = ( + ClientError( + {"Error": {"Code": "InternalServerError", "Message": "DynamoDB error"}}, + "PutItem", + ) ) with pytest.raises(ClientError): @@ -166,7 +177,6 @@ def test_process_review_message_dynamo_error_not_precondition( def test_process_review_message_continues_dynamo_conditional_check_failure( service_under_test, sample_review_message, mocker ): - mocker.patch.object( service_under_test, "_move_files_to_review_bucket", @@ -178,14 +188,16 @@ def test_process_review_message_continues_dynamo_conditional_check_failure( ], ) mocker.patch.object(service_under_test, "_delete_files_from_staging") - service_under_test.dynamo_service.create_item.side_effect = ClientError( - { - "Error": { - "Code": "ConditionalCheckFailedException", - "Message": "DynamoDB error", - } - }, - "PutItem", + service_under_test.document_review_service.create_dynamo_entry.side_effect = ( + ClientError( + { + "Error": { + "Code": "ConditionalCheckFailedException", + "Message": "DynamoDB error", + } + }, + "PutItem", + ) ) service_under_test.process_review_message(sample_review_message) @@ -193,11 +205,7 @@ def test_process_review_message_continues_dynamo_conditional_check_failure( service_under_test._delete_files_from_staging.assert_called() -# Tests for _build_review_record and _create_review_record methods - - def test_build_review_record_success(service_under_test, sample_review_message): - """Test successful building of review record.""" files = [ DocumentReviewFileDetails( file_name="test_document.pdf", @@ -206,14 +214,14 @@ def test_build_review_record_success(service_under_test, sample_review_message): ] result = service_under_test._build_review_record( - sample_review_message, "test-review-id", files + sample_review_message, "test-review-id", files, "Y12345" ) assert isinstance(result, DocumentUploadReviewReference) assert result.id == "test-review-id" assert result.nhs_number == "9000000009" assert result.review_status == DocumentReviewStatus.PENDING_REVIEW - assert result.review_reason == "Demographic mismatches" + assert result.review_reason == "Unsuccessful upload" assert result.author == "Y12345" assert result.custodian == "Y12345" assert len(result.files) == 1 @@ -224,7 +232,6 @@ def test_build_review_record_success(service_under_test, sample_review_message): def test_build_review_record_with_multiple_files(service_under_test): - """Test building review record with multiple files.""" message = ReviewMessageBody( upload_id="test-upload-id-789", files=[ @@ -238,10 +245,8 @@ def test_build_review_record_with_multiple_files(service_under_test): ), ], nhs_number="9000000009", - failure_reason=DocumentReviewReason.FILE_COUNT_MISMATCH, - upload_date="2024-01-15T10:30:00Z", + failure_reason=DocumentReviewReason.UNSUCCESSFUL_UPLOAD, uploader_ods="Y12345", - current_gp="Y12345", ) files = [ @@ -255,18 +260,16 @@ def test_build_review_record_with_multiple_files(service_under_test): ), ] - result = service_under_test._build_review_record(message, 
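The two "continues" tests in this file pin down idempotent retry behaviour: a ConditionalCheckFailedException from the conditional DynamoDB put, or a failed precondition on the if_none_match-guarded S3 copy, both mean the message was already processed, so the service carries on to staging cleanup instead of raising. A compact sketch of the DynamoDB side of that pattern; the helper is hypothetical:

```python
from botocore.exceptions import ClientError


def create_if_absent(create_dynamo_entry, review_record) -> None:
    # Swallow only the duplicate-write signal; anything else still raises.
    try:
        create_dynamo_entry(review_record)
    except ClientError as error:
        code = error.response.get("Error", {}).get("Code")
        if code != "ConditionalCheckFailedException":
            raise
```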
"test-review-id", files) + result = service_under_test._build_review_record( + message, "test-review-id", files, "Y12345" + ) assert len(result.files) == 2 assert result.files[0].file_name == "document_1.pdf" assert result.files[1].file_name == "document_2.pdf" -# Tests for _move_files_to_review_bucket method - - def test_move_files_success(service_under_test, sample_review_message, mocker): - """Test successful file move from staging to review bucket.""" mocker.patch("uuid.uuid4", return_value="123412342") files = service_under_test._move_files_to_review_bucket( @@ -284,12 +287,11 @@ def test_move_files_success(service_under_test, sample_review_message, mocker): source_file_key="staging/9000000009/test_document.pdf", dest_bucket="test_document_review_bucket", dest_file_key=expected_key, - if_none_match="*", + if_none_match=True, ) def test_move_multiple_files_success(service_under_test, mocker): - """Test successful move of multiple files.""" message = ReviewMessageBody( upload_id="test-upload-id-999", files=[ @@ -303,10 +305,8 @@ def test_move_multiple_files_success(service_under_test, mocker): ), ], nhs_number="9000000009", - failure_reason=DocumentReviewReason.FILE_COUNT_MISMATCH, - upload_date="2024-01-15T10:30:00Z", + failure_reason=DocumentReviewReason.UNSUCCESSFUL_UPLOAD, uploader_ods="Y12345", - current_gp="Y12345", ) mocker.patch("uuid.uuid4", side_effect=["123412342", "56785678"]) @@ -322,7 +322,6 @@ def test_move_multiple_files_success(service_under_test, mocker): def test_move_files_copy_error(service_under_test, sample_review_message): - """Test file move handles S3 copy errors.""" service_under_test.s3_service.copy_across_bucket.side_effect = ClientError( {"Error": {"Code": "NoSuchKey", "Message": "Source not found"}}, "CopyObject", @@ -337,7 +336,6 @@ def test_move_files_copy_error(service_under_test, sample_review_message): def test_move_files_to_review_bucket_continues_file_already_exists_in_review_bucket( service_under_test, sample_review_message ): - service_under_test.s3_service.copy_across_bucket.side_effect = ClientError( { "Error": { @@ -349,14 +347,10 @@ def test_move_files_to_review_bucket_continues_file_already_exists_in_review_buc ) service_under_test.process_review_message(sample_review_message) - service_under_test.dynamo_service.create_item.assert_called() - - -# Tests for _delete_files_from_staging method + service_under_test.document_review_service.create_dynamo_entry.assert_called() def test_delete_from_staging_success(service_under_test, sample_review_message): - """Test successful deletion from staging bucket.""" service_under_test._delete_files_from_staging(sample_review_message) service_under_test.s3_service.delete_object.assert_called_once_with( @@ -366,7 +360,6 @@ def test_delete_from_staging_success(service_under_test, sample_review_message): def test_delete_from_staging_handles_errors(service_under_test, sample_review_message): - """Test deletion from staging handles errors gracefully.""" service_under_test.s3_service.delete_object.side_effect = ClientError( {"Error": {"Code": "AccessDenied", "Message": "Access Denied"}}, "DeleteObject", @@ -378,25 +371,20 @@ def test_delete_from_staging_handles_errors(service_under_test, sample_review_me service_under_test.s3_service.delete_object.assert_called_once() -# Integration scenario tests - - def test_full_workflow_with_valid_message(service_under_test, sample_review_message): - """Test complete workflow from message to final record creation.""" - service_under_test.dynamo_service.create_item.return_value 
diff --git a/lambdas/tests/unit/services/test_document_service.py b/lambdas/tests/unit/services/test_document_service.py
index 59b244a505..87b603050e 100644
--- a/lambdas/tests/unit/services/test_document_service.py
+++ b/lambdas/tests/unit/services/test_document_service.py
@@ -572,7 +572,7 @@ def test_get_item_with_custom_model_class(
         "Author": "Y12345",
         "Custodian": "Y12345",
         "ReviewStatus": "PENDING_REVIEW",
-        "ReviewReason": "General error",
+        "ReviewReason": "Unsuccessful upload",
         "UploadDate": 1699000000,
         "Files": [
             {
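The fixture in this hunk also documents the storage shape: review items live in DynamoDB with PascalCase attribute names (`Author`, `Custodian`, `ReviewStatus`, `ReviewReason`, `UploadDate`, `Files`). As a hedged illustration of how a "custom model class" can round-trip such an item into snake_case Python, here is a trimmed stand-in using pydantic's `to_pascal` alias generator; the real model in this repo surely carries more fields and validation.

```python
from typing import List

from pydantic import BaseModel, ConfigDict
from pydantic.alias_generators import to_pascal


class ReviewItemSketch(BaseModel):
    """Trimmed, hypothetical stand-in for the model the test parses into."""

    # populate_by_name lets code use snake_case while items stay PascalCase.
    model_config = ConfigDict(alias_generator=to_pascal, populate_by_name=True)

    author: str
    custodian: str
    review_status: str
    review_reason: str
    upload_date: int
    files: List[dict] = []


item = {
    "Author": "Y12345",
    "Custodian": "Y12345",
    "ReviewStatus": "PENDING_REVIEW",
    "ReviewReason": "Unsuccessful upload",
    "UploadDate": 1699000000,
    "Files": [],
}

parsed = ReviewItemSketch.model_validate(item)
assert parsed.review_reason == "Unsuccessful upload"
```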
"9000000009" + + +def test_get_patient_custodian_returns_uploader_ods_on_invalid_resource_id( + service_under_test, sample_review_message, mock_pds_service +): + + mock_pds_service.fetch_patient_details.side_effect = InvalidResourceIdException( + "Invalid NHS number" + ) + + result = service_under_test._get_patient_custodian(sample_review_message) + + assert result == "Y12345" + assert sample_review_message.nhs_number == "9000000009" + + +def test_get_patient_custodian_handles_patient_not_found_sets_placeholder( + service_under_test, sample_review_message, mock_pds_service +): + mock_pds_service.fetch_patient_details.side_effect = PatientNotFoundException( + "Patient not found" + ) + + result = service_under_test._get_patient_custodian(sample_review_message) + + assert result == "Y12345" + assert sample_review_message.nhs_number == "0000000000" + + diff --git a/lambdas/tests/unit/services/test_document_service.py b/lambdas/tests/unit/services/test_document_service.py index 59b244a505..87b603050e 100644 --- a/lambdas/tests/unit/services/test_document_service.py +++ b/lambdas/tests/unit/services/test_document_service.py @@ -572,7 +572,7 @@ def test_get_item_with_custom_model_class( "Author": "Y12345", "Custodian": "Y12345", "ReviewStatus": "PENDING_REVIEW", - "ReviewReason": "General error", + "ReviewReason": "Unsuccessful upload", "UploadDate": 1699000000, "Files": [ { diff --git a/lambdas/tests/unit/services/test_document_upload_review_service.py b/lambdas/tests/unit/services/test_document_upload_review_service.py index ceefbfcee0..a06d88cf5d 100644 --- a/lambdas/tests/unit/services/test_document_upload_review_service.py +++ b/lambdas/tests/unit/services/test_document_upload_review_service.py @@ -7,10 +7,8 @@ from enums.metadata_field_names import DocumentReferenceMetadataFields from freezegun import freeze_time from models.document_review import ( - DocumentReviewFileDetails, DocumentUploadReviewReference, ) -from pydantic import ValidationError from services.document_upload_review_service import DocumentUploadReviewService from tests.unit.conftest import ( MOCK_DOCUMENT_REVIEW_BUCKET, diff --git a/lambdas/tests/unit/services/test_get_document_review_service.py b/lambdas/tests/unit/services/test_get_document_review_service.py index 0d2dcadb88..5d57b5e805 100644 --- a/lambdas/tests/unit/services/test_get_document_review_service.py +++ b/lambdas/tests/unit/services/test_get_document_review_service.py @@ -66,7 +66,7 @@ def mock_document_review(): author=TEST_ODS_CODE, custodian=TEST_ODS_CODE, review_status=DocumentReviewStatus.PENDING_REVIEW, - review_reason=DocumentReviewReason.FILE_NAME_MISMATCH, + review_reason=DocumentReviewReason.UNSUCCESSFUL_UPLOAD, upload_date=1699000000, files=files, nhs_number=TEST_NHS_NUMBER, diff --git a/lambdas/tests/unit/services/test_ods_report_service.py b/lambdas/tests/unit/services/test_ods_report_service.py index 81d95c3374..c959db8ff0 100644 --- a/lambdas/tests/unit/services/test_ods_report_service.py +++ b/lambdas/tests/unit/services/test_ods_report_service.py @@ -4,7 +4,6 @@ from unittest.mock import call import pytest - from enums.document_review_reason import DocumentReviewReason from enums.document_review_status import DocumentReviewStatus from enums.dynamo_filter import AttributeOperator @@ -63,7 +62,7 @@ def mock_review_result(): return DocumentUploadReviewReference( nhs_number="mock_nhs_number", author="mock_author", - review_reason=DocumentReviewReason.GENERAL_ERROR, + review_reason=DocumentReviewReason.UNSUCCESSFUL_UPLOAD, 
diff --git a/lambdas/tests/unit/services/test_staged_document_review_processing_service.py b/lambdas/tests/unit/services/test_staged_document_review_processing_service.py
index 1258b50c8b..255aae1ab6 100644
--- a/lambdas/tests/unit/services/test_staged_document_review_processing_service.py
+++ b/lambdas/tests/unit/services/test_staged_document_review_processing_service.py
@@ -1,6 +1,5 @@
 import pytest
 from botocore.exceptions import ClientError
-
 from enums.document_review_reason import DocumentReviewReason
 from enums.document_review_status import DocumentReviewStatus
 from enums.virus_scan_result import VirusScanResult
@@ -66,7 +65,7 @@ def sample_document_reference():
         author="Y12345",
         custodian="Y12345",
         review_status=DocumentReviewStatus.REVIEW_PENDING_UPLOAD,
-        review_reason=DocumentReviewReason.FILE_COUNT_MISMATCH,
+        review_reason=DocumentReviewReason.UNSUCCESSFUL_UPLOAD,
         upload_date=1704110400,
         files=[
             DocumentReviewFileDetails(
diff --git a/lambdas/tests/unit/services/test_update_document_review_service.py b/lambdas/tests/unit/services/test_update_document_review_service.py
index 5caabd3503..c50d62ffb6 100644
--- a/lambdas/tests/unit/services/test_update_document_review_service.py
+++ b/lambdas/tests/unit/services/test_update_document_review_service.py
@@ -57,7 +57,7 @@ def mock_document_review():
         author=TEST_ODS_CODE,
         custodian=TEST_ODS_CODE,
         review_status=DocumentReviewStatus.PENDING_REVIEW,
-        review_reason=DocumentReviewReason.DUPLICATE_RECORD,
+        review_reason=DocumentReviewReason.UNSUCCESSFUL_UPLOAD,
         upload_date=TEST_UPLOAD_DATE,
         files=files,
         nhs_number=TEST_NHS_NUMBER,
@@ -546,7 +546,6 @@ def test_process_review_status_update_calls_handle_reassignment_for_reassignment
     )
 
 
-
 @freeze_time(TEST_FROZEN_TIME)
 def test_process_review_status_update_calls_soft_delete_for_approval(
     mock_service, mock_document_review, mocker
reviewer="H81109", review_date=get_timestamp(days_ago=3), @@ -198,7 +198,7 @@ def scenario_5(patient): patient=patient, files=files, review_status="APPROVED", - review_reason="Duplicate records error", + review_reason="New document to review", days_ago_uploaded=10, reviewer="H81109", review_date=get_timestamp(days_ago=5), @@ -218,7 +218,7 @@ def scenario_6(patient): patient=patient, files=files, review_status="PENDING_REVIEW", - review_reason="Unknown NHS number", + review_reason="New document to review", days_ago_uploaded=3, ) review_obj["DocumentSnomedCodeType"] = "734163000" @@ -237,7 +237,7 @@ def scenario_7(patient): patient=patient, files=files, review_status="NEVER_REVIEWED", - review_reason="General error", + review_reason="Unsuccessful upload", review_date=get_timestamp(days_ago=1), days_ago_uploaded=15, ) @@ -250,7 +250,7 @@ def scenario_7(patient): patient=patient, files=files, review_status="PENDING_REVIEW", - review_reason="General error", + review_reason="Unsuccessful upload", days_ago_uploaded=15, ) review_obj_v2["Version"] = 2