From 4b9a62940f6e4197e67865f0a8a6cd1421ff298d Mon Sep 17 00:00:00 2001 From: Landon Shumway Date: Fri, 31 Oct 2025 00:52:53 -0500 Subject: [PATCH 01/81] Set update records to use decoupled SK pattern --- .../cc_common/data_model/data_client.py | 45 ++- .../data_model/schema/license/record.py | 2 +- .../data_model/schema/privilege/record.py | 2 +- .../data_model/schema/provider/record.py | 2 +- .../common/common_test/test_data_generator.py | 19 +- .../common/tests/function/test_data_client.py | 339 ++++++++++-------- .../resources/dynamo/license-update.json | 2 +- .../resources/dynamo/privilege-update.json | 2 +- .../tests/function/test_encumbrance_events.py | 76 ++-- .../test_license_deactivation_events.py | 2 +- .../provider-data-v1/handlers/__init__.py | 4 +- .../provider-data-v1/handlers/state_api.py | 4 +- .../test_provider_transformations.py | 21 +- .../test_handlers/test_encumbrance.py | 6 +- 14 files changed, 307 insertions(+), 219 deletions(-) diff --git a/backend/compact-connect/lambdas/python/common/cc_common/data_model/data_client.py b/backend/compact-connect/lambdas/python/common/cc_common/data_model/data_client.py index 4cb052a8e..f13e2a788 100644 --- a/backend/compact-connect/lambdas/python/common/cc_common/data_model/data_client.py +++ b/backend/compact-connect/lambdas/python/common/cc_common/data_model/data_client.py @@ -179,9 +179,15 @@ def get_provider_user_records( compact: str, provider_id: UUID, consistent_read: bool = True, + include_updates: bool = False, ) -> ProviderUserRecords: logger.info('Getting provider') + # Determine SK prefix based on include_updates parameter + # When include_updates=False, use begins_with({compact}#PROVIDER#) to exclude update records + # When include_updates=True, use begins_with({compact}#PROVIDER) to include both main records and update records + sk_prefix = f'{compact}#PROVIDER' if not include_updates else f'{compact}#PROV' + resp = {'Items': []} last_evaluated_key = None @@ -191,7 +197,7 @@ def 
get_provider_user_records( query_resp = self.config.provider_table.query( Select='ALL_ATTRIBUTES', KeyConditionExpression=Key('pk').eq(f'{compact}#PROVIDER#{provider_id}') - & Key('sk').begins_with(f'{compact}#PROVIDER'), + & Key('sk').begins_with(sk_prefix), ConsistentRead=consistent_read, **pagination, ) @@ -971,26 +977,41 @@ def get_privilege_data( :param str provider_id: The provider of the privilege :param str jurisdiction: The jurisdiction of the privilege :param str license_type_abbr: The license type abbreviation of the privilege + :param bool consistent_read: If true, performs a consistent read of the records :param bool detail: Boolean determining whether we include associated records or just privilege record itself :raises CCNotFoundException: If the privilege record is not found :return If detail = False list of length one containing privilege item, if detail = True list containing, privilege record, privilege update records and privilege adverse action records """ - # Get the privilege record - if detail: - sk_condition = Key('sk').begins_with(f'{compact}#PROVIDER#privilege/{jurisdiction}/{license_type_abbr}#') - else: - sk_condition = Key('sk').eq(f'{compact}#PROVIDER#privilege/{jurisdiction}/{license_type_abbr}#') + # Get all provider records (with updates if detail=True) + provider_user_records: ProviderUserRecords = self.get_provider_user_records( + compact=compact, + provider_id=provider_id, + consistent_read=consistent_read, + include_updates=detail, # Only include updates if detail=True + ) - resp = self.config.provider_table.query( - Select='ALL_ATTRIBUTES', - KeyConditionExpression=Key('pk').eq(f'{compact}#PROVIDER#{provider_id}') & sk_condition, - ConsistentRead=consistent_read, + # Get the specific privilege record + privilege = provider_user_records.get_specific_privilege_record( + jurisdiction=jurisdiction, + license_abbreviation=license_type_abbr, ) - if not resp['Items'] or not len(resp['Items']): + + if privilege is None: raise 
CCNotFoundException('Privilege not found') - return load_records_into_schemas(resp['Items']) + # Build return list in the same format as before + result = [privilege.to_dict()] + if detail: + # Add update records + privilege_updates = provider_user_records.get_update_records_for_privilege( + jurisdiction=jurisdiction, + license_type=privilege.licenseType, + ) + result.extend([update.to_dict() for update in privilege_updates]) + + + return result @logger_inject_kwargs(logger, 'compact', 'provider_id', 'jurisdiction', 'license_type') def get_privilege(self, *, compact: str, provider_id: str, jurisdiction: str, license_type_abbr: str) -> dict: diff --git a/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/license/record.py b/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/license/record.py index 9af6ea97e..1e4cef6f7 100644 --- a/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/license/record.py +++ b/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/license/record.py @@ -215,7 +215,7 @@ def generate_pk_sk(self, in_data, **kwargs): # noqa: ARG001 unused-argument change_hash = self.hash_changes(in_data) license_type_abbr = config.license_type_abbreviations[in_data['compact']][in_data['licenseType']] in_data['sk'] = ( - f'{in_data["compact"]}#PROVIDER#license/{in_data["jurisdiction"]}/{license_type_abbr}#UPDATE#{int(config.current_standard_datetime.timestamp())}/{change_hash}' + f'{in_data["compact"]}#PROV_UPDATE#license/{in_data["jurisdiction"]}/{license_type_abbr}/{int(config.current_standard_datetime.timestamp())}/{change_hash}' ) return in_data diff --git a/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/privilege/record.py b/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/privilege/record.py index fdc918357..a98482dda 100644 --- 
a/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/privilege/record.py +++ b/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/privilege/record.py @@ -240,7 +240,7 @@ def generate_pk_sk(self, in_data, **kwargs): # noqa: ARG001 unused-argument change_hash = self.hash_changes(in_data) license_type_abbr = config.license_type_abbreviations[in_data['compact']][in_data['licenseType']] in_data['sk'] = ( - f'{in_data["compact"]}#PROVIDER#privilege/{in_data["jurisdiction"]}/{license_type_abbr}#UPDATE#{int(config.current_standard_datetime.timestamp())}/{change_hash}' + f'{in_data["compact"]}#PROV_UPDATE#privilege/{in_data["jurisdiction"]}/{license_type_abbr}/{int(config.current_standard_datetime.timestamp())}/{change_hash}' ) return in_data diff --git a/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/provider/record.py b/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/provider/record.py index be8b56992..be5079ff4 100644 --- a/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/provider/record.py +++ b/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/provider/record.py @@ -238,6 +238,6 @@ def generate_pk_sk(self, in_data, **kwargs): # noqa: ARG001 unused-argument # field for this. 
change_hash = self.hash_changes(in_data) in_data['sk'] = ( - f'{in_data["compact"]}#PROVIDER#UPDATE#{int(config.current_standard_datetime.timestamp())}/{change_hash}' + f'{in_data["compact"]}#PROV_UPDATE#provider/{int(config.current_standard_datetime.timestamp())}/{change_hash}' ) return in_data diff --git a/backend/compact-connect/lambdas/python/common/common_test/test_data_generator.py b/backend/compact-connect/lambdas/python/common/common_test/test_data_generator.py index fd2e36e83..04d205437 100644 --- a/backend/compact-connect/lambdas/python/common/common_test/test_data_generator.py +++ b/backend/compact-connect/lambdas/python/common/common_test/test_data_generator.py @@ -101,14 +101,19 @@ def query_privilege_update_records_for_given_record_from_database( """ Helper method to query update records from the database using the provider data class instance. - All of our update records use the same pk as the actual record that is being updated. The sk of the actual - record is the prefix for all the update records. Using this pattern, we can query for all of the update records - that have been written for the given record. + All of our update records use the same pk as the actual record that is being updated. 
The new pattern + for privilege updates is {compact}#PROV_UPDATE#privilege/{jurisdiction}/{license_type_abbr}/ """ serialized_record = privilege_data.serialize_to_database_record() + from cc_common.config import config + + license_type_abbr = config.license_type_abbreviations[privilege_data.compact][privilege_data.licenseType] + sk_prefix = ( + f'{privilege_data.compact}#PROV_UPDATE#privilege/{privilege_data.jurisdiction}/{license_type_abbr}/' + ) privilege_update_records = TestDataGenerator._query_records_by_pk_and_sk_prefix( - serialized_record['pk'], f'{serialized_record["sk"]}UPDATE' + serialized_record['pk'], sk_prefix ) return [PrivilegeUpdateData.from_database_record(update_record) for update_record in privilege_update_records] @@ -124,8 +129,12 @@ def query_provider_update_records_for_given_record_from_database(provider_record """ serialized_record = provider_record.serialize_to_database_record() + sk_prefix = ( + f'{provider_record.compact}#PROV_UPDATE#provider' + ) + return TestDataGenerator._query_records_by_pk_and_sk_prefix( - serialized_record['pk'], f'{serialized_record["sk"]}#UPDATE' + serialized_record['pk'], sk_prefix ) @staticmethod diff --git a/backend/compact-connect/lambdas/python/common/tests/function/test_data_client.py b/backend/compact-connect/lambdas/python/common/tests/function/test_data_client.py index 1599702ce..36e103ebf 100644 --- a/backend/compact-connect/lambdas/python/common/tests/function/test_data_client.py +++ b/backend/compact-connect/lambdas/python/common/tests/function/test_data_client.py @@ -228,6 +228,7 @@ def test_data_client_updates_privilege_records_for_specific_license_type(self): """ from cc_common.data_model.data_client import DataClient from cc_common.data_model.schema.privilege import PrivilegeData + from cc_common.data_model.provider_record_util import ProviderUserRecords # Imagine that there have been 123 privileges issued for the compact # and that the next privilege number will be 124 @@ -315,79 +316,87 @@ def 
test_data_client_updates_privilege_records_for_specific_license_type(self): ) # Verify that the audiologist privilege update record was created for ky - new_aud_ky_privilege = self._provider_table.query( - KeyConditionExpression=Key('pk').eq(f'aslp#PROVIDER#{provider_uuid}') - & Key('sk').begins_with('aslp#PROVIDER#privilege/ky/aud#'), - )['Items'] + provider_user_records: ProviderUserRecords = self.config.data_client.get_provider_user_records( + compact='aslp', provider_id=provider_uuid, include_updates=True) + + new_aud_ky_privilege = provider_user_records.get_specific_privilege_record( + jurisdiction='ky', license_abbreviation='aud' + ) + self.assertEqual( - [ - # Primary record - { - 'pk': f'aslp#PROVIDER#{provider_uuid}', - 'sk': 'aslp#PROVIDER#privilege/ky/aud#', - 'type': 'privilege', - 'providerId': provider_uuid, - 'compact': 'aslp', - 'jurisdiction': 'ky', - 'licenseJurisdiction': 'oh', - 'licenseType': 'audiologist', - 'administratorSetStatus': 'active', - # Should be updated dates for renewal, expiration, update + # Primary record + { + 'pk': f'aslp#PROVIDER#{provider_uuid}', + 'sk': 'aslp#PROVIDER#privilege/ky/aud#', + 'type': 'privilege', + 'providerId': provider_uuid, + 'compact': 'aslp', + 'jurisdiction': 'ky', + 'licenseJurisdiction': 'oh', + 'licenseType': 'audiologist', + 'administratorSetStatus': 'active', + # Should be updated dates for renewal, expiration, update + 'dateOfIssuance': '2023-11-08T23:59:59+00:00', + 'dateOfRenewal': '2024-11-08T23:59:59+00:00', + 'dateOfExpiration': '2025-10-31', + 'dateOfUpdate': '2024-11-08T23:59:59+00:00', + 'compactTransactionId': 'test_transaction_id', + 'compactTransactionIdGSIPK': 'COMPACT#aslp#TX#test_transaction_id#', + 'attestations': self.sample_privilege_attestations, + # Should remain the same, since we're renewing the same privilege + 'privilegeId': 'AUD-KY-1', + }, + new_aud_ky_privilege.serialize_to_database_record() + ) + + ky_aud_update_record = 
provider_user_records.get_update_records_for_privilege( + jurisdiction=new_aud_ky_privilege.jurisdiction, + license_type=new_aud_ky_privilege.licenseType + )[0] + + self.assertEqual( + # A new history record + { + 'pk': f'aslp#PROVIDER#{provider_uuid}', + 'sk': 'aslp#PROV_UPDATE#privilege/ky/aud/1731110399/f61e34798e1775ff6230d1187d444146', + 'type': 'privilegeUpdate', + 'updateType': 'renewal', + 'providerId': provider_uuid, + 'compact': 'aslp', + 'compactTransactionIdGSIPK': 'COMPACT#aslp#TX#1234567890#', + 'jurisdiction': 'ky', + 'licenseType': 'audiologist', + 'dateOfUpdate': '2024-11-08T23:59:59+00:00', + 'createDate': '2024-11-08T23:59:59+00:00', + 'effectiveDate': '2024-11-08T23:59:59+00:00', + 'previous': { 'dateOfIssuance': '2023-11-08T23:59:59+00:00', + 'dateOfRenewal': '2023-11-08T23:59:59+00:00', + 'dateOfExpiration': '2024-10-31', + 'dateOfUpdate': '2023-11-08T23:59:59+00:00', + 'compactTransactionId': '1234567890', + 'attestations': self.sample_privilege_attestations, + 'administratorSetStatus': 'active', + 'licenseJurisdiction': 'oh', + 'privilegeId': 'AUD-KY-1', + }, + 'updatedValues': { + 'attestations': self.sample_privilege_attestations, 'dateOfRenewal': '2024-11-08T23:59:59+00:00', 'dateOfExpiration': '2025-10-31', - 'dateOfUpdate': '2024-11-08T23:59:59+00:00', 'compactTransactionId': 'test_transaction_id', - 'compactTransactionIdGSIPK': 'COMPACT#aslp#TX#test_transaction_id#', - 'attestations': self.sample_privilege_attestations, - # Should remain the same, since we're renewing the same privilege 'privilegeId': 'AUD-KY-1', }, - # A new history record - { - 'pk': f'aslp#PROVIDER#{provider_uuid}', - 'sk': 'aslp#PROVIDER#privilege/ky/aud#UPDATE#1731110399/f61e34798e1775ff6230d1187d444146', - 'type': 'privilegeUpdate', - 'updateType': 'renewal', - 'providerId': provider_uuid, - 'compact': 'aslp', - 'compactTransactionIdGSIPK': 'COMPACT#aslp#TX#1234567890#', - 'jurisdiction': 'ky', - 'licenseType': 'audiologist', - 'dateOfUpdate': 
'2024-11-08T23:59:59+00:00', - 'createDate': '2024-11-08T23:59:59+00:00', - 'effectiveDate': '2024-11-08T23:59:59+00:00', - 'previous': { - 'dateOfIssuance': '2023-11-08T23:59:59+00:00', - 'dateOfRenewal': '2023-11-08T23:59:59+00:00', - 'dateOfExpiration': '2024-10-31', - 'dateOfUpdate': '2023-11-08T23:59:59+00:00', - 'compactTransactionId': '1234567890', - 'attestations': self.sample_privilege_attestations, - 'administratorSetStatus': 'active', - 'licenseJurisdiction': 'oh', - 'privilegeId': 'AUD-KY-1', - }, - 'updatedValues': { - 'attestations': self.sample_privilege_attestations, - 'dateOfRenewal': '2024-11-08T23:59:59+00:00', - 'dateOfExpiration': '2025-10-31', - 'compactTransactionId': 'test_transaction_id', - 'privilegeId': 'AUD-KY-1', - }, - }, - ], - new_aud_ky_privilege, + }, + ky_aud_update_record.serialize_to_database_record(), ) # Verify that a new audiologist privilege record was created for ne with expected values - new_aud_ne_privilege = self._provider_table.query( - KeyConditionExpression=Key('pk').eq(f'aslp#PROVIDER#{provider_uuid}') - & Key('sk').begins_with('aslp#PROVIDER#privilege/ne/aud#'), - )['Items'] + new_aud_ne_privilege = provider_user_records.get_specific_privilege_record( + jurisdiction='ne', license_abbreviation='aud' + ) self.assertEqual( - [ - # Primary record with no history record + # Primary record { 'pk': f'aslp#PROVIDER#{provider_uuid}', 'sk': 'aslp#PROVIDER#privilege/ne/aud#', @@ -408,10 +417,16 @@ def test_data_client_updates_privilege_records_for_specific_license_type(self): 'attestations': self.sample_privilege_attestations, # Should remain the same, since we're renewing the same privilege 'privilegeId': 'AUD-NE-124', - } - ], - new_aud_ne_privilege, + }, + new_aud_ne_privilege.serialize_to_database_record(), ) + # assert there are no update records for this privilege + ne_aud_update_records = provider_user_records.get_update_records_for_privilege( + jurisdiction=new_aud_ne_privilege.jurisdiction, + 
license_type=new_aud_ne_privilege.licenseType + ) + self.assertEqual([], ne_aud_update_records) + # ensure that slp privilege was not updated with an update record slp_privilege = self._provider_table.query( @@ -448,6 +463,7 @@ def test_data_client_create_privilege_record_invalid_license_type(self): def test_data_client_handles_large_privilege_purchase(self): """Test that we can process privilege purchases with more than 100 transaction items.""" from cc_common.data_model.data_client import DataClient + from cc_common.data_model.provider_record_util import ProviderUserRecords from cc_common.data_model.schema.common import ActiveInactiveStatus from cc_common.data_model.schema.privilege import PrivilegeData @@ -501,24 +517,28 @@ def test_data_client_handles_large_privilege_purchase(self): ) # Verify that all privileges were updated - for jurisdiction in jurisdictions: - privilege_records = self._provider_table.query( - KeyConditionExpression=Key('pk').eq(f'aslp#PROVIDER#{provider_uuid}') - & Key('sk').begins_with(f'aslp#PROVIDER#privilege/{jurisdiction}/aud#'), - )['Items'] - - self.assertEqual(2, len(privilege_records)) # One privilege record and one update record + provider_user_records: ProviderUserRecords = self.config.data_client.get_provider_user_records( + compact='aslp', provider_id=provider_uuid, include_updates=True + ) - # Find the main privilege record - privilege_record = next(r for r in privilege_records if r['type'] == 'privilege') - self.assertEqual('2025-10-31', privilege_record['dateOfExpiration']) - self.assertEqual('test_transaction_id', privilege_record['compactTransactionId']) + for jurisdiction in jurisdictions: + # Get the privilege record using ProviderUserRecords + privilege_record = provider_user_records.get_specific_privilege_record( + jurisdiction=jurisdiction, license_abbreviation='aud' + ) + self.assertIsNotNone(privilege_record, f'Privilege record not found for jurisdiction {jurisdiction}') + self.assertEqual('2025-10-31', 
privilege_record.dateOfExpiration.isoformat()) + self.assertEqual('test_transaction_id', privilege_record.compactTransactionId) - # Find the update record - update_record = next(r for r in privilege_records if r['type'] == 'privilegeUpdate') - self.assertEqual('renewal', update_record['updateType']) - self.assertEqual('2024-10-31', update_record['previous']['dateOfExpiration']) - self.assertEqual('2025-10-31', update_record['updatedValues']['dateOfExpiration']) + # Get the update record using ProviderUserRecords + update_records = provider_user_records.get_update_records_for_privilege( + jurisdiction=jurisdiction, license_type=privilege_record.licenseType + ) + self.assertEqual(1, len(update_records), f'Expected 1 update record for jurisdiction {jurisdiction}') + update_record = update_records[0] + self.assertEqual('renewal', update_record.updateType) + self.assertEqual('2024-10-31', update_record.previous['dateOfExpiration'].isoformat()) + self.assertEqual('2025-10-31', update_record.updatedValues['dateOfExpiration'].isoformat()) # Verify the provider record was updated correctly provider = self._provider_table.get_item( @@ -721,6 +741,7 @@ def test_get_ssn_by_provider_id_raises_exception_multiple_records_found(self): def test_deactivate_privilege_updates_record(self): from cc_common.data_model.data_client import DataClient + from cc_common.data_model.provider_record_util import ProviderUserRecords provider_id = self._load_provider_data() @@ -762,68 +783,80 @@ def test_deactivate_privilege_updates_record(self): ) # Verify that the privilege record was updated - new_privilege = self._provider_table.query( - KeyConditionExpression=Key('pk').eq(f'aslp#PROVIDER#{provider_id}') - & Key('sk').begins_with('aslp#PROVIDER#privilege/ne/aud#'), - )['Items'] + provider_user_records: ProviderUserRecords = self.config.data_client.get_provider_user_records( + compact='aslp', provider_id=provider_id, include_updates=True + ) + + new_privilege = 
provider_user_records.get_specific_privilege_record( + jurisdiction='ne', license_abbreviation='aud' + ) + self.assertIsNotNone(new_privilege, 'Privilege record not found') + self.assertEqual( - [ - # Primary record - { - 'pk': f'aslp#PROVIDER#{provider_id}', - 'sk': 'aslp#PROVIDER#privilege/ne/aud#', - 'type': 'privilege', - 'providerId': str(provider_id), - 'compact': 'aslp', - 'licenseJurisdiction': 'oh', - 'licenseType': 'audiologist', - 'jurisdiction': 'ne', - 'administratorSetStatus': 'inactive', + { + 'pk': f'aslp#PROVIDER#{provider_id}', + 'sk': 'aslp#PROVIDER#privilege/ne/aud#', + 'type': 'privilege', + 'providerId': str(provider_id), + 'compact': 'aslp', + 'licenseJurisdiction': 'oh', + 'licenseType': 'audiologist', + 'jurisdiction': 'ne', + 'administratorSetStatus': 'inactive', + 'dateOfIssuance': '2023-11-08T23:59:59+00:00', + 'dateOfRenewal': '2023-11-08T23:59:59+00:00', + 'dateOfExpiration': '2024-10-31', + 'dateOfUpdate': '2024-11-08T23:59:59+00:00', + 'compactTransactionId': '1234567890', + 'compactTransactionIdGSIPK': 'COMPACT#aslp#TX#1234567890#', + 'attestations': self.sample_privilege_attestations, + 'privilegeId': 'AUD-NE-1', + }, + new_privilege.serialize_to_database_record(), + ) + + # Get the update record + update_records = provider_user_records.get_update_records_for_privilege( + jurisdiction='ne', license_type=new_privilege.licenseType + ) + self.assertEqual(1, len(update_records), 'Expected 1 update record') + update_record = update_records[0] + + self.assertEqual( + { + 'pk': f'aslp#PROVIDER#{provider_id}', + 'sk': 'aslp#PROV_UPDATE#privilege/ne/aud/1731110399/aac682a76e1182a641a1b40dd606ae51', + 'type': 'privilegeUpdate', + 'updateType': 'deactivation', + 'providerId': str(provider_id), + 'compact': 'aslp', + 'compactTransactionIdGSIPK': 'COMPACT#aslp#TX#1234567890#', + 'jurisdiction': 'ne', + 'licenseType': 'audiologist', + 'dateOfUpdate': '2024-11-08T23:59:59+00:00', + 'createDate': '2024-11-08T23:59:59+00:00', + 'effectiveDate': 
'2024-11-08T23:59:59+00:00', + 'deactivationDetails': { + 'note': 'test deactivation note', + 'deactivatedByStaffUserId': 'a4182428-d061-701c-82e5-a3d1d547d797', + 'deactivatedByStaffUserName': 'John Doe', + }, + 'previous': { 'dateOfIssuance': '2023-11-08T23:59:59+00:00', 'dateOfRenewal': '2023-11-08T23:59:59+00:00', 'dateOfExpiration': '2024-10-31', - 'dateOfUpdate': '2024-11-08T23:59:59+00:00', + 'dateOfUpdate': '2023-11-08T23:59:59+00:00', 'compactTransactionId': '1234567890', - 'compactTransactionIdGSIPK': 'COMPACT#aslp#TX#1234567890#', 'attestations': self.sample_privilege_attestations, + 'administratorSetStatus': 'active', + 'licenseJurisdiction': 'oh', 'privilegeId': 'AUD-NE-1', }, - # A new history record - { - 'pk': f'aslp#PROVIDER#{provider_id}', - 'sk': 'aslp#PROVIDER#privilege/ne/aud#UPDATE#1731110399/aac682a76e1182a641a1b40dd606ae51', - 'type': 'privilegeUpdate', - 'updateType': 'deactivation', - 'providerId': str(provider_id), - 'compact': 'aslp', - 'compactTransactionIdGSIPK': 'COMPACT#aslp#TX#1234567890#', - 'jurisdiction': 'ne', - 'licenseType': 'audiologist', - 'dateOfUpdate': '2024-11-08T23:59:59+00:00', - 'createDate': '2024-11-08T23:59:59+00:00', - 'effectiveDate': '2024-11-08T23:59:59+00:00', - 'deactivationDetails': { - 'note': 'test deactivation note', - 'deactivatedByStaffUserId': 'a4182428-d061-701c-82e5-a3d1d547d797', - 'deactivatedByStaffUserName': 'John Doe', - }, - 'previous': { - 'dateOfIssuance': '2023-11-08T23:59:59+00:00', - 'dateOfRenewal': '2023-11-08T23:59:59+00:00', - 'dateOfExpiration': '2024-10-31', - 'dateOfUpdate': '2023-11-08T23:59:59+00:00', - 'compactTransactionId': '1234567890', - 'attestations': self.sample_privilege_attestations, - 'administratorSetStatus': 'active', - 'licenseJurisdiction': 'oh', - 'privilegeId': 'AUD-NE-1', - }, - 'updatedValues': { - 'administratorSetStatus': 'inactive', - }, + 'updatedValues': { + 'administratorSetStatus': 'inactive', }, - ], - new_privilege, + }, + 
update_record.serialize_to_database_record(), ) # The deactivation should not remove 'ne' from privilegeJurisdictions, as that set is intended to include @@ -855,6 +888,7 @@ def test_deactivate_privilege_raises_if_privilege_not_found(self): def test_deactivate_privilege_on_inactive_privilege_raises_exception(self): from cc_common.data_model.data_client import DataClient + from cc_common.data_model.provider_record_util import ProviderUserRecords provider_id = self._load_provider_data() @@ -889,11 +923,14 @@ def test_deactivate_privilege_on_inactive_privilege_raises_exception(self): # We'll create it as if it were already deactivated original_history = { 'pk': f'aslp#PROVIDER#{provider_id}', - 'sk': 'aslp#PROVIDER#privilege/ne/aud#UPDATE#1731110399/483bebc6cb3fd6b517f8ce9ad706c518', + 'sk': 'aslp#PROV_UPDATE#privilege/ne/aud/1731110399/4ebb3dc8f1ffcc30fe7aad5ec49d0ca6', 'type': 'privilegeUpdate', 'updateType': 'renewal', 'providerId': str(provider_id), 'compact': 'aslp', + 'licenseType': 'audiologist', + 'createDate': '2024-11-08T23:59:59+00:00', + 'effectiveDate': '2024-11-08T23:59:59+00:00', 'compactTransactionIdGSIPK': 'COMPACT#aslp#TX#1234567890#', 'jurisdiction': 'ne', 'dateOfUpdate': '2024-11-08T23:59:59+00:00', @@ -905,7 +942,6 @@ def test_deactivate_privilege_on_inactive_privilege_raises_exception(self): 'compactTransactionId': '1234567890', 'attestations': self.sample_privilege_attestations, 'licenseJurisdiction': 'oh', - 'licenseType': 'audiologist', 'privilegeId': 'AUD-NE-1', }, 'updatedValues': { @@ -932,17 +968,30 @@ def test_deactivate_privilege_on_inactive_privilege_raises_exception(self): self.assertEqual('Privilege already deactivated', context.exception.message) # Verify that the privilege record was unchanged - new_privilege = self._provider_table.query( - KeyConditionExpression=Key('pk').eq(f'aslp#PROVIDER#{provider_id}') - & Key('sk').begins_with('aslp#PROVIDER#privilege/ne/aud#'), - )['Items'] - self.assertEqual([original_privilege, 
original_history], new_privilege) + provider_user_records: ProviderUserRecords = self.config.data_client.get_provider_user_records( + compact='aslp', provider_id=provider_id, include_updates=True + ) + + new_privilege = provider_user_records.get_specific_privilege_record( + jurisdiction='ne', license_abbreviation='aud' + ) + self.assertIsNotNone(new_privilege, 'Privilege record not found') + serialized_record = new_privilege.serialize_to_database_record() + # the serialize_to_database_record() call automatically generates a new dateOfUpdate stamp, + # setting it back to the original timestamp for comparison + serialized_record['dateOfUpdate'] = original_privilege['dateOfUpdate'] + self.assertEqual(original_privilege, serialized_record) + + # Verify the update record is unchanged + update_records = provider_user_records.get_update_records_for_privilege( + jurisdiction='ne', license_type=new_privilege.licenseType + ) + self.assertEqual(1, len(update_records), 'Expected 1 update record') + self.assertEqual(original_history, update_records[0].serialize_to_database_record()) # 'ne' should still be removed from privilegeJurisdictions - provider = self._provider_table.get_item( - Key={'pk': f'aslp#PROVIDER#{provider_id}', 'sk': 'aslp#PROVIDER'}, - )['Item'] - self.assertEqual(set(), provider.get('privilegeJurisdictions', set())) + provider = provider_user_records.get_provider_record() + self.assertEqual(set(), provider.privilegeJurisdictions) def test_get_provider_user_records_correctly_handles_pagination(self): """Test that get_provider_user_records correctly handles pagination by returning all records. 
diff --git a/backend/compact-connect/lambdas/python/common/tests/resources/dynamo/license-update.json b/backend/compact-connect/lambdas/python/common/tests/resources/dynamo/license-update.json index 6c43d8072..8a6e7ba74 100644 --- a/backend/compact-connect/lambdas/python/common/tests/resources/dynamo/license-update.json +++ b/backend/compact-connect/lambdas/python/common/tests/resources/dynamo/license-update.json @@ -1,6 +1,6 @@ { "pk": "aslp#PROVIDER#89a6377e-c3a5-40e5-bca5-317ec854c570", - "sk": "aslp#PROVIDER#license/oh/slp#UPDATE#1586264399/34702de3dc08e64922605a6b18f3838b", + "sk": "aslp#PROV_UPDATE#license/oh/slp/1586264399/34702de3dc08e64922605a6b18f3838b", "type": "licenseUpdate", "updateType": "renewal", "providerId": "89a6377e-c3a5-40e5-bca5-317ec854c570", diff --git a/backend/compact-connect/lambdas/python/common/tests/resources/dynamo/privilege-update.json b/backend/compact-connect/lambdas/python/common/tests/resources/dynamo/privilege-update.json index 8f2469682..456d3b702 100644 --- a/backend/compact-connect/lambdas/python/common/tests/resources/dynamo/privilege-update.json +++ b/backend/compact-connect/lambdas/python/common/tests/resources/dynamo/privilege-update.json @@ -1,6 +1,6 @@ { "pk": "aslp#PROVIDER#89a6377e-c3a5-40e5-bca5-317ec854c570", - "sk": "aslp#PROVIDER#privilege/ne/slp#UPDATE#1731110399/939a3c350708e34875f0a652bf7d7454", + "sk": "aslp#PROV_UPDATE#privilege/ne/slp/1731110399/939a3c350708e34875f0a652bf7d7454", "type": "privilegeUpdate", "updateType": "renewal", "providerId": "89a6377e-c3a5-40e5-bca5-317ec854c570", diff --git a/backend/compact-connect/lambdas/python/data-events/tests/function/test_encumbrance_events.py b/backend/compact-connect/lambdas/python/data-events/tests/function/test_encumbrance_events.py index be9b4fc75..1d50a1bea 100644 --- a/backend/compact-connect/lambdas/python/data-events/tests/function/test_encumbrance_events.py +++ 
b/backend/compact-connect/lambdas/python/data-events/tests/function/test_encumbrance_events.py @@ -4,7 +4,6 @@ from unittest.mock import ANY, MagicMock, patch from uuid import UUID -from boto3.dynamodb.conditions import Key from common_test.test_constants import ( DEFAULT_ADVERSE_ACTION_ID, DEFAULT_CLINICAL_PRIVILEGE_ACTION_CATEGORY, @@ -237,6 +236,7 @@ def test_license_encumbrance_listener_handles_all_privileges_already_encumbered( provider_records = self.config.data_client.get_provider_user_records( compact=DEFAULT_COMPACT, provider_id=DEFAULT_PROVIDER_ID, + include_updates=True, ) privileges = provider_records.get_privilege_records() @@ -245,18 +245,16 @@ def test_license_encumbrance_listener_handles_all_privileges_already_encumbered( serialized_privilege = privilege.serialize_to_database_record() self.assertEqual(PrivilegeEncumberedStatusEnum.ENCUMBERED, privileges[0].encumberedStatus) - privilege_update_records = self._provider_table.query( - Select='ALL_ATTRIBUTES', - KeyConditionExpression=Key('pk').eq(serialized_privilege['pk']) - & Key('sk').begins_with(f'{serialized_privilege["sk"]}UPDATE'), + # Get update records using ProviderUserRecords + update_records = provider_records.get_update_records_for_privilege( + jurisdiction=privileges[0].jurisdiction, license_type=privileges[0].licenseType ) - - self.assertEqual(1, len(privilege_update_records['Items'])) - update_record = privilege_update_records['Items'][0] - update_encumbrance_details = update_record['encumbranceDetails'] + self.assertEqual(1, len(update_records)) + update_record = update_records[0] + update_encumbrance_details = update_record.encumbranceDetails self.assertEqual( { - 'adverseActionId': DEFAULT_ADVERSE_ACTION_ID, + 'adverseActionId': UUID(DEFAULT_ADVERSE_ACTION_ID), 'licenseJurisdiction': 'oh', 'clinicalPrivilegeActionCategories': ['Unsafe Practice or Substandard Care'], }, @@ -307,6 +305,7 @@ def test_license_encumbrance_listener_handles_all_privileges_already_encumbered_ 
provider_records = self.config.data_client.get_provider_user_records( compact=DEFAULT_COMPACT, provider_id=DEFAULT_PROVIDER_ID, + include_updates=True, ) privileges = provider_records.get_privilege_records() @@ -315,18 +314,16 @@ def test_license_encumbrance_listener_handles_all_privileges_already_encumbered_ serialized_privilege = privilege.serialize_to_database_record() self.assertEqual(PrivilegeEncumberedStatusEnum.ENCUMBERED, privileges[0].encumberedStatus) - privilege_update_records = self._provider_table.query( - Select='ALL_ATTRIBUTES', - KeyConditionExpression=Key('pk').eq(serialized_privilege['pk']) - & Key('sk').begins_with(f'{serialized_privilege["sk"]}UPDATE'), + # Get update records using ProviderUserRecords + update_records = provider_records.get_update_records_for_privilege( + jurisdiction=privileges[0].jurisdiction, license_type=privileges[0].licenseType ) - - self.assertEqual(1, len(privilege_update_records['Items'])) - update_record = privilege_update_records['Items'][0] - update_encumbrance_details = update_record['encumbranceDetails'] + self.assertEqual(1, len(update_records)) + update_record = update_records[0] + update_encumbrance_details = update_record.encumbranceDetails self.assertEqual( { - 'adverseActionId': DEFAULT_ADVERSE_ACTION_ID, + 'adverseActionId': UUID(DEFAULT_ADVERSE_ACTION_ID), 'licenseJurisdiction': 'oh', 'clinicalPrivilegeActionCategory': 'Unsafe Practice or Substandard Care', }, @@ -358,17 +355,18 @@ def test_license_encumbrance_listener_creates_privilege_update_records(self): license_encumbrance_listener(event, self.mock_context) # Verify privilege update record was created - serialized_privilege = privilege.serialize_to_database_record() - privilege_update_records = self._provider_table.query( - Select='ALL_ATTRIBUTES', - KeyConditionExpression=Key('pk').eq(serialized_privilege['pk']) - & Key('sk').begins_with(f'{serialized_privilege["sk"]}UPDATE'), + provider_records = self.config.data_client.get_provider_user_records( + 
compact=DEFAULT_COMPACT, provider_id=DEFAULT_PROVIDER_ID, include_updates=True ) - - self.assertEqual(1, len(privilege_update_records['Items'])) - update_record = privilege_update_records['Items'][0] - self.assertEqual('encumbrance', update_record['updateType']) - self.assertEqual({'encumberedStatus': 'licenseEncumbered'}, update_record['updatedValues']) + privileges = provider_records.get_privilege_records() + self.assertEqual(1, len(privileges)) + update_records = provider_records.get_update_records_for_privilege( + jurisdiction=privileges[0].jurisdiction, license_type=privileges[0].licenseType + ) + self.assertEqual(1, len(update_records)) + update_record = update_records[0] + self.assertEqual('encumbrance', update_record.updateType) + self.assertEqual({'encumberedStatus': 'licenseEncumbered'}, update_record.updatedValues) @patch('cc_common.event_bus_client.EventBusClient._publish_event') def test_license_encumbrance_lifted_listener_unencumbers_license_encumbered_privileges_successfully( @@ -552,16 +550,18 @@ def test_license_encumbrance_lifted_listener_creates_privilege_update_records(se license_encumbrance_lifted_listener(event, self.mock_context) # Verify privilege update record was created - privilege_update_records = self._provider_table.query( - Select='ALL_ATTRIBUTES', - KeyConditionExpression=Key('pk').eq(privilege.serialize_to_database_record()['pk']) - & Key('sk').begins_with(f'{privilege.compact}#PROVIDER#privilege/{privilege.jurisdiction}/slp#UPDATE'), + provider_records = self.config.data_client.get_provider_user_records( + compact=DEFAULT_COMPACT, provider_id=DEFAULT_PROVIDER_ID, include_updates=True ) - - self.assertEqual(1, len(privilege_update_records['Items'])) - update_record = privilege_update_records['Items'][0] - self.assertEqual('lifting_encumbrance', update_record['updateType']) - self.assertEqual({'encumberedStatus': 'unencumbered'}, update_record['updatedValues']) + privileges = provider_records.get_privilege_records() + 
self.assertEqual(1, len(privileges)) + update_records = provider_records.get_update_records_for_privilege( + jurisdiction=privileges[0].jurisdiction, license_type=privileges[0].licenseType + ) + self.assertEqual(1, len(update_records)) + update_record = update_records[0] + self.assertEqual('lifting_encumbrance', update_record.updateType) + self.assertEqual({'encumberedStatus': 'unencumbered'}, update_record.updatedValues) @patch('cc_common.event_bus_client.EventBusClient._publish_event') def test_license_encumbrance_listener_handles_multiple_matching_privileges(self, mock_publish_event): diff --git a/backend/compact-connect/lambdas/python/data-events/tests/function/test_license_deactivation_events.py b/backend/compact-connect/lambdas/python/data-events/tests/function/test_license_deactivation_events.py index 4f3d0106f..8a2d205d9 100644 --- a/backend/compact-connect/lambdas/python/data-events/tests/function/test_license_deactivation_events.py +++ b/backend/compact-connect/lambdas/python/data-events/tests/function/test_license_deactivation_events.py @@ -302,7 +302,7 @@ def test_license_deactivation_listener_creates_update_records_for_all_affected_p privilege_update_records = self._provider_table.query( Select='ALL_ATTRIBUTES', KeyConditionExpression=Key('pk').eq(privilege.serialize_to_database_record()['pk']) - & Key('sk').begins_with(f'{privilege.compact}#PROVIDER#privilege/{privilege.jurisdiction}/slp#UPDATE'), + & Key('sk').begins_with(f'{privilege.compact}#PROV_UPDATE#privilege/{privilege.jurisdiction}/slp'), ) self.assertEqual(1, len(privilege_update_records['Items'])) diff --git a/backend/compact-connect/lambdas/python/provider-data-v1/handlers/__init__.py b/backend/compact-connect/lambdas/python/provider-data-v1/handlers/__init__.py index 7b3bff40a..8d5a971b7 100644 --- a/backend/compact-connect/lambdas/python/provider-data-v1/handlers/__init__.py +++ b/backend/compact-connect/lambdas/python/provider-data-v1/handlers/__init__.py @@ -13,5 +13,7 @@ def 
get_provider_information(compact: str, provider_id: str) -> dict: :param provider_id: The provider's unique identifier. :return: Provider profile information. """ - provider_user_records = config.data_client.get_provider_user_records(compact=compact, provider_id=provider_id) + provider_user_records = config.data_client.get_provider_user_records( + compact=compact, provider_id=provider_id, include_updates=True + ) return provider_user_records.generate_api_response_object() diff --git a/backend/compact-connect/lambdas/python/provider-data-v1/handlers/state_api.py b/backend/compact-connect/lambdas/python/provider-data-v1/handlers/state_api.py index 5d9defd9e..338dffef9 100644 --- a/backend/compact-connect/lambdas/python/provider-data-v1/handlers/state_api.py +++ b/backend/compact-connect/lambdas/python/provider-data-v1/handlers/state_api.py @@ -147,7 +147,9 @@ def get_provider(event: dict, context: LambdaContext): # noqa: ARG001 unused-ar raise CCInvalidRequestException('Missing required field') from e with logger.append_context_keys(compact=compact, provider_id=provider_id, jurisdiction=jurisdiction): - provider_user_records = config.data_client.get_provider_user_records(compact=compact, provider_id=provider_id) + provider_user_records = config.data_client.get_provider_user_records( + compact=compact, provider_id=provider_id, include_updates=True + ) # Get caller's scopes to determine private data access scopes = get_event_scopes(event) diff --git a/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_data_model/test_provider_transformations.py b/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_data_model/test_provider_transformations.py index 9567a88cc..9a0407fe6 100644 --- a/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_data_model/test_provider_transformations.py +++ b/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_data_model/test_provider_transformations.py 
@@ -2,7 +2,6 @@ from datetime import date, datetime from unittest.mock import patch -from boto3.dynamodb.conditions import Key from moto import mock_aws from .. import TstFunction @@ -98,6 +97,9 @@ def test_transformations(self, mock_license_preprocessing_queue): expected_provider = json.load(f) # this should be set during the registration flow expected_provider['currentHomeJurisdiction'] = 'oh' + # provider should be active and compact eligible + expected_provider['licenseStatus'] = 'active' + expected_provider['compactEligibility'] = 'eligible' # register the provider in the system client.process_registration_values( @@ -134,23 +136,26 @@ def test_transformations(self, mock_license_preprocessing_queue): ) # Get the provider straight from the table, to inspect them - resp = self._provider_table.query( - Select='ALL_ATTRIBUTES', - KeyConditionExpression=Key('pk').eq(f'aslp#PROVIDER#{provider_id}') - & Key('sk').begins_with('aslp#PROVIDER'), - ) + provider_user_records: ProviderUserRecords = self.config.data_client.get_provider_user_records( + compact='aslp', provider_id=provider_id, include_updates=True) + # One record for each of: provider, providerUpdate, license, # privilege, and militaryAffiliation - self.assertEqual(5, len(resp['Items'])) - records = {item['type']: item for item in resp['Items']} + self.assertEqual(5, len(provider_user_records.provider_records)) + records = {item['type']: item for item in provider_user_records.provider_records} # Convert this to the data type expected from DynamoDB expected_provider['privilegeJurisdictions'] = set(expected_provider['privilegeJurisdictions']) with open('../common/tests/resources/dynamo/license.json') as f: expected_license = json.load(f) + # license should be active and compact eligible + expected_license['licenseStatus'] = 'active' + expected_license['compactEligibility'] = 'eligible' with open('../common/tests/resources/dynamo/privilege.json') as f: expected_privilege = json.load(f) + # privilege status 
should be active + expected_privilege['status'] = 'active' with open('../common/tests/resources/dynamo/military-affiliation.json') as f: expected_military_affiliation = json.load(f) # in this case, the status will be initializing, since it is not set to active until diff --git a/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_handlers/test_encumbrance.py b/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_handlers/test_encumbrance.py index ed11c68ea..e27347b14 100644 --- a/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_handlers/test_encumbrance.py +++ b/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_handlers/test_encumbrance.py @@ -153,7 +153,7 @@ def test_privilege_encumbrance_handler_adds_privilege_update_record_in_provider_ Select='ALL_ATTRIBUTES', KeyConditionExpression=Key('pk').eq(test_privilege_record.serialize_to_database_record()['pk']) & Key('sk').begins_with( - f'{test_privilege_record.compact}#PROVIDER#privilege/{test_privilege_record.jurisdiction}/slp#UPDATE' + f'{test_privilege_record.compact}#PROV_UPDATE#privilege/{test_privilege_record.jurisdiction}/slp' ), ) self.assertEqual(1, len(privilege_update_records['Items'])) @@ -200,7 +200,7 @@ def test_privilege_encumbrance_handler_adds_privilege_update_record_in_provider_ Select='ALL_ATTRIBUTES', KeyConditionExpression=Key('pk').eq(test_privilege_record.serialize_to_database_record()['pk']) & Key('sk').begins_with( - f'{test_privilege_record.compact}#PROVIDER#privilege/{test_privilege_record.jurisdiction}/slp#UPDATE' + f'{test_privilege_record.compact}#PROV_UPDATE#privilege/{test_privilege_record.jurisdiction}/slp' ), ) self.assertEqual(1, len(privilege_update_records['Items'])) @@ -528,7 +528,7 @@ def test_license_encumbrance_handler_adds_license_update_record_in_provider_data Select='ALL_ATTRIBUTES', KeyConditionExpression=Key('pk').eq(test_license_record.serialize_to_database_record()['pk']) 
& Key('sk').begins_with( - f'{test_license_record.compact}#PROVIDER#license/{test_license_record.jurisdiction}/slp#UPDATE' + f'{test_license_record.compact}#PROV_UPDATE#license/{test_license_record.jurisdiction}/slp' ), ) self.assertEqual(1, len(license_update_records['Items'])) From 0d7af533308b3295c4bcec3fbcdb2f15e1acfeb4 Mon Sep 17 00:00:00 2001 From: Landon Shumway Date: Fri, 31 Oct 2025 10:59:35 -0500 Subject: [PATCH 02/81] Remove unused data client method --- .../cc_common/data_model/data_client.py | 24 ------------------- 1 file changed, 24 deletions(-) diff --git a/backend/compact-connect/lambdas/python/common/cc_common/data_model/data_client.py b/backend/compact-connect/lambdas/python/common/cc_common/data_model/data_client.py index f13e2a788..d07079742 100644 --- a/backend/compact-connect/lambdas/python/common/cc_common/data_model/data_client.py +++ b/backend/compact-connect/lambdas/python/common/cc_common/data_model/data_client.py @@ -1013,30 +1013,6 @@ def get_privilege_data( return result - @logger_inject_kwargs(logger, 'compact', 'provider_id', 'jurisdiction', 'license_type') - def get_privilege(self, *, compact: str, provider_id: str, jurisdiction: str, license_type_abbr: str) -> dict: - """ - Get a privilege for a provider in a jurisdiction of the license type - - :param str compact: The compact of the privilege - :param str provider_id: The provider of the privilege - :param str jurisdiction: The jurisdiction of the privilege - :param str license_type_abbr: The license type abbreviation of the privilege - :raises CCNotFoundException: If the privilege record is not found - """ - # Get the privilege record - try: - privilege_record = self.config.provider_table.get_item( - Key={ - 'pk': f'{compact}#PROVIDER#{provider_id}', - 'sk': f'{compact}#PROVIDER#privilege/{jurisdiction}/{license_type_abbr}#', - }, - )['Item'] - except KeyError as e: - raise CCNotFoundException(f'Privilege not found for jurisdiction {jurisdiction}') from e - - return 
privilege_record - @logger_inject_kwargs(logger, 'compact', 'provider_id', 'jurisdiction', 'license_type') def deactivate_privilege( self, *, compact: str, provider_id: str, jurisdiction: str, license_type_abbr: str, deactivation_details: dict From 575472a5db9b079929d45057693fbb79706fc908 Mon Sep 17 00:00:00 2001 From: Landon Shumway Date: Fri, 31 Oct 2025 11:08:36 -0500 Subject: [PATCH 03/81] formatter/linter --- .../cc_common/data_model/data_client.py | 3 +- .../common/common_test/test_data_generator.py | 12 +--- .../common/tests/function/test_data_client.py | 56 +++++++++---------- .../tests/function/test_encumbrance_events.py | 12 ++-- .../test_provider_transformations.py | 3 +- 5 files changed, 38 insertions(+), 48 deletions(-) diff --git a/backend/compact-connect/lambdas/python/common/cc_common/data_model/data_client.py b/backend/compact-connect/lambdas/python/common/cc_common/data_model/data_client.py index d07079742..34566cabf 100644 --- a/backend/compact-connect/lambdas/python/common/cc_common/data_model/data_client.py +++ b/backend/compact-connect/lambdas/python/common/cc_common/data_model/data_client.py @@ -48,7 +48,7 @@ CCNotFoundException, ) from cc_common.license_util import LicenseUtility -from cc_common.utils import load_records_into_schemas, logger_inject_kwargs +from cc_common.utils import logger_inject_kwargs class DataClient: @@ -1010,7 +1010,6 @@ def get_privilege_data( ) result.extend([update.to_dict() for update in privilege_updates]) - return result @logger_inject_kwargs(logger, 'compact', 'provider_id', 'jurisdiction', 'license_type') diff --git a/backend/compact-connect/lambdas/python/common/common_test/test_data_generator.py b/backend/compact-connect/lambdas/python/common/common_test/test_data_generator.py index 04d205437..f6f99c683 100644 --- a/backend/compact-connect/lambdas/python/common/common_test/test_data_generator.py +++ b/backend/compact-connect/lambdas/python/common/common_test/test_data_generator.py @@ -108,9 +108,7 @@ def 
query_privilege_update_records_for_given_record_from_database( from cc_common.config import config license_type_abbr = config.license_type_abbreviations[privilege_data.compact][privilege_data.licenseType] - sk_prefix = ( - f'{privilege_data.compact}#PROV_UPDATE#privilege/{privilege_data.jurisdiction}/{license_type_abbr}/' - ) + sk_prefix = f'{privilege_data.compact}#PROV_UPDATE#privilege/{privilege_data.jurisdiction}/{license_type_abbr}/' privilege_update_records = TestDataGenerator._query_records_by_pk_and_sk_prefix( serialized_record['pk'], sk_prefix @@ -129,13 +127,9 @@ def query_provider_update_records_for_given_record_from_database(provider_record """ serialized_record = provider_record.serialize_to_database_record() - sk_prefix = ( - f'{provider_record.compact}#PROV_UPDATE#provider' - ) + sk_prefix = f'{provider_record.compact}#PROV_UPDATE#provider' - return TestDataGenerator._query_records_by_pk_and_sk_prefix( - serialized_record['pk'], sk_prefix - ) + return TestDataGenerator._query_records_by_pk_and_sk_prefix(serialized_record['pk'], sk_prefix) @staticmethod def generate_default_adverse_action(value_overrides: dict | None = None) -> AdverseActionData: diff --git a/backend/compact-connect/lambdas/python/common/tests/function/test_data_client.py b/backend/compact-connect/lambdas/python/common/tests/function/test_data_client.py index 36e103ebf..7f544cc3a 100644 --- a/backend/compact-connect/lambdas/python/common/tests/function/test_data_client.py +++ b/backend/compact-connect/lambdas/python/common/tests/function/test_data_client.py @@ -227,8 +227,8 @@ def test_data_client_updates_privilege_records_for_specific_license_type(self): referenced nor updated in any way as part of this purchase. 
""" from cc_common.data_model.data_client import DataClient - from cc_common.data_model.schema.privilege import PrivilegeData from cc_common.data_model.provider_record_util import ProviderUserRecords + from cc_common.data_model.schema.privilege import PrivilegeData # Imagine that there have been 123 privileges issued for the compact # and that the next privilege number will be 124 @@ -317,7 +317,8 @@ def test_data_client_updates_privilege_records_for_specific_license_type(self): # Verify that the audiologist privilege update record was created for ky provider_user_records: ProviderUserRecords = self.config.data_client.get_provider_user_records( - compact='aslp', provider_id=provider_uuid, include_updates=True) + compact='aslp', provider_id=provider_uuid, include_updates=True + ) new_aud_ky_privilege = provider_user_records.get_specific_privilege_record( jurisdiction='ky', license_abbreviation='aud' @@ -346,12 +347,11 @@ def test_data_client_updates_privilege_records_for_specific_license_type(self): # Should remain the same, since we're renewing the same privilege 'privilegeId': 'AUD-KY-1', }, - new_aud_ky_privilege.serialize_to_database_record() + new_aud_ky_privilege.serialize_to_database_record(), ) ky_aud_update_record = provider_user_records.get_update_records_for_privilege( - jurisdiction=new_aud_ky_privilege.jurisdiction, - license_type=new_aud_ky_privilege.licenseType + jurisdiction=new_aud_ky_privilege.jurisdiction, license_type=new_aud_ky_privilege.licenseType )[0] self.assertEqual( @@ -396,38 +396,36 @@ def test_data_client_updates_privilege_records_for_specific_license_type(self): jurisdiction='ne', license_abbreviation='aud' ) self.assertEqual( - # Primary record - { - 'pk': f'aslp#PROVIDER#{provider_uuid}', - 'sk': 'aslp#PROVIDER#privilege/ne/aud#', - 'type': 'privilege', - 'providerId': provider_uuid, - 'compact': 'aslp', - 'jurisdiction': 'ne', - 'licenseJurisdiction': 'oh', - 'licenseType': 'audiologist', - 'administratorSetStatus': 'active', - # 
issuance and renewal dates should be the same - 'dateOfIssuance': '2024-11-08T23:59:59+00:00', - 'dateOfRenewal': '2024-11-08T23:59:59+00:00', - 'dateOfExpiration': '2025-10-31', - 'dateOfUpdate': '2024-11-08T23:59:59+00:00', - 'compactTransactionId': 'test_transaction_id', - 'compactTransactionIdGSIPK': 'COMPACT#aslp#TX#test_transaction_id#', - 'attestations': self.sample_privilege_attestations, - # Should remain the same, since we're renewing the same privilege - 'privilegeId': 'AUD-NE-124', + # Primary record + { + 'pk': f'aslp#PROVIDER#{provider_uuid}', + 'sk': 'aslp#PROVIDER#privilege/ne/aud#', + 'type': 'privilege', + 'providerId': provider_uuid, + 'compact': 'aslp', + 'jurisdiction': 'ne', + 'licenseJurisdiction': 'oh', + 'licenseType': 'audiologist', + 'administratorSetStatus': 'active', + # issuance and renewal dates should be the same + 'dateOfIssuance': '2024-11-08T23:59:59+00:00', + 'dateOfRenewal': '2024-11-08T23:59:59+00:00', + 'dateOfExpiration': '2025-10-31', + 'dateOfUpdate': '2024-11-08T23:59:59+00:00', + 'compactTransactionId': 'test_transaction_id', + 'compactTransactionIdGSIPK': 'COMPACT#aslp#TX#test_transaction_id#', + 'attestations': self.sample_privilege_attestations, + # Should remain the same, since we're renewing the same privilege + 'privilegeId': 'AUD-NE-124', }, new_aud_ne_privilege.serialize_to_database_record(), ) # assert there are no update records for this privilege ne_aud_update_records = provider_user_records.get_update_records_for_privilege( - jurisdiction=new_aud_ne_privilege.jurisdiction, - license_type=new_aud_ne_privilege.licenseType + jurisdiction=new_aud_ne_privilege.jurisdiction, license_type=new_aud_ne_privilege.licenseType ) self.assertEqual([], ne_aud_update_records) - # ensure that slp privilege was not updated with an update record slp_privilege = self._provider_table.query( KeyConditionExpression=Key('pk').eq(f'aslp#PROVIDER#{provider_uuid}') diff --git 
a/backend/compact-connect/lambdas/python/data-events/tests/function/test_encumbrance_events.py b/backend/compact-connect/lambdas/python/data-events/tests/function/test_encumbrance_events.py index 1d50a1bea..2a281b571 100644 --- a/backend/compact-connect/lambdas/python/data-events/tests/function/test_encumbrance_events.py +++ b/backend/compact-connect/lambdas/python/data-events/tests/function/test_encumbrance_events.py @@ -213,7 +213,7 @@ def test_license_encumbrance_listener_handles_all_privileges_already_encumbered( self.test_data_generator.put_default_provider_record_in_provider_table() # Create privileges that are already encumbered - privilege = self.test_data_generator.put_default_privilege_record_in_provider_table( + self.test_data_generator.put_default_privilege_record_in_provider_table( value_overrides={ 'licenseJurisdiction': DEFAULT_LICENSE_JURISDICTION, 'licenseTypeAbbreviation': DEFAULT_LICENSE_TYPE_ABBREVIATION, @@ -242,7 +242,6 @@ def test_license_encumbrance_listener_handles_all_privileges_already_encumbered( privileges = provider_records.get_privilege_records() self.assertEqual(1, len(privileges)) - serialized_privilege = privilege.serialize_to_database_record() self.assertEqual(PrivilegeEncumberedStatusEnum.ENCUMBERED, privileges[0].encumberedStatus) # Get update records using ProviderUserRecords @@ -279,7 +278,7 @@ def test_license_encumbrance_listener_handles_all_privileges_already_encumbered_ self.test_data_generator.put_default_provider_record_in_provider_table() # Create privileges that are already encumbered - privilege = self.test_data_generator.put_default_privilege_record_in_provider_table( + self.test_data_generator.put_default_privilege_record_in_provider_table( value_overrides={ 'licenseJurisdiction': DEFAULT_LICENSE_JURISDICTION, 'licenseTypeAbbreviation': DEFAULT_LICENSE_TYPE_ABBREVIATION, @@ -311,7 +310,6 @@ def test_license_encumbrance_listener_handles_all_privileges_already_encumbered_ privileges = 
provider_records.get_privilege_records() self.assertEqual(1, len(privileges)) - serialized_privilege = privilege.serialize_to_database_record() self.assertEqual(PrivilegeEncumberedStatusEnum.ENCUMBERED, privileges[0].encumberedStatus) # Get update records using ProviderUserRecords @@ -323,7 +321,7 @@ def test_license_encumbrance_listener_handles_all_privileges_already_encumbered_ update_encumbrance_details = update_record.encumbranceDetails self.assertEqual( { - 'adverseActionId': UUID(DEFAULT_ADVERSE_ACTION_ID), + 'adverseActionId': UUID(DEFAULT_ADVERSE_ACTION_ID), 'licenseJurisdiction': 'oh', 'clinicalPrivilegeActionCategory': 'Unsafe Practice or Substandard Care', }, @@ -342,7 +340,7 @@ def test_license_encumbrance_listener_creates_privilege_update_records(self): # Set up test data self.test_data_generator.put_default_provider_record_in_provider_table() - privilege = self.test_data_generator.put_default_privilege_record_in_provider_table() + self.test_data_generator.put_default_privilege_record_in_provider_table() # add adverse action item for license self.test_data_generator.put_default_adverse_action_record_in_provider_table( value_overrides={'actionAgainst': 'license'} @@ -525,7 +523,7 @@ def test_license_encumbrance_lifted_listener_creates_privilege_update_records(se } ) - privilege = self.test_data_generator.put_default_privilege_record_in_provider_table( + self.test_data_generator.put_default_privilege_record_in_provider_table( value_overrides={ 'licenseJurisdiction': DEFAULT_LICENSE_JURISDICTION, 'licenseTypeAbbreviation': DEFAULT_LICENSE_TYPE_ABBREVIATION, diff --git a/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_data_model/test_provider_transformations.py b/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_data_model/test_provider_transformations.py index 9a0407fe6..36ee55786 100644 --- 
a/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_data_model/test_provider_transformations.py +++ b/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_data_model/test_provider_transformations.py @@ -137,7 +137,8 @@ def test_transformations(self, mock_license_preprocessing_queue): # Get the provider straight from the table, to inspect them provider_user_records: ProviderUserRecords = self.config.data_client.get_provider_user_records( - compact='aslp', provider_id=provider_id, include_updates=True) + compact='aslp', provider_id=provider_id, include_updates=True + ) # One record for each of: provider, providerUpdate, license, # privilege, and militaryAffiliation From 6e1aec565637d2615c7d486bb43b4cdef4da6a9f Mon Sep 17 00:00:00 2001 From: Landon Shumway Date: Fri, 31 Oct 2025 11:27:20 -0500 Subject: [PATCH 04/81] Refactor get_privilege_data to only pull down privilege records --- .../cc_common/data_model/data_client.py | 115 +++++++++++++++--- 1 file changed, 99 insertions(+), 16 deletions(-) diff --git a/backend/compact-connect/lambdas/python/common/cc_common/data_model/data_client.py b/backend/compact-connect/lambdas/python/common/cc_common/data_model/data_client.py index 34566cabf..bb28725fb 100644 --- a/backend/compact-connect/lambdas/python/common/cc_common/data_model/data_client.py +++ b/backend/compact-connect/lambdas/python/common/cc_common/data_model/data_client.py @@ -959,6 +959,90 @@ def process_registration_values( ] ) + def _get_privilege_record_directly( + self, + *, + compact: str, + provider_id: str, + jurisdiction: str, + license_type_abbr: str, + consistent_read: bool = False, + ) -> PrivilegeData: + """ + Query for a single privilege record directly from DynamoDB. + + This should be used when it is undesirable to get all provider records and + filter for the specific privilege record. 
+ + :param str compact: The compact of the privilege + :param str provider_id: The provider of the privilege + :param str jurisdiction: The jurisdiction of the privilege + :param str license_type_abbr: The license type abbreviation of the privilege + :param bool consistent_read: If true, performs a consistent read of the record + :raises CCNotFoundException: If the privilege record is not found + :return: The privilege record as PrivilegeData + """ + pk = f'{compact}#PROVIDER#{provider_id}' + sk = f'{compact}#PROVIDER#privilege/{jurisdiction}/{license_type_abbr}#' + + try: + response = self.config.provider_table.get_item( + Key={'pk': pk, 'sk': sk}, + ConsistentRead=consistent_read, + ) + if 'Item' not in response: + raise CCNotFoundException('Privilege not found') + + return PrivilegeData.from_database_record(response['Item']) + except KeyError as e: + raise CCNotFoundException('Privilege not found') from e + + def _get_privilege_update_records_directly( + self, + *, + compact: str, + provider_id: str, + jurisdiction: str, + license_type_abbr: str, + consistent_read: bool = False, + ) -> list[PrivilegeUpdateData]: + """ + Query for all privilege update records for a specific privilege directly from DynamoDB. + + This should be used when it is undesirable to get all provider update records and + filter for the specific privilege update records. 
+ + :param str compact: The compact of the privilege + :param str provider_id: The provider of the privilege + :param str jurisdiction: The jurisdiction of the privilege + :param str license_type_abbr: The license type abbreviation of the privilege + :param bool consistent_read: If true, performs a consistent read of the records + :return: List of privilege update records + """ + pk = f'{compact}#PROVIDER#{provider_id}' + sk_prefix = f'{compact}#PROV_UPDATE#privilege/{jurisdiction}/{license_type_abbr}/' + + response_items = [] + last_evaluated_key = None + + while True: + pagination = {'ExclusiveStartKey': last_evaluated_key} if last_evaluated_key else {} + + query_resp = self.config.provider_table.query( + Select='ALL_ATTRIBUTES', + KeyConditionExpression=Key('pk').eq(pk) & Key('sk').begins_with(sk_prefix), + ConsistentRead=consistent_read, + **pagination, + ) + + response_items.extend(query_resp.get('Items', [])) + + last_evaluated_key = query_resp.get('LastEvaluatedKey') + if not last_evaluated_key: + break + + return [PrivilegeUpdateData.from_database_record(item) for item in response_items] + @logger_inject_kwargs(logger, 'compact', 'provider_id', 'detail', 'jurisdiction', 'license_type') def get_privilege_data( self, @@ -971,7 +1055,10 @@ def get_privilege_data( detail: bool = False, ) -> list[dict]: """ - Get a privilege for a provider in a jurisdiction of the license type + Get a privilege for a provider in a jurisdiction of the license type. + + This should be used when it is undesirable to pull all provider records and + filter for the specific privilege record and associated update records. 
:param str compact: The compact of the privilege :param str provider_id: The provider of the privilege @@ -983,30 +1070,26 @@ def get_privilege_data( :return If detail = False list of length one containing privilege item, if detail = True list containing, privilege record, privilege update records and privilege adverse action records """ - # Get all provider records (with updates if detail=True) - provider_user_records: ProviderUserRecords = self.get_provider_user_records( + # Query directly for the privilege record + privilege = self._get_privilege_record_directly( compact=compact, provider_id=provider_id, - consistent_read=consistent_read, - include_updates=detail, # Only include updates if detail=True - ) - - # Get the specific privilege record - privilege = provider_user_records.get_specific_privilege_record( jurisdiction=jurisdiction, - license_abbreviation=license_type_abbr, + license_type_abbr=license_type_abbr, + consistent_read=consistent_read, ) - if privilege is None: - raise CCNotFoundException('Privilege not found') - # Build return list in the same format as before result = [privilege.to_dict()] + if detail: - # Add update records - privilege_updates = provider_user_records.get_update_records_for_privilege( + # Query directly for privilege update records + privilege_updates = self._get_privilege_update_records_directly( + compact=compact, + provider_id=provider_id, jurisdiction=jurisdiction, - license_type=privilege.licenseType, + license_type_abbr=license_type_abbr, + consistent_read=consistent_read, ) result.extend([update.to_dict() for update in privilege_updates]) From 0c9b4fdc5a6a750d55ac7827a298ea3ba1c236fd Mon Sep 17 00:00:00 2001 From: Landon Shumway Date: Fri, 31 Oct 2025 11:37:02 -0500 Subject: [PATCH 05/81] Make sure to check for old records before migration is complete --- .../cc_common/data_model/data_client.py | 43 ++++++++++++------- 1 file changed, 28 insertions(+), 15 deletions(-) diff --git 
a/backend/compact-connect/lambdas/python/common/cc_common/data_model/data_client.py b/backend/compact-connect/lambdas/python/common/cc_common/data_model/data_client.py index bb28725fb..7f1fd61ce 100644 --- a/backend/compact-connect/lambdas/python/common/cc_common/data_model/data_client.py +++ b/backend/compact-connect/lambdas/python/common/cc_common/data_model/data_client.py @@ -1012,6 +1012,9 @@ def _get_privilege_update_records_directly( This should be used when it is undesirable to get all provider update records and filter for the specific privilege update records. + During migration period, this method queries both the new and old SK patterns to ensure + no records are missed. + :param str compact: The compact of the privilege :param str provider_id: The provider of the privilege :param str jurisdiction: The jurisdiction of the privilege @@ -1020,26 +1023,36 @@ def _get_privilege_update_records_directly( :return: List of privilege update records """ pk = f'{compact}#PROVIDER#{provider_id}' - sk_prefix = f'{compact}#PROV_UPDATE#privilege/{jurisdiction}/{license_type_abbr}/' - response_items = [] - last_evaluated_key = None + # SK prefixes to query (new pattern and old pattern for migration support) + # TODO - remove old pattern once migration is complete # noqa: FIX002 + sk_prefixes = [ + # New pattern: {compact}#PROV_UPDATE#privilege/{jurisdiction}/{license_type_abbr}/ + f'{compact}#PROV_UPDATE#privilege/{jurisdiction}/{license_type_abbr}/', + # Old pattern: {compact}#PROVIDER#privilege/{jurisdiction}/{license_type_abbr}#UPDATE + f'{compact}#PROVIDER#privilege/{jurisdiction}/{license_type_abbr}#UPDATE', + ] - while True: - pagination = {'ExclusiveStartKey': last_evaluated_key} if last_evaluated_key else {} + response_items = [] - query_resp = self.config.provider_table.query( - Select='ALL_ATTRIBUTES', - KeyConditionExpression=Key('pk').eq(pk) & Key('sk').begins_with(sk_prefix), - ConsistentRead=consistent_read, - **pagination, - ) + # Query for records using 
each SK prefix pattern + for sk_prefix in sk_prefixes: + last_evaluated_key = None + while True: + pagination = {'ExclusiveStartKey': last_evaluated_key} if last_evaluated_key else {} + + query_resp = self.config.provider_table.query( + Select='ALL_ATTRIBUTES', + KeyConditionExpression=Key('pk').eq(pk) & Key('sk').begins_with(sk_prefix), + ConsistentRead=consistent_read, + **pagination, + ) - response_items.extend(query_resp.get('Items', [])) + response_items.extend(query_resp.get('Items', [])) - last_evaluated_key = query_resp.get('LastEvaluatedKey') - if not last_evaluated_key: - break + last_evaluated_key = query_resp.get('LastEvaluatedKey') + if not last_evaluated_key: + break return [PrivilegeUpdateData.from_database_record(item) for item in response_items] From 7dd0e84935f4c067726df1200eff730f772b6e17 Mon Sep 17 00:00:00 2001 From: Landon Shumway Date: Fri, 31 Oct 2025 14:33:20 -0500 Subject: [PATCH 06/81] Using update tiers to support returning privilege updates in single query --- .../cc_common/data_model/data_client.py | 27 +++++++---- .../data_model/schema/license/record.py | 2 +- .../data_model/schema/privilege/record.py | 2 +- .../data_model/schema/provider/record.py | 2 +- .../cc_common/data_model/update_tier_enum.py | 32 +++++++++++++ .../common/common_test/test_data_generator.py | 29 ++++++++++-- .../common/tests/function/test_data_client.py | 47 ++++++++++--------- .../resources/dynamo/license-update.json | 2 +- .../resources/dynamo/privilege-update.json | 2 +- .../tests/function/test_encumbrance_events.py | 40 ++++++---------- .../test_license_deactivation_events.py | 19 ++++---- .../provider-data-v1/handlers/__init__.py | 4 +- .../provider-data-v1/handlers/state_api.py | 4 +- .../test_provider_transformations.py | 5 +- .../test_handlers/test_encumbrance.py | 43 +++++------------ 15 files changed, 147 insertions(+), 113 deletions(-) create mode 100644 backend/compact-connect/lambdas/python/common/cc_common/data_model/update_tier_enum.py diff --git 
a/backend/compact-connect/lambdas/python/common/cc_common/data_model/data_client.py b/backend/compact-connect/lambdas/python/common/cc_common/data_model/data_client.py index 7f1fd61ce..267c5d0d8 100644 --- a/backend/compact-connect/lambdas/python/common/cc_common/data_model/data_client.py +++ b/backend/compact-connect/lambdas/python/common/cc_common/data_model/data_client.py @@ -41,6 +41,7 @@ from cc_common.data_model.schema.privilege import PrivilegeData, PrivilegeUpdateData from cc_common.data_model.schema.privilege.record import PrivilegeUpdateRecordSchema from cc_common.data_model.schema.provider import ProviderData, ProviderUpdateData +from cc_common.data_model.update_tier_enum import UpdateTierEnum from cc_common.exceptions import ( CCAwsServiceException, CCInternalException, @@ -179,14 +180,21 @@ def get_provider_user_records( compact: str, provider_id: UUID, consistent_read: bool = True, - include_updates: bool = False, + include_update_tier: UpdateTierEnum | None = None, ) -> ProviderUserRecords: logger.info('Getting provider') - # Determine SK prefix based on include_updates parameter - # When include_updates=False, use begins_with({compact}#PROVIDER#) to exclude update records - # When include_updates=True, use begins_with({compact}#PROVIDER) to include both main records and update records - sk_prefix = f'{compact}#PROVIDER' if not include_updates else f'{compact}#PROV' + # Determine SK condition based on include_update_tier parameter + # When include_update_tier=None, use begins_with to get only main records (provider, licenses, privileges) + # When include_update_tier is set, use lt (less than) to get main records plus updates up to that tier + if include_update_tier is None: + # Get only main records: {compact}#PROVIDER prefix + sk_condition = Key('sk').begins_with(f'{compact}#PROVIDER') + else: + # Get main records and updates up to specified tier using lt (less than) + # This fetches all SKs less than {compact}#UPDATE#{next_tier} + next_tier = 
int(include_update_tier) + 1 + sk_condition = Key('sk').lt(f'{compact}#UPDATE#{next_tier}') resp = {'Items': []} last_evaluated_key = None @@ -196,8 +204,7 @@ def get_provider_user_records( query_resp = self.config.provider_table.query( Select='ALL_ATTRIBUTES', - KeyConditionExpression=Key('pk').eq(f'{compact}#PROVIDER#{provider_id}') - & Key('sk').begins_with(sk_prefix), + KeyConditionExpression=Key('pk').eq(f'{compact}#PROVIDER#{provider_id}') & sk_condition, ConsistentRead=consistent_read, **pagination, ) @@ -1027,9 +1034,9 @@ def _get_privilege_update_records_directly( # SK prefixes to query (new pattern and old pattern for migration support) # TODO - remove old pattern once migration is complete # noqa: FIX002 sk_prefixes = [ - # New pattern: {compact}#PROV_UPDATE#privilege/{jurisdiction}/{license_type_abbr}/ - f'{compact}#PROV_UPDATE#privilege/{jurisdiction}/{license_type_abbr}/', - # Old pattern: {compact}#PROVIDER#privilege/{jurisdiction}/{license_type_abbr}#UPDATE + # New pattern + f'{compact}#UPDATE#1#privilege/{jurisdiction}/{license_type_abbr}/', + # Old pattern f'{compact}#PROVIDER#privilege/{jurisdiction}/{license_type_abbr}#UPDATE', ] diff --git a/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/license/record.py b/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/license/record.py index 1e4cef6f7..822dca07c 100644 --- a/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/license/record.py +++ b/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/license/record.py @@ -215,7 +215,7 @@ def generate_pk_sk(self, in_data, **kwargs): # noqa: ARG001 unused-argument change_hash = self.hash_changes(in_data) license_type_abbr = config.license_type_abbreviations[in_data['compact']][in_data['licenseType']] in_data['sk'] = ( - 
f'{in_data["compact"]}#PROV_UPDATE#license/{in_data["jurisdiction"]}/{license_type_abbr}/{int(config.current_standard_datetime.timestamp())}/{change_hash}' + f'{in_data["compact"]}#UPDATE#3#license/{in_data["jurisdiction"]}/{license_type_abbr}/{int(config.current_standard_datetime.timestamp())}/{change_hash}' ) return in_data diff --git a/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/privilege/record.py b/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/privilege/record.py index a98482dda..3bcd3ed0e 100644 --- a/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/privilege/record.py +++ b/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/privilege/record.py @@ -240,7 +240,7 @@ def generate_pk_sk(self, in_data, **kwargs): # noqa: ARG001 unused-argument change_hash = self.hash_changes(in_data) license_type_abbr = config.license_type_abbreviations[in_data['compact']][in_data['licenseType']] in_data['sk'] = ( - f'{in_data["compact"]}#PROV_UPDATE#privilege/{in_data["jurisdiction"]}/{license_type_abbr}/{int(config.current_standard_datetime.timestamp())}/{change_hash}' + f'{in_data["compact"]}#UPDATE#1#privilege/{in_data["jurisdiction"]}/{license_type_abbr}/{int(config.current_standard_datetime.timestamp())}/{change_hash}' ) return in_data diff --git a/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/provider/record.py b/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/provider/record.py index be5079ff4..5bc21b05c 100644 --- a/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/provider/record.py +++ b/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/provider/record.py @@ -238,6 +238,6 @@ def generate_pk_sk(self, in_data, **kwargs): # noqa: ARG001 unused-argument # field for this. 
change_hash = self.hash_changes(in_data) in_data['sk'] = ( - f'{in_data["compact"]}#PROV_UPDATE#provider/{int(config.current_standard_datetime.timestamp())}/{change_hash}' + f'{in_data["compact"]}#UPDATE#2#provider/{int(config.current_standard_datetime.timestamp())}/{change_hash}' ) return in_data diff --git a/backend/compact-connect/lambdas/python/common/cc_common/data_model/update_tier_enum.py b/backend/compact-connect/lambdas/python/common/cc_common/data_model/update_tier_enum.py new file mode 100644 index 000000000..30df103ed --- /dev/null +++ b/backend/compact-connect/lambdas/python/common/cc_common/data_model/update_tier_enum.py @@ -0,0 +1,32 @@ +from enum import StrEnum + + +class UpdateTierEnum(StrEnum): + """ + Enum for update record tiers in the sort key hierarchy. + + Update records are organized into tiers to enable efficient range queries. + Using lt (less than) conditions, we can fetch multiple tiers in a single query. + + Tier structure in sort keys: + - Tier 1: {compact}#UPDATE#1#privilege/... (Privilege updates) + - Tier 2: {compact}#UPDATE#2#provider/... (Provider updates) + - Tier 3: {compact}#UPDATE#3#license/... (License updates) + + Query patterns: + - TIER_ONE: Fetches privilege updates only + Query: Key('sk').lt('{compact}#UPDATE#2') + + - TIER_TWO: Fetches privilege + provider updates + Query: Key('sk').lt('{compact}#UPDATE#3') + + - TIER_THREE: Fetches all updates (privilege + provider + license) + Query: Key('sk').lt('{compact}#UPDATE#4') + + This tiered approach prevents bulk invalid license updates from breaking + queries that only need privilege and provider data. 
+ """ + + TIER_ONE = '1' # Privilege updates only + TIER_TWO = '2' # Privilege + Provider updates + TIER_THREE = '3' # All updates (Privilege + Provider + License) diff --git a/backend/compact-connect/lambdas/python/common/common_test/test_data_generator.py b/backend/compact-connect/lambdas/python/common/common_test/test_data_generator.py index f6f99c683..2fdaa6274 100644 --- a/backend/compact-connect/lambdas/python/common/common_test/test_data_generator.py +++ b/backend/compact-connect/lambdas/python/common/common_test/test_data_generator.py @@ -100,15 +100,12 @@ def query_privilege_update_records_for_given_record_from_database( ) -> list[PrivilegeUpdateData]: """ Helper method to query update records from the database using the provider data class instance. - - All of our update records use the same pk as the actual record that is being updated. The new pattern - for privilege updates is {compact}#PROV_UPDATE#privilege/{jurisdiction}/{license_type_abbr}/ """ serialized_record = privilege_data.serialize_to_database_record() from cc_common.config import config license_type_abbr = config.license_type_abbreviations[privilege_data.compact][privilege_data.licenseType] - sk_prefix = f'{privilege_data.compact}#PROV_UPDATE#privilege/{privilege_data.jurisdiction}/{license_type_abbr}/' + sk_prefix = f'{privilege_data.compact}#UPDATE#1#privilege/{privilege_data.jurisdiction}/{license_type_abbr}/' privilege_update_records = TestDataGenerator._query_records_by_pk_and_sk_prefix( serialized_record['pk'], sk_prefix @@ -127,10 +124,32 @@ def query_provider_update_records_for_given_record_from_database(provider_record """ serialized_record = provider_record.serialize_to_database_record() - sk_prefix = f'{provider_record.compact}#PROV_UPDATE#provider' + sk_prefix = f'{provider_record.compact}#UPDATE#2#provider' return TestDataGenerator._query_records_by_pk_and_sk_prefix(serialized_record['pk'], sk_prefix) + @staticmethod + def 
query_license_update_records_for_given_record_from_database( + license_data: LicenseData, + ) -> list[LicenseUpdateData]: + """ + Helper method to query update records from the database using the license data class instance. + + All of our update records use the same pk as the actual record that is being updated. The sk prefix + for license updates follows the tier pattern: {compact}#UPDATE#3#license/{jurisdiction}/{license_type_abbr}/ + """ + serialized_record = license_data.serialize_to_database_record() + from cc_common.config import config + + license_type_abbr = config.license_type_abbreviations[license_data.compact][license_data.licenseType] + sk_prefix = f'{license_data.compact}#UPDATE#3#license/{license_data.jurisdiction}/{license_type_abbr}/' + + license_update_records = TestDataGenerator._query_records_by_pk_and_sk_prefix( + serialized_record['pk'], sk_prefix + ) + + return [LicenseUpdateData.from_database_record(update_record) for update_record in license_update_records] + @staticmethod def generate_default_adverse_action(value_overrides: dict | None = None) -> AdverseActionData: """Generate a default adverse action""" diff --git a/backend/compact-connect/lambdas/python/common/tests/function/test_data_client.py b/backend/compact-connect/lambdas/python/common/tests/function/test_data_client.py index 7f544cc3a..41284dd13 100644 --- a/backend/compact-connect/lambdas/python/common/tests/function/test_data_client.py +++ b/backend/compact-connect/lambdas/python/common/tests/function/test_data_client.py @@ -317,7 +317,7 @@ def test_data_client_updates_privilege_records_for_specific_license_type(self): # Verify that the audiologist privilege update record was created for ky provider_user_records: ProviderUserRecords = self.config.data_client.get_provider_user_records( - compact='aslp', provider_id=provider_uuid, include_updates=True + compact='aslp', provider_id=provider_uuid ) new_aud_ky_privilege = provider_user_records.get_specific_privilege_record( @@ 
-350,15 +350,18 @@ def test_data_client_updates_privilege_records_for_specific_license_type(self): new_aud_ky_privilege.serialize_to_database_record(), ) - ky_aud_update_record = provider_user_records.get_update_records_for_privilege( - jurisdiction=new_aud_ky_privilege.jurisdiction, license_type=new_aud_ky_privilege.licenseType - )[0] + # Get update records using test_data_generator + update_records = self.test_data_generator.query_privilege_update_records_for_given_record_from_database( + new_aud_ky_privilege + ) + self.assertEqual(1, len(update_records)) + ky_aud_update_record = update_records[0] self.assertEqual( # A new history record { 'pk': f'aslp#PROVIDER#{provider_uuid}', - 'sk': 'aslp#PROV_UPDATE#privilege/ky/aud/1731110399/f61e34798e1775ff6230d1187d444146', + 'sk': 'aslp#UPDATE#1#privilege/ky/aud/1731110399/f61e34798e1775ff6230d1187d444146', 'type': 'privilegeUpdate', 'updateType': 'renewal', 'providerId': provider_uuid, @@ -420,9 +423,9 @@ def test_data_client_updates_privilege_records_for_specific_license_type(self): }, new_aud_ne_privilege.serialize_to_database_record(), ) - # assert there are no update records for this privilege - ne_aud_update_records = provider_user_records.get_update_records_for_privilege( - jurisdiction=new_aud_ne_privilege.jurisdiction, license_type=new_aud_ne_privilege.licenseType + # assert there are no update records for this privilege using test_data_generator + ne_aud_update_records = self.test_data_generator.query_privilege_update_records_for_given_record_from_database( + new_aud_ne_privilege ) self.assertEqual([], ne_aud_update_records) @@ -516,7 +519,7 @@ def test_data_client_handles_large_privilege_purchase(self): # Verify that all privileges were updated provider_user_records: ProviderUserRecords = self.config.data_client.get_provider_user_records( - compact='aslp', provider_id=provider_uuid, include_updates=True + compact='aslp', provider_id=provider_uuid ) for jurisdiction in jurisdictions: @@ -528,9 +531,9 @@ def 
test_data_client_handles_large_privilege_purchase(self): self.assertEqual('2025-10-31', privilege_record.dateOfExpiration.isoformat()) self.assertEqual('test_transaction_id', privilege_record.compactTransactionId) - # Get the update record using ProviderUserRecords - update_records = provider_user_records.get_update_records_for_privilege( - jurisdiction=jurisdiction, license_type=privilege_record.licenseType + # Get the update record using test_data_generator + update_records = self.test_data_generator.query_privilege_update_records_for_given_record_from_database( + privilege_record ) self.assertEqual(1, len(update_records), f'Expected 1 update record for jurisdiction {jurisdiction}') update_record = update_records[0] @@ -782,7 +785,7 @@ def test_deactivate_privilege_updates_record(self): # Verify that the privilege record was updated provider_user_records: ProviderUserRecords = self.config.data_client.get_provider_user_records( - compact='aslp', provider_id=provider_id, include_updates=True + compact='aslp', provider_id=provider_id ) new_privilege = provider_user_records.get_specific_privilege_record( @@ -813,9 +816,9 @@ def test_deactivate_privilege_updates_record(self): new_privilege.serialize_to_database_record(), ) - # Get the update record - update_records = provider_user_records.get_update_records_for_privilege( - jurisdiction='ne', license_type=new_privilege.licenseType + # Get the update record using test_data_generator + update_records = self.test_data_generator.query_privilege_update_records_for_given_record_from_database( + new_privilege ) self.assertEqual(1, len(update_records), 'Expected 1 update record') update_record = update_records[0] @@ -823,7 +826,7 @@ def test_deactivate_privilege_updates_record(self): self.assertEqual( { 'pk': f'aslp#PROVIDER#{provider_id}', - 'sk': 'aslp#PROV_UPDATE#privilege/ne/aud/1731110399/aac682a76e1182a641a1b40dd606ae51', + 'sk': 'aslp#UPDATE#1#privilege/ne/aud/1731110399/aac682a76e1182a641a1b40dd606ae51', 'type': 
'privilegeUpdate', 'updateType': 'deactivation', 'providerId': str(provider_id), @@ -921,7 +924,7 @@ def test_deactivate_privilege_on_inactive_privilege_raises_exception(self): # We'll create it as if it were already deactivated original_history = { 'pk': f'aslp#PROVIDER#{provider_id}', - 'sk': 'aslp#PROV_UPDATE#privilege/ne/aud/1731110399/4ebb3dc8f1ffcc30fe7aad5ec49d0ca6', + 'sk': 'aslp#UPDATE#1#privilege/ne/aud/1731110399/4ebb3dc8f1ffcc30fe7aad5ec49d0ca6', 'type': 'privilegeUpdate', 'updateType': 'renewal', 'providerId': str(provider_id), @@ -967,7 +970,7 @@ def test_deactivate_privilege_on_inactive_privilege_raises_exception(self): # Verify that the privilege record was unchanged provider_user_records: ProviderUserRecords = self.config.data_client.get_provider_user_records( - compact='aslp', provider_id=provider_id, include_updates=True + compact='aslp', provider_id=provider_id ) new_privilege = provider_user_records.get_specific_privilege_record( @@ -980,9 +983,9 @@ def test_deactivate_privilege_on_inactive_privilege_raises_exception(self): serialized_record['dateOfUpdate'] = original_privilege['dateOfUpdate'] self.assertEqual(original_privilege, serialized_record) - # Verify the update record is unchanged - update_records = provider_user_records.get_update_records_for_privilege( - jurisdiction='ne', license_type=new_privilege.licenseType + # Verify the update record is unchanged using test_data_generator + update_records = self.test_data_generator.query_privilege_update_records_for_given_record_from_database( + new_privilege ) self.assertEqual(1, len(update_records), 'Expected 1 update record') self.assertEqual(original_history, update_records[0].serialize_to_database_record()) diff --git a/backend/compact-connect/lambdas/python/common/tests/resources/dynamo/license-update.json b/backend/compact-connect/lambdas/python/common/tests/resources/dynamo/license-update.json index 8a6e7ba74..9bab8a231 100644 --- 
a/backend/compact-connect/lambdas/python/common/tests/resources/dynamo/license-update.json +++ b/backend/compact-connect/lambdas/python/common/tests/resources/dynamo/license-update.json @@ -1,6 +1,6 @@ { "pk": "aslp#PROVIDER#89a6377e-c3a5-40e5-bca5-317ec854c570", - "sk": "aslp#PROV_UPDATE#license/oh/slp/1586264399/34702de3dc08e64922605a6b18f3838b", + "sk": "aslp#UPDATE#3#license/oh/slp/1586264399/34702de3dc08e64922605a6b18f3838b", "type": "licenseUpdate", "updateType": "renewal", "providerId": "89a6377e-c3a5-40e5-bca5-317ec854c570", diff --git a/backend/compact-connect/lambdas/python/common/tests/resources/dynamo/privilege-update.json b/backend/compact-connect/lambdas/python/common/tests/resources/dynamo/privilege-update.json index 456d3b702..4a22b0aff 100644 --- a/backend/compact-connect/lambdas/python/common/tests/resources/dynamo/privilege-update.json +++ b/backend/compact-connect/lambdas/python/common/tests/resources/dynamo/privilege-update.json @@ -1,6 +1,6 @@ { "pk": "aslp#PROVIDER#89a6377e-c3a5-40e5-bca5-317ec854c570", - "sk": "aslp#PROV_UPDATE#privilege/ne/slp/1731110399/939a3c350708e34875f0a652bf7d7454", + "sk": "aslp#UPDATE#1#privilege/ne/slp/1731110399/939a3c350708e34875f0a652bf7d7454", "type": "privilegeUpdate", "updateType": "renewal", "providerId": "89a6377e-c3a5-40e5-bca5-317ec854c570", diff --git a/backend/compact-connect/lambdas/python/data-events/tests/function/test_encumbrance_events.py b/backend/compact-connect/lambdas/python/data-events/tests/function/test_encumbrance_events.py index 2a281b571..f7a5ffe26 100644 --- a/backend/compact-connect/lambdas/python/data-events/tests/function/test_encumbrance_events.py +++ b/backend/compact-connect/lambdas/python/data-events/tests/function/test_encumbrance_events.py @@ -213,7 +213,7 @@ def test_license_encumbrance_listener_handles_all_privileges_already_encumbered( self.test_data_generator.put_default_provider_record_in_provider_table() # Create privileges that are already encumbered - 
self.test_data_generator.put_default_privilege_record_in_provider_table( + privilege = self.test_data_generator.put_default_privilege_record_in_provider_table( value_overrides={ 'licenseJurisdiction': DEFAULT_LICENSE_JURISDICTION, 'licenseTypeAbbreviation': DEFAULT_LICENSE_TYPE_ABBREVIATION, @@ -236,7 +236,6 @@ def test_license_encumbrance_listener_handles_all_privileges_already_encumbered( provider_records = self.config.data_client.get_provider_user_records( compact=DEFAULT_COMPACT, provider_id=DEFAULT_PROVIDER_ID, - include_updates=True, ) privileges = provider_records.get_privilege_records() @@ -244,9 +243,9 @@ def test_license_encumbrance_listener_handles_all_privileges_already_encumbered( self.assertEqual(PrivilegeEncumberedStatusEnum.ENCUMBERED, privileges[0].encumberedStatus) - # Get update records using ProviderUserRecords - update_records = provider_records.get_update_records_for_privilege( - jurisdiction=privileges[0].jurisdiction, license_type=privileges[0].licenseType + # Get update records using test_data_generator + update_records = self.test_data_generator.query_privilege_update_records_for_given_record_from_database( + privilege ) self.assertEqual(1, len(update_records)) update_record = update_records[0] @@ -278,7 +277,7 @@ def test_license_encumbrance_listener_handles_all_privileges_already_encumbered_ self.test_data_generator.put_default_provider_record_in_provider_table() # Create privileges that are already encumbered - self.test_data_generator.put_default_privilege_record_in_provider_table( + privilege = self.test_data_generator.put_default_privilege_record_in_provider_table( value_overrides={ 'licenseJurisdiction': DEFAULT_LICENSE_JURISDICTION, 'licenseTypeAbbreviation': DEFAULT_LICENSE_TYPE_ABBREVIATION, @@ -304,7 +303,6 @@ def test_license_encumbrance_listener_handles_all_privileges_already_encumbered_ provider_records = self.config.data_client.get_provider_user_records( compact=DEFAULT_COMPACT, provider_id=DEFAULT_PROVIDER_ID, - 
include_updates=True, ) privileges = provider_records.get_privilege_records() @@ -312,9 +310,9 @@ def test_license_encumbrance_listener_handles_all_privileges_already_encumbered_ self.assertEqual(PrivilegeEncumberedStatusEnum.ENCUMBERED, privileges[0].encumberedStatus) - # Get update records using ProviderUserRecords - update_records = provider_records.get_update_records_for_privilege( - jurisdiction=privileges[0].jurisdiction, license_type=privileges[0].licenseType + # Get update records using test_data_generator + update_records = self.test_data_generator.query_privilege_update_records_for_given_record_from_database( + privilege ) self.assertEqual(1, len(update_records)) update_record = update_records[0] @@ -340,7 +338,7 @@ def test_license_encumbrance_listener_creates_privilege_update_records(self): # Set up test data self.test_data_generator.put_default_provider_record_in_provider_table() - self.test_data_generator.put_default_privilege_record_in_provider_table() + privilege = self.test_data_generator.put_default_privilege_record_in_provider_table() # add adverse action item for license self.test_data_generator.put_default_adverse_action_record_in_provider_table( value_overrides={'actionAgainst': 'license'} @@ -353,13 +351,8 @@ def test_license_encumbrance_listener_creates_privilege_update_records(self): license_encumbrance_listener(event, self.mock_context) # Verify privilege update record was created - provider_records = self.config.data_client.get_provider_user_records( - compact=DEFAULT_COMPACT, provider_id=DEFAULT_PROVIDER_ID, include_updates=True - ) - privileges = provider_records.get_privilege_records() - self.assertEqual(1, len(privileges)) - update_records = provider_records.get_update_records_for_privilege( - jurisdiction=privileges[0].jurisdiction, license_type=privileges[0].licenseType + update_records = self.test_data_generator.query_privilege_update_records_for_given_record_from_database( + privilege ) self.assertEqual(1, len(update_records)) 
update_record = update_records[0] @@ -523,7 +516,7 @@ def test_license_encumbrance_lifted_listener_creates_privilege_update_records(se } ) - self.test_data_generator.put_default_privilege_record_in_provider_table( + privilege = self.test_data_generator.put_default_privilege_record_in_provider_table( value_overrides={ 'licenseJurisdiction': DEFAULT_LICENSE_JURISDICTION, 'licenseTypeAbbreviation': DEFAULT_LICENSE_TYPE_ABBREVIATION, @@ -548,13 +541,8 @@ def test_license_encumbrance_lifted_listener_creates_privilege_update_records(se license_encumbrance_lifted_listener(event, self.mock_context) # Verify privilege update record was created - provider_records = self.config.data_client.get_provider_user_records( - compact=DEFAULT_COMPACT, provider_id=DEFAULT_PROVIDER_ID, include_updates=True - ) - privileges = provider_records.get_privilege_records() - self.assertEqual(1, len(privileges)) - update_records = provider_records.get_update_records_for_privilege( - jurisdiction=privileges[0].jurisdiction, license_type=privileges[0].licenseType + update_records = self.test_data_generator.query_privilege_update_records_for_given_record_from_database( + privilege ) self.assertEqual(1, len(update_records)) update_record = update_records[0] diff --git a/backend/compact-connect/lambdas/python/data-events/tests/function/test_license_deactivation_events.py b/backend/compact-connect/lambdas/python/data-events/tests/function/test_license_deactivation_events.py index 8a2d205d9..eaba9ec32 100644 --- a/backend/compact-connect/lambdas/python/data-events/tests/function/test_license_deactivation_events.py +++ b/backend/compact-connect/lambdas/python/data-events/tests/function/test_license_deactivation_events.py @@ -299,16 +299,15 @@ def test_license_deactivation_listener_creates_update_records_for_all_affected_p # Verify privilege update records were created for both privileges for privilege in [privilege1, privilege2]: - privilege_update_records = self._provider_table.query( - 
Select='ALL_ATTRIBUTES', - KeyConditionExpression=Key('pk').eq(privilege.serialize_to_database_record()['pk']) - & Key('sk').begins_with(f'{privilege.compact}#PROV_UPDATE#privilege/{privilege.jurisdiction}/slp'), - ) - - self.assertEqual(1, len(privilege_update_records['Items'])) - update_record = privilege_update_records['Items'][0] - self.assertEqual('licenseDeactivation', update_record['updateType']) - self.assertEqual({'licenseDeactivatedStatus': 'licenseDeactivated'}, update_record['updatedValues']) + privilege_update_records = ( + self.test_data_generator.query_privilege_update_records_for_given_record_from_database( + privilege + )) + + self.assertEqual(1, len(privilege_update_records)) + update_record = privilege_update_records[0] + self.assertEqual('licenseDeactivation', update_record.updateType) + self.assertEqual({'licenseDeactivatedStatus': 'licenseDeactivated'}, update_record.updatedValues) def test_license_deactivation_listener_fails_with_missing_required_fields(self): """Test that license deactivation event handler fails when required fields are missing.""" diff --git a/backend/compact-connect/lambdas/python/provider-data-v1/handlers/__init__.py b/backend/compact-connect/lambdas/python/provider-data-v1/handlers/__init__.py index 8d5a971b7..8939ee726 100644 --- a/backend/compact-connect/lambdas/python/provider-data-v1/handlers/__init__.py +++ b/backend/compact-connect/lambdas/python/provider-data-v1/handlers/__init__.py @@ -1,4 +1,5 @@ from cc_common.config import config, logger +from cc_common.data_model.update_tier_enum import UpdateTierEnum from cc_common.utils import logger_inject_kwargs @@ -13,7 +14,8 @@ def get_provider_information(compact: str, provider_id: str) -> dict: :param provider_id: The provider's unique identifier. :return: Provider profile information. """ + # Collect all main provider records and privilege update records, which are included in tier one. 
provider_user_records = config.data_client.get_provider_user_records( - compact=compact, provider_id=provider_id, include_updates=True + compact=compact, provider_id=provider_id, include_update_tier=UpdateTierEnum.TIER_ONE ) return provider_user_records.generate_api_response_object() diff --git a/backend/compact-connect/lambdas/python/provider-data-v1/handlers/state_api.py b/backend/compact-connect/lambdas/python/provider-data-v1/handlers/state_api.py index 338dffef9..3a4cfaf3a 100644 --- a/backend/compact-connect/lambdas/python/provider-data-v1/handlers/state_api.py +++ b/backend/compact-connect/lambdas/python/provider-data-v1/handlers/state_api.py @@ -2,6 +2,7 @@ from aws_lambda_powertools.utilities.typing import LambdaContext from cc_common.config import config, logger +from cc_common.data_model.update_tier_enum import UpdateTierEnum from cc_common.data_model.schema.common import CCPermissionsAction from cc_common.data_model.schema.license import LicenseData from cc_common.data_model.schema.privilege import PrivilegeData @@ -147,8 +148,9 @@ def get_provider(event: dict, context: LambdaContext): # noqa: ARG001 unused-ar raise CCInvalidRequestException('Missing required field') from e with logger.append_context_keys(compact=compact, provider_id=provider_id, jurisdiction=jurisdiction): + # Collect all main provider records and privilege update records, which are included in tier one. 
provider_user_records = config.data_client.get_provider_user_records( - compact=compact, provider_id=provider_id, include_updates=True + compact=compact, provider_id=provider_id, include_update_tier=UpdateTierEnum.TIER_ONE ) # Get caller's scopes to determine private data access diff --git a/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_data_model/test_provider_transformations.py b/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_data_model/test_provider_transformations.py index 36ee55786..a98a3fbf1 100644 --- a/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_data_model/test_provider_transformations.py +++ b/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_data_model/test_provider_transformations.py @@ -4,6 +4,7 @@ from moto import mock_aws +from cc_common.data_model.update_tier_enum import UpdateTierEnum from .. import TstFunction @@ -135,9 +136,9 @@ def test_transformations(self, mock_license_preprocessing_queue): ], ) - # Get the provider straight from the table, to inspect them + # Get the provider and all update records straight from the table, to inspect them provider_user_records: ProviderUserRecords = self.config.data_client.get_provider_user_records( - compact='aslp', provider_id=provider_id, include_updates=True + compact='aslp', provider_id=provider_id, include_update_tier=UpdateTierEnum.TIER_THREE ) # One record for each of: provider, providerUpdate, license, diff --git a/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_handlers/test_encumbrance.py b/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_handlers/test_encumbrance.py index e27347b14..f3ae0da88 100644 --- a/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_handlers/test_encumbrance.py +++ b/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_handlers/test_encumbrance.py @@ -148,18 
+148,11 @@ def test_privilege_encumbrance_handler_adds_privilege_update_record_in_provider_ self.assertEqual(200, response['statusCode'], msg=json.loads(response['body'])) # Verify that the encumbrance record was added to the provider data table - # Perform a query to list all encumbrances for the provider using the starts_with key condition - privilege_update_records = self._provider_table.query( - Select='ALL_ATTRIBUTES', - KeyConditionExpression=Key('pk').eq(test_privilege_record.serialize_to_database_record()['pk']) - & Key('sk').begins_with( - f'{test_privilege_record.compact}#PROV_UPDATE#privilege/{test_privilege_record.jurisdiction}/slp' - ), + privilege_update_records = self.test_data_generator.query_privilege_update_records_for_given_record_from_database( + test_privilege_record ) - self.assertEqual(1, len(privilege_update_records['Items'])) - item = privilege_update_records['Items'][0] - - loaded_privilege_update_data = PrivilegeUpdateData.from_database_record(item) + self.assertEqual(1, len(privilege_update_records)) + loaded_privilege_update_data = privilege_update_records[0] expected_privilege_update_data = self.test_data_generator.generate_default_privilege_update( value_overrides={ @@ -195,18 +188,11 @@ def test_privilege_encumbrance_handler_adds_privilege_update_record_in_provider_ self.assertEqual(200, response['statusCode'], msg=json.loads(response['body'])) # Verify that the encumbrance record was added to the provider data table - # Perform a query to list all encumbrances for the provider using the starts_with key condition - privilege_update_records = self._provider_table.query( - Select='ALL_ATTRIBUTES', - KeyConditionExpression=Key('pk').eq(test_privilege_record.serialize_to_database_record()['pk']) - & Key('sk').begins_with( - f'{test_privilege_record.compact}#PROV_UPDATE#privilege/{test_privilege_record.jurisdiction}/slp' - ), + privilege_update_records = 
self.test_data_generator.query_privilege_update_records_for_given_record_from_database( + test_privilege_record ) - self.assertEqual(1, len(privilege_update_records['Items'])) - item = privilege_update_records['Items'][0] - - loaded_privilege_update_data = PrivilegeUpdateData.from_database_record(item) + self.assertEqual(1, len(privilege_update_records)) + loaded_privilege_update_data = privilege_update_records[0] expected_privilege_update_data = self.test_data_generator.generate_default_privilege_update( value_overrides={ @@ -524,15 +510,11 @@ def test_license_encumbrance_handler_adds_license_update_record_in_provider_data self.assertEqual(200, response['statusCode'], msg=json.loads(response['body'])) # Verify that the update record was added for the license - license_update_records = self._provider_table.query( - Select='ALL_ATTRIBUTES', - KeyConditionExpression=Key('pk').eq(test_license_record.serialize_to_database_record()['pk']) - & Key('sk').begins_with( - f'{test_license_record.compact}#PROV_UPDATE#license/{test_license_record.jurisdiction}/slp' - ), + license_update_records = self.test_data_generator.query_license_update_records_for_given_record_from_database( + test_license_record ) - self.assertEqual(1, len(license_update_records['Items'])) - item = license_update_records['Items'][0] + self.assertEqual(1, len(license_update_records)) + loaded_license_update_data = license_update_records[0] expected_license_update_data = self.test_data_generator.generate_default_license_update( value_overrides={ @@ -542,7 +524,6 @@ def test_license_encumbrance_handler_adds_license_update_record_in_provider_data 'effectiveDate': datetime.fromisoformat(TEST_ENCUMBRANCE_EFFECTIVE_DATETIME), } ) - loaded_license_update_data = LicenseUpdateData.from_database_record(item) self.assertEqual( expected_license_update_data.to_dict(), From 0f8b8a0dd379b3a047b9a3616d30b19bf1a46210 Mon Sep 17 00:00:00 2001 From: Landon Shumway Date: Mon, 3 Nov 2025 15:45:55 -0600 Subject: [PATCH 07/81] 
Add tests to confirm old pattern is supported until migration is completed --- .../common/tests/function/test_data_client.py | 157 ++++++++++++++++++ 1 file changed, 157 insertions(+) diff --git a/backend/compact-connect/lambdas/python/common/tests/function/test_data_client.py b/backend/compact-connect/lambdas/python/common/tests/function/test_data_client.py index 41284dd13..0da183a66 100644 --- a/backend/compact-connect/lambdas/python/common/tests/function/test_data_client.py +++ b/backend/compact-connect/lambdas/python/common/tests/function/test_data_client.py @@ -1966,3 +1966,160 @@ def test_close_license_investigation_with_encumbrance(self): investigation_record.pop('dateOfUpdate') self.assertEqual(expected_investigation_close, investigation_record) + + # TODO - remove this test once migration from old update SK pattern is complete # noqa: FIX002 + def test_get_provider_user_records_returns_old_sk_pattern_update_records_with_tier_one(self): + """Test that get_provider_user_records with TIER_ONE returns privilege update records with old SK pattern.""" + from cc_common.data_model.data_client import DataClient + from cc_common.data_model.provider_record_util import ProviderUserRecords + from cc_common.data_model.update_tier_enum import UpdateTierEnum + + provider_uuid = str(uuid4()) + compact = 'aslp' + jurisdiction = 'ky' + license_type_abbr = 'aud' + + # Create provider and privilege records + self.test_data_generator.put_default_provider_record_in_provider_table( + value_overrides={ + 'providerId': provider_uuid, + 'compact': compact, + } + ) + + privilege = self.test_data_generator.put_default_privilege_record_in_provider_table( + value_overrides={ + 'providerId': provider_uuid, + 'compact': compact, + 'jurisdiction': jurisdiction, + 'licenseType': 'audiologist', + } + ) + + # Manually create a privilege update record with the old SK pattern + old_sk_update_record = { + 'pk': f'{compact}#PROVIDER#{provider_uuid}', + 'sk': 
f'{compact}#PROVIDER#privilege/{jurisdiction}/{license_type_abbr}#UPDATE/1731110399/939a3c350708e34875f0a652bf7d7454', + 'type': 'privilegeUpdate', + 'updateType': 'renewal', + 'providerId': provider_uuid, + 'compact': compact, + 'jurisdiction': jurisdiction, + 'licenseType': 'audiologist', + 'createDate': '2024-11-08T23:59:59+00:00', + 'effectiveDate': '2024-11-08T23:59:59+00:00', + 'dateOfUpdate': '2024-11-08T23:59:59+00:00', + 'compactTransactionIdGSIPK': 'COMPACT#aslp#TX#1234567890#', + "previous": { + "attestations": [ + {"attestationId": "jurisprudence-confirmation", "version": "1"} + ], + "dateOfIssuance": "2016-05-05T12:59:59+00:00", + "dateOfRenewal": "2016-05-05T12:59:59+00:00", + "dateOfExpiration": "2020-06-06", + "dateOfUpdate": "2016-05-05T12:59:59+00:00", + "compactTransactionId": "0123456789", + "privilegeId": "SLP-NE-1", + "administratorSetStatus": "active", + "licenseJurisdiction": "oh" + }, + 'updatedValues': { + 'dateOfRenewal': '2024-11-08T23:59:59+00:00', + 'dateOfExpiration': '2025-10-31', + 'compactTransactionId': 'test_transaction_id', + }, + } + self._provider_table.put_item(Item=old_sk_update_record) + + # Call get_provider_user_records with TIER_ONE + client = DataClient(self.config) + provider_user_records: ProviderUserRecords = client.get_provider_user_records( + compact=compact, + provider_id=provider_uuid, + include_update_tier=UpdateTierEnum.TIER_ONE, + ) + + # Verify the old SK pattern update record is returned + update_records = provider_user_records.get_update_records_for_privilege( + jurisdiction=jurisdiction, license_type=privilege.licenseType + ) + self.assertEqual(1, len(update_records)) + self.assertEqual('renewal', update_records[0].updateType) + + # TODO - remove this test once migration from old update SK pattern is complete # noqa: FIX002 + def test_get_privilege_data_returns_old_sk_pattern_update_records_with_detail(self): + """Test that get_privilege_data with detail=True returns privilege update records with old SK 
pattern.""" + from cc_common.data_model.data_client import DataClient + + provider_uuid = str(uuid4()) + compact = 'aslp' + jurisdiction = 'ne' + license_type_abbr = 'aud' + + # Create provider and privilege records + self.test_data_generator.put_default_provider_record_in_provider_table( + value_overrides={ + 'providerId': provider_uuid, + 'compact': compact, + } + ) + + self.test_data_generator.put_default_privilege_record_in_provider_table( + value_overrides={ + 'providerId': provider_uuid, + 'compact': compact, + 'jurisdiction': jurisdiction, + 'licenseType': 'audiologist', + } + ) + + # Manually create a privilege update record with the old SK pattern + old_sk_update_record = { + 'pk': f'{compact}#PROVIDER#{provider_uuid}', + 'sk': f'{compact}#PROVIDER#privilege/{jurisdiction}/{license_type_abbr}#UPDATE/1731110399/939a3c350708e34875f0a652bf7d7454', + 'type': 'privilegeUpdate', + 'updateType': 'renewal', + 'providerId': provider_uuid, + 'compact': compact, + 'jurisdiction': jurisdiction, + 'licenseType': 'audiologist', + 'createDate': '2024-11-08T23:59:59+00:00', + 'effectiveDate': '2024-11-08T23:59:59+00:00', + 'dateOfUpdate': '2024-11-08T23:59:59+00:00', + 'compactTransactionIdGSIPK': 'COMPACT#aslp#TX#1234567890#', + "previous": { + "attestations": [ + {"attestationId": "jurisprudence-confirmation", "version": "1"} + ], + "dateOfIssuance": "2016-05-05T12:59:59+00:00", + "dateOfRenewal": "2016-05-05T12:59:59+00:00", + "dateOfExpiration": "2020-06-06", + "dateOfUpdate": "2016-05-05T12:59:59+00:00", + "compactTransactionId": "0123456789", + "privilegeId": "SLP-NE-1", + "administratorSetStatus": "active", + "licenseJurisdiction": "oh" + }, + 'updatedValues': { + 'dateOfRenewal': '2024-11-08T23:59:59+00:00', + 'dateOfExpiration': '2025-10-31', + 'compactTransactionId': 'test_transaction_id', + }, + } + self._provider_table.put_item(Item=old_sk_update_record) + + # Call get_privilege_data with detail=True + client = DataClient(self.config) + result = 
client.get_privilege_data( + compact=compact, + provider_id=provider_uuid, + jurisdiction=jurisdiction, + license_type_abbr=license_type_abbr, + detail=True, + ) + + # Verify the result contains the privilege record and the old SK pattern update record + self.assertEqual(2, len(result)) + self.assertEqual('privilege', result[0]['type']) + self.assertEqual('privilegeUpdate', result[1]['type']) + self.assertEqual('renewal', result[1]['updateType']) From 597d3d967d2be3935484e8e96604dbc0b64909a6 Mon Sep 17 00:00:00 2001 From: Landon Shumway Date: Mon, 3 Nov 2025 15:57:27 -0600 Subject: [PATCH 08/81] formatting/linter --- .../common/tests/function/test_data_client.py | 50 +++++++++---------- .../test_license_deactivation_events.py | 6 +-- .../provider-data-v1/handlers/state_api.py | 2 +- .../test_provider_transformations.py | 2 +- .../test_handlers/test_encumbrance.py | 15 +++--- 5 files changed, 36 insertions(+), 39 deletions(-) diff --git a/backend/compact-connect/lambdas/python/common/tests/function/test_data_client.py b/backend/compact-connect/lambdas/python/common/tests/function/test_data_client.py index 0da183a66..1f93f8724 100644 --- a/backend/compact-connect/lambdas/python/common/tests/function/test_data_client.py +++ b/backend/compact-connect/lambdas/python/common/tests/function/test_data_client.py @@ -1999,7 +1999,8 @@ def test_get_provider_user_records_returns_old_sk_pattern_update_records_with_ti # Manually create a privilege update record with the old SK pattern old_sk_update_record = { 'pk': f'{compact}#PROVIDER#{provider_uuid}', - 'sk': f'{compact}#PROVIDER#privilege/{jurisdiction}/{license_type_abbr}#UPDATE/1731110399/939a3c350708e34875f0a652bf7d7454', + 'sk': f'{compact}#PROVIDER#privilege/{jurisdiction}/{license_type_abbr}' + f'#UPDATE/1731110399/939a3c350708e34875f0a652bf7d7454', 'type': 'privilegeUpdate', 'updateType': 'renewal', 'providerId': provider_uuid, @@ -2010,18 +2011,16 @@ def 
test_get_provider_user_records_returns_old_sk_pattern_update_records_with_ti 'effectiveDate': '2024-11-08T23:59:59+00:00', 'dateOfUpdate': '2024-11-08T23:59:59+00:00', 'compactTransactionIdGSIPK': 'COMPACT#aslp#TX#1234567890#', - "previous": { - "attestations": [ - {"attestationId": "jurisprudence-confirmation", "version": "1"} - ], - "dateOfIssuance": "2016-05-05T12:59:59+00:00", - "dateOfRenewal": "2016-05-05T12:59:59+00:00", - "dateOfExpiration": "2020-06-06", - "dateOfUpdate": "2016-05-05T12:59:59+00:00", - "compactTransactionId": "0123456789", - "privilegeId": "SLP-NE-1", - "administratorSetStatus": "active", - "licenseJurisdiction": "oh" + 'previous': { + 'attestations': [{'attestationId': 'jurisprudence-confirmation', 'version': '1'}], + 'dateOfIssuance': '2016-05-05T12:59:59+00:00', + 'dateOfRenewal': '2016-05-05T12:59:59+00:00', + 'dateOfExpiration': '2020-06-06', + 'dateOfUpdate': '2016-05-05T12:59:59+00:00', + 'compactTransactionId': '0123456789', + 'privilegeId': 'SLP-NE-1', + 'administratorSetStatus': 'active', + 'licenseJurisdiction': 'oh', }, 'updatedValues': { 'dateOfRenewal': '2024-11-08T23:59:59+00:00', @@ -2076,7 +2075,8 @@ def test_get_privilege_data_returns_old_sk_pattern_update_records_with_detail(se # Manually create a privilege update record with the old SK pattern old_sk_update_record = { 'pk': f'{compact}#PROVIDER#{provider_uuid}', - 'sk': f'{compact}#PROVIDER#privilege/{jurisdiction}/{license_type_abbr}#UPDATE/1731110399/939a3c350708e34875f0a652bf7d7454', + 'sk': f'{compact}#PROVIDER#privilege/{jurisdiction}/{license_type_abbr}' + f'#UPDATE/1731110399/939a3c350708e34875f0a652bf7d7454', 'type': 'privilegeUpdate', 'updateType': 'renewal', 'providerId': provider_uuid, @@ -2087,18 +2087,16 @@ def test_get_privilege_data_returns_old_sk_pattern_update_records_with_detail(se 'effectiveDate': '2024-11-08T23:59:59+00:00', 'dateOfUpdate': '2024-11-08T23:59:59+00:00', 'compactTransactionIdGSIPK': 'COMPACT#aslp#TX#1234567890#', - "previous": { - 
"attestations": [ - {"attestationId": "jurisprudence-confirmation", "version": "1"} - ], - "dateOfIssuance": "2016-05-05T12:59:59+00:00", - "dateOfRenewal": "2016-05-05T12:59:59+00:00", - "dateOfExpiration": "2020-06-06", - "dateOfUpdate": "2016-05-05T12:59:59+00:00", - "compactTransactionId": "0123456789", - "privilegeId": "SLP-NE-1", - "administratorSetStatus": "active", - "licenseJurisdiction": "oh" + 'previous': { + 'attestations': [{'attestationId': 'jurisprudence-confirmation', 'version': '1'}], + 'dateOfIssuance': '2016-05-05T12:59:59+00:00', + 'dateOfRenewal': '2016-05-05T12:59:59+00:00', + 'dateOfExpiration': '2020-06-06', + 'dateOfUpdate': '2016-05-05T12:59:59+00:00', + 'compactTransactionId': '0123456789', + 'privilegeId': 'SLP-NE-1', + 'administratorSetStatus': 'active', + 'licenseJurisdiction': 'oh', }, 'updatedValues': { 'dateOfRenewal': '2024-11-08T23:59:59+00:00', diff --git a/backend/compact-connect/lambdas/python/data-events/tests/function/test_license_deactivation_events.py b/backend/compact-connect/lambdas/python/data-events/tests/function/test_license_deactivation_events.py index eaba9ec32..c65bec043 100644 --- a/backend/compact-connect/lambdas/python/data-events/tests/function/test_license_deactivation_events.py +++ b/backend/compact-connect/lambdas/python/data-events/tests/function/test_license_deactivation_events.py @@ -2,7 +2,6 @@ from datetime import datetime from unittest.mock import patch -from boto3.dynamodb.conditions import Key from common_test.test_constants import ( DEFAULT_COMPACT, DEFAULT_DATE_OF_UPDATE_TIMESTAMP, @@ -300,9 +299,8 @@ def test_license_deactivation_listener_creates_update_records_for_all_affected_p # Verify privilege update records were created for both privileges for privilege in [privilege1, privilege2]: privilege_update_records = ( - self.test_data_generator.query_privilege_update_records_for_given_record_from_database( - privilege - )) + 
self.test_data_generator.query_privilege_update_records_for_given_record_from_database(privilege) + ) self.assertEqual(1, len(privilege_update_records)) update_record = privilege_update_records[0] diff --git a/backend/compact-connect/lambdas/python/provider-data-v1/handlers/state_api.py b/backend/compact-connect/lambdas/python/provider-data-v1/handlers/state_api.py index 3a4cfaf3a..8a6995d4f 100644 --- a/backend/compact-connect/lambdas/python/provider-data-v1/handlers/state_api.py +++ b/backend/compact-connect/lambdas/python/provider-data-v1/handlers/state_api.py @@ -2,7 +2,6 @@ from aws_lambda_powertools.utilities.typing import LambdaContext from cc_common.config import config, logger -from cc_common.data_model.update_tier_enum import UpdateTierEnum from cc_common.data_model.schema.common import CCPermissionsAction from cc_common.data_model.schema.license import LicenseData from cc_common.data_model.schema.privilege import PrivilegeData @@ -13,6 +12,7 @@ StateProviderDetailGeneralResponseSchema, StateProviderDetailPrivateResponseSchema, ) +from cc_common.data_model.update_tier_enum import UpdateTierEnum from cc_common.exceptions import CCInternalException, CCInvalidRequestException, CCNotFoundException from cc_common.signature_auth import optional_signature_auth, required_signature_auth from cc_common.utils import ( diff --git a/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_data_model/test_provider_transformations.py b/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_data_model/test_provider_transformations.py index a98a3fbf1..03cc64457 100644 --- a/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_data_model/test_provider_transformations.py +++ b/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_data_model/test_provider_transformations.py @@ -2,9 +2,9 @@ from datetime import date, datetime from unittest.mock import patch +from 
cc_common.data_model.update_tier_enum import UpdateTierEnum from moto import mock_aws -from cc_common.data_model.update_tier_enum import UpdateTierEnum from .. import TstFunction diff --git a/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_handlers/test_encumbrance.py b/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_handlers/test_encumbrance.py index f3ae0da88..42fdd2fba 100644 --- a/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_handlers/test_encumbrance.py +++ b/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_handlers/test_encumbrance.py @@ -139,7 +139,6 @@ def test_privilege_encumbrance_handler_adds_adverse_action_record_in_provider_da @patch('cc_common.feature_flag_client.is_feature_enabled', return_value=True) def test_privilege_encumbrance_handler_adds_privilege_update_record_in_provider_data_table(self, mock_flag): # noqa: ARG002 - from cc_common.data_model.schema.privilege import PrivilegeUpdateData from handlers.encumbrance import encumbrance_handler event, test_privilege_record = self._when_testing_privilege_encumbrance() @@ -148,8 +147,10 @@ def test_privilege_encumbrance_handler_adds_privilege_update_record_in_provider_ self.assertEqual(200, response['statusCode'], msg=json.loads(response['body'])) # Verify that the encumbrance record was added to the provider data table - privilege_update_records = self.test_data_generator.query_privilege_update_records_for_given_record_from_database( - test_privilege_record + privilege_update_records = ( + self.test_data_generator.query_privilege_update_records_for_given_record_from_database( + test_privilege_record + ) ) self.assertEqual(1, len(privilege_update_records)) loaded_privilege_update_data = privilege_update_records[0] @@ -179,7 +180,6 @@ def test_privilege_encumbrance_handler_adds_privilege_update_record_in_provider_ self, mock_flag, # noqa: ARG002 ): - from 
cc_common.data_model.schema.privilege import PrivilegeUpdateData from handlers.encumbrance import encumbrance_handler event, test_privilege_record = self._when_testing_privilege_encumbrance() @@ -188,8 +188,10 @@ def test_privilege_encumbrance_handler_adds_privilege_update_record_in_provider_ self.assertEqual(200, response['statusCode'], msg=json.loads(response['body'])) # Verify that the encumbrance record was added to the provider data table - privilege_update_records = self.test_data_generator.query_privilege_update_records_for_given_record_from_database( - test_privilege_record + privilege_update_records = ( + self.test_data_generator.query_privilege_update_records_for_given_record_from_database( + test_privilege_record + ) ) self.assertEqual(1, len(privilege_update_records)) loaded_privilege_update_data = privilege_update_records[0] @@ -501,7 +503,6 @@ def test_license_encumbrance_handler_adds_adverse_action_record_in_provider_data ) def test_license_encumbrance_handler_adds_license_update_record_in_provider_data_table(self): - from cc_common.data_model.schema.license import LicenseUpdateData from handlers.encumbrance import encumbrance_handler event, test_license_record = self._when_testing_valid_license_encumbrance() From 8aca6b39cb731e4a1c32f83cc2bad464a300cbd5 Mon Sep 17 00:00:00 2001 From: Landon Shumway Date: Mon, 3 Nov 2025 17:19:10 -0600 Subject: [PATCH 09/81] Add licenseUploadDateGSI fields to provider table/licenses --- .../data_model/schema/license/__init__.py | 8 +++ .../data_model/schema/license/record.py | 63 +++++++++++++++++++ .../provider-data-v1/handlers/ingest.py | 4 ++ .../tests/function/__init__.py | 10 +++ .../test_provider_transformations.py | 6 +- .../stacks/persistent_stack/provider_table.py | 7 +++ 6 files changed, 97 insertions(+), 1 deletion(-) diff --git a/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/license/__init__.py 
b/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/license/__init__.py index 5672fd071..3364e0811 100644 --- a/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/license/__init__.py +++ b/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/license/__init__.py @@ -140,6 +140,10 @@ def encumberedStatus(self) -> str | None: def investigationStatus(self) -> str | None: return self._data.get('investigationStatus') + @property + def uploadDate(self) -> datetime | None: + return self._data.get('uploadDate') + class LicenseUpdateData(CCDataClass): """ @@ -195,3 +199,7 @@ def updatedValues(self) -> dict: @property def removedValues(self) -> list[str] | None: return self._data.get('removedValues') + + @property + def uploadDate(self) -> datetime | None: + return self._data.get('uploadDate') diff --git a/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/license/record.py b/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/license/record.py index 822dca07c..26aa7ef41 100644 --- a/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/license/record.py +++ b/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/license/record.py @@ -47,6 +47,11 @@ class LicenseRecordSchema(BaseRecordSchema, LicenseCommonSchema): providerId = UUID(required=True, allow_none=False) licenseGSIPK = String(required=True, allow_none=False) licenseGSISK = String(required=True, allow_none=False) + licenseUploadDateGSIPK = String(required=False, allow_none=False) + licenseUploadDateGSISK = String(required=False, allow_none=False) + + # Optional field for tracking when the license upload caused this record to be created + uploadDate = DateTime(required=False, allow_none=False) # Provided fields npi = NationalProviderIdentifier(required=False, allow_none=False) @@ -124,12 +129,34 @@ def generate_license_gsi_fields(self, in_data, **kwargs): # noqa: 
ARG001 unused in_data['licenseGSISK'] = f'FN#{quote(in_data["familyName"].lower())}#GN#{quote(in_data["givenName"].lower())}' return in_data + @pre_dump + def generate_license_upload_date_gsi_fields(self, in_data, **kwargs): # noqa: ARG001 unused-argument + """Generate GSI fields for license upload date tracking (only if uploadDate is present)""" + if 'uploadDate' in in_data and in_data['uploadDate'] is not None: + # Extract YYYY-MM from uploadDate + upload_date = in_data['uploadDate'] + year_month = upload_date.strftime('%Y-%m') + + # Generate GSI PK: C#{compact}#J#{jurisdiction}#D#{YYYY-MM} + in_data['licenseUploadDateGSIPK'] = ( + f'C#{in_data["compact"].lower()}#J#{in_data["jurisdiction"].lower()}#D#{year_month}' + ) + # Generate GSI SK: TIME#{epoch_timestamp}#LT#{licenseType}#PID#{providerId} + upload_epoch_time = int(upload_date.timestamp()) + license_type_abbr = config.license_type_abbreviations[in_data['compact']][in_data['licenseType']] + in_data['licenseUploadDateGSISK'] = ( + f'TIME#{upload_epoch_time}#LT#{license_type_abbr}#PID#{in_data["providerId"]}' + ) + return in_data + @post_load def drop_license_gsi_fields(self, in_data, **kwargs): # noqa: ARG001 unused-argument """Drop the db-specific license GSI fields before returning loaded data""" # only drop the field if it's present, else continue on in_data.pop('licenseGSIPK', None) in_data.pop('licenseGSISK', None) + in_data.pop('licenseUploadDateGSIPK', None) + in_data.pop('licenseUploadDateGSISK', None) return in_data @@ -198,6 +225,13 @@ class LicenseUpdateRecordSchema(BaseRecordSchema, ChangeHashMixin): investigationDetails = Nested(InvestigationDetailsSchema(), required=False, allow_none=False) # List of field names that were present in the previous record but removed in the update removedValues = List(String(), required=False, allow_none=False) + + # Optional GSI fields for license upload date tracking + licenseUploadDateGSIPK = String(required=False, allow_none=False) + licenseUploadDateGSISK = 
String(required=False, allow_none=False) + + # Optional field for tracking when the license upload caused this update record to be created + uploadDate = DateTime(required=False, allow_none=False) @post_dump # Must be _post_ dump so we have values that are more easily hashed def generate_pk_sk(self, in_data, **kwargs): # noqa: ARG001 unused-argument @@ -219,6 +253,35 @@ def generate_pk_sk(self, in_data, **kwargs): # noqa: ARG001 unused-argument ) return in_data + @pre_dump + def generate_license_upload_date_gsi_fields(self, in_data, **kwargs): # noqa: ARG001 unused-argument + """Generate GSI fields for license upload date tracking (only if uploadDate is present)""" + if 'uploadDate' in in_data and in_data['uploadDate'] is not None: + # Extract YYYY-MM from uploadDate + upload_date = in_data['uploadDate'] + year_month = upload_date.strftime('%Y-%m') + + # Generate GSI PK: C#{compact}#J#{jurisdiction}#D#{YYYY-MM} + in_data['licenseUploadDateGSIPK'] = ( + f'C#{in_data["compact"].lower()}#J#{in_data["jurisdiction"].lower()}#D#{year_month}' + ) + + # Generate GSI SK: TIME#{epoch_timestamp}#LT#{licenseType}#PID#{providerId} + upload_epoch_time = int(upload_date.timestamp()) + license_type_abbr = config.license_type_abbreviations[in_data['compact']][in_data['licenseType']] + in_data['licenseUploadDateGSISK'] = ( + f'TIME#{upload_epoch_time}#LT#{in_data["licenseType"]}#PID#{in_data["providerId"]}' + ) + return in_data + + @post_load + def drop_license_gsi_fields(self, in_data, **kwargs): # noqa: ARG001 unused-argument + """Drop the db-specific license GSI fields before returning loaded data""" + # only drop the field if it's present, else continue on + in_data.pop('licenseUploadDateGSIPK', None) + in_data.pop('licenseUploadDateGSISK', None) + return in_data + @validates_schema def validate_license_type(self, data, **kwargs): # noqa: ARG001 unused-argument license_types = config.license_types_for_compact(data['compact']) diff --git 
a/backend/compact-connect/lambdas/python/provider-data-v1/handlers/ingest.py b/backend/compact-connect/lambdas/python/provider-data-v1/handlers/ingest.py index e84ace0b0..4acc78cc6 100644 --- a/backend/compact-connect/lambdas/python/provider-data-v1/handlers/ingest.py +++ b/backend/compact-connect/lambdas/python/provider-data-v1/handlers/ingest.py @@ -105,6 +105,9 @@ def ingest_license_message(message: dict): # Start preparing our db transactions data_events = [] + # Set the uploadDate to the current time for GSI tracking + license_ingest_message['uploadDate'] = config.current_standard_datetime + license_record_schema = LicenseRecordSchema() dumped_license = license_record_schema.dumps(license_ingest_message) @@ -312,6 +315,7 @@ def _populate_update_record(*, existing_license: dict, updated_values: dict, rem 'licenseType': existing_license['licenseType'], 'createDate': now, 'effectiveDate': now, + 'uploadDate': now, # Track when this update was created during upload 'previous': existing_license, 'updatedValues': updated_values, # We'll only include the removed values field if there are some diff --git a/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/__init__.py b/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/__init__.py index fe0ac38f7..d2f028361 100644 --- a/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/__init__.py +++ b/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/__init__.py @@ -129,6 +129,8 @@ def create_provider_table(self): {'AttributeName': 'providerDateOfUpdate', 'AttributeType': 'S'}, {'AttributeName': 'licenseGSIPK', 'AttributeType': 'S'}, {'AttributeName': 'licenseGSISK', 'AttributeType': 'S'}, + {'AttributeName': 'licenseUploadDateGSIPK', 'AttributeType': 'S'}, + {'AttributeName': 'licenseUploadDateGSISK', 'AttributeType': 'S'}, ], TableName=os.environ['PROVIDER_TABLE_NAME'], KeySchema=[{'AttributeName': 'pk', 'KeyType': 'HASH'}, {'AttributeName': 'sk', 
'KeyType': 'RANGE'}], @@ -158,6 +160,14 @@ def create_provider_table(self): ], 'Projection': {'ProjectionType': 'ALL'}, }, + { + 'IndexName': 'licenseUploadDateGSI', + 'KeySchema': [ + {'AttributeName': 'licenseUploadDateGSIPK', 'KeyType': 'HASH'}, + {'AttributeName': 'licenseUploadDateGSISK', 'KeyType': 'RANGE'}, + ], + 'Projection': {'ProjectionType': 'KEYS_ONLY'}, + }, ], ) diff --git a/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_data_model/test_provider_transformations.py b/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_data_model/test_provider_transformations.py index 03cc64457..e77539953 100644 --- a/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_data_model/test_provider_transformations.py +++ b/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_data_model/test_provider_transformations.py @@ -7,11 +7,12 @@ from .. import TstFunction +MOCK_CURRENT_DATETIME_STRING = '2024-11-08T23:59:59+00:00' @mock_aws class TestTransformations(TstFunction): # Yes, this is an excessively long method. We're going with it for sake of a single illustrative test. 
- @patch('cc_common.config._Config.current_standard_datetime', datetime.fromisoformat('2024-11-08T23:59:59+00:00')) + @patch('cc_common.config._Config.current_standard_datetime', datetime.fromisoformat(MOCK_CURRENT_DATETIME_STRING)) @patch('cc_common.config._Config.license_preprocessing_queue') def test_transformations(self, mock_license_preprocessing_queue): """Provider data undergoes several transformations from when a license is first posted, stored into the @@ -154,6 +155,9 @@ def test_transformations(self, mock_license_preprocessing_queue): # license should be active and compact eligible expected_license['licenseStatus'] = 'active' expected_license['compactEligibility'] = 'eligible' + expected_license['uploadDate'] = MOCK_CURRENT_DATETIME_STRING + expected_license['licenseUploadDateGSIPK'] = 'C#aslp#J#oh#D#2024-11' + expected_license['licenseUploadDateGSISK'] = 'TIME#1731110399#LT#slp#PID#89a6377e-c3a5-40e5-bca5-317ec854c570' with open('../common/tests/resources/dynamo/privilege.json') as f: expected_privilege = json.load(f) # privilege status should be active diff --git a/backend/compact-connect/stacks/persistent_stack/provider_table.py b/backend/compact-connect/stacks/persistent_stack/provider_table.py index 5761f76f0..45dcb2b1f 100644 --- a/backend/compact-connect/stacks/persistent_stack/provider_table.py +++ b/backend/compact-connect/stacks/persistent_stack/provider_table.py @@ -48,6 +48,7 @@ def __init__( self.provider_date_of_update_index_name = 'providerDateOfUpdate' self.license_gsi_name = 'licenseGSI' self.compact_transaction_gsi_name = 'compactTransactionIdGSI' + self.license_upload_date_gsi_name = 'licenseUploadDateGSI' self.add_global_secondary_index( index_name=self.provider_fam_giv_mid_index_name, @@ -83,6 +84,12 @@ def __init__( 'providerId', ], ) + self.add_global_secondary_index( + index_name=self.license_upload_date_gsi_name, + partition_key=Attribute(name='licenseUploadDateGSIPK', type=AttributeType.STRING), + 
sort_key=Attribute(name='licenseUploadDateGSISK', type=AttributeType.STRING), + projection_type=ProjectionType.KEYS_ONLY + ) # Set up backup plan backup_enabled = environment_context['backup_enabled'] if backup_enabled and backup_infrastructure_stack is not None: From 68af79b84e031831b1adf4542b2eb39b3821dfbe Mon Sep 17 00:00:00 2001 From: Landon Shumway Date: Mon, 3 Nov 2025 20:40:51 -0600 Subject: [PATCH 10/81] Linter/formatter --- .../cc_common/data_model/schema/license/record.py | 10 +++++----- .../test_data_model/test_provider_transformations.py | 5 ++++- .../stacks/persistent_stack/provider_table.py | 2 +- 3 files changed, 10 insertions(+), 7 deletions(-) diff --git a/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/license/record.py b/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/license/record.py index 26aa7ef41..de8f2f484 100644 --- a/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/license/record.py +++ b/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/license/record.py @@ -49,7 +49,7 @@ class LicenseRecordSchema(BaseRecordSchema, LicenseCommonSchema): licenseGSISK = String(required=True, allow_none=False) licenseUploadDateGSIPK = String(required=False, allow_none=False) licenseUploadDateGSISK = String(required=False, allow_none=False) - + # Optional field for tracking when the license upload caused this record to be created uploadDate = DateTime(required=False, allow_none=False) @@ -136,7 +136,7 @@ def generate_license_upload_date_gsi_fields(self, in_data, **kwargs): # noqa: A # Extract YYYY-MM from uploadDate upload_date = in_data['uploadDate'] year_month = upload_date.strftime('%Y-%m') - + # Generate GSI PK: C#{compact}#J#{jurisdiction}#D#{YYYY-MM} in_data['licenseUploadDateGSIPK'] = ( f'C#{in_data["compact"].lower()}#J#{in_data["jurisdiction"].lower()}#D#{year_month}' @@ -225,11 +225,11 @@ class LicenseUpdateRecordSchema(BaseRecordSchema, 
ChangeHashMixin): investigationDetails = Nested(InvestigationDetailsSchema(), required=False, allow_none=False) # List of field names that were present in the previous record but removed in the update removedValues = List(String(), required=False, allow_none=False) - + # Optional GSI fields for license upload date tracking licenseUploadDateGSIPK = String(required=False, allow_none=False) licenseUploadDateGSISK = String(required=False, allow_none=False) - + # Optional field for tracking when the license upload caused this update record to be created uploadDate = DateTime(required=False, allow_none=False) @@ -270,7 +270,7 @@ def generate_license_upload_date_gsi_fields(self, in_data, **kwargs): # noqa: A upload_epoch_time = int(upload_date.timestamp()) license_type_abbr = config.license_type_abbreviations[in_data['compact']][in_data['licenseType']] in_data['licenseUploadDateGSISK'] = ( - f'TIME#{upload_epoch_time}#LT#{in_data["licenseType"]}#PID#{in_data["providerId"]}' + f'TIME#{upload_epoch_time}#LT#{license_type_abbr}#PID#{in_data["providerId"]}' ) return in_data diff --git a/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_data_model/test_provider_transformations.py b/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_data_model/test_provider_transformations.py index e77539953..f1c5a4f58 100644 --- a/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_data_model/test_provider_transformations.py +++ b/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_data_model/test_provider_transformations.py @@ -9,6 +9,7 @@ MOCK_CURRENT_DATETIME_STRING = '2024-11-08T23:59:59+00:00' + @mock_aws class TestTransformations(TstFunction): # Yes, this is an excessively long method. We're going with it for sake of a single illustrative test. 
@@ -157,7 +158,9 @@ def test_transformations(self, mock_license_preprocessing_queue): expected_license['compactEligibility'] = 'eligible' expected_license['uploadDate'] = MOCK_CURRENT_DATETIME_STRING expected_license['licenseUploadDateGSIPK'] = 'C#aslp#J#oh#D#2024-11' - expected_license['licenseUploadDateGSISK'] = 'TIME#1731110399#LT#slp#PID#89a6377e-c3a5-40e5-bca5-317ec854c570' + expected_license['licenseUploadDateGSISK'] = ( + 'TIME#1731110399#LT#slp#PID#89a6377e-c3a5-40e5-bca5-317ec854c570' + ) with open('../common/tests/resources/dynamo/privilege.json') as f: expected_privilege = json.load(f) # privilege status should be active diff --git a/backend/compact-connect/stacks/persistent_stack/provider_table.py b/backend/compact-connect/stacks/persistent_stack/provider_table.py index 45dcb2b1f..662574937 100644 --- a/backend/compact-connect/stacks/persistent_stack/provider_table.py +++ b/backend/compact-connect/stacks/persistent_stack/provider_table.py @@ -88,7 +88,7 @@ def __init__( index_name=self.license_upload_date_gsi_name, partition_key=Attribute(name='licenseUploadDateGSIPK', type=AttributeType.STRING), sort_key=Attribute(name='licenseUploadDateGSISK', type=AttributeType.STRING), - projection_type=ProjectionType.KEYS_ONLY + projection_type=ProjectionType.KEYS_ONLY, ) # Set up backup plan backup_enabled = environment_context['backup_enabled'] From 9f3014f893cdee8c64cd85264f17606ce9e55ede Mon Sep 17 00:00:00 2001 From: Landon Shumway Date: Mon, 3 Nov 2025 21:56:05 -0600 Subject: [PATCH 11/81] WIP - add logic to rollback records from license upload --- .../cc_common/data_model/data_client.py | 4 +- .../data_model/schema/data_event/api.py | 14 + .../common/cc_common/event_bus_client.py | 84 +++ .../handlers/rollback_license_upload.py | 568 ++++++++++++++++++ .../function/test_rollback_license_upload.py | 252 ++++++++ .../disaster_recovery_stack/__init__.py | 25 + .../license_upload_rollback_step_function.py | 247 ++++++++ 7 files changed, 1192 insertions(+), 2 
deletions(-) create mode 100644 backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py create mode 100644 backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py create mode 100644 backend/compact-connect/stacks/disaster_recovery_stack/license_upload_rollback_step_function.py diff --git a/backend/compact-connect/lambdas/python/common/cc_common/data_model/data_client.py b/backend/compact-connect/lambdas/python/common/cc_common/data_model/data_client.py index 267c5d0d8..fe229a8b9 100644 --- a/backend/compact-connect/lambdas/python/common/cc_common/data_model/data_client.py +++ b/backend/compact-connect/lambdas/python/common/cc_common/data_model/data_client.py @@ -1151,7 +1151,7 @@ def deactivate_privilege( privilege_update_record = PrivilegeUpdateRecordSchema().dump( { 'type': ProviderRecordType.PRIVILEGE_UPDATE, - 'updateType': 'deactivation', + 'updateType': UpdateCategory.DEACTIVATION, 'providerId': provider_id, 'compact': compact, 'jurisdiction': jurisdiction, @@ -2161,7 +2161,7 @@ def lift_license_encumbrance( and potentially updating the license record's encumbered status. 
:param str compact: The compact name - :param str provider_id: The provider ID + :param UUID provider_id: The provider ID :param str jurisdiction: The jurisdiction :param str license_type_abbreviation: The license type abbreviation :param UUID adverse_action_id: The adverse action ID to lift diff --git a/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/data_event/api.py b/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/data_event/api.py index 6549399e7..d5d11812f 100644 --- a/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/data_event/api.py +++ b/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/data_event/api.py @@ -66,3 +66,17 @@ class InvestigationEventDetailSchema(DataEventDetailBaseSchema): class LicenseDeactivationDetailSchema(DataEventDetailBaseSchema): providerId = UUID(required=True, allow_none=False) licenseType = String(required=True, allow_none=False) + + +class LicenseRevertDetailSchema(DataEventDetailBaseSchema): + providerId = UUID(required=True, allow_none=False) + licenseType = String(required=True, allow_none=False) + jurisdiction = Jurisdiction(required=True, allow_none=False) + rollbackReason = String(required=True, allow_none=False) + + +class PrivilegeRevertDetailSchema(DataEventDetailBaseSchema): + providerId = UUID(required=True, allow_none=False) + licenseType = String(required=True, allow_none=False) + jurisdiction = Jurisdiction(required=True, allow_none=False) + rollbackReason = String(required=True, allow_none=False) diff --git a/backend/compact-connect/lambdas/python/common/cc_common/event_bus_client.py b/backend/compact-connect/lambdas/python/common/cc_common/event_bus_client.py index 88ae6307d..ab2e74ceb 100644 --- a/backend/compact-connect/lambdas/python/common/cc_common/event_bus_client.py +++ b/backend/compact-connect/lambdas/python/common/cc_common/event_bus_client.py @@ -437,3 +437,87 @@ def publish_investigation_closed_event( 
detail=deserialized_detail, event_batch_writer=event_batch_writer, ) + + def publish_license_revert_event( + self, + source: str, + compact: str, + provider_id: UUID, + jurisdiction: str, + license_type: str, + rollback_reason: str, + event_batch_writer: EventBatchWriter | None = None, + ): + """ + Publish a license revert event to the event bus. + + :param source: The source of the event + :param compact: The compact name + :param provider_id: The provider ID + :param jurisdiction: The jurisdiction of the license + :param license_type: The license type + :param rollback_reason: The reason for the rollback + :param event_batch_writer: Optional EventBatchWriter for efficient batch publishing + """ + from cc_common.data_model.schema.data_event.api import LicenseRevertDetailSchema + + event_detail = { + 'compact': compact, + 'providerId': provider_id, + 'jurisdiction': jurisdiction, + 'licenseType': license_type, + 'rollbackReason': rollback_reason, + 'eventTime': config.current_standard_datetime, + } + + license_revert_detail_schema = LicenseRevertDetailSchema() + deserialized_detail = license_revert_detail_schema.dump(event_detail) + + self._publish_event( + source=source, + detail_type='license.revert', + detail=deserialized_detail, + event_batch_writer=event_batch_writer, + ) + + def publish_privilege_revert_event( + self, + source: str, + compact: str, + provider_id: UUID, + jurisdiction: str, + license_type: str, + rollback_reason: str, + event_batch_writer: EventBatchWriter | None = None, + ): + """ + Publish a privilege revert event to the event bus. 
+ + :param source: The source of the event + :param compact: The compact name + :param provider_id: The provider ID + :param jurisdiction: The jurisdiction of the privilege + :param license_type: The license type + :param rollback_reason: The reason for the rollback + :param event_batch_writer: Optional EventBatchWriter for efficient batch publishing + """ + from cc_common.data_model.schema.data_event.api import PrivilegeRevertDetailSchema + + event_detail = { + 'compact': compact, + 'providerId': provider_id, + 'jurisdiction': jurisdiction, + 'licenseType': license_type, + 'rollbackReason': rollback_reason, + 'eventTime': config.current_standard_datetime, + } + + privilege_revert_detail_schema = PrivilegeRevertDetailSchema() + deserialized_detail = privilege_revert_detail_schema.dump(event_detail) + + self._publish_event( + source=source, + detail_type='privilege.revert', + detail=deserialized_detail, + event_batch_writer=event_batch_writer, + ) diff --git a/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py b/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py new file mode 100644 index 000000000..048874f80 --- /dev/null +++ b/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py @@ -0,0 +1,568 @@ +import json +import os +import time +from datetime import datetime, timedelta +from collections import defaultdict + +import boto3 +from aws_lambda_powertools.utilities.typing import LambdaContext +from boto3.dynamodb.conditions import Key +from botocore.exceptions import ClientError +from cc_common.config import config, logger +from cc_common.data_model.schema.common import UpdateCategory +from cc_common.data_model.update_tier_enum import UpdateTierEnum +from cc_common.event_batch_writer import EventBatchWriter + + +# Maximum time window for rollback (1 week in seconds) +MAX_ROLLBACK_WINDOW_SECONDS = 7 * 24 * 60 * 60 + +# License upload related 
update categories +LICENSE_UPLOAD_UPDATE_CATEGORIES = { + UpdateCategory.DEACTIVATION, + UpdateCategory.RENEWAL, + UpdateCategory.LICENSE_UPLOAD_UPDATE_OTHER, +} + +# Privilege update category for license deactivations +PRIVILEGE_LICENSE_DEACTIVATION_CATEGORY = UpdateCategory.LICENSE_DEACTIVATION + + +def rollback_license_upload(event: dict, context: LambdaContext): # noqa: ARG001 unused-argument + """ + Rollback invalid license uploads for a compact/jurisdiction/time window. + + This function queries the licenseUploadDateGSI to find all affected records, validates + rollback eligibility, reverts records to their pre-upload state, and publishes events. + Results are written to S3 to avoid state management in the step function. + + Input event structure: + { + 'compact': 'aslp', + 'jurisdiction': 'oh', + 'startDateTime': '2024-01-01T00:00:00Z', + 'endDateTime': '2024-01-01T23:59:59Z', + 'rollbackReason': 'Invalid data uploaded', + 'tableNameRollbackConfirmation': 'provider-table', + 'executionId': 'unique-execution-id', + 'providersProcessed': 0, + 'lastEvaluatedGSIKey': None + } + + Returns: + { + 'rollbackStatus': 'IN_PROGRESS' | 'COMPLETE', + 'providersProcessed': int, + 'providersReverted': int, + 'providersSkipped': int, + 'providersFailed': int, + 'lastEvaluatedGSIKey': dict | None, + 'resultsS3Key': 's3://bucket-name/execution-id/results.json' + } + """ + start_time = time.time() + max_execution_time = 12 * 60 # 12 minutes in seconds + + # Extract and validate input parameters + compact = event['compact'] + jurisdiction = event['jurisdiction'] + start_datetime_str = event['startDateTime'] + end_datetime_str = event['endDateTime'] + rollback_reason = event['rollbackReason'] + table_name_confirmation = event['tableNameRollbackConfirmation'] + execution_id = event['executionId'] + providers_processed = event.get('providersProcessed', 0) + last_evaluated_gsi_key = event.get('lastEvaluatedGSIKey') + + # Validate table name guard rail + provider_table_name = 
config.provider_table_name + if table_name_confirmation != provider_table_name: + logger.error('Rollback execution guard flag missing or invalid') + return { + 'rollbackStatus': 'FAILED', + 'error': f'Invalid table name specified. ' + f'tableNameRollbackConfirmation field must be set to {provider_table_name}', + } + + # Parse and validate datetime parameters + try: + start_datetime = datetime.fromisoformat(start_datetime_str.replace('Z', '+00:00')) + end_datetime = datetime.fromisoformat(end_datetime_str.replace('Z', '+00:00')) + except ValueError as e: + logger.error(f'Invalid datetime format: {str(e)}') + return { + 'rollbackStatus': 'FAILED', + 'error': f'Invalid datetime format: {str(e)}', + } + + # Validate time window + if start_datetime >= end_datetime: + logger.error('Start time must be before end time') + return { + 'rollbackStatus': 'FAILED', + 'error': 'Start time must be before end time', + } + + time_window_seconds = (end_datetime - start_datetime).total_seconds() + if time_window_seconds > MAX_ROLLBACK_WINDOW_SECONDS: + logger.error(f'Time window exceeds maximum of {MAX_ROLLBACK_WINDOW_SECONDS / 86400} days') + return { + 'rollbackStatus': 'FAILED', + 'error': f'Time window cannot exceed {MAX_ROLLBACK_WINDOW_SECONDS / 86400} days', + } + + logger.info( + 'Starting license upload rollback', + compact=compact, + jurisdiction=jurisdiction, + start_datetime=start_datetime_str, + end_datetime=end_datetime_str, + execution_id=execution_id, + ) + + # Initialize S3 client and bucket + s3_client = boto3.client('s3') + rollback_results_bucket_name = os.environ['ROLLBACK_RESULTS_BUCKET_NAME'] + results_s3_key = f'{execution_id}/results.json' + + # Load existing results if this is a continuation + if providers_processed > 0: + existing_results = _load_results_from_s3(s3_client, rollback_results_bucket_name, results_s3_key) + else: + existing_results = { + 'skippedProviderDetails': [], + 'failedProviderDetails': [], + 'revertedProviderSummaries': [], + } + + # 
Initialize counters + providers_reverted = len(existing_results['revertedProviderSummaries']) + providers_skipped = len(existing_results['skippedProviderDetails']) + providers_failed = len(existing_results['failedProviderDetails']) + + # Get provider table and GSI + provider_table = config.provider_table + + try: + # Query GSI for affected records across the time window + affected_provider_ids = _query_gsi_for_affected_providers( + provider_table, + compact, + jurisdiction, + start_datetime, + end_datetime, + last_evaluated_gsi_key, + ) + + # Process each provider + for provider_id in affected_provider_ids: + # Check time limit + elapsed_time = time.time() - start_time + if elapsed_time > max_execution_time: + logger.info(f'Approaching time limit after {elapsed_time:.2f} seconds. Returning IN_PROGRESS status.') + + # Write current results to S3 + _write_results_to_s3(s3_client, rollback_results_bucket_name, results_s3_key, existing_results) + + return { + 'rollbackStatus': 'IN_PROGRESS', + 'providersProcessed': providers_processed, + 'providersReverted': providers_reverted, + 'providersSkipped': providers_skipped, + 'providersFailed': providers_failed, + 'lastEvaluatedGSIKey': None, # Continue from next provider + 'resultsS3Key': f's3://{rollback_results_bucket_name}/{results_s3_key}', + 'compact': compact, + 'jurisdiction': jurisdiction, + 'startDateTime': start_datetime_str, + 'endDateTime': end_datetime_str, + 'rollbackReason': rollback_reason, + 'tableNameRollbackConfirmation': table_name_confirmation, + 'executionId': execution_id, + } + + providers_processed += 1 + + # Process the provider + result = _process_provider_rollback( + provider_id=provider_id, + compact=compact, + jurisdiction=jurisdiction, + start_datetime=start_datetime, + end_datetime=end_datetime, + rollback_reason=rollback_reason, + ) + + # Update results based on outcome + if result['status'] == 'reverted': + providers_reverted += 1 + 
existing_results['revertedProviderSummaries'].append(result['summary']) + elif result['status'] == 'skipped': + providers_skipped += 1 + existing_results['skippedProviderDetails'].append(result['details']) + elif result['status'] == 'failed': + providers_failed += 1 + existing_results['failedProviderDetails'].append(result['details']) + + # All providers processed successfully + logger.info('Rollback complete', providers_processed=providers_processed) + + # Write final results to S3 + _write_results_to_s3(s3_client, rollback_results_bucket_name, results_s3_key, existing_results) + + return { + 'rollbackStatus': 'COMPLETE', + 'providersProcessed': providers_processed, + 'providersReverted': providers_reverted, + 'providersSkipped': providers_skipped, + 'providersFailed': providers_failed, + 'resultsS3Key': f's3://{rollback_results_bucket_name}/{results_s3_key}', + } + + except ClientError as e: + logger.error(f'Error during rollback: {str(e)}') + raise e + + +def _query_gsi_for_affected_providers( + provider_table, + compact: str, + jurisdiction: str, + start_datetime: datetime, + end_datetime: datetime, + last_evaluated_key: dict | None, +) -> set[str]: + """ + Query the licenseUploadDateGSI to find all affected provider IDs. + + Since the time window might span multiple months, we need to query each month separately. 
+ """ + affected_provider_ids = set() + + # Generate list of year-month strings to query + current_date = start_datetime.replace(day=1) + end_month = end_datetime.replace(day=1) + + year_months = [] + while current_date <= end_month: + year_months.append(current_date.strftime('%Y-%m')) + # Move to next month + if current_date.month == 12: + current_date = current_date.replace(year=current_date.year + 1, month=1) + else: + current_date = current_date.replace(month=current_date.month + 1) + + start_epoch = int(start_datetime.timestamp()) + end_epoch = int(end_datetime.timestamp()) + + # Query each month + for year_month in year_months: + gsi_pk = f'C#{compact.lower()}#J#{jurisdiction.lower()}#D#{year_month}' + + query_kwargs = { + 'IndexName': 'licenseUploadDateGSI', + 'KeyConditionExpression': ( + Key('licenseUploadDateGSIPK').eq(gsi_pk) + & Key('licenseUploadDateGSISK').between(f'TIME#{start_epoch}#', f'TIME#{end_epoch}#~') + ), + } + + if last_evaluated_key: + query_kwargs['ExclusiveStartKey'] = last_evaluated_key + + while True: + response = provider_table.query(**query_kwargs) + + # Extract provider IDs from the results + for item in response.get('Items', []): + # The providerId is in the SK: TIME#{epoch}#LT#{license_type}#PID#{provider_id} + sk = item.get('licenseUploadDateGSISK', '') + if '#PID#' in sk: + provider_id = sk.split('#PID#')[1] + affected_provider_ids.add(provider_id) + + # Check for pagination + last_evaluated_key = response.get('LastEvaluatedKey') + if not last_evaluated_key: + break + + query_kwargs['ExclusiveStartKey'] = last_evaluated_key + + logger.info(f'Found {len(affected_provider_ids)} unique providers affected by upload window') + return affected_provider_ids + + +def _process_provider_rollback( + provider_id: str, + compact: str, + jurisdiction: str, + start_datetime: datetime, + end_datetime: datetime, + rollback_reason: str, +) -> dict: + """ + Process rollback for a single provider. 
+ + Returns a dict with: + - status: 'reverted', 'skipped', or 'failed' + - summary/details: Information about the outcome + """ + logger.info('Processing provider rollback', provider_id=provider_id) + + try: + # Fetch all provider records including all update tiers + provider_records = config.data_client.get_provider_user_records( + compact=compact, + provider_id=provider_id, + include_update_tier=UpdateTierEnum.TIER_THREE, + ) + + # Check eligibility for rollback + eligibility_result = _check_rollback_eligibility(provider_records, start_datetime, end_datetime) + + if not eligibility_result['eligible']: + logger.info('Provider not eligible for automatic rollback', provider_id=provider_id, reason=eligibility_result['reason']) + return { + 'status': 'skipped', + 'details': { + 'providerId': provider_id, + 'reason': eligibility_result['reason'], + 'ineligibleUpdates': eligibility_result.get('ineligible_updates', []), + }, + } + + # Determine pre-rollback state and build transactions + revert_plan = _determine_revert_plan(provider_records, start_datetime, end_datetime, compact, jurisdiction) + + # Execute the revert transactions + _execute_revert_transactions(revert_plan) + + # Publish events + _publish_revert_events(revert_plan, compact, rollback_reason) + + logger.info('Provider rollback successful', provider_id=provider_id) + return { + 'status': 'reverted', + 'summary': { + 'providerId': provider_id, + 'licensesReverted': len(revert_plan['licenses_to_revert']), + 'privilegesReverted': len(revert_plan['privileges_to_revert']), + 'updatesDeleted': len(revert_plan['updates_to_delete']), + }, + } + + except Exception as e: + logger.error(f'Error processing provider rollback: {str(e)}', provider_id=provider_id, exc_info=True) + return { + 'status': 'failed', + 'details': { + 'providerId': provider_id, + 'error': str(e), + }, + } + + +def _check_rollback_eligibility(provider_records, start_datetime: datetime, end_datetime: datetime) -> dict: + """ + Check if the 
provider is eligible for automatic rollback. + + A provider is ineligible if they have any updates after start_datetime that are NOT + license-upload related. + """ + # Get all update records + license_updates = provider_records.get_license_records( + filter_condition=lambda record: record.type == 'licenseUpdate' + ) + privilege_updates = provider_records.get_privilege_records( + filter_condition=lambda record: record.type == 'privilegeUpdate' + ) + + ineligible_updates = [] + + # Check license updates + for update in license_updates: + if update.createDate >= start_datetime: + if update.updateType not in LICENSE_UPLOAD_UPDATE_CATEGORIES: + ineligible_updates.append({ + 'type': 'licenseUpdate', + 'updateType': update.updateType, + 'createDate': update.createDate.isoformat(), + }) + + # Check privilege updates + for update in privilege_updates: + if update.createDate >= start_datetime: + if update.updateType != PRIVILEGE_LICENSE_DEACTIVATION_CATEGORY: + ineligible_updates.append({ + 'type': 'privilegeUpdate', + 'updateType': update.updateType, + 'createDate': update.createDate.isoformat(), + }) + + if ineligible_updates: + return { + 'eligible': False, + 'reason': 'Provider has non-upload-related updates after rollback start time', + 'ineligible_updates': ineligible_updates, + } + + return {'eligible': True} + + +def _determine_revert_plan(provider_records, start_datetime: datetime, end_datetime: datetime, compact: str, jurisdiction: str) -> dict: + """ + Determine what changes need to be made to revert the provider to pre-rollback state. 
+ + Returns a plan dict with: + - licenses_to_revert: List of license records to revert/delete + - privileges_to_revert: List of privilege records to revert + - provider_to_revert: Provider record to revert (if needed) + - updates_to_delete: List of update records to delete + """ + # This is a complex function that needs to be implemented + # For now, return a skeleton structure + plan = { + 'licenses_to_revert': [], + 'privileges_to_revert': [], + 'provider_to_revert': None, + 'updates_to_delete': [], + } + + # TODO: Implement full logic to determine revert plan + # This would involve: + # 1. Finding all licenses/privileges affected in the time window + # 2. For each, determining the state before the window + # 3. Identifying which update records need to be deleted + # 4. Determining if the provider record needs to be reverted + + return plan + + +def _execute_revert_transactions(revert_plan: dict): + """ + Execute DynamoDB transactions to revert records. + + DynamoDB transactions are limited to 100 items, so we may need to split into multiple transactions. 
+ """ + # Build transaction items + transaction_items = [] + + # Add license revert/delete operations + for license_action in revert_plan['licenses_to_revert']: + if license_action['action'] == 'delete': + transaction_items.append({ + 'Delete': { + 'TableName': config.provider_table_name, + 'Key': { + 'pk': {'S': license_action['pk']}, + 'sk': {'S': license_action['sk']}, + }, + } + }) + else: # revert + transaction_items.append({ + 'Put': { + 'TableName': config.provider_table_name, + 'Item': license_action['item'], + } + }) + + # Add privilege revert operations + for privilege_action in revert_plan['privileges_to_revert']: + transaction_items.append({ + 'Put': { + 'TableName': config.provider_table_name, + 'Item': privilege_action['item'], + } + }) + + # Add provider revert operation if needed + if revert_plan['provider_to_revert']: + transaction_items.append({ + 'Put': { + 'TableName': config.provider_table_name, + 'Item': revert_plan['provider_to_revert']['item'], + } + }) + + # Add update delete operations + for update in revert_plan['updates_to_delete']: + transaction_items.append({ + 'Delete': { + 'TableName': config.provider_table_name, + 'Key': { + 'pk': {'S': update['pk']}, + 'sk': {'S': update['sk']}, + }, + } + }) + logger.info('Deleting update record', pk=update['pk'], sk=update['sk']) + + # Execute transactions in batches of 100 + for i in range(0, len(transaction_items), 100): + batch = transaction_items[i:i + 100] + config.dynamodb_client.transact_write_items(TransactItems=batch) + + +def _publish_revert_events(revert_plan: dict, compact: str, rollback_reason: str): + """ + Publish revert events for all reverted licenses and privileges. 
+ """ + with EventBatchWriter(config.events_client) as event_writer: + # Publish license revert events + for license_action in revert_plan['licenses_to_revert']: + config.event_bus_client.publish_license_revert_event( + source='org.compactconnect.disaster-recovery', + compact=compact, + provider_id=license_action['provider_id'], + jurisdiction=license_action['jurisdiction'], + license_type=license_action['license_type'], + rollback_reason=rollback_reason, + event_batch_writer=event_writer, + ) + + # Publish privilege revert events + for privilege_action in revert_plan['privileges_to_revert']: + config.event_bus_client.publish_privilege_revert_event( + source='org.compactconnect.disaster-recovery', + compact=compact, + provider_id=privilege_action['provider_id'], + jurisdiction=privilege_action['jurisdiction'], + license_type=privilege_action['license_type'], + rollback_reason=rollback_reason, + event_batch_writer=event_writer, + ) + + +def _load_results_from_s3(s3_client, bucket_name: str, key: str) -> dict: + """Load existing results from S3.""" + try: + response = s3_client.get_object(Bucket=bucket_name, Key=key) + return json.loads(response['Body'].read().decode('utf-8')) + except s3_client.exceptions.NoSuchKey: + # First execution, no existing results + return { + 'skippedProviderDetails': [], + 'failedProviderDetails': [], + 'revertedProviderSummaries': [], + } + except Exception as e: + logger.error(f'Error loading results from S3: {str(e)}') + raise + + +def _write_results_to_s3(s3_client, bucket_name: str, key: str, results: dict): + """Write results to S3 with server-side encryption.""" + try: + s3_client.put_object( + Bucket=bucket_name, + Key=key, + Body=json.dumps(results, indent=2), + ContentType='application/json', + ServerSideEncryption='aws:kms', + ) + logger.info('Results written to S3', bucket=bucket_name, key=key) + except Exception as e: + logger.error(f'Error writing results to S3: {str(e)}') + raise + diff --git 
"""
Tests for the license upload rollback handler.

These tests verify the rollback functionality including:
- GSI queries for affected providers
- Eligibility validation
- Revert plan determination
- Transaction execution
- Event publishing
- S3 result management
"""

import json
import os
from datetime import datetime, timedelta
from unittest.mock import MagicMock, Mock, patch
from uuid import uuid4

import boto3
import pytest
from moto import mock_aws

from handlers.rollback_license_upload import (
    MAX_ROLLBACK_WINDOW_SECONDS,
    rollback_license_upload,
)


@mock_aws
class TestRollbackLicenseUpload:
    """Test class for license upload rollback handler."""

    def setup_method(self):
        """Set up test fixtures before each test method."""
        # Environment expected by the handler under test
        os.environ['PROVIDER_TABLE_NAME'] = 'test-provider-table'
        os.environ['ROLLBACK_RESULTS_BUCKET_NAME'] = 'test-rollback-results-bucket'
        os.environ['EVENT_BUS_NAME'] = 'test-event-bus'
        os.environ['AWS_DEFAULT_REGION'] = 'us-east-1'

        # Mocked AWS resources (moto intercepts these clients)
        self.dynamodb = boto3.resource('dynamodb')
        self.s3_client = boto3.client('s3')

        # Provider table with the licenseUploadDateGSI used by the rollback query
        self.provider_table = self.dynamodb.create_table(
            TableName='test-provider-table',
            KeySchema=[
                {'AttributeName': 'pk', 'KeyType': 'HASH'},
                {'AttributeName': 'sk', 'KeyType': 'RANGE'},
            ],
            AttributeDefinitions=[
                {'AttributeName': attribute_name, 'AttributeType': 'S'}
                for attribute_name in ('pk', 'sk', 'licenseUploadDateGSIPK', 'licenseUploadDateGSISK')
            ],
            BillingMode='PAY_PER_REQUEST',
            GlobalSecondaryIndexes=[
                {
                    'IndexName': 'licenseUploadDateGSI',
                    'KeySchema': [
                        {'AttributeName': 'licenseUploadDateGSIPK', 'KeyType': 'HASH'},
                        {'AttributeName': 'licenseUploadDateGSISK', 'KeyType': 'RANGE'},
                    ],
                    'Projection': {'ProjectionType': 'KEYS_ONLY'},
                },
            ],
        )

        # Bucket that receives persisted rollback results
        self.s3_client.create_bucket(Bucket='test-rollback-results-bucket')

        # Common sample identifiers and a one-day rollback window
        self.compact = 'aslp'
        self.jurisdiction = 'oh'
        self.provider_id = str(uuid4())
        self.start_datetime = datetime.now() - timedelta(days=1)
        self.end_datetime = datetime.now()

    def teardown_method(self):
        """Clean up after each test method."""
        for key in ('PROVIDER_TABLE_NAME', 'ROLLBACK_RESULTS_BUCKET_NAME', 'EVENT_BUS_NAME'):
            os.environ.pop(key, None)

    def _rollback_event(self, **overrides) -> dict:
        """Build a baseline rollback input event; keyword overrides replace individual fields."""
        event = {
            'compact': self.compact,
            'jurisdiction': self.jurisdiction,
            'startDateTime': self.start_datetime.isoformat(),
            'endDateTime': self.end_datetime.isoformat(),
            'rollbackReason': 'Test rollback',
            'tableNameRollbackConfirmation': 'test-provider-table',
            'executionId': 'test-execution-123',
            'providersProcessed': 0,
        }
        event.update(overrides)
        return event

    def test_rollback_validates_table_name_guard_rail(self):
        """Test that rollback validates the table name confirmation."""
        event = self._rollback_event(tableNameRollbackConfirmation='wrong-table-name')

        result = rollback_license_upload(event, Mock())

        assert result['rollbackStatus'] == 'FAILED'
        assert 'Invalid table name specified' in result['error']

    def test_rollback_validates_datetime_format(self):
        """Test that rollback validates datetime format."""
        event = self._rollback_event(startDateTime='invalid-datetime')

        result = rollback_license_upload(event, Mock())

        assert result['rollbackStatus'] == 'FAILED'
        assert 'Invalid datetime format' in result['error']

    def test_rollback_validates_time_window_order(self):
        """Test that rollback validates start time is before end time."""
        # Swap start/end so the window is inverted
        event = self._rollback_event(
            startDateTime=self.end_datetime.isoformat(),
            endDateTime=self.start_datetime.isoformat(),
        )

        result = rollback_license_upload(event, Mock())

        assert result['rollbackStatus'] == 'FAILED'
        assert 'Start time must be before end time' in result['error']

    def test_rollback_validates_maximum_time_window(self):
        """Test that rollback validates maximum time window."""
        window_start = datetime.now() - timedelta(days=8)  # More than 7 days
        window_end = datetime.now()

        event = self._rollback_event(
            startDateTime=window_start.isoformat(),
            endDateTime=window_end.isoformat(),
        )

        result = rollback_license_upload(event, Mock())

        assert result['rollbackStatus'] == 'FAILED'
        assert 'cannot exceed' in result['error']

    @patch('handlers.rollback_license_upload.config')
    def test_rollback_loads_existing_results_on_continuation(self, mock_config):
        """Test that rollback loads existing results from S3 on continuation."""
        execution_id = 'test-execution-123'

        # Seed S3 with results from a prior (simulated) invocation
        existing_results = {
            'skippedProviderDetails': [{'providerId': 'test-123', 'reason': 'test reason'}],
            'failedProviderDetails': [],
            'revertedProviderSummaries': [],
        }
        self.s3_client.put_object(
            Bucket='test-rollback-results-bucket',
            Key=f'{execution_id}/results.json',
            Body=json.dumps(existing_results),
        )

        # Mock config
        mock_config.provider_table_name = 'test-provider-table'
        mock_config.provider_table = self.provider_table

        # providersProcessed > 0 marks this invocation as a continuation
        event = self._rollback_event(executionId=execution_id, providersProcessed=1)

        context = Mock()

        # Note: This test will need to be expanded to mock the full flow
        # For now, it demonstrates the test structure

    def test_query_gsi_for_affected_providers_handles_multiple_months(self):
        """Test that GSI query handles time windows spanning multiple months."""
        # This test would verify that the query correctly handles
        # time windows that span multiple months by querying each month's
        # partition separately
        pass

    def test_process_provider_checks_eligibility(self):
        """Test that provider processing checks rollback eligibility."""
        # This test would verify that providers with non-upload-related
        # updates are correctly identified as ineligible
        pass

    def test_process_provider_determines_correct_revert_plan(self):
        """Test that provider processing determines the correct revert plan."""
        # This test would verify that the revert plan correctly identifies:
        # - Licenses to delete (created during window)
        # - Licenses to revert (existed before window)
        # - Privileges to revert
        # - Update records to delete
        pass

    def test_execute_revert_transactions_handles_100_item_limit(self):
        """Test that transaction execution handles DynamoDB's 100 item limit."""
        # This test would verify that transactions with >100 items
        # are correctly split into multiple transactions
        pass

    def test_publish_revert_events_uses_batch_writer(self):
        """Test that event publishing uses EventBatchWriter for efficiency."""
        # This test would verify that events are published in batches
        pass

    def test_s3_results_written_with_encryption(self):
        """Test that S3 results are written with server-side encryption."""
        # This test would verify that S3 writes use server-side encryption
        pass


# Additional test classes could be added for:
# - TestRollbackEligibilityValidation
# - TestRevertPlanDetermination
# - TestTransactionExecution
# - TestEventPublishing
# - TestS3ResultsManagement
import os

from aws_cdk import Duration
from aws_cdk.aws_dynamodb import Table
from aws_cdk.aws_iam import PolicyStatement
from aws_cdk.aws_kms import Key
from aws_cdk.aws_logs import LogGroup, RetentionDays
from aws_cdk.aws_s3 import Bucket
from aws_cdk.aws_stepfunctions import (
    Choice,
    Condition,
    DefinitionBody,
    Fail,
    IChainable,
    LogLevel,
    LogOptions,
    Pass,
    StateMachine,
    Succeed,
)
from aws_cdk.aws_stepfunctions_tasks import LambdaInvoke
from cdk_nag import NagSuppressions
from constructs import Construct

from common_constructs.python_function import PythonFunction
from common_constructs.stack import Stack


class LicenseUploadRollbackStepFunctionConstruct(Construct):
    """
    Step Function construct for rolling back invalid license uploads.

    This construct creates a Lambda function to process the rollback and a Step Function
    state machine to orchestrate the process with pagination support.
    """

    def __init__(
        self,
        scope: Construct,
        construct_id: str,
        *,
        provider_table: Table,
        rollback_results_bucket: Bucket,
        dr_shared_encryption_key: Key,
        **kwargs,
    ):
        """
        :param scope: CDK scope this construct is created in
        :param construct_id: Logical ID for the construct
        :param provider_table: Provider DynamoDB table the rollback reads/writes
        :param rollback_results_bucket: S3 bucket receiving rollback result documents
        :param dr_shared_encryption_key: KMS key used for the state machine log group
        """
        super().__init__(scope, construct_id, **kwargs)

        stack = Stack.of(self)

        # Create Lambda function for rollback processing
        self._create_rollback_function(
            stack=stack,
            provider_table=provider_table,
            rollback_results_bucket=rollback_results_bucket,
        )

        # Build Step Function definition (fix: the builder does not use the table,
        # so the previously-passed unused provider_table parameter was removed)
        definition = self._build_rollback_state_machine_definition()

        # Create log group for state machine
        state_machine_log_group = LogGroup(
            self,
            'LicenseUploadRollbackStateMachineLogs',
            retention=RetentionDays.ONE_MONTH,
            encryption_key=dr_shared_encryption_key,
        )

        # Create state machine
        self.state_machine = StateMachine(
            self,
            'LicenseUploadRollbackStateMachine',
            definition_body=DefinitionBody.from_chainable(definition),
            timeout=Duration.hours(8),  # Long timeout for processing many providers
            logs=LogOptions(
                destination=state_machine_log_group,
                level=LogLevel.ALL,
                include_execution_data=True,
            ),
            tracing_enabled=True,
        )

        # Grant state machine permission to invoke the Lambda
        self.rollback_function.grant_invoke(self.state_machine)

        NagSuppressions.add_resource_suppressions_by_path(
            stack=stack,
            path=f'{self.state_machine.node.path}/Role/DefaultPolicy/Resource',
            suppressions=[
                {
                    'id': 'AwsSolutions-IAM5',
                    'reason': """
                    This policy contains wild-carded actions and resources but they are scoped to the specific
                    Lambda function that this state machine needs access to.
                    """,
                },
            ],
        )

    def _create_rollback_function(
        self,
        stack: Stack,
        provider_table: Table,
        rollback_results_bucket: Bucket,
    ):
        """Create the Lambda function for processing license upload rollback."""
        self.rollback_function = PythonFunction(
            self,
            'LicenseUploadRollbackFunction',
            description='Rollback invalid license uploads for a compact/jurisdiction/time window',
            lambda_dir='disaster-recovery',
            index=os.path.join('handlers', 'rollback_license_upload.py'),
            handler='rollback_license_upload',
            timeout=Duration.minutes(15),
            memory_size=3008,  # High memory for performance
            environment={
                **stack.common_env_vars,
                'ROLLBACK_RESULTS_BUCKET_NAME': rollback_results_bucket.bucket_name,
            },
        )

        # Grant permissions to read/write provider table
        provider_table.grant_read_write_data(self.rollback_function)

        # Grant permission to query the licenseUploadDateGSI
        # NOTE(review): relies on a custom 'license_upload_date_gsi_name' attribute on the
        # provider table construct — confirm the table class defines it
        self.rollback_function.add_to_role_policy(
            PolicyStatement(
                actions=['dynamodb:Query'],
                resources=[
                    f'{provider_table.table_arn}/index/{provider_table.license_upload_date_gsi_name}'
                ],
            )
        )

        # Grant S3 permissions for results bucket
        rollback_results_bucket.grant_read_write(self.rollback_function)

        # Grant EventBridge permissions to publish events
        self.rollback_function.add_to_role_policy(
            PolicyStatement(
                actions=['events:PutEvents'],
                resources=[
                    stack.format_arn(
                        service='events',
                        resource='event-bus',
                        resource_name=stack.common_env_vars['EVENT_BUS_NAME'],
                    )
                ],
            )
        )

        NagSuppressions.add_resource_suppressions_by_path(
            stack=stack,
            path=f'{self.rollback_function.role.node.path}/DefaultPolicy/Resource',
            suppressions=[
                {
                    'id': 'AwsSolutions-IAM5',
                    'reason': """
                    This policy contains wild-carded actions and resources but they are scoped to the
                    specific table, index, S3 bucket, and event bus that this lambda needs access to.
                    """,
                },
            ],
        )

    def _build_rollback_state_machine_definition(self) -> IChainable:
        """
        Build the Step Function definition for license upload rollback.

        Flow:
        1. Initialize - Set up execution parameters including executionId
        2. RollbackLicenses (Lambda) - Process providers and rollback
        3. CheckStatus - Check if complete or needs continuation
           - IN_PROGRESS: Loop back to RollbackLicenses
           - COMPLETE: Success
           - default: Fail
        """

        # Initialize state - prepare input and add executionId
        initialize_rollback = Pass(
            self,
            'InitializeRollback',
            parameters={
                'compact.$': '$.compact',
                'jurisdiction.$': '$.jurisdiction',
                'startDateTime.$': '$.startDateTime',
                'endDateTime.$': '$.endDateTime',
                'rollbackReason.$': '$.rollbackReason',
                'tableNameRollbackConfirmation.$': '$.tableNameRollbackConfirmation',
                'executionId.$': '$$.Execution.Name',
                'providersProcessed': 0,
                'lastEvaluatedGSIKey': None,
            },
            comment='Initialize rollback parameters with execution ID',
            result_path='$',
        )

        # Rollback licenses Lambda task
        rollback_licenses_task = LambdaInvoke(
            self,
            'RollbackLicenses',
            lambda_function=self.rollback_function,
            comment='Process license upload rollback for affected providers',
            payload_response_only=True,
            result_path='$',
            retry_on_service_exceptions=True,
        )

        # Check rollback status
        rollback_status_choice = Choice(
            self,
            'CheckRollbackStatus',
            comment='Check if rollback is complete or needs continuation',
        )

        # Rollback failed state
        rollback_failed = Fail(
            self,
            'RollbackFailed',
            comment='Rollback operation failed',
            cause='Rollback operation encountered an error',
            error='RollbackError',
        )

        # Success state
        rollback_complete = Succeed(
            self,
            'RollbackComplete',
            comment='License upload rollback completed successfully',
        )

        # Define flow logic
        initialize_rollback.next(rollback_licenses_task)
        rollback_licenses_task.next(rollback_status_choice)

        # Rollback status flow
        rollback_status_choice.when(
            Condition.string_equals('$.rollbackStatus', 'COMPLETE'),
            rollback_complete,
        ).when(
            Condition.string_equals('$.rollbackStatus', 'IN_PROGRESS'),
            rollback_licenses_task,  # Loop back to continue processing
        ).otherwise(rollback_failed)

        # Start with initialization
        return initialize_rollback
def get_all_license_update_records(
    self,
    filter_condition: 'Callable[[LicenseUpdateData], bool] | None' = None,
) -> 'list[LicenseUpdateData]':
    """
    Get all license update records for this provider.

    :param filter_condition: An optional filter to apply to the update records
    :return: List of LicenseUpdateData records
    """
    if filter_condition is None:
        return list(self._license_update_records)
    return [record for record in self._license_update_records if filter_condition(record)]


def get_all_privilege_update_records(
    self,
    filter_condition: 'Callable[[PrivilegeUpdateData], bool] | None' = None,
) -> 'list[PrivilegeUpdateData]':
    """
    Get all privilege update records for this provider.

    :param filter_condition: An optional filter to apply to the update records
    :return: List of PrivilegeUpdateData records
    """
    if filter_condition is None:
        return list(self._privilege_update_records)
    return [record for record in self._privilege_update_records if filter_condition(record)]
Data classes for rollback operations +@dataclass +class IneligibleUpdate: + """Represents an update that makes a provider ineligible for rollback.""" + type: str # 'licenseUpdate' or 'privilegeUpdate' + update_type: str + create_date: str + + +@dataclass +class LicenseRevertAction: + """Action to take for a license record.""" + action: str # 'delete' or 'revert' + pk: str + sk: str + item: dict | None = None + provider_id: str = '' + jurisdiction: str = '' + license_type: str = '' + + +@dataclass +class PrivilegeRevertAction: + """Action to take for a privilege record.""" + item: dict + provider_id: str + jurisdiction: str + license_type: str + + +@dataclass +class UpdateDeleteAction: + """Action to delete an update record.""" + pk: str + sk: str + + +@dataclass +class RevertPlan: + """Plan for reverting a provider's records.""" + licenses_to_revert: list[LicenseRevertAction] = field(default_factory=list) + privileges_to_revert: list[PrivilegeRevertAction] = field(default_factory=list) + provider_to_revert: dict | None = None + updates_to_delete: list[UpdateDeleteAction] = field(default_factory=list) + + +@dataclass +class ProviderSkippedDetails: + """Details for a provider that was skipped.""" + provider_id: str + reason: str + ineligible_updates: list[dict] = field(default_factory=list) + + +@dataclass +class ProviderFailedDetails: + """Details for a provider that failed to revert.""" + provider_id: str + error: str + + +@dataclass +class ProviderRevertedSummary: + """Summary for a provider that was successfully reverted.""" + provider_id: str + licenses_reverted: int + privileges_reverted: int + updates_deleted: int + + +@dataclass +class RollbackResults: + """Complete results of a rollback operation.""" + skipped_provider_details: list[ProviderSkippedDetails] = field(default_factory=list) + failed_provider_details: list[ProviderFailedDetails] = field(default_factory=list) + reverted_provider_summaries: list[ProviderRevertedSummary] = field(default_factory=list) 
+ + def to_dict(self) -> dict: + """Convert to dictionary for S3 storage.""" + return { + 'skippedProviderDetails': [ + { + 'providerId': detail.provider_id, + 'reason': detail.reason, + 'ineligibleUpdates': detail.ineligible_updates, + } + for detail in self.skipped_provider_details + ], + 'failedProviderDetails': [ + { + 'providerId': detail.provider_id, + 'error': detail.error, + } + for detail in self.failed_provider_details + ], + 'revertedProviderSummaries': [ + { + 'providerId': summary.provider_id, + 'licensesReverted': summary.licenses_reverted, + 'privilegesReverted': summary.privileges_reverted, + 'updatesDeleted': summary.updates_deleted, + } + for summary in self.reverted_provider_summaries + ], + } + + @classmethod + def from_dict(cls, data: dict) -> 'RollbackResults': + """Create from dictionary loaded from S3.""" + return cls( + skipped_provider_details=[ + ProviderSkippedDetails( + provider_id=detail['providerId'], + reason=detail['reason'], + ineligible_updates=detail.get('ineligibleUpdates', []), + ) + for detail in data.get('skippedProviderDetails', []) + ], + failed_provider_details=[ + ProviderFailedDetails( + provider_id=detail['providerId'], + error=detail['error'], + ) + for detail in data.get('failedProviderDetails', []) + ], + reverted_provider_summaries=[ + ProviderRevertedSummary( + provider_id=summary['providerId'], + licenses_reverted=summary['licensesReverted'], + privileges_reverted=summary['privilegesReverted'], + updates_deleted=summary['updatesDeleted'], + ) + for summary in data.get('revertedProviderSummaries', []) + ], + ) + + def rollback_license_upload(event: dict, context: LambdaContext): # noqa: ARG001 unused-argument """ Rollback invalid license uploads for a compact/jurisdiction/time window. 
@@ -43,7 +181,6 @@ def rollback_license_upload(event: dict, context: LambdaContext): # noqa: ARG00 'startDateTime': '2024-01-01T00:00:00Z', 'endDateTime': '2024-01-01T23:59:59Z', 'rollbackReason': 'Invalid data uploaded', - 'tableNameRollbackConfirmation': 'provider-table', 'executionId': 'unique-execution-id', 'providersProcessed': 0, 'lastEvaluatedGSIKey': None @@ -69,21 +206,10 @@ def rollback_license_upload(event: dict, context: LambdaContext): # noqa: ARG00 start_datetime_str = event['startDateTime'] end_datetime_str = event['endDateTime'] rollback_reason = event['rollbackReason'] - table_name_confirmation = event['tableNameRollbackConfirmation'] execution_id = event['executionId'] providers_processed = event.get('providersProcessed', 0) last_evaluated_gsi_key = event.get('lastEvaluatedGSIKey') - # Validate table name guard rail - provider_table_name = config.provider_table_name - if table_name_confirmation != provider_table_name: - logger.error('Rollback execution guard flag missing or invalid') - return { - 'rollbackStatus': 'FAILED', - 'error': f'Invalid table name specified. 
' - f'tableNameRollbackConfirmation field must be set to {provider_table_name}', - } - # Parse and validate datetime parameters try: start_datetime = datetime.fromisoformat(start_datetime_str.replace('Z', '+00:00')) @@ -129,16 +255,12 @@ def rollback_license_upload(event: dict, context: LambdaContext): # noqa: ARG00 if providers_processed > 0: existing_results = _load_results_from_s3(s3_client, rollback_results_bucket_name, results_s3_key) else: - existing_results = { - 'skippedProviderDetails': [], - 'failedProviderDetails': [], - 'revertedProviderSummaries': [], - } + existing_results = RollbackResults() # Initialize counters - providers_reverted = len(existing_results['revertedProviderSummaries']) - providers_skipped = len(existing_results['skippedProviderDetails']) - providers_failed = len(existing_results['failedProviderDetails']) + providers_reverted = len(existing_results.reverted_provider_summaries) + providers_skipped = len(existing_results.skipped_provider_details) + providers_failed = len(existing_results.failed_provider_details) # Get provider table and GSI provider_table = config.provider_table @@ -177,7 +299,6 @@ def rollback_license_upload(event: dict, context: LambdaContext): # noqa: ARG00 'startDateTime': start_datetime_str, 'endDateTime': end_datetime_str, 'rollbackReason': rollback_reason, - 'tableNameRollbackConfirmation': table_name_confirmation, 'executionId': execution_id, } @@ -194,15 +315,15 @@ def rollback_license_upload(event: dict, context: LambdaContext): # noqa: ARG00 ) # Update results based on outcome - if result['status'] == 'reverted': + if isinstance(result, ProviderRevertedSummary): providers_reverted += 1 - existing_results['revertedProviderSummaries'].append(result['summary']) - elif result['status'] == 'skipped': + existing_results.reverted_provider_summaries.append(result) + elif isinstance(result, ProviderSkippedDetails): providers_skipped += 1 - existing_results['skippedProviderDetails'].append(result['details']) - elif 
result['status'] == 'failed': + existing_results.skipped_provider_details.append(result) + elif isinstance(result, ProviderFailedDetails): providers_failed += 1 - existing_results['failedProviderDetails'].append(result['details']) + existing_results.failed_provider_details.append(result) # All providers processed successfully logger.info('Rollback complete', providers_processed=providers_processed) @@ -299,13 +420,14 @@ def _process_provider_rollback( start_datetime: datetime, end_datetime: datetime, rollback_reason: str, -) -> dict: +) -> ProviderRevertedSummary | ProviderSkippedDetails | ProviderFailedDetails: """ Process rollback for a single provider. - Returns a dict with: - - status: 'reverted', 'skipped', or 'failed' - - summary/details: Information about the outcome + Returns one of: + - ProviderRevertedSummary: If provider was successfully reverted + - ProviderSkippedDetails: If provider was skipped due to ineligibility + - ProviderFailedDetails: If an error occurred during processing """ logger.info('Processing provider rollback', provider_id=provider_id) @@ -314,22 +436,60 @@ def _process_provider_rollback( provider_records = config.data_client.get_provider_user_records( compact=compact, provider_id=provider_id, + # tier three includes all update records for the provider include_update_tier=UpdateTierEnum.TIER_THREE, ) # Check eligibility for rollback - eligibility_result = _check_rollback_eligibility(provider_records, start_datetime, end_datetime) - - if not eligibility_result['eligible']: - logger.info('Provider not eligible for automatic rollback', provider_id=provider_id, reason=eligibility_result['reason']) - return { - 'status': 'skipped', - 'details': { - 'providerId': provider_id, - 'reason': eligibility_result['reason'], - 'ineligibleUpdates': eligibility_result.get('ineligible_updates', []), - }, - } + # A provider is ineligible if they have any updates after start_datetime that are NOT license-upload related + license_updates = 
provider_records.get_all_license_update_records() + privilege_updates = provider_records.get_all_privilege_update_records() + + ineligible_updates: list[IneligibleUpdate] = [] + + # Check license updates + for update in license_updates: + if update.createDate >= start_datetime: + if update.updateType not in LICENSE_UPLOAD_UPDATE_CATEGORIES: + ineligible_updates.append( + IneligibleUpdate( + type='licenseUpdate', + update_type=update.updateType, + create_date=update.createDate.isoformat(), + ) + ) + + # Check privilege updates + for update in privilege_updates: + if update.createDate >= start_datetime: + if update.updateType != PRIVILEGE_LICENSE_DEACTIVATION_CATEGORY: + ineligible_updates.append( + IneligibleUpdate( + type='privilegeUpdate', + update_type=update.updateType, + create_date=update.createDate.isoformat(), + ) + ) + + # If ineligible updates found, skip this provider + if ineligible_updates: + logger.info( + 'Provider not eligible for automatic rollback', + provider_id=provider_id, + reason='Provider has non-upload-related updates after rollback start time', + ) + return ProviderSkippedDetails( + provider_id=provider_id, + reason='Provider has non-upload-related updates after rollback start time', + ineligible_updates=[ + { + 'type': update.type, + 'updateType': update.update_type, + 'createDate': update.create_date, + } + for update in ineligible_updates + ], + ) # Determine pre-rollback state and build transactions revert_plan = _determine_revert_plan(provider_records, start_datetime, end_datetime, compact, jurisdiction) @@ -341,79 +501,32 @@ def _process_provider_rollback( _publish_revert_events(revert_plan, compact, rollback_reason) logger.info('Provider rollback successful', provider_id=provider_id) - return { - 'status': 'reverted', - 'summary': { - 'providerId': provider_id, - 'licensesReverted': len(revert_plan['licenses_to_revert']), - 'privilegesReverted': len(revert_plan['privileges_to_revert']), - 'updatesDeleted': 
len(revert_plan['updates_to_delete']), - }, - } + return ProviderRevertedSummary( + provider_id=provider_id, + licenses_reverted=len(revert_plan.licenses_to_revert), + privileges_reverted=len(revert_plan.privileges_to_revert), + updates_deleted=len(revert_plan.updates_to_delete), + ) except Exception as e: logger.error(f'Error processing provider rollback: {str(e)}', provider_id=provider_id, exc_info=True) - return { - 'status': 'failed', - 'details': { - 'providerId': provider_id, - 'error': str(e), - }, - } - - -def _check_rollback_eligibility(provider_records, start_datetime: datetime, end_datetime: datetime) -> dict: - """ - Check if the provider is eligible for automatic rollback. - - A provider is ineligible if they have any updates after start_datetime that are NOT - license-upload related. - """ - # Get all update records - license_updates = provider_records.get_license_records( - filter_condition=lambda record: record.type == 'licenseUpdate' - ) - privilege_updates = provider_records.get_privilege_records( - filter_condition=lambda record: record.type == 'privilegeUpdate' - ) - - ineligible_updates = [] - - # Check license updates - for update in license_updates: - if update.createDate >= start_datetime: - if update.updateType not in LICENSE_UPLOAD_UPDATE_CATEGORIES: - ineligible_updates.append({ - 'type': 'licenseUpdate', - 'updateType': update.updateType, - 'createDate': update.createDate.isoformat(), - }) - - # Check privilege updates - for update in privilege_updates: - if update.createDate >= start_datetime: - if update.updateType != PRIVILEGE_LICENSE_DEACTIVATION_CATEGORY: - ineligible_updates.append({ - 'type': 'privilegeUpdate', - 'updateType': update.updateType, - 'createDate': update.createDate.isoformat(), - }) - - if ineligible_updates: - return { - 'eligible': False, - 'reason': 'Provider has non-upload-related updates after rollback start time', - 'ineligible_updates': ineligible_updates, - } - - return {'eligible': True} + return 
ProviderFailedDetails( + provider_id=provider_id, + error=str(e), + ) -def _determine_revert_plan(provider_records, start_datetime: datetime, end_datetime: datetime, compact: str, jurisdiction: str) -> dict: +def _determine_revert_plan( + provider_records: ProviderUserRecords, + start_datetime: datetime, + end_datetime: datetime, + compact: str, + jurisdiction: str, +) -> RevertPlan: """ Determine what changes need to be made to revert the provider to pre-rollback state. - Returns a plan dict with: + Returns a RevertPlan with: - licenses_to_revert: List of license records to revert/delete - privileges_to_revert: List of privilege records to revert - provider_to_revert: Provider record to revert (if needed) @@ -421,12 +534,7 @@ def _determine_revert_plan(provider_records, start_datetime: datetime, end_datet """ # This is a complex function that needs to be implemented # For now, return a skeleton structure - plan = { - 'licenses_to_revert': [], - 'privileges_to_revert': [], - 'provider_to_revert': None, - 'updates_to_delete': [], - } + plan = RevertPlan() # TODO: Implement full logic to determine revert plan # This would involve: @@ -438,126 +546,133 @@ def _determine_revert_plan(provider_records, start_datetime: datetime, end_datet return plan -def _execute_revert_transactions(revert_plan: dict): +def _build_transaction_items(revert_plan: RevertPlan) -> list[dict]: """ - Execute DynamoDB transactions to revert records. + Build DynamoDB transaction items from a revert plan. - DynamoDB transactions are limited to 100 items, so we may need to split into multiple transactions. + Returns a list of transaction items ready for transact_write_items. 
""" - # Build transaction items transaction_items = [] + table_name = config.provider_table_name - # Add license revert/delete operations - for license_action in revert_plan['licenses_to_revert']: - if license_action['action'] == 'delete': - transaction_items.append({ - 'Delete': { - 'TableName': config.provider_table_name, - 'Key': { - 'pk': {'S': license_action['pk']}, - 'sk': {'S': license_action['sk']}, - }, - } - }) - else: # revert - transaction_items.append({ - 'Put': { - 'TableName': config.provider_table_name, - 'Item': license_action['item'], - } - }) - - # Add privilege revert operations - for privilege_action in revert_plan['privileges_to_revert']: - transaction_items.append({ - 'Put': { - 'TableName': config.provider_table_name, - 'Item': privilege_action['item'], - } - }) - - # Add provider revert operation if needed - if revert_plan['provider_to_revert']: + # Helper functions for cleaner item building + def add_put(item: dict): transaction_items.append({ 'Put': { - 'TableName': config.provider_table_name, - 'Item': revert_plan['provider_to_revert']['item'], + 'TableName': table_name, + 'Item': item, } }) - # Add update delete operations - for update in revert_plan['updates_to_delete']: + def add_delete(pk: str, sk: str): transaction_items.append({ 'Delete': { - 'TableName': config.provider_table_name, - 'Key': { - 'pk': {'S': update['pk']}, - 'sk': {'S': update['sk']}, - }, + 'TableName': table_name, + 'Key': {'pk': pk, 'sk': sk}, } }) - logger.info('Deleting update record', pk=update['pk'], sk=update['sk']) + + # Add license operations + for license_action in revert_plan.licenses_to_revert: + if license_action.action == 'delete': + add_delete(license_action.pk, license_action.sk) + logger.info('Deleting license record', pk=license_action.pk, sk=license_action.sk) + else: # revert + add_put(license_action.item) + logger.info('Reverting license record', pk=license_action.pk, sk=license_action.sk) + + # Add privilege revert operations + for 
privilege_action in revert_plan.privileges_to_revert: + add_put(privilege_action.item) + logger.info('Reverting privilege record') + + # Add provider revert operation if needed + if revert_plan.provider_to_revert: + add_put(revert_plan.provider_to_revert) + logger.info('Reverting provider record') + + # Add update record deletions + for update in revert_plan.updates_to_delete: + add_delete(update.pk, update.sk) + logger.info('Deleting update record', pk=update.pk, sk=update.sk) + + return transaction_items + + +def _execute_revert_transactions(revert_plan: RevertPlan): + """ + Execute DynamoDB transactions to revert records. + + DynamoDB transactions are limited to 100 items, so we split into batches if needed. + Uses the Table resource for automatic type conversion. + """ + transaction_items = _build_transaction_items(revert_plan) + + if not transaction_items: + logger.warning('No transaction items to execute') + return + + logger.info(f'Executing {len(transaction_items)} transaction items in batches of 100') # Execute transactions in batches of 100 for i in range(0, len(transaction_items), 100): batch = transaction_items[i:i + 100] - config.dynamodb_client.transact_write_items(TransactItems=batch) + # Use Table resource's client for automatic type conversion + config.provider_table.meta.client.transact_write_items(TransactItems=batch) + logger.info(f'Executed batch {i // 100 + 1} with {len(batch)} items') -def _publish_revert_events(revert_plan: dict, compact: str, rollback_reason: str): +def _publish_revert_events(revert_plan: RevertPlan, compact: str, rollback_reason: str): """ Publish revert events for all reverted licenses and privileges. 
""" with EventBatchWriter(config.events_client) as event_writer: # Publish license revert events - for license_action in revert_plan['licenses_to_revert']: + for license_action in revert_plan.licenses_to_revert: config.event_bus_client.publish_license_revert_event( source='org.compactconnect.disaster-recovery', compact=compact, - provider_id=license_action['provider_id'], - jurisdiction=license_action['jurisdiction'], - license_type=license_action['license_type'], + provider_id=license_action.provider_id, + jurisdiction=license_action.jurisdiction, + license_type=license_action.license_type, rollback_reason=rollback_reason, event_batch_writer=event_writer, ) # Publish privilege revert events - for privilege_action in revert_plan['privileges_to_revert']: + for privilege_action in revert_plan.privileges_to_revert: config.event_bus_client.publish_privilege_revert_event( source='org.compactconnect.disaster-recovery', compact=compact, - provider_id=privilege_action['provider_id'], - jurisdiction=privilege_action['jurisdiction'], - license_type=privilege_action['license_type'], + provider_id=privilege_action.provider_id, + jurisdiction=privilege_action.jurisdiction, + license_type=privilege_action.license_type, rollback_reason=rollback_reason, event_batch_writer=event_writer, ) -def _load_results_from_s3(s3_client, bucket_name: str, key: str) -> dict: +def _load_results_from_s3(s3_client, bucket_name: str, key: str) -> RollbackResults: """Load existing results from S3.""" try: response = s3_client.get_object(Bucket=bucket_name, Key=key) - return json.loads(response['Body'].read().decode('utf-8')) + data = json.loads(response['Body'].read().decode('utf-8')) + return RollbackResults.from_dict(data) except s3_client.exceptions.NoSuchKey: # First execution, no existing results - return { - 'skippedProviderDetails': [], - 'failedProviderDetails': [], - 'revertedProviderSummaries': [], - } + return RollbackResults() except Exception as e: logger.error(f'Error loading results 
from S3: {str(e)}') raise -def _write_results_to_s3(s3_client, bucket_name: str, key: str, results: dict): +def _write_results_to_s3(s3_client, bucket_name: str, key: str, results: RollbackResults): """Write results to S3 with server-side encryption.""" try: s3_client.put_object( Bucket=bucket_name, Key=key, - Body=json.dumps(results, indent=2), + Body=json.dumps(results.to_dict(), indent=2), ContentType='application/json', ServerSideEncryption='aws:kms', ) diff --git a/backend/compact-connect/lambdas/python/disaster-recovery/tests/__init__.py b/backend/compact-connect/lambdas/python/disaster-recovery/tests/__init__.py index 5462ba841..0629d880d 100644 --- a/backend/compact-connect/lambdas/python/disaster-recovery/tests/__init__.py +++ b/backend/compact-connect/lambdas/python/disaster-recovery/tests/__init__.py @@ -1,3 +1,4 @@ +import json import os from unittest import TestCase from unittest.mock import MagicMock @@ -14,6 +15,84 @@ def setUpClass(cls): 'DEBUG': 'true', 'ALLOWED_ORIGINS': '["https://example.org"]', 'AWS_DEFAULT_REGION': 'us-east-1', + 'ROLLBACK_RESULTS_BUCKET_NAME': 'rollback-results-bucket', + 'EVENT_BUS_NAME': 'license-data-events', + 'PROVIDER_TABLE_NAME': 'provider-table', + 'RATE_LIMITING_TABLE_NAME': 'rate-limiting-table', + 'SSN_TABLE_NAME': 'ssn-table', + 'COMPACT_CONFIGURATION_TABLE_NAME': 'compact-configuration-table', + 'ENVIRONMENT_NAME': 'test', + 'PROV_FAM_GIV_MID_INDEX_NAME': 'providerFamGivMid', + 'FAM_GIV_INDEX_NAME': 'famGiv', + 'LICENSE_GSI_NAME': 'licenseGSI', + 'PROV_DATE_OF_UPDATE_INDEX_NAME': 'providerDateOfUpdate', + 'SSN_INDEX_NAME': 'ssnIndex', + 'COMPACTS': '["aslp", "octp", "coun"]', + 'JURISDICTIONS': json.dumps( + [ + 'al', + 'ak', + 'az', + 'ar', + 'ca', + 'co', + 'ct', + 'de', + 'dc', + 'fl', + 'ga', + 'hi', + 'id', + 'il', + 'in', + 'ia', + 'ks', + 'ky', + 'la', + 'me', + 'md', + 'ma', + 'mi', + 'mn', + 'ms', + 'mo', + 'mt', + 'ne', + 'nv', + 'nh', + 'nj', + 'nm', + 'ny', + 'nc', + 'nd', + 'oh', + 'ok', + 'or', + 
'pa', + 'pr', + 'ri', + 'sc', + 'sd', + 'tn', + 'tx', + 'ut', + 'vt', + 'va', + 'vi', + 'wa', + 'wv', + 'wi', + 'wy', + ] + ), + 'LICENSE_TYPES': json.dumps( + { + 'aslp': [ + {'name': 'audiologist', 'abbreviation': 'aud'}, + {'name': 'speech-language pathologist', 'abbreviation': 'slp'}, + ], + }, + ), }, ) cls.mock_context = MagicMock(name='MockLambdaContext', spec=LambdaContext) diff --git a/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/__init__.py b/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/__init__.py index 4d0c0808d..9fb27c014 100644 --- a/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/__init__.py +++ b/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/__init__.py @@ -25,6 +25,10 @@ def setUp(self): # noqa: N801 invalid-name self.mock_source_table_arn = f'arn:aws:dynamodb:us-east-1:767398110685:table/{self.mock_source_table_name}' self.build_resources() + from common_test.test_data_generator import TestDataGenerator + + self.test_data_generator = TestDataGenerator + self.addCleanup(self.delete_resources) def build_resources(self): @@ -32,6 +36,17 @@ def build_resources(self): # cleanup and restoration process regardless of the table that is being recovered self.mock_source_table = self.create_mock_table(table_name=self.mock_source_table_name) self.mock_destination_table = self.create_mock_table(table_name=self.mock_destination_table_name) + self.create_provider_table() + self.create_rollback_results_bucket() + self.create_event_bus() + + def create_rollback_results_bucket(self): + self._rollback_results_bucket = boto3.resource('s3').create_bucket( + Bucket=os.environ['ROLLBACK_RESULTS_BUCKET_NAME'] + ) + + def create_event_bus(self): + self._event_bus = boto3.client('events').create_event_bus(Name=os.environ['EVENT_BUS_NAME']) def create_mock_table(self, table_name: str): return boto3.resource('dynamodb').create_table( @@ -44,6 +59,61 @@ def 
create_mock_table(self, table_name: str): BillingMode='PAY_PER_REQUEST', ) + def create_provider_table(self): + self._provider_table = boto3.resource('dynamodb').create_table( + AttributeDefinitions=[ + {'AttributeName': 'pk', 'AttributeType': 'S'}, + {'AttributeName': 'sk', 'AttributeType': 'S'}, + {'AttributeName': 'providerFamGivMid', 'AttributeType': 'S'}, + {'AttributeName': 'providerDateOfUpdate', 'AttributeType': 'S'}, + {'AttributeName': 'licenseGSIPK', 'AttributeType': 'S'}, + {'AttributeName': 'licenseGSISK', 'AttributeType': 'S'}, + {'AttributeName': 'licenseUploadDateGSIPK', 'AttributeType': 'S'}, + {'AttributeName': 'licenseUploadDateGSISK', 'AttributeType': 'S'}, + ], + TableName=os.environ['PROVIDER_TABLE_NAME'], + KeySchema=[{'AttributeName': 'pk', 'KeyType': 'HASH'}, {'AttributeName': 'sk', 'KeyType': 'RANGE'}], + BillingMode='PAY_PER_REQUEST', + GlobalSecondaryIndexes=[ + { + 'IndexName': os.environ['PROV_FAM_GIV_MID_INDEX_NAME'], + 'KeySchema': [ + {'AttributeName': 'sk', 'KeyType': 'HASH'}, + {'AttributeName': 'providerFamGivMid', 'KeyType': 'RANGE'}, + ], + 'Projection': {'ProjectionType': 'ALL'}, + }, + { + 'IndexName': os.environ['PROV_DATE_OF_UPDATE_INDEX_NAME'], + 'KeySchema': [ + {'AttributeName': 'sk', 'KeyType': 'HASH'}, + {'AttributeName': 'providerDateOfUpdate', 'KeyType': 'RANGE'}, + ], + 'Projection': {'ProjectionType': 'ALL'}, + }, + { + 'IndexName': os.environ['LICENSE_GSI_NAME'], + 'KeySchema': [ + {'AttributeName': 'licenseGSIPK', 'KeyType': 'HASH'}, + {'AttributeName': 'licenseGSISK', 'KeyType': 'RANGE'}, + ], + 'Projection': {'ProjectionType': 'ALL'}, + }, + { + 'IndexName': 'licenseUploadDateGSI', + 'KeySchema': [ + {'AttributeName': 'licenseUploadDateGSIPK', 'KeyType': 'HASH'}, + {'AttributeName': 'licenseUploadDateGSISK', 'KeyType': 'RANGE'}, + ], + 'Projection': {'ProjectionType': 'KEYS_ONLY'}, + }, + ], + ) + def delete_resources(self): self.mock_source_table.delete() self.mock_destination_table.delete() + 
self._provider_table.delete() + self._rollback_results_bucket.objects.delete() + self._rollback_results_bucket.delete() + self._event_bus.delete_event_bus(Name=os.environ['EVENT_BUS_NAME']) diff --git a/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py b/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py index f74dd3842..8a0813bc4 100644 --- a/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py +++ b/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py @@ -9,102 +9,464 @@ - Event publishing - S3 result management """ - -import json -import os from datetime import datetime, timedelta -from unittest.mock import MagicMock, Mock, patch +from unittest.mock import Mock from uuid import uuid4 - -import boto3 -import pytest from moto import mock_aws -from handlers.rollback_license_upload import ( - MAX_ROLLBACK_WINDOW_SECONDS, - rollback_license_upload, -) - +from cc_common.config import config +from cc_common.data_model.schema.common import UpdateCategory +from cc_common.data_model.update_tier_enum import UpdateTierEnum +from handlers.rollback_license_upload import rollback_license_upload +from . 
import TstFunction @mock_aws -class TestRollbackLicenseUpload: +class TestRollbackLicenseUpload(TstFunction): """Test class for license upload rollback handler.""" def setup_method(self): """Set up test fixtures before each test method.""" - # Set up environment variables - os.environ['PROVIDER_TABLE_NAME'] = 'test-provider-table' - os.environ['ROLLBACK_RESULTS_BUCKET_NAME'] = 'test-rollback-results-bucket' - os.environ['EVENT_BUS_NAME'] = 'test-event-bus' - os.environ['AWS_DEFAULT_REGION'] = 'us-east-1' - - # Create mock resources - self.dynamodb = boto3.resource('dynamodb') - self.s3_client = boto3.client('s3') - - # Create provider table with GSI - self.provider_table = self.dynamodb.create_table( - TableName='test-provider-table', - KeySchema=[ - {'AttributeName': 'pk', 'KeyType': 'HASH'}, - {'AttributeName': 'sk', 'KeyType': 'RANGE'}, - ], - AttributeDefinitions=[ - {'AttributeName': 'pk', 'AttributeType': 'S'}, - {'AttributeName': 'sk', 'AttributeType': 'S'}, - {'AttributeName': 'licenseUploadDateGSIPK', 'AttributeType': 'S'}, - {'AttributeName': 'licenseUploadDateGSISK', 'AttributeType': 'S'}, - ], - BillingMode='PAY_PER_REQUEST', - GlobalSecondaryIndexes=[ - { - 'IndexName': 'licenseUploadDateGSI', - 'KeySchema': [ - {'AttributeName': 'licenseUploadDateGSIPK', 'KeyType': 'HASH'}, - {'AttributeName': 'licenseUploadDateGSISK', 'KeyType': 'RANGE'}, - ], - 'Projection': {'ProjectionType': 'KEYS_ONLY'}, - }, - ], - ) - - # Create S3 bucket - self.s3_client.create_bucket(Bucket='test-rollback-results-bucket') - # Create sample test data self.compact = 'aslp' self.jurisdiction = 'oh' self.provider_id = str(uuid4()) - self.start_datetime = datetime.now() - timedelta(days=1) + self.upload_datetime = datetime.now() - timedelta(days=1) + self.start_datetime = self.upload_datetime - timedelta(hours=1) self.end_datetime = datetime.now() - def teardown_method(self): - """Clean up after each test method.""" - # Clean up environment variables - for key in 
['PROVIDER_TABLE_NAME', 'ROLLBACK_RESULTS_BUCKET_NAME', 'EVENT_BUS_NAME']: - if key in os.environ: - del os.environ[key] - - def test_rollback_validates_table_name_guard_rail(self): - """Test that rollback validates the table name confirmation.""" + # Helper methods for setting up test scenarios + def _when_provider_had_license_created_from_upload(self, upload_datetime: datetime = None): + """ + Set up a scenario where a provider had a license created during the upload window. + Returns the created license data. + """ + if upload_datetime is None: + upload_datetime = self.upload_datetime + + return self.test_data_generator.put_default_license_record_in_provider_table({ + 'providerId': self.provider_id, + 'compact': self.compact, + 'jurisdiction': self.jurisdiction, + 'uploadDate': upload_datetime, + 'dateOfUpdate': upload_datetime, + }) + + def _when_provider_had_license_updated_from_upload(self, upload_datetime: datetime = None): + """ + Set up a scenario where a provider had an existing license updated during the upload window. + Returns the license and its update record. 
+ """ + if upload_datetime is None: + upload_datetime = self.upload_datetime + + # Create original license before upload window + original_license = self.test_data_generator.put_default_license_record_in_provider_table({ + 'providerId': self.provider_id, + 'compact': self.compact, + 'jurisdiction': self.jurisdiction, + 'dateOfUpdate': self.start_datetime - timedelta(days=30), + 'dateOfExpiration': (self.start_datetime - timedelta(days=30)).date(), + }) + + # Create update record within upload window + license_update = self.test_data_generator.put_default_license_update_record_in_provider_table({ + 'providerId': self.provider_id, + 'compact': self.compact, + 'jurisdiction': self.jurisdiction, + 'licenseType': original_license.licenseType, + 'updateType': UpdateCategory.RENEWAL, + 'createDate': upload_datetime, + 'effectiveDate': upload_datetime, + 'uploadDate': upload_datetime, + 'previous': { + 'dateOfExpiration': original_license.dateOfExpiration, + }, + 'updatedValues': { + 'dateOfExpiration': (upload_datetime + timedelta(days=365)).date(), + }, + }) + + # Update the license record to reflect the new expiration + updated_license = self.test_data_generator.put_default_license_record_in_provider_table({ + 'providerId': self.provider_id, + 'compact': self.compact, + 'jurisdiction': self.jurisdiction, + 'dateOfUpdate': upload_datetime, + 'dateOfExpiration': (upload_datetime + timedelta(days=365)).date(), + 'uploadDate': upload_datetime, + }) + + return updated_license, license_update + + def _when_provider_had_privilege_deactivated_from_upload(self, upload_datetime: datetime = None): + """ + Set up a scenario where a provider's privilege was deactivated due to license deactivation during upload. + Returns the privilege and its update record. 
+ """ + if upload_datetime is None: + upload_datetime = self.upload_datetime + + # Create privilege that was active before upload + privilege = self.test_data_generator.put_default_privilege_record_in_provider_table({ + 'providerId': self.provider_id, + 'compact': self.compact, + 'jurisdiction': self.jurisdiction, + 'dateOfUpdate': self.start_datetime - timedelta(days=30), + }) + + # Create deactivation update record + privilege_update = self.test_data_generator.put_default_privilege_update_record_in_provider_table({ + 'providerId': self.provider_id, + 'compact': self.compact, + 'jurisdiction': self.jurisdiction, + 'licenseType': privilege.licenseType, + 'updateType': UpdateCategory.LICENSE_DEACTIVATION, + 'createDate': upload_datetime, + 'effectiveDate': upload_datetime, + }) + + return privilege, privilege_update + + def _when_provider_had_privilege_update_after_upload(self, after_upload_datetime: datetime = None): + """ + Set up a scenario where a provider had a non-upload-related privilege update AFTER the upload window. + This makes them ineligible for automatic rollback. + Returns the privilege and its update record. 
+ """ + if after_upload_datetime is None: + after_upload_datetime = self.end_datetime + timedelta(hours=1) + + privilege = self.test_data_generator.put_default_privilege_record_in_provider_table({ + 'providerId': self.provider_id, + 'compact': self.compact, + 'jurisdiction': self.jurisdiction, + }) + + # Create a non-upload-related update (e.g., renewal) after the window + privilege_update = self.test_data_generator.put_default_privilege_update_record_in_provider_table({ + 'providerId': self.provider_id, + 'compact': self.compact, + 'jurisdiction': self.jurisdiction, + 'licenseType': privilege.licenseType, + 'updateType': UpdateCategory.RENEWAL, # Not LICENSE_DEACTIVATION + 'createDate': after_upload_datetime, + 'effectiveDate': after_upload_datetime, + }) + + return privilege, privilege_update + + def _when_provider_had_license_update_after_upload(self, after_upload_datetime: datetime = None): + """ + Set up a scenario where a provider had a non-upload-related license update AFTER the upload window. + This makes them ineligible for automatic rollback. + Returns the license and its update record. 
+ """ + if after_upload_datetime is None: + after_upload_datetime = self.end_datetime + timedelta(hours=1) + + license_record = self.test_data_generator.put_default_license_record_in_provider_table({ + 'providerId': self.provider_id, + 'compact': self.compact, + 'jurisdiction': self.jurisdiction, + }) + + # Create a non-upload-related update (e.g., encumbrance) after the window + license_update = self.test_data_generator.put_default_license_update_record_in_provider_table({ + 'providerId': self.provider_id, + 'compact': self.compact, + 'jurisdiction': self.jurisdiction, + 'licenseType': license_record.licenseType, + 'updateType': UpdateCategory.ENCUMBRANCE, # Not an upload-related category + 'createDate': after_upload_datetime, + 'effectiveDate': after_upload_datetime, + }) + + return license_record, license_update + + def _when_provider_top_level_record_needs_reverted(self, before_upload_datetime: datetime = None): + """ + Set up a scenario where the provider's top-level record needs to be reverted. + Returns the provider record. 
+ """ + if before_upload_datetime is None: + before_upload_datetime = self.start_datetime - timedelta(days=30) + + # Create provider record with old values + provider = self.test_data_generator.put_default_provider_record_in_provider_table({ + 'providerId': self.provider_id, + 'compact': self.compact, + 'givenName': 'OldFirstName', + 'familyName': 'OldLastName', + 'dateOfUpdate': before_upload_datetime, + }) + + # Simulate that the provider record was updated during upload + updated_provider = self.test_data_generator.put_default_provider_record_in_provider_table({ + 'providerId': self.provider_id, + 'compact': self.compact, + 'givenName': 'NewFirstName', + 'familyName': 'NewLastName', + 'dateOfUpdate': self.upload_datetime, + }) + + return provider, updated_provider + + # Integration tests for rollback scenarios + def test_provider_top_level_record_reset_to_prior_values_when_upload_reverted(self): + """Test that provider top-level record is reset to values before upload.""" + # Setup: Provider record was updated during upload + old_provider, new_provider = self._when_provider_top_level_record_needs_reverted() + + # Execute: Perform rollback event = { 'compact': self.compact, 'jurisdiction': self.jurisdiction, 'startDateTime': self.start_datetime.isoformat(), 'endDateTime': self.end_datetime.isoformat(), 'rollbackReason': 'Test rollback', - 'tableNameRollbackConfirmation': 'wrong-table-name', 'executionId': 'test-execution-123', 'providersProcessed': 0, } + + result = rollback_license_upload(event, Mock()) + + # Assert: Rollback completed successfully + self.assertEqual(result['rollbackStatus'], 'COMPLETE') + self.assertEqual(result['providersReverted'], 1) + + # Verify: Provider record has been reset to old values + provider_records = config.data_client.get_provider_user_records( + compact=self.compact, + provider_id=self.provider_id, + ) + provider_record = provider_records.get_provider_record() + self.assertEqual(provider_record.givenName, 
old_provider.givenName) + self.assertEqual(provider_record.familyName, old_provider.familyName) + + def test_provider_license_record_reset_to_prior_values_when_upload_reverted(self): + """Test that license record is reset to values before upload.""" + # Setup: License was updated during upload (e.g., renewed) + updated_license, license_update = self._when_provider_had_license_updated_from_upload() + + # Store the original expiration date from the update's previous values + original_expiration = license_update.previous['dateOfExpiration'] + + # Execute: Perform rollback + event = { + 'compact': self.compact, + 'jurisdiction': self.jurisdiction, + 'startDateTime': self.start_datetime.isoformat(), + 'endDateTime': self.end_datetime.isoformat(), + 'rollbackReason': 'Test rollback', + 'executionId': 'test-execution-123', + 'providersProcessed': 0, + } + + result = rollback_license_upload(event, Mock()) + + # Assert: Rollback completed successfully + self.assertEqual(result['rollbackStatus'], 'COMPLETE') + self.assertEqual(result['providersReverted'], 1) + + # Verify: License record has been reset to original values + provider_records = config.data_client.get_provider_user_records( + compact=self.compact, + provider_id=self.provider_id, + include_update_tier=UpdateTierEnum.TIER_THREE, + ) + licenses = provider_records.get_license_records() + self.assertEqual(len(licenses), 1) + license_record = licenses[0] + self.assertEqual(license_record.dateOfExpiration, original_expiration) + + # Verify: Update record has been deleted + license_updates = provider_records.get_all_license_update_records() + self.assertEqual(len(license_updates), 0, "License update records should be deleted") + + def test_provider_privilege_record_reactivated_when_upload_reverted(self): + """Test that privilege is reactivated when license deactivation is reverted.""" + # Setup: Privilege was deactivated during upload due to license deactivation + privilege, privilege_update = 
self._when_provider_had_privilege_deactivated_from_upload() + + # Execute: Perform rollback + event = { + 'compact': self.compact, + 'jurisdiction': self.jurisdiction, + 'startDateTime': self.start_datetime.isoformat(), + 'endDateTime': self.end_datetime.isoformat(), + 'rollbackReason': 'Test rollback', + 'executionId': 'test-execution-123', + 'providersProcessed': 0, + } + + result = rollback_license_upload(event, Mock()) + + # Assert: Rollback completed successfully + self.assertEqual(result['rollbackStatus'], 'COMPLETE') + self.assertEqual(result['providersReverted'], 1) + + # Verify: Privilege has been reactivated (status should be 'active') + provider_records = config.data_client.get_provider_user_records( + compact=self.compact, + provider_id=self.provider_id, + include_update_tier=UpdateTierEnum.TIER_THREE, + ) + privileges = provider_records.get_privilege_records() + self.assertEqual(len(privileges), 1) + privilege_record = privileges[0] + self.assertEqual(privilege_record.status, 'active', "Privilege should be reactivated") + + # Verify: Privilege update record has been deleted + privilege_updates = provider_records.get_all_privilege_update_records() + self.assertEqual(len(privilege_updates), 0, "Privilege update records should be deleted") + + def test_provider_license_updates_within_time_period_removed_when_upload_reverted(self): + """Test that license update records within the time window are deleted.""" + # Setup: License was updated during upload + updated_license, license_update = self._when_provider_had_license_updated_from_upload() + + # Verify update record exists before rollback + provider_records_before = config.data_client.get_provider_user_records( + compact=self.compact, + provider_id=self.provider_id, + include_update_tier=UpdateTierEnum.TIER_THREE, + ) + license_updates_before = provider_records_before.get_all_license_update_records() + self.assertGreater(len(license_updates_before), 0, "Should have update records before rollback") + + # 
Execute: Perform rollback + event = { + 'compact': self.compact, + 'jurisdiction': self.jurisdiction, + 'startDateTime': self.start_datetime.isoformat(), + 'endDateTime': self.end_datetime.isoformat(), + 'rollbackReason': 'Test rollback', + 'executionId': 'test-execution-123', + 'providersProcessed': 0, + } + + result = rollback_license_upload(event, Mock()) + + # Assert: Rollback completed successfully + self.assertEqual(result['rollbackStatus'], 'COMPLETE') + + # Verify: All license update records within time window have been deleted + provider_records_after = config.data_client.get_provider_user_records( + compact=self.compact, + provider_id=self.provider_id, + include_update_tier=UpdateTierEnum.TIER_THREE, + ) + license_updates_after = provider_records_after.get_all_license_update_records() + self.assertEqual(len(license_updates_after), 0, "License update records should be deleted") + + def test_provider_privilege_deactivation_update_within_time_period_removed_when_upload_reverted(self): + """Test that privilege deactivation update records within the time window are deleted.""" + # Setup: Privilege was deactivated during upload + privilege, privilege_update = self._when_provider_had_privilege_deactivated_from_upload() + + # Verify update record exists before rollback + provider_records_before = config.data_client.get_provider_user_records( + compact=self.compact, + provider_id=self.provider_id, + include_update_tier=UpdateTierEnum.TIER_THREE, + ) + privilege_updates_before = provider_records_before.get_all_privilege_update_records() + self.assertGreater(len(privilege_updates_before), 0, "Should have update records before rollback") + + # Execute: Perform rollback + event = { + 'compact': self.compact, + 'jurisdiction': self.jurisdiction, + 'startDateTime': self.start_datetime.isoformat(), + 'endDateTime': self.end_datetime.isoformat(), + 'rollbackReason': 'Test rollback', + 'executionId': 'test-execution-123', + 'providersProcessed': 0, + } + + result = 
rollback_license_upload(event, Mock()) + + # Assert: Rollback completed successfully + self.assertEqual(result['rollbackStatus'], 'COMPLETE') + + # Verify: All privilege update records within time window have been deleted + provider_records_after = config.data_client.get_provider_user_records( + compact=self.compact, + provider_id=self.provider_id, + include_update_tier=UpdateTierEnum.TIER_THREE, + ) + privilege_updates_after = provider_records_after.get_all_privilege_update_records() + self.assertEqual(len(privilege_updates_after), 0, "Privilege update records should be deleted") + + def test_provider_skipped_if_license_updates_detected_after_time_period_when_upload_reverted(self): + """Test that provider is skipped if non-upload-related license updates exist after time window.""" + # Setup: Provider had license update after upload window + license_record, license_update = self._when_provider_had_license_update_after_upload() + + # Execute: Perform rollback + event = { + 'compact': self.compact, + 'jurisdiction': self.jurisdiction, + 'startDateTime': self.start_datetime.isoformat(), + 'endDateTime': self.end_datetime.isoformat(), + 'rollbackReason': 'Test rollback', + 'executionId': 'test-execution-123', + 'providersProcessed': 0, + } + + result = rollback_license_upload(event, Mock()) + + # Assert: Rollback completed but provider was skipped + self.assertEqual(result['rollbackStatus'], 'COMPLETE') + self.assertEqual(result['providersSkipped'], 1) + self.assertEqual(result['providersReverted'], 0) + + # Verify: License record and update still exist (not rolled back) + provider_records = config.data_client.get_provider_user_records( + compact=self.compact, + provider_id=self.provider_id, + include_update_tier=UpdateTierEnum.TIER_THREE, + ) + licenses = provider_records.get_license_records() + self.assertEqual(len(licenses), 1, "License should still exist") + license_updates = provider_records.get_all_license_update_records() + self.assertEqual(len(license_updates), 
1, "License update should still exist") + + def test_provider_skipped_if_privilege_updates_detected_after_time_period_when_upload_reverted(self): + """Test that provider is skipped if non-upload-related privilege updates exist after time window.""" + # Setup: Provider had privilege update after upload window + privilege, privilege_update = self._when_provider_had_privilege_update_after_upload() + + # Execute: Perform rollback + event = { + 'compact': self.compact, + 'jurisdiction': self.jurisdiction, + 'startDateTime': self.start_datetime.isoformat(), + 'endDateTime': self.end_datetime.isoformat(), + 'rollbackReason': 'Test rollback', + 'executionId': 'test-execution-123', + 'providersProcessed': 0, + } + + result = rollback_license_upload(event, Mock()) + + # Assert: Rollback completed but provider was skipped + self.assertEqual(result['rollbackStatus'], 'COMPLETE') + self.assertEqual(result['providersSkipped'], 1) + self.assertEqual(result['providersReverted'], 0) + + # Verify: Privilege record and update still exist (not rolled back) + provider_records = config.data_client.get_provider_user_records( + compact=self.compact, + provider_id=self.provider_id, + include_update_tier=UpdateTierEnum.TIER_THREE, + ) + privileges = provider_records.get_privilege_records() + self.assertEqual(len(privileges), 1, "Privilege should still exist") + privilege_updates = provider_records.get_all_privilege_update_records() + self.assertEqual(len(privilege_updates), 1, "Privilege update should still exist") - context = Mock() - - result = rollback_license_upload(event, context) - - assert result['rollbackStatus'] == 'FAILED' - assert 'Invalid table name specified' in result['error'] - + # Validation tests def test_rollback_validates_datetime_format(self): """Test that rollback validates datetime format.""" event = { @@ -113,17 +475,14 @@ def test_rollback_validates_datetime_format(self): 'startDateTime': 'invalid-datetime', 'endDateTime': self.end_datetime.isoformat(), 
'rollbackReason': 'Test rollback', - 'tableNameRollbackConfirmation': 'test-provider-table', 'executionId': 'test-execution-123', 'providersProcessed': 0, } - context = Mock() + result = rollback_license_upload(event, Mock()) - result = rollback_license_upload(event, context) - - assert result['rollbackStatus'] == 'FAILED' - assert 'Invalid datetime format' in result['error'] + self.assertEqual(result['rollbackStatus'], 'FAILED') + self.assertIn('Invalid datetime format', result['error']) def test_rollback_validates_time_window_order(self): """Test that rollback validates start time is before end time.""" @@ -133,17 +492,14 @@ def test_rollback_validates_time_window_order(self): 'startDateTime': self.end_datetime.isoformat(), 'endDateTime': self.start_datetime.isoformat(), 'rollbackReason': 'Test rollback', - 'tableNameRollbackConfirmation': 'test-provider-table', 'executionId': 'test-execution-123', 'providersProcessed': 0, } - context = Mock() - - result = rollback_license_upload(event, context) + result = rollback_license_upload(event, Mock()) - assert result['rollbackStatus'] == 'FAILED' - assert 'Start time must be before end time' in result['error'] + self.assertEqual(result['rollbackStatus'], 'FAILED') + self.assertIn('Start time must be before end time', result['error']) def test_rollback_validates_maximum_time_window(self): """Test that rollback validates maximum time window.""" @@ -156,97 +512,12 @@ def test_rollback_validates_maximum_time_window(self): 'startDateTime': start.isoformat(), 'endDateTime': end.isoformat(), 'rollbackReason': 'Test rollback', - 'tableNameRollbackConfirmation': 'test-provider-table', 'executionId': 'test-execution-123', 'providersProcessed': 0, } - context = Mock() - - result = rollback_license_upload(event, context) - - assert result['rollbackStatus'] == 'FAILED' - assert 'cannot exceed' in result['error'] - - @patch('handlers.rollback_license_upload.config') - def test_rollback_loads_existing_results_on_continuation(self, 
mock_config): - """Test that rollback loads existing results from S3 on continuation.""" - # Set up existing results in S3 - existing_results = { - 'skippedProviderDetails': [{'providerId': 'test-123', 'reason': 'test reason'}], - 'failedProviderDetails': [], - 'revertedProviderSummaries': [], - } - execution_id = 'test-execution-123' - self.s3_client.put_object( - Bucket='test-rollback-results-bucket', - Key=f'{execution_id}/results.json', - Body=json.dumps(existing_results), - ) - - # Mock config - mock_config.provider_table_name = 'test-provider-table' - mock_config.provider_table = self.provider_table - - event = { - 'compact': self.compact, - 'jurisdiction': self.jurisdiction, - 'startDateTime': self.start_datetime.isoformat(), - 'endDateTime': self.end_datetime.isoformat(), - 'rollbackReason': 'Test rollback', - 'tableNameRollbackConfirmation': 'test-provider-table', - 'executionId': execution_id, - 'providersProcessed': 1, # Continuation - } + result = rollback_license_upload(event, Mock()) - context = Mock() - - # Note: This test will need to be expanded to mock the full flow - # For now, it demonstrates the test structure - - def test_query_gsi_for_affected_providers_handles_multiple_months(self): - """Test that GSI query handles time windows spanning multiple months.""" - # This test would verify that the query correctly handles - # time windows that span multiple months by querying each month's - # partition separately - pass - - def test_process_provider_checks_eligibility(self): - """Test that provider processing checks rollback eligibility.""" - # This test would verify that providers with non-upload-related - # updates are correctly identified as ineligible - pass - - def test_process_provider_determines_correct_revert_plan(self): - """Test that provider processing determines the correct revert plan.""" - # This test would verify that the revert plan correctly identifies: - # - Licenses to delete (created during window) - # - Licenses to revert 
(existed before window) - # - Privileges to revert - # - Update records to delete - pass - - def test_execute_revert_transactions_handles_100_item_limit(self): - """Test that transaction execution handles DynamoDB's 100 item limit.""" - # This test would verify that transactions with >100 items - # are correctly split into multiple transactions - pass - - def test_publish_revert_events_uses_batch_writer(self): - """Test that event publishing uses EventBatchWriter for efficiency.""" - # This test would verify that events are published in batches - pass - - def test_s3_results_written_with_encryption(self): - """Test that S3 results are written with server-side encryption.""" - # This test would verify that S3 writes use server-side encryption - pass - - -# Additional test classes could be added for: -# - TestRollbackEligibilityValidation -# - TestRevertPlanDetermination -# - TestTransactionExecution -# - TestEventPublishing -# - TestS3ResultsManagement + self.assertEqual(result['rollbackStatus'], 'FAILED') + self.assertIn('cannot exceed', result['error']) From a35c6299c258f07e61cdd1817c242147c6790335 Mon Sep 17 00:00:00 2001 From: Landon Shumway Date: Wed, 5 Nov 2025 00:36:32 -0600 Subject: [PATCH 13/81] Track original upload date for original license record --- .../provider-data-v1/handlers/ingest.py | 38 ++++++++++++------- 1 file changed, 25 insertions(+), 13 deletions(-) diff --git a/backend/compact-connect/lambdas/python/provider-data-v1/handlers/ingest.py b/backend/compact-connect/lambdas/python/provider-data-v1/handlers/ingest.py index 4acc78cc6..e1018c22e 100644 --- a/backend/compact-connect/lambdas/python/provider-data-v1/handlers/ingest.py +++ b/backend/compact-connect/lambdas/python/provider-data-v1/handlers/ingest.py @@ -1,3 +1,4 @@ +from copy import deepcopy import json from boto3.dynamodb.types import TypeSerializer @@ -5,6 +6,7 @@ from cc_common.data_model.provider_record_util import ProviderRecordType, ProviderRecordUtility from 
cc_common.data_model.schema import LicenseRecordSchema from cc_common.data_model.schema.common import ActiveInactiveStatus, UpdateCategory +from cc_common.data_model.schema.license import LicenseData from cc_common.data_model.schema.license.ingest import LicenseIngestSchema from cc_common.data_model.schema.license.record import LicenseUpdateRecordSchema from cc_common.data_model.schema.provider import ProviderData @@ -105,9 +107,6 @@ def ingest_license_message(message: dict): # Start preparing our db transactions data_events = [] - # Set the uploadDate to the current time for GSI tracking - license_ingest_message['uploadDate'] = config.current_standard_datetime - license_record_schema = LicenseRecordSchema() dumped_license = license_record_schema.dumps(license_ingest_message) @@ -116,15 +115,7 @@ def ingest_license_message(message: dict): # We fully JSON serialize then load again so that we have a completely independent copy of the data posted_license_record = license_record_schema.load(json.loads(dumped_license)) - dynamo_transactions = [ - # Put the posted license - { - 'Put': { - 'TableName': config.provider_table_name, - 'Item': TypeSerializer().serialize(json.loads(dumped_license))['M'], - }, - }, - ] + dynamo_transactions = [] home_jurisdiction = None try: @@ -172,6 +163,26 @@ def ingest_license_message(message: dict): dynamo_transactions=dynamo_transactions, data_events=data_events, ) + # now grab the uploadDate from the existing record if available and put it in the posted_license + # for the license upload date GSI + if existing_license.get('uploadDate'): + posted_license_record['uploadDate'] = existing_license.get('uploadDate') + else: + # If this is the first time creating the license record, + # set the uploadDate to the current time for license upload date GSI tracking + posted_license_record['uploadDate'] = config.current_standard_datetime + + # write the record to the table to reflect the latest values from the upload + license_data = 
LicenseData.create_new(deepcopy(posted_license_record)) + dynamo_transactions.append( + { + 'Put': { + 'TableName': config.provider_table_name, + 'Item': TypeSerializer().serialize(license_data.serialize_to_database_record())['M'], + } + } + ) + licenses_organized.setdefault(posted_license_record['jurisdiction'], {}) licenses_organized[posted_license_record['jurisdiction']][posted_license_record['licenseType']] = ( posted_license_record @@ -222,7 +233,8 @@ def _process_license_update(*, existing_license: dict, new_license: dict, dynamo :param list dynamo_transactions: The dynamodb transaction array to append records to """ # Remove fields that are calculated at runtime, not stored in the database - dynamic_keys = {'dateOfUpdate', 'status'} + # uploadDate is metadata tracking when the license was first uploaded, not part of the license data + dynamic_keys = {'dateOfUpdate', 'status', 'uploadDate'} updated_values = { key: value for key, value in new_license.items() From 15c9b9eb3893ac90868c3b6498b83ff7bb0ecf89 Mon Sep 17 00:00:00 2001 From: Landon Shumway Date: Wed, 5 Nov 2025 00:59:44 -0600 Subject: [PATCH 14/81] WIP - get license rollback test to pass! 
--- .../data_model/provider_record_util.py | 15 + .../common/common_test/test_data_generator.py | 14 + .../handlers/rollback_license_upload.py | 512 +++++++++++------- .../disaster-recovery/tests/__init__.py | 6 + .../tests/function/__init__.py | 7 +- .../function/test_rollback_license_upload.py | 157 ++++-- 6 files changed, 468 insertions(+), 243 deletions(-) diff --git a/backend/compact-connect/lambdas/python/common/cc_common/data_model/provider_record_util.py b/backend/compact-connect/lambdas/python/common/cc_common/data_model/provider_record_util.py index 8203f9e92..c60e1ca29 100644 --- a/backend/compact-connect/lambdas/python/common/cc_common/data_model/provider_record_util.py +++ b/backend/compact-connect/lambdas/python/common/cc_common/data_model/provider_record_util.py @@ -788,6 +788,21 @@ def get_all_privilege_update_records( if filter_condition is None or filter_condition(record) ] + def get_all_provider_update_records( + self, + filter_condition: Callable[[ProviderUpdateData], bool] | None = None, + ) -> list[ProviderUpdateData]: + """ + Get all provider update records for this provider. 
+ :param filter_condition: An optional filter to apply to the update records + :return: List of ProviderUpdateData records + """ + return [ + record + for record in self._provider_update_records + if filter_condition is None or filter_condition(record) + ] + def get_update_records_for_license( self, jurisdiction: str, diff --git a/backend/compact-connect/lambdas/python/common/common_test/test_data_generator.py b/backend/compact-connect/lambdas/python/common/common_test/test_data_generator.py index 2fdaa6274..5fdc0629e 100644 --- a/backend/compact-connect/lambdas/python/common/common_test/test_data_generator.py +++ b/backend/compact-connect/lambdas/python/common/common_test/test_data_generator.py @@ -328,6 +328,20 @@ def generate_default_license_update( return LicenseUpdateData.create_new(license_update) + @staticmethod + def put_default_license_update_record_in_provider_table( + value_overrides: dict | None = None, + ) -> LicenseUpdateData: + """ + Creates a default license update and stores it in the provider table. 
+ """ + update_data = TestDataGenerator.generate_default_license_update(value_overrides) + update_record = update_data.serialize_to_database_record() + + TestDataGenerator.store_record_in_provider_table(update_record) + + return update_data + @staticmethod def generate_default_privilege(value_overrides: dict | None = None) -> PrivilegeData: """Generate a default privilege""" diff --git a/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py b/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py index 9379f83a7..3dd7b2928 100644 --- a/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py +++ b/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py @@ -38,43 +38,6 @@ class IneligibleUpdate: create_date: str -@dataclass -class LicenseRevertAction: - """Action to take for a license record.""" - action: str # 'delete' or 'revert' - pk: str - sk: str - item: dict | None = None - provider_id: str = '' - jurisdiction: str = '' - license_type: str = '' - - -@dataclass -class PrivilegeRevertAction: - """Action to take for a privilege record.""" - item: dict - provider_id: str - jurisdiction: str - license_type: str - - -@dataclass -class UpdateDeleteAction: - """Action to delete an update record.""" - pk: str - sk: str - - -@dataclass -class RevertPlan: - """Plan for reverting a provider's records.""" - licenses_to_revert: list[LicenseRevertAction] = field(default_factory=list) - privileges_to_revert: list[PrivilegeRevertAction] = field(default_factory=list) - provider_to_revert: dict | None = None - updates_to_delete: list[UpdateDeleteAction] = field(default_factory=list) - - @dataclass class ProviderSkippedDetails: """Details for a provider that was skipped.""" @@ -90,13 +53,29 @@ class ProviderFailedDetails: error: str +@dataclass +class RevertedLicense: + """Details of a reverted license for event publishing.""" + 
provider_id: str + jurisdiction: str + license_type: str + + +@dataclass +class RevertedPrivilege: + """Details of a reverted privilege for event publishing.""" + provider_id: str + jurisdiction: str + license_type: str + + @dataclass class ProviderRevertedSummary: """Summary for a provider that was successfully reverted.""" provider_id: str - licenses_reverted: int - privileges_reverted: int - updates_deleted: int + licenses_reverted: list[RevertedLicense] = field(default_factory=list) + privileges_reverted: list[RevertedPrivilege] = field(default_factory=list) + updates_deleted: int = 0 @dataclass @@ -127,8 +106,22 @@ def to_dict(self) -> dict: 'revertedProviderSummaries': [ { 'providerId': summary.provider_id, - 'licensesReverted': summary.licenses_reverted, - 'privilegesReverted': summary.privileges_reverted, + 'licensesReverted': [ + { + 'providerId': license.provider_id, + 'jurisdiction': license.jurisdiction, + 'licenseType': license.license_type, + } + for license in summary.licenses_reverted + ], + 'privilegesReverted': [ + { + 'providerId': privilege.provider_id, + 'jurisdiction': privilege.jurisdiction, + 'licenseType': privilege.license_type, + } + for privilege in summary.privileges_reverted + ], 'updatesDeleted': summary.updates_deleted, } for summary in self.reverted_provider_summaries @@ -157,9 +150,23 @@ def from_dict(cls, data: dict) -> 'RollbackResults': reverted_provider_summaries=[ ProviderRevertedSummary( provider_id=summary['providerId'], - licenses_reverted=summary['licensesReverted'], - privileges_reverted=summary['privilegesReverted'], - updates_deleted=summary['updatesDeleted'], + licenses_reverted=[ + RevertedLicense( + provider_id=license['providerId'], + jurisdiction=license['jurisdiction'], + license_type=license['licenseType'], + ) + for license in summary.get('licensesReverted', []) + ], + privileges_reverted=[ + RevertedPrivilege( + provider_id=privilege['providerId'], + jurisdiction=privilege['jurisdiction'], + 
license_type=privilege['licenseType'], + ) + for privilege in summary.get('privilegesReverted', []) + ], + updates_deleted=summary.get('updatesDeleted', 0), ) for summary in data.get('revertedProviderSummaries', []) ], @@ -440,73 +447,26 @@ def _process_provider_rollback( include_update_tier=UpdateTierEnum.TIER_THREE, ) - # Check eligibility for rollback - # A provider is ineligible if they have any updates after start_datetime that are NOT license-upload related - license_updates = provider_records.get_all_license_update_records() - privilege_updates = provider_records.get_all_privilege_update_records() - - ineligible_updates: list[IneligibleUpdate] = [] - - # Check license updates - for update in license_updates: - if update.createDate >= start_datetime: - if update.updateType not in LICENSE_UPLOAD_UPDATE_CATEGORIES: - ineligible_updates.append( - IneligibleUpdate( - type='licenseUpdate', - update_type=update.updateType, - create_date=update.createDate.isoformat(), - ) - ) - - # Check privilege updates - for update in privilege_updates: - if update.createDate >= start_datetime: - if update.updateType != PRIVILEGE_LICENSE_DEACTIVATION_CATEGORY: - ineligible_updates.append( - IneligibleUpdate( - type='privilegeUpdate', - update_type=update.updateType, - create_date=update.createDate.isoformat(), - ) - ) - - # If ineligible updates found, skip this provider - if ineligible_updates: - logger.info( - 'Provider not eligible for automatic rollback', - provider_id=provider_id, - reason='Provider has non-upload-related updates after rollback start time', - ) - return ProviderSkippedDetails( - provider_id=provider_id, - reason='Provider has non-upload-related updates after rollback start time', - ineligible_updates=[ - { - 'type': update.type, - 'updateType': update.update_type, - 'createDate': update.create_date, - } - for update in ineligible_updates - ], - ) - - # Determine pre-rollback state and build transactions - revert_plan = 
_determine_revert_plan(provider_records, start_datetime, end_datetime, compact, jurisdiction) - - # Execute the revert transactions - _execute_revert_transactions(revert_plan) + # Build transactions and check eligibility in a single pass + # If ineligible updates are found, this will return a ProviderSkippedDetails + result = _build_and_execute_revert_transactions( + provider_records=provider_records, + start_datetime=start_datetime, + end_datetime=end_datetime, + compact=compact, + jurisdiction=jurisdiction, + provider_id=provider_id, + ) + + # If provider was skipped due to ineligibility, return early + if isinstance(result, ProviderSkippedDetails): + return result - # Publish events - _publish_revert_events(revert_plan, compact, rollback_reason) + # Publish events for successful rollback + _publish_revert_events(result, compact, rollback_reason) logger.info('Provider rollback successful', provider_id=provider_id) - return ProviderRevertedSummary( - provider_id=provider_id, - licenses_reverted=len(revert_plan.licenses_to_revert), - privileges_reverted=len(revert_plan.privileges_to_revert), - updates_deleted=len(revert_plan.updates_to_delete), - ) + return result except Exception as e: logger.error(f'Error processing provider rollback: {str(e)}', provider_id=provider_id, exc_info=True) @@ -516,44 +476,35 @@ def _process_provider_rollback( ) -def _determine_revert_plan( +def _build_and_execute_revert_transactions( provider_records: ProviderUserRecords, start_datetime: datetime, end_datetime: datetime, compact: str, jurisdiction: str, -) -> RevertPlan: - """ - Determine what changes need to be made to revert the provider to pre-rollback state. 
- - Returns a RevertPlan with: - - licenses_to_revert: List of license records to revert/delete - - privileges_to_revert: List of privilege records to revert - - provider_to_revert: Provider record to revert (if needed) - - updates_to_delete: List of update records to delete - """ - # This is a complex function that needs to be implemented - # For now, return a skeleton structure - plan = RevertPlan() - - # TODO: Implement full logic to determine revert plan - # This would involve: - # 1. Finding all licenses/privileges affected in the time window - # 2. For each, determining the state before the window - # 3. Identifying which update records need to be deleted - # 4. Determining if the provider record needs to be reverted - - return plan - - -def _build_transaction_items(revert_plan: RevertPlan) -> list[dict]: + provider_id: str, +) -> ProviderRevertedSummary | ProviderSkippedDetails: """ - Build DynamoDB transaction items from a revert plan. - - Returns a list of transaction items ready for transact_write_items. + Build and execute DynamoDB transactions to revert provider records. + + This function processes all records in a single pass: + - Checks eligibility (returns ProviderSkippedDetails if ineligible) + - Builds transaction items + - Executes transactions + + Returns either a summary of what was reverted or details about why the provider was skipped. 
""" + from cc_common.data_model.provider_record_util import ProviderRecordUtility + from cc_common.data_model.schema.license import LicenseData + from cc_common.data_model.schema.license.record import LicenseRecordSchema + from cc_common.data_model.schema.privilege import PrivilegeData + transaction_items = [] table_name = config.provider_table_name + reverted_licenses = [] + reverted_privileges = [] + updates_deleted_count = 0 + ineligible_updates: list[IneligibleUpdate] = [] # Helper functions for cleaner item building def add_put(item: dict): @@ -572,81 +523,274 @@ def add_delete(pk: str, sk: str): } }) - # Add license operations - for license_action in revert_plan.licenses_to_revert: - if license_action.action == 'delete': - add_delete(license_action.pk, license_action.sk) - logger.info('Deleting license record', pk=license_action.pk, sk=license_action.sk) - else: # revert - add_put(license_action.item) - logger.info('Reverting license record', pk=license_action.pk, sk=license_action.sk) - - # Add privilege revert operations - for privilege_action in revert_plan.privileges_to_revert: - add_put(privilege_action.item) - logger.info('Reverting privilege record') - - # Add provider revert operation if needed - if revert_plan.provider_to_revert: - add_put(revert_plan.provider_to_revert) - logger.info('Reverting provider record') - - # Add update record deletions - for update in revert_plan.updates_to_delete: - add_delete(update.pk, update.sk) - logger.info('Deleting update record', pk=update.pk, sk=update.sk) - - return transaction_items - + # Step 1: Check provider updates - any after start_datetime make provider ineligible + provider_updates = provider_records.get_all_provider_update_records() + for update in provider_updates: + if update.dateOfUpdate >= start_datetime: + ineligible_updates.append( + IneligibleUpdate( + type='providerUpdate', + update_type=update.updateType, + create_date=update.dateOfUpdate.isoformat(), + ) + ) -def 
_execute_revert_transactions(revert_plan: RevertPlan): - """ - Execute DynamoDB transactions to revert records. + # Step 2: Process each license record for the jurisdiction + license_records = provider_records.get_license_records( + filter_condition=lambda x: x.jurisdiction == jurisdiction + ) + + reverted_licenses_dict = [] + + for license_record in license_records: + # Get privilege updates for this license (same jurisdiction and license type) + privilege_updates = provider_records.get_update_records_for_privilege( + jurisdiction=license_record.jurisdiction, + license_type=license_record.licenseType, + filter_condition=lambda x: x.createDate >= start_datetime, + ) + + # Check privilege updates for eligibility + for privilege_update in privilege_updates: + if privilege_update.updateType != PRIVILEGE_LICENSE_DEACTIVATION_CATEGORY or privilege_update.createDate > end_datetime: + # Non-license-deactivation privilege update or privilege update after end_datetime make provider ineligible + ineligible_updates.append( + IneligibleUpdate( + type='privilegeUpdate', + update_type=privilege_update.updateType, + create_date=privilege_update.createDate.isoformat(), + ) + ) + elif start_datetime <= privilege_update.createDate <= end_datetime: + # License deactivation within window - mark for deletion + serialized = privilege_update.serialize_to_database_record() + add_delete(serialized['pk'], serialized['sk']) + updates_deleted_count += 1 + logger.info('Will delete privilege deactivation update record if provider is eligible for rollback') + + # Reactivate the privilege + privilege_record = provider_records.get_specific_privilege_record( + jurisdiction=license_record.jurisdiction, + license_abbreviation=license_record.licenseTypeAbbreviation, + ) + if privilege_record: + # Remove the licenseDeactivatedStatus field to reactivate + reactivated_privilege_data = privilege_record.to_dict() + reactivated_privilege_data.pop('licenseDeactivatedStatus', None) + + reactivated_privilege = 
PrivilegeData.create_new(reactivated_privilege_data) + add_put(reactivated_privilege.serialize_to_database_record()) + logger.info('Will reactivate privilege record if provider is eligible for rollback') + + reverted_privileges.append( + RevertedPrivilege( + provider_id=provider_id, + jurisdiction=license_record.jurisdiction, + license_type=license_record.licenseType, + ) + ) - DynamoDB transactions are limited to 100 items, so we split into batches if needed. - Uses the Table resource for automatic type conversion. - """ - transaction_items = _build_transaction_items(revert_plan) + # Get license updates for this license after start_datetime + license_updates_after_start = provider_records.get_update_records_for_license( + jurisdiction=license_record.jurisdiction, + license_type=license_record.licenseType, + filter_condition=lambda x: x.createDate >= start_datetime, + ) + # if license record was created during the window, delete it and all update records after start_datetime + if license_record.uploadDate is not None and start_datetime <= license_record.uploadDate <= end_datetime: + serialized = license_record.serialize_to_database_record() + add_delete(serialized['pk'], serialized['sk']) + logger.info('Will delete license record (created during upload) if provider is eligible for rollback') + reverted_licenses.append( + RevertedLicense( + provider_id=provider_id, + jurisdiction=license_record.jurisdiction, + license_type=license_record.licenseType, + ) + ) + for update in license_updates_after_start: + serialized = update.serialize_to_database_record() + add_delete(serialized['pk'], serialized['sk']) + updates_deleted_count += 1 + logger.info('Will delete license update record if provider is eligible for rollback', update_type=update.updateType) + else: + # If license record was not created during the window, check license updates for eligibility and build transactions + license_updates_in_window = [] + for license_update in license_updates_after_start: + if 
license_update.updateType not in LICENSE_UPLOAD_UPDATE_CATEGORIES: + # Non-upload-related license updates make provider ineligible + ineligible_updates.append( + IneligibleUpdate( + type='licenseUpdate', + update_type=license_update.updateType, + create_date=license_update.createDate.isoformat(), + ) + ) + elif start_datetime <= license_update.createDate <= end_datetime: + # Upload-related update within window - mark for deletion + license_updates_in_window.append(license_update) + serialized = license_update.serialize_to_database_record() + add_delete(serialized['pk'], serialized['sk']) + updates_deleted_count += 1 + logger.info('Will delete license update record if provider is eligible for rollback', update_type=license_update.updateType) + + # If there were updates in the window and no updates after end_datetime, revert the license + # to the previous values of the earliest update in the window + if license_updates_in_window: + updates_after_window = [u for u in license_updates_after_start if u.createDate > end_datetime] + + if not updates_after_window: + # Find the earliest update in the window to get the previous state + license_updates_in_window.sort(key=lambda x: x.createDate) + earliest_update_in_window = license_updates_in_window[0] + + # Check if license was created during the window (uploadDate within window) + if (license_record.uploadDate is not None and + start_datetime <= license_record.uploadDate <= end_datetime): + # License created during upload - delete it + serialized = license_record.serialize_to_database_record() + add_delete(serialized['pk'], serialized['sk']) + logger.info('Deleting license record (created during upload)') + + reverted_licenses.append( + RevertedLicense( + provider_id=provider_id, + jurisdiction=license_record.jurisdiction, + license_type=license_record.licenseType, + ) + ) + else: + # License existed before - revert to previous state + reverted_license_data = license_record.to_dict() + 
reverted_license_data.update(earliest_update_in_window.previous) + + reverted_license = LicenseData.create_new(reverted_license_data) + serialized_reverted = reverted_license.serialize_to_database_record() + + add_put(serialized_reverted) + logger.info('Reverting license record to pre-upload state') + + # Track for provider record regeneration + license_schema = LicenseRecordSchema() + reverted_licenses_dict.append(license_schema.load(serialized_reverted)) + + reverted_licenses.append( + RevertedLicense( + provider_id=provider_id, + jurisdiction=license_record.jurisdiction, + license_type=license_record.licenseType, + ) + ) + else: + # Keep current license state if there were updates after the window + logger.info('Updates detected after rollback end time - will keep license record as-is.') + reverted_licenses_dict.append(license_record.to_dict()) + else: + # No updates in window, keep license as-is + reverted_licenses_dict.append(license_record.to_dict()) + + # Check if provider is ineligible for rollback + if ineligible_updates: + logger.info( + 'Provider not eligible for automatic rollback', + provider_id=provider_id, + ineligible_count=len(ineligible_updates), + ) + return ProviderSkippedDetails( + provider_id=provider_id, + reason='Provider has non-upload-related updates after rollback start time', + ineligible_updates=[ + { + 'type': update.type, + 'updateType': update.update_type, + 'createDate': update.create_date, + } + for update in ineligible_updates + ], + ) + + # Step 3: Regenerate provider record using populate_provider_record + if reverted_licenses or reverted_privileges: + current_provider_record = provider_records.get_provider_record() + + # Get licenses from other jurisdictions (not affected by rollback) + all_licenses = provider_records.get_license_records() + for license_rec in all_licenses: + if license_rec.jurisdiction != jurisdiction: + reverted_licenses_dict.append(license_rec.to_dict()) + + # Get all privilege records + 
privilege_records_dict = [p.to_dict() for p in provider_records.get_privilege_records()] + + # Find best license from reverted state + if reverted_licenses_dict: + best_license = ProviderRecordUtility.find_best_license( + license_records=reverted_licenses_dict, + home_jurisdiction=current_provider_record.currentHomeJurisdiction, + ) + + # Populate provider record using the reverted best license + updated_provider_record = ProviderRecordUtility.populate_provider_record( + current_provider_record=current_provider_record, + license_record=best_license, + privilege_records=privilege_records_dict, + ) + + add_put(updated_provider_record.serialize_to_database_record()) + logger.info('Adding provider record update to transaction') + + # Execute transactions in batches of 100 if not transaction_items: logger.warning('No transaction items to execute') - return - + return ProviderRevertedSummary( + provider_id=provider_id, + licenses_reverted=reverted_licenses, + privileges_reverted=reverted_privileges, + updates_deleted=updates_deleted_count, + ) + logger.info(f'Executing {len(transaction_items)} transaction items in batches of 100') - - # Execute transactions in batches of 100 + for i in range(0, len(transaction_items), 100): batch = transaction_items[i:i + 100] # Use Table resource's client for automatic type conversion config.provider_table.meta.client.transact_write_items(TransactItems=batch) logger.info(f'Executed batch {i // 100 + 1} with {len(batch)} items') + + return ProviderRevertedSummary( + provider_id=provider_id, + licenses_reverted=reverted_licenses, + privileges_reverted=reverted_privileges, + updates_deleted=updates_deleted_count, + ) -def _publish_revert_events(revert_plan: RevertPlan, compact: str, rollback_reason: str): +def _publish_revert_events(revert_summary: ProviderRevertedSummary, compact: str, rollback_reason: str): """ Publish revert events for all reverted licenses and privileges. 
""" with EventBatchWriter(config.events_client) as event_writer: # Publish license revert events - for license_action in revert_plan.licenses_to_revert: + for reverted_license in revert_summary.licenses_reverted: config.event_bus_client.publish_license_revert_event( source='org.compactconnect.disaster-recovery', compact=compact, - provider_id=license_action.provider_id, - jurisdiction=license_action.jurisdiction, - license_type=license_action.license_type, + provider_id=reverted_license.provider_id, + jurisdiction=reverted_license.jurisdiction, + license_type=reverted_license.license_type, rollback_reason=rollback_reason, event_batch_writer=event_writer, ) # Publish privilege revert events - for privilege_action in revert_plan.privileges_to_revert: + for reverted_privilege in revert_summary.privileges_reverted: config.event_bus_client.publish_privilege_revert_event( source='org.compactconnect.disaster-recovery', compact=compact, - provider_id=privilege_action.provider_id, - jurisdiction=privilege_action.jurisdiction, - license_type=privilege_action.license_type, + provider_id=reverted_privilege.provider_id, + jurisdiction=reverted_privilege.jurisdiction, + license_type=reverted_privilege.license_type, rollback_reason=rollback_reason, event_batch_writer=event_writer, ) diff --git a/backend/compact-connect/lambdas/python/disaster-recovery/tests/__init__.py b/backend/compact-connect/lambdas/python/disaster-recovery/tests/__init__.py index 0629d880d..7cc09b5db 100644 --- a/backend/compact-connect/lambdas/python/disaster-recovery/tests/__init__.py +++ b/backend/compact-connect/lambdas/python/disaster-recovery/tests/__init__.py @@ -95,4 +95,10 @@ def setUpClass(cls): ), }, ) + # Monkey-patch config object to be sure we have it based + # on the env vars we set above + import cc_common.config + + cls.config = cc_common.config._Config() # noqa: SLF001 protected-access + cc_common.config.config = cls.config cls.mock_context = MagicMock(name='MockLambdaContext', 
spec=LambdaContext) diff --git a/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/__init__.py b/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/__init__.py index 9fb27c014..db71681c6 100644 --- a/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/__init__.py +++ b/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/__init__.py @@ -25,8 +25,13 @@ def setUp(self): # noqa: N801 invalid-name self.mock_source_table_arn = f'arn:aws:dynamodb:us-east-1:767398110685:table/{self.mock_source_table_name}' self.build_resources() + # these must be imported within the tests, since they import modules which require + # environment variables that are not set until the TstLambdas class is initialized + import cc_common.config from common_test.test_data_generator import TestDataGenerator + cc_common.config.config = cc_common.config._Config() # noqa: SLF001 protected-access + self.config = cc_common.config.config self.test_data_generator = TestDataGenerator self.addCleanup(self.delete_resources) @@ -116,4 +121,4 @@ def delete_resources(self): self._provider_table.delete() self._rollback_results_bucket.objects.delete() self._rollback_results_bucket.delete() - self._event_bus.delete_event_bus(Name=os.environ['EVENT_BUS_NAME']) + boto3.client('events').delete_event_bus(Name=os.environ['EVENT_BUS_NAME']) diff --git a/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py b/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py index 8a0813bc4..09b06e620 100644 --- a/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py +++ b/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py @@ -10,29 +10,34 @@ - S3 result management """ from datetime import datetime, timedelta -from unittest.mock import Mock +from 
unittest.mock import Mock, patch from uuid import uuid4 from moto import mock_aws -from cc_common.config import config -from cc_common.data_model.schema.common import UpdateCategory from cc_common.data_model.update_tier_enum import UpdateTierEnum -from handlers.rollback_license_upload import rollback_license_upload from . import TstFunction +MOCK_DATETIME_STRING = '2025-10-23T08:15:00+00:00' + + @mock_aws +@patch('cc_common.config._Config.current_standard_datetime', datetime.fromisoformat(MOCK_DATETIME_STRING)) class TestRollbackLicenseUpload(TstFunction): """Test class for license upload rollback handler.""" - def setup_method(self): + def setUp(self): """Set up test fixtures before each test method.""" + super().setUp() # Create sample test data self.compact = 'aslp' self.jurisdiction = 'oh' self.provider_id = str(uuid4()) - self.upload_datetime = datetime.now() - timedelta(days=1) - self.start_datetime = self.upload_datetime - timedelta(hours=1) - self.end_datetime = datetime.now() + # default upload time between start and end time + self.default_upload_datetime = datetime.fromisoformat(MOCK_DATETIME_STRING) - timedelta(hours=1) + self.default_start_datetime = self.default_upload_datetime - timedelta(days=1) + self.default_end_datetime = self.default_upload_datetime + from cc_common.data_model.schema.common import UpdateCategory + self.update_categories = UpdateCategory # Helper methods for setting up test scenarios def _when_provider_had_license_created_from_upload(self, upload_datetime: datetime = None): @@ -41,7 +46,7 @@ def _when_provider_had_license_created_from_upload(self, upload_datetime: dateti Returns the created license data. 
""" if upload_datetime is None: - upload_datetime = self.upload_datetime + upload_datetime = self.default_upload_datetime return self.test_data_generator.put_default_license_record_in_provider_table({ 'providerId': self.provider_id, @@ -51,21 +56,33 @@ def _when_provider_had_license_created_from_upload(self, upload_datetime: dateti 'dateOfUpdate': upload_datetime, }) - def _when_provider_had_license_updated_from_upload(self, upload_datetime: datetime = None): + def _when_provider_had_license_updated_from_upload(self, upload_datetime: datetime = None, license_upload_datetime: datetime = None): """ Set up a scenario where a provider had an existing license updated during the upload window. Returns the license and its update record. """ if upload_datetime is None: - upload_datetime = self.upload_datetime + upload_datetime = self.default_upload_datetime + if license_upload_datetime is None: + license_upload_datetime = self.default_upload_datetime + + # add provider record to provider table + self.test_data_generator.put_default_provider_record_in_provider_table({ + 'providerId': self.provider_id, + 'compact': self.compact, + 'jurisdiction': self.jurisdiction, + 'dateOfUpdate': self.default_start_datetime - timedelta(days=30), + }) + - # Create original license before upload window + # Create original license before upload window, unless different time is provided original_license = self.test_data_generator.put_default_license_record_in_provider_table({ 'providerId': self.provider_id, 'compact': self.compact, 'jurisdiction': self.jurisdiction, - 'dateOfUpdate': self.start_datetime - timedelta(days=30), - 'dateOfExpiration': (self.start_datetime - timedelta(days=30)).date(), + 'dateOfUpdate': self.default_start_datetime - timedelta(days=30), + 'dateOfExpiration': (self.default_start_datetime - timedelta(days=30)).date(), + 'uploadDate': license_upload_datetime, }) # Create update record within upload window @@ -74,12 +91,13 @@ def 
_when_provider_had_license_updated_from_upload(self, upload_datetime: dateti 'compact': self.compact, 'jurisdiction': self.jurisdiction, 'licenseType': original_license.licenseType, - 'updateType': UpdateCategory.RENEWAL, + 'updateType': self.update_categories.RENEWAL, 'createDate': upload_datetime, 'effectiveDate': upload_datetime, 'uploadDate': upload_datetime, 'previous': { 'dateOfExpiration': original_license.dateOfExpiration, + **original_license.to_dict() }, 'updatedValues': { 'dateOfExpiration': (upload_datetime + timedelta(days=365)).date(), @@ -93,7 +111,7 @@ def _when_provider_had_license_updated_from_upload(self, upload_datetime: dateti 'jurisdiction': self.jurisdiction, 'dateOfUpdate': upload_datetime, 'dateOfExpiration': (upload_datetime + timedelta(days=365)).date(), - 'uploadDate': upload_datetime, + 'uploadDate': license_upload_datetime, }) return updated_license, license_update @@ -104,14 +122,14 @@ def _when_provider_had_privilege_deactivated_from_upload(self, upload_datetime: Returns the privilege and its update record. 
""" if upload_datetime is None: - upload_datetime = self.upload_datetime + upload_datetime = self.default_upload_datetime # Create privilege that was active before upload privilege = self.test_data_generator.put_default_privilege_record_in_provider_table({ 'providerId': self.provider_id, 'compact': self.compact, 'jurisdiction': self.jurisdiction, - 'dateOfUpdate': self.start_datetime - timedelta(days=30), + 'dateOfUpdate': self.default_start_datetime - timedelta(days=30), }) # Create deactivation update record @@ -120,7 +138,7 @@ def _when_provider_had_privilege_deactivated_from_upload(self, upload_datetime: 'compact': self.compact, 'jurisdiction': self.jurisdiction, 'licenseType': privilege.licenseType, - 'updateType': UpdateCategory.LICENSE_DEACTIVATION, + 'updateType': self.update_categories.LICENSE_DEACTIVATION, 'createDate': upload_datetime, 'effectiveDate': upload_datetime, }) @@ -134,7 +152,7 @@ def _when_provider_had_privilege_update_after_upload(self, after_upload_datetime Returns the privilege and its update record. """ if after_upload_datetime is None: - after_upload_datetime = self.end_datetime + timedelta(hours=1) + after_upload_datetime = self.default_end_datetime + timedelta(hours=1) privilege = self.test_data_generator.put_default_privilege_record_in_provider_table({ 'providerId': self.provider_id, @@ -148,7 +166,7 @@ def _when_provider_had_privilege_update_after_upload(self, after_upload_datetime 'compact': self.compact, 'jurisdiction': self.jurisdiction, 'licenseType': privilege.licenseType, - 'updateType': UpdateCategory.RENEWAL, # Not LICENSE_DEACTIVATION + 'updateType': self.update_categories.RENEWAL, # Not LICENSE_DEACTIVATION 'createDate': after_upload_datetime, 'effectiveDate': after_upload_datetime, }) @@ -162,7 +180,7 @@ def _when_provider_had_license_update_after_upload(self, after_upload_datetime: Returns the license and its update record. 
""" if after_upload_datetime is None: - after_upload_datetime = self.end_datetime + timedelta(hours=1) + after_upload_datetime = self.default_end_datetime + timedelta(hours=1) license_record = self.test_data_generator.put_default_license_record_in_provider_table({ 'providerId': self.provider_id, @@ -176,7 +194,7 @@ def _when_provider_had_license_update_after_upload(self, after_upload_datetime: 'compact': self.compact, 'jurisdiction': self.jurisdiction, 'licenseType': license_record.licenseType, - 'updateType': UpdateCategory.ENCUMBRANCE, # Not an upload-related category + 'updateType': self.update_categories.ENCUMBRANCE, # Not an upload-related category 'createDate': after_upload_datetime, 'effectiveDate': after_upload_datetime, }) @@ -189,7 +207,7 @@ def _when_provider_top_level_record_needs_reverted(self, before_upload_datetime: Returns the provider record. """ if before_upload_datetime is None: - before_upload_datetime = self.start_datetime - timedelta(days=30) + before_upload_datetime = self.default_start_datetime - timedelta(days=30) # Create provider record with old values provider = self.test_data_generator.put_default_provider_record_in_provider_table({ @@ -206,7 +224,7 @@ def _when_provider_top_level_record_needs_reverted(self, before_upload_datetime: 'compact': self.compact, 'givenName': 'NewFirstName', 'familyName': 'NewLastName', - 'dateOfUpdate': self.upload_datetime, + 'dateOfUpdate': self.default_upload_datetime, }) return provider, updated_provider @@ -214,6 +232,8 @@ def _when_provider_top_level_record_needs_reverted(self, before_upload_datetime: # Integration tests for rollback scenarios def test_provider_top_level_record_reset_to_prior_values_when_upload_reverted(self): """Test that provider top-level record is reset to values before upload.""" + from handlers.rollback_license_upload import rollback_license_upload + # Setup: Provider record was updated during upload old_provider, new_provider = 
self._when_provider_top_level_record_needs_reverted() @@ -221,8 +241,8 @@ def test_provider_top_level_record_reset_to_prior_values_when_upload_reverted(se event = { 'compact': self.compact, 'jurisdiction': self.jurisdiction, - 'startDateTime': self.start_datetime.isoformat(), - 'endDateTime': self.end_datetime.isoformat(), + 'startDateTime': self.default_start_datetime.isoformat(), + 'endDateTime': self.default_end_datetime.isoformat(), 'rollbackReason': 'Test rollback', 'executionId': 'test-execution-123', 'providersProcessed': 0, @@ -235,7 +255,7 @@ def test_provider_top_level_record_reset_to_prior_values_when_upload_reverted(se self.assertEqual(result['providersReverted'], 1) # Verify: Provider record has been reset to old values - provider_records = config.data_client.get_provider_user_records( + provider_records = self.config.data_client.get_provider_user_records( compact=self.compact, provider_id=self.provider_id, ) @@ -245,8 +265,11 @@ def test_provider_top_level_record_reset_to_prior_values_when_upload_reverted(se def test_provider_license_record_reset_to_prior_values_when_upload_reverted(self): """Test that license record is reset to values before upload.""" - # Setup: License was updated during upload (e.g., renewed) - updated_license, license_update = self._when_provider_had_license_updated_from_upload() + from handlers.rollback_license_upload import rollback_license_upload + + # Setup: License was updated during upload (e.g., renewed), but was first uploaded before start time + updated_license, license_update = self._when_provider_had_license_updated_from_upload( + license_upload_datetime=self.default_start_datetime - timedelta(hours = 1)) # Store the original expiration date from the update's previous values original_expiration = license_update.previous['dateOfExpiration'] @@ -255,8 +278,8 @@ def test_provider_license_record_reset_to_prior_values_when_upload_reverted(self event = { 'compact': self.compact, 'jurisdiction': self.jurisdiction, - 
'startDateTime': self.start_datetime.isoformat(), - 'endDateTime': self.end_datetime.isoformat(), + 'startDateTime': self.default_start_datetime.isoformat(), + 'endDateTime': self.default_end_datetime.isoformat(), 'rollbackReason': 'Test rollback', 'executionId': 'test-execution-123', 'providersProcessed': 0, @@ -264,12 +287,12 @@ def test_provider_license_record_reset_to_prior_values_when_upload_reverted(self result = rollback_license_upload(event, Mock()) - # Assert: Rollback completed successfully + # should return complete message self.assertEqual(result['rollbackStatus'], 'COMPLETE') self.assertEqual(result['providersReverted'], 1) # Verify: License record has been reset to original values - provider_records = config.data_client.get_provider_user_records( + provider_records = self.config.data_client.get_provider_user_records( compact=self.compact, provider_id=self.provider_id, include_update_tier=UpdateTierEnum.TIER_THREE, @@ -285,6 +308,8 @@ def test_provider_license_record_reset_to_prior_values_when_upload_reverted(self def test_provider_privilege_record_reactivated_when_upload_reverted(self): """Test that privilege is reactivated when license deactivation is reverted.""" + from handlers.rollback_license_upload import rollback_license_upload + # Setup: Privilege was deactivated during upload due to license deactivation privilege, privilege_update = self._when_provider_had_privilege_deactivated_from_upload() @@ -292,8 +317,8 @@ def test_provider_privilege_record_reactivated_when_upload_reverted(self): event = { 'compact': self.compact, 'jurisdiction': self.jurisdiction, - 'startDateTime': self.start_datetime.isoformat(), - 'endDateTime': self.end_datetime.isoformat(), + 'startDateTime': self.default_start_datetime.isoformat(), + 'endDateTime': self.default_end_datetime.isoformat(), 'rollbackReason': 'Test rollback', 'executionId': 'test-execution-123', 'providersProcessed': 0, @@ -306,7 +331,7 @@ def 
test_provider_privilege_record_reactivated_when_upload_reverted(self): self.assertEqual(result['providersReverted'], 1) # Verify: Privilege has been reactivated (status should be 'active') - provider_records = config.data_client.get_provider_user_records( + provider_records = self.config.data_client.get_provider_user_records( compact=self.compact, provider_id=self.provider_id, include_update_tier=UpdateTierEnum.TIER_THREE, @@ -320,13 +345,15 @@ def test_provider_privilege_record_reactivated_when_upload_reverted(self): privilege_updates = provider_records.get_all_privilege_update_records() self.assertEqual(len(privilege_updates), 0, "Privilege update records should be deleted") - def test_provider_license_updates_within_time_period_removed_when_upload_reverted(self): - """Test that license update records within the time window are deleted.""" - # Setup: License was updated during upload - updated_license, license_update = self._when_provider_had_license_updated_from_upload() + def test_provider_license_updates_and_license_record_within_time_period_removed_when_upload_reverted(self): + """Test that license update records and license record within the time window are deleted.""" + from handlers.rollback_license_upload import rollback_license_upload + + # Setup: License was uploaded and then updated during upload + license_record, license_update = self._when_provider_had_license_updated_from_upload(license_upload_datetime=self.default_start_datetime + timedelta(hours = 1)) # Verify update record exists before rollback - provider_records_before = config.data_client.get_provider_user_records( + provider_records_before = self.config.data_client.get_provider_user_records( compact=self.compact, provider_id=self.provider_id, include_update_tier=UpdateTierEnum.TIER_THREE, @@ -338,8 +365,8 @@ def test_provider_license_updates_within_time_period_removed_when_upload_reverte event = { 'compact': self.compact, 'jurisdiction': self.jurisdiction, - 'startDateTime': 
self.start_datetime.isoformat(), - 'endDateTime': self.end_datetime.isoformat(), + 'startDateTime': self.default_start_datetime.isoformat(), + 'endDateTime': self.default_end_datetime.isoformat(), 'rollbackReason': 'Test rollback', 'executionId': 'test-execution-123', 'providersProcessed': 0, @@ -351,21 +378,25 @@ def test_provider_license_updates_within_time_period_removed_when_upload_reverte self.assertEqual(result['rollbackStatus'], 'COMPLETE') # Verify: All license update records within time window have been deleted - provider_records_after = config.data_client.get_provider_user_records( + provider_records_after = self.config.data_client.get_provider_user_records( compact=self.compact, provider_id=self.provider_id, include_update_tier=UpdateTierEnum.TIER_THREE, ) + licenses_after = provider_records_after.get_license_records() + self.assertEqual(len(licenses_after), 0, "License records should be deleted") license_updates_after = provider_records_after.get_all_license_update_records() self.assertEqual(len(license_updates_after), 0, "License update records should be deleted") def test_provider_privilege_deactivation_update_within_time_period_removed_when_upload_reverted(self): """Test that privilege deactivation update records within the time window are deleted.""" + from handlers.rollback_license_upload import rollback_license_upload + # Setup: Privilege was deactivated during upload privilege, privilege_update = self._when_provider_had_privilege_deactivated_from_upload() # Verify update record exists before rollback - provider_records_before = config.data_client.get_provider_user_records( + provider_records_before = self.config.data_client.get_provider_user_records( compact=self.compact, provider_id=self.provider_id, include_update_tier=UpdateTierEnum.TIER_THREE, @@ -377,8 +408,8 @@ def test_provider_privilege_deactivation_update_within_time_period_removed_when_ event = { 'compact': self.compact, 'jurisdiction': self.jurisdiction, - 'startDateTime': 
self.start_datetime.isoformat(), - 'endDateTime': self.end_datetime.isoformat(), + 'startDateTime': self.default_start_datetime.isoformat(), + 'endDateTime': self.default_end_datetime.isoformat(), 'rollbackReason': 'Test rollback', 'executionId': 'test-execution-123', 'providersProcessed': 0, @@ -390,7 +421,7 @@ def test_provider_privilege_deactivation_update_within_time_period_removed_when_ self.assertEqual(result['rollbackStatus'], 'COMPLETE') # Verify: All privilege update records within time window have been deleted - provider_records_after = config.data_client.get_provider_user_records( + provider_records_after = self.config.data_client.get_provider_user_records( compact=self.compact, provider_id=self.provider_id, include_update_tier=UpdateTierEnum.TIER_THREE, @@ -400,6 +431,8 @@ def test_provider_privilege_deactivation_update_within_time_period_removed_when_ def test_provider_skipped_if_license_updates_detected_after_time_period_when_upload_reverted(self): """Test that provider is skipped if non-upload-related license updates exist after time window.""" + from handlers.rollback_license_upload import rollback_license_upload + # Setup: Provider had license update after upload window license_record, license_update = self._when_provider_had_license_update_after_upload() @@ -407,8 +440,8 @@ def test_provider_skipped_if_license_updates_detected_after_time_period_when_upl event = { 'compact': self.compact, 'jurisdiction': self.jurisdiction, - 'startDateTime': self.start_datetime.isoformat(), - 'endDateTime': self.end_datetime.isoformat(), + 'startDateTime': self.default_start_datetime.isoformat(), + 'endDateTime': self.default_end_datetime.isoformat(), 'rollbackReason': 'Test rollback', 'executionId': 'test-execution-123', 'providersProcessed': 0, @@ -422,7 +455,7 @@ def test_provider_skipped_if_license_updates_detected_after_time_period_when_upl self.assertEqual(result['providersReverted'], 0) # Verify: License record and update still exist (not rolled back) - 
provider_records = config.data_client.get_provider_user_records( + provider_records = self.config.data_client.get_provider_user_records( compact=self.compact, provider_id=self.provider_id, include_update_tier=UpdateTierEnum.TIER_THREE, @@ -434,6 +467,8 @@ def test_provider_skipped_if_license_updates_detected_after_time_period_when_upl def test_provider_skipped_if_privilege_updates_detected_after_time_period_when_upload_reverted(self): """Test that provider is skipped if non-upload-related privilege updates exist after time window.""" + from handlers.rollback_license_upload import rollback_license_upload + # Setup: Provider had privilege update after upload window privilege, privilege_update = self._when_provider_had_privilege_update_after_upload() @@ -441,8 +476,8 @@ def test_provider_skipped_if_privilege_updates_detected_after_time_period_when_u event = { 'compact': self.compact, 'jurisdiction': self.jurisdiction, - 'startDateTime': self.start_datetime.isoformat(), - 'endDateTime': self.end_datetime.isoformat(), + 'startDateTime': self.default_start_datetime.isoformat(), + 'endDateTime': self.default_end_datetime.isoformat(), 'rollbackReason': 'Test rollback', 'executionId': 'test-execution-123', 'providersProcessed': 0, @@ -456,7 +491,7 @@ def test_provider_skipped_if_privilege_updates_detected_after_time_period_when_u self.assertEqual(result['providersReverted'], 0) # Verify: Privilege record and update still exist (not rolled back) - provider_records = config.data_client.get_provider_user_records( + provider_records = self.config.data_client.get_provider_user_records( compact=self.compact, provider_id=self.provider_id, include_update_tier=UpdateTierEnum.TIER_THREE, @@ -469,11 +504,13 @@ def test_provider_skipped_if_privilege_updates_detected_after_time_period_when_u # Validation tests def test_rollback_validates_datetime_format(self): """Test that rollback validates datetime format.""" + from handlers.rollback_license_upload import rollback_license_upload + 
event = { 'compact': self.compact, 'jurisdiction': self.jurisdiction, 'startDateTime': 'invalid-datetime', - 'endDateTime': self.end_datetime.isoformat(), + 'endDateTime': self.default_end_datetime.isoformat(), 'rollbackReason': 'Test rollback', 'executionId': 'test-execution-123', 'providersProcessed': 0, @@ -486,11 +523,13 @@ def test_rollback_validates_datetime_format(self): def test_rollback_validates_time_window_order(self): """Test that rollback validates start time is before end time.""" + from handlers.rollback_license_upload import rollback_license_upload + event = { 'compact': self.compact, 'jurisdiction': self.jurisdiction, - 'startDateTime': self.end_datetime.isoformat(), - 'endDateTime': self.start_datetime.isoformat(), + 'startDateTime': self.default_end_datetime.isoformat(), + 'endDateTime': self.default_start_datetime.isoformat(), 'rollbackReason': 'Test rollback', 'executionId': 'test-execution-123', 'providersProcessed': 0, @@ -503,6 +542,8 @@ def test_rollback_validates_time_window_order(self): def test_rollback_validates_maximum_time_window(self): """Test that rollback validates maximum time window.""" + from handlers.rollback_license_upload import rollback_license_upload + start = datetime.now() - timedelta(days=8) # More than 7 days end = datetime.now() From 1fb04a55769b836deeac9ffa0458e971c78edc3b Mon Sep 17 00:00:00 2001 From: Landon Shumway Date: Wed, 5 Nov 2025 10:30:16 -0600 Subject: [PATCH 15/81] Fix permissions/config for rollback lambda --- .../disaster_recovery_stack/__init__.py | 2 +- .../license_upload_rollback_step_function.py | 52 +++++++------------ 2 files changed, 21 insertions(+), 33 deletions(-) diff --git a/backend/compact-connect/stacks/disaster_recovery_stack/__init__.py b/backend/compact-connect/stacks/disaster_recovery_stack/__init__.py index 333594c7d..251b7f2ea 100644 --- a/backend/compact-connect/stacks/disaster_recovery_stack/__init__.py +++ b/backend/compact-connect/stacks/disaster_recovery_stack/__init__.py @@ 
-97,7 +97,7 @@ def __init__( self.license_upload_rollback_workflow = LicenseUploadRollbackStepFunctionConstruct( self, 'LicenseUploadRollback', - provider_table=persistent_stack.provider_table, + persistent_stack=persistent_stack, rollback_results_bucket=self.rollback_results_bucket, dr_shared_encryption_key=self.dr_shared_encryption_key, ) diff --git a/backend/compact-connect/stacks/disaster_recovery_stack/license_upload_rollback_step_function.py b/backend/compact-connect/stacks/disaster_recovery_stack/license_upload_rollback_step_function.py index d73214977..a7b994021 100644 --- a/backend/compact-connect/stacks/disaster_recovery_stack/license_upload_rollback_step_function.py +++ b/backend/compact-connect/stacks/disaster_recovery_stack/license_upload_rollback_step_function.py @@ -1,8 +1,7 @@ import os from aws_cdk import Duration -from aws_cdk.aws_dynamodb import Table -from aws_cdk.aws_iam import PolicyStatement +from aws_cdk.aws_events import EventBus from aws_cdk.aws_kms import Key from aws_cdk.aws_logs import LogGroup, RetentionDays from aws_cdk.aws_s3 import Bucket @@ -24,6 +23,8 @@ from constructs import Construct from common_constructs.python_function import PythonFunction +from common_constructs.ssm_parameter_utility import SSMParameterUtility +from stacks import persistent_stack as ps class LicenseUploadRollbackStepFunctionConstruct(Construct): @@ -39,7 +40,7 @@ def __init__( scope: Construct, construct_id: str, *, - provider_table: Table, + persistent_stack: ps.PersistentStack, rollback_results_bucket: Bucket, dr_shared_encryption_key: Key, **kwargs, @@ -47,22 +48,26 @@ def __init__( super().__init__(scope, construct_id, **kwargs) stack = Stack.of(self) + # We explicitly get the event bus arn from parameter store, to avoid issues with cross stack updates + data_event_bus = SSMParameterUtility.load_data_event_bus_from_ssm_parameter(self) # Create Lambda function for rollback processing self._create_rollback_function( stack=stack, - 
provider_table=provider_table, + persistent_stack=persistent_stack, rollback_results_bucket=rollback_results_bucket, + data_event_bus=data_event_bus ) # Build Step Function definition - definition = self._build_rollback_state_machine_definition(provider_table=provider_table) + definition = self._build_rollback_state_machine_definition() # Create log group for state machine state_machine_log_group = LogGroup( self, 'LicenseUploadRollbackStateMachineLogs', - retention=RetentionDays.ONE_MONTH, + # this state machine will hopefully not be run often, so we will not automatically clear these logs + retention=RetentionDays.INFINITE, encryption_key=dr_shared_encryption_key, ) @@ -100,8 +105,9 @@ def __init__( def _create_rollback_function( self, stack: Stack, - provider_table: Table, + persistent_stack: ps.PersistentStack, rollback_results_bucket: Bucket, + data_event_bus: EventBus ): """Create the Lambda function for processing license upload rollback.""" self.rollback_function = PythonFunction( @@ -116,38 +122,20 @@ def _create_rollback_function( environment={ **stack.common_env_vars, 'ROLLBACK_RESULTS_BUCKET_NAME': rollback_results_bucket.bucket_name, + 'LICENSE_UPLOAD_DATE_INDEX_NAME': persistent_stack.provider_table.license_upload_date_gsi_name, + 'EVENT_BUS_NAME': data_event_bus.event_bus_name, }, ) # Grant permissions to read/write provider table - provider_table.grant_read_write_data(self.rollback_function) - - # Grant permission to query the licenseUploadDateGSI - self.rollback_function.add_to_role_policy( - PolicyStatement( - actions=['dynamodb:Query'], - resources=[ - f'{provider_table.table_arn}/index/{provider_table.license_upload_date_gsi_name}' - ], - ) - ) + persistent_stack.shared_encryption_key.grant_decrypt(self.rollback_function) + persistent_stack.provider_table.grant_read_write_data(self.rollback_function) # Grant S3 permissions for results bucket rollback_results_bucket.grant_read_write(self.rollback_function) # Grant EventBridge permissions to 
publish events - self.rollback_function.add_to_role_policy( - PolicyStatement( - actions=['events:PutEvents'], - resources=[ - stack.format_arn( - service='events', - resource='event-bus', - resource_name=stack.common_env_vars['EVENT_BUS_NAME'], - ) - ], - ) - ) + data_event_bus.grant_put_events_to(self.rollback_function) NagSuppressions.add_resource_suppressions_by_path( stack=stack, @@ -157,13 +145,13 @@ def _create_rollback_function( 'id': 'AwsSolutions-IAM5', 'reason': """ This policy contains wild-carded actions and resources but they are scoped to the - specific table, index, S3 bucket, and event bus that this lambda needs access to. + specific table, S3 bucket, and event bus that this lambda needs access to. """, }, ], ) - def _build_rollback_state_machine_definition(self, provider_table: Table) -> IChainable: + def _build_rollback_state_machine_definition(self) -> IChainable: """ Build the Step Function definition for license upload rollback. From 972409f77fd1f66e2d6928c0f1bfe506dcff2da7 Mon Sep 17 00:00:00 2001 From: Landon Shumway Date: Wed, 5 Nov 2025 12:46:29 -0600 Subject: [PATCH 16/81] simplify upload GSI values --- .../cc_common/data_model/schema/common.py | 7 +++++ .../data_model/schema/license/__init__.py | 8 ++---- .../data_model/schema/license/record.py | 28 ++++++++++--------- .../provider-data-v1/handlers/ingest.py | 10 +++---- .../test_provider_transformations.py | 2 +- 5 files changed, 30 insertions(+), 25 deletions(-) diff --git a/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/common.py b/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/common.py index ced223d8a..8f886d934 100644 --- a/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/common.py +++ b/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/common.py @@ -311,6 +311,13 @@ class UpdateCategory(CCEnum): # it is referenced in the event that an invalid license upload needs to be reverted. 
LICENSE_UPLOAD_UPDATE_OTHER = 'other' +# License upload related update categories +LICENSE_UPLOAD_UPDATE_CATEGORIES = { + UpdateCategory.DEACTIVATION, + UpdateCategory.RENEWAL, + UpdateCategory.LICENSE_UPLOAD_UPDATE_OTHER, +} + class ActiveInactiveStatus(CCEnum): ACTIVE = 'active' diff --git a/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/license/__init__.py b/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/license/__init__.py index 3364e0811..9da8acc5e 100644 --- a/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/license/__init__.py +++ b/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/license/__init__.py @@ -141,8 +141,8 @@ def investigationStatus(self) -> str | None: return self._data.get('investigationStatus') @property - def uploadDate(self) -> datetime | None: - return self._data.get('uploadDate') + def firstUploadDate(self) -> datetime | None: + return self._data.get('firstUploadDate') class LicenseUpdateData(CCDataClass): @@ -199,7 +199,3 @@ def updatedValues(self) -> dict: @property def removedValues(self) -> list[str] | None: return self._data.get('removedValues') - - @property - def uploadDate(self) -> datetime | None: - return self._data.get('uploadDate') diff --git a/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/license/record.py b/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/license/record.py index de8f2f484..eb79bd5d7 100644 --- a/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/license/record.py +++ b/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/license/record.py @@ -17,6 +17,7 @@ CompactEligibilityStatus, LicenseEncumberedStatusEnum, UpdateCategory, + LICENSE_UPLOAD_UPDATE_CATEGORIES, ) from cc_common.data_model.schema.fields import ( ActiveInactive, @@ -50,8 +51,10 @@ class LicenseRecordSchema(BaseRecordSchema, LicenseCommonSchema): 
licenseUploadDateGSIPK = String(required=False, allow_none=False) licenseUploadDateGSISK = String(required=False, allow_none=False) - # Optional field for tracking when the license upload caused this record to be created - uploadDate = DateTime(required=False, allow_none=False) + # Optional field for tracking the first license upload that caused this record to be created + # Note that records which were uploaded before this field was supported will not have this included + # and will not be included in the license upload date GSI + firstUploadDate = DateTime(required=False, allow_none=False) # Provided fields npi = NationalProviderIdentifier(required=False, allow_none=False) @@ -131,10 +134,10 @@ def generate_license_gsi_fields(self, in_data, **kwargs): # noqa: ARG001 unused @pre_dump def generate_license_upload_date_gsi_fields(self, in_data, **kwargs): # noqa: ARG001 unused-argument - """Generate GSI fields for license upload date tracking (only if uploadDate is present)""" - if 'uploadDate' in in_data and in_data['uploadDate'] is not None: - # Extract YYYY-MM from uploadDate - upload_date = in_data['uploadDate'] + """Generate GSI fields for license upload date tracking (only if firstUploadDate is present)""" + if 'firstUploadDate' in in_data and in_data['firstUploadDate'] is not None: + # Extract YYYY-MM from firstUploadDate + upload_date = in_data['firstUploadDate'] year_month = upload_date.strftime('%Y-%m') # Generate GSI PK: C#{compact}#J#{jurisdiction}#D#{YYYY-MM} @@ -230,9 +233,6 @@ class LicenseUpdateRecordSchema(BaseRecordSchema, ChangeHashMixin): licenseUploadDateGSIPK = String(required=False, allow_none=False) licenseUploadDateGSISK = String(required=False, allow_none=False) - # Optional field for tracking when the license upload caused this update record to be created - uploadDate = DateTime(required=False, allow_none=False) - @post_dump # Must be _post_ dump so we have values that are more easily hashed def generate_pk_sk(self, in_data, **kwargs): # 
noqa: ARG001 unused-argument """ @@ -255,10 +255,12 @@ def generate_pk_sk(self, in_data, **kwargs): # noqa: ARG001 unused-argument @pre_dump def generate_license_upload_date_gsi_fields(self, in_data, **kwargs): # noqa: ARG001 unused-argument - """Generate GSI fields for license upload date tracking (only if uploadDate is present)""" - if 'uploadDate' in in_data and in_data['uploadDate'] is not None: - # Extract YYYY-MM from uploadDate - upload_date = in_data['uploadDate'] + """Generate GSI fields for license upload date tracking""" + # If the update is related to an upload event, we generate the upload GSI fields to allow the system to + # query when certain uploads occurred + if in_data['updateType'] in LICENSE_UPLOAD_UPDATE_CATEGORIES: + # Extract YYYY-MM from createDate + upload_date = in_data['createDate'] year_month = upload_date.strftime('%Y-%m') # Generate GSI PK: C#{compact}#J#{jurisdiction}#D#{YYYY-MM} diff --git a/backend/compact-connect/lambdas/python/provider-data-v1/handlers/ingest.py b/backend/compact-connect/lambdas/python/provider-data-v1/handlers/ingest.py index e1018c22e..c9c802cec 100644 --- a/backend/compact-connect/lambdas/python/provider-data-v1/handlers/ingest.py +++ b/backend/compact-connect/lambdas/python/provider-data-v1/handlers/ingest.py @@ -163,14 +163,14 @@ def ingest_license_message(message: dict): dynamo_transactions=dynamo_transactions, data_events=data_events, ) - # now grab the uploadDate from the existing record if available and put it in the posted_license + # now grab the firstUploadDate from the existing record if available and put it in the posted_license # for the license upload date GSI - if existing_license.get('uploadDate'): - posted_license_record['uploadDate'] = existing_license.get('uploadDate') + if existing_license.get('firstUploadDate'): + posted_license_record['firstUploadDate'] = existing_license.get('firstUploadDate') else: # If this is the first time creating the license record, - # set the uploadDate to the 
current time for license upload date GSI tracking - posted_license_record['uploadDate'] = config.current_standard_datetime + # set the firstUploadDate to the current time for license upload date GSI tracking + posted_license_record['firstUploadDate'] = config.current_standard_datetime # write the record to the table to reflect the latest values from the upload license_data = LicenseData.create_new(deepcopy(posted_license_record)) diff --git a/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_data_model/test_provider_transformations.py b/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_data_model/test_provider_transformations.py index f1c5a4f58..d8a1d2577 100644 --- a/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_data_model/test_provider_transformations.py +++ b/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_data_model/test_provider_transformations.py @@ -156,7 +156,7 @@ def test_transformations(self, mock_license_preprocessing_queue): # license should be active and compact eligible expected_license['licenseStatus'] = 'active' expected_license['compactEligibility'] = 'eligible' - expected_license['uploadDate'] = MOCK_CURRENT_DATETIME_STRING + expected_license['firstUploadDate'] = MOCK_CURRENT_DATETIME_STRING expected_license['licenseUploadDateGSIPK'] = 'C#aslp#J#oh#D#2024-11' expected_license['licenseUploadDateGSISK'] = ( 'TIME#1731110399#LT#slp#PID#89a6377e-c3a5-40e5-bca5-317ec854c570' From d6022c064b93bfd7bd288300217611089a74b21c Mon Sep 17 00:00:00 2001 From: Landon Shumway Date: Wed, 5 Nov 2025 12:47:55 -0600 Subject: [PATCH 17/81] Add tons of TODOs --- .../lambdas/python/common/cc_common/config.py | 8 ++ .../handlers/rollback_license_upload.py | 90 +++++++++---------- .../function/test_rollback_license_upload.py | 26 +++--- 3 files changed, 67 insertions(+), 57 deletions(-) diff --git a/backend/compact-connect/lambdas/python/common/cc_common/config.py 
b/backend/compact-connect/lambdas/python/common/cc_common/config.py index 4a790faca..de790b852 100644 --- a/backend/compact-connect/lambdas/python/common/cc_common/config.py +++ b/backend/compact-connect/lambdas/python/common/cc_common/config.py @@ -187,6 +187,10 @@ def bulk_bucket_name(self): def provider_user_bucket_name(self): return os.environ['PROVIDER_USER_BUCKET_NAME'] + @property + def rollback_results_bucket_name(self): + return os.environ['ROLLBACK_RESULTS_BUCKET_NAME'] + @property def user_pool_id(self): """ @@ -213,6 +217,10 @@ def users_table_name(self): def fam_giv_index_name(self): return os.environ['FAM_GIV_INDEX_NAME'] + @property + def license_upload_date_index_name(self): + return os.environ['LICENSE_UPLOAD_DATE_INDEX_NAME'] + @property def expiration_resolution_timezone(self): return timezone(offset=timedelta(hours=-4)) diff --git a/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py b/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py index 3dd7b2928..9647f0e34 100644 --- a/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py +++ b/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py @@ -1,16 +1,14 @@ import json -import os import time from dataclasses import dataclass, field from datetime import datetime -import boto3 from aws_lambda_powertools.utilities.typing import LambdaContext from boto3.dynamodb.conditions import Key from botocore.exceptions import ClientError from cc_common.config import config, logger from cc_common.data_model.provider_record_util import ProviderUserRecords -from cc_common.data_model.schema.common import UpdateCategory +from cc_common.data_model.schema.common import UpdateCategory, LICENSE_UPLOAD_UPDATE_CATEGORIES from cc_common.data_model.update_tier_enum import UpdateTierEnum from cc_common.event_batch_writer import EventBatchWriter @@ -18,13 +16,6 @@ # 
Maximum time window for rollback (1 week in seconds) MAX_ROLLBACK_WINDOW_SECONDS = 7 * 24 * 60 * 60 -# License upload related update categories -LICENSE_UPLOAD_UPDATE_CATEGORIES = { - UpdateCategory.DEACTIVATION, - UpdateCategory.RENEWAL, - UpdateCategory.LICENSE_UPLOAD_UPDATE_OTHER, -} - # Privilege update category for license deactivations PRIVILEGE_LICENSE_DEACTIVATION_CATEGORY = UpdateCategory.LICENSE_DEACTIVATION @@ -56,6 +47,7 @@ class ProviderFailedDetails: @dataclass class RevertedLicense: """Details of a reverted license for event publishing.""" + # TODO - provider id be UUID, add reversion id provider_id: str jurisdiction: str license_type: str @@ -105,14 +97,17 @@ def to_dict(self) -> dict: ], 'revertedProviderSummaries': [ { + # TODO - remove redundant provider id in licenses/privileges reverted objects 'providerId': summary.provider_id, 'licensesReverted': [ { - 'providerId': license.provider_id, - 'jurisdiction': license.jurisdiction, - 'licenseType': license.license_type, + 'providerId': license_record.provider_id, + 'jurisdiction': license_record.jurisdiction, + 'licenseType': license_record.license_type, + # TODO - add action field showing 'REVERT' or 'DELETED' + 'action': 'some-action' } - for license in summary.licenses_reverted + for license_record in summary.licenses_reverted ], 'privilegesReverted': [ { @@ -122,6 +117,7 @@ def to_dict(self) -> dict: } for privilege in summary.privileges_reverted ], + # TODO - add pk/sk to list 'updatesDeleted': summary.updates_deleted, } for summary in self.reverted_provider_summaries @@ -201,10 +197,9 @@ def rollback_license_upload(event: dict, context: LambdaContext): # noqa: ARG00 'providersSkipped': int, 'providersFailed': int, 'lastEvaluatedGSIKey': dict | None, - 'resultsS3Key': 's3://bucket-name/execution-id/results.json' } """ - start_time = time.time() + execution_start_time = time.time() max_execution_time = 12 * 60 # 12 minutes in seconds # Extract and validate input parameters @@ -254,28 +249,19 @@ 
def rollback_license_upload(event: dict, context: LambdaContext): # noqa: ARG00 ) # Initialize S3 client and bucket - s3_client = boto3.client('s3') - rollback_results_bucket_name = os.environ['ROLLBACK_RESULTS_BUCKET_NAME'] results_s3_key = f'{execution_id}/results.json' # Load existing results if this is a continuation - if providers_processed > 0: - existing_results = _load_results_from_s3(s3_client, rollback_results_bucket_name, results_s3_key) - else: - existing_results = RollbackResults() + existing_results = _load_results_from_s3(results_s3_key) # Initialize counters providers_reverted = len(existing_results.reverted_provider_summaries) providers_skipped = len(existing_results.skipped_provider_details) providers_failed = len(existing_results.failed_provider_details) - # Get provider table and GSI - provider_table = config.provider_table - try: # Query GSI for affected records across the time window affected_provider_ids = _query_gsi_for_affected_providers( - provider_table, compact, jurisdiction, start_datetime, @@ -286,12 +272,12 @@ def rollback_license_upload(event: dict, context: LambdaContext): # noqa: ARG00 # Process each provider for provider_id in affected_provider_ids: # Check time limit - elapsed_time = time.time() - start_time + elapsed_time = time.time() - execution_start_time if elapsed_time > max_execution_time: logger.info(f'Approaching time limit after {elapsed_time:.2f} seconds. 
Returning IN_PROGRESS status.') # Write current results to S3 - _write_results_to_s3(s3_client, rollback_results_bucket_name, results_s3_key, existing_results) + _write_results_to_s3(results_s3_key, existing_results) return { 'rollbackStatus': 'IN_PROGRESS', @@ -300,7 +286,6 @@ def rollback_license_upload(event: dict, context: LambdaContext): # noqa: ARG00 'providersSkipped': providers_skipped, 'providersFailed': providers_failed, 'lastEvaluatedGSIKey': None, # Continue from next provider - 'resultsS3Key': f's3://{rollback_results_bucket_name}/{results_s3_key}', 'compact': compact, 'jurisdiction': jurisdiction, 'startDateTime': start_datetime_str, @@ -336,7 +321,8 @@ def rollback_license_upload(event: dict, context: LambdaContext): # noqa: ARG00 logger.info('Rollback complete', providers_processed=providers_processed) # Write final results to S3 - _write_results_to_s3(s3_client, rollback_results_bucket_name, results_s3_key, existing_results) + # TODO - consider writing a CSV file with final values for ease of reference + _write_results_to_s3(results_s3_key, existing_results) return { 'rollbackStatus': 'COMPLETE', @@ -344,7 +330,7 @@ def rollback_license_upload(event: dict, context: LambdaContext): # noqa: ARG00 'providersReverted': providers_reverted, 'providersSkipped': providers_skipped, 'providersFailed': providers_failed, - 'resultsS3Key': f's3://{rollback_results_bucket_name}/{results_s3_key}', + 'resultsS3Key': f's3://{config.rollback_results_bucket_name}/{results_s3_key}', } except ClientError as e: @@ -353,7 +339,6 @@ def rollback_license_upload(event: dict, context: LambdaContext): # noqa: ARG00 def _query_gsi_for_affected_providers( - provider_table, compact: str, jurisdiction: str, start_datetime: datetime, @@ -399,7 +384,7 @@ def _query_gsi_for_affected_providers( query_kwargs['ExclusiveStartKey'] = last_evaluated_key while True: - response = provider_table.query(**query_kwargs) + response = config.provider_table.query(**query_kwargs) # Extract provider 
IDs from the results for item in response.get('Items', []): @@ -498,7 +483,8 @@ def _build_and_execute_revert_transactions( from cc_common.data_model.schema.license import LicenseData from cc_common.data_model.schema.license.record import LicenseRecordSchema from cc_common.data_model.schema.privilege import PrivilegeData - + + # TODO - split transactions into first tier/second tier lists (license/privilege first tier, updates second) transaction_items = [] table_name = config.provider_table_name reverted_licenses = [] @@ -544,6 +530,7 @@ def add_delete(pk: str, sk: str): for license_record in license_records: # Get privilege updates for this license (same jurisdiction and license type) + # TODO - get the privilege updates for license jurisdiction privilege_updates = provider_records.get_update_records_for_privilege( jurisdiction=license_record.jurisdiction, license_type=license_record.licenseType, @@ -569,6 +556,7 @@ def add_delete(pk: str, sk: str): logger.info('Will delete privilege deactivation update record if provider is eligible for rollback') # Reactivate the privilege + # TODO - get privilege by it's own privilege jurisdiction, not the license jurisdiction privilege_record = provider_records.get_specific_privilege_record( jurisdiction=license_record.jurisdiction, license_abbreviation=license_record.licenseTypeAbbreviation, @@ -576,6 +564,7 @@ def add_delete(pk: str, sk: str): if privilege_record: # Remove the licenseDeactivatedStatus field to reactivate reactivated_privilege_data = privilege_record.to_dict() + # TODO - we should use an UPDATE to remove this field, instead of a PUT reactivated_privilege_data.pop('licenseDeactivatedStatus', None) reactivated_privilege = PrivilegeData.create_new(reactivated_privilege_data) @@ -583,6 +572,7 @@ def add_delete(pk: str, sk: str): logger.info('Will reactivate privilege record if provider is eligible for rollback') reverted_privileges.append( + # TODO - add revision id RevertedPrivilege( provider_id=provider_id, 
jurisdiction=license_record.jurisdiction, @@ -598,11 +588,13 @@ def add_delete(pk: str, sk: str): ) # if license record was created during the window, delete it and all update records after start_datetime - if license_record.uploadDate is not None and start_datetime <= license_record.uploadDate <= end_datetime: + # TODO - add check for any privileges that exist for this license, since there won't be a privilege update record + if license_record.firstUploadDate is not None and start_datetime <= license_record.firstUploadDate <= end_datetime: serialized = license_record.serialize_to_database_record() add_delete(serialized['pk'], serialized['sk']) logger.info('Will delete license record (created during upload) if provider is eligible for rollback') reverted_licenses.append( + # TODO - add revision id RevertedLicense( provider_id=provider_id, jurisdiction=license_record.jurisdiction, @@ -618,12 +610,13 @@ def add_delete(pk: str, sk: str): # If license record was not created during the window, check license updates for eligibility and build transactions license_updates_in_window = [] for license_update in license_updates_after_start: - if license_update.updateType not in LICENSE_UPLOAD_UPDATE_CATEGORIES: + if license_update.updateType not in LICENSE_UPLOAD_UPDATE_CATEGORIES or license_update.createDate > end_datetime: # Non-upload-related license updates make provider ineligible ineligible_updates.append( IneligibleUpdate( type='licenseUpdate', update_type=license_update.updateType, + # TODO - add 'ineligibleReason' field to explain why it's ineligible create_date=license_update.createDate.isoformat(), ) ) @@ -646,8 +639,8 @@ def add_delete(pk: str, sk: str): earliest_update_in_window = license_updates_in_window[0] # Check if license was created during the window (uploadDate within window) - if (license_record.uploadDate is not None and - start_datetime <= license_record.uploadDate <= end_datetime): + if (license_record.firstUploadDate is not None and + start_datetime 
<= license_record.firstUploadDate <= end_datetime): # License created during upload - delete it serialized = license_record.serialize_to_database_record() add_delete(serialized['pk'], serialized['sk']) @@ -695,6 +688,7 @@ def add_delete(pk: str, sk: str): logger.info( 'Provider not eligible for automatic rollback', provider_id=provider_id, + # TODO - log full change summary ineligible_count=len(ineligible_updates), ) return ProviderSkippedDetails( @@ -724,6 +718,7 @@ def add_delete(pk: str, sk: str): privilege_records_dict = [p.to_dict() for p in provider_records.get_privilege_records()] # Find best license from reverted state + # TODO - first update licenses/privilege, then pull down again, and update provider record in separate transaction if reverted_licenses_dict: best_license = ProviderRecordUtility.find_best_license( license_records=reverted_licenses_dict, @@ -755,9 +750,11 @@ def add_delete(pk: str, sk: str): for i in range(0, len(transaction_items), 100): batch = transaction_items[i:i + 100] # Use Table resource's client for automatic type conversion + # TODO - catch failures and add failure record to write to S3 results object config.provider_table.meta.client.transact_write_items(TransactItems=batch) logger.info(f'Executed batch {i // 100 + 1} with {len(batch)} items') - + + # TODO - log full change summary (DO NOT LOG PII) return ProviderRevertedSummary( provider_id=provider_id, licenses_reverted=reverted_licenses, @@ -776,6 +773,7 @@ def _publish_revert_events(revert_summary: ProviderRevertedSummary, compact: str config.event_bus_client.publish_license_revert_event( source='org.compactconnect.disaster-recovery', compact=compact, + # TODO - add start time, end time, and revert id provider_id=reverted_license.provider_id, jurisdiction=reverted_license.jurisdiction, license_type=reverted_license.license_type, @@ -796,13 +794,13 @@ def _publish_revert_events(revert_summary: ProviderRevertedSummary, compact: str ) -def _load_results_from_s3(s3_client, 
bucket_name: str, key: str) -> RollbackResults: +def _load_results_from_s3(key: str) -> RollbackResults: """Load existing results from S3.""" try: - response = s3_client.get_object(Bucket=bucket_name, Key=key) + response = config.s3_client.get_object(Bucket=config.rollback_results_bucket_name, Key=key) data = json.loads(response['Body'].read().decode('utf-8')) return RollbackResults.from_dict(data) - except s3_client.exceptions.NoSuchKey: + except config.s3_client.exceptions.NoSuchKey: # First execution, no existing results return RollbackResults() except Exception as e: @@ -810,17 +808,17 @@ def _load_results_from_s3(s3_client, bucket_name: str, key: str) -> RollbackResu raise -def _write_results_to_s3(s3_client, bucket_name: str, key: str, results: RollbackResults): +def _write_results_to_s3(key: str, results: RollbackResults): """Write results to S3 with server-side encryption.""" try: - s3_client.put_object( - Bucket=bucket_name, + config.s3_client.put_object( + Bucket=config.rollback_results_bucket_name, Key=key, Body=json.dumps(results.to_dict(), indent=2), ContentType='application/json', ServerSideEncryption='aws:kms', ) - logger.info('Results written to S3', bucket=bucket_name, key=key) + logger.info('Results written to S3', bucket=config.rollback_results_bucket_name, key=key) except Exception as e: logger.error(f'Error writing results to S3: {str(e)}') raise diff --git a/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py b/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py index 09b06e620..6e5ed40ca 100644 --- a/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py +++ b/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py @@ -52,7 +52,7 @@ def _when_provider_had_license_created_from_upload(self, upload_datetime: dateti 'providerId': self.provider_id, 
'compact': self.compact, 'jurisdiction': self.jurisdiction, - 'uploadDate': upload_datetime, + 'firstUploadDate': upload_datetime, 'dateOfUpdate': upload_datetime, }) @@ -82,7 +82,7 @@ def _when_provider_had_license_updated_from_upload(self, upload_datetime: dateti 'jurisdiction': self.jurisdiction, 'dateOfUpdate': self.default_start_datetime - timedelta(days=30), 'dateOfExpiration': (self.default_start_datetime - timedelta(days=30)).date(), - 'uploadDate': license_upload_datetime, + 'firstUploadDate': license_upload_datetime, }) # Create update record within upload window @@ -94,7 +94,6 @@ def _when_provider_had_license_updated_from_upload(self, upload_datetime: dateti 'updateType': self.update_categories.RENEWAL, 'createDate': upload_datetime, 'effectiveDate': upload_datetime, - 'uploadDate': upload_datetime, 'previous': { 'dateOfExpiration': original_license.dateOfExpiration, **original_license.to_dict() @@ -103,7 +102,7 @@ def _when_provider_had_license_updated_from_upload(self, upload_datetime: dateti 'dateOfExpiration': (upload_datetime + timedelta(days=365)).date(), }, }) - + # Update the license record to reflect the new expiration updated_license = self.test_data_generator.put_default_license_record_in_provider_table({ 'providerId': self.provider_id, @@ -111,9 +110,9 @@ def _when_provider_had_license_updated_from_upload(self, upload_datetime: dateti 'jurisdiction': self.jurisdiction, 'dateOfUpdate': upload_datetime, 'dateOfExpiration': (upload_datetime + timedelta(days=365)).date(), - 'uploadDate': license_upload_datetime, + 'firstUploadDate': license_upload_datetime, }) - + return updated_license, license_update def _when_provider_had_privilege_deactivated_from_upload(self, upload_datetime: datetime = None): @@ -350,7 +349,9 @@ def test_provider_license_updates_and_license_record_within_time_period_removed_ from handlers.rollback_license_upload import rollback_license_upload # Setup: License was uploaded and then updated during upload - license_record, 
license_update = self._when_provider_had_license_updated_from_upload(license_upload_datetime=self.default_start_datetime + timedelta(hours = 1)) + self._when_provider_had_license_updated_from_upload( + license_upload_datetime=self.default_start_datetime + timedelta(hours = 1) + ) # Verify update record exists before rollback provider_records_before = self.config.data_client.get_provider_user_records( @@ -358,8 +359,11 @@ def test_provider_license_updates_and_license_record_within_time_period_removed_ provider_id=self.provider_id, include_update_tier=UpdateTierEnum.TIER_THREE, ) + licenses_before = provider_records_before.get_license_records() + self.assertEqual(len(licenses_before), 1, "Should have license record before rollback") license_updates_before = provider_records_before.get_all_license_update_records() - self.assertGreater(len(license_updates_before), 0, "Should have update records before rollback") + self.assertEqual(len(license_updates_before), 1, "Should have update record before rollback") + # Execute: Perform rollback event = { @@ -434,7 +438,7 @@ def test_provider_skipped_if_license_updates_detected_after_time_period_when_upl from handlers.rollback_license_upload import rollback_license_upload # Setup: Provider had license update after upload window - license_record, license_update = self._when_provider_had_license_update_after_upload() + self._when_provider_had_license_update_after_upload() # Execute: Perform rollback event = { @@ -451,9 +455,9 @@ def test_provider_skipped_if_license_updates_detected_after_time_period_when_upl # Assert: Rollback completed but provider was skipped self.assertEqual(result['rollbackStatus'], 'COMPLETE') - self.assertEqual(result['providersSkipped'], 1) self.assertEqual(result['providersReverted'], 0) - + self.assertEqual(result['providersSkipped'], 1) + # Verify: License record and update still exist (not rolled back) provider_records = self.config.data_client.get_provider_user_records( compact=self.compact, From 
7122fbfbdb0a534a00e6ee44101d1c17b901f754 Mon Sep 17 00:00:00 2001 From: Landon Shumway Date: Wed, 5 Nov 2025 16:53:18 -0600 Subject: [PATCH 18/81] Reactivating privileges that were deactivated by bad uploads --- .../handlers/rollback_license_upload.py | 96 ++++++++++++------- .../function/test_rollback_license_upload.py | 21 +++- .../license_upload_rollback_step_function.py | 2 +- 3 files changed, 80 insertions(+), 39 deletions(-) diff --git a/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py b/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py index 9647f0e34..6920d804d 100644 --- a/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py +++ b/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py @@ -1,5 +1,6 @@ import json import time +import uuid from dataclasses import dataclass, field from datetime import datetime @@ -24,9 +25,10 @@ @dataclass class IneligibleUpdate: """Represents an update that makes a provider ineligible for rollback.""" - type: str # 'licenseUpdate' or 'privilegeUpdate' + type: str # 'licenseUpdate', 'privilegeUpdate', or 'providerUpdate' update_type: str create_date: str + reason: str @dataclass @@ -34,7 +36,7 @@ class ProviderSkippedDetails: """Details for a provider that was skipped.""" provider_id: str reason: str - ineligible_updates: list[dict] = field(default_factory=list) + ineligible_updates: list[IneligibleUpdate] = field(default_factory=list) @dataclass @@ -47,10 +49,11 @@ class ProviderFailedDetails: @dataclass class RevertedLicense: """Details of a reverted license for event publishing.""" - # TODO - provider id be UUID, add reversion id + # TODO - provider id be UUID provider_id: str jurisdiction: str license_type: str + revision_id: str @dataclass @@ -59,6 +62,7 @@ class RevertedPrivilege: provider_id: str jurisdiction: str license_type: str + revision_id: str 
@dataclass @@ -104,6 +108,7 @@ def to_dict(self) -> dict: 'providerId': license_record.provider_id, 'jurisdiction': license_record.jurisdiction, 'licenseType': license_record.license_type, + 'revisionId': license_record.revision_id, # TODO - add action field showing 'REVERT' or 'DELETED' 'action': 'some-action' } @@ -114,6 +119,7 @@ def to_dict(self) -> dict: 'providerId': privilege.provider_id, 'jurisdiction': privilege.jurisdiction, 'licenseType': privilege.license_type, + 'revisionId': privilege.revision_id, } for privilege in summary.privileges_reverted ], @@ -151,6 +157,7 @@ def from_dict(cls, data: dict) -> 'RollbackResults': provider_id=license['providerId'], jurisdiction=license['jurisdiction'], license_type=license['licenseType'], + revision_id=license.get('revisionId', str(uuid.uuid4())), ) for license in summary.get('licensesReverted', []) ], @@ -159,6 +166,7 @@ def from_dict(cls, data: dict) -> 'RollbackResults': provider_id=privilege['providerId'], jurisdiction=privilege['jurisdiction'], license_type=privilege['licenseType'], + revision_id=privilege.get('revisionId', str(uuid.uuid4())), ) for privilege in summary.get('privilegesReverted', []) ], @@ -518,6 +526,7 @@ def add_delete(pk: str, sk: str): type='providerUpdate', update_type=update.updateType, create_date=update.dateOfUpdate.isoformat(), + reason='Provider update occurred after rollback start time. 
Manual review required.', ) ) @@ -529,12 +538,14 @@ def add_delete(pk: str, sk: str): reverted_licenses_dict = [] for license_record in license_records: - # Get privilege updates for this license (same jurisdiction and license type) - # TODO - get the privilege updates for license jurisdiction - privilege_updates = provider_records.get_update_records_for_privilege( - jurisdiction=license_record.jurisdiction, - license_type=license_record.licenseType, - filter_condition=lambda x: x.createDate >= start_datetime, + privileges_associated_with_license = provider_records.get_privilege_records( + filter_condition=lambda x: x.jurisdiction == jurisdiction and x.licenseType == license_record.licenseType + ) + privilege_jurisdictions = [x.jurisdiction for x in privileges_associated_with_license] + # Get privilege updates for all privileges associated with this license + # that are after the start_datetime + privilege_updates = provider_records.get_all_privilege_update_records( + filter_condition=lambda x: x.jurisdiction in privilege_jurisdictions and x.dateOfUpdate >= start_datetime, ) # Check privilege updates for eligibility @@ -545,7 +556,11 @@ def add_delete(pk: str, sk: str): IneligibleUpdate( type='privilegeUpdate', update_type=privilege_update.updateType, - create_date=privilege_update.createDate.isoformat(), + create_date=privilege_update.dateOfUpdate.isoformat(), + # include privilege jurisdiction in reason + reason=f'Privilege in jurisdiction {privilege_update.jurisdiction} for license ' + f'type {privilege_update.licenseType} was updated with a change unrelated to license ' + f'upload or the update occurred after rollback end time. 
Manual review required.', ) ) elif start_datetime <= privilege_update.createDate <= end_datetime: @@ -556,27 +571,34 @@ def add_delete(pk: str, sk: str): logger.info('Will delete privilege deactivation update record if provider is eligible for rollback') # Reactivate the privilege - # TODO - get privilege by it's own privilege jurisdiction, not the license jurisdiction privilege_record = provider_records.get_specific_privilege_record( - jurisdiction=license_record.jurisdiction, + jurisdiction=privilege_update.jurisdiction, license_abbreviation=license_record.licenseTypeAbbreviation, ) if privilege_record: - # Remove the licenseDeactivatedStatus field to reactivate - reactivated_privilege_data = privilege_record.to_dict() - # TODO - we should use an UPDATE to remove this field, instead of a PUT - reactivated_privilege_data.pop('licenseDeactivatedStatus', None) - - reactivated_privilege = PrivilegeData.create_new(reactivated_privilege_data) - add_put(reactivated_privilege.serialize_to_database_record()) + logger.info( + 'privilege record found associated with deactivation, reactivating privilege', + provider_id=provider_id, + privilege_jurisdiction=privilege_record.jurisdiction, + license_type=privilege_record.licenseType + ) + # Remove the licenseDeactivatedStatus field to reactivate using UPDATE operation + serialized_privilege = privilege_record.serialize_to_database_record() + transaction_items.append({ + 'Update': { + 'TableName': table_name, + 'Key': {'pk': serialized_privilege['pk'], 'sk': serialized_privilege['sk']}, + 'UpdateExpression': 'REMOVE licenseDeactivatedStatus' + } + }) logger.info('Will reactivate privilege record if provider is eligible for rollback') reverted_privileges.append( - # TODO - add revision id RevertedPrivilege( provider_id=provider_id, jurisdiction=license_record.jurisdiction, license_type=license_record.licenseType, + revision_id=str(uuid.uuid4()), ) ) @@ -588,17 +610,27 @@ def add_delete(pk: str, sk: str): ) # if license record was 
created during the window, delete it and all update records after start_datetime - # TODO - add check for any privileges that exist for this license, since there won't be a privilege update record if license_record.firstUploadDate is not None and start_datetime <= license_record.firstUploadDate <= end_datetime: + if privilege_jurisdictions: + ineligible_updates.append( + IneligibleUpdate( + type='privilegeUpdate', + update_type='Issuance', + create_date=datetime.now().isoformat(), + reason=f'Privileges issued in jurisdictions {privilege_jurisdictions} for ' + + f'license type {license_record.licenseType} after license upload. Manual review required.', + ) + ) + # no privileges found, so we can delete the license record serialized = license_record.serialize_to_database_record() add_delete(serialized['pk'], serialized['sk']) logger.info('Will delete license record (created during upload) if provider is eligible for rollback') reverted_licenses.append( - # TODO - add revision id RevertedLicense( provider_id=provider_id, jurisdiction=license_record.jurisdiction, license_type=license_record.licenseType, + revision_id=str(uuid.uuid4()), ) ) for update in license_updates_after_start: @@ -616,8 +648,10 @@ def add_delete(pk: str, sk: str): IneligibleUpdate( type='licenseUpdate', update_type=license_update.updateType, - # TODO - add 'ineligibleReason' field to explain why it's ineligible create_date=license_update.createDate.isoformat(), + reason=f'License update for license type {license_update.licenseType} was updated with a ' + f'change unrelated to license upload or the update occurred after rollback end ' + f'time. 
Manual review required.', ) ) elif start_datetime <= license_update.createDate <= end_datetime: @@ -651,6 +685,7 @@ def add_delete(pk: str, sk: str): provider_id=provider_id, jurisdiction=license_record.jurisdiction, license_type=license_record.licenseType, + revision_id=str(uuid.uuid4()), ) ) else: @@ -673,6 +708,7 @@ def add_delete(pk: str, sk: str): provider_id=provider_id, jurisdiction=license_record.jurisdiction, license_type=license_record.licenseType, + revision_id=str(uuid.uuid4()), ) ) else: @@ -688,20 +724,12 @@ def add_delete(pk: str, sk: str): logger.info( 'Provider not eligible for automatic rollback', provider_id=provider_id, - # TODO - log full change summary - ineligible_count=len(ineligible_updates), + ineligible_updates=ineligible_updates ) return ProviderSkippedDetails( provider_id=provider_id, - reason='Provider has non-upload-related updates after rollback start time', - ineligible_updates=[ - { - 'type': update.type, - 'updateType': update.update_type, - 'createDate': update.create_date, - } - for update in ineligible_updates - ], + reason='Provider has updates that are either unrelated to license upload or occurred after rollback end time. 
Manual review required.', + ineligible_updates=ineligible_updates, ) # Step 3: Regenerate provider record using populate_provider_record diff --git a/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py b/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py index 6e5ed40ca..e59ff89d2 100644 --- a/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py +++ b/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py @@ -96,6 +96,7 @@ def _when_provider_had_license_updated_from_upload(self, upload_datetime: dateti 'effectiveDate': upload_datetime, 'previous': { 'dateOfExpiration': original_license.dateOfExpiration, + 'licenseStatus': 'inactive', **original_license.to_dict() }, 'updatedValues': { @@ -110,25 +111,30 @@ def _when_provider_had_license_updated_from_upload(self, upload_datetime: dateti 'jurisdiction': self.jurisdiction, 'dateOfUpdate': upload_datetime, 'dateOfExpiration': (upload_datetime + timedelta(days=365)).date(), + 'licenseStatus': 'inactive', 'firstUploadDate': license_upload_datetime, }) - return updated_license, license_update + return original_license, license_update, updated_license def _when_provider_had_privilege_deactivated_from_upload(self, upload_datetime: datetime = None): """ Set up a scenario where a provider's privilege was deactivated due to license deactivation during upload. Returns the privilege and its update record. 
""" + from cc_common.data_model.schema.common import LicenseDeactivatedStatusEnum + if upload_datetime is None: upload_datetime = self.default_upload_datetime - # Create privilege that was active before upload + # Create privilege that was deactivated by upload privilege = self.test_data_generator.put_default_privilege_record_in_provider_table({ 'providerId': self.provider_id, 'compact': self.compact, 'jurisdiction': self.jurisdiction, 'dateOfUpdate': self.default_start_datetime - timedelta(days=30), + 'licenseDeactivatedStatus': LicenseDeactivatedStatusEnum.LICENSE_DEACTIVATED, + 'dateOfExpiration': datetime.fromisoformat(MOCK_DATETIME_STRING) }) # Create deactivation update record @@ -140,6 +146,9 @@ def _when_provider_had_privilege_deactivated_from_upload(self, upload_datetime: 'updateType': self.update_categories.LICENSE_DEACTIVATION, 'createDate': upload_datetime, 'effectiveDate': upload_datetime, + 'updatedValues': { + 'licenseDeactivatedStatus': LicenseDeactivatedStatusEnum.LICENSE_DEACTIVATED, + }, }) return privilege, privilege_update @@ -267,7 +276,7 @@ def test_provider_license_record_reset_to_prior_values_when_upload_reverted(self from handlers.rollback_license_upload import rollback_license_upload # Setup: License was updated during upload (e.g., renewed), but was first uploaded before start time - updated_license, license_update = self._when_provider_had_license_updated_from_upload( + original_license, license_update, updated_license = self._when_provider_had_license_updated_from_upload( license_upload_datetime=self.default_start_datetime - timedelta(hours = 1)) # Store the original expiration date from the update's previous values @@ -310,7 +319,10 @@ def test_provider_privilege_record_reactivated_when_upload_reverted(self): from handlers.rollback_license_upload import rollback_license_upload # Setup: Privilege was deactivated during upload due to license deactivation - privilege, privilege_update = 
self._when_provider_had_privilege_deactivated_from_upload() + # license was uploaded before rollback window + self._when_provider_had_license_updated_from_upload( + license_upload_datetime=self.default_start_datetime - timedelta(hours = 1)) + self._when_provider_had_privilege_deactivated_from_upload() # Execute: Perform rollback event = { @@ -339,6 +351,7 @@ def test_provider_privilege_record_reactivated_when_upload_reverted(self): self.assertEqual(len(privileges), 1) privilege_record = privileges[0] self.assertEqual(privilege_record.status, 'active', "Privilege should be reactivated") + self.assertIsNone(privilege_record.licenseDeactivatedStatus) # Verify: Privilege update record has been deleted privilege_updates = provider_records.get_all_privilege_update_records() diff --git a/backend/compact-connect/stacks/disaster_recovery_stack/license_upload_rollback_step_function.py b/backend/compact-connect/stacks/disaster_recovery_stack/license_upload_rollback_step_function.py index a7b994021..42db9238a 100644 --- a/backend/compact-connect/stacks/disaster_recovery_stack/license_upload_rollback_step_function.py +++ b/backend/compact-connect/stacks/disaster_recovery_stack/license_upload_rollback_step_function.py @@ -118,7 +118,7 @@ def _create_rollback_function( index=os.path.join('handlers', 'rollback_license_upload.py'), handler='rollback_license_upload', timeout=Duration.minutes(15), - memory_size=3008, # High memory for performance + memory_size=5120, # 5 GB for managing potentially large results files environment={ **stack.common_env_vars, 'ROLLBACK_RESULTS_BUCKET_NAME': rollback_results_bucket.bucket_name, From 5bc05b6b2a97ee7aa77d48fbe9b3a0b373cb3d02 Mon Sep 17 00:00:00 2001 From: Landon Shumway Date: Thu, 6 Nov 2025 12:09:28 -0600 Subject: [PATCH 19/81] Get current tests to pass --- .../handlers/rollback_license_upload.py | 55 ++-- .../function/test_rollback_license_upload.py | 254 ++++++------------ 2 files changed, 102 insertions(+), 207 deletions(-) diff --git 
a/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py b/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py index 6920d804d..b376199b0 100644 --- a/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py +++ b/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py @@ -1,7 +1,7 @@ import json import time -import uuid -from dataclasses import dataclass, field +from uuid import UUID, uuid4 +from dataclasses import asdict, dataclass, field from datetime import datetime from aws_lambda_powertools.utilities.typing import LambdaContext @@ -49,20 +49,17 @@ class ProviderFailedDetails: @dataclass class RevertedLicense: """Details of a reverted license for event publishing.""" - # TODO - provider id be UUID - provider_id: str jurisdiction: str license_type: str - revision_id: str + revision_id: UUID @dataclass class RevertedPrivilege: """Details of a reverted privilege for event publishing.""" - provider_id: str jurisdiction: str license_type: str - revision_id: str + revision_id: UUID @dataclass @@ -85,30 +82,21 @@ def to_dict(self) -> dict: """Convert to dictionary for S3 storage.""" return { 'skippedProviderDetails': [ - { - 'providerId': detail.provider_id, - 'reason': detail.reason, - 'ineligibleUpdates': detail.ineligible_updates, - } + asdict(detail) for detail in self.skipped_provider_details ], 'failedProviderDetails': [ - { - 'providerId': detail.provider_id, - 'error': detail.error, - } + asdict(detail) for detail in self.failed_provider_details ], 'revertedProviderSummaries': [ { - # TODO - remove redundant provider id in licenses/privileges reverted objects - 'providerId': summary.provider_id, + 'providerId': str(summary.provider_id), 'licensesReverted': [ { - 'providerId': license_record.provider_id, 'jurisdiction': license_record.jurisdiction, 'licenseType': license_record.license_type, - 'revisionId': 
license_record.revision_id, + 'revisionId': str(license_record.revision_id), # TODO - add action field showing 'REVERT' or 'DELETED' 'action': 'some-action' } @@ -116,10 +104,9 @@ def to_dict(self) -> dict: ], 'privilegesReverted': [ { - 'providerId': privilege.provider_id, 'jurisdiction': privilege.jurisdiction, 'licenseType': privilege.license_type, - 'revisionId': privilege.revision_id, + 'revisionId': str(privilege.revision_id), } for privilege in summary.privileges_reverted ], @@ -154,19 +141,17 @@ def from_dict(cls, data: dict) -> 'RollbackResults': provider_id=summary['providerId'], licenses_reverted=[ RevertedLicense( - provider_id=license['providerId'], jurisdiction=license['jurisdiction'], license_type=license['licenseType'], - revision_id=license.get('revisionId', str(uuid.uuid4())), + revision_id=uuid4(), ) for license in summary.get('licensesReverted', []) ], privileges_reverted=[ RevertedPrivilege( - provider_id=privilege['providerId'], jurisdiction=privilege['jurisdiction'], license_type=privilege['licenseType'], - revision_id=privilege.get('revisionId', str(uuid.uuid4())), + revision_id=uuid4(), ) for privilege in summary.get('privilegesReverted', []) ], @@ -490,7 +475,6 @@ def _build_and_execute_revert_transactions( from cc_common.data_model.provider_record_util import ProviderRecordUtility from cc_common.data_model.schema.license import LicenseData from cc_common.data_model.schema.license.record import LicenseRecordSchema - from cc_common.data_model.schema.privilege import PrivilegeData # TODO - split transactions into first tier/second tier lists (license/privilege first tier, updates second) transaction_items = [] @@ -595,10 +579,9 @@ def add_delete(pk: str, sk: str): reverted_privileges.append( RevertedPrivilege( - provider_id=provider_id, jurisdiction=license_record.jurisdiction, license_type=license_record.licenseType, - revision_id=str(uuid.uuid4()), + revision_id=uuid4(), ) ) @@ -627,10 +610,9 @@ def add_delete(pk: str, sk: str): 
logger.info('Will delete license record (created during upload) if provider is eligible for rollback') reverted_licenses.append( RevertedLicense( - provider_id=provider_id, jurisdiction=license_record.jurisdiction, license_type=license_record.licenseType, - revision_id=str(uuid.uuid4()), + revision_id=uuid4(), ) ) for update in license_updates_after_start: @@ -682,10 +664,9 @@ def add_delete(pk: str, sk: str): reverted_licenses.append( RevertedLicense( - provider_id=provider_id, jurisdiction=license_record.jurisdiction, license_type=license_record.licenseType, - revision_id=str(uuid.uuid4()), + revision_id=uuid4(), ) ) else: @@ -705,10 +686,9 @@ def add_delete(pk: str, sk: str): reverted_licenses.append( RevertedLicense( - provider_id=provider_id, jurisdiction=license_record.jurisdiction, license_type=license_record.licenseType, - revision_id=str(uuid.uuid4()), + revision_id=uuid4(), ) ) else: @@ -747,6 +727,7 @@ def add_delete(pk: str, sk: str): # Find best license from reverted state # TODO - first update licenses/privilege, then pull down again, and update provider record in separate transaction + # or delete it all together if all license records were deleted for provider if reverted_licenses_dict: best_license = ProviderRecordUtility.find_best_license( license_records=reverted_licenses_dict, @@ -802,7 +783,7 @@ def _publish_revert_events(revert_summary: ProviderRevertedSummary, compact: str source='org.compactconnect.disaster-recovery', compact=compact, # TODO - add start time, end time, and revert id - provider_id=reverted_license.provider_id, + provider_id=revert_summary.provider_id, jurisdiction=reverted_license.jurisdiction, license_type=reverted_license.license_type, rollback_reason=rollback_reason, @@ -814,7 +795,7 @@ def _publish_revert_events(revert_summary: ProviderRevertedSummary, compact: str config.event_bus_client.publish_privilege_revert_event( source='org.compactconnect.disaster-recovery', compact=compact, - 
provider_id=reverted_privilege.provider_id, + provider_id=revert_summary.provider_id, jurisdiction=reverted_privilege.jurisdiction, license_type=reverted_privilege.license_type, rollback_reason=rollback_reason, diff --git a/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py b/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py index e59ff89d2..9cfe3d9c5 100644 --- a/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py +++ b/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py @@ -18,6 +18,10 @@ from . import TstFunction MOCK_DATETIME_STRING = '2025-10-23T08:15:00+00:00' +MOCK_ORIGINAL_GIVEN_NAME = 'originalGiven' +MOCK_ORIGINAL_FAMILY_NAME = 'originalFamily' +MOCK_UPDATED_GIVEN_NAME = 'updatedGiven' +MOCK_UPDATED_FAMILY_NAME = 'updatedFamily' @mock_aws @@ -39,21 +43,40 @@ def setUp(self): from cc_common.data_model.schema.common import UpdateCategory self.update_categories = UpdateCategory + self._add_provider_record() + + def _generate_test_event(self): + return { + 'compact': self.compact, + 'jurisdiction': self.jurisdiction, + 'startDateTime': self.default_start_datetime.isoformat(), + 'endDateTime': self.default_end_datetime.isoformat(), + 'rollbackReason': 'Test rollback', + 'executionId': 'test-execution-123', + 'providersProcessed': 0, + } + + def _add_provider_record(self): + # add provider record to provider table + self.test_data_generator.put_default_provider_record_in_provider_table({ + 'providerId': self.provider_id, + 'compact': self.compact, + 'jurisdiction': self.jurisdiction, + 'dateOfUpdate': self.default_start_datetime - timedelta(days=30), + }) + # Helper methods for setting up test scenarios - def _when_provider_had_license_created_from_upload(self, upload_datetime: datetime = None): + def _when_provider_had_license_created_from_upload(self): 
""" Set up a scenario where a provider had a license created during the upload window. Returns the created license data. """ - if upload_datetime is None: - upload_datetime = self.default_upload_datetime - return self.test_data_generator.put_default_license_record_in_provider_table({ 'providerId': self.provider_id, 'compact': self.compact, 'jurisdiction': self.jurisdiction, - 'firstUploadDate': upload_datetime, - 'dateOfUpdate': upload_datetime, + 'firstUploadDate': self.default_upload_datetime, + 'dateOfUpdate': self.default_upload_datetime, }) def _when_provider_had_license_updated_from_upload(self, upload_datetime: datetime = None, license_upload_datetime: datetime = None): @@ -64,15 +87,8 @@ def _when_provider_had_license_updated_from_upload(self, upload_datetime: dateti if upload_datetime is None: upload_datetime = self.default_upload_datetime if license_upload_datetime is None: - license_upload_datetime = self.default_upload_datetime - - # add provider record to provider table - self.test_data_generator.put_default_provider_record_in_provider_table({ - 'providerId': self.provider_id, - 'compact': self.compact, - 'jurisdiction': self.jurisdiction, - 'dateOfUpdate': self.default_start_datetime - timedelta(days=30), - }) + # by default, the license was originally uploaded a day before the bad upload + license_upload_datetime = self.default_start_datetime - timedelta(days=1) # Create original license before upload window, unless different time is provided @@ -80,6 +96,8 @@ def _when_provider_had_license_updated_from_upload(self, upload_datetime: dateti 'providerId': self.provider_id, 'compact': self.compact, 'jurisdiction': self.jurisdiction, + 'familyName': MOCK_ORIGINAL_FAMILY_NAME, + 'givenName': MOCK_ORIGINAL_GIVEN_NAME, 'dateOfUpdate': self.default_start_datetime - timedelta(days=30), 'dateOfExpiration': (self.default_start_datetime - timedelta(days=30)).date(), 'firstUploadDate': license_upload_datetime, @@ -101,6 +119,8 @@ def 
_when_provider_had_license_updated_from_upload(self, upload_datetime: dateti }, 'updatedValues': { 'dateOfExpiration': (upload_datetime + timedelta(days=365)).date(), + 'familyName': MOCK_UPDATED_FAMILY_NAME, + 'givenName': MOCK_UPDATED_GIVEN_NAME, }, }) @@ -109,6 +129,8 @@ def _when_provider_had_license_updated_from_upload(self, upload_datetime: dateti 'providerId': self.provider_id, 'compact': self.compact, 'jurisdiction': self.jurisdiction, + 'familyName': MOCK_UPDATED_FAMILY_NAME, + 'givenName': MOCK_UPDATED_GIVEN_NAME, 'dateOfUpdate': upload_datetime, 'dateOfExpiration': (upload_datetime + timedelta(days=365)).date(), 'licenseStatus': 'inactive', @@ -189,25 +211,18 @@ def _when_provider_had_license_update_after_upload(self, after_upload_datetime: """ if after_upload_datetime is None: after_upload_datetime = self.default_end_datetime + timedelta(hours=1) - - license_record = self.test_data_generator.put_default_license_record_in_provider_table({ - 'providerId': self.provider_id, - 'compact': self.compact, - 'jurisdiction': self.jurisdiction, - }) - + # Create a non-upload-related update (e.g., encumbrance) after the window license_update = self.test_data_generator.put_default_license_update_record_in_provider_table({ 'providerId': self.provider_id, 'compact': self.compact, 'jurisdiction': self.jurisdiction, - 'licenseType': license_record.licenseType, 'updateType': self.update_categories.ENCUMBRANCE, # Not an upload-related category 'createDate': after_upload_datetime, 'effectiveDate': after_upload_datetime, }) - return license_record, license_update + return license_update def _when_provider_top_level_record_needs_reverted(self, before_upload_datetime: datetime = None): """ @@ -216,13 +231,16 @@ def _when_provider_top_level_record_needs_reverted(self, before_upload_datetime: """ if before_upload_datetime is None: before_upload_datetime = self.default_start_datetime - timedelta(days=30) - + + # Existing license updated during window + 
self._when_provider_had_license_updated_from_upload() + # Create provider record with old values provider = self.test_data_generator.put_default_provider_record_in_provider_table({ 'providerId': self.provider_id, 'compact': self.compact, - 'givenName': 'OldFirstName', - 'familyName': 'OldLastName', + 'familyName': MOCK_ORIGINAL_FAMILY_NAME, + 'givenName': MOCK_ORIGINAL_GIVEN_NAME, 'dateOfUpdate': before_upload_datetime, }) @@ -230,37 +248,29 @@ def _when_provider_top_level_record_needs_reverted(self, before_upload_datetime: updated_provider = self.test_data_generator.put_default_provider_record_in_provider_table({ 'providerId': self.provider_id, 'compact': self.compact, - 'givenName': 'NewFirstName', - 'familyName': 'NewLastName', + 'familyName': MOCK_UPDATED_FAMILY_NAME, + 'givenName': MOCK_UPDATED_GIVEN_NAME, 'dateOfUpdate': self.default_upload_datetime, }) return provider, updated_provider - # Integration tests for rollback scenarios def test_provider_top_level_record_reset_to_prior_values_when_upload_reverted(self): """Test that provider top-level record is reset to values before upload.""" from handlers.rollback_license_upload import rollback_license_upload - # Setup: Provider record was updated during upload + # Setup: + # Provider record was updated during upload old_provider, new_provider = self._when_provider_top_level_record_needs_reverted() # Execute: Perform rollback - event = { - 'compact': self.compact, - 'jurisdiction': self.jurisdiction, - 'startDateTime': self.default_start_datetime.isoformat(), - 'endDateTime': self.default_end_datetime.isoformat(), - 'rollbackReason': 'Test rollback', - 'executionId': 'test-execution-123', - 'providersProcessed': 0, - } + event = self._generate_test_event() result = rollback_license_upload(event, Mock()) # Assert: Rollback completed successfully self.assertEqual(result['rollbackStatus'], 'COMPLETE') - self.assertEqual(result['providersReverted'], 1) + self.assertEqual(1, result['providersReverted']) # Verify: 
Provider record has been reset to old values provider_records = self.config.data_client.get_provider_user_records( @@ -268,8 +278,8 @@ def test_provider_top_level_record_reset_to_prior_values_when_upload_reverted(se provider_id=self.provider_id, ) provider_record = provider_records.get_provider_record() - self.assertEqual(provider_record.givenName, old_provider.givenName) - self.assertEqual(provider_record.familyName, old_provider.familyName) + self.assertEqual(old_provider.givenName, provider_record.givenName) + self.assertEqual(old_provider.familyName, provider_record.familyName) def test_provider_license_record_reset_to_prior_values_when_upload_reverted(self): """Test that license record is reset to values before upload.""" @@ -277,21 +287,14 @@ def test_provider_license_record_reset_to_prior_values_when_upload_reverted(self # Setup: License was updated during upload (e.g., renewed), but was first uploaded before start time original_license, license_update, updated_license = self._when_provider_had_license_updated_from_upload( - license_upload_datetime=self.default_start_datetime - timedelta(hours = 1)) + license_upload_datetime=self.default_start_datetime - timedelta(hours = 1) + ) # Store the original expiration date from the update's previous values original_expiration = license_update.previous['dateOfExpiration'] # Execute: Perform rollback - event = { - 'compact': self.compact, - 'jurisdiction': self.jurisdiction, - 'startDateTime': self.default_start_datetime.isoformat(), - 'endDateTime': self.default_end_datetime.isoformat(), - 'rollbackReason': 'Test rollback', - 'executionId': 'test-execution-123', - 'providersProcessed': 0, - } + event = self._generate_test_event() result = rollback_license_upload(event, Mock()) @@ -325,15 +328,7 @@ def test_provider_privilege_record_reactivated_when_upload_reverted(self): self._when_provider_had_privilege_deactivated_from_upload() # Execute: Perform rollback - event = { - 'compact': self.compact, - 'jurisdiction': 
self.jurisdiction, - 'startDateTime': self.default_start_datetime.isoformat(), - 'endDateTime': self.default_end_datetime.isoformat(), - 'rollbackReason': 'Test rollback', - 'executionId': 'test-execution-123', - 'providersProcessed': 0, - } + event = self._generate_test_event() result = rollback_license_upload(event, Mock()) @@ -379,15 +374,7 @@ def test_provider_license_updates_and_license_record_within_time_period_removed_ # Execute: Perform rollback - event = { - 'compact': self.compact, - 'jurisdiction': self.jurisdiction, - 'startDateTime': self.default_start_datetime.isoformat(), - 'endDateTime': self.default_end_datetime.isoformat(), - 'rollbackReason': 'Test rollback', - 'executionId': 'test-execution-123', - 'providersProcessed': 0, - } + event = self._generate_test_event() result = rollback_license_upload(event, Mock()) @@ -405,71 +392,24 @@ def test_provider_license_updates_and_license_record_within_time_period_removed_ license_updates_after = provider_records_after.get_all_license_update_records() self.assertEqual(len(license_updates_after), 0, "License update records should be deleted") - def test_provider_privilege_deactivation_update_within_time_period_removed_when_upload_reverted(self): - """Test that privilege deactivation update records within the time window are deleted.""" - from handlers.rollback_license_upload import rollback_license_upload - - # Setup: Privilege was deactivated during upload - privilege, privilege_update = self._when_provider_had_privilege_deactivated_from_upload() - - # Verify update record exists before rollback - provider_records_before = self.config.data_client.get_provider_user_records( - compact=self.compact, - provider_id=self.provider_id, - include_update_tier=UpdateTierEnum.TIER_THREE, - ) - privilege_updates_before = provider_records_before.get_all_privilege_update_records() - self.assertGreater(len(privilege_updates_before), 0, "Should have update records before rollback") - - # Execute: Perform rollback - event = 
{ - 'compact': self.compact, - 'jurisdiction': self.jurisdiction, - 'startDateTime': self.default_start_datetime.isoformat(), - 'endDateTime': self.default_end_datetime.isoformat(), - 'rollbackReason': 'Test rollback', - 'executionId': 'test-execution-123', - 'providersProcessed': 0, - } - - result = rollback_license_upload(event, Mock()) - - # Assert: Rollback completed successfully - self.assertEqual(result['rollbackStatus'], 'COMPLETE') - - # Verify: All privilege update records within time window have been deleted - provider_records_after = self.config.data_client.get_provider_user_records( - compact=self.compact, - provider_id=self.provider_id, - include_update_tier=UpdateTierEnum.TIER_THREE, - ) - privilege_updates_after = provider_records_after.get_all_privilege_update_records() - self.assertEqual(len(privilege_updates_after), 0, "Privilege update records should be deleted") - - def test_provider_skipped_if_license_updates_detected_after_time_period_when_upload_reverted(self): + def test_provider_skipped_if_license_updates_detected_after_end_of_time_window_when_upload_reverted(self): """Test that provider is skipped if non-upload-related license updates exist after time window.""" from handlers.rollback_license_upload import rollback_license_upload - # Setup: Provider had license update after upload window + # Setup: Provider had valid license before upload, and update occurred during upload window + self._when_provider_had_license_updated_from_upload( + license_upload_datetime=self.default_start_datetime - timedelta(hours=1) + ) + # update also occurred after upload window self._when_provider_had_license_update_after_upload() - # Execute: Perform rollback - event = { - 'compact': self.compact, - 'jurisdiction': self.jurisdiction, - 'startDateTime': self.default_start_datetime.isoformat(), - 'endDateTime': self.default_end_datetime.isoformat(), - 'rollbackReason': 'Test rollback', - 'executionId': 'test-execution-123', - 'providersProcessed': 0, - } - + 
event = self._generate_test_event() result = rollback_license_upload(event, Mock()) # Assert: Rollback completed but provider was skipped - self.assertEqual(result['rollbackStatus'], 'COMPLETE') - self.assertEqual(result['providersReverted'], 0) - self.assertEqual(result['providersSkipped'], 1) + self.assertEqual('COMPLETE', result['rollbackStatus']) + self.assertEqual(0, result['providersReverted']) + self.assertEqual(1, result['providersSkipped']) # Verify: License record and update still exist (not rolled back) provider_records = self.config.data_client.get_provider_user_records( @@ -480,32 +420,25 @@ def test_provider_skipped_if_license_updates_detected_after_time_period_when_upl licenses = provider_records.get_license_records() self.assertEqual(len(licenses), 1, "License should still exist") license_updates = provider_records.get_all_license_update_records() - self.assertEqual(len(license_updates), 1, "License update should still exist") + self.assertEqual(2, len(license_updates), "License updates should still exist") def test_provider_skipped_if_privilege_updates_detected_after_time_period_when_upload_reverted(self): """Test that provider is skipped if non-upload-related privilege updates exist after time window.""" from handlers.rollback_license_upload import rollback_license_upload # Setup: Provider had privilege update after upload window - privilege, privilege_update = self._when_provider_had_privilege_update_after_upload() + self._when_provider_had_license_updated_from_upload() + self._when_provider_had_privilege_update_after_upload() # Execute: Perform rollback - event = { - 'compact': self.compact, - 'jurisdiction': self.jurisdiction, - 'startDateTime': self.default_start_datetime.isoformat(), - 'endDateTime': self.default_end_datetime.isoformat(), - 'rollbackReason': 'Test rollback', - 'executionId': 'test-execution-123', - 'providersProcessed': 0, - } + event = self._generate_test_event() result = rollback_license_upload(event, Mock()) # Assert: 
Rollback completed but provider was skipped self.assertEqual(result['rollbackStatus'], 'COMPLETE') - self.assertEqual(result['providersSkipped'], 1) - self.assertEqual(result['providersReverted'], 0) + self.assertEqual(1, result['providersSkipped']) + self.assertEqual(0, result['providersReverted']) # Verify: Privilege record and update still exist (not rolled back) provider_records = self.config.data_client.get_provider_user_records( @@ -514,24 +447,17 @@ def test_provider_skipped_if_privilege_updates_detected_after_time_period_when_u include_update_tier=UpdateTierEnum.TIER_THREE, ) privileges = provider_records.get_privilege_records() - self.assertEqual(len(privileges), 1, "Privilege should still exist") + self.assertEqual(1, len(privileges), "Privilege should still exist") privilege_updates = provider_records.get_all_privilege_update_records() - self.assertEqual(len(privilege_updates), 1, "Privilege update should still exist") + self.assertEqual(1, len(privilege_updates), "Privilege update should still exist") # Validation tests def test_rollback_validates_datetime_format(self): """Test that rollback validates datetime format.""" from handlers.rollback_license_upload import rollback_license_upload - event = { - 'compact': self.compact, - 'jurisdiction': self.jurisdiction, - 'startDateTime': 'invalid-datetime', - 'endDateTime': self.default_end_datetime.isoformat(), - 'rollbackReason': 'Test rollback', - 'executionId': 'test-execution-123', - 'providersProcessed': 0, - } + event = self._generate_test_event() + event['startDateTime'] = 'invalid-datetime' result = rollback_license_upload(event, Mock()) @@ -542,15 +468,9 @@ def test_rollback_validates_time_window_order(self): """Test that rollback validates start time is before end time.""" from handlers.rollback_license_upload import rollback_license_upload - event = { - 'compact': self.compact, - 'jurisdiction': self.jurisdiction, - 'startDateTime': self.default_end_datetime.isoformat(), - 'endDateTime': 
self.default_start_datetime.isoformat(), - 'rollbackReason': 'Test rollback', - 'executionId': 'test-execution-123', - 'providersProcessed': 0, - } + event = self._generate_test_event() + event['startDateTime'] = self.default_end_datetime.isoformat() + event['endDateTime'] = self.default_start_datetime.isoformat() result = rollback_license_upload(event, Mock()) @@ -564,15 +484,9 @@ def test_rollback_validates_maximum_time_window(self): start = datetime.now() - timedelta(days=8) # More than 7 days end = datetime.now() - event = { - 'compact': self.compact, - 'jurisdiction': self.jurisdiction, - 'startDateTime': start.isoformat(), - 'endDateTime': end.isoformat(), - 'rollbackReason': 'Test rollback', - 'executionId': 'test-execution-123', - 'providersProcessed': 0, - } + event = self._generate_test_event() + event['startDateTime'] = start.isoformat() + event['endDateTime'] = end.isoformat() result = rollback_license_upload(event, Mock()) From 16185a16c0b9868ef24bff1b762d4cbacefe165f Mon Sep 17 00:00:00 2001 From: Landon Shumway Date: Thu, 6 Nov 2025 15:45:25 -0600 Subject: [PATCH 20/81] Manage provider revert action --- .../data_model/provider_record_util.py | 4 +- .../handlers/rollback_license_upload.py | 87 ++++++++++--------- .../function/test_rollback_license_upload.py | 45 +++++++--- 3 files changed, 81 insertions(+), 55 deletions(-) diff --git a/backend/compact-connect/lambdas/python/common/cc_common/data_model/provider_record_util.py b/backend/compact-connect/lambdas/python/common/cc_common/data_model/provider_record_util.py index c60e1ca29..4aabaded2 100644 --- a/backend/compact-connect/lambdas/python/common/cc_common/data_model/provider_record_util.py +++ b/backend/compact-connect/lambdas/python/common/cc_common/data_model/provider_record_util.py @@ -23,7 +23,7 @@ from cc_common.data_model.schema.privilege import PrivilegeData, PrivilegeUpdateData from cc_common.data_model.schema.privilege.api import PrivilegeHistoryResponseSchema from 
cc_common.data_model.schema.provider import ProviderData, ProviderUpdateData -from cc_common.exceptions import CCInternalException +from cc_common.exceptions import CCInternalException, CCNotFoundException class ProviderRecordType(StrEnum): @@ -739,7 +739,7 @@ def find_best_license_in_current_known_licenses(self, jurisdiction: str | None = # Last issued inactive license, otherwise latest_licenses = sorted(license_records, key=lambda x: x.dateOfIssuance.isoformat(), reverse=True) if not latest_licenses: - raise CCInternalException('No licenses found') + raise CCNotFoundException('No licenses found') return latest_licenses[0] diff --git a/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py b/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py index b376199b0..95a4c1e6c 100644 --- a/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py +++ b/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py @@ -8,11 +8,13 @@ from boto3.dynamodb.conditions import Key from botocore.exceptions import ClientError from cc_common.config import config, logger -from cc_common.data_model.provider_record_util import ProviderUserRecords +from cc_common.data_model.provider_record_util import ProviderUserRecords, ProviderRecordUtility from cc_common.data_model.schema.common import UpdateCategory, LICENSE_UPLOAD_UPDATE_CATEGORIES +from cc_common.data_model.schema.privilege import PrivilegeData +from cc_common.data_model.schema.provider import ProviderData from cc_common.data_model.update_tier_enum import UpdateTierEnum from cc_common.event_batch_writer import EventBatchWriter - +from cc_common.exceptions import CCNotFoundException # Maximum time window for rollback (1 week in seconds) MAX_ROLLBACK_WINDOW_SECONDS = 7 * 24 * 60 * 60 @@ -52,6 +54,7 @@ class RevertedLicense: jurisdiction: str license_type: str revision_id: UUID + action: str 
@dataclass @@ -453,6 +456,16 @@ def _process_provider_rollback( error=str(e), ) +def _perform_transaction(transaction_items: list[dict]) -> None: + logger.info(f'Executing {len(transaction_items)} transaction items in batches of 100') + + for i in range(0, len(transaction_items), 100): + batch = transaction_items[i:i + 100] + # Use Table resource's client for automatic type conversion + # TODO - catch failures and add failure record to write to S3 results object + config.provider_table.meta.client.transact_write_items(TransactItems=batch) + logger.info(f'Executed batch {i // 100 + 1} with {len(batch)} items') + def _build_and_execute_revert_transactions( provider_records: ProviderUserRecords, @@ -472,7 +485,6 @@ def _build_and_execute_revert_transactions( Returns either a summary of what was reverted or details about why the provider was skipped. """ - from cc_common.data_model.provider_record_util import ProviderRecordUtility from cc_common.data_model.schema.license import LicenseData from cc_common.data_model.schema.license.record import LicenseRecordSchema @@ -613,6 +625,7 @@ def add_delete(pk: str, sk: str): jurisdiction=license_record.jurisdiction, license_type=license_record.licenseType, revision_id=uuid4(), + action='DELETE', ) ) for update in license_updates_after_start: @@ -667,6 +680,7 @@ def add_delete(pk: str, sk: str): jurisdiction=license_record.jurisdiction, license_type=license_record.licenseType, revision_id=uuid4(), + action='DELETE', ) ) else: @@ -689,6 +703,7 @@ def add_delete(pk: str, sk: str): jurisdiction=license_record.jurisdiction, license_type=license_record.licenseType, revision_id=uuid4(), + action='REVERT', ) ) else: @@ -712,38 +727,6 @@ def add_delete(pk: str, sk: str): ineligible_updates=ineligible_updates, ) - # Step 3: Regenerate provider record using populate_provider_record - if reverted_licenses or reverted_privileges: - current_provider_record = provider_records.get_provider_record() - - # Get licenses from other jurisdictions 
(not affected by rollback) - all_licenses = provider_records.get_license_records() - for license_rec in all_licenses: - if license_rec.jurisdiction != jurisdiction: - reverted_licenses_dict.append(license_rec.to_dict()) - - # Get all privilege records - privilege_records_dict = [p.to_dict() for p in provider_records.get_privilege_records()] - - # Find best license from reverted state - # TODO - first update licenses/privilege, then pull down again, and update provider record in separate transaction - # or delete it all together if all license records were deleted for provider - if reverted_licenses_dict: - best_license = ProviderRecordUtility.find_best_license( - license_records=reverted_licenses_dict, - home_jurisdiction=current_provider_record.currentHomeJurisdiction, - ) - - # Populate provider record using the reverted best license - updated_provider_record = ProviderRecordUtility.populate_provider_record( - current_provider_record=current_provider_record, - license_record=best_license, - privilege_records=privilege_records_dict, - ) - - add_put(updated_provider_record.serialize_to_database_record()) - logger.info('Adding provider record update to transaction') - # Execute transactions in batches of 100 if not transaction_items: logger.warning('No transaction items to execute') @@ -754,14 +737,32 @@ def add_delete(pk: str, sk: str): updates_deleted=updates_deleted_count, ) - logger.info(f'Executing {len(transaction_items)} transaction items in batches of 100') - - for i in range(0, len(transaction_items), 100): - batch = transaction_items[i:i + 100] - # Use Table resource's client for automatic type conversion - # TODO - catch failures and add failure record to write to S3 results object - config.provider_table.meta.client.transact_write_items(TransactItems=batch) - logger.info(f'Executed batch {i // 100 + 1} with {len(batch)} items') + _perform_transaction(transaction_items) + + # Now read all the license records for the provider and update the provider record 
+ # Fetch all provider records including all update tiers + provider_records = config.data_client.get_provider_user_records( + compact=compact, + provider_id=provider_id + ) + top_level_provider_record: ProviderData = provider_records.get_provider_record() + privilege_records: list[PrivilegeData] = provider_records.get_privilege_records() + transaction_items.clear() + try: + best_license = provider_records.find_best_license_in_current_known_licenses() + provider_record = ProviderRecordUtility.populate_provider_record( + current_provider_record=top_level_provider_record, + license_record=best_license.to_dict(), + privilege_records=[privilege.to_dict() for privilege in privilege_records], + ) + add_put(provider_record.serialize_to_database_record()) + except CCNotFoundException: + # all licenses for the provider were removed as part of the rollback, + # the provider record needs to be removed as well + serialized_provider_record = top_level_provider_record.serialize_to_database_record() + add_delete(pk=serialized_provider_record['pk'], sk=serialized_provider_record['sk']) + + _perform_transaction(transaction_items) # TODO - log full change summary (DO NOT LOG PII) return ProviderRevertedSummary( diff --git a/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py b/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py index 9cfe3d9c5..f700c5251 100644 --- a/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py +++ b/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py @@ -12,9 +12,12 @@ from datetime import datetime, timedelta from unittest.mock import Mock, patch from uuid import uuid4 + +import pytest from moto import mock_aws from cc_common.data_model.update_tier_enum import UpdateTierEnum +from cc_common.exceptions import CCNotFoundException from . 
import TstFunction MOCK_DATETIME_STRING = '2025-10-23T08:15:00+00:00' @@ -281,6 +284,31 @@ def test_provider_top_level_record_reset_to_prior_values_when_upload_reverted(se self.assertEqual(old_provider.givenName, provider_record.givenName) self.assertEqual(old_provider.familyName, provider_record.familyName) + def test_provider_top_level_record_deleted_when_license_created_during_bad_upload(self): + """Test that provider top-level record is deleted if the license record is also deleted when reverting upload.""" + from handlers.rollback_license_upload import rollback_license_upload + + # Setup: + # License and provider records were created during upload + self._when_provider_had_license_created_from_upload() + + # Execute: Perform rollback + event = self._generate_test_event() + + result = rollback_license_upload(event, Mock()) + + # Assert: Rollback completed successfully + self.assertEqual(result['rollbackStatus'], 'COMPLETE') + self.assertEqual(1, result['providersReverted']) + + # Verify: All provider records have been deleted + with pytest.raises(CCNotFoundException) as exc_info: + self.config.data_client.get_provider_user_records( + compact=self.compact, + provider_id=self.provider_id, + ) + + def test_provider_license_record_reset_to_prior_values_when_upload_reverted(self): """Test that license record is reset to values before upload.""" from handlers.rollback_license_upload import rollback_license_upload @@ -381,16 +409,13 @@ def test_provider_license_updates_and_license_record_within_time_period_removed_ # Assert: Rollback completed successfully self.assertEqual(result['rollbackStatus'], 'COMPLETE') - # Verify: All license update records within time window have been deleted - provider_records_after = self.config.data_client.get_provider_user_records( - compact=self.compact, - provider_id=self.provider_id, - include_update_tier=UpdateTierEnum.TIER_THREE, - ) - licenses_after = provider_records_after.get_license_records() - 
self.assertEqual(len(licenses_after), 0, "License records should be deleted") - license_updates_after = provider_records_after.get_all_license_update_records() - self.assertEqual(len(license_updates_after), 0, "License update records should be deleted") + # Verify: All records within time window have been deleted + with pytest.raises(CCNotFoundException) as exec_info: + self.config.data_client.get_provider_user_records( + compact=self.compact, + provider_id=self.provider_id, + include_update_tier=UpdateTierEnum.TIER_THREE, + ) def test_provider_skipped_if_license_updates_detected_after_end_of_time_window_when_upload_reverted(self): """Test that provider is skipped if non-upload-related license updates exist after time window.""" From 2b7e489d1b6f233f47b227e57b9d9623743898c3 Mon Sep 17 00:00:00 2001 From: Landon Shumway Date: Thu, 6 Nov 2025 16:39:19 -0600 Subject: [PATCH 21/81] organize transaction order by record type --- .../handlers/rollback_license_upload.py | 86 +++++++++++++------ 1 file changed, 58 insertions(+), 28 deletions(-) diff --git a/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py b/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py index 95a4c1e6c..761d32d88 100644 --- a/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py +++ b/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py @@ -488,8 +488,10 @@ def _build_and_execute_revert_transactions( from cc_common.data_model.schema.license import LicenseData from cc_common.data_model.schema.license.record import LicenseRecordSchema - # TODO - split transactions into first tier/second tier lists (license/privilege first tier, updates second) - transaction_items = [] + # Split transaction lists into first tier/second tier lists (license/privilege/provider first tier, updates second) + # then merge the two lists into a single list of 
transaction items + primary_record_transaction_items = [] # License, privilege, and provider records + update_record_transactions_items = [] # Update records (license updates, privilege updates, provider updates) table_name = config.provider_table_name reverted_licenses = [] reverted_privileges = [] @@ -497,21 +499,44 @@ def _build_and_execute_revert_transactions( ineligible_updates: list[IneligibleUpdate] = [] # Helper functions for cleaner item building - def add_put(item: dict): - transaction_items.append({ + def add_put(item: dict, update_record: bool): + """ + Add a Put operation to the appropriate list. + + Args: + item: The item to put + update_record: True if the item is an update record, False if it is a primary record + """ + transaction_item = { 'Put': { 'TableName': table_name, 'Item': item, } - }) + } + if update_record: + update_record_transactions_items.append(transaction_item) + else: + primary_record_transaction_items.append(transaction_item) - def add_delete(pk: str, sk: str): - transaction_items.append({ + def add_delete(pk: str, sk: str, update_record: bool): + """ + Add a Delete operation. 
+ + Args: + pk: Partition key + sk: Sort key - used to determine if this is an update record + update_record: True if the item is an update record, False if it is a primary record + """ + transaction_item = { 'Delete': { 'TableName': table_name, 'Key': {'pk': pk, 'sk': sk}, } - }) + } + if update_record: + update_record_transactions_items.append(transaction_item) + else: + primary_record_transaction_items.append(transaction_item) # Step 1: Check provider updates - any after start_datetime make provider ineligible provider_updates = provider_records.get_all_provider_update_records() @@ -561,8 +586,8 @@ def add_delete(pk: str, sk: str): ) elif start_datetime <= privilege_update.createDate <= end_datetime: # License deactivation within window - mark for deletion - serialized = privilege_update.serialize_to_database_record() - add_delete(serialized['pk'], serialized['sk']) + serialized_privilege_update = privilege_update.serialize_to_database_record() + add_delete(serialized_privilege_update['pk'], serialized_privilege_update['sk'], update_record=True) updates_deleted_count += 1 logger.info('Will delete privilege deactivation update record if provider is eligible for rollback') @@ -580,7 +605,7 @@ def add_delete(pk: str, sk: str): ) # Remove the licenseDeactivatedStatus field to reactivate using UPDATE operation serialized_privilege = privilege_record.serialize_to_database_record() - transaction_items.append({ + primary_record_transaction_items.append({ 'Update': { 'TableName': table_name, 'Key': {'pk': serialized_privilege['pk'], 'sk': serialized_privilege['sk']}, @@ -617,8 +642,8 @@ def add_delete(pk: str, sk: str): ) ) # no privileges found, so we can delete the license record - serialized = license_record.serialize_to_database_record() - add_delete(serialized['pk'], serialized['sk']) + serialized_license_record = license_record.serialize_to_database_record() + add_delete(serialized_license_record['pk'], serialized_license_record['sk'], update_record=False) 
logger.info('Will delete license record (created during upload) if provider is eligible for rollback') reverted_licenses.append( RevertedLicense( @@ -629,8 +654,8 @@ def add_delete(pk: str, sk: str): ) ) for update in license_updates_after_start: - serialized = update.serialize_to_database_record() - add_delete(serialized['pk'], serialized['sk']) + serialized_license_update = update.serialize_to_database_record() + add_delete(serialized_license_update['pk'], serialized_license_update['sk'], update_record=True) updates_deleted_count += 1 logger.info('Will delete license update record if provider is eligible for rollback', update_type=update.updateType) else: @@ -652,8 +677,8 @@ def add_delete(pk: str, sk: str): elif start_datetime <= license_update.createDate <= end_datetime: # Upload-related update within window - mark for deletion license_updates_in_window.append(license_update) - serialized = license_update.serialize_to_database_record() - add_delete(serialized['pk'], serialized['sk']) + serialized_license_update = license_update.serialize_to_database_record() + add_delete(serialized_license_update['pk'], serialized_license_update['sk'], update_record=True) updates_deleted_count += 1 logger.info('Will delete license update record if provider is eligible for rollback', update_type=license_update.updateType) @@ -671,9 +696,9 @@ def add_delete(pk: str, sk: str): if (license_record.firstUploadDate is not None and start_datetime <= license_record.firstUploadDate <= end_datetime): # License created during upload - delete it - serialized = license_record.serialize_to_database_record() - add_delete(serialized['pk'], serialized['sk']) - logger.info('Deleting license record (created during upload)') + serialized_license_record = license_record.serialize_to_database_record() + add_delete(serialized_license_record['pk'], serialized_license_record['sk'], update_record=False) + logger.info('Will delete license record (created during upload)') reverted_licenses.append( 
RevertedLicense( @@ -689,14 +714,14 @@ def add_delete(pk: str, sk: str): reverted_license_data.update(earliest_update_in_window.previous) reverted_license = LicenseData.create_new(reverted_license_data) - serialized_reverted = reverted_license.serialize_to_database_record() + serialized_reverted_license = reverted_license.serialize_to_database_record() - add_put(serialized_reverted) + add_put(serialized_reverted_license, update_record=True) logger.info('Reverting license record to pre-upload state') # Track for provider record regeneration license_schema = LicenseRecordSchema() - reverted_licenses_dict.append(license_schema.load(serialized_reverted)) + reverted_licenses_dict.append(license_schema.load(serialized_reverted_license)) reverted_licenses.append( RevertedLicense( @@ -727,7 +752,9 @@ def add_delete(pk: str, sk: str): ineligible_updates=ineligible_updates, ) - # Execute transactions in batches of 100 + # process primary records first, then update records + transaction_items = primary_record_transaction_items + update_record_transactions_items + if not transaction_items: logger.warning('No transaction items to execute') return ProviderRevertedSummary( @@ -747,7 +774,10 @@ def add_delete(pk: str, sk: str): ) top_level_provider_record: ProviderData = provider_records.get_provider_record() privilege_records: list[PrivilegeData] = provider_records.get_privilege_records() - transaction_items.clear() + + # Create a new list for provider record updates (all first tier items) + primary_record_transaction_items.clear() + try: best_license = provider_records.find_best_license_in_current_known_licenses() provider_record = ProviderRecordUtility.populate_provider_record( @@ -755,14 +785,14 @@ def add_delete(pk: str, sk: str): license_record=best_license.to_dict(), privilege_records=[privilege.to_dict() for privilege in privilege_records], ) - add_put(provider_record.serialize_to_database_record()) + add_put(provider_record.serialize_to_database_record(), 
update_record=False) except CCNotFoundException: # all licenses for the provider were removed as part of the rollback, # the provider record needs to be removed as well serialized_provider_record = top_level_provider_record.serialize_to_database_record() - add_delete(pk=serialized_provider_record['pk'], sk=serialized_provider_record['sk']) + add_delete(pk=serialized_provider_record['pk'], sk=serialized_provider_record['sk'], update_record=False) - _perform_transaction(transaction_items) + _perform_transaction(primary_record_transaction_items) # TODO - log full change summary (DO NOT LOG PII) return ProviderRevertedSummary( From e5a3ccf2764794e8266a8c3ec09d61d98282dbee Mon Sep 17 00:00:00 2001 From: Landon Shumway Date: Thu, 6 Nov 2025 17:03:25 -0600 Subject: [PATCH 22/81] add test to check S3 contents --- .../handlers/rollback_license_upload.py | 3 +- .../function/test_rollback_license_upload.py | 403 +++++++++++------- 2 files changed, 239 insertions(+), 167 deletions(-) diff --git a/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py b/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py index 761d32d88..8f3f80c01 100644 --- a/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py +++ b/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py @@ -100,8 +100,7 @@ def to_dict(self) -> dict: 'jurisdiction': license_record.jurisdiction, 'licenseType': license_record.license_type, 'revisionId': str(license_record.revision_id), - # TODO - add action field showing 'REVERT' or 'DELETED' - 'action': 'some-action' + 'action': license_record.action } for license_record in summary.licenses_reverted ], diff --git a/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py b/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py 
index f700c5251..9066d9c3a 100644 --- a/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py +++ b/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py @@ -9,15 +9,16 @@ - Event publishing - S3 result management """ + from datetime import datetime, timedelta -from unittest.mock import Mock, patch +from unittest.mock import Mock, patch, ANY from uuid import uuid4 import pytest -from moto import mock_aws - from cc_common.data_model.update_tier_enum import UpdateTierEnum from cc_common.exceptions import CCNotFoundException +from moto import mock_aws + from . import TstFunction MOCK_DATETIME_STRING = '2025-10-23T08:15:00+00:00' @@ -44,6 +45,7 @@ def setUp(self): self.default_start_datetime = self.default_upload_datetime - timedelta(days=1) self.default_end_datetime = self.default_upload_datetime from cc_common.data_model.schema.common import UpdateCategory + self.update_categories = UpdateCategory self._add_provider_record() @@ -61,12 +63,14 @@ def _generate_test_event(self): def _add_provider_record(self): # add provider record to provider table - self.test_data_generator.put_default_provider_record_in_provider_table({ - 'providerId': self.provider_id, - 'compact': self.compact, - 'jurisdiction': self.jurisdiction, - 'dateOfUpdate': self.default_start_datetime - timedelta(days=30), - }) + self.test_data_generator.put_default_provider_record_in_provider_table( + { + 'providerId': self.provider_id, + 'compact': self.compact, + 'jurisdiction': self.jurisdiction, + 'dateOfUpdate': self.default_start_datetime - timedelta(days=30), + } + ) # Helper methods for setting up test scenarios def _when_provider_had_license_created_from_upload(self): @@ -74,15 +78,19 @@ def _when_provider_had_license_created_from_upload(self): Set up a scenario where a provider had a license created during the upload window. Returns the created license data. 
""" - return self.test_data_generator.put_default_license_record_in_provider_table({ - 'providerId': self.provider_id, - 'compact': self.compact, - 'jurisdiction': self.jurisdiction, - 'firstUploadDate': self.default_upload_datetime, - 'dateOfUpdate': self.default_upload_datetime, - }) + return self.test_data_generator.put_default_license_record_in_provider_table( + { + 'providerId': self.provider_id, + 'compact': self.compact, + 'jurisdiction': self.jurisdiction, + 'firstUploadDate': self.default_upload_datetime, + 'dateOfUpdate': self.default_upload_datetime, + } + ) - def _when_provider_had_license_updated_from_upload(self, upload_datetime: datetime = None, license_upload_datetime: datetime = None): + def _when_provider_had_license_updated_from_upload( + self, upload_datetime: datetime = None, license_upload_datetime: datetime = None + ): """ Set up a scenario where a provider had an existing license updated during the upload window. Returns the license and its update record. @@ -92,53 +100,58 @@ def _when_provider_had_license_updated_from_upload(self, upload_datetime: dateti if license_upload_datetime is None: # by default, the license was originally uploaded a day before the bad upload license_upload_datetime = self.default_start_datetime - timedelta(days=1) - - + # Create original license before upload window, unless different time is provided - original_license = self.test_data_generator.put_default_license_record_in_provider_table({ - 'providerId': self.provider_id, - 'compact': self.compact, - 'jurisdiction': self.jurisdiction, - 'familyName': MOCK_ORIGINAL_FAMILY_NAME, - 'givenName': MOCK_ORIGINAL_GIVEN_NAME, - 'dateOfUpdate': self.default_start_datetime - timedelta(days=30), - 'dateOfExpiration': (self.default_start_datetime - timedelta(days=30)).date(), - 'firstUploadDate': license_upload_datetime, - }) - + original_license = self.test_data_generator.put_default_license_record_in_provider_table( + { + 'providerId': self.provider_id, + 'compact': 
self.compact, + 'jurisdiction': self.jurisdiction, + 'familyName': MOCK_ORIGINAL_FAMILY_NAME, + 'givenName': MOCK_ORIGINAL_GIVEN_NAME, + 'dateOfUpdate': self.default_start_datetime - timedelta(days=30), + 'dateOfExpiration': (self.default_start_datetime - timedelta(days=30)).date(), + 'firstUploadDate': license_upload_datetime, + } + ) + # Create update record within upload window - license_update = self.test_data_generator.put_default_license_update_record_in_provider_table({ - 'providerId': self.provider_id, - 'compact': self.compact, - 'jurisdiction': self.jurisdiction, - 'licenseType': original_license.licenseType, - 'updateType': self.update_categories.RENEWAL, - 'createDate': upload_datetime, - 'effectiveDate': upload_datetime, - 'previous': { - 'dateOfExpiration': original_license.dateOfExpiration, - 'licenseStatus': 'inactive', - **original_license.to_dict() - }, - 'updatedValues': { - 'dateOfExpiration': (upload_datetime + timedelta(days=365)).date(), - 'familyName': MOCK_UPDATED_FAMILY_NAME, - 'givenName': MOCK_UPDATED_GIVEN_NAME, - }, - }) + license_update = self.test_data_generator.put_default_license_update_record_in_provider_table( + { + 'providerId': self.provider_id, + 'compact': self.compact, + 'jurisdiction': self.jurisdiction, + 'licenseType': original_license.licenseType, + 'updateType': self.update_categories.RENEWAL, + 'createDate': upload_datetime, + 'effectiveDate': upload_datetime, + 'previous': { + 'dateOfExpiration': original_license.dateOfExpiration, + 'licenseStatus': 'inactive', + **original_license.to_dict(), + }, + 'updatedValues': { + 'dateOfExpiration': (upload_datetime + timedelta(days=365)).date(), + 'familyName': MOCK_UPDATED_FAMILY_NAME, + 'givenName': MOCK_UPDATED_GIVEN_NAME, + }, + } + ) # Update the license record to reflect the new expiration - updated_license = self.test_data_generator.put_default_license_record_in_provider_table({ - 'providerId': self.provider_id, - 'compact': self.compact, - 'jurisdiction': 
self.jurisdiction, - 'familyName': MOCK_UPDATED_FAMILY_NAME, - 'givenName': MOCK_UPDATED_GIVEN_NAME, - 'dateOfUpdate': upload_datetime, - 'dateOfExpiration': (upload_datetime + timedelta(days=365)).date(), - 'licenseStatus': 'inactive', - 'firstUploadDate': license_upload_datetime, - }) + updated_license = self.test_data_generator.put_default_license_record_in_provider_table( + { + 'providerId': self.provider_id, + 'compact': self.compact, + 'jurisdiction': self.jurisdiction, + 'familyName': MOCK_UPDATED_FAMILY_NAME, + 'givenName': MOCK_UPDATED_GIVEN_NAME, + 'dateOfUpdate': upload_datetime, + 'dateOfExpiration': (upload_datetime + timedelta(days=365)).date(), + 'licenseStatus': 'inactive', + 'firstUploadDate': license_upload_datetime, + } + ) return original_license, license_update, updated_license @@ -151,31 +164,35 @@ def _when_provider_had_privilege_deactivated_from_upload(self, upload_datetime: if upload_datetime is None: upload_datetime = self.default_upload_datetime - + # Create privilege that was deactivated by upload - privilege = self.test_data_generator.put_default_privilege_record_in_provider_table({ - 'providerId': self.provider_id, - 'compact': self.compact, - 'jurisdiction': self.jurisdiction, - 'dateOfUpdate': self.default_start_datetime - timedelta(days=30), - 'licenseDeactivatedStatus': LicenseDeactivatedStatusEnum.LICENSE_DEACTIVATED, - 'dateOfExpiration': datetime.fromisoformat(MOCK_DATETIME_STRING) - }) - - # Create deactivation update record - privilege_update = self.test_data_generator.put_default_privilege_update_record_in_provider_table({ - 'providerId': self.provider_id, - 'compact': self.compact, - 'jurisdiction': self.jurisdiction, - 'licenseType': privilege.licenseType, - 'updateType': self.update_categories.LICENSE_DEACTIVATION, - 'createDate': upload_datetime, - 'effectiveDate': upload_datetime, - 'updatedValues': { + privilege = self.test_data_generator.put_default_privilege_record_in_provider_table( + { + 'providerId': 
self.provider_id, + 'compact': self.compact, + 'jurisdiction': self.jurisdiction, + 'dateOfUpdate': self.default_start_datetime - timedelta(days=30), 'licenseDeactivatedStatus': LicenseDeactivatedStatusEnum.LICENSE_DEACTIVATED, - }, - }) - + 'dateOfExpiration': datetime.fromisoformat(MOCK_DATETIME_STRING), + } + ) + + # Create deactivation update record + privilege_update = self.test_data_generator.put_default_privilege_update_record_in_provider_table( + { + 'providerId': self.provider_id, + 'compact': self.compact, + 'jurisdiction': self.jurisdiction, + 'licenseType': privilege.licenseType, + 'updateType': self.update_categories.LICENSE_DEACTIVATION, + 'createDate': upload_datetime, + 'effectiveDate': upload_datetime, + 'updatedValues': { + 'licenseDeactivatedStatus': LicenseDeactivatedStatusEnum.LICENSE_DEACTIVATED, + }, + } + ) + return privilege, privilege_update def _when_provider_had_privilege_update_after_upload(self, after_upload_datetime: datetime = None): @@ -186,24 +203,28 @@ def _when_provider_had_privilege_update_after_upload(self, after_upload_datetime """ if after_upload_datetime is None: after_upload_datetime = self.default_end_datetime + timedelta(hours=1) - - privilege = self.test_data_generator.put_default_privilege_record_in_provider_table({ - 'providerId': self.provider_id, - 'compact': self.compact, - 'jurisdiction': self.jurisdiction, - }) - + + privilege = self.test_data_generator.put_default_privilege_record_in_provider_table( + { + 'providerId': self.provider_id, + 'compact': self.compact, + 'jurisdiction': self.jurisdiction, + } + ) + # Create a non-upload-related update (e.g., renewal) after the window - privilege_update = self.test_data_generator.put_default_privilege_update_record_in_provider_table({ - 'providerId': self.provider_id, - 'compact': self.compact, - 'jurisdiction': self.jurisdiction, - 'licenseType': privilege.licenseType, - 'updateType': self.update_categories.RENEWAL, # Not LICENSE_DEACTIVATION - 'createDate': 
after_upload_datetime, - 'effectiveDate': after_upload_datetime, - }) - + privilege_update = self.test_data_generator.put_default_privilege_update_record_in_provider_table( + { + 'providerId': self.provider_id, + 'compact': self.compact, + 'jurisdiction': self.jurisdiction, + 'licenseType': privilege.licenseType, + 'updateType': self.update_categories.RENEWAL, # Not LICENSE_DEACTIVATION + 'createDate': after_upload_datetime, + 'effectiveDate': after_upload_datetime, + } + ) + return privilege, privilege_update def _when_provider_had_license_update_after_upload(self, after_upload_datetime: datetime = None): @@ -216,15 +237,17 @@ def _when_provider_had_license_update_after_upload(self, after_upload_datetime: after_upload_datetime = self.default_end_datetime + timedelta(hours=1) # Create a non-upload-related update (e.g., encumbrance) after the window - license_update = self.test_data_generator.put_default_license_update_record_in_provider_table({ - 'providerId': self.provider_id, - 'compact': self.compact, - 'jurisdiction': self.jurisdiction, - 'updateType': self.update_categories.ENCUMBRANCE, # Not an upload-related category - 'createDate': after_upload_datetime, - 'effectiveDate': after_upload_datetime, - }) - + license_update = self.test_data_generator.put_default_license_update_record_in_provider_table( + { + 'providerId': self.provider_id, + 'compact': self.compact, + 'jurisdiction': self.jurisdiction, + 'updateType': self.update_categories.ENCUMBRANCE, # Not an upload-related category + 'createDate': after_upload_datetime, + 'effectiveDate': after_upload_datetime, + } + ) + return license_update def _when_provider_top_level_record_needs_reverted(self, before_upload_datetime: datetime = None): @@ -239,23 +262,27 @@ def _when_provider_top_level_record_needs_reverted(self, before_upload_datetime: self._when_provider_had_license_updated_from_upload() # Create provider record with old values - provider = 
self.test_data_generator.put_default_provider_record_in_provider_table({ - 'providerId': self.provider_id, - 'compact': self.compact, - 'familyName': MOCK_ORIGINAL_FAMILY_NAME, - 'givenName': MOCK_ORIGINAL_GIVEN_NAME, - 'dateOfUpdate': before_upload_datetime, - }) - + provider = self.test_data_generator.put_default_provider_record_in_provider_table( + { + 'providerId': self.provider_id, + 'compact': self.compact, + 'familyName': MOCK_ORIGINAL_FAMILY_NAME, + 'givenName': MOCK_ORIGINAL_GIVEN_NAME, + 'dateOfUpdate': before_upload_datetime, + } + ) + # Simulate that the provider record was updated during upload - updated_provider = self.test_data_generator.put_default_provider_record_in_provider_table({ - 'providerId': self.provider_id, - 'compact': self.compact, - 'familyName': MOCK_UPDATED_FAMILY_NAME, - 'givenName': MOCK_UPDATED_GIVEN_NAME, - 'dateOfUpdate': self.default_upload_datetime, - }) - + updated_provider = self.test_data_generator.put_default_provider_record_in_provider_table( + { + 'providerId': self.provider_id, + 'compact': self.compact, + 'familyName': MOCK_UPDATED_FAMILY_NAME, + 'givenName': MOCK_UPDATED_GIVEN_NAME, + 'dateOfUpdate': self.default_upload_datetime, + } + ) + return provider, updated_provider def test_provider_top_level_record_reset_to_prior_values_when_upload_reverted(self): @@ -265,16 +292,16 @@ def test_provider_top_level_record_reset_to_prior_values_when_upload_reverted(se # Setup: # Provider record was updated during upload old_provider, new_provider = self._when_provider_top_level_record_needs_reverted() - + # Execute: Perform rollback event = self._generate_test_event() - + result = rollback_license_upload(event, Mock()) - + # Assert: Rollback completed successfully self.assertEqual(result['rollbackStatus'], 'COMPLETE') self.assertEqual(1, result['providersReverted']) - + # Verify: Provider record has been reset to old values provider_records = self.config.data_client.get_provider_user_records( compact=self.compact, @@ -308,28 
+335,27 @@ def test_provider_top_level_record_deleted_when_license_created_during_bad_uploa provider_id=self.provider_id, ) - def test_provider_license_record_reset_to_prior_values_when_upload_reverted(self): """Test that license record is reset to values before upload.""" from handlers.rollback_license_upload import rollback_license_upload # Setup: License was updated during upload (e.g., renewed), but was first uploaded before start time - original_license, license_update, updated_license = self._when_provider_had_license_updated_from_upload( - license_upload_datetime=self.default_start_datetime - timedelta(hours = 1) + original_license, license_update, updated_license = self._when_provider_had_license_updated_from_upload( + license_upload_datetime=self.default_start_datetime - timedelta(hours=1) ) - + # Store the original expiration date from the update's previous values original_expiration = license_update.previous['dateOfExpiration'] - + # Execute: Perform rollback event = self._generate_test_event() - + result = rollback_license_upload(event, Mock()) - + # should return complete message self.assertEqual(result['rollbackStatus'], 'COMPLETE') self.assertEqual(result['providersReverted'], 1) - + # Verify: License record has been reset to original values provider_records = self.config.data_client.get_provider_user_records( compact=self.compact, @@ -340,10 +366,10 @@ def test_provider_license_record_reset_to_prior_values_when_upload_reverted(self self.assertEqual(len(licenses), 1) license_record = licenses[0] self.assertEqual(license_record.dateOfExpiration, original_expiration) - + # Verify: Update record has been deleted license_updates = provider_records.get_all_license_update_records() - self.assertEqual(len(license_updates), 0, "License update records should be deleted") + self.assertEqual(len(license_updates), 0, 'License update records should be deleted') def test_provider_privilege_record_reactivated_when_upload_reverted(self): """Test that privilege is 
reactivated when license deactivation is reverted.""" @@ -352,18 +378,19 @@ def test_provider_privilege_record_reactivated_when_upload_reverted(self): # Setup: Privilege was deactivated during upload due to license deactivation # license was uploaded before rollback window self._when_provider_had_license_updated_from_upload( - license_upload_datetime=self.default_start_datetime - timedelta(hours = 1)) + license_upload_datetime=self.default_start_datetime - timedelta(hours=1) + ) self._when_provider_had_privilege_deactivated_from_upload() - + # Execute: Perform rollback event = self._generate_test_event() - + result = rollback_license_upload(event, Mock()) - + # Assert: Rollback completed successfully self.assertEqual(result['rollbackStatus'], 'COMPLETE') self.assertEqual(result['providersReverted'], 1) - + # Verify: Privilege has been reactivated (status should be 'active') provider_records = self.config.data_client.get_provider_user_records( compact=self.compact, @@ -373,12 +400,12 @@ def test_provider_privilege_record_reactivated_when_upload_reverted(self): privileges = provider_records.get_privilege_records() self.assertEqual(len(privileges), 1) privilege_record = privileges[0] - self.assertEqual(privilege_record.status, 'active', "Privilege should be reactivated") + self.assertEqual(privilege_record.status, 'active', 'Privilege should be reactivated') self.assertIsNone(privilege_record.licenseDeactivatedStatus) - + # Verify: Privilege update record has been deleted privilege_updates = provider_records.get_all_privilege_update_records() - self.assertEqual(len(privilege_updates), 0, "Privilege update records should be deleted") + self.assertEqual(len(privilege_updates), 0, 'Privilege update records should be deleted') def test_provider_license_updates_and_license_record_within_time_period_removed_when_upload_reverted(self): """Test that license update records and license record within the time window are deleted.""" @@ -386,9 +413,9 @@ def 
test_provider_license_updates_and_license_record_within_time_period_removed_ # Setup: License was uploaded and then updated during upload self._when_provider_had_license_updated_from_upload( - license_upload_datetime=self.default_start_datetime + timedelta(hours = 1) + license_upload_datetime=self.default_start_datetime + timedelta(hours=1) ) - + # Verify update record exists before rollback provider_records_before = self.config.data_client.get_provider_user_records( compact=self.compact, @@ -396,19 +423,18 @@ def test_provider_license_updates_and_license_record_within_time_period_removed_ include_update_tier=UpdateTierEnum.TIER_THREE, ) licenses_before = provider_records_before.get_license_records() - self.assertEqual(len(licenses_before), 1, "Should have license record before rollback") + self.assertEqual(len(licenses_before), 1, 'Should have license record before rollback') license_updates_before = provider_records_before.get_all_license_update_records() - self.assertEqual(len(license_updates_before), 1, "Should have update record before rollback") + self.assertEqual(len(license_updates_before), 1, 'Should have update record before rollback') - # Execute: Perform rollback event = self._generate_test_event() - + result = rollback_license_upload(event, Mock()) - + # Assert: Rollback completed successfully self.assertEqual(result['rollbackStatus'], 'COMPLETE') - + # Verify: All records within time window have been deleted with pytest.raises(CCNotFoundException) as exec_info: self.config.data_client.get_provider_user_records( @@ -427,10 +453,10 @@ def test_provider_skipped_if_license_updates_detected_after_end_of_time_window_w ) # update also occurred after upload window self._when_provider_had_license_update_after_upload() - + event = self._generate_test_event() result = rollback_license_upload(event, Mock()) - + # Assert: Rollback completed but provider was skipped self.assertEqual('COMPLETE', result['rollbackStatus']) self.assertEqual(0, 
result['providersReverted']) @@ -443,9 +469,9 @@ def test_provider_skipped_if_license_updates_detected_after_end_of_time_window_w include_update_tier=UpdateTierEnum.TIER_THREE, ) licenses = provider_records.get_license_records() - self.assertEqual(len(licenses), 1, "License should still exist") + self.assertEqual(len(licenses), 1, 'License should still exist') license_updates = provider_records.get_all_license_update_records() - self.assertEqual(2, len(license_updates), "License updates should still exist") + self.assertEqual(2, len(license_updates), 'License updates should still exist') def test_provider_skipped_if_privilege_updates_detected_after_time_period_when_upload_reverted(self): """Test that provider is skipped if non-upload-related privilege updates exist after time window.""" @@ -454,17 +480,17 @@ def test_provider_skipped_if_privilege_updates_detected_after_time_period_when_u # Setup: Provider had privilege update after upload window self._when_provider_had_license_updated_from_upload() self._when_provider_had_privilege_update_after_upload() - + # Execute: Perform rollback event = self._generate_test_event() - + result = rollback_license_upload(event, Mock()) - + # Assert: Rollback completed but provider was skipped self.assertEqual(result['rollbackStatus'], 'COMPLETE') self.assertEqual(1, result['providersSkipped']) self.assertEqual(0, result['providersReverted']) - + # Verify: Privilege record and update still exist (not rolled back) provider_records = self.config.data_client.get_provider_user_records( compact=self.compact, @@ -472,9 +498,9 @@ def test_provider_skipped_if_privilege_updates_detected_after_time_period_when_u include_update_tier=UpdateTierEnum.TIER_THREE, ) privileges = provider_records.get_privilege_records() - self.assertEqual(1, len(privileges), "Privilege should still exist") + self.assertEqual(1, len(privileges), 'Privilege should still exist') privilege_updates = provider_records.get_all_privilege_update_records() - 
self.assertEqual(1, len(privilege_updates), "Privilege update should still exist") + self.assertEqual(1, len(privilege_updates), 'Privilege update should still exist') # Validation tests def test_rollback_validates_datetime_format(self): @@ -518,3 +544,50 @@ def test_rollback_validates_maximum_time_window(self): self.assertEqual(result['rollbackStatus'], 'FAILED') self.assertIn('cannot exceed', result['error']) + # Tests for checking data written to S3 + def test_expected_s3_object_stored_when_provider_license_record_reset_to_prior_values(self): + """Test that license record is reset to values before upload.""" + import json + + from handlers.rollback_license_upload import rollback_license_upload + + # Setup: License was updated during upload (e.g., renewed), but was first uploaded before start time + original_license, license_update, updated_license = self._when_provider_had_license_updated_from_upload( + license_upload_datetime=self.default_start_datetime - timedelta(hours=1) + ) + + # Execute: Perform rollback + event = self._generate_test_event() + execution_id = event['executionId'] + + rollback_license_upload(event, Mock()) + + # Read object from S3 and verify its contents match what is expected + s3_key = f'{execution_id}/results.json' + s3_obj = self.config.s3_client.get_object(Bucket=self.config.rollback_results_bucket_name, Key=s3_key) + results_data = json.loads(s3_obj['Body'].read().decode('utf-8')) + + # Verify the structure of the results + self.assertEqual( + { + 'failedProviderDetails': [], + 'revertedProviderSummaries': [ + { + 'licensesReverted': [ + { + 'action': 'REVERT', + 'jurisdiction': original_license.jurisdiction, + 'licenseType': original_license.licenseType, + # random UUID, we won't check for it here + 'revisionId': ANY, + } + ], + 'privilegesReverted': [], + 'providerId': self.provider_id, + 'updatesDeleted': 1, + } + ], + 'skippedProviderDetails': [], + }, + results_data, + ) From f931aee78cc3dc658e42dff2d92952ae914f46ec Mon Sep 17 
00:00:00 2001 From: Landon Shumway Date: Fri, 7 Nov 2025 00:16:13 -0600 Subject: [PATCH 23/81] record sort keys of update records that were deleted --- .../handlers/rollback_license_upload.py | 17 ++++++++--------- .../function/test_rollback_license_upload.py | 2 +- 2 files changed, 9 insertions(+), 10 deletions(-) diff --git a/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py b/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py index 8f3f80c01..b3f08679c 100644 --- a/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py +++ b/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py @@ -71,7 +71,7 @@ class ProviderRevertedSummary: provider_id: str licenses_reverted: list[RevertedLicense] = field(default_factory=list) privileges_reverted: list[RevertedPrivilege] = field(default_factory=list) - updates_deleted: int = 0 + updates_deleted: list[str] = field(default_factory=list) # List of SKs for deleted update records @dataclass @@ -112,7 +112,6 @@ def to_dict(self) -> dict: } for privilege in summary.privileges_reverted ], - # TODO - add pk/sk to list 'updatesDeleted': summary.updates_deleted, } for summary in self.reverted_provider_summaries @@ -157,7 +156,7 @@ def from_dict(cls, data: dict) -> 'RollbackResults': ) for privilege in summary.get('privilegesReverted', []) ], - updates_deleted=summary.get('updatesDeleted', 0), + updates_deleted=summary.get('updatesDeleted', []), ) for summary in data.get('revertedProviderSummaries', []) ], @@ -494,7 +493,7 @@ def _build_and_execute_revert_transactions( table_name = config.provider_table_name reverted_licenses = [] reverted_privileges = [] - updates_deleted_count = 0 + updates_deleted_sks = [] # List of SKs for deleted update records ineligible_updates: list[IneligibleUpdate] = [] # Helper functions for cleaner item building @@ -587,7 +586,7 @@ def add_delete(pk: 
str, sk: str, update_record: bool): # License deactivation within window - mark for deletion serialized_privilege_update = privilege_update.serialize_to_database_record() add_delete(serialized_privilege_update['pk'], serialized_privilege_update['sk'], update_record=True) - updates_deleted_count += 1 + updates_deleted_sks.append(serialized_privilege_update['sk']) logger.info('Will delete privilege deactivation update record if provider is eligible for rollback') # Reactivate the privilege @@ -655,7 +654,7 @@ def add_delete(pk: str, sk: str, update_record: bool): for update in license_updates_after_start: serialized_license_update = update.serialize_to_database_record() add_delete(serialized_license_update['pk'], serialized_license_update['sk'], update_record=True) - updates_deleted_count += 1 + updates_deleted_sks.append(serialized_license_update['sk']) logger.info('Will delete license update record if provider is eligible for rollback', update_type=update.updateType) else: # If license record was not created during the window, check license updates for eligibility and build transactions @@ -678,7 +677,7 @@ def add_delete(pk: str, sk: str, update_record: bool): license_updates_in_window.append(license_update) serialized_license_update = license_update.serialize_to_database_record() add_delete(serialized_license_update['pk'], serialized_license_update['sk'], update_record=True) - updates_deleted_count += 1 + updates_deleted_sks.append(serialized_license_update['sk']) logger.info('Will delete license update record if provider is eligible for rollback', update_type=license_update.updateType) # If there were updates in the window and no updates after end_datetime, revert the license @@ -760,7 +759,7 @@ def add_delete(pk: str, sk: str, update_record: bool): provider_id=provider_id, licenses_reverted=reverted_licenses, privileges_reverted=reverted_privileges, - updates_deleted=updates_deleted_count, + updates_deleted=updates_deleted_sks, ) 
_perform_transaction(transaction_items) @@ -798,7 +797,7 @@ def add_delete(pk: str, sk: str, update_record: bool): provider_id=provider_id, licenses_reverted=reverted_licenses, privileges_reverted=reverted_privileges, - updates_deleted=updates_deleted_count, + updates_deleted=updates_deleted_sks, ) diff --git a/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py b/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py index 9066d9c3a..0398ed4d9 100644 --- a/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py +++ b/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py @@ -584,7 +584,7 @@ def test_expected_s3_object_stored_when_provider_license_record_reset_to_prior_v ], 'privilegesReverted': [], 'providerId': self.provider_id, - 'updatesDeleted': 1, + 'updatesDeleted': ['aslp#UPDATE#3#license/oh/slp/1761207300/d8781f4e9489217462892394a791e885'], } ], 'skippedProviderDetails': [], From 6ac5318b87734b16121d5791379e2eb8f9ebc90b Mon Sep 17 00:00:00 2001 From: Landon Shumway Date: Fri, 7 Nov 2025 00:24:21 -0600 Subject: [PATCH 24/81] tracking additional fields on event bus event --- .../data_model/schema/data_event/api.py | 6 ++++ .../common/cc_common/event_bus_client.py | 18 +++++++++++ .../handlers/rollback_license_upload.py | 30 +++++++++++++++---- 3 files changed, 49 insertions(+), 5 deletions(-) diff --git a/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/data_event/api.py b/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/data_event/api.py index d5d11812f..91e29e5be 100644 --- a/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/data_event/api.py +++ b/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/data_event/api.py @@ -73,6 +73,9 @@ class 
LicenseRevertDetailSchema(DataEventDetailBaseSchema): licenseType = String(required=True, allow_none=False) jurisdiction = Jurisdiction(required=True, allow_none=False) rollbackReason = String(required=True, allow_none=False) + startTime = DateTime(required=True, allow_none=False) + endTime = DateTime(required=True, allow_none=False) + revisionId = UUID(required=True, allow_none=False) class PrivilegeRevertDetailSchema(DataEventDetailBaseSchema): @@ -80,3 +83,6 @@ class PrivilegeRevertDetailSchema(DataEventDetailBaseSchema): licenseType = String(required=True, allow_none=False) jurisdiction = Jurisdiction(required=True, allow_none=False) rollbackReason = String(required=True, allow_none=False) + startTime = DateTime(required=True, allow_none=False) + endTime = DateTime(required=True, allow_none=False) + revisionId = UUID(required=True, allow_none=False) diff --git a/backend/compact-connect/lambdas/python/common/cc_common/event_bus_client.py b/backend/compact-connect/lambdas/python/common/cc_common/event_bus_client.py index ab2e74ceb..921e6989a 100644 --- a/backend/compact-connect/lambdas/python/common/cc_common/event_bus_client.py +++ b/backend/compact-connect/lambdas/python/common/cc_common/event_bus_client.py @@ -446,6 +446,9 @@ def publish_license_revert_event( jurisdiction: str, license_type: str, rollback_reason: str, + start_time: datetime, + end_time: datetime, + revision_id: UUID, event_batch_writer: EventBatchWriter | None = None, ): """ @@ -457,6 +460,9 @@ def publish_license_revert_event( :param jurisdiction: The jurisdiction of the license :param license_type: The license type :param rollback_reason: The reason for the rollback + :param start_time: The start time of the rollback window + :param end_time: The end time of the rollback window + :param revision_id: The revision ID of the reverted license :param event_batch_writer: Optional EventBatchWriter for efficient batch publishing """ from cc_common.data_model.schema.data_event.api import 
LicenseRevertDetailSchema @@ -467,6 +473,9 @@ def publish_license_revert_event( 'jurisdiction': jurisdiction, 'licenseType': license_type, 'rollbackReason': rollback_reason, + 'startTime': start_time, + 'endTime': end_time, + 'revisionId': revision_id, 'eventTime': config.current_standard_datetime, } @@ -488,6 +497,9 @@ def publish_privilege_revert_event( jurisdiction: str, license_type: str, rollback_reason: str, + start_time: datetime, + end_time: datetime, + revision_id: UUID, event_batch_writer: EventBatchWriter | None = None, ): """ @@ -499,6 +511,9 @@ def publish_privilege_revert_event( :param jurisdiction: The jurisdiction of the privilege :param license_type: The license type :param rollback_reason: The reason for the rollback + :param start_time: The start time of the rollback window + :param end_time: The end time of the rollback window + :param revision_id: The revision ID of the reverted privilege :param event_batch_writer: Optional EventBatchWriter for efficient batch publishing """ from cc_common.data_model.schema.data_event.api import PrivilegeRevertDetailSchema @@ -509,6 +524,9 @@ def publish_privilege_revert_event( 'jurisdiction': jurisdiction, 'licenseType': license_type, 'rollbackReason': rollback_reason, + 'startTime': start_time, + 'endTime': end_time, + 'revisionId': revision_id, 'eventTime': config.current_standard_datetime, } diff --git a/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py b/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py index b3f08679c..ae7a563cd 100644 --- a/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py +++ b/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py @@ -442,7 +442,7 @@ def _process_provider_rollback( return result # Publish events for successful rollback - _publish_revert_events(result, compact, rollback_reason) + 
_publish_revert_events(result, compact, rollback_reason, start_datetime, end_datetime) logger.info('Provider rollback successful', provider_id=provider_id) return result @@ -801,21 +801,38 @@ def add_delete(pk: str, sk: str, update_record: bool): ) -def _publish_revert_events(revert_summary: ProviderRevertedSummary, compact: str, rollback_reason: str): +def _publish_revert_events( + revert_summary: ProviderRevertedSummary, + compact: str, + rollback_reason: str, + start_datetime: datetime, + end_datetime: datetime, +): """ Publish revert events for all reverted licenses and privileges. + + :param revert_summary: Summary of reverted provider records + :param compact: The compact name + :param rollback_reason: The reason for the rollback + :param start_datetime: The start time of the rollback window + :param end_datetime: The end time of the rollback window """ with EventBatchWriter(config.events_client) as event_writer: + # Convert provider_id string to UUID for event publishing + provider_id_uuid = UUID(revert_summary.provider_id) + # Publish license revert events for reverted_license in revert_summary.licenses_reverted: config.event_bus_client.publish_license_revert_event( source='org.compactconnect.disaster-recovery', compact=compact, - # TODO - add start time, end time, and revert id - provider_id=revert_summary.provider_id, + provider_id=provider_id_uuid, jurisdiction=reverted_license.jurisdiction, license_type=reverted_license.license_type, rollback_reason=rollback_reason, + start_time=start_datetime, + end_time=end_datetime, + revision_id=reverted_license.revision_id, event_batch_writer=event_writer, ) @@ -824,10 +841,13 @@ def _publish_revert_events(revert_summary: ProviderRevertedSummary, compact: str config.event_bus_client.publish_privilege_revert_event( source='org.compactconnect.disaster-recovery', compact=compact, - provider_id=revert_summary.provider_id, + provider_id=provider_id_uuid, jurisdiction=reverted_privilege.jurisdiction, 
license_type=reverted_privilege.license_type, rollback_reason=rollback_reason, + start_time=start_datetime, + end_time=end_datetime, + revision_id=reverted_privilege.revision_id, event_batch_writer=event_writer, ) From 0db315fa273bf893a698b7795e88cb7fbe1312af Mon Sep 17 00:00:00 2001 From: Landon Shumway Date: Fri, 7 Nov 2025 08:30:41 -0600 Subject: [PATCH 25/81] formatting --- .../handlers/rollback_license_upload.py | 159 ++++++++++-------- 1 file changed, 88 insertions(+), 71 deletions(-) diff --git a/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py b/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py index ae7a563cd..93d8a22c9 100644 --- a/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py +++ b/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py @@ -1,15 +1,15 @@ import json import time -from uuid import UUID, uuid4 from dataclasses import asdict, dataclass, field from datetime import datetime +from uuid import UUID, uuid4 from aws_lambda_powertools.utilities.typing import LambdaContext from boto3.dynamodb.conditions import Key from botocore.exceptions import ClientError from cc_common.config import config, logger -from cc_common.data_model.provider_record_util import ProviderUserRecords, ProviderRecordUtility -from cc_common.data_model.schema.common import UpdateCategory, LICENSE_UPLOAD_UPDATE_CATEGORIES +from cc_common.data_model.provider_record_util import ProviderRecordUtility, ProviderUserRecords +from cc_common.data_model.schema.common import LICENSE_UPLOAD_UPDATE_CATEGORIES, UpdateCategory from cc_common.data_model.schema.privilege import PrivilegeData from cc_common.data_model.schema.provider import ProviderData from cc_common.data_model.update_tier_enum import UpdateTierEnum @@ -27,6 +27,7 @@ @dataclass class IneligibleUpdate: """Represents an update that makes a provider ineligible for 
rollback.""" + type: str # 'licenseUpdate', 'privilegeUpdate', or 'providerUpdate' update_type: str create_date: str @@ -36,6 +37,7 @@ class IneligibleUpdate: @dataclass class ProviderSkippedDetails: """Details for a provider that was skipped.""" + provider_id: str reason: str ineligible_updates: list[IneligibleUpdate] = field(default_factory=list) @@ -44,6 +46,7 @@ class ProviderSkippedDetails: @dataclass class ProviderFailedDetails: """Details for a provider that failed to revert.""" + provider_id: str error: str @@ -51,6 +54,7 @@ class ProviderFailedDetails: @dataclass class RevertedLicense: """Details of a reverted license for event publishing.""" + jurisdiction: str license_type: str revision_id: UUID @@ -60,6 +64,7 @@ class RevertedLicense: @dataclass class RevertedPrivilege: """Details of a reverted privilege for event publishing.""" + jurisdiction: str license_type: str revision_id: UUID @@ -68,6 +73,7 @@ class RevertedPrivilege: @dataclass class ProviderRevertedSummary: """Summary for a provider that was successfully reverted.""" + provider_id: str licenses_reverted: list[RevertedLicense] = field(default_factory=list) privileges_reverted: list[RevertedPrivilege] = field(default_factory=list) @@ -77,6 +83,7 @@ class ProviderRevertedSummary: @dataclass class RollbackResults: """Complete results of a rollback operation.""" + skipped_provider_details: list[ProviderSkippedDetails] = field(default_factory=list) failed_provider_details: list[ProviderFailedDetails] = field(default_factory=list) reverted_provider_summaries: list[ProviderRevertedSummary] = field(default_factory=list) @@ -84,14 +91,8 @@ class RollbackResults: def to_dict(self) -> dict: """Convert to dictionary for S3 storage.""" return { - 'skippedProviderDetails': [ - asdict(detail) - for detail in self.skipped_provider_details - ], - 'failedProviderDetails': [ - asdict(detail) - for detail in self.failed_provider_details - ], + 'skippedProviderDetails': [asdict(detail) for detail in 
self.skipped_provider_details], + 'failedProviderDetails': [asdict(detail) for detail in self.failed_provider_details], 'revertedProviderSummaries': [ { 'providerId': str(summary.provider_id), @@ -100,7 +101,7 @@ def to_dict(self) -> dict: 'jurisdiction': license_record.jurisdiction, 'licenseType': license_record.license_type, 'revisionId': str(license_record.revision_id), - 'action': license_record.action + 'action': license_record.action, } for license_record in summary.licenses_reverted ], @@ -436,7 +437,7 @@ def _process_provider_rollback( jurisdiction=jurisdiction, provider_id=provider_id, ) - + # If provider was skipped due to ineligibility, return early if isinstance(result, ProviderSkippedDetails): return result @@ -454,11 +455,12 @@ def _process_provider_rollback( error=str(e), ) + def _perform_transaction(transaction_items: list[dict]) -> None: logger.info(f'Executing {len(transaction_items)} transaction items in batches of 100') for i in range(0, len(transaction_items), 100): - batch = transaction_items[i:i + 100] + batch = transaction_items[i : i + 100] # Use Table resource's client for automatic type conversion # TODO - catch failures and add failure record to write to S3 results object config.provider_table.meta.client.transact_write_items(TransactItems=batch) @@ -475,12 +477,12 @@ def _build_and_execute_revert_transactions( ) -> ProviderRevertedSummary | ProviderSkippedDetails: """ Build and execute DynamoDB transactions to revert provider records. - + This function processes all records in a single pass: - Checks eligibility (returns ProviderSkippedDetails if ineligible) - Builds transaction items - Executes transactions - + Returns either a summary of what was reverted or details about why the provider was skipped. """ from cc_common.data_model.schema.license import LicenseData @@ -500,7 +502,7 @@ def _build_and_execute_revert_transactions( def add_put(item: dict, update_record: bool): """ Add a Put operation to the appropriate list. 
- + Args: item: The item to put update_record: True if the item is an update record, False if it is a primary record @@ -519,7 +521,7 @@ def add_put(item: dict, update_record: bool): def add_delete(pk: str, sk: str, update_record: bool): """ Add a Delete operation. - + Args: pk: Partition key sk: Sort key - used to determine if this is an update record @@ -550,12 +552,10 @@ def add_delete(pk: str, sk: str, update_record: bool): ) # Step 2: Process each license record for the jurisdiction - license_records = provider_records.get_license_records( - filter_condition=lambda x: x.jurisdiction == jurisdiction - ) - + license_records = provider_records.get_license_records(filter_condition=lambda x: x.jurisdiction == jurisdiction) + reverted_licenses_dict = [] - + for license_record in license_records: privileges_associated_with_license = provider_records.get_privilege_records( filter_condition=lambda x: x.jurisdiction == jurisdiction and x.licenseType == license_record.licenseType @@ -566,10 +566,13 @@ def add_delete(pk: str, sk: str, update_record: bool): privilege_updates = provider_records.get_all_privilege_update_records( filter_condition=lambda x: x.jurisdiction in privilege_jurisdictions and x.dateOfUpdate >= start_datetime, ) - + # Check privilege updates for eligibility for privilege_update in privilege_updates: - if privilege_update.updateType != PRIVILEGE_LICENSE_DEACTIVATION_CATEGORY or privilege_update.createDate > end_datetime: + if ( + privilege_update.updateType != PRIVILEGE_LICENSE_DEACTIVATION_CATEGORY + or privilege_update.createDate > end_datetime + ): # Non-license-deactivation privilege update or privilege update after end_datetime make provider ineligible ineligible_updates.append( IneligibleUpdate( @@ -578,8 +581,8 @@ def add_delete(pk: str, sk: str, update_record: bool): create_date=privilege_update.dateOfUpdate.isoformat(), # include privilege jurisdiction in reason reason=f'Privilege in jurisdiction {privilege_update.jurisdiction} for license ' - 
f'type {privilege_update.licenseType} was updated with a change unrelated to license ' - f'upload or the update occurred after rollback end time. Manual review required.', + f'type {privilege_update.licenseType} was updated with a change unrelated to license ' + f'upload or the update occurred after rollback end time. Manual review required.', ) ) elif start_datetime <= privilege_update.createDate <= end_datetime: @@ -588,7 +591,7 @@ def add_delete(pk: str, sk: str, update_record: bool): add_delete(serialized_privilege_update['pk'], serialized_privilege_update['sk'], update_record=True) updates_deleted_sks.append(serialized_privilege_update['sk']) logger.info('Will delete privilege deactivation update record if provider is eligible for rollback') - + # Reactivate the privilege privilege_record = provider_records.get_specific_privilege_record( jurisdiction=privilege_update.jurisdiction, @@ -599,19 +602,21 @@ def add_delete(pk: str, sk: str, update_record: bool): 'privilege record found associated with deactivation, reactivating privilege', provider_id=provider_id, privilege_jurisdiction=privilege_record.jurisdiction, - license_type=privilege_record.licenseType + license_type=privilege_record.licenseType, ) # Remove the licenseDeactivatedStatus field to reactivate using UPDATE operation serialized_privilege = privilege_record.serialize_to_database_record() - primary_record_transaction_items.append({ - 'Update': { - 'TableName': table_name, - 'Key': {'pk': serialized_privilege['pk'], 'sk': serialized_privilege['sk']}, - 'UpdateExpression': 'REMOVE licenseDeactivatedStatus' + primary_record_transaction_items.append( + { + 'Update': { + 'TableName': table_name, + 'Key': {'pk': serialized_privilege['pk'], 'sk': serialized_privilege['sk']}, + 'UpdateExpression': 'REMOVE licenseDeactivatedStatus', + } } - }) + ) logger.info('Will reactivate privilege record if provider is eligible for rollback') - + reverted_privileges.append( RevertedPrivilege( 
jurisdiction=license_record.jurisdiction, @@ -628,18 +633,21 @@ def add_delete(pk: str, sk: str, update_record: bool): ) # if license record was created during the window, delete it and all update records after start_datetime - if license_record.firstUploadDate is not None and start_datetime <= license_record.firstUploadDate <= end_datetime: + if ( + license_record.firstUploadDate is not None + and start_datetime <= license_record.firstUploadDate <= end_datetime + ): if privilege_jurisdictions: ineligible_updates.append( IneligibleUpdate( type='privilegeUpdate', update_type='Issuance', create_date=datetime.now().isoformat(), - reason=f'Privileges issued in jurisdictions {privilege_jurisdictions} for ' + - f'license type {license_record.licenseType} after license upload. Manual review required.', + reason=f'Privileges issued in jurisdictions {privilege_jurisdictions} for ' + + f'license type {license_record.licenseType} after license upload. Manual review required.', ) ) - # no privileges found, so we can delete the license record + # no privileges found, so we can delete the license record serialized_license_record = license_record.serialize_to_database_record() add_delete(serialized_license_record['pk'], serialized_license_record['sk'], update_record=False) logger.info('Will delete license record (created during upload) if provider is eligible for rollback') @@ -655,12 +663,18 @@ def add_delete(pk: str, sk: str, update_record: bool): serialized_license_update = update.serialize_to_database_record() add_delete(serialized_license_update['pk'], serialized_license_update['sk'], update_record=True) updates_deleted_sks.append(serialized_license_update['sk']) - logger.info('Will delete license update record if provider is eligible for rollback', update_type=update.updateType) + logger.info( + 'Will delete license update record if provider is eligible for rollback', + update_type=update.updateType, + ) else: # If license record was not created during the window, check 
license updates for eligibility and build transactions license_updates_in_window = [] for license_update in license_updates_after_start: - if license_update.updateType not in LICENSE_UPLOAD_UPDATE_CATEGORIES or license_update.createDate > end_datetime: + if ( + license_update.updateType not in LICENSE_UPLOAD_UPDATE_CATEGORIES + or license_update.createDate > end_datetime + ): # Non-upload-related license updates make provider ineligible ineligible_updates.append( IneligibleUpdate( @@ -668,8 +682,8 @@ def add_delete(pk: str, sk: str, update_record: bool): update_type=license_update.updateType, create_date=license_update.createDate.isoformat(), reason=f'License update for license type {license_update.licenseType} was updated with a ' - f'change unrelated to license upload or the update occurred after rollback end ' - f'time. Manual review required.', + f'change unrelated to license upload or the update occurred after rollback end ' + f'time. Manual review required.', ) ) elif start_datetime <= license_update.createDate <= end_datetime: @@ -678,26 +692,33 @@ def add_delete(pk: str, sk: str, update_record: bool): serialized_license_update = license_update.serialize_to_database_record() add_delete(serialized_license_update['pk'], serialized_license_update['sk'], update_record=True) updates_deleted_sks.append(serialized_license_update['sk']) - logger.info('Will delete license update record if provider is eligible for rollback', update_type=license_update.updateType) - + logger.info( + 'Will delete license update record if provider is eligible for rollback', + update_type=license_update.updateType, + ) + # If there were updates in the window and no updates after end_datetime, revert the license # to the previous values of the earliest update in the window if license_updates_in_window: updates_after_window = [u for u in license_updates_after_start if u.createDate > end_datetime] - + if not updates_after_window: # Find the earliest update in the window to get the previous 
state license_updates_in_window.sort(key=lambda x: x.createDate) earliest_update_in_window = license_updates_in_window[0] - + # Check if license was created during the window (uploadDate within window) - if (license_record.firstUploadDate is not None and - start_datetime <= license_record.firstUploadDate <= end_datetime): + if ( + license_record.firstUploadDate is not None + and start_datetime <= license_record.firstUploadDate <= end_datetime + ): # License created during upload - delete it serialized_license_record = license_record.serialize_to_database_record() - add_delete(serialized_license_record['pk'], serialized_license_record['sk'], update_record=False) + add_delete( + serialized_license_record['pk'], serialized_license_record['sk'], update_record=False + ) logger.info('Will delete license record (created during upload)') - + reverted_licenses.append( RevertedLicense( jurisdiction=license_record.jurisdiction, @@ -710,17 +731,17 @@ def add_delete(pk: str, sk: str, update_record: bool): # License existed before - revert to previous state reverted_license_data = license_record.to_dict() reverted_license_data.update(earliest_update_in_window.previous) - + reverted_license = LicenseData.create_new(reverted_license_data) serialized_reverted_license = reverted_license.serialize_to_database_record() - + add_put(serialized_reverted_license, update_record=True) logger.info('Reverting license record to pre-upload state') - + # Track for provider record regeneration license_schema = LicenseRecordSchema() reverted_licenses_dict.append(license_schema.load(serialized_reverted_license)) - + reverted_licenses.append( RevertedLicense( jurisdiction=license_record.jurisdiction, @@ -736,23 +757,23 @@ def add_delete(pk: str, sk: str, update_record: bool): else: # No updates in window, keep license as-is reverted_licenses_dict.append(license_record.to_dict()) - + # Check if provider is ineligible for rollback if ineligible_updates: logger.info( 'Provider not eligible for 
automatic rollback', provider_id=provider_id, - ineligible_updates=ineligible_updates + ineligible_updates=ineligible_updates, ) return ProviderSkippedDetails( provider_id=provider_id, reason='Provider has updates that are either unrelated to license upload or occurred after rollback end time. Manual review required.', ineligible_updates=ineligible_updates, ) - + # process primary records first, then update records transaction_items = primary_record_transaction_items + update_record_transactions_items - + if not transaction_items: logger.warning('No transaction items to execute') return ProviderRevertedSummary( @@ -761,25 +782,22 @@ def add_delete(pk: str, sk: str, update_record: bool): privileges_reverted=reverted_privileges, updates_deleted=updates_deleted_sks, ) - + _perform_transaction(transaction_items) # Now read all the license records for the provider and update the provider record # Fetch all provider records including all update tiers - provider_records = config.data_client.get_provider_user_records( - compact=compact, - provider_id=provider_id - ) + provider_records = config.data_client.get_provider_user_records(compact=compact, provider_id=provider_id) top_level_provider_record: ProviderData = provider_records.get_provider_record() privilege_records: list[PrivilegeData] = provider_records.get_privilege_records() - + # Create a new list for provider record updates (all first tier items) primary_record_transaction_items.clear() - + try: best_license = provider_records.find_best_license_in_current_known_licenses() provider_record = ProviderRecordUtility.populate_provider_record( - current_provider_record=top_level_provider_record, + current_provider_record=top_level_provider_record, license_record=best_license.to_dict(), privilege_records=[privilege.to_dict() for privilege in privilege_records], ) @@ -810,7 +828,7 @@ def _publish_revert_events( ): """ Publish revert events for all reverted licenses and privileges. 
- + :param revert_summary: Summary of reverted provider records :param compact: The compact name :param rollback_reason: The reason for the rollback @@ -820,7 +838,7 @@ def _publish_revert_events( with EventBatchWriter(config.events_client) as event_writer: # Convert provider_id string to UUID for event publishing provider_id_uuid = UUID(revert_summary.provider_id) - + # Publish license revert events for reverted_license in revert_summary.licenses_reverted: config.event_bus_client.publish_license_revert_event( @@ -880,4 +898,3 @@ def _write_results_to_s3(key: str, results: RollbackResults): except Exception as e: logger.error(f'Error writing results to S3: {str(e)}') raise - From 93dd46f2c7a198f3a5b6d286ff940cd245a68f1d Mon Sep 17 00:00:00 2001 From: Landon Shumway Date: Fri, 7 Nov 2025 10:32:28 -0600 Subject: [PATCH 26/81] tests/fixes for verifying result report objects placed in S3 --- .../common/common_test/test_constants.py | 4 + .../common/common_test/test_data_generator.py | 53 +++- .../handlers/rollback_license_upload.py | 61 ++-- .../function/test_rollback_license_upload.py | 280 ++++++++++++++++-- 4 files changed, 341 insertions(+), 57 deletions(-) diff --git a/backend/compact-connect/lambdas/python/common/common_test/test_constants.py b/backend/compact-connect/lambdas/python/common/common_test/test_constants.py index 54043c5c3..542a08d8d 100644 --- a/backend/compact-connect/lambdas/python/common/common_test/test_constants.py +++ b/backend/compact-connect/lambdas/python/common/common_test/test_constants.py @@ -57,6 +57,7 @@ PRIVILEGE_RECORD_TYPE = 'privilege' PRIVILEGE_UPDATE_RECORD_TYPE = 'privilegeUpdate' PROVIDER_RECORD_TYPE = 'provider' +PROVIDER_UPDATE_RECORD_TYPE = 'providerUpdate' # Privilege update default values DEFAULT_PRIVILEGE_UPDATE_TYPE = 'renewal' @@ -74,6 +75,9 @@ DEFAULT_LICENSE_UPDATE_PREVIOUS_DATE_OF_EXPIRATION = '2020-06-06' DEFAULT_LICENSE_UPDATE_PREVIOUS_DATE_OF_RENEWAL = '2015-06-06' +# Provider update default values 
+DEFAULT_PROVIDER_UPDATE_TYPE = 'registration' + # Adverse Action defaults DEFAULT_ACTION_AGAINST_PRIVILEGE = 'privilege' DEFAULT_BLOCKS_FUTURE_PRIVILEGES = True diff --git a/backend/compact-connect/lambdas/python/common/common_test/test_data_generator.py b/backend/compact-connect/lambdas/python/common/common_test/test_data_generator.py index 5fdc0629e..5bc5c265d 100644 --- a/backend/compact-connect/lambdas/python/common/common_test/test_data_generator.py +++ b/backend/compact-connect/lambdas/python/common/common_test/test_data_generator.py @@ -13,7 +13,7 @@ from cc_common.data_model.schema.license import LicenseData, LicenseUpdateData from cc_common.data_model.schema.military_affiliation import MilitaryAffiliationData from cc_common.data_model.schema.privilege import PrivilegeData, PrivilegeUpdateData -from cc_common.data_model.schema.provider import ProviderData +from cc_common.data_model.schema.provider import ProviderData, ProviderUpdateData from cc_common.utils import ResponseEncoder from common_test.test_constants import * @@ -495,6 +495,57 @@ def put_default_provider_record_in_provider_table( return provider_data + @staticmethod + def generate_default_provider_update( + value_overrides: dict | None = None, previous_provider: ProviderData | None = None + ) -> ProviderUpdateData: + """Generate a default provider update""" + if previous_provider is None: + previous_provider = TestDataGenerator.generate_default_provider() + + # Ensure previous provider has dateOfUpdate for the previous field + previous_dict = previous_provider.to_dict() + if 'dateOfUpdate' not in previous_dict: + previous_dict['dateOfUpdate'] = datetime.fromisoformat(DEFAULT_PROVIDER_UPDATE_DATETIME) + + provider_update = { + 'updateType': DEFAULT_PROVIDER_UPDATE_TYPE, + 'providerId': DEFAULT_PROVIDER_ID, + 'compact': DEFAULT_COMPACT, + 'type': PROVIDER_UPDATE_RECORD_TYPE, + 'previous': previous_dict, + 'updatedValues': { + 'compactConnectRegisteredEmailAddress': 
DEFAULT_REGISTERED_EMAIL_ADDRESS, + 'currentHomeJurisdiction': DEFAULT_LICENSE_JURISDICTION, + }, + 'dateOfUpdate': datetime.fromisoformat(DEFAULT_PROVIDER_UPDATE_DATETIME), + } + if value_overrides: + provider_update.update(value_overrides) + + return ProviderUpdateData.create_new(provider_update) + + @staticmethod + def put_default_provider_update_record_in_provider_table( + value_overrides: dict | None = None, date_of_update_override: str = None + ) -> ProviderUpdateData: + """ + Creates a default provider update record and stores it in the provider table. + + :param value_overrides: Optional dictionary to override default values + :param date_of_update_override: optional date for date of update to be shown on provider record + :return: The ProviderUpdateData instance that was stored + """ + provider_update_data = TestDataGenerator.generate_default_provider_update(value_overrides) + provider_update_record = provider_update_data.serialize_to_database_record() + if date_of_update_override: + provider_update_record['dateOfUpdate'] = date_of_update_override + + TestDataGenerator.store_record_in_provider_table(provider_update_record) + + # recreate data object to ensure it picks up the dateOfUpdate change + return ProviderUpdateData.from_database_record(provider_update_record) + @staticmethod def _override_date_of_update_for_record(data_class: CCDataClass, date_of_update: datetime): # we have to access this here, as in runtime code dateOfUpdate is not to be modified diff --git a/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py b/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py index 93d8a22c9..3c1eaa428 100644 --- a/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py +++ b/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py @@ -28,10 +28,11 @@ class IneligibleUpdate: """Represents an update that 
makes a provider ineligible for rollback.""" - type: str # 'licenseUpdate', 'privilegeUpdate', or 'providerUpdate' - update_type: str - create_date: str + record_type: str # 'licenseUpdate', 'privilegeUpdate', or 'providerUpdate' + type_of_update: str + update_time: str reason: str + license_type: str | None = None # License type if applicable (None for provider updates) @dataclass @@ -68,6 +69,7 @@ class RevertedPrivilege: jurisdiction: str license_type: str revision_id: UUID + action: str @dataclass @@ -110,6 +112,7 @@ def to_dict(self) -> dict: 'jurisdiction': privilege.jurisdiction, 'licenseType': privilege.license_type, 'revisionId': str(privilege.revision_id), + 'action': privilege.action, } for privilege in summary.privileges_reverted ], @@ -544,10 +547,12 @@ def add_delete(pk: str, sk: str, update_record: bool): if update.dateOfUpdate >= start_datetime: ineligible_updates.append( IneligibleUpdate( - type='providerUpdate', - update_type=update.updateType, - create_date=update.dateOfUpdate.isoformat(), + record_type='providerUpdate', + type_of_update=update.updateType, + update_time=update.dateOfUpdate.isoformat(), reason='Provider update occurred after rollback start time. 
Manual review required.', + # provider updates are not specific to a license type + license_type='N/A' ) ) @@ -558,7 +563,7 @@ def add_delete(pk: str, sk: str, update_record: bool): for license_record in license_records: privileges_associated_with_license = provider_records.get_privilege_records( - filter_condition=lambda x: x.jurisdiction == jurisdiction and x.licenseType == license_record.licenseType + filter_condition=lambda x: x.licenseJurisdiction == jurisdiction and x.licenseType == license_record.licenseType ) privilege_jurisdictions = [x.jurisdiction for x in privileges_associated_with_license] # Get privilege updates for all privileges associated with this license @@ -576,13 +581,14 @@ def add_delete(pk: str, sk: str, update_record: bool): # Non-license-deactivation privilege update or privilege update after end_datetime make provider ineligible ineligible_updates.append( IneligibleUpdate( - type='privilegeUpdate', - update_type=privilege_update.updateType, - create_date=privilege_update.dateOfUpdate.isoformat(), + record_type='privilegeUpdate', + type_of_update=privilege_update.updateType, + update_time=privilege_update.dateOfUpdate.isoformat(), + license_type=privilege_update.licenseType, # include privilege jurisdiction in reason - reason=f'Privilege in jurisdiction {privilege_update.jurisdiction} for license ' - f'type {privilege_update.licenseType} was updated with a change unrelated to license ' - f'upload or the update occurred after rollback end time. Manual review required.', + reason=f'Privilege in jurisdiction {privilege_update.jurisdiction} was updated with a change ' + f'unrelated to license upload or the update occurred after rollback end time. 
' + f'Manual review required.', ) ) elif start_datetime <= privilege_update.createDate <= end_datetime: @@ -619,9 +625,10 @@ def add_delete(pk: str, sk: str, update_record: bool): reverted_privileges.append( RevertedPrivilege( - jurisdiction=license_record.jurisdiction, - license_type=license_record.licenseType, + jurisdiction=privilege_record.jurisdiction, + license_type=privilege_record.licenseType, revision_id=uuid4(), + action='REACTIVATED', ) ) @@ -640,11 +647,12 @@ def add_delete(pk: str, sk: str, update_record: bool): if privilege_jurisdictions: ineligible_updates.append( IneligibleUpdate( - type='privilegeUpdate', - update_type='Issuance', - create_date=datetime.now().isoformat(), - reason=f'Privileges issued in jurisdictions {privilege_jurisdictions} for ' - + f'license type {license_record.licenseType} after license upload. Manual review required.', + record_type='privilegeUpdate', + type_of_update='Issuance', + update_time=datetime.now().isoformat(), + license_type=license_record.licenseType, + reason=f'Privileges issued in jurisdictions {privilege_jurisdictions} after license upload. ' + f'Manual review required.', ) ) # no privileges found, so we can delete the license record @@ -678,12 +686,12 @@ def add_delete(pk: str, sk: str, update_record: bool): # Non-upload-related license updates make provider ineligible ineligible_updates.append( IneligibleUpdate( - type='licenseUpdate', - update_type=license_update.updateType, - create_date=license_update.createDate.isoformat(), - reason=f'License update for license type {license_update.licenseType} was updated with a ' - f'change unrelated to license upload or the update occurred after rollback end ' - f'time. 
Manual review required.', + record_type='licenseUpdate', + type_of_update=license_update.updateType, + update_time=license_update.createDate.isoformat(), + license_type=license_update.licenseType, + reason='License was updated with a change unrelated to license upload or the update ' + 'occurred after rollback end time. Manual review required.', ) ) elif start_datetime <= license_update.createDate <= end_datetime: @@ -695,6 +703,7 @@ def add_delete(pk: str, sk: str, update_record: bool): logger.info( 'Will delete license update record if provider is eligible for rollback', update_type=license_update.updateType, + license_type=license_update.licenseType ) # If there were updates in the window and no updates after end_datetime, revert the license diff --git a/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py b/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py index 0398ed4d9..a7c56faef 100644 --- a/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py +++ b/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py @@ -10,9 +10,9 @@ - S3 result management """ +import json from datetime import datetime, timedelta -from unittest.mock import Mock, patch, ANY -from uuid import uuid4 +from unittest.mock import ANY, Mock, patch import pytest from cc_common.data_model.update_tier_enum import UpdateTierEnum @@ -26,6 +26,7 @@ MOCK_ORIGINAL_FAMILY_NAME = 'originalFamily' MOCK_UPDATED_GIVEN_NAME = 'updatedGiven' MOCK_UPDATED_FAMILY_NAME = 'updatedFamily' +MOCK_PROVIDER_ID = 'ba880c7c-5ed3-4be4-8ad5-c8558f58ef6f' @mock_aws @@ -38,8 +39,8 @@ def setUp(self): super().setUp() # Create sample test data self.compact = 'aslp' - self.jurisdiction = 'oh' - self.provider_id = str(uuid4()) + self.license_jurisdiction = 'oh' + self.provider_id = MOCK_PROVIDER_ID # default upload time 
between start and end time self.default_upload_datetime = datetime.fromisoformat(MOCK_DATETIME_STRING) - timedelta(hours=1) self.default_start_datetime = self.default_upload_datetime - timedelta(days=1) @@ -48,12 +49,12 @@ def setUp(self): self.update_categories = UpdateCategory - self._add_provider_record() + self.provider_data = self._add_provider_record() def _generate_test_event(self): return { 'compact': self.compact, - 'jurisdiction': self.jurisdiction, + 'jurisdiction': self.license_jurisdiction, 'startDateTime': self.default_start_datetime.isoformat(), 'endDateTime': self.default_end_datetime.isoformat(), 'rollbackReason': 'Test rollback', @@ -63,15 +64,17 @@ def _generate_test_event(self): def _add_provider_record(self): # add provider record to provider table - self.test_data_generator.put_default_provider_record_in_provider_table( + provider_data = self.test_data_generator.put_default_provider_record_in_provider_table( { 'providerId': self.provider_id, 'compact': self.compact, - 'jurisdiction': self.jurisdiction, + 'jurisdiction': self.license_jurisdiction, 'dateOfUpdate': self.default_start_datetime - timedelta(days=30), } ) + return provider_data + # Helper methods for setting up test scenarios def _when_provider_had_license_created_from_upload(self): """ @@ -82,7 +85,7 @@ def _when_provider_had_license_created_from_upload(self): { 'providerId': self.provider_id, 'compact': self.compact, - 'jurisdiction': self.jurisdiction, + 'jurisdiction': self.license_jurisdiction, 'firstUploadDate': self.default_upload_datetime, 'dateOfUpdate': self.default_upload_datetime, } @@ -106,7 +109,7 @@ def _when_provider_had_license_updated_from_upload( { 'providerId': self.provider_id, 'compact': self.compact, - 'jurisdiction': self.jurisdiction, + 'jurisdiction': self.license_jurisdiction, 'familyName': MOCK_ORIGINAL_FAMILY_NAME, 'givenName': MOCK_ORIGINAL_GIVEN_NAME, 'dateOfUpdate': self.default_start_datetime - timedelta(days=30), @@ -120,7 +123,7 @@ def 
_when_provider_had_license_updated_from_upload( { 'providerId': self.provider_id, 'compact': self.compact, - 'jurisdiction': self.jurisdiction, + 'jurisdiction': self.license_jurisdiction, 'licenseType': original_license.licenseType, 'updateType': self.update_categories.RENEWAL, 'createDate': upload_datetime, @@ -143,7 +146,7 @@ def _when_provider_had_license_updated_from_upload( { 'providerId': self.provider_id, 'compact': self.compact, - 'jurisdiction': self.jurisdiction, + 'jurisdiction': self.license_jurisdiction, 'familyName': MOCK_UPDATED_FAMILY_NAME, 'givenName': MOCK_UPDATED_GIVEN_NAME, 'dateOfUpdate': upload_datetime, @@ -165,12 +168,13 @@ def _when_provider_had_privilege_deactivated_from_upload(self, upload_datetime: if upload_datetime is None: upload_datetime = self.default_upload_datetime - # Create privilege that was deactivated by upload + # provider has privilege in Nebraska that was deactivated by upload privilege = self.test_data_generator.put_default_privilege_record_in_provider_table( { 'providerId': self.provider_id, 'compact': self.compact, - 'jurisdiction': self.jurisdiction, + 'jurisdiction': 'ne', + 'licenseJurisdiction': self.license_jurisdiction, 'dateOfUpdate': self.default_start_datetime - timedelta(days=30), 'licenseDeactivatedStatus': LicenseDeactivatedStatusEnum.LICENSE_DEACTIVATED, 'dateOfExpiration': datetime.fromisoformat(MOCK_DATETIME_STRING), @@ -182,11 +186,14 @@ def _when_provider_had_privilege_deactivated_from_upload(self, upload_datetime: { 'providerId': self.provider_id, 'compact': self.compact, - 'jurisdiction': self.jurisdiction, + 'jurisdiction': 'ne', 'licenseType': privilege.licenseType, 'updateType': self.update_categories.LICENSE_DEACTIVATION, 'createDate': upload_datetime, 'effectiveDate': upload_datetime, + 'previous': { + **privilege.to_dict() + }, 'updatedValues': { 'licenseDeactivatedStatus': LicenseDeactivatedStatusEnum.LICENSE_DEACTIVATED, }, @@ -208,7 +215,7 @@ def 
_when_provider_had_privilege_update_after_upload(self, after_upload_datetime { 'providerId': self.provider_id, 'compact': self.compact, - 'jurisdiction': self.jurisdiction, + 'jurisdiction': self.license_jurisdiction, } ) @@ -217,7 +224,7 @@ def _when_provider_had_privilege_update_after_upload(self, after_upload_datetime { 'providerId': self.provider_id, 'compact': self.compact, - 'jurisdiction': self.jurisdiction, + 'jurisdiction': self.license_jurisdiction, 'licenseType': privilege.licenseType, 'updateType': self.update_categories.RENEWAL, # Not LICENSE_DEACTIVATION 'createDate': after_upload_datetime, @@ -241,7 +248,7 @@ def _when_provider_had_license_update_after_upload(self, after_upload_datetime: { 'providerId': self.provider_id, 'compact': self.compact, - 'jurisdiction': self.jurisdiction, + 'jurisdiction': self.license_jurisdiction, 'updateType': self.update_categories.ENCUMBRANCE, # Not an upload-related category 'createDate': after_upload_datetime, 'effectiveDate': after_upload_datetime, @@ -285,6 +292,35 @@ def _when_provider_top_level_record_needs_reverted(self, before_upload_datetime: return provider, updated_provider + def _when_provider_changed_home_jurisdiction_after_license_upload(self): + + self._when_provider_had_license_created_from_upload() + + provider_update_record = self.test_data_generator.put_default_provider_update_record_in_provider_table( + value_overrides={ + 'providerId': self.provider_id, + 'compact': self.compact, + 'updateType': self.update_categories.HOME_JURISDICTION_CHANGE, + 'previous': { + **self.provider_data.to_dict() + }, + 'updatedValues': { + 'currentHomeJurisdiction': self.license_jurisdiction, + }, + }, + # home jurisdiction was changed during license upload window + date_of_update_override=self.default_upload_datetime.isoformat() + ) + + # Simulate that the provider record was updated during upload + self.test_data_generator.put_default_provider_record_in_provider_table( + { + 'currentHomeJurisdiction': 
self.license_jurisdiction, + } + ) + + return provider_update_record + def test_provider_top_level_record_reset_to_prior_values_when_upload_reverted(self): """Test that provider top-level record is reset to values before upload.""" from handlers.rollback_license_upload import rollback_license_upload @@ -504,7 +540,6 @@ def test_provider_skipped_if_privilege_updates_detected_after_time_period_when_u # Validation tests def test_rollback_validates_datetime_format(self): - """Test that rollback validates datetime format.""" from handlers.rollback_license_upload import rollback_license_upload event = self._generate_test_event() @@ -516,7 +551,6 @@ def test_rollback_validates_datetime_format(self): self.assertIn('Invalid datetime format', result['error']) def test_rollback_validates_time_window_order(self): - """Test that rollback validates start time is before end time.""" from handlers.rollback_license_upload import rollback_license_upload event = self._generate_test_event() @@ -529,7 +563,6 @@ def test_rollback_validates_time_window_order(self): self.assertIn('Start time must be before end time', result['error']) def test_rollback_validates_maximum_time_window(self): - """Test that rollback validates maximum time window.""" from handlers.rollback_license_upload import rollback_license_upload start = datetime.now() - timedelta(days=8) # More than 7 days @@ -544,18 +577,10 @@ def test_rollback_validates_maximum_time_window(self): self.assertEqual(result['rollbackStatus'], 'FAILED') self.assertIn('cannot exceed', result['error']) - # Tests for checking data written to S3 - def test_expected_s3_object_stored_when_provider_license_record_reset_to_prior_values(self): - """Test that license record is reset to values before upload.""" - import json + def _perform_rollback_and_get_s3_object(self): from handlers.rollback_license_upload import rollback_license_upload - # Setup: License was updated during upload (e.g., renewed), but was first uploaded before start time - 
original_license, license_update, updated_license = self._when_provider_had_license_updated_from_upload( - license_upload_datetime=self.default_start_datetime - timedelta(hours=1) - ) - # Execute: Perform rollback event = self._generate_test_event() execution_id = event['executionId'] @@ -567,6 +592,17 @@ def test_expected_s3_object_stored_when_provider_license_record_reset_to_prior_v s3_obj = self.config.s3_client.get_object(Bucket=self.config.rollback_results_bucket_name, Key=s3_key) results_data = json.loads(s3_obj['Body'].read().decode('utf-8')) + return results_data + + # Tests for checking data written to S3 + def test_expected_s3_object_stored_when_provider_license_record_reset_to_prior_values(self): + # Setup: License was updated during upload (e.g., renewed), but was first uploaded before start time + original_license, license_update, updated_license = self._when_provider_had_license_updated_from_upload( + license_upload_datetime=self.default_start_datetime - timedelta(hours=1) + ) + + results_data = self._perform_rollback_and_get_s3_object() + # Verify the structure of the results self.assertEqual( { @@ -591,3 +627,187 @@ def test_expected_s3_object_stored_when_provider_license_record_reset_to_prior_v }, results_data, ) + + def test_expected_s3_object_stored_when_provider_license_record_deleted_from_rollback(self): + # Setup: License was updated during upload (e.g., renewed), but was first uploaded before start time + new_license = self._when_provider_had_license_created_from_upload() + + results_data = self._perform_rollback_and_get_s3_object() + + # Verify the structure of the results + self.assertEqual( + { + 'failedProviderDetails': [], + 'revertedProviderSummaries': [ + { + 'licensesReverted': [ + { + 'action': 'DELETE', + 'jurisdiction': new_license.jurisdiction, + 'licenseType': new_license.licenseType, + # random UUID, we won't check for it here + 'revisionId': ANY, + } + ], + 'privilegesReverted': [], + 'providerId': self.provider_id, + 
'updatesDeleted': [], + } + ], + 'skippedProviderDetails': [], + }, + results_data, + ) + + def test_expected_s3_object_stored_when_provider_privilege_record_reactivated_from_rollback(self): + # Setup: Privilege was deactivated during upload due to license deactivation + # license was uploaded before rollback window + self._when_provider_had_license_updated_from_upload( + license_upload_datetime=self.default_start_datetime - timedelta(hours=1) + ) + privilege, privilege_update = self._when_provider_had_privilege_deactivated_from_upload() + + results_data = self._perform_rollback_and_get_s3_object() + + # Verify the structure of the results + self.assertEqual( + { + 'failedProviderDetails': [], + 'revertedProviderSummaries': [ + { + 'licensesReverted': [ + { + 'action': 'REVERT', + 'jurisdiction': self.license_jurisdiction, + 'licenseType': privilege.licenseType, + # random UUID, we won't check for it here + 'revisionId': ANY, + } + ], + 'privilegesReverted': [ + { + 'action': 'REACTIVATED', + 'jurisdiction': privilege.jurisdiction, + 'licenseType': privilege.licenseType, + # random UUID, we won't check for it here + 'revisionId': ANY, + } + ], + 'providerId': self.provider_id, + 'updatesDeleted': ['aslp#UPDATE#1#privilege/ne/slp/1761207300/06b886756a79b796ad10b17bd67057e6', + 'aslp#UPDATE#3#license/oh/slp/1761207300/d8781f4e9489217462892394a791e885'], + } + ], + 'skippedProviderDetails': [], + }, + results_data, + ) + + def test_expected_s3_object_stored_when_provider_skipped_due_to_extra_license_updates(self): + # Setup: Provider had valid license before upload, and update occurred during upload window + original_license, license_update, updated_license = self._when_provider_had_license_updated_from_upload( + license_upload_datetime=self.default_start_datetime - timedelta(hours=1) + ) + # update also occurred after upload window + encumbrance_update = self._when_provider_had_license_update_after_upload() + + results_data = 
self._perform_rollback_and_get_s3_object() + + # Verify the structure of the results + expected_reason_message = ("License was updated with a change unrelated to license upload or the update " + "occurred after rollback end time. Manual review required.") + self.assertEqual( + { + 'failedProviderDetails': [], + 'revertedProviderSummaries': [], + 'skippedProviderDetails': [ + { + 'ineligible_updates': [ + { + 'update_time': encumbrance_update.createDate.isoformat(), + 'license_type': original_license.licenseType, + 'reason': expected_reason_message, + 'record_type': 'licenseUpdate', + 'type_of_update': encumbrance_update.updateType, + } + ], + 'provider_id': MOCK_PROVIDER_ID, + 'reason': 'Provider has updates that are either ' + 'unrelated to license upload or ' + 'occurred after rollback end time. ' + 'Manual review required.', + } + ], + }, + results_data, + ) + + def test_expected_s3_object_stored_when_provider_skipped_due_to_extra_privilege_updates(self): + # Setup: Provider had privilege update after upload window + self._when_provider_had_license_updated_from_upload() + privilege, privilege_update = self._when_provider_had_privilege_update_after_upload() + + results_data = self._perform_rollback_and_get_s3_object() + + # Verify the structure of the results + expected_reason_message = ("Privilege in jurisdiction oh was updated with a change unrelated to license upload or the update " + "occurred after rollback end time. 
Manual review required.") + self.assertEqual( + { + 'failedProviderDetails': [], + 'revertedProviderSummaries': [], + 'skippedProviderDetails': [ + { + 'ineligible_updates': [ + { + 'update_time': privilege_update.createDate.isoformat(), + 'license_type': privilege.licenseType, + 'reason': expected_reason_message, + 'record_type': 'privilegeUpdate', + 'type_of_update': privilege_update.updateType, + } + ], + 'provider_id': MOCK_PROVIDER_ID, + 'reason': 'Provider has updates that are either ' + 'unrelated to license upload or ' + 'occurred after rollback end time. ' + 'Manual review required.', + } + ], + }, + results_data, + ) + + def test_expected_s3_object_stored_when_provider_skipped_due_to_extra_provider_updates(self): + # Setup: Provider had privilege update after upload window + provider_update = self._when_provider_changed_home_jurisdiction_after_license_upload() + + results_data = self._perform_rollback_and_get_s3_object() + + # Verify the structure of the results + expected_reason_message = "Provider update occurred after rollback start time. Manual review required." + self.assertEqual( + { + 'failedProviderDetails': [], + 'revertedProviderSummaries': [], + 'skippedProviderDetails': [ + { + 'ineligible_updates': [ + { + 'update_time': provider_update.dateOfUpdate.isoformat(), + 'reason': expected_reason_message, + 'record_type': 'providerUpdate', + 'type_of_update': provider_update.updateType, + 'license_type': 'N/A' + } + ], + 'provider_id': MOCK_PROVIDER_ID, + 'reason': 'Provider has updates that are either ' + 'unrelated to license upload or ' + 'occurred after rollback end time. 
' + 'Manual review required.', + } + ], + }, + results_data, + ) From fc3a61d627e795f1f8bc88a7c1623b56c8027641 Mon Sep 17 00:00:00 2001 From: Landon Shumway Date: Fri, 7 Nov 2025 10:46:57 -0600 Subject: [PATCH 27/81] update test to simulate accidental license deactivation --- .../function/test_rollback_license_upload.py | 31 +++++++++++++------ 1 file changed, 22 insertions(+), 9 deletions(-) diff --git a/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py b/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py index a7c56faef..62c900342 100644 --- a/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py +++ b/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py @@ -113,35 +113,39 @@ def _when_provider_had_license_updated_from_upload( 'familyName': MOCK_ORIGINAL_FAMILY_NAME, 'givenName': MOCK_ORIGINAL_GIVEN_NAME, 'dateOfUpdate': self.default_start_datetime - timedelta(days=30), - 'dateOfExpiration': (self.default_start_datetime - timedelta(days=30)).date(), + # simulate license record that has not expired yet + 'dateOfExpiration': (self.default_start_datetime + timedelta(days=30)).date(), 'firstUploadDate': license_upload_datetime, + 'licenseStatus': 'active', } ) - # Create update record within upload window + # Create update record within upload window to simulate license deactivation license_update = self.test_data_generator.put_default_license_update_record_in_provider_table( { 'providerId': self.provider_id, 'compact': self.compact, 'jurisdiction': self.license_jurisdiction, 'licenseType': original_license.licenseType, - 'updateType': self.update_categories.RENEWAL, + 'updateType': self.update_categories.DEACTIVATION, 'createDate': upload_datetime, 'effectiveDate': upload_datetime, 'previous': { 'dateOfExpiration': original_license.dateOfExpiration, - 
'licenseStatus': 'inactive', + 'licenseStatus': 'active', **original_license.to_dict(), }, 'updatedValues': { - 'dateOfExpiration': (upload_datetime + timedelta(days=365)).date(), + # simulate accidentally changing the expiration to last year + 'dateOfExpiration': (upload_datetime - timedelta(days=365)).date(), + 'licenseStatus': 'inactive', 'familyName': MOCK_UPDATED_FAMILY_NAME, 'givenName': MOCK_UPDATED_GIVEN_NAME, }, } ) - # Update the license record to reflect the new expiration + # Update the license record to reflect the new expiration and status updated_license = self.test_data_generator.put_default_license_record_in_provider_table( { 'providerId': self.provider_id, @@ -150,7 +154,7 @@ def _when_provider_had_license_updated_from_upload( 'familyName': MOCK_UPDATED_FAMILY_NAME, 'givenName': MOCK_UPDATED_GIVEN_NAME, 'dateOfUpdate': upload_datetime, - 'dateOfExpiration': (upload_datetime + timedelta(days=365)).date(), + 'dateOfExpiration': (upload_datetime - timedelta(days=365)).date(), 'licenseStatus': 'inactive', 'firstUploadDate': license_upload_datetime, } @@ -443,6 +447,13 @@ def test_provider_privilege_record_reactivated_when_upload_reverted(self): privilege_updates = provider_records.get_all_privilege_update_records() self.assertEqual(len(privilege_updates), 0, 'Privilege update records should be deleted') + # make sure license record was reactivated as well + license_record = provider_records.get_specific_license_record( + jurisdiction=self.license_jurisdiction, + license_abbreviation=privilege_record.licenseTypeAbbreviation + ) + self.assertEqual('active', license_record.licenseStatus) + def test_provider_license_updates_and_license_record_within_time_period_removed_when_upload_reverted(self): """Test that license update records and license record within the time window are deleted.""" from handlers.rollback_license_upload import rollback_license_upload @@ -620,7 +631,8 @@ def test_expected_s3_object_stored_when_provider_license_record_reset_to_prior_v 
], 'privilegesReverted': [], 'providerId': self.provider_id, - 'updatesDeleted': ['aslp#UPDATE#3#license/oh/slp/1761207300/d8781f4e9489217462892394a791e885'], + # NOTE: if the test update data is modified, the sha here will need to be updated + 'updatesDeleted': ['aslp#UPDATE#3#license/oh/slp/1761207300/d92450a96739428f1a77c051dce9d4a6'], } ], 'skippedProviderDetails': [], @@ -694,8 +706,9 @@ def test_expected_s3_object_stored_when_provider_privilege_record_reactivated_fr } ], 'providerId': self.provider_id, + # NOTE: if the test update data is modified, the shas here will need to be updated 'updatesDeleted': ['aslp#UPDATE#1#privilege/ne/slp/1761207300/06b886756a79b796ad10b17bd67057e6', - 'aslp#UPDATE#3#license/oh/slp/1761207300/d8781f4e9489217462892394a791e885'], + 'aslp#UPDATE#3#license/oh/slp/1761207300/d92450a96739428f1a77c051dce9d4a6'], } ], 'skippedProviderDetails': [], From b3b52810c1964df3d663cbe5999e8f46843c0274 Mon Sep 17 00:00:00 2001 From: Landon Shumway Date: Fri, 7 Nov 2025 11:27:44 -0600 Subject: [PATCH 28/81] Add test to verify exception case --- .../common/cc_common/event_bus_client.py | 2 +- .../handlers/rollback_license_upload.py | 93 ++++++++++++------- .../function/test_rollback_license_upload.py | 32 +++++++ 3 files changed, 91 insertions(+), 36 deletions(-) diff --git a/backend/compact-connect/lambdas/python/common/cc_common/event_bus_client.py b/backend/compact-connect/lambdas/python/common/cc_common/event_bus_client.py index 921e6989a..9244e5149 100644 --- a/backend/compact-connect/lambdas/python/common/cc_common/event_bus_client.py +++ b/backend/compact-connect/lambdas/python/common/cc_common/event_bus_client.py @@ -442,7 +442,7 @@ def publish_license_revert_event( self, source: str, compact: str, - provider_id: UUID, + provider_id: str, jurisdiction: str, license_type: str, rollback_reason: str, diff --git a/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py 
b/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py index 3c1eaa428..eb15462e7 100644 --- a/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py +++ b/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py @@ -444,20 +444,18 @@ def _process_provider_rollback( # If provider was skipped due to ineligibility, return early if isinstance(result, ProviderSkippedDetails): return result - - # Publish events for successful rollback - _publish_revert_events(result, compact, rollback_reason, start_datetime, end_datetime) - - logger.info('Provider rollback successful', provider_id=provider_id) - return result - except Exception as e: logger.error(f'Error processing provider rollback: {str(e)}', provider_id=provider_id, exc_info=True) return ProviderFailedDetails( provider_id=provider_id, - error=str(e), + error=f"Failed to rollback updates for provider. Manual review required: {str(e)}", ) + # Publish events for successful rollback + _publish_revert_events(result, compact, rollback_reason, start_datetime, end_datetime) + logger.info('Provider rollback successful', provider_id=provider_id) + return result + def _perform_transaction(transaction_items: list[dict]) -> None: logger.info(f'Executing {len(transaction_items)} transaction items in batches of 100') @@ -845,38 +843,63 @@ def _publish_revert_events( :param end_datetime: The end time of the rollback window """ with EventBatchWriter(config.events_client) as event_writer: - # Convert provider_id string to UUID for event publishing - provider_id_uuid = UUID(revert_summary.provider_id) - # Publish license revert events for reverted_license in revert_summary.licenses_reverted: - config.event_bus_client.publish_license_revert_event( - source='org.compactconnect.disaster-recovery', - compact=compact, - provider_id=provider_id_uuid, - jurisdiction=reverted_license.jurisdiction, - 
license_type=reverted_license.license_type, - rollback_reason=rollback_reason, - start_time=start_datetime, - end_time=end_datetime, - revision_id=reverted_license.revision_id, - event_batch_writer=event_writer, - ) + try: + config.event_bus_client.publish_license_revert_event( + source='org.compactconnect.disaster-recovery', + compact=compact, + provider_id=revert_summary.provider_id, + jurisdiction=reverted_license.jurisdiction, + license_type=reverted_license.license_type, + rollback_reason=rollback_reason, + start_time=start_datetime, + end_time=end_datetime, + revision_id=reverted_license.revision_id, + event_batch_writer=event_writer, + ) + except Exception as e: + # this event publishing is not business critical, so we log the error and move on + logger.error("Unable to publish license revert event", + compact=compact, + provider_id=revert_summary.provider_id, + jurisdiction=reverted_license.jurisdiction, + license_type=reverted_license.license_type, + rollback_reason=rollback_reason, + start_time=start_datetime, + end_time=end_datetime, + revision_id=reverted_license.revision_id, + error=str(e), + ) # Publish privilege revert events for reverted_privilege in revert_summary.privileges_reverted: - config.event_bus_client.publish_privilege_revert_event( - source='org.compactconnect.disaster-recovery', - compact=compact, - provider_id=provider_id_uuid, - jurisdiction=reverted_privilege.jurisdiction, - license_type=reverted_privilege.license_type, - rollback_reason=rollback_reason, - start_time=start_datetime, - end_time=end_datetime, - revision_id=reverted_privilege.revision_id, - event_batch_writer=event_writer, - ) + try: + config.event_bus_client.publish_privilege_revert_event( + source='org.compactconnect.disaster-recovery', + compact=compact, + provider_id=revert_summary.provider_id, + jurisdiction=reverted_privilege.jurisdiction, + license_type=reverted_privilege.license_type, + rollback_reason=rollback_reason, + start_time=start_datetime, + 
end_time=end_datetime, + revision_id=reverted_privilege.revision_id, + event_batch_writer=event_writer, + ) + except Exception as e: + # this event publishing is not business critical, so we log the error and move on + logger.error("Unable to publish privilege revert event", + compact=compact, + provider_id=revert_summary.provider_id, + jurisdiction=reverted_privilege.jurisdiction, + license_type=reverted_privilege.license_type, + rollback_reason=rollback_reason, + start_time=start_datetime, + end_time=end_datetime, + revision_id=reverted_privilege.revision_id, + error=str(e), + ) def _load_results_from_s3(key: str) -> RollbackResults: diff --git a/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py b/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py index 62c900342..7ba688a54 100644 --- a/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py +++ b/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py @@ -824,3 +824,35 @@ def test_expected_s3_object_stored_when_provider_skipped_due_to_extra_provider_u }, results_data, ) + + def test_expected_s3_object_stored_when_provider_fails_during_rollback(self): + """Test that failed provider details are correctly stored in S3 results when an exception occurs.""" + # Setup: License was updated during upload + self._when_provider_had_license_updated_from_upload( + license_upload_datetime=self.default_start_datetime - timedelta(hours=1) + ) + + # Mock get_provider_user_records to raise an exception when called during rollback + mock_error_message = 'Database connection error' + with patch.object( + self.config.data_client, + 'get_provider_user_records', + side_effect=Exception(mock_error_message) + ): + results_data = self._perform_rollback_and_get_s3_object() + + # Verify the structure of the results contains failed provider 
details + self.assertEqual( + { + 'failedProviderDetails': [ + { + 'error': f'Failed to rollback updates for provider. ' + f'Manual review required: {mock_error_message}', + 'provider_id': self.provider_id, + } + ], + 'revertedProviderSummaries': [], + 'skippedProviderDetails': [], + }, + results_data, + ) From 53323ada3aeceb17e04d307d100f32ad358b520e Mon Sep 17 00:00:00 2001 From: Landon Shumway Date: Fri, 7 Nov 2025 13:20:03 -0600 Subject: [PATCH 29/81] Fix/test pagination logic --- .../handlers/rollback_license_upload.py | 57 +++-- .../tests/function/__init__.py | 7 +- .../function/test_rollback_license_upload.py | 242 +++++++++++++++++- .../stacks/persistent_stack/provider_table.py | 7 +- 4 files changed, 281 insertions(+), 32 deletions(-) diff --git a/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py b/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py index eb15462e7..e4d0970dd 100644 --- a/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py +++ b/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py @@ -146,19 +146,21 @@ def from_dict(cls, data: dict) -> 'RollbackResults': provider_id=summary['providerId'], licenses_reverted=[ RevertedLicense( - jurisdiction=license['jurisdiction'], - license_type=license['licenseType'], + jurisdiction=reverted_license['jurisdiction'], + license_type=reverted_license['licenseType'], revision_id=uuid4(), + action=reverted_license['action'] ) - for license in summary.get('licensesReverted', []) + for reverted_license in summary.get('licensesReverted', []) ], privileges_reverted=[ RevertedPrivilege( - jurisdiction=privilege['jurisdiction'], - license_type=privilege['licenseType'], + jurisdiction=reverted_privilege['jurisdiction'], + license_type=reverted_privilege['licenseType'], revision_id=uuid4(), + action=reverted_privilege['action'] ) - for privilege in 
summary.get('privilegesReverted', []) + for reverted_privilege in summary.get('privilegesReverted', []) ], updates_deleted=summary.get('updatesDeleted', []), ) @@ -184,7 +186,7 @@ def rollback_license_upload(event: dict, context: LambdaContext): # noqa: ARG00 'rollbackReason': 'Invalid data uploaded', 'executionId': 'unique-execution-id', 'providersProcessed': 0, - 'lastEvaluatedGSIKey': None + 'continueFromProviderId': None } Returns: @@ -194,7 +196,7 @@ def rollback_license_upload(event: dict, context: LambdaContext): # noqa: ARG00 'providersReverted': int, 'providersSkipped': int, 'providersFailed': int, - 'lastEvaluatedGSIKey': dict | None, + 'continueFromProviderId': str | None, } """ execution_start_time = time.time() @@ -208,7 +210,7 @@ def rollback_license_upload(event: dict, context: LambdaContext): # noqa: ARG00 rollback_reason = event['rollbackReason'] execution_id = event['executionId'] providers_processed = event.get('providersProcessed', 0) - last_evaluated_gsi_key = event.get('lastEvaluatedGSIKey') + continue_from_provider_id = event.get('continueFromProviderId') # Parse and validate datetime parameters try: @@ -264,11 +266,32 @@ def rollback_license_upload(event: dict, context: LambdaContext): # noqa: ARG00 jurisdiction, start_datetime, end_datetime, - last_evaluated_gsi_key, ) + + # Convert to sorted list for consistent ordering across invocations + affected_provider_ids_list = sorted(affected_provider_ids) + + # If continuing from a previous invocation, slice the list to start from that provider + if continue_from_provider_id: + try: + start_index = affected_provider_ids_list.index(continue_from_provider_id) + affected_provider_ids_list = affected_provider_ids_list[start_index:] + logger.info( + f'Continuing from provider {continue_from_provider_id} (index {start_index}). ' + f'{len(affected_provider_ids_list)} providers remaining to process.' 
+ ) + except ValueError as e: + # Provider ID in event input not found in list + # Log error and raise exception + logger.error( + f'Continue-from provider {continue_from_provider_id} not found in affected providers list.', + continue_from_provider_id=continue_from_provider_id, + affected_provider_ids_list=affected_provider_ids_list, + ) + raise e # Process each provider - for provider_id in affected_provider_ids: + for provider_id in affected_provider_ids_list: # Check time limit elapsed_time = time.time() - execution_start_time if elapsed_time > max_execution_time: @@ -283,7 +306,7 @@ def rollback_license_upload(event: dict, context: LambdaContext): # noqa: ARG00 'providersReverted': providers_reverted, 'providersSkipped': providers_skipped, 'providersFailed': providers_failed, - 'lastEvaluatedGSIKey': None, # Continue from next provider + 'continueFromProviderId': provider_id, # Continue from next provider 'compact': compact, 'jurisdiction': jurisdiction, 'startDateTime': start_datetime_str, @@ -341,7 +364,6 @@ def _query_gsi_for_affected_providers( jurisdiction: str, start_datetime: datetime, end_datetime: datetime, - last_evaluated_key: dict | None, ) -> set[str]: """ Query the licenseUploadDateGSI to find all affected provider IDs. 
@@ -378,19 +400,14 @@ def _query_gsi_for_affected_providers( ), } - if last_evaluated_key: - query_kwargs['ExclusiveStartKey'] = last_evaluated_key - while True: response = config.provider_table.query(**query_kwargs) # Extract provider IDs from the results for item in response.get('Items', []): # The providerId is in the SK: TIME#{epoch}#LT#{license_type}#PID#{provider_id} - sk = item.get('licenseUploadDateGSISK', '') - if '#PID#' in sk: - provider_id = sk.split('#PID#')[1] - affected_provider_ids.add(provider_id) + provider_id = item['providerId'] + affected_provider_ids.add(provider_id) # Check for pagination last_evaluated_key = response.get('LastEvaluatedKey') diff --git a/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/__init__.py b/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/__init__.py index db71681c6..95057db7b 100644 --- a/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/__init__.py +++ b/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/__init__.py @@ -110,7 +110,12 @@ def create_provider_table(self): {'AttributeName': 'licenseUploadDateGSIPK', 'KeyType': 'HASH'}, {'AttributeName': 'licenseUploadDateGSISK', 'KeyType': 'RANGE'}, ], - 'Projection': {'ProjectionType': 'KEYS_ONLY'}, + 'Projection': { + 'ProjectionType': 'INCLUDE', + 'NonKeyAttributes': [ + 'providerId', + ], + }, }, ], ) diff --git a/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py b/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py index 7ba688a54..3b683ed9c 100644 --- a/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py +++ b/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py @@ -27,6 +27,7 @@ MOCK_UPDATED_GIVEN_NAME = 'updatedGiven' MOCK_UPDATED_FAMILY_NAME = 'updatedFamily' 
MOCK_PROVIDER_ID = 'ba880c7c-5ed3-4be4-8ad5-c8558f58ef6f' +MOCK_EXECUTION_ID = 'test-execution-123' @mock_aws @@ -49,7 +50,7 @@ def setUp(self): self.update_categories = UpdateCategory - self.provider_data = self._add_provider_record() + self.provider_data = self._add_provider_record(provider_id=None) def _generate_test_event(self): return { @@ -58,15 +59,18 @@ def _generate_test_event(self): 'startDateTime': self.default_start_datetime.isoformat(), 'endDateTime': self.default_end_datetime.isoformat(), 'rollbackReason': 'Test rollback', - 'executionId': 'test-execution-123', + 'executionId': MOCK_EXECUTION_ID, 'providersProcessed': 0, } - def _add_provider_record(self): + def _add_provider_record(self, provider_id: str): + if provider_id is None: + provider_id = self.provider_id + # add provider record to provider table provider_data = self.test_data_generator.put_default_provider_record_in_provider_table( { - 'providerId': self.provider_id, + 'providerId': provider_id, 'compact': self.compact, 'jurisdiction': self.license_jurisdiction, 'dateOfUpdate': self.default_start_datetime - timedelta(days=30), @@ -92,7 +96,7 @@ def _when_provider_had_license_created_from_upload(self): ) def _when_provider_had_license_updated_from_upload( - self, upload_datetime: datetime = None, license_upload_datetime: datetime = None + self, upload_datetime: datetime = None, license_upload_datetime: datetime = None, provider_id: str = None ): """ Set up a scenario where a provider had an existing license updated during the upload window. 
@@ -103,11 +107,13 @@ def _when_provider_had_license_updated_from_upload( if license_upload_datetime is None: # by default, the license was originally uploaded a day before the bad upload license_upload_datetime = self.default_start_datetime - timedelta(days=1) + if provider_id is None: + provider_id = self.provider_id # Create original license before upload window, unless different time is provided original_license = self.test_data_generator.put_default_license_record_in_provider_table( { - 'providerId': self.provider_id, + 'providerId': provider_id, 'compact': self.compact, 'jurisdiction': self.license_jurisdiction, 'familyName': MOCK_ORIGINAL_FAMILY_NAME, @@ -123,7 +129,7 @@ def _when_provider_had_license_updated_from_upload( # Create update record within upload window to simulate license deactivation license_update = self.test_data_generator.put_default_license_update_record_in_provider_table( { - 'providerId': self.provider_id, + 'providerId': provider_id, 'compact': self.compact, 'jurisdiction': self.license_jurisdiction, 'licenseType': original_license.licenseType, @@ -148,7 +154,7 @@ def _when_provider_had_license_updated_from_upload( # Update the license record to reflect the new expiration and status updated_license = self.test_data_generator.put_default_license_record_in_provider_table( { - 'providerId': self.provider_id, + 'providerId': provider_id, 'compact': self.compact, 'jurisdiction': self.license_jurisdiction, 'familyName': MOCK_UPDATED_FAMILY_NAME, @@ -594,12 +600,11 @@ def _perform_rollback_and_get_s3_object(self): # Execute: Perform rollback event = self._generate_test_event() - execution_id = event['executionId'] rollback_license_upload(event, Mock()) # Read object from S3 and verify its contents match what is expected - s3_key = f'{execution_id}/results.json' + s3_key = f'{MOCK_EXECUTION_ID}/results.json' s3_obj = self.config.s3_client.get_object(Bucket=self.config.rollback_results_bucket_name, Key=s3_key) results_data = 
json.loads(s3_obj['Body'].read().decode('utf-8')) @@ -856,3 +861,220 @@ def test_expected_s3_object_stored_when_provider_fails_during_rollback(self): }, results_data, ) + + def test_rollback_handles_loading_existing_s3_results_and_appends_new_data(self): + """Test that rollback can load existing S3 results and append new data without deleting previous data.""" + from uuid import uuid4 + + test_revision_id = str(uuid4()) + existing_skipped_provider_id = str(uuid4()) + existing_reverted_provider_id = str(uuid4()) + existing_failed_provider_id = str(uuid4()) + + # Setup: Create existing provider with license that will be reverted + # This provider will have a privilege that gets reactivated + self._when_provider_had_license_updated_from_upload( + license_upload_datetime=self.default_start_datetime - timedelta(hours=1) + ) + self._when_provider_had_privilege_deactivated_from_upload() + + # Create initial S3 results with data in all fields + s3_key = f'{MOCK_EXECUTION_ID}/results.json' + + # Create existing results data in the format that from_dict expects (camelCase for top-level keys) + existing_results_data = { + 'skippedProviderDetails': [ + { + 'providerId': existing_skipped_provider_id, + 'reason': 'Existing skipped provider reason', + 'ineligibleUpdates': [ + { + 'record_type': 'licenseUpdate', + 'type_of_update': 'ENCUMBRANCE', + 'update_time': (self.default_start_datetime - timedelta(days=2)).isoformat(), + 'reason': 'Existing ineligible update reason', + 'license_type': 'audiologist', + } + ], + } + ], + 'failedProviderDetails': [ + { + 'providerId': existing_failed_provider_id, + 'error': 'Existing failure error message', + } + ], + 'revertedProviderSummaries': [ + { + 'providerId': existing_reverted_provider_id, + 'licensesReverted': [ + { + 'jurisdiction': 'tx', + 'licenseType': 'audiologist', + 'revisionId': test_revision_id, + 'action': 'REVERT', + } + ], + 'privilegesReverted': [], + 'updatesDeleted': ['existing-update-sha-1'], + } + ], + } + + # Write 
existing results to S3 + self.config.s3_client.put_object( + Bucket=self.config.rollback_results_bucket_name, + Key=s3_key, + Body=json.dumps(existing_results_data, indent=2), + ContentType='application/json', + ) + + final_results_data = self._perform_rollback_and_get_s3_object() + + # Verify: All existing data is preserved and new data is appended + # Note: to_dict() uses asdict() which produces snake_case for skipped/failed details + self.assertEqual( + { + 'skippedProviderDetails': [ + { + 'provider_id': existing_skipped_provider_id, + 'reason': 'Existing skipped provider reason', + 'ineligible_updates': [ + { + 'record_type': 'licenseUpdate', + 'type_of_update': 'ENCUMBRANCE', + 'update_time': (self.default_start_datetime - timedelta(days=2)).isoformat(), + 'reason': 'Existing ineligible update reason', + 'license_type': 'audiologist', + } + ], + } + ], + 'failedProviderDetails': [ + { + 'provider_id': existing_failed_provider_id, + 'error': 'Existing failure error message', + } + ], + 'revertedProviderSummaries': [ + { + 'providerId': existing_reverted_provider_id, + 'licensesReverted': [ + { + 'jurisdiction': 'tx', + 'licenseType': 'audiologist', + 'revisionId': ANY, + 'action': 'REVERT', + } + ], + 'privilegesReverted': [], + 'updatesDeleted': ['existing-update-sha-1'], + }, + { + 'providerId': self.provider_id, + 'licensesReverted': [ + { + 'action': 'REVERT', + 'jurisdiction': self.license_jurisdiction, + 'licenseType': ANY, + 'revisionId': ANY, + } + ], + 'privilegesReverted': [ + { + 'action': 'REACTIVATED', + 'jurisdiction': 'ne', + 'licenseType': ANY, + 'revisionId': ANY, + } + ], + 'updatesDeleted': ANY, + } + ], + }, + final_results_data, + ) + + @patch('handlers.rollback_license_upload.time') + def test_rollback_handles_pagination_when_provider_id_present_in_event_input(self, mock_time): + """Test that rollback can paginate across multiple invocations using continueFromProviderId.""" + from handlers.rollback_license_upload import 
rollback_license_upload + + # Lambda functions have a timeout of 15 minutes, so we set a cutoff of 12 minutes before we loop around + # the step function to reset the timeout. This mock allows us to test that branch of logic. + # the first time the mock_time function is called, it will return current time + # the second time the mock_time function is called, it will return + 1 second + # the third time the mock_time function is called, it will return 12 minutes + 2 seconds (cutoff is 12 minutes) + # this should cause the lambda to return an IN_PROGRESS status with a pagination key + mock_time.time.side_effect = [0, 1, 12 * 60 + 2] # current time, 12 minutes + 2 seconds + + # Setup: Create two providers with licenses that will be reverted + # Provider IDs in sorted order (to ensure consistent pagination behavior) + mock_first_provider_id = '11111111-5ed3-4be4-8ad5-c8558f587890' + mock_second_provider_id = '22222222-5ed3-4be4-8ad5-c8558f587890' + + # Add first provider + self._add_provider_record(provider_id=mock_first_provider_id) + self._when_provider_had_license_updated_from_upload( + license_upload_datetime=self.default_start_datetime - timedelta(hours=1), + provider_id=mock_first_provider_id + ) + + # Add second provider + self._add_provider_record(provider_id=mock_second_provider_id) + self._when_provider_had_license_updated_from_upload( + license_upload_datetime=self.default_start_datetime - timedelta(hours=1), + provider_id=mock_second_provider_id + ) + + # Execute: First invocation (should timeout after processing first provider) + event = self._generate_test_event() + + result_first = rollback_license_upload(event, Mock()) + + # Assert: First invocation returned IN_PROGRESS status + self.assertEqual(result_first['rollbackStatus'], 'IN_PROGRESS') + self.assertEqual(1, result_first['providersProcessed']) + self.assertEqual(1, result_first['providersReverted']) + self.assertEqual(0, result_first['providersSkipped']) + self.assertEqual(0, 
result_first['providersFailed']) + self.assertEqual(mock_second_provider_id, result_first['continueFromProviderId']) + + # Execute: Second invocation (continue from where we left off) + # Reset mock time for second invocation + mock_time.time.side_effect = [0, 1] # Won't timeout this time + + + result_second = rollback_license_upload(result_first, Mock()) + + # Assert: Second invocation completed successfully + self.assertEqual(result_second['rollbackStatus'], 'COMPLETE') + self.assertEqual(2, result_second['providersProcessed']) + self.assertEqual(2, result_second['providersReverted']) + self.assertEqual(0, result_second['providersSkipped']) + self.assertEqual(0, result_second['providersFailed']) + + # Verify: S3 results contain both providers + s3_key = f'{MOCK_EXECUTION_ID}/results.json' + s3_obj = self.config.s3_client.get_object( + Bucket=self.config.rollback_results_bucket_name, Key=s3_key + ) + final_results_data = json.loads(s3_obj['Body'].read().decode('utf-8')) + + # Should have 2 reverted providers + self.assertEqual({'failedProviderDetails': [], + 'revertedProviderSummaries': [{'licensesReverted': [{'action': 'REVERT', + 'jurisdiction': 'oh', + 'licenseType': 'speech-language pathologist', + 'revisionId': '9f0f8c0c-40a9-4578-86d1-da4278e68bb4'}], + 'privilegesReverted': [], + 'providerId': mock_first_provider_id, + 'updatesDeleted': ['aslp#UPDATE#3#license/oh/slp/1761207300/d92450a96739428f1a77c051dce9d4a6']}, + {'licensesReverted': [{'action': 'REVERT', + 'jurisdiction': 'oh', + 'licenseType': 'speech-language pathologist', + 'revisionId': '97f68dba-ee91-4c6c-833d-66fed70b9a9a'}], + 'privilegesReverted': [], + 'providerId': mock_second_provider_id, + 'updatesDeleted': ['aslp#UPDATE#3#license/oh/slp/1761207300/d92450a96739428f1a77c051dce9d4a6']}], + 'skippedProviderDetails': []}, final_results_data) diff --git a/backend/compact-connect/stacks/persistent_stack/provider_table.py b/backend/compact-connect/stacks/persistent_stack/provider_table.py index 
662574937..9ca0f49e4 100644 --- a/backend/compact-connect/stacks/persistent_stack/provider_table.py +++ b/backend/compact-connect/stacks/persistent_stack/provider_table.py @@ -84,11 +84,16 @@ def __init__( 'providerId', ], ) + # in this case, we only need to include the provider id since this GSI is used to + # determine which providers were associated with a particular license upload time self.add_global_secondary_index( index_name=self.license_upload_date_gsi_name, partition_key=Attribute(name='licenseUploadDateGSIPK', type=AttributeType.STRING), sort_key=Attribute(name='licenseUploadDateGSISK', type=AttributeType.STRING), - projection_type=ProjectionType.KEYS_ONLY, + projection_type=ProjectionType.INCLUDE, + non_key_attributes=[ + 'providerId', + ], ) # Set up backup plan backup_enabled = environment_context['backup_enabled'] From fb3ce64aa0987e2f491dace5dec51ea9ad35718a Mon Sep 17 00:00:00 2001 From: Landon Shumway Date: Fri, 7 Nov 2025 13:53:12 -0600 Subject: [PATCH 30/81] Add exception handling for transaction --- .../handlers/rollback_license_upload.py | 43 +++++++++++++++--- .../function/test_rollback_license_upload.py | 45 ++++++++++++++++++- 2 files changed, 80 insertions(+), 8 deletions(-) diff --git a/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py b/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py index e4d0970dd..36943a8ec 100644 --- a/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py +++ b/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py @@ -474,15 +474,46 @@ def _process_provider_rollback( return result -def _perform_transaction(transaction_items: list[dict]) -> None: +def _extract_sk_from_transaction_item(transaction_item: dict) -> str | None: + """ + Extract the sort key (SK) from a transaction item. + + Transaction items can be Put, Delete, or Update operations. 
+ Returns the SK if found, None otherwise. + """ + if 'Put' in transaction_item: + return transaction_item['Put']['Item'].get('sk') + elif 'Delete' in transaction_item: + return transaction_item['Delete']['Key'].get('sk') + elif 'Update' in transaction_item: + return transaction_item['Update']['Key'].get('sk') + return None + + +def _perform_transaction(transaction_items: list[dict], provider_id: str) -> None: logger.info(f'Executing {len(transaction_items)} transaction items in batches of 100') for i in range(0, len(transaction_items), 100): batch = transaction_items[i : i + 100] # Use Table resource's client for automatic type conversion - # TODO - catch failures and add failure record to write to S3 results object - config.provider_table.meta.client.transact_write_items(TransactItems=batch) - logger.info(f'Executed batch {i // 100 + 1} with {len(batch)} items') + try: + config.provider_table.meta.client.transact_write_items(TransactItems=batch) + logger.info(f'Executed batch {i // 100 + 1} with {len(batch)} items') + except Exception as e: + # Extract all SKs from the failed transaction batch for debugging + failed_sks = [_extract_sk_from_transaction_item(item) for item in batch] + # filter out null values + failed_sks = [sk for sk in failed_sks if sk is not None] + + logger.error( + 'Transaction batch failed for provider', + provider_id=provider_id, + batch_number=i // 100 + 1, + batch_size=len(batch), + failed_sks=failed_sks, + error=str(e) + ) + raise def _build_and_execute_revert_transactions( @@ -807,7 +838,7 @@ def add_delete(pk: str, sk: str, update_record: bool): updates_deleted=updates_deleted_sks, ) - _perform_transaction(transaction_items) + _perform_transaction(transaction_items, provider_id) # Now read all the license records for the provider and update the provider record # Fetch all provider records including all update tiers @@ -832,7 +863,7 @@ def add_delete(pk: str, sk: str, update_record: bool): serialized_provider_record = 
top_level_provider_record.serialize_to_database_record() add_delete(pk=serialized_provider_record['pk'], sk=serialized_provider_record['sk'], update_record=False) - _perform_transaction(primary_record_transaction_items) + _perform_transaction(primary_record_transaction_items, provider_id) # TODO - log full change summary (DO NOT LOG PII) return ProviderRevertedSummary( diff --git a/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py b/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py index 3b683ed9c..fa517d3af 100644 --- a/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py +++ b/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py @@ -50,7 +50,7 @@ def setUp(self): self.update_categories = UpdateCategory - self.provider_data = self._add_provider_record(provider_id=None) + self.provider_data = self._add_provider_record() def _generate_test_event(self): return { @@ -63,7 +63,7 @@ def _generate_test_event(self): 'providersProcessed': 0, } - def _add_provider_record(self, provider_id: str): + def _add_provider_record(self, provider_id: str | None = None): if provider_id is None: provider_id = self.provider_id @@ -1078,3 +1078,44 @@ def test_rollback_handles_pagination_when_provider_id_present_in_event_input(sel 'providerId': mock_second_provider_id, 'updatesDeleted': ['aslp#UPDATE#3#license/oh/slp/1761207300/d92450a96739428f1a77c051dce9d4a6']}], 'skippedProviderDetails': []}, final_results_data) + + def test_transaction_failure_is_logged_and_provider_marked_as_failed(self): + """Test that transaction failures are properly logged and the provider is marked as failed.""" + from botocore.exceptions import ClientError + + # Setup: Create a scenario with privilege deactivation which will have PUT, DELETE, and UPDATE operations + # - License update (DELETE of update 
record) + # - Privilege update (DELETE of update record) + # - Privilege reactivation (UPDATE to remove licenseDeactivatedStatus) + # - Provider record update (PUT) + self._when_provider_had_license_updated_from_upload( + license_upload_datetime=self.default_start_datetime - timedelta(hours=1) + ) + self._when_provider_had_privilege_deactivated_from_upload() + + # Mock the transaction to fail with a ClientError + mock_error = ClientError( + error_response={ + 'Error': { + 'Code': 'TransactionCanceledException', + 'Message': 'Transaction cancelled' + } + }, + operation_name='TransactWriteItems' + ) + + with patch.object( + self.config.provider_table.meta.client, + 'transact_write_items', + side_effect=mock_error + ): + results_data = self._perform_rollback_and_get_s3_object() + + # Verify: Provider was marked as failed + self.assertEqual(1, len(results_data['failedProviderDetails'])) + self.assertEqual(self.provider_id, results_data['failedProviderDetails'][0]['provider_id']) + self.assertIn('TransactionCanceledException', results_data['failedProviderDetails'][0]['error']) + + # Verify: No providers were reverted or skipped + self.assertEqual(0, len(results_data['revertedProviderSummaries'])) + self.assertEqual(0, len(results_data['skippedProviderDetails'])) From 9980e0ea34cceff6d1e8e05f1878004e5e823ad6 Mon Sep 17 00:00:00 2001 From: Landon Shumway Date: Fri, 7 Nov 2025 14:09:36 -0600 Subject: [PATCH 31/81] logging results per provider --- .../handlers/rollback_license_upload.py | 16 +++++++++++++--- 1 file changed, 13 insertions(+), 3 deletions(-) diff --git a/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py b/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py index 36943a8ec..c8d994da8 100644 --- a/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py +++ 
b/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py
@@ -865,7 +865,13 @@ def add_delete(pk: str, sk: str, update_record: bool):
 
     _perform_transaction(primary_record_transaction_items, provider_id)
 
-    # TODO - log full change summary (DO NOT LOG PII)
+    logger.info(
+        'Completed rollback for provider',
+        provider_id=provider_id,
+        licenses_reverted=reverted_licenses,
+        privileges_reverted=reverted_privileges,
+        updates_deleted=updates_deleted_sks,
+    )
     return ProviderRevertedSummary(
         provider_id=provider_id,
         licenses_reverted=reverted_licenses,
@@ -972,9 +978,13 @@ def _write_results_to_s3(key: str, results: RollbackResults):
             Key=key,
             Body=json.dumps(results.to_dict(), indent=2),
             ContentType='application/json',
-            ServerSideEncryption='aws:kms',
         )
         logger.info('Results written to S3', bucket=config.rollback_results_bucket_name, key=key)
-    except Exception as e:
+    # handle JSON serialization errors (json.dumps raises TypeError/ValueError, not JSONDecodeError)
+    except (TypeError, ValueError) as e:
         logger.error(f'Error writing results to S3: {str(e)}')
         raise
+    # handle other errors by logging the full object and raising the exception
+    except Exception as e:
+        logger.error(f'Error writing results to S3: {str(e)}', results=results.to_dict())
+        raise

From 7036dc00fb22ae8f75022193fee65a4fe4586d75 Mon Sep 17 00:00:00 2001
From: Landon Shumway
Date: Fri, 7 Nov 2025 14:12:50 -0600
Subject: [PATCH 32/81] Fix test mock

---
 .../tests/function/test_rollback_license_upload.py | 12 +++++-------
 1 file changed, 5 insertions(+), 7 deletions(-)

diff --git a/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py b/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py
index fa517d3af..ae251417f 100644
--- a/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py
+++ 
b/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py @@ -1066,14 +1066,14 @@ def test_rollback_handles_pagination_when_provider_id_present_in_event_input(sel 'revertedProviderSummaries': [{'licensesReverted': [{'action': 'REVERT', 'jurisdiction': 'oh', 'licenseType': 'speech-language pathologist', - 'revisionId': '9f0f8c0c-40a9-4578-86d1-da4278e68bb4'}], + 'revisionId': ANY}], 'privilegesReverted': [], 'providerId': mock_first_provider_id, 'updatesDeleted': ['aslp#UPDATE#3#license/oh/slp/1761207300/d92450a96739428f1a77c051dce9d4a6']}, {'licensesReverted': [{'action': 'REVERT', 'jurisdiction': 'oh', 'licenseType': 'speech-language pathologist', - 'revisionId': '97f68dba-ee91-4c6c-833d-66fed70b9a9a'}], + 'revisionId': ANY}], 'privilegesReverted': [], 'providerId': mock_second_provider_id, 'updatesDeleted': ['aslp#UPDATE#3#license/oh/slp/1761207300/d92450a96739428f1a77c051dce9d4a6']}], @@ -1104,11 +1104,9 @@ def test_transaction_failure_is_logged_and_provider_marked_as_failed(self): operation_name='TransactWriteItems' ) - with patch.object( - self.config.provider_table.meta.client, - 'transact_write_items', - side_effect=mock_error - ): + # Patch at the handler module level to ensure it works across the full test suite + with patch('handlers.rollback_license_upload.config.provider_table.meta.client.transact_write_items', + side_effect=mock_error): results_data = self._perform_rollback_and_get_s3_object() # Verify: Provider was marked as failed From 8816974f181c96d9bd7ed29e9be841e674d33451 Mon Sep 17 00:00:00 2001 From: Landon Shumway Date: Fri, 7 Nov 2025 16:58:25 -0600 Subject: [PATCH 33/81] Add smoke test for testing license upload rollback --- backend/compact-connect/tests/smoke/config.py | 4 + .../rollback_license_upload_smoke_tests.py | 563 ++++++++++++++++++ .../tests/smoke/smoke_common.py | 23 +- .../tests/smoke/smoke_tests_env_example.json | 3 +- 4 files changed, 588 insertions(+), 5 deletions(-) create mode 
100644 backend/compact-connect/tests/smoke/rollback_license_upload_smoke_tests.py diff --git a/backend/compact-connect/tests/smoke/config.py b/backend/compact-connect/tests/smoke/config.py index 859e76fcf..a6b582e29 100644 --- a/backend/compact-connect/tests/smoke/config.py +++ b/backend/compact-connect/tests/smoke/config.py @@ -36,6 +36,10 @@ def environment_name(self): def aws_region(self): return os.environ['AWS_DEFAULT_REGION'] + @property + def license_upload_rollback_step_function_arn(self): + return os.environ['CC_TEST_ROLLBACK_STEP_FUNCTION_ARN'] + @property def provider_user_dynamodb_table(self): return boto3.resource('dynamodb').Table(os.environ['CC_TEST_PROVIDER_DYNAMO_TABLE_NAME']) diff --git a/backend/compact-connect/tests/smoke/rollback_license_upload_smoke_tests.py b/backend/compact-connect/tests/smoke/rollback_license_upload_smoke_tests.py new file mode 100644 index 000000000..edf9f2086 --- /dev/null +++ b/backend/compact-connect/tests/smoke/rollback_license_upload_smoke_tests.py @@ -0,0 +1,563 @@ +# ruff: noqa: T201 we use print statements for smoke testing +#!/usr/bin/env python3 +import json +import time +from datetime import UTC, datetime, timedelta + +import boto3 +import requests +from config import config, logger +from smoke_common import ( + SmokeTestFailureException, + create_test_staff_user, + delete_test_staff_user, + get_api_base_url, + get_provider_user_records, + get_staff_user_auth_headers, + load_smoke_test_env, +) + +COMPACT = 'aslp' +JURISDICTION = 'ne' +TEST_STAFF_USER_EMAIL = 'testStaffUserLicenseRollback@smokeTestFakeEmail.com' + +# Test configuration +NUM_LICENSES_TO_UPLOAD = 1000 +BATCH_SIZE = 100 # Upload in batches of 100 to avoid timeouts + + +def upload_test_license_batch( + staff_headers: dict, batch_start_index: int, batch_size: int, upload_start_time: datetime +): + """ + Upload a batch of test license records. 
+ + :param staff_headers: Authentication headers for staff user + :param batch_start_index: Starting index for this batch + :param batch_size: Number of licenses to upload in this batch + :param upload_start_time: The timestamp when upload started (for generating unique data) + :return: List of license records that were uploaded + """ + licenses_batch = [] + + for i in range(batch_start_index, batch_start_index + batch_size): + # Generate unique data for each license + license_data = { + 'licenseNumber': f'ROLLBACK-TEST-{i:04d}', + 'homeAddressPostalCode': '68001', + 'givenName': f'TestProvider{i}', + 'familyName': f'RollbackTest{i:04d}', + 'homeAddressStreet1': '123 Test Street', + 'dateOfBirth': '1985-01-01', + 'dateOfIssuance': '2020-01-01', + 'ssn': f'500-50-{i:04d}', # Incrementing SSN with padded zeros + 'licenseType': 'audiologist', + 'dateOfExpiration': '2050-12-10', + 'homeAddressState': 'NE', + 'homeAddressCity': 'Omaha', + 'compactEligibility': 'eligible', + 'licenseStatus': 'active', + } + licenses_batch.append(license_data) + + # Upload the batch + logger.info( + f'Uploading batch of {len(licenses_batch)} licenses (indices {batch_start_index}-{batch_start_index + batch_size - 1})' + ) + + post_response = requests.post( + url=f'{get_api_base_url()}/v1/compacts/{COMPACT}/jurisdictions/{JURISDICTION}/licenses', + headers=staff_headers, + json=licenses_batch, + timeout=60, # Longer timeout for batch uploads + ) + + if post_response.status_code != 200: + raise SmokeTestFailureException( + f'Failed to upload license batch {batch_start_index}. Response: {post_response.json()}' + ) + + logger.info(f'Successfully uploaded batch {batch_start_index}-{batch_start_index + batch_size - 1}') + return licenses_batch + + +def upload_test_licenses(staff_headers: dict, num_licenses: int, batch_size: int): + """ + Upload test license records in batches. 
+ + :param staff_headers: Authentication headers for staff user + :param num_licenses: Total number of licenses to upload + :param batch_size: Number of licenses per batch + :return: Tuple of (all uploaded license data, upload start time, upload end time) + """ + upload_start_time = datetime.now(tz=UTC) + all_licenses = [] + + logger.info(f'Starting upload of {num_licenses} test licenses in batches of {batch_size}') + + for batch_start in range(0, num_licenses, batch_size): + current_batch_size = min(batch_size, num_licenses - batch_start) + batch_licenses = upload_test_license_batch(staff_headers, batch_start, current_batch_size, upload_start_time) + all_licenses.extend(batch_licenses) + + # Small delay between batches to avoid rate limiting + if batch_start + current_batch_size < num_licenses: + time.sleep(2) + + # wait for several minutes for all licenses to propagate in the system + + upload_end_time = datetime.now(tz=UTC) + logger.info(f'Completed upload of {len(all_licenses)} licenses') + + return all_licenses, upload_start_time, upload_end_time + + +def wait_for_all_providers_created(staff_headers: dict, expected_count: int, max_wait_time: int = 900): + """ + Wait for all provider records to be created from uploaded licenses. 
+ + :param staff_headers: Authentication headers for staff user + :param expected_count: Expected number of providers to be created + :param max_wait_time: Maximum time to wait in seconds (default: 900 = 15 minutes) + :return: List of provider IDs that were created + """ + logger.info(f'Waiting for {expected_count} provider records to be created...') + + start_time = time.time() + check_interval = 30 + + # Query using the common family name prefix 'RollbackTest' + # The API will return all providers with family names starting with this prefix + base_query_body = { + 'query': {'familyName': 'RollbackTest'}, + 'pagination': { + 'pageSize': 1000 # Maximum page size to minimize number of requests + }, + } + + while time.time() - start_time < max_wait_time: + all_provider_ids = [] + last_key = None + page_num = 1 + + # Collect all providers across all pages + while True: + query_body = base_query_body.copy() + if last_key: + query_body['pagination']['lastKey'] = last_key + + query_response = requests.post( + url=f'{get_api_base_url()}/v1/compacts/{COMPACT}/providers/query', + headers=staff_headers, + json=query_body, + timeout=30, + ) + + if query_response.status_code != 200: + logger.warning(f'Query failed with status {query_response.status_code}. 
Retrying...') + break + + response_data = query_response.json() + providers = response_data.get('providers', []) + pagination = response_data.get('pagination', {}) + + # Collect provider IDs from this page + page_provider_ids = [p['providerId'] for p in providers] + all_provider_ids.extend(page_provider_ids) + + logger.info( + f'Page {page_num}: Found {len(page_provider_ids)} providers ' + f'(total: {len(all_provider_ids)}/{expected_count})' + ) + + # Check if there are more pages + last_key = pagination.get('lastKey') + if not last_key: + # No more pages + break + + page_num += 1 + + num_found = len(all_provider_ids) + logger.info( + f'Found {num_found}/{expected_count} providers with family name prefix "RollbackTest" ' + f'(across {page_num} pages)' + ) + + if num_found >= expected_count: + logger.info(f'All {expected_count} providers found!') + return all_provider_ids[:expected_count] # Return only the expected count + + elapsed = time.time() - start_time + if elapsed < max_wait_time: + logger.info(f'Waiting {check_interval}s for remaining providers... (elapsed: {elapsed:.1f}s)') + time.sleep(check_interval) + + # Timeout reached - make one final query to get the latest results + logger.warning(f'Timeout reached after {max_wait_time}s. 
Making final query to collect all available providers.') + + all_provider_ids = [] + last_key = None + page_num = 1 + + while True: + query_body = base_query_body.copy() + if last_key: + query_body['pagination']['lastKey'] = last_key + + query_response = requests.post( + url=f'{get_api_base_url()}/v1/compacts/{COMPACT}/providers/query', + headers=staff_headers, + json=query_body, + timeout=30, + ) + + if query_response.status_code != 200: + logger.warning(f'Final query failed with status {query_response.status_code}') + break + + response_data = query_response.json() + providers = response_data.get('providers', []) + pagination = response_data.get('pagination', {}) + + page_provider_ids = [p['providerId'] for p in providers] + all_provider_ids.extend(page_provider_ids) + + logger.info(f'Final query page {page_num}: Found {len(page_provider_ids)} providers') + + last_key = pagination.get('lastKey') + if not last_key: + break + + page_num += 1 + + logger.warning(f'Final count: {len(all_provider_ids)}/{expected_count} providers found') + return all_provider_ids + + +def start_rollback_step_function( + step_function_arn: str, + compact: str, + jurisdiction: str, + start_datetime: datetime, + end_datetime: datetime, + rollback_reason: str, +): + """ + Start the license upload rollback step function. 
+ + :param step_function_arn: ARN of the step function + :param compact: Compact abbreviation + :param jurisdiction: Jurisdiction abbreviation + :param start_datetime: Start of rollback time window + :param end_datetime: End of rollback time window + :param rollback_reason: Reason for the rollback + :return: Execution ARN + """ + sfn_client = boto3.client('stepfunctions') + + # Generate unique execution name + execution_name = f'smoke-test-rollback-{int(datetime.now(tz=UTC).timestamp())}' + + input_data = { + 'compact': compact, + 'jurisdiction': jurisdiction, + 'startDateTime': start_datetime.isoformat().replace('+00:00', 'Z'), + 'endDateTime': end_datetime.isoformat().replace('+00:00', 'Z'), + 'rollbackReason': rollback_reason, + 'tableNameRollbackConfirmation': 'CONFIRMED', # Required confirmation parameter + } + + logger.info(f'Starting step function execution: {execution_name}') + logger.info(f'Input: {json.dumps(input_data, indent=2)}') + + response = sfn_client.start_execution( + stateMachineArn=step_function_arn, + name=execution_name, + input=json.dumps(input_data), + ) + + execution_arn = response['executionArn'] + logger.info(f'Step function started. Execution ARN: {execution_arn}') + + return execution_arn + + +def wait_for_step_function_completion(execution_arn: str, max_wait_time: int = 3600): + """ + Poll the step function until it completes. 
+    :param execution_arn: ARN of the step function execution
+    :param max_wait_time: Maximum time to wait in seconds (default: 3600 = 1 hour)
+    :return: Final execution status and output
+    """
+    sfn_client = boto3.client('stepfunctions')
+
+    logger.info('Waiting for step function to complete...')
+    start_time = time.time()
+    check_interval = 30
+
+    while time.time() - start_time < max_wait_time:
+        response = sfn_client.describe_execution(executionArn=execution_arn)
+
+        status = response['status']
+        logger.info(f'Step function status: {status}')
+
+        if status == 'SUCCEEDED':
+            output = json.loads(response['output'])
+            elapsed = time.time() - start_time
+            logger.info(f'Step function completed successfully after {elapsed:.1f}s')
+            return status, output
+        elif status in ['FAILED', 'TIMED_OUT', 'ABORTED']:
+            raise SmokeTestFailureException(
+                f'Step function execution failed with status: {status}. '
+                f'Error: {response.get("error", "N/A")}, Cause: {response.get("cause", "N/A")}'
+            )
+
+        # Still running
+        time.sleep(check_interval)
+
+    raise SmokeTestFailureException(f'Step function did not complete within {max_wait_time}s timeout')
+
+
+def get_rollback_results_from_s3(results_s3_key: str):
+    """
+    Retrieve rollback results from S3.
+
+    :param results_s3_key: S3 URI (s3://bucket-name/key) to the results file;
+        the bucket name and key are parsed from the URI
+    :return: Parsed results data
+    """
+    s3_client = boto3.client('s3')
+
+    # Format: s3://bucket-name/key
+    parts = results_s3_key.replace('s3://', '').split('/', 1)
+    bucket_name = parts[0]
+    key = parts[1]
+
+    logger.info(f'Retrieving results from S3: {bucket_name}/{key}')
+
+    response = s3_client.get_object(Bucket=bucket_name, Key=key)
+    results_json = response['Body'].read().decode('utf-8')
+    results = json.loads(results_json)
+
+    logger.info('Retrieved results from S3')
+    return results
+
+
+def verify_rollback_results(results: dict, expected_provider_count: int):
+    """
+    Verify the rollback results match expected format and counts. 
+ + :param results: Rollback results from S3 + :param expected_provider_count: Expected number of providers rolled back + """ + logger.info('Verifying rollback results...') + + # Verify structure + required_keys = ['revertedProviderSummaries', 'skippedProviderDetails', 'failedProviderDetails'] + for key in required_keys: + if key not in results: + raise SmokeTestFailureException(f'Missing required key in results: {key}') + + # Check counts + reverted = results['revertedProviderSummaries'] + skipped = results['skippedProviderDetails'] + failed = results['failedProviderDetails'] + + num_reverted = len(reverted) + num_skipped = len(skipped) + num_failed = len(failed) + + logger.info('Rollback summary:') + logger.info(f' - Reverted: {num_reverted}') + logger.info(f' - Skipped: {num_skipped}') + logger.info(f' - Failed: {num_failed}') + + # Verify all providers were reverted (none skipped or failed) + if num_skipped > 0: + logger.error(f'Found {num_skipped} skipped providers:') + for detail in skipped[:5]: # Show first 5 + logger.error(f'Details for skipped provider: {detail["providerId"]}', skipped=detail) + raise SmokeTestFailureException(f'Expected 0 skipped providers but found {num_skipped}') + + if num_failed > 0: + logger.error(f'Found {num_failed} failed providers:') + for detail in failed[:5]: # Show first 5 + logger.error(f'Details for failed provider: {detail["providerId"]}', failed=detail) + raise SmokeTestFailureException(f'Expected 0 failed providers but found {num_failed}') + + # Verify we got the expected number of reverted providers + if num_reverted != expected_provider_count: + logger.warning(f'Expected {expected_provider_count} reverted providers but found {num_reverted}') + + # Verify the reverted provider has the expected structure + for i, summary in enumerate(reverted): + if 'providerId' not in summary: + raise SmokeTestFailureException(f'Reverted provider summary {i} missing providerId') + if 'licensesReverted' not in summary: + raise 
SmokeTestFailureException(f'Reverted provider summary {i} missing licensesReverted') + + # Verify each license was deleted (not reverted to previous state) + licenses_reverted = summary['licensesReverted'] + if len(licenses_reverted) != 1: + raise SmokeTestFailureException( + f'Expected 1 license reverted for provider {summary["providerId"]}, found {len(licenses_reverted)}' + ) + + license_action = licenses_reverted[0]['action'] + if license_action != 'DELETE': + raise SmokeTestFailureException( + f'Expected license action "DELETE" but found "{license_action}" for provider {summary["providerId"]}' + ) + + logger.info('✅ Rollback results verification passed') + + +def verify_providers_deleted_from_database(results: dict, compact: str): + """ + Verify that all provider records were actually deleted from DynamoDB. + + :param results: Rollback results containing provider IDs + :param compact: Compact abbreviation + """ + logger.info('Verifying providers were deleted from database...') + + reverted_summaries = results['revertedProviderSummaries'] + + for i, summary in enumerate(reverted_summaries): + if i % 100 == 0: + logger.info(f'Verified deletion for {i}/{len(reverted_summaries)} providers') + + provider_id = summary['providerId'] + + # Try to get provider records - should return empty or raise exception + provider_user_records = get_provider_user_records(compact, provider_id) + + # Check if any records exist + all_records = provider_user_records.provider_records + if all_records: + raise SmokeTestFailureException( + f'Provider {provider_id} still has {len(all_records)} records in database after rollback' + ) + + logger.info(f'✅ Verified {len(reverted_summaries)} providers were deleted from database') + + +def rollback_license_upload_smoke_test(): + """ + Main smoke test for license upload rollback functionality. + + Steps: + 1. Upload 1,000 test license records + 2. Wait for all providers to be created + 3. Start rollback step function + 4. 
Wait for step function completion + 5. Retrieve and verify results from S3 + 6. Verify providers were deleted from database + """ + # Get environment configuration + step_function_arn = config.license_upload_rollback_step_function_arn + + if not step_function_arn: + raise SmokeTestFailureException('CC_TEST_ROLLBACK_STEP_FUNCTION_ARN environment variable not set') + + staff_headers = get_staff_user_auth_headers(TEST_STAFF_USER_EMAIL) + + # Step 1: Upload test licenses + logger.info('=' * 80) + logger.info('STEP 1: Uploading test licenses') + logger.info('=' * 80) + + uploaded_licenses, upload_start_time, upload_end_time = upload_test_licenses( + staff_headers, + NUM_LICENSES_TO_UPLOAD, + BATCH_SIZE, + ) + + logger.info(f'Upload time window: {upload_start_time.isoformat()} to {upload_end_time.isoformat()}') + + # Step 2: Wait for providers to be created + logger.info('=' * 80) + logger.info('STEP 2: Waiting for provider records to be created') + logger.info('=' * 80) + + provider_ids = wait_for_all_providers_created(staff_headers, len(uploaded_licenses)) + + logger.info(f'Found {len(provider_ids)} provider records') + + # Step 3: Start rollback step function + logger.info('=' * 80) + logger.info('STEP 3: Starting rollback step function') + logger.info('=' * 80) + + # Add buffer to time window to ensure we catch all uploads + rollback_start = upload_start_time - timedelta(minutes=5) + rollback_end = upload_end_time + timedelta(minutes=5) + + execution_arn = start_rollback_step_function( + step_function_arn=step_function_arn, + compact=COMPACT, + jurisdiction=JURISDICTION, + start_datetime=rollback_start, + end_datetime=rollback_end, + rollback_reason='Smoke test validation of rollback functionality', + ) + + # Step 4: Wait for step function completion + logger.info('=' * 80) + logger.info('STEP 4: Waiting for step function to complete') + logger.info('=' * 80) + + status, output = wait_for_step_function_completion(execution_arn) + + logger.info(f'Step function output: 
{json.dumps(output, indent=2)}') + + # Step 5: Retrieve and verify results from S3 + logger.info('=' * 80) + logger.info('STEP 5: Retrieving and verifying results from S3') + logger.info('=' * 80) + + results_s3_key = output.get('resultsS3Key') + if not results_s3_key: + raise SmokeTestFailureException('No resultsS3Key in step function output') + + results = get_rollback_results_from_s3(results_s3_key) + + verify_rollback_results(results, len(provider_ids)) + + # Step 6: Verify providers deleted from database + logger.info('=' * 80) + logger.info('STEP 6: Verifying providers were deleted from database') + logger.info('=' * 80) + + verify_providers_deleted_from_database(results, COMPACT) + + logger.info('=' * 80) + logger.info('✅ ALL TESTS PASSED') + logger.info('=' * 80) + + +if __name__ == '__main__': + load_smoke_test_env() + + # Create staff user with permission to upload licenses and run rollback + test_user_sub = create_test_staff_user( + email=TEST_STAFF_USER_EMAIL, + compact=COMPACT, + jurisdiction=JURISDICTION, + permissions={'actions': {'admin'}, 'jurisdictions': {JURISDICTION: {'write', 'admin'}}}, + ) + + try: + rollback_license_upload_smoke_test() + logger.info('🎉 License upload rollback smoke test completed successfully!') + except SmokeTestFailureException as e: + logger.error(f'❌ License upload rollback smoke test failed: {str(e)}') + raise + except Exception as e: + logger.error(f'❌ Unexpected error during smoke test: {str(e)}', exc_info=True) + raise + finally: + # Clean up the test staff user + delete_test_staff_user(TEST_STAFF_USER_EMAIL, user_sub=test_user_sub, compact=COMPACT) diff --git a/backend/compact-connect/tests/smoke/smoke_common.py b/backend/compact-connect/tests/smoke/smoke_common.py index 9caded8c4..59e869fe6 100644 --- a/backend/compact-connect/tests/smoke/smoke_common.py +++ b/backend/compact-connect/tests/smoke/smoke_common.py @@ -257,11 +257,26 @@ def get_provider_user_records(compact: str, provider_id: str) -> ProviderUserRec 
:return: ProviderUserRecords instance containing all records for this provider """ # Query the provider database for all records - query_result = config.provider_user_dynamodb_table.query( - KeyConditionExpression=Key('pk').eq(f'{compact}#PROVIDER#{provider_id}') - ) + resp = {'Items': []} + last_evaluated_key = None + while True: + pagination = {'ExclusiveStartKey': last_evaluated_key} if last_evaluated_key else {} + # This query key condition expression ensures we always grab all the primary and update records + query_resp = config.provider_user_dynamodb_table.query( + Select='ALL_ATTRIBUTES', + KeyConditionExpression=Key('pk').eq(f'{compact}#PROVIDER#{provider_id}') + & Key('sk').lt(f'{compact}#UPDATE#9'), + ConsistentRead=True, + **pagination, + ) + + resp['Items'].extend(query_resp.get('Items', [])) + + last_evaluated_key = query_resp.get('LastEvaluatedKey') + if not last_evaluated_key: + break - return ProviderUserRecords(query_result['Items']) + return ProviderUserRecords(resp['Items']) def upload_license_record(staff_headers: dict, compact: str, jurisdiction: str, data_overrides: dict = None): diff --git a/backend/compact-connect/tests/smoke/smoke_tests_env_example.json b/backend/compact-connect/tests/smoke/smoke_tests_env_example.json index 4dbfbb5c9..cec0fdade 100644 --- a/backend/compact-connect/tests/smoke/smoke_tests_env_example.json +++ b/backend/compact-connect/tests/smoke/smoke_tests_env_example.json @@ -18,5 +18,6 @@ "CC_TEST_PROVIDER_USER_PASSWORD": "examplePassword", "ENVIRONMENT_NAME": "sandboxEnvironmentNamePlaceholder", "SANDBOX_AUTHORIZE_NET_API_LOGIN_ID": "your_sandbox_api_login_id", - "SANDBOX_AUTHORIZE_NET_TRANSACTION_KEY": "your_sandbox_transaction_key" + "SANDBOX_AUTHORIZE_NET_TRANSACTION_KEY": "your_sandbox_transaction_key", + "CC_TEST_ROLLBACK_STEP_FUNCTION_ARN": "arn:aws:states:us-east-1:123456789012:stateMachine:Sandbox-DisasterRecoveryStack-LicenseUploadRollbackStateMachine" } From 1b067fb43da266d08727727f34ad249961ca1e0c Mon Sep 17 
00:00:00 2001 From: Landon Shumway Date: Fri, 7 Nov 2025 17:10:56 -0600 Subject: [PATCH 34/81] Fix input to take execution name instead of execution id --- .../handlers/rollback_license_upload.py | 10 +++++----- .../tests/function/test_rollback_license_upload.py | 10 +++++----- .../smoke/rollback_license_upload_smoke_tests.py | 12 ++++-------- 3 files changed, 14 insertions(+), 18 deletions(-) diff --git a/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py b/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py index c8d994da8..5fd810ad0 100644 --- a/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py +++ b/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py @@ -184,7 +184,7 @@ def rollback_license_upload(event: dict, context: LambdaContext): # noqa: ARG00 'startDateTime': '2024-01-01T00:00:00Z', 'endDateTime': '2024-01-01T23:59:59Z', 'rollbackReason': 'Invalid data uploaded', - 'executionId': 'unique-execution-id', + 'executionName': 'unique-execution-id', 'providersProcessed': 0, 'continueFromProviderId': None } @@ -208,7 +208,7 @@ def rollback_license_upload(event: dict, context: LambdaContext): # noqa: ARG00 start_datetime_str = event['startDateTime'] end_datetime_str = event['endDateTime'] rollback_reason = event['rollbackReason'] - execution_id = event['executionId'] + execution_name = event['executionName'] providers_processed = event.get('providersProcessed', 0) continue_from_provider_id = event.get('continueFromProviderId') @@ -245,11 +245,11 @@ def rollback_license_upload(event: dict, context: LambdaContext): # noqa: ARG00 jurisdiction=jurisdiction, start_datetime=start_datetime_str, end_datetime=end_datetime_str, - execution_id=execution_id, + execution_name=execution_name, ) # Initialize S3 client and bucket - results_s3_key = f'{execution_id}/results.json' + results_s3_key = 
f'{execution_name}/results.json' # Load existing results if this is a continuation existing_results = _load_results_from_s3(results_s3_key) @@ -312,7 +312,7 @@ def rollback_license_upload(event: dict, context: LambdaContext): # noqa: ARG00 'startDateTime': start_datetime_str, 'endDateTime': end_datetime_str, 'rollbackReason': rollback_reason, - 'executionId': execution_id, + 'executionName': execution_name, } providers_processed += 1 diff --git a/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py b/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py index ae251417f..d180a556d 100644 --- a/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py +++ b/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py @@ -27,7 +27,7 @@ MOCK_UPDATED_GIVEN_NAME = 'updatedGiven' MOCK_UPDATED_FAMILY_NAME = 'updatedFamily' MOCK_PROVIDER_ID = 'ba880c7c-5ed3-4be4-8ad5-c8558f58ef6f' -MOCK_EXECUTION_ID = 'test-execution-123' +MOCK_EXECUTION_NAME = 'test-execution-123' @mock_aws @@ -59,7 +59,7 @@ def _generate_test_event(self): 'startDateTime': self.default_start_datetime.isoformat(), 'endDateTime': self.default_end_datetime.isoformat(), 'rollbackReason': 'Test rollback', - 'executionId': MOCK_EXECUTION_ID, + 'executionName': MOCK_EXECUTION_NAME, 'providersProcessed': 0, } @@ -604,7 +604,7 @@ def _perform_rollback_and_get_s3_object(self): rollback_license_upload(event, Mock()) # Read object from S3 and verify its contents match what is expected - s3_key = f'{MOCK_EXECUTION_ID}/results.json' + s3_key = f'{MOCK_EXECUTION_NAME}/results.json' s3_obj = self.config.s3_client.get_object(Bucket=self.config.rollback_results_bucket_name, Key=s3_key) results_data = json.loads(s3_obj['Body'].read().decode('utf-8')) @@ -879,7 +879,7 @@ def 
test_rollback_handles_loading_existing_s3_results_and_appends_new_data(self) self._when_provider_had_privilege_deactivated_from_upload() # Create initial S3 results with data in all fields - s3_key = f'{MOCK_EXECUTION_ID}/results.json' + s3_key = f'{MOCK_EXECUTION_NAME}/results.json' # Create existing results data in the format that from_dict expects (camelCase for top-level keys) existing_results_data = { @@ -1055,7 +1055,7 @@ def test_rollback_handles_pagination_when_provider_id_present_in_event_input(sel self.assertEqual(0, result_second['providersFailed']) # Verify: S3 results contain both providers - s3_key = f'{MOCK_EXECUTION_ID}/results.json' + s3_key = f'{MOCK_EXECUTION_NAME}/results.json' s3_obj = self.config.s3_client.get_object( Bucket=self.config.rollback_results_bucket_name, Key=s3_key ) diff --git a/backend/compact-connect/tests/smoke/rollback_license_upload_smoke_tests.py b/backend/compact-connect/tests/smoke/rollback_license_upload_smoke_tests.py index edf9f2086..098c0a2b1 100644 --- a/backend/compact-connect/tests/smoke/rollback_license_upload_smoke_tests.py +++ b/backend/compact-connect/tests/smoke/rollback_license_upload_smoke_tests.py @@ -27,7 +27,7 @@ def upload_test_license_batch( - staff_headers: dict, batch_start_index: int, batch_size: int, upload_start_time: datetime + staff_headers: dict, batch_start_index: int, batch_size: int ): """ Upload a batch of test license records. 
@@ -35,7 +35,6 @@ def upload_test_license_batch( :param staff_headers: Authentication headers for staff user :param batch_start_index: Starting index for this batch :param batch_size: Number of licenses to upload in this batch - :param upload_start_time: The timestamp when upload started (for generating unique data) :return: List of license records that were uploaded """ licenses_batch = [] @@ -97,7 +96,7 @@ def upload_test_licenses(staff_headers: dict, num_licenses: int, batch_size: int for batch_start in range(0, num_licenses, batch_size): current_batch_size = min(batch_size, num_licenses - batch_start) - batch_licenses = upload_test_license_batch(staff_headers, batch_start, current_batch_size, upload_start_time) + batch_licenses = upload_test_license_batch(staff_headers, batch_start, current_batch_size) all_licenses.extend(batch_licenses) # Small delay between batches to avoid rate limiting @@ -241,7 +240,6 @@ def start_rollback_step_function( jurisdiction: str, start_datetime: datetime, end_datetime: datetime, - rollback_reason: str, ): """ Start the license upload rollback step function. 
@@ -251,7 +249,6 @@ def start_rollback_step_function( :param jurisdiction: Jurisdiction abbreviation :param start_datetime: Start of rollback time window :param end_datetime: End of rollback time window - :param rollback_reason: Reason for the rollback :return: Execution ARN """ sfn_client = boto3.client('stepfunctions') @@ -264,8 +261,8 @@ def start_rollback_step_function( 'jurisdiction': jurisdiction, 'startDateTime': start_datetime.isoformat().replace('+00:00', 'Z'), 'endDateTime': end_datetime.isoformat().replace('+00:00', 'Z'), - 'rollbackReason': rollback_reason, - 'tableNameRollbackConfirmation': 'CONFIRMED', # Required confirmation parameter + 'executionName': execution_name, + 'rollbackReason': 'Smoke test validation of rollback functionality', } logger.info(f'Starting step function execution: {execution_name}') @@ -501,7 +498,6 @@ def rollback_license_upload_smoke_test(): jurisdiction=JURISDICTION, start_datetime=rollback_start, end_datetime=rollback_end, - rollback_reason='Smoke test validation of rollback functionality', ) # Step 4: Wait for step function completion From 0419812ce40661940ac5d78c7138a95ebcc7ac31 Mon Sep 17 00:00:00 2001 From: Landon Shumway Date: Fri, 7 Nov 2025 17:31:00 -0600 Subject: [PATCH 35/81] Fix CDK nags for new resources --- .../disaster_recovery_stack/__init__.py | 21 +++++++++++++++++-- .../license_upload_rollback_step_function.py | 15 ++++++++++--- 2 files changed, 31 insertions(+), 5 deletions(-) diff --git a/backend/compact-connect/stacks/disaster_recovery_stack/__init__.py b/backend/compact-connect/stacks/disaster_recovery_stack/__init__.py index 251b7f2ea..271cf14ce 100644 --- a/backend/compact-connect/stacks/disaster_recovery_stack/__init__.py +++ b/backend/compact-connect/stacks/disaster_recovery_stack/__init__.py @@ -1,8 +1,9 @@ -from aws_cdk import RemovalPolicy +from aws_cdk import RemovalPolicy, Stack from aws_cdk.aws_dynamodb import Table from aws_cdk.aws_iam import PolicyStatement, ServicePrincipal from 
aws_cdk.aws_kms import Key -from aws_cdk.aws_s3 import Bucket, BucketEncryption +from aws_cdk.aws_s3 import BlockPublicAccess, Bucket, BucketEncryption, ObjectOwnership +from cdk_nag import NagSuppressions from common_constructs.stack import AppStack from constructs import Construct @@ -61,6 +62,7 @@ def __init__( ) # Create S3 bucket for license upload rollback results + stack = Stack.of(self) self.rollback_results_bucket = Bucket( self, 'DisasterRecoveryResultsBucket', @@ -70,6 +72,21 @@ def __init__( auto_delete_objects=removal_policy == RemovalPolicy.DESTROY, versioned=True, enforce_ssl=True, + block_public_access=BlockPublicAccess.BLOCK_ALL, + object_ownership=ObjectOwnership.BUCKET_OWNER_ENFORCED, + server_access_logs_bucket=persistent_stack.access_logs_bucket, + server_access_logs_prefix=f'_logs/{stack.account}/{stack.region}/{self.node.path}/DisasterRecoveryResultsBucket/', + ) + + # Suppress replication requirement - replication to a logs archive account may be added as a future enhancement + NagSuppressions.add_resource_suppressions( + self.rollback_results_bucket, + suppressions=[ + { + 'id': 'HIPAA.Security-S3BucketReplicationEnabled', + 'reason': 'This bucket is for generating one time results of the rollback workflow and is not intended to be replicated.', + }, + ], ) # Create Step Functions for restoring DynamoDB tables diff --git a/backend/compact-connect/stacks/disaster_recovery_stack/license_upload_rollback_step_function.py b/backend/compact-connect/stacks/disaster_recovery_stack/license_upload_rollback_step_function.py index 42db9238a..a4fa1805d 100644 --- a/backend/compact-connect/stacks/disaster_recovery_stack/license_upload_rollback_step_function.py +++ b/backend/compact-connect/stacks/disaster_recovery_stack/license_upload_rollback_step_function.py @@ -71,6 +71,17 @@ def __init__( encryption_key=dr_shared_encryption_key, ) + # Suppress retention period requirement - we are deliberately retaining logs indefinitely + 
NagSuppressions.add_resource_suppressions( + state_machine_log_group, + suppressions=[ + { + 'id': 'HIPAA.Security-CloudWatchLogGroupRetentionPeriod', + 'reason': 'This system will be used infrequently. We are deliberately retaining logs indefinitely here.', + }, + ], + ) + # Create state machine self.state_machine = StateMachine( self, @@ -174,10 +185,8 @@ def _build_rollback_state_machine_definition(self) -> IChainable: 'startDateTime.$': '$.startDateTime', 'endDateTime.$': '$.endDateTime', 'rollbackReason.$': '$.rollbackReason', - 'tableNameRollbackConfirmation.$': '$.tableNameRollbackConfirmation', - 'executionId.$': '$$.Execution.Name', + 'executionName.$': '$$.Execution.Name', 'providersProcessed': 0, - 'lastEvaluatedGSIKey': None, }, comment='Initialize rollback parameters with execution ID', result_path='$', From 5a6804cdde62f1225d68d73ed899ca45bee29054 Mon Sep 17 00:00:00 2001 From: Landon Shumway Date: Mon, 10 Nov 2025 11:46:42 -0600 Subject: [PATCH 36/81] Fix smoke test to use state api endpoint for upload --- .../license_upload_rollback_step_function.py | 2 +- .../rollback_license_upload_smoke_tests.py | 137 ++++++++++-------- .../tests/smoke/smoke_common.py | 113 +++++++++++++++ 3 files changed, 191 insertions(+), 61 deletions(-) diff --git a/backend/compact-connect/stacks/disaster_recovery_stack/license_upload_rollback_step_function.py b/backend/compact-connect/stacks/disaster_recovery_stack/license_upload_rollback_step_function.py index a4fa1805d..d5f49465b 100644 --- a/backend/compact-connect/stacks/disaster_recovery_stack/license_upload_rollback_step_function.py +++ b/backend/compact-connect/stacks/disaster_recovery_stack/license_upload_rollback_step_function.py @@ -129,7 +129,7 @@ def _create_rollback_function( index=os.path.join('handlers', 'rollback_license_upload.py'), handler='rollback_license_upload', timeout=Duration.minutes(15), - memory_size=5120, # 5 GB for managing potentially large results files + memory_size=3008, # for managing 
potentially large results files environment={ **stack.common_env_vars, 'ROLLBACK_RESULTS_BUCKET_NAME': rollback_results_bucket.bucket_name, diff --git a/backend/compact-connect/tests/smoke/rollback_license_upload_smoke_tests.py b/backend/compact-connect/tests/smoke/rollback_license_upload_smoke_tests.py index 098c0a2b1..bf5d33753 100644 --- a/backend/compact-connect/tests/smoke/rollback_license_upload_smoke_tests.py +++ b/backend/compact-connect/tests/smoke/rollback_license_upload_smoke_tests.py @@ -9,17 +9,21 @@ from config import config, logger from smoke_common import ( SmokeTestFailureException, + create_test_app_client, create_test_staff_user, + delete_test_app_client, delete_test_staff_user, get_api_base_url, + get_client_auth_headers, get_provider_user_records, get_staff_user_auth_headers, load_smoke_test_env, ) -COMPACT = 'aslp' +COMPACT = 'octp' JURISDICTION = 'ne' TEST_STAFF_USER_EMAIL = 'testStaffUserLicenseRollback@smokeTestFakeEmail.com' +TEST_APP_CLIENT_NAME = 'test-license-rollback-client' # Test configuration NUM_LICENSES_TO_UPLOAD = 1000 @@ -27,12 +31,12 @@ def upload_test_license_batch( - staff_headers: dict, batch_start_index: int, batch_size: int + auth_headers: dict, batch_start_index: int, batch_size: int ): """ Upload a batch of test license records. 
- :param staff_headers: Authentication headers for staff user + :param auth_headers: Authentication headers for app client :param batch_start_index: Starting index for this batch :param batch_size: Number of licenses to upload in this batch :return: List of license records that were uploaded @@ -66,7 +70,7 @@ def upload_test_license_batch( post_response = requests.post( url=f'{get_api_base_url()}/v1/compacts/{COMPACT}/jurisdictions/{JURISDICTION}/licenses', - headers=staff_headers, + headers=auth_headers, json=licenses_batch, timeout=60, # Longer timeout for batch uploads ) @@ -80,11 +84,11 @@ def upload_test_license_batch( return licenses_batch -def upload_test_licenses(staff_headers: dict, num_licenses: int, batch_size: int): +def upload_test_licenses(auth_headers: dict, num_licenses: int, batch_size: int): """ Upload test license records in batches. - :param staff_headers: Authentication headers for staff user + :param auth_headers: Authentication headers for app client :param num_licenses: Total number of licenses to upload :param batch_size: Number of licenses per batch :return: Tuple of (all uploaded license data, upload start time, upload end time) @@ -96,7 +100,7 @@ def upload_test_licenses(staff_headers: dict, num_licenses: int, batch_size: int for batch_start in range(0, num_licenses, batch_size): current_batch_size = min(batch_size, num_licenses - batch_start) - batch_licenses = upload_test_license_batch(staff_headers, batch_start, current_batch_size) + batch_licenses = upload_test_license_batch(auth_headers, batch_start, current_batch_size) all_licenses.extend(batch_licenses) # Small delay between batches to avoid rate limiting @@ -459,79 +463,92 @@ def rollback_license_upload_smoke_test(): if not step_function_arn: raise SmokeTestFailureException('CC_TEST_ROLLBACK_STEP_FUNCTION_ARN environment variable not set') + # staff user to query providers staff_headers = get_staff_user_auth_headers(TEST_STAFF_USER_EMAIL) - # Step 1: Upload test licenses - 
logger.info('=' * 80) - logger.info('STEP 1: Uploading test licenses') - logger.info('=' * 80) + # Create test app client for authentication + client_credentials = create_test_app_client(TEST_APP_CLIENT_NAME, COMPACT, JURISDICTION) + client_id = client_credentials['client_id'] + client_secret = client_credentials['client_secret'] - uploaded_licenses, upload_start_time, upload_end_time = upload_test_licenses( - staff_headers, - NUM_LICENSES_TO_UPLOAD, - BATCH_SIZE, - ) + try: + # Get authentication headers using app client + auth_headers = get_client_auth_headers(client_id, client_secret, COMPACT, JURISDICTION) + + # Step 1: Upload test licenses + logger.info('=' * 80) + logger.info('STEP 1: Uploading test licenses') + logger.info('=' * 80) + + uploaded_licenses, upload_start_time, upload_end_time = upload_test_licenses( + auth_headers, + NUM_LICENSES_TO_UPLOAD, + BATCH_SIZE, + ) - logger.info(f'Upload time window: {upload_start_time.isoformat()} to {upload_end_time.isoformat()}') + logger.info(f'Upload time window: {upload_start_time.isoformat()} to {upload_end_time.isoformat()}') - # Step 2: Wait for providers to be created - logger.info('=' * 80) - logger.info('STEP 2: Waiting for provider records to be created') - logger.info('=' * 80) + # Step 2: Wait for providers to be created + logger.info('=' * 80) + logger.info('STEP 2: Waiting for provider records to be created') + logger.info('=' * 80) - provider_ids = wait_for_all_providers_created(staff_headers, len(uploaded_licenses)) + provider_ids = wait_for_all_providers_created(staff_headers, len(uploaded_licenses)) - logger.info(f'Found {len(provider_ids)} provider records') + logger.info(f'Found {len(provider_ids)} provider records') - # Step 3: Start rollback step function - logger.info('=' * 80) - logger.info('STEP 3: Starting rollback step function') - logger.info('=' * 80) + # Step 3: Start rollback step function + logger.info('=' * 80) + logger.info('STEP 3: Starting rollback step function') + 
logger.info('=' * 80) - # Add buffer to time window to ensure we catch all uploads - rollback_start = upload_start_time - timedelta(minutes=5) - rollback_end = upload_end_time + timedelta(minutes=5) + # Add buffer to time window to ensure we catch all uploads + rollback_start = upload_start_time - timedelta(minutes=5) + rollback_end = upload_end_time + timedelta(minutes=5) - execution_arn = start_rollback_step_function( - step_function_arn=step_function_arn, - compact=COMPACT, - jurisdiction=JURISDICTION, - start_datetime=rollback_start, - end_datetime=rollback_end, - ) + execution_arn = start_rollback_step_function( + step_function_arn=step_function_arn, + compact=COMPACT, + jurisdiction=JURISDICTION, + start_datetime=rollback_start, + end_datetime=rollback_end, + ) - # Step 4: Wait for step function completion - logger.info('=' * 80) - logger.info('STEP 4: Waiting for step function to complete') - logger.info('=' * 80) + # Step 4: Wait for step function completion + logger.info('=' * 80) + logger.info('STEP 4: Waiting for step function to complete') + logger.info('=' * 80) - status, output = wait_for_step_function_completion(execution_arn) + status, output = wait_for_step_function_completion(execution_arn) - logger.info(f'Step function output: {json.dumps(output, indent=2)}') + logger.info(f'Step function output: {json.dumps(output, indent=2)}') - # Step 5: Retrieve and verify results from S3 - logger.info('=' * 80) - logger.info('STEP 5: Retrieving and verifying results from S3') - logger.info('=' * 80) + # Step 5: Retrieve and verify results from S3 + logger.info('=' * 80) + logger.info('STEP 5: Retrieving and verifying results from S3') + logger.info('=' * 80) - results_s3_key = output.get('resultsS3Key') - if not results_s3_key: - raise SmokeTestFailureException('No resultsS3Key in step function output') + results_s3_key = output.get('resultsS3Key') + if not results_s3_key: + raise SmokeTestFailureException('No resultsS3Key in step function output') - results 
= get_rollback_results_from_s3(results_s3_key) + results = get_rollback_results_from_s3(results_s3_key) - verify_rollback_results(results, len(provider_ids)) + verify_rollback_results(results, len(provider_ids)) - # Step 6: Verify providers deleted from database - logger.info('=' * 80) - logger.info('STEP 6: Verifying providers were deleted from database') - logger.info('=' * 80) + # Step 6: Verify providers deleted from database + logger.info('=' * 80) + logger.info('STEP 6: Verifying providers were deleted from database') + logger.info('=' * 80) - verify_providers_deleted_from_database(results, COMPACT) + verify_providers_deleted_from_database(results, COMPACT) - logger.info('=' * 80) - logger.info('✅ ALL TESTS PASSED') - logger.info('=' * 80) + logger.info('=' * 80) + logger.info('✅ ALL TESTS PASSED') + logger.info('=' * 80) + finally: + # Clean up the test app client + delete_test_app_client(client_id) if __name__ == '__main__': diff --git a/backend/compact-connect/tests/smoke/smoke_common.py b/backend/compact-connect/tests/smoke/smoke_common.py index 59e869fe6..665414b3d 100644 --- a/backend/compact-connect/tests/smoke/smoke_common.py +++ b/backend/compact-connect/tests/smoke/smoke_common.py @@ -485,6 +485,119 @@ def cleanup_test_provider_records(provider_id: str, compact: str): logger.warning(f'Error during cleanup: {str(e)}') +def create_test_app_client(client_name: str, compact: str, jurisdiction: str): + """ + Create a test app client in Cognito for authentication testing. 
+ + :param client_name: Name for the test app client + :param compact: Compact abbreviation + :param jurisdiction: Jurisdiction abbreviation + :return: Dictionary containing client_id and client_secret + """ + logger.info(f'Creating test app client: {client_name}') + + try: + cognito_client = boto3.client('cognito-idp') + + # Create the user pool client + response = cognito_client.create_user_pool_client( + UserPoolId=config.cognito_staff_user_pool_id, + ClientName=client_name, + PreventUserExistenceErrors='ENABLED', + GenerateSecret=True, + TokenValidityUnits={'AccessToken': 'minutes'}, + AccessTokenValidity=15, + AllowedOAuthFlowsUserPoolClient=True, + AllowedOAuthFlows=['client_credentials'], + AllowedOAuthScopes=[f'{compact}/readGeneral', f'{jurisdiction}/{compact}.write'], + ) + + user_pool_client = response.get('UserPoolClient', {}) + client_id = user_pool_client.get('ClientId') + client_secret = user_pool_client.get('ClientSecret') + + if not client_id or not client_secret: + raise SmokeTestFailureException('Failed to extract client ID or secret from AWS response') + + logger.info(f'Successfully created test app client with ID: {client_id}') + return {'client_id': client_id, 'client_secret': client_secret} + + except ClientError as e: + error_code = e.response['Error']['Code'] + error_message = e.response['Error']['Message'] + logger.error(f'Failed to create app client: {error_code} - {error_message}') + raise SmokeTestFailureException(f'Failed to create app client: {error_code} - {error_message}') from e + + +def delete_test_app_client(client_id: str): + """Delete the test app client from Cognito.""" + try: + cognito_client = boto3.client('cognito-idp') + cognito_client.delete_user_pool_client(UserPoolId=config.cognito_staff_user_pool_id, ClientId=client_id) + logger.info(f'Successfully deleted test app client: {client_id}') + except ClientError as e: + logger.error(f'Failed to delete app client {client_id}: {str(e)}') + # Don't raise here as this is 
cleanup + + +def get_client_credentials_token(client_id: str, client_secret: str, compact: str, jurisdiction: str): + """ + Get an access token using client credentials flow. + + :param client_id: The client ID + :param client_secret: The client secret + :param compact: Compact abbreviation + :param jurisdiction: Jurisdiction abbreviation + :return: Access token + """ + try: + auth_url = f'https://{config.cognito_staff_user_pool_id.split("_")[1]}.auth.{config.aws_region}.amazoncognito.com/oauth2/token' + + # Prepare the request data for client credentials flow + data = { + 'grant_type': 'client_credentials', + 'client_id': client_id, + 'client_secret': client_secret, + 'scope': f'{compact}/readGeneral {jurisdiction}/{compact}.write', + } + + headers = {'Content-Type': 'application/x-www-form-urlencoded', 'Accept': 'application/json'} + + response = requests.post(auth_url, data=data, headers=headers, timeout=10) + + if response.status_code != 200: + raise SmokeTestFailureException( + f'Failed to get access token. Status: {response.status_code}, Response: {response.text}' + ) + + token_data = response.json() + access_token = token_data.get('access_token') + + if not access_token: + raise SmokeTestFailureException('No access token in response') + + logger.info('Successfully obtained access token using client credentials') + return access_token + + except requests.RequestException as e: + logger.error(f'Failed to get client credentials token: {str(e)}') + raise SmokeTestFailureException(f'Failed to get client credentials token: {str(e)}') from e + + +def get_client_auth_headers(client_id: str, client_secret: str, compact: str, jurisdiction: str): + """ + Get authentication headers for client credentials flow. 
+ + :param client_id: The client ID + :param client_secret: The client secret + :param compact: Compact abbreviation + :param jurisdiction: Jurisdiction abbreviation + :return: Headers dictionary with Authorization header + """ + access_token = get_client_credentials_token(client_id, client_secret, compact, jurisdiction) + return {'Authorization': f'Bearer {access_token}'} + + def generate_opaque_data(card_number: str): """Generate a payment nonce using Authorize.Net's Secure Payment Container API. From fdde06e212b22184d278d93e298493b34cfc103c Mon Sep 17 00:00:00 2001 From: Landon Shumway Date: Mon, 10 Nov 2025 14:20:44 -0600 Subject: [PATCH 37/81] More smoke test fixes --- .../license_upload_rollback_step_function.py | 1 + .../rollback_license_upload_smoke_tests.py | 17 ++++++++++------- .../compact-connect/tests/smoke/smoke_common.py | 6 +++--- 3 files changed, 14 insertions(+), 10 deletions(-) diff --git a/backend/compact-connect/stacks/disaster_recovery_stack/license_upload_rollback_step_function.py b/backend/compact-connect/stacks/disaster_recovery_stack/license_upload_rollback_step_function.py index d5f49465b..3cfcb6879 100644 --- a/backend/compact-connect/stacks/disaster_recovery_stack/license_upload_rollback_step_function.py +++ b/backend/compact-connect/stacks/disaster_recovery_stack/license_upload_rollback_step_function.py @@ -132,6 +132,7 @@ def _create_rollback_function( memory_size=3008, # for managing potentially large results files environment={ **stack.common_env_vars, + 'PROVIDER_TABLE_NAME': persistent_stack.provider_table.table_name, 'ROLLBACK_RESULTS_BUCKET_NAME': rollback_results_bucket.bucket_name, 'LICENSE_UPLOAD_DATE_INDEX_NAME': persistent_stack.provider_table.license_upload_date_gsi_name, 'EVENT_BUS_NAME': data_event_bus.event_bus_name, diff --git a/backend/compact-connect/tests/smoke/rollback_license_upload_smoke_tests.py b/backend/compact-connect/tests/smoke/rollback_license_upload_smoke_tests.py index bf5d33753..c3b61835b 100644 --- 
a/backend/compact-connect/tests/smoke/rollback_license_upload_smoke_tests.py +++ b/backend/compact-connect/tests/smoke/rollback_license_upload_smoke_tests.py @@ -20,7 +20,7 @@ load_smoke_test_env, ) -COMPACT = 'octp' +COMPACT = 'coun' JURISDICTION = 'ne' TEST_STAFF_USER_EMAIL = 'testStaffUserLicenseRollback@smokeTestFakeEmail.com' TEST_APP_CLIENT_NAME = 'test-license-rollback-client' @@ -48,13 +48,15 @@ def upload_test_license_batch( license_data = { 'licenseNumber': f'ROLLBACK-TEST-{i:04d}', 'homeAddressPostalCode': '68001', - 'givenName': f'TestProvider{i}', - 'familyName': f'RollbackTest{i:04d}', + 'givenName': f'TestProvider{i:04d}', + # keep the family name consistent so we can query for all the providers which requires an exact + # match on the family name + 'familyName': 'RollbackTest', 'homeAddressStreet1': '123 Test Street', 'dateOfBirth': '1985-01-01', 'dateOfIssuance': '2020-01-01', 'ssn': f'500-50-{i:04d}', # Incrementing SSN with padded zeros - 'licenseType': 'audiologist', + 'licenseType': 'licensed professional counselor', 'dateOfExpiration': '2050-12-10', 'homeAddressState': 'NE', 'homeAddressCity': 'Omaha', @@ -69,7 +71,7 @@ def upload_test_license_batch( ) post_response = requests.post( - url=f'{get_api_base_url()}/v1/compacts/{COMPACT}/jurisdictions/{JURISDICTION}/licenses', + url=f'{config.state_api_base_url}/v1/compacts/{COMPACT}/jurisdictions/{JURISDICTION}/licenses', headers=auth_headers, json=licenses_batch, timeout=60, # Longer timeout for batch uploads @@ -134,7 +136,7 @@ def wait_for_all_providers_created(staff_headers: dict, expected_count: int, max base_query_body = { 'query': {'familyName': 'RollbackTest'}, 'pagination': { - 'pageSize': 1000 # Maximum page size to minimize number of requests + 'pageSize': 100 # Maximum page size to minimize number of requests }, } @@ -157,7 +159,8 @@ def wait_for_all_providers_created(staff_headers: dict, expected_count: int, max ) if query_response.status_code != 200: - logger.warning(f'Query failed 
with status {query_response.status_code}. Retrying...') + logger.warning(f'Query failed with status {query_response.status_code}: {query_response.json()}' + f' Retrying...') break response_data = query_response.json() diff --git a/backend/compact-connect/tests/smoke/smoke_common.py b/backend/compact-connect/tests/smoke/smoke_common.py index 665414b3d..14bbe454b 100644 --- a/backend/compact-connect/tests/smoke/smoke_common.py +++ b/backend/compact-connect/tests/smoke/smoke_common.py @@ -501,7 +501,7 @@ def create_test_app_client(client_name: str, compact: str, jurisdiction: str): # Create the user pool client response = cognito_client.create_user_pool_client( - UserPoolId=config.cognito_staff_user_pool_id, + UserPoolId=config.cognito_state_auth_user_pool_id, ClientName=client_name, PreventUserExistenceErrors='ENABLED', GenerateSecret=True, @@ -533,7 +533,7 @@ def delete_test_app_client(client_id: str): """Delete the test app client from Cognito.""" try: cognito_client = boto3.client('cognito-idp') - cognito_client.delete_user_pool_client(UserPoolId=config.cognito_staff_user_pool_id, ClientId=client_id) + cognito_client.delete_user_pool_client(UserPoolId=config.cognito_state_auth_user_pool_id, ClientId=client_id) logger.info(f'Successfully deleted test app client: {client_id}') except ClientError as e: logger.error(f'Failed to delete app client {client_id}: {str(e)}') @@ -551,7 +551,7 @@ def get_client_credentials_token(client_id: str, client_secret: str, compact: st :return: Access token """ try: - auth_url = f'https://{config.cognito_staff_user_pool_id.split("_")[1]}.auth.{config.aws_region}.amazoncognito.com/oauth2/token' + auth_url = config.state_auth_url # Prepare the request data for client credentials flow data = { From 24ef6e9aea23d9f0186e84f641c5535955312f48 Mon Sep 17 00:00:00 2001 From: Landon Shumway Date: Mon, 10 Nov 2025 14:26:36 -0600 Subject: [PATCH 38/81] start transaction collector an hour later to account for auth.net delays --- 
.../transaction_history_processing_workflow.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/backend/compact-connect/stacks/transaction_monitoring_stack/transaction_history_processing_workflow.py b/backend/compact-connect/stacks/transaction_monitoring_stack/transaction_history_processing_workflow.py index a2a909c6b..1df58a807 100644 --- a/backend/compact-connect/stacks/transaction_monitoring_stack/transaction_history_processing_workflow.py +++ b/backend/compact-connect/stacks/transaction_monitoring_stack/transaction_history_processing_workflow.py @@ -221,7 +221,7 @@ def __init__( Rule( self, f'{compact}-DailyTransactionProcessingRule', - schedule=Schedule.cron(week_day='*', hour='1', minute='0', month='*', year='*'), + schedule=Schedule.cron(week_day='*', hour='2', minute='0', month='*', year='*'), targets=[SfnStateMachine(state_machine)], ) From 4c326de1416c5fcc6fe9a00f6f2a354ebbe5c6a1 Mon Sep 17 00:00:00 2001 From: Landon Shumway Date: Mon, 10 Nov 2025 16:33:35 -0600 Subject: [PATCH 39/81] Migrate update record sort keys to be deterministic --- .../cc_common/data_model/data_client.py | 8 ++++++++ .../data_model/schema/license/record.py | 6 +++--- .../data_model/schema/privilege/record.py | 6 +++--- .../data_model/schema/provider/__init__.py | 4 ++++ .../data_model/schema/provider/record.py | 19 ++++++++++++++++--- .../common/common_test/test_data_generator.py | 1 + .../common/tests/function/test_data_client.py | 6 +++--- .../resources/dynamo/license-update.json | 4 +++- .../resources/dynamo/privilege-update.json | 2 +- .../test_schema/test_license.py | 4 ++++ .../function/test_rollback_license_upload.py | 14 +++++++++----- .../test_handlers/test_registration.py | 1 + 12 files changed, 56 insertions(+), 19 deletions(-) diff --git a/backend/compact-connect/lambdas/python/common/cc_common/data_model/data_client.py b/backend/compact-connect/lambdas/python/common/cc_common/data_model/data_client.py index fe229a8b9..8cb220b84 100644 --- 
a/backend/compact-connect/lambdas/python/common/cc_common/data_model/data_client.py +++ b/backend/compact-connect/lambdas/python/common/cc_common/data_model/data_client.py @@ -925,6 +925,7 @@ def process_registration_values( } # Create provider update record to show registration event and fields that were updated + now = config.current_standard_datetime provider_update_record = ProviderUpdateData.create_new( { 'type': ProviderRecordType.PROVIDER_UPDATE, @@ -932,6 +933,7 @@ def process_registration_values( 'providerId': matched_license_record.providerId, 'compact': matched_license_record.compact, 'previous': current_provider_record.to_dict(), + 'createDate': now, 'updatedValues': {**registration_values}, } ) @@ -2632,6 +2634,7 @@ def _get_provider_record_transaction_items_for_jurisdiction_with_no_known_licens ) # Create the provider update record + now = config.current_standard_datetime provider_update_record = ProviderUpdateData.create_new( { 'type': ProviderRecordType.PROVIDER_UPDATE, @@ -2639,6 +2642,7 @@ def _get_provider_record_transaction_items_for_jurisdiction_with_no_known_licens 'providerId': provider_id, 'compact': compact, 'previous': provider_record.to_dict(), + 'createDate': now, 'updatedValues': { 'currentHomeJurisdiction': selected_jurisdiction, }, @@ -2792,6 +2796,7 @@ def _get_provider_record_transaction_items_for_jurisdiction_change_with_license( ) # Create the provider update record + now = config.current_standard_datetime provider_update_record = ProviderUpdateData.create_new( { 'type': ProviderRecordType.PROVIDER_UPDATE, @@ -2799,6 +2804,7 @@ def _get_provider_record_transaction_items_for_jurisdiction_change_with_license( 'providerId': provider_id, 'compact': compact, 'previous': provider_records.get_provider_record().to_dict(), + 'createDate': now, 'updatedValues': { 'licenseJurisdiction': new_license_record.jurisdiction, # we explicitly set this to align with what was passed in as the selected jurisdiction @@ -3485,6 +3491,7 @@ def 
complete_provider_email_update( current_provider_record = self.get_provider_top_level_record(compact=compact, provider_id=provider_id) # Create provider update record to track the email change + now = config.current_standard_datetime provider_update_record = ProviderUpdateData.create_new( { 'type': ProviderRecordType.PROVIDER_UPDATE, @@ -3492,6 +3499,7 @@ def complete_provider_email_update( 'providerId': provider_id, 'compact': compact, 'previous': current_provider_record.to_dict(), + 'createDate': now, 'updatedValues': { 'compactConnectRegisteredEmailAddress': new_email_address, }, diff --git a/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/license/record.py b/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/license/record.py index eb79bd5d7..a32deb1b9 100644 --- a/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/license/record.py +++ b/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/license/record.py @@ -243,13 +243,13 @@ def generate_pk_sk(self, in_data, **kwargs): # noqa: ARG001 unused-argument served out via API. """ in_data['pk'] = f'{in_data["compact"]}#PROVIDER#{in_data["providerId"]}' - # This needs to include a POSIX timestamp (seconds) and a hash of the changes - # to the record. We'll use the current time and the hash of the updatedValues + # This needs to include an iso formatted datetime string and a hash of the changes + # to the record. We'll use the createDate and the hash of the updatedValues # field for this. 
change_hash = self.hash_changes(in_data) license_type_abbr = config.license_type_abbreviations[in_data['compact']][in_data['licenseType']] in_data['sk'] = ( - f'{in_data["compact"]}#UPDATE#3#license/{in_data["jurisdiction"]}/{license_type_abbr}/{int(config.current_standard_datetime.timestamp())}/{change_hash}' + f'{in_data["compact"]}#UPDATE#3#license/{in_data["jurisdiction"]}/{license_type_abbr}/{in_data["createDate"]}/{change_hash}' ) return in_data diff --git a/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/privilege/record.py b/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/privilege/record.py index 3bcd3ed0e..625007827 100644 --- a/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/privilege/record.py +++ b/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/privilege/record.py @@ -234,13 +234,13 @@ class PrivilegeUpdateRecordSchema(BaseRecordSchema, ChangeHashMixin, ValidatesLi @post_dump # Must be _post_ dump so we have values that are more easily hashed def generate_pk_sk(self, in_data, **kwargs): # noqa: ARG001 unused-argument in_data['pk'] = f'{in_data["compact"]}#PROVIDER#{in_data["providerId"]}' - # This needs to include a POSIX timestamp (seconds) and a hash of the changes - # to the record. We'll use the current time and the hash of the updatedValues + # This needs to include an iso formatted datetime string and a hash of the changes + # to the record. We'll use the createDate and the hash of the updatedValues # field for this. 
change_hash = self.hash_changes(in_data) license_type_abbr = config.license_type_abbreviations[in_data['compact']][in_data['licenseType']] in_data['sk'] = ( - f'{in_data["compact"]}#UPDATE#1#privilege/{in_data["jurisdiction"]}/{license_type_abbr}/{int(config.current_standard_datetime.timestamp())}/{change_hash}' + f'{in_data["compact"]}#UPDATE#1#privilege/{in_data["jurisdiction"]}/{license_type_abbr}/{in_data["createDate"]}/{change_hash}' ) return in_data diff --git a/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/provider/__init__.py b/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/provider/__init__.py index edab65d15..ebbf23e4b 100644 --- a/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/provider/__init__.py +++ b/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/provider/__init__.py @@ -175,6 +175,10 @@ def providerId(self) -> UUID: def compact(self) -> str: return self._data['compact'] + @property + def createDate(self) -> str: + return self._data['createDate'] + @property def previous(self) -> dict: return self._data['previous'] diff --git a/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/provider/record.py b/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/provider/record.py index 5bc21b05c..2941bfaba 100644 --- a/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/provider/record.py +++ b/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/provider/record.py @@ -225,19 +225,32 @@ class ProviderUpdateRecordSchema(BaseRecordSchema, ChangeHashMixin): providerId = UUID(required=True, allow_none=False) compact = Compact(required=True, allow_none=False) previous = Nested(ProviderUpdatePreviousRecordSchema, required=True, allow_none=False) + # this tracks when the update record was created + createDate = DateTime(required=True, allow_none=False) # We'll allow any fields 
that can show up in the previous field to be here as well, but none are required updatedValues = Nested(ProviderUpdatePreviousRecordSchema(partial=True), required=True, allow_none=False) # List of field names that were present in the previous record but removed in the update removedValues = List(String(), required=False, allow_none=False) + # TODO - remove this pre_load hook after migration is complete # noqa: FIX002 + @pre_load + def populate_create_date_for_backwards_compatibility(self, in_data, **kwargs): # noqa: ARG001 unused-argument + """ + For backwards compatibility, populate createDate from dateOfUpdate if createDate is missing. + This allows us to load old records that were created before the createDate field was added. + """ + if 'createDate' not in in_data and 'updatedValues' in in_data and 'dateOfUpdate' in in_data['updatedValues']: + in_data['createDate'] = in_data['updatedValues']['dateOfUpdate'] + return in_data + @post_dump # Must be _post_ dump so we have values that are more easily hashed def generate_pk_sk(self, in_data, **kwargs): # noqa: ARG001 unused-argument in_data['pk'] = f'{in_data["compact"]}#PROVIDER#{in_data["providerId"]}' - # This needs to include a POSIX timestamp (seconds) and a hash of the changes - # to the record. We'll use the current time and the hash of the updatedValues + # This needs to include an iso formatted datetime string and a hash of the changes + # to the record. We'll use the createDate and the hash of the updatedValues # field for this. 
change_hash = self.hash_changes(in_data) in_data['sk'] = ( - f'{in_data["compact"]}#UPDATE#2#provider/{int(config.current_standard_datetime.timestamp())}/{change_hash}' + f'{in_data["compact"]}#UPDATE#2#provider/{in_data["createDate"]}/{change_hash}' ) return in_data diff --git a/backend/compact-connect/lambdas/python/common/common_test/test_data_generator.py b/backend/compact-connect/lambdas/python/common/common_test/test_data_generator.py index 5bc5c265d..7c36d9e9a 100644 --- a/backend/compact-connect/lambdas/python/common/common_test/test_data_generator.py +++ b/backend/compact-connect/lambdas/python/common/common_test/test_data_generator.py @@ -514,6 +514,7 @@ def generate_default_provider_update( 'compact': DEFAULT_COMPACT, 'type': PROVIDER_UPDATE_RECORD_TYPE, 'previous': previous_dict, + 'createDate': datetime.fromisoformat(DEFAULT_PROVIDER_UPDATE_DATETIME), 'updatedValues': { 'compactConnectRegisteredEmailAddress': DEFAULT_REGISTERED_EMAIL_ADDRESS, 'currentHomeJurisdiction': DEFAULT_LICENSE_JURISDICTION, diff --git a/backend/compact-connect/lambdas/python/common/tests/function/test_data_client.py b/backend/compact-connect/lambdas/python/common/tests/function/test_data_client.py index 1f93f8724..e5de9510e 100644 --- a/backend/compact-connect/lambdas/python/common/tests/function/test_data_client.py +++ b/backend/compact-connect/lambdas/python/common/tests/function/test_data_client.py @@ -361,7 +361,7 @@ def test_data_client_updates_privilege_records_for_specific_license_type(self): # A new history record { 'pk': f'aslp#PROVIDER#{provider_uuid}', - 'sk': 'aslp#UPDATE#1#privilege/ky/aud/1731110399/f61e34798e1775ff6230d1187d444146', + 'sk': 'aslp#UPDATE#1#privilege/ky/aud/2024-11-08T23:59:59+00:00/f61e34798e1775ff6230d1187d444146', 'type': 'privilegeUpdate', 'updateType': 'renewal', 'providerId': provider_uuid, @@ -826,7 +826,7 @@ def test_deactivate_privilege_updates_record(self): self.assertEqual( { 'pk': f'aslp#PROVIDER#{provider_id}', - 'sk': 
'aslp#UPDATE#1#privilege/ne/aud/1731110399/aac682a76e1182a641a1b40dd606ae51', + 'sk': 'aslp#UPDATE#1#privilege/ne/aud/2024-11-08T23:59:59+00:00/aac682a76e1182a641a1b40dd606ae51', 'type': 'privilegeUpdate', 'updateType': 'deactivation', 'providerId': str(provider_id), @@ -924,7 +924,7 @@ def test_deactivate_privilege_on_inactive_privilege_raises_exception(self): # We'll create it as if it were already deactivated original_history = { 'pk': f'aslp#PROVIDER#{provider_id}', - 'sk': 'aslp#UPDATE#1#privilege/ne/aud/1731110399/4ebb3dc8f1ffcc30fe7aad5ec49d0ca6', + 'sk': 'aslp#UPDATE#1#privilege/ne/aud/2024-11-08T23:59:59+00:00/4ebb3dc8f1ffcc30fe7aad5ec49d0ca6', 'type': 'privilegeUpdate', 'updateType': 'renewal', 'providerId': str(provider_id), diff --git a/backend/compact-connect/lambdas/python/common/tests/resources/dynamo/license-update.json b/backend/compact-connect/lambdas/python/common/tests/resources/dynamo/license-update.json index 9bab8a231..68b682b23 100644 --- a/backend/compact-connect/lambdas/python/common/tests/resources/dynamo/license-update.json +++ b/backend/compact-connect/lambdas/python/common/tests/resources/dynamo/license-update.json @@ -1,6 +1,8 @@ { "pk": "aslp#PROVIDER#89a6377e-c3a5-40e5-bca5-317ec854c570", - "sk": "aslp#UPDATE#3#license/oh/slp/1586264399/34702de3dc08e64922605a6b18f3838b", + "sk": "aslp#UPDATE#3#license/oh/slp/2024-11-08T23:59:59+00:00/34702de3dc08e64922605a6b18f3838b", + "licenseUploadDateGSIPK": "C#aslp#J#oh#D#2024-11", + "licenseUploadDateGSISK": "TIME#1731110399#LT#slp#PID#89a6377e-c3a5-40e5-bca5-317ec854c570", "type": "licenseUpdate", "updateType": "renewal", "providerId": "89a6377e-c3a5-40e5-bca5-317ec854c570", diff --git a/backend/compact-connect/lambdas/python/common/tests/resources/dynamo/privilege-update.json b/backend/compact-connect/lambdas/python/common/tests/resources/dynamo/privilege-update.json index 4a22b0aff..95ad3ebd8 100644 --- 
a/backend/compact-connect/lambdas/python/common/tests/resources/dynamo/privilege-update.json +++ b/backend/compact-connect/lambdas/python/common/tests/resources/dynamo/privilege-update.json @@ -1,6 +1,6 @@ { "pk": "aslp#PROVIDER#89a6377e-c3a5-40e5-bca5-317ec854c570", - "sk": "aslp#UPDATE#1#privilege/ne/slp/1731110399/939a3c350708e34875f0a652bf7d7454", + "sk": "aslp#UPDATE#1#privilege/ne/slp/2020-05-05T12:59:59+00:00/939a3c350708e34875f0a652bf7d7454", "type": "privilegeUpdate", "updateType": "renewal", "providerId": "89a6377e-c3a5-40e5-bca5-317ec854c570", diff --git a/backend/compact-connect/lambdas/python/common/tests/unit/test_data_model/test_schema/test_license.py b/backend/compact-connect/lambdas/python/common/tests/unit/test_data_model/test_schema/test_license.py index d251530bc..73f239035 100644 --- a/backend/compact-connect/lambdas/python/common/tests/unit/test_data_model/test_schema/test_license.py +++ b/backend/compact-connect/lambdas/python/common/tests/unit/test_data_model/test_schema/test_license.py @@ -207,6 +207,8 @@ def test_hash_is_deterministic(self): 'compact': 'aslp', 'jurisdiction': 'ky', 'licenseType': 'speech-language pathologist', + 'updateType': loaded_record['updateType'], + 'createDate': loaded_record['createDate'], # These two fields should determine the change hash: 'previous': loaded_record['previous'].copy(), 'updatedValues': loaded_record['updatedValues'].copy(), @@ -234,6 +236,8 @@ def test_hash_is_unique(self): 'compact': 'aslp', 'jurisdiction': 'ky', 'licenseType': 'speech-language pathologist', + 'updateType': loaded_record['updateType'], + 'createDate': loaded_record['createDate'], # These two fields should determine the change hash: 'previous': loaded_record['previous'].copy(), 'updatedValues': loaded_record['updatedValues'].copy(), diff --git a/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py 
b/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py index d180a556d..2c8d8b2cd 100644 --- a/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py +++ b/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py @@ -637,7 +637,8 @@ def test_expected_s3_object_stored_when_provider_license_record_reset_to_prior_v 'privilegesReverted': [], 'providerId': self.provider_id, # NOTE: if the test update data is modified, the sha here will need to be updated - 'updatesDeleted': ['aslp#UPDATE#3#license/oh/slp/1761207300/d92450a96739428f1a77c051dce9d4a6'], + 'updatesDeleted': [ + 'aslp#UPDATE#3#license/oh/slp/2025-10-23T07:15:00+00:00/d92450a96739428f1a77c051dce9d4a6'], } ], 'skippedProviderDetails': [], @@ -712,8 +713,9 @@ def test_expected_s3_object_stored_when_provider_privilege_record_reactivated_fr ], 'providerId': self.provider_id, # NOTE: if the test update data is modified, the shas here will need to be updated - 'updatesDeleted': ['aslp#UPDATE#1#privilege/ne/slp/1761207300/06b886756a79b796ad10b17bd67057e6', - 'aslp#UPDATE#3#license/oh/slp/1761207300/d92450a96739428f1a77c051dce9d4a6'], + 'updatesDeleted': [ + 'aslp#UPDATE#1#privilege/ne/slp/2025-10-23T07:15:00+00:00/06b886756a79b796ad10b17bd67057e6', + 'aslp#UPDATE#3#license/oh/slp/2025-10-23T07:15:00+00:00/d92450a96739428f1a77c051dce9d4a6'], } ], 'skippedProviderDetails': [], @@ -1069,14 +1071,16 @@ def test_rollback_handles_pagination_when_provider_id_present_in_event_input(sel 'revisionId': ANY}], 'privilegesReverted': [], 'providerId': mock_first_provider_id, - 'updatesDeleted': ['aslp#UPDATE#3#license/oh/slp/1761207300/d92450a96739428f1a77c051dce9d4a6']}, + 'updatesDeleted': [ + 'aslp#UPDATE#3#license/oh/slp/2025-10-23T07:15:00+00:00/d92450a96739428f1a77c051dce9d4a6']}, {'licensesReverted': [{'action': 'REVERT', 'jurisdiction': 'oh', 'licenseType': 'speech-language 
pathologist', 'revisionId': ANY}], 'privilegesReverted': [], 'providerId': mock_second_provider_id, - 'updatesDeleted': ['aslp#UPDATE#3#license/oh/slp/1761207300/d92450a96739428f1a77c051dce9d4a6']}], + 'updatesDeleted': [ + 'aslp#UPDATE#3#license/oh/slp/2025-10-23T07:15:00+00:00/d92450a96739428f1a77c051dce9d4a6']}], 'skippedProviderDetails': []}, final_results_data) def test_transaction_failure_is_logged_and_provider_marked_as_failed(self): diff --git a/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_handlers/test_registration.py b/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_handlers/test_registration.py index 506983a8e..f0e4ed6ae 100644 --- a/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_handlers/test_registration.py +++ b/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_handlers/test_registration.py @@ -822,6 +822,7 @@ def test_registration_creates_provider_update_record(self, mock_verify_recaptcha self.assertEqual( { 'compact': provider_data.compact, + 'createDate': update_data.createDate, 'dateOfUpdate': datetime.fromisoformat(DEFAULT_DATE_OF_UPDATE_TIMESTAMP), 'previous': { 'compact': provider_data.compact, From b8f801c20dffc5d7e98580c5971531cba329067f Mon Sep 17 00:00:00 2001 From: Landon Shumway Date: Mon, 10 Nov 2025 17:48:10 -0600 Subject: [PATCH 40/81] correct hook logic to set createDate field --- .../common/cc_common/data_model/schema/provider/record.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/provider/record.py b/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/provider/record.py index 2941bfaba..a3edace54 100644 --- a/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/provider/record.py +++ b/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/provider/record.py @@ 
-239,8 +239,8 @@ def populate_create_date_for_backwards_compatibility(self, in_data, **kwargs): For backwards compatibility, populate createDate from dateOfUpdate if createDate is missing. This allows us to load old records that were created before the createDate field was added. """ - if 'createDate' not in in_data and 'updatedValues' in in_data and 'dateOfUpdate' in in_data['updatedValues']: - in_data['createDate'] = in_data['updatedValues']['dateOfUpdate'] + if 'createDate' not in in_data: + in_data['createDate'] = in_data['dateOfUpdate'] return in_data @post_dump # Must be _post_ dump so we have values that are more easily hashed From 31b73395dcbf1798c5980a4032c9352342ca17c9 Mon Sep 17 00:00:00 2001 From: Landon Shumway Date: Mon, 10 Nov 2025 17:49:26 -0600 Subject: [PATCH 41/81] Add migration script for migrating sort keys of update records --- .../migrate-update-sort-keys-1175/main.py | 177 ++++++++++++++++++ 1 file changed, 177 insertions(+) create mode 100644 backend/compact-connect/lambdas/python/migration/migrate-update-sort-keys-1175/main.py diff --git a/backend/compact-connect/lambdas/python/migration/migrate-update-sort-keys-1175/main.py b/backend/compact-connect/lambdas/python/migration/migrate-update-sort-keys-1175/main.py new file mode 100644 index 000000000..af53cd64c --- /dev/null +++ b/backend/compact-connect/lambdas/python/migration/migrate-update-sort-keys-1175/main.py @@ -0,0 +1,177 @@ +from boto3.dynamodb.conditions import Attr +from cc_common.config import config, logger +from cc_common.data_model.provider_record_util import ProviderRecordType, ProviderUpdateData, LicenseUpdateData, PrivilegeUpdateData +from cc_common.exceptions import CCInternalException +from custom_resource_handler import CustomResourceHandler, CustomResourceResponse + + +class UpdateRecordSortKeyMigration(CustomResourceHandler): + """Migration for migrating update record sort keys to support license upload rollbacks""" + + def on_create(self, properties: dict) -> None: + 
do_migration(properties) + + def on_update(self, properties: dict) -> None: + """ + No-op on update. + """ + + def on_delete(self, _properties: dict) -> CustomResourceResponse | None: + """ + No-op on delete. + """ + + +on_event = UpdateRecordSortKeyMigration('update-record-sort-keys') + + +def do_migration(_properties: dict) -> None: + """ + This migration performs the following: + - Scans the provider table for all update records (provider, license, and privilege) + - For each update record, regenerates its deterministic sort key from createDate (backfilled from dateOfUpdate for legacy records) by deleting the old item and writing a new one + - Handles batching for cases where there are more than 100 records to update + """ + logger.info('Starting update record sort key migration') + + # Scan for all update records (provider, license, and privilege) + update_records = [] + scan_pagination = {} + + while True: + response = config.provider_table.scan( + FilterExpression=Attr('type').eq(ProviderRecordType.LICENSE_UPDATE) + | Attr('type').eq(ProviderRecordType.PROVIDER_UPDATE) + | Attr('type').eq(ProviderRecordType.PRIVILEGE_UPDATE), + **scan_pagination, + ) + + + items = response.get('Items', []) + update_records.extend(items) + logger.info(f'Found {len(items)} privilege update records in current scan batch') + + # Check if we need to continue pagination + last_evaluated_key = response.get('LastEvaluatedKey') + if not last_evaluated_key: + break + + scan_pagination = {'ExclusiveStartKey': last_evaluated_key} + + logger.info(f'Found {len(update_records)} total update records to process') + + if not update_records: + logger.info('No update records found, migration complete') + return + + # Process records in batches of 50 (DynamoDB transaction limit is 100 items, + # and each record generates 2 items: 1 put + 1 delete) + batch_size = 50 + + for i in range(0, len(update_records), batch_size): + batch = update_records[i : i + batch_size] + logger.info(f'Processing batch {i // batch_size + 1} with {len(batch)} records') + + _process_batch(batch) + logger.info(f'Processed batch {i // batch_size + 
1}') + + +def _generate_delete_transaction_item(pk: str, sk: str) -> dict: + """ + Generate a delete transaction item for a provider record. + :param pk: The primary key of the provider record + :param sk: The sort key of the provider record + :return: Delete transaction item + """ + return { + 'Delete': { + 'TableName': config.provider_table.table_name, + 'Key': { + 'pk': {'S': pk}, + 'sk': {'S': sk}, + }, + } + } + +def _generate_put_transaction_item(item: dict) -> dict: + """ + Generate a put transaction item for a provider record. + :param item: The provider record to put. + :return: Put transaction item + """ + return { + 'Put': { + 'TableName': config.provider_table.table_name, + 'Item': item, + } + } + +def _generate_transaction_items(update_record: dict) -> list[dict]: + """ + In the case of a provider update record, we add a createDate field based on the dateOfUpdate field. + Then we use the ProviderUpdateData class to serialize the record and return the transaction items. + (one to delete the old record and one to create the new record) + + :param update_record: The provider update record to process + :return: List of transaction items + """ + # grab the old pk and sk from the object + old_pk = update_record['pk'] + old_sk = update_record['sk'] + record_type = update_record.get('type') + if record_type == ProviderRecordType.PROVIDER_UPDATE: + data_class = ProviderUpdateData + elif record_type == ProviderRecordType.LICENSE_UPDATE: + data_class = LicenseUpdateData + elif record_type == ProviderRecordType.PRIVILEGE_UPDATE: + data_class = PrivilegeUpdateData + else: + logger.error("invalid record type found", record_type=record_type, pk=old_pk, sk=old_sk) + raise CCInternalException("invalid record type found") + + # Performing deserialization/serialization on the record, which will generate + # the new pk/sks values we are migrating to. 
+ + update_data = data_class.from_database_record(update_record) + migrated_provider_update_record = update_data.serialize_to_database_record() + + logger.info( + 'Prepared update items for create date', + old_pk=old_pk, + old_sk=old_sk, + updated_pk=migrated_provider_update_record['pk'], + updated_sk=migrated_provider_update_record['sk'], + ) + + # delete old record with old pk/sk, and create new one + return [ + _generate_delete_transaction_item(pk=old_pk, sk=old_sk), + _generate_put_transaction_item(migrated_provider_update_record) + ] + +def _process_batch(update_records: list[dict]) -> None: + """ + Process a batch of privilege update records. + + :param update_records: List of update records to process + """ + transaction_items = [] + + for update_record in update_records: + try: + transaction_items.extend(_generate_transaction_items(update_record)) + except Exception as e: # noqa: BLE001 + logger.error( + 'Error preparing update items for update record, skipping.', + exc_info=e, + pk=update_record.get('pk'), + sk=update_record.get('sk'), + ) + + # Execute the transaction + if transaction_items: + logger.info(f'Executing transaction with {len(transaction_items)} items') + config.provider_table.meta.client.transact_write_items(TransactItems=transaction_items) + logger.info('Transaction completed successfully') + else: + logger.warning('No valid transaction items to process in this batch') From 12247e5ff74f88801a92abad47f47fa1c4662b18 Mon Sep 17 00:00:00 2001 From: Landon Shumway Date: Mon, 10 Nov 2025 20:55:29 -0600 Subject: [PATCH 42/81] shorten smoke test wait time --- .../rollback_license_upload_smoke_tests.py | 52 +++---------------- 1 file changed, 7 insertions(+), 45 deletions(-) diff --git a/backend/compact-connect/tests/smoke/rollback_license_upload_smoke_tests.py b/backend/compact-connect/tests/smoke/rollback_license_upload_smoke_tests.py index c3b61835b..3158dbbcf 100644 --- a/backend/compact-connect/tests/smoke/rollback_license_upload_smoke_tests.py 
+++ b/backend/compact-connect/tests/smoke/rollback_license_upload_smoke_tests.py @@ -117,7 +117,7 @@ def upload_test_licenses(auth_headers: dict, num_licenses: int, batch_size: int) return all_licenses, upload_start_time, upload_end_time -def wait_for_all_providers_created(staff_headers: dict, expected_count: int, max_wait_time: int = 900): +def wait_for_all_providers_created(staff_headers: dict, expected_count: int, max_wait_time: int = 120): """ Wait for all provider records to be created from uploaded licenses. @@ -129,7 +129,7 @@ def wait_for_all_providers_created(staff_headers: dict, expected_count: int, max logger.info(f'Waiting for {expected_count} provider records to be created...') start_time = time.time() - check_interval = 30 + check_interval = 5 # Query using the common family name prefix 'RollbackTest' # The API will return all providers with family names starting with this prefix @@ -140,10 +140,10 @@ def wait_for_all_providers_created(staff_headers: dict, expected_count: int, max }, } + last_key = None + page_num = 1 + all_provider_ids = [] while time.time() - start_time < max_wait_time: - all_provider_ids = [] - last_key = None - page_num = 1 # Collect all providers across all pages while True: @@ -186,7 +186,7 @@ def wait_for_all_providers_created(staff_headers: dict, expected_count: int, max num_found = len(all_provider_ids) logger.info( - f'Found {num_found}/{expected_count} providers with family name prefix "RollbackTest" ' + f'Found {num_found}/{expected_count} providers with family name "RollbackTest" ' f'(across {page_num} pages)' ) @@ -200,45 +200,7 @@ def wait_for_all_providers_created(staff_headers: dict, expected_count: int, max time.sleep(check_interval) # Timeout reached - make one final query to get the latest results - logger.warning(f'Timeout reached after {max_wait_time}s. 
Making final query to collect all available providers.') - - all_provider_ids = [] - last_key = None - page_num = 1 - - while True: - query_body = base_query_body.copy() - if last_key: - query_body['pagination']['lastKey'] = last_key - - query_response = requests.post( - url=f'{get_api_base_url()}/v1/compacts/{COMPACT}/providers/query', - headers=staff_headers, - json=query_body, - timeout=30, - ) - - if query_response.status_code != 200: - logger.warning(f'Final query failed with status {query_response.status_code}') - break - - response_data = query_response.json() - providers = response_data.get('providers', []) - pagination = response_data.get('pagination', {}) - - page_provider_ids = [p['providerId'] for p in providers] - all_provider_ids.extend(page_provider_ids) - - logger.info(f'Final query page {page_num}: Found {len(page_provider_ids)} providers') - - last_key = pagination.get('lastKey') - if not last_key: - break - - page_num += 1 - - logger.warning(f'Final count: {len(all_provider_ids)}/{expected_count} providers found') - return all_provider_ids + raise SmokeTestFailureException(f'Timeout reached waiting for providers after {max_wait_time}s.') def start_rollback_step_function( From 6e03c4584d005b82e219a55b64fb324af81da87c Mon Sep 17 00:00:00 2001 From: Landon Shumway Date: Tue, 11 Nov 2025 10:38:38 -0600 Subject: [PATCH 43/81] Add tests for migration to verify sort keys are migrated as expected --- .../main.py | 18 +-- .../python/migration/tests/__init__.py | 86 ++++++++++++- .../migration/tests/function/__init__.py | 96 +++++++++++++++ .../function/test_migrate_update_sort_keys.py | 113 ++++++++++++++++++ 4 files changed, 302 insertions(+), 11 deletions(-) rename backend/compact-connect/lambdas/python/migration/{migrate-update-sort-keys-1175 => migrate_update_sort_keys}/main.py (91%) create mode 100644 backend/compact-connect/lambdas/python/migration/tests/function/__init__.py create mode 100644 
backend/compact-connect/lambdas/python/migration/tests/function/test_migrate_update_sort_keys.py diff --git a/backend/compact-connect/lambdas/python/migration/migrate-update-sort-keys-1175/main.py b/backend/compact-connect/lambdas/python/migration/migrate_update_sort_keys/main.py similarity index 91% rename from backend/compact-connect/lambdas/python/migration/migrate-update-sort-keys-1175/main.py rename to backend/compact-connect/lambdas/python/migration/migrate_update_sort_keys/main.py index af53cd64c..63bfc9a00 100644 --- a/backend/compact-connect/lambdas/python/migration/migrate-update-sort-keys-1175/main.py +++ b/backend/compact-connect/lambdas/python/migration/migrate_update_sort_keys/main.py @@ -87,8 +87,8 @@ def _generate_delete_transaction_item(pk: str, sk: str) -> dict: 'Delete': { 'TableName': config.provider_table.table_name, 'Key': { - 'pk': {'S': pk}, - 'sk': {'S': sk}, + 'pk': pk, + 'sk': sk, }, } } @@ -106,19 +106,19 @@ def _generate_put_transaction_item(item: dict) -> dict: } } -def _generate_transaction_items(update_record: dict) -> list[dict]: +def _generate_transaction_items(original_update_record: dict) -> list[dict]: """ In the case of a provider update record, we add a createDate field based on the dateOfUpdate field. Then we use the ProviderUpdateData class to serialize the record and return the transaction items. 
(one to delete the old record and one to create the new record) - :param update_record: The provider update record to process + :param original_update_record: The provider update record to process :return: List of transaction items """ # grab the old pk and sk from the object - old_pk = update_record['pk'] - old_sk = update_record['sk'] - record_type = update_record.get('type') + old_pk = original_update_record['pk'] + old_sk = original_update_record['sk'] + record_type = original_update_record.get('type') if record_type == ProviderRecordType.PROVIDER_UPDATE: data_class = ProviderUpdateData elif record_type == ProviderRecordType.LICENSE_UPDATE: @@ -132,8 +132,10 @@ def _generate_transaction_items(update_record: dict) -> list[dict]: # Performing deserialization/serialization on the record, which will generate # the new pk/sks values we are migrating to. - update_data = data_class.from_database_record(update_record) + update_data = data_class.from_database_record(original_update_record) migrated_provider_update_record = update_data.serialize_to_database_record() + # retain original dateOfUpdate value + migrated_provider_update_record['dateOfUpdate'] = original_update_record['dateOfUpdate'] logger.info( 'Prepared update items for create date', diff --git a/backend/compact-connect/lambdas/python/migration/tests/__init__.py b/backend/compact-connect/lambdas/python/migration/tests/__init__.py index aa3942fac..9d5096a43 100644 --- a/backend/compact-connect/lambdas/python/migration/tests/__init__.py +++ b/backend/compact-connect/lambdas/python/migration/tests/__init__.py @@ -1,3 +1,4 @@ +import json import os from unittest import TestCase from unittest.mock import MagicMock @@ -11,11 +12,90 @@ def setUpClass(cls): os.environ.update( { # Set to 'true' to enable debug logging - 'DEBUG': 'false', + 'DEBUG': 'true', + 'ALLOWED_ORIGINS': '["https://example.org"]', 'AWS_DEFAULT_REGION': 'us-east-1', - 'COMPACTS': '["aslp", "octp", "coun"]', - 'JURISDICTIONS': '["oh", "ky", 
"ne"]', + 'ROLLBACK_RESULTS_BUCKET_NAME': 'rollback-results-bucket', + 'EVENT_BUS_NAME': 'license-data-events', + 'PROVIDER_TABLE_NAME': 'provider-table', + 'RATE_LIMITING_TABLE_NAME': 'rate-limiting-table', + 'SSN_TABLE_NAME': 'ssn-table', 'COMPACT_CONFIGURATION_TABLE_NAME': 'compact-configuration-table', + 'ENVIRONMENT_NAME': 'test', + 'PROV_FAM_GIV_MID_INDEX_NAME': 'providerFamGivMid', + 'FAM_GIV_INDEX_NAME': 'famGiv', + 'LICENSE_GSI_NAME': 'licenseGSI', + 'PROV_DATE_OF_UPDATE_INDEX_NAME': 'providerDateOfUpdate', + 'SSN_INDEX_NAME': 'ssnIndex', + 'COMPACTS': '["aslp", "octp", "coun"]', + 'JURISDICTIONS': json.dumps( + [ + 'al', + 'ak', + 'az', + 'ar', + 'ca', + 'co', + 'ct', + 'de', + 'dc', + 'fl', + 'ga', + 'hi', + 'id', + 'il', + 'in', + 'ia', + 'ks', + 'ky', + 'la', + 'me', + 'md', + 'ma', + 'mi', + 'mn', + 'ms', + 'mo', + 'mt', + 'ne', + 'nv', + 'nh', + 'nj', + 'nm', + 'ny', + 'nc', + 'nd', + 'oh', + 'ok', + 'or', + 'pa', + 'pr', + 'ri', + 'sc', + 'sd', + 'tn', + 'tx', + 'ut', + 'vt', + 'va', + 'vi', + 'wa', + 'wv', + 'wi', + 'wy', + ] + ), + 'LICENSE_TYPES': json.dumps( + { + 'aslp': [ + {'name': 'audiologist', 'abbreviation': 'aud'}, + {'name': 'speech-language pathologist', 'abbreviation': 'slp'}, + ], + 'coun': [ + {"name": "licensed professional counselor", "abbreviation": "lpc"}, + ] + }, + ), }, ) # Monkey-patch config object to be sure we have it based diff --git a/backend/compact-connect/lambdas/python/migration/tests/function/__init__.py b/backend/compact-connect/lambdas/python/migration/tests/function/__init__.py new file mode 100644 index 000000000..ceb4e12eb --- /dev/null +++ b/backend/compact-connect/lambdas/python/migration/tests/function/__init__.py @@ -0,0 +1,96 @@ +import logging +import os + +import boto3 +from moto import mock_aws + +from tests import TstLambdas + +logger = logging.getLogger(__name__) +logging.basicConfig() +logger.setLevel(logging.DEBUG if os.environ.get('DEBUG', 'false') == 'true' else logging.INFO) + + +@mock_aws 
+class TstFunction(TstLambdas): + """Base class to set up Moto mocking and create mock AWS resources for functional testing""" + + def setUp(self): # noqa: N801 invalid-name + super().setUp() + self.build_resources() + + # these must be imported within the tests, since they import modules which require + # environment variables that are not set until the TstLambdas class is initialized + import cc_common.config + from common_test.test_data_generator import TestDataGenerator + + cc_common.config.config = cc_common.config._Config() # noqa: SLF001 protected-access + self.config = cc_common.config.config + self.test_data_generator = TestDataGenerator + + self.addCleanup(self.delete_resources) + + def build_resources(self): + # in the case of DR, the lambda sync solution should be table agnostic, since we are performing the same + # cleanup and restoration process regardless of the table that is being recovered + self.create_provider_table() + + def create_provider_table(self): + self._provider_table = boto3.resource('dynamodb').create_table( + AttributeDefinitions=[ + {'AttributeName': 'pk', 'AttributeType': 'S'}, + {'AttributeName': 'sk', 'AttributeType': 'S'}, + {'AttributeName': 'providerFamGivMid', 'AttributeType': 'S'}, + {'AttributeName': 'providerDateOfUpdate', 'AttributeType': 'S'}, + {'AttributeName': 'licenseGSIPK', 'AttributeType': 'S'}, + {'AttributeName': 'licenseGSISK', 'AttributeType': 'S'}, + {'AttributeName': 'licenseUploadDateGSIPK', 'AttributeType': 'S'}, + {'AttributeName': 'licenseUploadDateGSISK', 'AttributeType': 'S'}, + ], + TableName=os.environ['PROVIDER_TABLE_NAME'], + KeySchema=[{'AttributeName': 'pk', 'KeyType': 'HASH'}, {'AttributeName': 'sk', 'KeyType': 'RANGE'}], + BillingMode='PAY_PER_REQUEST', + GlobalSecondaryIndexes=[ + { + 'IndexName': os.environ['PROV_FAM_GIV_MID_INDEX_NAME'], + 'KeySchema': [ + {'AttributeName': 'sk', 'KeyType': 'HASH'}, + {'AttributeName': 'providerFamGivMid', 'KeyType': 'RANGE'}, + ], + 'Projection': 
{'ProjectionType': 'ALL'}, + }, + { + 'IndexName': os.environ['PROV_DATE_OF_UPDATE_INDEX_NAME'], + 'KeySchema': [ + {'AttributeName': 'sk', 'KeyType': 'HASH'}, + {'AttributeName': 'providerDateOfUpdate', 'KeyType': 'RANGE'}, + ], + 'Projection': {'ProjectionType': 'ALL'}, + }, + { + 'IndexName': os.environ['LICENSE_GSI_NAME'], + 'KeySchema': [ + {'AttributeName': 'licenseGSIPK', 'KeyType': 'HASH'}, + {'AttributeName': 'licenseGSISK', 'KeyType': 'RANGE'}, + ], + 'Projection': {'ProjectionType': 'ALL'}, + }, + { + 'IndexName': 'licenseUploadDateGSI', + 'KeySchema': [ + {'AttributeName': 'licenseUploadDateGSIPK', 'KeyType': 'HASH'}, + {'AttributeName': 'licenseUploadDateGSISK', 'KeyType': 'RANGE'}, + ], + 'Projection': { + 'ProjectionType': 'INCLUDE', + 'NonKeyAttributes': [ + 'providerId', + ], + }, + }, + ], + ) + + def delete_resources(self): + self._provider_table.delete() + diff --git a/backend/compact-connect/lambdas/python/migration/tests/function/test_migrate_update_sort_keys.py b/backend/compact-connect/lambdas/python/migration/tests/function/test_migrate_update_sort_keys.py new file mode 100644 index 000000000..16bce90a5 --- /dev/null +++ b/backend/compact-connect/lambdas/python/migration/tests/function/test_migrate_update_sort_keys.py @@ -0,0 +1,113 @@ +from datetime import datetime +from moto import mock_aws +from unittest.mock import patch + +from common_test.test_constants import DEFAULT_PROVIDER_UPDATE_DATETIME, DEFAULT_LICENSE_UPDATE_DATETIME, \ + DEFAULT_LICENSE_UPDATE_CREATE_DATE, DEFAULT_LICENSE_JURISDICTION, DEFAULT_PRIVILEGE_JURISDICTION, \ + DEFAULT_PRIVILEGE_UPDATE_DATETIME + +from . 
import TstFunction + +MOCK_DATETIME_STRING = '2025-10-23T08:15:00+00:00' +MOCK_COMPACT = 'coun' +MOCK_PROVIDER_ID = '01d67765-76dd-47c8-b39a-8389445bb3b7' + + +@mock_aws +@patch('cc_common.config._Config.current_standard_datetime', datetime.fromisoformat(MOCK_DATETIME_STRING)) +class TestMigrateUpdateSortKeys(TstFunction): + """Test class for migrating update record sort keys.""" + + def test_should_migrate_provider_update_records_to_expected_pattern(self): + from migrate_update_sort_keys.main import do_migration + + old_provider_update_record = self.test_data_generator.generate_default_provider_update(value_overrides={ + 'compact': MOCK_COMPACT, + 'providerId': MOCK_PROVIDER_ID + }) + serialized_old_record = old_provider_update_record.serialize_to_database_record() + # replace sk with old pattern to simulate old record to be migrated + serialized_old_record['sk'] = 'aslp#PROVIDER#UPDATE#1752526787/2f429ccda22d273b1ee4876f2917e27f' + del serialized_old_record['createDate'] + serialized_old_record['dateOfUpdate'] = DEFAULT_PROVIDER_UPDATE_DATETIME + self.config.provider_table.put_item(Item=serialized_old_record) + + # run migration + do_migration({}) + + # verify old record was deleted + old_record_resp = self.config.provider_table.get_item(Key={'pk': serialized_old_record['pk'], 'sk': serialized_old_record['sk']}) + self.assertIsNone(old_record_resp.get('Item')) + + # verify new record was created with expected sk + expected_sk = f'{MOCK_COMPACT}#UPDATE#2#provider/{DEFAULT_PROVIDER_UPDATE_DATETIME}/2f429ccda22d273b1ee4876f2917e27f' + new_record = self.config.provider_table.get_item(Key={'pk': serialized_old_record['pk'], 'sk': expected_sk})['Item'] + + serialized_old_record['sk'] = expected_sk + # as part of migration, the createDate field will be populated with whatever the dateOfUpdate was + # so we expect that here + serialized_old_record['createDate'] = DEFAULT_PROVIDER_UPDATE_DATETIME + # only the sort key and the createDate should have been modified + 
self.assertEqual(serialized_old_record, new_record) + + + def test_should_migrate_license_update_records_to_expected_pattern(self): + from migrate_update_sort_keys.main import do_migration + + old_license_update_record = self.test_data_generator.generate_default_license_update(value_overrides={ + 'compact': MOCK_COMPACT, + 'providerId': MOCK_PROVIDER_ID, + 'licenseType': 'licensed professional counselor' + }) + serialized_old_record = old_license_update_record.serialize_to_database_record() + # replace sk with old pattern to simulate old record to be migrated + serialized_old_record['sk'] = f'{MOCK_COMPACT}#PROVIDER#license/{DEFAULT_LICENSE_JURISDICTION}/lpc#UPDATE#1752526787/21554583eb71ccc5f8aa5988c8a50ac2' + serialized_old_record['dateOfUpdate'] = DEFAULT_LICENSE_UPDATE_DATETIME + self.config.provider_table.put_item(Item=serialized_old_record) + + # run migration + do_migration({}) + + # verify old record was deleted + old_record_resp = self.config.provider_table.get_item(Key={'pk': serialized_old_record['pk'], 'sk': serialized_old_record['sk']}) + self.assertIsNone(old_record_resp.get('Item')) + + # verify new record was created with expected sk + expected_sk = f'{MOCK_COMPACT}#UPDATE#3#license/{DEFAULT_LICENSE_JURISDICTION}/lpc/{DEFAULT_LICENSE_UPDATE_CREATE_DATE}/21554583eb71ccc5f8aa5988c8a50ac2' + new_record = self.config.provider_table.get_item(Key={'pk': serialized_old_record['pk'], 'sk': expected_sk})['Item'] + serialized_old_record['sk'] = expected_sk + # nothing on the record should have changed other than the sort key + self.assertEqual(serialized_old_record, new_record) + + def test_should_migrate_privilege_update_records_to_expected_pattern(self): + from migrate_update_sort_keys.main import do_migration + + mock_create_date = '2025-07-07T07:07:07+00:00' + + old_privilege_update_record = self.test_data_generator.generate_default_privilege_update(value_overrides={ + 'compact': MOCK_COMPACT, + 'providerId': MOCK_PROVIDER_ID, + 'licenseType': 'licensed 
professional counselor', + 'createDate': datetime.fromisoformat(mock_create_date) + }) + serialized_old_record = old_privilege_update_record.serialize_to_database_record() + # replace sk with old pattern to simulate old record to be migrated + serialized_old_record['sk'] = f'{MOCK_COMPACT}#PROVIDER#privilege/{DEFAULT_PRIVILEGE_JURISDICTION}/lpc#UPDATE#1752526787/399abde0989ad5e936920a3ba9f0944a' + serialized_old_record['dateOfUpdate'] = DEFAULT_PRIVILEGE_UPDATE_DATETIME + self.config.provider_table.put_item(Item=serialized_old_record) + + # run migration + do_migration({}) + + # verify old record was deleted + old_record_resp = self.config.provider_table.get_item(Key={'pk': serialized_old_record['pk'], 'sk': serialized_old_record['sk']}) + self.assertIsNone(old_record_resp.get('Item')) + + # verify new record was created with expected sk + expected_sk = f'{MOCK_COMPACT}#UPDATE#1#privilege/{DEFAULT_PRIVILEGE_JURISDICTION}/lpc/{mock_create_date}/399abde0989ad5e936920a3ba9f0944a' + new_record = self.config.provider_table.get_item(Key={'pk': serialized_old_record['pk'], 'sk': expected_sk})['Item'] + serialized_old_record['sk'] = expected_sk + # nothing on the record should have changed other than the sort key + self.assertEqual(serialized_old_record, new_record) + + From a228f4d0d774ef0fee59bf70639f23fd62d943a3 Mon Sep 17 00:00:00 2001 From: Landon Shumway Date: Tue, 11 Nov 2025 11:42:47 -0600 Subject: [PATCH 44/81] Add data migration stack to run migration scripts --- .../compact-connect/pipeline/backend_stage.py | 17 ++++++ .../stacks/data_migration_stack/__init__.py | 58 +++++++++++++++++++ 2 files changed, 75 insertions(+) create mode 100644 backend/compact-connect/stacks/data_migration_stack/__init__.py diff --git a/backend/compact-connect/pipeline/backend_stage.py b/backend/compact-connect/pipeline/backend_stage.py index 318dfc887..8e19aff22 100644 --- a/backend/compact-connect/pipeline/backend_stage.py +++ b/backend/compact-connect/pipeline/backend_stage.py @@ 
-4,6 +4,7 @@ from stacks.api_lambda_stack import ApiLambdaStack from stacks.api_stack import ApiStack +from stacks.data_migration_stack import DataMigrationStack from stacks.disaster_recovery_stack import DisasterRecoveryStack from stacks.event_listener_stack import EventListenerStack from stacks.event_state_stack import EventStateStack @@ -203,3 +204,19 @@ def __init__( environment_context=environment_context, standard_tags=standard_tags, ) + + # Stack to house data migration custom resources + # This stack depends on the API Lambda stack to ensure + # all infrastructure is in place before migrations run + self.data_migration_stack = DataMigrationStack( + self, + 'DataMigrationStack', + env=environment, + environment_name=environment_name, + environment_context=environment_context, + standard_tags=standard_tags, + persistent_stack=self.persistent_stack, + api_stack=self.api_stack, + ) + # Explicitly declare the dependency to ensure proper deployment order + self.data_migration_stack.add_dependency(self.api_stack) diff --git a/backend/compact-connect/stacks/data_migration_stack/__init__.py b/backend/compact-connect/stacks/data_migration_stack/__init__.py new file mode 100644 index 000000000..bd4ae69e4 --- /dev/null +++ b/backend/compact-connect/stacks/data_migration_stack/__init__.py @@ -0,0 +1,58 @@ +from cdk_nag import NagSuppressions +from common_constructs.stack import AppStack +from constructs import Construct + +from common_constructs.data_migration import DataMigration +from stacks import persistent_stack as ps +from stacks.api_stack import ApiStack + + +class DataMigrationStack(AppStack): + """ + Stack to house data migration custom resources that run scripts to perform data migrations. + This stack should be deployed after other infrastructure stacks are in place. 
+ """ + + def __init__( + self, + scope: Construct, + construct_id: str, + *, + environment_name: str, + environment_context: dict, + persistent_stack: ps.PersistentStack, + api_stack: ApiStack, + **kwargs, + ): + super().__init__( + scope, construct_id, environment_context=environment_context, environment_name=environment_name, **kwargs + ) + + # Reference dummy env var from the API Lambda stack to create a CloudFormation dependency + # This ensures the lambdas/API endpoints are fully deployed before migrations run + common_env_vars = self.common_env_vars + common_env_vars['apiStack'] = api_stack.stack_name + + update_sort_keys_migration = DataMigration( + self, + 'MigrateUpdateSortKeys', + migration_dir='migrate_update_sort_keys', + lambda_environment={ + 'PROVIDER_TABLE_NAME': persistent_stack.provider_table.table_name, + **common_env_vars, + }, + ) + persistent_stack.shared_encryption_key.grant_encrypt_decrypt(update_sort_keys_migration) + persistent_stack.provider_table.grant_read_write_data(update_sort_keys_migration) + NagSuppressions.add_resource_suppressions_by_path( + self, + f'{update_sort_keys_migration.migration_function.node.path}/ServiceRole/DefaultPolicy/Resource', + suppressions=[ + { + 'id': 'AwsSolutions-IAM5', + 'reason': 'This policy contains wild-carded actions and resources but they are scoped to the ' + 'specific actions, Table and Key that this lambda needs access to in order to perform the' + 'migration.', + }, + ], + ) From d2dd18926ac5b12b8657fe99972e4e03b45ccc72 Mon Sep 17 00:00:00 2001 From: Landon Shumway Date: Tue, 11 Nov 2025 12:43:28 -0600 Subject: [PATCH 45/81] Remove unneeded stack reference --- backend/compact-connect/pipeline/backend_stage.py | 1 - .../stacks/data_migration_stack/__init__.py | 11 ++--------- 2 files changed, 2 insertions(+), 10 deletions(-) diff --git a/backend/compact-connect/pipeline/backend_stage.py b/backend/compact-connect/pipeline/backend_stage.py index 8e19aff22..edf24f9f1 100644 --- 
a/backend/compact-connect/pipeline/backend_stage.py +++ b/backend/compact-connect/pipeline/backend_stage.py @@ -216,7 +216,6 @@ def __init__( environment_context=environment_context, standard_tags=standard_tags, persistent_stack=self.persistent_stack, - api_stack=self.api_stack, ) # Explicitly declare the dependency to ensure proper deployment order self.data_migration_stack.add_dependency(self.api_stack) diff --git a/backend/compact-connect/stacks/data_migration_stack/__init__.py b/backend/compact-connect/stacks/data_migration_stack/__init__.py index bd4ae69e4..9279b9d14 100644 --- a/backend/compact-connect/stacks/data_migration_stack/__init__.py +++ b/backend/compact-connect/stacks/data_migration_stack/__init__.py @@ -4,7 +4,6 @@ from common_constructs.data_migration import DataMigration from stacks import persistent_stack as ps -from stacks.api_stack import ApiStack class DataMigrationStack(AppStack): @@ -21,32 +20,26 @@ def __init__( environment_name: str, environment_context: dict, persistent_stack: ps.PersistentStack, - api_stack: ApiStack, **kwargs, ): super().__init__( scope, construct_id, environment_context=environment_context, environment_name=environment_name, **kwargs ) - # Reference dummy env var from the API Lambda stack to create a CloudFormation dependency - # This ensures the lambdas/API endpoints are fully deployed before migrations run - common_env_vars = self.common_env_vars - common_env_vars['apiStack'] = api_stack.stack_name - update_sort_keys_migration = DataMigration( self, 'MigrateUpdateSortKeys', migration_dir='migrate_update_sort_keys', lambda_environment={ 'PROVIDER_TABLE_NAME': persistent_stack.provider_table.table_name, - **common_env_vars, + **self.common_env_vars, }, ) persistent_stack.shared_encryption_key.grant_encrypt_decrypt(update_sort_keys_migration) persistent_stack.provider_table.grant_read_write_data(update_sort_keys_migration) NagSuppressions.add_resource_suppressions_by_path( self, - 
f'{update_sort_keys_migration.migration_function.node.path}/ServiceRole/DefaultPolicy/Resource', + f'{update_sort_keys_migration.migration_function.role.node.path}/DefaultPolicy/Resource', suppressions=[ { 'id': 'AwsSolutions-IAM5', From a91a952d7f97504fa95bb8f72e3a4cd07c563025 Mon Sep 17 00:00:00 2001 From: Landon Shumway Date: Tue, 11 Nov 2025 14:31:35 -0600 Subject: [PATCH 46/81] update investigation tests to not peek at update sort keys --- .../common/tests/function/test_data_client.py | 170 +++++++++--------- 1 file changed, 82 insertions(+), 88 deletions(-) diff --git a/backend/compact-connect/lambdas/python/common/tests/function/test_data_client.py b/backend/compact-connect/lambdas/python/common/tests/function/test_data_client.py index e5de9510e..ecac26f82 100644 --- a/backend/compact-connect/lambdas/python/common/tests/function/test_data_client.py +++ b/backend/compact-connect/lambdas/python/common/tests/function/test_data_client.py @@ -1,9 +1,11 @@ import json from datetime import UTC, date, datetime -from unittest.mock import patch +from unittest.mock import patch, ANY from uuid import UUID, uuid4 from boto3.dynamodb.conditions import Key + +from cc_common.data_model.update_tier_enum import UpdateTierEnum from cc_common.exceptions import CCAwsServiceException, CCInvalidRequestException from common_test.test_constants import DEFAULT_PROVIDER_ID from moto import mock_aws @@ -1107,10 +1109,13 @@ def test_create_privilege_investigation_success(self): client.create_investigation(investigation) # Verify investigation record was created - investigation_records = self.config.provider_table.query( - KeyConditionExpression=Key('pk').eq(f'aslp#PROVIDER#{provider_id}') - & Key('sk').begins_with('aslp#PROVIDER#privilege/ne/slp#INVESTIGATION#') - )['Items'] + provider_user_records = self.config.data_client.get_provider_user_records( + compact='aslp', provider_id=provider_id, include_update_tier=UpdateTierEnum.TIER_THREE + ) + investigation_records = 
provider_user_records.get_investigation_records_for_privilege( + privilege_jurisdiction='ne', + privilege_license_type_abbreviation='slp', + ) self.assertEqual(1, len(investigation_records)) investigation_record = investigation_records[0] @@ -1128,27 +1133,23 @@ def test_create_privilege_investigation_success(self): 'investigationId': str(investigation.investigationId), 'submittingUser': str(investigation.submittingUser), 'creationDate': investigation.creationDate.isoformat(), + 'dateOfUpdate': ANY, } # Pop dynamic fields that we don't want to assert on - investigation_record.pop('dateOfUpdate') - - self.assertEqual(expected_investigation, investigation_record) + self.assertEqual(expected_investigation, investigation_record.serialize_to_database_record()) # Verify privilege record was updated with investigation status - privilege_records = self.config.provider_table.query( - KeyConditionExpression=Key('pk').eq(f'aslp#PROVIDER#{provider_id}') - & Key('sk').eq('aslp#PROVIDER#privilege/ne/slp#') - )['Items'] + privilege_records = provider_user_records.get_privilege_records() self.assertEqual(1, len(privilege_records)) privilege_record = privilege_records[0] - self.assertEqual('underInvestigation', privilege_record['investigationStatus']) + self.assertEqual('underInvestigation', privilege_record.investigationStatus) # Verify update record was created - update_records = self.config.provider_table.query( - KeyConditionExpression=Key('pk').eq(f'aslp#PROVIDER#{provider_id}') - & Key('sk').begins_with('aslp#PROVIDER#privilege/ne/slp#UPDATE#') - )['Items'] + update_records = provider_user_records.get_update_records_for_privilege( + jurisdiction=privilege_record.jurisdiction, + license_type=privilege_record.licenseType, + ) self.assertEqual(1, len(update_records)) update_record = update_records[0] @@ -1156,6 +1157,7 @@ def test_create_privilege_investigation_success(self): # Verify the complete update record structure expected_update = { 'pk': f'aslp#PROVIDER#{provider_id}', 
+ 'sk': ANY, 'compactTransactionIdGSIPK': 'COMPACT#aslp#TX#1234567890#', 'type': 'privilegeUpdate', 'updateType': 'investigation', @@ -1182,12 +1184,10 @@ def test_create_privilege_investigation_success(self): 'investigationDetails': { 'investigationId': str(investigation.investigationId), }, + 'dateOfUpdate': ANY, } - # Pop dynamic fields that we don't want to assert on - update_record.pop('dateOfUpdate') - update_record.pop('sk') - self.assertEqual(expected_update, update_record) + self.assertEqual(expected_update, update_record.serialize_to_database_record()) def test_create_license_investigation_success(self): """Test successful creation of license investigation""" @@ -1218,10 +1218,13 @@ def test_create_license_investigation_success(self): client.create_investigation(investigation) # Verify investigation record was created - investigation_records = self.config.provider_table.query( - KeyConditionExpression=Key('pk').eq(f'aslp#PROVIDER#{provider_id}') - & Key('sk').begins_with('aslp#PROVIDER#license/oh/slp#INVESTIGATION#') - )['Items'] + provider_user_records = self.config.data_client.get_provider_user_records( + compact='aslp', provider_id=provider_id, include_update_tier=UpdateTierEnum.TIER_THREE + ) + investigation_records = provider_user_records.get_investigation_records_for_license( + license_jurisdiction='oh', + license_type_abbreviation='slp', + ) self.assertEqual(1, len(investigation_records)) investigation_record = investigation_records[0] @@ -1239,27 +1242,23 @@ def test_create_license_investigation_success(self): 'investigationId': str(investigation.investigationId), 'submittingUser': str(investigation.submittingUser), 'creationDate': investigation.creationDate.isoformat(), + 'dateOfUpdate': ANY, } - # Pop dynamic fields that we don't want to assert on - investigation_record.pop('dateOfUpdate') - self.assertEqual(expected_investigation, investigation_record) + self.assertEqual(expected_investigation, 
investigation_record.serialize_to_database_record()) # Verify license record was updated with investigation status - license_records = self.config.provider_table.query( - KeyConditionExpression=Key('pk').eq(f'aslp#PROVIDER#{provider_id}') - & Key('sk').eq('aslp#PROVIDER#license/oh/slp#') - )['Items'] + license_records = provider_user_records.get_license_records() self.assertEqual(1, len(license_records)) license_record = license_records[0] - self.assertEqual('underInvestigation', license_record['investigationStatus']) + self.assertEqual('underInvestigation', license_record.investigationStatus) # Verify update record was created - update_records = self.config.provider_table.query( - KeyConditionExpression=Key('pk').eq(f'aslp#PROVIDER#{provider_id}') - & Key('sk').begins_with('aslp#PROVIDER#license/oh/slp#UPDATE#') - )['Items'] + update_records = provider_user_records.get_update_records_for_license( + jurisdiction=license_record.jurisdiction, + license_type=license_record.licenseType, + ) self.assertEqual(1, len(update_records)) update_record = update_records[0] @@ -1267,6 +1266,7 @@ def test_create_license_investigation_success(self): # Verify the complete update record structure expected_update = { 'pk': f'aslp#PROVIDER#{provider_id}', + 'sk': ANY, 'type': 'licenseUpdate', 'updateType': 'investigation', 'compact': 'aslp', @@ -1304,12 +1304,10 @@ def test_create_license_investigation_success(self): 'investigationDetails': { 'investigationId': str(investigation.investigationId), }, + 'dateOfUpdate': ANY, } - # Pop dynamic fields that we don't want to assert on - update_record.pop('dateOfUpdate') - update_record.pop('sk') - self.assertEqual(expected_update, update_record) + self.assertEqual(expected_update, update_record.serialize_to_database_record()) def test_create_privilege_investigation_privilege_not_found(self): """Test creation of privilege investigation when privilege doesn't exist""" @@ -1417,10 +1415,14 @@ def 
test_close_privilege_investigation_success(self): ) # Verify investigation record was updated with close information - investigation_records = self.config.provider_table.query( - KeyConditionExpression=Key('pk').eq(f'aslp#PROVIDER#{provider_id}') - & Key('sk').begins_with('aslp#PROVIDER#privilege/ne/slp#INVESTIGATION#') - )['Items'] + provider_user_records = self.config.data_client.get_provider_user_records( + compact='aslp', provider_id=provider_id, include_update_tier=UpdateTierEnum.TIER_THREE + ) + investigation_records = provider_user_records.get_investigation_records_for_privilege( + privilege_jurisdiction='ne', + privilege_license_type_abbreviation='slp', + include_closed=True + ) self.assertEqual(1, len(investigation_records)) investigation_record = investigation_records[0] @@ -1440,27 +1442,21 @@ def test_close_privilege_investigation_success(self): 'creationDate': investigation.creationDate.isoformat(), 'closeDate': investigation.creationDate.isoformat(), 'closingUser': closing_user, + 'dateOfUpdate': ANY, } - # Pop dynamic fields that we don't want to assert on - investigation_record.pop('dateOfUpdate') - - self.assertEqual(expected_investigation_close, investigation_record) + self.assertEqual(expected_investigation_close, investigation_record.serialize_to_database_record()) # Verify privilege record no longer has investigation status - privilege_records = self.config.provider_table.query( - KeyConditionExpression=Key('pk').eq(f'aslp#PROVIDER#{provider_id}') - & Key('sk').eq('aslp#PROVIDER#privilege/ne/slp#') - )['Items'] - + privilege_records = provider_user_records.get_privilege_records() self.assertEqual(1, len(privilege_records)) privilege_record = privilege_records[0] - self.assertNotIn('investigationStatus', privilege_record) + self.assertIsNone(privilege_record.investigationStatus) # Verify update record was created for closure - update_records = self.config.provider_table.query( - KeyConditionExpression=Key('pk').eq(f'aslp#PROVIDER#{provider_id}') 
- & Key('sk').begins_with('aslp#PROVIDER#privilege/ne/slp#UPDATE#') - )['Items'] + update_records = provider_user_records.get_update_records_for_privilege( + jurisdiction='ne', + license_type=privilege_record.licenseType, + ) # Should have 2 update records: one for creation, one for closure self.assertEqual(2, len(update_records)) @@ -1468,7 +1464,7 @@ def test_close_privilege_investigation_success(self): # Find the closure update record closure_update = None for update_record in update_records: - if update_record.get('updateType') == 'closingInvestigation': + if update_record.updateType == 'closingInvestigation': closure_update = update_record break @@ -1477,6 +1473,7 @@ def test_close_privilege_investigation_success(self): # Verify the complete closure update record structure expected_closure_update = { 'pk': f'aslp#PROVIDER#{provider_id}', + 'sk': ANY, 'type': 'privilegeUpdate', 'updateType': 'closingInvestigation', 'compact': 'aslp', @@ -1499,15 +1496,11 @@ def test_close_privilege_investigation_success(self): }, 'updatedValues': {}, 'removedValues': ['investigationStatus'], + 'dateOfUpdate': ANY, + 'compactTransactionIdGSIPK': ANY } - # Pop dynamic fields that we don't want to assert on - closure_update.pop('dateOfUpdate') - closure_update.pop('sk') - # Only pop compactTransactionIdGSIPK if it exists - if 'compactTransactionIdGSIPK' in closure_update: - closure_update.pop('compactTransactionIdGSIPK') - self.assertEqual(expected_closure_update, closure_update) + self.assertEqual(expected_closure_update, closure_update.serialize_to_database_record()) def test_close_license_investigation_success(self): """Test successful closing of license investigation""" @@ -1551,11 +1544,17 @@ def test_close_license_investigation_success(self): investigation_against=InvestigationAgainstEnum.LICENSE, ) + # grab all provider records to make assertions + provider_user_records = self.config.data_client.get_provider_user_records( + compact='aslp', provider_id=provider_id, 
include_update_tier=UpdateTierEnum.TIER_THREE + ) + # Verify investigation record was updated with close information - investigation_records = self.config.provider_table.query( - KeyConditionExpression=Key('pk').eq(f'aslp#PROVIDER#{provider_id}') - & Key('sk').begins_with('aslp#PROVIDER#license/oh/slp#INVESTIGATION#') - )['Items'] + investigation_records = provider_user_records.get_investigation_records_for_license( + license_jurisdiction='oh', + license_type_abbreviation='slp', + include_closed=True + ) self.assertEqual(1, len(investigation_records)) investigation_record = investigation_records[0] @@ -1575,27 +1574,23 @@ def test_close_license_investigation_success(self): 'creationDate': investigation.creationDate.isoformat(), 'closeDate': close_date.isoformat(), 'closingUser': closing_user, + 'dateOfUpdate': ANY } - # Pop dynamic fields that we don't want to assert on - investigation_record.pop('dateOfUpdate') - self.assertEqual(expected_investigation_close, investigation_record) + self.assertEqual(expected_investigation_close, investigation_record.serialize_to_database_record()) # Verify license record no longer has investigation status - license_records = self.config.provider_table.query( - KeyConditionExpression=Key('pk').eq(f'aslp#PROVIDER#{provider_id}') - & Key('sk').eq('aslp#PROVIDER#license/oh/slp#') - )['Items'] + license_records = provider_user_records.get_license_records() self.assertEqual(1, len(license_records)) license_record = license_records[0] - self.assertNotIn('investigationStatus', license_record) + self.assertNotIn('investigationStatus', license_record.to_dict()) # Verify update record was created for closure - update_records = self.config.provider_table.query( - KeyConditionExpression=Key('pk').eq(f'aslp#PROVIDER#{provider_id}') - & Key('sk').begins_with('aslp#PROVIDER#license/oh/slp#UPDATE#') - )['Items'] + update_records = provider_user_records.get_update_records_for_license( + jurisdiction=license_record.jurisdiction, + 
license_type=license_record.licenseType + ) # Should have 2 update records: one for creation, one for closure self.assertEqual(2, len(update_records)) @@ -1603,7 +1598,7 @@ def test_close_license_investigation_success(self): # Find the closure update record closure_update = None for update_record in update_records: - if update_record.get('updateType') == 'closingInvestigation': + if update_record.updateType == 'closingInvestigation': closure_update = update_record break @@ -1612,6 +1607,7 @@ def test_close_license_investigation_success(self): # Verify the complete closure update record structure expected_closure_update = { 'pk': f'aslp#PROVIDER#{provider_id}', + 'sk': ANY, 'type': 'licenseUpdate', 'updateType': 'closingInvestigation', 'compact': 'aslp', @@ -1646,12 +1642,10 @@ def test_close_license_investigation_success(self): }, 'updatedValues': {}, 'removedValues': ['investigationStatus'], + 'dateOfUpdate': ANY } - # Pop dynamic fields that we don't want to assert on - closure_update.pop('dateOfUpdate') - closure_update.pop('sk') - self.assertEqual(expected_closure_update, closure_update) + self.assertEqual(expected_closure_update, closure_update.serialize_to_database_record()) def test_close_privilege_investigation_not_found(self): """Test closing privilege investigation when investigation doesn't exist""" From aff321947141e0d68ac30dca1bc4a0e3dde20cbd Mon Sep 17 00:00:00 2001 From: Landon Shumway Date: Tue, 11 Nov 2025 16:23:35 -0600 Subject: [PATCH 47/81] Set data migration dependent on event listener stack --- backend/compact-connect/pipeline/backend_stage.py | 1 + 1 file changed, 1 insertion(+) diff --git a/backend/compact-connect/pipeline/backend_stage.py b/backend/compact-connect/pipeline/backend_stage.py index edf24f9f1..6b4acc429 100644 --- a/backend/compact-connect/pipeline/backend_stage.py +++ b/backend/compact-connect/pipeline/backend_stage.py @@ -219,3 +219,4 @@ def __init__( ) # Explicitly declare the dependency to ensure proper deployment order 
self.data_migration_stack.add_dependency(self.api_stack) + self.data_migration_stack.add_dependency(self.event_listener_stack) From 38c985a9f9bb727f57bfce64cf2fb588f0d31899 Mon Sep 17 00:00:00 2001 From: Landon Shumway Date: Tue, 11 Nov 2025 16:44:43 -0600 Subject: [PATCH 48/81] update tests from merge to use new update access pattern --- .../function/test_handlers/test_investigation.py | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_handlers/test_investigation.py b/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_handlers/test_investigation.py index 347361ac0..c924f8ab0 100644 --- a/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_handlers/test_investigation.py +++ b/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_handlers/test_investigation.py @@ -3,6 +3,7 @@ from unittest.mock import patch from uuid import UUID +from cc_common.data_model.update_tier_enum import UpdateTierEnum from common_test.test_constants import ( DEFAULT_AA_SUBMITTING_USER_ID, DEFAULT_DATE_OF_UPDATE_TIMESTAMP, @@ -955,9 +956,12 @@ def test_closing_one_of_multiple_investigations_maintains_investigation_status(s provider_user_records = self.config.data_client.get_provider_user_records( compact=test_privilege_record.compact, provider_id=test_privilege_record.providerId, + include_update_tier=UpdateTierEnum.TIER_THREE ) update_records = provider_user_records.get_update_records_for_privilege( - jurisdiction=test_privilege_record.jurisdiction, license_type=test_privilege_record.licenseType + jurisdiction=test_privilege_record.jurisdiction, + license_type=test_privilege_record.licenseType, + ) investigation_update_records = [ @@ -1002,6 +1006,7 @@ def test_closing_one_of_multiple_investigations_maintains_investigation_status(s provider_user_records = self.config.data_client.get_provider_user_records( 
compact=test_privilege_record.compact, provider_id=test_privilege_record.providerId, + include_update_tier=UpdateTierEnum.TIER_THREE ) updated_privilege_record = provider_user_records.get_privilege_records()[0] @@ -1020,6 +1025,7 @@ def test_closing_one_of_multiple_investigations_maintains_investigation_status(s provider_user_records = self.config.data_client.get_provider_user_records( compact=test_privilege_record.compact, provider_id=test_privilege_record.providerId, + include_update_tier=UpdateTierEnum.TIER_THREE ) update_records = provider_user_records.get_update_records_for_privilege( jurisdiction=test_privilege_record.jurisdiction, license_type=test_privilege_record.licenseType @@ -1088,6 +1094,7 @@ def test_closing_one_of_multiple_investigations_maintains_investigation_status(s provider_user_records = self.config.data_client.get_provider_user_records( compact=test_license_record.compact, provider_id=test_license_record.providerId, + include_update_tier=UpdateTierEnum.TIER_THREE ) investigation_records = provider_user_records.get_investigation_records_for_license( license_jurisdiction=test_license_record.jurisdiction, @@ -1119,6 +1126,7 @@ def test_closing_one_of_multiple_investigations_maintains_investigation_status(s provider_user_records = self.config.data_client.get_provider_user_records( compact=test_license_record.compact, provider_id=test_license_record.providerId, + include_update_tier=UpdateTierEnum.TIER_THREE ) investigation_records = provider_user_records.get_investigation_records_for_license( license_jurisdiction=test_license_record.jurisdiction, @@ -1154,6 +1162,7 @@ def test_closing_one_of_multiple_investigations_maintains_investigation_status(s provider_user_records = self.config.data_client.get_provider_user_records( compact=test_license_record.compact, provider_id=test_license_record.providerId, + include_update_tier=UpdateTierEnum.TIER_THREE ) updated_license_record = provider_user_records.get_license_records()[0] @@ -1188,6 +1197,7 @@ def 
test_closing_one_of_multiple_investigations_maintains_investigation_status(s provider_user_records = self.config.data_client.get_provider_user_records( compact=test_license_record.compact, provider_id=test_license_record.providerId, + include_update_tier=UpdateTierEnum.TIER_THREE ) update_records = provider_user_records.get_update_records_for_license( jurisdiction=test_license_record.jurisdiction, license_type=test_license_record.licenseType @@ -1235,6 +1245,7 @@ def test_closing_one_of_multiple_investigations_maintains_investigation_status(s provider_user_records = self.config.data_client.get_provider_user_records( compact=test_license_record.compact, provider_id=test_license_record.providerId, + include_update_tier=UpdateTierEnum.TIER_THREE ) updated_license_record = provider_user_records.get_license_records()[0] @@ -1253,6 +1264,7 @@ def test_closing_one_of_multiple_investigations_maintains_investigation_status(s provider_user_records = self.config.data_client.get_provider_user_records( compact=test_license_record.compact, provider_id=test_license_record.providerId, + include_update_tier=UpdateTierEnum.TIER_THREE ) update_records = provider_user_records.get_update_records_for_license( jurisdiction=test_license_record.jurisdiction, license_type=test_license_record.licenseType From 352bfd52a4a03b923ca55520c1156e55a5fd03cc Mon Sep 17 00:00:00 2001 From: Landon Shumway Date: Tue, 11 Nov 2025 17:07:18 -0600 Subject: [PATCH 49/81] formatting --- .../common_constructs/user_pool.py | 30 ++-- .../data_model/provider_record_util.py | 12 +- .../cc_common/data_model/schema/common.py | 1 + .../data_model/schema/license/record.py | 2 +- .../data_model/schema/provider/record.py | 4 +- .../common/tests/function/test_data_client.py | 20 +-- .../handlers/rollback_license_upload.py | 57 ++++--- .../function/test_rollback_license_upload.py | 161 ++++++++++-------- .../migrate_update_sort_keys/main.py | 23 ++- .../python/migration/tests/__init__.py | 4 +- 
.../migration/tests/function/__init__.py | 1 - .../function/test_migrate_update_sort_keys.py | 85 +++++---- .../provider-data-v1/handlers/ingest.py | 2 +- .../test_handlers/test_investigation.py | 19 +-- .../stacks/data_migration_stack/__init__.py | 4 +- .../license_upload_rollback_step_function.py | 5 +- .../stacks/provider_users/provider_users.py | 4 +- .../rollback_license_upload_smoke_tests.py | 13 +- 18 files changed, 232 insertions(+), 215 deletions(-) diff --git a/backend/compact-connect/common_constructs/user_pool.py b/backend/compact-connect/common_constructs/user_pool.py index 1bb903bc8..2fa9c8921 100644 --- a/backend/compact-connect/common_constructs/user_pool.py +++ b/backend/compact-connect/common_constructs/user_pool.py @@ -144,10 +144,10 @@ def __init__( # pylint: disable=too-many-arguments ) def add_custom_app_client_domain( - self, - hosted_zone: IHostedZone, - scope: Construct, - app_client_domain_prefix: str, + self, + hosted_zone: IHostedZone, + scope: Construct, + app_client_domain_prefix: str, ): """ Creates a custom subdomain for the cognito app client in the form of: @@ -159,17 +159,11 @@ def add_custom_app_client_domain( domain_name = f'{domain_prefix}.{hosted_zone.zone_name}' cert_id = f'{app_client_domain_prefix}AuthCert' cert = Certificate( - scope, - cert_id, - domain_name=domain_name, - validation=CertificateValidation.from_dns(hosted_zone=hosted_zone) + scope, cert_id, domain_name=domain_name, validation=CertificateValidation.from_dns(hosted_zone=hosted_zone) ) domain = self.add_domain( f'{app_client_domain_prefix}UserPoolDomain', - custom_domain=CustomDomainOptions( - certificate=cert, - domain_name=domain_name - ), + custom_domain=CustomDomainOptions(certificate=cert, domain_name=domain_name), managed_login_version=ManagedLoginVersion.NEWER_MANAGED_LOGIN, ) @@ -195,7 +189,7 @@ def add_custom_app_client_domain( 'id': 'AwsSolutions-IAM5', 'appliesTo': ['Resource::*'], 'reason': 'This is an AWS-managed custom resource Lambda that 
requires wildcard permissions' - 'to describe CloudFront distributions.', + 'to describe CloudFront distributions.', } ], ) @@ -211,7 +205,7 @@ def add_custom_app_client_domain( 'appliesTo': [ 'Policy::arn::iam::aws:policy/service-role/AWSLambdaBasicExecutionRole' ], - 'reason': 'This is an AWS-managed custom resource Lambda that uses the standard execution role.' + 'reason': 'This is an AWS-managed custom resource Lambda that uses the standard execution role.', } ], ) @@ -223,12 +217,12 @@ def add_custom_app_client_domain( { 'id': 'HIPAA.Security-LambdaDLQ', 'reason': 'This is an AWS-managed custom resource Lambda used only during deployment.' - 'A DLQ is not necessary.', + 'A DLQ is not necessary.', }, { 'id': 'HIPAA.Security-LambdaInsideVPC', 'reason': 'This is an AWS-managed custom resource Lambda that needs internet access to' - 'describe CloudFront distributions.', + 'describe CloudFront distributions.', }, ], ) @@ -236,8 +230,8 @@ def add_custom_app_client_domain( self.app_client_custom_domain = domain def add_default_app_client_domain( - self, - non_custom_domain_prefix: str, + self, + non_custom_domain_prefix: str, ): """ Creates a cognito based sub domain in the form of: diff --git a/backend/compact-connect/lambdas/python/common/cc_common/data_model/provider_record_util.py b/backend/compact-connect/lambdas/python/common/cc_common/data_model/provider_record_util.py index 4aabaded2..3cd45bd8d 100644 --- a/backend/compact-connect/lambdas/python/common/cc_common/data_model/provider_record_util.py +++ b/backend/compact-connect/lambdas/python/common/cc_common/data_model/provider_record_util.py @@ -768,9 +768,7 @@ def get_all_license_update_records( :return: List of LicenseUpdateData records """ return [ - record - for record in self._license_update_records - if filter_condition is None or filter_condition(record) + record for record in self._license_update_records if filter_condition is None or filter_condition(record) ] def get_all_privilege_update_records( @@ 
-783,9 +781,7 @@ def get_all_privilege_update_records( :return: List of PrivilegeUpdateData records """ return [ - record - for record in self._privilege_update_records - if filter_condition is None or filter_condition(record) + record for record in self._privilege_update_records if filter_condition is None or filter_condition(record) ] def get_all_provider_update_records( @@ -798,9 +794,7 @@ def get_all_provider_update_records( :return: List of ProviderUpdateData records """ return [ - record - for record in self._provider_update_records - if filter_condition is None or filter_condition(record) + record for record in self._provider_update_records if filter_condition is None or filter_condition(record) ] def get_update_records_for_license( diff --git a/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/common.py b/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/common.py index 8f886d934..ae058c729 100644 --- a/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/common.py +++ b/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/common.py @@ -311,6 +311,7 @@ class UpdateCategory(CCEnum): # it is referenced in the event that an invalid license upload needs to be reverted. 
LICENSE_UPLOAD_UPDATE_OTHER = 'other' + # License upload related update categories LICENSE_UPLOAD_UPDATE_CATEGORIES = { UpdateCategory.DEACTIVATION, diff --git a/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/license/record.py b/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/license/record.py index a32deb1b9..6519b0ac9 100644 --- a/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/license/record.py +++ b/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/license/record.py @@ -12,12 +12,12 @@ ForgivingSchema, ) from cc_common.data_model.schema.common import ( + LICENSE_UPLOAD_UPDATE_CATEGORIES, ActiveInactiveStatus, ChangeHashMixin, CompactEligibilityStatus, LicenseEncumberedStatusEnum, UpdateCategory, - LICENSE_UPLOAD_UPDATE_CATEGORIES, ) from cc_common.data_model.schema.fields import ( ActiveInactive, diff --git a/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/provider/record.py b/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/provider/record.py index a3edace54..126c1ccc6 100644 --- a/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/provider/record.py +++ b/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/provider/record.py @@ -250,7 +250,5 @@ def generate_pk_sk(self, in_data, **kwargs): # noqa: ARG001 unused-argument # to the record. We'll use the createDate and the hash of the updatedValues # field for this. 
change_hash = self.hash_changes(in_data) - in_data['sk'] = ( - f'{in_data["compact"]}#UPDATE#2#provider/{in_data["createDate"]}/{change_hash}' - ) + in_data['sk'] = f'{in_data["compact"]}#UPDATE#2#provider/{in_data["createDate"]}/{change_hash}' return in_data diff --git a/backend/compact-connect/lambdas/python/common/tests/function/test_data_client.py b/backend/compact-connect/lambdas/python/common/tests/function/test_data_client.py index ecac26f82..39267d400 100644 --- a/backend/compact-connect/lambdas/python/common/tests/function/test_data_client.py +++ b/backend/compact-connect/lambdas/python/common/tests/function/test_data_client.py @@ -1,10 +1,9 @@ import json from datetime import UTC, date, datetime -from unittest.mock import patch, ANY +from unittest.mock import ANY, patch from uuid import UUID, uuid4 from boto3.dynamodb.conditions import Key - from cc_common.data_model.update_tier_enum import UpdateTierEnum from cc_common.exceptions import CCAwsServiceException, CCInvalidRequestException from common_test.test_constants import DEFAULT_PROVIDER_ID @@ -1419,9 +1418,7 @@ def test_close_privilege_investigation_success(self): compact='aslp', provider_id=provider_id, include_update_tier=UpdateTierEnum.TIER_THREE ) investigation_records = provider_user_records.get_investigation_records_for_privilege( - privilege_jurisdiction='ne', - privilege_license_type_abbreviation='slp', - include_closed=True + privilege_jurisdiction='ne', privilege_license_type_abbreviation='slp', include_closed=True ) self.assertEqual(1, len(investigation_records)) @@ -1497,7 +1494,7 @@ def test_close_privilege_investigation_success(self): 'updatedValues': {}, 'removedValues': ['investigationStatus'], 'dateOfUpdate': ANY, - 'compactTransactionIdGSIPK': ANY + 'compactTransactionIdGSIPK': ANY, } self.assertEqual(expected_closure_update, closure_update.serialize_to_database_record()) @@ -1551,9 +1548,7 @@ def test_close_license_investigation_success(self): # Verify investigation record was 
updated with close information investigation_records = provider_user_records.get_investigation_records_for_license( - license_jurisdiction='oh', - license_type_abbreviation='slp', - include_closed=True + license_jurisdiction='oh', license_type_abbreviation='slp', include_closed=True ) self.assertEqual(1, len(investigation_records)) @@ -1574,7 +1569,7 @@ def test_close_license_investigation_success(self): 'creationDate': investigation.creationDate.isoformat(), 'closeDate': close_date.isoformat(), 'closingUser': closing_user, - 'dateOfUpdate': ANY + 'dateOfUpdate': ANY, } self.assertEqual(expected_investigation_close, investigation_record.serialize_to_database_record()) @@ -1588,8 +1583,7 @@ def test_close_license_investigation_success(self): # Verify update record was created for closure update_records = provider_user_records.get_update_records_for_license( - jurisdiction=license_record.jurisdiction, - license_type=license_record.licenseType + jurisdiction=license_record.jurisdiction, license_type=license_record.licenseType ) # Should have 2 update records: one for creation, one for closure @@ -1642,7 +1636,7 @@ def test_close_license_investigation_success(self): }, 'updatedValues': {}, 'removedValues': ['investigationStatus'], - 'dateOfUpdate': ANY + 'dateOfUpdate': ANY, } self.assertEqual(expected_closure_update, closure_update.serialize_to_database_record()) diff --git a/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py b/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py index 5fd810ad0..8de09170f 100644 --- a/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py +++ b/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py @@ -149,7 +149,7 @@ def from_dict(cls, data: dict) -> 'RollbackResults': jurisdiction=reverted_license['jurisdiction'], license_type=reverted_license['licenseType'], 
revision_id=uuid4(), - action=reverted_license['action'] + action=reverted_license['action'], ) for reverted_license in summary.get('licensesReverted', []) ], @@ -158,7 +158,7 @@ def from_dict(cls, data: dict) -> 'RollbackResults': jurisdiction=reverted_privilege['jurisdiction'], license_type=reverted_privilege['licenseType'], revision_id=uuid4(), - action=reverted_privilege['action'] + action=reverted_privilege['action'], ) for reverted_privilege in summary.get('privilegesReverted', []) ], @@ -267,10 +267,10 @@ def rollback_license_upload(event: dict, context: LambdaContext): # noqa: ARG00 start_datetime, end_datetime, ) - + # Convert to sorted list for consistent ordering across invocations affected_provider_ids_list = sorted(affected_provider_ids) - + # If continuing from a previous invocation, slice the list to start from that provider if continue_from_provider_id: try: @@ -465,7 +465,7 @@ def _process_provider_rollback( logger.error(f'Error processing provider rollback: {str(e)}', provider_id=provider_id, exc_info=True) return ProviderFailedDetails( provider_id=provider_id, - error=f"Failed to rollback updates for provider. Manual review required: {str(e)}", + error=f'Failed to rollback updates for provider. Manual review required: {str(e)}', ) # Publish events for successful rollback @@ -477,7 +477,7 @@ def _process_provider_rollback( def _extract_sk_from_transaction_item(transaction_item: dict) -> str | None: """ Extract the sort key (SK) from a transaction item. - + Transaction items can be Put, Delete, or Update operations. Returns the SK if found, None otherwise. 
""" @@ -504,14 +504,14 @@ def _perform_transaction(transaction_items: list[dict], provider_id: str) -> Non failed_sks = [_extract_sk_from_transaction_item(item) for item in batch] # filter out null values failed_sks = [sk for sk in failed_sks if sk is not None] - + logger.error( 'Transaction batch failed for provider', provider_id=provider_id, batch_number=i // 100 + 1, batch_size=len(batch), failed_sks=failed_sks, - error=str(e) + error=str(e), ) raise @@ -598,7 +598,7 @@ def add_delete(pk: str, sk: str, update_record: bool): update_time=update.dateOfUpdate.isoformat(), reason='Provider update occurred after rollback start time. Manual review required.', # provider updates are not specific to a license type - license_type='N/A' + license_type='N/A', ) ) @@ -609,7 +609,8 @@ def add_delete(pk: str, sk: str, update_record: bool): for license_record in license_records: privileges_associated_with_license = provider_records.get_privilege_records( - filter_condition=lambda x: x.licenseJurisdiction == jurisdiction and x.licenseType == license_record.licenseType + filter_condition=lambda x: x.licenseJurisdiction == jurisdiction + and x.licenseType == license_record.licenseType ) privilege_jurisdictions = [x.jurisdiction for x in privileges_associated_with_license] # Get privilege updates for all privileges associated with this license @@ -633,8 +634,8 @@ def add_delete(pk: str, sk: str, update_record: bool): license_type=privilege_update.licenseType, # include privilege jurisdiction in reason reason=f'Privilege in jurisdiction {privilege_update.jurisdiction} was updated with a change ' - f'unrelated to license upload or the update occurred after rollback end time. ' - f'Manual review required.', + f'unrelated to license upload or the update occurred after rollback end time. 
' + f'Manual review required.', ) ) elif start_datetime <= privilege_update.createDate <= end_datetime: @@ -698,7 +699,7 @@ def add_delete(pk: str, sk: str, update_record: bool): update_time=datetime.now().isoformat(), license_type=license_record.licenseType, reason=f'Privileges issued in jurisdictions {privilege_jurisdictions} after license upload. ' - f'Manual review required.', + f'Manual review required.', ) ) # no privileges found, so we can delete the license record @@ -737,7 +738,7 @@ def add_delete(pk: str, sk: str, update_record: bool): update_time=license_update.createDate.isoformat(), license_type=license_update.licenseType, reason='License was updated with a change unrelated to license upload or the update ' - 'occurred after rollback end time. Manual review required.', + 'occurred after rollback end time. Manual review required.', ) ) elif start_datetime <= license_update.createDate <= end_datetime: @@ -749,7 +750,7 @@ def add_delete(pk: str, sk: str, update_record: bool): logger.info( 'Will delete license update record if provider is eligible for rollback', update_type=license_update.updateType, - license_type=license_update.licenseType + license_type=license_update.licenseType, ) # If there were updates in the window and no updates after end_datetime, revert the license @@ -914,17 +915,18 @@ def _publish_revert_events( ) except Exception as e: # this event publishing is not business critical, so we log the error and move on - logger.error("Unable to publish license revert event", - compact=compact, - provider_id=revert_summary.provider_id, - jurisdiction=reverted_license.jurisdiction, - license_type=reverted_license.license_type, - rollback_reason=rollback_reason, - start_time=start_datetime, - end_time=end_datetime, - revision_id=reverted_license.revision_id, - error=str(e), - ) + logger.error( + 'Unable to publish license revert event', + compact=compact, + provider_id=revert_summary.provider_id, + jurisdiction=reverted_license.jurisdiction, + 
license_type=reverted_license.license_type, + rollback_reason=rollback_reason, + start_time=start_datetime, + end_time=end_datetime, + revision_id=reverted_license.revision_id, + error=str(e), + ) # Publish privilege revert events for reverted_privilege in revert_summary.privileges_reverted: @@ -943,7 +945,8 @@ def _publish_revert_events( ) except Exception as e: # this event publishing is not business critical, so we log the error and move on - logger.error("Unable to publish privilege revert event", + logger.error( + 'Unable to publish privilege revert event', compact=compact, provider_id=revert_summary.provider_id, jurisdiction=reverted_privilege.jurisdiction, diff --git a/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py b/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py index 2c8d8b2cd..2fef11782 100644 --- a/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py +++ b/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py @@ -201,9 +201,7 @@ def _when_provider_had_privilege_deactivated_from_upload(self, upload_datetime: 'updateType': self.update_categories.LICENSE_DEACTIVATION, 'createDate': upload_datetime, 'effectiveDate': upload_datetime, - 'previous': { - **privilege.to_dict() - }, + 'previous': {**privilege.to_dict()}, 'updatedValues': { 'licenseDeactivatedStatus': LicenseDeactivatedStatusEnum.LICENSE_DEACTIVATED, }, @@ -303,7 +301,6 @@ def _when_provider_top_level_record_needs_reverted(self, before_upload_datetime: return provider, updated_provider def _when_provider_changed_home_jurisdiction_after_license_upload(self): - self._when_provider_had_license_created_from_upload() provider_update_record = self.test_data_generator.put_default_provider_update_record_in_provider_table( @@ -311,15 +308,13 @@ def 
_when_provider_changed_home_jurisdiction_after_license_upload(self): 'providerId': self.provider_id, 'compact': self.compact, 'updateType': self.update_categories.HOME_JURISDICTION_CHANGE, - 'previous': { - **self.provider_data.to_dict() - }, + 'previous': {**self.provider_data.to_dict()}, 'updatedValues': { 'currentHomeJurisdiction': self.license_jurisdiction, }, }, # home jurisdiction was changed during license upload window - date_of_update_override=self.default_upload_datetime.isoformat() + date_of_update_override=self.default_upload_datetime.isoformat(), ) # Simulate that the provider record was updated during upload @@ -455,8 +450,7 @@ def test_provider_privilege_record_reactivated_when_upload_reverted(self): # make sure license record was reactivated as well license_record = provider_records.get_specific_license_record( - jurisdiction=self.license_jurisdiction, - license_abbreviation=privilege_record.licenseTypeAbbreviation + jurisdiction=self.license_jurisdiction, license_abbreviation=privilege_record.licenseTypeAbbreviation ) self.assertEqual('active', license_record.licenseStatus) @@ -594,7 +588,6 @@ def test_rollback_validates_maximum_time_window(self): self.assertEqual(result['rollbackStatus'], 'FAILED') self.assertIn('cannot exceed', result['error']) - def _perform_rollback_and_get_s3_object(self): from handlers.rollback_license_upload import rollback_license_upload @@ -638,7 +631,8 @@ def test_expected_s3_object_stored_when_provider_license_record_reset_to_prior_v 'providerId': self.provider_id, # NOTE: if the test update data is modified, the sha here will need to be updated 'updatesDeleted': [ - 'aslp#UPDATE#3#license/oh/slp/2025-10-23T07:15:00+00:00/d92450a96739428f1a77c051dce9d4a6'], + 'aslp#UPDATE#3#license/oh/slp/2025-10-23T07:15:00+00:00/d92450a96739428f1a77c051dce9d4a6' + ], } ], 'skippedProviderDetails': [], @@ -715,7 +709,8 @@ def test_expected_s3_object_stored_when_provider_privilege_record_reactivated_fr # NOTE: if the test update data is 
modified, the shas here will need to be updated 'updatesDeleted': [ 'aslp#UPDATE#1#privilege/ne/slp/2025-10-23T07:15:00+00:00/06b886756a79b796ad10b17bd67057e6', - 'aslp#UPDATE#3#license/oh/slp/2025-10-23T07:15:00+00:00/d92450a96739428f1a77c051dce9d4a6'], + 'aslp#UPDATE#3#license/oh/slp/2025-10-23T07:15:00+00:00/d92450a96739428f1a77c051dce9d4a6', + ], } ], 'skippedProviderDetails': [], @@ -734,8 +729,10 @@ def test_expected_s3_object_stored_when_provider_skipped_due_to_extra_license_up results_data = self._perform_rollback_and_get_s3_object() # Verify the structure of the results - expected_reason_message = ("License was updated with a change unrelated to license upload or the update " - "occurred after rollback end time. Manual review required.") + expected_reason_message = ( + 'License was updated with a change unrelated to license upload or the update ' + 'occurred after rollback end time. Manual review required.' + ) self.assertEqual( { 'failedProviderDetails': [], @@ -770,8 +767,10 @@ def test_expected_s3_object_stored_when_provider_skipped_due_to_extra_privilege_ results_data = self._perform_rollback_and_get_s3_object() # Verify the structure of the results - expected_reason_message = ("Privilege in jurisdiction oh was updated with a change unrelated to license upload or the update " - "occurred after rollback end time. Manual review required.") + expected_reason_message = ( + 'Privilege in jurisdiction oh was updated with a change unrelated to license upload or the update ' + 'occurred after rollback end time. Manual review required.' + ) self.assertEqual( { 'failedProviderDetails': [], @@ -805,7 +804,7 @@ def test_expected_s3_object_stored_when_provider_skipped_due_to_extra_provider_u results_data = self._perform_rollback_and_get_s3_object() # Verify the structure of the results - expected_reason_message = "Provider update occurred after rollback start time. Manual review required." 
+ expected_reason_message = 'Provider update occurred after rollback start time. Manual review required.' self.assertEqual( { 'failedProviderDetails': [], @@ -818,7 +817,7 @@ def test_expected_s3_object_stored_when_provider_skipped_due_to_extra_provider_u 'reason': expected_reason_message, 'record_type': 'providerUpdate', 'type_of_update': provider_update.updateType, - 'license_type': 'N/A' + 'license_type': 'N/A', } ], 'provider_id': MOCK_PROVIDER_ID, @@ -842,9 +841,7 @@ def test_expected_s3_object_stored_when_provider_fails_during_rollback(self): # Mock get_provider_user_records to raise an exception when called during rollback mock_error_message = 'Database connection error' with patch.object( - self.config.data_client, - 'get_provider_user_records', - side_effect=Exception(mock_error_message) + self.config.data_client, 'get_provider_user_records', side_effect=Exception(mock_error_message) ): results_data = self._perform_rollback_and_get_s3_object() @@ -854,7 +851,7 @@ def test_expected_s3_object_stored_when_provider_fails_during_rollback(self): 'failedProviderDetails': [ { 'error': f'Failed to rollback updates for provider. 
' - f'Manual review required: {mock_error_message}', + f'Manual review required: {mock_error_message}', 'provider_id': self.provider_id, } ], @@ -879,10 +876,10 @@ def test_rollback_handles_loading_existing_s3_results_and_appends_new_data(self) license_upload_datetime=self.default_start_datetime - timedelta(hours=1) ) self._when_provider_had_privilege_deactivated_from_upload() - + # Create initial S3 results with data in all fields s3_key = f'{MOCK_EXECUTION_NAME}/results.json' - + # Create existing results data in the format that from_dict expects (camelCase for top-level keys) existing_results_data = { 'skippedProviderDetails': [ @@ -922,7 +919,7 @@ def test_rollback_handles_loading_existing_s3_results_and_appends_new_data(self) } ], } - + # Write existing results to S3 self.config.s3_client.put_object( Bucket=self.config.rollback_results_bucket_name, @@ -930,9 +927,9 @@ def test_rollback_handles_loading_existing_s3_results_and_appends_new_data(self) Body=json.dumps(existing_results_data, indent=2), ContentType='application/json', ) - + final_results_data = self._perform_rollback_and_get_s3_object() - + # Verify: All existing data is preserved and new data is appended # Note: to_dict() uses asdict() which produces snake_case for skipped/failed details self.assertEqual( @@ -991,7 +988,7 @@ def test_rollback_handles_loading_existing_s3_results_and_appends_new_data(self) } ], 'updatesDeleted': ANY, - } + }, ], }, final_results_data, @@ -1014,26 +1011,25 @@ def test_rollback_handles_pagination_when_provider_id_present_in_event_input(sel # Provider IDs in sorted order (to ensure consistent pagination behavior) mock_first_provider_id = '11111111-5ed3-4be4-8ad5-c8558f587890' mock_second_provider_id = '22222222-5ed3-4be4-8ad5-c8558f587890' - + # Add first provider self._add_provider_record(provider_id=mock_first_provider_id) self._when_provider_had_license_updated_from_upload( - license_upload_datetime=self.default_start_datetime - timedelta(hours=1), - 
provider_id=mock_first_provider_id + license_upload_datetime=self.default_start_datetime - timedelta(hours=1), provider_id=mock_first_provider_id ) - + # Add second provider self._add_provider_record(provider_id=mock_second_provider_id) self._when_provider_had_license_updated_from_upload( license_upload_datetime=self.default_start_datetime - timedelta(hours=1), - provider_id=mock_second_provider_id + provider_id=mock_second_provider_id, ) - + # Execute: First invocation (should timeout after processing first provider) event = self._generate_test_event() - + result_first = rollback_license_upload(event, Mock()) - + # Assert: First invocation returned IN_PROGRESS status self.assertEqual(result_first['rollbackStatus'], 'IN_PROGRESS') self.assertEqual(1, result_first['providersProcessed']) @@ -1041,47 +1037,65 @@ def test_rollback_handles_pagination_when_provider_id_present_in_event_input(sel self.assertEqual(0, result_first['providersSkipped']) self.assertEqual(0, result_first['providersFailed']) self.assertEqual(mock_second_provider_id, result_first['continueFromProviderId']) - + # Execute: Second invocation (continue from where we left off) # Reset mock time for second invocation mock_time.time.side_effect = [0, 1] # Won't timeout this time - result_second = rollback_license_upload(result_first, Mock()) - + # Assert: Second invocation completed successfully self.assertEqual(result_second['rollbackStatus'], 'COMPLETE') self.assertEqual(2, result_second['providersProcessed']) self.assertEqual(2, result_second['providersReverted']) self.assertEqual(0, result_second['providersSkipped']) self.assertEqual(0, result_second['providersFailed']) - + # Verify: S3 results contain both providers s3_key = f'{MOCK_EXECUTION_NAME}/results.json' - s3_obj = self.config.s3_client.get_object( - Bucket=self.config.rollback_results_bucket_name, Key=s3_key - ) + s3_obj = self.config.s3_client.get_object(Bucket=self.config.rollback_results_bucket_name, Key=s3_key) final_results_data = 
json.loads(s3_obj['Body'].read().decode('utf-8')) - + # Should have 2 reverted providers - self.assertEqual({'failedProviderDetails': [], - 'revertedProviderSummaries': [{'licensesReverted': [{'action': 'REVERT', - 'jurisdiction': 'oh', - 'licenseType': 'speech-language pathologist', - 'revisionId': ANY}], - 'privilegesReverted': [], - 'providerId': mock_first_provider_id, - 'updatesDeleted': [ - 'aslp#UPDATE#3#license/oh/slp/2025-10-23T07:15:00+00:00/d92450a96739428f1a77c051dce9d4a6']}, - {'licensesReverted': [{'action': 'REVERT', - 'jurisdiction': 'oh', - 'licenseType': 'speech-language pathologist', - 'revisionId': ANY}], - 'privilegesReverted': [], - 'providerId': mock_second_provider_id, - 'updatesDeleted': [ - 'aslp#UPDATE#3#license/oh/slp/2025-10-23T07:15:00+00:00/d92450a96739428f1a77c051dce9d4a6']}], - 'skippedProviderDetails': []}, final_results_data) + self.assertEqual( + { + 'failedProviderDetails': [], + 'revertedProviderSummaries': [ + { + 'licensesReverted': [ + { + 'action': 'REVERT', + 'jurisdiction': 'oh', + 'licenseType': 'speech-language pathologist', + 'revisionId': ANY, + } + ], + 'privilegesReverted': [], + 'providerId': mock_first_provider_id, + 'updatesDeleted': [ + 'aslp#UPDATE#3#license/oh/slp/2025-10-23T07:15:00+00:00/d92450a96739428f1a77c051dce9d4a6' + ], + }, + { + 'licensesReverted': [ + { + 'action': 'REVERT', + 'jurisdiction': 'oh', + 'licenseType': 'speech-language pathologist', + 'revisionId': ANY, + } + ], + 'privilegesReverted': [], + 'providerId': mock_second_provider_id, + 'updatesDeleted': [ + 'aslp#UPDATE#3#license/oh/slp/2025-10-23T07:15:00+00:00/d92450a96739428f1a77c051dce9d4a6' + ], + }, + ], + 'skippedProviderDetails': [], + }, + final_results_data, + ) def test_transaction_failure_is_logged_and_provider_marked_as_failed(self): """Test that transaction failures are properly logged and the provider is marked as failed.""" @@ -1089,7 +1103,7 @@ def test_transaction_failure_is_logged_and_provider_marked_as_failed(self): # 
Setup: Create a scenario with privilege deactivation which will have PUT, DELETE, and UPDATE operations # - License update (DELETE of update record) - # - Privilege update (DELETE of update record) + # - Privilege update (DELETE of update record) # - Privilege reactivation (UPDATE to remove licenseDeactivatedStatus) # - Provider record update (PUT) self._when_provider_had_license_updated_from_upload( @@ -1099,25 +1113,22 @@ def test_transaction_failure_is_logged_and_provider_marked_as_failed(self): # Mock the transaction to fail with a ClientError mock_error = ClientError( - error_response={ - 'Error': { - 'Code': 'TransactionCanceledException', - 'Message': 'Transaction cancelled' - } - }, - operation_name='TransactWriteItems' + error_response={'Error': {'Code': 'TransactionCanceledException', 'Message': 'Transaction cancelled'}}, + operation_name='TransactWriteItems', ) - + # Patch at the handler module level to ensure it works across the full test suite - with patch('handlers.rollback_license_upload.config.provider_table.meta.client.transact_write_items', - side_effect=mock_error): + with patch( + 'handlers.rollback_license_upload.config.provider_table.meta.client.transact_write_items', + side_effect=mock_error, + ): results_data = self._perform_rollback_and_get_s3_object() # Verify: Provider was marked as failed self.assertEqual(1, len(results_data['failedProviderDetails'])) self.assertEqual(self.provider_id, results_data['failedProviderDetails'][0]['provider_id']) self.assertIn('TransactionCanceledException', results_data['failedProviderDetails'][0]['error']) - + # Verify: No providers were reverted or skipped self.assertEqual(0, len(results_data['revertedProviderSummaries'])) self.assertEqual(0, len(results_data['skippedProviderDetails'])) diff --git a/backend/compact-connect/lambdas/python/migration/migrate_update_sort_keys/main.py b/backend/compact-connect/lambdas/python/migration/migrate_update_sort_keys/main.py index 63bfc9a00..6c66c29f6 100644 --- 
a/backend/compact-connect/lambdas/python/migration/migrate_update_sort_keys/main.py +++ b/backend/compact-connect/lambdas/python/migration/migrate_update_sort_keys/main.py @@ -1,6 +1,11 @@ from boto3.dynamodb.conditions import Attr from cc_common.config import config, logger -from cc_common.data_model.provider_record_util import ProviderRecordType, ProviderUpdateData, LicenseUpdateData, PrivilegeUpdateData +from cc_common.data_model.provider_record_util import ( + LicenseUpdateData, + PrivilegeUpdateData, + ProviderRecordType, + ProviderUpdateData, +) from cc_common.exceptions import CCInternalException from custom_resource_handler import CustomResourceHandler, CustomResourceResponse @@ -41,12 +46,11 @@ def do_migration(_properties: dict) -> None: while True: response = config.provider_table.scan( FilterExpression=Attr('type').eq(ProviderRecordType.LICENSE_UPDATE) - | Attr('type').eq(ProviderRecordType.PROVIDER_UPDATE) - | Attr('type').eq(ProviderRecordType.PRIVILEGE_UPDATE), + | Attr('type').eq(ProviderRecordType.PROVIDER_UPDATE) + | Attr('type').eq(ProviderRecordType.PRIVILEGE_UPDATE), **scan_pagination, ) - items = response.get('Items', []) update_records.extend(items) logger.info(f'Found {len(items)} privilege update records in current scan batch') @@ -93,6 +97,7 @@ def _generate_delete_transaction_item(pk: str, sk: str) -> dict: } } + def _generate_put_transaction_item(item: dict) -> dict: """ Generate a put transaction item for a provider record. @@ -106,10 +111,11 @@ def _generate_put_transaction_item(item: dict) -> dict: } } + def _generate_transaction_items(original_update_record: dict) -> list[dict]: """ In the case of a provider update record, we add a createDate field based on the dateOfUpdate field. - Then we use the ProviderUpdateData class to serialize the record and return the transaction items. + Then we use the ProviderUpdateData class to serialize the record and return the transaction items. 
(one to delete the old record and one to create the new record) :param original_update_record: The provider update record to process @@ -126,8 +132,8 @@ def _generate_transaction_items(original_update_record: dict) -> list[dict]: elif record_type == ProviderRecordType.PRIVILEGE_UPDATE: data_class = PrivilegeUpdateData else: - logger.error("invalid record type found", record_type=record_type, pk=old_pk, sk=old_sk) - raise CCInternalException("invalid record type found") + logger.error('invalid record type found', record_type=record_type, pk=old_pk, sk=old_sk) + raise CCInternalException('invalid record type found') # Performing deserialization/serialization on the record, which will generate # the new pk/sks values we are migrating to. @@ -148,9 +154,10 @@ def _generate_transaction_items(original_update_record: dict) -> list[dict]: # delete old record with old pk/sk, and create new one return [ _generate_delete_transaction_item(pk=old_pk, sk=old_sk), - _generate_put_transaction_item(migrated_provider_update_record) + _generate_put_transaction_item(migrated_provider_update_record), ] + def _process_batch(update_records: list[dict]) -> None: """ Process a batch of privilege update records. 
diff --git a/backend/compact-connect/lambdas/python/migration/tests/__init__.py b/backend/compact-connect/lambdas/python/migration/tests/__init__.py index 9d5096a43..1622c7691 100644 --- a/backend/compact-connect/lambdas/python/migration/tests/__init__.py +++ b/backend/compact-connect/lambdas/python/migration/tests/__init__.py @@ -92,8 +92,8 @@ def setUpClass(cls): {'name': 'speech-language pathologist', 'abbreviation': 'slp'}, ], 'coun': [ - {"name": "licensed professional counselor", "abbreviation": "lpc"}, - ] + {'name': 'licensed professional counselor', 'abbreviation': 'lpc'}, + ], }, ), }, diff --git a/backend/compact-connect/lambdas/python/migration/tests/function/__init__.py b/backend/compact-connect/lambdas/python/migration/tests/function/__init__.py index ceb4e12eb..bebb625a7 100644 --- a/backend/compact-connect/lambdas/python/migration/tests/function/__init__.py +++ b/backend/compact-connect/lambdas/python/migration/tests/function/__init__.py @@ -93,4 +93,3 @@ def create_provider_table(self): def delete_resources(self): self._provider_table.delete() - diff --git a/backend/compact-connect/lambdas/python/migration/tests/function/test_migrate_update_sort_keys.py b/backend/compact-connect/lambdas/python/migration/tests/function/test_migrate_update_sort_keys.py index 16bce90a5..9d7e7f4bf 100644 --- a/backend/compact-connect/lambdas/python/migration/tests/function/test_migrate_update_sort_keys.py +++ b/backend/compact-connect/lambdas/python/migration/tests/function/test_migrate_update_sort_keys.py @@ -1,10 +1,15 @@ from datetime import datetime -from moto import mock_aws from unittest.mock import patch -from common_test.test_constants import DEFAULT_PROVIDER_UPDATE_DATETIME, DEFAULT_LICENSE_UPDATE_DATETIME, \ - DEFAULT_LICENSE_UPDATE_CREATE_DATE, DEFAULT_LICENSE_JURISDICTION, DEFAULT_PRIVILEGE_JURISDICTION, \ - DEFAULT_PRIVILEGE_UPDATE_DATETIME +from common_test.test_constants import ( + DEFAULT_LICENSE_JURISDICTION, + DEFAULT_LICENSE_UPDATE_CREATE_DATE, + 
DEFAULT_LICENSE_UPDATE_DATETIME, + DEFAULT_PRIVILEGE_JURISDICTION, + DEFAULT_PRIVILEGE_UPDATE_DATETIME, + DEFAULT_PROVIDER_UPDATE_DATETIME, +) +from moto import mock_aws from . import TstFunction @@ -21,10 +26,9 @@ class TestMigrateUpdateSortKeys(TstFunction): def test_should_migrate_provider_update_records_to_expected_pattern(self): from migrate_update_sort_keys.main import do_migration - old_provider_update_record = self.test_data_generator.generate_default_provider_update(value_overrides={ - 'compact': MOCK_COMPACT, - 'providerId': MOCK_PROVIDER_ID - }) + old_provider_update_record = self.test_data_generator.generate_default_provider_update( + value_overrides={'compact': MOCK_COMPACT, 'providerId': MOCK_PROVIDER_ID} + ) serialized_old_record = old_provider_update_record.serialize_to_database_record() # replace sk with old pattern to simulate old record to be migrated serialized_old_record['sk'] = 'aslp#PROVIDER#UPDATE#1752526787/2f429ccda22d273b1ee4876f2917e27f' @@ -36,12 +40,18 @@ def test_should_migrate_provider_update_records_to_expected_pattern(self): do_migration({}) # verify old record was deleted - old_record_resp = self.config.provider_table.get_item(Key={'pk': serialized_old_record['pk'], 'sk': serialized_old_record['sk']}) + old_record_resp = self.config.provider_table.get_item( + Key={'pk': serialized_old_record['pk'], 'sk': serialized_old_record['sk']} + ) self.assertIsNone(old_record_resp.get('Item')) # verify new record was created with expected sk - expected_sk = f'{MOCK_COMPACT}#UPDATE#2#provider/{DEFAULT_PROVIDER_UPDATE_DATETIME}/2f429ccda22d273b1ee4876f2917e27f' - new_record = self.config.provider_table.get_item(Key={'pk': serialized_old_record['pk'], 'sk': expected_sk})['Item'] + expected_sk = ( + f'{MOCK_COMPACT}#UPDATE#2#provider/{DEFAULT_PROVIDER_UPDATE_DATETIME}/2f429ccda22d273b1ee4876f2917e27f' + ) + new_record = self.config.provider_table.get_item(Key={'pk': serialized_old_record['pk'], 'sk': expected_sk})[ + 'Item' + ] 
serialized_old_record['sk'] = expected_sk # as part of migration, the createDate field will be populated with whatever the dateOfUpdate was @@ -50,18 +60,21 @@ def test_should_migrate_provider_update_records_to_expected_pattern(self): # only the sort key and the createDate should have been modified self.assertEqual(serialized_old_record, new_record) - def test_should_migrate_license_update_records_to_expected_pattern(self): from migrate_update_sort_keys.main import do_migration - old_license_update_record = self.test_data_generator.generate_default_license_update(value_overrides={ - 'compact': MOCK_COMPACT, - 'providerId': MOCK_PROVIDER_ID, - 'licenseType': 'licensed professional counselor' - }) + old_license_update_record = self.test_data_generator.generate_default_license_update( + value_overrides={ + 'compact': MOCK_COMPACT, + 'providerId': MOCK_PROVIDER_ID, + 'licenseType': 'licensed professional counselor', + } + ) serialized_old_record = old_license_update_record.serialize_to_database_record() # replace sk with old pattern to simulate old record to be migrated - serialized_old_record['sk'] = f'{MOCK_COMPACT}#PROVIDER#license/{DEFAULT_LICENSE_JURISDICTION}/lpc#UPDATE#1752526787/21554583eb71ccc5f8aa5988c8a50ac2' + serialized_old_record['sk'] = ( + f'{MOCK_COMPACT}#PROVIDER#license/{DEFAULT_LICENSE_JURISDICTION}/lpc#UPDATE#1752526787/21554583eb71ccc5f8aa5988c8a50ac2' + ) serialized_old_record['dateOfUpdate'] = DEFAULT_LICENSE_UPDATE_DATETIME self.config.provider_table.put_item(Item=serialized_old_record) @@ -69,12 +82,16 @@ def test_should_migrate_license_update_records_to_expected_pattern(self): do_migration({}) # verify old record was deleted - old_record_resp = self.config.provider_table.get_item(Key={'pk': serialized_old_record['pk'], 'sk': serialized_old_record['sk']}) + old_record_resp = self.config.provider_table.get_item( + Key={'pk': serialized_old_record['pk'], 'sk': serialized_old_record['sk']} + ) self.assertIsNone(old_record_resp.get('Item')) # 
verify new record was created with expected sk expected_sk = f'{MOCK_COMPACT}#UPDATE#3#license/{DEFAULT_LICENSE_JURISDICTION}/lpc/{DEFAULT_LICENSE_UPDATE_CREATE_DATE}/21554583eb71ccc5f8aa5988c8a50ac2' - new_record = self.config.provider_table.get_item(Key={'pk': serialized_old_record['pk'], 'sk': expected_sk})['Item'] + new_record = self.config.provider_table.get_item(Key={'pk': serialized_old_record['pk'], 'sk': expected_sk})[ + 'Item' + ] serialized_old_record['sk'] = expected_sk # nothing on the record should have changed other than the sort key self.assertEqual(serialized_old_record, new_record) @@ -84,15 +101,19 @@ def test_should_migrate_privilege_update_records_to_expected_pattern(self): mock_create_date = '2025-07-07T07:07:07+00:00' - old_privilege_update_record = self.test_data_generator.generate_default_privilege_update(value_overrides={ - 'compact': MOCK_COMPACT, - 'providerId': MOCK_PROVIDER_ID, - 'licenseType': 'licensed professional counselor', - 'createDate': datetime.fromisoformat(mock_create_date) - }) + old_privilege_update_record = self.test_data_generator.generate_default_privilege_update( + value_overrides={ + 'compact': MOCK_COMPACT, + 'providerId': MOCK_PROVIDER_ID, + 'licenseType': 'licensed professional counselor', + 'createDate': datetime.fromisoformat(mock_create_date), + } + ) serialized_old_record = old_privilege_update_record.serialize_to_database_record() # replace sk with old pattern to simulate old record to be migrated - serialized_old_record['sk'] = f'{MOCK_COMPACT}#PROVIDER#privilege/{DEFAULT_PRIVILEGE_JURISDICTION}/lpc#UPDATE#1752526787/399abde0989ad5e936920a3ba9f0944a' + serialized_old_record['sk'] = ( + f'{MOCK_COMPACT}#PROVIDER#privilege/{DEFAULT_PRIVILEGE_JURISDICTION}/lpc#UPDATE#1752526787/399abde0989ad5e936920a3ba9f0944a' + ) serialized_old_record['dateOfUpdate'] = DEFAULT_PRIVILEGE_UPDATE_DATETIME self.config.provider_table.put_item(Item=serialized_old_record) @@ -100,14 +121,16 @@ def 
test_should_migrate_privilege_update_records_to_expected_pattern(self): do_migration({}) # verify old record was deleted - old_record_resp = self.config.provider_table.get_item(Key={'pk': serialized_old_record['pk'], 'sk': serialized_old_record['sk']}) + old_record_resp = self.config.provider_table.get_item( + Key={'pk': serialized_old_record['pk'], 'sk': serialized_old_record['sk']} + ) self.assertIsNone(old_record_resp.get('Item')) # verify new record was created with expected sk expected_sk = f'{MOCK_COMPACT}#UPDATE#1#privilege/{DEFAULT_PRIVILEGE_JURISDICTION}/lpc/{mock_create_date}/399abde0989ad5e936920a3ba9f0944a' - new_record = self.config.provider_table.get_item(Key={'pk': serialized_old_record['pk'], 'sk': expected_sk})['Item'] + new_record = self.config.provider_table.get_item(Key={'pk': serialized_old_record['pk'], 'sk': expected_sk})[ + 'Item' + ] serialized_old_record['sk'] = expected_sk # nothing on the record should have changed other than the sort key self.assertEqual(serialized_old_record, new_record) - - diff --git a/backend/compact-connect/lambdas/python/provider-data-v1/handlers/ingest.py b/backend/compact-connect/lambdas/python/provider-data-v1/handlers/ingest.py index c9c802cec..0293dd22b 100644 --- a/backend/compact-connect/lambdas/python/provider-data-v1/handlers/ingest.py +++ b/backend/compact-connect/lambdas/python/provider-data-v1/handlers/ingest.py @@ -1,5 +1,5 @@ -from copy import deepcopy import json +from copy import deepcopy from boto3.dynamodb.types import TypeSerializer from cc_common.config import config, logger diff --git a/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_handlers/test_investigation.py b/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_handlers/test_investigation.py index c924f8ab0..c89a412d1 100644 --- a/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_handlers/test_investigation.py +++ 
b/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_handlers/test_investigation.py @@ -956,12 +956,11 @@ def test_closing_one_of_multiple_investigations_maintains_investigation_status(s provider_user_records = self.config.data_client.get_provider_user_records( compact=test_privilege_record.compact, provider_id=test_privilege_record.providerId, - include_update_tier=UpdateTierEnum.TIER_THREE + include_update_tier=UpdateTierEnum.TIER_THREE, ) update_records = provider_user_records.get_update_records_for_privilege( jurisdiction=test_privilege_record.jurisdiction, license_type=test_privilege_record.licenseType, - ) investigation_update_records = [ @@ -1006,7 +1005,7 @@ def test_closing_one_of_multiple_investigations_maintains_investigation_status(s provider_user_records = self.config.data_client.get_provider_user_records( compact=test_privilege_record.compact, provider_id=test_privilege_record.providerId, - include_update_tier=UpdateTierEnum.TIER_THREE + include_update_tier=UpdateTierEnum.TIER_THREE, ) updated_privilege_record = provider_user_records.get_privilege_records()[0] @@ -1025,7 +1024,7 @@ def test_closing_one_of_multiple_investigations_maintains_investigation_status(s provider_user_records = self.config.data_client.get_provider_user_records( compact=test_privilege_record.compact, provider_id=test_privilege_record.providerId, - include_update_tier=UpdateTierEnum.TIER_THREE + include_update_tier=UpdateTierEnum.TIER_THREE, ) update_records = provider_user_records.get_update_records_for_privilege( jurisdiction=test_privilege_record.jurisdiction, license_type=test_privilege_record.licenseType @@ -1094,7 +1093,7 @@ def test_closing_one_of_multiple_investigations_maintains_investigation_status(s provider_user_records = self.config.data_client.get_provider_user_records( compact=test_license_record.compact, provider_id=test_license_record.providerId, - include_update_tier=UpdateTierEnum.TIER_THREE + 
include_update_tier=UpdateTierEnum.TIER_THREE, ) investigation_records = provider_user_records.get_investigation_records_for_license( license_jurisdiction=test_license_record.jurisdiction, @@ -1126,7 +1125,7 @@ def test_closing_one_of_multiple_investigations_maintains_investigation_status(s provider_user_records = self.config.data_client.get_provider_user_records( compact=test_license_record.compact, provider_id=test_license_record.providerId, - include_update_tier=UpdateTierEnum.TIER_THREE + include_update_tier=UpdateTierEnum.TIER_THREE, ) investigation_records = provider_user_records.get_investigation_records_for_license( license_jurisdiction=test_license_record.jurisdiction, @@ -1162,7 +1161,7 @@ def test_closing_one_of_multiple_investigations_maintains_investigation_status(s provider_user_records = self.config.data_client.get_provider_user_records( compact=test_license_record.compact, provider_id=test_license_record.providerId, - include_update_tier=UpdateTierEnum.TIER_THREE + include_update_tier=UpdateTierEnum.TIER_THREE, ) updated_license_record = provider_user_records.get_license_records()[0] @@ -1197,7 +1196,7 @@ def test_closing_one_of_multiple_investigations_maintains_investigation_status(s provider_user_records = self.config.data_client.get_provider_user_records( compact=test_license_record.compact, provider_id=test_license_record.providerId, - include_update_tier=UpdateTierEnum.TIER_THREE + include_update_tier=UpdateTierEnum.TIER_THREE, ) update_records = provider_user_records.get_update_records_for_license( jurisdiction=test_license_record.jurisdiction, license_type=test_license_record.licenseType @@ -1245,7 +1244,7 @@ def test_closing_one_of_multiple_investigations_maintains_investigation_status(s provider_user_records = self.config.data_client.get_provider_user_records( compact=test_license_record.compact, provider_id=test_license_record.providerId, - include_update_tier=UpdateTierEnum.TIER_THREE + include_update_tier=UpdateTierEnum.TIER_THREE, ) 
updated_license_record = provider_user_records.get_license_records()[0] @@ -1264,7 +1263,7 @@ def test_closing_one_of_multiple_investigations_maintains_investigation_status(s provider_user_records = self.config.data_client.get_provider_user_records( compact=test_license_record.compact, provider_id=test_license_record.providerId, - include_update_tier=UpdateTierEnum.TIER_THREE + include_update_tier=UpdateTierEnum.TIER_THREE, ) update_records = provider_user_records.get_update_records_for_license( jurisdiction=test_license_record.jurisdiction, license_type=test_license_record.licenseType diff --git a/backend/compact-connect/stacks/data_migration_stack/__init__.py b/backend/compact-connect/stacks/data_migration_stack/__init__.py index 9279b9d14..5413c5b50 100644 --- a/backend/compact-connect/stacks/data_migration_stack/__init__.py +++ b/backend/compact-connect/stacks/data_migration_stack/__init__.py @@ -44,8 +44,8 @@ def __init__( { 'id': 'AwsSolutions-IAM5', 'reason': 'This policy contains wild-carded actions and resources but they are scoped to the ' - 'specific actions, Table and Key that this lambda needs access to in order to perform the' - 'migration.', + 'specific actions, Table and Key that this lambda needs access to in order to perform the' + 'migration.', }, ], ) diff --git a/backend/compact-connect/stacks/disaster_recovery_stack/license_upload_rollback_step_function.py b/backend/compact-connect/stacks/disaster_recovery_stack/license_upload_rollback_step_function.py index 3cfcb6879..11a72d12d 100644 --- a/backend/compact-connect/stacks/disaster_recovery_stack/license_upload_rollback_step_function.py +++ b/backend/compact-connect/stacks/disaster_recovery_stack/license_upload_rollback_step_function.py @@ -56,7 +56,7 @@ def __init__( stack=stack, persistent_stack=persistent_stack, rollback_results_bucket=rollback_results_bucket, - data_event_bus=data_event_bus + data_event_bus=data_event_bus, ) # Build Step Function definition @@ -118,7 +118,7 @@ def 
_create_rollback_function( stack: Stack, persistent_stack: ps.PersistentStack, rollback_results_bucket: Bucket, - data_event_bus: EventBus + data_event_bus: EventBus, ): """Create the Lambda function for processing license upload rollback.""" self.rollback_function = PythonFunction( @@ -242,4 +242,3 @@ def _build_rollback_state_machine_definition(self) -> IChainable: # Start with initialization return initialize_rollback - diff --git a/backend/compact-connect/stacks/provider_users/provider_users.py b/backend/compact-connect/stacks/provider_users/provider_users.py index 449ac9a8b..1c8fd04e1 100644 --- a/backend/compact-connect/stacks/provider_users/provider_users.py +++ b/backend/compact-connect/stacks/provider_users/provider_users.py @@ -69,9 +69,7 @@ def __init__( if persistent_stack.hosted_zone: self.add_custom_app_client_domain( - app_client_domain_prefix='Licensee', - scope=self, - hosted_zone=persistent_stack.hosted_zone + app_client_domain_prefix='Licensee', scope=self, hosted_zone=persistent_stack.hosted_zone ) else: provider_prefix = f'{app_name}-provider' diff --git a/backend/compact-connect/tests/smoke/rollback_license_upload_smoke_tests.py b/backend/compact-connect/tests/smoke/rollback_license_upload_smoke_tests.py index 3158dbbcf..d80c3500d 100644 --- a/backend/compact-connect/tests/smoke/rollback_license_upload_smoke_tests.py +++ b/backend/compact-connect/tests/smoke/rollback_license_upload_smoke_tests.py @@ -30,9 +30,7 @@ BATCH_SIZE = 100 # Upload in batches of 100 to avoid timeouts -def upload_test_license_batch( - auth_headers: dict, batch_start_index: int, batch_size: int -): +def upload_test_license_batch(auth_headers: dict, batch_start_index: int, batch_size: int): """ Upload a batch of test license records. 
@@ -144,7 +142,6 @@ def wait_for_all_providers_created(staff_headers: dict, expected_count: int, max page_num = 1 all_provider_ids = [] while time.time() - start_time < max_wait_time: - # Collect all providers across all pages while True: query_body = base_query_body.copy() @@ -159,8 +156,9 @@ def wait_for_all_providers_created(staff_headers: dict, expected_count: int, max ) if query_response.status_code != 200: - logger.warning(f'Query failed with status {query_response.status_code}: {query_response.json()}' - f' Retrying...') + logger.warning( + f'Query failed with status {query_response.status_code}: {query_response.json()} Retrying...' + ) break response_data = query_response.json() @@ -186,8 +184,7 @@ def wait_for_all_providers_created(staff_headers: dict, expected_count: int, max num_found = len(all_provider_ids) logger.info( - f'Found {num_found}/{expected_count} providers with family name "RollbackTest" ' - f'(across {page_num} pages)' + f'Found {num_found}/{expected_count} providers with family name "RollbackTest" (across {page_num} pages)' ) if num_found >= expected_count: From 2cd4ebac7657bee8b9ebd7dda686ec291780b628 Mon Sep 17 00:00:00 2001 From: Landon Shumway Date: Wed, 12 Nov 2025 12:34:24 -0600 Subject: [PATCH 50/81] Add documentation for license upload rollback --- .../disaster_recovery/FULL_TABLE_RECOVERY.md | 230 +++++++++++++++++ .../LICENSE_UPLOAD_ROLLBACK.md | 189 ++++++++++++++ .../disaster_recovery/README.md | 232 +----------------- 3 files changed, 427 insertions(+), 224 deletions(-) create mode 100644 backend/compact-connect/disaster_recovery/FULL_TABLE_RECOVERY.md create mode 100644 backend/compact-connect/disaster_recovery/LICENSE_UPLOAD_ROLLBACK.md diff --git a/backend/compact-connect/disaster_recovery/FULL_TABLE_RECOVERY.md b/backend/compact-connect/disaster_recovery/FULL_TABLE_RECOVERY.md new file mode 100644 index 000000000..3ab83a5b6 --- /dev/null +++ b/backend/compact-connect/disaster_recovery/FULL_TABLE_RECOVERY.md @@ -0,0 +1,230 
@@ +## Overview + +The Full Table Disaster Recovery (DR) system provides automated recovery capabilities for critical DynamoDB tables in the CompactConnect system. This system allows administrators to perform Point-in-Time Recovery (PITR) operations when tables become corrupted or require rollback to a previous state. + +**⚠️ WARNING: This system performs a HARD RESET of the target table, permanently deleting all current data before restoring from the specified timestamp.** + +## When to Use + +This Disaster Recovery process should only be run in the event that the system experiences an event that causes +system-wide failures, such as the following scenarios: + +1. **Data Corruption**: When a table contains corrupted or invalid data that cannot be fixed through normal operations +2. **Accidental Data Loss**: When critical data has been accidentally deleted or modified +3. **Failed Deployments**: When a deployment has caused data integrity issues +4. **Security Incidents**: When unauthorized modifications require rolling back to a clean state +5. **System-wide Issues**: When multiple tables need to be restored to a consistent point in time + +## Architecture + +### Two-Phase Recovery Process +DynamoDB PITR cannot directly restore data into your production database. Instead, it creates a new table with data matching the exact values you had in your production database at the specified timestamp. You as the owner of the database must decide what to do with that data from that point in time. For the purposes of disaster recovery rollback, we have determined to get the data into the production table by performing a 'hard reset', meaning **all the current data in the production table is deleted**, then we copy over the data from the temporary table into the production table. This process includes the following step functions. + +1. 
**RestoreDynamoDbTable Step Function** (Parent) + - Creates a backup of the current table for post-incident analysis + - Restores a temporary table from the specified PITR timestamp + - Invokes the SyncTableData Step Function + +2. **SyncTableData Step Function** (Child) + - **Delete Phase**: Removes all records from the production table + - **Copy Phase**: Copies all records from the temporary table to the production table + +Once this process is complete, the data in the target table will be restored with the data from the specified point in time. + +### Per-Table Isolation + +Each DynamoDB table has its own dedicated pair of Step Functions: + +- `DRRestoreDynamoDbTable{TableName}StateMachine` +- `{TableName}DRSyncTableDataStateMachine` + +This design allows for: +- **Targeted Recovery**: Restore only the affected table(s) +- **Granular Permissions**: Each Step Function has minimal, table-specific permissions + +## Supported Tables + +The following tables are configured for disaster recovery: + +| Table Name | Step Function Prefix | Purpose | Recovery Notes | +|------------|---------------------|---------|----------------| +| TransactionHistoryTable | `TransactionHistoryTable` | transaction data from authorize.net | Can be rolled back independently. After DR rollback, run the Transaction History Processing Workflow Step Function for each compact for every day where data was lost to restore all transaction data from Authorize.net accounts. The Transaction History Processing Workflow step functions are idempotent. They can be run multiple times without producing duplicate transaction items in the table. | +| ProviderTable | `ProviderTable` | Provider information and GSIs | **Dependent on SSN table** - Can be rolled back without updating SSN table since SSN table does not have a dependency on the provider table. **⚠️ WARNING**: If SSN table needs rollback, the provider table will likely need to be restored to same point in time as SSN table. 
Otherwise new provider IDs may be generated for existing SSNs causing data inconsistency/orphaned providers that won't receive license updates. After DR rollback, consider that the transaction history table will have a list of all privileges purchased as recorded in Authorize.net, and can be used as a data source for repopulating any privilege records that may have been lost as a result of the rollback.| +| CompactConfigurationTable | `CompactConfigurationTable` | System configuration data | Can be rolled back independently of other tables. Contains configuration set by compact and state admins. Admins may need to reset configurations that were lost as a result of the rollback. | +| DataEventTable | `DataEventTable` | License data events | Used for downstream processing events triggered by Event Bridge event bus. In the event of recovery, many of these events can likely be restored by replaying events placed on the event bus. See https://docs.aws.amazon.com/eventbridge/latest/userguide/eb-archive.html | +| UsersTable | `UsersTable` | Staff user permissions and account data | Can be rolled back independently. Contains staff user permissions and account information. Admins may need to re-invite new users or reset permissions that were lost as a result of the rollback. | + +> **Note**: The SSN table is excluded due to additional security requirements and will be handled in a future implementation. + +## Running the Disaster Recovery Workflow + +## Pre-Execution Checklist + +1. ✅ **Verify Impact**: Confirm which applications/users will be affected +2. ✅ **Communication**: Notify stakeholders of the planned recovery +3. ✅ **Timestamp Selection**: Determine the UTC timestamp to restore to (must be within 35 days) +4. 
✅ **Access Verification**: Confirm you have necessary permissions (Currently only AWS account admins can trigger a DR) + +### Step 1: Start Recovery Mode + +Before executing the DR Step Function, you must throttle all Lambda functions to prevent other data operations from occurring while attempting to roll any databases back. There is a script provided to perform this action: + +```bash +# Navigate to the disaster_recovery directory +cd backend/compact-connect/disaster_recovery + +# Start recovery mode for the environment (replace "Prod" with your target environment) +python start_recovery_mode.py --environment Prod +``` + +This will put the system into recovery mode by: +- Setting reserved concurrency to 0 for all environment Lambda functions, so they can't be invoked +- Leaving Disaster Recovery functions operational +- **Important**: If any functions failed to throttle, you may rerun the script or manually check their reserved concurrency settings if needed. The script is idempotent and can be run multiple times. 
+ +### Step 2: Execute Disaster Recovery Step Function For Specific Tables +#### Prerequisites +- Identify the exact table name from the DynamoDB console (needed for `tableNameRecoveryConfirmation`) +- Verify the PITR timestamp is correct +- Create a unique incident ID for tracking (see [Execution Request Parameter Details](#execution-request-parameter-details)) + +When you are ready to perform a rollback, find the step function for the specific table you need to rollback (`DRRestoreDynamoDbTable{TableName}StateMachine`) and start an execution with the following input (replace placeholders with your values) + +```json +{ + "incidentId": "", + "pitrBackupTime": "", + "tableNameRecoveryConfirmation": "" +} +``` + +#### Execution Request Parameter Details + +- **`incidentId`** (required) + - Purpose: Unique identifier for tracking this recovery operation + - Format: String (80 chars or less, allows alphanumeric and hyphens) + - Example: `"incident-2025-001"`, `"corruption-fix-20250115"` + - Used in: Backup names, restored table names, execution tracking + +- **`pitrBackupTime`** (required) + - Purpose: The timestamp to restore the table to + - Format: UTC datetime string + - Example: `"2030-01-15T12:39:46Z"` + - Constraints: Must be within the PITR retention window (35 days) + +- **`tableNameRecoveryConfirmation`** (required) + - Purpose: Security guard rail to prevent accidental execution + - Format: Exact table name being recovered (you can copy this from the DynamoDB console) + - Example: `"Prod-PersistentStack-DataEventTable00A96798-C6VX9JVDOYGN"` + - Validation: Must match the actual destination table name + +example: +```json +{ + "incidentId": "transaction-corruption-20250115", + "pitrBackupTime": "2025-01-15T09:00:00Z", + "tableNameRecoveryConfirmation": "Prod-PersistentStack-TransactionHistoryTable00A96798-C6VX9JVDOYGN" +} +``` + +#### Running Step Functions from AWS Console + +1. Navigate to Step Functions in the AWS Console +2. 
Find the appropriate Step Function(s) for the table(s) you need to recover (e.g., `DRRestoreDynamoDbTableTransactionHistoryTableStateMachine`) +3. For each step function you need to run, Click "Start Execution" +4. Enter the JSON payload in the input field +5. Click "Start Execution" and wait for completion (multiple Step functions can be run concurrently if you are restoring multiple tables) + +### Step 3: End Recovery Mode + +**⚠️CRITICAL**: Only proceed after ALL recovery Step Functions you have run have completed successfully. + +After the DR Step Function completes successfully for each table you need to restore, end the recovery mode to restore normal operations: + +```bash +# End recovery mode for the environment +python end_recovery_mode.py --environment Prod +``` + +This will: +- Remove reserved concurrency throttling from all Lambda functions +- Restore normal application operations +- Complete the disaster recovery process +- **Important**: If any functions failed to unthrottle, you may rerun the script or manually check their reserved concurrency settings if needed. The script is idempotent and can be run multiple times. + +### Post-Execution + +1. **Verify Recovery**: Confirm data integrity and completeness +2. **Application Testing**: Test critical application functions +3. **Documentation**: Update incident documentation with recovery details +4. **Cleanup Review**: Cleanup temporary resources after post-incident analysis. + +### Operational Constraints + +- **Data Loss**: All data newer than the PITR timestamp will be permanently lost. The backup snapshot may be restored post-recovery to determine which records can potentially be recovered. +- **Dependencies**: Related tables may need coordinated restoration for consistency. 
+ +## Monitoring and Troubleshooting +### Common Issues and Solutions + +#### Invalid table name +- **Cause**: `tableNameRecoveryConfirmation` doesn't match actual table name (this parameter is used to prevent accidental recovery on a database) +- **Solution**: Copy exact table name from DynamoDB console + +#### Restore timestamp out of range +- **Cause**: PITR timestamp is outside the 35-day retention window +- **Solution**: Choose a more recent timestamp within the retention period + +## Complete Table Deletion Recovery (Manual Backup Restoration) + +**⚠️ CRITICAL**: This section applies ONLY when a DynamoDB table has been completely deleted and PITR is not available. This requires manual intervention and cannot use the automated Step Functions. + +### Recovery Steps +Depending on how the table was deleted, there may be a latest 'snapshot' backup in the DynamoDB console that you can recover from. If that snapshot is not available, the system performs daily backups of our tables and store them in the AWS Backup service that you can recover from. + +#### Step 1: Locate the Latest Backup + +##### Option A: DynamoDB Console +1. Navigate to DynamoDB Console → Backups +2. Find the most recent backup for the deleted table +3. Note the backup name and creation time + +##### Option B: AWS Backup Console +1. Navigate to AWS Backup Console → Backup Vaults +2. Find the most recent recovery point for the deleted table +3. **CRITICAL**: Note the "Original table name" from the recovery point details + +#### Step 2: Restore Table from Backup + +1. 
**From DynamoDB Console**: + - Go to DynamoDB → Backups + - Select the backup → "Restore" + - **CRITICAL Configuration**: + - **Table Name**: Must match EXACTLY the original deleted table name + - **Encryption**: Select "Customer managed key" + - **KMS Key**: Choose `-PersistentStack-shared-encryption-key` for non-ssn tables, `ssn-key` for the SSN table + - Example: `Prod-PersistentStack-shared-encryption-key` + - **Global Secondary Indexes (GSIs)**: Ensure ALL original GSIs are included in the restore by selecting 'Restore the entire table' + - Select 'Restore' + +2. **From AWS Backup Console**: + - Navigate to Recovery Points → Select the backup + - Click "Restore" + - **CRITICAL Configuration**: + - **New Table Name**: Use the EXACT "Original table name" from the recovery point + - **Encryption**: Choose an AWS KMS key -> `-PersistentStack-shared-encryption-key` for non-ssn tables, `ssn-key` for the SSN table + - **GSIs**: Verify all original GSIs are restored + - Select 'Restore Backup' + +#### Step 3: Verify Restoration + +1. **Table Configuration**: + - ✅ Table name matches exactly (including environment prefix and suffix) + - ✅ All Global Secondary Indexes are present + - ✅ Encryption is set to the correct KMS key + - ✅ Table status is "ACTIVE" + +2. **Data Verification**: + - Spot-check critical records + - Verify record counts are reasonable + - Verify application functionality with the restored table diff --git a/backend/compact-connect/disaster_recovery/LICENSE_UPLOAD_ROLLBACK.md b/backend/compact-connect/disaster_recovery/LICENSE_UPLOAD_ROLLBACK.md new file mode 100644 index 000000000..c4b67b7a5 --- /dev/null +++ b/backend/compact-connect/disaster_recovery/LICENSE_UPLOAD_ROLLBACK.md @@ -0,0 +1,189 @@ +# License Upload Rollback Guide + +## Overview + +The License Upload Rollback system allows AWS account administrators to automatically revert invalid or corrupted license data that was uploaded by a specific jurisdiction within a defined time window. 
+ +The system will automatically determine which providers had their license records modified as a result of uploads during the time window, and confirm which license updates can be safely rolled back. A provider is eligible for automatic rollback if only license upload-related changes happened since the window. If any other updates have occurred since the start of the time window, the provider will be skipped and manual review will be required to determine which action should be taken for that individual. The rollback process will generate a full JSON report showing which providers had their licenses rolled back and which were skipped and require manual review. + +## Step-by-Step Execution Guide + +### Prerequisites + +Before starting the rollback: + +1. ✅ **Verify the Problem**: Confirm which jurisdiction uploaded bad data for which compact(s) +2. ✅ **Determine Time Window**: Identify the exact start and end times (UTC) of the problematic uploads +3. ✅ **Stakeholder Notification**: Coordinate with relevant state administrators and other stakeholders + +### Step 1: Gather Required Information + +You'll need the following information for the execution: + +| Parameter | Description | Example | +|-----------|-------------|---------| +| `compact` | The compact abbreviation (lowercase) | `"aslp"`, `"octp"`, `"counseling"` | +| `jurisdiction` | The state/jurisdiction code (lowercase) | `"oh"`, `"ky"`, `"ne"` | +| `startDateTime` | UTC timestamp when problematic uploads began | `"2020-01-15T08:00:00Z"` | +| `endDateTime` | UTC timestamp when problematic uploads ended | `"2020-01-15T17:59:59Z"` | +| `rollbackReason` | Description for audit trail | `"Invalid license data uploaded by OH staff"` | + +**Important Notes:** +- All timestamps must be in UTC +- Time window cannot exceed 7 days (604,800 seconds) + +### Step 2: Locate the Step Function + +1. Navigate to the AWS Console → Step Functions +2. 
Find the Step Function with the name prefix: **`LicenseUploadRollbackLicenseUploadRollbackStateMachine`** + +### Step 3: Execute the Step Function + +1. Click **"Start Execution"** +2. Enter a descriptive execution name (this will be used for the S3 results folder): + ``` + rollback-aslp-oh-2020-01-15 + ``` + +3. Paste the following JSON input (replace values with your specific parameters): + +```json +{ + "compact": "aslp", + "jurisdiction": "oh", + "startDateTime": "2020-01-15T08:00:00Z", + "endDateTime": "2020-01-15T17:59:59Z", + "rollbackReason": "Invalid license data uploaded - incorrect expiration dates" +} +``` + +4. Click **"Start Execution"** + +### Step 4: Monitor Execution Progress + +The Step Function will process providers in batches. Monitor the step function execution until it completes and verify the execution was successful. + +### Step 5: Review Results + +Once the execution completes, comprehensive results are stored in S3. The S3 key is returned as output from the lambda step of the step function. + +#### Accessing the Results File + +1. Navigate to S3 in the AWS Console +2. Find the bucket with `disasterrecoveryrollbackresults` in the name. +3. Navigate to the folder matching your execution name: `rollback-aslp-oh-2020-01-15/` +4. Download the file: `results.json` + +#### Understanding the Results Structure + +The results file contains three main sections: + +##### 1.
Reverted Provider Summaries + +Providers that were successfully rolled back (example): + +```json +{ + "revertedProviderSummaries": [ + { + "providerId": "01234567-89ab-cdef-0123-456789abcdef", + "licensesReverted": [ + { + "jurisdiction": "oh", + "licenseType": "audiologist", + "revisionId": "98765432-10ab-cdef-0123-456789abcdef", + "action": "REVERT" + } + ], + "privilegesReverted": [ + { + "jurisdiction": "ky", + "licenseType": "audiologist", + "revisionId": "11111111-2222-3333-4444-555555555555", + "action": "REACTIVATED" + } + ], + "updatesDeleted": [ + + ] + } + ] +} +``` + +**Actions Explained:** +- `"REVERT"`: License data was restored to its pre-upload state +- `"DELETE"`: License was newly created during the upload and has been removed +- `"REACTIVATED"`: Privilege was deactivated due to the upload and has been reactivated + +##### 2. Skipped Provider Details + +Providers that require manual review (example): + +```json +{ + "skippedProviderDetails": [ + { + "providerId": "12345678-90ab-cdef-0123-456789abcdef", + "reason": "Provider has updates that are either unrelated to license upload or occurred after rollback end time. Manual review required.", + "ineligibleUpdates": [ + { + "recordType": "licenseUpdate", + "typeOfUpdate": "encumbrance", + "updateTime": "2025-01-16T10:30:00Z", + "licenseType": "audiologist", + "reason": "License was updated with a change unrelated to license upload or the update occurred after rollback end time. Manual review required." + } + ] + } + ] +} +``` + +##### 3. Failed Provider Details + +Providers that encountered errors: + +```json +{ + "failedProviderDetails": [ + { + "providerId": "23456789-01ab-cdef-0123-456789abcdef", + "error": "Failed to rollback updates for provider. Manual review required: ConditionalCheckFailedException" + } + ] +} +``` + +These require technical investigation to determine the cause. 
+ +#### Options for Skipped or Failed Providers + +For providers requiring manual review, you have three options: + +1. **Do Nothing**: If the subsequent updates are valid, the provider's current state is correct +2. **Manual Database Edit**: For complex cases, coordinate with stakeholders to manually adjust records and document manual edits made. +3. **Re-upload Data**: Have the state re-upload correct data for these specific providers through the normal upload process (often the simplest option) + +## Technical Details + +### How the System Identifies Affected Providers + +The system uses the `licenseUploadDateGSI` Global Secondary Index to efficiently query for all license records uploaded during the specified time window. This index is structured as: + +- **Partition Key**: `C#{compact}#J#{jurisdiction}#D#{year-month}` +- **Sort Key**: `TIME#{epoch}#LT#{license_type}#PID#{provider_id}` + +The system queries each month in the time range and collects unique provider IDs. + +### Event Publishing + +For each successfully reverted provider, the system publishes events to the EventBridge event bus: + +- `license.reverted` events for each reverted license +- `privilege.reverted` events for each reactivated privilege + +These events include: +- The rollback reason +- Time window information +- Revision IDs for tracking diff --git a/backend/compact-connect/disaster_recovery/README.md b/backend/compact-connect/disaster_recovery/README.md index 99e7db4a7..e09dfbea0 100644 --- a/backend/compact-connect/disaster_recovery/README.md +++ b/backend/compact-connect/disaster_recovery/README.md @@ -1,232 +1,16 @@ # DynamoDB Disaster Recovery System -## Overview +## 🚨 IMPORTANT: Choose the Right Recovery Tool -The Disaster Recovery (DR) system provides automated recovery capabilities for critical DynamoDB tables in the CompactConnect system. 
This system allows administrators to perform Point-in-Time Recovery (PITR) operations when tables become corrupted or require rollback to a previous state. +This repository contains TWO DIFFERENT recovery systems for different scenarios: -**⚠️ WARNING: This system performs a HARD RESET of the target table, permanently deleting all current data before restoring from the specified timestamp.** +### 1. **License Upload Rollback** +Use when you need to revert **specific license uploads** from **one jurisdiction** within a **time window**. -## When to Use +See: [LICENSE_UPLOAD_ROLLBACK.md](./LICENSE_UPLOAD_ROLLBACK.md) -This Disaster Recovery process should only be run in the event that the system experiences an event that causes -system-wide failures, such as the following scenarios: +### 2. **Full System Disaster Recovery** +Use when you need to recover **entire DynamoDB tables** affecting **ALL compacts and jurisdictions**. -1. **Data Corruption**: When a table contains corrupted or invalid data that cannot be fixed through normal operations -2. **Accidental Data Loss**: When critical data has been accidentally deleted or modified -3. **Failed Deployments**: When a deployment has caused data integrity issues -4. **Security Incidents**: When unauthorized modifications require rolling back to a clean state -5. **System-wide Issues**: When multiple tables need to be restored to a consistent point in time +See: [FULL_TABLE_RECOVERY.md](./FULL_TABLE_RECOVERY.md) -## Architecture - -### Two-Phase Recovery Process -DynamoDB PITR cannot directly restore data into your production database. Instead, it creates a new table with data matching the exact values you had in your production database at the specified timestamp. You as the owner of the database must decide what to do with that data from that point in time. 
For the purposes of disaster recovery rollback, we have determined to get the data into the production table by performing a 'hard reset', meaning **all the current data in the production table is deleted**, then we copy over the data from the temporary table into the production table. This process includes the following step functions. - -1. **RestoreDynamoDbTable Step Function** (Parent) - - Creates a backup of the current table for post-incident analysis - - Restores a temporary table from the specified PITR timestamp - - Invokes the SyncTableData Step Function - -2. **SyncTableData Step Function** (Child) - - **Delete Phase**: Removes all records from the production table - - **Copy Phase**: Copies all records from the temporary table to the production table - -Once this process is complete, the data in the target table will be restored with the data from the specified point in time. - -### Per-Table Isolation - -Each DynamoDB table has its own dedicated pair of Step Functions: - -- `DRRestoreDynamoDbTable{TableName}StateMachine` -- `{TableName}DRSyncTableDataStateMachine` - -This design allows for: -- **Targeted Recovery**: Restore only the affected table(s) -- **Granular Permissions**: Each Step Function has minimal, table-specific permissions - -## Supported Tables - -The following tables are configured for disaster recovery: - -| Table Name | Step Function Prefix | Purpose | Recovery Notes | -|------------|---------------------|---------|----------------| -| TransactionHistoryTable | `TransactionHistoryTable` | transaction data from authorize.net | Can be rolled back independently. After DR rollback, run the Transaction History Processing Workflow Step Function for each compact for every day where data was lost to restore all transaction data from Authorize.net accounts. The Transaction History Processing Workflow step functions are idempotent. They can be run multiple times without producing duplicate transaction items in the table. 
| -| ProviderTable | `ProviderTable` | Provider information and GSIs | **Dependent on SSN table** - Can be rolled back without updating SSN table since SSN table does not have a dependency on the provider table. **⚠️ WARNING**: If SSN table needs rollback, the provider table will likely need to be restored to same point in time as SSN table. Otherwise new provider IDs may be generated for existing SSNs causing data inconsistency/orphaned providers that won't receive license updates. After DR rollback, consider that the transaction history table will have a list of all privileges purchased as recorded in Authorize.net, and can be used as a data source for repopulating any privilege records that may have been lost as a result of the rollback.| -| CompactConfigurationTable | `CompactConfigurationTable` | System configuration data | Can be rolled back independently of other tables. Contains configuration set by compact and state admins. Admins may need to reset configurations that were lost as a result of the rollback. | -| DataEventTable | `DataEventTable` | License data events | Used for downstream processing events triggered by Event Bridge event bus. In the event of recovery, many of these events can likely be restored by replaying events placed on the event bus. See https://docs.aws.amazon.com/eventbridge/latest/userguide/eb-archive.html | -| UsersTable | `UsersTable` | Staff user permissions and account data | Can be rolled back independently. Contains staff user permissions and account information. Admins may need to re-invite new users or reset permissions that were lost as a result of the rollback. | - -> **Note**: The SSN table is excluded due to additional security requirements and will be handled in a future implementation. - -## Running the Disaster Recovery Workflow - -## Pre-Execution Checklist - -1. ✅ **Verify Impact**: Confirm which applications/users will be affected -2. ✅ **Communication**: Notify stakeholders of the planned recovery -3. 
✅ **Timestamp Selection**: Determine the UTC timestamp to restore to (must be within 35 days) -4. ✅ **Access Verification**: Confirm you have necessary permissions (Currently only AWS account admins can trigger a DR) - -### Step 1: Start Recovery Mode - -Before executing the DR Step Function, you must throttle all Lambda functions to prevent other data operations from occurring while attempting to roll any databases back. There is a script provided to perform this action: - -```bash -# Navigate to the disaster_recovery directory -cd backend/compact-connect/disaster_recovery - -# Start recovery mode for the environment (replace "Prod" with your target environment) -python start_recovery_mode.py --environment Prod -``` - -This will put the system into recovery mode by: -- Setting reserved concurrency to 0 for all environment Lambda functions, so they can't be invoked -- Leaving Disaster Recovery functions operational -- **Important**: If any functions failed to throttle, you may rerun the script or manually check their reserved concurrency settings if needed. The script is idempotent and can be run multiple times. - -### Step 2: Execute Disaster Recovery Step Function For Specific Tables -#### Prerequisites -- Identify the exact table name from the DynamoDB console (needed for `tableNameRecoveryConfirmation`) -- Verify the PITR timestamp is correct -- Create a unique incident ID for tracking (see [Execution Request Parameter Details](#execution-request-parameter-details)) - -When you are ready to perform a rollback, find the step function for the specific table you need to rollback (`DRRestoreDynamoDbTable{TableName}StateMachine`) and start an execution with the following input (replace placeholders with your values) - -```json -{ - "incidentId": "", - "pitrBackupTime": "", - "tableNameRecoveryConfirmation": "
" -} -``` - -#### Execution Request Parameter Details - -- **`incidentId`** (required) - - Purpose: Unique identifier for tracking this recovery operation - - Format: String (80 chars or less, allows alphanumeric and hyphens) - - Example: `"incident-2025-001"`, `"corruption-fix-20250115"` - - Used in: Backup names, restored table names, execution tracking - -- **`pitrBackupTime`** (required) - - Purpose: The timestamp to restore the table to - - Format: UTC datetime string - - Example: `"2030-01-15T12:39:46Z"` - - Constraints: Must be within the PITR retention window (35 days) - -- **`tableNameRecoveryConfirmation`** (required) - - Purpose: Security guard rail to prevent accidental execution - - Format: Exact table name being recovered (you can copy this from the DynamoDB console) - - Example: `"Prod-PersistentStack-DataEventTable00A96798-C6VX9JVDOYGN"` - - Validation: Must match the actual destination table name - -example: -```json -{ - "incidentId": "transaction-corruption-20250115", - "pitrBackupTime": "2025-01-15T09:00:00Z", - "tableNameRecoveryConfirmation": "Prod-PersistentStack-TransactionHistoryTable00A96798-C6VX9JVDOYGN" -} -``` - -#### Running Step Functions from AWS Console - -1. Navigate to Step Functions in the AWS Console -2. Find the appropriate Step Function(s) for the table(s) you need to recover (e.g., `DRRestoreDynamoDbTableTransactionHistoryTableStateMachine`) -3. For each step function you need to run, Click "Start Execution" -4. Enter the JSON payload in the input field -5. Click "Start Execution" and wait for completion (multiple Step functions can be run concurrently if you are restoring multiple tables) - -### Step 3: End Recovery Mode - -**⚠️CRITICAL**: Only proceed after ALL recovery Step Functions you have run have completed successfully. 
 - -After the DR Step Function completes successfully for each table you need to restore, end the recovery mode to restore normal operations: - -```bash -# End recovery mode for the environment -python end_recovery_mode.py --environment Prod -``` - -This will: -- Remove reserved concurrency throttling from all Lambda functions -- Restore normal application operations -- Complete the disaster recovery process -- **Important**: If any functions failed to unthrottle, you may rerun the script or manually check their reserved concurrency settings if needed. The script is idempotent and can be run multiple times. - -### Post-Execution - -1. **Verify Recovery**: Confirm data integrity and completeness -2. **Application Testing**: Test critical application functions -3. **Documentation**: Update incident documentation with recovery details -4. **Cleanup Review**: Clean up temporary resources after post-incident analysis. - -### Operational Constraints - -- **Data Loss**: All data newer than the PITR timestamp will be permanently lost. The backup snapshot may be restored post-recovery to determine which records can potentially be recovered. -- **Dependencies**: Related tables may need coordinated restoration for consistency. - -## Monitoring and Troubleshooting -### Common Issues and Solutions - -#### Invalid table name -- **Cause**: `tableNameRecoveryConfirmation` doesn't match actual table name (this parameter is used to prevent accidental recovery on a database) -- **Solution**: Copy exact table name from DynamoDB console - -#### Restore timestamp out of range -- **Cause**: PITR timestamp is outside the 35-day retention window -- **Solution**: Choose a more recent timestamp within the retention period - -## Complete Table Deletion Recovery (Manual Backup Restoration) - -**⚠️ CRITICAL**: This section applies ONLY when a DynamoDB table has been completely deleted and PITR is not available. This requires manual intervention and cannot use the automated Step Functions. 
 - -### Recovery Steps -Depending on how the table was deleted, there may be a recent 'snapshot' backup in the DynamoDB console that you can recover from. If that snapshot is not available, the system performs daily backups of our tables and stores them in the AWS Backup service, which you can recover from. - -#### Step 1: Locate the Latest Backup - -##### Option A: DynamoDB Console -1. Navigate to DynamoDB Console → Backups -2. Find the most recent backup for the deleted table -3. Note the backup name and creation time - -##### Option B: AWS Backup Console -1. Navigate to AWS Backup Console → Backup Vaults -2. Find the most recent recovery point for the deleted table -3. **CRITICAL**: Note the "Original table name" from the recovery point details - -#### Step 2: Restore Table from Backup - -1. **From DynamoDB Console**: - - Go to DynamoDB → Backups - - Select the backup → "Restore" - - **CRITICAL Configuration**: - - **Table Name**: Must match EXACTLY the original deleted table name - - **Encryption**: Select "Customer managed key" - - **KMS Key**: Choose `-PersistentStack-shared-encryption-key` for non-ssn tables, `ssn-key` for the SSN table - - Example: `Prod-PersistentStack-shared-encryption-key` - - **Global Secondary Indexes (GSIs)**: Ensure ALL original GSIs are included in the restore by selecting 'Restore the entire table' - - Select 'Restore' - -2. **From AWS Backup Console**: - - Navigate to Recovery Points → Select the backup - - Click "Restore" - - **CRITICAL Configuration**: - - **New Table Name**: Use the EXACT "Original table name" from the recovery point - - **Encryption**: Choose an AWS KMS key -> `-PersistentStack-shared-encryption-key` for non-ssn tables, `ssn-key` for the SSN table - - **GSIs**: Verify all original GSIs are restored - - Select 'Restore Backup' - -#### Step 3: Verify Restoration - -1. 
**Table Configuration**: - - ✅ Table name matches exactly (including environment prefix and suffix) - - ✅ All Global Secondary Indexes are present - - ✅ Encryption is set to the correct KMS key - - ✅ Table status is "ACTIVE" - -2. **Data Verification**: - - Spot-check critical records - - Verify record counts are reasonable - - Verify application functionality with the restored table From 06fadd2851dfe9ca3faa0153113a7bc2ce61c90e Mon Sep 17 00:00:00 2001 From: Landon Shumway Date: Wed, 12 Nov 2025 12:42:00 -0600 Subject: [PATCH 51/81] Skip providers with orphaned update records --- .../handlers/rollback_license_upload.py | 55 ++++++++++++- .../function/test_rollback_license_upload.py | 82 +++++++++++++++++++ 2 files changed, 134 insertions(+), 3 deletions(-) diff --git a/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py b/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py index 8de09170f..214418618 100644 --- a/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py +++ b/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py @@ -342,7 +342,6 @@ def rollback_license_upload(event: dict, context: LambdaContext): # noqa: ARG00 logger.info('Rollback complete', providers_processed=providers_processed) # Write final results to S3 - # TODO - consider writing a CSV file with final values for ease of reference _write_results_to_s3(results_s3_key, existing_results) return { @@ -516,6 +515,51 @@ def _perform_transaction(transaction_items: list[dict], provider_id: str) -> Non raise +def _check_for_orphaned_update_records( + provider_records: ProviderUserRecords, +) -> IneligibleUpdate | None: + """ + Check if there are any license update records without associated top-level license records. 
+ + :param provider_records: The provider's records + :return: IneligibleUpdate if orphaned updates are found, None otherwise + """ + # Get all license update records + all_license_updates = provider_records.get_all_license_update_records() + + # Extract unique (jurisdiction, license_type) pairs from update records + license_keys_from_updates: set[tuple[str, str]] = set() + + for update in all_license_updates: + license_keys_from_updates.add((update.jurisdiction, update.licenseType)) + + # Check if each license key has a corresponding top-level license record + for license_jurisdiction, license_type in license_keys_from_updates: + # Try to find the license record + license_record = next( + ( + record + for record in provider_records.get_license_records() + if record.jurisdiction == license_jurisdiction and record.licenseType == license_type + ), + None, + ) + + if license_record is None: + # Found an orphaned update record + return IneligibleUpdate( + record_type='licenseUpdate', + type_of_update='Orphaned', + update_time=datetime.now().isoformat(), + license_type=license_type, + reason=f'License update record(s) exist for license in jurisdiction ' + f'{license_jurisdiction} with type {license_type}, but no corresponding top-level ' + f'license record was found. This indicates data inconsistency. 
Manual review required.', + ) + + return None + + def _build_and_execute_revert_transactions( provider_records: ProviderUserRecords, start_datetime: datetime, @@ -587,7 +631,12 @@ def add_delete(pk: str, sk: str, update_record: bool): else: primary_record_transaction_items.append(transaction_item) - # Step 1: Check provider updates - any after start_datetime make provider ineligible + # Step 1: Check for license update records without top-level license records + orphaned_update_check = _check_for_orphaned_update_records(provider_records) + if orphaned_update_check is not None: + ineligible_updates.append(orphaned_update_check) + + # Step 2: Check provider updates - any after start_datetime make provider ineligible provider_updates = provider_records.get_all_provider_update_records() for update in provider_updates: if update.dateOfUpdate >= start_datetime: @@ -602,7 +651,7 @@ def add_delete(pk: str, sk: str, update_record: bool): ) ) - # Step 2: Process each license record for the jurisdiction + # Step 3: Process each license record for the jurisdiction license_records = provider_records.get_license_records(filter_condition=lambda x: x.jurisdiction == jurisdiction) reverted_licenses_dict = [] diff --git a/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py b/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py index 2fef11782..a7e21eecb 100644 --- a/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py +++ b/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py @@ -1132,3 +1132,85 @@ def test_transaction_failure_is_logged_and_provider_marked_as_failed(self): # Verify: No providers were reverted or skipped self.assertEqual(0, len(results_data['revertedProviderSummaries'])) self.assertEqual(0, len(results_data['skippedProviderDetails'])) + + + def 
test_orphaned_license_updates_cause_provider_to_be_skipped(self): + """Test that orphaned license update records (without top-level license records) cause provider to be skipped.""" + from handlers.rollback_license_upload import rollback_license_upload + from uuid import uuid4 + + orphaned_provider_id = str(uuid4()) + + # Setup: License was uploaded and then updated during upload + # Create update record within upload window to simulate license deactivation + orphaned_license_update = self.test_data_generator.put_default_license_update_record_in_provider_table( + { + 'providerId': orphaned_provider_id, + 'compact': self.compact, + 'jurisdiction': self.license_jurisdiction, + 'updateType': self.update_categories.DEACTIVATION, + 'createDate': self.default_upload_datetime, + 'effectiveDate': self.default_upload_datetime, + 'updatedValues': { + # simulate accidentally changing the expiration to last year + 'dateOfExpiration': (self.default_upload_datetime - timedelta(days=365)).date(), + 'licenseStatus': 'inactive', + 'familyName': MOCK_UPDATED_FAMILY_NAME, + 'givenName': MOCK_UPDATED_GIVEN_NAME, + }, + } + ) + + # Verify update record exists before rollback + provider_records_before = self.config.data_client.get_provider_user_records( + compact=self.compact, + provider_id=orphaned_provider_id, + include_update_tier=UpdateTierEnum.TIER_THREE, + ) + licenses_before = provider_records_before.get_license_records() + self.assertEqual(len(licenses_before), 0, 'Should not have license record before rollback') + license_updates_before = provider_records_before.get_all_license_update_records() + self.assertEqual(len(license_updates_before), 1, 'Should have orphaned update record before rollback') + + # Execute: Perform rollback + event = self._generate_test_event() + + result = rollback_license_upload(event, Mock()) + + # Assert: Rollback completed with provider skipped + self.assertEqual(result['rollbackStatus'], 'COMPLETE') + self.assertEqual(result['providersSkipped'], 1, 
'Provider with orphaned updates should be skipped') + self.assertEqual(result['providersReverted'], 0, 'No providers should be reverted') + self.assertEqual(result['providersFailed'], 0, 'No providers should have failed') + + # Verify S3 results contain the orphaned update details + s3_key = f'{MOCK_EXECUTION_NAME}/results.json' + s3_obj = self.config.s3_client.get_object(Bucket=self.config.rollback_results_bucket_name, Key=s3_key) + results_data = json.loads(s3_obj['Body'].read().decode('utf-8')) + + # Verify the structure of the results + expected_reason = ( + f'License or privilege update records exist for license in jurisdiction ' + f'{self.license_jurisdiction} with type {orphaned_license_update.licenseType}, ' + f'but no corresponding top-level license record was found. ' + f'This indicates data inconsistency. Manual review required.' + ) + + self.assertEqual(1, len(results_data['skippedProviderDetails'])) + skipped_detail = results_data['skippedProviderDetails'][0] + + self.assertEqual(orphaned_provider_id, skipped_detail['provider_id']) + self.assertIn('Manual review required', skipped_detail['reason']) + + # Check ineligible updates details + self.assertEqual(1, len(skipped_detail['ineligible_updates'])) + ineligible_update = skipped_detail['ineligible_updates'][0] + + self.assertEqual('licenseUpdate', ineligible_update['record_type']) + self.assertEqual('Orphaned', ineligible_update['type_of_update']) + self.assertEqual(orphaned_license_update.licenseType, ineligible_update['license_type']) + self.assertEqual(expected_reason, ineligible_update['reason']) + + # Verify no providers were reverted or failed + self.assertEqual(0, len(results_data['revertedProviderSummaries'])) + self.assertEqual(0, len(results_data['failedProviderDetails'])) From c355666f662684deffab9cf8e929982c85bddf08 Mon Sep 17 00:00:00 2001 From: Landon Shumway Date: Wed, 12 Nov 2025 13:24:54 -0600 Subject: [PATCH 52/81] fix test assertion --- 
.../tests/function/test_rollback_license_upload.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py b/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py index a7e21eecb..8689484ce 100644 --- a/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py +++ b/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py @@ -1190,7 +1190,7 @@ def test_orphaned_license_updates_cause_provider_to_be_skipped(self): # Verify the structure of the results expected_reason = ( - f'License or privilege update records exist for license in jurisdiction ' + f'License update record(s) exist for license in jurisdiction ' f'{self.license_jurisdiction} with type {orphaned_license_update.licenseType}, ' f'but no corresponding top-level license record was found. ' f'This indicates data inconsistency. Manual review required.' 
From 3884b0450a4295b61bd28bfe46e5b6168f74e420 Mon Sep 17 00:00:00 2001 From: Landon Shumway Date: Wed, 12 Nov 2025 14:53:15 -0600 Subject: [PATCH 53/81] Linter fixes --- .../data_model/provider_record_util.py | 21 ++++++++ .../handlers/rollback_license_upload.py | 53 ++++++++++--------- .../function/test_rollback_license_upload.py | 40 +++++++------- .../function/test_migrate_update_sort_keys.py | 10 +++- .../disaster_recovery_stack/__init__.py | 3 +- .../license_upload_rollback_step_function.py | 3 +- .../rollback_license_upload_smoke_tests.py | 7 +-- 7 files changed, 84 insertions(+), 53 deletions(-) diff --git a/backend/compact-connect/lambdas/python/common/cc_common/data_model/provider_record_util.py b/backend/compact-connect/lambdas/python/common/cc_common/data_model/provider_record_util.py index 3cd45bd8d..998ce5397 100644 --- a/backend/compact-connect/lambdas/python/common/cc_common/data_model/provider_record_util.py +++ b/backend/compact-connect/lambdas/python/common/cc_common/data_model/provider_record_util.py @@ -500,6 +500,27 @@ def get_privilege_records( """ return [record for record in self._privilege_records if filter_condition is None or filter_condition(record)] + def get_privileges_associated_with_license( + self, + license_jurisdiction: str, + license_type_abbreviation: str, + filter_condition: Callable[[PrivilegeData], bool] | None = None, + ) -> list[PrivilegeData]: + """ + Get all privileges associated with a given license. + :param license_jurisdiction: The jurisdiction of the license. + :param license_type_abbreviation: The abbreviation of the license type. 
+ :param filter_condition: An optional filter to apply to the privilege records + :return: A list of privilege records associated with the license + """ + return [ + record + for record in self._privilege_records + if record.licenseJurisdiction == license_jurisdiction + and record.licenseTypeAbbreviation == license_type_abbreviation + and (filter_condition is None or filter_condition(record)) + ] + def get_license_records( self, filter_condition: Callable[[LicenseData], bool] | None = None, diff --git a/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py b/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py index 214418618..4107cbb5c 100644 --- a/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py +++ b/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py @@ -460,7 +460,7 @@ def _process_provider_rollback( # If provider was skipped due to ineligibility, return early if isinstance(result, ProviderSkippedDetails): return result - except Exception as e: + except Exception as e: # noqa BLE001 logger.error(f'Error processing provider rollback: {str(e)}', provider_id=provider_id, exc_info=True) return ProviderFailedDetails( provider_id=provider_id, @@ -482,9 +482,9 @@ def _extract_sk_from_transaction_item(transaction_item: dict) -> str | None: """ if 'Put' in transaction_item: return transaction_item['Put']['Item'].get('sk') - elif 'Delete' in transaction_item: + if 'Delete' in transaction_item: return transaction_item['Delete']['Key'].get('sk') - elif 'Update' in transaction_item: + if 'Update' in transaction_item: return transaction_item['Update']['Key'].get('sk') return None @@ -529,7 +529,7 @@ def _check_for_orphaned_update_records( # Extract unique (jurisdiction, license_type) pairs from update records license_keys_from_updates: set[tuple[str, str]] = set() - + for update in all_license_updates: 
license_keys_from_updates.add((update.jurisdiction, update.licenseType)) @@ -544,19 +544,19 @@ def _check_for_orphaned_update_records( ), None, ) - + if license_record is None: # Found an orphaned update record return IneligibleUpdate( record_type='licenseUpdate', type_of_update='Orphaned', - update_time=datetime.now().isoformat(), + update_time='N/A', license_type=license_type, reason=f'License update record(s) exist for license in jurisdiction ' f'{license_jurisdiction} with type {license_type}, but no corresponding top-level ' f'license record was found. This indicates data inconsistency. Manual review required.', ) - + return None @@ -657,24 +657,24 @@ def add_delete(pk: str, sk: str, update_record: bool): reverted_licenses_dict = [] for license_record in license_records: - privileges_associated_with_license = provider_records.get_privilege_records( - filter_condition=lambda x: x.licenseJurisdiction == jurisdiction - and x.licenseType == license_record.licenseType + privileges_associated_with_license = provider_records.get_privileges_associated_with_license( + license_jurisdiction=license_record.jurisdiction, + license_type_abbreviation=license_record.licenseTypeAbbreviation, ) - privilege_jurisdictions = [x.jurisdiction for x in privileges_associated_with_license] - # Get privilege updates for all privileges associated with this license - # that are after the start_datetime + privileges_associated_with_license_jurisdictions = [x.jurisdiction for x in privileges_associated_with_license] + # Get privilege updates for all privileges that are after the start_datetime privilege_updates = provider_records.get_all_privilege_update_records( - filter_condition=lambda x: x.jurisdiction in privilege_jurisdictions and x.dateOfUpdate >= start_datetime, + filter_condition=lambda x: x.createDate >= start_datetime, ) # Check privilege updates for eligibility for privilege_update in privilege_updates: - if ( + if privilege_update.jurisdiction in 
privileges_associated_with_license_jurisdictions and ( privilege_update.updateType != PRIVILEGE_LICENSE_DEACTIVATION_CATEGORY or privilege_update.createDate > end_datetime ): - # Non-license-deactivation privilege update or privilege update after end_datetime make provider ineligible + # Non-license-deactivation privilege update or privilege update + # after end_datetime make provider ineligible ineligible_updates.append( IneligibleUpdate( record_type='privilegeUpdate', @@ -736,19 +736,22 @@ def add_delete(pk: str, sk: str, update_record: bool): ) # if license record was created during the window, delete it and all update records after start_datetime + # unless a user has purchased privileges if ( license_record.firstUploadDate is not None and start_datetime <= license_record.firstUploadDate <= end_datetime ): - if privilege_jurisdictions: + if privileges_associated_with_license_jurisdictions: ineligible_updates.append( IneligibleUpdate( record_type='privilegeUpdate', type_of_update='Issuance', - update_time=datetime.now().isoformat(), + # We only need to show the issuance date of the first privilege associated with the license + # for the purposes of the report. + update_time=privileges_associated_with_license[0].dateOfIssuance.isoformat(), license_type=license_record.licenseType, - reason=f'Privileges issued in jurisdictions {privilege_jurisdictions} after license upload. ' - f'Manual review required.', + reason=f'Privileges issued in jurisdictions {privileges_associated_with_license_jurisdictions}' + ' after license upload. 
Manual review required.', ) ) # no privileges found, so we can delete the license record @@ -772,7 +775,8 @@ def add_delete(pk: str, sk: str, update_record: bool): update_type=update.updateType, ) else: - # If license record was not created during the window, check license updates for eligibility and build transactions + # If license record was not created during the window, + # check license updates for eligibility and build transactions license_updates_in_window = [] for license_update in license_updates_after_start: if ( @@ -872,7 +876,8 @@ def add_delete(pk: str, sk: str, update_record: bool): ) return ProviderSkippedDetails( provider_id=provider_id, - reason='Provider has updates that are either unrelated to license upload or occurred after rollback end time. Manual review required.', + reason='Provider has updates that are either unrelated to license upload or occurred after' + ' rollback end time. Manual review required.', ineligible_updates=ineligible_updates, ) @@ -962,7 +967,7 @@ def _publish_revert_events( revision_id=reverted_license.revision_id, event_batch_writer=event_writer, ) - except Exception as e: + except Exception as e: # noqa BLE001 # this event publishing is not business critical, so we log the error and move on logger.error( 'Unable to publish license revert event', @@ -992,7 +997,7 @@ def _publish_revert_events( revision_id=reverted_privilege.revision_id, event_batch_writer=event_writer, ) - except Exception as e: + except Exception as e: # noqa BLE001 # this event publishing is not business critical, so we log the error and move on logger.error( 'Unable to publish privilege revert event', diff --git a/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py b/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py index 8689484ce..d665891ca 100644 --- 
a/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py +++ b/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py @@ -68,7 +68,7 @@ def _add_provider_record(self, provider_id: str | None = None): provider_id = self.provider_id # add provider record to provider table - provider_data = self.test_data_generator.put_default_provider_record_in_provider_table( + return self.test_data_generator.put_default_provider_record_in_provider_table( { 'providerId': provider_id, 'compact': self.compact, @@ -77,8 +77,6 @@ def _add_provider_record(self, provider_id: str | None = None): } ) - return provider_data - # Helper methods for setting up test scenarios def _when_provider_had_license_created_from_upload(self): """ @@ -252,7 +250,7 @@ def _when_provider_had_license_update_after_upload(self, after_upload_datetime: after_upload_datetime = self.default_end_datetime + timedelta(hours=1) # Create a non-upload-related update (e.g., encumbrance) after the window - license_update = self.test_data_generator.put_default_license_update_record_in_provider_table( + return self.test_data_generator.put_default_license_update_record_in_provider_table( { 'providerId': self.provider_id, 'compact': self.compact, @@ -263,8 +261,6 @@ def _when_provider_had_license_update_after_upload(self, after_upload_datetime: } ) - return license_update - def _when_provider_top_level_record_needs_reverted(self, before_upload_datetime: datetime = None): """ Set up a scenario where the provider's top-level record needs to be reverted. 
@@ -353,7 +349,8 @@ def test_provider_top_level_record_reset_to_prior_values_when_upload_reverted(se self.assertEqual(old_provider.familyName, provider_record.familyName) def test_provider_top_level_record_deleted_when_license_created_during_bad_upload(self): - """Test that provider top-level record is deleted if the license record is also deleted when reverting upload.""" + """Test that provider top-level record is deleted if the license record + is also deleted when reverting upload.""" from handlers.rollback_license_upload import rollback_license_upload # Setup: @@ -370,7 +367,7 @@ def test_provider_top_level_record_deleted_when_license_created_during_bad_uploa self.assertEqual(1, result['providersReverted']) # Verify: All provider records have been deleted - with pytest.raises(CCNotFoundException) as exc_info: + with pytest.raises(CCNotFoundException): self.config.data_client.get_provider_user_records( compact=self.compact, provider_id=self.provider_id, @@ -483,7 +480,7 @@ def test_provider_license_updates_and_license_record_within_time_period_removed_ self.assertEqual(result['rollbackStatus'], 'COMPLETE') # Verify: All records within time window have been deleted - with pytest.raises(CCNotFoundException) as exec_info: + with pytest.raises(CCNotFoundException): self.config.data_client.get_provider_user_records( compact=self.compact, provider_id=self.provider_id, @@ -576,8 +573,8 @@ def test_rollback_validates_time_window_order(self): def test_rollback_validates_maximum_time_window(self): from handlers.rollback_license_upload import rollback_license_upload - start = datetime.now() - timedelta(days=8) # More than 7 days - end = datetime.now() + start = self.config.current_standard_datetime - timedelta(days=8) # More than 7 days + end = self.config.current_standard_datetime event = self._generate_test_event() event['startDateTime'] = start.isoformat() @@ -599,9 +596,7 @@ def _perform_rollback_and_get_s3_object(self): # Read object from S3 and verify its contents 
match what is expected s3_key = f'{MOCK_EXECUTION_NAME}/results.json' s3_obj = self.config.s3_client.get_object(Bucket=self.config.rollback_results_bucket_name, Key=s3_key) - results_data = json.loads(s3_obj['Body'].read().decode('utf-8')) - - return results_data + return json.loads(s3_obj['Body'].read().decode('utf-8')) # Tests for checking data written to S3 def test_expected_s3_object_stored_when_provider_license_record_reset_to_prior_values(self): @@ -1133,12 +1128,13 @@ def test_transaction_failure_is_logged_and_provider_marked_as_failed(self): self.assertEqual(0, len(results_data['revertedProviderSummaries'])) self.assertEqual(0, len(results_data['skippedProviderDetails'])) - def test_orphaned_license_updates_cause_provider_to_be_skipped(self): - """Test that orphaned license update records (without top-level license records) cause provider to be skipped.""" - from handlers.rollback_license_upload import rollback_license_upload + """Test that orphaned license update records (without top-level license records) + cause provider to be skipped.""" from uuid import uuid4 + from handlers.rollback_license_upload import rollback_license_upload + orphaned_provider_id = str(uuid4()) # Setup: License was uploaded and then updated during upload @@ -1195,22 +1191,22 @@ def test_orphaned_license_updates_cause_provider_to_be_skipped(self): f'but no corresponding top-level license record was found. ' f'This indicates data inconsistency. Manual review required.' 
) - + self.assertEqual(1, len(results_data['skippedProviderDetails'])) skipped_detail = results_data['skippedProviderDetails'][0] - + self.assertEqual(orphaned_provider_id, skipped_detail['provider_id']) self.assertIn('Manual review required', skipped_detail['reason']) - + # Check ineligible updates details self.assertEqual(1, len(skipped_detail['ineligible_updates'])) ineligible_update = skipped_detail['ineligible_updates'][0] - + self.assertEqual('licenseUpdate', ineligible_update['record_type']) self.assertEqual('Orphaned', ineligible_update['type_of_update']) self.assertEqual(orphaned_license_update.licenseType, ineligible_update['license_type']) self.assertEqual(expected_reason, ineligible_update['reason']) - + # Verify no providers were reverted or failed self.assertEqual(0, len(results_data['revertedProviderSummaries'])) self.assertEqual(0, len(results_data['failedProviderDetails'])) diff --git a/backend/compact-connect/lambdas/python/migration/tests/function/test_migrate_update_sort_keys.py b/backend/compact-connect/lambdas/python/migration/tests/function/test_migrate_update_sort_keys.py index 9d7e7f4bf..f09a685e2 100644 --- a/backend/compact-connect/lambdas/python/migration/tests/function/test_migrate_update_sort_keys.py +++ b/backend/compact-connect/lambdas/python/migration/tests/function/test_migrate_update_sort_keys.py @@ -88,7 +88,10 @@ def test_should_migrate_license_update_records_to_expected_pattern(self): self.assertIsNone(old_record_resp.get('Item')) # verify new record was created with expected sk - expected_sk = f'{MOCK_COMPACT}#UPDATE#3#license/{DEFAULT_LICENSE_JURISDICTION}/lpc/{DEFAULT_LICENSE_UPDATE_CREATE_DATE}/21554583eb71ccc5f8aa5988c8a50ac2' + expected_sk = ( + f'{MOCK_COMPACT}#UPDATE#3#license/{DEFAULT_LICENSE_JURISDICTION}/lpc' + f'/{DEFAULT_LICENSE_UPDATE_CREATE_DATE}/21554583eb71ccc5f8aa5988c8a50ac2' + ) new_record = self.config.provider_table.get_item(Key={'pk': serialized_old_record['pk'], 'sk': expected_sk})[ 'Item' ] @@ -127,7 
+130,10 @@ def test_should_migrate_privilege_update_records_to_expected_pattern(self): self.assertIsNone(old_record_resp.get('Item')) # verify new record was created with expected sk - expected_sk = f'{MOCK_COMPACT}#UPDATE#1#privilege/{DEFAULT_PRIVILEGE_JURISDICTION}/lpc/{mock_create_date}/399abde0989ad5e936920a3ba9f0944a' + expected_sk = ( + f'{MOCK_COMPACT}#UPDATE#1#privilege/{DEFAULT_PRIVILEGE_JURISDICTION}/lpc' + f'/{mock_create_date}/399abde0989ad5e936920a3ba9f0944a' + ) new_record = self.config.provider_table.get_item(Key={'pk': serialized_old_record['pk'], 'sk': expected_sk})[ 'Item' ] diff --git a/backend/compact-connect/stacks/disaster_recovery_stack/__init__.py b/backend/compact-connect/stacks/disaster_recovery_stack/__init__.py index 271cf14ce..f26b7ce19 100644 --- a/backend/compact-connect/stacks/disaster_recovery_stack/__init__.py +++ b/backend/compact-connect/stacks/disaster_recovery_stack/__init__.py @@ -84,7 +84,8 @@ def __init__( suppressions=[ { 'id': 'HIPAA.Security-S3BucketReplicationEnabled', - 'reason': 'This bucket is for generating one time results of the rollback workflow and is not intended to be replicated.', + 'reason': 'This bucket is for generating one time' + ' results of the rollback workflow and is not intended to be replicated.', }, ], ) diff --git a/backend/compact-connect/stacks/disaster_recovery_stack/license_upload_rollback_step_function.py b/backend/compact-connect/stacks/disaster_recovery_stack/license_upload_rollback_step_function.py index 11a72d12d..3bf7910db 100644 --- a/backend/compact-connect/stacks/disaster_recovery_stack/license_upload_rollback_step_function.py +++ b/backend/compact-connect/stacks/disaster_recovery_stack/license_upload_rollback_step_function.py @@ -77,7 +77,8 @@ def __init__( suppressions=[ { 'id': 'HIPAA.Security-CloudWatchLogGroupRetentionPeriod', - 'reason': 'This system will be used infrequently. 
We are deliberately retaining logs indefinitely here.', + 'reason': 'This system will be used infrequently.' + ' We are deliberately retaining logs indefinitely here.', }, ], ) diff --git a/backend/compact-connect/tests/smoke/rollback_license_upload_smoke_tests.py b/backend/compact-connect/tests/smoke/rollback_license_upload_smoke_tests.py index d80c3500d..747c43879 100644 --- a/backend/compact-connect/tests/smoke/rollback_license_upload_smoke_tests.py +++ b/backend/compact-connect/tests/smoke/rollback_license_upload_smoke_tests.py @@ -65,7 +65,8 @@ def upload_test_license_batch(auth_headers: dict, batch_start_index: int, batch_ # Upload the batch logger.info( - f'Uploading batch of {len(licenses_batch)} licenses (indices {batch_start_index}-{batch_start_index + batch_size - 1})' + f'Uploading batch of {len(licenses_batch)} licenses' + f' (indices {batch_start_index}-{batch_start_index + batch_size - 1})' ) post_response = requests.post( @@ -256,7 +257,7 @@ def wait_for_step_function_completion(execution_arn: str, max_wait_time: int = 3 """ sfn_client = boto3.client('stepfunctions') - logger.info(f'Waiting for step function to complete...') + logger.info('Waiting for step function to complete...') start_time = time.time() check_interval = 30 @@ -271,7 +272,7 @@ def wait_for_step_function_completion(execution_arn: str, max_wait_time: int = 3 elapsed = time.time() - start_time logger.info(f'Step function completed successfully after {elapsed:.1f}s') return status, output - elif status in ['FAILED', 'TIMED_OUT', 'ABORTED']: + if status in ['FAILED', 'TIMED_OUT', 'ABORTED']: raise SmokeTestFailureException( f'Step function execution failed with status: {status}. 
' f'Error: {response.get("error", "N/A")}, Cause: {response.get("cause", "N/A")}' From 6e92aac0bab1dc68772312a628846d899d2aba83 Mon Sep 17 00:00:00 2001 From: Landon Shumway Date: Thu, 13 Nov 2025 09:38:56 -0600 Subject: [PATCH 54/81] Simplify rollback algorithm/add test cases --- .../handlers/rollback_license_upload.py | 311 ++++++++---------- .../function/test_rollback_license_upload.py | 118 ++++++- 2 files changed, 253 insertions(+), 176 deletions(-) diff --git a/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py b/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py index 4107cbb5c..5bfb8f0d1 100644 --- a/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py +++ b/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py @@ -450,8 +450,8 @@ def _process_provider_rollback( # If ineligible updates are found, this will return a ProviderSkippedDetails result = _build_and_execute_revert_transactions( provider_records=provider_records, - start_datetime=start_datetime, - end_datetime=end_datetime, + upload_window_start_datetime=start_datetime, + upload_window_end_datetime=end_datetime, compact=compact, jurisdiction=jurisdiction, provider_id=provider_id, @@ -562,8 +562,8 @@ def _check_for_orphaned_update_records( def _build_and_execute_revert_transactions( provider_records: ProviderUserRecords, - start_datetime: datetime, - end_datetime: datetime, + upload_window_start_datetime: datetime, + upload_window_end_datetime: datetime, compact: str, jurisdiction: str, provider_id: str, @@ -639,7 +639,7 @@ def add_delete(pk: str, sk: str, update_record: bool): # Step 2: Check provider updates - any after start_datetime make provider ineligible provider_updates = provider_records.get_all_provider_update_records() for update in provider_updates: - if update.dateOfUpdate >= start_datetime: + if update.dateOfUpdate >= 
upload_window_start_datetime: ineligible_updates.append( IneligibleUpdate( record_type='providerUpdate', @@ -661,100 +661,139 @@ def add_delete(pk: str, sk: str, update_record: bool): license_jurisdiction=license_record.jurisdiction, license_type_abbreviation=license_record.licenseTypeAbbreviation, ) - privileges_associated_with_license_jurisdictions = [x.jurisdiction for x in privileges_associated_with_license] - # Get privilege updates for all privileges that are after the start_datetime - privilege_updates = provider_records.get_all_privilege_update_records( - filter_condition=lambda x: x.createDate >= start_datetime, - ) - # Check privilege updates for eligibility - for privilege_update in privilege_updates: - if privilege_update.jurisdiction in privileges_associated_with_license_jurisdictions and ( - privilege_update.updateType != PRIVILEGE_LICENSE_DEACTIVATION_CATEGORY - or privilege_update.createDate > end_datetime - ): - # Non-license-deactivation privilege update or privilege update - # after end_datetime make provider ineligible + # Check if any privileges were issued for this license since the upload start date + for privilege in privileges_associated_with_license: + if privilege.dateOfIssuance >= upload_window_start_datetime: ineligible_updates.append( IneligibleUpdate( record_type='privilegeUpdate', - type_of_update=privilege_update.updateType, - update_time=privilege_update.dateOfUpdate.isoformat(), - license_type=privilege_update.licenseType, - # include privilege jurisdiction in reason - reason=f'Privilege in jurisdiction {privilege_update.jurisdiction} was updated with a change ' - f'unrelated to license upload or the update occurred after rollback end time. ' - f'Manual review required.', + type_of_update='Issuance', + update_time=privilege.dateOfIssuance.isoformat(), + license_type=license_record.licenseType, + reason=f"Privilege in jurisdiction '{privilege.jurisdiction}' issued after license upload. 
" + "Manual review required.", ) ) - elif start_datetime <= privilege_update.createDate <= end_datetime: - # License deactivation within window - mark for deletion - serialized_privilege_update = privilege_update.serialize_to_database_record() - add_delete(serialized_privilege_update['pk'], serialized_privilege_update['sk'], update_record=True) - updates_deleted_sks.append(serialized_privilege_update['sk']) - logger.info('Will delete privilege deactivation update record if provider is eligible for rollback') - - # Reactivate the privilege - privilege_record = provider_records.get_specific_privilege_record( - jurisdiction=privilege_update.jurisdiction, - license_abbreviation=license_record.licenseTypeAbbreviation, - ) - if privilege_record: - logger.info( - 'privilege record found associated with deactivation, reactivating privilege', - provider_id=provider_id, - privilege_jurisdiction=privilege_record.jurisdiction, - license_type=privilege_record.licenseType, + # Check updates associated with this privilege that are after the start_datetime + privilege_updates_after_start_time = provider_records.get_update_records_for_privilege( + jurisdiction=privilege.jurisdiction, + license_type=privilege.licenseType, + filter_condition=lambda x: x.createDate >= upload_window_start_datetime, + ) + + # Check privilege updates for eligibility + for privilege_update in privilege_updates_after_start_time: + if privilege_update.updateType != PRIVILEGE_LICENSE_DEACTIVATION_CATEGORY: + # Non-license-deactivation privilege update makes provider ineligible for rollback + ineligible_updates.append( + IneligibleUpdate( + record_type='privilegeUpdate', + type_of_update=privilege_update.updateType, + update_time=privilege_update.dateOfUpdate.isoformat(), + license_type=privilege_update.licenseType, + # include privilege jurisdiction in reason + reason=f"Privilege in jurisdiction '{privilege_update.jurisdiction}' was updated with a change " + "unrelated to license upload. 
Manual review required.", + ) ) - # Remove the licenseDeactivatedStatus field to reactivate using UPDATE operation - serialized_privilege = privilege_record.serialize_to_database_record() - primary_record_transaction_items.append( - { - 'Update': { - 'TableName': table_name, - 'Key': {'pk': serialized_privilege['pk'], 'sk': serialized_privilege['sk']}, - 'UpdateExpression': 'REMOVE licenseDeactivatedStatus', - } - } + elif privilege_update.createDate > upload_window_end_datetime: + # privilege update after upload window makes provider ineligible + ineligible_updates.append( + IneligibleUpdate( + record_type='privilegeUpdate', + type_of_update=privilege_update.updateType, + update_time=privilege_update.dateOfUpdate.isoformat(), + license_type=privilege_update.licenseType, + # include privilege jurisdiction in reason + reason=f"Privilege in jurisdiction '{privilege_update.jurisdiction}' was deactivated " + "after rollback end time. Manual review required.", + ) ) - logger.info('Will reactivate privilege record if provider is eligible for rollback') - - reverted_privileges.append( - RevertedPrivilege( - jurisdiction=privilege_record.jurisdiction, + else: + # License deactivation within window cause privilege deactivation - revert the deactivation + serialized_privilege_update = privilege_update.serialize_to_database_record() + add_delete(serialized_privilege_update['pk'], serialized_privilege_update['sk'], update_record=True) + updates_deleted_sks.append(serialized_privilege_update['sk']) + logger.info('Will delete privilege deactivation update record if provider is eligible for rollback') + + # Reactivate the privilege + privilege_record = provider_records.get_specific_privilege_record( + jurisdiction=privilege_update.jurisdiction, + license_abbreviation=license_record.licenseTypeAbbreviation, + ) + if privilege_record: + logger.info( + 'privilege record found associated with deactivation, reactivating privilege', + provider_id=provider_id, + 
privilege_jurisdiction=privilege_record.jurisdiction, license_type=privilege_record.licenseType, - revision_id=uuid4(), - action='REACTIVATED', ) - ) + # Remove the licenseDeactivatedStatus field to reactivate using UPDATE operation + serialized_privilege = privilege_record.serialize_to_database_record() + primary_record_transaction_items.append( + { + 'Update': { + 'TableName': table_name, + 'Key': {'pk': serialized_privilege['pk'], 'sk': serialized_privilege['sk']}, + 'UpdateExpression': 'REMOVE licenseDeactivatedStatus', + } + } + ) + logger.info('Will reactivate privilege record if provider is eligible for rollback') + + reverted_privileges.append( + RevertedPrivilege( + jurisdiction=privilege_record.jurisdiction, + license_type=privilege_record.licenseType, + revision_id=uuid4(), + action='REACTIVATED', + ) + ) + # Get license updates for this license after start_datetime license_updates_after_start = provider_records.get_update_records_for_license( jurisdiction=license_record.jurisdiction, license_type=license_record.licenseType, - filter_condition=lambda x: x.createDate >= start_datetime, + filter_condition=lambda x: x.createDate >= upload_window_start_datetime, ) - # if license record was created during the window, delete it and all update records after start_datetime - # unless a user has purchased privileges - if ( - license_record.firstUploadDate is not None - and start_datetime <= license_record.firstUploadDate <= end_datetime - ): - if privileges_associated_with_license_jurisdictions: + # check license updates for eligibility + license_updates_in_window = [] + for license_update in license_updates_after_start: + if ( + license_update.updateType not in LICENSE_UPLOAD_UPDATE_CATEGORIES + or license_update.createDate > upload_window_end_datetime + ): + # Non-upload-related license updates make provider ineligible ineligible_updates.append( IneligibleUpdate( - record_type='privilegeUpdate', - type_of_update='Issuance', - # We only need to show the issuance 
date of the first privilege associated with the license - # for the purposes of the report. - update_time=privileges_associated_with_license[0].dateOfIssuance.isoformat(), - license_type=license_record.licenseType, - reason=f'Privileges issued in jurisdictions {privileges_associated_with_license_jurisdictions}' - ' after license upload. Manual review required.', + record_type='licenseUpdate', + type_of_update=license_update.updateType, + update_time=license_update.createDate.isoformat(), + license_type=license_update.licenseType, + reason='License was updated with a change unrelated to license upload or the update ' + 'occurred after rollback end time. Manual review required.', ) ) - # no privileges found, so we can delete the license record + else: + # Upload-related update within window - mark for deletion + license_updates_in_window.append(license_update) + serialized_license_update = license_update.serialize_to_database_record() + add_delete(serialized_license_update['pk'], serialized_license_update['sk'], update_record=True) + updates_deleted_sks.append(serialized_license_update['sk']) + logger.info( + 'Will delete license update record if provider is eligible for rollback', + update_type=license_update.updateType, + license_type=license_update.licenseType, + ) + + # if license record was created during the window, delete it + if ( + license_record.firstUploadDate is not None + and upload_window_start_datetime <= license_record.firstUploadDate <= upload_window_end_datetime + ): serialized_license_record = license_record.serialize_to_database_record() add_delete(serialized_license_record['pk'], serialized_license_record['sk'], update_record=False) logger.info('Will delete license record (created during upload) if provider is eligible for rollback') @@ -766,106 +805,34 @@ def add_delete(pk: str, sk: str, update_record: bool): action='DELETE', ) ) - for update in license_updates_after_start: - serialized_license_update = update.serialize_to_database_record() - 
add_delete(serialized_license_update['pk'], serialized_license_update['sk'], update_record=True) - updates_deleted_sks.append(serialized_license_update['sk']) - logger.info( - 'Will delete license update record if provider is eligible for rollback', - update_type=update.updateType, - ) + # license was not first uploaded during the upload window, revert it to last previous state before the upload else: - # If license record was not created during the window, - # check license updates for eligibility and build transactions - license_updates_in_window = [] - for license_update in license_updates_after_start: - if ( - license_update.updateType not in LICENSE_UPLOAD_UPDATE_CATEGORIES - or license_update.createDate > end_datetime - ): - # Non-upload-related license updates make provider ineligible - ineligible_updates.append( - IneligibleUpdate( - record_type='licenseUpdate', - type_of_update=license_update.updateType, - update_time=license_update.createDate.isoformat(), - license_type=license_update.licenseType, - reason='License was updated with a change unrelated to license upload or the update ' - 'occurred after rollback end time. 
Manual review required.', - ) - ) - elif start_datetime <= license_update.createDate <= end_datetime: - # Upload-related update within window - mark for deletion - license_updates_in_window.append(license_update) - serialized_license_update = license_update.serialize_to_database_record() - add_delete(serialized_license_update['pk'], serialized_license_update['sk'], update_record=True) - updates_deleted_sks.append(serialized_license_update['sk']) - logger.info( - 'Will delete license update record if provider is eligible for rollback', - update_type=license_update.updateType, - license_type=license_update.licenseType, - ) + # Find the earliest update in the window to get the previous state + license_updates_in_window.sort(key=lambda x: x.createDate) + earliest_update_in_window = license_updates_in_window[0] - # If there were updates in the window and no updates after end_datetime, revert the license - # to the previous values of the earliest update in the window - if license_updates_in_window: - updates_after_window = [u for u in license_updates_after_start if u.createDate > end_datetime] - - if not updates_after_window: - # Find the earliest update in the window to get the previous state - license_updates_in_window.sort(key=lambda x: x.createDate) - earliest_update_in_window = license_updates_in_window[0] - - # Check if license was created during the window (uploadDate within window) - if ( - license_record.firstUploadDate is not None - and start_datetime <= license_record.firstUploadDate <= end_datetime - ): - # License created during upload - delete it - serialized_license_record = license_record.serialize_to_database_record() - add_delete( - serialized_license_record['pk'], serialized_license_record['sk'], update_record=False - ) - logger.info('Will delete license record (created during upload)') - - reverted_licenses.append( - RevertedLicense( - jurisdiction=license_record.jurisdiction, - license_type=license_record.licenseType, - revision_id=uuid4(), - 
action='DELETE', - ) - ) - else: - # License existed before - revert to previous state - reverted_license_data = license_record.to_dict() - reverted_license_data.update(earliest_update_in_window.previous) + # License existed before - revert to previous state + reverted_license_data = license_record.to_dict() + reverted_license_data.update(earliest_update_in_window.previous) - reverted_license = LicenseData.create_new(reverted_license_data) - serialized_reverted_license = reverted_license.serialize_to_database_record() + reverted_license = LicenseData.create_new(reverted_license_data) + serialized_reverted_license = reverted_license.serialize_to_database_record() - add_put(serialized_reverted_license, update_record=True) - logger.info('Reverting license record to pre-upload state') + add_put(serialized_reverted_license, update_record=True) + logger.info('Reverting license record to pre-upload state') - # Track for provider record regeneration - license_schema = LicenseRecordSchema() - reverted_licenses_dict.append(license_schema.load(serialized_reverted_license)) + # Track for provider record regeneration + license_schema = LicenseRecordSchema() + reverted_licenses_dict.append(license_schema.load(serialized_reverted_license)) - reverted_licenses.append( - RevertedLicense( - jurisdiction=license_record.jurisdiction, - license_type=license_record.licenseType, - revision_id=uuid4(), - action='REVERT', - ) - ) - else: - # Keep current license state if there were updates after the window - logger.info('Updates detected after rollback end time - will keep license record as-is.') - reverted_licenses_dict.append(license_record.to_dict()) - else: - # No updates in window, keep license as-is - reverted_licenses_dict.append(license_record.to_dict()) + reverted_licenses.append( + RevertedLicense( + jurisdiction=license_record.jurisdiction, + license_type=license_record.licenseType, + revision_id=uuid4(), + action='REVERT', + ) + ) # Check if provider is ineligible for rollback 
if ineligible_updates: diff --git a/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py b/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py index d665891ca..c03b09e34 100644 --- a/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py +++ b/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py @@ -208,6 +208,25 @@ def _when_provider_had_privilege_deactivated_from_upload(self, upload_datetime: return privilege, privilege_update + def _when_provider_had_privilege_issued_during_upload(self): + """ + Set up a scenario where a provider had a non-upload-related privilege update AFTER the upload window. + This makes them ineligible for automatic rollback. + Returns the privilege and its update record. + """ + + privilege = self.test_data_generator.put_default_privilege_record_in_provider_table( + { + 'providerId': self.provider_id, + 'compact': self.compact, + 'jurisdiction': 'ne', + 'licenseJurisdiction': self.license_jurisdiction, + 'dateOfIssuance': self.default_upload_datetime + } + ) + + return privilege + def _when_provider_had_privilege_update_after_upload(self, after_upload_datetime: datetime = None): """ Set up a scenario where a provider had a non-upload-related privilege update AFTER the upload window. 
@@ -221,7 +240,8 @@ def _when_provider_had_privilege_update_after_upload(self, after_upload_datetime { 'providerId': self.provider_id, 'compact': self.compact, - 'jurisdiction': self.license_jurisdiction, + 'jurisdiction': 'ne', + 'licenseJurisdiction': self.license_jurisdiction, } ) @@ -230,7 +250,7 @@ def _when_provider_had_privilege_update_after_upload(self, after_upload_datetime { 'providerId': self.provider_id, 'compact': self.compact, - 'jurisdiction': self.license_jurisdiction, + 'jurisdiction': 'ne', 'licenseType': privilege.licenseType, 'updateType': self.update_categories.RENEWAL, # Not LICENSE_DEACTIVATION 'createDate': after_upload_datetime, @@ -754,6 +774,43 @@ def test_expected_s3_object_stored_when_provider_skipped_due_to_extra_license_up results_data, ) + def test_expected_s3_object_stored_when_provider_skipped_due_to_privilege_issuance(self): + # Setup: Provider had privilege update after upload window + self._when_provider_had_license_updated_from_upload() + privilege = self._when_provider_had_privilege_issued_during_upload() + + results_data = self._perform_rollback_and_get_s3_object() + + # Verify the structure of the results + expected_reason_message = ( + f"Privilege in jurisdiction '{privilege.jurisdiction}' issued after license upload. Manual review required." + ) + self.assertEqual( + { + 'failedProviderDetails': [], + 'revertedProviderSummaries': [], + 'skippedProviderDetails': [ + { + 'ineligible_updates': [ + { + 'update_time': privilege.dateOfIssuance.isoformat(), + 'license_type': privilege.licenseType, + 'reason': expected_reason_message, + 'record_type': 'privilegeUpdate', + 'type_of_update': 'Issuance', + } + ], + 'provider_id': MOCK_PROVIDER_ID, + 'reason': 'Provider has updates that are either ' + 'unrelated to license upload or ' + 'occurred after rollback end time. 
' + 'Manual review required.', + } + ], + }, + results_data, + ) + def test_expected_s3_object_stored_when_provider_skipped_due_to_extra_privilege_updates(self): # Setup: Provider had privilege update after upload window self._when_provider_had_license_updated_from_upload() @@ -763,8 +820,8 @@ def test_expected_s3_object_stored_when_provider_skipped_due_to_extra_privilege_ # Verify the structure of the results expected_reason_message = ( - 'Privilege in jurisdiction oh was updated with a change unrelated to license upload or the update ' - 'occurred after rollback end time. Manual review required.' + "Privilege in jurisdiction 'ne' was updated with a change unrelated to license upload. " + "Manual review required." ) self.assertEqual( { @@ -1210,3 +1267,56 @@ def test_orphaned_license_updates_cause_provider_to_be_skipped(self): # Verify no providers were reverted or failed self.assertEqual(0, len(results_data['revertedProviderSummaries'])) self.assertEqual(0, len(results_data['failedProviderDetails'])) + + def test_provider_skipped_when_encumbrance_update_created_within_upload_window(self): + from handlers.rollback_license_upload import rollback_license_upload + + # Setup: License was created during upload window + self._when_provider_had_license_created_from_upload() + + # Create an encumbrance update that happens WITHIN the upload window + # but is NOT an upload-related update type + encumbrance_time = self.default_upload_datetime + timedelta(minutes=1) + self.test_data_generator.put_default_license_update_record_in_provider_table( + { + 'providerId': self.provider_id, + 'compact': self.compact, + 'jurisdiction': self.license_jurisdiction, + 'updateType': self.update_categories.ENCUMBRANCE, # Not an upload-related category + 'createDate': encumbrance_time, + 'effectiveDate': encumbrance_time, + 'updatedValues': { + 'encumberedStatus': 'encumbered', + }, + } + ) + + # Execute: Perform rollback + event = self._generate_test_event() + result = 
rollback_license_upload(event, Mock()) + + # Assert: Rollback completed but provider was skipped + self.assertEqual('COMPLETE', result['rollbackStatus']) + self.assertEqual(0, result['providersReverted']) + self.assertEqual(1, result['providersSkipped']) + + # Verify: License record and encumbrance update still exist (not rolled back) + provider_records = self.config.data_client.get_provider_user_records( + compact=self.compact, + provider_id=self.provider_id, + include_update_tier=UpdateTierEnum.TIER_THREE, + ) + licenses = provider_records.get_license_records() + self.assertEqual(len(licenses), 1, 'License should still exist') + license_updates = provider_records.get_all_license_update_records() + self.assertEqual(1, len(license_updates), 'Encumbrance update should still exist') + + # Verify S3 results contain skip details + s3_key = f'{MOCK_EXECUTION_NAME}/results.json' + s3_obj = self.config.s3_client.get_object(Bucket=self.config.rollback_results_bucket_name, Key=s3_key) + results_data = json.loads(s3_obj['Body'].read().decode('utf-8')) + + self.assertEqual(1, len(results_data['skippedProviderDetails'])) + skipped_detail = results_data['skippedProviderDetails'][0] + self.assertEqual(self.provider_id, skipped_detail['provider_id']) + self.assertIn('Manual review required', skipped_detail['reason']) From 8ed1ff0d9aa379b6c58a2a3161798392620c138c Mon Sep 17 00:00:00 2001 From: Landon Shumway Date: Thu, 13 Nov 2025 10:01:03 -0600 Subject: [PATCH 55/81] Refactor S3 bucket env name / set fixed key prefix --- .../lambdas/python/common/cc_common/config.py | 4 ++-- .../handlers/rollback_license_upload.py | 10 +++++----- .../disaster-recovery/tests/__init__.py | 2 +- .../tests/function/__init__.py | 2 +- .../function/test_rollback_license_upload.py | 20 +++++++++---------- .../python/migration/tests/__init__.py | 1 - .../license_upload_rollback_step_function.py | 2 +- 7 files changed, 20 insertions(+), 21 deletions(-) diff --git 
a/backend/compact-connect/lambdas/python/common/cc_common/config.py b/backend/compact-connect/lambdas/python/common/cc_common/config.py index de790b852..79fd69b75 100644 --- a/backend/compact-connect/lambdas/python/common/cc_common/config.py +++ b/backend/compact-connect/lambdas/python/common/cc_common/config.py @@ -188,8 +188,8 @@ def provider_user_bucket_name(self): return os.environ['PROVIDER_USER_BUCKET_NAME'] @property - def rollback_results_bucket_name(self): - return os.environ['ROLLBACK_RESULTS_BUCKET_NAME'] + def disaster_recovery_results_bucket_name(self): + return os.environ['DISASTER_RECOVERY_RESULTS_BUCKET_NAME'] @property def user_pool_id(self): diff --git a/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py b/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py index 5bfb8f0d1..331321013 100644 --- a/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py +++ b/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py @@ -249,7 +249,7 @@ def rollback_license_upload(event: dict, context: LambdaContext): # noqa: ARG00 ) # Initialize S3 client and bucket - results_s3_key = f'{execution_name}/results.json' + results_s3_key = f'licenseUploadRollbacks/{execution_name}/results.json' # Load existing results if this is a continuation existing_results = _load_results_from_s3(results_s3_key) @@ -350,7 +350,7 @@ def rollback_license_upload(event: dict, context: LambdaContext): # noqa: ARG00 'providersReverted': providers_reverted, 'providersSkipped': providers_skipped, 'providersFailed': providers_failed, - 'resultsS3Key': f's3://{config.rollback_results_bucket_name}/{results_s3_key}', + 'resultsS3Key': f's3://{config.disaster_recovery_results_bucket_name}/{results_s3_key}', } except ClientError as e: @@ -983,7 +983,7 @@ def _publish_revert_events( def _load_results_from_s3(key: str) -> RollbackResults: """Load 
existing results from S3.""" try: - response = config.s3_client.get_object(Bucket=config.rollback_results_bucket_name, Key=key) + response = config.s3_client.get_object(Bucket=config.disaster_recovery_results_bucket_name, Key=key) data = json.loads(response['Body'].read().decode('utf-8')) return RollbackResults.from_dict(data) except config.s3_client.exceptions.NoSuchKey: @@ -998,12 +998,12 @@ def _write_results_to_s3(key: str, results: RollbackResults): """Write results to S3 with server-side encryption.""" try: config.s3_client.put_object( - Bucket=config.rollback_results_bucket_name, + Bucket=config.disaster_recovery_results_bucket_name, Key=key, Body=json.dumps(results.to_dict(), indent=2), ContentType='application/json', ) - logger.info('Results written to S3', bucket=config.rollback_results_bucket_name, key=key) + logger.info('Results written to S3', bucket=config.disaster_recovery_results_bucket_name, key=key) # handle json serialization errors except json.JSONDecodeError as e: logger.error(f'Error writing results to S3: {str(e)}') diff --git a/backend/compact-connect/lambdas/python/disaster-recovery/tests/__init__.py b/backend/compact-connect/lambdas/python/disaster-recovery/tests/__init__.py index 7cc09b5db..1cd9cd8af 100644 --- a/backend/compact-connect/lambdas/python/disaster-recovery/tests/__init__.py +++ b/backend/compact-connect/lambdas/python/disaster-recovery/tests/__init__.py @@ -15,7 +15,7 @@ def setUpClass(cls): 'DEBUG': 'true', 'ALLOWED_ORIGINS': '["https://example.org"]', 'AWS_DEFAULT_REGION': 'us-east-1', - 'ROLLBACK_RESULTS_BUCKET_NAME': 'rollback-results-bucket', + 'DISASTER_RECOVERY_RESULTS_BUCKET_NAME': 'rollback-results-bucket', 'EVENT_BUS_NAME': 'license-data-events', 'PROVIDER_TABLE_NAME': 'provider-table', 'RATE_LIMITING_TABLE_NAME': 'rate-limiting-table', diff --git a/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/__init__.py b/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/__init__.py 
index 95057db7b..4cf83bde7 100644 --- a/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/__init__.py +++ b/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/__init__.py @@ -47,7 +47,7 @@ def build_resources(self): def create_rollback_results_bucket(self): self._rollback_results_bucket = boto3.resource('s3').create_bucket( - Bucket=os.environ['ROLLBACK_RESULTS_BUCKET_NAME'] + Bucket=os.environ['DISASTER_RECOVERY_RESULTS_BUCKET_NAME'] ) def create_event_bus(self): diff --git a/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py b/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py index c03b09e34..f0f223442 100644 --- a/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py +++ b/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py @@ -614,8 +614,8 @@ def _perform_rollback_and_get_s3_object(self): rollback_license_upload(event, Mock()) # Read object from S3 and verify its contents match what is expected - s3_key = f'{MOCK_EXECUTION_NAME}/results.json' - s3_obj = self.config.s3_client.get_object(Bucket=self.config.rollback_results_bucket_name, Key=s3_key) + s3_key = f'licenseUploadRollbacks/{MOCK_EXECUTION_NAME}/results.json' + s3_obj = self.config.s3_client.get_object(Bucket=self.config.disaster_recovery_results_bucket_name, Key=s3_key) return json.loads(s3_obj['Body'].read().decode('utf-8')) # Tests for checking data written to S3 @@ -930,7 +930,7 @@ def test_rollback_handles_loading_existing_s3_results_and_appends_new_data(self) self._when_provider_had_privilege_deactivated_from_upload() # Create initial S3 results with data in all fields - s3_key = f'{MOCK_EXECUTION_NAME}/results.json' + s3_key = f'licenseUploadRollbacks/{MOCK_EXECUTION_NAME}/results.json' # Create existing results data in the format that from_dict 
expects (camelCase for top-level keys) existing_results_data = { @@ -974,7 +974,7 @@ def test_rollback_handles_loading_existing_s3_results_and_appends_new_data(self) # Write existing results to S3 self.config.s3_client.put_object( - Bucket=self.config.rollback_results_bucket_name, + Bucket=self.config.disaster_recovery_results_bucket_name, Key=s3_key, Body=json.dumps(existing_results_data, indent=2), ContentType='application/json', @@ -1104,8 +1104,8 @@ def test_rollback_handles_pagination_when_provider_id_present_in_event_input(sel self.assertEqual(0, result_second['providersFailed']) # Verify: S3 results contain both providers - s3_key = f'{MOCK_EXECUTION_NAME}/results.json' - s3_obj = self.config.s3_client.get_object(Bucket=self.config.rollback_results_bucket_name, Key=s3_key) + s3_key = f'licenseUploadRollbacks/{MOCK_EXECUTION_NAME}/results.json' + s3_obj = self.config.s3_client.get_object(Bucket=self.config.disaster_recovery_results_bucket_name, Key=s3_key) final_results_data = json.loads(s3_obj['Body'].read().decode('utf-8')) # Should have 2 reverted providers @@ -1237,8 +1237,8 @@ def test_orphaned_license_updates_cause_provider_to_be_skipped(self): self.assertEqual(result['providersFailed'], 0, 'No providers should have failed') # Verify S3 results contain the orphaned update details - s3_key = f'{MOCK_EXECUTION_NAME}/results.json' - s3_obj = self.config.s3_client.get_object(Bucket=self.config.rollback_results_bucket_name, Key=s3_key) + s3_key = f'licenseUploadRollbacks/{MOCK_EXECUTION_NAME}/results.json' + s3_obj = self.config.s3_client.get_object(Bucket=self.config.disaster_recovery_results_bucket_name, Key=s3_key) results_data = json.loads(s3_obj['Body'].read().decode('utf-8')) # Verify the structure of the results @@ -1312,8 +1312,8 @@ def test_provider_skipped_when_encumbrance_update_created_within_upload_window(s self.assertEqual(1, len(license_updates), 'Encumbrance update should still exist') # Verify S3 results contain skip details - s3_key = 
f'{MOCK_EXECUTION_NAME}/results.json' - s3_obj = self.config.s3_client.get_object(Bucket=self.config.rollback_results_bucket_name, Key=s3_key) + s3_key = f'licenseUploadRollbacks/{MOCK_EXECUTION_NAME}/results.json' + s3_obj = self.config.s3_client.get_object(Bucket=self.config.disaster_recovery_results_bucket_name, Key=s3_key) results_data = json.loads(s3_obj['Body'].read().decode('utf-8')) self.assertEqual(1, len(results_data['skippedProviderDetails'])) diff --git a/backend/compact-connect/lambdas/python/migration/tests/__init__.py b/backend/compact-connect/lambdas/python/migration/tests/__init__.py index 1622c7691..6a10f0d67 100644 --- a/backend/compact-connect/lambdas/python/migration/tests/__init__.py +++ b/backend/compact-connect/lambdas/python/migration/tests/__init__.py @@ -15,7 +15,6 @@ def setUpClass(cls): 'DEBUG': 'true', 'ALLOWED_ORIGINS': '["https://example.org"]', 'AWS_DEFAULT_REGION': 'us-east-1', - 'ROLLBACK_RESULTS_BUCKET_NAME': 'rollback-results-bucket', 'EVENT_BUS_NAME': 'license-data-events', 'PROVIDER_TABLE_NAME': 'provider-table', 'RATE_LIMITING_TABLE_NAME': 'rate-limiting-table', diff --git a/backend/compact-connect/stacks/disaster_recovery_stack/license_upload_rollback_step_function.py b/backend/compact-connect/stacks/disaster_recovery_stack/license_upload_rollback_step_function.py index 3bf7910db..b8e78a96d 100644 --- a/backend/compact-connect/stacks/disaster_recovery_stack/license_upload_rollback_step_function.py +++ b/backend/compact-connect/stacks/disaster_recovery_stack/license_upload_rollback_step_function.py @@ -134,7 +134,7 @@ def _create_rollback_function( environment={ **stack.common_env_vars, 'PROVIDER_TABLE_NAME': persistent_stack.provider_table.table_name, - 'ROLLBACK_RESULTS_BUCKET_NAME': rollback_results_bucket.bucket_name, + 'DISASTER_RECOVERY_RESULTS_BUCKET_NAME': rollback_results_bucket.bucket_name, 'LICENSE_UPLOAD_DATE_INDEX_NAME': persistent_stack.provider_table.license_upload_date_gsi_name, 'EVENT_BUS_NAME': 
data_event_bus.event_bus_name, }, From 85765da6a671fc02e6cb2f970fe274f0979c312f Mon Sep 17 00:00:00 2001 From: Landon Shumway Date: Thu, 13 Nov 2025 10:16:38 -0600 Subject: [PATCH 56/81] Enhance smoke test to check for skip test cases --- .../rollback_license_upload_smoke_tests.py | 309 +++++++++++++++--- .../tests/smoke/smoke_common.py | 1 + 2 files changed, 266 insertions(+), 44 deletions(-) diff --git a/backend/compact-connect/tests/smoke/rollback_license_upload_smoke_tests.py b/backend/compact-connect/tests/smoke/rollback_license_upload_smoke_tests.py index 747c43879..12de77cd8 100644 --- a/backend/compact-connect/tests/smoke/rollback_license_upload_smoke_tests.py +++ b/backend/compact-connect/tests/smoke/rollback_license_upload_smoke_tests.py @@ -8,6 +8,8 @@ import requests from config import config, logger from smoke_common import ( + LicenseData, + LicenseUpdateData, SmokeTestFailureException, create_test_app_client, create_test_staff_user, @@ -25,18 +27,26 @@ TEST_STAFF_USER_EMAIL = 'testStaffUserLicenseRollback@smokeTestFakeEmail.com' TEST_APP_CLIENT_NAME = 'test-license-rollback-client' +LICENSE_TYPE = 'licensed professional counselor' + # Test configuration NUM_LICENSES_TO_UPLOAD = 1000 BATCH_SIZE = 100 # Upload in batches of 100 to avoid timeouts +# Global list to track all provider IDs for cleanup +ALL_PROVIDER_IDS = [] + -def upload_test_license_batch(auth_headers: dict, batch_start_index: int, batch_size: int): +def upload_test_license_batch( + auth_headers: dict, batch_start_index: int, batch_size: int, street_address: str = '123 Test Street' +): """ Upload a batch of test license records. 
:param auth_headers: Authentication headers for app client :param batch_start_index: Starting index for this batch :param batch_size: Number of licenses to upload in this batch + :param street_address: Street address to use :return: List of license records that were uploaded """ licenses_batch = [] @@ -50,11 +60,11 @@ def upload_test_license_batch(auth_headers: dict, batch_start_index: int, batch_ # keep the family name consistent so we can query for all the providers which requires an exact # match on the family name 'familyName': 'RollbackTest', - 'homeAddressStreet1': '123 Test Street', + 'homeAddressStreet1': street_address, 'dateOfBirth': '1985-01-01', 'dateOfIssuance': '2020-01-01', - 'ssn': f'500-50-{i:04d}', # Incrementing SSN with padded zeros - 'licenseType': 'licensed professional counselor', + 'ssn': f'999-50-{i:04d}', # Incrementing SSN with padded zeros + 'licenseType': LICENSE_TYPE, 'dateOfExpiration': '2050-12-10', 'homeAddressState': 'NE', 'homeAddressCity': 'Omaha', @@ -85,23 +95,25 @@ def upload_test_license_batch(auth_headers: dict, batch_start_index: int, batch_ return licenses_batch -def upload_test_licenses(auth_headers: dict, num_licenses: int, batch_size: int): +def upload_test_licenses( + auth_headers: dict, num_licenses: int, batch_size: int, street_address: str = '123 Test Street' +): """ Upload test license records in batches. 
:param auth_headers: Authentication headers for app client :param num_licenses: Total number of licenses to upload :param batch_size: Number of licenses per batch + :param street_address: Street address to use :return: Tuple of (all uploaded license data, upload start time, upload end time) """ - upload_start_time = datetime.now(tz=UTC) all_licenses = [] logger.info(f'Starting upload of {num_licenses} test licenses in batches of {batch_size}') for batch_start in range(0, num_licenses, batch_size): current_batch_size = min(batch_size, num_licenses - batch_start) - batch_licenses = upload_test_license_batch(auth_headers, batch_start, current_batch_size) + batch_licenses = upload_test_license_batch(auth_headers, batch_start, current_batch_size, street_address) all_licenses.extend(batch_licenses) # Small delay between batches to avoid rate limiting @@ -109,11 +121,9 @@ def upload_test_licenses(auth_headers: dict, num_licenses: int, batch_size: int) time.sleep(2) # wait for several minutes for all licenses to propagate in the system - - upload_end_time = datetime.now(tz=UTC) logger.info(f'Completed upload of {len(all_licenses)} licenses') - return all_licenses, upload_start_time, upload_end_time + return all_licenses def wait_for_all_providers_created(staff_headers: dict, expected_count: int, max_wait_time: int = 120): @@ -226,9 +236,8 @@ def start_rollback_step_function( input_data = { 'compact': compact, 'jurisdiction': jurisdiction, - 'startDateTime': start_datetime.isoformat().replace('+00:00', 'Z'), - 'endDateTime': end_datetime.isoformat().replace('+00:00', 'Z'), - 'executionName': execution_name, + 'startDateTime': start_datetime.isoformat(), + 'endDateTime': end_datetime.isoformat(), 'rollbackReason': 'Smoke test validation of rollback functionality', } @@ -309,12 +318,138 @@ def get_rollback_results_from_s3(results_s3_key: str): return results -def verify_rollback_results(results: dict, expected_provider_count: int): +def 
create_privilege_for_provider(provider_id: str, compact: str): + """ + Manually create a privilege record for a provider to test skip conditions. + + :param provider_id: The provider ID to create privilege for + :param compact: The compact abbreviation + """ + from datetime import date + + # Create a privilege record for a different jurisdiction (e.g., 'co' for Colorado) + privilege_jurisdiction = 'co' + license_type_abbr = 'lpc' + + privilege_record = { + 'pk': f'{compact}#PROVIDER#{provider_id}', + 'sk': f'{compact}#PROVIDER#privilege/{privilege_jurisdiction}/{license_type_abbr}#', + 'type': 'privilege', + 'providerId': provider_id, + 'compact': compact, + 'jurisdiction': privilege_jurisdiction, + 'licenseJurisdiction': JURISDICTION, + 'licenseType': LICENSE_TYPE, + 'dateOfIssuance': datetime.now(tz=UTC).isoformat(), + 'dateOfRenewal': datetime.now(tz=UTC).isoformat(), + 'dateOfExpiration': date(2050, 12, 10).isoformat(), + 'dateOfUpdate': datetime.now(tz=UTC).isoformat(), + 'privilegeId': f'{license_type_abbr.upper()}-{privilege_jurisdiction.upper()}-12345', + 'administratorSetStatus': 'active', + 'compactTransactionId': 'test-transaction-12345', + 'compactTransactionIdGSIPK': f'COMPACT#{compact}#TX#test-transaction-12345#', + 'attestations': [], + } + + config.provider_user_dynamodb_table.put_item(Item=privilege_record) + logger.info(f'Created privilege record for provider {provider_id}') + + +def create_encumbrance_update_for_provider(provider_id: str, compact: str, license_jurisdiction: str): + """ + Manually create a license encumbrance update record to test skip conditions. 
+ + :param provider_id: The provider ID + :param compact: The compact abbreviation + :param license_jurisdiction: The jurisdiction of the license + """ + + license_type_abbr = 'lpc' + # Use current time or specified time + now = datetime.now(tz=UTC) + + # First, query the actual license record to get the previous state + license_sk = f'{compact}#PROVIDER#license/{license_jurisdiction}/{license_type_abbr}#' + + try: + response = config.provider_user_dynamodb_table.get_item( + Key={'pk': f'{compact}#PROVIDER#{provider_id}', 'sk': license_sk} + ) + license_record_item = response.get('Item') + + if not license_record_item: + raise SmokeTestFailureException(f'License record not found for provider {provider_id}') + + # Load the license record using the schema to get properly typed data + license_record = LicenseData.from_database_record(license_record_item) + + except Exception as e: + logger.error(f'Failed to retrieve license record for provider {provider_id}: {str(e)}') + raise + + # Create a license encumbrance update record using LicenseUpdateData + # This ensures proper schema validation and field generation (including SK hash) + update_data = LicenseUpdateData.create_new( + { + 'type': 'licenseUpdate', + 'updateType': 'encumbrance', + 'providerId': provider_id, + 'compact': compact, + 'jurisdiction': license_jurisdiction, + 'licenseType': LICENSE_TYPE, + 'createDate': now, + 'effectiveDate': now, + 'previous': license_record.to_dict(), + 'updatedValues': { + 'encumberedStatus': 'encumbered', + }, + } + ) + + # Serialize to database record format + update_record = update_data.serialize_to_database_record() + + config.provider_user_dynamodb_table.put_item(Item=update_record) + logger.info(f'Created encumbrance update record for provider {provider_id} with createDate {now.isoformat()}') + + +def delete_all_provider_records(provider_ids: list[str], compact: str): + """ + Delete all records for the given provider IDs. 
+ + :param provider_ids: List of provider IDs to delete + :param compact: The compact abbreviation + """ + logger.info(f'Starting cleanup of {len(provider_ids)} provider records...') + + for i, provider_id in enumerate(provider_ids): + if i % 100 == 0: + logger.info(f'Cleaned up {i}/{len(provider_ids)} provider records') + + try: + # Query all records for this provider + response = config.provider_user_dynamodb_table.query( + KeyConditionExpression='pk = :pk', + ExpressionAttributeValues={':pk': f'{compact}#PROVIDER#{provider_id}'}, + ) + + # Delete all records in batches + with config.provider_user_dynamodb_table.batch_writer() as batch: + for item in response.get('Items', []): + batch.delete_item(Key={'pk': item['pk'], 'sk': item['sk']}) + except Exception as e: # noqa: BLE001 + logger.warning(f'Failed to delete records for provider {provider_id}: {str(e)}') + + logger.info(f'✅ Completed cleanup of {len(provider_ids)} provider records') + + +def verify_rollback_results(results: dict, expected_provider_count: int, expected_skipped_count: int = 0): """ Verify the rollback results match expected format and counts. 
:param results: Rollback results from S3 - :param expected_provider_count: Expected number of providers rolled back + :param expected_provider_count: Expected number of providers rolled back (reverted) + :param expected_skipped_count: Expected number of providers that should be skipped """ logger.info('Verifying rollback results...') @@ -338,12 +473,12 @@ def verify_rollback_results(results: dict, expected_provider_count: int): logger.info(f' - Skipped: {num_skipped}') logger.info(f' - Failed: {num_failed}') - # Verify all providers were reverted (none skipped or failed) - if num_skipped > 0: - logger.error(f'Found {num_skipped} skipped providers:') + # Verify skipped count matches expectation + if num_skipped != expected_skipped_count: + logger.error(f'Found {num_skipped} skipped providers, expected {expected_skipped_count}:') for detail in skipped[:5]: # Show first 5 logger.error(f'Details for skipped provider: {detail["providerId"]}', skipped=detail) - raise SmokeTestFailureException(f'Expected 0 skipped providers but found {num_skipped}') + raise SmokeTestFailureException(f'Expected {expected_skipped_count} skipped providers but found {num_skipped}') if num_failed > 0: logger.error(f'Found {num_failed} failed providers:') @@ -355,12 +490,14 @@ def verify_rollback_results(results: dict, expected_provider_count: int): if num_reverted != expected_provider_count: logger.warning(f'Expected {expected_provider_count} reverted providers but found {num_reverted}') - # Verify the reverted provider has the expected structure + # Verify the reverted provider has the expected structure for i, summary in enumerate(reverted): if 'providerId' not in summary: raise SmokeTestFailureException(f'Reverted provider summary {i} missing providerId') if 'licensesReverted' not in summary: raise SmokeTestFailureException(f'Reverted provider summary {i} missing licensesReverted') + if 'updatesDeleted' not in summary: + raise SmokeTestFailureException(f'Reverted provider summary {i} 
missing updatesDeleted') # Verify each license was deleted (not reverted to previous state) licenses_reverted = summary['licensesReverted'] @@ -375,6 +512,14 @@ def verify_rollback_results(results: dict, expected_provider_count: int): f'Expected license action "DELETE" but found "{license_action}" for provider {summary["providerId"]}' ) + # Verify that update records were deleted (should have at least 1 from the re-upload) + updates_deleted = summary['updatesDeleted'] + if len(updates_deleted) < 1: + raise SmokeTestFailureException( + f'Expected at least 1 update record deleted for provider {summary["providerId"]}, ' + f'found {len(updates_deleted)}' + ) + logger.info('✅ Rollback results verification passed') @@ -413,13 +558,20 @@ def rollback_license_upload_smoke_test(): Main smoke test for license upload rollback functionality. Steps: - 1. Upload 1,000 test license records - 2. Wait for all providers to be created - 3. Start rollback step function - 4. Wait for step function completion - 5. Retrieve and verify results from S3 - 6. Verify providers were deleted from database + 1. Upload 1,000 test license records (first time) + 2. Upload 1,000 test license records again with different address (creates update records) + 3. Wait for all providers to be created + 4. Store all provider IDs for cleanup + 5. Create privilege for first provider (should be skipped) + 6. Create encumbrance update for second provider (should be skipped) + 7. Start rollback step function + 8. Wait for step function completion + 9. Retrieve and verify results from S3 (expect 998 reverted, 2 skipped) + 10. Verify providers were deleted from database (except 2 skipped) + 11. 
Clean up remaining test records """ + global ALL_PROVIDER_IDS + # Get environment configuration step_function_arn = config.license_upload_rollback_step_function_arn @@ -434,40 +586,90 @@ def rollback_license_upload_smoke_test(): client_id = client_credentials['client_id'] client_secret = client_credentials['client_secret'] + skipped_provider_ids = [] + try: # Get authentication headers using app client auth_headers = get_client_auth_headers(client_id, client_secret, COMPACT, JURISDICTION) - # Step 1: Upload test licenses + # Step 1: Upload test licenses (first time) logger.info('=' * 80) - logger.info('STEP 1: Uploading test licenses') + logger.info('STEP 1: Uploading test licenses (first time)') logger.info('=' * 80) - uploaded_licenses, upload_start_time, upload_end_time = upload_test_licenses( + first_upload_start_time = datetime.now(tz=UTC) + uploaded_licenses = upload_test_licenses( auth_headers, NUM_LICENSES_TO_UPLOAD, BATCH_SIZE, + street_address='123 Test Street', ) + first_upload_end_time = datetime.now(tz=UTC) - logger.info(f'Upload time window: {upload_start_time.isoformat()} to {upload_end_time.isoformat()}') + logger.info( + f'First upload time window: {first_upload_start_time.isoformat()} to {first_upload_end_time.isoformat()}' + ) - # Step 2: Wait for providers to be created + # Step 2: Upload test licenses again with different address to create update records logger.info('=' * 80) - logger.info('STEP 2: Waiting for provider records to be created') + logger.info('STEP 2: Uploading test licenses again with different address (creates update records)') + logger.info('=' * 80) + + upload_test_licenses( + auth_headers, + NUM_LICENSES_TO_UPLOAD, + BATCH_SIZE, + street_address='456 Updated Street', + ) + + logger.info('Second upload completed - update records should be created') + + # Step 3: Wait for providers to be created + logger.info('=' * 80) + logger.info('STEP 3: Waiting for provider records to be created') logger.info('=' * 80) provider_ids = 
wait_for_all_providers_created(staff_headers, len(uploaded_licenses)) + # set end of upload window for after all providers are accounted for in system + second_upload_end_time = datetime.now(tz=UTC) + + # Store all provider IDs globally for cleanup + ALL_PROVIDER_IDS = provider_ids.copy() logger.info(f'Found {len(provider_ids)} provider records') - # Step 3: Start rollback step function + # Step 4: Create privilege for first provider (should be skipped in rollback) + logger.info('=' * 80) + logger.info('STEP 4: Creating privilege for first provider to test skip condition') + logger.info('=' * 80) + + first_provider_id = provider_ids[0] + create_privilege_for_provider(first_provider_id, COMPACT) + skipped_provider_ids.append(first_provider_id) + logger.info(f'Created privilege for provider {first_provider_id} - should be skipped in rollback') + + # Step 5: Create encumbrance update for second provider (should be skipped in rollback) logger.info('=' * 80) - logger.info('STEP 3: Starting rollback step function') + logger.info('STEP 5: Creating encumbrance update for second provider to test skip condition') logger.info('=' * 80) - # Add buffer to time window to ensure we catch all uploads - rollback_start = upload_start_time - timedelta(minutes=5) - rollback_end = upload_end_time + timedelta(minutes=5) + second_provider_id = provider_ids[1] + create_encumbrance_update_for_provider(second_provider_id, COMPACT, JURISDICTION) + skipped_provider_ids.append(second_provider_id) + logger.info(f'Created encumbrance update for provider {second_provider_id} - should be skipped in rollback') + + # Wait a moment to ensure records are written + logger.info('Waiting for records to propagate...') + time.sleep(5) + + # Step 6: Start rollback step function + logger.info('=' * 80) + logger.info('STEP 6: Starting rollback step function') + logger.info('=' * 80) + + rollback_start = first_upload_start_time + # Add buffer to end time window to ensure we catch all uploads + rollback_end = 
second_upload_end_time + timedelta(minutes=5) execution_arn = start_rollback_step_function( step_function_arn=step_function_arn, @@ -477,18 +679,18 @@ def rollback_license_upload_smoke_test(): end_datetime=rollback_end, ) - # Step 4: Wait for step function completion + # Step 7: Wait for step function completion logger.info('=' * 80) - logger.info('STEP 4: Waiting for step function to complete') + logger.info('STEP 7: Waiting for step function to complete') logger.info('=' * 80) status, output = wait_for_step_function_completion(execution_arn) logger.info(f'Step function output: {json.dumps(output, indent=2)}') - # Step 5: Retrieve and verify results from S3 + # Step 8: Retrieve and verify results from S3 logger.info('=' * 80) - logger.info('STEP 5: Retrieving and verifying results from S3') + logger.info('STEP 8: Retrieving and verifying results from S3') logger.info('=' * 80) results_s3_key = output.get('resultsS3Key') @@ -497,18 +699,37 @@ def rollback_license_upload_smoke_test(): results = get_rollback_results_from_s3(results_s3_key) - verify_rollback_results(results, len(provider_ids)) + # Expect 998 reverted (1000 - 2 skipped) and 2 skipped + expected_reverted = NUM_LICENSES_TO_UPLOAD - 2 + expected_skipped = 2 + verify_rollback_results(results, expected_reverted, expected_skipped) - # Step 6: Verify providers deleted from database + # Step 9: Verify providers deleted from database (except the 2 skipped ones) logger.info('=' * 80) - logger.info('STEP 6: Verifying providers were deleted from database') + logger.info('STEP 9: Verifying providers were deleted from database') logger.info('=' * 80) verify_providers_deleted_from_database(results, COMPACT) + # Step 10: Clean up the 2 skipped provider records + logger.info('=' * 80) + logger.info('STEP 10: Cleaning up skipped provider records') + logger.info('=' * 80) + + delete_all_provider_records(skipped_provider_ids, COMPACT) + logger.info('=' * 80) logger.info('✅ ALL TESTS PASSED') logger.info('=' * 80) + except 
Exception as e: + logger.error(f'Test failed: {str(e)}') + # If test failed, we need to clean up all provider records + if ALL_PROVIDER_IDS: + logger.info('=' * 80) + logger.info('CLEANUP: Test failed, cleaning up all provider records') + logger.info('=' * 80) + delete_all_provider_records(ALL_PROVIDER_IDS, COMPACT) + raise finally: # Clean up the test app client delete_test_app_client(client_id) diff --git a/backend/compact-connect/tests/smoke/smoke_common.py b/backend/compact-connect/tests/smoke/smoke_common.py index 14bbe454b..3e371c0aa 100644 --- a/backend/compact-connect/tests/smoke/smoke_common.py +++ b/backend/compact-connect/tests/smoke/smoke_common.py @@ -34,6 +34,7 @@ def __init__(self, message): # We have to import this after we've added the common lib to our path and environment from cc_common.data_model.provider_record_util import ProviderUserRecords # noqa: E402 +from cc_common.data_model.schema.license import LicenseData, LicenseUpdateData # noqa: E402 from cc_common.data_model.schema.user.record import UserRecordSchema # noqa: E402 _TEST_STAFF_USER_PASSWORD = 'TestPass123!' 
# noqa: S105 test credential for test staff user From c97f7cf0b96bb622bf6a281df242a0e3205b5a02 Mon Sep 17 00:00:00 2001 From: Landon Shumway Date: Thu, 13 Nov 2025 10:40:11 -0600 Subject: [PATCH 57/81] Fix serde of data classes for pagination --- .../handlers/rollback_license_upload.py | 39 +++++++- .../function/test_rollback_license_upload.py | 92 +++++++++---------- 2 files changed, 81 insertions(+), 50 deletions(-) diff --git a/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py b/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py index 331321013..b8790d681 100644 --- a/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py +++ b/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py @@ -1,6 +1,6 @@ import json import time -from dataclasses import asdict, dataclass, field +from dataclasses import dataclass, field from datetime import datetime from uuid import UUID, uuid4 @@ -93,8 +93,30 @@ class RollbackResults: def to_dict(self) -> dict: """Convert to dictionary for S3 storage.""" return { - 'skippedProviderDetails': [asdict(detail) for detail in self.skipped_provider_details], - 'failedProviderDetails': [asdict(detail) for detail in self.failed_provider_details], + 'skippedProviderDetails': [ + { + 'providerId': detail.provider_id, + 'reason': detail.reason, + 'ineligibleUpdates': [ + { + 'recordType': update.record_type, + 'typeOfUpdate': update.type_of_update, + 'updateTime': update.update_time, + 'reason': update.reason, + 'licenseType': update.license_type, + } + for update in detail.ineligible_updates + ], + } + for detail in self.skipped_provider_details + ], + 'failedProviderDetails': [ + { + 'providerId': detail.provider_id, + 'error': detail.error, + } + for detail in self.failed_provider_details + ], 'revertedProviderSummaries': [ { 'providerId': str(summary.provider_id), @@ -130,7 +152,16 @@ 
def from_dict(cls, data: dict) -> 'RollbackResults': ProviderSkippedDetails( provider_id=detail['providerId'], reason=detail['reason'], - ineligible_updates=detail.get('ineligibleUpdates', []), + ineligible_updates=[ + IneligibleUpdate( + record_type=update['recordType'], + type_of_update=update['typeOfUpdate'], + update_time=update['updateTime'], + reason=update['reason'], + license_type=update['licenseType'], + ) + for update in detail.get('ineligibleUpdates', []) + ], ) for detail in data.get('skippedProviderDetails', []) ], diff --git a/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py b/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py index f0f223442..228ffa5ac 100644 --- a/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py +++ b/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py @@ -754,16 +754,16 @@ def test_expected_s3_object_stored_when_provider_skipped_due_to_extra_license_up 'revertedProviderSummaries': [], 'skippedProviderDetails': [ { - 'ineligible_updates': [ + 'ineligibleUpdates': [ { - 'update_time': encumbrance_update.createDate.isoformat(), - 'license_type': original_license.licenseType, + 'updateTime': encumbrance_update.createDate.isoformat(), + 'licenseType': original_license.licenseType, 'reason': expected_reason_message, - 'record_type': 'licenseUpdate', - 'type_of_update': encumbrance_update.updateType, + 'recordType': 'licenseUpdate', + 'typeOfUpdate': encumbrance_update.updateType, } ], - 'provider_id': MOCK_PROVIDER_ID, + 'providerId': MOCK_PROVIDER_ID, 'reason': 'Provider has updates that are either ' 'unrelated to license upload or ' 'occurred after rollback end time. 
' @@ -791,16 +791,16 @@ def test_expected_s3_object_stored_when_provider_skipped_due_to_privilege_issuan 'revertedProviderSummaries': [], 'skippedProviderDetails': [ { - 'ineligible_updates': [ + 'ineligibleUpdates': [ { - 'update_time': privilege.dateOfIssuance.isoformat(), - 'license_type': privilege.licenseType, + 'updateTime': privilege.dateOfIssuance.isoformat(), + 'licenseType': privilege.licenseType, 'reason': expected_reason_message, - 'record_type': 'privilegeUpdate', - 'type_of_update': 'Issuance', + 'recordType': 'privilegeUpdate', + 'typeOfUpdate': 'Issuance', } ], - 'provider_id': MOCK_PROVIDER_ID, + 'providerId': MOCK_PROVIDER_ID, 'reason': 'Provider has updates that are either ' 'unrelated to license upload or ' 'occurred after rollback end time. ' @@ -829,16 +829,16 @@ def test_expected_s3_object_stored_when_provider_skipped_due_to_extra_privilege_ 'revertedProviderSummaries': [], 'skippedProviderDetails': [ { - 'ineligible_updates': [ + 'ineligibleUpdates': [ { - 'update_time': privilege_update.createDate.isoformat(), - 'license_type': privilege.licenseType, + 'updateTime': privilege_update.createDate.isoformat(), + 'licenseType': privilege.licenseType, 'reason': expected_reason_message, - 'record_type': 'privilegeUpdate', - 'type_of_update': privilege_update.updateType, + 'recordType': 'privilegeUpdate', + 'typeOfUpdate': privilege_update.updateType, } ], - 'provider_id': MOCK_PROVIDER_ID, + 'providerId': MOCK_PROVIDER_ID, 'reason': 'Provider has updates that are either ' 'unrelated to license upload or ' 'occurred after rollback end time. 
' @@ -863,16 +863,16 @@ def test_expected_s3_object_stored_when_provider_skipped_due_to_extra_provider_u 'revertedProviderSummaries': [], 'skippedProviderDetails': [ { - 'ineligible_updates': [ + 'ineligibleUpdates': [ { - 'update_time': provider_update.dateOfUpdate.isoformat(), + 'updateTime': provider_update.dateOfUpdate.isoformat(), 'reason': expected_reason_message, - 'record_type': 'providerUpdate', - 'type_of_update': provider_update.updateType, - 'license_type': 'N/A', + 'recordType': 'providerUpdate', + 'typeOfUpdate': provider_update.updateType, + 'licenseType': 'N/A', } ], - 'provider_id': MOCK_PROVIDER_ID, + 'providerId': MOCK_PROVIDER_ID, 'reason': 'Provider has updates that are either ' 'unrelated to license upload or ' 'occurred after rollback end time. ' @@ -904,7 +904,7 @@ def test_expected_s3_object_stored_when_provider_fails_during_rollback(self): { 'error': f'Failed to rollback updates for provider. ' f'Manual review required: {mock_error_message}', - 'provider_id': self.provider_id, + 'providerId': self.provider_id, } ], 'revertedProviderSummaries': [], @@ -932,7 +932,7 @@ def test_rollback_handles_loading_existing_s3_results_and_appends_new_data(self) # Create initial S3 results with data in all fields s3_key = f'licenseUploadRollbacks/{MOCK_EXECUTION_NAME}/results.json' - # Create existing results data in the format that from_dict expects (camelCase for top-level keys) + # Create existing results data in the format that from_dict expects (camelCase for all keys) existing_results_data = { 'skippedProviderDetails': [ { @@ -940,11 +940,11 @@ def test_rollback_handles_loading_existing_s3_results_and_appends_new_data(self) 'reason': 'Existing skipped provider reason', 'ineligibleUpdates': [ { - 'record_type': 'licenseUpdate', - 'type_of_update': 'ENCUMBRANCE', - 'update_time': (self.default_start_datetime - timedelta(days=2)).isoformat(), + 'recordType': 'licenseUpdate', + 'typeOfUpdate': 'ENCUMBRANCE', + 'updateTime': (self.default_start_datetime 
- timedelta(days=2)).isoformat(), 'reason': 'Existing ineligible update reason', - 'license_type': 'audiologist', + 'licenseType': 'audiologist', } ], } @@ -983,27 +983,27 @@ def test_rollback_handles_loading_existing_s3_results_and_appends_new_data(self) final_results_data = self._perform_rollback_and_get_s3_object() # Verify: All existing data is preserved and new data is appended - # Note: to_dict() uses asdict() which produces snake_case for skipped/failed details + # Note: All keys should now be camelCase for consistency self.assertEqual( { 'skippedProviderDetails': [ { - 'provider_id': existing_skipped_provider_id, + 'providerId': existing_skipped_provider_id, 'reason': 'Existing skipped provider reason', - 'ineligible_updates': [ + 'ineligibleUpdates': [ { - 'record_type': 'licenseUpdate', - 'type_of_update': 'ENCUMBRANCE', - 'update_time': (self.default_start_datetime - timedelta(days=2)).isoformat(), + 'recordType': 'licenseUpdate', + 'typeOfUpdate': 'ENCUMBRANCE', + 'updateTime': (self.default_start_datetime - timedelta(days=2)).isoformat(), 'reason': 'Existing ineligible update reason', - 'license_type': 'audiologist', + 'licenseType': 'audiologist', } ], } ], 'failedProviderDetails': [ { - 'provider_id': existing_failed_provider_id, + 'providerId': existing_failed_provider_id, 'error': 'Existing failure error message', } ], @@ -1178,7 +1178,7 @@ def test_transaction_failure_is_logged_and_provider_marked_as_failed(self): # Verify: Provider was marked as failed self.assertEqual(1, len(results_data['failedProviderDetails'])) - self.assertEqual(self.provider_id, results_data['failedProviderDetails'][0]['provider_id']) + self.assertEqual(self.provider_id, results_data['failedProviderDetails'][0]['providerId']) self.assertIn('TransactionCanceledException', results_data['failedProviderDetails'][0]['error']) # Verify: No providers were reverted or skipped @@ -1252,16 +1252,16 @@ def test_orphaned_license_updates_cause_provider_to_be_skipped(self): 
self.assertEqual(1, len(results_data['skippedProviderDetails'])) skipped_detail = results_data['skippedProviderDetails'][0] - self.assertEqual(orphaned_provider_id, skipped_detail['provider_id']) + self.assertEqual(orphaned_provider_id, skipped_detail['providerId']) self.assertIn('Manual review required', skipped_detail['reason']) # Check ineligible updates details - self.assertEqual(1, len(skipped_detail['ineligible_updates'])) - ineligible_update = skipped_detail['ineligible_updates'][0] + self.assertEqual(1, len(skipped_detail['ineligibleUpdates'])) + ineligible_update = skipped_detail['ineligibleUpdates'][0] - self.assertEqual('licenseUpdate', ineligible_update['record_type']) - self.assertEqual('Orphaned', ineligible_update['type_of_update']) - self.assertEqual(orphaned_license_update.licenseType, ineligible_update['license_type']) + self.assertEqual('licenseUpdate', ineligible_update['recordType']) + self.assertEqual('Orphaned', ineligible_update['typeOfUpdate']) + self.assertEqual(orphaned_license_update.licenseType, ineligible_update['licenseType']) self.assertEqual(expected_reason, ineligible_update['reason']) # Verify no providers were reverted or failed @@ -1318,5 +1318,5 @@ def test_provider_skipped_when_encumbrance_update_created_within_upload_window(s self.assertEqual(1, len(results_data['skippedProviderDetails'])) skipped_detail = results_data['skippedProviderDetails'][0] - self.assertEqual(self.provider_id, skipped_detail['provider_id']) + self.assertEqual(self.provider_id, skipped_detail['providerId']) self.assertIn('Manual review required', skipped_detail['reason']) From bad17c3610cfeaec77072f65be4d1f9bceea041d Mon Sep 17 00:00:00 2001 From: Landon Shumway Date: Thu, 13 Nov 2025 10:53:45 -0600 Subject: [PATCH 58/81] linter/formatting/cleanup --- .../handlers/rollback_license_upload.py | 11 ++++++----- .../tests/function/test_rollback_license_upload.py | 8 +++----- .../stacks/disaster_recovery_stack/__init__.py | 6 +++--- 
backend/compact-connect/tests/smoke/smoke_common.py | 6 ++++-- 4 files changed, 16 insertions(+), 15 deletions(-) diff --git a/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py b/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py index b8790d681..80ed69d6a 100644 --- a/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py +++ b/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py @@ -17,6 +17,8 @@ from cc_common.exceptions import CCNotFoundException # Maximum time window for rollback (1 week in seconds) +# this is set as a safety net to prevent accidental rollback over large time period +# it can be modified if needed MAX_ROLLBACK_WINDOW_SECONDS = 7 * 24 * 60 * 60 # Privilege update category for license deactivations @@ -703,7 +705,7 @@ def add_delete(pk: str, sk: str, update_record: bool): update_time=privilege.dateOfIssuance.isoformat(), license_type=license_record.licenseType, reason=f"Privilege in jurisdiction '{privilege.jurisdiction}' issued after license upload. " - "Manual review required.", + 'Manual review required.', ) ) # Check updates associated with this privilege that are after the start_datetime @@ -724,8 +726,8 @@ def add_delete(pk: str, sk: str, update_record: bool): update_time=privilege_update.dateOfUpdate.isoformat(), license_type=privilege_update.licenseType, # include privilege jurisdiction in reason - reason=f"Privilege in jurisdiction '{privilege_update.jurisdiction}' was updated with a change " - "unrelated to license upload. Manual review required.", + reason=f"Privilege in jurisdiction '{privilege_update.jurisdiction}' was updated " + f'with a change unrelated to license upload. 
Manual review required.', ) ) elif privilege_update.createDate > upload_window_end_datetime: @@ -738,7 +740,7 @@ def add_delete(pk: str, sk: str, update_record: bool): license_type=privilege_update.licenseType, # include privilege jurisdiction in reason reason=f"Privilege in jurisdiction '{privilege_update.jurisdiction}' was deactivated " - "after rollback end time. Manual review required.", + 'after rollback end time. Manual review required.', ) ) else: @@ -782,7 +784,6 @@ def add_delete(pk: str, sk: str, update_record: bool): ) ) - # Get license updates for this license after start_datetime license_updates_after_start = provider_records.get_update_records_for_license( jurisdiction=license_record.jurisdiction, diff --git a/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py b/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py index 228ffa5ac..fb5d78e70 100644 --- a/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py +++ b/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py @@ -215,18 +215,16 @@ def _when_provider_had_privilege_issued_during_upload(self): Returns the privilege and its update record. """ - privilege = self.test_data_generator.put_default_privilege_record_in_provider_table( + return self.test_data_generator.put_default_privilege_record_in_provider_table( { 'providerId': self.provider_id, 'compact': self.compact, 'jurisdiction': 'ne', 'licenseJurisdiction': self.license_jurisdiction, - 'dateOfIssuance': self.default_upload_datetime + 'dateOfIssuance': self.default_upload_datetime, } ) - return privilege - def _when_provider_had_privilege_update_after_upload(self, after_upload_datetime: datetime = None): """ Set up a scenario where a provider had a non-upload-related privilege update AFTER the upload window. 
@@ -821,7 +819,7 @@ def test_expected_s3_object_stored_when_provider_skipped_due_to_extra_privilege_ # Verify the structure of the results expected_reason_message = ( "Privilege in jurisdiction 'ne' was updated with a change unrelated to license upload. " - "Manual review required." + 'Manual review required.' ) self.assertEqual( { diff --git a/backend/compact-connect/stacks/disaster_recovery_stack/__init__.py b/backend/compact-connect/stacks/disaster_recovery_stack/__init__.py index f26b7ce19..00756452f 100644 --- a/backend/compact-connect/stacks/disaster_recovery_stack/__init__.py +++ b/backend/compact-connect/stacks/disaster_recovery_stack/__init__.py @@ -63,7 +63,7 @@ def __init__( # Create S3 bucket for license upload rollback results stack = Stack.of(self) - self.rollback_results_bucket = Bucket( + self.disaster_recovery_results_bucket = Bucket( self, 'DisasterRecoveryResultsBucket', encryption=BucketEncryption.KMS, @@ -80,7 +80,7 @@ def __init__( # Suppress replication requirement - replication to a logs archive account may be added as a future enhancement NagSuppressions.add_resource_suppressions( - self.rollback_results_bucket, + self.disaster_recovery_results_bucket, suppressions=[ { 'id': 'HIPAA.Security-S3BucketReplicationEnabled', @@ -116,7 +116,7 @@ def __init__( self, 'LicenseUploadRollback', persistent_stack=persistent_stack, - rollback_results_bucket=self.rollback_results_bucket, + rollback_results_bucket=self.disaster_recovery_results_bucket, dr_shared_encryption_key=self.dr_shared_encryption_key, ) diff --git a/backend/compact-connect/tests/smoke/smoke_common.py b/backend/compact-connect/tests/smoke/smoke_common.py index 3e371c0aa..9a57e7906 100644 --- a/backend/compact-connect/tests/smoke/smoke_common.py +++ b/backend/compact-connect/tests/smoke/smoke_common.py @@ -33,8 +33,10 @@ def __init__(self, message): os.environ['LICENSE_TYPES'] = json.dumps(LICENSE_TYPES) # We have to import this after we've added the common lib to our path and 
environment -from cc_common.data_model.provider_record_util import ProviderUserRecords # noqa: E402 -from cc_common.data_model.schema.license import LicenseData, LicenseUpdateData # noqa: E402 +from cc_common.data_model.provider_record_util import ProviderUserRecords # noqa: E402 F401 + +# importing this here so it can be easily referenced in the rollback upload tests +from cc_common.data_model.schema.license import LicenseData, LicenseUpdateData # noqa: E402 F401 from cc_common.data_model.schema.user.record import UserRecordSchema # noqa: E402 _TEST_STAFF_USER_PASSWORD = 'TestPass123!' # noqa: S105 test credential for test staff user From 155dd7f961119ba9b57940252691ebcf14edb870 Mon Sep 17 00:00:00 2001 From: Landon Shumway Date: Thu, 13 Nov 2025 13:04:23 -0600 Subject: [PATCH 59/81] fix smoke test to verify record creation before rollback --- .../rollback_license_upload_smoke_tests.py | 65 +++++++++++++++---- 1 file changed, 51 insertions(+), 14 deletions(-) diff --git a/backend/compact-connect/tests/smoke/rollback_license_upload_smoke_tests.py b/backend/compact-connect/tests/smoke/rollback_license_upload_smoke_tests.py index 12de77cd8..41aacb961 100644 --- a/backend/compact-connect/tests/smoke/rollback_license_upload_smoke_tests.py +++ b/backend/compact-connect/tests/smoke/rollback_license_upload_smoke_tests.py @@ -30,8 +30,8 @@ LICENSE_TYPE = 'licensed professional counselor' # Test configuration -NUM_LICENSES_TO_UPLOAD = 1000 -BATCH_SIZE = 100 # Upload in batches of 100 to avoid timeouts +NUM_LICENSES_TO_UPLOAD = 300 +BATCH_SIZE = 100 # Upload in batches of 100 # Global list to track all provider IDs for cleanup ALL_PROVIDER_IDS = [] @@ -126,6 +126,33 @@ def upload_test_licenses( return all_licenses +def verify_license_update_records_created(provider_ids, retry_count: int = 0): + """ + Checks all provider ids for license update records, if none are found, adds to list to retry + and retries after a delay + :param provider_ids: List of provider IDs to check + 
:param retry_count: Current retry count + :return: None + """ + provider_ids_to_retry = [] + for provider_id in provider_ids: + provider_user_records = get_provider_user_records(COMPACT, provider_id) + if len(provider_user_records.get_all_license_update_records()) == 0: + logger.info(f'no license update records found for provider {provider_id}. Will retry.') + provider_ids_to_retry.append(provider_id) + + if provider_ids_to_retry: + if retry_count >= 3: + raise SmokeTestFailureException( + f'failed to find license update records for {len(provider_ids_to_retry)} providers after 3 retries' + ) + time.sleep(10) + logger.info(f'retrying {len(provider_ids_to_retry)} providers after 10 seconds...') + verify_license_update_records_created(provider_ids_to_retry, retry_count + 1) + else: + logger.info('all license update records found') + + def wait_for_all_providers_created(staff_headers: dict, expected_count: int, max_wait_time: int = 120): """ Wait for all provider records to be created from uploaded licenses. @@ -200,7 +227,7 @@ def wait_for_all_providers_created(staff_headers: dict, expected_count: int, max if num_found >= expected_count: logger.info(f'All {expected_count} providers found!') - return all_provider_ids[:expected_count] # Return only the expected count + return all_provider_ids # Return only the expected count elapsed = time.time() - start_time if elapsed < max_wait_time: @@ -558,15 +585,15 @@ def rollback_license_upload_smoke_test(): Main smoke test for license upload rollback functionality. Steps: - 1. Upload 1,000 test license records (first time) - 2. Upload 1,000 test license records again with different address (creates update records) - 3. Wait for all providers to be created + 1. Upload test license records (first time) + 2. Upload test license records again with different address (creates update records) + 3. Wait for all providers to be created AND verify license update records exist in DynamoDB 4. Store all provider IDs for cleanup 5. 
Create privilege for first provider (should be skipped) 6. Create encumbrance update for second provider (should be skipped) 7. Start rollback step function 8. Wait for step function completion - 9. Retrieve and verify results from S3 (expect 998 reverted, 2 skipped) + 9. Retrieve and verify results from S3 10. Verify providers were deleted from database (except 2 skipped) 11. Clean up remaining test records """ @@ -610,6 +637,13 @@ def rollback_license_upload_smoke_test(): f'First upload time window: {first_upload_start_time.isoformat()} to {first_upload_end_time.isoformat()}' ) + # Wait for first upload's license records to be created before second upload + logger.info('=' * 80) + logger.info('Waiting for first upload providers and license records to be created...') + logger.info('=' * 80) + wait_for_all_providers_created(staff_headers, len(uploaded_licenses)) + logger.info('✅ All first upload license records have been created') + # Step 2: Upload test licenses again with different address to create update records logger.info('=' * 80) logger.info('STEP 2: Uploading test licenses again with different address (creates update records)') @@ -624,18 +658,21 @@ def rollback_license_upload_smoke_test(): logger.info('Second upload completed - update records should be created') - # Step 3: Wait for providers to be created + # Step 3: Wait for providers to be created and update records to propagate logger.info('=' * 80) - logger.info('STEP 3: Waiting for provider records to be created') + logger.info('STEP 3: Waiting for provider records and update records to be created') logger.info('=' * 80) provider_ids = wait_for_all_providers_created(staff_headers, len(uploaded_licenses)) - # set end of upload window for after all providers are accounted for in system - second_upload_end_time = datetime.now(tz=UTC) # Store all provider IDs globally for cleanup ALL_PROVIDER_IDS = provider_ids.copy() + logger.info('Checking for license update records.') + 
verify_license_update_records_created(provider_ids) + # Capture end time after verifying update records exist + second_upload_end_time = datetime.now(tz=UTC) + logger.info(f'Found {len(provider_ids)} provider records') # Step 4: Create privilege for first provider (should be skipped in rollback) @@ -658,8 +695,8 @@ def rollback_license_upload_smoke_test(): skipped_provider_ids.append(second_provider_id) logger.info(f'Created encumbrance update for provider {second_provider_id} - should be skipped in rollback') - # Wait a moment to ensure records are written - logger.info('Waiting for records to propagate...') + # Brief wait to ensure the manually created records are written + logger.info('Waiting briefly for test records to propagate...') time.sleep(5) # Step 6: Start rollback step function @@ -699,7 +736,7 @@ def rollback_license_upload_smoke_test(): results = get_rollback_results_from_s3(results_s3_key) - # Expect 998 reverted (1000 - 2 skipped) and 2 skipped + # Expect all providers reverted except for the 2 skipped expected_reverted = NUM_LICENSES_TO_UPLOAD - 2 expected_skipped = 2 verify_rollback_results(results, expected_reverted, expected_skipped) From a4eff6bc81c6f2c57ffaa962cdbe02e0ac62c919 Mon Sep 17 00:00:00 2001 From: Landon Shumway Date: Thu, 13 Nov 2025 15:41:04 -0600 Subject: [PATCH 60/81] PR feedback --- .../common/cc_common/data_model/data_client.py | 4 ++-- .../handlers/rollback_license_upload.py | 15 ++++++++++----- .../function/test_rollback_license_upload.py | 11 ++++++++++- .../migration/migrate_update_sort_keys/main.py | 9 +++++---- .../provider-data-v1/tests/function/__init__.py | 5 ++++- .../smoke/rollback_license_upload_smoke_tests.py | 11 +++++------ 6 files changed, 36 insertions(+), 19 deletions(-) diff --git a/backend/compact-connect/lambdas/python/common/cc_common/data_model/data_client.py b/backend/compact-connect/lambdas/python/common/cc_common/data_model/data_client.py index 8cb220b84..764538110 100644 --- 
a/backend/compact-connect/lambdas/python/common/cc_common/data_model/data_client.py +++ b/backend/compact-connect/lambdas/python/common/cc_common/data_model/data_client.py @@ -1065,7 +1065,7 @@ def _get_privilege_update_records_directly( return [PrivilegeUpdateData.from_database_record(item) for item in response_items] - @logger_inject_kwargs(logger, 'compact', 'provider_id', 'detail', 'jurisdiction', 'license_type') + @logger_inject_kwargs(logger, 'compact', 'provider_id', 'detail', 'jurisdiction', 'license_type_abbr') def get_privilege_data( self, *, @@ -1117,7 +1117,7 @@ def get_privilege_data( return result - @logger_inject_kwargs(logger, 'compact', 'provider_id', 'jurisdiction', 'license_type') + @logger_inject_kwargs(logger, 'compact', 'provider_id', 'jurisdiction', 'license_type_abbr') def deactivate_privilege( self, *, compact: str, provider_id: str, jurisdiction: str, license_type_abbr: str, deactivation_details: dict ) -> None: diff --git a/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py b/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py index 80ed69d6a..a8f6b4120 100644 --- a/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py +++ b/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py @@ -181,7 +181,7 @@ def from_dict(cls, data: dict) -> 'RollbackResults': RevertedLicense( jurisdiction=reverted_license['jurisdiction'], license_type=reverted_license['licenseType'], - revision_id=uuid4(), + revision_id=reverted_license['revisionId'], action=reverted_license['action'], ) for reverted_license in summary.get('licensesReverted', []) @@ -190,7 +190,7 @@ def from_dict(cls, data: dict) -> 'RollbackResults': RevertedPrivilege( jurisdiction=reverted_privilege['jurisdiction'], license_type=reverted_privilege['licenseType'], - revision_id=uuid4(), + revision_id=reverted_privilege['revisionId'], 
action=reverted_privilege['action'], ) for reverted_privilege in summary.get('privilegesReverted', []) @@ -723,7 +723,7 @@ def add_delete(pk: str, sk: str, update_record: bool): IneligibleUpdate( record_type='privilegeUpdate', type_of_update=privilege_update.updateType, - update_time=privilege_update.dateOfUpdate.isoformat(), + update_time=privilege_update.createDate.isoformat(), license_type=privilege_update.licenseType, # include privilege jurisdiction in reason reason=f"Privilege in jurisdiction '{privilege_update.jurisdiction}' was updated " @@ -736,7 +736,7 @@ def add_delete(pk: str, sk: str, update_record: bool): IneligibleUpdate( record_type='privilegeUpdate', type_of_update=privilege_update.updateType, - update_time=privilege_update.dateOfUpdate.isoformat(), + update_time=privilege_update.createDate.isoformat(), license_type=privilege_update.licenseType, # include privilege jurisdiction in reason reason=f"Privilege in jurisdiction '{privilege_update.jurisdiction}' was deactivated " @@ -839,6 +839,11 @@ def add_delete(pk: str, sk: str, update_record: bool): ) # license was not first uploaded during the upload window, revert it to last previous state before the upload else: + # if the provider is ineligible for rollback, the list of license updates may be empty, and we need to + # defensively check for that here and continue to the next license + if not license_updates_in_window: + continue + # Find the earliest update in the window to get the previous state license_updates_in_window.sort(key=lambda x: x.createDate) earliest_update_in_window = license_updates_in_window[0] @@ -1037,7 +1042,7 @@ def _write_results_to_s3(key: str, results: RollbackResults): ) logger.info('Results written to S3', bucket=config.disaster_recovery_results_bucket_name, key=key) # handle json serialization errors - except json.JSONDecodeError as e: + except TypeError as e: logger.error(f'Error writing results to S3: {str(e)}') raise # handle other errors by logging the full object and 
raising the exception diff --git a/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py b/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py index fb5d78e70..f5aa134c2 100644 --- a/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py +++ b/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py @@ -1088,6 +1088,14 @@ def test_rollback_handles_pagination_when_provider_id_present_in_event_input(sel self.assertEqual(0, result_first['providersFailed']) self.assertEqual(mock_second_provider_id, result_first['continueFromProviderId']) + # Verify: S3 results contain first provider with revision id + s3_key = f'licenseUploadRollbacks/{MOCK_EXECUTION_NAME}/results.json' + s3_obj = self.config.s3_client.get_object(Bucket=self.config.disaster_recovery_results_bucket_name, Key=s3_key) + first_results_data = json.loads(s3_obj['Body'].read().decode('utf-8')) + + # grab the revision id from the results which we will use when asserting on the final object + revision_id = first_results_data['revertedProviderSummaries'][0]['licensesReverted'][0]['revisionId'] + # Execute: Second invocation (continue from where we left off) # Reset mock time for second invocation mock_time.time.side_effect = [0, 1] # Won't timeout this time @@ -1117,7 +1125,7 @@ def test_rollback_handles_pagination_when_provider_id_present_in_event_input(sel 'action': 'REVERT', 'jurisdiction': 'oh', 'licenseType': 'speech-language pathologist', - 'revisionId': ANY, + 'revisionId': revision_id, } ], 'privilegesReverted': [], @@ -1132,6 +1140,7 @@ def test_rollback_handles_pagination_when_provider_id_present_in_event_input(sel 'action': 'REVERT', 'jurisdiction': 'oh', 'licenseType': 'speech-language pathologist', + # unknown random UUID, we won't check for it here 'revisionId': ANY, } ], diff --git 
a/backend/compact-connect/lambdas/python/migration/migrate_update_sort_keys/main.py b/backend/compact-connect/lambdas/python/migration/migrate_update_sort_keys/main.py index 6c66c29f6..789201c46 100644 --- a/backend/compact-connect/lambdas/python/migration/migrate_update_sort_keys/main.py +++ b/backend/compact-connect/lambdas/python/migration/migrate_update_sort_keys/main.py @@ -18,7 +18,7 @@ def on_create(self, properties: dict) -> None: def on_update(self, properties: dict) -> None: """ - No-op on delete. + No-op on update. """ def on_delete(self, _properties: dict) -> CustomResourceResponse | None: @@ -33,9 +33,10 @@ def on_delete(self, _properties: dict) -> CustomResourceResponse | None: def do_migration(_properties: dict) -> None: """ This migration performs the following: - - Scans the provider table for all privilege update records - - For each update record, adds effectiveDate and createDate equal to that updates dateOfUpdate - - Handles batching for cases where there are more than 100 records to update + - Scans the provider table for all update records + - For each update record, load the records and serialize it again, + so the schema classes will generate the new sort key patterns + - Recreate the records by deleting the update records with the old sort key and storing the migrated records. 
""" logger.info('Starting update record sort key migration') diff --git a/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/__init__.py b/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/__init__.py index d2f028361..ba2061beb 100644 --- a/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/__init__.py +++ b/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/__init__.py @@ -166,7 +166,10 @@ def create_provider_table(self): {'AttributeName': 'licenseUploadDateGSIPK', 'KeyType': 'HASH'}, {'AttributeName': 'licenseUploadDateGSISK', 'KeyType': 'RANGE'}, ], - 'Projection': {'ProjectionType': 'KEYS_ONLY'}, + 'Projection': { + 'ProjectionType': 'INCLUDE', + 'NonKeyAttributes': ['providerId'], + }, }, ], ) diff --git a/backend/compact-connect/tests/smoke/rollback_license_upload_smoke_tests.py b/backend/compact-connect/tests/smoke/rollback_license_upload_smoke_tests.py index 41aacb961..ec4281b12 100644 --- a/backend/compact-connect/tests/smoke/rollback_license_upload_smoke_tests.py +++ b/backend/compact-connect/tests/smoke/rollback_license_upload_smoke_tests.py @@ -178,7 +178,7 @@ def wait_for_all_providers_created(staff_headers: dict, expected_count: int, max last_key = None page_num = 1 - all_provider_ids = [] + all_provider_ids: set[str] = set() while time.time() - start_time < max_wait_time: # Collect all providers across all pages while True: @@ -203,9 +203,9 @@ def wait_for_all_providers_created(staff_headers: dict, expected_count: int, max providers = response_data.get('providers', []) pagination = response_data.get('pagination', {}) - # Collect provider IDs from this page + # Collect provider IDs from this page and add to set page_provider_ids = [p['providerId'] for p in providers] - all_provider_ids.extend(page_provider_ids) + all_provider_ids.update(page_provider_ids) logger.info( f'Page {page_num}: Found {len(page_provider_ids)} providers ' @@ -227,7 +227,7 @@ def 
wait_for_all_providers_created(staff_headers: dict, expected_count: int, max if num_found >= expected_count: logger.info(f'All {expected_count} providers found!') - return all_provider_ids # Return only the expected count + return list(all_provider_ids) # Return only the expected count elapsed = time.time() - start_time if elapsed < max_wait_time: @@ -325,7 +325,6 @@ def get_rollback_results_from_s3(results_s3_key: str): Retrieve rollback results from S3. :param results_s3_key: S3 URI or key to the results file - :param bucket_name: S3 bucket name :return: Parsed results data """ s3_client = boto3.client('s3') @@ -632,7 +631,6 @@ def rollback_license_upload_smoke_test(): street_address='123 Test Street', ) first_upload_end_time = datetime.now(tz=UTC) - logger.info( f'First upload time window: {first_upload_start_time.isoformat()} to {first_upload_end_time.isoformat()}' ) @@ -641,6 +639,7 @@ def rollback_license_upload_smoke_test(): logger.info('=' * 80) logger.info('Waiting for first upload providers and license records to be created...') logger.info('=' * 80) + time.sleep(10) wait_for_all_providers_created(staff_headers, len(uploaded_licenses)) logger.info('✅ All first upload license records have been created') From 0ea455f0648362b94a9ca8095d912b3b667fdf0b Mon Sep 17 00:00:00 2001 From: Landon Shumway Date: Thu, 13 Nov 2025 16:05:34 -0600 Subject: [PATCH 61/81] Add test case to verify update records before upload are retained --- .../function/test_rollback_license_upload.py | 166 ++++++++++++++++++ 1 file changed, 166 insertions(+) diff --git a/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py b/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py index f5aa134c2..5331c9db9 100644 --- a/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py +++ 
b/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py @@ -11,6 +11,7 @@ """ import json +from calendar import month from datetime import datetime, timedelta from unittest.mock import ANY, Mock, patch @@ -166,6 +167,137 @@ def _when_provider_had_license_updated_from_upload( return original_license, license_update, updated_license + def _when_license_was_updated_twice( + self, provider_id: str = None + ): + """ + Set up a scenario where a provider had an existing license updated twice during the upload window. + Returns the original license, both update records, and the final updated license. + """ + first_upload_datetime = self.default_start_datetime + timedelta(minutes=30) + second_upload_datetime = self.default_start_datetime + timedelta(hours=1) + if provider_id is None: + provider_id = self.provider_id + + # License was originally uploaded before the upload window + license_upload_datetime = self.default_start_datetime - timedelta(days=1) + + # Create original license before upload window + original_license = self.test_data_generator.put_default_license_record_in_provider_table( + { + 'providerId': provider_id, + 'compact': self.compact, + 'jurisdiction': self.license_jurisdiction, + 'familyName': MOCK_ORIGINAL_FAMILY_NAME, + 'givenName': MOCK_ORIGINAL_GIVEN_NAME, + 'dateOfExpiration': (self.default_start_datetime + timedelta(days=30)).date(), + 'firstUploadDate': license_upload_datetime, + 'licenseStatus': 'active', + } + ) + + # old update record before upload window (e.g., RENEWAL) + existing_update = self.test_data_generator.put_default_license_update_record_in_provider_table( + { + 'providerId': provider_id, + 'compact': self.compact, + 'jurisdiction': self.license_jurisdiction, + 'licenseType': original_license.licenseType, + 'updateType': self.update_categories.LICENSE_UPLOAD_UPDATE_OTHER, + # last update was 5 days before upload, this should be ignored + 'createDate': first_upload_datetime - 
timedelta(days=5), + 'effectiveDate': first_upload_datetime, + 'previous': { + **original_license.to_dict(), + 'familyName': 'someFamilyName', + 'givenName': 'someGivenName', + }, + 'updatedValues': { + 'familyName': original_license.familyName, + 'givenName': original_license.givenName, + }, + } + ) + + # Create first update record within upload window (e.g., RENEWAL) + first_update = self.test_data_generator.put_default_license_update_record_in_provider_table( + { + 'providerId': provider_id, + 'compact': self.compact, + 'jurisdiction': self.license_jurisdiction, + 'licenseType': original_license.licenseType, + 'updateType': self.update_categories.RENEWAL, + 'createDate': first_upload_datetime, + 'effectiveDate': first_upload_datetime, + 'previous': { + 'dateOfExpiration': original_license.dateOfExpiration, + 'licenseStatus': original_license.licenseStatus, + **original_license.to_dict(), + }, + 'updatedValues': { + 'dateOfExpiration': (first_upload_datetime + timedelta(days=365)).date(), + 'dateOfRenewal': first_upload_datetime.date(), + }, + } + ) + + # Create intermediate license state after first update + intermediate_license = self.test_data_generator.put_default_license_record_in_provider_table( + { + 'providerId': provider_id, + 'compact': self.compact, + 'jurisdiction': self.license_jurisdiction, + 'familyName': MOCK_ORIGINAL_FAMILY_NAME, + 'givenName': MOCK_ORIGINAL_GIVEN_NAME, + 'dateOfUpdate': first_upload_datetime, + 'dateOfExpiration': (first_upload_datetime + timedelta(days=365)).date(), + 'dateOfRenewal': first_upload_datetime.date(), + 'firstUploadDate': license_upload_datetime, + 'licenseStatus': 'active', + } + ) + + # Create second update record within upload window (e.g., DEACTIVATION) + second_update = self.test_data_generator.put_default_license_update_record_in_provider_table( + { + 'providerId': provider_id, + 'compact': self.compact, + 'jurisdiction': self.license_jurisdiction, + 'licenseType': original_license.licenseType, + 
'updateType': self.update_categories.DEACTIVATION, + 'createDate': second_upload_datetime, + 'effectiveDate': second_upload_datetime, + 'previous': { + 'dateOfExpiration': intermediate_license.dateOfExpiration, + 'licenseStatus': intermediate_license.licenseStatus, + **intermediate_license.to_dict(), + }, + 'updatedValues': { + 'dateOfExpiration': (second_upload_datetime - timedelta(days=365)).date(), + 'licenseStatus': 'inactive', + 'familyName': MOCK_UPDATED_FAMILY_NAME, + 'givenName': MOCK_UPDATED_GIVEN_NAME, + }, + } + ) + + # Update the license record to reflect the final state after second update + final_license = self.test_data_generator.put_default_license_record_in_provider_table( + { + 'providerId': provider_id, + 'compact': self.compact, + 'jurisdiction': self.license_jurisdiction, + 'familyName': MOCK_UPDATED_FAMILY_NAME, + 'givenName': MOCK_UPDATED_GIVEN_NAME, + 'dateOfUpdate': second_upload_datetime, + 'dateOfExpiration': (second_upload_datetime - timedelta(days=365)).date(), + 'firstUploadDate': license_upload_datetime, + 'licenseStatus': 'inactive', + } + ) + + return existing_update, original_license, first_update, second_update, final_license + def _when_provider_had_privilege_deactivated_from_upload(self, upload_datetime: datetime = None): """ Set up a scenario where a provider's privilege was deactivated due to license deactivation during upload. 
@@ -427,6 +559,40 @@ def test_provider_license_record_reset_to_prior_values_when_upload_reverted(self license_updates = provider_records.get_all_license_update_records() self.assertEqual(len(license_updates), 0, 'License update records should be deleted') + def test_provider_license_record_reverted_to_earliest_update_previous_values_when_multiple_updates(self): + """Test that license record is reverted to the 'previous' field of the earliest update when multiple updates exist.""" + from handlers.rollback_license_upload import rollback_license_upload + + # Setup: License was updated twice during upload window, but was first uploaded before start time + existing_update, original_license, first_update, second_update, final_license = self._when_license_was_updated_twice() + + # Execute: Perform rollback + event = self._generate_test_event() + + result = rollback_license_upload(event, Mock()) + + # Assert: Rollback completed successfully + self.assertEqual(result['rollbackStatus'], 'COMPLETE') + self.assertEqual(result['providersReverted'], 1) + + # Verify: License record has been reset to the values from the first (earliest) update's previous field + provider_records = self.config.data_client.get_provider_user_records( + compact=self.compact, + provider_id=self.provider_id, + include_update_tier=UpdateTierEnum.TIER_THREE, + ) + licenses = provider_records.get_license_records() + self.assertEqual(len(licenses), 1) + license_record = licenses[0] + # license should look the same as it did before the updates that were rolled back + self.assertEqual(original_license.serialize_to_database_record(), license_record.serialize_to_database_record()) + + # Verify: Both update records have been deleted + license_updates = provider_records.get_all_license_update_records() + # license update that existed before upload should still be there + self.assertEqual(len(license_updates), 1, 'Expected one existing license update to remain') + 
self.assertEqual(existing_update.serialize_to_database_record(), license_updates[0].serialize_to_database_record()) + def test_provider_privilege_record_reactivated_when_upload_reverted(self): """Test that privilege is reactivated when license deactivation is reverted.""" from handlers.rollback_license_upload import rollback_license_upload From f7a759cb24cb0b3c85c8d48e844a6615bf5d96f2 Mon Sep 17 00:00:00 2001 From: Landon Shumway Date: Thu, 13 Nov 2025 16:07:55 -0600 Subject: [PATCH 62/81] linter --- .../tests/function/test_rollback_license_upload.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py b/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py index 5331c9db9..0265fb4bc 100644 --- a/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py +++ b/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py @@ -11,7 +11,6 @@ """ import json -from calendar import month from datetime import datetime, timedelta from unittest.mock import ANY, Mock, patch @@ -167,9 +166,7 @@ def _when_provider_had_license_updated_from_upload( return original_license, license_update, updated_license - def _when_license_was_updated_twice( - self, provider_id: str = None - ): + def _when_license_was_updated_twice(self, provider_id: str = None): """ Set up a scenario where a provider had an existing license updated twice during the upload window. Returns the original license, both update records, and the final updated license. 
@@ -560,11 +557,12 @@ def test_provider_license_record_reset_to_prior_values_when_upload_reverted(self self.assertEqual(len(license_updates), 0, 'License update records should be deleted') def test_provider_license_record_reverted_to_earliest_update_previous_values_when_multiple_updates(self): - """Test that license record is reverted to the 'previous' field of the earliest update when multiple updates exist.""" from handlers.rollback_license_upload import rollback_license_upload # Setup: License was updated twice during upload window, but was first uploaded before start time - existing_update, original_license, first_update, second_update, final_license = self._when_license_was_updated_twice() + existing_update, original_license, first_update, second_update, final_license = ( + self._when_license_was_updated_twice() + ) # Execute: Perform rollback event = self._generate_test_event() @@ -591,7 +589,9 @@ def test_provider_license_record_reverted_to_earliest_update_previous_values_whe license_updates = provider_records.get_all_license_update_records() # license update that existed before upload should still be there self.assertEqual(len(license_updates), 1, 'Expected one existing license update to remain') - self.assertEqual(existing_update.serialize_to_database_record(), license_updates[0].serialize_to_database_record()) + self.assertEqual( + existing_update.serialize_to_database_record(), license_updates[0].serialize_to_database_record() + ) def test_provider_privilege_record_reactivated_when_upload_reverted(self): """Test that privilege is reactivated when license deactivation is reverted.""" From ff8c7c05c240d531f69c4502268337f607df5ff1 Mon Sep 17 00:00:00 2001 From: Landon Shumway Date: Thu, 13 Nov 2025 16:17:13 -0600 Subject: [PATCH 63/81] PR feedback --- .../disaster-recovery/handlers/rollback_license_upload.py | 2 +- .../python/migration/migrate_update_sort_keys/main.py | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git 
a/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py b/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py index a8f6b4120..3c198f165 100644 --- a/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py +++ b/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py @@ -855,7 +855,7 @@ def add_delete(pk: str, sk: str, update_record: bool): reverted_license = LicenseData.create_new(reverted_license_data) serialized_reverted_license = reverted_license.serialize_to_database_record() - add_put(serialized_reverted_license, update_record=True) + add_put(serialized_reverted_license, update_record=False) logger.info('Reverting license record to pre-upload state') # Track for provider record regeneration diff --git a/backend/compact-connect/lambdas/python/migration/migrate_update_sort_keys/main.py b/backend/compact-connect/lambdas/python/migration/migrate_update_sort_keys/main.py index 789201c46..87a7bf2f0 100644 --- a/backend/compact-connect/lambdas/python/migration/migrate_update_sort_keys/main.py +++ b/backend/compact-connect/lambdas/python/migration/migrate_update_sort_keys/main.py @@ -40,7 +40,7 @@ def do_migration(_properties: dict) -> None: """ logger.info('Starting update record sort key migration') - # Scan for all privilege update records + # Scan for all update records update_records = [] scan_pagination = {} @@ -54,7 +54,7 @@ def do_migration(_properties: dict) -> None: items = response.get('Items', []) update_records.extend(items) - logger.info(f'Found {len(items)} privilege update records in current scan batch') + logger.info(f'Found {len(items)} update records in current scan batch') # Check if we need to continue pagination last_evaluated_key = response.get('LastEvaluatedKey') @@ -161,7 +161,7 @@ def _generate_transaction_items(original_update_record: dict) -> list[dict]: def _process_batch(update_records: 
list[dict]) -> None: """ - Process a batch of privilege update records. + Process a batch of update records. :param update_records: List of update records to process """ From 5a60365139e2de9c6ee6d290ec9bd5f185057aa4 Mon Sep 17 00:00:00 2001 From: Landon Shumway Date: Mon, 17 Nov 2025 12:05:12 -0600 Subject: [PATCH 64/81] PR feedback - exception handling --- .../handlers/rollback_license_upload.py | 99 ++++++++++++------- .../function/test_rollback_license_upload.py | 48 +++++---- 2 files changed, 85 insertions(+), 62 deletions(-) diff --git a/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py b/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py index 3c198f165..a0095a818 100644 --- a/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py +++ b/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py @@ -7,14 +7,18 @@ from aws_lambda_powertools.utilities.typing import LambdaContext from boto3.dynamodb.conditions import Key from botocore.exceptions import ClientError +from marshmallow import ValidationError + from cc_common.config import config, logger from cc_common.data_model.provider_record_util import ProviderRecordUtility, ProviderUserRecords from cc_common.data_model.schema.common import LICENSE_UPLOAD_UPDATE_CATEGORIES, UpdateCategory +from cc_common.data_model.schema.license import LicenseData +from cc_common.data_model.schema.license.record import LicenseRecordSchema from cc_common.data_model.schema.privilege import PrivilegeData from cc_common.data_model.schema.provider import ProviderData from cc_common.data_model.update_tier_enum import UpdateTierEnum from cc_common.event_batch_writer import EventBatchWriter -from cc_common.exceptions import CCNotFoundException +from cc_common.exceptions import CCNotFoundException, CCInternalException # Maximum time window for rollback (1 week in seconds) # this is 
set as a safety net to prevent accidental rollback over large time period @@ -24,6 +28,12 @@ # Privilege update category for license deactivations PRIVILEGE_LICENSE_DEACTIVATION_CATEGORY = UpdateCategory.LICENSE_DEACTIVATION +class ProviderRollbackFailedException(Exception): + """Custom exception that is thrown when a provider fails to rollback""" + def __init__(self, message: str): + self.message = message + super().__init__(message) + # Data classes for rollback operations @dataclass @@ -247,8 +257,8 @@ def rollback_license_upload(event: dict, context: LambdaContext): # noqa: ARG00 # Parse and validate datetime parameters try: - start_datetime = datetime.fromisoformat(start_datetime_str.replace('Z', '+00:00')) - end_datetime = datetime.fromisoformat(end_datetime_str.replace('Z', '+00:00')) + start_datetime = datetime.fromisoformat(start_datetime_str) + end_datetime = datetime.fromisoformat(end_datetime_str) except ValueError as e: logger.error(f'Invalid datetime format: {str(e)}') return { @@ -471,18 +481,9 @@ def _process_provider_rollback( logger.info('Processing provider rollback', provider_id=provider_id) try: - # Fetch all provider records including all update tiers - provider_records = config.data_client.get_provider_user_records( - compact=compact, - provider_id=provider_id, - # tier three includes all update records for the provider - include_update_tier=UpdateTierEnum.TIER_THREE, - ) - # Build transactions and check eligibility in a single pass # If ineligible updates are found, this will return a ProviderSkippedDetails result = _build_and_execute_revert_transactions( - provider_records=provider_records, upload_window_start_datetime=start_datetime, upload_window_end_datetime=end_datetime, compact=compact, @@ -493,8 +494,8 @@ def _process_provider_rollback( # If provider was skipped due to ineligibility, return early if isinstance(result, ProviderSkippedDetails): return result - except Exception as e: # noqa BLE001 - logger.error(f'Error processing 
provider rollback: {str(e)}', provider_id=provider_id, exc_info=True) + except ProviderRollbackFailedException as e: # noqa BLE001 + logger.error('Error processing provider rollback', provider_id=provider_id, exc_info=e) return ProviderFailedDetails( provider_id=provider_id, error=f'Failed to rollback updates for provider. Manual review required: {str(e)}', @@ -545,7 +546,7 @@ def _perform_transaction(transaction_items: list[dict], provider_id: str) -> Non failed_sks=failed_sks, error=str(e), ) - raise + raise ProviderRollbackFailedException(message=str(e)) def _check_for_orphaned_update_records( @@ -594,7 +595,6 @@ def _check_for_orphaned_update_records( def _build_and_execute_revert_transactions( - provider_records: ProviderUserRecords, upload_window_start_datetime: datetime, upload_window_end_datetime: datetime, compact: str, @@ -611,9 +611,6 @@ def _build_and_execute_revert_transactions( Returns either a summary of what was reverted or details about why the provider was skipped. """ - from cc_common.data_model.schema.license import LicenseData - from cc_common.data_model.schema.license.record import LicenseRecordSchema - # Split transaction lists into first tier/second tier lists (license/privilege/provider first tier, updates second) # then merge the two lists into a single list of transaction items primary_record_transaction_items = [] # License, privilege, and provider records @@ -629,9 +626,8 @@ def add_put(item: dict, update_record: bool): """ Add a Put operation to the appropriate list. - Args: - item: The item to put - update_record: True if the item is an update record, False if it is a primary record + :param item: The item to put + :param update_record: True if the item is an update record, False if it is a primary record """ transaction_item = { 'Put': { @@ -648,10 +644,9 @@ def add_delete(pk: str, sk: str, update_record: bool): """ Add a Delete operation. 
- Args: - pk: Partition key - sk: Sort key - used to determine if this is an update record - update_record: True if the item is an update record, False if it is a primary record + :param pk: Partition key + :param sk: Sort key - used to determine if this is an update record + :param update_record: True if the item is an update record, False if it is a primary record """ transaction_item = { 'Delete': { @@ -664,6 +659,19 @@ def add_delete(pk: str, sk: str, update_record: bool): else: primary_record_transaction_items.append(transaction_item) + # Fetch all provider records including all update tiers + try: + provider_records = config.data_client.get_provider_user_records( + compact=compact, + provider_id=provider_id, + # tier three includes all update records for the provider + include_update_tier=UpdateTierEnum.TIER_THREE, + ) + except ValidationError as e: + logger.info('provider record data failed schema validation. Skipping provider', exc_info=e) + raise ProviderRollbackFailedException(message=f'Validation error: {str(e)}') from e + + # Step 1: Check for license update records without top-level license records orphaned_update_check = _check_for_orphaned_update_records(provider_records) if orphaned_update_check is not None: @@ -898,27 +906,44 @@ def add_delete(pk: str, sk: str, update_record: bool): ) _perform_transaction(transaction_items, provider_id) - - # Now read all the license records for the provider and update the provider record - # Fetch all provider records including all update tiers - provider_records = config.data_client.get_provider_user_records(compact=compact, provider_id=provider_id) - top_level_provider_record: ProviderData = provider_records.get_provider_record() - privilege_records: list[PrivilegeData] = provider_records.get_privilege_records() + try: + # Now read all the license records for the provider and update the provider record + provider_records_after_rollback = config.data_client.get_provider_user_records(compact=compact, 
provider_id=provider_id) + top_level_provider_record: ProviderData = provider_records_after_rollback.get_provider_record() + except (CCNotFoundException, CCInternalException) as e: + # This would most likely happen if the top level provider record was somehow deleted by another process. + # We don't ever expect to get into this state, so we are going to let this bubble to the top and end the entire + # process, to ensure we are not putting the system into a worse state. + logger.error('Expected top level provider record not found after rollback. ' + 'Ending workflow to prevent risk of data corruption.', + provider_id=provider_id, exc_info=e) + raise # Create a new list for provider record updates (all first tier items) primary_record_transaction_items.clear() try: - best_license = provider_records.find_best_license_in_current_known_licenses() - provider_record = ProviderRecordUtility.populate_provider_record( + privilege_records: list[PrivilegeData] = provider_records_after_rollback.get_privilege_records() + best_license = provider_records_after_rollback.find_best_license_in_current_known_licenses() + updated_provider_record = ProviderRecordUtility.populate_provider_record( current_provider_record=top_level_provider_record, license_record=best_license.to_dict(), privilege_records=[privilege.to_dict() for privilege in privilege_records], ) - add_put(provider_record.serialize_to_database_record(), update_record=False) + add_put(updated_provider_record.serialize_to_database_record(), update_record=False) except CCNotFoundException: - # all licenses for the provider were removed as part of the rollback, - # the provider record needs to be removed as well + # All licenses for the provider were removed as part of the rollback, meaning the provider + # needs to be removed as well. 
We first check to make sure there are no other record types + if len(provider_records_after_rollback.provider_records) > 1: + # We never expect this to happen, since license records should not have been removed if there were any + # privilege or other non-upload records found for the provider. If we hit this case, we will end the + # entire process to ensure we are not putting the system into a worse state. + message = ('No licenses found for provider after rollback, but other record types still exist. ' + 'Killing process to prevent potential data corruption.') + logger.error(message, provider_id=provider_id) + raise CCInternalException(message=str(message)) + + logger.info('Only top level provider record found. Deleting record', provider_id=provider_id) serialized_provider_record = top_level_provider_record.serialize_to_database_record() add_delete(pk=serialized_provider_record['pk'], sk=serialized_provider_record['sk'], update_record=False) diff --git a/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py b/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py index 0265fb4bc..108b99b0c 100644 --- a/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py +++ b/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py @@ -1047,35 +1047,33 @@ def test_expected_s3_object_stored_when_provider_skipped_due_to_extra_provider_u results_data, ) - def test_expected_s3_object_stored_when_provider_fails_during_rollback(self): - """Test that failed provider details are correctly stored in S3 results when an exception occurs.""" - # Setup: License was updated during upload - self._when_provider_had_license_updated_from_upload( + def test_expected_s3_object_stored_when_provider_schema_validation_fails_during_rollback(self): + """Test that failed provider details are correctly stored in 
S3 results when a validation exception occurs.""" + # Setup: License was updated during upload, but one update record has invalid field + original_license, license_update, updated_license = self._when_provider_had_license_updated_from_upload( license_upload_datetime=self.default_start_datetime - timedelta(hours=1) ) + serialized_license = updated_license.serialize_to_database_record() + serialized_license['jurisdictionUploadedLicenseStatus'] = 'foo' + self.config.provider_table.put_item(Item=serialized_license) - # Mock get_provider_user_records to raise an exception when called during rollback - mock_error_message = 'Database connection error' - with patch.object( - self.config.data_client, 'get_provider_user_records', side_effect=Exception(mock_error_message) - ): - results_data = self._perform_rollback_and_get_s3_object() + results_data = self._perform_rollback_and_get_s3_object() - # Verify the structure of the results contains failed provider details - self.assertEqual( - { - 'failedProviderDetails': [ - { - 'error': f'Failed to rollback updates for provider. ' - f'Manual review required: {mock_error_message}', - 'providerId': self.provider_id, - } - ], - 'revertedProviderSummaries': [], - 'skippedProviderDetails': [], - }, - results_data, - ) + # Verify the structure of the results contains failed provider details + self.assertEqual( + { + 'failedProviderDetails': [ + { + 'error': 'Failed to rollback updates for provider. 
Manual review required: Validation error: ' + "{'jurisdictionUploadedLicenseStatus': ['Must be one of: active, inactive.']}", + 'providerId': self.provider_id, + } + ], + 'revertedProviderSummaries': [], + 'skippedProviderDetails': [], + }, + results_data, + ) def test_rollback_handles_loading_existing_s3_results_and_appends_new_data(self): """Test that rollback can load existing S3 results and append new data without deleting previous data.""" From 5d2fd98e19ad7a26eabd99e58ee1409c74ed1dd4 Mon Sep 17 00:00:00 2001 From: Landon Shumway Date: Mon, 17 Nov 2025 13:38:20 -0600 Subject: [PATCH 65/81] PR feedback - tracking executions in report/data bus with execution name --- .../data_model/schema/data_event/api.py | 4 +- .../common/cc_common/event_bus_client.py | 26 +++--- .../handlers/rollback_license_upload.py | 58 +++++++------ .../function/test_rollback_license_upload.py | 85 +++++++++++++------ 4 files changed, 106 insertions(+), 67 deletions(-) diff --git a/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/data_event/api.py b/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/data_event/api.py index 91e29e5be..f7635f217 100644 --- a/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/data_event/api.py +++ b/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/data_event/api.py @@ -75,7 +75,7 @@ class LicenseRevertDetailSchema(DataEventDetailBaseSchema): rollbackReason = String(required=True, allow_none=False) startTime = DateTime(required=True, allow_none=False) endTime = DateTime(required=True, allow_none=False) - revisionId = UUID(required=True, allow_none=False) + RollbackExecutionName = String(required=True, allow_none=False) class PrivilegeRevertDetailSchema(DataEventDetailBaseSchema): @@ -85,4 +85,4 @@ class PrivilegeRevertDetailSchema(DataEventDetailBaseSchema): rollbackReason = String(required=True, allow_none=False) startTime = DateTime(required=True, 
allow_none=False) endTime = DateTime(required=True, allow_none=False) - revisionId = UUID(required=True, allow_none=False) + RollbackExecutionName = String(required=True, allow_none=False) diff --git a/backend/compact-connect/lambdas/python/common/cc_common/event_bus_client.py b/backend/compact-connect/lambdas/python/common/cc_common/event_bus_client.py index 9244e5149..a516188d5 100644 --- a/backend/compact-connect/lambdas/python/common/cc_common/event_bus_client.py +++ b/backend/compact-connect/lambdas/python/common/cc_common/event_bus_client.py @@ -8,9 +8,11 @@ EncumbranceEventDetailSchema, InvestigationEventDetailSchema, LicenseDeactivationDetailSchema, + LicenseRevertDetailSchema, PrivilegeIssuanceDetailSchema, PrivilegePurchaseEventDetailSchema, PrivilegeRenewalDetailSchema, + PrivilegeRevertDetailSchema, ) from cc_common.event_batch_writer import EventBatchWriter from cc_common.utils import ResponseEncoder @@ -448,7 +450,7 @@ def publish_license_revert_event( rollback_reason: str, start_time: datetime, end_time: datetime, - revision_id: UUID, + execution_name: str, event_batch_writer: EventBatchWriter | None = None, ): """ @@ -457,16 +459,14 @@ def publish_license_revert_event( :param source: The source of the event :param compact: The compact name :param provider_id: The provider ID - :param jurisdiction: The jurisdiction of the license - :param license_type: The license type + :param jurisdiction: The jurisdiction of the license. + :param license_type: The license type. 
:param rollback_reason: The reason for the rollback :param start_time: The start time of the rollback window :param end_time: The end time of the rollback window - :param revision_id: The revision ID of the reverted license + :param execution_name: The execution name for the rollback operation :param event_batch_writer: Optional EventBatchWriter for efficient batch publishing """ - from cc_common.data_model.schema.data_event.api import LicenseRevertDetailSchema - event_detail = { 'compact': compact, 'providerId': provider_id, @@ -475,11 +475,12 @@ def publish_license_revert_event( 'rollbackReason': rollback_reason, 'startTime': start_time, 'endTime': end_time, - 'revisionId': revision_id, + 'RollbackExecutionName': execution_name, 'eventTime': config.current_standard_datetime, } license_revert_detail_schema = LicenseRevertDetailSchema() + license_revert_detail_schema.validate(event_detail) deserialized_detail = license_revert_detail_schema.dump(event_detail) self._publish_event( @@ -493,13 +494,13 @@ def publish_privilege_revert_event( self, source: str, compact: str, - provider_id: UUID, + provider_id: str, jurisdiction: str, license_type: str, rollback_reason: str, start_time: datetime, end_time: datetime, - revision_id: UUID, + execution_name: str, event_batch_writer: EventBatchWriter | None = None, ): """ @@ -513,11 +514,9 @@ def publish_privilege_revert_event( :param rollback_reason: The reason for the rollback :param start_time: The start time of the rollback window :param end_time: The end time of the rollback window - :param revision_id: The revision ID of the reverted privilege + :param execution_name: The execution name for the rollback operation :param event_batch_writer: Optional EventBatchWriter for efficient batch publishing """ - from cc_common.data_model.schema.data_event.api import PrivilegeRevertDetailSchema - event_detail = { 'compact': compact, 'providerId': provider_id, @@ -526,11 +525,12 @@ def publish_privilege_revert_event( 'rollbackReason': 
rollback_reason, 'startTime': start_time, 'endTime': end_time, - 'revisionId': revision_id, + 'RollbackExecutionName': execution_name, 'eventTime': config.current_standard_datetime, } privilege_revert_detail_schema = PrivilegeRevertDetailSchema() + privilege_revert_detail_schema.validate(event_detail) deserialized_detail = privilege_revert_detail_schema.dump(event_detail) self._publish_event( diff --git a/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py b/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py index a0095a818..afbce1736 100644 --- a/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py +++ b/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py @@ -70,7 +70,6 @@ class RevertedLicense: jurisdiction: str license_type: str - revision_id: UUID action: str @@ -80,7 +79,6 @@ class RevertedPrivilege: jurisdiction: str license_type: str - revision_id: UUID action: str @@ -98,6 +96,7 @@ class ProviderRevertedSummary: class RollbackResults: """Complete results of a rollback operation.""" + execution_name: str skipped_provider_details: list[ProviderSkippedDetails] = field(default_factory=list) failed_provider_details: list[ProviderFailedDetails] = field(default_factory=list) reverted_provider_summaries: list[ProviderRevertedSummary] = field(default_factory=list) @@ -105,6 +104,7 @@ class RollbackResults: def to_dict(self) -> dict: """Convert to dictionary for S3 storage.""" return { + 'executionName': self.execution_name, 'skippedProviderDetails': [ { 'providerId': detail.provider_id, @@ -136,7 +136,6 @@ def to_dict(self) -> dict: { 'jurisdiction': license_record.jurisdiction, 'licenseType': license_record.license_type, - 'revisionId': str(license_record.revision_id), 'action': license_record.action, } for license_record in summary.licenses_reverted @@ -145,7 +144,6 @@ def to_dict(self) -> dict: { 
'jurisdiction': privilege.jurisdiction, 'licenseType': privilege.license_type, - 'revisionId': str(privilege.revision_id), 'action': privilege.action, } for privilege in summary.privileges_reverted @@ -160,6 +158,7 @@ def to_dict(self) -> dict: def from_dict(cls, data: dict) -> 'RollbackResults': """Create from dictionary loaded from S3.""" return cls( + execution_name=data.get('executionName', ''), skipped_provider_details=[ ProviderSkippedDetails( provider_id=detail['providerId'], @@ -191,7 +190,6 @@ def from_dict(cls, data: dict) -> 'RollbackResults': RevertedLicense( jurisdiction=reverted_license['jurisdiction'], license_type=reverted_license['licenseType'], - revision_id=reverted_license['revisionId'], action=reverted_license['action'], ) for reverted_license in summary.get('licensesReverted', []) @@ -200,7 +198,6 @@ def from_dict(cls, data: dict) -> 'RollbackResults': RevertedPrivilege( jurisdiction=reverted_privilege['jurisdiction'], license_type=reverted_privilege['licenseType'], - revision_id=reverted_privilege['revisionId'], action=reverted_privilege['action'], ) for reverted_privilege in summary.get('privilegesReverted', []) @@ -295,7 +292,7 @@ def rollback_license_upload(event: dict, context: LambdaContext): # noqa: ARG00 results_s3_key = f'licenseUploadRollbacks/{execution_name}/results.json' # Load existing results if this is a continuation - existing_results = _load_results_from_s3(results_s3_key) + existing_results = _load_results_from_s3(results_s3_key, execution_name) # Initialize counters providers_reverted = len(existing_results.reverted_provider_summaries) @@ -323,7 +320,7 @@ def rollback_license_upload(event: dict, context: LambdaContext): # noqa: ARG00 f'Continuing from provider {continue_from_provider_id} (index {start_index}). ' f'{len(affected_provider_ids_list)} providers remaining to process.' 
) - except ValueError as e: + except ValueError: # Provider ID in event input not found in list # Log error and raise exception logger.error( @@ -331,7 +328,7 @@ def rollback_license_upload(event: dict, context: LambdaContext): # noqa: ARG00 continue_from_provider_id=continue_from_provider_id, affected_provider_ids_list=affected_provider_ids_list, ) - raise e + raise # Process each provider for provider_id in affected_provider_ids_list: @@ -368,6 +365,7 @@ def rollback_license_upload(event: dict, context: LambdaContext): # noqa: ARG00 start_datetime=start_datetime, end_datetime=end_datetime, rollback_reason=rollback_reason, + execution_name=execution_name, ) # Update results based on outcome @@ -382,7 +380,12 @@ def rollback_license_upload(event: dict, context: LambdaContext): # noqa: ARG00 existing_results.failed_provider_details.append(result) # All providers processed successfully - logger.info('Rollback complete', providers_processed=providers_processed) + logger.info('Rollback complete', + providers_processed=providers_processed, + providers_skipped=providers_skipped, + providers_reverted=providers_reverted, + providers_failed=providers_failed + ) # Write final results to S3 _write_results_to_s3(results_s3_key, existing_results) @@ -398,7 +401,7 @@ def rollback_license_upload(event: dict, context: LambdaContext): # noqa: ARG00 except ClientError as e: logger.error(f'Error during rollback: {str(e)}') - raise e + raise def _query_gsi_for_affected_providers( @@ -469,6 +472,7 @@ def _process_provider_rollback( start_datetime: datetime, end_datetime: datetime, rollback_reason: str, + execution_name: str, ) -> ProviderRevertedSummary | ProviderSkippedDetails | ProviderFailedDetails: """ Process rollback for a single provider. 
@@ -502,7 +506,7 @@ def _process_provider_rollback( ) # Publish events for successful rollback - _publish_revert_events(result, compact, rollback_reason, start_datetime, end_datetime) + _publish_revert_events(result, compact, rollback_reason, start_datetime, end_datetime, execution_name) logger.info('Provider rollback successful', provider_id=provider_id) return result @@ -787,7 +791,6 @@ def add_delete(pk: str, sk: str, update_record: bool): RevertedPrivilege( jurisdiction=privilege_record.jurisdiction, license_type=privilege_record.licenseType, - revision_id=uuid4(), action='REACTIVATED', ) ) @@ -841,7 +844,6 @@ def add_delete(pk: str, sk: str, update_record: bool): RevertedLicense( jurisdiction=license_record.jurisdiction, license_type=license_record.licenseType, - revision_id=uuid4(), action='DELETE', ) ) @@ -874,7 +876,6 @@ def add_delete(pk: str, sk: str, update_record: bool): RevertedLicense( jurisdiction=license_record.jurisdiction, license_type=license_record.licenseType, - revision_id=uuid4(), action='REVERT', ) ) @@ -897,13 +898,14 @@ def add_delete(pk: str, sk: str, update_record: bool): transaction_items = primary_record_transaction_items + update_record_transactions_items if not transaction_items: - logger.warning('No transaction items to execute') - return ProviderRevertedSummary( - provider_id=provider_id, - licenses_reverted=reverted_licenses, - privileges_reverted=reverted_privileges, - updates_deleted=updates_deleted_sks, - ) + # This should never happen, as it means that somehow the GSI query returned this provider id within + # the search results, but the provider was not either skipped over or had something to revert as we expect. + # If we do get here, we will exit the lambda in a failed state, as there is something unexpected happening that + # needs to be investigated before we attempt to roll back any other providers. + message = ('No transaction items to execute for provider. 
This is an unexpected state that should be ' + 'investigated before attempting to roll back any other providers') + logger.error(message, provider_id=provider_id) + raise CCInternalException(message=f'{message} provider_id: {provider_id}') _perform_transaction(transaction_items, provider_id) try: @@ -970,6 +972,7 @@ def _publish_revert_events( rollback_reason: str, start_datetime: datetime, end_datetime: datetime, + execution_name: str, ): """ Publish revert events for all reverted licenses and privileges. @@ -979,6 +982,7 @@ def _publish_revert_events( :param rollback_reason: The reason for the rollback :param start_datetime: The start time of the rollback window :param end_datetime: The end time of the rollback window + :param execution_name: The execution name for the rollback operation """ with EventBatchWriter(config.events_client) as event_writer: # Publish license revert events @@ -993,7 +997,7 @@ def _publish_revert_events( rollback_reason=rollback_reason, start_time=start_datetime, end_time=end_datetime, - revision_id=reverted_license.revision_id, + execution_name=execution_name, event_batch_writer=event_writer, ) except Exception as e: # noqa BLE001 @@ -1007,7 +1011,6 @@ def _publish_revert_events( rollback_reason=rollback_reason, start_time=start_datetime, end_time=end_datetime, - revision_id=reverted_license.revision_id, error=str(e), ) @@ -1023,7 +1026,7 @@ def _publish_revert_events( rollback_reason=rollback_reason, start_time=start_datetime, end_time=end_datetime, - revision_id=reverted_privilege.revision_id, + execution_name=execution_name, event_batch_writer=event_writer, ) except Exception as e: # noqa BLE001 @@ -1037,12 +1040,11 @@ def _publish_revert_events( rollback_reason=rollback_reason, start_time=start_datetime, end_time=end_datetime, - revision_id=reverted_privilege.revision_id, error=str(e), ) -def _load_results_from_s3(key: str) -> RollbackResults: +def _load_results_from_s3(key: str, execution_name: str) -> RollbackResults: """Load 
existing results from S3.""" try: response = config.s3_client.get_object(Bucket=config.disaster_recovery_results_bucket_name, Key=key) @@ -1050,7 +1052,7 @@ def _load_results_from_s3(key: str) -> RollbackResults: return RollbackResults.from_dict(data) except config.s3_client.exceptions.NoSuchKey: # First execution, no existing results - return RollbackResults() + return RollbackResults(execution_name=execution_name) except Exception as e: logger.error(f'Error loading results from S3: {str(e)}') raise diff --git a/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py b/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py index 108b99b0c..ea79d7c1a 100644 --- a/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py +++ b/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py @@ -794,6 +794,7 @@ def test_expected_s3_object_stored_when_provider_license_record_reset_to_prior_v # Verify the structure of the results self.assertEqual( { + 'executionName': MOCK_EXECUTION_NAME, 'failedProviderDetails': [], 'revertedProviderSummaries': [ { @@ -802,8 +803,6 @@ def test_expected_s3_object_stored_when_provider_license_record_reset_to_prior_v 'action': 'REVERT', 'jurisdiction': original_license.jurisdiction, 'licenseType': original_license.licenseType, - # random UUID, we won't check for it here - 'revisionId': ANY, } ], 'privilegesReverted': [], @@ -828,6 +827,7 @@ def test_expected_s3_object_stored_when_provider_license_record_deleted_from_rol # Verify the structure of the results self.assertEqual( { + 'executionName': MOCK_EXECUTION_NAME, 'failedProviderDetails': [], 'revertedProviderSummaries': [ { @@ -836,8 +836,6 @@ def test_expected_s3_object_stored_when_provider_license_record_deleted_from_rol 'action': 'DELETE', 'jurisdiction': new_license.jurisdiction, 'licenseType': 
new_license.licenseType, - # random UUID, we won't check for it here - 'revisionId': ANY, } ], 'privilegesReverted': [], @@ -863,6 +861,7 @@ def test_expected_s3_object_stored_when_provider_privilege_record_reactivated_fr # Verify the structure of the results self.assertEqual( { + 'executionName': MOCK_EXECUTION_NAME, 'failedProviderDetails': [], 'revertedProviderSummaries': [ { @@ -871,8 +870,6 @@ def test_expected_s3_object_stored_when_provider_privilege_record_reactivated_fr 'action': 'REVERT', 'jurisdiction': self.license_jurisdiction, 'licenseType': privilege.licenseType, - # random UUID, we won't check for it here - 'revisionId': ANY, } ], 'privilegesReverted': [ @@ -880,8 +877,6 @@ def test_expected_s3_object_stored_when_provider_privilege_record_reactivated_fr 'action': 'REACTIVATED', 'jurisdiction': privilege.jurisdiction, 'licenseType': privilege.licenseType, - # random UUID, we won't check for it here - 'revisionId': ANY, } ], 'providerId': self.provider_id, @@ -914,6 +909,7 @@ def test_expected_s3_object_stored_when_provider_skipped_due_to_extra_license_up ) self.assertEqual( { + 'executionName': MOCK_EXECUTION_NAME, 'failedProviderDetails': [], 'revertedProviderSummaries': [], 'skippedProviderDetails': [ @@ -951,6 +947,7 @@ def test_expected_s3_object_stored_when_provider_skipped_due_to_privilege_issuan ) self.assertEqual( { + 'executionName': MOCK_EXECUTION_NAME, 'failedProviderDetails': [], 'revertedProviderSummaries': [], 'skippedProviderDetails': [ @@ -989,6 +986,7 @@ def test_expected_s3_object_stored_when_provider_skipped_due_to_extra_privilege_ ) self.assertEqual( { + 'executionName': MOCK_EXECUTION_NAME, 'failedProviderDetails': [], 'revertedProviderSummaries': [], 'skippedProviderDetails': [ @@ -1023,6 +1021,7 @@ def test_expected_s3_object_stored_when_provider_skipped_due_to_extra_provider_u expected_reason_message = 'Provider update occurred after rollback start time. Manual review required.' 
self.assertEqual( { + 'executionName': MOCK_EXECUTION_NAME, 'failedProviderDetails': [], 'revertedProviderSummaries': [], 'skippedProviderDetails': [ @@ -1062,6 +1061,7 @@ def test_expected_s3_object_stored_when_provider_schema_validation_fails_during_ # Verify the structure of the results contains failed provider details self.assertEqual( { + 'executionName': MOCK_EXECUTION_NAME, 'failedProviderDetails': [ { 'error': 'Failed to rollback updates for provider. Manual review required: Validation error: ' @@ -1079,7 +1079,6 @@ def test_rollback_handles_loading_existing_s3_results_and_appends_new_data(self) """Test that rollback can load existing S3 results and append new data without deleting previous data.""" from uuid import uuid4 - test_revision_id = str(uuid4()) existing_skipped_provider_id = str(uuid4()) existing_reverted_provider_id = str(uuid4()) existing_failed_provider_id = str(uuid4()) @@ -1096,6 +1095,7 @@ def test_rollback_handles_loading_existing_s3_results_and_appends_new_data(self) # Create existing results data in the format that from_dict expects (camelCase for all keys) existing_results_data = { + 'executionName': MOCK_EXECUTION_NAME, 'skippedProviderDetails': [ { 'providerId': existing_skipped_provider_id, @@ -1124,7 +1124,6 @@ def test_rollback_handles_loading_existing_s3_results_and_appends_new_data(self) { 'jurisdiction': 'tx', 'licenseType': 'audiologist', - 'revisionId': test_revision_id, 'action': 'REVERT', } ], @@ -1148,6 +1147,7 @@ def test_rollback_handles_loading_existing_s3_results_and_appends_new_data(self) # Note: All keys should now be camelCase for consistency self.assertEqual( { + 'executionName': MOCK_EXECUTION_NAME, 'skippedProviderDetails': [ { 'providerId': existing_skipped_provider_id, @@ -1176,7 +1176,6 @@ def test_rollback_handles_loading_existing_s3_results_and_appends_new_data(self) { 'jurisdiction': 'tx', 'licenseType': 'audiologist', - 'revisionId': ANY, 'action': 'REVERT', } ], @@ -1190,7 +1189,6 @@ def 
test_rollback_handles_loading_existing_s3_results_and_appends_new_data(self) 'action': 'REVERT', 'jurisdiction': self.license_jurisdiction, 'licenseType': ANY, - 'revisionId': ANY, } ], 'privilegesReverted': [ @@ -1198,7 +1196,6 @@ def test_rollback_handles_loading_existing_s3_results_and_appends_new_data(self) 'action': 'REACTIVATED', 'jurisdiction': 'ne', 'licenseType': ANY, - 'revisionId': ANY, } ], 'updatesDeleted': ANY, @@ -1252,14 +1249,6 @@ def test_rollback_handles_pagination_when_provider_id_present_in_event_input(sel self.assertEqual(0, result_first['providersFailed']) self.assertEqual(mock_second_provider_id, result_first['continueFromProviderId']) - # Verify: S3 results contain first provider with revision id - s3_key = f'licenseUploadRollbacks/{MOCK_EXECUTION_NAME}/results.json' - s3_obj = self.config.s3_client.get_object(Bucket=self.config.disaster_recovery_results_bucket_name, Key=s3_key) - first_results_data = json.loads(s3_obj['Body'].read().decode('utf-8')) - - # grab the revision id from the results which we will use when asserting on the final object - revision_id = first_results_data['revertedProviderSummaries'][0]['licensesReverted'][0]['revisionId'] - # Execute: Second invocation (continue from where we left off) # Reset mock time for second invocation mock_time.time.side_effect = [0, 1] # Won't timeout this time @@ -1281,6 +1270,7 @@ def test_rollback_handles_pagination_when_provider_id_present_in_event_input(sel # Should have 2 reverted providers self.assertEqual( { + 'executionName': MOCK_EXECUTION_NAME, 'failedProviderDetails': [], 'revertedProviderSummaries': [ { @@ -1289,7 +1279,6 @@ def test_rollback_handles_pagination_when_provider_id_present_in_event_input(sel 'action': 'REVERT', 'jurisdiction': 'oh', 'licenseType': 'speech-language pathologist', - 'revisionId': revision_id, } ], 'privilegesReverted': [], @@ -1304,8 +1293,6 @@ def test_rollback_handles_pagination_when_provider_id_present_in_event_input(sel 'action': 'REVERT', 
'jurisdiction': 'oh', 'licenseType': 'speech-language pathologist', - # unknown random UUID, we won't check for it here - 'revisionId': ANY, } ], 'privilegesReverted': [], @@ -1320,6 +1307,56 @@ def test_rollback_handles_pagination_when_provider_id_present_in_event_input(sel final_results_data, ) + @patch('handlers.rollback_license_upload.config.event_bus_client') + def test_event_bus_client_called_with_expected_arguments_for_revert_events(self, mock_event_bus_client): + """Test that event bus client methods are called with expected arguments when publishing revert events.""" + from handlers.rollback_license_upload import rollback_license_upload + + # Setup: License was updated during upload and privilege was deactivated + # This scenario will trigger both license and privilege revert events + original_license, license_update, updated_license = self._when_provider_had_license_updated_from_upload( + license_upload_datetime=self.default_start_datetime - timedelta(hours=1) + ) + privilege, privilege_update = self._when_provider_had_privilege_deactivated_from_upload() + + # Execute: Perform rollback + event = self._generate_test_event() + result = rollback_license_upload(event, Mock()) + + # Assert: Rollback completed successfully + self.assertEqual(result['rollbackStatus'], 'COMPLETE') + self.assertEqual(result['providersReverted'], 1) + + # Verify: publish_license_revert_event was called with expected arguments + expected_license_kwargs = { + 'source': 'org.compactconnect.disaster-recovery', + 'compact': self.compact, + 'provider_id': self.provider_id, + 'jurisdiction': self.license_jurisdiction, + 'license_type': original_license.licenseType, + 'rollback_reason': 'Test rollback', + 'start_time': self.default_start_datetime, + 'end_time': self.default_end_datetime, + 'execution_name': MOCK_EXECUTION_NAME, + 'event_batch_writer': ANY, + } + mock_event_bus_client.publish_license_revert_event.assert_called_once_with(**expected_license_kwargs) + + # Verify: 
publish_privilege_revert_event was called with expected arguments + expected_privilege_kwargs = { + 'source': 'org.compactconnect.disaster-recovery', + 'compact': self.compact, + 'provider_id': self.provider_id, + 'jurisdiction': privilege.jurisdiction, + 'license_type': privilege.licenseType, + 'rollback_reason': 'Test rollback', + 'start_time': self.default_start_datetime, + 'end_time': self.default_end_datetime, + 'execution_name': MOCK_EXECUTION_NAME, + 'event_batch_writer': ANY, + } + mock_event_bus_client.publish_privilege_revert_event.assert_called_once_with(**expected_privilege_kwargs) + def test_transaction_failure_is_logged_and_provider_marked_as_failed(self): """Test that transaction failures are properly logged and the provider is marked as failed.""" from botocore.exceptions import ClientError From a0887e1e9d7c09fbee966d464eafaacaece1f02e Mon Sep 17 00:00:00 2001 From: Landon Shumway Date: Mon, 17 Nov 2025 13:43:02 -0600 Subject: [PATCH 66/81] formatter/linter --- .../handlers/rollback_license_upload.py | 55 +++++++++++-------- .../function/test_rollback_license_upload.py | 2 +- 2 files changed, 33 insertions(+), 24 deletions(-) diff --git a/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py b/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py index afbce1736..26e9370d8 100644 --- a/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py +++ b/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py @@ -2,13 +2,10 @@ import time from dataclasses import dataclass, field from datetime import datetime -from uuid import UUID, uuid4 from aws_lambda_powertools.utilities.typing import LambdaContext from boto3.dynamodb.conditions import Key from botocore.exceptions import ClientError -from marshmallow import ValidationError - from cc_common.config import config, logger from 
cc_common.data_model.provider_record_util import ProviderRecordUtility, ProviderUserRecords from cc_common.data_model.schema.common import LICENSE_UPLOAD_UPDATE_CATEGORIES, UpdateCategory @@ -18,7 +15,8 @@ from cc_common.data_model.schema.provider import ProviderData from cc_common.data_model.update_tier_enum import UpdateTierEnum from cc_common.event_batch_writer import EventBatchWriter -from cc_common.exceptions import CCNotFoundException, CCInternalException +from cc_common.exceptions import CCInternalException, CCNotFoundException +from marshmallow import ValidationError # Maximum time window for rollback (1 week in seconds) # this is set as a safety net to prevent accidental rollback over large time period @@ -28,8 +26,10 @@ # Privilege update category for license deactivations PRIVILEGE_LICENSE_DEACTIVATION_CATEGORY = UpdateCategory.LICENSE_DEACTIVATION + class ProviderRollbackFailedException(Exception): """Custom exception that is thrown when a provider fails to rollback""" + def __init__(self, message: str): self.message = message super().__init__(message) @@ -380,12 +380,13 @@ def rollback_license_upload(event: dict, context: LambdaContext): # noqa: ARG00 existing_results.failed_provider_details.append(result) # All providers processed successfully - logger.info('Rollback complete', - providers_processed=providers_processed, - providers_skipped=providers_skipped, - providers_reverted=providers_reverted, - providers_failed=providers_failed - ) + logger.info( + 'Rollback complete', + providers_processed=providers_processed, + providers_skipped=providers_skipped, + providers_reverted=providers_reverted, + providers_failed=providers_failed, + ) # Write final results to S3 _write_results_to_s3(results_s3_key, existing_results) @@ -536,7 +537,7 @@ def _perform_transaction(transaction_items: list[dict], provider_id: str) -> Non try: config.provider_table.meta.client.transact_write_items(TransactItems=batch) logger.info(f'Executed batch {i // 100 + 1} with 
{len(batch)} items') - except Exception as e: + except ClientError as e: # Extract all SKs from the failed transaction batch for debugging failed_sks = [_extract_sk_from_transaction_item(item) for item in batch] # filter out null values @@ -550,7 +551,7 @@ def _perform_transaction(transaction_items: list[dict], provider_id: str) -> Non failed_sks=failed_sks, error=str(e), ) - raise ProviderRollbackFailedException(message=str(e)) + raise ProviderRollbackFailedException(message=str(e)) from e def _check_for_orphaned_update_records( @@ -675,7 +676,6 @@ def add_delete(pk: str, sk: str, update_record: bool): logger.info('provider record data failed schema validation. Skipping provider', exc_info=e) raise ProviderRollbackFailedException(message=f'Validation error: {str(e)}') from e - # Step 1: Check for license update records without top-level license records orphaned_update_check = _check_for_orphaned_update_records(provider_records) if orphaned_update_check is not None: @@ -902,23 +902,30 @@ def add_delete(pk: str, sk: str, update_record: bool): # the search results, but the provider was not either skipped over or had something to revert as we expect. # If we do get here, we will exit the lambda in a failed state, as there is something unexpected happening that # needs to be investigated before we attempt to roll back any other providers. - message = ('No transaction items to execute for provider. This is an unexpected state that should be ' - 'investigated before attempting to roll back any other providers') + message = ( + 'No transaction items to execute for provider. 
This is an unexpected state that should be ' + 'investigated before attempting to roll back any other providers' + ) logger.error(message, provider_id=provider_id) raise CCInternalException(message=f'{message} provider_id: {provider_id}') _perform_transaction(transaction_items, provider_id) try: # Now read all the license records for the provider and update the provider record - provider_records_after_rollback = config.data_client.get_provider_user_records(compact=compact, provider_id=provider_id) + provider_records_after_rollback = config.data_client.get_provider_user_records( + compact=compact, provider_id=provider_id + ) top_level_provider_record: ProviderData = provider_records_after_rollback.get_provider_record() - except CCNotFoundException | CCInternalException as e: + except (CCNotFoundException, CCInternalException) as e: # This would most likely happen if the top level provider record was somehow deleted by another process. # We don't ever expect to get into this state, so we are going to let this bubble to the top and end the entire # process, to ensure we are not putting the system into a worse state. - logger.error('Expected top level provider record not found after rollback. ' - 'Ending workflow to prevent risk of data corruption.', - provider_id=provider_id, exc_info=e) + logger.error( + 'Expected top level provider record not found after rollback. ' + 'Ending workflow to prevent risk of data corruption.', + provider_id=provider_id, + exc_info=e, + ) raise # Create a new list for provider record updates (all first tier items) @@ -940,10 +947,12 @@ def add_delete(pk: str, sk: str, update_record: bool): # We never expect this to happen, since license records should not have been removed if there were any # privilege or other non-upload records found for the provider. If we hit this case, we will end the # entire process to ensure we are not putting the system into a worse state. 
- message = ('No licenses found for provider after rollback, but other record types still exist. ' - 'Killing process to prevent potential data corruption.') + message = ( + 'No licenses found for provider after rollback, but other record types still exist. ' + 'Killing process to prevent potential data corruption.' + ) logger.error(message, provider_id=provider_id) - raise CCInternalException(message=str(message)) + raise CCInternalException(message=str(message)) # noqa: B904 logger.info('Only top level provider record found. Deleting record', provider_id=provider_id) serialized_provider_record = top_level_provider_record.serialize_to_database_record() diff --git a/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py b/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py index ea79d7c1a..b292df739 100644 --- a/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py +++ b/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py @@ -1065,7 +1065,7 @@ def test_expected_s3_object_stored_when_provider_schema_validation_fails_during_ 'failedProviderDetails': [ { 'error': 'Failed to rollback updates for provider. 
Manual review required: Validation error: ' - "{'jurisdictionUploadedLicenseStatus': ['Must be one of: active, inactive.']}", + "{'jurisdictionUploadedLicenseStatus': ['Must be one of: active, inactive.']}", 'providerId': self.provider_id, } ], From a4d0c0e831bef14ef2c150b3ac1dfecf41155e37 Mon Sep 17 00:00:00 2001 From: Landon Shumway Date: Mon, 17 Nov 2025 13:57:22 -0600 Subject: [PATCH 67/81] Coderabbit feedback --- .../common/cc_common/data_model/schema/data_event/api.py | 2 -- .../disaster-recovery/handlers/rollback_license_upload.py | 2 +- .../lambdas/python/disaster-recovery/tests/__init__.py | 1 + .../tests/function/test_migrate_update_sort_keys.py | 2 +- backend/compact-connect/pipeline/backend_stage.py | 4 ++-- backend/compact-connect/tests/smoke/smoke_common.py | 5 ++--- 6 files changed, 7 insertions(+), 9 deletions(-) diff --git a/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/data_event/api.py b/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/data_event/api.py index f7635f217..373cf793e 100644 --- a/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/data_event/api.py +++ b/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/data_event/api.py @@ -71,7 +71,6 @@ class LicenseDeactivationDetailSchema(DataEventDetailBaseSchema): class LicenseRevertDetailSchema(DataEventDetailBaseSchema): providerId = UUID(required=True, allow_none=False) licenseType = String(required=True, allow_none=False) - jurisdiction = Jurisdiction(required=True, allow_none=False) rollbackReason = String(required=True, allow_none=False) startTime = DateTime(required=True, allow_none=False) endTime = DateTime(required=True, allow_none=False) @@ -81,7 +80,6 @@ class LicenseRevertDetailSchema(DataEventDetailBaseSchema): class PrivilegeRevertDetailSchema(DataEventDetailBaseSchema): providerId = UUID(required=True, allow_none=False) licenseType = String(required=True, allow_none=False) - 
jurisdiction = Jurisdiction(required=True, allow_none=False) rollbackReason = String(required=True, allow_none=False) startTime = DateTime(required=True, allow_none=False) endTime = DateTime(required=True, allow_none=False) diff --git a/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py b/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py index 26e9370d8..2c38e9cd4 100644 --- a/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py +++ b/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py @@ -439,7 +439,7 @@ def _query_gsi_for_affected_providers( gsi_pk = f'C#{compact.lower()}#J#{jurisdiction.lower()}#D#{year_month}' query_kwargs = { - 'IndexName': 'licenseUploadDateGSI', + 'IndexName': config.license_upload_date_index_name, 'KeyConditionExpression': ( Key('licenseUploadDateGSIPK').eq(gsi_pk) & Key('licenseUploadDateGSISK').between(f'TIME#{start_epoch}#', f'TIME#{end_epoch}#~') diff --git a/backend/compact-connect/lambdas/python/disaster-recovery/tests/__init__.py b/backend/compact-connect/lambdas/python/disaster-recovery/tests/__init__.py index 1cd9cd8af..d8d98d949 100644 --- a/backend/compact-connect/lambdas/python/disaster-recovery/tests/__init__.py +++ b/backend/compact-connect/lambdas/python/disaster-recovery/tests/__init__.py @@ -25,6 +25,7 @@ def setUpClass(cls): 'PROV_FAM_GIV_MID_INDEX_NAME': 'providerFamGivMid', 'FAM_GIV_INDEX_NAME': 'famGiv', 'LICENSE_GSI_NAME': 'licenseGSI', + 'LICENSE_UPLOAD_DATE_INDEX_NAME': 'licenseUploadDateGSI', 'PROV_DATE_OF_UPDATE_INDEX_NAME': 'providerDateOfUpdate', 'SSN_INDEX_NAME': 'ssnIndex', 'COMPACTS': '["aslp", "octp", "coun"]', diff --git a/backend/compact-connect/lambdas/python/migration/tests/function/test_migrate_update_sort_keys.py b/backend/compact-connect/lambdas/python/migration/tests/function/test_migrate_update_sort_keys.py index f09a685e2..dff07ff85 
100644 --- a/backend/compact-connect/lambdas/python/migration/tests/function/test_migrate_update_sort_keys.py +++ b/backend/compact-connect/lambdas/python/migration/tests/function/test_migrate_update_sort_keys.py @@ -31,7 +31,7 @@ def test_should_migrate_provider_update_records_to_expected_pattern(self): ) serialized_old_record = old_provider_update_record.serialize_to_database_record() # replace sk with old pattern to simulate old record to be migrated - serialized_old_record['sk'] = 'aslp#PROVIDER#UPDATE#1752526787/2f429ccda22d273b1ee4876f2917e27f' + serialized_old_record['sk'] = 'coun#PROVIDER#UPDATE#1752526787/2f429ccda22d273b1ee4876f2917e27f' del serialized_old_record['createDate'] serialized_old_record['dateOfUpdate'] = DEFAULT_PROVIDER_UPDATE_DATETIME self.config.provider_table.put_item(Item=serialized_old_record) diff --git a/backend/compact-connect/pipeline/backend_stage.py b/backend/compact-connect/pipeline/backend_stage.py index 6b4acc429..6a83de8a9 100644 --- a/backend/compact-connect/pipeline/backend_stage.py +++ b/backend/compact-connect/pipeline/backend_stage.py @@ -206,8 +206,8 @@ def __init__( ) # Stack to house data migration custom resources - # This stack depends on the API Lambda stack to ensure - # all infrastructure is in place before migrations run + # This stack depends on the API and event listener stacks to ensure + # all core infrastructure is in place before migrations run self.data_migration_stack = DataMigrationStack( self, 'DataMigrationStack', diff --git a/backend/compact-connect/tests/smoke/smoke_common.py b/backend/compact-connect/tests/smoke/smoke_common.py index 9a57e7906..aa9b5cc09 100644 --- a/backend/compact-connect/tests/smoke/smoke_common.py +++ b/backend/compact-connect/tests/smoke/smoke_common.py @@ -264,11 +264,10 @@ def get_provider_user_records(compact: str, provider_id: str) -> ProviderUserRec last_evaluated_key = None while True: pagination = {'ExclusiveStartKey': last_evaluated_key} if last_evaluated_key else {} - # 
This query key condition expression ensures we always grab all the primary and update records + # Grab all records under the provider partition query_resp = config.provider_user_dynamodb_table.query( Select='ALL_ATTRIBUTES', - KeyConditionExpression=Key('pk').eq(f'{compact}#PROVIDER#{provider_id}') - & Key('sk').lt(f'{compact}#UPDATE#9'), + KeyConditionExpression=Key('pk').eq(f'{compact}#PROVIDER#{provider_id}'), ConsistentRead=True, **pagination, ) From 13b31539a65b82df6363a3431de221a683c341a0 Mon Sep 17 00:00:00 2001 From: Landon Shumway Date: Mon, 17 Nov 2025 15:00:12 -0600 Subject: [PATCH 68/81] more CodeRabbit feedback --- .../cc_common/data_model/schema/data_event/api.py | 4 ++-- .../python/common/cc_common/event_bus_client.py | 13 +++++++++---- .../handlers/rollback_license_upload.py | 2 +- .../tests/function/test_rollback_license_upload.py | 2 ++ 4 files changed, 14 insertions(+), 7 deletions(-) diff --git a/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/data_event/api.py b/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/data_event/api.py index 373cf793e..ff73f21f3 100644 --- a/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/data_event/api.py +++ b/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/data_event/api.py @@ -74,7 +74,7 @@ class LicenseRevertDetailSchema(DataEventDetailBaseSchema): rollbackReason = String(required=True, allow_none=False) startTime = DateTime(required=True, allow_none=False) endTime = DateTime(required=True, allow_none=False) - RollbackExecutionName = String(required=True, allow_none=False) + rollbackExecutionName = String(required=True, allow_none=False) class PrivilegeRevertDetailSchema(DataEventDetailBaseSchema): @@ -83,4 +83,4 @@ class PrivilegeRevertDetailSchema(DataEventDetailBaseSchema): rollbackReason = String(required=True, allow_none=False) startTime = DateTime(required=True, allow_none=False) endTime = 
DateTime(required=True, allow_none=False) - RollbackExecutionName = String(required=True, allow_none=False) + rollbackExecutionName = String(required=True, allow_none=False) diff --git a/backend/compact-connect/lambdas/python/common/cc_common/event_bus_client.py b/backend/compact-connect/lambdas/python/common/cc_common/event_bus_client.py index a516188d5..878fadfca 100644 --- a/backend/compact-connect/lambdas/python/common/cc_common/event_bus_client.py +++ b/backend/compact-connect/lambdas/python/common/cc_common/event_bus_client.py @@ -16,6 +16,7 @@ ) from cc_common.event_batch_writer import EventBatchWriter from cc_common.utils import ResponseEncoder +from marshmallow import ValidationError class EventBusClient: @@ -475,13 +476,15 @@ def publish_license_revert_event( 'rollbackReason': rollback_reason, 'startTime': start_time, 'endTime': end_time, - 'RollbackExecutionName': execution_name, + 'rollbackExecutionName': execution_name, 'eventTime': config.current_standard_datetime, } license_revert_detail_schema = LicenseRevertDetailSchema() - license_revert_detail_schema.validate(event_detail) deserialized_detail = license_revert_detail_schema.dump(event_detail) + validation_errors = license_revert_detail_schema.validate(deserialized_detail) + if validation_errors: + raise ValidationError(message=validation_errors) self._publish_event( source=source, @@ -525,13 +528,15 @@ def publish_privilege_revert_event( 'rollbackReason': rollback_reason, 'startTime': start_time, 'endTime': end_time, - 'RollbackExecutionName': execution_name, + 'rollbackExecutionName': execution_name, 'eventTime': config.current_standard_datetime, } privilege_revert_detail_schema = PrivilegeRevertDetailSchema() - privilege_revert_detail_schema.validate(event_detail) deserialized_detail = privilege_revert_detail_schema.dump(event_detail) + validation_errors = privilege_revert_detail_schema.validate(deserialized_detail) + if validation_errors: + raise ValidationError(message=validation_errors) 
self._publish_event( source=source, diff --git a/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py b/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py index 2c38e9cd4..8da4a90df 100644 --- a/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py +++ b/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py @@ -684,7 +684,7 @@ def add_delete(pk: str, sk: str, update_record: bool): # Step 2: Check provider updates - any after start_datetime make provider ineligible provider_updates = provider_records.get_all_provider_update_records() for update in provider_updates: - if update.dateOfUpdate >= upload_window_start_datetime: + if update.createDate >= upload_window_start_datetime: ineligible_updates.append( IneligibleUpdate( record_type='providerUpdate', diff --git a/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py b/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py index b292df739..955f094c9 100644 --- a/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py +++ b/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py @@ -450,6 +450,8 @@ def _when_provider_changed_home_jurisdiction_after_license_upload(self): value_overrides={ 'providerId': self.provider_id, 'compact': self.compact, + # home jurisdiction was changed during license upload window + 'createDate': self.default_upload_datetime, 'updateType': self.update_categories.HOME_JURISDICTION_CHANGE, 'previous': {**self.provider_data.to_dict()}, 'updatedValues': { From a312ed88f0f9548f01ab33ef2e23435b2a1ff020 Mon Sep 17 00:00:00 2001 From: Landon Shumway Date: Mon, 17 Nov 2025 15:17:48 -0600 Subject: [PATCH 69/81] PR feedback - using enum in post_dump hooks 
--- .../cc_common/data_model/schema/license/record.py | 10 +++++----- .../cc_common/data_model/schema/privilege/record.py | 10 +++++----- .../cc_common/data_model/schema/provider/record.py | 8 ++++++-- 3 files changed, 16 insertions(+), 12 deletions(-) diff --git a/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/license/record.py b/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/license/record.py index 6519b0ac9..a01152e46 100644 --- a/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/license/record.py +++ b/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/license/record.py @@ -2,10 +2,6 @@ from datetime import date from urllib.parse import quote -from marshmallow import ValidationError, post_dump, post_load, pre_dump, pre_load, validates_schema -from marshmallow.fields import UUID, Date, DateTime, Email, List, Nested, String -from marshmallow.validate import Length - from cc_common.config import config from cc_common.data_model.schema.base_record import ( BaseRecordSchema, @@ -32,6 +28,10 @@ ) from cc_common.data_model.schema.investigation.record import InvestigationDetailsSchema from cc_common.data_model.schema.license.common import LicenseCommonSchema +from cc_common.data_model.update_tier_enum import UpdateTierEnum +from marshmallow import ValidationError, post_dump, post_load, pre_dump, pre_load, validates_schema +from marshmallow.fields import UUID, Date, DateTime, Email, List, Nested, String +from marshmallow.validate import Length @BaseRecordSchema.register_schema('license') @@ -249,7 +249,7 @@ def generate_pk_sk(self, in_data, **kwargs): # noqa: ARG001 unused-argument change_hash = self.hash_changes(in_data) license_type_abbr = config.license_type_abbreviations[in_data['compact']][in_data['licenseType']] in_data['sk'] = ( - f'{in_data["compact"]}#UPDATE#3#license/{in_data["jurisdiction"]}/{license_type_abbr}/{in_data["createDate"]}/{change_hash}' + 
f'{in_data["compact"]}#UPDATE#{UpdateTierEnum.TIER_THREE}#license/{in_data["jurisdiction"]}/{license_type_abbr}/{in_data["createDate"]}/{change_hash}' ) return in_data diff --git a/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/privilege/record.py b/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/privilege/record.py index 625007827..46dd898cd 100644 --- a/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/privilege/record.py +++ b/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/privilege/record.py @@ -1,10 +1,6 @@ # ruff: noqa: N801, N815, ARG002 invalid-name unused-argument from datetime import date -from marshmallow import Schema, ValidationError, post_dump, post_load, pre_dump, pre_load, validates_schema -from marshmallow.fields import UUID, Date, DateTime, List, Nested, String -from marshmallow.validate import Length - from cc_common.config import config from cc_common.data_model.schema.base_record import BaseRecordSchema, ForgivingSchema from cc_common.data_model.schema.common import ( @@ -27,6 +23,10 @@ UpdateType, ) from cc_common.data_model.schema.investigation.record import InvestigationDetailsSchema +from cc_common.data_model.update_tier_enum import UpdateTierEnum +from marshmallow import Schema, ValidationError, post_dump, post_load, pre_dump, pre_load, validates_schema +from marshmallow.fields import UUID, Date, DateTime, List, Nested, String +from marshmallow.validate import Length class AttestationVersionRecordSchema(Schema): @@ -240,7 +240,7 @@ def generate_pk_sk(self, in_data, **kwargs): # noqa: ARG001 unused-argument change_hash = self.hash_changes(in_data) license_type_abbr = config.license_type_abbreviations[in_data['compact']][in_data['licenseType']] in_data['sk'] = ( - f'{in_data["compact"]}#UPDATE#1#privilege/{in_data["jurisdiction"]}/{license_type_abbr}/{in_data["createDate"]}/{change_hash}' + 
f'{in_data["compact"]}#UPDATE#{UpdateTierEnum.TIER_ONE}#privilege/{in_data["jurisdiction"]}/{license_type_abbr}/{in_data["createDate"]}/{change_hash}' ) return in_data diff --git a/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/provider/record.py b/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/provider/record.py index 126c1ccc6..65137ab09 100644 --- a/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/provider/record.py +++ b/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/provider/record.py @@ -26,6 +26,7 @@ Set, UpdateType, ) +from cc_common.data_model.update_tier_enum import UpdateTierEnum @BaseRecordSchema.register_schema('provider') @@ -246,9 +247,12 @@ def populate_create_date_for_backwards_compatibility(self, in_data, **kwargs): @post_dump # Must be _post_ dump so we have values that are more easily hashed def generate_pk_sk(self, in_data, **kwargs): # noqa: ARG001 unused-argument in_data['pk'] = f'{in_data["compact"]}#PROVIDER#{in_data["providerId"]}' - # This needs to include a iso formatted datetime string and a hash of the changes + # This needs to include an iso formatted datetime string and a hash of the changes # to the record. We'll use the createDate and the hash of the updatedValues # field for this. + # Provider update records are considered a tier 2 update. Privilege updates are tier 1 because they are accessed + # most frequently. Provider update records are not generated often, so it is more performant to place them at + # tier 2, with license updates being the last tier 3. 
change_hash = self.hash_changes(in_data) - in_data['sk'] = f'{in_data["compact"]}#UPDATE#2#provider/{in_data["createDate"]}/{change_hash}' + in_data['sk'] = f'{in_data["compact"]}#UPDATE#{UpdateTierEnum.TIER_TWO}#provider/{in_data["createDate"]}/{change_hash}' return in_data From 6405fe3fe82614140dcfccd168e7796caed964be Mon Sep 17 00:00:00 2001 From: Landon Shumway Date: Mon, 17 Nov 2025 15:38:58 -0600 Subject: [PATCH 70/81] Add pre-req to deactivate app client before rollback --- .../disaster_recovery/LICENSE_UPLOAD_ROLLBACK.md | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/backend/compact-connect/disaster_recovery/LICENSE_UPLOAD_ROLLBACK.md b/backend/compact-connect/disaster_recovery/LICENSE_UPLOAD_ROLLBACK.md index c4b67b7a5..65252b684 100644 --- a/backend/compact-connect/disaster_recovery/LICENSE_UPLOAD_ROLLBACK.md +++ b/backend/compact-connect/disaster_recovery/LICENSE_UPLOAD_ROLLBACK.md @@ -13,8 +13,9 @@ The system will automatically determine which providers had their license record Before starting the rollback: 1. ✅ **Verify the Problem**: Confirm which jurisdiction uploaded bad data for which compact(s) -2. ✅ **Determine Time Window**: Identify the exact start and end times (UTC) of the problematic uploads -3. ✅ **Stakeholder Notification**: Coordinate with relevant state administrators and other stakeholders +2. ✅ **Disable automated access for Jurisdiction**: If jurisdiction has API credentials for automated uploads, disable those credentials to prevent further data changes until system has been recovered. To do this, determine which Cognito app client(s) the jurisdiction is using for the compact(s) and delete the appropriate app client(s) from the State Auth Cognito user pool. +3. ✅ **Determine Time Window**: Identify the exact start and end times (UTC) of the problematic uploads +4. 
✅ **Stakeholder Notification**: Coordinate with relevant state administrators and other stakeholders ### Step 1: Gather Required Information From 7715b5dda1d2ffa62093f31a24376d2e3cc6d708 Mon Sep 17 00:00:00 2001 From: Landon Shumway Date: Mon, 17 Nov 2025 15:58:53 -0600 Subject: [PATCH 71/81] More CodeRabbit feedback --- .../lambdas/python/common/cc_common/data_model/data_client.py | 4 ++-- .../disaster-recovery/handlers/rollback_license_upload.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/backend/compact-connect/lambdas/python/common/cc_common/data_model/data_client.py b/backend/compact-connect/lambdas/python/common/cc_common/data_model/data_client.py index 764538110..3f5188ef7 100644 --- a/backend/compact-connect/lambdas/python/common/cc_common/data_model/data_client.py +++ b/backend/compact-connect/lambdas/python/common/cc_common/data_model/data_client.py @@ -1037,7 +1037,7 @@ def _get_privilege_update_records_directly( # TODO - remove old pattern once migration is complete # noqa: FIX002 sk_prefixes = [ # New pattern - f'{compact}#UPDATE#1#privilege/{jurisdiction}/{license_type_abbr}/', + f'{compact}#UPDATE#{UpdateTierEnum.TIER_ONE}#privilege/{jurisdiction}/{license_type_abbr}/', # Old pattern f'{compact}#PROVIDER#privilege/{jurisdiction}/{license_type_abbr}#UPDATE', ] @@ -1090,7 +1090,7 @@ def get_privilege_data( :param bool detail: Boolean determining whether we include associated records or just privilege record itself :raises CCNotFoundException: If the privilege record is not found :return If detail = False list of length one containing privilege item, if detail = True list containing, - privilege record, privilege update records and privilege adverse action records + privilege record and privilege update records """ # Query directly for the privilege record privilege = self._get_privilege_record_directly( diff --git a/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py 
b/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py index 8da4a90df..304445d29 100644 --- a/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py +++ b/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py @@ -689,7 +689,7 @@ def add_delete(pk: str, sk: str, update_record: bool): IneligibleUpdate( record_type='providerUpdate', type_of_update=update.updateType, - update_time=update.dateOfUpdate.isoformat(), + update_time=update.createDate.isoformat(), reason='Provider update occurred after rollback start time. Manual review required.', # provider updates are not specific to a license type license_type='N/A', From 1a93183acdc80b9c7d870d2284f730ee4734803c Mon Sep 17 00:00:00 2001 From: Landon Shumway Date: Mon, 17 Nov 2025 16:37:03 -0600 Subject: [PATCH 72/81] CodeRabbit feedback --- .../disaster_recovery/LICENSE_UPLOAD_ROLLBACK.md | 2 +- .../handlers/rollback_license_upload.py | 2 +- .../smoke/rollback_license_upload_smoke_tests.py | 11 ++++------- 3 files changed, 6 insertions(+), 9 deletions(-) diff --git a/backend/compact-connect/disaster_recovery/LICENSE_UPLOAD_ROLLBACK.md b/backend/compact-connect/disaster_recovery/LICENSE_UPLOAD_ROLLBACK.md index 65252b684..72105c473 100644 --- a/backend/compact-connect/disaster_recovery/LICENSE_UPLOAD_ROLLBACK.md +++ b/backend/compact-connect/disaster_recovery/LICENSE_UPLOAD_ROLLBACK.md @@ -66,7 +66,7 @@ The Step Function will process providers in batches. Monitor the step function e ### Step 5: Review Results -Once the execution completes, comprehensive results are stored in S3. The S3 key is returned as output from the lambda step of the step function +Once the execution completes, comprehensive results are stored in S3. The S3 key is returned as output from the lambda step of the step function. Check the Step Function execution output/logs to get the S3 key. 
#### Accessing the Results File diff --git a/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py b/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py index 304445d29..18072ad97 100644 --- a/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py +++ b/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py @@ -158,7 +158,7 @@ def to_dict(self) -> dict: def from_dict(cls, data: dict) -> 'RollbackResults': """Create from dictionary loaded from S3.""" return cls( - execution_name=data.get('executionName', ''), + execution_name=data['executionName'], skipped_provider_details=[ ProviderSkippedDetails( provider_id=detail['providerId'], diff --git a/backend/compact-connect/tests/smoke/rollback_license_upload_smoke_tests.py b/backend/compact-connect/tests/smoke/rollback_license_upload_smoke_tests.py index ec4281b12..139bab552 100644 --- a/backend/compact-connect/tests/smoke/rollback_license_upload_smoke_tests.py +++ b/backend/compact-connect/tests/smoke/rollback_license_upload_smoke_tests.py @@ -169,12 +169,6 @@ def wait_for_all_providers_created(staff_headers: dict, expected_count: int, max # Query using the common family name prefix 'RollbackTest' # The API will return all providers with family names starting with this prefix - base_query_body = { - 'query': {'familyName': 'RollbackTest'}, - 'pagination': { - 'pageSize': 100 # Maximum page size to minimize number of requests - }, - } last_key = None page_num = 1 @@ -182,7 +176,10 @@ def wait_for_all_providers_created(staff_headers: dict, expected_count: int, max while time.time() - start_time < max_wait_time: # Collect all providers across all pages while True: - query_body = base_query_body.copy() + query_body = { + 'query': {'familyName': 'RollbackTest'}, + 'pagination': {'pageSize': 100}, + } if last_key: query_body['pagination']['lastKey'] = last_key From 
fa64de2059b5db603088e5fb2c67560313cf12f1 Mon Sep 17 00:00:00 2001 From: Landon Shumway Date: Mon, 17 Nov 2025 16:44:09 -0600 Subject: [PATCH 73/81] formatting --- .../common/cc_common/data_model/schema/license/record.py | 7 ++++--- .../common/cc_common/data_model/schema/privilege/record.py | 7 ++++--- .../common/cc_common/data_model/schema/provider/record.py | 4 +++- .../lambdas/python/common/cc_common/event_bus_client.py | 3 ++- 4 files changed, 13 insertions(+), 8 deletions(-) diff --git a/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/license/record.py b/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/license/record.py index a01152e46..1cbbed27c 100644 --- a/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/license/record.py +++ b/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/license/record.py @@ -2,6 +2,10 @@ from datetime import date from urllib.parse import quote +from marshmallow import ValidationError, post_dump, post_load, pre_dump, pre_load, validates_schema +from marshmallow.fields import UUID, Date, DateTime, Email, List, Nested, String +from marshmallow.validate import Length + from cc_common.config import config from cc_common.data_model.schema.base_record import ( BaseRecordSchema, @@ -29,9 +33,6 @@ from cc_common.data_model.schema.investigation.record import InvestigationDetailsSchema from cc_common.data_model.schema.license.common import LicenseCommonSchema from cc_common.data_model.update_tier_enum import UpdateTierEnum -from marshmallow import ValidationError, post_dump, post_load, pre_dump, pre_load, validates_schema -from marshmallow.fields import UUID, Date, DateTime, Email, List, Nested, String -from marshmallow.validate import Length @BaseRecordSchema.register_schema('license') diff --git a/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/privilege/record.py 
b/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/privilege/record.py index 46dd898cd..cc6996389 100644 --- a/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/privilege/record.py +++ b/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/privilege/record.py @@ -1,6 +1,10 @@ # ruff: noqa: N801, N815, ARG002 invalid-name unused-argument from datetime import date +from marshmallow import Schema, ValidationError, post_dump, post_load, pre_dump, pre_load, validates_schema +from marshmallow.fields import UUID, Date, DateTime, List, Nested, String +from marshmallow.validate import Length + from cc_common.config import config from cc_common.data_model.schema.base_record import BaseRecordSchema, ForgivingSchema from cc_common.data_model.schema.common import ( @@ -24,9 +28,6 @@ ) from cc_common.data_model.schema.investigation.record import InvestigationDetailsSchema from cc_common.data_model.update_tier_enum import UpdateTierEnum -from marshmallow import Schema, ValidationError, post_dump, post_load, pre_dump, pre_load, validates_schema -from marshmallow.fields import UUID, Date, DateTime, List, Nested, String -from marshmallow.validate import Length class AttestationVersionRecordSchema(Schema): diff --git a/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/provider/record.py b/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/provider/record.py index 65137ab09..2c4de0c77 100644 --- a/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/provider/record.py +++ b/backend/compact-connect/lambdas/python/common/cc_common/data_model/schema/provider/record.py @@ -254,5 +254,7 @@ def generate_pk_sk(self, in_data, **kwargs): # noqa: ARG001 unused-argument # most frequently. Provider update records are not generated often, so it is more performant to place them at # tier 2, with license updates being the last tier 3. 
change_hash = self.hash_changes(in_data) - in_data['sk'] = f'{in_data["compact"]}#UPDATE#{UpdateTierEnum.TIER_TWO}#provider/{in_data["createDate"]}/{change_hash}' + in_data['sk'] = ( + f'{in_data["compact"]}#UPDATE#{UpdateTierEnum.TIER_TWO}#provider/{in_data["createDate"]}/{change_hash}' + ) return in_data diff --git a/backend/compact-connect/lambdas/python/common/cc_common/event_bus_client.py b/backend/compact-connect/lambdas/python/common/cc_common/event_bus_client.py index 878fadfca..fe6af52b7 100644 --- a/backend/compact-connect/lambdas/python/common/cc_common/event_bus_client.py +++ b/backend/compact-connect/lambdas/python/common/cc_common/event_bus_client.py @@ -2,6 +2,8 @@ from datetime import date, datetime from uuid import UUID +from marshmallow import ValidationError + from cc_common.config import config from cc_common.data_model.schema.common import InvestigationAgainstEnum from cc_common.data_model.schema.data_event.api import ( @@ -16,7 +18,6 @@ ) from cc_common.event_batch_writer import EventBatchWriter from cc_common.utils import ResponseEncoder -from marshmallow import ValidationError class EventBusClient: From c011f23047265f7e530aeb106496fd578ac2b539 Mon Sep 17 00:00:00 2001 From: Landon Shumway Date: Tue, 18 Nov 2025 11:43:52 -0600 Subject: [PATCH 74/81] PR feedback - doc update --- .../disaster_recovery/LICENSE_UPLOAD_ROLLBACK.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/backend/compact-connect/disaster_recovery/LICENSE_UPLOAD_ROLLBACK.md b/backend/compact-connect/disaster_recovery/LICENSE_UPLOAD_ROLLBACK.md index 72105c473..aff1cfb53 100644 --- a/backend/compact-connect/disaster_recovery/LICENSE_UPLOAD_ROLLBACK.md +++ b/backend/compact-connect/disaster_recovery/LICENSE_UPLOAD_ROLLBACK.md @@ -43,7 +43,7 @@ You'll need the following information for the execution: 1. Click **"Start Execution"** 2. 
Enter a descriptive execution name (this will be used for the S3 results folder): ``` - rollback-aslp-oh-2025-01-15 + rollback-aslp-oh-2020-01-15 ``` 3. Paste the following JSON input (replace values with your specific parameters): From c10952ff2813c2daa4cd79c3f41d94acce248c92 Mon Sep 17 00:00:00 2001 From: Landon Shumway Date: Tue, 18 Nov 2025 12:16:47 -0600 Subject: [PATCH 75/81] additional logging to show processed count --- .../handlers/rollback_license_upload.py | 16 +++++++++++++--- 1 file changed, 13 insertions(+), 3 deletions(-) diff --git a/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py b/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py index 18072ad97..6fc26dc4e 100644 --- a/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py +++ b/backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py @@ -355,8 +355,6 @@ def rollback_license_upload(event: dict, context: LambdaContext): # noqa: ARG00 'executionName': execution_name, } - providers_processed += 1 - # Process the provider result = _process_provider_rollback( provider_id=provider_id, @@ -368,16 +366,29 @@ def rollback_license_upload(event: dict, context: LambdaContext): # noqa: ARG00 execution_name=execution_name, ) + providers_processed += 1 + # Update results based on outcome if isinstance(result, ProviderRevertedSummary): providers_reverted += 1 existing_results.reverted_provider_summaries.append(result) + logger.info('Provider reverted successfully', provider_id=provider_id) elif isinstance(result, ProviderSkippedDetails): providers_skipped += 1 existing_results.skipped_provider_details.append(result) + logger.info('Provider skipped due to ineligibility', provider_id=provider_id) elif isinstance(result, ProviderFailedDetails): providers_failed += 1 existing_results.failed_provider_details.append(result) + logger.info('Provider failed to 
revert', provider_id=provider_id, error=result.error) + + logger.info( + 'processed provider', + total_providers_processed=providers_processed, + providers_reverted=providers_reverted, + providers_skipped=providers_skipped, + providers_failed=providers_failed, + ) # All providers processed successfully logger.info( @@ -508,7 +519,6 @@ def _process_provider_rollback( # Publish events for successful rollback _publish_revert_events(result, compact, rollback_reason, start_datetime, end_datetime, execution_name) - logger.info('Provider rollback successful', provider_id=provider_id) return result From 4feef8af020366f24a49717b5f0b4f04ef433d5e Mon Sep 17 00:00:00 2001 From: Landon Shumway Date: Tue, 18 Nov 2025 15:13:26 -0600 Subject: [PATCH 76/81] Remove feature flag for duplicate ssn in uploads This feature has been deployed for several weeks without issue. This removes the associated feature flag --- .../common/cc_common/feature_flag_enum.py | 1 - .../provider-data-v1/handlers/bulk_upload.py | 24 +++++--------- .../provider-data-v1/handlers/licenses.py | 33 +++++++------------ .../test_handlers/test_bulk_upload.py | 6 +--- .../function/test_handlers/test_licenses.py | 6 +--- .../stacks/feature_flag_stack/__init__.py | 14 -------- 6 files changed, 22 insertions(+), 62 deletions(-) diff --git a/backend/compact-connect/lambdas/python/common/cc_common/feature_flag_enum.py b/backend/compact-connect/lambdas/python/common/cc_common/feature_flag_enum.py index 5180b9199..28dc38d60 100644 --- a/backend/compact-connect/lambdas/python/common/cc_common/feature_flag_enum.py +++ b/backend/compact-connect/lambdas/python/common/cc_common/feature_flag_enum.py @@ -12,4 +12,3 @@ class FeatureFlagEnum(StrEnum): TEST_FLAG = 'test-flag' # runtime flags ENCUMBRANCE_MULTI_CATEGORY_FLAG = 'encumbrance-multi-category-flag' - DUPLICATE_SSN_UPLOAD_CHECK_FLAG = 'duplicate-ssn-upload-check-flag' diff --git a/backend/compact-connect/lambdas/python/provider-data-v1/handlers/bulk_upload.py 
b/backend/compact-connect/lambdas/python/provider-data-v1/handlers/bulk_upload.py index 7c518bac3..921024774 100644 --- a/backend/compact-connect/lambdas/python/provider-data-v1/handlers/bulk_upload.py +++ b/backend/compact-connect/lambdas/python/provider-data-v1/handlers/bulk_upload.py @@ -14,8 +14,6 @@ from cc_common.event_batch_writer import EventBatchWriter from cc_common.exceptions import CCInternalException -# initialize flag outside of handler so the flag is cached for the lifecycle of the lambda execution environment -from cc_common.feature_flag_client import FeatureFlagEnum, is_feature_enabled # noqa: E402 from cc_common.utils import ( ResponseEncoder, api_handler, @@ -25,11 +23,6 @@ from license_csv_reader import LicenseCSVReader from marshmallow import ValidationError -duplicate_ssn_check_flag_enabled = is_feature_enabled( - FeatureFlagEnum.DUPLICATE_SSN_UPLOAD_CHECK_FLAG, fail_default=True -) - - @api_handler @authorize_compact_jurisdiction(action='write') def bulk_upload_url_handler(event: dict, context: LambdaContext): @@ -160,15 +153,14 @@ def process_bulk_upload_file( validated_license = schema.load(dict(compact=compact, jurisdiction=jurisdiction, **raw_license)) # verify that this ssn has not been used previously in the same batch license_ssn = validated_license['ssn'] - if duplicate_ssn_check_flag_enabled: - matched_ssn_index = ssns_in_file_upload.get(license_ssn) - if matched_ssn_index: - raise ValidationError( - message=f'Duplicate License SSN detected. SSN matches with record ' - f'{matched_ssn_index}. Every record must have a unique SSN within the same ' - f'file.' - ) - ssns_in_file_upload.update({license_ssn: i + 1}) + matched_ssn_index = ssns_in_file_upload.get(license_ssn) + if matched_ssn_index: + raise ValidationError( + message=f'Duplicate License SSN detected. SSN matches with record ' + f'{matched_ssn_index}. Every record must have a unique SSN within the same ' + f'file.' 
+ ) + ssns_in_file_upload.update({license_ssn: i + 1}) except TypeError as e: # This will be raised, if `raw_license` includes compact and/or jurisdiction fields logger.error('License contains unsupported fields', fields=list(raw_license.keys()), exc_info=e) diff --git a/backend/compact-connect/lambdas/python/provider-data-v1/handlers/licenses.py b/backend/compact-connect/lambdas/python/provider-data-v1/handlers/licenses.py index 405a22b5e..53759aba7 100644 --- a/backend/compact-connect/lambdas/python/provider-data-v1/handlers/licenses.py +++ b/backend/compact-connect/lambdas/python/provider-data-v1/handlers/licenses.py @@ -10,14 +10,6 @@ schema = LicensePostRequestSchema() -# initialize flag outside of handler so the flag is cached for the lifecycle of the execution environment -from cc_common.feature_flag_client import FeatureFlagEnum, is_feature_enabled # noqa: E402 - -# low risk flag, so we default to enabled if failure detected -duplicate_ssn_check_flag_enabled = is_feature_enabled( - FeatureFlagEnum.DUPLICATE_SSN_UPLOAD_CHECK_FLAG, fail_default=True -) - @api_handler @optional_signature_auth @@ -71,19 +63,18 @@ def post_licenses(event: dict, context: LambdaContext): # noqa: ARG001 unused-a 'errors': invalid_records, } ) - if duplicate_ssn_check_flag_enabled: - # verify that none of the SSNs are repeats within the same batch - license_ssns = [license_record['ssn'] for license_record in licenses] - if len(set(license_ssns)) < len(license_ssns): - raise CCInvalidRequestCustomResponseException( - response_body={ - 'message': 'Invalid license records in request. See errors for more detail.', - 'errors': { - 'SSN': 'Same SSN detected on multiple rows. ' - 'Every record must have a unique SSN within the same request.' 
- }, - } - ) + # verify that none of the SSNs are repeats within the same batch + license_ssns = [license_record['ssn'] for license_record in licenses] + if len(set(license_ssns)) < len(license_ssns): + raise CCInvalidRequestCustomResponseException( + response_body={ + 'message': 'Invalid license records in request. See errors for more detail.', + 'errors': { + 'SSN': 'Same SSN detected on multiple rows. ' + 'Every record must have a unique SSN within the same request.' + }, + } + ) event_time = config.current_standard_datetime diff --git a/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_handlers/test_bulk_upload.py b/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_handlers/test_bulk_upload.py index 865f912a4..f2458b11f 100644 --- a/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_handlers/test_bulk_upload.py +++ b/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_handlers/test_bulk_upload.py @@ -1,6 +1,6 @@ import csv import json -from unittest.mock import MagicMock, patch +from unittest.mock import patch from uuid import uuid4 from botocore.exceptions import ClientError @@ -8,9 +8,6 @@ from tests.function import TstFunction -mock_flag_client = MagicMock() -mock_flag_client.return_value = True - @mock_aws class TestBulkUpload(TstFunction): @@ -43,7 +40,6 @@ def test_get_bulk_upload_url_forbidden(self): @mock_aws -@patch('cc_common.feature_flag_client.is_feature_enabled', mock_flag_client) class TestProcessObjects(TstFunction): def test_uploaded_csv(self): from handlers.bulk_upload import parse_bulk_upload_file diff --git a/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_handlers/test_licenses.py b/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_handlers/test_licenses.py index f338eb0e6..49e767af4 100644 --- 
a/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_handlers/test_licenses.py +++ b/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_handlers/test_licenses.py @@ -1,6 +1,6 @@ import json from datetime import datetime -from unittest.mock import MagicMock, patch +from unittest.mock import patch from uuid import uuid4 from common_test.sign_request import sign_request @@ -8,12 +8,8 @@ from .. import TstFunction -mock_flag_client = MagicMock() -mock_flag_client.return_value = True - @mock_aws -@patch('cc_common.feature_flag_client.is_feature_enabled', mock_flag_client) @patch('cc_common.config._Config.current_standard_datetime', datetime.fromisoformat('2024-11-08T23:59:59+00:00')) class TestLicenses(TstFunction): def setUp(self): diff --git a/backend/compact-connect/stacks/feature_flag_stack/__init__.py b/backend/compact-connect/stacks/feature_flag_stack/__init__.py index 3700c5696..301d9536d 100644 --- a/backend/compact-connect/stacks/feature_flag_stack/__init__.py +++ b/backend/compact-connect/stacks/feature_flag_stack/__init__.py @@ -124,20 +124,6 @@ def __init__( environment_name=environment_name, ) - self.duplicate_ssn_upload_check_flag = FeatureFlagResource( - self, - 'DuplicateSsnUploadCheckFlag', - provider=self.provider, # Shared provider - flag_name='duplicate-ssn-upload-check-flag', - # Low risk update, we will automatically enable for every environment - auto_enable_envs=[ - FeatureFlagEnvironmentName.TEST, - FeatureFlagEnvironmentName.BETA, - FeatureFlagEnvironmentName.PROD, - ], - environment_name=environment_name, - ) - def _create_common_provider(self, environment_name: str) -> Provider: # Create shared Lambda function for managing all feature flags # This function is reused across all FeatureFlagResource instances From 0cbdbc22299f2dd27177a6b9cbbbcdc65ec1e079 Mon Sep 17 00:00:00 2001 From: Landon Shumway Date: Tue, 18 Nov 2025 16:32:03 -0600 Subject: [PATCH 77/81] Formatting --- 
.../python/common/cc_common/data_model/transaction_client.py | 2 +- .../tests/function/test_data_model/test_transaction_client.py | 4 +--- .../lambdas/python/provider-data-v1/handlers/bulk_upload.py | 2 +- 3 files changed, 3 insertions(+), 5 deletions(-) diff --git a/backend/compact-connect/lambdas/python/common/cc_common/data_model/transaction_client.py b/backend/compact-connect/lambdas/python/common/cc_common/data_model/transaction_client.py index 275a230b2..0ef086eb2 100644 --- a/backend/compact-connect/lambdas/python/common/cc_common/data_model/transaction_client.py +++ b/backend/compact-connect/lambdas/python/common/cc_common/data_model/transaction_client.py @@ -377,7 +377,7 @@ def reconcile_unsettled_transactions(self, compact: str, settled_transactions: l if unmatched_settled_transaction_ids: logger.error( 'Unable to reconcile some transactions from Authorize.Net with our unsettled transactions', - unreconciled_transactions=unmatched_settled_transaction_ids + unreconciled_transactions=unmatched_settled_transaction_ids, ) for unsettled_tx in unmatched_unsettled: diff --git a/backend/compact-connect/lambdas/python/common/tests/function/test_data_model/test_transaction_client.py b/backend/compact-connect/lambdas/python/common/tests/function/test_data_model/test_transaction_client.py index a525544dd..e72dd93a6 100644 --- a/backend/compact-connect/lambdas/python/common/tests/function/test_data_model/test_transaction_client.py +++ b/backend/compact-connect/lambdas/python/common/tests/function/test_data_model/test_transaction_client.py @@ -261,9 +261,7 @@ def test_reconcile_unsettled_transactions_deletes_matching_record_and_returns_ol ) # Two unmatched transactions remain @patch('cc_common.data_model.transaction_client.logger') - def test_reconcile_unsettled_transactions_logs_error_when_settled_transactions_not_matched( - self, mock_logger - ): + def test_reconcile_unsettled_transactions_logs_error_when_settled_transactions_not_matched(self, mock_logger): """ Test 
that reconcile_unsettled_transactions logs an error when settled transactions don't match unsettled ones. """ diff --git a/backend/compact-connect/lambdas/python/provider-data-v1/handlers/bulk_upload.py b/backend/compact-connect/lambdas/python/provider-data-v1/handlers/bulk_upload.py index 921024774..6efc5b24b 100644 --- a/backend/compact-connect/lambdas/python/provider-data-v1/handlers/bulk_upload.py +++ b/backend/compact-connect/lambdas/python/provider-data-v1/handlers/bulk_upload.py @@ -13,7 +13,6 @@ ) from cc_common.event_batch_writer import EventBatchWriter from cc_common.exceptions import CCInternalException - from cc_common.utils import ( ResponseEncoder, api_handler, @@ -23,6 +22,7 @@ from license_csv_reader import LicenseCSVReader from marshmallow import ValidationError + @api_handler @authorize_compact_jurisdiction(action='write') def bulk_upload_url_handler(event: dict, context: LambdaContext): From 8c407fec8867d95fcda53101946975516305a7e0 Mon Sep 17 00:00:00 2001 From: Landon Shumway Date: Wed, 19 Nov 2025 10:09:10 -0600 Subject: [PATCH 78/81] Revert "Remove feature flag for duplicate ssn in uploads" This reverts commit 4feef8af --- .../common/cc_common/feature_flag_enum.py | 1 + .../provider-data-v1/handlers/bulk_upload.py | 24 +++++++++----- .../provider-data-v1/handlers/licenses.py | 33 ++++++++++++------- .../test_handlers/test_bulk_upload.py | 6 +++- .../function/test_handlers/test_licenses.py | 6 +++- .../stacks/feature_flag_stack/__init__.py | 14 ++++++++ 6 files changed, 62 insertions(+), 22 deletions(-) diff --git a/backend/compact-connect/lambdas/python/common/cc_common/feature_flag_enum.py b/backend/compact-connect/lambdas/python/common/cc_common/feature_flag_enum.py index 28dc38d60..5180b9199 100644 --- a/backend/compact-connect/lambdas/python/common/cc_common/feature_flag_enum.py +++ b/backend/compact-connect/lambdas/python/common/cc_common/feature_flag_enum.py @@ -12,3 +12,4 @@ class FeatureFlagEnum(StrEnum): TEST_FLAG = 'test-flag' # 
runtime flags ENCUMBRANCE_MULTI_CATEGORY_FLAG = 'encumbrance-multi-category-flag' + DUPLICATE_SSN_UPLOAD_CHECK_FLAG = 'duplicate-ssn-upload-check-flag' diff --git a/backend/compact-connect/lambdas/python/provider-data-v1/handlers/bulk_upload.py b/backend/compact-connect/lambdas/python/provider-data-v1/handlers/bulk_upload.py index 6efc5b24b..7c518bac3 100644 --- a/backend/compact-connect/lambdas/python/provider-data-v1/handlers/bulk_upload.py +++ b/backend/compact-connect/lambdas/python/provider-data-v1/handlers/bulk_upload.py @@ -13,6 +13,9 @@ ) from cc_common.event_batch_writer import EventBatchWriter from cc_common.exceptions import CCInternalException + +# initialize flag outside of handler so the flag is cached for the lifecycle of the lambda execution environment +from cc_common.feature_flag_client import FeatureFlagEnum, is_feature_enabled # noqa: E402 from cc_common.utils import ( ResponseEncoder, api_handler, @@ -22,6 +25,10 @@ from license_csv_reader import LicenseCSVReader from marshmallow import ValidationError +duplicate_ssn_check_flag_enabled = is_feature_enabled( + FeatureFlagEnum.DUPLICATE_SSN_UPLOAD_CHECK_FLAG, fail_default=True +) + @api_handler @authorize_compact_jurisdiction(action='write') @@ -153,14 +160,15 @@ def process_bulk_upload_file( validated_license = schema.load(dict(compact=compact, jurisdiction=jurisdiction, **raw_license)) # verify that this ssn has not been used previously in the same batch license_ssn = validated_license['ssn'] - matched_ssn_index = ssns_in_file_upload.get(license_ssn) - if matched_ssn_index: - raise ValidationError( - message=f'Duplicate License SSN detected. SSN matches with record ' - f'{matched_ssn_index}. Every record must have a unique SSN within the same ' - f'file.' 
- ) - ssns_in_file_upload.update({license_ssn: i + 1}) + if duplicate_ssn_check_flag_enabled: + matched_ssn_index = ssns_in_file_upload.get(license_ssn) + if matched_ssn_index: + raise ValidationError( + message=f'Duplicate License SSN detected. SSN matches with record ' + f'{matched_ssn_index}. Every record must have a unique SSN within the same ' + f'file.' + ) + ssns_in_file_upload.update({license_ssn: i + 1}) except TypeError as e: # This will be raised, if `raw_license` includes compact and/or jurisdiction fields logger.error('License contains unsupported fields', fields=list(raw_license.keys()), exc_info=e) diff --git a/backend/compact-connect/lambdas/python/provider-data-v1/handlers/licenses.py b/backend/compact-connect/lambdas/python/provider-data-v1/handlers/licenses.py index 53759aba7..405a22b5e 100644 --- a/backend/compact-connect/lambdas/python/provider-data-v1/handlers/licenses.py +++ b/backend/compact-connect/lambdas/python/provider-data-v1/handlers/licenses.py @@ -10,6 +10,14 @@ schema = LicensePostRequestSchema() +# initialize flag outside of handler so the flag is cached for the lifecycle of the execution environment +from cc_common.feature_flag_client import FeatureFlagEnum, is_feature_enabled # noqa: E402 + +# low risk flag, so we default to enabled if failure detected +duplicate_ssn_check_flag_enabled = is_feature_enabled( + FeatureFlagEnum.DUPLICATE_SSN_UPLOAD_CHECK_FLAG, fail_default=True +) + @api_handler @optional_signature_auth @@ -63,18 +71,19 @@ def post_licenses(event: dict, context: LambdaContext): # noqa: ARG001 unused-a 'errors': invalid_records, } ) - # verify that none of the SSNs are repeats within the same batch - license_ssns = [license_record['ssn'] for license_record in licenses] - if len(set(license_ssns)) < len(license_ssns): - raise CCInvalidRequestCustomResponseException( - response_body={ - 'message': 'Invalid license records in request. 
See errors for more detail.', - 'errors': { - 'SSN': 'Same SSN detected on multiple rows. ' - 'Every record must have a unique SSN within the same request.' - }, - } - ) + if duplicate_ssn_check_flag_enabled: + # verify that none of the SSNs are repeats within the same batch + license_ssns = [license_record['ssn'] for license_record in licenses] + if len(set(license_ssns)) < len(license_ssns): + raise CCInvalidRequestCustomResponseException( + response_body={ + 'message': 'Invalid license records in request. See errors for more detail.', + 'errors': { + 'SSN': 'Same SSN detected on multiple rows. ' + 'Every record must have a unique SSN within the same request.' + }, + } + ) event_time = config.current_standard_datetime diff --git a/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_handlers/test_bulk_upload.py b/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_handlers/test_bulk_upload.py index f2458b11f..865f912a4 100644 --- a/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_handlers/test_bulk_upload.py +++ b/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_handlers/test_bulk_upload.py @@ -1,6 +1,6 @@ import csv import json -from unittest.mock import patch +from unittest.mock import MagicMock, patch from uuid import uuid4 from botocore.exceptions import ClientError @@ -8,6 +8,9 @@ from tests.function import TstFunction +mock_flag_client = MagicMock() +mock_flag_client.return_value = True + @mock_aws class TestBulkUpload(TstFunction): @@ -40,6 +43,7 @@ def test_get_bulk_upload_url_forbidden(self): @mock_aws +@patch('cc_common.feature_flag_client.is_feature_enabled', mock_flag_client) class TestProcessObjects(TstFunction): def test_uploaded_csv(self): from handlers.bulk_upload import parse_bulk_upload_file diff --git a/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_handlers/test_licenses.py 
b/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_handlers/test_licenses.py index 49e767af4..f338eb0e6 100644 --- a/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_handlers/test_licenses.py +++ b/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_handlers/test_licenses.py @@ -1,6 +1,6 @@ import json from datetime import datetime -from unittest.mock import patch +from unittest.mock import MagicMock, patch from uuid import uuid4 from common_test.sign_request import sign_request @@ -8,8 +8,12 @@ from .. import TstFunction +mock_flag_client = MagicMock() +mock_flag_client.return_value = True + @mock_aws +@patch('cc_common.feature_flag_client.is_feature_enabled', mock_flag_client) @patch('cc_common.config._Config.current_standard_datetime', datetime.fromisoformat('2024-11-08T23:59:59+00:00')) class TestLicenses(TstFunction): def setUp(self): diff --git a/backend/compact-connect/stacks/feature_flag_stack/__init__.py b/backend/compact-connect/stacks/feature_flag_stack/__init__.py index 301d9536d..3700c5696 100644 --- a/backend/compact-connect/stacks/feature_flag_stack/__init__.py +++ b/backend/compact-connect/stacks/feature_flag_stack/__init__.py @@ -124,6 +124,20 @@ def __init__( environment_name=environment_name, ) + self.duplicate_ssn_upload_check_flag = FeatureFlagResource( + self, + 'DuplicateSsnUploadCheckFlag', + provider=self.provider, # Shared provider + flag_name='duplicate-ssn-upload-check-flag', + # Low risk update, we will automatically enable for every environment + auto_enable_envs=[ + FeatureFlagEnvironmentName.TEST, + FeatureFlagEnvironmentName.BETA, + FeatureFlagEnvironmentName.PROD, + ], + environment_name=environment_name, + ) + def _create_common_provider(self, environment_name: str) -> Provider: # Create shared Lambda function for managing all feature flags # This function is reused across all FeatureFlagResource instances From c87807b346ec3d84e8052c3ba8a18bc1f87af4e8 
Mon Sep 17 00:00:00 2001 From: Landon Shumway Date: Wed, 19 Nov 2025 10:40:06 -0600 Subject: [PATCH 79/81] Match for duplicate license SSNs by license type Some licensees can have more than one license, so we need to check for duplicate SSNs in multiple records which have the same license type. --- .../provider-data-v1/handlers/bulk_upload.py | 15 +++--- .../provider-data-v1/handlers/licenses.py | 10 ++-- .../test_handlers/test_bulk_upload.py | 53 ++++++++++++++++++- .../function/test_handlers/test_licenses.py | 35 +++++++++++- 4 files changed, 97 insertions(+), 16 deletions(-) diff --git a/backend/compact-connect/lambdas/python/provider-data-v1/handlers/bulk_upload.py b/backend/compact-connect/lambdas/python/provider-data-v1/handlers/bulk_upload.py index 7c518bac3..1ace0c4f5 100644 --- a/backend/compact-connect/lambdas/python/provider-data-v1/handlers/bulk_upload.py +++ b/backend/compact-connect/lambdas/python/provider-data-v1/handlers/bulk_upload.py @@ -148,6 +148,7 @@ def process_bulk_upload_file( failed_validation_count = 0 # track which ssns were included in this file to detect duplicates, # which are not allowed within the same file upload + # We track by (ssn, licenseType) tuple to allow same SSN for different license types ssns_in_file_upload = {} with EventBatchWriter(config.events_client) as event_writer: @@ -158,17 +159,17 @@ def process_bulk_upload_file( # dict() here, because it prevents `compact` and `jurisdiction` from being allowed in the # raw_license validated_license = schema.load(dict(compact=compact, jurisdiction=jurisdiction, **raw_license)) - # verify that this ssn has not been used previously in the same batch - license_ssn = validated_license['ssn'] + # verify that this ssn/licenseType combination has not been used previously in the same batch + ssn_key = (validated_license['ssn'], validated_license['licenseType']) if duplicate_ssn_check_flag_enabled: - matched_ssn_index = ssns_in_file_upload.get(license_ssn) + matched_ssn_index = 
ssns_in_file_upload.get(ssn_key) if matched_ssn_index: raise ValidationError( - message=f'Duplicate License SSN detected. SSN matches with record ' - f'{matched_ssn_index}. Every record must have a unique SSN within the same ' - f'file.' + message=f'Duplicate License SSN detected for license type {validated_license["licenseType"]}. ' + f'SSN matches with record {matched_ssn_index}. Every record must have a unique SSN ' + f'per license type within the same file.' ) - ssns_in_file_upload.update({license_ssn: i + 1}) + ssns_in_file_upload.update({ssn_key: i + 1}) except TypeError as e: # This will be raised, if `raw_license` includes compact and/or jurisdiction fields logger.error('License contains unsupported fields', fields=list(raw_license.keys()), exc_info=e) diff --git a/backend/compact-connect/lambdas/python/provider-data-v1/handlers/licenses.py b/backend/compact-connect/lambdas/python/provider-data-v1/handlers/licenses.py index 405a22b5e..18ef1af65 100644 --- a/backend/compact-connect/lambdas/python/provider-data-v1/handlers/licenses.py +++ b/backend/compact-connect/lambdas/python/provider-data-v1/handlers/licenses.py @@ -72,15 +72,15 @@ def post_licenses(event: dict, context: LambdaContext): # noqa: ARG001 unused-a } ) if duplicate_ssn_check_flag_enabled: - # verify that none of the SSNs are repeats within the same batch - license_ssns = [license_record['ssn'] for license_record in licenses] - if len(set(license_ssns)) < len(license_ssns): + # verify that none of the SSN+LicenseType combinations are repeats within the same batch + license_keys = [(license_record['ssn'], license_record['licenseType']) for license_record in licenses] + if len(set(license_keys)) < len(license_keys): raise CCInvalidRequestCustomResponseException( response_body={ 'message': 'Invalid license records in request. See errors for more detail.', 'errors': { - 'SSN': 'Same SSN detected on multiple rows. ' - 'Every record must have a unique SSN within the same request.' 
+ 'SSN': 'Same SSN for the same license type detected on multiple rows. ' + 'Every record must have a unique SSN per license type within the same request.' }, } ) diff --git a/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_handlers/test_bulk_upload.py b/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_handlers/test_bulk_upload.py index 865f912a4..bba077bbf 100644 --- a/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_handlers/test_bulk_upload.py +++ b/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_handlers/test_bulk_upload.py @@ -343,8 +343,8 @@ def test_bulk_upload_prevents_repeated_ssns_within_the_same_file_upload(self): 'dateOfExpiration': '2026-01-01', }, 'errors': [ - 'Duplicate License SSN detected. SSN matches with record 1. ' - 'Every record must have a unique SSN within the same file.' + 'Duplicate License SSN detected for license type audiologist. SSN matches with record 1. ' + 'Every record must have a unique SSN per license type within the same file.' 
], } ), @@ -353,6 +353,55 @@ def test_bulk_upload_prevents_repeated_ssns_within_the_same_file_upload(self): self.assertEqual(expected_entry, call_args) + def test_bulk_upload_allows_repeated_ssns_for_different_license_types(self): + """Test that duplicate SSNs within a CSV upload are allowed if the license types are different.""" + from handlers.bulk_upload import parse_bulk_upload_file + + # Create CSV content that includes duplicate SSNs but different license types + csv_content = ( + 'ssn,npi,licenseNumber,givenName,middleName,familyName,suffix,dateOfBirth,dateOfIssuance' + ',dateOfRenewal,dateOfExpiration,licenseStatus,compactEligibility,homeAddressStreet1' + ',homeAddressStreet2,homeAddressCity,homeAddressState,homeAddressPostalCode' + ',emailAddress,phoneNumber,licenseType,licenseStatusName\n' + '123-45-6789,1234567890,LICENSE123,John,Middle,Doe,Jr.,1990-01-01,2020-01-01,2021-01-01,2023-01-01,active,' + 'eligible,123 Main St,Apt 1,Columbus,OH,43215,test@example.com,+15551234567,audiologist,Active\n' + '123-45-6789,1234567890,LICENSE456,John,Middle,Doe,Jr.,1990-01-01,2023-01-01,2025-01-01,2026-01-01,active,' + 'eligible,123 Main St,Apt 1,Columbus,OH,43215,test@example.com,+15551234567,speech-language pathologist,Active' + ) + + # Upload the CSV content directly to the mock S3 bucket + object_key = f'aslp/oh/{uuid4().hex}' + self._bucket.put_object(Key=object_key, Body=csv_content) + + # Simulate the s3 bucket event + with open('../common/tests/resources/put-event.json') as f: + event = json.load(f) + + event['Records'][0]['s3']['bucket'] = { + 'name': self._bucket.name, + 'arn': f'arn:aws:s3:::{self._bucket.name}', + 'ownerIdentity': {'principalId': 'ASDFG123'}, + } + event['Records'][0]['s3']['object']['key'] = object_key + + parse_bulk_upload_file(event, self.mock_context) + + # Verify that both messages were sent to the preprocessing queue + messages = self._license_preprocessing_queue.receive_messages(MaxNumberOfMessages=10) + self.assertEqual(2, 
len(messages)) + + message_data_1 = json.loads(messages[0].body) + message_data_2 = json.loads(messages[1].body) + + # Verify the license types are correct + # Messages might not be in order, so we check both + license_types = {message_data_1['licenseType'], message_data_2['licenseType']} + self.assertEqual({'audiologist', 'speech-language pathologist'}, license_types) + + # Verify SSNs are the same + self.assertEqual(message_data_1['ssn'], '123-45-6789') + self.assertEqual(message_data_2['ssn'], '123-45-6789') + def test_bulk_upload_handles_bom_character(self): """Test that CSV files with BOM characters are handled correctly.""" from handlers.bulk_upload import parse_bulk_upload_file diff --git a/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_handlers/test_licenses.py b/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_handlers/test_licenses.py index f338eb0e6..1d9a1987f 100644 --- a/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_handlers/test_licenses.py +++ b/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_handlers/test_licenses.py @@ -392,13 +392,44 @@ def test_post_licenses_returns_400_if_repeated_ssns_detected(self): { 'message': 'Invalid license records in request. See errors for more detail.', 'errors': { - 'SSN': 'Same SSN detected on multiple rows. ' - 'Every record must have a unique SSN within the same request.', + 'SSN': 'Same SSN for the same license type detected on multiple rows. 
' + 'Every record must have a unique SSN per license type within the same request.', }, }, json.loads(resp['body']), ) + def test_post_licenses_succeeds_with_same_ssn_different_license_types(self): + from handlers.licenses import post_licenses + + with open('../common/tests/resources/api-event.json') as f: + event = json.load(f) + + # The user has write permission for aslp/oh + event['requestContext']['authorizer']['claims']['scope'] = 'openid email aslp/readGeneral oh/aslp.write' + event['pathParameters'] = {'compact': 'aslp', 'jurisdiction': 'oh'} + + with open('../common/tests/resources/api/license-post.json') as f: + license_data_1 = json.load(f) + + # Create second license with same SSN but different license type + license_data_2 = license_data_1.copy() + license_data_1['licenseType'] = 'audiologist' + license_data_2['licenseType'] = 'speech-language pathologist' + + event['body'] = json.dumps([license_data_1, license_data_2]) + + # Add signature authentication headers + event = self._create_signed_event(event) + + resp = post_licenses(event, self.mock_context) + + self.assertEqual(200, resp['statusCode']) + + # assert that the messages were sent to the preprocessing queue + queue_messages = self._license_preprocessing_queue.receive_messages(MaxNumberOfMessages=10) + self.assertEqual(2, len(queue_messages)) + def test_post_licenses_strips_whitespace_from_string_fields(self): """Test that whitespace is stripped from all string fields in license data.""" from handlers.licenses import post_licenses From b93c125a3c34333cf4b40fb04d3d01bfeec47c51 Mon Sep 17 00:00:00 2001 From: Landon Shumway Date: Wed, 19 Nov 2025 12:08:24 -0600 Subject: [PATCH 80/81] formatting --- .../python/provider-data-v1/handlers/bulk_upload.py | 7 ++++--- .../tests/function/test_handlers/test_bulk_upload.py | 3 ++- .../tests/function/test_handlers/test_licenses.py | 8 ++++---- 3 files changed, 10 insertions(+), 8 deletions(-) diff --git 
a/backend/compact-connect/lambdas/python/provider-data-v1/handlers/bulk_upload.py b/backend/compact-connect/lambdas/python/provider-data-v1/handlers/bulk_upload.py index 1ace0c4f5..81284a585 100644 --- a/backend/compact-connect/lambdas/python/provider-data-v1/handlers/bulk_upload.py +++ b/backend/compact-connect/lambdas/python/provider-data-v1/handlers/bulk_upload.py @@ -165,9 +165,10 @@ def process_bulk_upload_file( matched_ssn_index = ssns_in_file_upload.get(ssn_key) if matched_ssn_index: raise ValidationError( - message=f'Duplicate License SSN detected for license type {validated_license["licenseType"]}. ' - f'SSN matches with record {matched_ssn_index}. Every record must have a unique SSN ' - f'per license type within the same file.' + message=f'Duplicate License SSN detected for license type ' + f'{validated_license["licenseType"]}. SSN matches with record ' + f'{matched_ssn_index}. Every record must have a unique SSN per license type ' + f'within the same file.' ) ssns_in_file_upload.update({ssn_key: i + 1}) except TypeError as e: diff --git a/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_handlers/test_bulk_upload.py b/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_handlers/test_bulk_upload.py index bba077bbf..4103dcf15 100644 --- a/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_handlers/test_bulk_upload.py +++ b/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_handlers/test_bulk_upload.py @@ -366,7 +366,8 @@ def test_bulk_upload_allows_repeated_ssns_for_different_license_types(self): '123-45-6789,1234567890,LICENSE123,John,Middle,Doe,Jr.,1990-01-01,2020-01-01,2021-01-01,2023-01-01,active,' 'eligible,123 Main St,Apt 1,Columbus,OH,43215,test@example.com,+15551234567,audiologist,Active\n' '123-45-6789,1234567890,LICENSE456,John,Middle,Doe,Jr.,1990-01-01,2023-01-01,2025-01-01,2026-01-01,active,' - 'eligible,123 Main St,Apt 
1,Columbus,OH,43215,test@example.com,+15551234567,speech-language pathologist,Active' + 'eligible,123 Main St,Apt 1,Columbus,OH,43215,test@example.com,+15551234567,speech-language pathologist,' + 'Active' ) # Upload the CSV content directly to the mock S3 bucket diff --git a/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_handlers/test_licenses.py b/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_handlers/test_licenses.py index 1d9a1987f..e6ce09d7d 100644 --- a/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_handlers/test_licenses.py +++ b/backend/compact-connect/lambdas/python/provider-data-v1/tests/function/test_handlers/test_licenses.py @@ -408,15 +408,15 @@ def test_post_licenses_succeeds_with_same_ssn_different_license_types(self): # The user has write permission for aslp/oh event['requestContext']['authorizer']['claims']['scope'] = 'openid email aslp/readGeneral oh/aslp.write' event['pathParameters'] = {'compact': 'aslp', 'jurisdiction': 'oh'} - + with open('../common/tests/resources/api/license-post.json') as f: license_data_1 = json.load(f) - + # Create second license with same SSN but different license type license_data_2 = license_data_1.copy() license_data_1['licenseType'] = 'audiologist' license_data_2['licenseType'] = 'speech-language pathologist' - + event['body'] = json.dumps([license_data_1, license_data_2]) # Add signature authentication headers @@ -425,7 +425,7 @@ def test_post_licenses_succeeds_with_same_ssn_different_license_types(self): resp = post_licenses(event, self.mock_context) self.assertEqual(200, resp['statusCode']) - + # assert that the messages were sent to the preprocessing queue queue_messages = self._license_preprocessing_queue.receive_messages(MaxNumberOfMessages=10) self.assertEqual(2, len(queue_messages)) From 11dd27c92f4a12857de9053bb70d8fa0b03af46e Mon Sep 17 00:00:00 2001 From: Landon Shumway Date: Fri, 21 Nov 2025 11:09:35 -0600 Subject: 
[PATCH 81/81] PR feedback - improve documentation and comments --- .../LICENSE_UPLOAD_ROLLBACK.md | 19 +++++++++++-------- .../cc_common/data_model/update_tier_enum.py | 16 ++++++++++++---- .../function/test_rollback_license_upload.py | 3 +-- 3 files changed, 24 insertions(+), 14 deletions(-) diff --git a/backend/compact-connect/disaster_recovery/LICENSE_UPLOAD_ROLLBACK.md b/backend/compact-connect/disaster_recovery/LICENSE_UPLOAD_ROLLBACK.md index aff1cfb53..365bdfdc3 100644 --- a/backend/compact-connect/disaster_recovery/LICENSE_UPLOAD_ROLLBACK.md +++ b/backend/compact-connect/disaster_recovery/LICENSE_UPLOAD_ROLLBACK.md @@ -15,19 +15,20 @@ Before starting the rollback: 1. ✅ **Verify the Problem**: Confirm which jurisdiction uploaded bad data for which compact(s) 2. ✅ **Disable automated access for Jurisdiction**: If jurisdiction has API credentials for automated uploads, disable those credentials to prevent further data changes until system has been recovered. To do this, determine which Cognito app client(s) the jurisdiction is using for the compact(s) and delete the appropriate app client(s) from the State Auth Cognito user pool. 3. ✅ **Determine Time Window**: Identify the exact start and end times (UTC) of the problematic uploads -4. ✅ **Stakeholder Notification**: Coordinate with relevant state administrators and other stakeholders +4. ✅ **Determine When Rollback Should be Performed**: Depending on the severity of the issue and scale of records that need to be rolled back, determine if the rollback needs to be performed as soon as possible or if it can be performed outside of peak traffic hours. When possible, it is recommended to perform rollbacks during periods of low traffic. While the risk is low, there is a narrow race condition (.2 second window based on load testing) where a license record may be modified by another part of the system after the rollback system checked for updates and the modification could be removed by the rollback. 
Running the rollback when traffic is low reduces this risk even further. +5. ✅ **Stakeholder Notification**: Coordinate with relevant state administrators and other stakeholders. Ensure jurisdiction is aware they should not attempt to upload any more license data until the rollback has been completed. ### Step 1: Gather Required Information You'll need the following information for the execution: -| Parameter | Description | Example | -|-----------|-------------|---------| -| `compact` | The compact abbreviation (lowercase) | `"aslp"`, `"octp"`, `"counseling"` | -| `jurisdiction` | The state/jurisdiction code (lowercase) | `"oh"`, `"ky"`, `"ne"` | -| `startDateTime` | UTC timestamp when problematic uploads began | `"2020-01-15T08:00:00Z"` | -| `endDateTime` | UTC timestamp when problematic uploads ended | `"2020-01-15T17:59:59Z"` | -| `rollbackReason` | Description for audit trail | `"Invalid license data uploaded by OH staff"` | +| Parameter | Description | Example | +|-----------|----------------------------------------------------------|---------| +| `compact` | The compact abbreviation (lowercase) | `"aslp"`, `"octp"`, `"counseling"` | +| `jurisdiction` | The state/jurisdiction code (lowercase) | `"oh"`, `"ky"`, `"ne"` | +| `startDateTime` | UTC timestamp when problematic uploads began (inclusive) | `"2020-01-15T08:00:00Z"` | +| `endDateTime` | UTC timestamp when problematic uploads ended (inclusive) | `"2020-01-15T17:59:59Z"` | +| `rollbackReason` | Description for audit trail | `"Invalid license data uploaded by OH staff"` | **Important Notes:** - All timestamps must be in UTC @@ -188,3 +189,5 @@ These events include: - The rollback reason - Time window information - Revision IDs for tracking + +These events are purely for auditing purposes. They are not currently referenced by any downstream processes. 
diff --git a/backend/compact-connect/lambdas/python/common/cc_common/data_model/update_tier_enum.py b/backend/compact-connect/lambdas/python/common/cc_common/data_model/update_tier_enum.py index 30df103ed..a05fd8fe2 100644 --- a/backend/compact-connect/lambdas/python/common/cc_common/data_model/update_tier_enum.py +++ b/backend/compact-connect/lambdas/python/common/cc_common/data_model/update_tier_enum.py @@ -5,8 +5,19 @@ class UpdateTierEnum(StrEnum): """ Enum for update record tiers in the sort key hierarchy. + DynamoDB string sort keys are ordered lexicographically, by UTF-8 byte value. + This means we can perform comparison operations on string sort keys, such as less than (lt) + and grab records within a certain range. + + To reduce risk that massive invalid updates from a jurisdiction will cause the system to crash + when loading provider data, we migrated the sort keys of our update records to follow this + tier based pattern, which will allow us to query for update records only as needed. + Update records are organized into tiers to enable efficient range queries. - Using lt (less than) conditions, we can fetch multiple tiers in a single query. + Because all the primary provider records are prefixed under a common `{compact}#PROVIDER` prefix, + which is lexicographically less than the `{compact}#UPDATE` prefix, using the lt condition with the + UPDATE prefix will grab all the update records up to the specified tier and all primary records under + the PROVIDER prefix. Tier structure in sort keys: - Tier 1: {compact}#UPDATE#1#privilege/... (Privilege updates) @@ -22,9 +33,6 @@ class UpdateTierEnum(StrEnum): - TIER_THREE: Fetches all updates (privilege + provider + license) Query: Key('sk').lt('{compact}#UPDATE#4') - - This tiered approach prevents bulk invalid license updates from breaking - queries that only need privilege and provider data. 
""" TIER_ONE = '1' # Privilege updates only diff --git a/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py b/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py index 955f094c9..85ff56503 100644 --- a/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py +++ b/backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py @@ -193,7 +193,7 @@ def _when_license_was_updated_twice(self, provider_id: str = None): } ) - # old update record before upload window (e.g., RENEWAL) + # old update record before upload window existing_update = self.test_data_generator.put_default_license_update_record_in_provider_table( { 'providerId': provider_id, @@ -1146,7 +1146,6 @@ def test_rollback_handles_loading_existing_s3_results_and_appends_new_data(self) final_results_data = self._perform_rollback_and_get_s3_object() # Verify: All existing data is preserved and new data is appended - # Note: All keys should now be camelCase for consistency self.assertEqual( { 'executionName': MOCK_EXECUTION_NAME,