diff --git a/corehq/apps/app_manager/tests/test_repeater.py b/corehq/apps/app_manager/tests/test_repeater.py
index 4283670cd89b..5a00165fd3aa 100644
--- a/corehq/apps/app_manager/tests/test_repeater.py
+++ b/corehq/apps/app_manager/tests/test_repeater.py
@@ -11,8 +11,7 @@
 from corehq.apps.app_manager.models import Application
 from corehq.apps.domain.models import Domain
 from corehq.motech.models import ConnectionSettings
-from corehq.motech.repeaters.dbaccessors import delete_all_repeat_records
-from corehq.motech.repeaters.models import AppStructureRepeater, SQLRepeatRecord
+from corehq.motech.repeaters.models import AppStructureRepeater, RepeatRecord
 
 
 class TestAppStructureRepeater(TestCase, DomainSubscriptionMixin):
@@ -37,10 +36,6 @@ def tearDownClass(cls):
         clear_plan_version_cache()
         super().tearDownClass()
 
-    def tearDown(self):
-        delete_all_repeat_records()
-        super().tearDown()
-
     def test_repeat_record_not_created(self):
         """
        When an application without a repeater is saved, then a repeat record should not be created
@@ -51,7 +46,7 @@ def test_repeat_record_not_created(self):
 
         # Enqueued repeat records have next_check set 48 hours in the future.
         later = datetime.utcnow() + timedelta(hours=48 + 1)
-        repeat_records = SQLRepeatRecord.objects.filter(domain=self.domain, next_check__lt=later)
+        repeat_records = RepeatRecord.objects.filter(domain=self.domain, next_check__lt=later)
         self.assertEqual(len(repeat_records), 0)
 
     def test_repeat_record_created(self):
@@ -67,7 +62,7 @@ def test_repeat_record_created(self):
         self.addCleanup(self.application.delete)
 
         later = datetime.utcnow() + timedelta(hours=48 + 1)
-        repeat_records = SQLRepeatRecord.objects.filter(domain=self.domain, next_check__lt=later)
+        repeat_records = RepeatRecord.objects.filter(domain=self.domain, next_check__lt=later)
         self.assertEqual(len(repeat_records), 1)
 
     def test_repeat_record_forwarded(self):
diff --git a/corehq/apps/cleanup/deletable_doc_types.py b/corehq/apps/cleanup/deletable_doc_types.py
index 46d381ffb2c4..84ed193293ac 100644
--- a/corehq/apps/cleanup/deletable_doc_types.py
+++ b/corehq/apps/cleanup/deletable_doc_types.py
@@ -8,6 +8,7 @@
 MAIN_DB = None
 FIXTURES_DB = 'fixtures'
+REPEATERS_DB = 'receiverwrapper'
 
 # Doc types for classes we've removed from our code
 # but may still have docs lying around from
@@ -43,6 +44,8 @@
     'FixtureDataType': (FIXTURES_DB,),
     'FixtureDataItem': (FIXTURES_DB,),
     'FixtureOwnership': (FIXTURES_DB,),
+    'RepeatRecord': (REPEATERS_DB,),
+    'RepeatRecordAttempt': (REPEATERS_DB,),
 
     # form and case types
     'XFormInstance': (MAIN_DB,),
diff --git a/corehq/apps/cleanup/management/commands/fire_repeaters.py b/corehq/apps/cleanup/management/commands/fire_repeaters.py
index d869e51d7b9a..95366472aa29 100644
--- a/corehq/apps/cleanup/management/commands/fire_repeaters.py
+++ b/corehq/apps/cleanup/management/commands/fire_repeaters.py
@@ -1,6 +1,6 @@
 from django.core.management.base import BaseCommand
 
-from corehq.motech.repeaters.models import SQLRepeatRecord, State
+from corehq.motech.repeaters.models import RepeatRecord, State
 
 
 class Command(BaseCommand):
@@ -10,7 +10,7 @@ def add_arguments(self, parser):
         parser.add_argument('domain')
 
     def handle(self, domain, **options):
-        records = SQLRepeatRecord.objects.filter(
+        records = RepeatRecord.objects.filter(
             domain=domain,
             next_check__isnull=False,
             state__in=[State.Pending, State.Fail],
diff --git a/corehq/apps/cleanup/management/commands/templates/populate_command.j2 b/corehq/apps/cleanup/management/commands/templates/populate_command.j2
index a7cb38ee31bf..b5c5634a719f 100644
--- a/corehq/apps/cleanup/management/commands/templates/populate_command.j2
+++ b/corehq/apps/cleanup/management/commands/templates/populate_command.j2
@@ -32,18 +32,3 @@ class Command(PopulateSQLCommand):
         helpers.
         """
         return None
-
-    def update_or_create_sql_object(self, doc):
-        model, created = self.sql_class().objects.update_or_create(
-            couch_id=doc['_id'],
-            defaults={
-            {%- for update in suggested_updates %}
-                {{ update }}
-            {%- endfor %}
-            })
-        {%- if submodels %}
-        {%- for model in submodels %}
-        # add code to migrate {{ class_name }}.{{ model }}
-        {%- endfor %}
-        {%- endif %}
-        return model, created
diff --git a/corehq/apps/data_interfaces/tasks.py b/corehq/apps/data_interfaces/tasks.py
index bdd3a443bf01..b246753d163e 100644
--- a/corehq/apps/data_interfaces/tasks.py
+++ b/corehq/apps/data_interfaces/tasks.py
@@ -20,7 +20,7 @@
 from corehq.apps.users.models import CouchUser
 from corehq.form_processor.models import XFormInstance
 from corehq.apps.case_importer.do_import import SubmitCaseBlockHandler, RowAndCase
-from corehq.motech.repeaters.models import SQLRepeatRecord
+from corehq.motech.repeaters.models import RepeatRecord
 from corehq.sql_db.util import get_db_aliases_for_partitioned_query
 from corehq.toggles import DISABLE_CASE_UPDATE_RULE_SCHEDULED_TASK
 from corehq.util.celery_utils import no_result_task
@@ -203,7 +203,6 @@ def task_operate_on_payloads(
     record_ids: List[str],
     domain: str,
     action,  # type: Literal['resend', 'cancel', 'requeue']  # 3.8+
-    use_sql: bool = True,
 ):
     return operate_on_payloads(record_ids, domain, action,
                                task=task_operate_on_payloads)
@@ -214,7 +213,6 @@ def task_generate_ids_and_operate_on_payloads(
     repeater_id: Optional[str],
     domain: str,
     action,  # type: Literal['resend', 'cancel', 'requeue']  # 3.8+
-    use_sql: bool = True,
 ) -> dict:
     repeat_record_ids = _get_repeat_record_ids(payload_id, repeater_id, domain)
     return operate_on_payloads(repeat_record_ids, domain, action,
@@ -223,12 +221,12 @@
 
 def _get_repeat_record_ids(payload_id, repeater_id, domain):
     if payload_id:
-        queryset = SQLRepeatRecord.objects.filter(
+        queryset = RepeatRecord.objects.filter(
             domain=domain,
             payload_id=payload_id,
         )
     elif repeater_id:
-        queryset = SQLRepeatRecord.objects.filter(
+        queryset = RepeatRecord.objects.filter(
             domain=domain,
             repeater__id=repeater_id,
         )
diff --git a/corehq/apps/data_interfaces/tests/test_utils.py b/corehq/apps/data_interfaces/tests/test_utils.py
index 9b2a2f97d352..d75cce3abc26 100644
--- a/corehq/apps/data_interfaces/tests/test_utils.py
+++ b/corehq/apps/data_interfaces/tests/test_utils.py
@@ -13,8 +13,7 @@
     operate_on_payloads,
 )
 from corehq.motech.models import ConnectionSettings
-from corehq.motech.repeaters.models import FormRepeater, SQLRepeatRecord
-from dimagi.utils.couch.migration import SyncSQLToCouchMixin
+from corehq.motech.repeaters.models import FormRepeater, RepeatRecord
 
 
 DOMAIN = 'test-domain'
@@ -25,7 +24,7 @@ def test__get_ids_no_data(self):
         response = _get_repeat_record_ids(None, None, 'test_domain')
         self.assertEqual(response, [])
 
-    @patch('corehq.apps.data_interfaces.tasks.SQLRepeatRecord.objects.filter')
+    @patch('corehq.apps.data_interfaces.tasks.RepeatRecord.objects.filter')
     def test__get_ids_payload_id_in_data(self, get_by_payload_id):
         payload_id = Mock()
         _get_repeat_record_ids(payload_id, None, 'test_domain')
@@ -33,7 +32,7 @@ def test__get_ids_payload_id_in_data(self, get_by_payload_id):
         self.assertEqual(get_by_payload_id.call_count, 1)
         get_by_payload_id.assert_called_with(domain='test_domain', payload_id=payload_id)
 
-    @patch('corehq.apps.data_interfaces.tasks.SQLRepeatRecord.objects.filter')
+    @patch('corehq.apps.data_interfaces.tasks.RepeatRecord.objects.filter')
     def test__get_ids_payload_id_not_in_data(self, iter_by_repeater):
         REPEATER_ID = 'c0ffee'
         _get_repeat_record_ids(None, REPEATER_ID, 'test_domain')
@@ -41,23 +40,23 @@ def test__get_ids_payload_id_not_in_data(self, iter_by_repeater):
         iter_by_repeater.assert_called_with(domain='test_domain', repeater__id=REPEATER_ID)
         self.assertEqual(iter_by_repeater.call_count, 1)
 
-    @patch('corehq.motech.repeaters.models.SQLRepeatRecord.objects')
+    @patch('corehq.motech.repeaters.models.RepeatRecord.objects')
     def test__validate_record_record_does_not_exist(self, mock_objects):
-        mock_objects.get.side_effect = [SQLRepeatRecord.DoesNotExist]
+        mock_objects.get.side_effect = [RepeatRecord.DoesNotExist]
         response = _get_sql_repeat_record('test_domain', '1234')
 
         mock_objects.get.assert_called_once_with(domain='test_domain', id='1234')
         self.assertIsNone(response)
 
-    @patch('corehq.motech.repeaters.models.SQLRepeatRecord.objects')
+    @patch('corehq.motech.repeaters.models.RepeatRecord.objects')
     def test__validate_record_invalid_domain(self, mock_objects):
-        mock_objects.get.side_effect = SQLRepeatRecord.DoesNotExist
+        mock_objects.get.side_effect = RepeatRecord.DoesNotExist
         response = _get_sql_repeat_record('test_domain', '1234')
 
         mock_objects.get.assert_called_once_with(domain='test_domain', id='1234')
         self.assertIsNone(response)
 
-    @patch('corehq.motech.repeaters.models.SQLRepeatRecord.objects')
+    @patch('corehq.motech.repeaters.models.RepeatRecord.objects')
     def test__validate_record_success(self, mock_objects):
         mock_record = Mock()
         mock_record.domain = 'test_domain'
@@ -67,19 +66,6 @@ def test__validate_record_success(self, mock_objects):
         mock_objects.get.assert_called_once_with(domain='test_domain', id='1234')
         self.assertEqual(response, mock_record)
 
-    @patch('corehq.motech.repeaters.models.SQLRepeatRecord.objects')
-    def test__validate_record_success_with_couch_id(self, mock_objects):
-        couch_id = 'b6859ae05fd94dccbc3dfd25cdc6cb2c'
-        mock_record = Mock()
-        mock_record.domain = 'test_domain'
-        mock_objects.get.return_value = mock_record
-        response = _get_sql_repeat_record('test_domain', couch_id)
-
-        mock_objects.get.assert_called_once_with(domain='test_domain', couch_id=couch_id)
-        self.assertEqual(response, mock_record)
-        assert issubclass(SQLRepeatRecord, SyncSQLToCouchMixin), \
-            "couch_id not supported? Should this test be removed?"
- class TestTasks(TestCase): @@ -504,13 +490,13 @@ def setUpClass(cls): @classmethod def create_repeat_records(cls): now = datetime.now() - cls.sql_records = [SQLRepeatRecord( + cls.sql_records = [RepeatRecord( domain=DOMAIN, repeater_id=cls.repeater.id, payload_id=cls.instance_id, registered_at=now, ) for __ in range(3)] - SQLRepeatRecord.objects.bulk_create(cls.sql_records) + RepeatRecord.objects.bulk_create(cls.sql_records) def test_no_payload_id_no_repeater_id_sql(self): result = _get_repeat_record_ids(payload_id=None, repeater_id=None, domain=DOMAIN) diff --git a/corehq/apps/data_interfaces/utils.py b/corehq/apps/data_interfaces/utils.py index 2206bac9b524..c498e5b5a6eb 100644 --- a/corehq/apps/data_interfaces/utils.py +++ b/corehq/apps/data_interfaces/utils.py @@ -184,12 +184,10 @@ def operate_on_payloads( def _get_sql_repeat_record(domain, record_id): - from corehq.motech.repeaters.models import SQLRepeatRecord, is_sql_id - - where = {"id": record_id} if is_sql_id(record_id) else {"couch_id": record_id} + from corehq.motech.repeaters.models import RepeatRecord try: - return SQLRepeatRecord.objects.get(domain=domain, **where) - except SQLRepeatRecord.DoesNotExist: + return RepeatRecord.objects.get(domain=domain, id=record_id) + except RepeatRecord.DoesNotExist: return None diff --git a/corehq/apps/domain/tests/test_delete_domain.py b/corehq/apps/domain/tests/test_delete_domain.py index 63738036969c..4e4c7646bd27 100644 --- a/corehq/apps/domain/tests/test_delete_domain.py +++ b/corehq/apps/domain/tests/test_delete_domain.py @@ -138,8 +138,8 @@ from corehq.motech.repeaters.models import ( CaseRepeater, Repeater, - SQLRepeatRecord, - SQLRepeatRecordAttempt, + RepeatRecord, + RepeatRecordAttempt, ) from settings import HQ_ACCOUNT_ROOT @@ -973,8 +973,8 @@ def test_motech_delete(self): def _assert_repeaters_count(self, domain_name, count): self._assert_queryset_count([ Repeater.objects.filter(domain=domain_name), - SQLRepeatRecord.objects.filter(domain=domain_name), - SQLRepeatRecordAttempt.objects.filter(repeat_record__domain=domain_name), + RepeatRecord.objects.filter(domain=domain_name), + RepeatRecordAttempt.objects.filter(repeat_record__domain=domain_name), ], count) def test_repeaters_delete(self): diff --git a/corehq/apps/domain/tests/test_deletion_models.py b/corehq/apps/domain/tests/test_deletion_models.py index a0ed9c4f5081..86b76ec78a51 100644 --- a/corehq/apps/domain/tests/test_deletion_models.py +++ b/corehq/apps/domain/tests/test_deletion_models.py @@ -62,8 +62,8 @@ 'fixtures.UserLookupTableStatus', 'fixtures.LookupTableRow', # handled by cascading delete 'fixtures.LookupTableRowOwner', # handled by cascading delete - 'repeaters.SQLRepeatRecord', # handled by cascading delete - 'repeaters.SQLRepeatRecordAttempt', # handled by cascading delete + 'repeaters.RepeatRecord', # handled by cascading delete + 'repeaters.RepeatRecordAttempt', # handled by cascading delete 'sms.MigrationStatus', 'util.BouncedEmail', 'util.ComplaintBounceMeta', diff --git a/corehq/apps/dump_reload/couch/dump.py b/corehq/apps/dump_reload/couch/dump.py index ebbfcb82aec0..7c9c8b5b1fa5 100644 --- a/corehq/apps/dump_reload/couch/dump.py +++ b/corehq/apps/dump_reload/couch/dump.py @@ -5,7 +5,6 @@ from corehq.apps.dump_reload.couch.id_providers import ( DocTypeIDProvider, - DomainInListKeyGenerator, DomainKeyGenerator, WebUserIDProvider, ViewIDProvider, @@ -36,7 +35,6 @@ DocTypeIDProvider('ExportDataSchema'), DocTypeIDProvider('CaseExportInstance'), DocTypeIDProvider('CaseExportDataSchema'), - 
ViewIDProvider('RepeatRecord', 'repeaters/repeat_records', DomainInListKeyGenerator([None])), } DOC_PROVIDERS_BY_DOC_TYPE = { diff --git a/corehq/apps/dump_reload/couch/id_providers.py b/corehq/apps/dump_reload/couch/id_providers.py index 9937605a3ab0..c63683ed897e 100644 --- a/corehq/apps/dump_reload/couch/id_providers.py +++ b/corehq/apps/dump_reload/couch/id_providers.py @@ -39,18 +39,6 @@ def get_key_args(self, doc_type, domain): } -class DomainInListKeyGenerator(ViewKeyGenerator): - def __init__(self, static_key_items=None): - self.static_key_items = static_key_items or [] - - def get_key_args(self, doc_type, domain): - startkey = [domain] + self.static_key_items - return { - 'startkey': startkey, - 'endkey': startkey + [{}], - } - - class ViewIDProvider(BaseIDProvider): """ID provider that gets ID's from view rows :param doc_type: Doc Type of returned docs diff --git a/corehq/apps/dump_reload/sql/dump.py b/corehq/apps/dump_reload/sql/dump.py index b9e8ab6799ec..c037922e2a92 100644 --- a/corehq/apps/dump_reload/sql/dump.py +++ b/corehq/apps/dump_reload/sql/dump.py @@ -199,10 +199,10 @@ FilteredModelIteratorBuilder('repeaters.Repeater', SimpleFilter('domain')), FilteredModelIteratorBuilder('motech.ConnectionSettings', SimpleFilter('domain')), FilteredModelIteratorBuilder('motech.RequestLog', SimpleFilter('domain')), - # NH (2021-01-08): Including SQLRepeatRecord because we dump (Couch) + # NH (2021-01-08): Including RepeatRecord because we dump (Couch) # RepeatRecord, but this does not seem like a good idea. - FilteredModelIteratorBuilder('repeaters.SQLRepeatRecord', SimpleFilter('domain')), - FilteredModelIteratorBuilder('repeaters.SQLRepeatRecordAttempt', SimpleFilter('repeat_record__domain')), + FilteredModelIteratorBuilder('repeaters.RepeatRecord', SimpleFilter('domain')), + FilteredModelIteratorBuilder('repeaters.RepeatRecordAttempt', SimpleFilter('repeat_record__domain')), FilteredModelIteratorBuilder('saved_reports.ScheduledReportLog', SimpleFilter('domain')), UnfilteredModelIteratorBuilder('saved_reports.ScheduledReportsCheckpoint'), FilteredModelIteratorBuilder('translations.SMSTranslations', SimpleFilter('domain')), diff --git a/corehq/apps/hqadmin/corrupt_couch.py b/corehq/apps/hqadmin/corrupt_couch.py index a58463966014..97b10c853e78 100644 --- a/corehq/apps/hqadmin/corrupt_couch.py +++ b/corehq/apps/hqadmin/corrupt_couch.py @@ -21,7 +21,6 @@ from corehq.apps.userreports.models import ReportConfiguration from corehq.apps.users.models import CommCareUser from corehq.apps.domain.models import Domain -from corehq.motech.repeaters.models import RepeatRecord from corehq.toggles.models import Toggle from corehq.util.couch_helpers import NoSkipArgsProvider from corehq.util.pagination import ResumableFunctionIterator @@ -53,11 +52,6 @@ "type": Application, "use_domain": True, }, - "receiver_wrapper_repeat_records": { - "type": RepeatRecord, - "use_domain": True, - "view": "repeaters/repeat_records", - }, "meta": { "type": ReportConfiguration, "use_domain": True diff --git a/corehq/apps/hqwebapp/static/hqwebapp/scss/commcarehq/_type.scss b/corehq/apps/hqwebapp/static/hqwebapp/scss/commcarehq/_type.scss index bb1d86cb88e1..f5c337787c6f 100644 --- a/corehq/apps/hqwebapp/static/hqwebapp/scss/commcarehq/_type.scss +++ b/corehq/apps/hqwebapp/static/hqwebapp/scss/commcarehq/_type.scss @@ -84,4 +84,8 @@ code { a { cursor: pointer; +} + +button { + font-weight: bold !important; // to meet WCAG AA guidelines } \ No newline at end of file diff --git 
a/corehq/apps/hqwebapp/static/hqwebapp/scss/commcarehq/_variables.scss b/corehq/apps/hqwebapp/static/hqwebapp/scss/commcarehq/_variables.scss index 9f23e33527ca..3e9f97205c48 100644 --- a/corehq/apps/hqwebapp/static/hqwebapp/scss/commcarehq/_variables.scss +++ b/corehq/apps/hqwebapp/static/hqwebapp/scss/commcarehq/_variables.scss @@ -116,9 +116,9 @@ $dimagi-mango: #FC5F36; // Base color overrides -$blue: darken(#5D70D2, 20%); -$green: darken(#3FA12A, 10%); -$red: darken($dimagi-sunset, 20%); +$blue: #5D70D2; +$green: #358623; +$red: #E13019; $teal: #01A2A9; $yellow: $dimagi-marigold; $indigo: $dimagi-indigo; diff --git a/corehq/apps/hqwebapp/tests/data/bootstrap5_diffs/stylesheets/imports/includes_variables._variables.style.diff.txt b/corehq/apps/hqwebapp/tests/data/bootstrap5_diffs/stylesheets/imports/includes_variables._variables.style.diff.txt index 3a61dd2ee756..832ce1d3fdb1 100644 --- a/corehq/apps/hqwebapp/tests/data/bootstrap5_diffs/stylesheets/imports/includes_variables._variables.style.diff.txt +++ b/corehq/apps/hqwebapp/tests/data/bootstrap5_diffs/stylesheets/imports/includes_variables._variables.style.diff.txt @@ -223,9 +223,9 @@ + + +// Base color overrides -+$blue: darken(#5D70D2, 20%); -+$green: darken(#3FA12A, 10%); -+$red: darken($dimagi-sunset, 20%); ++$blue: #5D70D2; ++$green: #358623; ++$red: #E13019; +$teal: #01A2A9; +$yellow: $dimagi-marigold; +$indigo: $dimagi-indigo; diff --git a/corehq/apps/hqwebapp/tests/data/bootstrap5_diffs/stylesheets/imports/typography._type.style.diff.txt b/corehq/apps/hqwebapp/tests/data/bootstrap5_diffs/stylesheets/imports/typography._type.style.diff.txt index d658129e0435..3a3da0bb278d 100644 --- a/corehq/apps/hqwebapp/tests/data/bootstrap5_diffs/stylesheets/imports/typography._type.style.diff.txt +++ b/corehq/apps/hqwebapp/tests/data/bootstrap5_diffs/stylesheets/imports/typography._type.style.diff.txt @@ -28,7 +28,7 @@ } .no-border { -@@ -81,11 +82,6 @@ +@@ -81,11 +82,10 @@ } } @@ -39,5 +39,8 @@ - a { cursor: pointer; --} + } ++ ++button { ++ font-weight: bold !important; // to meet WCAG AA guidelines +} \ No newline at end of file diff --git a/corehq/apps/reports/standard/cases/case_data.py b/corehq/apps/reports/standard/cases/case_data.py index 163125e77708..3637a4d61b7c 100644 --- a/corehq/apps/reports/standard/cases/case_data.py +++ b/corehq/apps/reports/standard/cases/case_data.py @@ -81,7 +81,7 @@ UserRequestedRebuild, XFormInstance, ) -from corehq.motech.repeaters.models import SQLRepeatRecord +from corehq.motech.repeaters.models import RepeatRecord from corehq.motech.repeaters.views.repeat_record_display import ( RepeatRecordDisplay, ) @@ -217,7 +217,7 @@ def _product_name(product_id): repeat_records = [ RepeatRecordDisplay(record, timezone, date_format=DATE_FORMAT) - for record in SQLRepeatRecord.objects.filter(domain=self.domain, payload_id=self.case_id) + for record in RepeatRecord.objects.filter(domain=self.domain, payload_id=self.case_id) ] can_edit_data = self.request.couch_user.can_edit_data diff --git a/corehq/apps/styleguide/context.py b/corehq/apps/styleguide/context.py index 83256407f279..6a17848def24 100644 --- a/corehq/apps/styleguide/context.py +++ b/corehq/apps/styleguide/context.py @@ -87,7 +87,7 @@ def get_interaction_colors(): title="Success", description="Use when an action has been completed successfully, primarily for messaging. 
" "Rarely used for interactive elements like buttons.", - main_color=Color('success', '3FA12A'), + main_color=Color('success', '358623'), subtle_color=Color('success-subtle', 'D9ECD4'), ), ColorGroup( @@ -101,7 +101,7 @@ def get_interaction_colors(): title="Error, Negative Attention", description="Use to highlight an error, something negative or a critical risk. " "Use as text, highlights, banners or destructive buttons. ", - main_color=Color('danger', 'E73C27'), + main_color=Color('danger', 'E13019'), subtle_color=Color('danger-subtle', 'FAD8D4'), ), ] diff --git a/corehq/apps/userreports/tests/test_pillow.py b/corehq/apps/userreports/tests/test_pillow.py index 40a246dc5a49..61a4ff6c77ab 100644 --- a/corehq/apps/userreports/tests/test_pillow.py +++ b/corehq/apps/userreports/tests/test_pillow.py @@ -9,7 +9,7 @@ from casexml.apps.case.mock import CaseBlock from casexml.apps.case.tests.util import delete_all_cases, delete_all_xforms from pillow_retry.models import PillowError -from corehq.motech.repeaters.models import SQLRepeatRecord +from corehq.motech.repeaters.models import RepeatRecord from corehq.apps.hqcase.utils import submit_case_blocks from corehq.apps.userreports.data_source_providers import ( DynamicDataSourceProvider, @@ -43,7 +43,6 @@ from corehq.apps.userreports.util import get_indicator_adapter from corehq.form_processor.models import CommCareCase from corehq.form_processor.signals import sql_case_post_save -from corehq.motech.repeaters.dbaccessors import delete_all_repeat_records from corehq.motech.repeaters.models import ( ConnectionSettings, DataSourceRepeater, @@ -404,7 +403,6 @@ def tearDownClass(cls): super(IndicatorPillowTest, cls).tearDownClass() def tearDown(self): - delete_all_repeat_records() self.adapter.clear_table() @flaky_slow @@ -536,7 +534,7 @@ def test_process_deleted_doc_from_sql_chunked(self, datetime_mock): self._test_process_deleted_doc_from_sql(datetime_mock) self.pillow = _get_pillow([self.config]) later = datetime.utcnow() + timedelta(hours=50) - repeat_records = SQLRepeatRecord.objects.filter(domain=self.domain, next_check__lt=later) + repeat_records = RepeatRecord.objects.filter(domain=self.domain, next_check__lt=later) # We expect 2 repeat records for 2 repeaters each self.assertEqual(repeat_records.count(), 4) diff --git a/corehq/apps/zapier/tests/test_zapier_forwarding.py b/corehq/apps/zapier/tests/test_zapier_forwarding.py index ad0329e1a178..94bd93429204 100644 --- a/corehq/apps/zapier/tests/test_zapier_forwarding.py +++ b/corehq/apps/zapier/tests/test_zapier_forwarding.py @@ -9,8 +9,7 @@ from corehq.apps.zapier.consts import EventTypes from corehq.apps.zapier.models import ZapierSubscription from corehq.apps.zapier.tests.test_utils import bootrap_domain_for_zapier -from corehq.motech.repeaters.dbaccessors import delete_all_repeat_records -from corehq.motech.repeaters.models import SQLRepeatRecord +from corehq.motech.repeaters.models import RepeatRecord DOMAIN = 'zapier-case-forwarding-tests' ZAPIER_CASE_TYPE = 'animal' @@ -31,7 +30,6 @@ def tearDownClass(cls): super(TestZapierCaseForwarding, cls).tearDownClass() def tearDown(self): - delete_all_repeat_records() ZapierSubscription.objects.all().delete() def test_create_case_forwarding(self): @@ -75,7 +73,7 @@ def _run_test(self, event_type, expected_records_after_create, expected_records_ ) # Enqueued repeat records have next_check set 48 hours in the future. 
later = datetime.utcnow() + timedelta(hours=48 + 1) - repeat_records = list(SQLRepeatRecord.objects.filter(domain=self.domain, next_check__lt=later)) + repeat_records = list(RepeatRecord.objects.filter(domain=self.domain, next_check__lt=later)) self.assertEqual(expected_records_after_create, len(repeat_records)) for record in repeat_records: self.assertEqual(case_id, record.payload_id) @@ -89,7 +87,7 @@ def _run_test(self, event_type, expected_records_after_create, expected_records_ ).as_text() ], domain=self.domain ) - repeat_records = list(SQLRepeatRecord.objects.filter(domain=self.domain, next_check__lt=later)) + repeat_records = list(RepeatRecord.objects.filter(domain=self.domain, next_check__lt=later)) self.assertEqual(expected_records_after_update, len(repeat_records)) for record in repeat_records: self.assertEqual(case_id, record.payload_id) diff --git a/corehq/motech/openmrs/views.py b/corehq/motech/openmrs/views.py index 8b7390c15606..aab7e24d044b 100644 --- a/corehq/motech/openmrs/views.py +++ b/corehq/motech/openmrs/views.py @@ -36,7 +36,7 @@ ) from corehq.motech.openmrs.repeaters import OpenmrsRepeater from corehq.motech.openmrs.tasks import import_patients_to_domain -from corehq.motech.repeaters.models import SQLRepeatRecord, is_sql_id +from corehq.motech.repeaters.models import RepeatRecord from corehq.motech.repeaters.views import AddCaseRepeaterView, EditRepeaterView from corehq.motech.utils import b64_aes_encrypt @@ -126,9 +126,8 @@ def openmrs_raw_api(request, domain, repeater_id, rest_uri): @login_and_domain_required def openmrs_test_fire(request, domain, repeater_id, record_id): - where = {"id": record_id} if is_sql_id(record_id) else {"couch_id": record_id} repeater = OpenmrsRepeater.objects.get(domain=domain, id=repeater_id) - record = SQLRepeatRecord.objects.get(domain=domain, **where) + record = RepeatRecord.objects.get(domain=domain, id=record_id) assert record.repeater_id == repeater.id attempt = repeater.fire_for_record(record) diff --git a/corehq/motech/repeaters/_design/indexes/search/index.js b/corehq/motech/repeaters/_design/indexes/search/index.js deleted file mode 100644 index 09d600e953c9..000000000000 --- a/corehq/motech/repeaters/_design/indexes/search/index.js +++ /dev/null @@ -1,18 +0,0 @@ -function (doc) { - try { - if (doc.doc_type == "RepeatRecord") - { - - index("repeater_id", doc.repeater_id); - index("repeater_type", doc.repeater_type); - index("domain", doc.domain); - index("last_checked", doc.last_checked); - index("next_check", doc.next_check); - index("succeeded", doc.succeeded); - index("payload_id", doc.payload_id); - } - } - catch (err) { - // search may not be configured, do nothing - } -} diff --git a/corehq/motech/repeaters/_design/views/repeat_records/map.js b/corehq/motech/repeaters/_design/views/repeat_records/map.js deleted file mode 100644 index 171b4fb68f59..000000000000 --- a/corehq/motech/repeaters/_design/views/repeat_records/map.js +++ /dev/null @@ -1,14 +0,0 @@ -function (doc) { - var state = 'PENDING'; - if (doc.doc_type === 'RepeatRecord' || doc.doc_type === 'RepeatRecord-Failed') { - if (doc.succeeded) { - state = 'SUCCESS'; - } else if (doc.cancelled) { - state = 'CANCELLED'; - } else if (doc.failure_reason) { - state = 'FAIL'; - } - emit([doc.domain, doc.repeater_id, state, doc.last_checked], null); - emit([doc.domain, null, state, doc.last_checked], null); - } -} diff --git a/corehq/motech/repeaters/_design/views/repeat_records/reduce.js b/corehq/motech/repeaters/_design/views/repeat_records/reduce.js deleted file mode 
100644 index c866cd727732..000000000000 --- a/corehq/motech/repeaters/_design/views/repeat_records/reduce.js +++ /dev/null @@ -1 +0,0 @@ -_count diff --git a/corehq/motech/repeaters/_design/views/repeat_records_by_next_check/map.js b/corehq/motech/repeaters/_design/views/repeat_records_by_next_check/map.js deleted file mode 100644 index 7f086af2edbd..000000000000 --- a/corehq/motech/repeaters/_design/views/repeat_records_by_next_check/map.js +++ /dev/null @@ -1,8 +0,0 @@ -function (doc) { - if (doc.doc_type === 'RepeatRecord') { - if (!doc.succeeded && doc.next_check && !doc.cancelled) { - emit([doc.domain, doc.next_check], null); - emit([null, doc.next_check], null); - } - } -} diff --git a/corehq/motech/repeaters/_design/views/repeat_records_by_next_check/reduce.js b/corehq/motech/repeaters/_design/views/repeat_records_by_next_check/reduce.js deleted file mode 100644 index 768b109a2055..000000000000 --- a/corehq/motech/repeaters/_design/views/repeat_records_by_next_check/reduce.js +++ /dev/null @@ -1 +0,0 @@ -_count \ No newline at end of file diff --git a/corehq/motech/repeaters/_design/views/repeat_records_by_payload_id/map.js b/corehq/motech/repeaters/_design/views/repeat_records_by_payload_id/map.js deleted file mode 100644 index af7dadb3f25c..000000000000 --- a/corehq/motech/repeaters/_design/views/repeat_records_by_payload_id/map.js +++ /dev/null @@ -1,5 +0,0 @@ -function (doc) { - if (doc.doc_type === 'RepeatRecord' || doc.doc_type === 'RepeatRecord-Failed') { - emit([doc.domain, doc.payload_id], null); - } -} diff --git a/corehq/motech/repeaters/_design/views/repeaters/map.js b/corehq/motech/repeaters/_design/views/repeaters/map.js deleted file mode 100644 index ff5f301d5bac..000000000000 --- a/corehq/motech/repeaters/_design/views/repeaters/map.js +++ /dev/null @@ -1,6 +0,0 @@ -function(doc){ - // grandfather in old FormRepeater docs - if(doc.base_doc === "Repeater" || doc.doc_type === "FormRepeater") { - emit([doc.domain, doc.doc_type], null); - } -} \ No newline at end of file diff --git a/corehq/motech/repeaters/_design/views/repeaters/reduce.js b/corehq/motech/repeaters/_design/views/repeaters/reduce.js deleted file mode 100644 index 768b109a2055..000000000000 --- a/corehq/motech/repeaters/_design/views/repeaters/reduce.js +++ /dev/null @@ -1 +0,0 @@ -_count \ No newline at end of file diff --git a/corehq/motech/repeaters/const.py b/corehq/motech/repeaters/const.py index 7ca67cbd7a60..1febe24b7c1a 100644 --- a/corehq/motech/repeaters/const.py +++ b/corehq/motech/repeaters/const.py @@ -21,10 +21,10 @@ class State(IntegerChoices): # powers of two to allow multiple simultaneous states (not currently used) Pending = 1, _('Pending') - Fail = 2, _('Failed') + Fail = 2, _('Failed') # Will be retried. Implies Pending. Success = 4, _('Succeeded') Cancelled = 8, _('Cancelled') - Empty = 16, _('Empty') + Empty = 16, _('Empty') # There was nothing to send. Implies Success. 
 RECORD_PENDING_STATE = State.Pending
diff --git a/corehq/motech/repeaters/dbaccessors.py b/corehq/motech/repeaters/dbaccessors.py
deleted file mode 100644
index bfd53c150cea..000000000000
--- a/corehq/motech/repeaters/dbaccessors.py
+++ /dev/null
@@ -1,13 +0,0 @@
-from corehq.util.test_utils import unit_testing_only
-
-
-@unit_testing_only
-def delete_all_repeat_records():
-    from .models import RepeatRecord
-    db = RepeatRecord.get_db()
-    results = db.view(
-        'repeaters/repeat_records_by_payload_id',
-        reduce=False,
-        include_docs=True,
-    ).all()
-    db.bulk_delete([r["doc"] for r in results], empty_on_delete=False)
diff --git a/corehq/motech/repeaters/expression/tests.py b/corehq/motech/repeaters/expression/tests.py
index c6b18beed6a2..545c8de002f8 100644
--- a/corehq/motech/repeaters/expression/tests.py
+++ b/corehq/motech/repeaters/expression/tests.py
@@ -11,9 +11,8 @@
 from corehq.apps.domain.shortcuts import create_domain
 from corehq.apps.userreports.models import UCRExpression
 from corehq.motech.models import ConnectionSettings
-from corehq.motech.repeaters.dbaccessors import delete_all_repeat_records
 from corehq.motech.repeaters.expression.repeaters import CaseExpressionRepeater
-from corehq.motech.repeaters.models import SQLRepeatRecord
+from corehq.motech.repeaters.models import RepeatRecord
 from corehq.util.test_utils import flag_enabled
 
 
@@ -84,25 +83,22 @@ def setUpClass(cls):
         cls.repeater.save()
 
-    def tearDown(self):
-        delete_all_repeat_records()
-
     @classmethod
     def repeat_records(cls, domain_name):
         # Enqueued repeat records have next_check set 48 hours in the future.
         later = datetime.utcnow() + timedelta(hours=48 + 1)
-        return SQLRepeatRecord.objects.filter(domain=domain_name, next_check__lt=later)
+        return RepeatRecord.objects.filter(domain=domain_name, next_check__lt=later)
 
     def test_filter_cases(self):
         forwardable_case = self.factory.create_case(case_type='forward-me')
         unforwardable_case = self.factory.create_case(case_type='dont-forward-me')
         repeat_records = self.repeat_records(self.domain).all()
-        self.assertEqual(SQLRepeatRecord.objects.filter(domain=self.domain).count(), 1)
+        self.assertEqual(RepeatRecord.objects.filter(domain=self.domain).count(), 1)
         self.assertEqual(repeat_records[0].payload_id, forwardable_case.case_id)
 
         self.factory.update_case(unforwardable_case.case_id, update={'now-this-case': 'can-be-forwarded'})
         repeat_records = self.repeat_records(self.domain).all()
-        self.assertEqual(SQLRepeatRecord.objects.filter(domain=self.domain).count(), 2)
+        self.assertEqual(RepeatRecord.objects.filter(domain=self.domain).count(), 2)
         self.assertEqual(repeat_records[1].payload_id, unforwardable_case.case_id)
 
     def test_payload(self):
diff --git a/corehq/motech/repeaters/management/commands/create_missing_repeaters.py b/corehq/motech/repeaters/management/commands/create_missing_repeaters.py
deleted file mode 100644
index 60079770425b..000000000000
--- a/corehq/motech/repeaters/management/commands/create_missing_repeaters.py
+++ /dev/null
@@ -1,118 +0,0 @@
-from uuid import UUID
-
-from memoized import memoized
-from django.core.management.base import BaseCommand
-
-from dimagi.utils.chunked import chunked
-from dimagi.utils.couch.database import iter_docs
-
-from corehq.apps.domain.dbaccessors import domain_exists
-from corehq.util.couchdb_management import couch_config
-
-from ...models import Repeater, ConnectionSettings
-from ...views.repeat_record_display import MISSING_VALUE
-
-
-class Command(BaseCommand):
-    help = """
-    Migrate deleted repeaters from Couch to SQL.
- - The new SQL repeaters are for repeat record display purposes - only, and therefore have a minimal set of fields with meaningful - values. Notably, they do not have related connection settings, - and therefore may cause errors if referenced elsewhere. All new - repeaters are created with a soft-deleted state. - """ - - def add_arguments(self, parser): - parser.add_argument("--dry-run", action="store_true") - - def handle(self, dry_run, **options): - couch = couch_config.get_db("receiverwrapper") - domain_by_repeater_id = dict(iter_missing_repeaters(couch)) - missing_repeaters = iter_docs(couch, list(domain_by_repeater_id)) - mode = " (dry run)" if dry_run else "" - - # recreate soft-deleted repeaters in SQL - soft_migrated = [] - for docs in chunked(missing_repeaters, 100, list): - for doc in docs: - domain_name = domain_by_repeater_id[doc["_id"]] - if domain_name != doc["domain"]: - print(f"Repeat record domain '{domain_name}' != " - f"'{doc['domain']}' of repeater {doc['_id']}") - soft_migrated.extend(create_sql_repeaters(docs, dry_run)) - print_summary(soft_migrated, "soft", mode) - - # recreate hard-deleted repeaters in SQL - soft_ids = {r.id for r in soft_migrated} - remaining = [ - {"_id": x, "domain": d} - for x, d in domain_by_repeater_id.items() if UUID(x) not in soft_ids - ] - hard_migrated = [] - for docs in chunked(remaining, 100, list): - hard_migrated.extend(create_sql_repeaters(docs, dry_run)) - print() - print_summary(hard_migrated, "hard", mode) - - -def print_summary(repeaters, category, mode): - print(f"Migrated {len(repeaters)} {category}-deleted repeaters to SQL{mode}") - for repeater in repeaters: - print(f" {repeater.domain} {repeater.id.hex} {repeater.name}") - - -def iter_missing_repeaters(couch): - """Yield (id, domain) pairs of repeaters that do not exist in SQL""" - @memoized - def is_not_deleted(domain_name): - return domain_exists(domain_name) - - couch_results = couch.view( - 'repeaters/repeat_records', - startkey=[], - endkey=[{}], - reduce=True, - group_level=2, - ).all() - sql_repeater_ids = set(Repeater.all_objects.order_by().values_list("id", flat=True)) - for result in couch_results: - domain, repeater_id = result['key'] - if ( - repeater_id is not None # view emits twice per record, skip the second - and UUID(repeater_id) not in sql_repeater_ids - and is_not_deleted(domain) - ): - yield repeater_id, domain - - -def create_sql_repeaters(docs, dry_run): - objs = [make_deleted_sql_repeater(doc) for doc in docs] - domains = {doc['domain'] for doc in docs} - settings = {d: make_deleted_connection_settings(d) for d in domains} - if not dry_run: - ConnectionSettings.objects.bulk_create(settings.values()) - for obj in objs: - assert settings[obj.domain].id is not None - obj.connection_settings = settings[obj.domain] - Repeater.objects.bulk_create(objs) - return objs - - -def make_deleted_sql_repeater(doc): - return Repeater( - id=UUID(doc["_id"]), - domain=doc["domain"], - name=doc.get('name') or doc.get('url'), - is_deleted=True, - ) - - -def make_deleted_connection_settings(domian): - return ConnectionSettings( - domain=domian, - name=MISSING_VALUE, - url="", - is_deleted=True, - ) diff --git a/corehq/motech/repeaters/management/commands/delete_duplicate_cancelled_records.py b/corehq/motech/repeaters/management/commands/delete_duplicate_cancelled_records.py index 1ef82d7e69ea..cd98017ec68d 100644 --- a/corehq/motech/repeaters/management/commands/delete_duplicate_cancelled_records.py +++ 
b/corehq/motech/repeaters/management/commands/delete_duplicate_cancelled_records.py @@ -10,7 +10,7 @@ RECORD_CANCELLED_STATE, RECORD_SUCCESS_STATE, ) -from corehq.motech.repeaters.models import Repeater, SQLRepeatRecord +from corehq.motech.repeaters.models import Repeater, RepeatRecord class Command(BaseCommand): @@ -33,7 +33,7 @@ def add_arguments(self, parser): @memoized def most_recent_success(self): res = {} - for record in SQLRepeatRecord.objects.iterate( + for record in RepeatRecord.objects.iterate( self.domain, repeater_id=self.repeater_id, state=RECORD_SUCCESS_STATE): if record.last_checked: res[record.payload_id] = max(res.get(record.payload_id, datetime.datetime.min), @@ -48,7 +48,7 @@ def handle(self, domain, repeater_id, *args, **options): redundant_records = [] records_by_payload_id = defaultdict(list) - records = SQLRepeatRecord.objects.iterate(domain, repeater_id=repeater_id, state=RECORD_CANCELLED_STATE) + records = RepeatRecord.objects.iterate(domain, repeater_id=repeater_id, state=RECORD_CANCELLED_STATE) total_records = 0 for record in records: total_records += 1 @@ -117,7 +117,7 @@ def delete(self, record): def flush(self): if self.ids_to_delete: - SQLRepeatRecord.objects.filter(id__in=self.ids_to_delete).delete() + RepeatRecord.objects.filter(id__in=self.ids_to_delete).delete() self.ids_to_delete = [] def __exit__(self, *exc_info): diff --git a/corehq/motech/repeaters/management/commands/find_missing_repeat_records.py b/corehq/motech/repeaters/management/commands/find_missing_repeat_records.py index 549205ba9d2c..86dff0117aca 100644 --- a/corehq/motech/repeaters/management/commands/find_missing_repeat_records.py +++ b/corehq/motech/repeaters/management/commands/find_missing_repeat_records.py @@ -13,7 +13,7 @@ from corehq.form_processor.models import CommCareCase, XFormInstance from corehq.motech.dhis2.repeaters import Dhis2EntityRepeater from corehq.motech.openmrs.repeaters import OpenmrsRepeater -from corehq.motech.repeaters.models import CreateCaseRepeater, Repeater, UpdateCaseRepeater, SQLRepeatRecord +from corehq.motech.repeaters.models import CreateCaseRepeater, Repeater, UpdateCaseRepeater, RepeatRecord from corehq.util.argparse_types import date_type from dimagi.utils.parsing import string_to_utc_datetime @@ -102,7 +102,7 @@ def find_missing_form_repeat_records_for_form(form, domain, repeaters, enddate, missing_count = 0 successful_count = 0 triggered_repeater_ids = set( - SQLRepeatRecord.objects + RepeatRecord.objects .filter(domain=domain, payload_id=form.get_id) .values_list("repeater_id", flat=True) .order_by() @@ -193,16 +193,19 @@ def find_missing_case_repeat_records_for_domain(domain, startdate, enddate, shou def find_missing_case_repeat_records_for_case(case, domain, repeaters, startdate, enddate, should_create=False): successful_count = missing_all_count = missing_create_count = missing_update_count = 0 - repeat_records = SQLRepeatRecord.objects.filter(domain=domain, payload_id=case.get_id).order_by() + repeat_records = RepeatRecord.objects.filter( + domain=domain, + payload_id=case.get_id, + registered_at__gte=startdate, + ).order_by() # grab repeat records that were registered during the date range - records_during_daterange = [record for record in repeat_records - if startdate <= record.registered_at.date() <= enddate] + records_during_daterange = [record for record in repeat_records if record.registered_at.date() <= enddate] fired_repeater_ids_and_counts_during_daterange = defaultdict(int) for record in records_during_daterange: 
fired_repeater_ids_and_counts_during_daterange[record.repeater_id] += 1 # grab repeat records that were registered after the enddate - records_after_daterange = [record for record in repeat_records if record.registered_at.date() >= enddate] + records_after_daterange = [record for record in repeat_records if record.registered_at.date() > enddate] fired_repeater_ids_and_counts_after_enddate = defaultdict(int) for record in records_after_daterange: fired_repeater_ids_and_counts_after_enddate[record.repeater_id] += 1 @@ -399,7 +402,7 @@ def find_missing_repeat_records_in_domain(domain, repeaters, payload, enddate, s NOTE: Assumes the payload passed in was modified since the startdate """ missing_count = 0 - fired_repeater_ids = set(SQLRepeatRecord.objects.filter( + fired_repeater_ids = set(RepeatRecord.objects.filter( domain=domain, payload_id=payload.get_id, registered_at__gte=payload.last_modified.date(), @@ -513,7 +516,7 @@ def create_case_repeater_register(repeater, domain, payload): return now = datetime.utcnow() - repeat_record = SQLRepeatRecord.objects.create( + repeat_record = RepeatRecord.objects.create( repeater_id=repeater.id, domain=domain, registered_at=now, @@ -555,11 +558,11 @@ def handle(self, command, startdate, enddate, domain, startswith, create, **opti domains_to_inspect = [domain] elif startswith: domains_to_inspect = list( - SQLRepeatRecord.objects.get_domains_with_records() + RepeatRecord.objects.get_domains_with_records() .filter(domain__startswith=startswith) ) else: - domains_to_inspect = list(SQLRepeatRecord.objects.get_domains_with_records()) + domains_to_inspect = list(RepeatRecord.objects.get_domains_with_records()) logger.setLevel(logging.INFO if options["verbose"] else logging.WARNING) if command == CASES: diff --git a/corehq/motech/repeaters/management/commands/generate_repeaters_summary.py b/corehq/motech/repeaters/management/commands/generate_repeaters_summary.py index 76d014d5694d..1f56a6e93c63 100644 --- a/corehq/motech/repeaters/management/commands/generate_repeaters_summary.py +++ b/corehq/motech/repeaters/management/commands/generate_repeaters_summary.py @@ -1,6 +1,6 @@ from django.core.management.base import BaseCommand from django.db.models import Count -from corehq.motech.repeaters.models import SQLRepeatRecord, Repeater +from corehq.motech.repeaters.models import RepeatRecord, Repeater class Command(BaseCommand): @@ -17,7 +17,7 @@ def handle(self, *args, **options): self.stdout.write("\n") self.stdout.write('fetching repeat record data...') repeat_records_summary = dict( - SQLRepeatRecord.objects + RepeatRecord.objects .values("repeater__domain") .order_by() .annotate(record_count=Count("id")) diff --git a/corehq/motech/repeaters/management/commands/populate_repeatrecords.py b/corehq/motech/repeaters/management/commands/populate_repeatrecords.py deleted file mode 100644 index b3cbdfe3a9f9..000000000000 --- a/corehq/motech/repeaters/management/commands/populate_repeatrecords.py +++ /dev/null @@ -1,246 +0,0 @@ -from contextlib import contextmanager - -from django.db.models import Count - -from dimagi.utils.parsing import json_format_datetime, string_to_utc_datetime - -from corehq.apps.cleanup.management.commands.populate_sql_model_from_couch_model import PopulateSQLCommand - -from ...models import Repeater, SQLRepeatRecordAttempt, enable_attempts_sync_to_sql - - -class Command(PopulateSQLCommand): - - @classmethod - def couch_db_slug(cls): - return "receiverwrapper" - - @classmethod - def couch_doc_type(cls): - return 'RepeatRecord' - - @classmethod - 
def sql_class(cls): - from ...models import SQLRepeatRecord - return SQLRepeatRecord - - @classmethod - def commit_adding_migration(cls): - return "TODO: add once the PR adding this file is merged" - - def handle(self, *args, **kw): - couch_model_class = self.sql_class()._migration_get_couch_model_class() - with patch_couch_to_sql(couch_model_class): - return super().handle(*args, **kw) - - @classmethod - def diff_couch_and_sql(cls, couch, sql): - """ - Compare each attribute of the given couch document and sql - object. Return a list of human-readable strings describing their - differences, or None if the two are equivalent. The list may - contain `None` or empty strings. - """ - def sql_may_have_next_check(): - if sql.next_check is not None: - return True - couch_state = get_state(couch) - return couch_state == State.Pending or couch_state == State.Fail - - from ...models import State - is_very_old_pending_record = ( - 'registered_at' not in couch - and (get_state(couch) == State.Pending or get_state(couch) == State.Fail) - and (couch.get('next_check') or '2018-03-12') < '2018-03-12' - ) - fields = ["domain", "payload_id"] - diffs = [cls.diff_attr(name, couch, sql) for name in fields] - diffs.append(cls.diff_value( - "repeater_id", - couch["repeater_id"], - sql.repeater_id.hex, - )) - diffs.append(cls.diff_value( - "state", - State.Cancelled if is_very_old_pending_record else get_state(couch), - sql.state, - )) - diffs.append(cls.diff_value( - "registered_at", - (couch.get("next_check" if is_very_old_pending_record else "registered_on") or EPOCH), - json_format_datetime(sql.registered_at), - )) - if sql_may_have_next_check(): - diffs.append(cls.diff_value( - "next_check", - None if is_very_old_pending_record else couch["next_check"], - json_format_datetime(sql.next_check) if sql.next_check else sql.next_check, - )) - if couch.get("failure_reason") and not couch.get("succeeded"): - diffs.append(cls.diff_value( - "failure_reason", - couch["failure_reason"], - sql.failure_reason, - )) - - if not couch.get("attempts"): - if len(sql.attempts) > 1: - diffs.append(f"attempts: not in couch, {len(sql.attempts)} in sql") - else: - def transform(couch_attempts): - for attempt in couch_attempts: - yield {f: trans(attempt) for f, trans in transforms.items()} - - transforms = ATTEMPT_TRANSFORMS - diffs.extend(cls.diff_lists( - "attempts", - list(transform(couch["attempts"])), - sql.attempts, - transforms, - )) - if any(d for d in diffs) and '_rev' in couch: - # possibly useful for detecting Couch data corruption - diffs.append(f"couch['_rev']: {couch['_rev']}") - return diffs - - def get_ids_to_ignore(self, docs): - """Get ids of records that reference missing repeaters - - May include repeaters that have been created since the migration - started, whose records are already migrated. Also ignore records - associated with deleted repeaters. - - NOTE: there is a race condition between this repeaters existence - check and saving new records. A repeater could be deleted - between when this function is called and when the new records - are saved, which would cause the migration to fail with - IntegrityError on "repeater_id" column value. Since that is a - rare condition, it is not handled. It should be sufficient to - rerun the migration to recover from that error. 
- """ - existing_ids = {id_.hex for id_ in Repeater.all_objects.filter( - id__in=list({d["repeater_id"] for d in docs}) - ).values_list("id", flat=True)} - return {d["_id"] for d in docs if d["repeater_id"] not in existing_ids} - - def _prepare_for_submodel_creation(self, docs): - query = self.sql_class().objects.filter( - couch_id__in=[d["_id"] for d in docs if d.get("attempts")], - ).annotate( - num_attempts=Count("attempt_set") - ).order_by().values_list("couch_id", "id", "num_attempts") - self._sql_id_and_num_attempts_by_couch_id = {c: (s, n) for c, s, n in query} - - def _create_submodels(self, doc, submodel_specs): - """Create (unsaved) submodels for a previously synced doc - - :returns: Iterable of ``(submodel_type, submodels_list)`` pairs. - """ - couch_attempts = doc.get("attempts") - if not couch_attempts: - return - sql_id, sql_count = self._sql_id_and_num_attempts_by_couch_id.get(doc["_id"], (None, None)) - if sql_id is not None and sql_count < len(couch_attempts): - transforms = ATTEMPT_TRANSFORMS - yield SQLRepeatRecordAttempt, [ - SQLRepeatRecordAttempt(repeat_record_id=sql_id, **{ - f: trans(attempt) for f, trans in transforms.items() - }) - for attempt in (couch_attempts[:-sql_count] if sql_count else couch_attempts) - ] - - def _sql_query_from_docs(self, docs): - return super()._sql_query_from_docs(docs).prefetch_related("attempt_set") - - @classmethod - def _get_couch_doc_count_for_type(cls): - return count_docs() - - @classmethod - def get_couch_view_name_and_parameters(cls): - return 'repeaters/repeat_records_by_payload_id', {} - - @classmethod - def get_couch_view_name_and_parameters_for_domains(cls, domains): - return 'repeaters/repeat_records_by_payload_id', [{ - 'startkey': [domain], - 'endkey': [domain, {}], - } for domain in domains] - - def should_process(self, result): - if result['doc'] is None: - self.logfile.write(f"Ignored null document: {result['id']}\n") - return False - return True - - def _get_couch_doc_count_for_domains(self, domains): - def count_domain_docs(domain): - return count_docs(startkey=[domain], endkey=[domain, {}]) - return sum(count_domain_docs(d) for d in domains) - - -def count_docs(**params): - from ...models import RepeatRecord - result = RepeatRecord.get_db().view( - 'repeaters/repeat_records', - include_docs=False, - reduce=True, - **params, - ).one() - if not result: - return 0 - # repeaters/repeat_records's map emits twice per doc, so its count is doubled - # repeaters/repeat_records_by_payload_id has no reduce, so cannot be used - assert result['value'] % 2 == 0, result['value'] - return int(result['value'] / 2) - - -def get_state(doc): - from ...models import State - if doc['succeeded'] and doc.get('cancelled'): - return State.Empty - if doc['succeeded']: - return State.Success - if doc.get('cancelled'): - return State.Cancelled - if doc.get('failure_reason'): - return State.Fail - return State.Pending - - -ATTEMPT_TRANSFORMS = { - "state": get_state, - "message": (lambda doc: ( - doc.get("success_response") if doc.get("succeeded") else doc.get("failure_reason") - ) or ''), - "created_at": (lambda doc: string_to_utc_datetime(doc["datetime"] or EPOCH)), -} - - -def _attempt_to_preserve_failure_reason(doc, obj): - if not doc.attempts and not doc.succeeded and doc.failure_reason: - attempt = SQLRepeatRecordAttempt( - repeat_record=obj, - state=doc.state, - message=doc.failure_reason, - created_at=doc.registered_on or doc.next_check or EPOCH_DATETIME, - ) - assert not obj._new_submodels[SQLRepeatRecordAttempt][0], 'unexpected 
attempts' - obj._new_submodels[SQLRepeatRecordAttempt][0].append(attempt) - - -@contextmanager -def patch_couch_to_sql(couch_model): - original_functions = couch_model._migration_get_custom_couch_to_sql_functions - couch_model._migration_get_custom_couch_to_sql_functions = staticmethod( - lambda: [_attempt_to_preserve_failure_reason] - ) - try: - with enable_attempts_sync_to_sql(couch_model, True): - yield - finally: - couch_model._migration_get_custom_couch_to_sql_functions = original_functions - - -EPOCH = '1970-01-01T00:00:00.000000Z' -EPOCH_DATETIME = string_to_utc_datetime(EPOCH) diff --git a/corehq/motech/repeaters/management/commands/update_cancelled_records.py b/corehq/motech/repeaters/management/commands/update_cancelled_records.py index d29aa5a3b093..c5e10b7fd624 100644 --- a/corehq/motech/repeaters/management/commands/update_cancelled_records.py +++ b/corehq/motech/repeaters/management/commands/update_cancelled_records.py @@ -6,7 +6,7 @@ from django.core.management.base import BaseCommand from corehq.motech.repeaters.const import State -from corehq.motech.repeaters.models import Repeater, SQLRepeatRecord +from corehq.motech.repeaters.models import Repeater, RepeatRecord class Command(BaseCommand): @@ -93,7 +93,7 @@ def meets_filter(record): records = list(filter( meets_filter, - SQLRepeatRecord.objects.iterate(domain, repeater_id=repeater_id, state=State.Cancelled) + RepeatRecord.objects.iterate(domain, repeater_id=repeater_id, state=State.Cancelled) )) if verbose: diff --git a/corehq/motech/repeaters/migrations/0008_sqlrepeatrecords.py b/corehq/motech/repeaters/migrations/0008_sqlrepeatrecords.py index fcc5ebb27f17..273cdf1cab86 100644 --- a/corehq/motech/repeaters/migrations/0008_sqlrepeatrecords.py +++ b/corehq/motech/repeaters/migrations/0008_sqlrepeatrecords.py @@ -2,7 +2,7 @@ from django.db import migrations -from ..management.commands.populate_repeatrecords import Command +from corehq.util.django_migrations import prompt_for_historical_migration, get_migration_name class Migration(migrations.Migration): @@ -13,8 +13,6 @@ class Migration(migrations.Migration): ] operations = [ - migrations.RunPython( - Command.migrate_from_migration, - reverse_code=migrations.RunPython.noop, - ), + prompt_for_historical_migration( + "repeaters", get_migration_name(__file__), "06f59059cef7321849e0ea9d8e15b7e824d3e26f"), ] diff --git a/corehq/motech/repeaters/migrations/0010_rm_couch_artifacts.py b/corehq/motech/repeaters/migrations/0010_rm_couch_artifacts.py new file mode 100644 index 000000000000..f779c6d62ffd --- /dev/null +++ b/corehq/motech/repeaters/migrations/0010_rm_couch_artifacts.py @@ -0,0 +1,48 @@ +# Generated by Django 3.2.23 on 2024-01-16 20:20 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('repeaters', '0009_add_domain_to_indexes'), + ] + + operations = [ + migrations.SeparateDatabaseAndState( + state_operations=[ + migrations.RemoveConstraint( + model_name='sqlrepeatrecord', + name='unique_couch_id', + ), + migrations.RemoveField( + model_name='sqlrepeatrecord', + name='couch_id', + ), + # state-only to prevent drop/create attempts foreign key + migrations.RenameModel( + old_name='SQLRepeatRecord', + new_name='RepeatRecord', + ), + migrations.AlterModelTable( + name='repeatrecord', + table=None, + ), + migrations.RenameModel( + old_name='SQLRepeatRecordAttempt', + new_name='RepeatRecordAttempt', + ), + migrations.AlterModelTable( + name='repeatrecordattempt', + table=None, + ), + ], + ), + ] + + +# SQL operations to be 
done later +# +# DROP INDEX IF EXISTS "unique_couch_id"; +# ALTER TABLE "repeaters_repeatrecord" DROP COLUMN "couch_id" CASCADE; diff --git a/corehq/motech/repeaters/models.py b/corehq/motech/repeaters/models.py index ba7ec446c6b7..f01320dbe1b7 100644 --- a/corehq/motech/repeaters/models.py +++ b/corehq/motech/repeaters/models.py @@ -67,9 +67,7 @@ import json import traceback import uuid -import warnings from collections import defaultdict -from contextlib import contextmanager from datetime import datetime, timedelta from typing import Any from urllib.parse import parse_qsl, urlencode, urlparse, urlunparse @@ -81,7 +79,7 @@ from django.utils.functional import cached_property from django.utils.translation import gettext_lazy as _ -from couchdbkit.exceptions import ResourceConflict, ResourceNotFound +from couchdbkit.exceptions import ResourceNotFound from jsonfield import JSONField from memoized import memoized from requests.exceptions import ConnectionError, RequestException, Timeout @@ -89,16 +87,6 @@ from casexml.apps.case.const import CASE_INDEX_EXTENSION from casexml.apps.case.xml import LEGAL_VERSIONS, V2 from couchforms.const import DEVICE_LOG_XMLNS -from dimagi.ext.couchdbkit import ( - BooleanProperty, - DateTimeProperty, - Document, - DocumentSchema, - IntegerProperty, - ListProperty, - StringProperty, -) -from dimagi.utils.couch.migration import SubModelSpec, SyncCouchToSQLMixin, SyncSQLToCouchMixin from dimagi.utils.logging import notify_error, notify_exception from dimagi.utils.parsing import json_format_datetime @@ -136,11 +124,6 @@ MAX_BACKOFF_ATTEMPTS, MAX_RETRY_WAIT, MIN_RETRY_WAIT, - RECORD_CANCELLED_STATE, - RECORD_EMPTY_STATE, - RECORD_FAILURE_STATE, - RECORD_PENDING_STATE, - RECORD_SUCCESS_STATE, State, ) from .exceptions import RequestConnectionError, UnknownRepeater @@ -400,7 +383,7 @@ def register(self, payload, fire_synchronously=False): if not self.allowed_to_forward(payload): return now = datetime.utcnow() - repeat_record = SQLRepeatRecord( + repeat_record = RepeatRecord( repeater_id=self.id, domain=self.domain, registered_at=now, @@ -897,426 +880,6 @@ def datasource_is_subscribed_to(domain, data_source_id): ).exists() -def _get_state(self): - state = RECORD_PENDING_STATE - if self.succeeded and self.cancelled: - state = RECORD_EMPTY_STATE - elif self.succeeded: - state = RECORD_SUCCESS_STATE - elif self.cancelled: - state = RECORD_CANCELLED_STATE - elif self.failure_reason: - state = RECORD_FAILURE_STATE - return state - - -def set_state(self, value): - if value == RECORD_EMPTY_STATE: - self.succeeded = True - self.cancelled = True - self.failure_reason = "" - elif value == RECORD_SUCCESS_STATE: - self.succeeded = True - self.cancelled = False - self.failure_reason = "" - elif value == RECORD_CANCELLED_STATE: - self.succeeded = False - self.cancelled = True - self.failure_reason = "" - elif value == RECORD_FAILURE_STATE: - self.succeeded = False - self.cancelled = False - try: - reason = self.failure_reason - except AssertionError: - pass # HACK jsonobject/base_properties.pyx:73 - else: - if not reason: - self.failure_reason = "Unknown" - else: - assert value == RECORD_PENDING_STATE - self.succeeded = False - self.cancelled = False - self.failure_reason = "" - - -class RepeatRecordAttempt(DocumentSchema): - cancelled = BooleanProperty(default=False) - datetime = DateTimeProperty() - failure_reason = StringProperty() - success_response = StringProperty() - next_check = DateTimeProperty() - succeeded = BooleanProperty(default=False) - info = StringProperty() # 
extra information about this attempt - - @property - def message(self): - return (self.success_response if self.succeeded else self.failure_reason) or '' - - @message.setter - def message(self, value): - if self.succeeded: - self.success_response = value - else: - self.failure_reason = value - - state = property(_get_state, set_state) - - @property - def created_at(self): - # Used by .../case/partials/repeat_records.html - return self.datetime - - @created_at.setter - def created_at(self, value): - self.datetime = value - - -class RepeaterIdProperty(StringProperty): - - def __set__(self, instance, value): - super().__set__(instance, value) - instance.__dict__.pop("repeater", None) - - -class RepeatRecord(SyncCouchToSQLMixin, Document): - """ - An record of a particular instance of something that needs to be forwarded - with a link to the proper repeater object - """ - - domain = StringProperty() - repeater_id = RepeaterIdProperty() - repeater_type = StringProperty() - payload_id = StringProperty() - - overall_tries = IntegerProperty(default=0) - max_possible_tries = IntegerProperty(default=6) - - attempts = ListProperty(RepeatRecordAttempt) - - cancelled = BooleanProperty(default=False) - registered_on = DateTimeProperty() - last_checked = DateTimeProperty() - failure_reason = StringProperty() - next_check = DateTimeProperty() - succeeded = BooleanProperty(default=False) - - @classmethod - def _migration_get_fields(cls): - return ["domain", "payload_id", "registered_at", "state"] - - def _migration_sync_to_sql(self, sql_object, save=True): - sql_object.repeater_id = uuid.UUID(self.repeater_id) - sql_object.next_check = None if self.succeeded or self.cancelled else self.next_check - return super()._migration_sync_to_sql(sql_object, save=save) - - @classmethod - def _migration_get_submodels(cls): - return [SubModelSpec( - "attempt_set", - SQLRepeatRecordAttempt, - ["state", "message", "created_at"], - "attempts", - RepeatRecordAttempt, - ["state", "message", "created_at"], - )] - - @classmethod - def _migration_get_sql_model_class(cls): - return SQLRepeatRecord - - def save(self, *args, sync_attempts=False, **kw): - with enable_attempts_sync_to_sql(self, sync_attempts): - return super().save(*args, **kw) - - def _migration_sync_submodels_to_sql(self, sql_object): - if self._should_sync_attempts: - super()._migration_sync_submodels_to_sql(sql_object) - - @property - def record_id(self): - return self._id - - @property - def next_attempt_at(self): - return self.next_check - - @classmethod - def wrap(cls, data): - should_bootstrap_attempts = ('attempts' not in data) - - self = super(RepeatRecord, cls).wrap(data) - - if should_bootstrap_attempts and self.last_checked: - assert not self.attempts - self.attempts = [RepeatRecordAttempt( - cancelled=self.cancelled, - datetime=self.last_checked, - failure_reason=self.failure_reason, - success_response=None, - next_check=self.next_check, - succeeded=self.succeeded, - )] - return self - - @cached_property - def repeater(self): - try: - return Repeater.objects.get(id=self.repeater_id) - except Repeater.DoesNotExist: - return None - - def is_repeater_deleted(self): - try: - return Repeater.all_objects.values_list("is_deleted", flat=True).get(id=self.repeater_id) - except Repeater.DoesNotExist: - return True - - @property - def url(self): - warnings.warn("RepeatRecord.url is deprecated. 
Use Repeater.get_url instead", DeprecationWarning) - if self.repeater: - return self.repeater.get_url(self) - - state = property(_get_state, set_state) - - @property - def exceeded_max_retries(self): - return (self.state == RECORD_FAILURE_STATE and self.overall_tries - >= self.max_possible_tries) - - @property - def registered_at(self): - return self.registered_on or datetime.fromtimestamp(0) - - @registered_at.setter - def registered_at(self, value): - self.registered_on = value - - @classmethod - def all(cls, domain=None, due_before=None, limit=None): - json_now = json_format_datetime(due_before or datetime.utcnow()) - repeat_records = RepeatRecord.view("repeaters/repeat_records_by_next_check", - startkey=[domain], - endkey=[domain, json_now, {}], - include_docs=True, - reduce=False, - limit=limit, - ) - return repeat_records - - @classmethod - def count(cls, domain=None): - results = RepeatRecord.view("repeaters/repeat_records_by_next_check", - startkey=[domain], - endkey=[domain, {}], - reduce=True, - ).one() - return results['value'] if results else 0 - - def add_attempt(self, attempt): - self.attempts.append(attempt) - self.last_checked = attempt.datetime - self.next_check = attempt.next_check - self.succeeded = attempt.succeeded - self.cancelled = attempt.cancelled - self.failure_reason = attempt.failure_reason - try: - record_id = SQLRepeatRecord.objects.values("id").get(couch_id=self._id)["id"] - except SQLRepeatRecord.DoesNotExist: - with enable_attempts_sync_to_sql(self, True): - self._migration_do_sync() - else: - SQLRepeatRecordAttempt.objects.create( - repeat_record_id=record_id, - state=attempt.state, - message=attempt.message, - created_at=attempt.created_at, - ) - - def get_numbered_attempts(self): - for i, attempt in enumerate(self.attempts): - yield i + 1, attempt - - def postpone_by(self, duration): - self.last_checked = datetime.utcnow() - self.next_check = self.last_checked + duration - self.save() - - def make_set_next_try_attempt(self, failure_reason): - assert self.succeeded is False - assert self.next_check is not None - now = datetime.utcnow() - retry_interval = _get_retry_interval(self.last_checked, now) - return RepeatRecordAttempt( - cancelled=False, - datetime=now, - failure_reason=failure_reason, - success_response=None, - next_check=now + retry_interval, - succeeded=False, - ) - - def try_now(self): - # try when we haven't succeeded and either we've - # never checked, or it's time to check again - return not self.succeeded - - def get_payload(self): - return self.repeater.get_payload(self) - - def get_attempt_info(self): - return self.repeater.get_attempt_info(self) - - def handle_payload_exception(self, exception): - now = datetime.utcnow() - return RepeatRecordAttempt( - cancelled=True, - datetime=now, - failure_reason=str(exception), - success_response=None, - next_check=None, - succeeded=False, - ) - - def fire(self, force_send=False): - if self.try_now() or force_send: - self.overall_tries += 1 - try: - attempt = self.repeater.fire_for_record(self) - except Exception as e: - log_repeater_error_in_datadog(self.domain, status_code=None, - repeater_type=self.repeater_type) - attempt = self.handle_payload_exception(e) - raise - finally: - # pycharm warns attempt might not be defined. - # that'll only happen if fire_for_record raise a non-Exception exception (e.g. SIGINT) - # or handle_payload_exception raises an exception. I'm okay with that. 
-DMR - self.add_attempt(attempt) - self.save() - - def handle_success(self, response): - """ - Log success in Datadog and return a success RepeatRecordAttempt. - - ``response`` can be a Requests response instance, or True if the - payload did not result in an API call. - """ - now = datetime.utcnow() - if is_response(response): - # ^^^ Don't bother logging success in Datadog if the payload - # did not need to be sent. (This can happen with DHIS2 if - # the form that triggered the forwarder doesn't contain data - # for a DHIS2 Event.) - log_repeater_success_in_datadog( - self.domain, - response.status_code, - self.repeater_type - ) - # Mark as cancelled and successful if it was an empty payload with nothing to send - return RepeatRecordAttempt( - cancelled=(response.status_code == 204), - datetime=now, - failure_reason=None, - success_response=format_response(response), - next_check=None, - succeeded=True, - info=self.get_attempt_info(), - ) - - def handle_failure(self, response): - """Do something with the response if the repeater fails - """ - return self._make_failure_attempt(format_response(response), response) - - def handle_exception(self, exception): - """handle internal exceptions - """ - return self._make_failure_attempt(str(exception), None) - - def _make_failure_attempt(self, reason, response): - log_repeater_error_in_datadog(self.domain, response.status_code if response else None, - self.repeater_type) - - if self.repeater.allow_retries(response) and self.overall_tries < self.max_possible_tries: - return self.make_set_next_try_attempt(reason) - else: - now = datetime.utcnow() - return RepeatRecordAttempt( - cancelled=True, - datetime=now, - failure_reason=reason, - success_response=None, - next_check=None, - succeeded=False, - info=self.get_attempt_info(), - ) - - def cancel(self): - self.next_check = None - self.cancelled = True - - def _is_ready(self): - return self.next_check < datetime.utcnow() - - def _already_processed(self): - return self.succeeded or self.cancelled or self.next_check is None - - def attempt_forward_now(self, *, is_retry=False, fire_synchronously=False): - from corehq.motech.repeaters.tasks import ( - process_repeat_record, - retry_process_repeat_record, - ) - - if self._already_processed() or not self._is_ready(): - return - - # Set the next check to happen an arbitrarily long time from now. - # This way if there's a delay in calling `process_repeat_record` (which - # also sets or clears next_check) we won't queue this up in duplicate. - # If `process_repeat_record` is totally borked, this future date is a - # fallback. - self.next_check = datetime.utcnow() + timedelta(hours=48) - try: - self.save() - except ResourceConflict: - # Another process beat us to the punch. This takes advantage - # of Couch DB's optimistic locking, which prevents a process - # with stale data from overwriting the work of another. 
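For reference, the claim-by-save pattern that this deleted branch implements can be sketched as follows (hypothetical helper name; assumes a couchdbkit-style document whose save() raises ResourceConflict when its _rev is stale, which is the import being dropped from this module above):

    from datetime import datetime, timedelta

    from couchdbkit.exceptions import ResourceConflict


    def try_to_claim_for_processing(record):
        # Bump next_check far into the future so a duplicate enqueue is harmless,
        # then let CouchDB's revision check decide which process won the race.
        record.next_check = datetime.utcnow() + timedelta(hours=48)
        try:
            record.save()
        except ResourceConflict:
            # Another process saved a newer revision first; let it do the work.
            return False
        return True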
- return - - # separated for improved datadog reporting - task = retry_process_repeat_record if is_retry else process_repeat_record - - if fire_synchronously: - task(self._id, self.domain) - else: - task.delay(self._id, self.domain) - - def requeue(self): - self.cancelled = False - self.succeeded = False - self.failure_reason = '' - self.overall_tries = 0 - self.next_check = datetime.utcnow() - - -def is_sql_id(value): - return not isinstance(value, str) or (value.isdigit() and len(value) != 32) - - -@contextmanager -def enable_attempts_sync_to_sql(obj, value): - assert not hasattr(obj, "_should_sync_attempts") - obj._should_sync_attempts = value - try: - yield - finally: - del obj._should_sync_attempts - - # on_delete=DB_CASCADE denotes ON DELETE CASCADE in the database. The # constraints are configured in a migration. Note that Django signals # will not fire on records deleted via cascade. @@ -1394,9 +957,8 @@ def get_domains_with_records(self): return self.order_by().values_list("domain", flat=True).distinct() -class SQLRepeatRecord(SyncSQLToCouchMixin, models.Model): +class RepeatRecord(models.Model): domain = models.CharField(max_length=126) - couch_id = models.CharField(max_length=36, null=True, blank=True) payload_id = models.CharField(max_length=255) repeater = models.ForeignKey(Repeater, on_delete=DB_CASCADE, @@ -1410,7 +972,6 @@ class SQLRepeatRecord(SyncSQLToCouchMixin, models.Model): objects = RepeatRecordManager() class Meta: - db_table = 'repeaters_repeatrecord' indexes = [ models.Index(fields=['domain', 'registered_at']), models.Index(fields=['payload_id']), @@ -1421,11 +982,6 @@ class Meta: ) ] constraints = [ - models.UniqueConstraint( - name="unique_couch_id", - fields=['couch_id'], - condition=models.Q(couch_id__isnull=False), - ), models.CheckConstraint( name="next_check_pending_or_null", check=( @@ -1437,26 +993,6 @@ class Meta: ] ordering = ['registered_at'] - @classmethod - def _migration_get_couch_model_class(cls): - return RepeatRecord - - @classmethod - def _migration_get_fields(cls): - return [ - "domain", - "payload_id", - "registered_at", - "next_check", - "state", - "failure_reason", - "overall_tries", - ] - - def _migration_sync_to_couch(self, couch_object, save=True): - couch_object.repeater_id = self.repeater.repeater_id - return super()._migration_sync_to_couch(couch_object, save=save) - def requeue(self): # Changing "success" to "pending" and "cancelled" to "failed" # preserves the value of `self.failure_reason`. @@ -1566,7 +1102,7 @@ def next_attempt_at(self): @property def url(self): # Used by .../case/partials/repeat_records.html - return self.repeater.couch_repeater.get_url(self) + return self.repeater.get_url(self) @property def failure_reason(self): @@ -1601,14 +1137,6 @@ def is_queued(self): # Members below this line have been added to support the # Couch repeater processing logic. 
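The members deleted just below were Couch-compatibility shims: overall_tries merely mirrored the SQL attempt count, and writes to it were silently ignored so Couch-era code paths could keep incrementing it. A minimal sketch of that alias (illustrative class, not the production model):

    class _CouchCompatShim:
        # Stand-in for the model's real attempt count.
        num_attempts = 0

        @property
        def overall_tries(self):
            return self.num_attempts

        @overall_tries.setter
        def overall_tries(self, ignored):
            # Writes are deliberately dropped, matching the no-op setter removed below.
            pass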
- @property - def overall_tries(self): - return self.num_attempts - - @overall_tries.setter - def overall_tries(self, ignored): - pass - @property def exceeded_max_retries(self): return self.state == State.Fail and self.num_attempts >= self.max_possible_tries @@ -1618,8 +1146,7 @@ def repeater_type(self): return self.repeater.repeater_type def fire(self, force_send=False): - if self.try_now() or force_send: - self.overall_tries += 1 + if force_send or not self.succeeded: try: attempt = self.repeater.fire_for_record(self) except Exception as e: @@ -1634,10 +1161,6 @@ def fire(self, force_send=False): self.add_attempt(attempt) self.save() - def try_now(self): - # TODO rename to should_try_now - return self.state != State.Success - def attempt_forward_now(self, *, is_retry=False, fire_synchronously=False): from corehq.motech.repeaters.tasks import ( process_repeat_record, @@ -1705,27 +1228,26 @@ def postpone_by(self, duration): self.save() -class SQLRepeatRecordAttempt(models.Model): +class RepeatRecordAttempt(models.Model): repeat_record = models.ForeignKey( - SQLRepeatRecord, on_delete=DB_CASCADE, related_name="attempt_set") + RepeatRecord, on_delete=DB_CASCADE, related_name="attempt_set") state = models.PositiveSmallIntegerField(choices=State.choices) message = models.TextField(blank=True, default='') traceback = models.TextField(blank=True, default='') created_at = models.DateTimeField(default=timezone.now) class Meta: - db_table = 'repeaters_repeatrecordattempt' ordering = ['created_at'] -@receiver(models.signals.pre_save, sender=SQLRepeatRecordAttempt) +@receiver(models.signals.pre_save, sender=RepeatRecordAttempt) def _register_new_attempt_to_clear_cache(sender, instance, **kwargs): record = instance._meta.get_field("repeat_record").get_cached_value(instance, None) if instance._state.adding and record is not None: instance._record_with_new_attempt = record -@receiver(models.signals.post_save, sender=SQLRepeatRecordAttempt) +@receiver(models.signals.post_save, sender=RepeatRecordAttempt) def _clear_attempts_cache_after_save_new_attempt(sender, instance, **kwargs): # Clear cache in post_save because it may get populated by save # logic before the save is complete. 
The post_save signal by itself @@ -1765,7 +1287,7 @@ def attempt_forward_now(repeater: Repeater): # unused process_repeater.delay(repeater.id.hex) -def get_payload(repeater: Repeater, repeat_record: SQLRepeatRecord) -> str: +def get_payload(repeater: Repeater, repeat_record: RepeatRecord) -> str: try: return repeater.get_payload(repeat_record) except Exception as err: @@ -1783,7 +1305,7 @@ def get_payload(repeater: Repeater, repeat_record: SQLRepeatRecord) -> str: def send_request( repeater: Repeater, - repeat_record: SQLRepeatRecord, + repeat_record: RepeatRecord, payload: Any, ) -> bool: """ @@ -1864,10 +1386,6 @@ def is_response(duck): return hasattr(duck, 'status_code') and hasattr(duck, 'reason') -def are_repeat_records_migrated(domain) -> bool: - return True - - def domain_can_forward(domain): return domain and ( domain_has_privilege(domain, ZAPIER_INTEGRATION) diff --git a/corehq/motech/repeaters/tasks.py b/corehq/motech/repeaters/tasks.py index 55ad2ccc047e..0cbb5b917515 100644 --- a/corehq/motech/repeaters/tasks.py +++ b/corehq/motech/repeaters/tasks.py @@ -1,4 +1,3 @@ -from contextlib import contextmanager from datetime import datetime, timedelta from django.conf import settings @@ -7,7 +6,6 @@ from celery.utils.log import get_task_logger from dimagi.utils.couch import CriticalSection, get_redis_lock -from dimagi.utils.couch.undo import DELETED_SUFFIX from corehq.apps.celery import periodic_task, task from corehq.motech.models import RequestLog @@ -30,10 +28,9 @@ ) from .models import ( Repeater, - SQLRepeatRecord, + RepeatRecord, domain_can_forward, get_payload, - is_sql_id, send_request, ) @@ -108,7 +105,7 @@ def check_repeaters_in_partition(partition): "commcare.repeaters.check.processing", timing_buckets=_check_repeaters_buckets, ): - for record in SQLRepeatRecord.objects.iter_partition( + for record in RepeatRecord.objects.iter_partition( start, partition, CHECK_REPEATERS_PARTITION_COUNT): if not _soft_assert( datetime.utcnow() < twentythree_hours_later, @@ -134,8 +131,7 @@ def process_repeat_record(repeat_record_id, domain): NOTE: Keep separate from retry_process_repeat_record for monitoring purposes Domain is present here for domain tagging in datadog """ - repeat_record = _get_repeat_record(repeat_record_id) - _process_repeat_record(repeat_record) + _process_repeat_record(RepeatRecord.objects.get(id=repeat_record_id)) @task(queue=settings.CELERY_REPEAT_RECORD_QUEUE) @@ -144,15 +140,7 @@ def retry_process_repeat_record(repeat_record_id, domain): NOTE: Keep separate from process_repeat_record for monitoring purposes Domain is present here for domain tagging in datadog """ - repeat_record = _get_repeat_record(repeat_record_id) - _process_repeat_record(repeat_record) - - -def _get_repeat_record(repeat_record_id): - if not is_sql_id(repeat_record_id): - # can be removed after all in-flight tasks with Couch ids have been processed - return SQLRepeatRecord.objects.get(couch_id=repeat_record_id) - return SQLRepeatRecord.objects.get(id=repeat_record_id) + _process_repeat_record(RepeatRecord.objects.get(id=repeat_record_id)) def _process_repeat_record(repeat_record): @@ -169,8 +157,7 @@ def _process_repeat_record(repeat_record): if repeat_record.repeater.is_deleted: repeat_record.cancel() - with _delete_couch_record(repeat_record): - repeat_record.save() + repeat_record.save() return try: @@ -185,30 +172,9 @@ def _process_repeat_record(repeat_record): logging.exception('Failed to process repeat record: {}'.format(repeat_record.id)) -@contextmanager -def 
_delete_couch_record(repeat_record): - from django.db.models import Model - - def delete(_, couch_object): - if not couch_object.doc_type.endswith(DELETED_SUFFIX): - couch_object.doc_type += DELETED_SUFFIX - - if isinstance(repeat_record, Model): - assert not repeat_record._migration_get_custom_sql_to_couch_functions() - repeat_record._migration_get_custom_sql_to_couch_functions = lambda: [delete] - try: - yield - finally: - del repeat_record._migration_get_custom_sql_to_couch_functions - assert not repeat_record._migration_get_custom_sql_to_couch_functions() - else: - delete(..., repeat_record) - yield - - metrics_gauge_task( 'commcare.repeaters.overdue', - SQLRepeatRecord.objects.count_overdue, + RepeatRecord.objects.count_overdue, run_every=crontab(), # every minute multiprocess_mode=MPM_MAX ) @@ -217,7 +183,7 @@ def delete(_, couch_object): @task(queue=settings.CELERY_REPEAT_RECORD_QUEUE) def process_repeater(repeater_id): """ - Worker task to send SQLRepeatRecords in chronological order. + Worker task to send RepeatRecords in chronological order. This function assumes that ``repeater`` checks have already been performed. Call via ``models.attempt_forward_now()``. diff --git a/corehq/motech/repeaters/tests/test_couchsqlmigration.py b/corehq/motech/repeaters/tests/test_couchsqlmigration.py deleted file mode 100644 index 595b3a81cec5..000000000000 --- a/corehq/motech/repeaters/tests/test_couchsqlmigration.py +++ /dev/null @@ -1,588 +0,0 @@ -from contextlib import contextmanager -from datetime import datetime, timedelta -from pathlib import Path -from unittest.mock import patch -from uuid import UUID, uuid4 - -from django.db import connection, transaction -from django.core.management import call_command -from django.test import SimpleTestCase, TestCase -from django.utils.functional import cached_property - -from testil import tempdir - -from dimagi.utils.parsing import json_format_datetime - -from .. 
import models -from ..dbaccessors import delete_all_repeat_records -from ..management.commands.populate_repeatrecords import Command, get_state -from ..models import ( - ConnectionSettings, - RepeatRecord, - RepeatRecordAttempt, - SQLRepeatRecord, - SQLRepeatRecordAttempt, -) - -REPEATER_ID_1 = "5c739aaa0cb44a24a71933616258f3b6" -REPEATER_ID_2 = "64b6bf1758ed4f2a8944d6f34c2811c2" -REPEATER_ID_3 = "123b7a7008b447a4a0de61f6077a0353" - - -class TestRepeatRecordModel(SimpleTestCase): - - def test_set_state_does_not_overwrite_failure_reason(self): - rec = RepeatRecord(failure_reason="Mildew that should not go away") - rec.state = models.State.Fail - self.assertEqual(rec.failure_reason, "Mildew that should not go away") - - -class BaseRepeatRecordCouchToSQLTest(TestCase): - - @classmethod - def setUpClass(cls): - super().setUpClass() - url = 'https://www.example.com/api/' - conn = ConnectionSettings.objects.create(domain="test", name=url, url=url) - cls.repeater1 = models.FormRepeater.objects.create( - id=UUID(REPEATER_ID_1), - domain="test", - connection_settings=conn, - include_app_id_param=False, - ) - cls.repeater2 = models.FormRepeater.objects.create( - id=UUID(REPEATER_ID_2), - domain="test", - connection_settings=conn, - include_app_id_param=False, - ) - - def create_repeat_record(self, unwrap_doc=True, repeater=None): - def data(**extra): - return { - 'domain': repeater.domain, - 'payload_id': payload_id, - **extra, - } - if repeater is None: - repeater = self.repeater1 - now = datetime.utcnow().replace(microsecond=0) - payload_id = uuid4().hex - first_attempt = datetime.utcnow() - timedelta(minutes=10) - second_attempt = datetime.utcnow() - timedelta(minutes=8) - obj = SQLRepeatRecord(repeater_id=repeater.id, registered_at=now, **data()) - obj._prefetched_objects_cache = {"attempt_set": [ - SQLRepeatRecordAttempt( - state=models.State.Fail, - message="something bad happened", - traceback="the parrot has left the building", - created_at=first_attempt, - ), - SQLRepeatRecordAttempt( - state=models.State.Success, - message="polly wants a cracker", - created_at=second_attempt, - ), - ]} - doc = RepeatRecord.wrap(data( - doc_type="RepeatRecord", - repeater_type='Echo', - repeater_id=repeater.repeater_id, - registered_on=json_format_datetime(now), - attempts=[ - { - "datetime": first_attempt.isoformat() + "Z", - "failure_reason": "something bad happened", - "next_check": second_attempt.isoformat() + "Z", - }, - { - "datetime": second_attempt.isoformat() + "Z", - "success_response": "polly wants a cracker", - "succeeded": True, - }, - ], - overall_tries=2, - )) - if unwrap_doc: - doc = doc.to_json() - return doc, obj - - -class TestRepeatRecordCouchToSQLDiff(BaseRepeatRecordCouchToSQLTest): - - def test_no_diff(self): - doc, obj = self.create_repeat_record() - self.assertEqual(self.diff(doc, obj), []) - - def test_diff_domain(self): - doc, obj = self.create_repeat_record() - doc['domain'] = 'other-domain' - self.assertEqual( - self.diff(doc, obj), - ["domain: couch value 'other-domain' != sql value 'test'"], - ) - - def test_diff_payload_id(self): - doc, obj = self.create_repeat_record() - obj.payload_id = uuid4().hex - self.assertEqual( - self.diff(doc, obj), - [f"payload_id: couch value '{doc['payload_id']}' != sql value '{obj.payload_id}'"], - ) - - def test_diff_repeater_id(self): - doc, obj = self.create_repeat_record() - obj.repeater_id = self.repeater2.id - self.assertEqual( - self.diff(doc, obj), - [f"repeater_id: couch value '{REPEATER_ID_1}' != sql value '{REPEATER_ID_2}'"], - ) - - 
def test_diff_state(self): - doc, obj = self.create_repeat_record() - obj.state = models.State.Empty - expected_diff = ["state: couch value State.Pending != sql value State.Empty"] - self.assertEqual(self.diff(doc, obj), expected_diff) - - def test_diff_registered_at(self): - doc, obj = self.create_repeat_record() - hour_hence = datetime.utcnow() + timedelta(hours=1) - obj.registered_at = hour_hence - self.assertEqual( - self.diff(doc, obj), - [f"registered_at: couch value {doc['registered_on']!r} " - f"!= sql value {json_format_datetime(hour_hence)!r}"], - ) - - def test_diff_next_check(self): - doc, obj = self.create_repeat_record() - hour_hence = datetime.utcnow() + timedelta(hours=1) - obj.next_check = hour_hence - self.assertEqual( - self.diff(doc, obj), - [f"next_check: couch value {doc['next_check']!r} " - f"!= sql value {json_format_datetime(hour_hence)!r}"], - ) - - def test_diff_next_check_when_couch_value_is_obsolete(self): - doc, obj = self.create_repeat_record(unwrap_doc=False) - doc.state = models.State.Success - doc.next_check = datetime.utcnow() + timedelta(days=2) - obj.state = models.State.Success - obj.next_check = None - self.assertEqual(self.diff(doc.to_json(), obj), []) - - def test_diff_failure_reason(self): - doc, obj = self.create_repeat_record() - doc["failure_reason"] = "polly didn't get a cracker" - obj.state = models.State.Fail - self.assertEqual( - self.diff(doc, obj), - [ - 'failure_reason: couch value "polly didn\'t get a cracker" ' - '!= sql value \'polly wants a cracker\'', - ], - ) - - def test_diff_empty_couch_failure_reason(self): - doc, obj = self.create_repeat_record() - doc["failure_reason"] = "" - doc["cancelled"] = True - doc["attempts"].pop() - obj.state = models.State.Cancelled - obj.attempts.pop() - self.assertEqual(self.diff(doc, obj), []) - - def test_diff_attempts(self): - doc, obj = self.create_repeat_record() - doc["_rev"] = "v0-fake" - doc["attempts"][0]["succeeded"] = True - doc["attempts"][0]["failure_reason"] = None - doc["attempts"][1]["datetime"] = "2020-01-01T00:00:00.000000Z" - doc["attempts"][1]["success_response"] = None - obj.attempts[1].message = '' - couch_datetime = repr(datetime(2020, 1, 1, 0, 0)) - sql_created_at = repr(obj.attempts[1].created_at) - expected_diff = [ - "attempts[0].state: couch value State.Success != sql value State.Fail", - "attempts[0].message: couch value '' != sql value 'something bad happened'", - f"attempts[1].created_at: couch value {couch_datetime} != sql value {sql_created_at}", - "couch['_rev']: v0-fake", - ] - self.assertEqual(self.diff(doc, obj), expected_diff) - - def diff(self, doc, obj): - return do_diff(Command, doc, obj) - - -class TestRepeatRecordCouchToSQLMigration(BaseRepeatRecordCouchToSQLTest): - - @classmethod - def setUpClass(cls): - super().setUpClass() - cls.db = RepeatRecord.get_db() - - def tearDown(self): - delete_all_repeat_records() - Command.discard_resume_state(verify_only=False) - Command.discard_resume_state(verify_only=True) - super().tearDown() - - def test_sync_to_couch(self): - doc, obj = self.create_repeat_record() - obj.save() - couch_obj = self.db.get(obj._migration_couch_id) - self.assertEqual(self.diff(couch_obj, obj), []) - - hour_hence = datetime.utcnow() + timedelta(hours=1) - obj.payload_id = payload_id = uuid4().hex - obj.repeater_id = self.repeater2.id - obj.state = models.State.Fail - obj.registered_at = hour_hence - obj.next_check = hour_hence - del obj.attempts[1] - obj.save() - doc = self.db.get(obj._migration_couch_id) - 
self.assertEqual(doc['payload_id'], payload_id) - self.assertEqual(doc['repeater_id'], self.repeater2.repeater_id) - self.assertEqual(doc['failure_reason'], "something bad happened") - self.assertEqual(get_state(doc), models.State.Fail) - self.assertEqual(doc['registered_on'], json_format_datetime(hour_hence)) - self.assertEqual(doc['next_check'], json_format_datetime(hour_hence)) - self.assertEqual(doc['attempts'][0]["succeeded"], False) - self.assertEqual(doc['attempts'][0]["failure_reason"], "something bad happened") - self.assertEqual(doc['overall_tries'], 1) - - def test_sync_to_sql(self): - doc, obj = self.create_repeat_record(unwrap_doc=False) - doc.save(sync_attempts=True) - self.assertEqual( - self.diff(doc.to_json(), SQLRepeatRecord.objects.get(couch_id=doc._id)), - [], - ) - - hour_hence = datetime.utcnow() + timedelta(hours=1) - doc.payload_id = payload_id = uuid4().hex - doc.repeater_id = REPEATER_ID_2 - doc.failure_reason = "something happened" - doc.registered_on = hour_hence - doc.next_check = hour_hence - del doc.attempts[0] - doc.save() - obj = SQLRepeatRecord.objects.get(couch_id=doc._id) - self.assertEqual(obj.payload_id, payload_id) - self.assertEqual(obj.repeater.repeater_id, REPEATER_ID_2) - self.assertEqual(obj.state, models.State.Fail) - self.assertEqual(obj.registered_at, hour_hence) - self.assertEqual(obj.next_check, hour_hence) - # attempts are not synced to SQL by default - self.assertEqual(obj.attempts[0].state, models.State.Fail) - self.assertEqual(len(obj.attempts), 2) - - def test_sync_attempts_to_sql(self): - doc, obj = self.create_repeat_record(unwrap_doc=False) - doc.save(sync_attempts=True) - - del doc.attempts[0] - doc.save(sync_attempts=True) - obj = SQLRepeatRecord.objects.get(couch_id=doc._id) - self.assertEqual(obj.attempts[0].state, models.State.Success) - self.assertEqual(len(obj.attempts), 1) - - def test_sync_attempt_with_null_message_to_sql(self): - doc, obj = self.create_repeat_record(unwrap_doc=False) - doc.save(sync_attempts=True) - - doc.attempts[1].success_response = None - doc.save(sync_attempts=True) - obj = SQLRepeatRecord.objects.get(couch_id=doc._id) - self.assertEqual(obj.attempts[1].message, '') - - def test_repeater_syncs_attempt_to_couch_on_sql_record_add_attempt(self): - doc, obj = self.create_repeat_record(unwrap_doc=False) - doc.attempts.pop() - assert len(doc.attempts) == 1, doc.attempts - obj._prefetched_objects_cache["attempt_set"].pop() - doc.save(sync_attempts=True) - obj = SQLRepeatRecord.objects.get(couch_id=doc._id) - self.assertEqual(self.diff(doc.to_json(), obj), []) - - obj.add_success_attempt(True) - doc = self.db.get(obj._migration_couch_id) - self.assertEqual(len(doc["attempts"]), 2, doc["attempts"]) - self.assertEqual(doc["attempts"][-1]["succeeded"], True) - self.assertFalse(doc["attempts"][-1]["success_response"]) - - def test_repeater_add_attempt_syncs_to_sql(self): - doc, obj = self.create_repeat_record(unwrap_doc=False) - doc.save(sync_attempts=True) - obj = SQLRepeatRecord.objects.get(couch_id=doc._id) - initial_attempt_ids = [a.id for a in obj.attempts] - obj.attempts._result_cache = None - - attempt = RepeatRecordAttempt( - datetime=datetime.utcnow(), - success_response="manual resend", - succeeded=True, - ) - doc.add_attempt(attempt) - doc.save() - attempts = list(obj.attempts) - self.assertEqual( - initial_attempt_ids, - [attempts[0].id, attempts[1].id], - "initial attempts should not be deleted/recreated" - ) - self.assertEqual(len(attempts), 3, [a.message for a in attempts]) - 
self.assertEqual(attempts[-1].message, "manual resend") - - def test_repeater_syncs_attempt_to_sql_when_sql_record_does_not_exist(self): - doc, obj = self.create_repeat_record(unwrap_doc=False) - doc.save(sync_to_sql=False) - - attempt = RepeatRecordAttempt( - datetime=datetime.utcnow(), - success_response="manual resend", - succeeded=True, - ) - doc.add_attempt(attempt) - doc.save() - obj = SQLRepeatRecord.objects.get(couch_id=doc._id) - attempts = list(obj.attempts) - self.assertEqual(len(attempts), 3, [a.message for a in attempts]) - self.assertEqual(attempts[-1].message, "manual resend") - - def test_migration(self): - @property - def dont_lookup_repeater(self): - # fail if inefficient repeater lookup is attempted - raise Exception("this should not happen") - - doc, obj = self.create_repeat_record(unwrap_doc=False) - doc.save(sync_to_sql=False) - with patch.object(type(doc), "repeater", dont_lookup_repeater): - call_command('populate_repeatrecords') - self.assertEqual( - self.diff(doc.to_json(), SQLRepeatRecord.objects.get(couch_id=doc._id)), - [], - ) - - def test_migration_fixup_diffs(self): - # Additional call should apply any updates - doc, obj = self.create_repeat_record(unwrap_doc=False) - doc.save(sync_attempts=True) - doc.payload_id = payload_id = uuid4().hex - doc.repeater_id = REPEATER_ID_2 - doc.failure_reason = "something bad happened" - doc.registered_on = datetime.utcnow() + timedelta(hours=1) - del doc.attempts[1] - doc.save(sync_to_sql=False) - - with templog() as log: - call_command('populate_repeatrecords', log_path=log.path) - self.assertIn(f'Doc "{doc._id}" has differences:\n', log.content) - self.assertIn(f"payload_id: couch value {payload_id!r} != sql value {obj.payload_id!r}\n", log.content) - self.assertIn( - f"repeater_id: couch value '{REPEATER_ID_2}' != sql value '{REPEATER_ID_1}'\n", log.content) - self.assertIn("state: couch value State.Fail != sql value 1\n", log.content) - self.assertIn("registered_at: couch value '", log.content) - - call_command('populate_repeatrecords', fixup_diffs=log.path) - self.assertEqual( - self.diff(doc.to_json(), SQLRepeatRecord.objects.get(couch_id=doc._id)), - [], - ) - - def test_migration_with_deleted_repeater(self): - doc, obj = self.create_repeat_record(unwrap_doc=False) - repeater1_id = self.repeater1.id - self.addCleanup(setattr, self.repeater1, "id", repeater1_id) - self.repeater1.delete() - doc_id = self.db.save_doc(doc.to_json())["id"] - assert RepeatRecord.get(doc_id) is not None, "missing record" - with templog() as log, patch.object(transaction, "atomic", atomic_check): - call_command('populate_repeatrecords', log_path=log.path) - self.assertIn(f"Ignored model for RepeatRecord with id {doc_id}\n", log.content) - - def test_migration_with_null_attempt_message(self): - doc, _ = self.create_repeat_record(unwrap_doc=False) - doc.attempts[1].message = None - doc.save(sync_to_sql=False) - with templog() as log, patch.object(transaction, "atomic", atomic_check): - call_command('populate_repeatrecords', log_path=log.path) - self.assertNotIn('has differences:', log.content) - obj = SQLRepeatRecord.objects.get(couch_id=doc._id) - self.assertEqual(obj.attempts[1].message, '') - - def test_migration_with_null_registered_at(self): - doc, _ = self.create_repeat_record(unwrap_doc=False) - doc.registered_on = None - doc.save(sync_to_sql=False) - with templog() as log, patch.object(transaction, "atomic", atomic_check): - call_command('populate_repeatrecords', log_path=log.path) - self.assertNotIn('has differences:', log.content) - 
obj = SQLRepeatRecord.objects.get(couch_id=doc._id) - self.assertEqual(obj.registered_at, datetime(1970, 1, 1)) - - def test_migrate_record_with_unsynced_sql_attempts(self): - doc, _ = self.create_repeat_record(unwrap_doc=False) - doc.save() # sync to SQL, but do not save attempts - with templog() as log, patch.object(transaction, "atomic", atomic_check): - call_command('populate_repeatrecords', log_path=log.path) - self.assertNotIn('has differences:', log.content) - obj = SQLRepeatRecord.objects.get(couch_id=doc._id) - self.assertEqual(len(obj.attempts), len(doc.attempts)) - self.assertTrue(obj.attempts) - - def test_migrate_record_with_partial_sql_attempts(self): - doc, _ = self.create_repeat_record(unwrap_doc=False) - doc.save() # sync to SQL, but do not save attempts - # This attempt is saved in both Couch and SQL, which means there - # are three attempts in Couch and only one in SQL. - doc.add_attempt(RepeatRecordAttempt( - datetime=datetime.utcnow(), - success_response="good call", - succeeded=True, - )) - doc.save() # sync to SQL, but do not save attempts - assert len(doc.attempts) == 3, doc.attempts - with templog() as log, patch.object(transaction, "atomic", atomic_check): - call_command('populate_repeatrecords', log_path=log.path) - self.assertNotIn('has differences:', log.content) - obj = SQLRepeatRecord.objects.get(couch_id=doc._id) - self.assertEqual(len(obj.attempts), 3) - - def test_migrate_record_with_no_attempts(self): - doc, _ = self.create_repeat_record() - doc.pop("attempts") - doc_id = self.db.save_doc(doc)["id"] - with templog() as log, patch.object(transaction, "atomic", atomic_check): - call_command('populate_repeatrecords', log_path=log.path) - self.assertNotIn('has differences:', log.content) - obj = SQLRepeatRecord.objects.get(couch_id=doc_id) - self.assertFalse(obj.attempts) - - def test_fixup_failed_record_with_no_attempts(self): - when = datetime.utcnow().replace(year=2017, microsecond=0) - doc, _ = self.create_repeat_record() - doc.pop("attempts") - doc.pop("registered_on") - doc["succeeded"] = False - doc["registered_at"] = json_format_datetime(when) - doc["next_check"] = json_format_datetime(when) - doc["failure_reason"] = "A tree fell in the forest" - doc_id = self.db.save_doc(doc)["id"] - - with templog() as log: - from ..management.commands import populate_repeatrecords as mod - with patch.object(mod, "_attempt_to_preserve_failure_reason"): # produces diff - call_command('populate_repeatrecords', log_path=log.path) - self.assertIn(f'Doc "{doc_id}" has differences:\n', log.content) - self.assertIn("failure_reason: couch value 'A tree fell", log.content) - - fixup_log = log.path.parent / "fixup.log" - call_command('populate_repeatrecords', log_path=fixup_log, fixup_diffs=log.path) - with open(fixup_log) as fh: - self.assertNotIn("has differences:", fh.read()) - - obj = SQLRepeatRecord.objects.get(couch_id=doc_id) - attempt, = obj.attempts - self.assertEqual(attempt.created_at, when) - - def test_migrate_record_erroneous_next_check(self): - doc, _ = self.create_repeat_record() - doc.update(succeeded=True, next_check=datetime.utcnow().isoformat() + 'Z') - doc_id = self.db.save_doc(doc)["id"] - with templog() as log, patch.object(transaction, "atomic", atomic_check): - call_command('populate_repeatrecords', log_path=log.path) - self.assertNotIn('has differences:', log.content) - obj = SQLRepeatRecord.objects.get(couch_id=doc_id) - self.assertIsNone(obj.next_check) - - def test_migration_with_repeater_added_after_start(self): - doc, obj = 
self.create_repeat_record(unwrap_doc=False) - repeater3 = models.FormRepeater( - id=UUID(REPEATER_ID_3), - domain="test", - connection_settings_id=self.repeater1.connection_settings_id, - include_app_id_param=False, - ) - repeater3.save() - doc.repeater_id = repeater3.repeater_id - doc.save(sync_attempts=True) - with ( - templog() as log, - patch.object(Command, "get_ids_to_ignore", lambda *a: {}), - patch.object(transaction, "atomic", atomic_check), - ): - call_command('populate_repeatrecords', log_path=log.path) - assert not log.content, log.content # doc should have been implicitly ignored (already migrated) - - def test_migrate_domain(self): - with patch.object(self.repeater2, "domain", "other"), templog() as log: - docs = {} - for repeater in [self.repeater1, self.repeater2]: - doc, obj = self.create_repeat_record(unwrap_doc=False, repeater=repeater) - doc.save(sync_to_sql=False) - docs[repeater.domain] = doc - - call_command('populate_repeatrecords', domains=["other"], log_path=log.path) - self.assertIn(f'Created model for RepeatRecord with id {docs["other"]._id}\n', log.content) - self.assertNotIn(docs["test"]._id, log.content) - SQLRepeatRecord.objects.get(couch_id=docs["other"]._id) - with self.assertRaises(SQLRepeatRecord.DoesNotExist): - SQLRepeatRecord.objects.get(couch_id=docs["test"]._id) - - def test_verify_record_missing_fields(self): - doc, _ = self.create_repeat_record(unwrap_doc=False) - doc.succeeded = True - doc.save() - self.db.save_doc({ - "_id": doc._id, - "_rev": doc._rev, - "doc_type": "RepeatRecord", - "domain": "test", - "last_checked": "2015-02-20T13:25:25.655650Z", - "lock_date": None, - "next_check": None, - "payload_id": "00a7d361-474d-4cf3-9aed-f6204c2a0897", - "repeater_id": self.repeater1.id.hex, - "succeeded": True, - }) - with templog() as log, patch.object(transaction, "atomic", atomic_check): - call_command('populate_repeatrecords', log_path=log.path) - self.assertIn('has differences:', log.content) - - def diff(self, doc, obj): - return do_diff(Command, doc, obj) - - -def do_diff(Command, doc, obj): - result = Command.diff_couch_and_sql(doc, obj) - return [x for x in result if x is not None] - - -@contextmanager -def atomic_check(using=None, savepoint='ignored'): - with _atomic(using=using): - yield - connection.check_constraints() - - -_atomic = transaction.atomic - - -@contextmanager -def templog(): - with tempdir() as tmp: - yield Log(tmp) - - -class Log: - def __init__(self, tmp): - self.path = Path(tmp) / "log.txt" - - @cached_property - def content(self): - with self.path.open() as lines: - return "".join(lines) diff --git a/corehq/motech/repeaters/tests/test_data_registry_case_update_repeater.py b/corehq/motech/repeaters/tests/test_data_registry_case_update_repeater.py index 5194859281b6..153213f19df9 100644 --- a/corehq/motech/repeaters/tests/test_data_registry_case_update_repeater.py +++ b/corehq/motech/repeaters/tests/test_data_registry_case_update_repeater.py @@ -14,8 +14,7 @@ from corehq.apps.registry.tests.utils import create_registry_for_test, Invitation, Grant from corehq.apps.users.models import CommCareUser from corehq.motech.models import ConnectionSettings -from corehq.motech.repeaters.dbaccessors import delete_all_repeat_records -from corehq.motech.repeaters.models import DataRegistryCaseUpdateRepeater, SQLRepeatRecord +from corehq.motech.repeaters.models import DataRegistryCaseUpdateRepeater, RepeatRecord from corehq.motech.repeaters.repeater_generators import DataRegistryCaseUpdatePayloadGenerator from 
corehq.motech.repeaters.tests.test_data_registry_case_update_payload_generator import IntentCaseBuilder, \ DataRegistryUpdateForm @@ -81,9 +80,6 @@ def setUpClass(cls): domain=cls.target_domain ) - def tearDown(self): - delete_all_repeat_records() - def test_update_cases(self): builder1 = ( IntentCaseBuilder(self.registry_slug) @@ -151,4 +147,4 @@ def test_prevention_of_update_chaining(self): def repeat_records(cls, domain_name): # Enqueued repeat records have next_check set 48 hours in the future. later = datetime.utcnow() + timedelta(hours=48 + 1) - return SQLRepeatRecord.objects.filter(domain=domain_name, next_check__lt=later) + return RepeatRecord.objects.filter(domain=domain_name, next_check__lt=later) diff --git a/corehq/motech/repeaters/tests/test_dbaccessors.py b/corehq/motech/repeaters/tests/test_dbaccessors.py index 32ce867fd2e5..e99b609906ac 100644 --- a/corehq/motech/repeaters/tests/test_dbaccessors.py +++ b/corehq/motech/repeaters/tests/test_dbaccessors.py @@ -4,7 +4,7 @@ from django.test import TestCase from corehq.motech.repeaters.const import State -from corehq.motech.repeaters.models import ConnectionSettings, FormRepeater, SQLRepeatRecord +from corehq.motech.repeaters.models import ConnectionSettings, FormRepeater, RepeatRecord class TestRepeatRecordDBAccessors(TestCase): @@ -35,7 +35,7 @@ def setUpClass(cls): ) cls.payload_id_1 = uuid.uuid4().hex cls.payload_id_2 = uuid.uuid4().hex - failed = SQLRepeatRecord( + failed = RepeatRecord( domain=cls.domain, repeater_id=cls.repeater_id, registered_at=before, @@ -43,7 +43,7 @@ def setUpClass(cls): payload_id=cls.payload_id_1, state=State.Fail, ) - failed_hq_error = SQLRepeatRecord( + failed_hq_error = RepeatRecord( domain=cls.domain, repeater_id=cls.repeater_id, registered_at=before, @@ -51,42 +51,42 @@ def setUpClass(cls): payload_id=cls.payload_id_1, state=State.Fail, ) - success = SQLRepeatRecord( + success = RepeatRecord( domain=cls.domain, repeater_id=cls.repeater_id, registered_at=before, payload_id=cls.payload_id_2, state=State.Success, ) - pending = SQLRepeatRecord( + pending = RepeatRecord( domain=cls.domain, repeater_id=cls.repeater_id, registered_at=before, next_check=before, payload_id=cls.payload_id_2, ) - overdue = SQLRepeatRecord( + overdue = RepeatRecord( domain=cls.domain, repeater_id=cls.repeater_id, registered_at=before, next_check=before - timedelta(minutes=10), payload_id=cls.payload_id_2, ) - cancelled = SQLRepeatRecord( + cancelled = RepeatRecord( domain=cls.domain, repeater_id=cls.repeater_id, registered_at=before, payload_id=cls.payload_id_2, state=State.Cancelled, ) - empty = SQLRepeatRecord( + empty = RepeatRecord( domain=cls.domain, repeater_id=cls.repeater_id, registered_at=before, payload_id=cls.payload_id_2, state=State.Empty, ) - other_id = SQLRepeatRecord( + other_id = RepeatRecord( domain=cls.domain, repeater_id=cls.other_id, registered_at=before, @@ -104,45 +104,45 @@ def setUpClass(cls): empty, other_id, ] - SQLRepeatRecord.objects.bulk_create(cls.records) + RepeatRecord.objects.bulk_create(cls.records) def test_get_paged_repeat_records(self): - records = SQLRepeatRecord.objects.page(self.domain, 0, 2) + records = RepeatRecord.objects.page(self.domain, 0, 2) self.assertEqual(len(records), 2) def test_get_paged_repeat_records_with_repeater_id(self): - records = SQLRepeatRecord.objects.page(self.domain, 0, 2, repeater_id=self.other_id) + records = RepeatRecord.objects.page(self.domain, 0, 2, repeater_id=self.other_id) self.assertEqual(len(records), 1) def 
test_get_paged_repeat_records_with_state(self): - records = SQLRepeatRecord.objects.page(self.domain, 0, 10, state=State.Pending) + records = RepeatRecord.objects.page(self.domain, 0, 10, state=State.Pending) self.assertEqual(len(records), 3) def test_get_paged_repeat_records_wrong_domain(self): - records = SQLRepeatRecord.objects.page('wrong-domain', 0, 2) + records = RepeatRecord.objects.page('wrong-domain', 0, 2) self.assertEqual(len(records), 0) def test_get_all_paged_repeat_records(self): - records = SQLRepeatRecord.objects.page(self.domain, 0, 10) + records = RepeatRecord.objects.page(self.domain, 0, 10) self.assertEqual(len(records), len(self.records)) # get all the records that were created def test_get_all_repeat_records_by_domain_wrong_domain(self): - records = list(SQLRepeatRecord.objects.iterate("wrong-domain")) + records = list(RepeatRecord.objects.iterate("wrong-domain")) self.assertEqual(len(records), 0) def test_get_all_repeat_records_by_domain_with_repeater_id(self): - records = list(SQLRepeatRecord.objects.iterate(self.domain, repeater_id=self.repeater_id)) + records = list(RepeatRecord.objects.iterate(self.domain, repeater_id=self.repeater_id)) self.assertEqual(len(records), 7) def test_get_all_repeat_records_by_domain(self): - records = list(SQLRepeatRecord.objects.iterate(self.domain)) + records = list(RepeatRecord.objects.iterate(self.domain)) self.assertEqual(len(records), len(self.records)) def test_get_repeat_records_by_payload_id(self): - id_1_records = list(SQLRepeatRecord.objects.filter(domain=self.domain, payload_id=self.payload_id_1)) + id_1_records = list(RepeatRecord.objects.filter(domain=self.domain, payload_id=self.payload_id_1)) self.assertEqual(len(id_1_records), 2) self.assertItemsEqual([r.id for r in id_1_records], [r.id for r in self.records[:2]]) - id_2_records = list(SQLRepeatRecord.objects.filter(domain=self.domain, payload_id=self.payload_id_2)) + id_2_records = list(RepeatRecord.objects.filter(domain=self.domain, payload_id=self.payload_id_2)) self.assertEqual(len(id_2_records), 6) self.assertItemsEqual([r.id for r in id_2_records], [r.id for r in self.records[2:]]) diff --git a/corehq/motech/repeaters/tests/test_models.py b/corehq/motech/repeaters/tests/test_models.py index fb39c12b3a19..7723cac91884 100644 --- a/corehq/motech/repeaters/tests/test_models.py +++ b/corehq/motech/repeaters/tests/test_models.py @@ -15,8 +15,6 @@ from nose.tools import assert_in, assert_raises -from testil import eq - from corehq.motech.models import ConnectionSettings from corehq.util.test_utils import _create_case @@ -33,11 +31,10 @@ from ..models import ( FormRepeater, Repeater, - SQLRepeatRecord, + RepeatRecord, format_response, get_all_repeater_types, is_response, - is_sql_id, ) DOMAIN = 'test-domain' @@ -62,13 +59,6 @@ def setUp(self): ) self.repeater.save() - @classmethod - def tearDownClass(cls): - # TODO remove when RepeatRecords are no longer in Couch - super().tearDownClass() - from ..dbaccessors import delete_all_repeat_records - delete_all_repeat_records() - class TestSoftDeleteRepeaters(RepeaterTestCase): def setUp(self) -> None: @@ -107,7 +97,7 @@ def test_repeater_name(self): self.assertEqual(self.repeater.name, self.conn.name) -class TestSQLRepeatRecordOrdering(RepeaterTestCase): +class TestRepeatRecordOrdering(RepeaterTestCase): def setUp(self): super().setUp() @@ -207,16 +197,12 @@ def test_all_ready_next_past(self): @contextmanager def make_repeat_record(repeater, state): - repeat_record = repeater.repeat_records.create( + yield 
repeater.repeat_records.create( domain=repeater.domain, payload_id=str(uuid4()), state=state, registered_at=timezone.now() ) - try: - yield repeat_record - finally: - repeat_record._migration_get_couch_object().delete() @contextmanager @@ -411,13 +397,7 @@ def test_add_payload_exception_attempt(self): self.assertEqual(self.repeat_record.attempts[0].traceback, tb_str) def test_cached_attempts(self): - def clear_attempts_cache(): - # can be removed with RepeatRecord (Couch model) - # the cache is populated when the couch record is saved - self.repeat_record.attempts._result_cache = None - self.repeat_record.add_client_failure_attempt(message="Fail") - clear_attempts_cache() with self.assertNumQueries(1): self.assertEqual(len(self.repeat_record.attempts), 1) @@ -425,7 +405,6 @@ def clear_attempts_cache(): self.assertEqual(len(self.repeat_record.attempts), 1) self.repeat_record.add_client_failure_attempt(message="Fail") - clear_attempts_cache() with self.assertNumQueries(1): self.assertEqual(len(self.repeat_record.attempts), 2) @@ -475,7 +454,7 @@ def test_used_connection_setting_cannot_be_deleted(self): def test_attempt_forward_now_kwargs(): - rr = SQLRepeatRecord() + rr = RepeatRecord() with assert_raises(TypeError): rr.attempt_forward_now(True) @@ -527,7 +506,7 @@ def test_retry(self, process, retry_process): def test_optimistic_lock(self, process, retry_process): rec = self.new_record() - two = SQLRepeatRecord.objects.get(id=rec.id) + two = RepeatRecord.objects.get(id=rec.id) two.next_check = datetime.utcnow() - timedelta(days=1) two.save() @@ -544,7 +523,7 @@ def assert_not_called(self, *tasks): raise AssertionError(f"{task} unexpectedly called:\n{err}") def new_record(self, next_check=before_now, state=RECORD_PENDING_STATE): - rec = SQLRepeatRecord( + rec = RepeatRecord( domain="test", repeater_id=self.repeater.repeater_id, payload_id="c0ffee", @@ -564,9 +543,8 @@ def test_register(self): domain=DOMAIN, case_id=case_id, case_type='some_case', owner_id='abcd' ) repeat_record = self.repeater.register(payload, fire_synchronously=True) - self.addCleanup(repeat_record._migration_get_couch_object().delete) self.assertEqual(repeat_record.payload_id, payload.get_id) - all_records = list(SQLRepeatRecord.objects.iterate(DOMAIN)) + all_records = list(RepeatRecord.objects.iterate(DOMAIN)) self.assertEqual(len(all_records), 1) self.assertEqual(all_records[0].id, repeat_record.id) @@ -576,7 +554,6 @@ def test_send_request(self): domain=DOMAIN, case_id=case_id, case_type='some_case', owner_id='abcd' ) repeat_record = self.repeater.register(payload, fire_synchronously=True) - self.addCleanup(repeat_record._migration_get_couch_object().delete) from corehq.motech.repeaters.tests.test_models_slow import ResponseMock resp = ResponseMock(status_code=200, reason='OK') # Basic test checks if send_request is called @@ -617,7 +594,7 @@ def test_count_by_repeater_and_state(self): self.make_records(2, state=State.Fail) self.make_records(3, state=State.Cancelled) self.make_records(5, state=State.Success) - counts = SQLRepeatRecord.objects.count_by_repeater_and_state(domain="test") + counts = RepeatRecord.objects.count_by_repeater_and_state(domain="test") rid = self.repeater.id self.assertEqual(counts[rid][State.Pending], 1) @@ -638,10 +615,10 @@ def test_count_overdue(self): self.new_record(next_check=now - timedelta(minutes=15)) self.new_record(next_check=now - timedelta(minutes=5)) self.new_record(next_check=None, state=State.Success) - overdue = SQLRepeatRecord.objects.count_overdue() + overdue = 
RepeatRecord.objects.count_overdue() self.assertEqual(overdue, 3) - iter_partition = SQLRepeatRecord.objects.iter_partition + iter_partition = RepeatRecord.objects.iter_partition def test_one_partition(self): iter_partition = type(self).iter_partition @@ -675,7 +652,7 @@ def test_get_domains_with_records(self): self.new_record(domain='b') self.new_record(domain='c') self.assertEqual( - set(SQLRepeatRecord.objects.get_domains_with_records()), + set(RepeatRecord.objects.get_domains_with_records()), {'a', 'b', 'c'}, ) @@ -684,12 +661,12 @@ def test_get_domains_with_records_with_filter(self): self.new_record(domain='alice') self.new_record(domain='carl') self.assertEqual( - set(SQLRepeatRecord.objects.get_domains_with_records().filter(domain__startswith="al")), + set(RepeatRecord.objects.get_domains_with_records().filter(domain__startswith="al")), {'alex', 'alice'}, ) def new_record(self, next_check=before_now, state=State.Pending, domain="test"): - return SQLRepeatRecord.objects.create( + return RepeatRecord.objects.create( domain=domain, repeater_id=self.repeater.repeater_id, payload_id="c0ffee", @@ -701,7 +678,7 @@ def new_record(self, next_check=before_now, state=State.Pending, domain="test"): def make_records(self, n, state=State.Pending): now = timezone.now() - timedelta(seconds=10) is_pending = state in [State.Pending, State.Fail] - records = SQLRepeatRecord.objects.bulk_create(SQLRepeatRecord( + records = RepeatRecord.objects.bulk_create(RepeatRecord( domain="test", repeater=self.repeater, payload_id="c0ffee", @@ -711,10 +688,6 @@ def make_records(self, n, state=State.Pending): ) for i in range(n)) return {r.id for r in records} - def tearDown(self): - from ..dbaccessors import delete_all_repeat_records - delete_all_repeat_records() - class TestRepeatRecordMethods(TestCase): @@ -724,13 +697,12 @@ def test_repeater_returns_active_repeater(self): connection_settings=self.conn_settings, is_deleted=False ) - repeat_record = SQLRepeatRecord.objects.create( + repeat_record = RepeatRecord.objects.create( domain=self.domain, payload_id='abc123', registered_at=datetime.utcnow(), repeater_id=repeater.repeater_id ) - self.addCleanup(repeat_record._migration_get_couch_object().delete) self.assertIsNotNone(repeat_record.repeater) @@ -740,18 +712,17 @@ def test_repeater_returns_deleted_repeater(self): connection_settings=self.conn_settings, is_deleted=True ) - repeat_record = SQLRepeatRecord.objects.create( + repeat_record = RepeatRecord.objects.create( domain=self.domain, payload_id='abc123', registered_at=datetime.utcnow(), repeater_id=repeater.repeater_id ) - self.addCleanup(repeat_record._migration_get_couch_object().delete) self.assertTrue(repeat_record.repeater.is_deleted) def test_repeater_raises_if_not_found(self): - repeat_record = SQLRepeatRecord( + repeat_record = RepeatRecord( domain=self.domain, payload_id='abc123', registered_at=datetime.utcnow(), @@ -777,7 +748,7 @@ def setUpClass(cls): def test_requeue(self): now = datetime.utcnow() - record = SQLRepeatRecord.objects.create( + record = RepeatRecord.objects.create( domain="test", repeater_id=self.repeater.id.hex, payload_id="abc123", @@ -790,7 +761,7 @@ def test_requeue(self): self.assertLessEqual(record.next_check, datetime.utcnow()) def test_get_payload(self): - record = SQLRepeatRecord( + record = RepeatRecord( domain="test", repeater_id=self.repeater.id.hex, payload_id="abc123", @@ -802,7 +773,7 @@ def test_get_payload(self): def test_postpone_by(self): now = datetime.utcnow() hour = timedelta(hours=1) - record = SQLRepeatRecord( 
+ record = RepeatRecord( domain="test", repeater_id=self.repeater.id.hex, payload_id="abc123", @@ -817,64 +788,50 @@ class TestRepeatRecordMethodsNoDB(SimpleTestCase): domain = 'repeat-record-tests' def test_exceeded_max_retries_returns_false_if_fewer_tries_than_possible(self): - repeat_record = SQLRepeatRecord( + repeat_record = RepeatRecord( domain=self.domain, payload_id='abc123', registered_at=datetime.utcnow(), state=State.Fail ) - with patch.object(SQLRepeatRecord, "num_attempts", 0), \ + with patch.object(RepeatRecord, "num_attempts", 0), \ patch.object(repeat_record, "max_possible_tries", 1): self.assertFalse(repeat_record.exceeded_max_retries) def test_exceeded_max_retries_returns_true_if_equal(self): - repeat_record = SQLRepeatRecord( + repeat_record = RepeatRecord( domain=self.domain, payload_id='abc123', registered_at=datetime.utcnow(), state=State.Fail ) - with patch.object(SQLRepeatRecord, "num_attempts", 1), \ + with patch.object(RepeatRecord, "num_attempts", 1), \ patch.object(repeat_record, "max_possible_tries", 1): self.assertTrue(repeat_record.exceeded_max_retries) def test_exceeded_max_retries_returns_true_if_more_tries_than_possible(self): - repeat_record = SQLRepeatRecord( + repeat_record = RepeatRecord( domain=self.domain, payload_id='abc123', registered_at=datetime.utcnow(), state=State.Fail ) - with patch.object(SQLRepeatRecord, "num_attempts", 2), \ + with patch.object(RepeatRecord, "num_attempts", 2), \ patch.object(repeat_record, "max_possible_tries", 1): self.assertTrue(repeat_record.exceeded_max_retries) def test_exceeded_max_retries_returns_false_if_not_failure_state( self): - repeat_record = SQLRepeatRecord( + repeat_record = RepeatRecord( domain=self.domain, payload_id='abc123', registered_at=datetime.utcnow(), state=State.Success, ) - with patch.object(SQLRepeatRecord, "num_attempts", 2), \ + with patch.object(RepeatRecord, "num_attempts", 2), \ patch.object(repeat_record, "max_possible_tries", 1): self.assertFalse(repeat_record.exceeded_max_retries) - - -def test_is_sql_id(): - def test(value, expect): - eq(is_sql_id(value), expect, f"value was: {value!r}") - - yield test, 1234, True - yield test, '1234', True - yield test, 'b6859ae05fd94dccbc3dfd25cdc6cb2c', False - yield test, 'b6859ae0-5fd9-4dcc-bc3d-fd25cdc6cb2c', False - - # numeric str is considered UUID if number of digits is 32 - yield test, '40400000000000000000000000000404', False - yield test, '40400000-0000-0000-0000-000000000404', False diff --git a/corehq/motech/repeaters/tests/test_repeater.py b/corehq/motech/repeaters/tests/test_repeater.py index 620440114c09..9b83728cc27f 100644 --- a/corehq/motech/repeaters/tests/test_repeater.py +++ b/corehq/motech/repeaters/tests/test_repeater.py @@ -41,7 +41,6 @@ MIN_RETRY_WAIT, State, ) -from corehq.motech.repeaters.dbaccessors import delete_all_repeat_records from corehq.motech.repeaters.models import ( CaseRepeater, DataSourceRepeater, @@ -49,7 +48,7 @@ LocationRepeater, Repeater, ShortFormRepeater, - SQLRepeatRecord, + RepeatRecord, UserRepeater, _get_retry_interval, format_response, @@ -135,7 +134,7 @@ def post_xml(cls, xml, domain_name): def repeat_records(cls, domain_name): # Enqueued repeat records have next_check set 48 hours in the future. 
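         # The extra hour of slack below also catches records that were just
         # enqueued, whose next_check was bumped to roughly now + 48 hours.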
diff --git a/corehq/motech/repeaters/tests/test_repeater.py b/corehq/motech/repeaters/tests/test_repeater.py
index 620440114c09..9b83728cc27f 100644
--- a/corehq/motech/repeaters/tests/test_repeater.py
+++ b/corehq/motech/repeaters/tests/test_repeater.py
@@ -41,7 +41,6 @@
     MIN_RETRY_WAIT,
     State,
 )
-from corehq.motech.repeaters.dbaccessors import delete_all_repeat_records
 from corehq.motech.repeaters.models import (
     CaseRepeater,
     DataSourceRepeater,
@@ -49,7 +48,7 @@
     LocationRepeater,
     Repeater,
     ShortFormRepeater,
-    SQLRepeatRecord,
+    RepeatRecord,
     UserRepeater,
     _get_retry_interval,
     format_response,
@@ -135,7 +134,7 @@ def post_xml(cls, xml, domain_name):
     def repeat_records(cls, domain_name):
         # Enqueued repeat records have next_check set 48 hours in the future.
         later = datetime.utcnow() + timedelta(hours=48 + 1)
-        return SQLRepeatRecord.objects.filter(domain=domain_name, next_check__lt=later)
+        return RepeatRecord.objects.filter(domain=domain_name, next_check__lt=later)
 
 
 class RepeaterTest(BaseRepeaterTest):
 
@@ -172,12 +171,7 @@ def setUp(self):
             self.initial_fire_call_count = mock_fire.call_count
 
     def tearDown(self):
-        self.case_repeater.delete()
-        self.case_connx.delete()
-        self.form_repeater.delete()
-        self.form_connx.delete()
         FormProcessorTestUtils.delete_all_cases_forms_ledgers(self.domain)
-        delete_all_repeat_records()
         super(RepeaterTest, self).tearDown()
 
     def repeat_records(self):
@@ -233,13 +227,13 @@ def now():
         # Enqueued repeat records have next_check incremented by 48 hours
         next_check_time = now() + timedelta(minutes=60) + timedelta(hours=48)
-        repeat_records = SQLRepeatRecord.objects.filter(
+        repeat_records = RepeatRecord.objects.filter(
             domain=self.domain,
             next_check__lt=now() + timedelta(minutes=15),
         )
         self.assertEqual(len(repeat_records), 0)
 
-        repeat_records = SQLRepeatRecord.objects.filter(
+        repeat_records = RepeatRecord.objects.filter(
             domain=self.domain,
             next_check__lt=next_check_time,
         )
@@ -247,7 +241,7 @@ def test_update_failure_next_check(self):
         now = datetime.utcnow()
-        record = SQLRepeatRecord.objects.create(
+        record = RepeatRecord.objects.create(
             domain=self.domain,
             repeater_id=self.case_repeater.repeater_id,
             registered_at=now,
@@ -328,7 +322,7 @@ def test_repeat_record_status_check(self):
         # all records should be in SUCCESS state after force try
         for repeat_record in self.repeat_records():
             self.assertEqual(repeat_record.state, State.Success)
-            self.assertEqual(repeat_record.overall_tries, 1)
+            self.assertEqual(repeat_record.num_attempts, 1)
 
         # not trigger records succeeded triggered after cancellation
         with patch('corehq.motech.repeaters.models.simple_request') as mock_fire:
@@ -357,14 +351,14 @@ def test_automatic_cancel_repeat_record(self):
         case = CommCareCase.objects.get_case(CASE_ID, self.domain)
         rr = self.case_repeater.register(case)
         # Fetch the revision that was updated:
-        repeat_record = SQLRepeatRecord.objects.get(id=rr.id)
-        self.assertEqual(1, repeat_record.overall_tries)
+        repeat_record = RepeatRecord.objects.get(id=rr.id)
+        self.assertEqual(1, repeat_record.num_attempts)
         with patch('corehq.motech.repeaters.models.simple_request', side_effect=Exception('Boom!')):
-            for __ in range(repeat_record.max_possible_tries - repeat_record.overall_tries):
+            for __ in range(repeat_record.max_possible_tries - repeat_record.num_attempts):
                 repeat_record.fire()
         self.assertEqual(repeat_record.state, State.Cancelled)
         repeat_record.requeue()
-        self.assertEqual(repeat_record.max_possible_tries - repeat_record.overall_tries, MAX_BACKOFF_ATTEMPTS)
+        self.assertEqual(repeat_record.max_possible_tries - repeat_record.num_attempts, MAX_BACKOFF_ATTEMPTS)
         self.assertNotEqual(None, repeat_record.next_check)
 
     def test_check_repeat_records_ignores_future_retries_using_multiple_partitions(self):
@@ -401,7 +395,7 @@ def test_repeat_record_status_check_using_multiple_partitions(self):
         # all records should be in SUCCESS state after force try
         for repeat_record in self.repeat_records():
             self.assertEqual(repeat_record.state, State.Success)
-            self.assertEqual(repeat_record.overall_tries, 1)
+            self.assertEqual(repeat_record.num_attempts, 1)
 
         # not trigger records succeeded triggered after cancellation
         with patch('corehq.motech.repeaters.models.simple_request') as mock_fire, \
@@ -451,15 +445,8 @@ def setUpClass(cls):
         )
         cls.repeater.save()
 
-    @classmethod
-    def tearDownClass(cls):
-        cls.repeater.delete()
-        cls.connx.delete()
-        super().tearDownClass()
-
     def tearDown(self):
         FormProcessorTestUtils.delete_all_cases_forms_ledgers(self.domain)
-        delete_all_repeat_records()
         super().tearDown()
 
     def test_get_payload(self):
@@ -486,15 +473,8 @@ def setUpClass(cls):
         )
         cls.repeater.save()
 
-    @classmethod
-    def tearDownClass(cls):
-        cls.repeater.delete()
-        cls.connx.delete()
-        super(FormRepeaterTest, cls).tearDownClass()
-
     def tearDown(self):
         FormProcessorTestUtils.delete_all_cases(self.domain)
-        delete_all_repeat_records()
        super(FormRepeaterTest, self).tearDown()
 
     def test_payload(self):
@@ -521,15 +501,8 @@ def setUpClass(cls):
         )
         cls.repeater.save()
 
-    @classmethod
-    def tearDownClass(cls):
-        cls.repeater.delete()
-        cls.connx.delete()
-        super().tearDownClass()
-
     def tearDown(self):
         FormProcessorTestUtils.delete_all_cases(self.domain)
-        delete_all_repeat_records()
         super().tearDown()
 
     def test_payload(self):
@@ -561,9 +534,6 @@ def setUp(self):
 
     def tearDown(self):
         FormProcessorTestUtils.delete_all_cases(self.domain)
-        delete_all_repeat_records()
-        self.repeater.delete()
-        self.connx.delete()
         super().tearDown()
 
     def test_case_close_format(self):
@@ -697,10 +667,7 @@ def setUp(self):
         self.repeater.save()
 
     def tearDown(self):
-        self.repeater.delete()
-        self.connx.delete()
         FormProcessorTestUtils.delete_all_cases_forms_ledgers(self.domain)
-        delete_all_repeat_records()
         super().tearDown()
 
     def test_get_payload_exception(self):
@@ -717,7 +684,7 @@ def test_payload_exception(self):
         with patch.object(Repeater, "get_payload", side_effect=Exception('Payload error')):
             rr = self.repeater.register(case)
 
-        repeat_record = SQLRepeatRecord.objects.get(id=rr.id)
+        repeat_record = RepeatRecord.objects.get(id=rr.id)
         self.assertEqual(repeat_record.state, State.Cancelled)
         self.assertEqual(repeat_record.failure_reason, "Payload error")
 
@@ -727,7 +694,7 @@ def test_failure(self):
             rr = self.repeater.register(case)  # calls repeat_record.fire()
 
         # Fetch the repeat_record revision that was updated
-        repeat_record = SQLRepeatRecord.objects.get(id=rr.id)
+        repeat_record = RepeatRecord.objects.get(id=rr.id)
         self.assertEqual(repeat_record.failure_reason, 'Boom!')
         self.assertEqual(repeat_record.state, State.Fail)
 
@@ -736,7 +703,7 @@ def test_unexpected_failure(self):
         with patch('corehq.motech.repeaters.models.simple_request', side_effect=Exception('Boom!')):
             rr = self.repeater.register(case)
 
-        repeat_record = SQLRepeatRecord.objects.get(id=rr.id)
+        repeat_record = RepeatRecord.objects.get(id=rr.id)
         self.assertEqual(repeat_record.failure_reason, 'Internal Server Error')
         self.assertEqual(repeat_record.state, State.Fail)
 
@@ -747,7 +714,7 @@ def test_success(self):
             mock_simple_post.return_value.status_code = 200
             rr = self.repeater.register(case)
 
-        repeat_record = SQLRepeatRecord.objects.get(id=rr.id)
+        repeat_record = RepeatRecord.objects.get(id=rr.id)
         self.assertEqual(repeat_record.state, State.Success)
 
     def test_empty(self):
@@ -757,7 +724,7 @@ def test_empty(self):
             mock_simple_post.return_value.status_code = 204
             rr = self.repeater.register(case)
 
-        repeat_record = SQLRepeatRecord.objects.get(id=rr.id)
+        repeat_record = RepeatRecord.objects.get(id=rr.id)
         self.assertEqual(repeat_record.state, State.Empty)
 
 
@@ -791,16 +758,13 @@ def setUp(self):
         self.repeater.save()
 
     def tearDown(self):
-        self.repeater.delete()
-        self.connx.delete()
         FormProcessorTestUtils.delete_all_cases_forms_ledgers(self.domain)
-        delete_all_repeat_records()
 
     def test_ignore_document(self):
         """
         When get_payload raises IgnoreDocument, fire should call update_success
         """
-        repeat_records = SQLRepeatRecord.objects.filter(domain=self.domain)
+        repeat_records = RepeatRecord.objects.filter(domain=self.domain)
         for repeat_record_ in repeat_records:
             repeat_record_.fire()
 
@@ -842,10 +806,7 @@ def setUp(self):
         self.repeater.save()
 
     def tearDown(self):
-        self.repeater.delete()
-        self.connx.delete()
         FormProcessorTestUtils.delete_all_cases_forms_ledgers(self.domain)
-        delete_all_repeat_records()
         super().tearDown()
 
     def test_new_format_same_name(self):
@@ -878,7 +839,7 @@ def test_new_format_payload(self):
             mock_manager.return_value = 'MockAuthManager'
             rr = self.repeater.register(case)
-            repeat_record = SQLRepeatRecord.objects.get(id=rr.id)
+            repeat_record = RepeatRecord.objects.get(id=rr.id)
             headers = self.repeater.get_headers(repeat_record)
             mock_request.assert_called_with(
                 self.domain,
@@ -931,16 +892,10 @@ def tearDownClass(cls):
         clear_plan_version_cache()
         super().tearDownClass()
 
-    def tearDown(self):
-        super().tearDown()
-        delete_all_repeat_records()
-        self.repeater.delete()
-        self.connx.delete()
-
     def repeat_records(self):
         # Enqueued repeat records have next_check set 48 hours in the future.
         later = datetime.utcnow() + timedelta(hours=48 + 1)
-        return SQLRepeatRecord.objects.filter(domain=self.domain, next_check__lt=later)
+        return RepeatRecord.objects.filter(domain=self.domain, next_check__lt=later)
 
     def make_user(self, username):
         user = CommCareUser.create(
@@ -1012,14 +967,10 @@ def tearDownClass(cls):
         clear_plan_version_cache()
         super().tearDownClass()
 
-    def tearDown(self):
-        super().tearDown()
-        delete_all_repeat_records()
-
     def repeat_records(self):
         # Enqueued repeat records have next_check set 48 hours in the future.
         later = datetime.utcnow() + timedelta(hours=48 + 1)
-        return SQLRepeatRecord.objects.filter(domain=self.domain, next_check__lt=later)
+        return RepeatRecord.objects.filter(domain=self.domain, next_check__lt=later)
 
     def make_location(self, name):
         location = SQLLocation.objects.create(
@@ -1080,16 +1031,13 @@ def setUp(self):
         self.repeater = Repeater.objects.get(id=self.repeater.id)
 
     def tearDown(self):
-        self.repeater.delete()
-        self.connx.delete()
         FormProcessorTestUtils.delete_all_cases_forms_ledgers(self.domain)
-        delete_all_repeat_records()
         super(TestRepeaterPause, self).tearDown()
 
     def test_trigger_when_paused(self):
         # not paused
-        with patch.object(SQLRepeatRecord, 'fire') as mock_fire:
-            with patch.object(SQLRepeatRecord, 'postpone_by') as mock_postpone_fire:
+        with patch.object(RepeatRecord, 'fire') as mock_fire:
+            with patch.object(RepeatRecord, 'postpone_by') as mock_postpone_fire:
                 # calls _process_repeat_record():
                 self.repeat_record = self.repeater.register(CommCareCase.objects.get_case(CASE_ID, self.domain))
                 self.assertEqual(mock_fire.call_count, 1)
@@ -1098,7 +1046,7 @@
                 # paused
                 self.repeater.pause()
                 # re fetch repeat record
-                self.repeat_record = SQLRepeatRecord.objects.get(id=self.repeat_record.id)
+                self.repeat_record = RepeatRecord.objects.get(id=self.repeat_record.id)
                 _process_repeat_record(self.repeat_record)
                 self.assertEqual(mock_fire.call_count, 1)
                 self.assertEqual(mock_postpone_fire.call_count, 1)
@@ -1106,7 +1054,7 @@
                 # resumed
                 self.repeater.resume()
                 # re fetch repeat record
-                self.repeat_record = SQLRepeatRecord.objects.get(id=self.repeat_record.id)
+                self.repeat_record = RepeatRecord.objects.get(id=self.repeat_record.id)
                 _process_repeat_record(self.repeat_record)
                 self.assertEqual(mock_fire.call_count, 2)
                 self.assertEqual(mock_postpone_fire.call_count, 1)
@@ -1131,15 +1079,14 @@ def setUp(self):
 
     def tearDown(self):
         FormProcessorTestUtils.delete_all_cases_forms_ledgers(self.domain)
-        delete_all_repeat_records()
         super().tearDown()
 
     def test_trigger_when_deleted(self):
         self.repeater.retire()
 
-        with patch.object(SQLRepeatRecord, 'fire') as mock_fire:
+        with patch.object(RepeatRecord, 'fire') as mock_fire:
             repeat_record = self.repeater.register(CommCareCase.objects.get_case(CASE_ID, self.domain))
-            repeat_record = SQLRepeatRecord.objects.get(id=repeat_record.id)
+            repeat_record = RepeatRecord.objects.get(id=repeat_record.id)
             _process_repeat_record(repeat_record)
             self.assertEqual(mock_fire.call_count, 0)
             self.assertEqual(repeat_record.state, State.Cancelled)
@@ -1321,7 +1268,7 @@ def test_datasource_is_subscribed_to(self):
 
     def test_payload_format(self):
         sample_doc, expected_indicators = self._create_log_and_repeat_record()
         later = datetime.utcnow() + timedelta(hours=50)
-        repeat_record = SQLRepeatRecord.objects.filter(domain=self.domain, next_check__lt=later).first()
+        repeat_record = RepeatRecord.objects.filter(domain=self.domain, next_check__lt=later).first()
         json_payload = self.repeater.get_payload(repeat_record)
         payload = json.loads(json_payload)
 
@@ -1351,12 +1298,6 @@ def _create_log_and_repeat_record(self):
         expected_indicators = json.loads(json_indicators)
         return sample_doc, expected_indicators
 
-    def tearDown(self):
-        delete_all_repeat_records()
-        self.repeater.delete()
-        self.connx.delete()
-        super().tearDown()
-
 
 def fromisoformat(isoformat):
     """
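Note: throughout test_repeater.py the lookup pattern is the same: a repeater registers a payload, and the test re-fetches the record through the ORM. A small sketch of the two recurring idioms, under the assumption of a saved repeater and domain as in the tests above; the helper names enqueued_records and refetch are illustrative, not part of this change:

    from datetime import datetime, timedelta
    from corehq.motech.repeaters.models import RepeatRecord

    def enqueued_records(domain_name):
        # Mirrors the repeat_records() helpers above: enqueued records have
        # next_check pushed 48 hours ahead, so look one hour past that window.
        later = datetime.utcnow() + timedelta(hours=48 + 1)
        return RepeatRecord.objects.filter(domain=domain_name, next_check__lt=later)

    def refetch(rr):
        # register() fires the record synchronously in these tests, so the
        # tests reload it to see the updated state and num_attempts.
        return RepeatRecord.objects.get(id=rr.id)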
diff --git a/corehq/motech/repeaters/tests/test_tasks.py b/corehq/motech/repeaters/tests/test_tasks.py
index 197438841adf..ba6812121567 100644
--- a/corehq/motech/repeaters/tests/test_tasks.py
+++ b/corehq/motech/repeaters/tests/test_tasks.py
@@ -13,8 +13,7 @@
     TestFormMetadata,
 )
 from corehq.motech.models import ConnectionSettings, RequestLog
-from corehq.motech.repeaters.dbaccessors import delete_all_repeat_records
-from corehq.motech.repeaters.models import FormRepeater, SQLRepeatRecord, Repeater
+from corehq.motech.repeaters.models import FormRepeater, RepeatRecord, Repeater
 from corehq.motech.repeaters.tasks import (
     _process_repeat_record,
     delete_old_request_logs,
@@ -81,7 +80,6 @@ def setUpClass(cls):
             name='Test API',
             url="http://localhost/api/"
         )
-        cls.addClassCleanup(delete_all_repeat_records)
 
     def setUp(self):
         self.repeater = FormRepeater.objects.create(
@@ -145,7 +143,7 @@ def form_context(form_ids):
 
 class TestProcessRepeatRecord(TestCase):
 
     def test_returns_if_record_is_cancelled(self):
-        repeat_record = SQLRepeatRecord(
+        repeat_record = RepeatRecord(
             domain=self.domain,
             payload_id='abc123',
             registered_at=datetime.utcnow(),
@@ -162,7 +160,7 @@ def test_returns_if_record_is_cancelled(self):
     def test_cancels_and_returns_if_domain_cannot_forward(self):
         self.mock_domain_can_forward.return_value = False
 
-        repeat_record = SQLRepeatRecord(
+        repeat_record = RepeatRecord(
             domain=self.domain,
             payload_id='abc123',
             registered_at=datetime.utcnow(),
@@ -171,13 +169,13 @@ def test_cancels_and_returns_if_domain_cannot_forward(self):
 
         _process_repeat_record(repeat_record)
 
-        fetched_repeat_record = SQLRepeatRecord.objects.get(id=repeat_record.id)
+        fetched_repeat_record = RepeatRecord.objects.get(id=repeat_record.id)
         self.assertEqual(fetched_repeat_record.state, State.Cancelled)
         self.assertEqual(self.mock_fire.call_count, 0)
         self.assertEqual(self.mock_postpone_by.call_count, 0)
 
     def test_cancels_and_returns_if_repeat_record_exceeds_max_retries(self):
-        repeat_record = SQLRepeatRecord(
+        repeat_record = RepeatRecord(
             domain=self.domain,
             payload_id='abc123',
             registered_at=datetime.utcnow(),
@@ -185,11 +183,11 @@ def test_cancels_and_returns_if_repeat_record_exceeds_max_retries(self):
             state=State.Fail,
         )
 
-        with patch.object(SQLRepeatRecord, "num_attempts", 1), \
+        with patch.object(RepeatRecord, "num_attempts", 1), \
                 patch.object(repeat_record, "max_possible_tries", 1):
             _process_repeat_record(repeat_record)
 
-        fetched_repeat_record = SQLRepeatRecord.objects.get(id=repeat_record.id)
+        fetched_repeat_record = RepeatRecord.objects.get(id=repeat_record.id)
         self.assertEqual(fetched_repeat_record.state, State.Cancelled)
         self.assertEqual(self.mock_fire.call_count, 0)
         self.assertEqual(self.mock_postpone_by.call_count, 0)
@@ -201,7 +199,7 @@ def test_deletes_repeat_record_cancels_and_returns_if_repeater_deleted(self):
             is_deleted=True
         )
 
-        repeat_record = SQLRepeatRecord(
+        repeat_record = RepeatRecord(
             domain=self.domain,
             payload_id='abc123',
             registered_at=datetime.utcnow(),
@@ -210,9 +208,8 @@ def test_deletes_repeat_record_cancels_and_returns_if_repeater_deleted(self):
 
         _process_repeat_record(repeat_record)
 
-        fetched_repeat_record = repeat_record._migration_get_couch_object()
-        self.assertEqual(fetched_repeat_record.doc_type, 'RepeatRecord-Deleted')
-        self.assertEqual(fetched_repeat_record.state, State.Cancelled)
+        repeat_record.refresh_from_db(fields=["state"])
+        self.assertEqual(repeat_record.state, State.Cancelled)
         self.assertEqual(self.mock_fire.call_count, 0)
         self.assertEqual(self.mock_postpone_by.call_count, 0)
 
@@ -223,7 +220,7 @@ def test_postpones_record_if_repeater_is_paused(self):
             is_paused=True
         )
 
-        repeat_record = SQLRepeatRecord(
+        repeat_record = RepeatRecord(
             domain=self.domain,
             payload_id='abc123',
             registered_at=datetime.utcnow(),
@@ -242,7 +239,7 @@ def test_fires_record_if_repeater_is_not_paused(self):
             is_paused=False
         )
 
-        repeat_record = SQLRepeatRecord(
+        repeat_record = RepeatRecord(
             domain=self.domain,
             payload_id='abc123',
             registered_at=datetime.utcnow(),
@@ -262,7 +259,7 @@ def test_paused_and_deleted_repeater_does_not_fire_or_postpone(self):
             is_deleted=True,
         )
 
-        repeat_record = SQLRepeatRecord(
+        repeat_record = RepeatRecord(
             domain=self.domain,
             payload_id='abc123',
             registered_at=datetime.utcnow(),
@@ -287,17 +284,16 @@ def setUpClass(cls):
             domain=cls.domain,
             connection_settings=cls.conn_settings,
         )
-        cls.addClassCleanup(delete_all_repeat_records)
 
     def setUp(self):
         self.patch()
 
     def patch(self):
-        patch_fire = patch.object(SQLRepeatRecord, 'fire')
+        patch_fire = patch.object(RepeatRecord, 'fire')
         self.mock_fire = patch_fire.start()
         self.addCleanup(patch_fire.stop)
 
-        patch_postpone_by = patch.object(SQLRepeatRecord, 'postpone_by')
+        patch_postpone_by = patch.object(RepeatRecord, 'postpone_by')
         self.mock_postpone_by = patch_postpone_by.start()
         self.addCleanup(patch_postpone_by.stop)
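Note: the test_tasks.py hunks keep the _process_repeat_record() contract intact while dropping the Couch re-read: a deleted repeater now just cancels the SQL row (refresh_from_db instead of checking doc_type). A compact sketch of the paused-repeater expectation these tests pin down — a sketch only, assuming a saved RepeatRecord wired to a paused (not deleted) repeater as in the setUp above; the helper name is made up:

    from unittest.mock import patch
    from corehq.motech.repeaters.models import RepeatRecord
    from corehq.motech.repeaters.tasks import _process_repeat_record

    def assert_paused_repeater_postpones(repeat_record):
        # Paused but not deleted repeater: the record is postponed, never fired.
        with patch.object(RepeatRecord, 'fire') as mock_fire, \
                patch.object(RepeatRecord, 'postpone_by') as mock_postpone_by:
            _process_repeat_record(repeat_record)
        assert mock_fire.call_count == 0
        assert mock_postpone_by.call_count == 1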
diff --git a/corehq/motech/repeaters/views/repeat_records.py b/corehq/motech/repeaters/views/repeat_records.py
index 59c1bcf108a2..b065abb423d4 100644
--- a/corehq/motech/repeaters/views/repeat_records.py
+++ b/corehq/motech/repeaters/views/repeat_records.py
@@ -41,7 +41,7 @@
 from corehq.motech.models import RequestLog
 
 from ..const import State, RECORD_CANCELLED_STATE
-from ..models import SQLRepeatRecord, is_sql_id
+from ..models import RepeatRecord
 from .repeat_record_display import RepeatRecordDisplay
@@ -121,7 +121,7 @@ def _make_resend_payload_button(self, record_id):
     def total_records(self):
         if self.payload_id:
             return len(self._get_all_records_by_payload())
-        query = SQLRepeatRecord.objects.filter(domain=self.domain)
+        query = RepeatRecord.objects.filter(domain=self.domain)
         if self.repeater_id:
             query = query.filter(repeater_id=self.repeater_id)
         if self.state:
@@ -138,7 +138,7 @@ def shared_pagination_GET_params(self):
     @memoized
     def _get_all_records_by_payload(self):
-        query = SQLRepeatRecord.objects.filter(
+        query = RepeatRecord.objects.filter(
             domain=self.domain,
             payload_id=self.payload_id,
         )
@@ -163,7 +163,7 @@ def rows(self):
             end = self.pagination.start + self.pagination.count
             records = self._get_all_records_by_payload()[self.pagination.start:end]
         else:
-            records = SQLRepeatRecord.objects.page(
+            records = RepeatRecord.objects.page(
                 self.domain,
                 self.pagination.start,
                 self.pagination.count,
@@ -244,9 +244,9 @@ def report_context(self):
         where = Q(domain=self.domain)
         if self.repeater_id:
             where &= Q(repeater_id=self.repeater_id)
-        total = SQLRepeatRecord.objects.filter(where).count()
-        total_pending = SQLRepeatRecord.objects.filter(where, state=State.Pending).count()  # include State.Fail?
-        total_cancelled = SQLRepeatRecord.objects.filter(where, state=State.Cancelled).count()
+        total = RepeatRecord.objects.filter(where).count()
+        total_pending = RepeatRecord.objects.filter(where, state=State.Pending).count()  # include State.Fail?
+        total_cancelled = RepeatRecord.objects.filter(where, state=State.Cancelled).count()
         form_query_string = self.request.GET.urlencode()
         form_query_string_cancelled = _change_record_state(
@@ -273,10 +273,9 @@ class RepeatRecordView(View):
 
     @staticmethod
     def get_record_or_404(domain, record_id):
-        where = {"id": record_id} if is_sql_id(record_id) else {"couch_id": record_id}
         try:
-            record = SQLRepeatRecord.objects.get(**where)
-        except SQLRepeatRecord.DoesNotExist:
+            record = RepeatRecord.objects.get(id=record_id)
+        except RepeatRecord.DoesNotExist:
             raise Http404()
 
         if record.domain != domain:
diff --git a/corehq/motech/repeaters/views/repeaters.py b/corehq/motech/repeaters/views/repeaters.py
index 300c5b65b7da..88b8e27b26a6 100644
--- a/corehq/motech/repeaters/views/repeaters.py
+++ b/corehq/motech/repeaters/views/repeaters.py
@@ -28,7 +28,7 @@
 from ..forms import CaseRepeaterForm, FormRepeaterForm, GenericRepeaterForm
 from ..models import (
     Repeater,
-    SQLRepeatRecord,
+    RepeatRecord,
     get_all_repeater_types,
 )
@@ -68,7 +68,7 @@ def get_repeaters_with_state_counts(repeater_class):
 
     @property
     def page_context(self):
-        state_counts = SQLRepeatRecord.objects.count_by_repeater_and_state(domain=self.domain)
+        state_counts = RepeatRecord.objects.count_by_repeater_and_state(domain=self.domain)
         return {
             'report': 'repeat_record_report',
             'repeater_types_info': self.get_repeater_types_info(state_counts),
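Note: with is_sql_id() gone, RepeatRecordView.get_record_or_404() in the view change above accepts only SQL primary keys: an unknown id or a record belonging to another domain raises Http404, and Couch-style hex ids are no longer resolved. A usage sketch mirroring how the view tests below call the staticmethod directly; the domain and id values here are made up:

    from corehq.motech.repeaters.views import repeat_records

    # Integer primary keys only; a missing id or a wrong-domain record -> Http404.
    record = repeat_records.RepeatRecordView.get_record_or_404("my-domain", 1234)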
diff --git a/corehq/motech/repeaters/views/tests/test_repeat_records.py b/corehq/motech/repeaters/views/tests/test_repeat_records.py
index a45fef732e8c..d27140cb2a92 100644
--- a/corehq/motech/repeaters/views/tests/test_repeat_records.py
+++ b/corehq/motech/repeaters/views/tests/test_repeat_records.py
@@ -9,7 +9,7 @@
 from .. import repeaters
 from .. import repeat_records
-from ...models import FormRepeater, SQLRepeatRecord
+from ...models import FormRepeater, RepeatRecord
 
 
 class TestUtilities(SimpleTestCase):
@@ -106,7 +106,7 @@ def setUpClass(cls):
 
     def test_get_repeater_types_info(self):
         class view:
             domain = "test"
-        state_counts = SQLRepeatRecord.objects.count_by_repeater_and_state("test")
+        state_counts = RepeatRecord.objects.count_by_repeater_and_state("test")
         infos = repeaters.DomainForwardingOptionsView.get_repeater_types_info(view, state_counts)
 
         repeater, = {i.class_name: i for i in infos}['FormRepeater'].instances
@@ -151,17 +151,12 @@ def test_get_record_or_404_with_int(self):
         record = repeat_records.RepeatRecordView.get_record_or_404("test", rec_id)
         self.assertEqual(record.id, rec_id)
 
-    def test_get_record_or_404_with_couch_id(self):
-        rec_id = self.record.couch_id
-        record = repeat_records.RepeatRecordView.get_record_or_404("test", rec_id)
-        self.assertEqual(record.id, self.record.id)
-
     def test_get_record_or_404_not_found(self):
-        rec_id = "404aaaaaaaaaaaaaaaaaaaaaaaaaa404"
+        rec_id = 40400000000000000000000000000404
         with self.assertRaises(repeat_records.Http404):
             repeat_records.RepeatRecordView.get_record_or_404("test", rec_id)
 
     def test_get_record_or_404_with_wrong_domain(self):
-        rec_id = str(self.record.id)
+        rec_id = self.record.id
         with self.assertRaises(repeat_records.Http404):
             repeat_records.RepeatRecordView.get_record_or_404("wrong", rec_id)
diff --git a/corehq/preindex/couchviews.lock b/corehq/preindex/couchviews.lock
index 28911b505927..5b0d27149942 100644
--- a/corehq/preindex/couchviews.lock
+++ b/corehq/preindex/couchviews.lock
@@ -110,11 +110,3 @@ ae04d8c82218a9543491 corehq/couchapps/schemas_by_xmlns_or_case_type/views/view/r
 7d2e8e33683a7c27a8df corehq/couchapps/tests/test_all_docs.py
 7ae2e00ba081690d86c3 corehq/couchapps/users_extra/views/users_by_location_id/map.js
 ae04d8c82218a9543491 corehq/couchapps/users_extra/views/users_by_location_id/reduce.js
-ed6ff90fb4b24a10a222 corehq/motech/repeaters/_design/indexes/search/index.js
-8b4f00cb1a48d85286ec corehq/motech/repeaters/_design/views/repeat_records/map.js
-ae04d8c82218a9543491 corehq/motech/repeaters/_design/views/repeat_records/reduce.js
-7ad14540f9e12079225f corehq/motech/repeaters/_design/views/repeat_records_by_next_check/map.js
-83d4baace1fa5a6ede96 corehq/motech/repeaters/_design/views/repeat_records_by_next_check/reduce.js
-0337a63a6147e8f2fcc6 corehq/motech/repeaters/_design/views/repeat_records_by_payload_id/map.js
-637a41f78b2cb08b0aa7 corehq/motech/repeaters/_design/views/repeaters/map.js
-83d4baace1fa5a6ede96 corehq/motech/repeaters/_design/views/repeaters/reduce.js
diff --git a/custom/cowin/tests.py b/custom/cowin/tests.py
index 73c519b4c05e..5e0776a8f63d 100644
--- a/custom/cowin/tests.py
+++ b/custom/cowin/tests.py
@@ -9,7 +9,7 @@
 from corehq.form_processor.models import CommCareCase
 from corehq.motech.models import ConnectionSettings
-from corehq.motech.repeaters.models import SQLRepeatRecord
+from corehq.motech.repeaters.models import RepeatRecord
 from custom.cowin.const import (
     COWIN_API_DATA_REGISTRATION_IDENTIFIER,
     COWIN_API_DATA_VACCINATION_IDENTIFIER,
@@ -48,7 +48,7 @@ def test_registration_payload(self, payload_doc_mock, connection_settings_mock):
         repeater = BeneficiaryRegistrationRepeater()
         generator = BeneficiaryRegistrationPayloadGenerator(repeater)
-        repeat_record = SQLRepeatRecord()
+        repeat_record = RepeatRecord()
 
         self.assertEqual(repeater.get_headers(repeat_record)['X-Api-Key'], "secure-api-key")
 
@@ -66,8 +66,8 @@ def test_registration_payload(self, payload_doc_mock, connection_settings_mock):
             }
         )
 
-    @patch('corehq.motech.repeaters.models.SQLRepeatRecord.handle_success', lambda *_: None)
-    @patch('corehq.motech.repeaters.models.SQLRepeatRecord.repeater', new_callable=PropertyMock)
+    @patch('corehq.motech.repeaters.models.RepeatRecord.handle_success', lambda *_: None)
+    @patch('corehq.motech.repeaters.models.RepeatRecord.repeater', new_callable=PropertyMock)
     @patch('corehq.motech.repeaters.models.CaseRepeater.payload_doc')
     @patch('custom.cowin.repeaters.update_case')
     @patch('requests.Response.json')
@@ -92,7 +92,7 @@ def test_registration_response(self, json_response_mock, update_case_mock, paylo
         response.status_code = 200
         json_response_mock.return_value = response_json
 
-        repeat_record = SQLRepeatRecord(payload_id=case_id)
+        repeat_record = RepeatRecord(payload_id=case_id)
         repeater = BeneficiaryRegistrationRepeater(domain=self.domain)
         repeat_record_repeater_mock.return_value = repeater
 
@@ -115,7 +115,7 @@ def test_vaccination_payload(self, payload_doc_mock, connection_settings_mock):
         repeater = BeneficiaryVaccinationRepeater()
         generator = BeneficiaryVaccinationPayloadGenerator(repeater)
-        repeat_record = SQLRepeatRecord()
+        repeat_record = RepeatRecord()
 
         self.assertEqual(repeater.get_headers(repeat_record)['X-Api-Key'], "my-secure-api-key")
 
@@ -169,8 +169,8 @@ def test_vaccination_payload(self, payload_doc_mock, connection_settings_mock):
             }
         )
 
-    @patch('corehq.motech.repeaters.models.SQLRepeatRecord.handle_success', lambda *_: None)
-    @patch('corehq.motech.repeaters.models.SQLRepeatRecord.repeater', new_callable=PropertyMock)
+    @patch('corehq.motech.repeaters.models.RepeatRecord.handle_success', lambda *_: None)
+    @patch('corehq.motech.repeaters.models.RepeatRecord.repeater', new_callable=PropertyMock)
     @patch('corehq.motech.repeaters.models.CaseRepeater.payload_doc')
     @patch('custom.cowin.repeaters.update_case')
     def test_vaccination_response(self, update_case_mock, payload_doc_mock, repeat_record_repeater_mock):
@@ -188,7 +188,7 @@ def test_vaccination_response(self, update_case_mock, payload_doc_mock, repeat_r
         response = requests.Response()
         response.status_code = 204
 
-        repeat_record = SQLRepeatRecord(payload_id=case_id)
+        repeat_record = RepeatRecord(payload_id=case_id)
         repeater = BeneficiaryVaccinationRepeater(domain=self.domain)
         repeat_record_repeater_mock.return_value = repeater
diff --git a/migrations.lock b/migrations.lock
index f180b4e1cc2a..d379919d1bfd 100644
--- a/migrations.lock
+++ b/migrations.lock
@@ -822,6 +822,7 @@ repeaters
     0007_lengthen_payload_id
     0008_sqlrepeatrecords
     0009_add_domain_to_indexes
+    0010_rm_couch_artifacts
 reports
     0001_initial
     0002_auto_20171121_1803
diff --git a/settings.py b/settings.py
index b4b26a68c358..42144f59dbb2 100755
--- a/settings.py
+++ b/settings.py
@@ -1609,7 +1609,6 @@ def _pkce_required(client_id):
     # custom reports
     'accounting',
-    ('repeaters', 'receiverwrapper'),
     ('userreports', META_DB),
     ('custom_data_fields', META_DB),
     ('export', META_DB),