From 90d503bd7b1623b0659dd007b4d10694c1943cba Mon Sep 17 00:00:00 2001 From: nospame Date: Thu, 7 Mar 2024 09:33:01 -0800 Subject: [PATCH 001/928] Add transifex-python dependency --- requirements/base-requirements.in | 1 + requirements/dev-requirements.txt | 20 ++++++++++++++++++-- requirements/docs-requirements.txt | 19 ++++++++++++++++++- requirements/prod-requirements.txt | 20 ++++++++++++++++++-- requirements/requirements.txt | 19 ++++++++++++++++++- requirements/test-requirements.txt | 19 ++++++++++++++++++- 6 files changed, 91 insertions(+), 7 deletions(-) diff --git a/requirements/base-requirements.in b/requirements/base-requirements.in index b34871061e5f..7bcde7c65626 100644 --- a/requirements/base-requirements.in +++ b/requirements/base-requirements.in @@ -113,6 +113,7 @@ stripe suds-py3 text-unidecode toposort +transifex-python tropo-webapi-python turn-python twilio diff --git a/requirements/dev-requirements.txt b/requirements/dev-requirements.txt index a1945dcd8d5c..1aaed865d61d 100644 --- a/requirements/dev-requirements.txt +++ b/requirements/dev-requirements.txt @@ -15,7 +15,9 @@ architect==0.6.0 asgiref==3.7.2 # via django asttokens==2.0.5 - # via stack-data + # via + # stack-data + # transifex-python async-timeout==4.0.2 # via redis attrs==23.1.0 @@ -75,6 +77,7 @@ click==8.0.3 # click-repl # git-build-branch # pip-tools + # transifex-python click-didyoumean==0.3.0 # via celery click-plugins==1.1.1 @@ -258,7 +261,9 @@ flaky==3.7.0 freezegun==1.1.0 # via -r test-requirements.in future==0.18.3 - # via pyjwkest + # via + # pyjwkest + # pyseeyou gevent==23.9.1 # via # -r base-requirements.in @@ -448,6 +453,8 @@ packaging==23.0 # build # ddtrace # sphinx +parsimonious==0.10.0 + # via pyseeyou parso==0.8.3 # via jedi pep517==0.10.0 @@ -552,6 +559,8 @@ pypng==0.20220715.0 # via qrcode pyrsistent==0.17.3 # via jsonschema +pyseeyou==1.0.2 + # via transifex-python python-dateutil==2.8.2 # via # -r base-requirements.in @@ -600,6 +609,8 @@ redis==4.5.4 # 
django-redis # django-redis-sessions # django-websocket-redis +regex==2023.12.25 + # via parsimonious reportlab==3.6.13 # via -r base-requirements.in requests==2.28.2 @@ -621,6 +632,7 @@ requests==2.28.2 # requests-toolbelt # sphinx # stripe + # transifex-python # twilio requests-mock==1.9.3 # via -r test-requirements.in @@ -747,12 +759,16 @@ tomli==2.0.1 # via # build # pip-tools +toolz==0.12.1 + # via pyseeyou toposort==1.7 # via -r base-requirements.in traitlets==5.1.1 # via # ipython # matplotlib-inline +transifex-python==3.5.0 + # via -r base-requirements.in tropo-webapi-python==0.1.3 # via -r base-requirements.in turn-python==0.0.1 diff --git a/requirements/docs-requirements.txt b/requirements/docs-requirements.txt index 2558dd934c4c..b84da8d3a5ee 100644 --- a/requirements/docs-requirements.txt +++ b/requirements/docs-requirements.txt @@ -14,6 +14,8 @@ architect==0.6.0 # via -r base-requirements.in asgiref==3.7.2 # via django +asttokens==2.4.1 + # via transifex-python async-timeout==4.0.2 # via redis attrs==23.1.0 @@ -67,6 +69,7 @@ click==8.1.3 # click-didyoumean # click-plugins # click-repl + # transifex-python click-didyoumean==0.3.0 # via celery click-plugins==1.1.1 @@ -221,7 +224,9 @@ exceptiongroup==1.1.1 firebase-admin==6.1.0 # via -r base-requirements.in future==0.18.3 - # via pyjwkest + # via + # pyjwkest + # pyseeyou gevent==23.9.1 # via # -r base-requirements.in @@ -380,6 +385,8 @@ packaging==23.0 # -r base-requirements.in # ddtrace # sphinx +parsimonious==0.10.0 + # via pyseeyou phonenumberslite==8.12.48 # via -r base-requirements.in pickle5==0.0.11 @@ -458,6 +465,8 @@ pypng==0.20220715.0 # via qrcode pyrsistent==0.17.3 # via jsonschema +pyseeyou==1.0.2 + # via transifex-python python-dateutil==2.8.2 # via # -r base-requirements.in @@ -497,6 +506,8 @@ redis==4.5.4 # django-redis # django-redis-sessions # django-websocket-redis +regex==2023.12.25 + # via parsimonious reportlab==3.6.13 # via -r base-requirements.in requests==2.28.2 @@ -516,6 +527,7 
@@ requests==2.28.2 # requests-toolbelt # sphinx # stripe + # transifex-python # twilio requests-oauthlib==1.3.1 # via @@ -544,6 +556,7 @@ simplejson==3.17.6 six==1.16.0 # via # -r base-requirements.in + # asttokens # bleach # click-repl # ddsketch @@ -616,8 +629,12 @@ text-unidecode==1.3 # via -r base-requirements.in tinycss2==1.2.1 # via bleach +toolz==0.12.1 + # via pyseeyou toposort==1.7 # via -r base-requirements.in +transifex-python==3.5.0 + # via -r base-requirements.in tropo-webapi-python==0.1.3 # via -r base-requirements.in turn-python==0.0.1 diff --git a/requirements/prod-requirements.txt b/requirements/prod-requirements.txt index e8eb0baec250..5e3b5bbfe89c 100644 --- a/requirements/prod-requirements.txt +++ b/requirements/prod-requirements.txt @@ -13,7 +13,9 @@ architect==0.6.0 asgiref==3.7.2 # via django asttokens==2.0.5 - # via stack-data + # via + # stack-data + # transifex-python async-timeout==4.0.2 # via redis attrs==23.1.0 @@ -69,6 +71,7 @@ click==8.1.3 # click-didyoumean # click-plugins # click-repl + # transifex-python click-didyoumean==0.3.0 # via celery click-plugins==1.1.1 @@ -218,7 +221,9 @@ firebase-admin==6.1.0 flower==2.0.0 # via -r prod-requirements.in future==0.18.3 - # via pyjwkest + # via + # pyjwkest + # pyseeyou gevent==23.9.1 # via # -r base-requirements.in @@ -374,6 +379,8 @@ packaging==23.0 # via # -r base-requirements.in # ddtrace +parsimonious==0.10.0 + # via pyseeyou parso==0.8.3 # via jedi pexpect==4.8.0 @@ -462,6 +469,8 @@ pypng==0.20220715.0 # via qrcode pyrsistent==0.17.3 # via jsonschema +pyseeyou==1.0.2 + # via transifex-python python-dateutil==2.8.2 # via # -r base-requirements.in @@ -501,6 +510,8 @@ redis==4.5.4 # django-redis # django-redis-sessions # django-websocket-redis +regex==2023.12.25 + # via parsimonious reportlab==3.6.13 # via -r base-requirements.in requests==2.28.2 @@ -519,6 +530,7 @@ requests==2.28.2 # requests-oauthlib # requests-toolbelt # stripe + # transifex-python # twilio requests-oauthlib==1.3.1 # 
via @@ -598,6 +610,8 @@ text-unidecode==1.3 # via -r base-requirements.in tinycss2==1.2.1 # via bleach +toolz==0.12.1 + # via pyseeyou toposort==1.7 # via -r base-requirements.in tornado==6.3.3 @@ -606,6 +620,8 @@ traitlets==5.1.1 # via # ipython # matplotlib-inline +transifex-python==3.5.0 + # via -r base-requirements.in tropo-webapi-python==0.1.3 # via -r base-requirements.in turn-python==0.0.1 diff --git a/requirements/requirements.txt b/requirements/requirements.txt index 2fbfe9749483..2529b560747f 100644 --- a/requirements/requirements.txt +++ b/requirements/requirements.txt @@ -12,6 +12,8 @@ architect==0.6.0 # via -r base-requirements.in asgiref==3.7.2 # via django +asttokens==2.4.1 + # via transifex-python async-timeout==4.0.2 # via redis attrs==23.1.0 @@ -63,6 +65,7 @@ click==8.1.3 # click-didyoumean # click-plugins # click-repl + # transifex-python click-didyoumean==0.3.0 # via celery click-plugins==1.1.1 @@ -207,7 +210,9 @@ exceptiongroup==1.1.1 firebase-admin==6.1.0 # via -r base-requirements.in future==0.18.3 - # via pyjwkest + # via + # pyjwkest + # pyseeyou gevent==23.9.1 # via # -r base-requirements.in @@ -353,6 +358,8 @@ packaging==23.0 # via # -r base-requirements.in # ddtrace +parsimonious==0.10.0 + # via pyseeyou phonenumberslite==8.12.48 # via -r base-requirements.in pickle5==0.0.11 @@ -425,6 +432,8 @@ pypng==0.20220715.0 # via qrcode pyrsistent==0.17.3 # via jsonschema +pyseeyou==1.0.2 + # via transifex-python python-dateutil==2.8.2 # via # -r base-requirements.in @@ -463,6 +472,8 @@ redis==4.5.4 # django-redis # django-redis-sessions # django-websocket-redis +regex==2023.12.25 + # via parsimonious reportlab==3.6.13 # via -r base-requirements.in requests==2.28.2 @@ -481,6 +492,7 @@ requests==2.28.2 # requests-oauthlib # requests-toolbelt # stripe + # transifex-python # twilio requests-oauthlib==1.3.1 # via @@ -509,6 +521,7 @@ simplejson==3.17.6 six==1.16.0 # via # -r base-requirements.in + # asttokens # bleach # click-repl # ddsketch @@ -555,8 
+568,12 @@ text-unidecode==1.3 # via -r base-requirements.in tinycss2==1.2.1 # via bleach +toolz==0.12.1 + # via pyseeyou toposort==1.7 # via -r base-requirements.in +transifex-python==3.5.0 + # via -r base-requirements.in tropo-webapi-python==0.1.3 # via -r base-requirements.in turn-python==0.0.1 diff --git a/requirements/test-requirements.txt b/requirements/test-requirements.txt index 1e8dc2e12558..e96345e9ad86 100644 --- a/requirements/test-requirements.txt +++ b/requirements/test-requirements.txt @@ -12,6 +12,8 @@ architect==0.6.0 # via -r base-requirements.in asgiref==3.7.2 # via django +asttokens==2.4.1 + # via transifex-python async-timeout==4.0.2 # via redis attrs==23.1.0 @@ -66,6 +68,7 @@ click==8.1.3 # click-plugins # click-repl # pip-tools + # transifex-python click-didyoumean==0.3.0 # via celery click-plugins==1.1.1 @@ -224,7 +227,9 @@ flaky==3.7.0 freezegun==1.1.0 # via -r test-requirements.in future==0.18.3 - # via pyjwkest + # via + # pyjwkest + # pyseeyou gevent==23.9.1 # via # -r base-requirements.in @@ -380,6 +385,8 @@ packaging==23.0 # -r base-requirements.in # build # ddtrace +parsimonious==0.10.0 + # via pyseeyou pep517==0.10.0 # via build phonenumberslite==8.12.48 @@ -458,6 +465,8 @@ pypng==0.20220715.0 # via qrcode pyrsistent==0.17.3 # via jsonschema +pyseeyou==1.0.2 + # via transifex-python python-dateutil==2.8.2 # via # -r base-requirements.in @@ -500,6 +509,8 @@ redis==4.5.4 # django-redis # django-redis-sessions # django-websocket-redis +regex==2023.12.25 + # via parsimonious reportlab==3.6.13 # via -r base-requirements.in requests==2.28.2 @@ -519,6 +530,7 @@ requests==2.28.2 # requests-oauthlib # requests-toolbelt # stripe + # transifex-python # twilio requests-mock==1.9.3 # via -r test-requirements.in @@ -549,6 +561,7 @@ simplejson==3.17.6 six==1.16.0 # via # -r base-requirements.in + # asttokens # bleach # click-repl # ddsketch @@ -609,8 +622,12 @@ tomli==2.0.1 # via # build # pip-tools +toolz==0.12.1 + # via pyseeyou toposort==1.7 # 
via -r base-requirements.in +transifex-python==3.5.0 + # via -r base-requirements.in tropo-webapi-python==0.1.3 # via -r base-requirements.in turn-python==0.0.1 From 5e7bc2954fe4c98ea83f2e8e736e8f8231efab3f Mon Sep 17 00:00:00 2001 From: nospame Date: Mon, 1 Apr 2024 11:50:07 -0700 Subject: [PATCH 002/928] Wrap Transifex client API calls with dummy functions pre-upgrade --- .../integrations/transifex/client.py | 161 +++++------------- 1 file changed, 44 insertions(+), 117 deletions(-) diff --git a/corehq/apps/translations/integrations/transifex/client.py b/corehq/apps/translations/integrations/transifex/client.py index e7c103d3c43e..b56f0688e8c2 100644 --- a/corehq/apps/translations/integrations/transifex/client.py +++ b/corehq/apps/translations/integrations/transifex/client.py @@ -27,18 +27,41 @@ def __init__(self, token, organization, project, use_version_postfix=True): def _auth(self): return self.username, self.token + def _create_resource(self, resource_slug, resource_name): + ... + + def _upload_resource_strings(self, content, resource_id): + ... + + def _upload_resource_translations(self, content, resource_id, language_id): + ... + + def _get_project(self, project_slug): + ... + + def _get_resource(self, resource_slug): + ... + + def _list_resources(self): + ... + + def _download_resource_translations(self, resource_id, language_id): + ... + + def _lock_resource(self, resource): + ... + + def delete_resource(self, resource_slug): + ... 
+ def list_resources(self): - url = "https://api.transifex.com/organizations/{}/projects/{}/resources".format( - self.organization, - self.project - ) - return requests.get(url, auth=self._auth) + return self._list_resources() def get_resource_slugs(self, version): """ :return: list of resource slugs corresponding to version """ - all_resources = self.list_resources().json() + all_resources = self._list_resources().json() if version and self.use_version_postfix: # get all slugs with version postfix return [r['slug'] @@ -54,34 +77,8 @@ def get_resource_slugs(self, version): return [r['slug'] for r in all_resources] def update_resource_slug(self, old_resource_slug, new_resource_slug): - url = "https://www.transifex.com/api/2/project/{}/resource/{}".format( - self.project, old_resource_slug) - data = {'slug': new_resource_slug} - headers = {'content-type': 'application/json'} - return requests.put( - url, data=json.dumps(data), auth=self._auth, headers=headers, - ) - - def lock_resource(self, resource_slug): - """ - lock a resource so that it can't be translated/reviewed anymore. 
- - :param resource_slug: - """ - url = "https://www.transifex.com/api/2/project/{}/resource/{}".format( - self.project, resource_slug) - data = { - 'accept_translations': False - } - headers = {'content-type': 'application/json'} - return requests.put( - url, data=json.dumps(data), auth=self._auth, headers=headers, - ) - - def delete_resource(self, resource_slug): - url = "https://www.transifex.com/api/2/project/{}/resource/{}".format( - self.project, resource_slug) - return requests.delete(url, auth=self._auth) + # slug is immutable from Transifex API v3 + pass def upload_resource(self, path_to_pofile, resource_slug, resource_name, update_resource): """ @@ -96,22 +93,11 @@ def upload_resource(self, path_to_pofile, resource_slug, resource_name, update_r if resource_name is None: __, filename = os.path.split(path_to_pofile) resource_name = filename - headers = {'content-type': 'application/json'} - data = { - 'name': resource_name, 'slug': resource_slug, 'content': content, - 'i18n_type': 'PO' - } if update_resource: - url = "https://www.transifex.com/api/2/project/{}/resource/{}/content".format( - self.project, resource_slug) - return requests.put( - url, data=json.dumps(data), auth=self._auth, headers=headers, - ) + resource = self._get_resource(resource_slug) else: - url = "https://www.transifex.com/api/2/project/{}/resources".format(self.project) - return requests.post( - url, data=json.dumps(data), auth=self._auth, headers=headers, - ) + resource = self._create_resource(resource_slug, resource_name) + self._upload_resource_strings(content, resource.id) def upload_translation(self, path_to_pofile, resource_slug, resource_name, hq_lang_code): """ @@ -123,27 +109,12 @@ def upload_translation(self, path_to_pofile, resource_slug, resource_name, hq_la :param hq_lang_code: lang code on hq """ target_lang_code = self.transifex_lang_code(hq_lang_code) - url = "https://www.transifex.com/api/2/project/{}/resource/{}/translation/{}".format( - self.project, resource_slug, 
target_lang_code) content = open(path_to_pofile, 'r', encoding="utf-8").read() - headers = {'content-type': 'application/json'} - data = { - 'name': resource_name, 'slug': resource_slug, 'content': content, - 'i18n_type': 'PO' - } - return requests.put( - url, data=json.dumps(data), auth=self._auth, headers=headers, - ) + resource = self._get_resource(resource_slug) + self._upload_resource_translations(content, resource.id, target_lang_code) def project_details(self): - url = "https://www.transifex.com/api/2/project/{}/?details".format(self.project) - response = requests.get( - url, auth=self._auth, - ) - if response.status_code == 404: - raise ResourceMissing("Project not found with slug {}".format(self.project)) - else: - return response + return self.project @memoized def _resource_details(self, resource_slug): @@ -152,14 +123,7 @@ def _resource_details(self, resource_slug): :param resource_slug: resource slug """ - url = "https://www.transifex.com/api/2/project/{}/resource/{}/stats/".format( - self.project, resource_slug) - response = requests.get(url, auth=self._auth) - if response.status_code == 200: - return response.json() - elif response.status_code == 404: - raise ResourceMissing("Resource {} not found".format(resource_slug)) - raise Exception(response.content) + return self._get_resource(resource_slug) def translation_completed(self, resource_slug, hq_lang_code=None): """ @@ -188,18 +152,14 @@ def get_translation(self, resource_slug, hq_lang_code, lock_resource): :param lock_resource: lock resource after pulling translation :return: list of POEntry objects """ + resource = self._get_resource(resource_slug) lang = self.transifex_lang_code(hq_lang_code) - url = "https://www.transifex.com/api/2/project/{}/resource/{}/translation/{}/?file".format( - self.project, resource_slug, lang - ) - response = requests.get(url, auth=self._auth, stream=True) - if response.status_code != 200: - raise ResourceMissing + content = 
self._download_resource_translations(resource.id, lang) temp_file = tempfile.NamedTemporaryFile() with open(temp_file.name, 'w', encoding='utf-8') as f: - f.write(response.content.decode(encoding='utf-8')) + f.write(content.decode(encoding='utf-8')) if lock_resource: - self.lock_resource(resource_slug) + self._lock_resource(resource_slug) return polib.pofile(temp_file.name) @staticmethod @@ -224,38 +184,5 @@ def get_source_lang(self): return self.project_details().json().get('source_language_code') def move_resources(self, hq_lang_code, target_project, version=None, use_version_postfix=True): - """ - ability to move resources from one project to another - - :param hq_lang_code: lang code on hq - :param target_project: target project slug on transifex - :param version: version if needed on parent resource slugs - :param use_version_postfix: to use version postfix in new project - :return: responses per resource slug - """ - responses = {} - for resource_slug in self.get_resource_slugs(version): - lang = self.transifex_lang_code(hq_lang_code) - url = "https://www.transifex.com/api/2/project/{}/resource/{}/translation/{}/?file".format( - self.project, resource_slug, lang - ) - response = requests.get(url, auth=self._auth, stream=True) - if response.status_code != 200: - raise ResourceMissing - if use_version_postfix: - upload_resource_slug = resource_slug - else: - upload_resource_slug = resource_slug.split("_v")[0] - upload_url = "https://www.transifex.com/api/2/project/{}/resource/{}/translation/{}".format( - target_project, upload_resource_slug, lang) - content = response.content - headers = {'content-type': 'application/json'} - data = { - 'name': upload_resource_slug, 'slug': upload_resource_slug, 'content': content, - 'i18n_type': 'PO' - } - upload_response = requests.put( - upload_url, data=json.dumps(data), auth=self._auth, headers=headers, - ) - responses[resource_slug] = upload_response - return responses + # not exposed to UI + pass From 
1e8dfa99587f6b0babb76ca0184eae579707c0ad Mon Sep 17 00:00:00 2001 From: nospame Date: Mon, 1 Apr 2024 12:03:48 -0700 Subject: [PATCH 003/928] Implement stub Transifex client API methods --- .../integrations/transifex/client.py | 79 +++++++++++++------ 1 file changed, 57 insertions(+), 22 deletions(-) diff --git a/corehq/apps/translations/integrations/transifex/client.py b/corehq/apps/translations/integrations/transifex/client.py index b56f0688e8c2..d47cf99cb41d 100644 --- a/corehq/apps/translations/integrations/transifex/client.py +++ b/corehq/apps/translations/integrations/transifex/client.py @@ -4,6 +4,7 @@ import polib import requests +from transifex.api import TransifexApi from memoized import memoized from corehq.apps.translations.integrations.transifex.const import ( @@ -17,42 +18,76 @@ class TransifexApiClient(object): def __init__(self, token, organization, project, use_version_postfix=True): - self.username = API_USER - self.token = token - self.organization = organization - self.project = project self.use_version_postfix = use_version_postfix + self.api = TransifexApi(auth=token) + self.organization = self._get_organization(organization) + self.project = self._get_project(project) @property - def _auth(self): - return self.username, self.token + def _i18n_format(self): + return self.api.I18nFormat(id="PO") def _create_resource(self, resource_slug, resource_name): - ... + return self.api.Resource.create( + name=resource_name, + slug=resource_slug, + project=self.project, + i18n_format=self._i18n_format + ) + + @staticmethod + def _upload_content(cls, content, **kwargs): + # TransifexApi.upload() waits for async upload which we don't need, so create the upload manually + cls.create_with_form(data=kwargs, files={"content": content}) def _upload_resource_strings(self, content, resource_id): - ... 
+ cls = self.api.ResourceStringsAsyncUpload + self._upload_content(cls, content, resource=resource_id) def _upload_resource_translations(self, content, resource_id, language_id): - ... + cls = self.api.ResourceTranslationsAsyncUpload + self._upload_content(cls, content, resource=resource_id, language=language_id) + + @staticmethod + def _get_object(cls, **kwargs): + return cls.get(**kwargs) + + def _get_organization(self, organization_slug): + cls = self.api.Organization + return self._get_object(cls, slug=organization_slug) def _get_project(self, project_slug): - ... + cls = self.api.Project + return self._get_object(cls, slug=project_slug, organization=self.organization) def _get_resource(self, resource_slug): - ... + cls = self.api.Resource + return self._get_object(cls, slug=resource_slug, project=self.project) + + @staticmethod + def _list_objects(cls, **kwargs): + return cls.filter(**kwargs) def _list_resources(self): - ... + cls = self.api.Resource + return self._list_objects(cls, project=self.project) + + @staticmethod + def _download_content(cls, **kwargs): + download = cls.download(**kwargs) + response = requests.get(download, stream=True) + return response.content def _download_resource_translations(self, resource_id, language_id): - ... + cls = self.api.ResourceTranslationsAsyncDownload + return self._download_content(cls, resource=resource_id, language_id=language_id) def _lock_resource(self, resource): - ... + return resource.save(accept_translations=False) def delete_resource(self, resource_slug): - ... 
+ resource = self._get_resource(resource_slug) + resource.delete() def list_resources(self): return self._list_resources() @@ -61,20 +96,20 @@ def get_resource_slugs(self, version): """ :return: list of resource slugs corresponding to version """ - all_resources = self._list_resources().json() + all_resources = self._list_resources() if version and self.use_version_postfix: # get all slugs with version postfix - return [r['slug'] + return [r.slug for r in all_resources - if r['slug'].endswith("v%s" % version)] + if r.slug.endswith("v%s" % version)] elif version and not self.use_version_postfix: # get all slugs that don't have version postfix - return [r['slug'] + return [r.slug for r in all_resources - if not r['slug'].endswith("v%s" % version)] + if not r.slug.endswith("v%s" % version)] else: # get all slugs - return [r['slug'] for r in all_resources] + return [r.slug for r in all_resources] def update_resource_slug(self, old_resource_slug, new_resource_slug): # slug is immutable from Transifex API v3 @@ -159,7 +194,7 @@ def get_translation(self, resource_slug, hq_lang_code, lock_resource): with open(temp_file.name, 'w', encoding='utf-8') as f: f.write(content.decode(encoding='utf-8')) if lock_resource: - self._lock_resource(resource_slug) + self._lock_resource(resource) return polib.pofile(temp_file.name) @staticmethod From 84215ea5accdbcef10e07d54a058f2c331e281e5 Mon Sep 17 00:00:00 2001 From: nospame Date: Mon, 1 Apr 2024 12:05:10 -0700 Subject: [PATCH 004/928] Update Transifex source language mapping --- .../integrations/transifex/const.py | 178 ++++++++++++++++-- 1 file changed, 159 insertions(+), 19 deletions(-) diff --git a/corehq/apps/translations/integrations/transifex/const.py b/corehq/apps/translations/integrations/transifex/const.py index f35d145c8c04..1a2329eba751 100644 --- a/corehq/apps/translations/integrations/transifex/const.py +++ b/corehq/apps/translations/integrations/transifex/const.py @@ -1,24 +1,164 @@ API_USER = "api" 
-SOURCE_LANGUAGE_MAPPING = { - # 'hq_code' : 'transifex_code' - 'hin': 'hi', # hindi - 'ori': 'or', # oriya - 'tam': 'ta', # tamil - 'pan': 'pa', # punjabi - 'asm': 'as', # assamese - 'ben': 'bn', # bengali - 'guj': 'gu', # gujarati - 'mal': 'ml', # malayalam - 'mar': 'mr', # marathi - 'snd': 'sd', # sindhi for test - 'mri': 'mi', # maori - 'khm': 'km', # khmer - 'lug': 'lg', # ganda - 'tel': 'te', # telugu - 'urd': 'ur', # urdu - 'kan': 'kn', # kannada -} TRANSIFEX_SLUG_PREFIX_MAPPING = { 'Menu': 'module', 'Form': 'form', } +SOURCE_LANGUAGE_MAPPING = { + # HQ uses 3-letter codes with some exceptions, but Transifex uses 2-letter codes whenever possible + # This maps HQ codes to their 2-letter equivalents where available in Transifex + # Other languages may be unsupported by Transifex or require custom mapping + # 'hq_code' : 'transifex_code' + 'abk': 'ab', # Abkhazian + 'aka': 'ak', # Akan + 'sqi': 'sq', # Albanian + 'amh': 'am', # Amharic + 'ara': 'ar', # Arabic + 'arg': 'an', # Aragonese + 'asm': 'as', # Assamese + 'aym': 'ay', # Aymara + 'aze': 'az', # Azerbaijani + 'bak': 'ba', # Bashkir + 'bam': 'bm', # Bambara + 'eus': 'eu', # Basque + 'bel': 'be', # Belarusian + 'ben': 'bn', # Bengali + 'bis': 'bi', # Bislama + 'bos': 'bs', # Bosnian + 'bre': 'br', # Breton + 'bul': 'bg', # Bulgarian + 'mya': 'my', # Burmese + 'cat': 'ca', # Catalan + 'cha': 'ch', # Chamorro + 'che': 'ce', # Chechen + 'zho': 'zh', # Chinese + 'chv': 'cv', # Chuvash + 'cor': 'kw', # Cornish + 'cos': 'co', # Corsican + 'ces': 'cs', # Czech + 'dan': 'da', # Danish + 'div': 'dv', # Divehi + 'nld': 'nl', # Dutch + 'dzo': 'dz', # Dzongkha + 'epo': 'eo', # Esperanto + 'est': 'et', # Estonian + 'ewe': 'ee', # Ewe + 'fao': 'fo', # Faroese + 'fin': 'fi', # Finnish + 'fra': 'fr', # French + 'fry': 'fy', # Western Frisian + 'ful': 'ff', # Fulah + 'kat': 'ka', # Georgian + 'deu': 'de', # German + 'gla': 'gd', # Gaelic + 'gle': 'ga', # Irish + 'glg': 'gl', # Galician + 'ell': 'el', # Greek, Modern (1453-) 
+ 'hat': 'ht', # Haitian
+ 'hau': 'ha', # Hausa
+ 'heb': 'he', # Hebrew
+ 'hin': 'hi', # Hindi
+ 'hrv': 'hr', # Croatian
+ 'hun': 'hu', # Hungarian
+ 'ibo': 'ig', # Igbo
+ 'isl': 'is', # Icelandic
+ 'ido': 'io', # Ido
+ 'iku': 'iu', # Inuktitut
+ 'ile': 'ie', # Interlingue
+ 'ina': 'ia', # Interlingua (International Auxiliary Language Association)
+ 'ind': 'id', # Indonesian
+ 'ita': 'it', # Italian
+ 'jav': 'jv', # Javanese
+ 'jpn': 'ja', # Japanese
+ 'kal': 'kl', # Kalaallisut
+ 'kan': 'kn', # Kannada
+ 'kas': 'ks', # Kashmiri
+ 'kaz': 'kk', # Kazakh
+ 'khm': 'km', # Central Khmer
+ 'kik': 'ki', # Kikuyu
+ 'kin': 'rw', # Kinyarwanda
+ 'kir': 'ky', # Kirghiz
+ 'kor': 'ko', # Korean
+ 'kur': 'ku', # Kurdish
+ 'lao': 'lo', # Lao
+ 'lat': 'la', # Latin
+ 'lav': 'lv', # Latvian
+ 'lim': 'li', # Limburgan
+ 'lin': 'ln', # Lingala
+ 'lit': 'lt', # Lithuanian
+ 'ltz': 'lb', # Luxembourgish
+ 'lug': 'lg', # Ganda
+ 'mkd': 'mk', # Macedonian
+ 'mah': 'mh', # Marshallese
+ 'mal': 'ml', # Malayalam
+ 'mri': 'mi', # Maori
+ 'mar': 'mr', # Marathi
+ 'msa': 'ms', # Malay
+ 'mlg': 'mg', # Malagasy
+ 'mlt': 'mt', # Maltese
+ 'mon': 'mn', # Mongolian
+ 'nav': 'nv', # Navajo
+ 'nbl': 'nr', # Ndebele, South
+ 'nde': 'nd', # Ndebele, North
+ 'nep': 'ne', # Nepali
+ 'nno': 'nn', # Norwegian Nynorsk
+ 'nob': 'nb', # Bokmål, Norwegian
+ 'nor': 'no', # Norwegian
+ 'nya': 'ny', # Chichewa
+ 'oci': 'oc', # Occitan (post 1500)
+ 'ori': 'or', # Oriya
+ 'orm': 'om', # Oromo
+ 'oss': 'os', # Ossetian
+ 'pan': 'pa', # Panjabi
+ 'fas': 'fa', # Persian
+ 'pol': 'pl', # Polish
+ 'por': 'pt', # Portuguese
+ 'pus': 'ps', # Pushto
+ 'que': 'qu', # Quechua
+ 'roh': 'rm', # Romansh
+ 'ron': 'ro', # Romanian
+ 'run': 'rn', # Rundi
+ 'rus': 'ru', # Russian
+ 'sag': 'sg', # Sango
+ 'san': 'sa', # Sanskrit
+ 'sin': 'si', # Sinhala
+ 'slk': 'sk', # Slovak
+ 'slv': 'sl', # Slovenian
+ 'sme': 'se', # Northern Sami
+ 'smo': 'sm', # Samoan
+ 'sna': 'sn', # Shona
+ 'snd': 'sd', # Sindhi
+ 'som': 'so', # Somali
+ 
'sot': 'st', # Sotho, Southern
+ 'srd': 'sc', # Sardinian
+ 'srp': 'sr', # Serbian
+ 'ssw': 'ss', # Swati
+ 'sun': 'su', # Sundanese
+ 'swe': 'sv', # Swedish
+ 'tam': 'ta', # Tamil
+ 'tat': 'tt', # Tatar
+ 'tel': 'te', # Telugu
+ 'tgk': 'tg', # Tajik
+ 'tgl': 'tl', # Tagalog
+ 'tha': 'th', # Thai
+ 'bod': 'bo', # Tibetan
+ 'tir': 'ti', # Tigrinya
+ 'ton': 'to', # Tonga (Tonga Islands)
+ 'tsn': 'tn', # Tswana
+ 'tso': 'ts', # Tsonga
+ 'tuk': 'tk', # Turkmen
+ 'tur': 'tr', # Turkish
+ 'uig': 'ug', # Uighur
+ 'ukr': 'uk', # Ukrainian
+ 'urd': 'ur', # Urdu
+ 'uzb': 'uz', # Uzbek
+ 'ven': 've', # Venda
+ 'vie': 'vi', # Vietnamese
+ 'vol': 'vo', # Volapük
+ 'cym': 'cy', # Welsh
+ 'wln': 'wa', # Walloon
+ 'wol': 'wo', # Wolof
+ 'xho': 'xh', # Xhosa
+ 'yid': 'yi', # Yiddish
+ 'yor': 'yo', # Yoruba
+ 'zul': 'zu', # Zulu
+} From 4d3ec6e083aa1711fad0f38d61a9204719d7fb9b Mon Sep 17 00:00:00 2001 From: nospame Date: Mon, 1 Apr 2024 12:33:11 -0700 Subject: [PATCH 005/928] Update Transifex client language conversion methods Tfx now uses language id in the format 'l:langcode' --- .../integrations/transifex/client.py | 32 ++++++++++++++----- .../transifex/project_migrator.py | 2 +- .../integrations/transifex/transifex.py | 2 +- 3 files changed, 26 insertions(+), 10 deletions(-) diff --git a/corehq/apps/translations/integrations/transifex/client.py b/corehq/apps/translations/integrations/transifex/client.py index d47cf99cb41d..7aa008db2086 100644 --- a/corehq/apps/translations/integrations/transifex/client.py +++ b/corehq/apps/translations/integrations/transifex/client.py @@ -27,6 +27,14 @@ def __init__(self, token, organization, project, use_version_postfix=True): def _i18n_format(self): return self.api.I18nFormat(id="PO") + @property + def source_language_id(self): + return self.project.source_language.id + + @property + def source_lang_code(self): + return self._to_lang_code(self.source_language_id) + def _create_resource(self, resource_slug, resource_name): return 
self.api.Resource.create( name=resource_name, @@ -143,10 +151,10 @@ def upload_translation(self, path_to_pofile, resource_slug, resource_name, hq_la :param resource_name: resource name, mostly same as resource slug itself :param hq_lang_code: lang code on hq """ - target_lang_code = self.transifex_lang_code(hq_lang_code) + language_id = self._to_language_id(self.transifex_lang_code(hq_lang_code)) content = open(path_to_pofile, 'r', encoding="utf-8").read() resource = self._get_resource(resource_slug) - self._upload_resource_translations(content, resource.id, target_lang_code) + self._upload_resource_translations(content, resource.id, language_id) def project_details(self): return self.project @@ -169,8 +177,8 @@ def completed(details): return not bool(details.get('untranslated_words')) if hq_lang_code: - lang = self.transifex_lang_code(hq_lang_code) - return completed(self._resource_details(resource_slug).get(lang, {})) + language_id = self._to_language_id(self.transifex_lang_code(hq_lang_code)) + return completed(self._resource_details(resource_slug).get(language_id, {})) else: for lang, detail in self._resource_details(resource_slug).items(): if not completed(detail): @@ -188,8 +196,8 @@ def get_translation(self, resource_slug, hq_lang_code, lock_resource): :return: list of POEntry objects """ resource = self._get_resource(resource_slug) - lang = self.transifex_lang_code(hq_lang_code) - content = self._download_resource_translations(resource.id, lang) + language_id = self._to_language_id(self.transifex_lang_code(hq_lang_code)) + content = self._download_resource_translations(resource.id, language_id) temp_file = tempfile.NamedTemporaryFile() with open(temp_file.name, 'w', encoding='utf-8') as f: f.write(content.decode(encoding='utf-8')) @@ -210,13 +218,21 @@ def source_lang_is(self, hq_lang_code): """ confirm is source lang on transifex is same as hq lang code """ - return self.transifex_lang_code(hq_lang_code) == self.get_source_lang() + return 
self.transifex_lang_code(hq_lang_code) == self.source_lang_code def get_source_lang(self): """ :return: source lang code on transifex """ - return self.project_details().json().get('source_language_code') + return self._to_lang_code(self.source_language_id) + + @staticmethod + def _to_language_id(lang_code): + return f"l:{lang_code}" + + @staticmethod + def _to_lang_code(language_id): + return language_id.replace("l:", "") def move_resources(self, hq_lang_code, target_project, version=None, use_version_postfix=True): # not exposed to UI diff --git a/corehq/apps/translations/integrations/transifex/project_migrator.py b/corehq/apps/translations/integrations/transifex/project_migrator.py index 431383ca5181..dcfbce7df742 100644 --- a/corehq/apps/translations/integrations/transifex/project_migrator.py +++ b/corehq/apps/translations/integrations/transifex/project_migrator.py @@ -144,7 +144,7 @@ def _target_app(self): @cached_property def get_project_source_lang(self): - return self.client.project_details().json()['source_language_code'] + return self.client.source_lang_code @cached_property def source_app_default_lang(self): diff --git a/corehq/apps/translations/integrations/transifex/transifex.py b/corehq/apps/translations/integrations/transifex/transifex.py index d657ab4a645c..c5f6ba20d699 100644 --- a/corehq/apps/translations/integrations/transifex/transifex.py +++ b/corehq/apps/translations/integrations/transifex/transifex.py @@ -82,7 +82,7 @@ def client(self): @cached_property def transifex_project_source_lang(self): - return self.client.transifex_lang_code(self.client.get_source_lang()) + return self.client.transifex_lang_code(self.client.source_lang_code) def _resource_name_in_project_lang(self, resource_slug, app_trans_generator): """ From 549ccb81efef94ddbd28c702ff3a5537747f6c89 Mon Sep 17 00:00:00 2001 From: nospame Date: Mon, 1 Apr 2024 12:46:01 -0700 Subject: [PATCH 006/928] Update Transifex client language info methods --- 
.../integrations/transifex/client.py | 32 ++++++++++++++----- .../integrations/transifex/transifex.py | 2 +- corehq/apps/translations/tasks.py | 2 +- 3 files changed, 26 insertions(+), 10 deletions(-) diff --git a/corehq/apps/translations/integrations/transifex/client.py b/corehq/apps/translations/integrations/transifex/client.py index 7aa008db2086..47af5bb54ce7 100644 --- a/corehq/apps/translations/integrations/transifex/client.py +++ b/corehq/apps/translations/integrations/transifex/client.py @@ -72,6 +72,10 @@ def _get_resource(self, resource_slug): cls = self.api.Resource return self._get_object(cls, slug=resource_slug, project=self.project) + def _get_language_stats(self, resource_id, language_id): + cls = self.api.ResourceLanguageStats + return self._get_object(cls, id=language_id, resource=resource_id) + @staticmethod def _list_objects(cls, **kwargs): return cls.filter(**kwargs) @@ -80,6 +84,13 @@ def _list_resources(self): cls = self.api.Resource return self._list_objects(cls, project=self.project) + def _list_language_stats(self, **kwargs): + cls = self.api.ResourceLanguageStats + return self._list_objects(cls, project=self.project, **kwargs) + + def _fetch_related(self, obj, key): + return obj.fetch(key) + @staticmethod def _download_content(cls, **kwargs): download = cls.download(**kwargs) @@ -168,22 +179,22 @@ def _resource_details(self, resource_slug): """ return self._get_resource(resource_slug) - def translation_completed(self, resource_slug, hq_lang_code=None): + def is_translation_completed(self, resource_slug, hq_lang_code=None): """ check if a resource has been completely translated for all langs or a specific target lang """ - def completed(details): - return not bool(details.get('untranslated_words')) + def completed(stats): + return not bool(stats.untranslated_words) + resource = self._get_resource(resource_slug) if hq_lang_code: language_id = self._to_language_id(self.transifex_lang_code(hq_lang_code)) - return 
completed(self._resource_details(resource_slug).get(language_id, {})) + language_stats = self._get_language_stats(resource.id, language_id) + return completed(language_stats) else: - for lang, detail in self._resource_details(resource_slug).items(): - if not completed(detail): - return False - return True + language_stats_list = self._list_language_stats(resource_id=resource.id) + return all(completed(stats) for stats in language_stats_list) def get_translation(self, resource_slug, hq_lang_code, lock_resource): """ @@ -214,6 +225,11 @@ def transifex_lang_code(hq_lang_code): """ return SOURCE_LANGUAGE_MAPPING.get(hq_lang_code, hq_lang_code) + def get_project_langcodes(self): + languages = self._fetch_related(self.project, 'languages') + languages.append(self.project.source_language) + return [self._to_lang_code(language.id) for language in languages] + def source_lang_is(self, hq_lang_code): """ confirm is source lang on transifex is same as hq lang code diff --git a/corehq/apps/translations/integrations/transifex/transifex.py b/corehq/apps/translations/integrations/transifex/transifex.py index c5f6ba20d699..5c22da190d10 100644 --- a/corehq/apps/translations/integrations/transifex/transifex.py +++ b/corehq/apps/translations/integrations/transifex/transifex.py @@ -158,7 +158,7 @@ def resources_pending_translations(self, all_langs=False): """ check_for_lang = None if all_langs else self.source_lang for resource_slug in self.resource_slugs: - if not self.client.translation_completed(resource_slug, check_for_lang): + if not self.client.is_translation_completed(resource_slug, check_for_lang): return resource_slug def generate_excel_file(self): diff --git a/corehq/apps/translations/tasks.py b/corehq/apps/translations/tasks.py index 464c49da3969..6e0aed396cca 100644 --- a/corehq/apps/translations/tasks.py +++ b/corehq/apps/translations/tasks.py @@ -141,7 +141,7 @@ def backup_project_from_transifex(domain, data, email): version, use_version_postfix='yes' in 
data['use_version_postfix']) project_details = transifex.client.project_details().json() - target_lang_codes = project_details.get('teams') + target_lang_codes = transifex.client.get_project_langcodes() with NamedTemporaryFile(mode='w+b', suffix='.zip') as tmp: with ZipFile(tmp, 'w') as zipfile: for target_lang in target_lang_codes: From 1c96aac43ed8605af3b4354671677cf8b7c82600 Mon Sep 17 00:00:00 2001 From: nospame Date: Mon, 1 Apr 2024 13:14:32 -0700 Subject: [PATCH 007/928] Refactor: small Transifex client changes for clarity * Remove unused resource_name param * project_details => project_name * 'pofile' => 'po_file' * Read po files with context manager --- .../integrations/transifex/client.py | 29 ++++++++++--------- .../transifex/project_migrator.py | 6 ++-- .../integrations/transifex/transifex.py | 5 ++-- corehq/apps/translations/tasks.py | 4 +-- 4 files changed, 23 insertions(+), 21 deletions(-) diff --git a/corehq/apps/translations/integrations/transifex/client.py b/corehq/apps/translations/integrations/transifex/client.py index 47af5bb54ce7..2c3bc635b5cc 100644 --- a/corehq/apps/translations/integrations/transifex/client.py +++ b/corehq/apps/translations/integrations/transifex/client.py @@ -27,6 +27,10 @@ def __init__(self, token, organization, project, use_version_postfix=True): def _i18n_format(self): return self.api.I18nFormat(id="PO") + @property + def project_name(self): + return self.project.name + @property def source_language_id(self): return self.project.source_language.id @@ -134,42 +138,41 @@ def update_resource_slug(self, old_resource_slug, new_resource_slug): # slug is immutable from Transifex API v3 pass - def upload_resource(self, path_to_pofile, resource_slug, resource_name, update_resource): + def upload_resource(self, path_to_po_file, resource_slug, resource_name, update_resource): """ Upload source language file - :param path_to_pofile: path to pofile + :param path_to_po_file: path to po file :param resource_slug: resource slug :param 
resource_name: resource name, mostly same as resource slug itself :param update_resource: update resource """ - content = open(path_to_pofile, 'r', encoding="utf-8").read() - if resource_name is None: - __, filename = os.path.split(path_to_pofile) - resource_name = filename if update_resource: resource = self._get_resource(resource_slug) else: + # must create the new resource first + if resource_name is None: + __, filename = os.path.split(path_to_po_file) + resource_name = filename resource = self._create_resource(resource_slug, resource_name) + with open(path_to_po_file, 'r', encoding="utf-8") as po_file: + content = po_file.read() self._upload_resource_strings(content, resource.id) - def upload_translation(self, path_to_pofile, resource_slug, resource_name, hq_lang_code): + def upload_translation(self, path_to_po_file, resource_slug, hq_lang_code): """ Upload translated files - :param path_to_pofile: path to pofile + :param path_to_po_file: path to po file :param resource_slug: resource slug - :param resource_name: resource name, mostly same as resource slug itself :param hq_lang_code: lang code on hq """ language_id = self._to_language_id(self.transifex_lang_code(hq_lang_code)) - content = open(path_to_pofile, 'r', encoding="utf-8").read() resource = self._get_resource(resource_slug) + with open(path_to_po_file, 'r', encoding="utf-8") as po_file: + content = po_file.read() self._upload_resource_translations(content, resource.id, language_id) - def project_details(self): - return self.project - @memoized def _resource_details(self, resource_slug): """ diff --git a/corehq/apps/translations/integrations/transifex/project_migrator.py b/corehq/apps/translations/integrations/transifex/project_migrator.py index dcfbce7df742..8739c7e9fe4f 100644 --- a/corehq/apps/translations/integrations/transifex/project_migrator.py +++ b/corehq/apps/translations/integrations/transifex/project_migrator.py @@ -118,11 +118,9 @@ def _upload_translation(self, translations, lang_code): 
po.save(temp_file.name) temp_file.seek(0) if lang_code == self.target_app_default_lang: - return self.client.upload_resource(temp_file.name, "Menus_and_forms", "Menus_and_forms", - update_resource=True) + self.client.upload_resource(temp_file.name, "Menus_and_forms", "Menus_and_forms", True) else: - return self.client.upload_translation(temp_file.name, "Menus_and_forms", "Menus_and_forms", - lang_code) + self.client.upload_translation(temp_file.name, "Menus_and_forms", lang_code) def get_metadata(self): now = str(datetime.datetime.now()) diff --git a/corehq/apps/translations/integrations/transifex/transifex.py b/corehq/apps/translations/integrations/transifex/transifex.py index 5c22da190d10..1b39017bf21c 100644 --- a/corehq/apps/translations/integrations/transifex/transifex.py +++ b/corehq/apps/translations/integrations/transifex/transifex.py @@ -110,8 +110,9 @@ def _send_files_to_transifex(self, generated_files, app_trans_generator): ) else: response = self.client.upload_translation( - path_to_file, resource_slug, - resource_name, self.source_lang + path_to_file, + resource_slug, + self.source_lang ) if response.status_code in [200, 201]: file_uploads[resource_name] = _("Successfully Uploaded") diff --git a/corehq/apps/translations/tasks.py b/corehq/apps/translations/tasks.py index 6e0aed396cca..5c2ea3b7a50f 100644 --- a/corehq/apps/translations/tasks.py +++ b/corehq/apps/translations/tasks.py @@ -140,7 +140,7 @@ def backup_project_from_transifex(domain, data, email): data.get('transifex_project_slug'), version, use_version_postfix='yes' in data['use_version_postfix']) - project_details = transifex.client.project_details().json() + project_name = transifex.client.project_name target_lang_codes = transifex.client.get_project_langcodes() with NamedTemporaryFile(mode='w+b', suffix='.zip') as tmp: with ZipFile(tmp, 'w') as zipfile: @@ -160,7 +160,7 @@ def backup_project_from_transifex(domain, data, email): subject='[{}] - Transifex backup 
translations'.format(settings.SERVER_ENVIRONMENT), body="PFA Translations backup from transifex.", recipient_list=[email], - filename="%s-TransifexBackup.zip" % project_details.get('name'), + filename="%s-TransifexBackup.zip" % project_name, content=tmp.read(), domain=domain, use_domain_gateway=True, From 0568cd149e26ad6d361d39ac88f3b9079fc40b3b Mon Sep 17 00:00:00 2001 From: nospame Date: Mon, 1 Apr 2024 13:27:47 -0700 Subject: [PATCH 008/928] Wrap Transifex API exceptions and update handling --- .../integrations/transifex/client.py | 42 +++++++++++++------ .../integrations/transifex/exceptions.py | 4 +- .../transifex/project_migrator.py | 24 +++++++---- .../integrations/transifex/transifex.py | 41 +++++++++--------- .../integrations/transifex/views.py | 8 ++-- corehq/apps/translations/tasks.py | 16 ++++--- 6 files changed, 76 insertions(+), 59 deletions(-) diff --git a/corehq/apps/translations/integrations/transifex/client.py b/corehq/apps/translations/integrations/transifex/client.py index 2c3bc635b5cc..f9841c3ac0e9 100644 --- a/corehq/apps/translations/integrations/transifex/client.py +++ b/corehq/apps/translations/integrations/transifex/client.py @@ -5,15 +5,15 @@ import polib import requests from transifex.api import TransifexApi +from transifex.api.exceptions import DownloadException +from transifex.api.jsonapi.exceptions import DoesNotExist, JsonApiException from memoized import memoized from corehq.apps.translations.integrations.transifex.const import ( API_USER, SOURCE_LANGUAGE_MAPPING, ) -from corehq.apps.translations.integrations.transifex.exceptions import ( - ResourceMissing, -) +from corehq.apps.translations.integrations.transifex.exceptions import TransifexApiException class TransifexApiClient(object): @@ -40,17 +40,24 @@ def source_lang_code(self): return self._to_lang_code(self.source_language_id) def _create_resource(self, resource_slug, resource_name): - return self.api.Resource.create( - name=resource_name, - slug=resource_slug, - 
project=self.project, - i18n_format=self._i18n_format - ) + try: + return self.api.Resource.create( + name=resource_name, + slug=resource_slug, + project=self.project, + i18n_format=self._i18n_format + ) + except JsonApiException as e: + raise TransifexApiException(e) @staticmethod def _upload_content(cls, content, **kwargs): # TransifexApi.upload() waits for async upload which we don't need, so create the upload manually - cls.create_with_form(data=kwargs, files={"content": content}) + upload = cls.create_with_form(data=kwargs, files={"content": content}) + + # mirror TransifexApi error handling + if hasattr(upload, "errors") and len(upload.errors) > 0: + raise TransifexApiException(upload.errors[0]["detail"], upload.errors) def _upload_resource_strings(self, content, resource_id): cls = self.api.ResourceStringsAsyncUpload @@ -62,7 +69,10 @@ def _upload_resource_translations(self, content, resource_id, language_id): @staticmethod def _get_object(cls, **kwargs): - return cls.get(**kwargs) + try: + return cls.get(**kwargs) + except (DoesNotExist, JsonApiException) as e: + raise TransifexApiException(e) def _get_organization(self, organization_slug): cls = self.api.Organization @@ -82,7 +92,10 @@ def _get_language_stats(self, resource_id, language_id): @staticmethod def _list_objects(cls, **kwargs): - return cls.filter(**kwargs) + try: + return cls.filter(**kwargs) + except JsonApiException as e: + raise TransifexApiException(e) def _list_resources(self): cls = self.api.Resource @@ -97,7 +110,10 @@ def _fetch_related(self, obj, key): @staticmethod def _download_content(cls, **kwargs): - download = cls.download(**kwargs) + try: + download = cls.download(**kwargs) + except (DownloadException, JsonApiException) as e: + raise TransifexApiException(e) response = requests.get(download, stream=True) return response.content diff --git a/corehq/apps/translations/integrations/transifex/exceptions.py b/corehq/apps/translations/integrations/transifex/exceptions.py index 
4e50f7d8a6f7..4f715ff040e0 100644 --- a/corehq/apps/translations/integrations/transifex/exceptions.py +++ b/corehq/apps/translations/integrations/transifex/exceptions.py @@ -1,6 +1,4 @@ - - -class ResourceMissing(Exception): +class TransifexApiException(Exception): pass diff --git a/corehq/apps/translations/integrations/transifex/project_migrator.py b/corehq/apps/translations/integrations/transifex/project_migrator.py index 8739c7e9fe4f..b3e24eaa2f58 100644 --- a/corehq/apps/translations/integrations/transifex/project_migrator.py +++ b/corehq/apps/translations/integrations/transifex/project_migrator.py @@ -19,7 +19,7 @@ ) from corehq.apps.translations.integrations.transifex.exceptions import ( InvalidProjectMigration, - ResourceMissing, + TransifexApiException, ) from corehq.apps.translations.models import TransifexProject @@ -46,20 +46,23 @@ def validate(self): ProjectMigrationValidator(self).validate() def migrate(self): - slug_update_responses = self._update_slugs() - menus_and_forms_sheet_update_responses = self._update_menus_and_forms_sheet() - return slug_update_responses, menus_and_forms_sheet_update_responses + slug_update_errors = self._update_slugs() + menus_and_forms_sheet_update_errors = self._update_menus_and_forms_sheet() + return slug_update_errors, menus_and_forms_sheet_update_errors def _update_slugs(self): - responses = {} + errors = {} for resource_type, old_id, new_id in self.resource_ids_mapping: slug_prefix = self._get_slug_prefix(resource_type) if not slug_prefix: continue resource_slug = "%s_%s" % (slug_prefix, old_id) new_resource_slug = "%s_%s" % (slug_prefix, new_id) - responses[old_id] = self.client.update_resource_slug(resource_slug, new_resource_slug) - return responses + try: + self.client.update_resource_slug(resource_slug, new_resource_slug) + except TransifexApiException as e: + errors[old_id] = e + return errors @memoized def _get_slug_prefix(self, resource_type): @@ -71,7 +74,7 @@ def _update_menus_and_forms_sheet(self): for lang 
in langs: try: translations[lang] = self.client.get_translation("Menus_and_forms", lang, lock_resource=False) - except ResourceMissing: + except TransifexApiException: # Probably a lang in app not present on Transifex, so skip pass self._update_context(translations) @@ -106,7 +109,10 @@ def _upload_new_translations(self, translations): # HQ keeps the default lang on top and hence it should be the first one here assert list(translations.keys())[0] == self.target_app_default_lang for lang_code in translations: - responses[lang_code] = self._upload_translation(translations[lang_code], lang_code) + try: + self._upload_translation(translations[lang_code], lang_code) + except TransifexApiException as e: + responses[lang_code] = e return responses def _upload_translation(self, translations, lang_code): diff --git a/corehq/apps/translations/integrations/transifex/transifex.py b/corehq/apps/translations/integrations/transifex/transifex.py index 1b39017bf21c..8dbba9874c68 100644 --- a/corehq/apps/translations/integrations/transifex/transifex.py +++ b/corehq/apps/translations/integrations/transifex/transifex.py @@ -10,6 +10,7 @@ from corehq.apps.translations.integrations.transifex.client import ( TransifexApiClient, ) +from corehq.apps.translations.integrations.transifex.exceptions import TransifexApiException from corehq.apps.translations.integrations.transifex.parser import ( TranslationsParser, ) @@ -101,23 +102,23 @@ def _send_files_to_transifex(self, generated_files, app_trans_generator): file_uploads = {} for resource_slug, path_to_file in generated_files: resource_name = self._resource_name_in_project_lang(resource_slug, app_trans_generator) - if self.is_source_file: - response = self.client.upload_resource( - path_to_file, - resource_slug, - resource_name, - self.update_resource - ) - else: - response = self.client.upload_translation( - path_to_file, - resource_slug, - self.source_lang - ) - if response.status_code in [200, 201]: + try: + if self.is_source_file: + 
self.client.upload_resource( + path_to_file, + resource_slug, + resource_name, + self.update_resource + ) + else: + self.client.upload_translation( + path_to_file, + resource_slug, + self.source_lang + ) file_uploads[resource_name] = _("Successfully Uploaded") - else: - file_uploads[resource_name] = "{}: {}".format(response.status_code, response.content) + except TransifexApiException as e: + file_uploads[resource_name] = "Upload Error: {}".format(e) return file_uploads @cached_property @@ -175,9 +176,9 @@ def source_lang_is(self, hq_lang_code): def delete_resources(self): delete_status = {} for resource_slug in self.resource_slugs: - response = self.client.delete_resource(resource_slug) - if response.status_code == 204: + try: + self.client.delete_resource(resource_slug) delete_status[resource_slug] = _("Successfully Removed") - else: - delete_status[resource_slug] = response.content + except TransifexApiException: + delete_status[resource_slug] = "Resource {} not found".format(resource_slug) return delete_status diff --git a/corehq/apps/translations/integrations/transifex/views.py b/corehq/apps/translations/integrations/transifex/views.py index fd11fd324122..4ed5221210a5 100644 --- a/corehq/apps/translations/integrations/transifex/views.py +++ b/corehq/apps/translations/integrations/transifex/views.py @@ -26,9 +26,7 @@ PullResourceForm, ) from corehq.apps.translations.generators import PoFileGenerator, Translation -from corehq.apps.translations.integrations.transifex.exceptions import ( - ResourceMissing, -) +from corehq.apps.translations.integrations.transifex.exceptions import TransifexApiException from corehq.apps.translations.integrations.transifex.transifex import Transifex from corehq.apps.translations.integrations.transifex.utils import ( transifex_details_available_for_domain, @@ -302,7 +300,7 @@ def post(self, request, *args, **kwargs): if self.pull_resource_form.is_valid(): try: return self._pull_resource(request) - except ResourceMissing: + except 
TransifexApiException: messages.add_message(request, messages.ERROR, 'Resource not found') return self.get(request, *args, **kwargs) @@ -463,7 +461,7 @@ def post(self, request, *args, **kwargs): try: if self.perform_request(request, form_data): return redirect(self.urlname, domain=self.domain) - except ResourceMissing as e: + except TransifexApiException as e: messages.error(request, e) return self.get(request, *args, **kwargs) diff --git a/corehq/apps/translations/tasks.py b/corehq/apps/translations/tasks.py index 5c2ea3b7a50f..62dc5d52ac6e 100644 --- a/corehq/apps/translations/tasks.py +++ b/corehq/apps/translations/tasks.py @@ -201,14 +201,12 @@ def email_project_from_hq(domain, data, email): def migrate_project_on_transifex(domain, transifex_project_slug, source_app_id, target_app_id, mappings, email): def consolidate_errors_messages(): error_messages = [] - for old_id, response in slug_update_responses.items(): - if response.status_code != 200: - error_messages.append("Slug update failed for %s with message %s" % (old_id, response.content)) - for lang_code, response in menus_and_forms_sheet_update_responses.items(): - if response.status_code != 200: - error_messages.append( - "Menus and forms sheet update failed for lang %s with message %s" % ( - lang_code, response.content)) + for old_id, response in slug_update_errors.items(): + error_messages.append("Slug update failed for %s with message %s" % (old_id, response)) + for lang_code, response in menus_and_forms_sheet_update_errors.items(): + error_messages.append( + "Menus and forms sheet update failed for lang %s with message %s" % ( + lang_code, response)) return error_messages def generate_email_body(): @@ -220,7 +218,7 @@ def generate_email_body(): email_body += error_message + "\n" return email_body - slug_update_responses, menus_and_forms_sheet_update_responses = ProjectMigrator( + slug_update_errors, menus_and_forms_sheet_update_errors = ProjectMigrator( domain, transifex_project_slug, source_app_id, 
target_app_id, From 67d279dbffa7efcaa5da47bd4a1124f3d0b36e4c Mon Sep 17 00:00:00 2001 From: nospame Date: Mon, 1 Apr 2024 13:34:06 -0700 Subject: [PATCH 009/928] Remove unused code --- .../integrations/transifex/client.py | 29 +------------------ .../integrations/transifex/const.py | 1 - 2 files changed, 1 insertion(+), 29 deletions(-) diff --git a/corehq/apps/translations/integrations/transifex/client.py b/corehq/apps/translations/integrations/transifex/client.py index f9841c3ac0e9..7abf8f5fecbb 100644 --- a/corehq/apps/translations/integrations/transifex/client.py +++ b/corehq/apps/translations/integrations/transifex/client.py @@ -1,4 +1,3 @@ -import json import os import tempfile @@ -7,12 +6,8 @@ from transifex.api import TransifexApi from transifex.api.exceptions import DownloadException from transifex.api.jsonapi.exceptions import DoesNotExist, JsonApiException -from memoized import memoized -from corehq.apps.translations.integrations.transifex.const import ( - API_USER, - SOURCE_LANGUAGE_MAPPING, -) +from corehq.apps.translations.integrations.transifex.const import SOURCE_LANGUAGE_MAPPING from corehq.apps.translations.integrations.transifex.exceptions import TransifexApiException @@ -128,9 +123,6 @@ def delete_resource(self, resource_slug): resource = self._get_resource(resource_slug) resource.delete() - def list_resources(self): - return self._list_resources() - def get_resource_slugs(self, version): """ :return: list of resource slugs corresponding to version @@ -189,15 +181,6 @@ def upload_translation(self, path_to_po_file, resource_slug, hq_lang_code): content = po_file.read() self._upload_resource_translations(content, resource.id, language_id) - @memoized - def _resource_details(self, resource_slug): - """ - get details for a resource corresponding to a lang - - :param resource_slug: resource slug - """ - return self._get_resource(resource_slug) - def is_translation_completed(self, resource_slug, hq_lang_code=None): """ check if a resource has been 
completely translated for @@ -255,12 +238,6 @@ def source_lang_is(self, hq_lang_code): """ return self.transifex_lang_code(hq_lang_code) == self.source_lang_code - def get_source_lang(self): - """ - :return: source lang code on transifex - """ - return self._to_lang_code(self.source_language_id) - @staticmethod def _to_language_id(lang_code): return f"l:{lang_code}" @@ -268,7 +245,3 @@ def _to_language_id(lang_code): @staticmethod def _to_lang_code(language_id): return language_id.replace("l:", "") - - def move_resources(self, hq_lang_code, target_project, version=None, use_version_postfix=True): - # not exposed to UI - pass diff --git a/corehq/apps/translations/integrations/transifex/const.py b/corehq/apps/translations/integrations/transifex/const.py index 1a2329eba751..d7b91fc13f13 100644 --- a/corehq/apps/translations/integrations/transifex/const.py +++ b/corehq/apps/translations/integrations/transifex/const.py @@ -1,4 +1,3 @@ -API_USER = "api" TRANSIFEX_SLUG_PREFIX_MAPPING = { 'Menu': 'module', 'Form': 'form', From a1d75acf1cbb40e1fecc9cf6c3f2d59005a61658 Mon Sep 17 00:00:00 2001 From: nospame Date: Mon, 1 Apr 2024 13:39:16 -0700 Subject: [PATCH 010/928] Hide Transifex project migrator from UI --- corehq/apps/translations/integrations/transifex/client.py | 4 +++- .../translations/integrations/transifex/project_migrator.py | 3 +++ corehq/tabs/tabclasses.py | 4 ---- 3 files changed, 6 insertions(+), 5 deletions(-) diff --git a/corehq/apps/translations/integrations/transifex/client.py b/corehq/apps/translations/integrations/transifex/client.py index 7abf8f5fecbb..857e18e424d2 100644 --- a/corehq/apps/translations/integrations/transifex/client.py +++ b/corehq/apps/translations/integrations/transifex/client.py @@ -143,7 +143,9 @@ def get_resource_slugs(self, version): return [r.slug for r in all_resources] def update_resource_slug(self, old_resource_slug, new_resource_slug): - # slug is immutable from Transifex API v3 + # TODO: rework ProjectMigrator to be 
compatible with Transifex API v3, or remove this functionality + # v3 makes 'slug' an immutable attribute + # meaning we can no longer simply change the path to a resource while retaining its history pass def upload_resource(self, path_to_po_file, resource_slug, resource_name, update_resource): diff --git a/corehq/apps/translations/integrations/transifex/project_migrator.py b/corehq/apps/translations/integrations/transifex/project_migrator.py index b3e24eaa2f58..b725cb3b9cad 100644 --- a/corehq/apps/translations/integrations/transifex/project_migrator.py +++ b/corehq/apps/translations/integrations/transifex/project_migrator.py @@ -25,6 +25,9 @@ class ProjectMigrator(object): + # TODO: rework ProjectMigrator to be compatible with Transifex API v3, or remove this functionality + # v3 makes 'slug' an immutable attribute + # meaning we can no longer simply change the path to a resource while retaining its history def __init__(self, domain, project_slug, source_app_id, target_app_id, resource_ids_mapping): """ Migrate a transifex project from one app to another by diff --git a/corehq/tabs/tabclasses.py b/corehq/tabs/tabclasses.py index 3d5bef07b44e..308f92ed783e 100644 --- a/corehq/tabs/tabclasses.py +++ b/corehq/tabs/tabclasses.py @@ -1854,10 +1854,6 @@ def sidebar_items(self): 'url': reverse('download_translations', args=[self.domain]), 'title': _('Download Translations') }, - { - 'url': reverse('migrate_transifex_project', args=[self.domain]), - 'title': _('Migrate Project') - }, ])) return items From 56ec06b53caf2334e8ccc290a8d0ca6755fc5b07 Mon Sep 17 00:00:00 2001 From: nospame Date: Mon, 1 Apr 2024 14:22:25 -0700 Subject: [PATCH 011/928] Reorder methods in Transifex client --- .../integrations/transifex/client.py | 82 +++++++++---------- 1 file changed, 41 insertions(+), 41 deletions(-) diff --git a/corehq/apps/translations/integrations/transifex/client.py b/corehq/apps/translations/integrations/transifex/client.py index 857e18e424d2..d50ccdd05db3 100644 --- 
a/corehq/apps/translations/integrations/transifex/client.py +++ b/corehq/apps/translations/integrations/transifex/client.py @@ -123,25 +123,6 @@ def delete_resource(self, resource_slug): resource = self._get_resource(resource_slug) resource.delete() - def get_resource_slugs(self, version): - """ - :return: list of resource slugs corresponding to version - """ - all_resources = self._list_resources() - if version and self.use_version_postfix: - # get all slugs with version postfix - return [r.slug - for r in all_resources - if r.slug.endswith("v%s" % version)] - elif version and not self.use_version_postfix: - # get all slugs that don't have version postfix - return [r.slug - for r in all_resources - if not r.slug.endswith("v%s" % version)] - else: - # get all slugs - return [r.slug for r in all_resources] - def update_resource_slug(self, old_resource_slug, new_resource_slug): # TODO: rework ProjectMigrator to be compatible with Transifex API v3, or remove this functionality # v3 makes 'slug' an immutable attribute @@ -183,22 +164,24 @@ def upload_translation(self, path_to_po_file, resource_slug, hq_lang_code): content = po_file.read() self._upload_resource_translations(content, resource.id, language_id) - def is_translation_completed(self, resource_slug, hq_lang_code=None): + def get_resource_slugs(self, version): """ - check if a resource has been completely translated for - all langs or a specific target lang + :return: list of resource slugs corresponding to version """ - def completed(stats): - return not bool(stats.untranslated_words) - - resource = self._get_resource(resource_slug) - if hq_lang_code: - language_id = self._to_language_id(self.transifex_lang_code(hq_lang_code)) - language_stats = self._get_language_stats(resource.id, language_id) - return completed(language_stats) + all_resources = self._list_resources() + if version and self.use_version_postfix: + # get all slugs with version postfix + return [r.slug + for r in all_resources + if 
r.slug.endswith("v%s" % version)] + elif version and not self.use_version_postfix: + # get all slugs that don't have version postfix + return [r.slug + for r in all_resources + if not r.slug.endswith("v%s" % version)] else: - language_stats_list = self._list_language_stats(resource_id=resource.id) - return all(completed(stats) for stats in language_stats_list) + # get all slugs + return [r.slug for r in all_resources] def get_translation(self, resource_slug, hq_lang_code, lock_resource): """ @@ -220,15 +203,6 @@ def get_translation(self, resource_slug, hq_lang_code, lock_resource): self._lock_resource(resource) return polib.pofile(temp_file.name) - @staticmethod - def transifex_lang_code(hq_lang_code): - """ - Single place to convert lang codes from HQ to transifex lang code - - :param hq_lang_code: lang code on HQ - """ - return SOURCE_LANGUAGE_MAPPING.get(hq_lang_code, hq_lang_code) - def get_project_langcodes(self): languages = self._fetch_related(self.project, 'languages') languages.append(self.project.source_language) @@ -240,6 +214,32 @@ def source_lang_is(self, hq_lang_code): """ return self.transifex_lang_code(hq_lang_code) == self.source_lang_code + def is_translation_completed(self, resource_slug, hq_lang_code=None): + """ + check if a resource has been completely translated for + all langs or a specific target lang + """ + def completed(stats): + return not bool(stats.untranslated_words) + + resource = self._get_resource(resource_slug) + if hq_lang_code: + language_id = self._to_language_id(self.transifex_lang_code(hq_lang_code)) + language_stats = self._get_language_stats(resource.id, language_id) + return completed(language_stats) + else: + language_stats_list = self._list_language_stats(resource_id=resource.id) + return all(completed(stats) for stats in language_stats_list) + + @staticmethod + def transifex_lang_code(hq_lang_code): + """ + Single place to convert lang codes from HQ to transifex lang code + + :param hq_lang_code: lang code on HQ + """ 
+ return SOURCE_LANGUAGE_MAPPING.get(hq_lang_code, hq_lang_code) + @staticmethod def _to_language_id(lang_code): return f"l:{lang_code}" From 6aa609ded2f6d0b437caadf4aa15f986d9bb43c9 Mon Sep 17 00:00:00 2001 From: nospame Date: Mon, 1 Apr 2024 14:46:28 -0700 Subject: [PATCH 012/928] Set up Transifex tests with requests_mock and stub functions --- .../tests/test_transifex_integration.py | 67 +++++++++++++++++++ 1 file changed, 67 insertions(+) create mode 100644 corehq/apps/translations/tests/test_transifex_integration.py diff --git a/corehq/apps/translations/tests/test_transifex_integration.py b/corehq/apps/translations/tests/test_transifex_integration.py new file mode 100644 index 000000000000..3fee36eb9b23 --- /dev/null +++ b/corehq/apps/translations/tests/test_transifex_integration.py @@ -0,0 +1,67 @@ +from django.test import SimpleTestCase + +import requests_mock + +from corehq.apps.translations.integrations.transifex.client import TransifexApiClient +from corehq.util.test_utils import TestFileMixin + +TOKEN = "1234" +ORGANIZATION_SLUG = "test-organization" +PROJECT_SLUG = "test-project" +RESOURCE_SLUG = "test-resource" +RESOURCE_NAME = "Test Resource" + +DATA_PATH = 'corehq/apps/translations/tests/data/transifex/' + + +class TestTransifexApiClient(TestFileMixin, SimpleTestCase): + + @classmethod + @requests_mock.Mocker() + def setUpClass(cls, mocker): + super().setUpClass() + cls.mocker = mocker + cls.mocker.register_uri(requests_mock.ANY, requests_mock.ANY, text=cls.route_request) + cls.tfx_client = TransifexApiClient(TOKEN, ORGANIZATION_SLUG, PROJECT_SLUG) + + def tearDown(self): + super().tearDown() + self.mocker.reset_mock() + + @classmethod + def route_request(cls, request, context): + return cls._get_file(request, 'json') + + @classmethod + def _get_file(cls, request, ext): + path_text = request.path.replace('/', '_') + file_path = DATA_PATH + 'api/' + request.method.lower() + path_text + return cls.get_file(file_path, ext) + + def test_auth_setup(self): 
+ expected_headers = {'Authorization': 'Bearer ' + TOKEN} + self.assertEqual(self.tfx_client.api.make_auth_headers(), expected_headers) + + def test_request_get_object_by_slug(self): + ... + + def test_request_list_objects(self): + ... + + def test_request_fetch_related(self): + ... + + def test_request_create_resource(self): + ... + + def test_request_lock_resource(self): + ... + + def test_request_delete_resource(self): + ... + + def test_request_upload_content_for_resource(self): + ... + + def test_request_download_content_for_resource(self): + ... From 0fe4929e6bf7f11f2fd27118d4f21a1a248b41c2 Mon Sep 17 00:00:00 2001 From: nospame Date: Mon, 1 Apr 2024 14:54:10 -0700 Subject: [PATCH 013/928] Add Transifex setup test fixtures --- .../data/transifex/api/get_organizations.json | 34 ++++++++ .../data/transifex/api/get_projects.json | 82 +++++++++++++++++++ 2 files changed, 116 insertions(+) create mode 100644 corehq/apps/translations/tests/data/transifex/api/get_organizations.json create mode 100644 corehq/apps/translations/tests/data/transifex/api/get_projects.json diff --git a/corehq/apps/translations/tests/data/transifex/api/get_organizations.json b/corehq/apps/translations/tests/data/transifex/api/get_organizations.json new file mode 100644 index 000000000000..54eefbd1def4 --- /dev/null +++ b/corehq/apps/translations/tests/data/transifex/api/get_organizations.json @@ -0,0 +1,34 @@ +{ + "data": [ + { + "id": "o:test-organization", + "type": "organizations", + "attributes": { + "name": "Test Organization", + "slug": "test-organization", + "logo_url": null, + "private": false + }, + "links": { + "self": "https://rest.api.transifex.com/organizations/o:test-organization" + }, + "relationships": { + "projects": { + "links": { + "related": "https://rest.api.transifex.com/projects?filter[organization]=o:test-organization" + } + }, + "teams": { + "links": { + "related": "https://rest.api.transifex.com/teams?filter[organization]=o:test-organization" + } + } + } + } + ], 
+ "links": { + "self": "https://rest.api.transifex.com/organizations?filter[slug]=test-organization", + "next": null, + "previous": null + } +} \ No newline at end of file diff --git a/corehq/apps/translations/tests/data/transifex/api/get_projects.json b/corehq/apps/translations/tests/data/transifex/api/get_projects.json new file mode 100644 index 000000000000..58aa0bc1a606 --- /dev/null +++ b/corehq/apps/translations/tests/data/transifex/api/get_projects.json @@ -0,0 +1,82 @@ +{ + "data": [ + { + "id": "o:test-organization:p:test-project", + "type": "projects", + "attributes": { + "slug": "test-project", + "name": "Test Project", + "type": "file", + "datetime_created": "2024-02-07T21:47:30Z", + "datetime_modified": "2024-02-07T21:47:30Z", + "tags": [], + "description": "Test Project", + "long_description": "", + "private": false, + "archived": false, + "translation_memory_fillup": false, + "machine_translation_fillup": false, + "homepage_url": "", + "repository_url": "", + "instructions_url": "", + "license": "open_source", + "logo_url": "" + }, + "relationships": { + "organization": { + "links": { + "related": "https://rest.api.transifex.com/organizations/o:test-organization" + }, + "data": { + "type": "organizations", + "id": "o:test-organization" + } + }, + "source_language": { + "links": { + "related": "https://rest.api.transifex.com/languages/l:en" + }, + "data": { + "type": "languages", + "id": "l:en" + } + }, + "languages": { + "links": { + "self": "https://rest.api.transifex.com/projects/o:test-organization:p:test-project/relationships/languages", + "related": "https://rest.api.transifex.com/projects/o:test-organization:p:test-project/languages" + } + }, + "team": { + "data": { + "type": "teams", + "id": "o:test-organization:t:test-project-team" + }, + "links": { + "related": "https://rest.api.transifex.com/teams/o:test-organization:t:test-project-team", + "self": 
"https://rest.api.transifex.com/projects/o:test-organization:p:test-project/relationships/team" + } + }, + "maintainers": { + "links": { + "related": "https://rest.api.transifex.com/projects/o:test-organization:p:test-project/maintainers", + "self": "https://rest.api.transifex.com/projects/o:test-organization:p:test-project/relationships/maintainers" + } + }, + "resources": { + "links": { + "related": "https://rest.api.transifex.com/resources?filter[project]=o:test-organization:p:test-project" + } + } + }, + "links": { + "self": "https://rest.api.transifex.com/projects/o:test-organization:p:test-project" + } + } + ], + "links": { + "self": "https://rest.api.transifex.com/projects?filter[organization]=o:test-organization&filter[slug]=test-project", + "next": null, + "previous": null + } +} \ No newline at end of file From 1ae390b1349837b865dab03071c0f43ef90fe9a6 Mon Sep 17 00:00:00 2001 From: nospame Date: Mon, 1 Apr 2024 14:58:24 -0700 Subject: [PATCH 014/928] Implement tests for Transifex API request methods --- .../tests/test_transifex_integration.py | 111 ++++++++++++++++-- 1 file changed, 103 insertions(+), 8 deletions(-) diff --git a/corehq/apps/translations/tests/test_transifex_integration.py b/corehq/apps/translations/tests/test_transifex_integration.py index 3fee36eb9b23..73e05b7cf17f 100644 --- a/corehq/apps/translations/tests/test_transifex_integration.py +++ b/corehq/apps/translations/tests/test_transifex_integration.py @@ -30,8 +30,25 @@ def tearDown(self): @classmethod def route_request(cls, request, context): + if request.method == 'DELETE': + context.status_code = 204 + return + if request.method == 'GET' and 'downloads/' in request.path: + return cls._handle_download_redirect(request, context) return cls._get_file(request, 'json') + @classmethod + def _handle_download_redirect(cls, request, context): + if request.path.endswith('content'): + return cls._get_file(request, 'txt') + + # Set redirect to our predefined content irrespective of ID + 
url_pieces = request.url.split('/') + url_pieces[-1] = 'content' + context.status_code = 303 + context.headers['location'] = '/'.join(url_pieces) + return + @classmethod def _get_file(cls, request, ext): path_text = request.path.replace('/', '_') @@ -43,25 +60,103 @@ def test_auth_setup(self): self.assertEqual(self.tfx_client.api.make_auth_headers(), expected_headers) def test_request_get_object_by_slug(self): - ... + cls = self.tfx_client.api.Resource + key = 'slug' + value = RESOURCE_SLUG + with self.mocker as mocker: + obj = self.tfx_client._get_object(cls, **{key: value}) + request = mocker.last_request + + expected_filter = f"filter[{key}]" + self.assertEqual(request.method, 'GET') + self.assertEqual(request.qs[expected_filter], [value]) + self.assertIsInstance(obj, cls) def test_request_list_objects(self): - ... + cls = self.tfx_client.api.Resource + key = 'project' + value = self.tfx_client.project.id + with self.mocker as mocker: + objects = [o for o in self.tfx_client._list_objects(cls, **{key: value})] + request = mocker.last_request + + expected_filter = f"filter[{key}]" + self.assertEqual(request.method, 'GET') + self.assertEqual(request.qs[expected_filter], [value]) + self.assertIsInstance(objects[0], cls) def test_request_fetch_related(self): - ... + obj = self.tfx_client.project + relative = 'languages' + with self.mocker as mocker: + [r for r in self.tfx_client._fetch_related(obj, 'languages')] + request = mocker.last_request + + self.assertEqual(request.method, 'GET') + self.assertIn(obj.id, request.path) + self.assertIn(relative, request.path) def test_request_create_resource(self): - ... 
+ with self.mocker as mocker: + self.tfx_client._create_resource(RESOURCE_SLUG, RESOURCE_NAME) + request = mocker.last_request + + data = request.json()['data'] + self.assertEqual(request.method, 'POST') + self.assertEqual(data['attributes']['slug'], RESOURCE_SLUG) + self.assertEqual(data['attributes']['name'], RESOURCE_NAME) def test_request_lock_resource(self): - ... + with self.mocker as mocker: + resource = self.tfx_client._get_resource(RESOURCE_SLUG) + self.tfx_client._lock_resource(resource) + request = mocker.last_request + + data = request.json()['data'] + self.assertEqual(request.method, 'PATCH') + self.assertEqual(data['id'], resource.id) + self.assertEqual(data['attributes']['accept_translations'], False) def test_request_delete_resource(self): - ... + with self.mocker as mocker: + resource = self.tfx_client._get_resource(RESOURCE_SLUG) + self.tfx_client.delete_resource(RESOURCE_SLUG) + request = mocker.last_request + + self.assertEqual(request.method, 'DELETE') + self.assertTrue(request.path.endswith(resource.id)) def test_request_upload_content_for_resource(self): - ... + cls = self.tfx_client.api.ResourceStringsAsyncUpload + content = "Here is some content" + key = 'resource' + with self.mocker as mocker: + value = self.tfx_client._get_resource(RESOURCE_SLUG).id + self.tfx_client._upload_content(cls, content, **{key: value}) + request = mocker.last_request + + text = request.text + self.assertEqual(request.method, 'POST') + self.assertIn(content, text) + self.assertIn(key, text) + self.assertIn(value, text) def test_request_download_content_for_resource(self): - ... 
+ cls = self.tfx_client.api.ResourceStringsAsyncDownload + key = 'resource' + with self.mocker as mocker: + value = self.tfx_client._get_resource(RESOURCE_SLUG).id + self.tfx_client._download_content(cls, **{key: value}) + # with our mocked redirect, Transifex async downloads should make exactly 3 requests + post_download, get_download_status, get_content = mocker.request_history[-3:] + + # creates async download request + data = post_download.json()['data'] + self.assertEqual(post_download.method, 'POST') + self.assertEqual(data['attributes'][key], value) + + # checks status of download + self.assertEqual(get_download_status.method, 'GET') + + # finally, gets content from download + self.assertEqual(get_content.method, 'GET') From 9901ca76b7d886121cb9e1a93feb98962ec9eb43 Mon Sep 17 00:00:00 2001 From: nospame Date: Mon, 1 Apr 2024 15:08:28 -0700 Subject: [PATCH 015/928] Add test fixtures for Transifex API request tests --- ...organization:p:test-project_languages.json | 40 ++++++++++++++ ...source_strings_async_downloads_content.txt | 24 ++++++++ .../data/transifex/api/get_resources.json | 55 +++++++++++++++++++ ...zation:p:test-project:r:test-resource.json | 45 +++++++++++++++ ...post_resource_strings_async_downloads.json | 26 +++++++++ .../post_resource_strings_async_uploads.json | 32 +++++++++++ .../data/transifex/api/post_resources.json | 48 ++++++++++++++++ 7 files changed, 270 insertions(+) create mode 100644 corehq/apps/translations/tests/data/transifex/api/get_projects_o:test-organization:p:test-project_languages.json create mode 100644 corehq/apps/translations/tests/data/transifex/api/get_resource_strings_async_downloads_content.txt create mode 100644 corehq/apps/translations/tests/data/transifex/api/get_resources.json create mode 100644 corehq/apps/translations/tests/data/transifex/api/patch_resources_o:test-organization:p:test-project:r:test-resource.json create mode 100644 
corehq/apps/translations/tests/data/transifex/api/post_resource_strings_async_downloads.json create mode 100644 corehq/apps/translations/tests/data/transifex/api/post_resource_strings_async_uploads.json create mode 100644 corehq/apps/translations/tests/data/transifex/api/post_resources.json diff --git a/corehq/apps/translations/tests/data/transifex/api/get_projects_o:test-organization:p:test-project_languages.json b/corehq/apps/translations/tests/data/transifex/api/get_projects_o:test-organization:p:test-project_languages.json new file mode 100644 index 000000000000..9a1d7f4c8894 --- /dev/null +++ b/corehq/apps/translations/tests/data/transifex/api/get_projects_o:test-organization:p:test-project_languages.json @@ -0,0 +1,40 @@ +{ + "data": [ + { + "id": "l:es", + "type": "languages", + "attributes": { + "code": "es", + "name": "Spanish", + "rtl": false, + "plural_equation": "n == 1 ? 0 : n != 0 && n % 1000000 == 0 ? 1 : 2", + "plural_rules": { + "one": "n is 1", + "many": "n is not 0 and n mod 1000000 == 0", + "other": "everything else" + } + }, + "links": { + "self": "https://rest.api.transifex.com/languages/l:es" + } + }, + { + "id": "l:fr", + "type": "languages", + "attributes": { + "code": "fr", + "name": "French", + "rtl": false, + "plural_equation": "(n == 0 || n == 1) ? 0 : n != 0 && n % 1000000 == 0 ? 
1 : 2", + "plural_rules": { + "one": "n is 0 or n is 1", + "many": "n is not 0 and n mod 1000000 == 0", + "other": "everything else" + } + }, + "links": { + "self": "https://rest.api.transifex.com/languages/l:fr" + } + } + ] +} \ No newline at end of file diff --git a/corehq/apps/translations/tests/data/transifex/api/get_resource_strings_async_downloads_content.txt b/corehq/apps/translations/tests/data/transifex/api/get_resource_strings_async_downloads_content.txt new file mode 100644 index 000000000000..a2455e9fdb72 --- /dev/null +++ b/corehq/apps/translations/tests/data/transifex/api/get_resource_strings_async_downloads_content.txt @@ -0,0 +1,24 @@ +# +msgid "" +msgstr "" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=utf-8\n" +"App-Id: 47a20b417ad4fad19006e9aa6302513b\n" +"Language: en\n" +"PO-Creation-Date: 2024-03-05 17:54:33.896525\n" +"Version: 20\n" + +#: :1 +msgctxt "Module:module1:fce00a6bfc004b62a84a3a75466257ae" +msgid "Case List" +msgstr "Case List" + +#: :2 +msgctxt "Form:module1_form1:a2ca659474fe4b9c8474cc34cc4213c9" +msgid "Registration Form" +msgstr "Registration Form" + +#: :3 +msgctxt "Form:module1_form2:f799648efeac47f8b469e2854b56b121" +msgid "Followup Form" +msgstr "Followup Form" diff --git a/corehq/apps/translations/tests/data/transifex/api/get_resources.json b/corehq/apps/translations/tests/data/transifex/api/get_resources.json new file mode 100644 index 000000000000..8ced9d01b78d --- /dev/null +++ b/corehq/apps/translations/tests/data/transifex/api/get_resources.json @@ -0,0 +1,55 @@ +{ + "data": [ + { + "attributes": { + "accept_translations": true, + "categories": [ + "category_1", + "category_2" + ], + "datetime_created": "XXXX-XX-XXTXX:XX:XXZ", + "datetime_modified": "XXXX-XX-XXTXX:XX:XXZ", + "i18n_options": { + "allow_duplicate_strings": true + }, + "i18n_version": 1, + "mp4_url": "https://some.host.net/some_mp4_url", + "name": "Test Resource", + "ogg_url": "https://some.host.net/some_ogg_url", + "priority": "normal", + 
"slug": "test-resource", + "string_count": 0, + "webm_url": "https://some.host.net/some_webm_url", + "word_count": 0, + "youtube_url": "https://www.youtube.com/" + }, + "id": "o:test-organization:p:test-project:r:test-resource", + "links": { + "self": "/resources/o:test-organization:p:test-project:r:test-resource" + }, + "relationships": { + "i18n_format": { + "data": { + "id": "PO", + "type": "i18n_formats" + } + }, + "project": { + "data": { + "id": "o:test-organization:p:test-project", + "type": "projects" + }, + "links": { + "related": "/projects/o:test-organization:p:test-project" + } + } + }, + "type": "resources" + } + ], + "links": { + "next": "/resources?filter[project]=o:test-organization:p:test-project&page[cursor]=XXX", + "previous": "/resources?filter[project]=o:test-organization:p:test-project&page[cursor]=XXX", + "self": "/resources?filter[project]=o:test-organization:p:test-project" + } +} \ No newline at end of file diff --git a/corehq/apps/translations/tests/data/transifex/api/patch_resources_o:test-organization:p:test-project:r:test-resource.json b/corehq/apps/translations/tests/data/transifex/api/patch_resources_o:test-organization:p:test-project:r:test-resource.json new file mode 100644 index 000000000000..45f5bae4a157 --- /dev/null +++ b/corehq/apps/translations/tests/data/transifex/api/patch_resources_o:test-organization:p:test-project:r:test-resource.json @@ -0,0 +1,45 @@ +{ + "data": { + "id": "o:test-organization:p:test-project:r:test-resource", + "type": "resources", + "attributes": { + "slug": "test-resource", + "name": "Test Resource", + "priority": "normal", + "i18n_type": "PO", + "i18n_version": 2, + "accept_translations": false, + "string_count": 3, + "word_count": 6, + "datetime_created": "2024-03-04T22:36:44Z", + "datetime_modified": "2024-03-20T21:59:39Z", + "categories": [], + "i18n_options": {}, + "mp4_url": null, + "ogg_url": null, + "youtube_url": null, + "webm_url": null + }, + "relationships": { + "project": { + "links": { + 
"related": "https://rest.api.transifex.com/projects/o:test-organization:p:test-project" + }, + "data": { + "type": "projects", + "id": "o:test-organization:p:test-project" + } + }, + "i18n_format": { + "data": { + "type": "i18n_formats", + "id": "PO" + } + }, + "base": null + }, + "links": { + "self": "https://rest.api.transifex.com/resources/o:test-organization:p:test-project:r:test-resource" + } + } +} \ No newline at end of file diff --git a/corehq/apps/translations/tests/data/transifex/api/post_resource_strings_async_downloads.json b/corehq/apps/translations/tests/data/transifex/api/post_resource_strings_async_downloads.json new file mode 100644 index 000000000000..6d4030ebdff8 --- /dev/null +++ b/corehq/apps/translations/tests/data/transifex/api/post_resource_strings_async_downloads.json @@ -0,0 +1,26 @@ +{ + "data": { + "attributes": { + "date_created": "XXXX-XX-XXTXX:XX:XXZ", + "date_modified": "XXXX-XX-XXTXX:XX:XXZ", + "errors": [], + "status": "pending" + }, + "id": "4abfc726-6a27-4c33-9d99-e5254c8df748", + "links": { + "self": "/resource_strings_async_downloads/4abfc726-6a27-4c33-9d99-e5254c8df748" + }, + "relationships": { + "resource": { + "data": { + "id": "o:test-organization:p:test-project:r:test-resource", + "type": "resources" + }, + "links": { + "related": "/resources/o:test-organization:p:test-project:r:test-resource" + } + } + }, + "type": "resource_strings_async_downloads" + } +} diff --git a/corehq/apps/translations/tests/data/transifex/api/post_resource_strings_async_uploads.json b/corehq/apps/translations/tests/data/transifex/api/post_resource_strings_async_uploads.json new file mode 100644 index 000000000000..061b3df429ef --- /dev/null +++ b/corehq/apps/translations/tests/data/transifex/api/post_resource_strings_async_uploads.json @@ -0,0 +1,32 @@ +{ + "data": { + "attributes": { + "date_created": "XXXX-XX-XXTXX:XX:XXZ", + "date_modified": "XXXX-XX-XXTXX:XX:XXZ", + "details": { + "strings_created": 0, + "strings_deleted": 0, + 
"strings_skipped": 0, + "strings_updated": 0 + }, + "errors": [], + "status": "succeeded" + }, + "id": "4abfc726-6a27-4c33-9d99-e5254c8df748", + "links": { + "self": "/resource_strings_async_uploads/4abfc726-6a27-4c33-9d99-e5254c8df748" + }, + "relationships": { + "resource": { + "data": { + "id": "o:test-organization:p:test-project:r:test-resource", + "type": "resources" + }, + "links": { + "related": "/resources/o:test-organization:p:test-project:r:test-resource" + } + } + }, + "type": "resource_strings_async_uploads" + } +} \ No newline at end of file diff --git a/corehq/apps/translations/tests/data/transifex/api/post_resources.json b/corehq/apps/translations/tests/data/transifex/api/post_resources.json new file mode 100644 index 000000000000..36abe4461752 --- /dev/null +++ b/corehq/apps/translations/tests/data/transifex/api/post_resources.json @@ -0,0 +1,48 @@ +{ + "data": { + "attributes": { + "accept_translations": true, + "categories": [ + "category_1", + "category_2" + ], + "datetime_created": "XXXX-XX-XXTXX:XX:XXZ", + "datetime_modified": "XXXX-XX-XXTXX:XX:XXZ", + "i18n_options": { + "allow_duplicate_strings": true + }, + "i18n_version": 1, + "mp4_url": "https://some.host.net/some_mp4_url", + "name": "Test Resource", + "ogg_url": "https://some.host.net/some_ogg_url", + "priority": "normal", + "slug": "test-resource", + "string_count": 0, + "webm_url": "https://some.host.net/some_webm_url", + "word_count": 0, + "youtube_url": "https://www.youtube.com/" + }, + "id": "o:test-organization:p:test-project:r:test-resource", + "links": { + "self": "/resources/o:test-organization:p:test-project:r:test-resource" + }, + "relationships": { + "i18n_format": { + "data": { + "id": "PO", + "type": "i18n_formats" + } + }, + "project": { + "data": { + "id": "o:test-organization:p:test-project", + "type": "projects" + }, + "links": { + "related": "/projects/o:test-organization:p:test-project" + } + } + }, + "type": "resources" + } +} \ No newline at end of file From 
cdea5bb5294acba2b2662f1fb5366afd4604e0f4 Mon Sep 17 00:00:00 2001 From: nospame Date: Mon, 1 Apr 2024 15:14:09 -0700 Subject: [PATCH 016/928] Add tests for Transifex client methods that indirectly hit the API --- .../tests/test_transifex_integration.py | 94 +++++++++++++++++++ 1 file changed, 94 insertions(+) diff --git a/corehq/apps/translations/tests/test_transifex_integration.py b/corehq/apps/translations/tests/test_transifex_integration.py index 73e05b7cf17f..7e138b450224 100644 --- a/corehq/apps/translations/tests/test_transifex_integration.py +++ b/corehq/apps/translations/tests/test_transifex_integration.py @@ -1,6 +1,8 @@ from django.test import SimpleTestCase +import polib import requests_mock +from unittest.mock import patch from corehq.apps.translations.integrations.transifex.client import TransifexApiClient from corehq.util.test_utils import TestFileMixin @@ -160,3 +162,95 @@ def test_request_download_content_for_resource(self): # finally, gets content from download self.assertEqual(get_content.method, 'GET') + + @patch('corehq.apps.translations.integrations.transifex.client.TransifexApiClient._upload_resource_strings') + @patch('corehq.apps.translations.integrations.transifex.client.TransifexApiClient._get_resource') + @patch('corehq.apps.translations.integrations.transifex.client.TransifexApiClient._create_resource') + def test_upload_resource(self, create_resource, get_resource, upload_resource_strings): + filename = 'menus_and_forms.po' + path = DATA_PATH + filename + with self.mocker: + self.tfx_client.upload_resource(path, RESOURCE_SLUG, RESOURCE_NAME, False) + + content = self.get_file(path, '') + resource = create_resource.return_value + create_resource.assert_called_with(RESOURCE_SLUG, RESOURCE_NAME) + upload_resource_strings.assert_called_with(content, resource.id) + + # with update_resource True we should get the existing resource instead of creating + with self.mocker: + self.tfx_client.upload_resource(path, RESOURCE_SLUG, RESOURCE_NAME, 
True) + get_resource.assert_called_with(RESOURCE_SLUG) + + # with resource_name None we should create the resource using its filename as resource_name + with self.mocker: + self.tfx_client.upload_resource(path, RESOURCE_SLUG, None, False) + create_resource.assert_called_with(RESOURCE_SLUG, filename) + + @patch( + 'corehq.apps.translations.integrations.transifex.client.TransifexApiClient._upload_resource_translations') + @patch('corehq.apps.translations.integrations.transifex.client.TransifexApiClient._get_resource') + def test_upload_translation(self, get_resource, upload_resource_translations): + path = DATA_PATH + 'menus_and_forms-fr.po' + hq_lang_code = 'fra' + with self.mocker: + self.tfx_client.upload_translation(path, RESOURCE_SLUG, hq_lang_code) + + content = self.get_file(path, '') + resource = get_resource.return_value + language_id = self.tfx_client._to_language_id(self.tfx_client.transifex_lang_code(hq_lang_code)) + upload_resource_translations.assert_called_with(content, resource.id, language_id) + + @patch('corehq.apps.translations.integrations.transifex.client.TransifexApiClient._list_resources') + def test_get_resource_slugs(self, list_resources): + all_slugs = ['test-resource', 'test-resourcev2', 'test-resourcev3'] + all_resources = [self.tfx_client.api.Resource(slug=slug) for slug in all_slugs] + list_resources.return_value = all_resources + + # no version arg returns all slugs + actual_slugs = self.tfx_client.get_resource_slugs(None) + self.assertEqual(all_slugs, actual_slugs) + + # version arg with "use_version_postfix" returns matching that version + actual_slugs = self.tfx_client.get_resource_slugs(2) + self.assertEqual(['test-resourcev2'], actual_slugs) + + # version arg without "use_version_postfix" returns those not matching the version + self.tfx_client.use_version_postfix = False + actual_slugs = self.tfx_client.get_resource_slugs(2) + self.assertEqual(['test-resource', 'test-resourcev3'], actual_slugs) + + def test_get_translation(self): 
+ hq_lang_code = 'fra' + with self.mocker: + actual_translation = self.tfx_client.get_translation(RESOURCE_SLUG, hq_lang_code, False) + + path = DATA_PATH + 'menus_and_forms-fr.po' + expected_translation = polib.pofile(self.get_file(path, '')) + self.assertEqual(expected_translation, actual_translation) + + def test_get_project_langcodes(self): + expected_langcodes = ['es', 'fr', 'en'] + with self.mocker: + actual_langcodes = self.tfx_client.get_project_langcodes() + + self.assertEqual(expected_langcodes, actual_langcodes) + + def test_source_lang_is(self): + transifex_language_id = 'l:fr' + hq_lang_code = 'fra' + self.tfx_client.project.source_language.id = transifex_language_id + self.assertTrue(self.tfx_client.source_lang_is(hq_lang_code)) + + def test_is_translation_completed(self): + with self.mocker: + # some translations are incomplete in our test data + translation_completed = self.tfx_client.is_translation_completed(RESOURCE_SLUG) + self.assertFalse(translation_completed) + + with self.mocker: + # translations for French ('fra') are complete in our test data + hq_lang_code = 'fra' + translation_completed = self.tfx_client.is_translation_completed( + RESOURCE_SLUG, hq_lang_code=hq_lang_code) + self.assertTrue(translation_completed) From 5984ff6322a7164effe91389a01a7d09ad115e0f Mon Sep 17 00:00:00 2001 From: nospame Date: Mon, 1 Apr 2024 15:18:46 -0700 Subject: [PATCH 017/928] Add test fixtures for Transifex client methods that hit API indirectly --- .../api/get_resource_language_stats.json | 136 ++++++++++++++++++ .../api/get_resource_language_stats_l:fr.json | 45 ++++++ ...e_translations_async_downloads_content.txt | 24 ++++ ...resource_translations_async_downloads.json | 35 +++++ .../data/transifex/menus_and_forms-fr.po | 24 ++++ .../tests/data/transifex/menus_and_forms.po | 24 ++++ 6 files changed, 288 insertions(+) create mode 100644 corehq/apps/translations/tests/data/transifex/api/get_resource_language_stats.json create mode 100644 
corehq/apps/translations/tests/data/transifex/api/get_resource_language_stats_l:fr.json create mode 100644 corehq/apps/translations/tests/data/transifex/api/get_resource_translations_async_downloads_content.txt create mode 100644 corehq/apps/translations/tests/data/transifex/api/post_resource_translations_async_downloads.json create mode 100644 corehq/apps/translations/tests/data/transifex/menus_and_forms-fr.po create mode 100644 corehq/apps/translations/tests/data/transifex/menus_and_forms.po diff --git a/corehq/apps/translations/tests/data/transifex/api/get_resource_language_stats.json b/corehq/apps/translations/tests/data/transifex/api/get_resource_language_stats.json new file mode 100644 index 000000000000..92ada165b895 --- /dev/null +++ b/corehq/apps/translations/tests/data/transifex/api/get_resource_language_stats.json @@ -0,0 +1,136 @@ +{ + "links": { + "self": "https://rest.api.transifex.com/resource_language_stats?filter[project]=o:test-organization:p:test-project&filter[resource]=o:test-organization:p:test-project:r:test-resource" + }, + "data": [ + { + "id": "o:test-organization:p:test-project:r:test-resource:l:en", + "type": "resource_language_stats", + "attributes": { + "untranslated_words": 0, + "translated_words": 6, + "reviewed_words": 0, + "proofread_words": 0, + "total_words": 6, + "untranslated_strings": 0, + "translated_strings": 3, + "reviewed_strings": 0, + "proofread_strings": 0, + "total_strings": 3, + "last_translation_update": "2024-03-04T22:36:45Z", + "last_review_update": null, + "last_proofread_update": null, + "last_update": "2024-03-04T22:36:45Z" + }, + "relationships": { + "resource": { + "links": { + "related": "https://rest.api.transifex.com/resources/o:test-organization:p:test-project:r:test-resource" + }, + "data": { + "type": "resources", + "id": "o:test-organization:p:test-project:r:test-resource" + } + }, + "language": { + "links": { + "related": "https://rest.api.transifex.com/languages/l:en" + }, + "data": { + "type": 
"languages", + "id": "l:en" + } + } + }, + "links": { + "self": "https://rest.api.transifex.com/resource_language_stats/o:test-organization:p:test-project:r:test-resource:l:en" + } + }, + { + "id": "o:test-organization:p:test-project:r:test-resource:l:es", + "type": "resource_language_stats", + "attributes": { + "untranslated_words": 2, + "translated_words": 4, + "reviewed_words": 4, + "proofread_words": 0, + "total_words": 6, + "untranslated_strings": 0, + "translated_strings": 3, + "reviewed_strings": 3, + "proofread_strings": 0, + "total_strings": 3, + "last_translation_update": "2024-03-20T18:52:41Z", + "last_review_update": "2024-03-20T18:52:42Z", + "last_proofread_update": null, + "last_update": "2024-03-20T18:52:42Z" + }, + "relationships": { + "resource": { + "links": { + "related": "https://rest.api.transifex.com/resources/o:test-organization:p:test-project:r:test-resource" + }, + "data": { + "type": "resources", + "id": "o:test-organization:p:test-project:r:test-resource" + } + }, + "language": { + "links": { + "related": "https://rest.api.transifex.com/languages/l:es" + }, + "data": { + "type": "languages", + "id": "l:es" + } + } + }, + "links": { + "self": "https://rest.api.transifex.com/resource_language_stats/o:test-organization:p:test-project:r:test-resource:l:es" + } + }, + { + "id": "o:test-organization:p:test-project:r:test-resource:l:fr", + "type": "resource_language_stats", + "attributes": { + "untranslated_words": 0, + "translated_words": 6, + "reviewed_words": 6, + "proofread_words": 0, + "total_words": 6, + "untranslated_strings": 0, + "translated_strings": 3, + "reviewed_strings": 3, + "proofread_strings": 0, + "total_strings": 3, + "last_translation_update": "2024-03-05T18:15:31Z", + "last_review_update": "2024-03-20T18:52:42Z", + "last_proofread_update": null, + "last_update": "2024-03-20T18:52:42Z" + }, + "relationships": { + "resource": { + "links": { + "related": 
"https://rest.api.transifex.com/resources/o:test-organization:p:test-project:r:test-resource" + }, + "data": { + "type": "resources", + "id": "o:test-organization:p:test-project:r:test-resource" + } + }, + "language": { + "links": { + "related": "https://rest.api.transifex.com/languages/l:fr" + }, + "data": { + "type": "languages", + "id": "l:fr" + } + } + }, + "links": { + "self": "https://rest.api.transifex.com/resource_language_stats/o:test-organization:p:test-project:r:test-resource:l:fr" + } + } + ] +} \ No newline at end of file diff --git a/corehq/apps/translations/tests/data/transifex/api/get_resource_language_stats_l:fr.json b/corehq/apps/translations/tests/data/transifex/api/get_resource_language_stats_l:fr.json new file mode 100644 index 000000000000..a18de5349482 --- /dev/null +++ b/corehq/apps/translations/tests/data/transifex/api/get_resource_language_stats_l:fr.json @@ -0,0 +1,45 @@ +{ + "data": { + "id": "o:test-organization:p:test-project:r:test-resource:l:fr", + "type": "resource_language_stats", + "attributes": { + "untranslated_words": 0, + "translated_words": 6, + "reviewed_words": 6, + "proofread_words": 0, + "total_words": 6, + "untranslated_strings": 0, + "translated_strings": 3, + "reviewed_strings": 3, + "proofread_strings": 0, + "total_strings": 3, + "last_translation_update": "2024-03-05T18:15:31Z", + "last_review_update": "2024-03-20T18:52:42Z", + "last_proofread_update": null, + "last_update": "2024-03-20T18:52:42Z" + }, + "relationships": { + "resource": { + "links": { + "related": "https://rest.api.transifex.com/resources/o:test-organization:p:test-project:r:test-resource" + }, + "data": { + "type": "resources", + "id": "o:test-organization:p:test-project:r:test-resource" + } + }, + "language": { + "links": { + "related": "https://rest.api.transifex.com/languages/l:fr" + }, + "data": { + "type": "languages", + "id": "l:fr" + } + } + }, + "links": { + "self": 
"https://rest.api.transifex.com/resource_language_stats/o:test-organization:p:test-project:r:test-resource:l:fr" + } + } +} \ No newline at end of file diff --git a/corehq/apps/translations/tests/data/transifex/api/get_resource_translations_async_downloads_content.txt b/corehq/apps/translations/tests/data/transifex/api/get_resource_translations_async_downloads_content.txt new file mode 100644 index 000000000000..ed5b3f1de4ae --- /dev/null +++ b/corehq/apps/translations/tests/data/transifex/api/get_resource_translations_async_downloads_content.txt @@ -0,0 +1,24 @@ +# +msgid "" +msgstr "" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"App-Id: 47a20b417ad4fad19006e9aa6302513b\n" +"Language: fr\n" +"PO-Creation-Date: 2024-03-05 17:54:33.896525\n" +"Version: 20\n" + +#: :1 +msgctxt "Module:module1:fce00a6bfc004b62a84a3a75466257ae" +msgid "Case List" +msgstr "Liste de cas" + +#: :2 +msgctxt "Form:module1_form1:a2ca659474fe4b9c8474cc34cc4213c9" +msgid "Registration Form" +msgstr "Formulaire d'inscription" + +#: :3 +msgctxt "Form:module1_form2:f799648efeac47f8b469e2854b56b121" +msgid "Followup Form" +msgstr "Formulaire de suivi" diff --git a/corehq/apps/translations/tests/data/transifex/api/post_resource_translations_async_downloads.json b/corehq/apps/translations/tests/data/transifex/api/post_resource_translations_async_downloads.json new file mode 100644 index 000000000000..f82f8862b71e --- /dev/null +++ b/corehq/apps/translations/tests/data/transifex/api/post_resource_translations_async_downloads.json @@ -0,0 +1,35 @@ +{ + "data": { + "attributes": { + "date_created": "XXXX-XX-XXTXX:XX:XXZ", + "date_modified": "XXXX-XX-XXTXX:XX:XXZ", + "errors": [], + "status": "pending" + }, + "id": "4abfc726-6a27-4c33-9d99-e5254c8df748", + "links": { + "self": "/resource_translations_async_downloads/4abfc726-6a27-4c33-9d99-e5254c8df748" + }, + "relationships": { + "language": { + "data": { + "id": "l:fr", + "type": "languages" + }, + "links": { + "related": 
"/languages/l:fr" + } + }, + "resource": { + "data": { + "id": "o:test-organization:p:test-project:r:test-resource", + "type": "resources" + }, + "links": { + "related": "/resources/o:test-organization:p:test-project:r:test-resource" + } + } + }, + "type": "resource_translations_async_downloads" + } +} \ No newline at end of file diff --git a/corehq/apps/translations/tests/data/transifex/menus_and_forms-fr.po b/corehq/apps/translations/tests/data/transifex/menus_and_forms-fr.po new file mode 100644 index 000000000000..ed5b3f1de4ae --- /dev/null +++ b/corehq/apps/translations/tests/data/transifex/menus_and_forms-fr.po @@ -0,0 +1,24 @@ +# +msgid "" +msgstr "" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"App-Id: 47a20b417ad4fad19006e9aa6302513b\n" +"Language: fr\n" +"PO-Creation-Date: 2024-03-05 17:54:33.896525\n" +"Version: 20\n" + +#: :1 +msgctxt "Module:module1:fce00a6bfc004b62a84a3a75466257ae" +msgid "Case List" +msgstr "Liste de cas" + +#: :2 +msgctxt "Form:module1_form1:a2ca659474fe4b9c8474cc34cc4213c9" +msgid "Registration Form" +msgstr "Formulaire d'inscription" + +#: :3 +msgctxt "Form:module1_form2:f799648efeac47f8b469e2854b56b121" +msgid "Followup Form" +msgstr "Formulaire de suivi" diff --git a/corehq/apps/translations/tests/data/transifex/menus_and_forms.po b/corehq/apps/translations/tests/data/transifex/menus_and_forms.po new file mode 100644 index 000000000000..a2455e9fdb72 --- /dev/null +++ b/corehq/apps/translations/tests/data/transifex/menus_and_forms.po @@ -0,0 +1,24 @@ +# +msgid "" +msgstr "" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=utf-8\n" +"App-Id: 47a20b417ad4fad19006e9aa6302513b\n" +"Language: en\n" +"PO-Creation-Date: 2024-03-05 17:54:33.896525\n" +"Version: 20\n" + +#: :1 +msgctxt "Module:module1:fce00a6bfc004b62a84a3a75466257ae" +msgid "Case List" +msgstr "Case List" + +#: :2 +msgctxt "Form:module1_form1:a2ca659474fe4b9c8474cc34cc4213c9" +msgid "Registration Form" +msgstr "Registration Form" + +#: 
:3 +msgctxt "Form:module1_form2:f799648efeac47f8b469e2854b56b121" +msgid "Followup Form" +msgstr "Followup Form" From f31a543f286a43927a545c2509a87ddc679445b2 Mon Sep 17 00:00:00 2001 From: AddisonDunn Date: Tue, 2 Apr 2024 13:49:21 -0400 Subject: [PATCH 018/928] add new LocationType field and update SQL function --- ..._fixture_queryset_case_sync_restriction.py | 33 ++ corehq/apps/locations/models.py | 10 +- .../get_location_fixture_ids_2.sql | 333 ++++++++++++++++++ migrations.lock | 1 + 4 files changed, 375 insertions(+), 2 deletions(-) create mode 100644 corehq/apps/locations/migrations/0021_add_fixture_queryset_case_sync_restriction.py create mode 100644 corehq/apps/locations/sql_templates/get_location_fixture_ids_2.sql diff --git a/corehq/apps/locations/migrations/0021_add_fixture_queryset_case_sync_restriction.py b/corehq/apps/locations/migrations/0021_add_fixture_queryset_case_sync_restriction.py new file mode 100644 index 000000000000..f2cedcd034e7 --- /dev/null +++ b/corehq/apps/locations/migrations/0021_add_fixture_queryset_case_sync_restriction.py @@ -0,0 +1,33 @@ +# Partially generated by Django 3.2.25 on 2024-03-25 19:26 + +from corehq.sql_db.operations import RawSQLMigration +from django.db import migrations, models +import django.db.models.deletion + +locations_sql_migrator = RawSQLMigration(('corehq', 'apps', 'locations', 'sql_templates'), {}) + + +class Migration(migrations.Migration): + + dependencies = [ + ('locations', '0020_delete_locationrelation'), + ] + + operations = [ + migrations.AddField( + model_name='locationtype', + name='restrict_cases_to', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, related_name='+', to='locations.locationtype'), + ), + migrations.AlterField( + model_name='locationtype', + name='_expand_from', + field=models.ForeignKey(db_column='expand_from', null=True, on_delete=django.db.models.deletion.PROTECT, related_name='+', to='locations.locationtype'), + ), + migrations.AlterField( + 
model_name='locationtype', + name='expand_to', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, related_name='+', to='locations.locationtype'), + ), + locations_sql_migrator.get_migration('get_location_fixture_ids_2.sql'), + ] diff --git a/corehq/apps/locations/models.py b/corehq/apps/locations/models.py index 29871185d001..a2ae8d141a68 100644 --- a/corehq/apps/locations/models.py +++ b/corehq/apps/locations/models.py @@ -94,15 +94,21 @@ class LocationType(models.Model): null=True, related_name='+', db_column='expand_from', - on_delete=models.CASCADE, + on_delete=models.PROTECT, ) # levels below this location type that we start expanding from _expand_from_root = models.BooleanField(default=False, db_column='expand_from_root') expand_to = models.ForeignKey( "self", null=True, related_name="+", - on_delete=models.CASCADE, + on_delete=models.PROTECT, ) # levels above this type that are synced + restrict_cases_to = models.ForeignKey( + "self", + null=True, + related_name="+", + on_delete=models.PROTECT, + ) include_without_expanding = models.ForeignKey( 'self', null=True, diff --git a/corehq/apps/locations/sql_templates/get_location_fixture_ids_2.sql b/corehq/apps/locations/sql_templates/get_location_fixture_ids_2.sql new file mode 100644 index 000000000000..e909785b6955 --- /dev/null +++ b/corehq/apps/locations/sql_templates/get_location_fixture_ids_2.sql @@ -0,0 +1,333 @@ +DROP FUNCTION IF EXISTS get_location_fixture_ids_2(TEXT, INTEGER[], BOOLEAN); + +CREATE FUNCTION get_location_fixture_ids_2( + domain_name TEXT, + -- array of locations_sqllocation.id (NOT locations_sqllocation.location_id) + user_location_ids_array INTEGER[], + case_sync_restriction BOOLEAN +) RETURNS TABLE ( + "id" INTEGER, -- location id + "path" INTEGER[], -- location tree path from root (array of location ids) + "depth" INTEGER -- depth in locations tree (0 is root node) +) AS $$ +BEGIN + /* + Get fixture locations using expand_from criteria + + There may be 
ambiguities in location type configurations that could + cause undefined outcomes: + - expand_from_root = TRUE seems to do the same thing as + include_without_expanding IS NOT NULL (redundant config?). + - expand_from_root = TRUE with expand_from IS NOT NULL seems logically + inconsistent. Suggest adding check constraint to prevent this state. + - include_without_expanding IS NOT NULL with expand_from IS NOT NULL + seems logically inconsistent. Suggest adding check constraint. + - expand_from could point to a location that is not an ancester + - expand_to could point to a location that is not a descendant (maybe + doesn't matter since it's only used to calculate a depth). + - two location types along the same path could both have expand_to set + to different levels, making the expansion depth ambiguous if a user + had both of those locations. + - ancestors could be excluded with improper include_only config. + seems like we could achieve the same thing with expand_from/expand_to + */ + + RETURN QUERY + + WITH RECURSIVE expand_to AS ( + /* + CTE with location type ids and corresponding expand to depths + + This traverses the location type hierarchy, which is assumed to + mirror the location hierarchy but contain many less records. The + traversal is over user locations' types and their ancestors, so + should be reasonably fast. 
+ + "expand_to" columns: + - expand_to_type: location type id, -1 if include_without_expanding + - expand_to_depth: expansion depth + */ + + WITH RECURSIVE cte AS ( + -- get expand_to location types + SELECT + expand_to_type."parent_type_id", + 0 AS "depth", + expand_to_type."id" AS "expand_to_type" + FROM "locations_sqllocation" loc + INNER JOIN "locations_locationtype" loc_type + ON loc."location_type_id" = loc_type."id" + INNER JOIN "locations_locationtype" expand_to_type + ON CASE + WHEN case_sync_restriction THEN expand_to_type."id" = loc_type."restrict_cases_to_id" + ELSE expand_to_type."id" = loc_type."expand_to_id" + END + WHERE + loc."id" = ANY(user_location_ids_array) + AND CASE + WHEN case_sync_restriction THEN loc_type."restrict_cases_to_id" IS NOT NULL + ELSE loc_type."expand_to_id" IS NOT NULL + END + + + UNION ALL + + -- get include_without_expanding location types + SELECT + iwe_type."parent_type_id", + 0 AS "depth", + -1 AS "expand_to_type" + FROM "locations_sqllocation" loc + INNER JOIN "locations_locationtype" loc_type + ON loc."location_type_id" = loc_type."id" + INNER JOIN "locations_locationtype" iwe_type + ON loc_type."include_without_expanding_id" = iwe_type."id" + WHERE + NOT case_sync_restriction + AND loc."id" = ANY(user_location_ids_array) + AND loc_type."include_without_expanding_id" IS NOT NULL + + UNION ALL + + -- recursive query to calculate depths + SELECT + loc_type."parent_type_id", + "cte"."depth" + 1 AS "depth", + "cte"."expand_to_type" AS "expand_to_type" + FROM "locations_locationtype" loc_type + INNER JOIN "cte" ON loc_type."id" = "cte"."parent_type_id" + ) + + SELECT + "cte"."expand_to_type", + MAX("cte"."depth") AS "expand_to_depth" + FROM "cte" + WHERE "cte"."parent_type_id" IS NULL -- exclude all but the root items + GROUP BY "cte"."expand_to_type" + + ), expand_from AS ( + /* + CTE with expand from location ids and expansion depths + + The traversal is over user locations and their ancestors, so should + be reasonably 
fast. + + "expand_from" columns: + - loc_id: location id, null for include_without_expanding or + expand_from_root. + - depth: expand to depth. Negative values in this column have + special meanings. See output examples below. + + loc_id | depth + --------|------- + NULL | 3 -- include all locations with depth <= 3 + 1 | 4 -- include all descendents of location 1 to depth 4 + 10 | -1 -- include location 10 (but do not expand) + 100 | -2 -- include all descendents of location 100, unlimited depth + 11 | -3 -- location 11 and its descendants are included based on + include_only types + */ + + WITH RECURSIVE cte AS ( + -- get include_without_expanding depth + SELECT + NULL AS "parent_id", + NULL AS "expand_from_type", + NULL AS "loc_id", + "expand_to"."expand_to_depth" AS "depth" + FROM "expand_to" + WHERE "expand_to"."expand_to_type" = -1 + + UNION ALL + + SELECT + loc."parent_id", + CASE + WHEN ( + -- if expand_from is set and not the current location type + -- it will be one of this location's ancestors + loc_type."expand_from" IS NOT NULL + AND loc_type."expand_from_root" = FALSE + AND loc_type."expand_from" <> loc."location_type_id" + AND NOT EXISTS ( + -- might be wrong to ignore loc_type.expand_from + -- when include_only types exist + SELECT 1 + FROM "locations_locationtype_include_only" + WHERE "from_locationtype_id" = loc."location_type_id" + ) + ) AND NOT case_sync_restriction THEN loc_type."expand_from" + -- otherwise it will be null for this and all ancestors + ELSE NULL + END AS "expand_from_type", + CASE + -- expand_from_root -> no path + WHEN loc_type."expand_from_root" = TRUE AND NOT case_sync_restriction THEN NULL + -- else first path element + ELSE loc."id" + END AS "loc_id", + CASE + WHEN case_sync_restriction THEN ( + CASE WHEN loc_type."restrict_cases_to_id" IS NOT NULL THEN ( + SELECT "expand_to_depth" + FROM "expand_to" + WHERE "expand_to_type" = loc_type."restrict_cases_to_id" + ) ELSE -2 + END + ) + -- get expand_to depth + WHEN 
loc_type."expand_to_id" IS NOT NULL THEN ( + SELECT "expand_to_depth" + FROM "expand_to" + WHERE "expand_to_type" = loc_type."expand_to_id" + ) + -- use include_only types + WHEN EXISTS ( + SELECT 1 + FROM "locations_locationtype_include_only" + WHERE "from_locationtype_id" = loc."location_type_id" + ) THEN -3 + -- else unlimited expansion depth + ELSE -2 + END AS "depth" + FROM "locations_sqllocation" loc + INNER JOIN "locations_locationtype" loc_type ON loc."location_type_id" = loc_type."id" + WHERE + loc."is_archived" = FALSE + AND loc."domain" = domain_name + AND loc."id" = ANY(user_location_ids_array) + + UNION ALL + + SELECT + loc."parent_id", + CASE + -- set expand_from_type if it will apply to an ancestor + WHEN "cte"."expand_from_type" <> loc."location_type_id" + THEN "cte"."expand_from_type" + -- otherwise it will be null for this and all ancestors + ELSE NULL + END AS "expand_from_type", + CASE + -- expand_from_root -> no path + WHEN "cte"."loc_id" IS NULL THEN NULL + -- else next element of path + ELSE loc."id" + END AS "loc_id", + CASE + -- ancestor of expand_from -> include but do not expand + WHEN ( + "cte"."loc_id" IS NOT NULL + AND "cte"."expand_from_type" IS NULL + ) THEN -1 + -- else no path yet or starting path -> use previous depth + ELSE "cte"."depth" + END AS "depth" + FROM "locations_sqllocation" loc + INNER JOIN "cte" ON loc."id" = "cte"."parent_id" + WHERE loc."is_archived" = FALSE + ) + + SELECT DISTINCT "cte"."loc_id", "cte"."depth" FROM "cte" + + ), fixture_ids AS ( + /* + Get fixture locations using expand_from criteria + + "fixture_ids" columns: + - id: location id + - path: location tree path from root (array of location ids) + - depth: depth in locations tree (0 is root node) + */ + + SELECT + loc."id", + ARRAY[loc."id"] AS "path", + 0 AS "depth" + FROM "locations_sqllocation" loc + WHERE + loc."is_archived" = FALSE + AND loc."domain" = domain_name + AND loc."parent_id" IS NULL + AND EXISTS ( + SELECT 1 + FROM "expand_from" xf + 
WHERE + ( + "loc_id" = loc."id" AND ( + xf."depth" = -1 -- ancestor of expand_from + OR xf."depth" = -2 -- expansion depth is unlimited + -- descendant of expand_from within expand_to depth + OR xf."depth" >= 0 + ) + ) OR ( + -- include_without_expanding/expand_from_root + -- AND + -- unlimited depth or max depth >= current depth + "loc_id" IS NULL AND (xf."depth" = -2 OR xf."depth" >= 0) + ) OR ( + -- location type is in include_only types + xf."depth" = -3 + AND "loc_id" = loc."id" + AND loc."location_type_id" IN ( + SELECT to_locationtype_id + FROM locations_locationtype_include_only + INNER JOIN "locations_sqllocation" x + ON x."location_type_id" = from_locationtype_id + WHERE x."id" = ANY(user_location_ids_array) + ) + ) + ) + + UNION ALL + + SELECT + loc."id", + array_append("fixture_ids"."path", loc."id") AS "path", + "fixture_ids"."depth" + 1 AS "depth" + FROM "locations_sqllocation" loc + INNER JOIN "fixture_ids" ON loc."parent_id" = "fixture_ids"."id" + WHERE + loc."is_archived" = FALSE + AND loc."domain" = domain_name + AND EXISTS ( + SELECT 1 + FROM "expand_from" xf + WHERE + ( + "loc_id" = loc."id" AND ( + xf."depth" = -1 -- ancestor of expand_from + OR xf."depth" = -2 -- expansion depth is unlimited + -- descendant of expand_from within expand_to depth + OR "fixture_ids"."depth" < xf."depth" + ) + ) OR ( + ( + -- include_without_expanding/expand_from_root or + -- descendant of expand_from within expand_to depth + "loc_id" IS NULL OR "loc_id" = ANY("fixture_ids"."path") + ) AND ( + xf."depth" = -2 -- expansion depth is unlimited + -- descendant of expand_from within expand_to depth + OR "fixture_ids"."depth" < xf."depth" + ) + ) OR ( + -- location type is in include_only types + xf."depth" = -3 AND ( + "loc_id" = loc."id" + OR "loc_id" = ANY("fixture_ids"."path") + ) AND loc."location_type_id" IN ( + SELECT to_locationtype_id + FROM locations_locationtype_include_only + INNER JOIN "locations_sqllocation" x + ON x."location_type_id" = 
from_locationtype_id + WHERE x."id" = ANY(user_location_ids_array) + ) + ) + ) + ) + + SELECT x."id", x."path", x."depth" from fixture_ids x; + +END; +$$ LANGUAGE plpgsql; diff --git a/migrations.lock b/migrations.lock index c9494371ee3d..7c1776b34203 100644 --- a/migrations.lock +++ b/migrations.lock @@ -668,6 +668,7 @@ locations 0018_auto_20200430_1601 0019_auto_20200924_1753 0020_delete_locationrelation + 0021_add_fixture_queryset_case_sync_restriction mobile_auth 0001_initial 0002_delete_sqlmobileauthkeyrecord From 14029da0411ff05c7649e098c29ad592cfe10ba7 Mon Sep 17 00:00:00 2001 From: AddisonDunn Date: Tue, 2 Apr 2024 13:51:21 -0400 Subject: [PATCH 019/928] use new SQL function and update funciton naming --- corehq/apps/locations/fixtures.py | 12 ++++++------ corehq/ex-submodules/casexml/apps/phone/models.py | 4 ++-- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/corehq/apps/locations/fixtures.py b/corehq/apps/locations/fixtures.py index d916fcebbfc4..2000d8e6bf55 100644 --- a/corehq/apps/locations/fixtures.py +++ b/corehq/apps/locations/fixtures.py @@ -109,7 +109,7 @@ def __call__(self, restore_state): if not self.serializer.should_sync(restore_user, restore_state.params.app): return [] - # This just calls get_location_fixture_queryset but is memoized to the user + # This just calls get_location_fixture_queryset_for_user but is memoized to the user locations_queryset = restore_user.get_locations_to_sync() if not should_sync_locations(restore_state.last_sync_log, locations_queryset, restore_state): return [] @@ -245,7 +245,7 @@ def should_sync_flat_fixture(project, app): int_array = ArrayField(int_field) -def get_location_fixture_queryset(user): +def get_location_fixture_queryset_for_user(user): if toggles.SYNC_ALL_LOCATIONS.enabled(user.domain): return get_domain_locations(user.domain).prefetch_related('location_type') @@ -256,16 +256,16 @@ def get_location_fixture_queryset(user): user_location_ids = 
list(user_locations.order_by().values_list("id", flat=True)) - return _location_queryset_helper(user.domain, user_location_ids) + return get_location_fixture_queryset(user.domain, user_location_ids) -def _location_queryset_helper(domain, location_pks): +def get_location_fixture_queryset(domain, location_pks, case_sync_restriction=False): fixture_ids = With(raw_cte_sql( """ SELECT "id", "path", "depth" - FROM get_location_fixture_ids(%s::TEXT, %s) + FROM get_location_fixture_ids_2(%s::TEXT, %s, %s) """, - [domain, location_pks], + [domain, location_pks, case_sync_restriction], {"id": int_field, "path": int_array, "depth": int_field}, )) diff --git a/corehq/ex-submodules/casexml/apps/phone/models.py b/corehq/ex-submodules/casexml/apps/phone/models.py index 6440c8498f77..61f7be05d6c0 100644 --- a/corehq/ex-submodules/casexml/apps/phone/models.py +++ b/corehq/ex-submodules/casexml/apps/phone/models.py @@ -153,8 +153,8 @@ def get_ucr_filter_value(self, ucr_filter, ui_filter): @memoized def get_locations_to_sync(self): - from corehq.apps.locations.fixtures import get_location_fixture_queryset - return get_location_fixture_queryset(self) + from corehq.apps.locations.fixtures import get_location_fixture_queryset_for_user + return get_location_fixture_queryset_for_user(self) class OTARestoreWebUser(OTARestoreUser): From 47dd4d313397e3a1e95832699e6b5bbb2c8f6c8b Mon Sep 17 00:00:00 2001 From: AddisonDunn Date: Tue, 2 Apr 2024 13:52:02 -0400 Subject: [PATCH 020/928] add/update tests --- .../locations/tests/test_location_fixtures.py | 50 +++++++++++++++++-- 1 file changed, 46 insertions(+), 4 deletions(-) diff --git a/corehq/apps/locations/tests/test_location_fixtures.py b/corehq/apps/locations/tests/test_location_fixtures.py index 7f66aeb04734..d7d783876333 100644 --- a/corehq/apps/locations/tests/test_location_fixtures.py +++ b/corehq/apps/locations/tests/test_location_fixtures.py @@ -35,6 +35,7 @@ _location_to_fixture, get_location_data_fields, 
flat_location_fixture_generator, + get_location_fixture_queryset_for_user, get_location_fixture_queryset, location_fixture_generator, should_sync_flat_fixture, @@ -111,8 +112,14 @@ def _assert_fixture_matches_file(self, xml_name, desired_locations, flat=False): desired_fixture = self._assemble_expected_fixture(xml_name, desired_locations) self.assertXmlEqual(desired_fixture, fixture) - def assert_fixture_queryset_equals_locations(self, desired_locations): - actual = get_location_fixture_queryset(self.user).values_list('name', flat=True) + def assert_fixture_queryset_equals_locations_for_user(self, desired_locations): + actual = get_location_fixture_queryset_for_user(self.user).values_list('name', flat=True) + self.assertItemsEqual(actual, desired_locations) + + def assert_fixture_queryset_equals_locations(self, desired_locations, location_pks, + case_sync_restriction): + actual = get_location_fixture_queryset(self.domain, location_pks, + case_sync_restriction).values_list('name', flat=True) self.assertItemsEqual(actual, desired_locations) @@ -314,7 +321,7 @@ def test_include_only_location_types(self): location_type.include_only.set([self.location_types['state'], self.location_types['county']]) location_type.save() # include county and state - self.assert_fixture_queryset_equals_locations( + self.assert_fixture_queryset_equals_locations_for_user( ['Massachusetts', 'Suffolk', 'Middlesex'] ) @@ -330,6 +337,41 @@ def test_include_only_location_types_hierarchical(self): ['Massachusetts', 'Suffolk', 'Middlesex'] ) + def test_get_location_fixture_queryset_with_case_sync_restriction_default_1(self): + # Test default - case_sync_restriction on w/o any restrict_cases_to + middlesex = self.locations['Middlesex'] + self.location_types['county'].expand_to = self.location_types['county'] # This should have no effect + self.location_types['county'].expand_from = self.location_types['state'] # Also should have no effect + self.location_types['county'].save() + 
self.assert_fixture_queryset_equals_locations( + ['Massachusetts', 'Middlesex', 'Cambridge', 'Somerville'], + [middlesex.id], + True + ) + + def test_get_location_fixture_queryset_with_case_sync_restriction_default_2(self): + middlesex = self.locations['Middlesex'] + self.location_types['county']._expand_from_root = True + self.location_types['county'].include_only.set([self.location_types['state']]) + self.location_types['county'].save() + self.assert_fixture_queryset_equals_locations( + ['Massachusetts', 'Middlesex', 'Cambridge', 'Somerville'], + [middlesex.id], + True + ) + + def test_get_location_fixture_queryset_with_case_sync_restriction_restrict_cases_to(self): + # Verify restrict_cases_to + mass = self.locations['Massachusetts'] + self.location_types['state'].restrict_cases_to = self.location_types['county'] + self.location_types['state'].include_without_expanding = self.location_types['city'] + self.location_types['state'].save() + self.assert_fixture_queryset_equals_locations( + ['Massachusetts', 'Middlesex', 'Suffolk'], + [mass.id], + True + ) + @mock.patch.object(Domain, 'uses_locations', lambda: True) # removes dependency on accounting class ForkedHierarchiesTest(TestCase, FixtureHasLocationsMixin): @@ -612,7 +654,7 @@ def test_include_only_location_types(self): ]) location_type.save() # include county and state - self.assert_fixture_queryset_equals_locations([ + self.assert_fixture_queryset_equals_locations_for_user([ 'Massachusetts', 'Middlesex', 'Cambridge', From dc190cf624b404c1d4ca42acf165aa4c7d25adde Mon Sep 17 00:00:00 2001 From: AddisonDunn Date: Tue, 2 Apr 2024 17:05:32 -0400 Subject: [PATCH 021/928] use RESTRICT instead of PROTECT allows LocationType model to be deleted by domain deletion without error --- .../0021_add_fixture_queryset_case_sync_restriction.py | 6 +++--- corehq/apps/locations/models.py | 6 +++--- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git 
a/corehq/apps/locations/migrations/0021_add_fixture_queryset_case_sync_restriction.py b/corehq/apps/locations/migrations/0021_add_fixture_queryset_case_sync_restriction.py index f2cedcd034e7..cdc12bab51b5 100644 --- a/corehq/apps/locations/migrations/0021_add_fixture_queryset_case_sync_restriction.py +++ b/corehq/apps/locations/migrations/0021_add_fixture_queryset_case_sync_restriction.py @@ -17,17 +17,17 @@ class Migration(migrations.Migration): migrations.AddField( model_name='locationtype', name='restrict_cases_to', - field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, related_name='+', to='locations.locationtype'), + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.RESTRICT, related_name='+', to='locations.locationtype'), ), migrations.AlterField( model_name='locationtype', name='_expand_from', - field=models.ForeignKey(db_column='expand_from', null=True, on_delete=django.db.models.deletion.PROTECT, related_name='+', to='locations.locationtype'), + field=models.ForeignKey(db_column='expand_from', null=True, on_delete=django.db.models.deletion.RESTRICT, related_name='+', to='locations.locationtype'), ), migrations.AlterField( model_name='locationtype', name='expand_to', - field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, related_name='+', to='locations.locationtype'), + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.RESTRICT, related_name='+', to='locations.locationtype'), ), locations_sql_migrator.get_migration('get_location_fixture_ids_2.sql'), ] diff --git a/corehq/apps/locations/models.py b/corehq/apps/locations/models.py index a2ae8d141a68..9a206e937341 100644 --- a/corehq/apps/locations/models.py +++ b/corehq/apps/locations/models.py @@ -94,20 +94,20 @@ class LocationType(models.Model): null=True, related_name='+', db_column='expand_from', - on_delete=models.PROTECT, + on_delete=models.RESTRICT, ) # levels below this location type that we start expanding 
from _expand_from_root = models.BooleanField(default=False, db_column='expand_from_root') expand_to = models.ForeignKey( "self", null=True, related_name="+", - on_delete=models.PROTECT, + on_delete=models.RESTRICT, ) # levels above this type that are synced restrict_cases_to = models.ForeignKey( "self", null=True, related_name="+", - on_delete=models.PROTECT, + on_delete=models.RESTRICT, ) include_without_expanding = models.ForeignKey( 'self', From d73eabaf5626233c303902f3e114273961decddb Mon Sep 17 00:00:00 2001 From: AddisonDunn Date: Wed, 3 Apr 2024 10:42:28 -0400 Subject: [PATCH 022/928] lint --- corehq/apps/locations/fixtures.py | 10 +++++----- corehq/apps/locations/tests/test_location_fixtures.py | 9 ++++++--- 2 files changed, 11 insertions(+), 8 deletions(-) diff --git a/corehq/apps/locations/fixtures.py b/corehq/apps/locations/fixtures.py index 2000d8e6bf55..e3b687a397f2 100644 --- a/corehq/apps/locations/fixtures.py +++ b/corehq/apps/locations/fixtures.py @@ -1,9 +1,9 @@ from collections import defaultdict from itertools import groupby -from xml.etree.cElementTree import Element, SubElement +from xml.etree.cElementTree import Element from django.contrib.postgres.fields.array import ArrayField -from django.db.models import IntegerField, Q +from django.db.models import IntegerField from django_cte import With from django_cte.raw import raw_cte_sql @@ -74,8 +74,7 @@ def _app_has_changed(last_sync, app_id): def _fixture_has_changed(last_sync, restore_user): - return (not last_sync or not last_sync.date or - restore_user.get_fixture_last_modified() >= last_sync.date) + return (not last_sync or not last_sync.date or restore_user.get_fixture_last_modified() >= last_sync.date) def _locations_have_changed(last_sync, locations_queryset, restore_user): @@ -285,7 +284,8 @@ def _append_children(node, location_db, locations, data_fields): def _group_by_type(locations): - key = lambda loc: (loc.location_type.code, loc.location_type) + def key(loc): + return 
(loc.location_type.code, loc.location_type) for (code, type), locs in groupby(sorted(locations, key=key), key=key): yield type, list(locs) diff --git a/corehq/apps/locations/tests/test_location_fixtures.py b/corehq/apps/locations/tests/test_location_fixtures.py index d7d783876333..215f4b0931c6 100644 --- a/corehq/apps/locations/tests/test_location_fixtures.py +++ b/corehq/apps/locations/tests/test_location_fixtures.py @@ -148,7 +148,8 @@ def tearDown(self): @flag_enabled('HIERARCHICAL_LOCATION_FIXTURE') def test_no_user_locations_returns_empty(self): empty_fixture = EMPTY_LOCATION_FIXTURE_TEMPLATE.format(self.user.user_id) - fixture = ElementTree.tostring(call_fixture_generator(location_fixture_generator, self.user)[0], encoding='utf-8') + fixture = ElementTree.tostring(call_fixture_generator( + location_fixture_generator, self.user)[0], encoding='utf-8') self.assertXmlEqual(empty_fixture, fixture) def test_metadata(self): @@ -443,7 +444,8 @@ def test_include_without_expanding_includes_all_ancestors(self): location_type.include_without_expanding = self.locations['DTO'].location_type location_type.save() - fixture = ElementTree.tostring(call_fixture_generator(flat_location_fixture_generator, self.user)[-1], encoding='utf-8').decode('utf-8') + fixture = ElementTree.tostring(call_fixture_generator( + flat_location_fixture_generator, self.user)[-1], encoding='utf-8').decode('utf-8') for location_name in ('CDST1', 'CDST', 'DRTB1', 'DRTB', 'DTO1', 'DTO', 'CTO', 'CTO1', 'CTD'): self.assertTrue(location_name in fixture) @@ -560,7 +562,8 @@ def setUp(self): @flag_enabled('HIERARCHICAL_LOCATION_FIXTURE') def test_no_user_locations_returns_empty(self): empty_fixture = EMPTY_LOCATION_FIXTURE_TEMPLATE.format(self.user.user_id) - fixture = ElementTree.tostring(call_fixture_generator(location_fixture_generator, self.user)[0], encoding='utf-8') + fixture = ElementTree.tostring(call_fixture_generator( + location_fixture_generator, self.user)[0], encoding='utf-8') 
self.assertXmlEqual(empty_fixture, fixture) def test_simple_location_fixture(self): From f9dd9370180fd6340e598c833f93595722d83402 Mon Sep 17 00:00:00 2001 From: AddisonDunn Date: Thu, 4 Apr 2024 17:05:39 -0400 Subject: [PATCH 023/928] add validation for deleting location types referenced by other types only needed in bulk management --- corehq/apps/locations/bulk_management.py | 21 +++++++++++++++++++ .../locations/tests/test_bulk_management.py | 15 +++++++++++++ 2 files changed, 36 insertions(+) diff --git a/corehq/apps/locations/bulk_management.py b/corehq/apps/locations/bulk_management.py index 253d70b0fe6c..acb1df6e3f57 100644 --- a/corehq/apps/locations/bulk_management.py +++ b/corehq/apps/locations/bulk_management.py @@ -11,6 +11,7 @@ from django.core.exceptions import ValidationError from django.db import transaction +from django.db.models import ForeignKey from django.utils.functional import cached_property from django.utils.translation import gettext as _ from django.utils.translation import gettext_lazy @@ -456,6 +457,12 @@ def new_count(locations): ]) +@memoized +def _get_location_type_foreign_key_fields_minus_parent(): + return [field.name for field in LocationType._meta.get_fields() if isinstance(field, ForeignKey) + and field.related_model == LocationType and field.name != 'parent_type'] + + class LocationTreeValidator(object): """Validates the given type and location stubs @@ -696,6 +703,20 @@ def _validate_types_tree(self): for code in e.affected_nodes ] + # Verify that deleted types are not referenced by other types via foreign key + for deleted_type in self.types_to_be_deleted: + for field_name in _get_location_type_foreign_key_fields_minus_parent(): + referencing_types_and_fields = [ + (lt.code, field_name) for lt in self.location_types if getattr(lt.db_object, field_name) + and getattr(lt.db_object, field_name).id == deleted_type.db_object.id + ] + if referencing_types_and_fields: + return [ + _(f"Location Type '{referencing_type_and_field[0]}' 
references the type to be deleted" + f" '{deleted_type.code}' via the field '{referencing_type_and_field[1]}'") + for referencing_type_and_field in referencing_types_and_fields + ] + def _validate_location_tree(self): errors = [] diff --git a/corehq/apps/locations/tests/test_bulk_management.py b/corehq/apps/locations/tests/test_bulk_management.py index b4300150227e..a809a5e76088 100644 --- a/corehq/apps/locations/tests/test_bulk_management.py +++ b/corehq/apps/locations/tests/test_bulk_management.py @@ -1094,6 +1094,21 @@ def test_download_reupload_no_changes(self): self.assertFalse(save_location.called) self.assertFalse(save_type.called) + def test_dont_delete_referenced_location_types(self): + self.location_types['State'].expand_to = self.location_types['County'] + self.location_types['State'].save() + delete_county_type = [ + LocTypeRow('State', 'state', ''), + LocTypeRow('County', 'county', 'state', do_delete=True), + LocTypeRow('City', 'city', 'state'), + ] + result = self.bulk_update_locations( + delete_county_type, + [], + ) + assert_errors(result, ["Location Type 'state' references the type to be deleted 'county'" + " via the field 'expand_to'"]) + class TestRestrictedUserUpload(UploadTestUtils, LocationHierarchyPerTest): location_type_names = [lt.name for lt in FLAT_LOCATION_TYPES] From 5298971c7dc227d37b03586b18464c070d1d48a5 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 15 Apr 2024 09:32:39 +0000 Subject: [PATCH 024/928] Bump tar from 6.1.13 to 6.2.1 Bumps [tar](https://github.com/isaacs/node-tar) from 6.1.13 to 6.2.1. - [Release notes](https://github.com/isaacs/node-tar/releases) - [Changelog](https://github.com/isaacs/node-tar/blob/main/CHANGELOG.md) - [Commits](https://github.com/isaacs/node-tar/compare/v6.1.13...v6.2.1) --- updated-dependencies: - dependency-name: tar dependency-type: indirect ... 
Signed-off-by: dependabot[bot] --- yarn.lock | 15 ++------------- 1 file changed, 2 insertions(+), 13 deletions(-) diff --git a/yarn.lock b/yarn.lock index 38af3b39375a..fa1f62623d94 100644 --- a/yarn.lock +++ b/yarn.lock @@ -5261,19 +5261,7 @@ tar-stream@^2.1.4: inherits "^2.0.3" readable-stream "^3.1.1" -tar@^6.1.11, tar@^6.1.2: - version "6.1.11" - resolved "https://registry.yarnpkg.com/tar/-/tar-6.1.11.tgz#6760a38f003afa1b2ffd0ffe9e9abbd0eab3d621" - integrity sha512-an/KZQzQUkZCkuoAA64hM92X0Urb6VpRhAFllDzz44U2mcD5scmT3zBc4VgVpkugF580+DQn8eAFSyoQt0tznA== - dependencies: - chownr "^2.0.0" - fs-minipass "^2.0.0" - minipass "^3.0.0" - minizlib "^2.1.1" - mkdirp "^1.0.3" - yallist "^4.0.0" - -tar@^6.2.0: +tar@^6.1.11, tar@^6.1.2, tar@^6.2.0: version "6.2.1" resolved "https://registry.yarnpkg.com/tar/-/tar-6.2.1.tgz#717549c541bc3c2af15751bea94b1dd068d4b03a" integrity sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A== @@ -5686,6 +5674,7 @@ workerpool@6.2.0: integrity sha512-Rsk5qQHJ9eowMH28Jwhe8HEbmdYDX4lwoMWshiCXugjtHqMD9ZbiqSDLxcsfdqsETPzVUtX5s1Z5kStiIM6l4A== "wrap-ansi-cjs@npm:wrap-ansi@^7.0.0", wrap-ansi@^7.0.0: + name wrap-ansi-cjs version "7.0.0" resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q== From 4f57c11d3a790059eebd809807f72f386459d08b Mon Sep 17 00:00:00 2001 From: Steph Date: Mon, 15 Apr 2024 14:06:44 -0400 Subject: [PATCH 025/928] ui changes: add profile and location dropdowns --- corehq/apps/registration/forms.py | 36 ++++++++++++++++++++++++------- 1 file changed, 28 insertions(+), 8 deletions(-) diff --git a/corehq/apps/registration/forms.py b/corehq/apps/registration/forms.py index 8958fe41530d..70bff93b0371 100644 --- a/corehq/apps/registration/forms.py +++ b/corehq/apps/registration/forms.py @@ -15,7 +15,10 @@ from crispy_forms import 
layout as crispy from crispy_forms.helper import FormHelper +from corehq import privileges +from corehq.apps.accounting.utils import domain_has_privilege from corehq.apps.analytics.tasks import track_workflow +from corehq.apps.custom_data_fields.models import CustomDataFieldsDefinition from corehq.apps.domain.forms import NoAutocompleteMixin, clean_password from corehq.apps.domain.models import Domain from corehq.apps.hqwebapp import crispy as hqcrispy @@ -504,6 +507,7 @@ class AdminInvitesUserForm(RoleForm, _BaseForm, forms.Form): email = forms.EmailField(label="Email Address", max_length=User._meta.get_field('email').max_length) role = forms.ChoiceField(choices=(), label="Project Role") + profile = forms.ChoiceField(choices=(), label="Profile") def __init__(self, data=None, excluded_emails=None, is_add_user=None, *args, **kwargs): domain_obj = None @@ -514,19 +518,33 @@ def __init__(self, data=None, excluded_emails=None, is_add_user=None, *args, **k if 'location' in kwargs: location = kwargs['location'] del kwargs['location'] + show_profile = False + show_location = False + super(AdminInvitesUserForm, self).__init__(data=data, *args, **kwargs) - if domain_obj and domain_obj.commtrack_enabled: + if domain_obj: self.fields['supply_point'] = forms.CharField(label='Primary Location', required=False, widget=LocationSelectWidget(domain_obj.name), help_text=EMWF.location_search_help, - initial=location.location_id if location else '') - self.fields['program'] = forms.ChoiceField(label="Program", choices=(), required=False) - programs = Program.by_domain(domain_obj.name) - choices = list((prog.get_id, prog.name) for prog in programs) - choices.insert(0, ('', '')) - self.fields['program'].choices = choices + initial='') + show_location = True + + if domain_has_privilege(domain_obj.name, privileges.APP_USER_PROFILES): + from corehq.apps.users.views.mobile import UserFieldsView + definition = CustomDataFieldsDefinition.get(domain_obj.name, UserFieldsView.field_type) + 
profile_choices = [] + if definition: + profiles = definition.get_profiles() + profile_choices = [('', '')] + [(profile.id, profile.name) for profile in profiles] + self.fields['profile'].choices = profile_choices + show_profile = True + + if domain_obj.commtrack_enabled: + self.fields['program'] = forms.ChoiceField(label="Program", choices=(), required=False) + programs = Program.by_domain(domain_obj.name) + choices = [('', '')] + list((prog.get_id, prog.name) for prog in programs) + self.fields['program'].choices = choices self.excluded_emails = excluded_emails or [] - self.helper = FormHelper() self.helper.form_method = 'POST' self.helper.form_class = 'form-horizontal form-ko-validation' @@ -543,6 +561,8 @@ def __init__(self, data=None, excluded_emails=None, is_add_user=None, *args, **k data_bind="textInput: email", ), 'role', + 'profile' if show_profile else None, + 'supply_point' if show_location else None, ), crispy.HTML( render_to_string( From 39f834db31a951b7a40b178687b1da20e883dec7 Mon Sep 17 00:00:00 2001 From: Steph Date: Mon, 15 Apr 2024 14:09:37 -0400 Subject: [PATCH 026/928] backend: add profile to web user --- corehq/apps/users/models.py | 6 +++++- corehq/apps/users/views/__init__.py | 5 +++++ 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/corehq/apps/users/models.py b/corehq/apps/users/models.py index 5483067e7295..dc60cbc1212e 100644 --- a/corehq/apps/users/models.py +++ b/corehq/apps/users/models.py @@ -518,7 +518,7 @@ def add_domain_membership(self, domain, timezone=None, **kwargs): self.domain_memberships.append(domain_membership) self.domains.append(domain) - def add_as_web_user(self, domain, role, location_id=None, program_id=None): + def add_as_web_user(self, domain, role, location_id=None, program_id=None, profile=None): domain_obj = Domain.get_by_name(domain) self.add_domain_membership(domain=domain) self.set_role(domain, role) @@ -526,6 +526,9 @@ def add_as_web_user(self, domain, role, location_id=None, program_id=None): 
self.get_domain_membership(domain).program_id = program_id if domain_obj.uses_locations and location_id: self.set_location(domain, location_id) + if domain_has_privilege(domain_obj.name, privileges.APP_USER_PROFILES) and profile: + user_data = self.get_user_data(domain_obj.name) + user_data.update({}, profile_id=profile.id if profile.name else ...) self.save() def delete_domain_membership(self, domain, create_record=False): @@ -2787,6 +2790,7 @@ def accept_invitation_and_join_domain(self, web_user): role=self.role, location_id=self.supply_point, program_id=self.program, + profile=self.profile, ) self.is_accepted = True self.save() diff --git a/corehq/apps/users/views/__init__.py b/corehq/apps/users/views/__init__.py index ba1431000a68..e864564104e9 100644 --- a/corehq/apps/users/views/__init__.py +++ b/corehq/apps/users/views/__init__.py @@ -10,6 +10,7 @@ from couchdbkit.exceptions import ResourceNotFound from crispy_forms.utils import render_crispy_form +from corehq.apps.custom_data_fields.models import CustomDataFieldsProfile from corehq.apps.registry.utils import get_data_registry_dropdown_options from corehq.apps.reports.models import TableauVisualization, TableauUser from corehq.apps.sso.models import IdentityProvider @@ -1144,6 +1145,10 @@ def post(self, request, *args, **kwargs): # Preparation for location to replace supply_point supply_point = data.get("supply_point", None) data["location"] = SQLLocation.by_location_id(supply_point) if supply_point else None + profile_id = data.get("profile", None) + data["profile"] = CustomDataFieldsProfile.objects.get( + id=profile_id, + definition__domain=self.domain) if profile_id else None invite = Invitation(**data) invite.save() invite.send_activation_email() From 1a67df924be562f4442f694e972e160614101ad7 Mon Sep 17 00:00:00 2001 From: AddisonDunn Date: Tue, 16 Apr 2024 09:53:21 -0400 Subject: [PATCH 027/928] undo changes made to old SQL function --- corehq/apps/locations/fixtures.py | 22 +- 
.../get_location_fixture_ids_2.sql | 333 ------------------ .../locations/tests/test_location_fixtures.py | 59 +--- .../casexml/apps/phone/models.py | 4 +- 4 files changed, 20 insertions(+), 398 deletions(-) delete mode 100644 corehq/apps/locations/sql_templates/get_location_fixture_ids_2.sql diff --git a/corehq/apps/locations/fixtures.py b/corehq/apps/locations/fixtures.py index e3b687a397f2..d916fcebbfc4 100644 --- a/corehq/apps/locations/fixtures.py +++ b/corehq/apps/locations/fixtures.py @@ -1,9 +1,9 @@ from collections import defaultdict from itertools import groupby -from xml.etree.cElementTree import Element +from xml.etree.cElementTree import Element, SubElement from django.contrib.postgres.fields.array import ArrayField -from django.db.models import IntegerField +from django.db.models import IntegerField, Q from django_cte import With from django_cte.raw import raw_cte_sql @@ -74,7 +74,8 @@ def _app_has_changed(last_sync, app_id): def _fixture_has_changed(last_sync, restore_user): - return (not last_sync or not last_sync.date or restore_user.get_fixture_last_modified() >= last_sync.date) + return (not last_sync or not last_sync.date or + restore_user.get_fixture_last_modified() >= last_sync.date) def _locations_have_changed(last_sync, locations_queryset, restore_user): @@ -108,7 +109,7 @@ def __call__(self, restore_state): if not self.serializer.should_sync(restore_user, restore_state.params.app): return [] - # This just calls get_location_fixture_queryset_for_user but is memoized to the user + # This just calls get_location_fixture_queryset but is memoized to the user locations_queryset = restore_user.get_locations_to_sync() if not should_sync_locations(restore_state.last_sync_log, locations_queryset, restore_state): return [] @@ -244,7 +245,7 @@ def should_sync_flat_fixture(project, app): int_array = ArrayField(int_field) -def get_location_fixture_queryset_for_user(user): +def get_location_fixture_queryset(user): if 
toggles.SYNC_ALL_LOCATIONS.enabled(user.domain): return get_domain_locations(user.domain).prefetch_related('location_type') @@ -255,16 +256,16 @@ def get_location_fixture_queryset_for_user(user): user_location_ids = list(user_locations.order_by().values_list("id", flat=True)) - return get_location_fixture_queryset(user.domain, user_location_ids) + return _location_queryset_helper(user.domain, user_location_ids) -def get_location_fixture_queryset(domain, location_pks, case_sync_restriction=False): +def _location_queryset_helper(domain, location_pks): fixture_ids = With(raw_cte_sql( """ SELECT "id", "path", "depth" - FROM get_location_fixture_ids_2(%s::TEXT, %s, %s) + FROM get_location_fixture_ids(%s::TEXT, %s) """, - [domain, location_pks, case_sync_restriction], + [domain, location_pks], {"id": int_field, "path": int_array, "depth": int_field}, )) @@ -284,8 +285,7 @@ def _append_children(node, location_db, locations, data_fields): def _group_by_type(locations): - def key(loc): - return (loc.location_type.code, loc.location_type) + key = lambda loc: (loc.location_type.code, loc.location_type) for (code, type), locs in groupby(sorted(locations, key=key), key=key): yield type, list(locs) diff --git a/corehq/apps/locations/sql_templates/get_location_fixture_ids_2.sql b/corehq/apps/locations/sql_templates/get_location_fixture_ids_2.sql deleted file mode 100644 index e909785b6955..000000000000 --- a/corehq/apps/locations/sql_templates/get_location_fixture_ids_2.sql +++ /dev/null @@ -1,333 +0,0 @@ -DROP FUNCTION IF EXISTS get_location_fixture_ids_2(TEXT, INTEGER[], BOOLEAN); - -CREATE FUNCTION get_location_fixture_ids_2( - domain_name TEXT, - -- array of locations_sqllocation.id (NOT locations_sqllocation.location_id) - user_location_ids_array INTEGER[], - case_sync_restriction BOOLEAN -) RETURNS TABLE ( - "id" INTEGER, -- location id - "path" INTEGER[], -- location tree path from root (array of location ids) - "depth" INTEGER -- depth in locations tree (0 is root node) 
-) AS $$ -BEGIN - /* - Get fixture locations using expand_from criteria - - There may be ambiguities in location type configurations that could - cause undefined outcomes: - - expand_from_root = TRUE seems to do the same thing as - include_without_expanding IS NOT NULL (redundant config?). - - expand_from_root = TRUE with expand_from IS NOT NULL seems logically - inconsistent. Suggest adding check constraint to prevent this state. - - include_without_expanding IS NOT NULL with expand_from IS NOT NULL - seems logically inconsistent. Suggest adding check constraint. - - expand_from could point to a location that is not an ancester - - expand_to could point to a location that is not a descendant (maybe - doesn't matter since it's only used to calculate a depth). - - two location types along the same path could both have expand_to set - to different levels, making the expansion depth ambiguous if a user - had both of those locations. - - ancestors could be excluded with improper include_only config. - seems like we could achieve the same thing with expand_from/expand_to - */ - - RETURN QUERY - - WITH RECURSIVE expand_to AS ( - /* - CTE with location type ids and corresponding expand to depths - - This traverses the location type hierarchy, which is assumed to - mirror the location hierarchy but contain many less records. The - traversal is over user locations' types and their ancestors, so - should be reasonably fast. 
- - "expand_to" columns: - - expand_to_type: location type id, -1 if include_without_expanding - - expand_to_depth: expansion depth - */ - - WITH RECURSIVE cte AS ( - -- get expand_to location types - SELECT - expand_to_type."parent_type_id", - 0 AS "depth", - expand_to_type."id" AS "expand_to_type" - FROM "locations_sqllocation" loc - INNER JOIN "locations_locationtype" loc_type - ON loc."location_type_id" = loc_type."id" - INNER JOIN "locations_locationtype" expand_to_type - ON CASE - WHEN case_sync_restriction THEN expand_to_type."id" = loc_type."restrict_cases_to_id" - ELSE expand_to_type."id" = loc_type."expand_to_id" - END - WHERE - loc."id" = ANY(user_location_ids_array) - AND CASE - WHEN case_sync_restriction THEN loc_type."restrict_cases_to_id" IS NOT NULL - ELSE loc_type."expand_to_id" IS NOT NULL - END - - - UNION ALL - - -- get include_without_expanding location types - SELECT - iwe_type."parent_type_id", - 0 AS "depth", - -1 AS "expand_to_type" - FROM "locations_sqllocation" loc - INNER JOIN "locations_locationtype" loc_type - ON loc."location_type_id" = loc_type."id" - INNER JOIN "locations_locationtype" iwe_type - ON loc_type."include_without_expanding_id" = iwe_type."id" - WHERE - NOT case_sync_restriction - AND loc."id" = ANY(user_location_ids_array) - AND loc_type."include_without_expanding_id" IS NOT NULL - - UNION ALL - - -- recursive query to calculate depths - SELECT - loc_type."parent_type_id", - "cte"."depth" + 1 AS "depth", - "cte"."expand_to_type" AS "expand_to_type" - FROM "locations_locationtype" loc_type - INNER JOIN "cte" ON loc_type."id" = "cte"."parent_type_id" - ) - - SELECT - "cte"."expand_to_type", - MAX("cte"."depth") AS "expand_to_depth" - FROM "cte" - WHERE "cte"."parent_type_id" IS NULL -- exclude all but the root items - GROUP BY "cte"."expand_to_type" - - ), expand_from AS ( - /* - CTE with expand from location ids and expansion depths - - The traversal is over user locations and their ancestors, so should - be reasonably 
fast. - - "expand_from" columns: - - loc_id: location id, null for include_without_expanding or - expand_from_root. - - depth: expand to depth. Negative values in this column have - special meanings. See output examples below. - - loc_id | depth - --------|------- - NULL | 3 -- include all locations with depth <= 3 - 1 | 4 -- include all descendents of location 1 to depth 4 - 10 | -1 -- include location 10 (but do not expand) - 100 | -2 -- include all descendents of location 100, unlimited depth - 11 | -3 -- location 11 and its descendants are included based on - include_only types - */ - - WITH RECURSIVE cte AS ( - -- get include_without_expanding depth - SELECT - NULL AS "parent_id", - NULL AS "expand_from_type", - NULL AS "loc_id", - "expand_to"."expand_to_depth" AS "depth" - FROM "expand_to" - WHERE "expand_to"."expand_to_type" = -1 - - UNION ALL - - SELECT - loc."parent_id", - CASE - WHEN ( - -- if expand_from is set and not the current location type - -- it will be one of this location's ancestors - loc_type."expand_from" IS NOT NULL - AND loc_type."expand_from_root" = FALSE - AND loc_type."expand_from" <> loc."location_type_id" - AND NOT EXISTS ( - -- might be wrong to ignore loc_type.expand_from - -- when include_only types exist - SELECT 1 - FROM "locations_locationtype_include_only" - WHERE "from_locationtype_id" = loc."location_type_id" - ) - ) AND NOT case_sync_restriction THEN loc_type."expand_from" - -- otherwise it will be null for this and all ancestors - ELSE NULL - END AS "expand_from_type", - CASE - -- expand_from_root -> no path - WHEN loc_type."expand_from_root" = TRUE AND NOT case_sync_restriction THEN NULL - -- else first path element - ELSE loc."id" - END AS "loc_id", - CASE - WHEN case_sync_restriction THEN ( - CASE WHEN loc_type."restrict_cases_to_id" IS NOT NULL THEN ( - SELECT "expand_to_depth" - FROM "expand_to" - WHERE "expand_to_type" = loc_type."restrict_cases_to_id" - ) ELSE -2 - END - ) - -- get expand_to depth - WHEN 
loc_type."expand_to_id" IS NOT NULL THEN ( - SELECT "expand_to_depth" - FROM "expand_to" - WHERE "expand_to_type" = loc_type."expand_to_id" - ) - -- use include_only types - WHEN EXISTS ( - SELECT 1 - FROM "locations_locationtype_include_only" - WHERE "from_locationtype_id" = loc."location_type_id" - ) THEN -3 - -- else unlimited expansion depth - ELSE -2 - END AS "depth" - FROM "locations_sqllocation" loc - INNER JOIN "locations_locationtype" loc_type ON loc."location_type_id" = loc_type."id" - WHERE - loc."is_archived" = FALSE - AND loc."domain" = domain_name - AND loc."id" = ANY(user_location_ids_array) - - UNION ALL - - SELECT - loc."parent_id", - CASE - -- set expand_from_type if it will apply to an ancestor - WHEN "cte"."expand_from_type" <> loc."location_type_id" - THEN "cte"."expand_from_type" - -- otherwise it will be null for this and all ancestors - ELSE NULL - END AS "expand_from_type", - CASE - -- expand_from_root -> no path - WHEN "cte"."loc_id" IS NULL THEN NULL - -- else next element of path - ELSE loc."id" - END AS "loc_id", - CASE - -- ancestor of expand_from -> include but do not expand - WHEN ( - "cte"."loc_id" IS NOT NULL - AND "cte"."expand_from_type" IS NULL - ) THEN -1 - -- else no path yet or starting path -> use previous depth - ELSE "cte"."depth" - END AS "depth" - FROM "locations_sqllocation" loc - INNER JOIN "cte" ON loc."id" = "cte"."parent_id" - WHERE loc."is_archived" = FALSE - ) - - SELECT DISTINCT "cte"."loc_id", "cte"."depth" FROM "cte" - - ), fixture_ids AS ( - /* - Get fixture locations using expand_from criteria - - "fixture_ids" columns: - - id: location id - - path: location tree path from root (array of location ids) - - depth: depth in locations tree (0 is root node) - */ - - SELECT - loc."id", - ARRAY[loc."id"] AS "path", - 0 AS "depth" - FROM "locations_sqllocation" loc - WHERE - loc."is_archived" = FALSE - AND loc."domain" = domain_name - AND loc."parent_id" IS NULL - AND EXISTS ( - SELECT 1 - FROM "expand_from" xf - 
WHERE - ( - "loc_id" = loc."id" AND ( - xf."depth" = -1 -- ancestor of expand_from - OR xf."depth" = -2 -- expansion depth is unlimited - -- descendant of expand_from within expand_to depth - OR xf."depth" >= 0 - ) - ) OR ( - -- include_without_expanding/expand_from_root - -- AND - -- unlimited depth or max depth >= current depth - "loc_id" IS NULL AND (xf."depth" = -2 OR xf."depth" >= 0) - ) OR ( - -- location type is in include_only types - xf."depth" = -3 - AND "loc_id" = loc."id" - AND loc."location_type_id" IN ( - SELECT to_locationtype_id - FROM locations_locationtype_include_only - INNER JOIN "locations_sqllocation" x - ON x."location_type_id" = from_locationtype_id - WHERE x."id" = ANY(user_location_ids_array) - ) - ) - ) - - UNION ALL - - SELECT - loc."id", - array_append("fixture_ids"."path", loc."id") AS "path", - "fixture_ids"."depth" + 1 AS "depth" - FROM "locations_sqllocation" loc - INNER JOIN "fixture_ids" ON loc."parent_id" = "fixture_ids"."id" - WHERE - loc."is_archived" = FALSE - AND loc."domain" = domain_name - AND EXISTS ( - SELECT 1 - FROM "expand_from" xf - WHERE - ( - "loc_id" = loc."id" AND ( - xf."depth" = -1 -- ancestor of expand_from - OR xf."depth" = -2 -- expansion depth is unlimited - -- descendant of expand_from within expand_to depth - OR "fixture_ids"."depth" < xf."depth" - ) - ) OR ( - ( - -- include_without_expanding/expand_from_root or - -- descendant of expand_from within expand_to depth - "loc_id" IS NULL OR "loc_id" = ANY("fixture_ids"."path") - ) AND ( - xf."depth" = -2 -- expansion depth is unlimited - -- descendant of expand_from within expand_to depth - OR "fixture_ids"."depth" < xf."depth" - ) - ) OR ( - -- location type is in include_only types - xf."depth" = -3 AND ( - "loc_id" = loc."id" - OR "loc_id" = ANY("fixture_ids"."path") - ) AND loc."location_type_id" IN ( - SELECT to_locationtype_id - FROM locations_locationtype_include_only - INNER JOIN "locations_sqllocation" x - ON x."location_type_id" = 
from_locationtype_id - WHERE x."id" = ANY(user_location_ids_array) - ) - ) - ) - ) - - SELECT x."id", x."path", x."depth" from fixture_ids x; - -END; -$$ LANGUAGE plpgsql; diff --git a/corehq/apps/locations/tests/test_location_fixtures.py b/corehq/apps/locations/tests/test_location_fixtures.py index 215f4b0931c6..7f66aeb04734 100644 --- a/corehq/apps/locations/tests/test_location_fixtures.py +++ b/corehq/apps/locations/tests/test_location_fixtures.py @@ -35,7 +35,6 @@ _location_to_fixture, get_location_data_fields, flat_location_fixture_generator, - get_location_fixture_queryset_for_user, get_location_fixture_queryset, location_fixture_generator, should_sync_flat_fixture, @@ -112,14 +111,8 @@ def _assert_fixture_matches_file(self, xml_name, desired_locations, flat=False): desired_fixture = self._assemble_expected_fixture(xml_name, desired_locations) self.assertXmlEqual(desired_fixture, fixture) - def assert_fixture_queryset_equals_locations_for_user(self, desired_locations): - actual = get_location_fixture_queryset_for_user(self.user).values_list('name', flat=True) - self.assertItemsEqual(actual, desired_locations) - - def assert_fixture_queryset_equals_locations(self, desired_locations, location_pks, - case_sync_restriction): - actual = get_location_fixture_queryset(self.domain, location_pks, - case_sync_restriction).values_list('name', flat=True) + def assert_fixture_queryset_equals_locations(self, desired_locations): + actual = get_location_fixture_queryset(self.user).values_list('name', flat=True) self.assertItemsEqual(actual, desired_locations) @@ -148,8 +141,7 @@ def tearDown(self): @flag_enabled('HIERARCHICAL_LOCATION_FIXTURE') def test_no_user_locations_returns_empty(self): empty_fixture = EMPTY_LOCATION_FIXTURE_TEMPLATE.format(self.user.user_id) - fixture = ElementTree.tostring(call_fixture_generator( - location_fixture_generator, self.user)[0], encoding='utf-8') + fixture = ElementTree.tostring(call_fixture_generator(location_fixture_generator, 
self.user)[0], encoding='utf-8') self.assertXmlEqual(empty_fixture, fixture) def test_metadata(self): @@ -322,7 +314,7 @@ def test_include_only_location_types(self): location_type.include_only.set([self.location_types['state'], self.location_types['county']]) location_type.save() # include county and state - self.assert_fixture_queryset_equals_locations_for_user( + self.assert_fixture_queryset_equals_locations( ['Massachusetts', 'Suffolk', 'Middlesex'] ) @@ -338,41 +330,6 @@ def test_include_only_location_types_hierarchical(self): ['Massachusetts', 'Suffolk', 'Middlesex'] ) - def test_get_location_fixture_queryset_with_case_sync_restriction_default_1(self): - # Test default - case_sync_restriction on w/o any restrict_cases_to - middlesex = self.locations['Middlesex'] - self.location_types['county'].expand_to = self.location_types['county'] # This should have no effect - self.location_types['county'].expand_from = self.location_types['state'] # Also should have no effect - self.location_types['county'].save() - self.assert_fixture_queryset_equals_locations( - ['Massachusetts', 'Middlesex', 'Cambridge', 'Somerville'], - [middlesex.id], - True - ) - - def test_get_location_fixture_queryset_with_case_sync_restriction_default_2(self): - middlesex = self.locations['Middlesex'] - self.location_types['county']._expand_from_root = True - self.location_types['county'].include_only.set([self.location_types['state']]) - self.location_types['county'].save() - self.assert_fixture_queryset_equals_locations( - ['Massachusetts', 'Middlesex', 'Cambridge', 'Somerville'], - [middlesex.id], - True - ) - - def test_get_location_fixture_queryset_with_case_sync_restriction_restrict_cases_to(self): - # Verify restrict_cases_to - mass = self.locations['Massachusetts'] - self.location_types['state'].restrict_cases_to = self.location_types['county'] - self.location_types['state'].include_without_expanding = self.location_types['city'] - self.location_types['state'].save() - 
self.assert_fixture_queryset_equals_locations( - ['Massachusetts', 'Middlesex', 'Suffolk'], - [mass.id], - True - ) - @mock.patch.object(Domain, 'uses_locations', lambda: True) # removes dependency on accounting class ForkedHierarchiesTest(TestCase, FixtureHasLocationsMixin): @@ -444,8 +401,7 @@ def test_include_without_expanding_includes_all_ancestors(self): location_type.include_without_expanding = self.locations['DTO'].location_type location_type.save() - fixture = ElementTree.tostring(call_fixture_generator( - flat_location_fixture_generator, self.user)[-1], encoding='utf-8').decode('utf-8') + fixture = ElementTree.tostring(call_fixture_generator(flat_location_fixture_generator, self.user)[-1], encoding='utf-8').decode('utf-8') for location_name in ('CDST1', 'CDST', 'DRTB1', 'DRTB', 'DTO1', 'DTO', 'CTO', 'CTO1', 'CTD'): self.assertTrue(location_name in fixture) @@ -562,8 +518,7 @@ def setUp(self): @flag_enabled('HIERARCHICAL_LOCATION_FIXTURE') def test_no_user_locations_returns_empty(self): empty_fixture = EMPTY_LOCATION_FIXTURE_TEMPLATE.format(self.user.user_id) - fixture = ElementTree.tostring(call_fixture_generator( - location_fixture_generator, self.user)[0], encoding='utf-8') + fixture = ElementTree.tostring(call_fixture_generator(location_fixture_generator, self.user)[0], encoding='utf-8') self.assertXmlEqual(empty_fixture, fixture) def test_simple_location_fixture(self): @@ -657,7 +612,7 @@ def test_include_only_location_types(self): ]) location_type.save() # include county and state - self.assert_fixture_queryset_equals_locations_for_user([ + self.assert_fixture_queryset_equals_locations([ 'Massachusetts', 'Middlesex', 'Cambridge', diff --git a/corehq/ex-submodules/casexml/apps/phone/models.py b/corehq/ex-submodules/casexml/apps/phone/models.py index 61f7be05d6c0..6440c8498f77 100644 --- a/corehq/ex-submodules/casexml/apps/phone/models.py +++ b/corehq/ex-submodules/casexml/apps/phone/models.py @@ -153,8 +153,8 @@ def get_ucr_filter_value(self, 
ucr_filter, ui_filter): @memoized def get_locations_to_sync(self): - from corehq.apps.locations.fixtures import get_location_fixture_queryset_for_user - return get_location_fixture_queryset_for_user(self) + from corehq.apps.locations.fixtures import get_location_fixture_queryset + return get_location_fixture_queryset(self) class OTARestoreWebUser(OTARestoreUser): From eb53f09b0223bafcb80c269ac0d16ca6a9c3627f Mon Sep 17 00:00:00 2001 From: AddisonDunn Date: Tue, 16 Apr 2024 09:53:59 -0400 Subject: [PATCH 028/928] update field name --- .../0021_add_fixture_queryset_case_sync_restriction.py | 2 +- corehq/apps/locations/models.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/corehq/apps/locations/migrations/0021_add_fixture_queryset_case_sync_restriction.py b/corehq/apps/locations/migrations/0021_add_fixture_queryset_case_sync_restriction.py index cdc12bab51b5..f1bc2e9df3a2 100644 --- a/corehq/apps/locations/migrations/0021_add_fixture_queryset_case_sync_restriction.py +++ b/corehq/apps/locations/migrations/0021_add_fixture_queryset_case_sync_restriction.py @@ -16,7 +16,7 @@ class Migration(migrations.Migration): operations = [ migrations.AddField( model_name='locationtype', - name='restrict_cases_to', + name='expand_view_child_data_to', field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.RESTRICT, related_name='+', to='locations.locationtype'), ), migrations.AlterField( diff --git a/corehq/apps/locations/models.py b/corehq/apps/locations/models.py index 9a206e937341..bc2d76a33382 100644 --- a/corehq/apps/locations/models.py +++ b/corehq/apps/locations/models.py @@ -103,7 +103,7 @@ class LocationType(models.Model): related_name="+", on_delete=models.RESTRICT, ) # levels above this type that are synced - restrict_cases_to = models.ForeignKey( + expand_view_child_data_to = models.ForeignKey( "self", null=True, related_name="+", From a16f4d4d6bad207a9b46d17012250fa1cc7d8d4a Mon Sep 17 00:00:00 2001 From: AddisonDunn Date: Wed, 17 
Apr 2024 12:07:51 -0400 Subject: [PATCH 029/928] add new SQL function for getting case owning locations --- ..._fixture_queryset_case_sync_restriction.py | 2 +- .../get_case_owning_locations.sql | 169 ++++++++++++++++++ 2 files changed, 170 insertions(+), 1 deletion(-) create mode 100644 corehq/apps/locations/sql_templates/get_case_owning_locations.sql diff --git a/corehq/apps/locations/migrations/0021_add_fixture_queryset_case_sync_restriction.py b/corehq/apps/locations/migrations/0021_add_fixture_queryset_case_sync_restriction.py index f1bc2e9df3a2..9cbb6e5eb14b 100644 --- a/corehq/apps/locations/migrations/0021_add_fixture_queryset_case_sync_restriction.py +++ b/corehq/apps/locations/migrations/0021_add_fixture_queryset_case_sync_restriction.py @@ -29,5 +29,5 @@ class Migration(migrations.Migration): name='expand_to', field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.RESTRICT, related_name='+', to='locations.locationtype'), ), - locations_sql_migrator.get_migration('get_location_fixture_ids_2.sql'), + locations_sql_migrator.get_migration('get_case_owning_locations.sql'), ] diff --git a/corehq/apps/locations/sql_templates/get_case_owning_locations.sql b/corehq/apps/locations/sql_templates/get_case_owning_locations.sql new file mode 100644 index 000000000000..fdba05eec121 --- /dev/null +++ b/corehq/apps/locations/sql_templates/get_case_owning_locations.sql @@ -0,0 +1,169 @@ +DROP FUNCTION IF EXISTS get_case_owning_locations(TEXT, INTEGER[]); + +CREATE FUNCTION get_case_owning_locations( + domain_name TEXT, + -- array of locations_sqllocation.id (NOT locations_sqllocation.location_id) + user_location_ids_array INTEGER[] +) RETURNS TABLE ( + "id" INTEGER +) AS $$ +BEGIN + /* + Gets the locations whose cases belong in the user's restore file. Spin-off of the function + `get_location_fixture_ids`--this function follows the same sort of high-level process with a slightly + different approach. + + 1. 
(expand_to CTE) Gets the expand_to location types for each of the user's locations and, using + recursion, their depths + 2. (expand_from CTE) Turns expand_to into a table of the user's location IDs, their depths, and + the depths to expand to (-2 for unlimited expansion) + 3. (restore_file_locations CTE) Gets appropriate set of locations using the columns from #2 + 4. (final select statement) Gets _distinct_ set of location IDs whose type has case sharing on + */ + + RETURN QUERY + + WITH RECURSIVE expand_to AS ( + /* + Get the expand_to types for each of the user's locations, and their depths. + */ + + WITH RECURSIVE cte AS ( + SELECT + expand_to_type."parent_type_id", + 0 AS "depth", + expand_to_type."id" AS "expand_to_type" + FROM "locations_sqllocation" loc + INNER JOIN "locations_locationtype" loc_type + ON loc."location_type_id" = loc_type."id" + INNER JOIN "locations_locationtype" expand_to_type + ON expand_to_type."id" = loc_type."expand_view_child_data_to_id" + WHERE + loc."id" = ANY(user_location_ids_array) + AND loc_type."expand_view_child_data_to_id" IS NOT NULL + AND loc_type."view_descendants" = TRUE + + UNION ALL + + SELECT + loc_type."parent_type_id", + "cte"."depth" + 1 AS "depth", + "cte"."expand_to_type" AS "expand_to_type" + FROM "locations_locationtype" loc_type + INNER JOIN "cte" ON loc_type."id" = "cte"."parent_type_id" + ) + + SELECT + "cte"."expand_to_type", + MAX("cte"."depth") AS "expand_to_depth" + FROM "cte" + WHERE "cte"."parent_type_id" IS NULL + GROUP BY "cte"."expand_to_type" + + ), expand_from AS ( + /* + This CTE has the columns: + + Location ID, depth for location to expand to, and depth of location itself (in that order). + + Each row is a location the user belongs to. This info is then used by the restore_file_locations + CTE to get the list of expanded locations. + */ + + WITH RECURSIVE cte AS ( + + -- Get location ID and depth to expand to. 
Recursion base case + SELECT + loc_type."parent_type_id" AS "recur_parent_type_id", + 0 as "recur_depth", + loc."id" AS "loc_id", + CASE WHEN loc_type."expand_view_child_data_to_id" IS NULL THEN -2 + ELSE ( + SELECT "expand_to_depth" + FROM "expand_to" + WHERE "expand_to_type" = loc_type."expand_view_child_data_to_id" + ) + END AS "expand_to_depth" + FROM "locations_sqllocation" loc + INNER JOIN "locations_locationtype" loc_type ON loc."location_type_id" = loc_type."id" + WHERE + loc."is_archived" = FALSE + AND loc."domain" = domain_name + AND loc."id" = ANY(user_location_ids_array) + AND loc_type."view_descendants" = TRUE + + UNION ALL + + -- Recursion to get depth of each location + SELECT + loc_type."parent_type_id" AS "recur_parent_type_id", + "cte"."recur_depth" + 1 AS "recur_depth", + "cte"."loc_id", + "cte"."expand_to_depth" + FROM "locations_locationtype" loc_type + INNER JOIN "cte" ON loc_type."id" = "cte"."recur_parent_type_id" + + ) + + -- All info is available at top-level node + SELECT + "cte"."loc_id", + "cte"."expand_to_depth", + "cte"."recur_depth" AS "loc_depth" + FROM "cte" + WHERE "cte"."recur_parent_type_id" IS NULL + + ), restore_file_locations AS ( + + SELECT + loc."id", + "expand_from"."loc_depth", + loc."location_type_id" + FROM "locations_sqllocation" loc + INNER JOIN "expand_from" on loc."id" = "expand_from"."loc_id" + WHERE + loc."is_archived" = FALSE + AND loc."domain" = domain_name + + UNION ALL + + SELECT + loc."id", + "restore_file_locations"."loc_depth" + 1 AS "loc_depth", + loc."location_type_id" + FROM "locations_sqllocation" loc + INNER JOIN "restore_file_locations" ON loc."parent_id" = "restore_file_locations"."id" + WHERE + loc."is_archived" = FALSE + AND loc."domain" = domain_name + AND EXISTS ( + SELECT 1 + FROM "expand_from" xf + WHERE + ( + xf."expand_to_depth" = -2 -- expansion depth is unlimited + OR "restore_file_locations"."loc_depth" < xf."expand_to_depth" + ) + ) + ) + + -- Final SELECT. 
Filter by "shares cases" setting + SELECT DISTINCT + x."id" + FROM "restore_file_locations" x + INNER JOIN "locations_locationtype" loc_type ON x."location_type_id" = loc_type."id" + WHERE loc_type."shares_cases" = TRUE + + UNION ALL + + -- And union with the user's locations who can't view descendants + SELECT + loc."id" + FROM "locations_sqllocation" loc + INNER JOIN "locations_locationtype" loc_type on loc."location_type_id" = loc_type."id" + WHERE loc."id" = ANY(user_location_ids_array) + AND loc_type."view_descendants" = FALSE + AND loc_type."shares_cases" = TRUE; + +END; +$$ LANGUAGE plpgsql; From a72c3f5a7b610b4e3440e8c757ee4506edeaf74b Mon Sep 17 00:00:00 2001 From: AddisonDunn Date: Wed, 17 Apr 2024 12:09:49 -0400 Subject: [PATCH 030/928] use SQL function for getting case owning locations when FF is on --- corehq/apps/users/models.py | 18 +++++-- corehq/apps/users/tests/test_get_owner_ids.py | 49 +++++++++++++++++++ corehq/toggles/__init__.py | 13 +++++ 3 files changed, 75 insertions(+), 5 deletions(-) diff --git a/corehq/apps/users/models.py b/corehq/apps/users/models.py index 5483067e7295..ad1e63ff532d 100644 --- a/corehq/apps/users/models.py +++ b/corehq/apps/users/models.py @@ -1139,11 +1139,19 @@ def _get_case_owning_locations(self, domain): """ from corehq.apps.locations.models import SQLLocation - yield from self.get_sql_locations(domain).filter(location_type__shares_cases=True) - - yield from SQLLocation.objects.get_queryset_descendants( - self.get_sql_locations(domain).filter(location_type__view_descendants=True) - ).filter(location_type__shares_cases=True, is_archived=False) + if toggles.USH_RESTORE_FILE_LOCATION_CASE_SYNC_RESTRICTION.enabled(domain): + user_location_ids = list(self.get_sql_locations(domain).order_by().values_list("id", flat=True)) + yield from SQLLocation.objects.raw( + """ + SELECT id FROM get_case_owning_locations(%s, %s); + """, + [domain, user_location_ids] + ) + else: + yield from 
self.get_sql_locations(domain).filter(location_type__shares_cases=True) + yield from SQLLocation.objects.get_queryset_descendants( + self.get_sql_locations(domain).filter(location_type__view_descendants=True) + ).filter(location_type__shares_cases=True, is_archived=False) def delete(self, deleted_by_domain, deleted_by, deleted_via=None): from corehq.apps.users.model_log import UserModelAction diff --git a/corehq/apps/users/tests/test_get_owner_ids.py b/corehq/apps/users/tests/test_get_owner_ids.py index 3fae7651e504..c98a68eebafc 100644 --- a/corehq/apps/users/tests/test_get_owner_ids.py +++ b/corehq/apps/users/tests/test_get_owner_ids.py @@ -4,6 +4,7 @@ from corehq.apps.groups.models import Group from corehq.apps.locations.tests.util import LocationHierarchyTestCase from corehq.apps.users.models import CommCareUser, WebUser +from corehq.util.test_utils import flag_enabled class OwnerIDTestCase(TestCase): @@ -102,3 +103,51 @@ def test_web_user(self): user.get_owner_ids(self.domain), [user.user_id] + [self.locations[loc].location_id for loc in ['Manhattan', 'Brooklyn', 'Queens']] ) + + @flag_enabled('USH_RESTORE_FILE_LOCATION_CASE_SYNC_RESTRICTION') + def test_hierarchical_ownership_with_SQL_function(self): + # Uses the SQL function, but functionality should be exact same as method above + user = CommCareUser.create(self.domain, 'username', 'password', None, None) + user.set_location(self.locations['New York']) + user.add_to_assigned_locations(self.locations['Suffolk']) + user.add_to_assigned_locations(self.locations['Somerville']) + user.save() + self.addCleanup(user.delete, self.domain, deleted_by=None) + + self.assertItemsEqual( + user.get_owner_ids(self.domain), + [user.user_id] + [self.locations[loc].location_id for loc in + ['Manhattan', 'Brooklyn', 'Queens', 'Somerville']] + ) + + @flag_enabled('USH_RESTORE_FILE_LOCATION_CASE_SYNC_RESTRICTION') + def test_web_user_with_SQL_function(self): + user = WebUser.create(self.domain, 'username', 'password', None, 
None) + user.set_location(self.domain, self.locations['New York']) + user.save() + self.addCleanup(user.delete, self.domain, deleted_by=None) + + self.assertItemsEqual( + user.get_owner_ids(self.domain), + [user.user_id] + [self.locations[loc].location_id for loc in ['Manhattan', 'Brooklyn', 'Queens']] + ) + + @flag_enabled('USH_RESTORE_FILE_LOCATION_CASE_SYNC_RESTRICTION') + def test_case_sync_restriction_simple(self): + # Tests `expand_view_child_data_to` setting + user = WebUser.create(self.domain, 'username', 'password', None, None) + user.set_location(self.domain, self.locations['New York']) + user.save() + self.addCleanup(user.delete, self.domain, deleted_by=None) + + self.location_types['state'].expand_view_child_data_to = self.location_types['county'] + self.location_types['state'].shares_cases = True + self.location_types['state'].save() + self.location_types['county'].shares_cases = True + self.location_types['county'].view_descendants = True + self.location_types['county'].save() + self.assertItemsEqual( + user.get_owner_ids(self.domain), + [user.user_id] + [self.locations[loc].location_id for loc in [ + 'New York', 'New York City']] + ) diff --git a/corehq/toggles/__init__.py b/corehq/toggles/__init__.py index d43e8935a8fb..fc76c29fc6ad 100644 --- a/corehq/toggles/__init__.py +++ b/corehq/toggles/__init__.py @@ -2785,3 +2785,16 @@ def domain_has_privilege_from_toggle(privilege_slug, domain): namespaces=[NAMESPACE_DOMAIN], description='Add support for the Road Network disbursement algorithm for the Geospatial feature', ) + +USH_RESTORE_FILE_LOCATION_CASE_SYNC_RESTRICTION = StaticToggle( + 'ush_restore_file_location_case_sync_restriction', + 'USH: Limit the location-owned cases that show up in a user\'s restore file', + TAG_CUSTOM, + namespaces=[NAMESPACE_DOMAIN], + help_link='https://dimagi.atlassian.net/wiki/spaces/USH/pages/2252210196/Prevent+Syncing+of+Lower+Level+Locations', # noqa: E501 + description=""" + In the 'Organizational Level' section of 
location management, web admins can specify which org level to + expand to when syncing the location-owned cases included in a user's restore file. Limits cases in a user's + restore file and thus can improve performance. + """ +) From 08985d273835f315c287de49cacd0c93508fc18d Mon Sep 17 00:00:00 2001 From: Steph Date: Wed, 17 Apr 2024 14:50:25 -0400 Subject: [PATCH 031/928] clean up last remains of supply_point reference --- corehq/apps/users/views/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/corehq/apps/users/views/__init__.py b/corehq/apps/users/views/__init__.py index c7505da4a958..501eb7c8bfe7 100644 --- a/corehq/apps/users/views/__init__.py +++ b/corehq/apps/users/views/__init__.py @@ -1143,8 +1143,8 @@ def post(self, request, *args, **kwargs): data["invited_by"] = request.couch_user.user_id data["invited_on"] = datetime.utcnow() data["domain"] = self.domain - supply_point = data.get("location_id", None) - data["location"] = SQLLocation.by_location_id(supply_point) if supply_point else None #TODO should be location or location_id? 
+ location_id = data.get("location_id", None) + data["location"] = SQLLocation.by_location_id(location_id) if location_id else None profile_id = data.get("profile", None) data["profile"] = CustomDataFieldsProfile.objects.get( id=profile_id, From d4edf9b1dba45493106cfc752f96b1c90fe30e53 Mon Sep 17 00:00:00 2001 From: Steph Date: Wed, 17 Apr 2024 16:14:03 -0400 Subject: [PATCH 032/928] make profile not required field --- corehq/apps/registration/forms.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/corehq/apps/registration/forms.py b/corehq/apps/registration/forms.py index 6865315dba58..fdda78e9a541 100644 --- a/corehq/apps/registration/forms.py +++ b/corehq/apps/registration/forms.py @@ -490,7 +490,7 @@ class AdminInvitesUserForm(forms.Form): email = forms.EmailField(label="Email Address", max_length=User._meta.get_field('email').max_length) role = forms.ChoiceField(choices=(), label="Project Role") - profile = forms.ChoiceField(choices=(), label="Profile") + profile = forms.ChoiceField(choices=(), label="Profile", required=False) def __init__(self, data=None, excluded_emails=None, is_add_user=None, location=None, role_choices=(), *, domain, **kwargs): From d722a4f3ff8861e746c2388ffbf2bb0bf6b4fe7e Mon Sep 17 00:00:00 2001 From: AddisonDunn Date: Wed, 17 Apr 2024 17:03:56 -0400 Subject: [PATCH 033/928] update error messaging and move loop into helper function --- corehq/apps/locations/bulk_management.py | 31 +++++++++++-------- .../locations/tests/test_bulk_management.py | 4 +-- 2 files changed, 20 insertions(+), 15 deletions(-) diff --git a/corehq/apps/locations/bulk_management.py b/corehq/apps/locations/bulk_management.py index acb1df6e3f57..fb6446839147 100644 --- a/corehq/apps/locations/bulk_management.py +++ b/corehq/apps/locations/bulk_management.py @@ -688,6 +688,23 @@ def _custom_data_errors(self): if loc.custom_data is not LocationStub.NOT_PROVIDED and validator(loc.custom_data) ] + def 
_verify_deleted_types_not_referenced_by_other_types(self): + # Location types reference other types via foreign key on a few different fields. If a user tries to + # delete a type that is referenced by another type by foreign key, we'll catch that here. + for deleted_type in self.types_to_be_deleted: + for field_name in _get_location_type_foreign_key_fields_minus_parent(): + referencing_types_and_fields = [ + (lt.code, field_name) for lt in self.location_types if getattr(lt.db_object, field_name) + and getattr(lt.db_object, field_name).id == deleted_type.db_object.id + ] + if referencing_types_and_fields: + return [ + _(f"Cannot delete location type '{deleted_type.code}'. It is referenced by the type " + f"'{type_code}' via the '{field}' setting. Change this setting on '{type_code}'" + " and try again.") + for type_code, field in referencing_types_and_fields + ] + def _validate_types_tree(self): type_pairs = [(lt.code, lt.parent_code) for lt in self.location_types] try: @@ -703,19 +720,7 @@ def _validate_types_tree(self): for code in e.affected_nodes ] - # Verify that deleted types are not referenced by other types via foreign key - for deleted_type in self.types_to_be_deleted: - for field_name in _get_location_type_foreign_key_fields_minus_parent(): - referencing_types_and_fields = [ - (lt.code, field_name) for lt in self.location_types if getattr(lt.db_object, field_name) - and getattr(lt.db_object, field_name).id == deleted_type.db_object.id - ] - if referencing_types_and_fields: - return [ - _(f"Location Type '{referencing_type_and_field[0]}' references the type to be deleted" - f" '{deleted_type.code}' via the field '{referencing_type_and_field[1]}'") - for referencing_type_and_field in referencing_types_and_fields - ] + return self._verify_deleted_types_not_referenced_by_other_types() def _validate_location_tree(self): errors = [] diff --git a/corehq/apps/locations/tests/test_bulk_management.py b/corehq/apps/locations/tests/test_bulk_management.py index 
a809a5e76088..c70116524f86 100644 --- a/corehq/apps/locations/tests/test_bulk_management.py +++ b/corehq/apps/locations/tests/test_bulk_management.py @@ -1106,8 +1106,8 @@ def test_dont_delete_referenced_location_types(self): delete_county_type, [], ) - assert_errors(result, ["Location Type 'state' references the type to be deleted 'county'" - " via the field 'expand_to'"]) + assert_errors(result, ["Cannot delete location type 'county'. It is referenced by the type 'state' via " + "the 'expand_to' setting. Change this setting on 'state' and try again."]) class TestRestrictedUserUpload(UploadTestUtils, LocationHierarchyPerTest): From 149f3484ecf73e3ca4fc97e596a8ecbc9ef0cc58 Mon Sep 17 00:00:00 2001 From: AddisonDunn Date: Wed, 17 Apr 2024 17:23:58 -0400 Subject: [PATCH 034/928] yield all import deleted type errors --- corehq/apps/locations/bulk_management.py | 10 +++++----- corehq/apps/locations/tests/test_bulk_management.py | 6 +++++- 2 files changed, 10 insertions(+), 6 deletions(-) diff --git a/corehq/apps/locations/bulk_management.py b/corehq/apps/locations/bulk_management.py index fb6446839147..4e55af550c07 100644 --- a/corehq/apps/locations/bulk_management.py +++ b/corehq/apps/locations/bulk_management.py @@ -698,12 +698,10 @@ def _verify_deleted_types_not_referenced_by_other_types(self): and getattr(lt.db_object, field_name).id == deleted_type.db_object.id ] if referencing_types_and_fields: - return [ - _(f"Cannot delete location type '{deleted_type.code}'. It is referenced by the type " + for type_code, field in referencing_types_and_fields: + yield _(f"Cannot delete location type '{deleted_type.code}'. It is referenced by the type " f"'{type_code}' via the '{field}' setting. 
Change this setting on '{type_code}'" " and try again.") - for type_code, field in referencing_types_and_fields - ] def _validate_types_tree(self): type_pairs = [(lt.code, lt.parent_code) for lt in self.location_types] @@ -720,7 +718,9 @@ def _validate_types_tree(self): for code in e.affected_nodes ] - return self._verify_deleted_types_not_referenced_by_other_types() + deleted_type_errors = list(self._verify_deleted_types_not_referenced_by_other_types()) + if deleted_type_errors: + return deleted_type_errors def _validate_location_tree(self): errors = [] diff --git a/corehq/apps/locations/tests/test_bulk_management.py b/corehq/apps/locations/tests/test_bulk_management.py index c70116524f86..620c74f07072 100644 --- a/corehq/apps/locations/tests/test_bulk_management.py +++ b/corehq/apps/locations/tests/test_bulk_management.py @@ -1096,6 +1096,7 @@ def test_download_reupload_no_changes(self): def test_dont_delete_referenced_location_types(self): self.location_types['State'].expand_to = self.location_types['County'] + self.location_types['State'].expand_view_child_data_to = self.location_types['County'] self.location_types['State'].save() delete_county_type = [ LocTypeRow('State', 'state', ''), @@ -1107,7 +1108,10 @@ def test_dont_delete_referenced_location_types(self): [], ) assert_errors(result, ["Cannot delete location type 'county'. It is referenced by the type 'state' via " - "the 'expand_to' setting. Change this setting on 'state' and try again."]) + "the 'expand_to' setting. Change this setting on 'state' and try again.", + "Cannot delete location type 'county'. It is referenced by the type 'state' via " + "the 'expand_view_child_data_to' setting. 
Change this setting on 'state' and try " + "again."]) class TestRestrictedUserUpload(UploadTestUtils, LocationHierarchyPerTest): From f989e0e4d49a6e0d922781f60df599b3bc67d5fd Mon Sep 17 00:00:00 2001 From: Steph Date: Thu, 18 Apr 2024 11:13:08 -0400 Subject: [PATCH 035/928] remove location_id from data before creating invite --- corehq/apps/users/views/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/corehq/apps/users/views/__init__.py b/corehq/apps/users/views/__init__.py index 501eb7c8bfe7..3bfe08766dce 100644 --- a/corehq/apps/users/views/__init__.py +++ b/corehq/apps/users/views/__init__.py @@ -1143,7 +1143,7 @@ def post(self, request, *args, **kwargs): data["invited_by"] = request.couch_user.user_id data["invited_on"] = datetime.utcnow() data["domain"] = self.domain - location_id = data.get("location_id", None) + location_id = data.pop("location_id", None) data["location"] = SQLLocation.by_location_id(location_id) if location_id else None profile_id = data.get("profile", None) data["profile"] = CustomDataFieldsProfile.objects.get( From 78d65a9eb1102b0a271c22f68cadfae5368736ad Mon Sep 17 00:00:00 2001 From: robert-costello Date: Thu, 18 Apr 2024 11:27:33 -0400 Subject: [PATCH 036/928] add fields for web user tableau config --- corehq/apps/users/models.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/corehq/apps/users/models.py b/corehq/apps/users/models.py index 5483067e7295..b22907d80b57 100644 --- a/corehq/apps/users/models.py +++ b/corehq/apps/users/models.py @@ -224,6 +224,9 @@ class HqPermissions(DocumentSchema): manage_domain_alerts = BooleanProperty(default=False) + edit_user_tableau_config = BooleanProperty(default=False) + view_user_tableau_config = BooleanProperty(default=False) + @classmethod def from_permission_list(cls, permission_list): """Converts a list of Permission objects into a Permissions object""" From 6ffa4faead9debd4407bd72e868bd4e98f634e77 Mon Sep 17 00:00:00 2001 From: Zandre Engelbrecht Date: Fri, 
19 Apr 2024 13:09:21 +0200 Subject: [PATCH 037/928] add filter to view report by groups --- corehq/apps/reports/standard/monitoring.py | 21 +++++++++++++++++++-- 1 file changed, 19 insertions(+), 2 deletions(-) diff --git a/corehq/apps/reports/standard/monitoring.py b/corehq/apps/reports/standard/monitoring.py index 2bf93fe3a3de..80668f033e56 100644 --- a/corehq/apps/reports/standard/monitoring.py +++ b/corehq/apps/reports/standard/monitoring.py @@ -181,8 +181,6 @@ class CaseActivityReport(WorkerMonitoringCaseReportTableBase): """See column headers for details""" name = gettext_lazy('Case Activity') slug = 'case_activity' - fields = ['corehq.apps.reports.filters.users.ExpandedMobileWorkerFilter', - 'corehq.apps.reports.filters.select.CaseTypeFilter'] display_data = ['percent'] emailable = True description = gettext_lazy("Followup rates on active cases.") @@ -190,6 +188,21 @@ class CaseActivityReport(WorkerMonitoringCaseReportTableBase): ajax_pagination = True exportable_all = True + @property + def fields(self): + fields = [ + 'corehq.apps.reports.filters.users.ExpandedMobileWorkerFilter', + 'corehq.apps.reports.filters.select.CaseTypeFilter', + ] + if self.has_case_sharing: + fields.append('corehq.apps.reports.filters.users.UserOrGroupFilter') + return fields + + @property + @memoized + def has_case_sharing(self): + return self.domain_object.case_sharing_included() + @property def shared_pagination_GET_params(self): params = [ @@ -206,6 +219,10 @@ def shared_pagination_GET_params(self): dict( name='landmark', value=self.request.GET.get('landmark') + ), + dict( + name='view_by', + value=self.request.GET.get('view_by') ) ] return params From 14f55f76c32061f03c457f176d990b941b865d0c Mon Sep 17 00:00:00 2001 From: Zandre Engelbrecht Date: Fri, 19 Apr 2024 13:11:30 +0200 Subject: [PATCH 038/928] change header/footer text if viewing by groups --- corehq/apps/reports/standard/monitoring.py | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git 
a/corehq/apps/reports/standard/monitoring.py b/corehq/apps/reports/standard/monitoring.py index 80668f033e56..b7fcae801a8e 100644 --- a/corehq/apps/reports/standard/monitoring.py +++ b/corehq/apps/reports/standard/monitoring.py @@ -203,6 +203,11 @@ def fields(self): def has_case_sharing(self): return self.domain_object.case_sharing_included() + @property + @memoized + def view_by_groups(self): + return self.request.GET.get('view_by', None) == 'groups' + @property def shared_pagination_GET_params(self): params = [ @@ -310,7 +315,7 @@ def make_column(title, help_text, num_days): help_text=help_text.format(num_days), sortable=False if title == "Proportion" else True) - columns = [DataTablesColumn(_("Users"))] + columns = [DataTablesColumn(_("Groups"))] if self.view_by_groups else [DataTablesColumn(_("Users"))] for __, landmark in self.landmarks: columns.append(DataTablesColumnGroup( @@ -504,7 +509,10 @@ def _total_row(self): query = self.add_landmark_aggregations(query, self.end_date) - return self._format_row(self.TotalRow(query.run(), _("All Users"))) + return self._format_row(self.TotalRow( + query.run(), + _("All Groups") if self.view_by_groups else _("All Users") + )) @property @memoized From 683946a456108431c08fe03b11a5af24a41e95fd Mon Sep 17 00:00:00 2001 From: Zandre Engelbrecht Date: Fri, 19 Apr 2024 13:13:02 +0200 Subject: [PATCH 039/928] create generic class for users and group/location data --- corehq/apps/reports/standard/monitoring.py | 17 ++++++++++++++--- 1 file changed, 14 insertions(+), 3 deletions(-) diff --git a/corehq/apps/reports/standard/monitoring.py b/corehq/apps/reports/standard/monitoring.py index b7fcae801a8e..b3a175c04ded 100644 --- a/corehq/apps/reports/standard/monitoring.py +++ b/corehq/apps/reports/standard/monitoring.py @@ -587,11 +587,22 @@ def landmark_aggregation(self, key, landmark, end_date): .aggregation(FilterAggregation('closed', case_es.is_closed())) ) + class RowData(object): + def __init__(self, id, name, name_in_report, 
filter_func): + self.id = id + self.name = name + self.name_in_report = name_in_report + self.filter_func = filter_func # Func for getting ID to filter by in Case List + + @property + def filter_id(self): + return urlencode(self.filter_func(self.id)) + class Row(object): - def __init__(self, report, user, bucket): + def __init__(self, report, row_data, bucket): self.report = report - self.user = user + self.row_data = row_data self.bucket = bucket def active_count(self, landmark_key): @@ -649,7 +660,7 @@ def total_active_count(self): return 0 def header(self): - return self.report.get_user_link(self.user)['html'] + return self.report.get_user_link(self.row_data)['html'] class TotalRow(object): From 7217031b9b408e13f6c2fb84c5585fe7ddec4923 Mon Sep 17 00:00:00 2001 From: Zandre Engelbrecht Date: Fri, 19 Apr 2024 13:16:32 +0200 Subject: [PATCH 040/928] add properties for retrieving selected groups/locations --- corehq/apps/reports/standard/monitoring.py | 43 +++++++++++++++++++--- 1 file changed, 37 insertions(+), 6 deletions(-) diff --git a/corehq/apps/reports/standard/monitoring.py b/corehq/apps/reports/standard/monitoring.py index b3a175c04ded..975b7bdff108 100644 --- a/corehq/apps/reports/standard/monitoring.py +++ b/corehq/apps/reports/standard/monitoring.py @@ -75,6 +75,8 @@ from corehq.util.context_processors import commcare_hq_names from corehq.util.timezones.conversions import PhoneTime, ServerTime from corehq.util.view_utils import absolute_reverse +from corehq.apps.locations.models import SQLLocation +from corehq.apps.groups.models import Group TOO_MUCH_DATA = gettext_noop( 'The filters you selected include too much data. 
Please change your filters and try again' @@ -334,15 +336,46 @@ def make_column(title, help_text, num_days): def selected_users(self): return _get_selected_users(self.domain, self.request) + @property + def has_group_filters(self): + slugs = EMWF.get_value(self.request, self.domain) + filter_count = len(EMWF.selected_group_ids(slugs) + EMWF.selected_location_ids(slugs)) + return filter_count > 0 + @property @memoized - def users_by_id(self): - return {user.user_id: user for user in self.selected_users} + def selected_groups(self): + slugs = EMWF.get_value(self.request, self.domain) + if self.has_group_filters: + group_ids = EMWF.selected_group_ids(slugs) + groups = [Group.get(g) for g in group_ids] + else: + groups = Group.get_reporting_groups(self.domain) + return [ + self.RowData( + id=group['_id'], + name=group['name'], + name_in_report=group['name'], + filter_func=CaseListFilter.for_reporting_group + ) for group in groups + ] @property @memoized - def user_ids(self): - return list(self.users_by_id) + def selected_locations(self): + locations = SQLLocation.objects.filter(domain=self.domain) + slugs = EMWF.get_value(self.request, self.domain) + if self.has_group_filters: + location_ids = EMWF.selected_location_ids(slugs) + locations = locations.filter(location_id__in=location_ids) + return [ + self.RowData( + id=loc.location_id, + name=loc.name, + name_in_report=loc.display_name, + filter_func=CaseListFilter.for_reporting_location + ) for loc in locations + ] @property @memoized @@ -1382,8 +1415,6 @@ def group_ids(self): @property @memoized def users_by_group(self): - from corehq.apps.groups.models import Group - if not self.group_ids or self.request.GET.get('all_groups', 'off') == 'on': groups = Group.get_reporting_groups(self.domain) else: From 3f67f1855fe0ad09b44fc575044231d710510c26 Mon Sep 17 00:00:00 2001 From: Zandre Engelbrecht Date: Fri, 19 Apr 2024 13:17:09 +0200 Subject: [PATCH 041/928] refactor prop for retrieving selected users to utilize generic 
class --- corehq/apps/reports/standard/monitoring.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/corehq/apps/reports/standard/monitoring.py b/corehq/apps/reports/standard/monitoring.py index 975b7bdff108..68f3845b2de2 100644 --- a/corehq/apps/reports/standard/monitoring.py +++ b/corehq/apps/reports/standard/monitoring.py @@ -334,7 +334,15 @@ def make_column(title, help_text, num_days): @property @memoized def selected_users(self): - return _get_selected_users(self.domain, self.request) + users = _get_selected_users(self.domain, self.request) + return [ + self.RowData( + id=user.user_id, + name=user.raw_username, + name_in_report=user.username_in_report, + filter_func=CaseListFilter.for_user + ) for user in users + ] @property def has_group_filters(self): From f841fe723f6bb83ba5bd33961d8544aa48f55807 Mon Sep 17 00:00:00 2001 From: Zandre Engelbrecht Date: Fri, 19 Apr 2024 13:19:19 +0200 Subject: [PATCH 042/928] refactor property funcs to retrieve owners instead of users --- corehq/apps/reports/standard/monitoring.py | 41 +++++++++++++++------- 1 file changed, 29 insertions(+), 12 deletions(-) diff --git a/corehq/apps/reports/standard/monitoring.py b/corehq/apps/reports/standard/monitoring.py index 68f3845b2de2..b7a291ee1015 100644 --- a/corehq/apps/reports/standard/monitoring.py +++ b/corehq/apps/reports/standard/monitoring.py @@ -307,7 +307,7 @@ def utc_now(self): @property def total_records(self): - return len(self.user_ids) + return len(self.owner_ids) @property def headers(self): @@ -387,29 +387,46 @@ def selected_locations(self): @property @memoized - def paginated_users(self): + def selected_owners(self): + if self.view_by_groups: + return self.selected_groups + self.selected_locations + return self.selected_users + + @property + @memoized + def owners_by_id(self): + return {owner.id: owner for owner in self.selected_owners} + + @property + @memoized + def owner_ids(self): + return list(self.owners_by_id) + + @property + 
@memoized + def paginated_owners(self): if self.sort_column is None: return sorted( - self.selected_users, key=lambda u: u.raw_username, reverse=self.pagination.desc + self.selected_owners, key=lambda o: o.name, reverse=self.pagination.desc )[self.pagination.start:self.pagination.start + self.pagination.count] - return self.selected_users + return self.selected_owners @property @memoized - def paginated_users_by_id(self): - return [(user.user_id, user) for user in self.paginated_users] + def paginated_owners_by_id(self): + return [(owner.id, owner) for owner in self.paginated_owners] @property @memoized - def paginated_user_ids(self): - return [user.user_id for user in self.paginated_users] + def paginated_owner_ids(self): + return [owner.id for owner in self.paginated_owners] @property def sort_column(self): column_num = self.request_params.get('iSortCol_0', 0) num_columns = self.request_params.get('iColumns', 15) if column_num == 0: - return None # user + return None # owner elif column_num == (num_columns - 2): return "active_total" elif column_num == (num_columns - 1): @@ -432,7 +449,7 @@ def sort_column(self): return "landmark_%d" % (landmark,) @property - def should_sort_by_username(self): + def should_sort_by_name(self): return self.request_params.get('iSortCol_0', 0) == 0 def _format_row(self, row): @@ -557,8 +574,8 @@ def _total_row(self): @property @memoized - def missing_users(self): - return None in self.user_ids + def missing_owners(self): + return None in self.owner_ids @property def end_date(self): From d522dfa029e26947e590cf3224e726871a2a99fd Mon Sep 17 00:00:00 2001 From: Zandre Engelbrecht Date: Fri, 19 Apr 2024 13:19:49 +0200 Subject: [PATCH 043/928] refactor query func to use owners --- corehq/apps/reports/standard/monitoring.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/corehq/apps/reports/standard/monitoring.py b/corehq/apps/reports/standard/monitoring.py index b7a291ee1015..afd5948ae932 100644 --- 
a/corehq/apps/reports/standard/monitoring.py +++ b/corehq/apps/reports/standard/monitoring.py @@ -585,9 +585,10 @@ def end_date(self): def milestone_start(self): return ServerTime(self.utc_now - self.milestone).phone_time(self.timezone).done() - def es_queryset(self, user_ids, size=None): + def es_queryset(self, owner_ids, size=None): + field = 'owner_id' if self.view_by_groups else 'user_id' top_level_aggregation = ( - TermsAggregation('users', 'user_id') + TermsAggregation('owners', field) .aggregation(self._touched_total_aggregation) .aggregation(self._active_total_aggregation) .aggregation(self._inactive_total_aggregation) @@ -606,9 +607,10 @@ def es_queryset(self, user_ids, size=None): query = ( case_es.CaseES() .domain(self.domain) - .user_ids_handle_unknown(user_ids) .size(0) ) + query = query.owner(owner_ids) if self.view_by_groups else query.user_ids_handle_unknown(owner_ids) + if self.case_type: query = query.case_type(self.case_type) else: @@ -616,9 +618,9 @@ def es_queryset(self, user_ids, size=None): query = query.aggregation(top_level_aggregation) - if self.missing_users: + if self.missing_owners: missing_aggregation = ( - MissingAggregation('missing_users', 'user_id') + MissingAggregation('missing_owners', field) .aggregation(self._touched_total_aggregation) .aggregation(self._active_total_aggregation) .aggregation(self._inactive_total_aggregation) From 2808dade5056cd4aa615ed238f94ff432c209e90 Mon Sep 17 00:00:00 2001 From: Zandre Engelbrecht Date: Fri, 19 Apr 2024 13:20:26 +0200 Subject: [PATCH 044/928] refactor row funcs to use owners --- corehq/apps/reports/standard/monitoring.py | 52 +++++++++++----------- 1 file changed, 27 insertions(+), 25 deletions(-) diff --git a/corehq/apps/reports/standard/monitoring.py b/corehq/apps/reports/standard/monitoring.py index afd5948ae932..99ca61fae457 100644 --- a/corehq/apps/reports/standard/monitoring.py +++ b/corehq/apps/reports/standard/monitoring.py @@ -480,23 +480,22 @@ def _format_row(self, row): 
@property def rows(self): es_results = self.es_queryset( - user_ids=self.paginated_user_ids, + owner_ids=self.paginated_owner_ids, size=self.pagination.start + self.pagination.count ) - buckets = es_results.aggregations.users.buckets_list - if self.missing_users: - buckets.append(es_results.aggregations.missing_users.bucket) + buckets = es_results.aggregations.owners.buckets_list + if self.missing_owners: + buckets.append(es_results.aggregations.missing_owners.bucket) rows = [] for bucket in buckets: - user = self.users_by_id[bucket.key] - rows.append(self.Row(self, user, bucket)) + owner = self.owners_by_id[bucket.key] + rows.append(self.Row(self, owner, bucket)) - rows.extend(self._unmatched_buckets(buckets, self.paginated_user_ids)) + rows.extend(self._unmatched_buckets(buckets, self.paginated_owner_ids)) - if self.should_sort_by_username: + if self.should_sort_by_name: # ES handles sorting for all other columns - rows.sort(key=lambda row: row.user.raw_username) - + rows.sort(key=lambda row: row.row_data.name) self.total_row = self._total_row if len(rows) <= self.pagination.count: return list(map(self._format_row, rows)) @@ -508,29 +507,29 @@ def rows(self): @property def get_all_rows(self): - es_results = self.es_queryset(user_ids=self.user_ids) - buckets = es_results.aggregations.users.buckets_list - if self.missing_users: - buckets.append(es_results.aggregations.missing_users.bucket) + es_results = self.es_queryset(user_ids=self.owner_ids) + buckets = es_results.aggregations.owners.buckets_list + if self.missing_owners: + buckets.append(es_results.aggregations.missing_owners.bucket) rows = [] for bucket in buckets: - user = self.users_by_id[bucket.key] - rows.append(self.Row(self, user, bucket)) + owner = self.owners_by_id[bucket.key] + rows.append(self.Row(self, owner, bucket)) - rows.extend(self._unmatched_buckets(buckets, self.user_ids)) + rows.extend(self._unmatched_buckets(buckets, self.owner_ids)) self.total_row = self._total_row return 
list(map(self._format_row, rows)) - def _unmatched_buckets(self, buckets, user_ids): + def _unmatched_buckets(self, buckets, owner_ids): # ES doesn't return buckets that don't have any docs matching docs - # we expect a bucket for each relevant user id so add empty buckets - returned_user_ids = {b.key for b in buckets} - not_returned_user_ids = set(user_ids) - returned_user_ids + # we expect a bucket for each relevant owner id so add empty buckets + returned_owner_ids = {b.key for b in buckets} + not_returned_owner_ids = set(owner_ids) - returned_owner_ids extra_rows = [] - for user_id in not_returned_user_ids: - extra_rows.append(self.Row(self, self.users_by_id[user_id], {})) - extra_rows.sort(key=lambda row: row.user.raw_username) + for owner_id in not_returned_owner_ids: + extra_rows.append(self.Row(self, self.owners_by_id[owner_id], {})) + extra_rows.sort(key=lambda row: row.row_data.name) return extra_rows @property @@ -550,9 +549,12 @@ def _total_row(self): query = ( case_es.CaseES() .domain(self.domain) - .user_ids_handle_unknown(self.user_ids) .size(0) ) + if self.view_by_groups: + query = query.owner(self.owner_ids) + else: + query = query.user_ids_handle_unknown(self.owner_ids) if self.case_type: query = query.filter(case_es.case_type(self.case_type)) else: From a7632bcda04acd6b6080d09b7d48eafb6d0b1683 Mon Sep 17 00:00:00 2001 From: Zandre Engelbrecht Date: Fri, 19 Apr 2024 13:22:06 +0200 Subject: [PATCH 045/928] support getting filter link for groups/locations --- corehq/apps/reports/filters/users.py | 6 ++++++ corehq/apps/reports/standard/monitoring.py | 15 +++++++++++++-- 2 files changed, 19 insertions(+), 2 deletions(-) diff --git a/corehq/apps/reports/filters/users.py b/corehq/apps/reports/filters/users.py index c53755664295..634a9d7facb4 100644 --- a/corehq/apps/reports/filters/users.py +++ b/corehq/apps/reports/filters/users.py @@ -454,6 +454,12 @@ def for_reporting_group(cls, group_id): cls.slug: 'g__%s' % group_id } + @classmethod + def 
for_reporting_location(cls, loc_id): + return { + cls.slug: 'l__%s' % loc_id + } + class EnterpriseUsersUtils(EmwfUtils): diff --git a/corehq/apps/reports/standard/monitoring.py b/corehq/apps/reports/standard/monitoring.py index 99ca61fae457..91c808ce95a9 100644 --- a/corehq/apps/reports/standard/monitoring.py +++ b/corehq/apps/reports/standard/monitoring.py @@ -98,8 +98,9 @@ class WorkerMonitoringReportTableBase(GenericTabularReport, ProjectReport, Proje def get_user_link(self, user): if self._has_form_view_permission(): user_link = self.get_raw_user_link(user) - return self.table_cell(user.raw_username, user_link) - return self.table_cell(user.raw_username) + name = user.raw_username if hasattr(user, 'raw_username') else user.name + return self.table_cell(name, user_link) + return self.table_cell(name) def _has_form_view_permission(self): return self.request.couch_user.has_permission( @@ -754,6 +755,16 @@ def closed_count(self, landmark_key): def header(self): return self._header + def get_raw_user_link(self, row_data): + row_link_template = '{name}' + row_link = format_html( + row_link_template, + link=self.raw_user_link_url, + params=row_data.filter_id, + name=row_data.name_in_report, + ) + return row_link + @location_safe class SubmissionsByFormReport(WorkerMonitoringFormReportTableBase, From 2af48447c7527ffe67645f4be820396d16a653da Mon Sep 17 00:00:00 2001 From: Zandre Engelbrecht Date: Fri, 19 Apr 2024 13:22:24 +0200 Subject: [PATCH 046/928] change special notice message to highlight viewing by group --- corehq/apps/reports/standard/monitoring.py | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/corehq/apps/reports/standard/monitoring.py b/corehq/apps/reports/standard/monitoring.py index 91c808ce95a9..1e17c5dcef3a 100644 --- a/corehq/apps/reports/standard/monitoring.py +++ b/corehq/apps/reports/standard/monitoring.py @@ -264,10 +264,17 @@ def totals_columns(self): @property def special_notice(self): - if 
self.domain_object.case_sharing_included(): - return _("This report currently does not support case sharing. " - "There might be inconsistencies in case totals if the " - "user is part of a case sharing group.") + if self.has_case_sharing: + help_link = ( + "https://dimagi.atlassian.net/wiki/spaces/commcarepublic" + "/pages/2143957523/Debugging+Case+Sharing+errors" + ) + return format_html(_( + "Note that when viewing this report by group it will only include " + "cases which are assigned to a Case Sharing Group/Location. Learn " + "more about troubleshooting issues with Case Sharing Groups " + "here." + ).format(help_link)) _default_landmarks = [30, 60, 90] From d7e9e024ee9716dfd6820d013e593b0239cc0694 Mon Sep 17 00:00:00 2001 From: robert-costello Date: Fri, 19 Apr 2024 10:43:23 -0400 Subject: [PATCH 047/928] add migration --- ..._add_edit_view_tableau_config_permissions.py | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) create mode 100644 corehq/apps/users/migrations/0060_add_edit_view_tableau_config_permissions.py diff --git a/corehq/apps/users/migrations/0060_add_edit_view_tableau_config_permissions.py b/corehq/apps/users/migrations/0060_add_edit_view_tableau_config_permissions.py new file mode 100644 index 000000000000..d2f2315e2b24 --- /dev/null +++ b/corehq/apps/users/migrations/0060_add_edit_view_tableau_config_permissions.py @@ -0,0 +1,17 @@ +from django.db import migrations +from corehq.apps.users.models_role import Permission + + +def create_edit_view_tableau_config_permissions(apps, schema_editor): + Permission.create_all() + + +class Migration(migrations.Migration): + + dependencies = [ + ('users', '0059_invitations_addtableau_roles_and_groupids'), + ] + + operations = [ + migrations.RunPython(create_edit_view_tableau_config_permissions, migrations.RunPython.noop) + ] From a1376e4a3c936f5ba3bb928478504515f971c410 Mon Sep 17 00:00:00 2001 From: robert-costello Date: Fri, 19 Apr 2024 10:43:44 -0400 Subject: [PATCH 048/928] update lock file --- 
migrations.lock | 1 + 1 file changed, 1 insertion(+) diff --git a/migrations.lock b/migrations.lock index bfe4700b7704..95a2a6024ff1 100644 --- a/migrations.lock +++ b/migrations.lock @@ -1205,6 +1205,7 @@ users 0056_add_manage_domain_alerts_permission 0057_populate_sql_user_data 0058_createprofileanduserdata_copylocation + 0060_add_edit_view_tableau_config_permissions util 0001_initial 0002_complaintbouncemeta_permanentbouncemeta_transientbounceemail From 33e198f9ad100d0e4fde76a1ba2bb7224d4ada55 Mon Sep 17 00:00:00 2001 From: Martin Riese Date: Fri, 19 Apr 2024 11:56:12 -0500 Subject: [PATCH 049/928] USH-4332: Support repeat group using linear layout * Formplayer is switching the layout it uses to communicate user controlled repeat groups * Temporarily support both version * Support for nested layout can be dropped in future version --- .../static/cloudcare/js/form_entry/form_ui.js | 29 +++++++++++++++++++ .../partials/form_entry_templates.html | 11 +++++++ 2 files changed, 40 insertions(+) diff --git a/corehq/apps/cloudcare/static/cloudcare/js/form_entry/form_ui.js b/corehq/apps/cloudcare/static/cloudcare/js/form_entry/form_ui.js index f3f2624c0cd3..eb0d8dc0021c 100644 --- a/corehq/apps/cloudcare/static/cloudcare/js/form_entry/form_ui.js +++ b/corehq/apps/cloudcare/static/cloudcare/js/form_entry/form_ui.js @@ -232,6 +232,8 @@ hqDefine("cloudcare/js/form_entry/form_ui", function () { return new GroupedElementTileRow(options.data, self); } else if (options.data.type === constants.QUESTION_TYPE) { return new Question(options.data, self); + } else if (options.data.type === constants.GROUP_TYPE && options.data.exists === "false") { + return new AddGroup(options.data, self); } else if (options.data.type === constants.GROUP_TYPE) { return new Group(options.data, self); } else if (options.data.type === constants.REPEAT_TYPE) { @@ -733,6 +735,7 @@ hqDefine("cloudcare/js/form_entry/form_ui", function () { * @param {Object} json - The JSON returned from touchforms to 
represent a Form * @param {Object} parent - The object's parent. Either a Form, Group, or Repeat. */ + // User controlled repeat groups function Repeat(json, parent) { var self = this; self.parent = parent; @@ -806,6 +809,32 @@ hqDefine("cloudcare/js/form_entry/form_ui", function () { return itemsPerRow !== null ? Math.round(constants.GRID_COLUMNS / itemsPerRow) : constants.GRID_COLUMNS; }; + function AddGroup (json, parent) { + var self = this; + // self.fromJS(json); + self.parent = parent; + self.hasError = function () { + return false; + } + self.children = function () { + return []; + } + + self.newRepeat = function () { + console.log("add new repeat ..."); + $.publish('formplayer.' + constants.NEW_REPEAT, self); + $.publish('formplayer.dirty'); + $('.add').trigger('blur'); + } + + self.entryTemplate = "add-group-entry-ko-template"; + + self.type = "add-group"; + self.rel_ix = function () { + return json.ix; + } + } + /** * Represents a Question. A Question contains an Entry which is the widget that is displayed for that question * type. diff --git a/corehq/apps/cloudcare/templates/cloudcare/partials/form_entry_templates.html b/corehq/apps/cloudcare/templates/cloudcare/partials/form_entry_templates.html index de1b5d8a60dd..c6ed9523fd38 100644 --- a/corehq/apps/cloudcare/templates/cloudcare/partials/form_entry_templates.html +++ b/corehq/apps/cloudcare/templates/cloudcare/partials/form_entry_templates.html @@ -388,6 +388,17 @@

+ + + + + {% endcompress %} {% endblock %} @@ -138,7 +139,7 @@

@@ -107,18 +107,18 @@ {% endif %} {% if type_info.has_config and user_can_configure %} - + {% trans "Configure" %} {% endif %} - {% trans "Delete" %}