From 785e691ba42b8a8d6664a4b002056af6fc0ca683 Mon Sep 17 00:00:00 2001
From: Shahzaib Ahmad <31288329+Shahzaibahmad97@users.noreply.github.com>
Date: Thu, 22 Aug 2024 15:27:21 +0500
Subject: [PATCH] added "individual_role" field to the import process to
 populate the role of an individual in a group (#120)

---
 individual/management/commands/fake_individuals.py | 11 +++++++++--
 .../signals/on_validation_import_valid_items.py    | 10 +++++++++-
 individual/views.py                                |  2 +-
 individual/workflows/utils.py                      |  2 +-
 4 files changed, 20 insertions(+), 5 deletions(-)

diff --git a/individual/management/commands/fake_individuals.py b/individual/management/commands/fake_individuals.py
index 7e03758..f587079 100644
--- a/individual/management/commands/fake_individuals.py
+++ b/individual/management/commands/fake_individuals.py
@@ -21,13 +21,14 @@
     "beneficiary_data_source": {"type": "string"}
 }
 
-def generate_fake_individual(group_code, recipient_info):
+def generate_fake_individual(group_code, recipient_info, individual_role):
     return {
         "first_name": fake.first_name(),
         "last_name": fake.last_name(),
         "dob": fake.date_of_birth(minimum_age=16, maximum_age=90).isoformat(),
         "group_code": group_code,
         "recipient_info": recipient_info,
+        "individual_role": individual_role,
         "email": fake.email(),
         "able_bodied": fake.boolean(),
         "national_id": fake.unique.ssn(),
@@ -45,14 +46,20 @@ class Command(BaseCommand):
     help = "Create test individual csv for uploading"
 
     def handle(self, *args, **options):
+        from individual.models import GroupIndividual
+
         individuals = []
         num_individuals = 100
         num_households = 20
 
+        # Exclude HEAD from the random role choices so that each group gets exactly one head
+        available_role_choices = [choice for choice in GroupIndividual.Role if choice != GroupIndividual.Role.HEAD]
+
         for group_code in range(1, num_households+1):
             for i in range(num_individuals // num_households):
                 recipient_info = 1 if i == 0 else 0
-                individual = generate_fake_individual(group_code, recipient_info)
+                individual_role = GroupIndividual.Role.HEAD if i == 0 else random.choice(available_role_choices)
+                individual = generate_fake_individual(group_code, recipient_info, individual_role)
                 individuals.append(individual)
 
         with tempfile.NamedTemporaryFile(mode='w', delete=False, suffix='.csv', newline='') as tmp_file:
diff --git a/individual/signals/on_validation_import_valid_items.py b/individual/signals/on_validation_import_valid_items.py
index 1e6d48e..cca3d3c 100644
--- a/individual/signals/on_validation_import_valid_items.py
+++ b/individual/signals/on_validation_import_valid_items.py
@@ -67,6 +67,7 @@ def __init__(self, workflow: str, upload_record, upload_id: str, user: User, acc
 class BaseGroupColumnAggregationClass(ItemsUploadTaskCompletionEvent):
     group_code_str = 'group_code'
     recipient_info_str = 'recipient_info'
+    individual_role_str = 'individual_role'
     individuals = None
     group_aggregation_column = None
     grouped_individuals = None
@@ -151,6 +152,7 @@ def clean_json_ext(json_ext):
                 return None
             json_ext.pop(self.group_code_str, None)
             json_ext.pop(self.recipient_info_str, None)
+            json_ext.pop(self.individual_role_str, None)
             return json_ext
 
         for individual in self.individuals:
@@ -197,8 +199,10 @@ def build_single_individual_data(individual_id):
             individual = Individual.objects.get(id=individual_id)
             individual_json_ext = self._get_json_ext(individual)
             recipient_info = individual_json_ext.get('recipient_info')
+            individual_role = individual_json_ext.get(self.individual_role_str)
+            individual_role = self._individual_role_parser(individual_role)
             recipient_type = self._recipient_type_parser(recipient_info)
-            return {'individual_id': individual_id, 'recipient_type': recipient_type}
+            return {'individual_id': individual_id, 'recipient_type': recipient_type, 'role': individual_role}
 
         return [build_single_individual_data(individual_id) for individual_id in ids]
 
@@ -209,6 +213,10 @@ def _recipient_type_parser(recipient_type):
         if recipient_type in [2, '2', 2.0]:
             return GroupIndividual.RecipientType.SECONDARY
         return None
+
+    @staticmethod
+    def _individual_role_parser(individual_role):
+        return getattr(GroupIndividual.Role, individual_role.upper(), None) if individual_role else None
 
     def _create_group_data_source(self, json_ext_data):
         data_source = GroupDataSource(upload=self.upload_record.data_upload, json_ext=json_ext_data)
diff --git a/individual/views.py b/individual/views.py
index 8f04546..e6eac4a 100644
--- a/individual/views.py
+++ b/individual/views.py
@@ -44,7 +44,7 @@ def load_spreadsheet(file: InMemoryUploadedFile, **kwargs) -> pd.DataFrame:
 def get_global_schema_fields():
     schema = json.loads(IndividualConfig.individual_schema)
     schema_properties = set(schema.get('properties', {}).keys())
-    schema_properties.update(['recipient_info', 'group_code'])
+    schema_properties.update(['recipient_info', 'individual_role', 'group_code'])
     return list(schema_properties)
 
 
diff --git a/individual/workflows/utils.py b/individual/workflows/utils.py
index 1cda6db..b24e456 100644
--- a/individual/workflows/utils.py
+++ b/individual/workflows/utils.py
@@ -51,7 +51,7 @@ def validate_dataframe_headers(self, is_update=False):
         """
         df_headers = set(self.df.columns)
         schema_properties = set(self.schema.get('properties', {}).keys())
-        schema_properties.update(['recipient_info', 'group_code'])
+        schema_properties.update(['recipient_info', 'group_code', 'individual_role'])
         required_headers = {'first_name', 'last_name', 'dob', 'id'}
         if is_update:
             required_headers.add('ID')
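
For reviewers, a minimal sketch of how role strings from the uploaded spreadsheet are expected to resolve against GroupIndividual.Role via _individual_role_parser above. The Role members other than HEAD and the standalone parse_role helper are illustrative assumptions, not part of this patch; the real choices live on the GroupIndividual model in openIMIS.

# Illustrative stand-in for GroupIndividual.Role; only HEAD is referenced in this
# patch, so the other members shown here are assumptions for the example.
from enum import Enum


class Role(Enum):
    HEAD = "HEAD"
    SPOUSE = "SPOUSE"
    SON = "SON"
    DAUGHTER = "DAUGHTER"


def parse_role(value):
    # Mirrors the getattr-based, case-insensitive lookup used by
    # _individual_role_parser: unknown or missing values resolve to None.
    if not value:
        return None
    return getattr(Role, str(value).upper(), None)


assert parse_role("head") is Role.HEAD       # CSV value "head" maps to the HEAD role
assert parse_role("Spouse") is Role.SPOUSE   # lookup is case-insensitive
assert parse_role("cousin") is None          # unrecognised roles fall back to None
assert parse_role(None) is None              # a missing "individual_role" value stays None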