Merge pull request #139 from ngdream/patch-2
add comments in views.py and correct some code styling
delcroip authored Jan 8, 2025
2 parents d05e42d + d0dc942 commit 693b55f
Showing 1 changed file with 49 additions and 24 deletions.
73 changes: 49 additions & 24 deletions individual/views.py
@@ -19,113 +19,138 @@

from workflow.services import WorkflowService

# Set up logging for the module
logger = logging.getLogger(__name__)


# Function to retrieve global schema fields from IndividualConfig
def get_global_schema_fields():
    # Load individual schema as a dictionary
    schema = json.loads(IndividualConfig.individual_schema)
    # Extract property keys and add additional fields specific to individuals
    schema_properties = set(schema.get('properties', {}).keys())
    schema_properties.update(['recipient_info', 'individual_role', 'group_code'])
    return list(schema_properties)
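
# For illustration only: a minimal sketch of what get_global_schema_fields()
# yields, substituting a hypothetical schema string for the real
# IndividualConfig.individual_schema:
#
#     sample = '{"properties": {"first_name": {}, "last_name": {}, "dob": {}}}'
#     fields = set(json.loads(sample).get('properties', {}).keys())
#     fields.update(['recipient_info', 'individual_role', 'group_code'])
#     sorted(fields)  # ['dob', 'first_name', 'group_code',
#                     #  'individual_role', 'last_name', 'recipient_info']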


# API endpoint to download a CSV template for individual data import
@api_view(["GET"])
-@permission_classes([check_user_rights(IndividualConfig.gql_individual_create_perms, )])
+@permission_classes([check_user_rights(IndividualConfig.gql_individual_create_perms)])
def download_template_file(request):
    try:
        # Base fields and extra fields required in the template
        base_fields = IndividualConfig.individual_base_fields
        extra_fields = get_global_schema_fields()
        all_fields = base_fields + extra_fields

        # Create an empty DataFrame with the required fields
        template_df = pd.DataFrame(columns=all_fields)

        # Function to stream the CSV content
        def stream_csv():
            output = template_df.to_csv(index=False)
            yield output.encode('utf-8')

        # Return a streaming HTTP response with the CSV file
        response = StreamingHttpResponse(
            stream_csv(), content_type='text/csv'
        )
        response['Content-Disposition'] = 'attachment; filename="individual_upload_template.csv"'
        return response
    except Exception as exc:
        # Log unexpected errors and return a 500 response
        logger.error("Unexpected error while generating template file", exc_info=exc)
        return Response({'success': False, 'error': str(exc)}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
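
# For illustration only: a hedged client-side sketch of downloading the
# template, assuming a hypothetical route /api/individual/download_template_file/,
# bearer-token auth, and the third-party `requests` package; none of these
# names are defined by this module.
def _example_download_template(base_url, token):
    import requests
    response = requests.get(
        f"{base_url}/api/individual/download_template_file/",
        headers={"Authorization": f"Bearer {token}"},
    )
    response.raise_for_status()
    # The body is the empty CSV template with a single header row
    with open("individual_upload_template.csv", "wb") as f:
        f.write(response.content)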


# API endpoint to import individual data from a file
@api_view(["POST"])
-@permission_classes([check_user_rights(IndividualConfig.gql_individual_create_perms, )])
+@permission_classes([check_user_rights(IndividualConfig.gql_individual_create_perms)])
def import_individuals(request):
    import_file = None
    try:
        user = request.user
        # Resolve the arguments and handle file upload
        import_file, workflow, group_aggregation_column = _resolve_import_individuals_args(request)
        _handle_file_upload(import_file)
        # Import individual data using the service
        result = IndividualImportService(user).import_individuals(import_file, workflow, group_aggregation_column)

        # If the import was unsuccessful, raise an error
        if not result.get('success'):
            raise ValueError('{}: {}'.format(result.get("message"), result.get("details")))

        # Return the result of the import
        return Response(result)
    except ValueError as e:
        # Remove the file and log the error if a value error occurs
        if import_file:
            _remove_file(import_file)
        logger.error("Error while uploading individuals", exc_info=e)
        return Response({'success': False, 'error': str(e)}, status=status.HTTP_400_BAD_REQUEST)
    except FileExistsError as e:
        # Handle file existence conflicts
        logger.error("Error while saving file", exc_info=e)
        return Response({'success': False, 'error': str(e)}, status=status.HTTP_409_CONFLICT)
    except Exception as e:
        # Handle unexpected errors and return a 500 response
        logger.error("Unexpected error while uploading individuals", exc_info=e)
        return Response({'success': False, 'error': str(e)}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
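
# For illustration only: a hedged sketch of a client posting an import file,
# assuming a hypothetical route /api/individual/import_individuals/; the form
# field names mirror what _resolve_import_individuals_args() reads below, and
# the workflow values are example placeholders, not prescribed ones.
def _example_import_individuals(base_url, token, csv_path):
    import requests
    with open(csv_path, "rb") as f:
        response = requests.post(
            f"{base_url}/api/individual/import_individuals/",
            headers={"Authorization": f"Bearer {token}"},
            files={"file": f},
            data={
                "workflow_name": "individual-import",      # example value
                "workflow_group": "individual",            # example value
                "group_aggregation_column": "group_code",  # not validated as required
            },
        )
    response.raise_for_status()
    return response.json()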


# API endpoint to download invalid items from an individual data upload
@api_view(["GET"])
-@permission_classes([check_user_rights(IndividualConfig.gql_individual_search_perms, )])
+@permission_classes([check_user_rights(IndividualConfig.gql_individual_search_perms)])
def download_invalid_items(request):
    try:
        # Get the upload ID from the request parameters
        upload_id = request.query_params.get('upload_id')

        # Query invalid items from the data source based on the upload ID
        invalid_items = IndividualDataSource.objects.filter(
            Q(is_deleted=False) &
            Q(upload_id=upload_id) &
            ~Q(validations__validation_errors=[])
        )

        # Prepare data for invalid items as a list of dictionaries
        data_from_source = []
        for invalid_item in invalid_items:
            json_ext = invalid_item.json_ext
-            invalid_item.json_ext["id"] = invalid_item.id
-            invalid_item.json_ext["error"] = invalid_item.validations
+            json_ext["id"] = invalid_item.id
+            json_ext["error"] = invalid_item.validations
            data_from_source.append(json_ext)

        # Convert the data into a DataFrame
        recreated_df = pd.DataFrame(data_from_source)

-        # Function to stream the DataFrame content as CSV
+        # Stream the DataFrame content as a CSV file
        def stream_csv():
            output = recreated_df.to_csv(index=False)
            yield output.encode('utf-8')

-        # Create a streaming response with the CSV content
+        # Return a streaming HTTP response with the CSV file
        response = StreamingHttpResponse(
            stream_csv(), content_type='text/csv'
        )
        response['Content-Disposition'] = 'attachment; filename="individuals_invalid_items.csv"'
        return response

    except ValueError as exc:
-        # Handle errors gracefully
+        # Log value errors and return a 400 response
        logger.error("Error while fetching data", exc_info=exc)
        return Response({'success': False, 'error': str(exc)}, status=400)
    except Exception as exc:
        # Handle unexpected errors and return a 500 response
        logger.error("Unexpected error", exc_info=exc)
        return Response({'success': False, 'error': str(exc)}, status=500)
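
# For illustration only: an equivalent way to express the invalid-items
# queryset above using exclude() instead of a negated Q object; behaviour is
# assumed identical for this same-row JSON lookup.
def _example_invalid_items(upload_id):
    return IndividualDataSource.objects.filter(
        is_deleted=False,
        upload_id=upload_id,
    ).exclude(validations__validation_errors=[])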


# API endpoint to download a previously uploaded individual data file
@api_view(["GET"])
-@permission_classes([check_user_rights(IndividualConfig.gql_individual_search_perms, )])
+@permission_classes([check_user_rights(IndividualConfig.gql_individual_search_perms)])
def download_individual_upload(request):
    try:
        # Get the filename from the request parameters
        filename = request.query_params.get('filename')
        target_file_path = IndividualConfig.get_individual_upload_file_path(filename)

        # Create a file handler to manage the file
        file_handler = DefaultStorageFileHandler(target_file_path)
        return file_handler.get_file_response_csv(filename)

@@ -139,7 +164,7 @@ def download_individual_upload(request):
        logger.error("Unexpected error", exc_info=exc)
        return Response({'success': False, 'error': str(exc)}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
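
# For illustration only: the client-side counterpart, assuming a hypothetical
# route /api/individual/download_individual_upload/; the `filename` parameter
# matches the query parameter read above.
def _example_download_upload(base_url, token, filename):
    import requests
    return requests.get(
        f"{base_url}/api/individual/download_individual_upload/",
        params={"filename": filename},
        headers={"Authorization": f"Bearer {token}"},
    )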


# Function to handle file uploads and save them to a specified path
def _handle_file_upload(file):
    try:
        target_file_path = IndividualConfig.get_individual_upload_file_path(file.name)
@@ -148,37 +173,37 @@ def _handle_file_upload(file):
    except FileExistsError as exc:
        raise exc


# Function to remove a file from storage
def _remove_file(file):
    target_file_path = IndividualConfig.get_individual_upload_file_path(file.name)
    file_handler = DefaultStorageFileHandler(target_file_path)
    file_handler.remove_file()


# Helper function to resolve and validate import arguments from the request
def _resolve_import_individuals_args(request):
    import_file = request.FILES.get('file')
    workflow_name = request.POST.get('workflow_name')
    workflow_group = request.POST.get('workflow_group')
    group_aggregation_column = request.POST.get('group_aggregation_column')

    # Validate the presence of required arguments
    if not import_file:
-        raise ValueError(f'Import file not provided')
+        raise ValueError('Import file not provided')
    if not workflow_name:
-        raise ValueError(f'Workflow name not provided')
+        raise ValueError('Workflow name not provided')
    if not workflow_group:
-        raise ValueError(f'Workflow group not provided')
+        raise ValueError('Workflow group not provided')

    # Retrieve workflows based on the provided arguments
    result = WorkflowService.get_workflows(workflow_name, workflow_group)
    if not result.get('success'):
        raise ValueError('{}: {}'.format(result.get("message"), result.get("details")))

    workflows = result.get('data', {}).get('workflows')

    if not workflows:
        raise ValueError('Workflow not found: group={} name={}'.format(workflow_group, workflow_name))
    if len(workflows) > 1:
        raise ValueError('Multiple workflows found: group={} name={}'.format(workflow_group, workflow_name))

    workflow = workflows[0]

-    return import_file, workflow, group_aggregation_column
+    # Return the resolved import file, workflow, and aggregation column
+    return import_file, workflows[0], group_aggregation_column
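
# For reference: the result shape this helper expects from
# WorkflowService.get_workflows(), inferred from the lookups above; the exact
# payload may differ.
#
#     {"success": True, "data": {"workflows": [workflow]}}
#
# On failure, the "message" and "details" keys are read instead.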
