Merge branch 'master' into jls/b5-web-apps
orangejenny committed Jun 10, 2024
2 parents b82bd30 + 0c81cbd commit f76b71e
Showing 71 changed files with 3,335 additions and 1,688 deletions.
5 changes: 3 additions & 2 deletions corehq/apps/accounting/bootstrap/utils.py
@@ -171,8 +171,9 @@ def _ensure_software_plan(plan_key, product, product_rate, verbose, apps):
plan_opts = {
'name': plan_name,
'edition': plan_key.edition,
'visibility': (SoftwarePlanVisibility.ANNUAL
if plan_key.is_annual_plan else SoftwarePlanVisibility.PUBLIC),
'visibility': (SoftwarePlanVisibility.INTERNAL
if plan_key.edition == SoftwarePlanEdition.ENTERPRISE
else SoftwarePlanVisibility.PUBLIC),
}
if plan_key.is_annual_plan is not None:
plan_opts['is_annual_plan'] = plan_key.is_annual_plan
@@ -0,0 +1,44 @@
from datetime import datetime

from django.db import migrations, models

from corehq.apps.accounting.models import SoftwarePlanVisibility

ANNUAL = "ANNUAL"


def change_plan_visibilities(apps, schema_editor):
# one-time cleanup of existing software plans
SoftwarePlan = apps.get_model('accounting', 'SoftwarePlan')

enterprise_names = ["Dimagi Only CommCare Enterprise Edition"]
enterprise_plans = SoftwarePlan.objects.filter(name__in=enterprise_names)
enterprise_plans.update(visibility=SoftwarePlanVisibility.INTERNAL, last_modified=datetime.now())

annual_plans = SoftwarePlan.objects.filter(visibility=ANNUAL)
annual_plans.update(visibility=SoftwarePlanVisibility.PUBLIC, last_modified=datetime.now())


class Migration(migrations.Migration):

dependencies = [
("accounting", "0094_add_annual_softwareplans"),
]

operations = [
migrations.RunPython(change_plan_visibilities),
migrations.AlterField(
model_name="softwareplan",
name="visibility",
field=models.CharField(
choices=[
("PUBLIC", "PUBLIC - Anyone can subscribe"),
("INTERNAL", "INTERNAL - Dimagi must create subscription"),
("TRIAL", "TRIAL- This is a Trial Plan"),
("ARCHIVED", "ARCHIVED - hidden from subscription change forms"),
],
default="INTERNAL",
max_length=10,
),
),
]
2 changes: 0 additions & 2 deletions corehq/apps/accounting/models.py
@@ -171,14 +171,12 @@ class SoftwarePlanVisibility(object):
PUBLIC = "PUBLIC"
INTERNAL = "INTERNAL"
TRIAL = "TRIAL"
ANNUAL = "ANNUAL"
ARCHIVED = "ARCHIVED"
CHOICES = (
(PUBLIC, "PUBLIC - Anyone can subscribe"),
(INTERNAL, "INTERNAL - Dimagi must create subscription"),
(TRIAL, "TRIAL- This is a Trial Plan"),
(ARCHIVED, "ARCHIVED - hidden from subscription change forms"),
(ANNUAL, "ANNUAL - public plans that on annual pricing"),
)


5 changes: 3 additions & 2 deletions corehq/apps/accounting/tests/test_ensure_plans.py
@@ -104,7 +104,8 @@ def _test_plan_versions_ensured(self, bootstrap_config):
)
self.assertEqual(sms_feature_rate.per_excess_fee, 0)

expected_visibility = (SoftwarePlanVisibility.ANNUAL
if is_annual_plan else SoftwarePlanVisibility.PUBLIC)
expected_visibility = (SoftwarePlanVisibility.INTERNAL
if edition == SoftwarePlanEdition.ENTERPRISE
else SoftwarePlanVisibility.PUBLIC)
self.assertEqual(software_plan_version.plan.visibility, expected_visibility)
self.assertEqual(software_plan_version.plan.is_annual_plan, is_annual_plan)
17 changes: 17 additions & 0 deletions corehq/apps/api/resources/__init__.py
@@ -1,5 +1,6 @@
import json

from django.core.exceptions import ValidationError
from django.http import HttpResponse
from django.urls import NoReverseMatch

@@ -114,6 +115,22 @@ def dispatch(self, request_type, request, **kwargs):
content_type="application/json",
status=401))

def alter_deserialized_detail_data(self, request, data):
"""Provide a hook for data validation
Subclasses may implement ``validate_deserialized_data`` that
raises ``django.core.exceptions.ValidationError`` if the submitted
data is not valid. This is designed to work conveniently with
``corehq.util.validation.JSONSchemaValidator``.
"""
data = super().alter_deserialized_detail_data(request, data)
if hasattr(self, "validate_deserialized_data"):
try:
self.validate_deserialized_data(data)
except ValidationError as error:
raise ImmediateHttpResponse(self.error_response(request, error.messages))
return data

def get_required_privilege(self):
return privileges.API_ACCESS

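The alter_deserialized_detail_data hook above only runs validation when a subclass defines validate_deserialized_data and raises django.core.exceptions.ValidationError on bad input, in which case the base class returns the error messages as the response body. A minimal sketch of a resource opting in is shown below; the class name and the inline checks are illustrative assumptions, and the resources touched by this commit are expected to delegate to corehq.util.validation.JSONSchemaValidator instead.

from django.core.exceptions import ValidationError


class ExampleResource:  # stand-in for a resource subclass; the real base class is not shown in this diff
    def validate_deserialized_data(self, data):
        # Illustrative checks only; raising ValidationError is what the
        # hook above turns into a 400 response with error.messages.
        errors = []
        if not isinstance(data.get("fields"), list):
            errors.append("'fields' must be a list")
        if errors:
            raise ValidationError(errors)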
81 changes: 81 additions & 0 deletions corehq/apps/api/tests/lookup_table_resources.py
@@ -237,6 +237,35 @@ def test_update(self):
self.assertEqual(data_type.fields[0].properties, ['lang', 'name'])
self.assertEqual(data_type.item_attributes, ['X'])

def test_update_field_name(self):
lookup_table = {
"fields": [{"name": "property", "properties": ["value"]}],
"tag": "lookup_table",
}

response = self._assert_auth_post_resource(
self.single_endpoint(self.data_type.id), json.dumps(lookup_table), method="PUT")
print(response.content) # for debugging errors
data_type = LookupTable.objects.get(id=self.data_type.id)
self.assertEqual(data_type.fields[0].field_name, 'property')

def test_update_fails_with_two_field_names(self):
lookup_table = {
"fields": [{"name": "property", "field_name": "prop"}],
"tag": "lookup_table",
}

response = self._assert_auth_post_resource(
self.single_endpoint(self.data_type.id), json.dumps(lookup_table), method="PUT")
self.assertEqual(response.status_code, 400)
errors = json.loads(response.content.decode("utf-8"))
print(errors)
self.assertIn("Failed validating 'not' in schema", errors[0])
self.assertIn("{'not': {'required': ['field_name']}}", errors[0])
self.assertIn("Failed validating 'not' in schema", errors[1])
self.assertIn("{'not': {'required': ['name']}}", errors[1])
self.assertEqual(len(errors), 2)


class TestLookupTableItemResourceV06(APIResourceTest):
resource = LookupTableItemResource
@@ -328,6 +357,58 @@ def test_update(self):
'cool_attr_value'
)

def test_create_with_bad_properties(self):
data_item_json = self._get_data_item_create()
data_item_json["fields"]["state_name"]["field_list"][0]["properties"] = []
response = self._assert_auth_post_resource(
self.list_endpoint,
json.dumps(data_item_json),
content_type='application/json',
)
self.assertEqual(response.status_code, 400)
errors = json.loads(response.content.decode("utf-8"))
print(errors)
self.assertIn("[] is not of type 'object':", errors[0])
data_item = LookupTableRow.objects.filter(domain=self.domain.name).first()
self.assertIsNone(data_item)

def test_update_field_value(self):
data_item = self._create_data_item()
data_item_update = self._get_data_item_update()
data_item_update["fields"]["state_name"]["field_list"][0] = {
"value": "Mass.",
"properties": {"lang": "en"},
}
response = self._assert_auth_post_resource(
self.single_endpoint(data_item.id.hex),
json.dumps(data_item_update),
method="PUT",
)
print(response.content) # for debugging errors
row = LookupTableRow.objects.filter(domain=self.domain.name).first()
self.assertEqual(row.fields["state_name"][0].value, 'Mass.')

def test_update_fails_with_two_field_values(self):
data_item = self._create_data_item()
data_item_update = self._get_data_item_update()
data_item_update["fields"]["state_name"]["field_list"][0] = {
"value": "Mass.",
"field_value": "Mass...",
}
response = self._assert_auth_post_resource(
self.single_endpoint(data_item.id.hex),
json.dumps(data_item_update),
method="PUT",
)
self.assertEqual(response.status_code, 400)
errors = json.loads(response.content.decode("utf-8"))
print(errors)
self.assertIn("Failed validating 'not' in schema", errors[0])
self.assertIn("{'not': {'required': ['field_value']}}", errors[0])
self.assertIn("Failed validating 'not' in schema", errors[1])
self.assertIn("{'not': {'required': ['value']}}", errors[1])
self.assertEqual(len(errors), 2)


class TestLookupTableItemResourceV05(TestLookupTableItemResourceV06):
resource = LookupTableItemResource
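The two tests above that reject bodies carrying both the old and new key names (name/field_name on lookup table fields, value/field_value on row values) assert on jsonschema-style messages such as "Failed validating 'not' in schema". A schema fragment that produces exactly that pair of errors could use draft-07 "dependencies", as in the sketch below; this is a reconstruction from the asserted error text, not the schema actually defined in corehq.util.validation.

from jsonschema import Draft7Validator

# Assumed shape only, inferred from the error messages asserted above.
FIELD_SCHEMA = {
    "type": "object",
    "dependencies": {
        "name": {"not": {"required": ["field_name"]}},
        "field_name": {"not": {"required": ["name"]}},
    },
}

validator = Draft7Validator(FIELD_SCHEMA)
errors = list(validator.iter_errors({"name": "property", "field_name": "prop"}))
assert len(errors) == 2  # one 'not' failure per disallowed key combination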
82 changes: 67 additions & 15 deletions corehq/apps/app_execution/db_accessors.py
@@ -1,4 +1,7 @@
from django.db.models import Avg, DateTimeField, DurationField, ExpressionWrapper, F, Max
from collections import defaultdict
from datetime import timedelta

from django.db.models import Avg, Count, DateTimeField, DurationField, ExpressionWrapper, F, Max
from django.db.models.functions import Trunc

from corehq.apps.app_execution.models import AppExecutionLog, AppWorkflowConfig
@@ -16,28 +19,77 @@ def get_avg_duration_data(domain, start, end, workflow_id=None):
).values("date", "workflow_id")
.annotate(avg_duration=Avg('duration'))
.annotate(max_duration=Max('duration'))
.order_by("workflow_id", "date")
)

data = []
seen_workflows = set()
data = defaultdict(list)
seen_dates = defaultdict(set)
for row in chart_logs:
if row["workflow_id"] not in seen_workflows:
seen_workflows.add(row["workflow_id"])
data.append({
"key": row["workflow_id"],
"values": []
})
data[-1]["values"].append({
data[row["workflow_id"]].append({
"date": row["date"].isoformat(),
"avg_duration": row["avg_duration"].total_seconds(),
"max_duration": row["max_duration"].total_seconds(),
})
seen_dates[row["workflow_id"]].add(row["date"])

start = start.replace(minute=0, second=0, microsecond=0)
current = start
while current < end:
for workflow_id, dates in seen_dates.items():
if current not in dates:
data[workflow_id].append({"date": current.isoformat(), "avg_duration": None, "max_duration": None})
current += timedelta(hours=1)

workflow_names = {
workflow["id"]: workflow["name"]
for workflow in AppWorkflowConfig.objects.filter(id__in=seen_workflows).values("id", "name")
for workflow in AppWorkflowConfig.objects.filter(id__in=list(data)).values("id", "name")
}
for workflow_data in data:
workflow_data["label"] = workflow_names[workflow_data["key"]]
return data
return [
{
"key": workflow_id,
"label": workflow_names[workflow_id],
"values": sorted(data, key=lambda x: x["date"])
}
for workflow_id, data in data.items()
]


def get_status_data(domain, start, end, workflow_id=None):
query = AppExecutionLog.objects.filter(workflow__domain=domain, started__gte=start, started__lt=end)
if workflow_id:
query = query.filter(workflow_id=workflow_id)

chart_logs = (
query.annotate(date=Trunc("started", "hour", output_field=DateTimeField()))
.values("date", "success")
.annotate(count=Count("success"))
)

success = []
error = []
seen_success_dates = set()
seen_error_dates = set()
for row in chart_logs:
item = {
"date": row["date"].isoformat(),
"count": row["count"],
}
if row["success"]:
success.append(item)
seen_success_dates.add(row["date"])
else:
error.append(item)
seen_error_dates.add(row["date"])

start = start.replace(minute=0, second=0, microsecond=0)
current = start
while current < end:
if current not in seen_error_dates:
error.append({"date": current.isoformat(), "count": 0})
if current not in seen_success_dates:
success.append({"date": current.isoformat(), "count": 0})
current += timedelta(hours=1)

return [
{"key": "Success", "values": sorted(success, key=lambda x: x["date"])},
{"key": "Error", "values": sorted(error, key=lambda x: x["date"])},
]
22 changes: 12 additions & 10 deletions corehq/apps/app_execution/forms.py
@@ -13,10 +13,10 @@


class AppWorkflowConfigForm(forms.ModelForm):
run_every = forms.IntegerField(min_value=1, required=False, label="Run Every (minutes)")
username = forms.CharField(max_length=255, label="Username",
help_text="Username of the user to run the workflow")
har_file = forms.FileField(label="HAR File", required=False)
run_every = forms.IntegerField(min_value=1, required=False, label=_("Run Every (minutes)"))
username = forms.CharField(max_length=255, label=_("Username"),
help_text=_("Username of the user to run the workflow"))
har_file = forms.FileField(label=_("HAR File"), required=False)

class Meta:
model = AppWorkflowConfig
@@ -52,30 +52,30 @@ def __init__(self, request, *args, **kwargs):
if request.user.is_superuser:
fields += ["run_every", "notification_emails"]

har_help = _("HAR file recording should start with the selection of the app (navigate_menu_start).")
self.helper.layout = crispy.Layout(
crispy.Div(
crispy.Div(
*fields,
css_class="col",
),
crispy.Div(
crispy.HTML("<p>HAR file recording should start with the "
"selection of the app (navigate_menu_start).</p>"),
crispy.HTML(f"<p>{har_help}</p>"),
"har_file",
twbscrispy.StrictButton(
"Populate workflow from HAR file",
_("Populate workflow from HAR file"),
type='submit', css_class='btn-secondary', name="import_har", value="1",
formnovalidate=True,
),
crispy.HTML("<p>&nbsp;</p>"),
crispy.HTML("<p>Workflow:</p>"),
crispy.HTML(f"<p>{_('Workflow:')}</p>"),
InlineField("workflow"),
css_class="col"
),
css_class="row mb-3"
),
hqcrispy.FormActions(
twbscrispy.StrictButton("Save", type='submit', css_class='btn-primary')
twbscrispy.StrictButton(_("Save"), type='submit', css_class='btn-primary')
),
)

@@ -98,7 +98,9 @@ def clean_app_id(self):
try:
get_brief_app(domain, app_id)
except NoResultFound:
raise forms.ValidationError(f"App not found in domain: {domain}:{app_id}")
raise forms.ValidationError(_("App not found in domain: {domain}:{app_id}").format(
domain=domain, app_id=app_id
))

return app_id
