diff --git a/arches/app/const.py b/arches/app/const.py
index e355a43684..7c9bfe0933 100644
--- a/arches/app/const.py
+++ b/arches/app/const.py
@@ -3,7 +3,8 @@
 IntegrityCheckDescriptions = {
     1005: "Nodes with ontologies found in graphs without ontologies",
-    1012: "Node Groups without matching nodes",
+    1012: "Node Groups without contained nodes",
+    1013: "Node Groups without a grouping node",
     1014: "Publication missing for language",
 }

@@ -12,6 +13,7 @@
 class IntegrityCheck(Enum):
     NODE_HAS_ONTOLOGY_GRAPH_DOES_NOT = 1005
     NODELESS_NODE_GROUP = 1012
+    NODEGROUP_WITHOUT_GROUPING_NODE = 1013
     PUBLICATION_MISSING_FOR_LANGUAGE = 1014

     def __str__(self):
diff --git a/arches/app/models/graph.py b/arches/app/models/graph.py
index b14d593ded..05f13d6d46 100644
--- a/arches/app/models/graph.py
+++ b/arches/app/models/graph.py
@@ -367,6 +367,7 @@ def add_node(self, node, nodegroups=None):
             node.nodegroup = self.get_or_create_nodegroup(
                 nodegroupid=node.nodegroup_id
             )
+            node.nodegroup.grouping_node_id = node.nodegroup_id
             if nodegroups is not None and str(node.nodegroup_id) in nodegroups:
                 node.nodegroup.cardinality = nodegroups[str(node.nodegroup_id)][
                     "cardinality"
                 ]
@@ -1111,7 +1112,9 @@ def flatten_tree(tree, node_id_list=[]):
             if is_collector:
                 old_nodegroup_id = node.nodegroup_id
                 node.nodegroup = models.NodeGroup(
-                    pk=node.pk, cardinality=node.nodegroup.cardinality
+                    pk=node.pk,
+                    cardinality=node.nodegroup.cardinality,
+                    grouping_node=node,
                 )
                 if old_nodegroup_id not in nodegroup_map:
                     nodegroup_map[old_nodegroup_id] = node.nodegroup_id
@@ -1284,7 +1287,8 @@ def update_node(self, node):
                 new_node.fieldname = node["fieldname"]

         self.populate_null_nodegroups()
-        # new_node will always have a nodegroup id even it if was set to None becuase populate_null_nodegroups
+        # new_node will always have a nodegroup id even if it was set to None
+        # because populate_null_nodegroups
         # will populate the nodegroup id with the parent nodegroup
         # add/remove a card if a nodegroup was added/removed
         if new_node.nodegroup_id != old_node.nodegroup_id:
@@ -1317,7 +1321,7 @@

     def delete_node(self, node=None):
         """
-        deletes a node and all if it's children from a graph
+        deletes a node and all of its children from a graph

         Arguments:
         node -- a node id or Node model to delete from the graph
@@ -2436,6 +2440,7 @@ def _update_source_nodegroup_hierarchy(nodegroup):
             source_nodegroup.cardinality = nodegroup.cardinality
             source_nodegroup.legacygroupid = nodegroup.legacygroupid
+            source_nodegroup.grouping_node_id = source_nodegroup.pk

             if nodegroup.parentnodegroup_id:
                 nodegroup_parent_node = models.Node.objects.get(
@@ -2480,7 +2485,7 @@ def _update_source_nodegroup_hierarchy(nodegroup):
         # graph ( the graph mapped to `self` ); we iterate over the item attributes and map
         # them to source item. If the item does not have a `source_identifier` attribute, it
         # has been newly created; we update the `graph_id` to match the source graph. We are
-        # not saving in this block so updates can accur in any order.
+        # not saving in this block so updates can occur in any order.
         for future_widget in list(editable_future_graph.widgets.values()):
             source_widget = future_widget.source_identifier
@@ -2643,6 +2648,7 @@ def _update_source_nodegroup_hierarchy(nodegroup):
                     setattr(source_node, key, getattr(future_node, key))

                 source_node.nodegroup_id = future_node.nodegroup_id
+                source_node.nodegroup.grouping_node_id = source_node.nodegroup_id
                 if (
                     future_node_nodegroup_node
                     and future_node_nodegroup_node.source_identifier_id
@@ -2650,6 +2656,9 @@
                     source_node.nodegroup_id = (
                         future_node_nodegroup_node.source_identifier_id
                     )
+                    source_node.nodegroup.grouping_node_id = (
+                        source_node.nodegroup_id
+                    )

                 self.nodes[source_node.pk] = source_node
             else:  # newly-created node
@@ -2663,6 +2672,9 @@
                     future_node.nodegroup_id = (
                         future_node_nodegroup_node.source_identifier_id
                     )
+                    future_node.nodegroup.grouping_node_id = (
+                        future_node.nodegroup_id
+                    )

                 del editable_future_graph.nodes[future_node.pk]
                 self.nodes[future_node.pk] = future_node
diff --git a/arches/app/models/migrations/11613_nodegroup_grouping_node.py b/arches/app/models/migrations/11613_nodegroup_grouping_node.py
new file mode 100644
index 0000000000..356af8fd70
--- /dev/null
+++ b/arches/app/models/migrations/11613_nodegroup_grouping_node.py
@@ -0,0 +1,69 @@
+# Generated by Django 5.1.3 on 2024-11-19 09:29
+
+import django.db.models.deletion
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ("models", "11408_loadstaging_sortorder"),
+    ]
+
+    def add_grouping_node(apps, schema_editor):
+        NodeGroup = apps.get_model("models", "NodeGroup")
+        nodegroups_with_nodes = NodeGroup.objects.filter(node__gt=0).distinct()
+        for nodegroup in nodegroups_with_nodes:
+            nodegroup.grouping_node_id = nodegroup.pk
+        NodeGroup.objects.bulk_update(nodegroups_with_nodes, ["grouping_node_id"])
+
+        PublishedGraph = apps.get_model("models", "PublishedGraph")
+        published_graphs = PublishedGraph.objects.all()
+        for published_graph in published_graphs:
+            for node_dict in published_graph.serialized_graph["nodes"]:
+                node_dict["grouping_node_id"] = node_dict["nodegroup_id"]
+        PublishedGraph.objects.bulk_update(published_graphs, ["serialized_graph"])
+
+    def remove_grouping_node(apps, schema_editor):
+        PublishedGraph = apps.get_model("models", "PublishedGraph")
+        published_graphs = PublishedGraph.objects.all()
+        for published_graph in published_graphs:
+            for node_dict in published_graph.serialized_graph["nodegroups"]:
+                node_dict.pop("grouping_node_id", None)
+        PublishedGraph.objects.bulk_update(published_graphs, ["serialized_graph"])
+
+    operations = [
+        migrations.AddField(
+            model_name="nodegroup",
+            name="grouping_node",
+            field=models.OneToOneField(
+                blank=True,
+                db_column="groupingnodeid",
+                null=True,
+                on_delete=django.db.models.deletion.SET_NULL,
+                related_name="grouping_node_nodegroup",
+                to="models.node",
+            ),
+        ),
+        migrations.AlterField(
+            model_name="nodegroup",
+            name="cardinality",
+            field=models.CharField(
+                blank=True, choices=[("1", "1"), ("n", "n")], default="1", max_length=1
+            ),
+        ),
+        migrations.AlterField(
+            model_name="nodegroup",
+            name="parentnodegroup",
+            field=models.ForeignKey(
+                blank=True,
+                db_column="parentnodegroupid",
+                null=True,
+                on_delete=django.db.models.deletion.CASCADE,
+                related_name="children",
+                related_query_name="child",
+                to="models.nodegroup",
+            ),
+        ),
+        migrations.RunPython(add_grouping_node, remove_grouping_node),
+    ]
diff --git a/arches/app/models/migrations/11613_nodegroup_grouping_node_constraints.py b/arches/app/models/migrations/11613_nodegroup_grouping_node_constraints.py
new file mode 100644
index 0000000000..bee10daf09
--- /dev/null
+++ b/arches/app/models/migrations/11613_nodegroup_grouping_node_constraints.py
@@ -0,0 +1,33 @@
+# Generated by Django 5.1.3 on 2024-11-19 09:33
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ("models", "11613_nodegroup_grouping_node"),
+    ]
+
+    operations = [
+        migrations.AddConstraint(
+            model_name="node",
+            constraint=models.CheckConstraint(
+                condition=models.Q(
+                    ("istopnode", True), ("nodegroup__isnull", False), _connector="OR"
+                ),
+                name="has_nodegroup_or_istopnode",
+            ),
+        ),
+        migrations.AddConstraint(
+            model_name="nodegroup",
+            constraint=models.CheckConstraint(
+                condition=models.Q(
+                    ("grouping_node", models.F("pk")),
+                    ("grouping_node__isnull", True),
+                    _connector="OR",
+                ),
+                name="grouping_node_matches_pk_or_null",
+            ),
+        ),
+    ]
diff --git a/arches/app/models/migrations/7787_relational_data_model.py b/arches/app/models/migrations/7787_relational_data_model.py
index 40d79a462c..0fc703bef4 100644
--- a/arches/app/models/migrations/7787_relational_data_model.py
+++ b/arches/app/models/migrations/7787_relational_data_model.py
@@ -9,7 +9,7 @@ class Migration(migrations.Migration):

     operations = [
         migrations.RunSQL(
-            """
+            r"""
 create extension if not exists "unaccent";

 create or replace function __arches_slugify(
diff --git a/arches/app/models/models.py b/arches/app/models/models.py
index ed885b73ae..3bd241fa94 100644
--- a/arches/app/models/models.py
+++ b/arches/app/models/models.py
@@ -709,14 +709,27 @@ class Meta:
 class NodeGroup(models.Model):
     nodegroupid = models.UUIDField(primary_key=True)
     legacygroupid = models.TextField(blank=True, null=True)
-    cardinality = models.TextField(blank=True, default="1")
+    cardinality = models.CharField(
+        max_length=1, blank=True, default="1", choices={"1": "1", "n": "n"}
+    )
     parentnodegroup = models.ForeignKey(
         "self",
         db_column="parentnodegroupid",
         blank=True,
         null=True,
         on_delete=models.CASCADE,
+        related_name="children",
+        related_query_name="child",
     )  # Allows nodegroups within nodegroups
+    grouping_node = models.OneToOneField(
+        "Node",
+        db_column="groupingnodeid",
+        blank=True,
+        null=True,
+        # models.RESTRICT might be better, but revisit after future graph refactor.
+        on_delete=models.SET_NULL,
+        related_name="grouping_node_nodegroup",
+    )

     def __init__(self, *args, **kwargs):
         super(NodeGroup, self).__init__(*args, **kwargs)
@@ -726,6 +739,13 @@ def __init__(self, *args, **kwargs):
     class Meta:
         managed = True
         db_table = "node_groups"
+        constraints = [
+            models.CheckConstraint(
+                condition=Q(grouping_node=models.F("pk"))
+                | Q(grouping_node__isnull=True),
+                name="grouping_node_matches_pk_or_null",
+            )
+        ]

         default_permissions = ()
         permissions = (
@@ -874,15 +894,18 @@ def __init__(self, *args, **kwargs):
     def clean(self):
         if not self.alias:
             Graph.objects.get(pk=self.graph_id).create_node_alias(self)
+        if self.pk == self.source_identifier_id:
+            self.source_identifier_id = None

     def save(self, **kwargs):
         if not self.alias:
-            self.clean()
             add_to_update_fields(kwargs, "alias")
             add_to_update_fields(kwargs, "hascustomalias")
         if self.pk == self.source_identifier_id:
-            self.source_identifier_id = None
             add_to_update_fields(kwargs, "source_identifier_id")
+
+        self.clean()
+
         super(Node, self).save()

     class Meta:
@@ -895,6 +918,10 @@ class Meta:
             models.UniqueConstraint(
                 fields=["alias", "graph"], name="unique_alias_graph"
             ),
+            models.CheckConstraint(
+                condition=Q(istopnode=True) | Q(nodegroup__isnull=False),
+                name="has_nodegroup_or_istopnode",
+            ),
         ]
diff --git a/arches/management/commands/validate.py b/arches/management/commands/validate.py
index e821b6ece3..757e42f1b6 100644
--- a/arches/management/commands/validate.py
+++ b/arches/management/commands/validate.py
@@ -120,6 +120,11 @@ def handle(self, *args, **options):
             ),
             fix_action=DELETE_QUERYSET,
         )
+        self.check_integrity(
+            check=IntegrityCheck.NODEGROUP_WITHOUT_GROUPING_NODE,  # 1013
+            queryset=models.NodeGroup.objects.filter(node__gt=0, grouping_node=None),
+            fix_action=None,
+        )
         self.check_integrity(
             check=IntegrityCheck.PUBLICATION_MISSING_FOR_LANGUAGE,  # 1014
             queryset=(
diff --git a/releases/8.0.0.md b/releases/8.0.0.md
index 534dd729c2..ad3ea713a6 100644
--- a/releases/8.0.0.md
+++ b/releases/8.0.0.md
@@ -20,6 +20,7 @@ Arches 8.0.0 Release Notes
 ### Additional highlights
 - Add session-based REST APIs for login, logout [#11261](https://github.com/archesproject/arches/issues/11261)
 - Improve handling of longer model names [#11317](https://github.com/archesproject/arches/issues/11317)
+- New column `NodeGroup.grouping_node`: one-to-one field to the grouping node [#11613](https://github.com/archesproject/arches/issues/11613)
 - Support more expressive plugin URLs [#11320](https://github.com/archesproject/arches/issues/11320)
 - Make node aliases not nullable [#10437](https://github.com/archesproject/arches/issues/10437)
 - Concepts API no longer responds with empty body for error conditions [#11519](https://github.com/archesproject/arches/issues/11519)
diff --git a/tests/models/graph_tests.py b/tests/models/graph_tests.py
index d2ff30cb5c..685ee8b45d 100644
--- a/tests/models/graph_tests.py
+++ b/tests/models/graph_tests.py
@@ -18,7 +18,6 @@

 import uuid

-from django.contrib.auth.models import User
 from tests.base_test import ArchesTestCase
 from arches.app.models import models
 from arches.app.models.graph import Graph, GraphValidationError
@@ -141,6 +140,10 @@ def setUpTestData(cls):
         for node in nodes:
             models.Node.objects.create(**node).save()

+        models.NodeGroup.objects.filter(
+            pk="20000000-0000-0000-0000-100000000001"
+        ).update(grouping_node_id="20000000-0000-0000-0000-100000000001")
+
         edges_dict = {
             "description": None,
             "domainnode_id": "20000000-0000-0000-0000-100000000001",
@@ -1354,7 +1357,12 @@ def test_update_empty_graph_from_editable_future_graph(self):

             # ensures all relevant values are equal between graphs
             for key, value in editable_future_graph_serialized_nodegroup.items():
-                if key not in ["parentnodegroup_id", "nodegroupid", "legacygroupid"]:
+                if key not in [
+                    "parentnodegroup_id",
+                    "nodegroupid",
+                    "grouping_node_id",
+                    "legacygroupid",
+                ]:
                     if type(value) == "dict":
                         self.assertDictEqual(
                             value, updated_source_graph_serialized_nodegroup[key]
diff --git a/tests/models/resource_test.py b/tests/models/resource_test.py
index 30df220784..37d1c91fd4 100644
--- a/tests/models/resource_test.py
+++ b/tests/models/resource_test.py
@@ -489,6 +489,7 @@ def test_self_referring_resource_instance_descriptor(self):
         graph = Graph.new(name="Self-referring descriptor test", is_resource=True)
         nodegroup = models.NodeGroup.objects.create()
         string_node = models.Node.objects.create(
+            pk=nodegroup.pk,
             graph=graph,
             nodegroup=nodegroup,
             name="String Node",
@@ -502,6 +503,8 @@
             datatype="resource-instance",
             istopnode=False,
         )
+        nodegroup.grouping_node = string_node
+        nodegroup.save()

         # Configure the primary descriptor to use the string node
         models.FunctionXGraph.objects.create(
diff --git a/tests/models/tile_model_tests.py b/tests/models/tile_model_tests.py
index e56ad1ba94..83f131c1ac 100644
--- a/tests/models/tile_model_tests.py
+++ b/tests/models/tile_model_tests.py
@@ -732,6 +732,7 @@ def test_check_for_missing_nodes(self):
             pk=UUID("41111111-0000-0000-0000-000000000000")
         )
         required_file_list_node = Node(
+            pk=node_group.pk,
             graph=graph,
             name="Required file list",
             datatype="file-list",
@@ -740,6 +741,8 @@
             istopnode=False,
         )
         required_file_list_node.save()
+        node_group.grouping_node = required_file_list_node
+        node_group.save()

         json = {
             "resourceinstance_id": "40000000-0000-0000-0000-000000000000",
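
Reviewer note: the snippet below is an illustrative sketch only, not part of the patch. It mirrors the updated tests above to show how the new `NodeGroup.grouping_node` column is populated and how the new 1013 integrity check finds violations; `graph` is assumed to be an existing `Graph` instance, and the node name/datatype values are arbitrary examples.

    from arches.app.models import models

    # A nodegroup's grouping node is the node that shares the nodegroup's UUID;
    # the new "grouping_node_matches_pk_or_null" check constraint enforces this.
    nodegroup = models.NodeGroup.objects.create()
    grouping_node = models.Node.objects.create(
        pk=nodegroup.pk,  # same UUID as the nodegroup
        graph=graph,  # assumes an existing Graph instance
        nodegroup=nodegroup,
        name="Example node",
        datatype="string",
        istopnode=False,
    )
    nodegroup.grouping_node = grouping_node
    nodegroup.save()

    # Queryset behind the new 1013 check in `python manage.py validate`:
    # nodegroups that contain nodes but have no grouping node assigned.
    models.NodeGroup.objects.filter(node__gt=0, grouping_node=None)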