feat: turn python properties into graphql attributes (#568)
vncsna authored Feb 20, 2024
1 parent 5d6ae51 commit 075e8a5
Showing 4 changed files with 51 additions and 150 deletions.
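The commit drops the `get_graphql_*` wrapper methods and exposes the same values as plain `@property` attributes, so the GraphQL layer can read them directly off the model. A minimal sketch of the idea, assuming graphene-django as the GraphQL layer (the repository's actual schema code is not part of this diff, so `AccountType` and its field list are illustrative):

```python
# Minimal sketch, assuming graphene-django; AccountType and the field wiring
# are illustrative and not taken from this repository's schema code.
import graphene
from graphene_django import DjangoObjectType

from bd_api.apps.account.models import Account


class AccountType(DjangoObjectType):
    # Fields declared without an explicit resolver fall back to graphene's
    # default resolver, which does a plain getattr on the model instance,
    # so the @property values below surface as GraphQL attributes.
    pro_subscription = graphene.String()
    pro_subscription_role = graphene.String()
    pro_subscription_slots = graphene.String()
    pro_subscription_status = graphene.String()

    class Meta:
        model = Account
        fields = ("id",)
```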
12 changes: 8 additions & 4 deletions bd_api/apps/account/models.py
@@ -349,28 +349,32 @@ def get_organization(self):

get_organization.short_description = "organização"

def get_graphql_pro_subscription(self) -> str:
@property
def pro_subscription(self) -> str:
"""BD Pro subscription role, one of bd_pro or bd_pro_empresas"""
if self.pro_owner_subscription:
return self.pro_owner_subscription.stripe_subscription
if self.pro_member_subscription:
return self.pro_member_subscription.stripe_subscription

def get_graphql_pro_subscription_role(self) -> str:
@property
def pro_subscription_role(self) -> str:
"""BD Pro subscription role, one of owner or member"""
if self.pro_owner_subscription:
return "owner"
if self.pro_member_subscription:
return "member"

def get_graphql_pro_subscription_slots(self) -> str:
@property
def pro_subscription_slots(self) -> str:
"""BD Pro subscription slots"""
if self.pro_owner_subscription:
return self.pro_owner_subscription.stripe_subscription_slots
if self.pro_member_subscription:
return self.pro_member_subscription.stripe_subscription_slots

def get_graphql_pro_subscription_status(self) -> str:
@property
def pro_subscription_status(self) -> str:
"""BD Pro subscription status"""
if self.pro_owner_subscription:
return self.pro_owner_subscription.stripe_subscription_status
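With the properties in place, callers read the subscription details as plain attributes instead of calling the removed `get_graphql_*` methods; a small usage sketch (the account lookup is illustrative):

```python
# Sketch only: attribute access replaces the removed get_graphql_* methods.
from bd_api.apps.account.models import Account

account = Account.objects.first()  # illustrative lookup
if account is not None:
    print(account.pro_subscription)        # e.g. "bd_pro" or "bd_pro_empresas"
    print(account.pro_subscription_role)   # "owner", "member", or None
    print(account.pro_subscription_slots)
    print(account.pro_subscription_status)
```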
154 changes: 24 additions & 130 deletions bd_api/apps/api/v1/models.py
@@ -8,7 +8,6 @@
from django.core.exceptions import ValidationError
from django.db import models
from django.urls import reverse
from loguru import logger
from ordered_model.models import OrderedModel

from bd_api.apps.account.models import Account
@@ -190,7 +189,7 @@ def coverage_type(self):

coverage_type.short_description = "Coverage Type"

def similarity_of_area(self, other: "Coverage"):
def get_similarity_of_area(self, other: "Coverage"):
if not self.area:
return 0
if not other.area:
@@ -201,10 +200,10 @@ def similarity_of_area(self, other: "Coverage"):
return 1
return 0

def similarity_of_datetime(self, other: "Coverage"):
def get_similarity_of_datetime(self, other: "Coverage"):
for dt_self in self.datetime_ranges.all():
for dt_other in other.datetime_ranges.all():
if dt_self.similarity_of_datetime(dt_other):
if dt_self.get_similarity_of_datetime(dt_other):
return 1
return 0

@@ -453,32 +452,17 @@ class Meta:
verbose_name_plural = "Organizations"
ordering = ["slug"]

def has_picture(self):
"""Check if the organization has a picture"""
try:
hasattr(self.picture, "url")
except Exception:
return False
return self.picture is not None

has_picture.short_description = "Has Picture"
has_picture.boolean = True

def get_graphql_has_picture(self):
"""Get the has_picture property for graphql"""
return self.has_picture()

@property
def full_slug(self):
"""Get the full slug or Organization"""
if self.area.slug != "unknown":
return f"{self.area.slug}_{self.slug}"

return f"{self.slug}"

def get_graphql_full_slug(self):
"""Get the full slug or Organization for graphql"""
return self.full_slug
@property
def has_picture(self):
if self.picture and self.picture.url:
return True
return False
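The rewritten `has_picture` leans on the fact that an unset Django file field is falsy, so the short-circuit avoids the `ValueError` that `.url` raises when no file is attached and the old try/except wrapper is no longer needed. A sketch of the same check in isolation (the lookup is illustrative, assuming `picture` is a standard Django file field):

```python
# Sketch: why the old try/except around picture.url is no longer needed.
# An empty FileField/ImageField yields a falsy FieldFile, so .url is only
# touched when a file is actually attached.
from bd_api.apps.api.v1.models import Organization

org = Organization.objects.first()  # illustrative lookup
if org is not None:
    print(bool(org.picture and org.picture.url))  # mirrors org.has_picture
```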


class Status(BaseModel):
@@ -567,10 +551,6 @@ def full_slug(self):
return f"{self.organization.area.slug}_{self.organization.slug}_{self.slug}"
return f"{self.organization.slug}_{self.slug}"

def get_graphql_full_slug(self):
"""Get the full slug or Dataset for graphql"""
return self.full_slug

@property
def coverage(self):
"""Get the temporal coverage of the dataset in the format YYYY-MM-DD - YYYY-MM-DD"""
@@ -690,13 +670,6 @@ def coverage(self):

return coverage_str

def get_graphql_coverage(self):
"""
Returns the temporal coverage of the dataset in the format
YYYY-MM-DD - YYYY-MM-DD for graphql
"""
return self.coverage

@property
def full_coverage(self) -> str:
"""
@@ -718,18 +691,11 @@ def full_coverage(self) -> str:
]
return json.dumps(full_coverage_dict)

def get_graphql_full_coverage(self):
return self.full_coverage

@property
def contains_tables(self):
"""Returns true if there are tables in the dataset"""
return len(self.tables.all()) > 0

def get_graphql_contains_tables(self):
"""Returns true if there are tables in the dataset for graphql"""
return self.contains_tables

@property
def contains_closed_data(self):
"""Returns true if there are tables or columns with closed coverages"""
@@ -747,10 +713,6 @@ def contains_closed_data(self):

return closed_data

def get_graphql_contains_closed_data(self):
"""Returns true if there are tables or columns with closed coverages for graphql"""
return self.contains_closed_data

@property
def contains_open_data(self):
"""Returns true if there are tables or columns with open coverages"""
@@ -764,48 +726,28 @@ def contains_open_data(self):

return open_data

def get_graphql_contains_open_data(self):
"""Returns true if there are tables or columns with open coverages for graphql"""
return self.contains_open_data

@property
def contains_closed_tables(self):
"""Returns true if there are tables with closed coverages (DEPRECATED)"""
closed_tables = self.tables.all().filter(is_closed=True)
return len(closed_tables) > 0

def get_graphql_contains_closed_tables(self):
"""Returns true if there are tables with closed coverages for graphql (DEPRECATED)"""
return self.contains_closed_tables

@property
def contains_open_tables(self):
"""Returns true if there are tables with open coverages (DEPRECATED)"""
open_tables = self.tables.all().filter(is_closed=False)
return len(open_tables) > 0

def get_graphql_contains_open_tables(self):
"""Returns true if there are tables with open coverages for graphql (DEPRECATED)"""
return self.contains_open_tables

@property
def contains_raw_data_sources(self):
"""Returns true if there are raw data sources in the dataset"""
return len(self.raw_data_sources.all()) > 0

def get_graphql_contains_raw_data_sources(self):
"""Returns true if there are raw data sources in the dataset for graphql"""
return self.contains_raw_data_sources

@property
def contains_information_requests(self):
"""Returns true if there are information requests in the dataset"""
return len(self.information_requests.all()) > 0

def get_graphql_contains_information_requests(self):
"""Returns true if there are information requests in the dataset for graphql"""
return self.contains_information_requests

@property
def table_last_updated_at(self):
updates = [
@@ -814,9 +756,6 @@ def table_last_updated_at(self):
] # fmt: skip
return max(updates) if updates else None

def get_graphql_table_last_updated_at(self):
return self.table_last_updated_at

@property
def raw_data_source_last_updated_at(self):
updates = [
@@ -825,9 +764,6 @@ def raw_data_source_last_updated_at(self):
] # fmt: skip
return max(updates) if updates else None

def get_graphql_raw_data_source_last_updated_at(self):
return self.raw_data_source_last_updated_at


class Update(BaseModel):
id = models.UUIDField(primary_key=True, default=uuid4)
@@ -984,9 +920,6 @@ def gbq_slug(self):
table = cloud_table.gcp_table_id
return f"basedosdados.{dataset}.{table}"

def get_graphql_gbq_slug(self):
return self.gbq_slug

@property
def gcs_slug(self):
"""Get the slug used in Google Cloud Storage"""
@@ -995,19 +928,12 @@ def gcs_slug(self):
table = cloud_table.gcp_table_id
return f"staging/{dataset}/{table}"

def get_graphql_gcs_slug(self):
return self.gcs_slug

@property
def partitions(self):
"""Returns a list of columns used to partition the table"""
partitions_list = [p.name for p in self.columns.all().filter(is_partition=True)]
return ", ".join(partitions_list)

def get_graphql_partitions(self):
"""Returns a list of columns used to partition the table"""
return self.partitions

@property
def contains_closed_data(self):
"""Returns true if there are columns with closed coverages"""
@@ -1022,10 +948,6 @@ def contains_closed_data(self):

return closed_data

def get_graphql_contains_closed_data(self):
"""Returns true if there are columns with closed coverages to be used in graphql"""
return self.contains_closed_data

@property
def full_coverage(self) -> str:
"""
@@ -1104,11 +1026,8 @@ def full_coverage(self) -> str:

return json.dumps(full_coverage)

def get_graphql_full_coverage(self):
return self.full_coverage

@property
def neighbors(self):
def neighbors(self) -> list[dict]:
"""Similiar tables and columns
- Tables and columns with similar directories
- Tables and columns with similar coverages or tags
@@ -1123,27 +1042,11 @@ def neighbors(self):
)
all_neighbors = []
for table in all_tables:
score_area = self.similarity_of_area(table)
score_datetime = self.similarity_of_datetime(table)
score_directory, cols = self.similarity_of_directory(table)
if score_directory:
all_neighbors.append(
(
cols,
table,
table.dataset,
score_area + score_datetime + score_directory,
)
)
logger.debug(f"[similarity_area] {self} {table} {score_area}")
logger.debug(f"[similarity_datetime] {self} {table} {score_datetime}")
logger.debug(f"[similarity_directory] {self} {table} {score_directory}")

return sorted(all_neighbors, key=lambda item: item[-1])[::-1][:20]

def get_graphql_neighbors(self) -> list[dict]:
all_neighbors = []
for columns, table, dataset, score in self.neighbors:
score_area = self.get_similarity_of_area(table)
score_datetime = self.get_similarity_of_datetime(table)
score_directory, columns = self.get_similarity_of_directory(table)
if not score_directory:
continue
column_id = []
column_name = []
for column in columns:
@@ -1155,40 +1058,37 @@ def get_graphql_neighbors(self) -> list[dict]:
"column_name": column_name,
"table_id": str(table.id),
"table_name": table.name,
"dataset_id": str(dataset.id),
"dataset_name": dataset.name,
"score": score,
"dataset_id": str(table.dataset.id),
"dataset_name": table.dataset.name,
"score": round(score_area + score_datetime + score_directory, 2),
}
)
return all_neighbors
return sorted(all_neighbors, key=lambda item: item["score"])[::-1][:20]
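The reshaping that used to live in `get_graphql_neighbors` is now folded into the property itself, so `neighbors` yields ready-to-serialize dictionaries sorted by the combined similarity score. A short consumption sketch (the table lookup is illustrative):

```python
# Sketch of consuming Table.neighbors; the keys follow the dicts built above.
from bd_api.apps.api.v1.models import Table

table = Table.objects.first()  # illustrative lookup
if table is not None:
    for entry in table.neighbors[:3]:
        # score = area + datetime + directory similarity, rounded to 2 places
        print(entry["table_name"], entry["dataset_name"], entry["score"])
```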

@property
def last_updated_at(self):
updates = [u.latest for u in self.updates.all() if u.latest]
return max(updates) if updates else None

def get_graphql_last_updated_at(self):
return self.last_updated_at

def similarity_of_area(self, other: "Table"):
def get_similarity_of_area(self, other: "Table"):
count_all = 0
count_yes = 0
for cov_self in self.coverages.all():
for cov_other in other.coverages.all():
count_all += 1
count_yes += cov_self.similarity_of_area(cov_other)
count_yes += cov_self.get_similarity_of_area(cov_other)
return count_yes / count_all if count_all else 0

def similarity_of_datetime(self, other: "Table"):
def get_similarity_of_datetime(self, other: "Table"):
count_all = 0
count_yes = 0
for cov_self in self.coverages.all():
for cov_other in other.coverages.all():
count_all += 1
count_yes += cov_self.similarity_of_datetime(cov_other)
count_yes += cov_self.get_similarity_of_datetime(cov_other)
return count_yes / count_all if count_all else 0
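Both renamed helpers return the fraction of coverage pairs that match, so the score always lands in [0, 1]; a tiny illustration of the arithmetic:

```python
# Illustration of the pairwise fraction: 2 coverages compared against 3
# gives 6 pairs; if 3 of them match, the similarity is 3 / 6 = 0.5.
matches = [1, 0, 1, 0, 1, 0]
similarity = sum(matches) / len(matches) if matches else 0
assert similarity == 0.5
```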

def similarity_of_directory(self, other: "Table"):
def get_similarity_of_directory(self, other: "Table"):
self_cols = self.columns.all()
self_dirs = self.columns.filter(directory_primary_key__isnull=False).all()
other_cols = other.columns.all()
@@ -1369,9 +1269,6 @@ def full_coverage(self) -> str:

return json.dumps(column_full_coverage)

def get_graphql_full_coverage(self):
return self.full_coverage

def clean(self) -> None:
"""Clean method for Column model"""
errors = {}
@@ -1581,9 +1478,6 @@ def last_updated_at(self):
updates = [u.latest for u in self.updates.all() if u.latest]
return max(updates) if updates else None

def get_graphql_last_updated_at(self):
return self.last_updated_at


class InformationRequest(BaseModel, OrderedModel):
"""Model definition for InformationRequest."""
@@ -1833,7 +1727,7 @@ def until(self):
self.end_second or 0,
)

def similarity_of_datetime(self, other: "DateTimeRange"):
def get_similarity_of_datetime(self, other: "DateTimeRange"):
if not self.since:
return 0
if not other.until: