From 038dfba3a23cd34389a8fdbd16c0858ad6084b1f Mon Sep 17 00:00:00 2001 From: Michael Date: Wed, 4 Dec 2024 11:21:03 +0200 Subject: [PATCH] added capacity assignment status --- src/sempy_labs/_generate_semantic_model.py | 22 +++---- src/sempy_labs/_translations.py | 77 +++++++++++++++------- src/sempy_labs/admin/__init__.py | 2 + src/sempy_labs/admin/_basic_functions.py | 61 ++++++++++++++--- 4 files changed, 120 insertions(+), 42 deletions(-) diff --git a/src/sempy_labs/_generate_semantic_model.py b/src/sempy_labs/_generate_semantic_model.py index fd83581f..cf513070 100644 --- a/src/sempy_labs/_generate_semantic_model.py +++ b/src/sempy_labs/_generate_semantic_model.py @@ -350,7 +350,9 @@ def get_semantic_model_bim( The Model.bim file for the semantic model. """ - bimJson = get_semantic_model_definition(dataset=dataset, workspace=workspace, format='TMSL', return_dataframe=False) + bimJson = get_semantic_model_definition( + dataset=dataset, workspace=workspace, format="TMSL", return_dataframe=False + ) if save_to_file_name is not None: if not lakehouse_attached(): @@ -406,13 +408,15 @@ def get_semantic_model_definition( A pandas dataframe with the semantic model definition or the file or files comprising the semantic model definition. """ - valid_formats = ['TMSL', 'TMDL'] + valid_formats = ["TMSL", "TMDL"] format = format.upper() - if format == 'BIM': + if format == "BIM": format = "TMSL" if format not in valid_formats: - raise ValueError(f"{icons.red_dot} Invalid format. Valid options: {valid_formats}.") + raise ValueError( + f"{icons.red_dot} Invalid format. Valid options: {valid_formats}." + ) (workspace, workspace_id) = resolve_workspace_name_and_id(workspace) @@ -427,18 +431,14 @@ def get_semantic_model_definition( if return_dataframe: return pd.json_normalize(files) - elif format == 'TMSL': + elif format == "TMSL": payload = next( - (part["payload"] for part in files if part["path"] == "model.bim"), - None + (part["payload"] for part in files if part["path"] == "model.bim"), None ) return json.loads(_decode_b64(payload)) else: decoded_parts = [ - { - "file_name": part["path"], - "content": _decode_b64(part['payload']) - } + {"file_name": part["path"], "content": _decode_b64(part["payload"])} for part in files ] diff --git a/src/sempy_labs/_translations.py b/src/sempy_labs/_translations.py index e901d625..12db1c57 100644 --- a/src/sempy_labs/_translations.py +++ b/src/sempy_labs/_translations.py @@ -57,7 +57,7 @@ def _clean_text(text, exclude_chars): columns=["Object Type", "Name", "Description", "Display Folder"] ) - final_df = pd.DataFrame(columns=['Value', 'Translation']) + final_df = pd.DataFrame(columns=["Value", "Translation"]) with connect_semantic_model( dataset=dataset, readonly=False, workspace=workspace @@ -68,7 +68,7 @@ def _clean_text(text, exclude_chars): oDescription = _clean_text(o.Description, exclude_characters) new_data = { "Name": o.Name, - "TName": oName, + "TName": oName, "Object Type": "Table", "Description": o.Description, "TDescription": oDescription, @@ -165,24 +165,29 @@ def _clean_text(text, exclude_chars): ) df_panda = transDF.toPandas() - df_panda = df_panda[~df_panda[clm].isin([None, ''])][[clm, 'translation']] + df_panda = df_panda[~df_panda[clm].isin([None, ""])][[clm, "translation"]] - df_panda = df_panda.rename(columns={clm: 'value'}) + df_panda = df_panda.rename(columns={clm: "value"}) final_df = pd.concat([final_df, df_panda], ignore_index=True) def set_translation_if_exists(object, language, property, index): - if property == 'Name': + if property 
== "Name": trans = object.Name - elif property == 'Description': + elif property == "Description": trans = object.Description - elif property == 'Display Folder': + elif property == "Display Folder": trans = object.DisplayFolder - df_filt = final_df[final_df['value'] == trans] + df_filt = final_df[final_df["value"] == trans] if not df_filt.empty: - translation_value = df_filt['translation'].str[index].iloc[0] - tom.set_translation(object=object, language=language, property=property, value=translation_value) + translation_value = df_filt["translation"].str[index].iloc[0] + tom.set_translation( + object=object, + language=language, + property=property, + value=translation_value, + ) for language in languages: index = languages.index(language) @@ -192,23 +197,49 @@ def set_translation_if_exists(object, language, property, index): ) for t in tom.model.Tables: - set_translation_if_exists(object=t, language=language, property='Name', index=index) - set_translation_if_exists(object=t, language=language, property='Description', index=index) + set_translation_if_exists( + object=t, language=language, property="Name", index=index + ) + set_translation_if_exists( + object=t, language=language, property="Description", index=index + ) for c in tom.all_columns(): - set_translation_if_exists(object=c, language=language, property='Name', index=index) - set_translation_if_exists(object=c, language=language, property='Description', index=index) - set_translation_if_exists(object=c, language=language, property='Display Folder', index=index) + set_translation_if_exists( + object=c, language=language, property="Name", index=index + ) + set_translation_if_exists( + object=c, language=language, property="Description", index=index + ) + set_translation_if_exists( + object=c, language=language, property="Display Folder", index=index + ) for c in tom.all_measures(): - set_translation_if_exists(object=c, language=language, property='Name', index=index) - set_translation_if_exists(object=c, language=language, property='Description', index=index) - set_translation_if_exists(object=c, language=language, property='Display Folder', index=index) + set_translation_if_exists( + object=c, language=language, property="Name", index=index + ) + set_translation_if_exists( + object=c, language=language, property="Description", index=index + ) + set_translation_if_exists( + object=c, language=language, property="Display Folder", index=index + ) for c in tom.all_hierarchies(): - set_translation_if_exists(object=c, language=language, property='Name', index=index) - set_translation_if_exists(object=c, language=language, property='Description', index=index) - set_translation_if_exists(object=c, language=language, property='Display Folder', index=index) + set_translation_if_exists( + object=c, language=language, property="Name", index=index + ) + set_translation_if_exists( + object=c, language=language, property="Description", index=index + ) + set_translation_if_exists( + object=c, language=language, property="Display Folder", index=index + ) for c in tom.all_levels(): - set_translation_if_exists(object=c, language=language, property='Name', index=index) - set_translation_if_exists(object=c, language=language, property='Description', index=index) + set_translation_if_exists( + object=c, language=language, property="Name", index=index + ) + set_translation_if_exists( + object=c, language=language, property="Description", index=index + ) result = pd.DataFrame( columns=[ diff --git a/src/sempy_labs/admin/__init__.py 
b/src/sempy_labs/admin/__init__.py index 0aa74c8a..1219c47d 100644 --- a/src/sempy_labs/admin/__init__.py +++ b/src/sempy_labs/admin/__init__.py @@ -11,6 +11,7 @@ list_capacities_delegated_tenant_settings, list_access_entities, list_activity_events, + get_capacity_assignment_status, ) from sempy_labs.admin._domains import ( list_domains, @@ -64,4 +65,5 @@ "list_modified_workspaces", "list_git_connections", "list_reports", + "get_capacity_assignment_status", ] diff --git a/src/sempy_labs/admin/_basic_functions.py b/src/sempy_labs/admin/_basic_functions.py index 363fc63a..3656f6b4 100644 --- a/src/sempy_labs/admin/_basic_functions.py +++ b/src/sempy_labs/admin/_basic_functions.py @@ -11,7 +11,6 @@ import numpy as np import pandas as pd from dateutil.parser import parse as dtparser -import urllib.parse def list_workspaces( @@ -901,14 +900,20 @@ def _resolve_workspace_name_and_id( workspace: str | UUID, ) -> Tuple[str, UUID]: - dfW = list_workspaces(workspace=workspace) - try: - workspace_name = dfW["Name"].iloc[0] - workspace_id = dfW["Id"].iloc[0] - except Exception: - raise ValueError(f"{icons.red_dot} The '{workspace}' workspace was not found.") + if workspace is None: + workspace_id = fabric.get_workspace_id() + workspace_name = fabric.resolve_workspace_name(workspace_id) + else: + dfW = list_workspaces(workspace=workspace) + try: + workspace_name = dfW["Name"].iloc[0] + workspace_id = dfW["Id"].iloc[0] + except Exception: + raise ValueError( + f"{icons.red_dot} The '{workspace}' workspace was not found." + ) - return workspace_name, workspace_id + return workspace_name, workspace_id def list_reports( @@ -988,3 +993,43 @@ def list_reports( df["Modified Date"] = pd.to_datetime(df["Modified Date"], errors="coerce") return df + + +def get_capacity_assignment_status(workspace: Optional[str | UUID] = None): + + (workspace_name, workspace_id) = _resolve_workspace_name_and_id(workspace) + + df = pd.DataFrame( + columns=[ + "Status", + "Activity Id", + "Start Time", + "End Time", + "Capacity Id", + "Capacity Name", + ] + ) + + client = fabric.FabricRestClient() + response = client.get(f"/v1.0/myorg/groups/{workspace_id}/CapacityAssignmentStatus") + + if response.status_code != 200: + raise FabricHTTPException(response) + + v = response.json() + capacity_id = v.get("capacityId") + + (capacity_name, capacity_id) = _resolve_capacity_name_and_id(capacity=capacity_id) + + new_data = { + "Status": v.get("status"), + "Activity Id": v.get("activityId"), + "Start Time": v.get("startTime"), + "End Time": v.get("endTime"), + "Capacity Id": capacity_id, + "Capacity Name": capacity_name, + } + + df = pd.concat([df, pd.DataFrame([new_data])], ignore_index=True) + + return df
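
A minimal usage sketch for reviewers of the new get_capacity_assignment_status
function exported from sempy_labs.admin in this patch. This is an assumption-laden
illustration, not part of the change itself: the workspace name is a placeholder,
and it assumes a Fabric notebook session where the caller has access to the target
workspace so the CapacityAssignmentStatus call succeeds. The returned columns follow
the DataFrame constructed in the new function above.

    from sempy_labs import admin

    # Pass an explicit workspace name or UUID (placeholder shown here), or omit
    # the argument to resolve the workspace of the attached notebook.
    df = admin.get_capacity_assignment_status(workspace="My Workspace")

    # Single-row pandas DataFrame with Status, Activity Id, Start Time, End Time,
    # Capacity Id and Capacity Name, as populated from the endpoint response.
    print(df[["Status", "Capacity Name"]])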