From 97e3b313d81d2bcb5cbbef492b4113a0c8632cca Mon Sep 17 00:00:00 2001
From: Michael
Date: Thu, 25 Jul 2024 09:48:28 +0300
Subject: [PATCH] used f-strings and fixed some bugs

---
 src/sempy_labs/_generate_semantic_model.py    |  2 +-
 src/sempy_labs/_helper_functions.py           | 32 ++++++++++++-------
 src/sempy_labs/_model_dependencies.py         |  4 +--
 .../directlake/_get_directlake_lakehouse.py   | 13 ++++----
 .../_update_directlake_partition_entity.py    | 16 +++++++---
 src/sempy_labs/report/_generate_report.py     |  2 +-
 src/sempy_labs/report/_report_functions.py    | 13 ++++++--
 7 files changed, 52 insertions(+), 30 deletions(-)

diff --git a/src/sempy_labs/_generate_semantic_model.py b/src/sempy_labs/_generate_semantic_model.py
index 8b63b85e..27e008be 100644
--- a/src/sempy_labs/_generate_semantic_model.py
+++ b/src/sempy_labs/_generate_semantic_model.py
@@ -126,7 +126,7 @@ def create_semantic_model_from_bim(
         lro_wait=True,
     )
 
-    if response.status_code != 200:
+    if response.status_code not in [200, 201]:
         raise FabricHTTPException(response)
     print(
         f"{icons.green_dot} The '{dataset}' semantic model has been created within the '{workspace}' workspace."
diff --git a/src/sempy_labs/_helper_functions.py b/src/sempy_labs/_helper_functions.py
index f7b50348..6f851caf 100644
--- a/src/sempy_labs/_helper_functions.py
+++ b/src/sempy_labs/_helper_functions.py
@@ -286,26 +286,34 @@ def get_direct_lake_sql_endpoint(dataset: str, workspace: Optional[str] = None)
         The ID of SQL Endpoint.
     """
 
+    from sempy_labs.tom import connect_semantic_model
+
     if workspace is None:
         workspace_id = fabric.get_workspace_id()
         workspace = fabric.resolve_workspace_name(workspace_id)
 
-    dfP = fabric.list_partitions(dataset=dataset, workspace=workspace)
-    dfP_filt = dfP[dfP["Mode"] == "DirectLake"]
+    # dfP = fabric.list_partitions(dataset=dataset, workspace=workspace)
+    # dfP_filt = dfP[dfP["Mode"] == "DirectLake"]
 
-    if len(dfP_filt) == 0:
-        raise ValueError(
-            f"The '{dataset}' semantic model in the '{workspace}' workspace is not in Direct Lake mode."
-        )
+    # if len(dfP_filt) == 0:
+    #     raise ValueError(
+    #         f"The '{dataset}' semantic model in the '{workspace}' workspace is not in Direct Lake mode."
+    #     )
 
-    dfE = fabric.list_expressions(dataset=dataset, workspace=workspace)
-    dfE_filt = dfE[dfE["Name"] == "DatabaseQuery"]
-    expr = dfE_filt["Expression"].iloc[0]
+    with connect_semantic_model(
+        dataset=dataset, readonly=True, workspace=workspace
+    ) as tom:
+        sqlEndpointId = None
+        for e in tom.model.Expressions:
+            if e.Name == "DatabaseQuery":
+                expr = e.Expression
+                matches = re.findall(r'"([^"]*)"', expr)
+                sqlEndpointId = matches[1]
 
-    matches = re.findall(r'"([^"]*)"', expr)
-    sqlEndpointId = matches[1]
+        if sqlEndpointId is None:
+            raise ValueError("SQL Endpoint not found.")
 
-    return sqlEndpointId
+        return sqlEndpointId
 
 
 def generate_embedded_filter(filter: str):
diff --git a/src/sempy_labs/_model_dependencies.py b/src/sempy_labs/_model_dependencies.py
index ddf93109..7eee5276 100644
--- a/src/sempy_labs/_model_dependencies.py
+++ b/src/sempy_labs/_model_dependencies.py
@@ -321,9 +321,7 @@ def measure_dependency_tree(
                     child_node_name = ref_obj_name
                     child_node = Node(child_node_name, parent=parent_node)
                     if ref_obj_type == "Column":
-                        child_node.custom_property = (
-                            f"{icons.column_icon} '{ref_obj_table_name}'"
-                        )
+                        child_node.custom_property = f"{icons.column_icon} '{ref_obj_table_name}'"
                     elif ref_obj_type == "Table":
                         child_node.custom_property = f"{icons.table_icon} "
                     elif ref_obj_type == "Measure":
diff --git a/src/sempy_labs/directlake/_get_directlake_lakehouse.py b/src/sempy_labs/directlake/_get_directlake_lakehouse.py
index 32338619..9d474b3e 100644
--- a/src/sempy_labs/directlake/_get_directlake_lakehouse.py
+++ b/src/sempy_labs/directlake/_get_directlake_lakehouse.py
@@ -6,7 +6,6 @@
 )
 from typing import Optional, Tuple
 from uuid import UUID
-import sempy_labs._icons as icons
 
 
 def get_direct_lake_lakehouse(
@@ -49,13 +48,13 @@ def get_direct_lake_lakehouse(
         lakehouse_id = fabric.get_lakehouse_id()
         lakehouse = resolve_lakehouse_name(lakehouse_id, lakehouse_workspace)
 
-    dfP = fabric.list_partitions(dataset=dataset, workspace=workspace)
-    dfP_filt = dfP[dfP["Mode"] == "DirectLake"]
+    # dfP = fabric.list_partitions(dataset=dataset, workspace=workspace)
+    # dfP_filt = dfP[dfP["Mode"] == "DirectLake"]
 
-    if len(dfP_filt) == 0:
-        raise ValueError(
-            f"{icons.red_dot} The '{dataset}' semantic model within the '{workspace}' workspace is not in Direct Lake mode."
-        )
+    # if len(dfP_filt) == 0:
+    #     raise ValueError(
+    #         f"{icons.red_dot} The '{dataset}' semantic model within the '{workspace}' workspace is not in Direct Lake mode."
+    #     )
 
     sqlEndpointId = get_direct_lake_sql_endpoint(dataset, workspace)
 
diff --git a/src/sempy_labs/directlake/_update_directlake_partition_entity.py b/src/sempy_labs/directlake/_update_directlake_partition_entity.py
index f65d19eb..c672f838 100644
--- a/src/sempy_labs/directlake/_update_directlake_partition_entity.py
+++ b/src/sempy_labs/directlake/_update_directlake_partition_entity.py
@@ -93,7 +93,8 @@ def add_table_to_direct_lake_semantic_model(
     dataset: str,
     table_name: str,
     lakehouse_table_name: str,
-    workspace: Optional[str | None] = None,
+    refresh: Optional[bool] = True,
+    workspace: Optional[str] = None,
 ):
     """
     Adds a table and all of its columns to a Direct Lake semantic model, based on a Fabric lakehouse table.
@@ -106,6 +107,8 @@ def add_table_to_direct_lake_semantic_model(
         Name of the table in the semantic model.
     lakehouse_table_name : str
         The name of the Fabric lakehouse table.
+    refresh : bool, default=True
+        Refreshes the table after it is added to the semantic model.
     workspace : str, default=None
         The name of the Fabric workspace in which the semantic model resides.
         Defaults to None which resolves to the workspace of the attached lakehouse
@@ -128,9 +131,11 @@ def add_table_to_direct_lake_semantic_model(
         dataset=dataset, readonly=False, workspace=workspace
     ) as tom:
 
-        if tom.is_direct_lake() is False:
+        table_count = tom.model.Tables.Count
+
+        if tom.is_direct_lake() is False and table_count > 0:
             raise ValueError(
-                "This function is only valid for Direct Lake semantic models."
+                "This function is only valid for Direct Lake semantic models or semantic models with no tables."
             )
 
         if any(
@@ -194,4 +199,7 @@ def add_table_to_direct_lake_semantic_model(
             f"{icons.green_dot} The '{lakeCName}' column has been added to the '{table_name}' table as a '{dt}' data type in the '{dataset}' semantic model within the '{workspace}' workspace."
         )
 
-    refresh_semantic_model(dataset=dataset, tables=table_name, workspace=workspace)
+    if refresh:
+        refresh_semantic_model(
+            dataset=dataset, tables=table_name, workspace=workspace
+        )
diff --git a/src/sempy_labs/report/_generate_report.py b/src/sempy_labs/report/_generate_report.py
index 225c43ad..52d250e8 100644
--- a/src/sempy_labs/report/_generate_report.py
+++ b/src/sempy_labs/report/_generate_report.py
@@ -106,7 +106,7 @@ def create_report_from_reportjson(
         f"/v1/workspaces/{workspace_id}/reports", json=request_body, lro_wait=True
    )
 
-    if response.status_code != 200:
+    if response.status_code not in [200, 201]:
         raise FabricHTTPException(response)
     print(
         f"{icons.green_dot} Successfully created the '{report}' report within the '{workspace}' workspace."
diff --git a/src/sempy_labs/report/_report_functions.py b/src/sempy_labs/report/_report_functions.py
index ff9107a8..8e9de152 100644
--- a/src/sempy_labs/report/_report_functions.py
+++ b/src/sempy_labs/report/_report_functions.py
@@ -39,7 +39,7 @@ def get_report_json(
     report : str
         Name of the Power BI report.
     workspace : str, default=None
-        The Fabric workspace name.
+        The Fabric workspace name in which the report exists.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     save_to_file_name : str, default=None
@@ -51,6 +51,8 @@ def get_report_json(
         The report.json file for a given Power BI report.
     """
 
+    from notebookutils import mssparkutils
+
     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
 
     client = fabric.FabricRestClient()
@@ -82,7 +84,14 @@ def get_report_json(
         )
 
         lakehouse_id = fabric.get_lakehouse_id()
-        lakehouse = resolve_lakehouse_name(lakehouse_id, workspace)
+        for mp in mssparkutils.fs.mounts():
+            if mp.mountPoint == "/default" and mp.storageType == "Lakehouse":
+                ind = mp.source.index("@")
+                lakehouse_workspace_id = mp.source[8:ind]
+                lakehouse_workspace = fabric.resolve_workspace_name(
+                    lakehouse_workspace_id
+                )
+        lakehouse = resolve_lakehouse_name(lakehouse_id, lakehouse_workspace)
 
         folderPath = "/lakehouse/default/Files"
         fileExt = ".json"
         if not save_to_file_name.endswith(fileExt):
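
A note on the rewritten get_direct_lake_sql_endpoint: it now reads the model's "DatabaseQuery" M expression over TOM and returns the second double-quoted string in it, which is the SQL endpoint ID. A standalone sketch of that extraction follows; the host name and GUID in the expression are invented for illustration.

import re

# Rough shape of a Direct Lake "DatabaseQuery" expression; real ones carry
# the SQL endpoint's host name and its ID as the two quoted arguments.
expr = (
    'let\n'
    '    database = Sql.Database(\n'
    '        "abc123.datawarehouse.fabric.microsoft.com",\n'
    '        "aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee"\n'
    '    )\n'
    'in\n'
    '    database'
)

# re.findall returns the quoted strings in order: [host, endpoint ID],
# so matches[1] is what the helper returns.
matches = re.findall(r'"([^"]*)"', expr)
print(matches[1])  # aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee

The new "sqlEndpointId is None" check covers a model with no DatabaseQuery expression; an expression with fewer than two quoted strings would still raise an IndexError at matches[1].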
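
The new refresh flag on add_table_to_direct_lake_semantic_model lets callers batch several table additions and refresh once at the end instead of after every table. A usage sketch, assuming the function is re-exported from sempy_labs.directlake like the package's other Direct Lake helpers; the dataset, table, and workspace names are placeholders.

from sempy_labs import refresh_semantic_model
from sempy_labs.directlake import add_table_to_direct_lake_semantic_model

# Add two lakehouse tables, deferring the refresh until both exist.
for tbl, lake_tbl in [("DimDate", "dim_date"), ("DimGeo", "dim_geo")]:
    add_table_to_direct_lake_semantic_model(
        dataset="Sales Model",
        table_name=tbl,
        lakehouse_table_name=lake_tbl,
        refresh=False,
        workspace="My Workspace",
    )

# One refresh at the end picks up both new tables.
refresh_semantic_model(dataset="Sales Model", workspace="My Workspace")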
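
Finally, the get_report_json change resolves the default lakehouse's workspace from the mount's source URI rather than assuming the report's workspace, which matters when the attached lakehouse lives in a different workspace. The slicing sketch below shows what mp.source[8:ind] extracts; the URI is invented but follows the abfss://<workspace-id>@onelake... shape the code assumes.

# Invented OneLake mount source for a default lakehouse mount.
source = (
    "abfss://11111111-2222-3333-4444-555555555555"
    "@onelake.dfs.fabric.microsoft.com/66666666-7777-8888-9999-000000000000"
)

ind = source.index("@")
# Skip the 8-character "abfss://" scheme and stop before "@",
# leaving just the workspace ID.
workspace_id = source[8:ind]
print(workspace_id)  # 11111111-2222-3333-4444-555555555555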