used f strings and fixed some bugs
m-kovalsky committed Jul 25, 2024
1 parent 3eb0a8e commit 97e3b31
Showing 7 changed files with 52 additions and 30 deletions.

src/sempy_labs/_generate_semantic_model.py  (2 changes: 1 addition & 1 deletion)

@@ -126,7 +126,7 @@ def create_semantic_model_from_bim(
         lro_wait=True,
     )

-    if response.status_code != 200:
+    if response.status_code not in [200, 201]:
         raise FabricHTTPException(response)
     print(
         f"{icons.green_dot} The '{dataset}' semantic model has been created within the '{workspace}' workspace."

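Note on the fix: Fabric's item-creation endpoints can report success as either 200 (OK) or 201 (Created), so the strict `!= 200` check treated some successful creations as failures. A minimal sketch of the corrected pattern, assuming a `FabricRestClient`-style `client`; the endpoint path and request body are illustrative, not the module's exact call:

# Sketch only; path and request_body are assumed for illustration.
response = client.post(
    f"/v1/workspaces/{workspace_id}/semanticModels",
    json=request_body,
    lro_wait=True,
)

# Accept both success codes rather than 200 alone.
if response.status_code not in [200, 201]:
    raise FabricHTTPException(response)
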
src/sempy_labs/_helper_functions.py  (32 changes: 20 additions & 12 deletions)

@@ -286,26 +286,34 @@ def get_direct_lake_sql_endpoint(dataset: str, workspace: Optional[str] = None)
         The ID of SQL Endpoint.
     """

+    from sempy_labs.tom import connect_semantic_model
+
     if workspace is None:
         workspace_id = fabric.get_workspace_id()
         workspace = fabric.resolve_workspace_name(workspace_id)

-    dfP = fabric.list_partitions(dataset=dataset, workspace=workspace)
-    dfP_filt = dfP[dfP["Mode"] == "DirectLake"]
+    # dfP = fabric.list_partitions(dataset=dataset, workspace=workspace)
+    # dfP_filt = dfP[dfP["Mode"] == "DirectLake"]

-    if len(dfP_filt) == 0:
-        raise ValueError(
-            f"The '{dataset}' semantic model in the '{workspace}' workspace is not in Direct Lake mode."
-        )
+    # if len(dfP_filt) == 0:
+    #     raise ValueError(
+    #         f"The '{dataset}' semantic model in the '{workspace}' workspace is not in Direct Lake mode."
+    #     )

-    dfE = fabric.list_expressions(dataset=dataset, workspace=workspace)
-    dfE_filt = dfE[dfE["Name"] == "DatabaseQuery"]
-    expr = dfE_filt["Expression"].iloc[0]
+    with connect_semantic_model(
+        dataset=dataset, readonly=True, workspace=workspace
+    ) as tom:
+        sqlEndpointId = None
+        for e in tom.model.Expressions:
+            if e.Name == "DatabaseQuery":
+                expr = e.Expression
+                matches = re.findall(r'"([^"]*)"', expr)
+                sqlEndpointId = matches[1]

-    matches = re.findall(r'"([^"]*)"', expr)
-    sqlEndpointId = matches[1]
+        if sqlEndpointId is None:
+            raise ValueError("SQL Endpoint not found.")

-    return sqlEndpointId
+        return sqlEndpointId


 def generate_embedded_filter(filter: str):

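The endpoint lookup now reads the model's `DatabaseQuery` expression through a TOM connection rather than `fabric.list_expressions`, but the parsing is unchanged: every double-quoted string in the M expression is collected, and the second one is taken as the SQL endpoint ID. A small illustration with a hypothetical expression (the real one comes from `tom.model.Expressions`):

import re

# Hypothetical shape of a Direct Lake "DatabaseQuery" M expression.
expr = 'let\n    database = Sql.Database("xyz.datawarehouse.fabric.microsoft.com", "1b2c3d4e-5f6a-7b8c-9d0e-1f2a3b4c5d6e")\nin\n    database'

matches = re.findall(r'"([^"]*)"', expr)  # all double-quoted substrings, in order
sqlEndpointId = matches[1]                # [0] is the server, [1] is the endpoint ID
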
src/sempy_labs/_model_dependencies.py  (4 changes: 1 addition & 3 deletions)

@@ -321,9 +321,7 @@ def measure_dependency_tree(
                 child_node_name = ref_obj_name
                 child_node = Node(child_node_name, parent=parent_node)
                 if ref_obj_type == "Column":
-                    child_node.custom_property = (
-                        f"{icons.column_icon} '{ref_obj_table_name}'"
-                    )
+                    child_node.custom_property = f"{icons.column_icon} '{ref_obj_table_name}'"
                 elif ref_obj_type == "Table":
                     child_node.custom_property = f"{icons.table_icon} "
                 elif ref_obj_type == "Measure":

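This hunk is formatting only: the wrapped assignment is collapsed onto one line. For context, the function appears to build its output from anytree-style `Node` objects, attaching an icon label through `custom_property`; a minimal sketch under that assumption, with hypothetical names:

from anytree import Node, RenderTree

# Hypothetical two-node dependency tree; labels stand in for the icon strings above.
root = Node("[Total Sales]")
child = Node("Amount", parent=root)
child.custom_property = "(column) 'FactSales'"

for pre, _, node in RenderTree(root):
    print(f"{pre}{getattr(node, 'custom_property', '')} {node.name}")
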
src/sempy_labs/directlake/_get_directlake_lakehouse.py  (13 changes: 6 additions & 7 deletions)

@@ -6,7 +6,6 @@
 )
 from typing import Optional, Tuple
 from uuid import UUID
-import sempy_labs._icons as icons


 def get_direct_lake_lakehouse(

@@ -49,13 +48,13 @@ def get_direct_lake_lakehouse(
         lakehouse_id = fabric.get_lakehouse_id()
         lakehouse = resolve_lakehouse_name(lakehouse_id, lakehouse_workspace)

-    dfP = fabric.list_partitions(dataset=dataset, workspace=workspace)
-    dfP_filt = dfP[dfP["Mode"] == "DirectLake"]
+    # dfP = fabric.list_partitions(dataset=dataset, workspace=workspace)
+    # dfP_filt = dfP[dfP["Mode"] == "DirectLake"]

-    if len(dfP_filt) == 0:
-        raise ValueError(
-            f"{icons.red_dot} The '{dataset}' semantic model within the '{workspace}' workspace is not in Direct Lake mode."
-        )
+    # if len(dfP_filt) == 0:
+    #     raise ValueError(
+    #         f"{icons.red_dot} The '{dataset}' semantic model within the '{workspace}' workspace is not in Direct Lake mode."
+    #     )

     sqlEndpointId = get_direct_lake_sql_endpoint(dataset, workspace)

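With the mode check commented out here as well, a non-Direct-Lake model now fails inside `get_direct_lake_sql_endpoint` instead (raising "SQL Endpoint not found." when no `DatabaseQuery` expression exists). Hypothetical usage, assuming the `Tuple` return annotation covers the lakehouse name and ID:

# Illustrative names; workspace parameters default to the attached lakehouse's workspace.
lakehouse, lakehouse_id = get_direct_lake_lakehouse(
    dataset="Sales Model",
    workspace="Analytics",
)
print(f"Direct Lake model reads from '{lakehouse}' ({lakehouse_id})")
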
src/sempy_labs/directlake/_update_directlake_partition_entity.py  (16 changes: 12 additions & 4 deletions)

@@ -93,7 +93,8 @@ def add_table_to_direct_lake_semantic_model(
     dataset: str,
     table_name: str,
     lakehouse_table_name: str,
-    workspace: Optional[str | None] = None,
+    refresh: Optional[bool] = True,
+    workspace: Optional[str] = None,
 ):
     """
     Adds a table and all of its columns to a Direct Lake semantic model, based on a Fabric lakehouse table.

@@ -106,6 +107,8 @@
         Name of the table in the semantic model.
     lakehouse_table_name : str
         The name of the Fabric lakehouse table.
+    refresh : bool, default=True
+        Refreshes the table after it is added to the semantic model.
     workspace : str, default=None
         The name of the Fabric workspace in which the semantic model resides.
         Defaults to None which resolves to the workspace of the attached lakehouse

@@ -128,9 +131,11 @@ def add_table_to_direct_lake_semantic_model(
         dataset=dataset, readonly=False, workspace=workspace
     ) as tom:

-        if tom.is_direct_lake() is False:
+        table_count = tom.model.Tables.Count
+
+        if tom.is_direct_lake() is False and table_count > 0:
             raise ValueError(
-                "This function is only valid for Direct Lake semantic models."
+                "This function is only valid for Direct Lake semantic models or semantic models with no tables."
             )

         if any(

@@ -194,4 +199,7 @@ def add_table_to_direct_lake_semantic_model(
                 f"{icons.green_dot} The '{lakeCName}' column has been added to the '{table_name}' table as a '{dt}' data type in the '{dataset}' semantic model within the '{workspace}' workspace."
             )

-    refresh_semantic_model(dataset=dataset, tables=table_name, workspace=workspace)
+    if refresh:
+        refresh_semantic_model(
+            dataset=dataset, tables=table_name, workspace=workspace
+        )

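The new `refresh` parameter makes the trailing refresh optional, so several tables can be added in sequence without a refresh after each one. A sketch of that usage, with illustrative names and the assumption that `refresh_semantic_model` refreshes the full model when no table list is passed:

for t in ["DimDate", "DimCustomer", "FactSales"]:
    add_table_to_direct_lake_semantic_model(
        dataset="Sales Model",
        table_name=t,
        lakehouse_table_name=t.lower(),
        refresh=False,  # defer the refresh rather than refreshing per table
    )

# One refresh at the end picks up all of the newly added tables.
refresh_semantic_model(dataset="Sales Model")
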
src/sempy_labs/report/_generate_report.py  (2 changes: 1 addition & 1 deletion)

@@ -106,7 +106,7 @@ def create_report_from_reportjson(
         f"/v1/workspaces/{workspace_id}/reports", json=request_body, lro_wait=True
     )

-    if response.status_code != 200:
+    if response.status_code not in [200, 201]:
         raise FabricHTTPException(response)
     print(
         f"{icons.green_dot} Succesfully created the '{report}' report within the '{workspace}' workspace."

src/sempy_labs/report/_report_functions.py  (13 changes: 11 additions & 2 deletions)

@@ -39,7 +39,7 @@ def get_report_json(
     report : str
         Name of the Power BI report.
     workspace : str, default=None
-        The Fabric workspace name.
+        The Fabric workspace name in which the report exists.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     save_to_file_name : str, default=None

@@ -51,6 +51,8 @@
         The report.json file for a given Power BI report.
     """

+    from notebookutils import mssparkutils
+
     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

     client = fabric.FabricRestClient()

@@ -82,7 +84,14 @@
         )

         lakehouse_id = fabric.get_lakehouse_id()
-        lakehouse = resolve_lakehouse_name(lakehouse_id, workspace)
+        for mp in mssparkutils.fs.mounts():
+            if mp.mountPoint == "/default" and mp.storageType == "Lakehouse":
+                ind = mp.source.index("@")
+                lakehouse_workspace_id = mp.source[8:ind]
+                lakehouse_workspace = fabric.resolve_workspace_name(
+                    lakehouse_workspace_id
+                )
+        lakehouse = resolve_lakehouse_name(lakehouse_id, lakehouse_workspace)
         folderPath = "/lakehouse/default/Files"
         fileExt = ".json"
         if not save_to_file_name.endswith(fileExt):

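The bug fix here: the attached default lakehouse can live in a different workspace than the report, so resolving the lakehouse name against the report's `workspace` could fail. The mount's `source` is a OneLake `abfss://` URI whose authority segment, between the 8-character `abfss://` scheme and the `@`, is the lakehouse's workspace ID. A small illustration with a hypothetical mount source:

# Hypothetical value; the real source comes from mssparkutils.fs.mounts().
source = "abfss://11111111-2222-3333-4444-555555555555@onelake.dfs.fabric.microsoft.com/..."

ind = source.index("@")
lakehouse_workspace_id = source[8:ind]  # len("abfss://") == 8, so the slice is the workspace GUID
print(lakehouse_workspace_id)           # 11111111-2222-3333-4444-555555555555
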
