diff --git a/src/sempy_labs/_connections.py b/src/sempy_labs/_connections.py index 0220f1e9..508feb5c 100644 --- a/src/sempy_labs/_connections.py +++ b/src/sempy_labs/_connections.py @@ -1,6 +1,6 @@ import sempy.fabric as fabric import pandas as pd -import sempy_labs._icons as icons +from sempy.fabric.exceptions import FabricHTTPException def create_connection_cloud( @@ -56,33 +56,30 @@ def create_connection_cloud( response = client.post("/v1/connections", json=request_body) - if response.status_code == 200: - o = response.json() - new_data = { - "Connection Id": o.get("id"), - "Connection Name": o.get("name"), - "Connectivity Type": o.get("connectivityType"), - "Connection Type": o.get("connectionDetails", {}).get("type"), - "Connection Path": o.get("connectionDetails", {}).get("path"), - "Privacy Level": o.get("privacyLevel"), - "Credential Type": o.get("credentialDetails", {}).get("credentialType"), - "Single Sign On Type": o.get("credentialDetails", {}).get( - "singleSignOnType" - ), - "Connection Encryption": o.get("credentialDetails", {}).get( - "connectionEncryption" - ), - "Skip Test Connection": o.get("credentialDetails", {}).get( - "skipTestConnection" - ), - } - df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True) - - df["Skip Test Connection"] = df["Skip Test Connection"].astype(bool) - - return df - else: - print(f"{icons.red_dot} {response.status_code}") + if response.status_code != 200: + raise FabricHTTPException(response) + o = response.json() + new_data = { + "Connection Id": o.get("id"), + "Connection Name": o.get("name"), + "Connectivity Type": o.get("connectivityType"), + "Connection Type": o.get("connectionDetails", {}).get("type"), + "Connection Path": o.get("connectionDetails", {}).get("path"), + "Privacy Level": o.get("privacyLevel"), + "Credential Type": o.get("credentialDetails", {}).get("credentialType"), + "Single Sign On Type": o.get("credentialDetails", {}).get("singleSignOnType"), + "Connection Encryption": o.get("credentialDetails", {}).get( + "connectionEncryption" + ), + "Skip Test Connection": o.get("credentialDetails", {}).get( + "skipTestConnection" + ), + } + df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True) + + df["Skip Test Connection"] = df["Skip Test Connection"].astype(bool) + + return df def create_connection_on_prem( @@ -137,34 +134,31 @@ def create_connection_on_prem( response = client.post("/v1/connections", json=request_body) - if response.status_code == 200: - o = response.json() - new_data = { - "Connection Id": o.get("id"), - "Connection Name": o.get("name"), - "Gateway ID": o.get("gatewayId"), - "Connectivity Type": o.get("connectivityType"), - "Connection Type": o.get("connectionDetails", {}).get("type"), - "Connection Path": o.get("connectionDetails", {}).get("path"), - "Privacy Level": o.get("privacyLevel"), - "Credential Type": o.get("credentialDetails", {}).get("credentialType"), - "Single Sign On Type": o.get("credentialDetails", {}).get( - "singleSignOnType" - ), - "Connection Encryption": o.get("credentialDetails", {}).get( - "connectionEncryption" - ), - "Skip Test Connection": o.get("credentialDetails", {}).get( - "skipTestConnection" - ), - } - df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True) - - df["Skip Test Connection"] = df["Skip Test Connection"].astype(bool) - - return df - else: - print(f"{icons.red_dot} {response.status_code}") + if response.status_code != 200: + raise FabricHTTPException(response) + o = response.json() + new_data = { + 
"Connection Id": o.get("id"), + "Connection Name": o.get("name"), + "Gateway ID": o.get("gatewayId"), + "Connectivity Type": o.get("connectivityType"), + "Connection Type": o.get("connectionDetails", {}).get("type"), + "Connection Path": o.get("connectionDetails", {}).get("path"), + "Privacy Level": o.get("privacyLevel"), + "Credential Type": o.get("credentialDetails", {}).get("credentialType"), + "Single Sign On Type": o.get("credentialDetails", {}).get("singleSignOnType"), + "Connection Encryption": o.get("credentialDetails", {}).get( + "connectionEncryption" + ), + "Skip Test Connection": o.get("credentialDetails", {}).get( + "skipTestConnection" + ), + } + df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True) + + df["Skip Test Connection"] = df["Skip Test Connection"].astype(bool) + + return df def create_connection_vnet( @@ -221,31 +215,28 @@ def create_connection_vnet( response = client.post("/v1/connections", json=request_body) - if response.status_code == 200: - o = response.json() - new_data = { - "Connection Id": o.get("id"), - "Connection Name": o.get("name"), - "Gateway ID": o.get("gatewayId"), - "Connectivity Type": o.get("connectivityType"), - "Connection Type": o.get("connectionDetails", {}).get("type"), - "Connection Path": o.get("connectionDetails", {}).get("path"), - "Privacy Level": o.get("privacyLevel"), - "Credential Type": o.get("credentialDetails", {}).get("credentialType"), - "Single Sign On Type": o.get("credentialDetails", {}).get( - "singleSignOnType" - ), - "Connection Encryption": o.get("credentialDetails", {}).get( - "connectionEncryption" - ), - "Skip Test Connection": o.get("credentialDetails", {}).get( - "skipTestConnection" - ), - } - df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True) - - df["Skip Test Connection"] = df["Skip Test Connection"].astype(bool) - - return df - else: - print(f"{icons.red_dot} {response.status_code}") + if response.status_code != 200: + raise FabricHTTPException(response) + o = response.json() + new_data = { + "Connection Id": o.get("id"), + "Connection Name": o.get("name"), + "Gateway ID": o.get("gatewayId"), + "Connectivity Type": o.get("connectivityType"), + "Connection Type": o.get("connectionDetails", {}).get("type"), + "Connection Path": o.get("connectionDetails", {}).get("path"), + "Privacy Level": o.get("privacyLevel"), + "Credential Type": o.get("credentialDetails", {}).get("credentialType"), + "Single Sign On Type": o.get("credentialDetails", {}).get("singleSignOnType"), + "Connection Encryption": o.get("credentialDetails", {}).get( + "connectionEncryption" + ), + "Skip Test Connection": o.get("credentialDetails", {}).get( + "skipTestConnection" + ), + } + df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True) + + df["Skip Test Connection"] = df["Skip Test Connection"].astype(bool) + + return df diff --git a/src/sempy_labs/_generate_semantic_model.py b/src/sempy_labs/_generate_semantic_model.py index d1d21c88..2db6f1fc 100644 --- a/src/sempy_labs/_generate_semantic_model.py +++ b/src/sempy_labs/_generate_semantic_model.py @@ -11,6 +11,7 @@ ) from sempy_labs.lakehouse._lakehouse import lakehouse_attached import sempy_labs._icons as icons +from sempy.fabric.exceptions import FabricHTTPException def create_blank_semantic_model( diff --git a/src/sempy_labs/_list_functions.py b/src/sempy_labs/_list_functions.py index d7c5e039..de44b293 100644 --- a/src/sempy_labs/_list_functions.py +++ b/src/sempy_labs/_list_functions.py @@ -1083,18 +1083,15 @@ def update_item( 
f"/v1/workspaces/{workspace_id}/{itemType}/{itemId}", json=request_body ) - if response.status_code == 200: - if description is None: - print( - f"{icons.green_dot} The '{current_name}' {item_type} within the '{workspace}' workspace has been updated to be named '{new_name}'" - ) - else: - print( - f"{icons.green_dot} The '{current_name}' {item_type} within the '{workspace}' workspace has been updated to be named '{new_name}' and have a description of '{description}'" - ) + if response.status_code != 200: + raise FabricHTTPException(response) + if description is None: + print( + f"{icons.green_dot} The '{current_name}' {item_type} within the '{workspace}' workspace has been updated to be named '{new_name}'" + ) else: - raise ValueError( - f"{icons.red_dot}: The '{current_name}' {item_type} within the '{workspace}' workspace was not updateds." + print( + f"{icons.green_dot} The '{current_name}' {item_type} within the '{workspace}' workspace has been updated to be named '{new_name}' and have a description of '{description}'" ) @@ -1536,50 +1533,47 @@ def list_shortcuts( response = client.get( f"/v1/workspaces/{workspace_id}/items/{lakehouse_id}/shortcuts" ) - if response.status_code == 200: - for s in response.json()["value"]: - shortcutName = s.get("name") - shortcutPath = s.get("path") - source = list(s["target"].keys())[0] - ( - sourceLakehouseName, - sourceWorkspaceName, - sourcePath, - connectionId, - location, - subpath, - ) = (None, None, None, None, None, None) - if source == "oneLake": - sourceLakehouseId = s.get("target", {}).get(source, {}).get("itemId") - sourcePath = s.get("target", {}).get(source, {}).get("path") - sourceWorkspaceId = ( - s.get("target", {}).get(source, {}).get("workspaceId") - ) - sourceWorkspaceName = fabric.resolve_workspace_name(sourceWorkspaceId) - sourceLakehouseName = resolve_lakehouse_name( - sourceLakehouseId, sourceWorkspaceName - ) - else: - connectionId = s.get("target", {}).get(source, {}).get("connectionId") - location = s.get("target", {}).get(source, {}).get("location") - subpath = s.get("target", {}).get(source, {}).get("subpath") - new_data = { - "Shortcut Name": shortcutName, - "Shortcut Path": shortcutPath, - "Source": source, - "Source Lakehouse Name": sourceLakehouseName, - "Source Workspace Name": sourceWorkspaceName, - "Source Path": sourcePath, - "Source Connection ID": connectionId, - "Source Location": location, - "Source SubPath": subpath, - } - df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True) + if response.status_code != 200: + raise FabricHTTPException(response) + for s in response.json()["value"]: + shortcutName = s.get("name") + shortcutPath = s.get("path") + source = list(s["target"].keys())[0] + ( + sourceLakehouseName, + sourceWorkspaceName, + sourcePath, + connectionId, + location, + subpath, + ) = (None, None, None, None, None, None) + if source == "oneLake": + sourceLakehouseId = s.get("target", {}).get(source, {}).get("itemId") + sourcePath = s.get("target", {}).get(source, {}).get("path") + sourceWorkspaceId = s.get("target", {}).get(source, {}).get("workspaceId") + sourceWorkspaceName = fabric.resolve_workspace_name(sourceWorkspaceId) + sourceLakehouseName = resolve_lakehouse_name( + sourceLakehouseId, sourceWorkspaceName + ) + else: + connectionId = s.get("target", {}).get(source, {}).get("connectionId") + location = s.get("target", {}).get(source, {}).get("location") + subpath = s.get("target", {}).get(source, {}).get("subpath") + + new_data = { + "Shortcut Name": shortcutName, + "Shortcut 
Path": shortcutPath, + "Source": source, + "Source Lakehouse Name": sourceLakehouseName, + "Source Workspace Name": sourceWorkspaceName, + "Source Path": sourcePath, + "Source Connection ID": connectionId, + "Source Location": location, + "Source SubPath": subpath, + } + df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True) - print( - f"{icons.warning} This function relies on an API which is not yet official as of May 21, 2024. Once the API becomes official this function will work as expected." - ) return df @@ -2109,12 +2103,11 @@ def update_spark_settings( f"/v1/workspaces/{workspace_id}/spark/settings", json=request_body ) - if response.status_code == 200: - print( - f"{icons.green_dot} The spark settings within the '{workspace}' workspace have been updated accordingly." - ) - else: - raise ValueError(f"{icons.red_dot} {response.status_code}") + if response.status_code != 200: + raise FabricHTTPException(response) + print( + f"{icons.green_dot} The spark settings within the '{workspace}' workspace have been updated accordingly." + ) def add_user_to_workspace( @@ -2156,12 +2149,11 @@ def add_user_to_workspace( f"/v1.0/myorg/groups/{workspace_id}/users", json=request_body ) - if response.status_code == 200: - print( - f"{icons.green_dot} The '{email_address}' user has been added as a{plural} '{role_name}' within the '{workspace}' workspace." - ) - else: - print(f"{icons.red_dot} {response.status_code}") + if response.status_code != 200: + raise FabricHTTPException(response) + print( + f"{icons.green_dot} The '{email_address}' user has been added as a{plural} '{role_name}' within the '{workspace}' workspace." + ) def delete_user_from_workspace(email_address: str, workspace: Optional[str] = None): @@ -2186,12 +2178,11 @@ def delete_user_from_workspace(email_address: str, workspace: Optional[str] = No client = fabric.PowerBIRestClient() response = client.delete(f"/v1.0/myorg/groups/{workspace_id}/users/{email_address}") - if response.status_code == 200: - print( - f"{icons.green_dot} The '{email_address}' user has been removed from accessing the '{workspace}' workspace." - ) - else: - print(f"{icons.red_dot} {response.status_code}") + if response.status_code != 200: + raise FabricHTTPException(response) + print( + f"{icons.green_dot} The '{email_address}' user has been removed from accessing the '{workspace}' workspace." + ) def update_workspace_user( @@ -2229,12 +2220,11 @@ def update_workspace_user( client = fabric.PowerBIRestClient() response = client.put(f"/v1.0/myorg/groups/{workspace_id}/users", json=request_body) - if response.status_code == 200: - print( - f"{icons.green_dot} The '{email_address}' user has been updated to a '{role_name}' within the '{workspace}' workspace." - ) - else: - print(f"{icons.red_dot} {response.status_code}") + if response.status_code != 200: + raise FabricHTTPException(response) + print( + f"{icons.green_dot} The '{email_address}' user has been updated to a '{role_name}' within the '{workspace}' workspace." + ) def list_workspace_users(workspace: Optional[str] = None) -> pd.DataFrame: @@ -2307,12 +2297,12 @@ def assign_workspace_to_dataflow_storage( response = client.post( f"/v1.0/myorg/groups/{workspace_id}/AssignToDataflowStorage", json=request_body ) - if response.status_code == 200: - print( - f"{icons.green_dot} The '{dataflow_storage_account}' dataflow storage account has been assigned to the '{workspace}' workspacce." 
- )
- else:
- print(f"{icons.red_dot} {response.status_code}")
+
+ if response.status_code != 200:
+ raise FabricHTTPException(response)
+ print(
+ f"{icons.green_dot} The '{dataflow_storage_account}' dataflow storage account has been assigned to the '{workspace}' workspace."
+ )


 def list_capacities() -> pd.DataFrame:
diff --git a/src/sempy_labs/_model_dependencies.py b/src/sempy_labs/_model_dependencies.py
index fa5caba2..15b69f21 100644
--- a/src/sempy_labs/_model_dependencies.py
+++ b/src/sempy_labs/_model_dependencies.py
@@ -108,9 +108,9 @@ def get_measure_dependencies(dataset: str, workspace: Optional[str] = None):
 "Table Name": r["Table Name"],
 "Object Name": r["Object Name"],
 "Object Type": r["Object Type"],
- "Referenced Object": dependency[5],
- "Referenced Table": dependency[4],
- "Referenced Object Type": dependency[6],
+ "Referenced Object": dependency[4],
+ "Referenced Table": dependency[3],
+ "Referenced Object Type": dependency[5],
 "Done": d,
 "Full Object Name": r["Full Object Name"],
 "Referenced Full Object Name": dependency[
diff --git a/src/sempy_labs/_query_scale_out.py b/src/sempy_labs/_query_scale_out.py
index a4aa2f82..c5ef6ef3 100644
--- a/src/sempy_labs/_query_scale_out.py
+++ b/src/sempy_labs/_query_scale_out.py
@@ -3,6 +3,7 @@
 from sempy_labs._helper_functions import resolve_dataset_id
 from typing import Optional
 import sempy_labs._icons as icons
+from sempy.fabric.exceptions import FabricHTTPException


 def qso_sync(dataset: str, workspace: Optional[str] = None):
@@ -38,14 +39,11 @@ def qso_sync(dataset: str, workspace: Optional[str] = None):
 f"/v1.0/myorg/groups/{workspace_id}/datasets/{dataset_id}/queryScaleOut/sync"
 )

- if response.status_code == 200:
- print(
- f"{icons.green_dot} QSO sync initiated for the '{dataset}' semantic model within the '{workspace}' workspace."
- )
- else:
- raise ValueError(
- f"{icons.red_dot} QSO sync failed for the '{dataset}' semantic model within the '{workspace}' workspace."
- )
+ if response.status_code != 200:
+ raise FabricHTTPException(response)
+ print(
+ f"{icons.green_dot} QSO sync initiated for the '{dataset}' semantic model within the '{workspace}' workspace."
+ ) def qso_sync_status(dataset: str, workspace: Optional[str] = None): @@ -99,54 +97,54 @@ def qso_sync_status(dataset: str, workspace: Optional[str] = None): f"/v1.0/myorg/groups/{workspace_id}/datasets/{dataset_id}/queryScaleOut/syncStatus" ) - if response.status_code == 200: - o = response.json() - sos = o["scaleOutStatus"] + if response.status_code != 200: + raise FabricHTTPException(response) + + o = response.json() + sos = o["scaleOutStatus"] + + if sos == "Enabled": + new_data = { + "Scale Out Status": o["scaleOutStatus"], + "Sync Start Time": o["syncStartTime"], + "Sync End Time": o["syncEndTime"], + "Commit Version": o["commitVersion"], + "Commit Timestamp": o["commitTimestamp"], + "Target Sync Version": o["targetSyncVersion"], + "Target Sync Timestamp": o["targetSyncTimestamp"], + "Trigger Reason": o["triggerReason"], + "Min Active Read Version": o["minActiveReadVersion"], + "Min Active Read Timestamp": o["minActiveReadTimestamp"], + } + df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True) - if sos == "Enabled": + for r in o["scaleOutReplicas"]: new_data = { - "Scale Out Status": o["scaleOutStatus"], - "Sync Start Time": o["syncStartTime"], - "Sync End Time": o["syncEndTime"], - "Commit Version": o["commitVersion"], - "Commit Timestamp": o["commitTimestamp"], - "Target Sync Version": o["targetSyncVersion"], - "Target Sync Timestamp": o["targetSyncTimestamp"], - "Trigger Reason": o["triggerReason"], - "Min Active Read Version": o["minActiveReadVersion"], - "Min Active Read Timestamp": o["minActiveReadTimestamp"], + "Replica ID": r["replicaId"], + "Replica Type": r["replicaType"], + "Replica Version": str(r["replicaVersion"]), + "Replica Timestamp": r["replicaTimestamp"], } - df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True) - - for r in o["scaleOutReplicas"]: - new_data = { - "Replica ID": r["replicaId"], - "Replica Type": r["replicaType"], - "Replica Version": str(r["replicaVersion"]), - "Replica Timestamp": r["replicaTimestamp"], - } - dfRep = pd.concat( - [dfRep, pd.DataFrame(new_data, index=[0])], ignore_index=True - ) - - df["Sync Start Time"] = pd.to_datetime(df["Sync Start Time"]) - df["Sync End Time"] = pd.to_datetime(df["Sync End Time"]) - df["Commit Timestamp"] = pd.to_datetime(df["Commit Timestamp"]) - df["Target Sync Timestamp"] = pd.to_datetime(df["Target Sync Timestamp"]) - df["Min Active Read Timestamp"] = pd.to_datetime( - df["Min Active Read Timestamp"] + dfRep = pd.concat( + [dfRep, pd.DataFrame(new_data, index=[0])], ignore_index=True ) - dfRep["Replica Timestamp"] = pd.to_datetime(dfRep["Replica Timestamp"]) - df["Commit Version"] = df["Commit Version"].astype("int") - df["Target Sync Version"] = df["Target Sync Version"].astype("int") - df["Min Active Read Version"] = df["Min Active Read Version"].astype("int") - return df, dfRep - else: - print(f"{sos}\n\n") - return df, dfRep + df["Sync Start Time"] = pd.to_datetime(df["Sync Start Time"]) + df["Sync End Time"] = pd.to_datetime(df["Sync End Time"]) + df["Commit Timestamp"] = pd.to_datetime(df["Commit Timestamp"]) + df["Target Sync Timestamp"] = pd.to_datetime(df["Target Sync Timestamp"]) + df["Min Active Read Timestamp"] = pd.to_datetime( + df["Min Active Read Timestamp"] + ) + dfRep["Replica Timestamp"] = pd.to_datetime(dfRep["Replica Timestamp"]) + df["Commit Version"] = df["Commit Version"].astype("int") + df["Target Sync Version"] = df["Target Sync Version"].astype("int") + df["Min Active Read Version"] = df["Min Active Read Version"].astype("int") + + 
return df, dfRep
 else:
- return response.status_code
+ print(f"{sos}\n\n")
+ return df, dfRep


 def disable_qso(dataset: str, workspace: Optional[str] = None):
@@ -181,14 +179,15 @@
 response = client.patch(
 f"/v1.0/myorg/groups/{workspace_id}/datasets/{dataset_id}", json=request_body
 )
- if response.status_code == 200:
- df = list_qso_settings(dataset=dataset, workspace=workspace)
- print(
- f"{icons.green_dot} Query scale out has been disabled for the '{dataset}' semantic model within the '{workspace}' workspace."
- )
- return df
- else:
- raise ValueError(f"{icons.red_dot} {response.status_code}")
+ if response.status_code != 200:
+ raise FabricHTTPException(response)
+
+ df = list_qso_settings(dataset=dataset, workspace=workspace)
+ print(
+ f"{icons.green_dot} Query scale out has been disabled for the '{dataset}' semantic model within the '{workspace}' workspace."
+ )
+
+ return df


 def set_qso(
@@ -248,14 +247,14 @@
 f"/v1.0/myorg/groups/{workspace_id}/datasets/{dataset_id}",
 json=request_body,
 )
- if response.status_code == 200:
- df = list_qso_settings(dataset=dataset, workspace=workspace)
- print(
- f"{icons.green_dot} Query scale out has been set on the '{dataset}' semantic model within the '{workspace}' workspace."
- )
- return df
- else:
- raise ValueError(f"{icons.red_dot} {response.status_code}")
+ if response.status_code != 200:
+ raise FabricHTTPException(response)
+
+ df = list_qso_settings(dataset=dataset, workspace=workspace)
+ print(
+ f"{icons.green_dot} Query scale out has been set on the '{dataset}' semantic model within the '{workspace}' workspace."
+ )
+ return df
 else:
 raise ValueError(
 f"{icons.red_dot} Failed to set the '{dataset}' semantic model within the '{workspace}' workspace to large semantic model storage format. This is a prerequisite for enabling Query Scale Out.\n\"https://learn.microsoft.com/power-bi/enterprise/service-premium-scale-out#prerequisites\""
 )
@@ -314,13 +313,11 @@ def set_semantic_model_storage_format(
 response = client.patch(
 f"/v1.0/myorg/groups/{workspace_id}/datasets/{dataset_id}", json=request_body
 )
+ if response.status_code != 200:
+ raise FabricHTTPException(response)
+ print(f"{icons.green_dot} Semantic model storage format set to '{storage_format}'.")

- if response.status_code == 200:
- return print(
- f"{icons.green_dot} Semantic model storage format set to '{storage_format}'."
- )
- else:
- raise ValueError(f"{icons.red_dot} {response.status_code}")
+ return response.status_code


 def list_qso_settings(dataset: Optional[str] = None, workspace: Optional[str] = None):
@@ -433,9 +430,8 @@ def set_workspace_default_storage_format(
 client = fabric.PowerBIRestClient()
 response = client.patch(f"/v1.0/myorg/groups/{workspace_id}", json=request_body)

- if response.status_code == 200:
- print(
- f"{icons.green_dot} The default storage format for the '{workspace}' workspace has been updated to '{storage_format}."
- )
- else:
- raise ValueError(f"{icons.red_dot} {response.status_code}")
+ if response.status_code != 200:
+ raise FabricHTTPException(response)
+ print(
+ f"{icons.green_dot} The default storage format for the '{workspace}' workspace has been updated to '{storage_format}'."
+ )
diff --git a/src/sempy_labs/_refresh_semantic_model.py b/src/sempy_labs/_refresh_semantic_model.py
index e80eaf18..a527cefc 100644
--- a/src/sempy_labs/_refresh_semantic_model.py
+++ b/src/sempy_labs/_refresh_semantic_model.py
@@ -5,6 +5,7 @@
 from sempy._utils._log import log
 import sempy_labs._icons as icons
 from sempy_labs._helper_functions import resolve_workspace_name_and_id
+from sempy.fabric.exceptions import FabricHTTPException


 @log
@@ -171,9 +172,9 @@ def cancel_dataset_refresh(
 response = client.delete(
 f"/v1.0/myorg/groups/{workspace_id}/datasets/{dataset_id}/refreshes/{request_id}"
 )
- if response.status_code == 200:
- print(
- f"{icons.green_dot} The '{request_id}' refresh request for the '{dataset}' semantic model within the '{workspace}' workspace has been cancelled."
- )
- else:
- print(response.status_code)
+
+ if response.status_code != 200:
+ raise FabricHTTPException(response)
+ print(
+ f"{icons.green_dot} The '{request_id}' refresh request for the '{dataset}' semantic model within the '{workspace}' workspace has been cancelled."
+ )
diff --git a/src/sempy_labs/lakehouse/_shortcuts.py b/src/sempy_labs/lakehouse/_shortcuts.py
index 88d177fc..915fc62d 100644
--- a/src/sempy_labs/lakehouse/_shortcuts.py
+++ b/src/sempy_labs/lakehouse/_shortcuts.py
@@ -6,6 +6,7 @@
 )
 from typing import Optional
 import sempy_labs._icons as icons
+from sempy.fabric.exceptions import FabricHTTPException


 def create_shortcut_onelake(
@@ -194,9 +195,8 @@ def delete_shortcut(
 f"/v1/workspaces/{workspace_id}/items/{lakehouse_id}/shortcuts/Tables/{shortcut_name}"
 )

- if response.status_code == 200:
- print(
- f"{icons.green_dot} The '{shortcut_name}' shortcut in the '{lakehouse}' within the '{workspace}' workspace has been deleted."
- )
- else:
- raise ValueError(f"{icons.red_dot} The '{shortcut_name}' has not been deleted.")
+ if response.status_code != 200:
+ raise FabricHTTPException(response)
+ print(
+ f"{icons.green_dot} The '{shortcut_name}' shortcut in the '{lakehouse}' lakehouse within the '{workspace}' workspace has been deleted."
+ )
diff --git a/src/sempy_labs/report/_report_functions.py b/src/sempy_labs/report/_report_functions.py
index e46bd8df..b14c6ca9 100644
--- a/src/sempy_labs/report/_report_functions.py
+++ b/src/sempy_labs/report/_report_functions.py
@@ -23,6 +23,7 @@
 from typing import Any, List, Optional, Union
 from sempy._utils._log import log
 import sempy_labs._icons as icons
+from sempy.fabric.exceptions import FabricHTTPException


 def get_report_json(
@@ -499,15 +500,12 @@ def clone_report(
 f"/v1.0/myorg/groups/{workspace_id}/reports/{reportId}/Clone", json=request_body
 )

- if response.status_code == 200:
- print(
- f"{icons.green_dot} The '{report}' report has been successfully cloned as the '{cloned_report}' report within the"
- f" '{target_workspace}' workspace using the '{target_dataset}' semantic model."
- )
- else:
- raise ValueError(
- f"{icons.red_dot} POST request failed with status code: {response.status_code}"
- )
+ if response.status_code != 200:
+ raise FabricHTTPException(response)
+ print(
+ f"{icons.green_dot} The '{report}' report has been successfully cloned as the '{cloned_report}' report within the"
+ f" '{target_workspace}' workspace using the '{target_dataset}' semantic model."
+ )


 def launch_report(report: str, workspace: Optional[str] = None):
diff --git a/src/sempy_labs/report/_report_rebind.py b/src/sempy_labs/report/_report_rebind.py
index ca96277e..afaa5de5 100644
--- a/src/sempy_labs/report/_report_rebind.py
+++ b/src/sempy_labs/report/_report_rebind.py
@@ -3,6 +3,7 @@
 from typing import Optional, List
 from sempy._utils._log import log
 import sempy_labs._icons as icons
+from sempy.fabric.exceptions import FabricHTTPException


 @log
@@ -60,14 +61,11 @@ def report_rebind(
 json=request_body,
 )

- if response.status_code == 200:
- print(
- f"{icons.green_dot} The '{rpt}' report has been successfully rebinded to the '{dataset}' semantic model."
- )
- else:
- raise ValueError(
- f"{icons.red_dot} The '{rpt}' report within the '{report_workspace}' workspace failed to rebind to the '{dataset}' semantic model within the '{dataset_workspace}' workspace."
- )
+ if response.status_code != 200:
+ raise FabricHTTPException(response)
+ print(
+ f"{icons.green_dot} The '{rpt}' report has been successfully rebound to the '{dataset}' semantic model."
+ )


@log
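
Note: every hunk above applies the same guard-clause refactor: raise FabricHTTPException on any non-200 response instead of printing an icon-prefixed status message, then run the success path unindented. A minimal sketch of the pattern, assuming FabricRestClient and FabricHTTPException behave as they are used in this patch (the function name and endpoint below are illustrative, not part of the patch):

import sempy.fabric as fabric
from sempy.fabric.exceptions import FabricHTTPException


def example_get(workspace_id: str):
    # Guard clause first: surface the full HTTP response via an exception
    # instead of printing the status code and implicitly returning None.
    client = fabric.FabricRestClient()
    response = client.get(f"/v1/workspaces/{workspace_id}")  # illustrative endpoint
    if response.status_code != 200:
        raise FabricHTTPException(response)
    # Success path stays at the top indentation level, so callers can
    # rely on a populated return value or catch the exception.
    return response.json()

This keeps the happy path flat and lets callers handle failures with try/except FabricHTTPException rather than parsing printed messages.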