Skip to content

Commit

Permalink
Added `list_capacities`; renamed `deploy_semantic_model` parameters and raised `FabricHTTPException` on pool API failures.
Browse files Browse the repository at this point in the history
  • Loading branch information
m-kovalsky committed Jul 22, 2024
1 parent caf9ea2 commit e7b69d6
Show file tree
Hide file tree
Showing 4 changed files with 77 additions and 33 deletions.
6 changes: 4 additions & 2 deletions src/sempy_labs/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,14 +9,15 @@
from sempy_labs._generate_semantic_model import (
create_blank_semantic_model,
create_semantic_model_from_bim,
# deploy_semantic_model,
deploy_semantic_model,
get_semantic_model_bim,
)
from sempy_labs._list_functions import (
delete_custom_pool,
list_semantic_model_objects,
list_shortcuts,
get_object_level_security,
list_capacities,
# list_annotations,
# list_columns,
list_dashboards,
Expand Down Expand Up @@ -108,7 +109,7 @@
"evaluate_dax_impersonation",
"create_blank_semantic_model",
"create_semantic_model_from_bim",
#'deploy_semantic_model',
"deploy_semantic_model",
"get_semantic_model_bim",
"get_object_level_security",
#'list_annotations',
Expand Down Expand Up @@ -179,4 +180,5 @@
"update_workspace_user",
"list_workspace_users",
"assign_workspace_to_dataflow_storage",
"list_capacities",
]
42 changes: 25 additions & 17 deletions src/sempy_labs/_generate_semantic_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -153,55 +153,63 @@ def conv_b64(file):


def deploy_semantic_model(
    source_dataset: str,
    source_workspace: Optional[str] = None,
    target_dataset: Optional[str] = None,
    target_workspace: Optional[str] = None,
    refresh_target_dataset: Optional[bool] = True,
):
    """
    Deploys a semantic model based on an existing semantic model.

    Parameters
    ----------
    source_dataset : str
        Name of the semantic model to deploy.
    source_workspace : str, default=None
        The Fabric workspace name.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    target_dataset : str, default=None
        Name of the new semantic model to be created.
        Defaults to None which resolves to the name of the source semantic model.
    target_workspace : str, default=None
        The Fabric workspace name in which the new semantic model will be deployed.
        Defaults to None which resolves to the source workspace.
    refresh_target_dataset : bool, default=True
        If set to True, this will initiate a full refresh of the target semantic model in the target workspace.

    Returns
    -------
    None
    """

    # Imported lazily to avoid a circular import at module load time.
    from sempy_labs import refresh_semantic_model

    source_workspace = fabric.resolve_workspace_name(source_workspace)

    if target_workspace is None:
        target_workspace = source_workspace

    if target_dataset is None:
        target_dataset = source_dataset

    # Deploying a model onto itself (same name, same workspace) would be a no-op
    # at best and a self-overwrite at worst; refuse early.
    if target_dataset == source_dataset and target_workspace == source_workspace:
        print(
            f"{icons.red_dot} The 'source_dataset' and 'target_dataset' parameters have the same value. And, the 'source_workspace' and 'target_workspace' "
            f"parameters have the same value. At least one of these must be different. Please update the parameters."
        )
        return

    bim = get_semantic_model_bim(dataset=source_dataset, workspace=source_workspace)

    create_semantic_model_from_bim(
        dataset=target_dataset, bim_file=bim, workspace=target_workspace
    )

    if refresh_target_dataset:
        refresh_semantic_model(dataset=target_dataset, workspace=target_workspace)


def get_semantic_model_bim(
dataset: str,
Expand Down
60 changes: 46 additions & 14 deletions src/sempy_labs/_list_functions.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@
from pyspark.sql import SparkSession
from typing import Optional
import sempy_labs._icons as icons
from sempy.fabric.exceptions import FabricHTTPException


def get_object_level_security(
Expand Down Expand Up @@ -1835,12 +1836,11 @@ def update_custom_pool(
f"/v1/workspaces/{workspace_id}/spark/pools", json=request_body
)

if response.status_code == 200:
print(
f"{icons.green_dot} The '{pool_name}' spark pool within the '{workspace}' workspace has been updated."
)
else:
raise ValueError(f"{icons.red_dot} {response.status_code}")
if response.status_code != 200:
raise FabricHTTPException(response)
print(
f"{icons.green_dot} The '{pool_name}' spark pool within the '{workspace}' workspace has been updated."
)


def delete_custom_pool(pool_name: str, workspace: Optional[str | None] = None):
Expand Down Expand Up @@ -1874,12 +1874,11 @@ def delete_custom_pool(pool_name: str, workspace: Optional[str | None] = None):
client = fabric.FabricRestClient()
response = client.delete(f"/v1/workspaces/{workspace_id}/spark/pools/{poolId}")

if response.status_code == 200:
print(
f"{icons.green_dot} The '{pool_name}' spark pool has been deleted from the '{workspace}' workspace."
)
else:
print(f"{icons.red_dot} {response.status_code}")
if response.status_code != 200:
raise FabricHTTPException(response)
print(
f"{icons.green_dot} The '{pool_name}' spark pool has been deleted from the '{workspace}' workspace."
)


def assign_workspace_to_capacity(capacity_name: str, workspace: Optional[str] = None):
Expand All @@ -1899,11 +1898,10 @@ def assign_workspace_to_capacity(capacity_name: str, workspace: Optional[str] =
-------
"""

# https://learn.microsoft.com/en-us/rest/api/fabric/core/workspaces/assign-to-capacity?tabs=HTTP
(workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

dfC = fabric.list_capacities()
dfC_filt = dfC[dfC["Name"] == capacity_name]
dfC_filt = dfC[dfC["Display Name"] == capacity_name]
capacity_id = dfC_filt["Id"].iloc[0]

request_body = {"capacityId": capacity_id}
Expand Down Expand Up @@ -2315,3 +2313,37 @@ def assign_workspace_to_dataflow_storage(
)
else:
print(f"{icons.red_dot} {response.status_code}")


def list_capacities() -> pd.DataFrame:
    """
    Shows the capacities and their properties.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing the capacities and their properties
        (Id, Display Name, Sku, Region, State, Admins).
    """

    columns = ["Id", "Display Name", "Sku", "Region", "State", "Admins"]

    client = fabric.PowerBIRestClient()
    response = client.get("/v1.0/myorg/capacities")

    rows = []
    for i in response.json().get("value", []):
        rows.append(
            {
                # Default to "" (not {}) so .lower() cannot raise AttributeError
                # when the 'id' key is missing from the API response.
                "Id": i.get("id", "").lower(),
                "Display Name": i.get("displayName", ""),
                "Sku": i.get("sku", ""),
                "Region": i.get("region", ""),
                "State": i.get("state", ""),
                # The admins list is stored as a single cell per capacity.
                "Admins": i.get("admins", []),
            }
        )

    # Build the DataFrame once from the collected rows instead of calling
    # pd.concat per iteration (which is quadratic in the number of capacities).
    return pd.DataFrame(rows, columns=columns)
2 changes: 2 additions & 0 deletions src/sempy_labs/_model_bpa_rules.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
import sempy
import sempy.fabric as fabric
import pandas as pd
import re
Expand Down Expand Up @@ -30,6 +31,7 @@ def model_bpa_rules(
A pandas dataframe containing the default rules for the run_model_bpa function.
"""

sempy.fabric._client._utils._init_analysis_services()
import Microsoft.AnalysisServices.Tabular as TOM

workspace = fabric.resolve_workspace_name(workspace)
Expand Down

0 comments on commit e7b69d6

Please sign in to comment.