added list_semantic_model_errors #357

Merged
2 changes: 2 additions & 0 deletions src/sempy_labs/__init__.py
@@ -205,6 +205,7 @@
list_lakehouses,
list_sql_endpoints,
update_item,
list_semantic_model_errors,
)
from sempy_labs._helper_functions import (
convert_to_friendly_case,
@@ -458,4 +459,5 @@
"update_vnet_gateway",
"update_on_premises_gateway",
"get_semantic_model_definition",
"list_semantic_model_errors",
]
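
Because the function is both imported in __init__.py and added to __all__, it becomes available at the package root. A quick illustrative check (not part of this diff; assumes a build of sempy-labs that includes this change):

    import sempy_labs

    # The new symbol is re-exported at package level, so both forms work.
    assert "list_semantic_model_errors" in sempy_labs.__all__
    from sempy_labs import list_semantic_model_errors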
104 changes: 104 additions & 0 deletions src/sempy_labs/_list_functions.py
@@ -1575,3 +1575,107 @@ def list_semantic_model_object_report_usage(
final_df.reset_index(drop=True, inplace=True)

return final_df


def list_semantic_model_errors(
    dataset: str | UUID, workspace: Optional[str | UUID] = None
) -> pd.DataFrame:
"""
Shows a list of a semantic model's errors and their error messages (if they exist).

Parameters
----------
dataset : str | UUID
Name or ID of the semantic model.
workspace : str | UUID, default=None
The Fabric workspace name or ID.
Defaults to None which resolves to the workspace of the attached lakehouse
or if no lakehouse attached, resolves to the workspace of the notebook.

Returns
-------
pandas.DataFrame
A pandas dataframe showing a list of the errors and error messages for a given semantic model.
"""

from sempy_labs.tom import connect_semantic_model

(workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
(dataset_name, dataset_id) = resolve_dataset_name_and_id(
dataset, workspace=workspace_id
)

error_rows = []

with connect_semantic_model(
dataset=dataset_id, workspace=workspace_id, readonly=True
) as tom:
# Define mappings of TOM objects to object types and attributes
error_checks = [
("Column", tom.all_columns, lambda o: o.ErrorMessage),
("Partition", tom.all_partitions, lambda o: o.ErrorMessage),
(
"Partition - Data Coverage Expression",
tom.all_partitions,
lambda o: (
o.DataCoverageDefinition.ErrorMessage
if o.DataCoverageDefinition
else ""
),
),
("Row Level Security", tom.all_rls, lambda o: o.ErrorMessage),
("Calculation Item", tom.all_calculation_items, lambda o: o.ErrorMessage),
("Measure", tom.all_measures, lambda o: o.ErrorMessage),
(
"Measure - Detail Rows Expression",
tom.all_measures,
lambda o: (
o.DetailRowsDefinition.ErrorMessage
if o.DetailRowsDefinition
else ""
),
),
(
"Measure - Format String Expression",
tom.all_measures,
lambda o: (
o.FormatStringDefinition.ErrorMessage
if o.FormatStringDefinition
else ""
),
),
(
"Calculation Group - Multiple or Empty Selection Expression",
tom.all_calculation_groups,
lambda o: (
o.CalculationGroup.MultipleOrEmptySelectionExpression.ErrorMessage
if o.CalculationGroup.MultipleOrEmptySelectionExpression
else ""
),
),
(
"Calculation Group - No Selection Expression",
tom.all_calculation_groups,
lambda o: (
o.CalculationGroup.NoSelectionExpression.ErrorMessage
if o.CalculationGroup.NoSelectionExpression
else ""
),
),
]

# Iterate over all error checks
for object_type, getter, error_extractor in error_checks:
for obj in getter():
error_message = error_extractor(obj)
if error_message: # Only add rows if there's an error message
error_rows.append(
{
"Object Type": object_type,
"Table Name": obj.Parent.Name,
"Object Name": obj.Name,
"Error Message": error_message,
}
)

return pd.DataFrame(error_rows)
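
For reviewers, a minimal usage sketch of the new function as it would run in a Fabric notebook. The dataset and workspace names are placeholders; the column names match the dictionary keys built in the loop above:

    import sempy_labs as labs

    # One row per object whose error message (or expression-level error) is non-empty.
    errors_df = labs.list_semantic_model_errors(
        dataset="Sales Model",      # placeholder semantic model name
        workspace="My Workspace",   # placeholder workspace name
    )
    print(errors_df[["Object Type", "Table Name", "Object Name", "Error Message"]])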
2 changes: 1 addition & 1 deletion src/sempy_labs/_notebooks.py
@@ -145,7 +145,7 @@ def import_notebook_from_web(
elif len(dfI_filt) > 0 and overwrite:
print(f"{icons.info} Overwrite of notebooks is currently not supported.")
# update_notebook_definition(
# name=notebook_name, notebook_content=response.content, workspace=workspace
# name=notebook_name, notebook_content=response.content, workspace=workspace
# )
else:
raise ValueError(