
Commit

Merge branch 'm-kovalsky/vertipaqlakehouse'
m-kovalsky committed Jul 26, 2024
2 parents 24b1ad4 + b315252 commit 320c5c7
Showing 3 changed files with 160 additions and 5 deletions.
5 changes: 3 additions & 2 deletions src/sempy_labs/_model_bpa.py
@@ -201,11 +201,12 @@ def run_model_bpa(
         delta_table_name = "modelbparesults"

         lakehouse_id = fabric.get_lakehouse_id()
+        lake_workspace = fabric.get_workspace_id()
         lakehouse = resolve_lakehouse_name(
-            lakehouse_id=lakehouse_id, workspace=workspace
+            lakehouse_id=lakehouse_id, workspace=lake_workspace
         )

-        lakeT = get_lakehouse_tables(lakehouse=lakehouse, workspace=workspace)
+        lakeT = get_lakehouse_tables(lakehouse=lakehouse, workspace=lake_workspace)
         lakeT_filt = lakeT[lakeT["Table Name"] == delta_table_name]

         dfExport["Severity"].replace("⚠️", "Warning", inplace=True)
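The diff above switches the lakehouse lookup from the semantic model's workspace to the workspace the notebook and its attached lakehouse run in. A minimal sketch of the resulting pattern, assuming a default lakehouse is attached to the notebook; the import paths shown here are assumptions, mirroring how resolve_lakehouse_name and get_lakehouse_tables are used inside this module:

import sempy.fabric as fabric
from sempy_labs import resolve_lakehouse_name
from sempy_labs.lakehouse import get_lakehouse_tables

# Identify the attached lakehouse and the workspace the notebook itself runs in.
lakehouse_id = fabric.get_lakehouse_id()
lake_workspace = fabric.get_workspace_id()

# Resolve the lakehouse name within that workspace and list its Delta tables.
lakehouse = resolve_lakehouse_name(lakehouse_id=lakehouse_id, workspace=lake_workspace)
lake_tables = get_lakehouse_tables(lakehouse=lakehouse, workspace=lake_workspace)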
155 changes: 154 additions & 1 deletion src/sempy_labs/_translations.py
@@ -1,3 +1,4 @@
+import sempy
 import pandas as pd
 from typing import List, Optional, Union
 from sempy._utils._log import log
@@ -10,7 +11,7 @@ def translate_semantic_model(
     languages: Union[str, List[str]],
     exclude_characters: Optional[str] = None,
     workspace: Optional[str] = None,
-):
+) -> pd.DataFrame:
     """
     Translates names, descriptions, display folders for all objects in a semantic model.
@@ -29,6 +30,8 @@ def translate_semantic_model(
     Returns
     -------
+    pandas.DataFrame
+        Shows a pandas dataframe which displays all of the translations in the semantic model.
     """

@@ -302,3 +305,153 @@ def set_translation_if_exists(
                     lang,
                     i,
                 )
+    result = pd.DataFrame(
+        columns=[
+            "Language",
+            "Object Type",
+            "Table Name",
+            "Object Name",
+            "Translated Object Name",
+            "Description",
+            "Translated Description",
+            "Display Folder",
+            "Translated Display Folder",
+        ]
+    )
+    with connect_semantic_model(
+        dataset=dataset, readonly=True, workspace=workspace
+    ) as tom:
+
+        sempy.fabric._client._utils._init_analysis_services()
+        import Microsoft.AnalysisServices.Tabular as TOM
+
+        for c in tom.model.Cultures:
+            for tr in c.ObjectTranslations:
+                oType = str(tr.Object.ObjectType)
+                oName = tr.Object.Name
+                tValue = tr.Value
+                prop = str(tr.Property)
+
+                if tr.Object.ObjectType == TOM.ObjectType.Table:
+                    desc = tom.model.Tables[oName].Description
+                    new_data = {
+                        "Language": c.Name,
+                        "Table Name": oName,
+                        "Object Name": oName,
+                        "Object Type": oType,
+                        "Description": desc,
+                    }
+                    result = pd.concat(
+                        [result, pd.DataFrame(new_data, index=[0])], ignore_index=True
+                    )
+                    condition = (
+                        (result["Language"] == c.Name)
+                        & (result["Table Name"] == oName)
+                        & (result["Object Name"] == oName)
+                        & (result["Object Type"] == oType)
+                    )
+                elif tr.Object.ObjectType == TOM.ObjectType.Level:
+                    hierarchyName = tr.Object.Parent.Name
+                    tName = tr.Object.Parent.Parent.Name
+                    levelName = "'" + hierarchyName + "'[" + oName + "]"
+                    desc = (
+                        tom.model.Tables[tName]
+                        .Hierarchies[hierarchyName]
+                        .Levels[oName]
+                        .Description
+                    )
+                    new_data = {
+                        "Language": c.Name,
+                        "Table Name": tName,
+                        "Object Name": levelName,
+                        "Object Type": oType,
+                        "Description": desc,
+                    }
+                    result = pd.concat(
+                        [result, pd.DataFrame(new_data, index=[0])], ignore_index=True
+                    )
+                    condition = (
+                        (result["Language"] == c.Name)
+                        & (result["Table Name"] == tName)
+                        & (result["Object Name"] == levelName)
+                        & (result["Object Type"] == oType)
+                    )
+                elif tr.Object.ObjectType == TOM.ObjectType.Column:
+                    tName = tr.Object.Table.Name
+                    desc = tom.model.Tables[tName].Columns[oName].Description
+                    display_folder = (
+                        tom.model.Tables[tName].Columns[oName].DisplayFolder
+                    )
+                    new_data = {
+                        "Language": c.Name,
+                        "Table Name": tName,
+                        "Object Name": oName,
+                        "Object Type": oType,
+                        "Description": desc,
+                        "Display Folder": display_folder,
+                    }
+                    result = pd.concat(
+                        [result, pd.DataFrame(new_data, index=[0])], ignore_index=True
+                    )
+                    condition = (
+                        (result["Language"] == c.Name)
+                        & (result["Table Name"] == tName)
+                        & (result["Object Name"] == oName)
+                        & (result["Object Type"] == oType)
+                    )
+                elif tr.Object.ObjectType == TOM.ObjectType.Measure:
+                    tName = tr.Object.Table.Name
+                    desc = tom.model.Tables[tName].Measures[oName].Description
+                    display_folder = (
+                        tom.model.Tables[tName].Measures[oName].DisplayFolder
+                    )
+                    new_data = {
+                        "Language": c.Name,
+                        "Table Name": tName,
+                        "Object Name": oName,
+                        "Object Type": oType,
+                        "Description": desc,
+                        "Display Folder": display_folder,
+                    }
+                    result = pd.concat(
+                        [result, pd.DataFrame(new_data, index=[0])], ignore_index=True
+                    )
+                    condition = (
+                        (result["Language"] == c.Name)
+                        & (result["Table Name"] == tName)
+                        & (result["Object Name"] == oName)
+                        & (result["Object Type"] == oType)
+                    )
+                elif tr.Object.ObjectType == TOM.ObjectType.Hierarchy:
+                    tName = tr.Object.Table.Name
+                    desc = tom.model.Tables[tName].Hierarchies[oName].Description
+                    display_folder = (
+                        tom.model.Tables[tName].Hierarchies[oName].DisplayFolder
+                    )
+                    new_data = {
+                        "Language": c.Name,
+                        "Table Name": tName,
+                        "Object Name": oName,
+                        "Object Type": oType,
+                        "Description": desc,
+                        "Display Folder": display_folder,
+                    }
+                    result = pd.concat(
+                        [result, pd.DataFrame(new_data, index=[0])], ignore_index=True
+                    )
+                    condition = (
+                        (result["Language"] == c.Name)
+                        & (result["Table Name"] == tName)
+                        & (result["Object Name"] == oName)
+                        & (result["Object Type"] == oType)
+                    )
+
+                if prop == "Caption":
+                    result.loc[condition, "Translated Object Name"] = tValue
+                elif prop == "Description":
+                    result.loc[condition, "Translated Description"] = tValue
+                else:
+                    result.loc[condition, "Translated Display Folder"] = tValue
+    result.fillna("", inplace=True)

+    return result
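With the new return annotation and the block above, translate_semantic_model now builds and returns a DataFrame of every translation found in the model. A hedged usage sketch, assuming the function is re-exported from the package root and using placeholder dataset and language values:

import sempy_labs as labs

# Placeholder model name and target cultures; exclude_characters and workspace keep their defaults.
df = labs.translate_semantic_model(
    dataset="AdventureWorks",
    languages=["fr-FR", "de-DE"],
)

# One row per object translation; fields with no translation come back as empty strings.
print(df[["Language", "Object Type", "Table Name", "Object Name", "Translated Object Name"]].head())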
5 changes: 3 additions & 2 deletions src/sempy_labs/_vertipaq.py
@@ -452,12 +452,13 @@ def vertipaq_analyzer(
         spark = SparkSession.builder.getOrCreate()

         lakehouse_id = fabric.get_lakehouse_id()
+        lake_workspace = fabric.resolve_workspace_name()
         lakehouse = resolve_lakehouse_name(
-            lakehouse_id=lakehouse_id, workspace=workspace
+            lakehouse_id=lakehouse_id, workspace=lake_workspace
         )
         lakeTName = "vertipaq_analyzer_model"

-        lakeT = get_lakehouse_tables(lakehouse=lakehouse, workspace=workspace)
+        lakeT = get_lakehouse_tables(lakehouse=lakehouse, workspace=lake_workspace)
         lakeT_filt = lakeT[lakeT["Table Name"] == lakeTName]

         query = f"SELECT MAX(RunId) FROM {lakehouse}.{lakeTName}"
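The unchanged query at the end of this hunk reads the latest RunId from the previously exported vertipaq_analyzer_model table. A rough sketch of how such a query might be executed and incremented for the next export, assuming a Fabric Spark session and that the table already exists; the lakehouse name is a placeholder:

from pyspark.sql import SparkSession

spark = SparkSession.builder.getOrCreate()
lakehouse = "MyLakehouse"  # placeholder lakehouse name
lakeTName = "vertipaq_analyzer_model"

# Highest RunId written so far; MAX returns None on an empty table.
max_run_id = spark.sql(f"SELECT MAX(RunId) FROM {lakehouse}.{lakeTName}").collect()[0][0]
run_id = (max_run_id or 0) + 1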
