
Merge branch 'm-kovalsky/fixmarkasdatetable'
m-kovalsky committed Oct 23, 2024
2 parents 2e0a6bd + 0dace7d commit b961377
Showing 16 changed files with 108 additions and 65 deletions.
2 changes: 1 addition & 1 deletion notebooks/Migration to Direct Lake.ipynb

Large diffs are not rendered by default.

23 changes: 20 additions & 3 deletions src/sempy_labs/_generate_semantic_model.py
@@ -64,11 +64,28 @@ def create_blank_semantic_model(
         "name": '{dataset}',
         "compatibilityLevel": {compatibility_level},
         "model": {{
-            "culture": "en-US",
-            "defaultPowerBIDataSourceVersion": "powerBI_V3"
+            "cultures": [
+                {{
+                    "name": "en-US",
+                    "linguisticMetadata": {{
+                        "content": {{
+                            "Version": "1.0.0",
+                            "Language": "en-US"
+                        }},
+                        "contentType": "json"
+                    }}
+                }}
+            ],
+            "collation": "Latin1_General_100_BIN2_UTF8",
+            "dataAccessOptions": {{
+                "legacyRedirects": true,
+                "returnErrorValuesAsNull": true,
+            }},
+            "defaultPowerBIDataSourceVersion": "powerBI_V3",
+            "sourceQueryCulture": "en-US",
             }}
         }}
     }}
 }}
 }}
 """

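For context, a minimal usage sketch of the function this hunk touches. The dataset and workspace names are placeholders, and the read-back attributes are standard TOM properties assumed to surface through the library's wrapper: after this change, a blank model is provisioned with an explicit en-US culture entry and a binary collation.

import sempy_labs as labs
from sempy_labs.tom import connect_semantic_model

labs.create_blank_semantic_model("Blank Model", workspace="My Workspace")

# Read back the settings the new TMSL stamps on the model.
with connect_semantic_model(
    dataset="Blank Model", readonly=True, workspace="My Workspace"
) as tom:
    print(tom.model.Collation)                   # Latin1_General_100_BIN2_UTF8
    print([c.Name for c in tom.model.Cultures])  # ['en-US']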
4 changes: 4 additions & 0 deletions src/sempy_labs/_icons.py
@@ -142,3 +142,7 @@
     "RunId": data_type_long,
     "Timestamp": data_type_timestamp,
 }
+
+sll_ann_name = "PBI_ProTooling"
+sll_prefix = "SLL_"
+sll_tags = []
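These three names form a small module-level telemetry registry: the hunks below append one feature tag per public operation. This diff never shows how the tags are consumed, so the combining step in the sketch below is an assumption, not the library's actual writer.

import json

# Standalone mirror of the registry above, so the sketch runs by itself.
sll_ann_name = "PBI_ProTooling"
sll_prefix = "SLL_"
sll_tags = []

# What a function such as migrate_tables_columns_to_semantic_model now does:
sll_tags.append("DirectLakeMigration")

# Assumed writer: fold all prefixed tags into one annotation value.
annotation_value = json.dumps([sll_prefix + tag for tag in sll_tags])
print(sll_ann_name, annotation_value)  # PBI_ProTooling ["SLL_DirectLakeMigration"]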
1 change: 1 addition & 0 deletions src/sempy_labs/_model_bpa.py
@@ -113,6 +113,7 @@ def map_language(language, language_list):
     )
 
     if extended:
+        icons.sll_tags.append("ModelBPAExtended")
         with connect_semantic_model(
             dataset=dataset, workspace=workspace, readonly=False
         ) as tom:
2 changes: 2 additions & 0 deletions src/sempy_labs/_translations.py
@@ -55,6 +55,8 @@ def _clean_text(text, exclude_chars):
         columns=["Object Type", "Name", "Description", "Display Folder"]
     )
 
+    icons.sll_tags.append("TranslateSemanticModel")
+
     with connect_semantic_model(
         dataset=dataset, readonly=False, workspace=workspace
     ) as tom:
2 changes: 2 additions & 0 deletions src/sempy_labs/directlake/_dl_helper.py
@@ -130,6 +130,8 @@ def generate_direct_lake_semantic_model(
 
     dfLT = get_lakehouse_tables(lakehouse=lakehouse, workspace=lakehouse_workspace)
 
+    icons.sll_tags.append("GenerateDLModel")
+
     # Validate lakehouse tables
     for t in lakehouse_tables:
         if t not in dfLT["Table Name"].values:
8 changes: 7 additions & 1 deletion src/sempy_labs/directlake/_get_shared_expression.py
@@ -30,7 +30,7 @@ def get_shared_expression(
     workspace = fabric.resolve_workspace_name(workspace)
     if lakehouse is None:
         lakehouse_id = fabric.get_lakehouse_id()
-        lakehouse = resolve_lakehouse_name(lakehouse_id)
+        lakehouse = resolve_lakehouse_name(lakehouse_id, workspace)
 
     dfL = list_lakehouses(workspace=workspace)
     lakeDetail = dfL[dfL["Lakehouse Name"] == lakehouse]
@@ -39,6 +39,12 @@
     sqlepid = lakeDetail["SQL Endpoint ID"].iloc[0]
     provStatus = lakeDetail["SQL Endpoint Provisioning Status"].iloc[0]
 
+    parts = sqlEPCS.split(".", 1)
+    if parts:
+        parts[0] = parts[0].upper()
+
+    sqlEPCS = ".".join(parts)
+
     if provStatus == "InProgress":
         raise ValueError(
             f"{icons.red_dot} The SQL Endpoint for the '{lakehouse}' lakehouse within the '{workspace}' workspace has not yet been provisioned. Please wait until it has been provisioned."
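Isolated below, the new normalization: only the server segment of the SQL endpoint connection string (everything before the first dot) is uppercased; the domain is left alone. The sample value is made up.

sqlEPCS = "abc123def.datawarehouse.fabric.microsoft.com"  # placeholder endpoint

parts = sqlEPCS.split(".", 1)  # split once: ['abc123def', 'datawarehouse.fabric.microsoft.com']
if parts:
    parts[0] = parts[0].upper()
sqlEPCS = ".".join(parts)

print(sqlEPCS)  # ABC123DEF.datawarehouse.fabric.microsoft.com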
@@ -59,13 +59,7 @@ def show_unsupported_direct_lake_objects(
     dfR["From Column Data Type"] = merged_from["Data Type"]
     dfR["To Column Data Type"] = merged_to["Data Type"]
 
-    dfR_filt = dfR[
-        (
-            (dfR["From Column Data Type"] == "DateTime")
-            | (dfR["To Column Data Type"] == "DateTime")
-        )
-        | (dfR["From Column Data Type"] != dfR["To Column Data Type"])
-    ]
+    dfR_filt = dfR[(dfR["From Column Data Type"] != dfR["To Column Data Type"])]
     r = dfR_filt[
         [
             "From Table",
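The rewritten filter drops the DateTime clauses, consistent with the rest of this branch ('fixmarkasdatetable'): relationships on DateTime columns are no longer treated as unsupported in Direct Lake, so only mismatched column data types are reported. A toy reproduction with made-up data:

import pandas as pd

dfR = pd.DataFrame(
    {
        "From Table": ["Sales", "Sales"],
        "To Table": ["Date", "Product"],
        "From Column Data Type": ["DateTime", "Int64"],
        "To Column Data Type": ["DateTime", "String"],
    }
)

# The old filter would also have flagged the DateTime/DateTime row; the new
# one flags only the genuine type mismatch (Int64 vs. String).
dfR_filt = dfR[dfR["From Column Data Type"] != dfR["To Column Data Type"]]
print(dfR_filt[["From Table", "To Table"]])  # only the Sales -> Product row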
@@ -53,6 +53,8 @@ def update_direct_lake_model_lakehouse_connection(
             f"Therefore it cannot be used to support the '{dataset}' semantic model within the '{workspace}' workspace."
         )
 
+    icons.sll_tags.append("UpdateDLConnection")
+
     shEx = get_shared_expression(lakehouse, lakehouse_workspace)
 
     with connect_semantic_model(
5 changes: 3 additions & 2 deletions src/sempy_labs/migration/_migrate_calctables_to_lakehouse.py
@@ -282,6 +282,8 @@ def migrate_field_parameters(
     if new_dataset_workspace is None:
         new_dataset_workspace = workspace
 
+    icons.sll_tags.append("DirectLakeMigration")
+
     dfC = fabric.list_columns(dataset=dataset, workspace=workspace)
     dfC["Column Object"] = format_dax_object_name(dfC["Table Name"], dfC["Column Name"])
     dfP = fabric.list_partitions(dataset=dataset, workspace=workspace)
@@ -337,6 +339,7 @@ def dyn_connect():
         try:
             par = TOM.Partition()
             par.Name = tName
+            par.Mode = TOM.ModeType.Import
 
             parSource = TOM.CalculatedPartitionSource()
             par.Source = parSource
@@ -345,7 +348,6 @@
             tbl = TOM.Table()
             tbl.Name = tName
             tbl.LineageTag = generate_guid()
-            tbl.SourceLineageTag = generate_guid()
             tbl.Partitions.Add(par)
 
             columns = ["Value1", "Value2", "Value3"]
@@ -356,7 +358,6 @@
                 col.SourceColumn = "[" + colName + "]"
                 col.DataType = TOM.DataType.String
                 col.LineageTag = generate_guid()
-                col.SourceLineageTag = generate_guid()
 
                 tbl.Columns.Add(col)
 
@@ -64,6 +64,8 @@ def migrate_calc_tables_to_semantic_model(
         lakehouse_id = fabric.get_lakehouse_id()
         lakehouse = resolve_lakehouse_name(lakehouse_id, lakehouse_workspace)
 
+    icons.sll_tags.append("DirectLakeMigration")
+
     # Get calc tables but not field parameters
     dfP = fabric.list_partitions(dataset=dataset, workspace=workspace)
     dfP_filt = dfP[(dfP["Source Type"] == "Calculated")]
@@ -52,6 +52,8 @@ def migrate_model_objects_to_semantic_model(
     if new_dataset_workspace is None:
         new_dataset_workspace = workspace
 
+    icons.sll_tags.append("DirectLakeMigration")
+
     dfT = fabric.list_tables(dataset=dataset, workspace=workspace)
     dfC = fabric.list_columns(dataset=dataset, workspace=workspace)
     dfM = fabric.list_measures(dataset=dataset, workspace=workspace)
@@ -334,14 +336,6 @@ def dyn_connect2():
                     print(
                         f"{icons.warning} The {relName} relationship was not created as Direct Lake does not support relationships based on columns with different data types."
                     )
-                # Direct Lake using DateTime columns
-                elif isDirectLake and (
-                    r.FromColumn.DataType == TOM.DataType.DateTime
-                    or r.ToColumn.DataType == TOM.DataType.DateTime
-                ):
-                    print(
-                        f"{icons.red_dot} The {relName} relationship was not created as Direct Lake does not support relationships based on columns of DateTime data type."
-                    )
                 # Columns do not exist in the new semantic model
                 elif not any(
                     c.Name == r.FromColumn.Name and c.Parent.Name == r.FromTable.Name
@@ -61,6 +61,8 @@ def migrate_tables_columns_to_semantic_model(
         lakehouse_id = fabric.get_lakehouse_id()
         lakehouse = resolve_lakehouse_name(lakehouse_id, lakehouse_workspace)
 
+    icons.sll_tags.append("DirectLakeMigration")
+
     # Check that lakehouse is attached to the notebook
     if not lakehouse_attached() and (lakehouse is None and lakehouse_workspace is None):
         raise ValueError(
@@ -106,9 +108,24 @@ def dyn_connect():
     with connect_semantic_model(
         dataset=new_dataset, readonly=False, workspace=new_dataset_workspace
     ) as tom:
+
+        # Additional updates
+        tom.set_annotation(
+            object=tom.model, name="__PBI_TimeIntelligenceEnabled", value="0"
+        )
+        tom.set_annotation(
+            object=tom.model, name="PBI_QueryOrder", value='["DatabaseQuery"]'
+        )
+
+        # Begin migration
         if not any(e.Name == "DatabaseQuery" for e in tom.model.Expressions):
             tom.add_expression("DatabaseQuery", expression=shEx)
             print(f"{icons.green_dot} The 'DatabaseQuery' expression has been added.")
+            tom.set_annotation(
+                object=tom.model.Expressions["DatabaseQuery"],
+                name="PBI_IncludeFutureArtifacts",
+                value="False",
+            )
 
         for i, r in dfT_filt.iterrows():
             tName = r["Name"]
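The migration now stamps the new model with standard Power BI annotations (time intelligence disabled, query order, and no future artifacts on the shared expression). A read-back sketch with placeholder dataset/workspace names; `get_annotation_value` is assumed to be the wrapper's reader counterpart to the `set_annotation` calls above.

from sempy_labs.tom import connect_semantic_model

with connect_semantic_model(
    dataset="MigratedModel", readonly=True, workspace="My Workspace"
) as tom:
    print(tom.get_annotation_value(object=tom.model, name="PBI_QueryOrder"))
    # expected: ["DatabaseQuery"]
    print(tom.get_annotation_value(object=tom.model, name="__PBI_TimeIntelligenceEnabled"))
    # expected: 0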
2 changes: 2 additions & 0 deletions src/sempy_labs/migration/_migration_validation.py
@@ -46,6 +46,8 @@ def migration_validation(
     if new_dataset_workspace is None:
         new_dataset_workspace = workspace
 
+    icons.sll_tags.append("DirectLakeMigration")
+
     dfA = list_semantic_model_objects(dataset=dataset, workspace=workspace)
     dfB = list_semantic_model_objects(
         dataset=new_dataset, workspace=new_dataset_workspace
1 change: 1 addition & 0 deletions src/sempy_labs/migration/_refresh_calc_tables.py
@@ -26,6 +26,7 @@ def refresh_calc_tables(dataset: str, workspace: Optional[str] = None):
 
     spark = SparkSession.builder.getOrCreate()
     workspace = fabric.resolve_workspace_name(workspace)
+    icons.sll_tags.append("DirectLakeMigration")
 
     @retry(
         sleep_time=1,
