
Commit

fix init functions
Markus Cozowicz committed Jun 10, 2024
1 parent 5289488 commit 01f708b
Showing 11 changed files with 163 additions and 175 deletions.
159 changes: 76 additions & 83 deletions sempy_labs/__init__.py
@@ -1,39 +1,38 @@
from sempy_labs._clear_cache import clear_cache
#from sempy_labs._connections import (
#create_connection_cloud,
#create_connection_vnet,
#create_connection_on_prem
#)
from sempy_labs._dax import (
run_dax
)

# from sempy_labs._connections import (
# create_connection_cloud,
# create_connection_vnet,
# create_connection_on_prem
# )
from sempy_labs._dax import run_dax
from sempy_labs._generate_semantic_model import (
create_blank_semantic_model,
create_semantic_model_from_bim,
#deploy_semantic_model,
get_semantic_model_bim
# deploy_semantic_model,
get_semantic_model_bim,
)
from sempy_labs._list_functions import (
get_object_level_security,
#list_annotations,
#list_columns,
# list_annotations,
# list_columns,
list_dashboards,
list_dataflow_storage_accounts,
#list_datamarts,
#list_datapipelines,
#list_eventstreams,
#list_kpis,
#list_kqldatabases,
#list_kqlquerysets,
# list_datamarts,
# list_datapipelines,
# list_eventstreams,
# list_kpis,
# list_kqldatabases,
# list_kqlquerysets,
list_lakehouses,
#list_mirroredwarehouses,
#list_mlexperiments,
#list_mlmodels,
#list_relationships,
#list_sqlendpoints,
#list_tables,
# list_mirroredwarehouses,
# list_mlexperiments,
# list_mlmodels,
# list_relationships,
# list_sqlendpoints,
# list_tables,
list_warehouses,
#list_workspace_role_assignments,
# list_workspace_role_assignments,
create_warehouse,
update_item,
)
@@ -50,109 +49,103 @@
resolve_dataset_id,
resolve_dataset_name,
resolve_report_id,
resolve_report_name,
#language_validate
)
from sempy_labs._model_auto_build import (
model_auto_build
)
from sempy_labs._model_bpa import (
model_bpa_rules,
run_model_bpa
resolve_report_name,
# language_validate
)
from sempy_labs._model_auto_build import model_auto_build
from sempy_labs._model_bpa import model_bpa_rules, run_model_bpa
from sempy_labs._model_dependencies import (
measure_dependency_tree,
get_measure_dependencies,
get_model_calc_dependencies
get_model_calc_dependencies,
)
from sempy_labs._one_lake_integration import (
export_model_to_onelake,
)
#from sempy_labs._query_scale_out import (

# from sempy_labs._query_scale_out import (
# qso_sync,
# qso_sync_status,
# set_qso,
# list_qso_settings,
# disable_qso,
# set_semantic_model_storage_format,
# set_workspace_default_storage_format,
#)
# )
from sempy_labs._refresh_semantic_model import (
refresh_semantic_model,
cancel_dataset_refresh
)
from sempy_labs._translations import (
translate_semantic_model
cancel_dataset_refresh,
)
from sempy_labs._translations import translate_semantic_model
from sempy_labs._vertipaq import (
vertipaq_analyzer,
#visualize_vertipaq,
import_vertipaq_analyzer
# visualize_vertipaq,
import_vertipaq_analyzer,
)

__all__ = [
'clear_cache',
#create_connection_cloud,
#create_connection_vnet,
#create_connection_on_prem,
'run_dax',
'create_blank_semantic_model',
'create_semantic_model_from_bim',
"clear_cache",
# create_connection_cloud,
# create_connection_vnet,
# create_connection_on_prem,
"run_dax",
"create_blank_semantic_model",
"create_semantic_model_from_bim",
#'deploy_semantic_model',
'get_semantic_model_bim',
'get_object_level_security',
"get_semantic_model_bim",
"get_object_level_security",
#'list_annotations',
#'list_columns',
'list_dashboards',
'list_dataflow_storage_accounts',
"list_dashboards",
"list_dataflow_storage_accounts",
#'list_datamarts',
#'list_datapipelines',
#'list_eventstreams',
#'list_kpis',
#'list_kqldatabases',
#'list_kqlquerysets',
'list_lakehouses',
"list_lakehouses",
#'list_mirroredwarehouses',
#'list_mlexperiments',
#'list_mlmodels',
#'list_relationships',
#'list_sqlendpoints',
#'list_tables',
'list_warehouses',
"list_warehouses",
#'list_workspace_role_assignments',
'create_warehouse',
'update_item',
'create_abfss_path',
'format_dax_object_name',
'create_relationship_name',
'save_as_delta_table',
'generate_embedded_filter',
'get_direct_lake_sql_endpoint',
'resolve_lakehouse_id',
'resolve_lakehouse_name',
'resolve_dataset_id',
'resolve_dataset_name',
'resolve_report_id',
'resolve_report_name',
"create_warehouse",
"update_item",
"create_abfss_path",
"format_dax_object_name",
"create_relationship_name",
"save_as_delta_table",
"generate_embedded_filter",
"get_direct_lake_sql_endpoint",
"resolve_lakehouse_id",
"resolve_lakehouse_name",
"resolve_dataset_id",
"resolve_dataset_name",
"resolve_report_id",
"resolve_report_name",
#'language_validate',
'model_auto_build',
'model_bpa_rules',
'run_model_bpa',
'measure_dependency_tree',
'get_measure_dependencies',
'get_model_calc_dependencies',
'export_model_to_onelake',
"model_auto_build",
"model_bpa_rules",
"run_model_bpa",
"measure_dependency_tree",
"get_measure_dependencies",
"get_model_calc_dependencies",
"export_model_to_onelake",
#'qso_sync',
#'qso_sync_status',
#'set_qso',
#'list_qso_settings',
#'disable_qso',
#'set_semantic_model_storage_format',
#'set_workspace_default_storage_format',
'refresh_semantic_model',
'cancel_dataset_refresh',
'translate_semantic_model',
'vertipaq_analyzer',
"refresh_semantic_model",
"cancel_dataset_refresh",
"translate_semantic_model",
"vertipaq_analyzer",
#'visualize_vertipaq',
'import_vertipaq_analyzer'
]
"import_vertipaq_analyzer",
]
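
Several exports remain commented out in both the import block and __all__ above. A quick consistency check, purely illustrative and not part of this commit (it assumes the sempy_labs package is installed in the current environment), can confirm that every remaining __all__ entry still resolves to an importable attribute:

import importlib

mod = importlib.import_module("sempy_labs")  # assumes sempy_labs is installed
# Collect any __all__ entries that do not resolve to an attribute of the package.
missing = [name for name in getattr(mod, "__all__", []) if not hasattr(mod, name)]
print(missing or "all __all__ entries resolve")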
2 changes: 2 additions & 0 deletions sempy_labs/_generate_semantic_model.py
@@ -7,6 +7,7 @@
from sempy_labs.lakehouse._lakehouse import lakehouse_attached
import sempy_labs._icons as icons


def create_blank_semantic_model(
dataset: str,
compatibility_level: Optional[int] = 1605,
@@ -201,6 +202,7 @@ def deploy_semantic_model(
dataset=new_dataset, bim_file=bim, workspace=new_dataset_workspace
)


def get_semantic_model_bim(
dataset: str,
workspace: Optional[str] = None,
3 changes: 2 additions & 1 deletion sempy_labs/_helper_functions.py
@@ -483,6 +483,7 @@ def save_as_delta_table(
f"{icons.green_dot} The dataframe has been saved as the '{delta_table_name}' table in the '{lakehouse}' lakehouse within the '{workspace}' workspace."
)


def language_validate(language: str):
"""
Validates that the language specified exists within the supported languages.
@@ -517,4 +518,4 @@ def language_validate(language: str):
)
return

return lang
return lang
2 changes: 2 additions & 0 deletions sempy_labs/_model_dependencies.py
@@ -258,6 +258,8 @@ def get_model_calc_dependencies(dataset: str, workspace: Optional[str] = None):
df = df.drop(["Done"], axis=1)

return df


@log
def measure_dependency_tree(
dataset: str, measure_name: str, workspace: Optional[str] = None
2 changes: 1 addition & 1 deletion sempy_labs/_refresh_semantic_model.py
@@ -145,7 +145,7 @@ def cancel_dataset_refresh(
):
"""
Cancels the refresh of a semantic model which was executed via the `Enhanced Refresh API <https://learn.microsoft.com/power-bi/connect-data/asynchronous-refresh>`_
Parameters
----------
dataset : str
58 changes: 25 additions & 33 deletions sempy_labs/directlake/__init__.py
@@ -1,53 +1,45 @@
from sempy_labs.directlake._directlake_schema_compare import (
direct_lake_schema_compare
)
from sempy_labs.directlake._directlake_schema_sync import (
direct_lake_schema_sync
)
from sempy_labs.directlake._directlake_schema_compare import direct_lake_schema_compare
from sempy_labs.directlake._directlake_schema_sync import direct_lake_schema_sync
from sempy_labs.directlake._fallback import (
check_fallback_reason,
)
from sempy_labs.directlake._get_directlake_lakehouse import (
get_direct_lake_lakehouse
)
from sempy_labs.directlake._get_shared_expression import (
get_shared_expression
)
from sempy_labs.directlake._get_directlake_lakehouse import get_direct_lake_lakehouse
from sempy_labs.directlake._get_shared_expression import get_shared_expression
from sempy_labs.directlake._guardrails import (
get_direct_lake_guardrails,
get_sku_size,
get_directlake_guardrails_for_sku
get_directlake_guardrails_for_sku,
)
from sempy_labs.directlake._list_directlake_model_calc_tables import (
list_direct_lake_model_calc_tables
list_direct_lake_model_calc_tables,
)
from sempy_labs.directlake._show_unsupported_directlake_objects import (
show_unsupported_direct_lake_objects
show_unsupported_direct_lake_objects,
)
from sempy_labs.directlake._update_directlake_model_lakehouse_connection import (
update_direct_lake_model_lakehouse_connection
update_direct_lake_model_lakehouse_connection,
)
from sempy_labs.directlake._update_directlake_partition_entity import (
update_direct_lake_partition_entity
update_direct_lake_partition_entity,
)
from sempy_labs.directlake._warm_cache import (
warm_direct_lake_cache_isresident,
warm_direct_lake_cache_perspective
warm_direct_lake_cache_perspective,
)

__all__ = [
direct_lake_schema_compare,
direct_lake_schema_sync,
check_fallback_reason,
get_direct_lake_lakehouse,
get_shared_expression,
get_direct_lake_guardrails,
get_sku_size,
get_directlake_guardrails_for_sku,
list_direct_lake_model_calc_tables,
show_unsupported_direct_lake_objects,
update_direct_lake_model_lakehouse_connection,
update_direct_lake_partition_entity,
warm_direct_lake_cache_isresident,
warm_direct_lake_cache_perspective
]
"direct_lake_schema_compare",
"direct_lake_schema_sync",
"check_fallback_reason",
"get_direct_lake_lakehouse",
"get_shared_expression",
"get_direct_lake_guardrails",
"get_sku_size",
"get_directlake_guardrails_for_sku",
"list_direct_lake_model_calc_tables",
"show_unsupported_direct_lake_objects",
"update_direct_lake_model_lakehouse_connection",
"update_direct_lake_partition_entity",
"warm_direct_lake_cache_isresident",
"warm_direct_lake_cache_perspective",
]
33 changes: 15 additions & 18 deletions sempy_labs/lakehouse/__init__.py
@@ -1,27 +1,24 @@
from sempy_labs.lakehouse._get_lakehouse_columns import (
get_lakehouse_columns
)
from sempy_labs.lakehouse._get_lakehouse_tables import (
get_lakehouse_tables
)
from sempy_labs.lakehouse._get_lakehouse_columns import get_lakehouse_columns
from sempy_labs.lakehouse._get_lakehouse_tables import get_lakehouse_tables
from sempy_labs.lakehouse._lakehouse import (
lakehouse_attached,
optimize_lakehouse_tables
optimize_lakehouse_tables,
)

from sempy_labs.lakehouse._shortcuts import (
list_shortcuts,
#create_shortcut,
# create_shortcut,
create_shortcut_onelake,
delete_shortcut
delete_shortcut,
)

__all__ = [
get_lakehouse_columns,
get_lakehouse_tables,
lakehouse_attached,
optimize_lakehouse_tables,
list_shortcuts,
#create_shortcut,
create_shortcut_onelake,
delete_shortcut
]
"get_lakehouse_columns",
"get_lakehouse_tables",
"lakehouse_attached",
"optimize_lakehouse_tables",
"list_shortcuts",
# create_shortcut,
"create_shortcut_onelake",
"delete_shortcut",
]
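
The directlake and lakehouse packages above previously listed the exported objects themselves in __all__; the commit rewrites the entries as string literals. A minimal sketch of the string convention, using a hypothetical module and function name rather than anything from this repository:

def get_lakehouse_columns_stub():
    """Stand-in for a hypothetical exported function."""
    return []

# __all__ entries must be strings: `from package import *` iterates the list
# and looks each name up by string, so non-string items raise a TypeError.
# One entry per line with a trailing comma matches the black formatting
# applied throughout this commit.
__all__ = [
    "get_lakehouse_columns_stub",
]

if __name__ == "__main__":
    print(__all__)  # ['get_lakehouse_columns_stub']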
