diff --git a/src/sempy_labs/__init__.py b/src/sempy_labs/__init__.py index da57e48..0b849a4 100644 --- a/src/sempy_labs/__init__.py +++ b/src/sempy_labs/__init__.py @@ -1,4 +1,7 @@ -from sempy_labs._job_scheduler import list_item_job_instances +from sempy_labs._job_scheduler import ( + list_item_job_instances, + list_item_schedules, +) from sempy_labs._gateways import ( list_gateway_members, list_gateway_role_assigments, @@ -470,4 +473,5 @@ "bind_semantic_model_to_gateway", "list_semantic_model_errors", "list_item_job_instances", + "list_item_schedules", ] diff --git a/src/sempy_labs/_job_scheduler.py b/src/sempy_labs/_job_scheduler.py index 8643638..7b70c29 100644 --- a/src/sempy_labs/_job_scheduler.py +++ b/src/sempy_labs/_job_scheduler.py @@ -5,9 +5,11 @@ resolve_workspace_name_and_id, resolve_item_name_and_id, pagination, + lro, ) from sempy.fabric.exceptions import FabricHTTPException from uuid import UUID +import sempy_labs._icons as icons def list_item_job_instances( @@ -23,7 +25,7 @@ def list_item_job_instances( item : str | uuid.UUID The item name or ID type : str, default=None - The item type. If specifying the item name as the item, the item type is required. + The item `type <https://learn.microsoft.com/rest/api/fabric/core/items/list-items?tabs=HTTP#itemtype>`_. If specifying the item name as the item, the item type is required. workspace : str | uuid.UUID, default=None The Fabric workspace name or ID used by the lakehouse. 
Defaults to None which resolves to the workspace of the attached lakehouse @@ -57,7 +59,8 @@ def list_item_job_instances( "Job Type", "Invoke Type", "Status", - "Root Activity Id" "Start Time UTC", + "Root Activity Id", + "Start Time UTC", "End Time UTC", - "Failure Reason", + "Error Message", ] @@ -71,6 +74,7 @@ def list_item_job_instances( dfs = [] for r in responses: for v in r.get("value", []): + fail = v.get("failureReason", {}) new_data = { "Job Instance Id": v.get("id"), "Item Name": item_name, @@ -82,7 +86,7 @@ def list_item_job_instances( "Root Activity Id": v.get("rootActivityId"), "Start Time UTC": v.get("startTimeUtc"), "End Time UTC": v.get("endTimeUtc"), - "Failure Reason": v.get("failureReason"), + "Error Message": fail.get("message") if fail is not None else "", } dfs.append(pd.DataFrame(new_data, index=[0])) @@ -90,3 +94,128 @@ def list_item_job_instances( df = pd.concat(dfs, ignore_index=True) return df + + +def list_item_schedules( + item: str | UUID, + type: Optional[str] = None, + job_type: str = "DefaultJob", + workspace: Optional[str | UUID] = None, +) -> pd.DataFrame: + """ + Get scheduling settings for one specific item. + + This is a wrapper function for the following API: `Job Scheduler - List Item Schedules <https://learn.microsoft.com/rest/api/fabric/core/job-scheduler/list-item-schedules?tabs=HTTP>`_. + + Parameters + ---------- + item : str | uuid.UUID + The item name or ID + type : str, default=None + The item `type <https://learn.microsoft.com/rest/api/fabric/core/items/list-items?tabs=HTTP#itemtype>`_. If specifying the item name as the item, the item type is required. + job_type : str, default="DefaultJob" + The job type. + workspace : str | uuid.UUID, default=None + The Fabric workspace name or ID used by the lakehouse. + Defaults to None which resolves to the workspace of the attached lakehouse + or if no lakehouse attached, resolves to the workspace of the notebook. + + Returns + ------- + pandas.DataFrame + Shows a list of scheduling settings for one specific item. 
+ """ + + (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace) + (item_name, item_id) = resolve_item_name_and_id( + item=item, type=type, workspace=workspace + ) + + df = pd.DataFrame( + columns=[ + "Job Schedule Id", + "Enabled", + "Created Date Time", + "Start Date Time", + "End Date Time", + "Local Time Zone Id", + "Type", + "Interval", + "Weekdays", + "Times", + "Owner Id", + "Owner Type", + ] + ) + + client = fabric.FabricRestClient() + response = client.get( + f"v1/workspaces/{workspace_id}/items/{item_id}/jobs/{job_type}/schedules" + ) + + if response.status_code != 200: + raise FabricHTTPException(response) + + for v in response.json().get("value", []): + config = v.get("configuration", {}) + own = v.get("owner", {}) + new_data = { + "Job Schedule Id": v.get("id"), + "Enabled": v.get("enabled"), + "Created Date Time": v.get("createdDateTime"), + "Start Date Time": config.get("startDateTime"), + "End Date Time": config.get("endDateTime"), + "Local Time Zone Id": config.get("localTimeZoneId"), + "Type": config.get("type"), + "Interval": config.get("interval"), + "Weekdays": config.get("weekdays"), + "Times": config.get("times"), + "Owner Id": own.get("id"), + "Owner Type": own.get("type"), + } + + df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True) + + df["Enabled"] = df["Enabled"].astype(bool) + + return df + + +def run_on_demand_item_job( + item: str | UUID, + type: Optional[str] = None, + job_type: str = "DefaultJob", + workspace: Optional[str | UUID] = None, +): + """ + Run on-demand item job instance. + + This is a wrapper function for the following API: `Job Scheduler - Run On Demand Item Job <https://learn.microsoft.com/rest/api/fabric/core/job-scheduler/run-on-demand-item-job?tabs=HTTP>`_. + + Parameters + ---------- + item : str | uuid.UUID + The item name or ID + type : str, default=None + The item `type <https://learn.microsoft.com/rest/api/fabric/core/items/list-items?tabs=HTTP#itemtype>`_. If specifying the item name as the item, the item type is required. + job_type : str, default="DefaultJob" + The job type. 
+ workspace : str | uuid.UUID, default=None + The Fabric workspace name or ID used by the lakehouse. + Defaults to None which resolves to the workspace of the attached lakehouse + or if no lakehouse attached, resolves to the workspace of the notebook. + """ + + (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace) + (item_name, item_id) = resolve_item_name_and_id( + item=item, type=type, workspace=workspace + ) + + client = fabric.FabricRestClient() + response = client.post( + f"v1/workspaces/{workspace_id}/items/{item_id}/jobs/instances?jobType={job_type}" + ) + + lro(client, response, return_status_code=True) + + print(f"{icons.green_dot} The '{item_name}' {type.lower() if type is not None else 'item'} has been executed.")