-
Notifications
You must be signed in to change notification settings - Fork 55
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
- Loading branch information
Showing
43 changed files
with
1,684 additions
and
625 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -11,4 +11,5 @@ anytree | |
IPython | ||
polib | ||
azure.mgmt.resource | ||
jsonpath_ng | ||
jsonpath_ng | ||
deltalake |
Large diffs are not rendered by default.
Oops, something went wrong.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,118 @@ | ||
import sempy.fabric as fabric | ||
import pandas as pd | ||
import sempy_labs._icons as icons | ||
from typing import Optional | ||
from sempy_labs._helper_functions import ( | ||
resolve_workspace_name_and_id, | ||
lro, | ||
pagination, | ||
) | ||
from sempy.fabric.exceptions import FabricHTTPException | ||
|
||
|
||
def list_data_pipelines(workspace: Optional[str] = None) -> pd.DataFrame:
    """
    Shows the data pipelines within a workspace.

    Parameters
    ----------
    workspace : str, default=None
        The Fabric workspace name.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing the data pipelines within a workspace.

    Raises
    ------
    FabricHTTPException
        If the initial REST call does not return HTTP 200.
    """

    columns = ["Data Pipeline Name", "Data Pipeline ID", "Description"]

    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

    client = fabric.FabricRestClient()
    response = client.get(f"/v1/workspaces/{workspace_id}/dataPipelines")
    if response.status_code != 200:
        raise FabricHTTPException(response)

    # The API may page results; `pagination` yields one parsed payload per page.
    responses = pagination(client, response)

    # Accumulate rows in a plain list and build the DataFrame once at the end.
    # Calling pd.concat inside the loop copies the whole frame on every
    # iteration (quadratic) and is the documented anti-pattern.
    rows = []
    for r in responses:
        for v in r.get("value", []):
            rows.append(
                {
                    "Data Pipeline Name": v.get("displayName"),
                    "Data Pipeline ID": v.get("id"),
                    "Description": v.get("description"),
                }
            )

    # Passing `columns` explicitly keeps the empty-result schema identical
    # to the original (a frame with the three named columns and no rows).
    return pd.DataFrame(rows, columns=columns)
|
||
|
||
def create_data_pipeline(
    name: str, description: Optional[str] = None, workspace: Optional[str] = None
):
    """
    Creates a Fabric data pipeline.

    Parameters
    ----------
    name: str
        Name of the data pipeline.
    description : str, default=None
        A description of the data pipeline.
    workspace : str, default=None
        The Fabric workspace name.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """

    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

    request_body = {"displayName": name}

    # Only send the optional description when the caller provided one.
    if description:
        request_body["description"] = description

    client = fabric.FabricRestClient()
    response = client.post(
        f"/v1/workspaces/{workspace_id}/dataPipelines", json=request_body
    )

    # Creation may complete immediately (201) or start a long-running
    # operation (202); `lro` polls until done and raises on other statuses.
    lro(client, response, status_codes=[201, 202])

    print(
        f"{icons.green_dot} The '{name}' data pipeline has been created within the '{workspace}' workspace."
    )
|
||
|
||
def delete_data_pipeline(name: str, workspace: Optional[str] = None):
    """
    Deletes a Fabric data pipeline.

    Parameters
    ----------
    name: str
        Name of the data pipeline.
    workspace : str, default=None
        The Fabric workspace name.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.

    Raises
    ------
    FabricHTTPException
        If the delete request does not return HTTP 200.
    """

    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

    # Translate the human-readable pipeline name into its item GUID,
    # which is what the REST endpoint addresses.
    pipeline_id = fabric.resolve_item_id(
        item_name=name, type="DataPipeline", workspace=workspace
    )

    rest_client = fabric.FabricRestClient()
    resp = rest_client.delete(
        f"/v1/workspaces/{workspace_id}/dataPipelines/{pipeline_id}"
    )
    if resp.status_code != 200:
        raise FabricHTTPException(resp)

    print(
        f"{icons.green_dot} The '{name}' data pipeline within the '{workspace}' workspace has been deleted."
    )
Oops, something went wrong.