
Commit

Updated tests, removed unnecessary imports, changed file path parameter
marcinpurtak committed Dec 6, 2023
1 parent f282f55 commit 8f6075b
Showing 3 changed files with 18 additions and 6 deletions.
19 changes: 15 additions & 4 deletions tests/integration/flows/test_sharepoint_to_adls.py
@@ -171,6 +171,11 @@ def test_sharepoint_list_to_adls_run_flow_overwrite_true(mocked_class):
)
@pytest.mark.run
def test_sharepoint_list_to_adls_run_flow_fail_on_no_data_returned(mocked_class):
"""
Test will check if flow is failing when empty DF is passed
with the given parameter if_no_data_returned = "fail"
CSV file should not be generated!
"""
flow = SharepointListToADLS(
"test_sharepoint_to_adls_run_flow",
output_file_extension=".csv",
@@ -183,8 +188,6 @@ def test_sharepoint_list_to_adls_run_flow_fail_on_no_data_returned(mocked_class)
)
result = flow.run()
assert result.is_failed()
os.remove(ADLS_FILE_NAME_LIST + ".csv")
os.remove("test_sharepoint_to_adls_run_flow.json")


@mock.patch(
@@ -193,6 +196,11 @@ def test_sharepoint_list_to_adls_run_flow_fail_on_no_data_returned(mocked_class)
)
@pytest.mark.run
def test_sharepoint_list_to_adls_run_flow_success_on_no_data_returned(mocked_class):
"""
Test will check if flow will succeed when empty DF is passed
with the given parameter if_no_data_returned = "skip"
Empty csv should be generated!
"""
flow = SharepointListToADLS(
"test_sharepoint_to_adls_run_flow",
output_file_extension=".csv",
@@ -217,6 +225,11 @@ def test_sharepoint_list_to_adls_run_flow_success_on_no_data_returned(mocked_cla
def test_sharepoint_list_to_adls_run_flow_success_warn_on_no_data_returned(
mocked_class,
):
"""
Test will check if flow is failing when empty DF is passed
with the given parameter if_no_data_returned = "warn"
CSV file should not be generated!
"""
# Get prefect client instance
flow = SharepointListToADLS(
"test_sharepoint_to_adls_run_flow",
@@ -230,5 +243,3 @@ def test_sharepoint_list_to_adls_run_flow_success_warn_on_no_data_returned(
)
result = flow.run()
assert result.is_successful()
os.remove(ADLS_FILE_NAME_LIST + ".csv")
os.remove("test_sharepoint_to_adls_run_flow.json")
4 changes: 2 additions & 2 deletions viadot/flows/sharepoint_to_adls.py
@@ -224,7 +224,7 @@ def __init__(
name (str): Prefect flow name.
list_title (str): Title of Sharepoint List.
site_url (str): URL to set of Sharepoint Lists.
file_name (str): Name of file in ADLS. Defaults to None.
file_name (str): Name of file (without extension) in ADLS. Defaults to None.
adls_dir_path (str): Azure Data Lake destination folder/catalog path. Defaults to None.
filters (dict, optional): Dictionary with operators which filters the SharepointList output. Defaults to None.
allowed dtypes: ('datetime','date','bool','int', 'float', 'complex', 'str')
@@ -371,7 +371,7 @@ def gen_flow(self) -> Flow:

file_to_adls_task = AzureDataLakeUpload()
file_to_adls_task.bind(
from_path=self.path,
from_path=self.local_file_path,
to_path=self.adls_dir_path,
overwrite=self.overwrite,
sp_credentials_secret=self.adls_sp_credentials_secret,
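The hunk above switches the upload source from self.path to self.local_file_path, which matches the updated file_name description (a name without extension). A minimal sketch of how such a local path might be derived is shown below; the helper build_local_file_path is hypothetical and not part of viadot.

def build_local_file_path(file_name: str, output_file_extension: str = ".csv") -> str:
    # Append the requested extension to a bare file name (no extension),
    # e.g. ("test_sharepoint_to_adls_run_flow", ".csv")
    # -> "test_sharepoint_to_adls_run_flow.csv".
    return f"{file_name}{output_file_extension}"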
1 change: 1 addition & 0 deletions viadot/task_utils.py
@@ -29,6 +29,7 @@
from viadot.exceptions import CredentialError, ValidationError
from viadot.tasks import AzureDataLakeUpload, AzureKeyVaultSecret


logger = logging.get_logger()
METADATA_COLUMNS = {"_viadot_downloaded_at_utc": "DATETIME"}

