Merge pull request #847 from dyvenia/dev
Release 0.4.25 PR
Rafalz13 authored Jan 30, 2024
2 parents 48c14c0 + 25f4b1e commit 8d12bc1
Showing 19 changed files with 164 additions and 185 deletions.
12 changes: 0 additions & 12 deletions .github/workflows/build.yml
@@ -64,18 +64,6 @@ jobs:
        run: |
          pip install black
          black --check .
-       continue-on-error: true
-
-     - name: Commit Black changes to the pull request
-       if: ${{ always() && steps.blackCheck.outcome == 'failure' }}
-       run: |
-         git config --global user.name 'github-actions[bot]'
-         git config --global user.email 'github-actions[bot]@users.noreply.github.com'
-         git remote set-url origin https://x-access-token:${{ secrets.GITHUB_TOKEN }}@github.com/$GITHUB_REPOSITORY
-         black .
-         git checkout $GITHUB_HEAD_REF
-         git commit -am "🎨 Format Python code with Black"
-         git push
      - name: Test with pytest
        if: always()
9 changes: 9 additions & 0 deletions CHANGELOG.md
@@ -11,6 +11,15 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0

### Changed

+
+## [0.4.25] - 2024-01-30
+### Added
+- Added `if_empty` parameter logic: a `check_if_df_empty` task in the `ADLSToAzureSQL` flow.
+- Added the `geopy` library to `requirements`.
+- Added a new parameter, `validate_df_dict`, to the `ADLSToAzureSQL` class.
+- Added a new ViewType, `agent_timeline_summary_view`, to Genesys.
+
+
## [0.4.24] - 2023-12-08
### Fixed
- `task_utils/get_nested_value`: fixed an issue with a non-dict parameter passed without a level (first workflow).
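The headline change in 0.4.25 is the new `validate_df_dict` parameter on `ADLSToAzureSQL`. As a rough sketch of how it is passed, mirroring the integration test added further down in this diff (the import path follows viadot's usual `viadot.flows` layout; the flow name, path, schema, and table values are placeholders):

```python
from viadot.flows import ADLSToAzureSQL  # import path assumed, not shown in this diff

flow = ADLSToAzureSQL(
    name="adls_to_azure_sql_with_validation",  # placeholder flow name
    adls_path="raw/tests/test.csv",            # placeholder ADLS path
    schema="sandbox",
    table="test_bcp",
    dtypes={"test_str": "VARCHAR(25)", "test_int": "INT"},
    if_exists="replace",
    # New in 0.4.25: validation rules applied to the DataFrame before the
    # SQL load; this rule presumably requires the frame's columns to match
    # the given list.
    validate_df_dict={"column_list_to_match": ["test_str", "test_int"]},
)
flow.run()
```

Misspelling the keyword raises a `TypeError`, which the `test_adls_to_azure_sql_mocked_wrong_param` test below relies on.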
3 changes: 2 additions & 1 deletion requirements.txt
@@ -45,4 +45,5 @@ lumaCLI==0.0.19
Office365-REST-Python-Client==2.4.4
TM1py==1.11.3
nltk==3.8.1
-scikit-learn==1.3.2
+scikit-learn==1.3.2
+geopy==2.4.1
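`geopy` is a new dependency in this release; the diff pins it but shows no call sites. For orientation only, a minimal sketch of geopy 2.x's geodesic-distance API (the coordinates are invented):

```python
from geopy.distance import geodesic

# Two (latitude, longitude) pairs, invented for illustration.
wroclaw = (51.1079, 17.0385)
munich = (48.1351, 11.5820)

# geodesic() returns a Distance object with unit accessors such as .km and .miles.
print(f"{geodesic(wroclaw, munich).km:.1f} km")
```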
45 changes: 45 additions & 0 deletions tests/integration/flows/test_adls_to_azure_sql.py
@@ -101,3 +101,48 @@ def test_check_dtypes_sort():
        assert False
    except signals.FAIL:
        assert True
+
+
+def test_adls_to_azure_sql_mocked(TEST_CSV_FILE_PATH):
+    with mock.patch.object(ADLSToAzureSQL, "run", return_value=True) as mock_method:
+        instance = ADLSToAzureSQL(
+            name="test_adls_to_azure_sql_flow",
+            adls_path=TEST_CSV_FILE_PATH,
+            schema="sandbox",
+            table="test_bcp",
+            dtypes={"test_str": "VARCHAR(25)", "test_int": "INT"},
+            if_exists="replace",
+        )
+        instance.run()
+        mock_method.assert_called_with()
+
+
+def test_adls_to_azure_sql_mocked_validate_df_param(TEST_CSV_FILE_PATH):
+    with mock.patch.object(ADLSToAzureSQL, "run", return_value=True) as mock_method:
+        instance = ADLSToAzureSQL(
+            name="test_adls_to_azure_sql_flow",
+            adls_path=TEST_CSV_FILE_PATH,
+            schema="sandbox",
+            table="test_bcp",
+            dtypes={"test_str": "VARCHAR(25)", "test_int": "INT"},
+            if_exists="replace",
+            validate_df_dict={"column_list_to_match": ["test_str", "test_int"]},
+        )
+        instance.run()
+        mock_method.assert_called_with()
+
+
+def test_adls_to_azure_sql_mocked_wrong_param(TEST_CSV_FILE_PATH):
+    with pytest.raises(TypeError) as excinfo:
+        instance = ADLSToAzureSQL(
+            name="test_adls_to_azure_sql_flow",
+            adls_path=TEST_CSV_FILE_PATH,
+            schema="sandbox",
+            table="test_bcp",
+            dtypes={"test_str": "VARCHAR(25)", "test_int": "INT"},
+            if_exists="replace",
+            validate_df_dit={"column_list_to_match": ["test_str", "test_int"]},
+        )
+        instance.run()
+
+    assert "validate_df_dit" in str(excinfo)
3 changes: 0 additions & 3 deletions tests/integration/flows/test_aselite_to_adls.py
@@ -15,9 +15,6 @@
TMP_FILE_NAME = "test_flow.csv"
MAIN_DF = None

-df_task = ASELiteToDF()
-file_to_adls_task = AzureDataLakeUpload()
-


def test_aselite_to_adls():
    credentials_secret = PrefectSecret("aselite").run()
Empty file.
File renamed without changes.
7 changes: 0 additions & 7 deletions tests/integration/tasks/test_github.py
@@ -2,13 +2,6 @@

from viadot.tasks.github import DownloadGitHubFile

-# def test_github_clone_task():
-#     clone_repo_task = CloneRepo()
-#     repo = "fishtown-analytics/dbt"
-#     repo_name = repo.split("/")[-1]
-#     clone_repo_task.run(repo=repo)
-#     assert os.path.exists(repo_name)
-


def test_download_github_file():
    task = DownloadGitHubFile()
File renamed without changes.
2 changes: 1 addition & 1 deletion tests/test_viadot.py
@@ -2,4 +2,4 @@


def test_version():
-    assert __version__ == "0.4.24"
+    assert __version__ == "0.4.25"
22 changes: 1 addition & 21 deletions tests/unit/tasks/test_uk_carbon_intensity.py
@@ -1,8 +1,7 @@
import os

-import openpyxl
import pytest
-from openpyxl import load_workbook
+

from viadot.tasks.open_apis.uk_carbon_intensity import StatsToCSV, StatsToExcel
@@ -22,22 +21,3 @@ def ukci_task_excel():
    ukci_task_excel = StatsToExcel()
    yield ukci_task_excel
    os.remove(TEST_FILE_PATH_EXCEL)
-
-
-# def test_uk_carbon_intensity_to_csv(ukci_task):
-#     ukci_task.run(path=TEST_FILE_PATH)
-#     if_exist = os.path.isfile(TEST_FILE_PATH)
-#     assert if_exist == True
-
-
-# def test_uk_carbon_intensity_to_excel(ukci_task_excel):
-#     ukci_task_excel.run(path=TEST_FILE_PATH_EXCEL)
-#     if_exist = os.path.isfile(TEST_FILE_PATH_EXCEL)
-#     assert if_exist == True
-
-
-# def test_uk_carbon_intensity_to_excel_contain(ukci_task_excel):
-#     ukci_task_excel.run(path=TEST_FILE_PATH_EXCEL)
-#     excel_file = load_workbook(TEST_FILE_PATH_EXCEL)
-#     value = excel_file["A1"].value
-#     assert value == "from"
48 changes: 0 additions & 48 deletions tests/unit/test_base.py
@@ -81,51 +81,3 @@ def test_handle_if_empty(caplog):
        src._handle_if_empty(if_empty="fail")
    with pytest.raises(SKIP):
        src._handle_if_empty(if_empty="skip")
-
-
-# def test_to_csv_append():
-#     """Test whether `to_csv()` with the append option writes data of correct shape"""
-#     driver = "/usr/lib/x86_64-linux-gnu/odbc/libsqlite3odbc.so"
-#     db_name = "testfile.sqlite"
-#     server = "localhost"
-#     source = SQL(
-#         credentials=dict(driver=driver, db_name=db_name, server=server, user=None)
-#     )
-
-#     # Generate test table.
-#     df = pd.DataFrame({"a": [1, 2], "b": [3, 4]})
-#     source.create_table("test", dtypes={"a": "INT", "b": "INT"}, if_exists="replace")
-#     source.insert_into(TABLE, df)
-
-#     # Write the table to a CSV three times in `append` mode.
-#     for i in range(3):
-#         source.to_csv(path=PATH, query="SELECT * FROM test", if_exists="append")
-
-#     # Read the CSV and validate no. of rows and columns.
-#     out_df = pd.read_csv(PATH, sep="\t")
-
-#     target_length = 3 * df.shape[0]
-#     target_width = df.shape[0]
-
-#     actual_length = out_df.shape[0]
-#     actual_width = out_df.shape[1]
-
-#     assert actual_length == target_length and actual_width == target_width
-
-#     # Clean up.
-#     os.remove(PATH)
-
-
-# GitHub changes the string and makes the test fail
-# def test_conn_str():
-#     s = SQL(
-#         driver=CREDENTIALS["driver"],
-#         server=CREDENTIALS["server"],
-#         db=CREDENTIALS["db_name"],
-#         user=CREDENTIALS["user"],
-#         pw=CREDENTIALS["password"],
-#     )
-#     assert (
-#         s.conn_str
-#         == "DRIVER=ODBC Driver 17 for SQL Server;SERVER=s123.database.windows.net;DATABASE=a-b-c;UID={[email protected]};PWD={a123;@4}"
-#     )
2 changes: 1 addition & 1 deletion tests/unit/test_duckdb.py
@@ -85,7 +85,7 @@ def test_create_table_from_multiple_parquet(duckdb):
    duckdb.run(f"DROP SCHEMA {SCHEMA}")


-def test__check_if_table_exists(duckdb, TEST_PARQUET_FILE_PATH):
+def test_check_if_table_exists(duckdb, TEST_PARQUET_FILE_PATH):
    assert not duckdb._check_if_table_exists(table=TABLE, schema=SCHEMA)
    duckdb.create_table_from_parquet(
        schema=SCHEMA, table=TABLE, path=TEST_PARQUET_FILE_PATH
4 changes: 2 additions & 2 deletions tests/unit/test_supermetrics.py
@@ -105,7 +105,7 @@
}


-def test___get_col_names_google_analytics_pivoted():
+def test_get_col_names_google_analytics_pivoted():
    columns = Supermetrics._get_col_names_google_analytics(response=RESPONSE_PIVOTED)
    assert columns == [
        "Date",
@@ -117,6 +117,6 @@ def test___get_col_names_google_analytics_pivoted():
    ]


-def test___get_col_names_google_analytics_pivoted_no_data():
+def test_get_col_names_google_analytics_pivoted_no_data():
    with pytest.raises(ValueError):
        Supermetrics._get_col_names_google_analytics(response=RESPONSE_PIVOTED_NO_DATA)
2 changes: 1 addition & 1 deletion viadot/__init__.py
@@ -1 +1 @@
__version__ = "0.4.24"
__version__ = "0.4.25"