Commit

Merge pull request #765 from dyvenia/dev
Release 0.4.20 PR
Rafalz13 authored Oct 12, 2023
2 parents 7d5cfd4 + 295264d commit a6ced4d
Showing 20 changed files with 163 additions and 124 deletions.
2 changes: 1 addition & 1 deletion .gitignore
@@ -159,4 +159,4 @@ sap_netweaver_rfc
# Databricks-connect

.databricks-connect
.dotnet
.dotnet
16 changes: 16 additions & 0 deletions CHANGELOG.md
@@ -3,13 +3,29 @@ All notable changes to this project will be documented in this file.

The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

## [Unreleased]
### Added

### Fixed

### Changed


## [0.4.20] - 2023-10-12
### Added
- Added `Office365-REST-Python-Client` library to `requirements`.
- Added `GetSalesQuotationData` view in `BusinessCore` source.
- Added new ViewType `queue_interaction_detail_view` to Genesys.
- Added new column `_viadot_source` to BigQuery extraction.

### Changed
- Changed the flow name from `TransformAndCatalog` to `TransformAndCatalogToLuma`.
- Modified `add_viadot_metadata_columns` so that a `source_name` parameter can be passed to the decorator applied to `to_df` (or to whichever function generates the DataFrame); a usage sketch follows this CHANGELOG excerpt.
- Changed the `SharepointToDF` task to apply `add_viadot_metadata_columns` with `source_name="Sharepoint"`.
- Changed `Mindful` so that credentials are passed via the `auth` parameter instead of the `header`.


## [0.4.19] - 2023-08-31
### Added
- Added `add_viadot_metadata_columns` function that will be used as a decorator for `to_df` class methods.
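A note on the `add_viadot_metadata_columns` entry above: the decorator is now used as a factory, i.e. it is called (with or without a source name) and returns the actual decorator, which appends a `_viadot_source` column to the DataFrame produced by the wrapped method. The sketch below only illustrates that pattern, reconstructed from the changelog entry and the updated tests in `tests/unit/test_utils.py`; the `DemoSource` class and the class-name fallback are assumptions, not the actual viadot implementation.

```python
import functools

import pandas as pd


def add_viadot_metadata_columns(source_name: str = None):
    """Decorator factory: append a `_viadot_source` column to the DataFrame
    returned by a `to_df`-style method (sketch, not the viadot implementation)."""

    def decorator(func):
        @functools.wraps(func)
        def wrapper(self, *args, **kwargs):
            df = func(self, *args, **kwargs)
            # Assumed fallback: use the class name when no source name is given,
            # matching the expectations in tests/unit/test_utils.py.
            df["_viadot_source"] = source_name or type(self).__name__
            return df

        return wrapper

    return decorator


class DemoSource:
    # Hypothetical class, used here only to show the decorator call syntax.
    @add_viadot_metadata_columns("Sharepoint")
    def to_df(self):
        return pd.DataFrame({"a": [123], "b": ["abc"]})


print(DemoSource().to_df().columns.to_list())  # ['a', 'b', '_viadot_source']
```

One consequence of the factory pattern is that the parentheses become mandatory, which is why the test further down switches from `@add_viadot_metadata_columns` to `@add_viadot_metadata_columns()` for the no-argument case.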
3 changes: 2 additions & 1 deletion requirements.txt
@@ -41,4 +41,5 @@ avro-python3==1.10.2
pygit2>=1.10.1, <1.11.0
dbt-core==1.3.2
dbt-sqlserver==1.3.1
lumaCLI==0.0.18
lumaCLI==0.0.19
Office365-REST-Python-Client==2.4.4
52 changes: 6 additions & 46 deletions tests/integration/flows/test_prefect_logs.py
@@ -8,18 +8,13 @@ def expectation_suite():
expectation_suite = {
"data": {
"project": [
{
"id": "6f413380-e228-4d64-8e1b-41c6cd434a2a",
"name": "Installer Engagement",
"flows": [],
},
{
"id": "223a8acf-4cf0-4cf7-ae1f-b66f78e28813",
"name": "oso_reporting",
"name": "Admin",
"flows": [
{
"id": "b13dcc6d-b621-4acd-88be-2cf28715a7c5",
"name": "1-raw dakvenster_order_prod_info extract",
"name": "1-raw table_catalog extract",
"version": 3,
"flow_runs": [
{
@@ -34,23 +29,18 @@
},
{
"id": "14b1a89e-f902-48a1-b6df-43cacdb91e1a",
"name": "1-raw dakvenster_order_prod_info extract",
"name": "1-raw table_catalog extract",
"version": 2,
"flow_runs": [],
},
{
"id": "a1eace09-38b4-46bf-bacf-a5d29bdbb633",
"name": "1-raw dakvenster_order_prod_info extract",
"name": "1-raw table_catalog extract",
"version": 1,
"flow_runs": [],
},
],
},
{
"id": "844372db-2d22-495d-a343-b8f8cbcf8963",
"name": "sap",
"flows": [],
},
{
"id": "512d0f29-2ceb-4177-b7d8-c5908da666ef",
"name": "integrations",
@@ -61,16 +51,6 @@
"name": "dev_cdl",
"flows": [],
},
{
"id": "e2a926e2-ec86-4900-a24e-330a44b6cb19",
"name": "cic_test",
"flows": [],
},
{
"id": "667d5026-2f01-452a-b6fe-5437ca833066",
"name": "cic_dev",
"flows": [],
},
{
"id": "eac9b6d4-725a-4354-bf8f-25e7828ea2d8",
"name": "Admin",
@@ -86,31 +66,11 @@
"name": "cic",
"flows": [],
},
{
"id": "dd2ccc32-2163-4f55-a746-1dbc6b28aaa4",
"name": "Hyperlocal",
"flows": [],
},
{
"id": "7131c357-bad7-43cf-aabc-87f9cf045384",
"name": "Installer Segmentation",
"flows": [],
},
{
"id": "94a8b8bf-14fa-4b64-ab78-af1d332dedd4",
"name": "Marketing KPI",
"flows": [],
},
{
"id": "ebe0e5aa-4add-4440-8c1a-6f9c74eb29fe",
"name": "dev",
"flows": [],
},
{
"id": "b5d924b0-4116-479f-a8f5-e28f9a9051ca",
"name": "velux",
"flows": [],
},
]
}
}
@@ -127,7 +87,7 @@ def test_prefect_logs(expectation_suite):
id
name
flows (
where : {name: {_eq: "1-raw google_analytics_oso_sps_gb extract"}}
where : {name: {_eq: "1-raw table_catalog extract"}}
) {
id
name
@@ -156,7 +116,7 @@ def test_prefect_logs(expectation_suite):
scheduled_start_time="2022-09-05",
filter_type="_gte",
local_file_path=f"prefect_extract_logs.parquet",
adls_path=f"raw/supermetrics/mp/prefect_extract_logs.parquet",
adls_path=f"raw/tests/prefect_extract_logs.parquet",
)

results = flow.run()
2 changes: 1 addition & 1 deletion tests/integration/tasks/test_bigquery.py
@@ -17,7 +17,7 @@ def test_bigquery_to_df_success():
credentials_key=CREDENTIALS_KEY,
)
df = bigquery_to_df_task.run()
expected_column = ["my_value"]
expected_column = ["my_value", "_viadot_source"]

assert isinstance(df, pd.DataFrame)
assert expected_column == list(df.columns)
23 changes: 12 additions & 11 deletions tests/integration/test_mindful.py
@@ -10,9 +10,10 @@
os.system("clear")

credentials_mindful = local_config["MINDFUL"]
header = {
"Authorization": f"Bearer {credentials_mindful.get('VAULT')}",
}
auth = (
credentials_mindful["CUSTOMER_UUID"],
credentials_mindful["AUTH_TOKEN"],
)


class MockClass:
@@ -42,14 +43,14 @@ def json():

@pytest.mark.init
def test_instance_mindful():
mf = Mindful(header=header)
mf = Mindful(auth=auth)
assert isinstance(mf, Mindful)


@mock.patch("viadot.sources.mindful.handle_api_response", return_value=MockClass)
@pytest.mark.connect
def test_mindful_api_response(mock_connection):
mf = Mindful(header=header)
mf = Mindful(auth=auth)
mf.get_interactions_list()
mf.get_responses_list()
assert mock_connection.call_count == 2
@@ -58,7 +59,7 @@ def test_mindful_api_response(mock_connection):
@mock.patch("viadot.sources.mindful.handle_api_response", return_value=MockClass)
@pytest.mark.connect
def test_mindful_api_response2(mock_api_response):
mf = Mindful(header=header)
mf = Mindful(auth=auth)

response = mf.get_interactions_list()

@@ -69,7 +70,7 @@ def test_mindful_api_response2(mock_api_response):
@mock.patch("viadot.sources.mindful.handle_api_response", return_value=MockClass)
@pytest.mark.connect
def test_mindful_api_response3(mock_api_response):
mf = Mindful(header=header)
mf = Mindful(auth=auth)

response = mf.get_responses_list()

@@ -80,7 +81,7 @@ def test_mindful_api_response3(mock_api_response):
@mock.patch("viadot.sources.mindful.handle_api_response", return_value=MockClass)
@pytest.mark.connect
def test_mindful_api_response4(mock_api_response):
mf = Mindful(header=header)
mf = Mindful(auth=auth)

response = mf.get_survey_list()

@@ -91,7 +92,7 @@ def test_mindful_api_response4(mock_api_response):
@mock.patch("viadot.sources.Mindful._mindful_api_response", return_value=MockClass)
@pytest.mark.save
def test_mindful_interactions(mock_connection):
mf = Mindful(header=header)
mf = Mindful(auth=auth)
response = mf.get_interactions_list()
mf.response_to_file(response)
assert mf.endpoint == "interactions" and isinstance(mf.endpoint, str)
@@ -103,7 +104,7 @@ def test_mindful_interactions(mock_connection):
@mock.patch("viadot.sources.Mindful._mindful_api_response", return_value=MockClass)
@pytest.mark.save
def test_mindful_responses(mock_connection):
mf = Mindful(header=header)
mf = Mindful(auth=auth)
response = mf.get_responses_list()
mf.response_to_file(response)

@@ -115,7 +116,7 @@ def test_mindful_responses(mock_connection):
@mock.patch("viadot.sources.Mindful._mindful_api_response", return_value=MockClass)
@pytest.mark.save
def test_mindful_surveys(mock_connection):
mf = Mindful(header=header)
mf = Mindful(auth=auth)
response = mf.get_survey_list()
mf.response_to_file(response)

2 changes: 1 addition & 1 deletion tests/test_viadot.py
@@ -2,4 +2,4 @@


def test_version():
assert __version__ == "0.4.19"
assert __version__ == "0.4.20"
17 changes: 16 additions & 1 deletion tests/unit/test_utils.py
@@ -16,16 +16,22 @@


class ClassForDecorator:
source = "Source_name"

def __init__(self):
self.df = pd.DataFrame({"a": [123], "b": ["abc"]})

def to_df(self):
return self.df

@add_viadot_metadata_columns
@add_viadot_metadata_columns()
def to_df_decorated(self):
return self.df

@add_viadot_metadata_columns(source)
def to_df_decorated_parameter(self):
return self.df


def test_single_quotes_inside():
TEST_VALUE = "a'b"
@@ -138,3 +144,12 @@ def test_add_viadot_metadata_columns_base():
assert df_base.columns.to_list() == ["a", "b"]
assert df_decorated.columns.to_list() == ["a", "b", "_viadot_source"]
assert df_decorated["_viadot_source"][0] == "ClassForDecorator"


def test_add_viadot_metadata_columns_with_parameter():
df_base = ClassForDecorator().to_df()
df_decorated = ClassForDecorator().to_df_decorated_parameter()

assert df_base.columns.to_list() == ["a", "b"]
assert df_decorated.columns.to_list() == ["a", "b", "_viadot_source"]
assert df_decorated["_viadot_source"][0] == "Source_name"
2 changes: 1 addition & 1 deletion viadot/__init__.py
@@ -1 +1 @@
__version__ = "0.4.19"
__version__ = "0.4.20"
2 changes: 1 addition & 1 deletion viadot/flows/__init__.py
@@ -45,5 +45,5 @@
from .sql_server_to_duckdb import SQLServerToDuckDB
from .sql_server_to_parquet import SQLServerToParquet
from .sql_server_transform import SQLServerTransform
from .transform_and_catalog import TransformAndCatalog
from .transform_and_catalog import TransformAndCatalogToLuma
from .vid_club_to_adls import VidClubToADLS
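
The rename above means downstream code now imports the flow under its new name, re-exported from `viadot.flows` as shown in this diff. A minimal sketch of the import (constructor arguments are omitted because they are not shown in this diff):

```python
# Sketch of the import change implied by the rename; TransformAndCatalogToLuma
# replaces the former TransformAndCatalog flow.
from viadot.flows import TransformAndCatalogToLuma
```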