
Commit

Merge pull request #576 from dyvenia/dev
Release 0.4.11 PR
Rafalz13 authored Dec 15, 2022
2 parents 3384bec + d2eae42 commit 9544482
Showing 10 changed files with 266 additions and 115 deletions.
1 change: 0 additions & 1 deletion .gitignore
@@ -158,4 +158,3 @@ sap_netweaver_rfc
 # Databricks-connect
 
 .databricks-connect
-
9 changes: 9 additions & 0 deletions CHANGELOG.md
@@ -6,6 +6,15 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 
 ## [Unreleased]
 
+
+## [0.4.11] - 2022-12-15
+### Added
+- Added the new `AGENT` view type to `Genesys`.
+
+### Changed
+- Changed the data extraction logic for `Outlook` data.
+
+
 ## [0.4.10] - 2022-11-16
 ### Added
 - Added `credentials_loader` function in utils
14 changes: 14 additions & 0 deletions tests/integration/test_genesys.py
@@ -8,6 +8,7 @@
 @pytest.fixture
 def var_dictionary():
     variables = {
+        "start_date": "2022-08-12",
         "media_type_list": ["callback", "chat"],
         "queueIds_list": [
             "1234567890",
@@ -38,24 +39,36 @@ def var_dictionary():
                 "https://apps.mypurecloud.de/example/url/test",
                 "1234567890qwertyuiopasdfghjklazxcvbn",
                 "chat",
+                "QUEUE_PERFORMANCE_DETAIL_VIEW",
+                "2022-08-12T23:00:00.000Z/2022-08-13T23:00:00.000Z",
+                "COMPLETED",
             ],
             [
                 "1234567890qwertyuiopasdfghjklazxcvbn",
                 "https://apps.mypurecloud.de/example/url/test",
                 "1234567890qwertyuiopasdfghjklazxcvbn",
                 "chat",
+                "QUEUE_PERFORMANCE_DETAIL_VIEW",
+                "2022-08-12T23:00:00.000Z/2022-08-13T23:00:00.000Z",
+                "COMPLETED",
             ],
             [
                 "1234567890qwertyuiopasdfghjklazxcvbn",
                 "https://apps.mypurecloud.de/example/url/test",
                 "1234567890qwertyuiopasdfghjklazxcvbn",
                 "callback",
+                "QUEUE_PERFORMANCE_DETAIL_VIEW",
+                "2022-08-12T23:00:00.000Z/2022-08-13T23:00:00.000Z",
+                "COMPLETED",
             ],
             [
                 "1234567890qwertyuiopasdfghjklazxcvbn",
                 "https://apps.mypurecloud.de/example/url/test",
                 "1234567890qwertyuiopasdfghjklazxcvbn",
                 "callback",
+                "QUEUE_PERFORMANCE_DETAIL_VIEW",
+                "2022-08-12T23:00:00.000Z/2022-08-13T23:00:00.000Z",
+                "COMPLETED",
             ],
         ],
         "entities": {
@@ -202,6 +215,7 @@ def test_download_reports(mock_download_files, var_dictionary):
     g = Genesys()
     g.ids_mapping = var_dictionary["ids_mapping"]
     g.report_data = var_dictionary["report_data"]
+    g.start_date = var_dictionary["start_date"]
     file_name_list = g.download_all_reporting_exports()
 
     assert type(file_name_list) == list and len(file_name_list) > 0
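
For reference, each `report_data` row in the fixture above now carries seven positional fields. The sketch below is not part of the commit; it only labels the indices as `download_all_reporting_exports` consumes them later in this PR.

```python
# Layout of one report_data row after this change (values taken from the fixture).
single_report = [
    "1234567890qwertyuiopasdfghjklazxcvbn",               # [0] reporting-export id
    "https://apps.mypurecloud.de/example/url/test",       # [1] downloadUrl
    "1234567890qwertyuiopasdfghjklazxcvbn",               # [2] first queueId
    "chat",                                               # [3] first mediaType
    "QUEUE_PERFORMANCE_DETAIL_VIEW",                      # [4] viewType
    "2022-08-12T23:00:00.000Z/2022-08-13T23:00:00.000Z",  # [5] interval
    "COMPLETED",                                          # [6] status, read as single_report[-1]
]
```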
2 changes: 1 addition & 1 deletion tests/test_viadot.py
@@ -2,4 +2,4 @@
 
 
 def test_version():
-    assert __version__ == "0.4.10"
+    assert __version__ == "0.4.11"
2 changes: 1 addition & 1 deletion viadot/__init__.py
@@ -1 +1 @@
-__version__ = "0.4.10"
+__version__ = "0.4.11"
45 changes: 34 additions & 11 deletions viadot/flows/genesys_to_adls.py
@@ -63,6 +63,8 @@ class GenesysToADLS(Flow):
     def __init__(
         self,
         name: str,
+        view_type: str = "queue_performance_detail_view",
+        view_type_time_sleep: int = 80,
         media_type_list: List[str] = None,
         queueIds_list: List[str] = None,
         data_to_post_str: str = None,
@@ -87,6 +89,8 @@ def __init__(
         Args:
             name (str): The name of the Flow.
+            view_type (str, optional): The type of view export job to be created. Defaults to "queue_performance_detail_view".
+            view_type_time_sleep (int, optional): Waiting time (in seconds) to retrieve data from the Genesys API. Defaults to 80.
             media_type_list (List[str], optional): List of specific media types. Defaults to None.
             queueIds_list (List[str], optional): List of specific queues ids. Defaults to None.
             data_to_post_str (str, optional): String template to generate json body. Defaults to None.
@@ -108,6 +112,8 @@ def __init__(
         """
         # GenesysToCSV
         self.flow_name = name
+        self.view_type = view_type
+        self.view_type_time_sleep = view_type_time_sleep
         self.media_type_list = media_type_list
         self.queueIds_list = queueIds_list
         self.data_to_post = data_to_post_str
@@ -134,17 +140,34 @@ def gen_flow(self) -> Flow:
 
         to_csv = GenesysToCSV()
 
-        file_names = to_csv.bind(
-            media_type_list=self.media_type_list,
-            queueIds_list=self.queueIds_list,
-            data_to_post_str=self.data_to_post,
-            start_date=self.start_date,
-            end_date=self.end_date,
-            days_interval=self.days_interval,
-            environment=self.environment,
-            credentials_genesys=self.credentials_genesys,
-            flow=self,
-        )
+        if self.view_type == "queue_performance_detail_view":
+            file_names = to_csv.bind(
+                view_type=self.view_type,
+                media_type_list=self.media_type_list,
+                queueIds_list=self.queueIds_list,
+                data_to_post_str=self.data_to_post,
+                start_date=self.start_date,
+                end_date=self.end_date,
+                days_interval=self.days_interval,
+                environment=self.environment,
+                credentials_genesys=self.credentials_genesys,
+                flow=self,
+            )
+        elif self.view_type in [
+            "agent_performance_summary_view",
+            "agent_status_summary_view",
+        ]:
+            file_names = to_csv.bind(
+                view_type=self.view_type,
+                view_type_time_sleep=self.view_type_time_sleep,
+                media_type_list=self.media_type_list,
+                queueIds_list=[""],
+                data_to_post_str=self.data_to_post,
+                start_date=self.start_date,
+                environment=self.environment,
+                credentials_genesys=self.credentials_genesys,
+                flow=self,
+            )
 
         add_timestamp.bind(file_names, sep=self.sep, flow=self)
 
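
For context, a minimal usage sketch of the two dispatch branches above. The flow names, queue id, media types, and dates are hypothetical, and `start_date`/`end_date` are assumed to be constructor arguments since `gen_flow` references `self.start_date` and `self.end_date`.

```python
from viadot.flows import GenesysToADLS

# Default branch: queue-level report driven by queueIds_list.
queue_flow = GenesysToADLS(
    name="genesys queue performance",  # hypothetical flow name
    view_type="queue_performance_detail_view",
    media_type_list=["callback", "chat"],
    queueIds_list=["1234567890"],  # hypothetical queue id
    start_date="2022-12-01",
    end_date="2022-12-02",
)

# New branch: agent-level views ignore queueIds_list (the flow passes [""])
# and forward view_type_time_sleep so the export job has time to finish.
agent_flow = GenesysToADLS(
    name="genesys agent summary",  # hypothetical flow name
    view_type="agent_performance_summary_view",
    view_type_time_sleep=120,
    media_type_list=["voice"],  # hypothetical media type
    start_date="2022-12-01",
)
```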
10 changes: 8 additions & 2 deletions viadot/flows/outlook_to_adls.py
@@ -8,6 +8,7 @@
     df_to_csv,
     df_to_parquet,
     union_dfs_task,
+    credentials_loader,
 )
 from ..tasks import AzureDataLakeUpload, OutlookToDF
 
@@ -29,6 +30,7 @@ def __init__(
         limit: int = 10000,
         timeout: int = 1200,
         if_exists: Literal["append", "replace", "skip"] = "append",
+        outlook_credentials_secret: str = "OUTLOOK",
         *args: List[Any],
         **kwargs: Dict[str, Any],
     ):
@@ -46,6 +48,7 @@
             overwrite_adls (bool, optional): Whether to overwrite the file in ADLS. Defaults to True.
             adls_sp_credentials_secret (str, optional): The name of the Azure Key Vault secret containing a dictionary with
                 ACCOUNT_NAME and Service Principal credentials (TENANT_ID, CLIENT_ID, CLIENT_SECRET) for the Azure Data Lake. Defaults to None.
+            outlook_credentials_secret (str, optional): The name of the Azure Key Vault secret containing a dictionary with Outlook credentials. Defaults to "OUTLOOK".
             limit (int, optional): Number of fetched top messages. Defaults to 10000.
             timeout (int, optional): The amount of time (in seconds) to wait while running this task before a timeout occurs. Defaults to 1200.
             if_exists (Literal['append', 'replace', 'skip'], optional): What to do if the local file already exists. Defaults to "append".
@@ -64,6 +67,7 @@ def __init__(
         self.output_file_extension = output_file_extension
         self.overwrite_adls = overwrite_adls
         self.adls_sp_credentials_secret = adls_sp_credentials_secret
+        self.outlook_credentials_secret = outlook_credentials_secret
 
         super().__init__(*args, name=name, **kwargs)
 
@@ -72,8 +76,10 @@ def __init__(
     def gen_outlook_df(
         self, mailbox_list: Union[str, List[str]], flow: Flow = None
     ) -> Task:
-
-        outlook_to_df = OutlookToDF(timeout=self.timeout)
+        credentials = credentials_loader.run(
+            credentials_secret=self.outlook_credentials_secret
+        )
+        outlook_to_df = OutlookToDF(timeout=self.timeout, credentials=credentials)
 
         df = outlook_to_df.bind(
             mailbox_name=mailbox_list,
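
A sketch of how the new secret name is wired through, assuming the class defined in this file is `OutlookToADLS` and that `mailbox_list` is a constructor argument (only fragments of `__init__` are visible in this diff); the flow name and mailbox address are hypothetical.

```python
from viadot.flows import OutlookToADLS  # assumed class name for this module

flow = OutlookToADLS(
    name="outlook mailboxes to adls",  # hypothetical flow name
    mailbox_list=["shared.mailbox@example.com"],  # hypothetical mailbox
    outlook_credentials_secret="OUTLOOK",  # new parameter; "OUTLOOK" is the default
)
# When gen_outlook_df() builds the flow, it now calls
# credentials_loader.run(credentials_secret="OUTLOOK") and passes the resulting
# dict to OutlookToDF explicitly, rather than leaving credential lookup to the task.
```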
57 changes: 51 additions & 6 deletions viadot/sources/genesys.py
@@ -1,4 +1,4 @@
-import json
+import json, sys
 import base64
 import warnings
 import asyncio
@@ -23,6 +23,7 @@
 class Genesys(Source):
     def __init__(
         self,
+        view_type: str = "queue_performance_detail_view",
         media_type_list: List[str] = None,
         queueIds_list: List[str] = None,
         data_to_post_str: str = None,
@@ -44,6 +45,7 @@ def __init__(
         Genesys connector which allows for reports scheduling, listing and downloading into Data Frame or specified format output.
         Args:
+            view_type (str, optional): The type of view export job to be created. Defaults to "queue_performance_detail_view".
             media_type_list (List[str], optional): List of specific media types. Defaults to None.
             queueIds_list (List[str], optional): List of specific queues ids. Defaults to None.
             data_to_post_str (str, optional): String template to generate json body. Defaults to None.
@@ -81,6 +83,16 @@ def __init__(
 
         super().__init__(*args, credentials=self.credentials_genesys, **kwargs)
 
+        self.view_type = view_type
+        if self.view_type not in [
+            "queue_performance_detail_view",
+            "agent_performance_summary_view",
+            "agent_status_summary_view",
+        ]:
+            raise Exception(
+                f"View type {self.view_type} is not yet implemented in viadot."
+            )
+
         self.schedule_id = schedule_id
         self.report_name = report_name
         self.environment = environment
@@ -280,8 +292,11 @@ def get_reporting_exports_data(self):
                 tmp = [
                     entity.get("id"),
                     entity.get("downloadUrl"),
-                    entity.get("filter").get("queueIds")[0],
-                    entity.get("filter").get("mediaTypes")[0],
+                    entity.get("filter").get("queueIds", [-1])[0],
+                    entity.get("filter").get("mediaTypes", [-1])[0],
+                    entity.get("viewType"),
+                    entity.get("interval"),
+                    entity.get("status"),
                 ]
                 self.report_data.append(tmp)
             assert len(self.report_data) > 0
@@ -343,9 +358,39 @@ def download_all_reporting_exports(
             self.logger.info("IDS_MAPPING loaded from local credential.")
 
         for single_report in self.report_data:
-            file_name = (
-                temp_ids_mapping.get(single_report[2]) + "_" + single_report[-1]
-            ).upper()
+            self.logger.info(single_report)
+            if single_report[-1] == "RUNNING":
+                self.logger.warning(
+                    "The request is still in progress and will be deleted; consider adding more seconds to the `view_type_time_sleep` parameter."
+                )
+                continue
+            elif single_report[-1] == "FAILED":
+                self.logger.warning(
+                    "The message 'FAILED_GETTING_DATA_FROM_SERVICE' was raised during script execution."
+                )
+                continue
+            elif self.start_date not in single_report[5]:
+                self.logger.warning(
+                    f"The report with ID {single_report[0]} doesn't match the interval date you defined. \
+                    The report won't be downloaded but will be deleted."
+                )
+                continue
+
+            if single_report[4].lower() == "queue_performance_detail_view":
+                file_name = (
+                    temp_ids_mapping.get(single_report[2]) + "_" + single_report[3]
+                ).upper()
+            elif single_report[4].lower() in [
+                "agent_performance_summary_view",
+                "agent_status_summary_view",
+            ]:
+                date = self.start_date.replace("-", "")
+                file_name = self.view_type.upper() + "_" + f"{date}"
+            else:
+                self.logger.error(
+                    f"View type {self.view_type} is not yet defined in viadot."
+                )
+
             self.download_report(
                 report_url=single_report[1],
                 output_file_name=file_name,
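
To make the naming logic above concrete, a short worked example of the file name each branch produces; the `ids_mapping` entry is hypothetical.

```python
# queue_performance_detail_view: "<mapped queue name>_<media type>", upper-cased.
ids_mapping = {"1234567890": "sales_queue"}  # hypothetical queueId -> name mapping
file_name = (ids_mapping.get("1234567890") + "_" + "chat").upper()
print(file_name)  # SALES_QUEUE_CHAT

# agent_*_summary_view: "<VIEW_TYPE>_<YYYYMMDD>" derived from start_date.
start_date = "2022-08-12"
file_name = "agent_status_summary_view".upper() + "_" + start_date.replace("-", "")
print(file_name)  # AGENT_STATUS_SUMMARY_VIEW_20220812
```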