diff --git a/src/viadot/orchestration/prefect/flows/__init__.py b/src/viadot/orchestration/prefect/flows/__init__.py
index 7687baaa5..1ce76f59a 100644
--- a/src/viadot/orchestration/prefect/flows/__init__.py
+++ b/src/viadot/orchestration/prefect/flows/__init__.py
@@ -16,6 +16,7 @@
 from .mindful_to_adls import mindful_to_adls
 from .outlook_to_adls import outlook_to_adls
 from .salesforce_to_adls import salesforce_to_adls
+from .sap_bw_to_adls import sap_bw_to_adls
 from .sap_to_parquet import sap_to_parquet
 from .sap_to_redshift_spectrum import sap_to_redshift_spectrum
 from .sftp_to_adls import sftp_to_adls
@@ -47,6 +48,7 @@
     "mindful_to_adls",
     "outlook_to_adls",
     "salesforce_to_adls",
+    "sap_bw_to_adls",
     "sap_to_parquet",
     "sap_to_redshift_spectrum",
     "sftp_to_adls",
diff --git a/src/viadot/orchestration/prefect/flows/sap_bw_to_adls.py b/src/viadot/orchestration/prefect/flows/sap_bw_to_adls.py
new file mode 100644
index 000000000..7703ec068
--- /dev/null
+++ b/src/viadot/orchestration/prefect/flows/sap_bw_to_adls.py
@@ -0,0 +1,60 @@
+"""Flow for downloading data from the SAP BW API into Azure Data Lake Storage."""
+
+from typing import Any
+
+from prefect import flow
+from prefect.task_runners import ConcurrentTaskRunner
+
+from viadot.orchestration.prefect.tasks import df_to_adls, sap_bw_to_df
+
+
+@flow(
+    name="SAP BW extraction to ADLS",
+    description="Extract data from SAP BW and load it into Azure Data Lake Storage.",
+    retries=1,
+    retry_delay_seconds=60,
+    task_runner=ConcurrentTaskRunner,
+)
+def sap_bw_to_adls(
+    config_key: str | None = None,
+    azure_key_vault_secret: str | None = None,
+    mdx_query: str | None = None,
+    mapping_dict: dict[str, Any] | None = None,
+    adls_azure_key_vault_secret: str | None = None,
+    adls_config_key: str | None = None,
+    adls_path: str | None = None,
+    adls_path_overwrite: bool = False,
+) -> None:
+    """Flow for downloading data from the SAP BW API to Azure Data Lake.
+
+    Args:
+        config_key (str, optional): The key in the viadot config holding relevant
+            credentials. Defaults to None.
+        azure_key_vault_secret (str, optional): The name of the Azure Key Vault
+            secret where credentials are stored. Defaults to None.
+        mdx_query (str, optional): The MDX query to be passed to the connection.
+            Defaults to None.
+        mapping_dict (dict[str, Any], optional): Dictionary with original and new
+            column names. Defaults to None.
+        adls_azure_key_vault_secret (str, optional): The name of the Azure Key
+            Vault secret where ADLS credentials are stored. Defaults to None.
+        adls_config_key (str, optional): The key in the viadot config holding
+            relevant ADLS credentials. Defaults to None.
+        adls_path (str, optional): Azure Data Lake destination folder/catalog path.
+            Defaults to None.
+        adls_path_overwrite (bool, optional): Whether to overwrite the file in ADLS.
+            Defaults to False.
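+
+    Examples:
+        A minimal sketch; the secret names and the ADLS path below are
+        illustrative placeholders rather than values from a real config:
+
+            sap_bw_to_adls(
+                azure_key_vault_secret="sap-bw-credentials",
+                mdx_query="SELECT ... FROM SOME_CUBE",
+                adls_path="raw/sap_bw/output.parquet",
+                adls_azure_key_vault_secret="adls-credentials",
+                adls_path_overwrite=True,
+            )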
+ """ + data_frame = sap_bw_to_df( + config_key=config_key, + azure_key_vault_secret=azure_key_vault_secret, + mdx_query=mdx_query, + mapping_dict=mapping_dict, + ) + + return df_to_adls( + df=data_frame, + path=adls_path, + credentials_secret=adls_azure_key_vault_secret, + config_key=adls_config_key, + overwrite=adls_path_overwrite, + ) diff --git a/src/viadot/orchestration/prefect/tasks/__init__.py b/src/viadot/orchestration/prefect/tasks/__init__.py index 02b8669c3..25c37df7a 100644 --- a/src/viadot/orchestration/prefect/tasks/__init__.py +++ b/src/viadot/orchestration/prefect/tasks/__init__.py @@ -20,6 +20,7 @@ from .redshift_spectrum import df_to_redshift_spectrum from .s3 import s3_upload_file from .salesforce import salesforce_to_df +from .sap_bw import sap_bw_to_df from .sap_rfc import sap_rfc_to_df from .sftp import sftp_list, sftp_to_df from .sharepoint import sharepoint_download_file, sharepoint_to_df @@ -32,12 +33,12 @@ "bcp", "clone_repo", "cloud_for_customers_to_df", - "customer_gauge_to_df", - "df_to_databricks", "create_sql_server_table", + "customer_gauge_to_df", "dbt_task", "df_to_adls", "df_to_databricks", + "df_to_databricks", "df_to_minio", "df_to_redshift_spectrum", "duckdb_query", @@ -51,6 +52,7 @@ "outlook_to_df", "s3_upload_file", "salesforce_to_df", + "sap_bw_to_df", "sap_rfc_to_df", "sftp_list", "sftp_to_df", diff --git a/src/viadot/orchestration/prefect/tasks/sap_bw.py b/src/viadot/orchestration/prefect/tasks/sap_bw.py new file mode 100644 index 000000000..b10cdc3bb --- /dev/null +++ b/src/viadot/orchestration/prefect/tasks/sap_bw.py @@ -0,0 +1,54 @@ +"""Task to download data from SAP BW into a Pandas DataFrame.""" + +import contextlib +from typing import Any + +import pandas as pd +from prefect import task + +from viadot.orchestration.prefect.exceptions import MissingSourceCredentialsError +from viadot.orchestration.prefect.utils import get_credentials + + +with contextlib.suppress(ImportError): + from viadot.sources import SAPBW + + +@task(retries=3, log_prints=True, retry_delay_seconds=10, timeout_seconds=60 * 60) +def sap_bw_to_df( + mdx_query: str, + config_key: str | None = None, + azure_key_vault_secret: str | None = None, + mapping_dict: dict[str, Any] | None = None, +) -> pd.DataFrame: + """Task to download data from SAP BW to DataFrame. + + Args: + mdx_query (str, required): The MDX query to be passed to connection. + config_key (Optional[str], optional): The key in the viadot config holding + relevant credentials. Defaults to None. + azure_key_vault_secret (Optional[str], optional): The name of the Azure Key + Vault secret where credentials are stored. Defaults to None. + mapping_dict (dict[str, Any], optional): Dictionary with original and new + column names. Defaults to None. + + Raises: + MissingSourceCredentialsError: If none credentials have been provided. + + + Returns: + pd.DataFrame: The response data as a Pandas Data Frame. 
+ """ + if not (azure_key_vault_secret or config_key): + raise MissingSourceCredentialsError + + if not config_key: + credentials = get_credentials(azure_key_vault_secret) + + sap_bw = SAPBW( + credentials=credentials, + config_key=config_key, + ) + sap_bw.api_connection(mdx_query=mdx_query) + + return sap_bw.to_df(mapping_dict=mapping_dict) diff --git a/src/viadot/sources/__init__.py b/src/viadot/sources/__init__.py index 48e3184f8..3dc298ae3 100644 --- a/src/viadot/sources/__init__.py +++ b/src/viadot/sources/__init__.py @@ -30,10 +30,10 @@ "Hubspot", "Mediatool", "Mindful", - "Sftp", "Outlook", "SQLServer", "Salesforce", + "Sftp", "Sharepoint", "Supermetrics", "SupermetricsCredentials", # pragma: allowlist-secret @@ -61,9 +61,11 @@ __all__.extend(["MinIO"]) if find_spec("pyrfc"): + from viadot.sources.sap_bw import SAPBW # noqa: F401 from viadot.sources.sap_rfc import SAPRFC, SAPRFCV2 # noqa: F401 - __all__.extend(["SAPRFC", "SAPRFCV2"]) + __all__.extend(["SAPBW", "SAPRFC", "SAPRFCV2"]) + if find_spec("pyspark"): from viadot.sources.databricks import Databricks # noqa: F401 diff --git a/src/viadot/sources/sap_bw.py b/src/viadot/sources/sap_bw.py new file mode 100644 index 000000000..4da6de87f --- /dev/null +++ b/src/viadot/sources/sap_bw.py @@ -0,0 +1,223 @@ +"""SAP BW connector.""" + +import textwrap +from typing import Any + +import pandas as pd +from pydantic import BaseModel +import pyrfc + +from viadot.config import get_source_credentials +from viadot.exceptions import CredentialError, ValidationError +from viadot.sources.base import Source +from viadot.utils import add_viadot_metadata_columns + + +class SAPBWCredentials(BaseModel): + """Checking for values in SAP BW credentials dictionary. + + Two key values are held in the SAP BW connector: + - ashost: Indicates the host name or IP address of a specific SAP + application server. + - client: Specifies the SAP logon parameter client. + - passwd: Indicates the SAP logon parameter password. + - sysnr: Indicates the SAP system number—the 2-byte code that identifies the + system on the host. + - user: Indicates the SAP logon parameter user. + + Args: + BaseModel (pydantic.main.ModelMetaclass): A base class for creating + Pydantic models. + """ + + ashost: str + client: str + passwd: str + sysnr: str + user: str + + +class SAPBW(Source): + """Quering the SAP BW (SAP Business Warehouse) source using pyrfc library. + + Documentation to pyrfc can be found under: + https://sap.github.io/PyRFC/pyrfc.html + Documentation for SAP connection modules under: + https://www.se80.co.uk/sap-function-modules/list/?index=rsr_mdx + """ + + def __init__( + self, + *args, + credentials: SAPBWCredentials | None = None, + config_key: str = "sap_bw", + **kwargs, + ): + """Create an instance of SAP BW. + + Args: + credentials (Optional[SAPBWCredentials], optional): SAP BW credentials. + Defaults to None. + config_key (str, optional): The key in the viadot config holding relevant + credentials. Defaults to "sap_bw". + + Examples: + sap_bw = SAPBW( + credentials=credentials, + config_key=config_key, + ) + sap_bw.api_connection( + ... + ) + data_frame = sap_bw.to_df() + + Raises: + CredentialError: If credentials are not provided in local_config or + directly as a parameter. + """ + credentials = credentials or get_source_credentials(config_key) or None + if credentials is None: + message = "Missing credentials." 
+    """
+
+    ashost: str
+    client: str
+    passwd: str
+    sysnr: str
+    user: str
+
+
+class SAPBW(Source):
+    """Query the SAP BW (SAP Business Warehouse) source using the pyrfc library.
+
+    Documentation for pyrfc can be found at:
+        https://sap.github.io/PyRFC/pyrfc.html
+    Documentation for the SAP connection modules can be found at:
+        https://www.se80.co.uk/sap-function-modules/list/?index=rsr_mdx
+    """
+
+    def __init__(
+        self,
+        *args,
+        credentials: SAPBWCredentials | None = None,
+        config_key: str = "sap_bw",
+        **kwargs,
+    ):
+        """Create an instance of SAP BW.
+
+        Args:
+            credentials (SAPBWCredentials, optional): SAP BW credentials.
+                Defaults to None.
+            config_key (str, optional): The key in the viadot config holding
+                relevant credentials. Defaults to "sap_bw".
+
+        Examples:
+            sap_bw = SAPBW(
+                credentials=credentials,
+                config_key=config_key,
+            )
+            sap_bw.api_connection(
+                ...
+            )
+            data_frame = sap_bw.to_df()
+
+        Raises:
+            CredentialError: If credentials are not provided in the viadot config
+                or directly as a parameter.
+        """
+        credentials = credentials or get_source_credentials(config_key)
+        if credentials is None:
+            message = "Missing credentials."
+            raise CredentialError(message)
+        self.credentials = credentials
+
+        validated_creds = dict(SAPBWCredentials(**credentials))
+        super().__init__(*args, credentials=validated_creds, **kwargs)
+
+        self.query_output = None
+
+    def _create_connection(self) -> pyrfc.Connection:
+        """Create the connection with SAP BW.
+
+        Returns:
+            pyrfc.Connection: Connection to SAP.
+        """
+        return pyrfc.Connection(
+            ashost=self.credentials.get("ashost"),
+            sysnr=self.credentials.get("sysnr"),
+            user=self.credentials.get("user"),
+            passwd=self.credentials.get("passwd"),
+            client=self.credentials.get("client"),
+        )
+
+    def api_connection(self, mdx_query: str) -> None:
+        """Generate the SAP BW output dataset from an MDX query.
+
+        Args:
+            mdx_query (str): The MDX query to be passed to the connection.
+        """
+        conn = self._create_connection()
+
+        # The RFC module takes the command text as a table of lines, so the
+        # MDX statement is wrapped at 75 characters.
+        query = textwrap.wrap(mdx_query, 75)
+        properties = conn.call("RSR_MDX_CREATE_OBJECT", COMMAND_TEXT=query)
+
+        datasetid = properties["DATASETID"]
+        self.query_output = conn.call("RSR_MDX_GET_FLAT_DATA", DATASETID=datasetid)
+        conn.close()
+
+    def _apply_user_mapping(
+        self,
+        df: pd.DataFrame,
+        mapping_dict: dict[str, Any] | None = None,
+    ) -> pd.DataFrame:
+        """Apply the column mapping defined by the user to the output DataFrame.
+
+        The DataFrame will be cut to the selected columns - if any other columns
+        need to be included in the output file, please add them to the mapping
+        dictionary with their original names.
+
+        Args:
+            df (pd.DataFrame): Input DataFrame for the column mapping task.
+            mapping_dict (dict[str, Any], optional): Dictionary with original and
+                new column names. Defaults to None.
+
+        Returns:
+            pd.DataFrame: Output DataFrame with mapped columns.
+        """
+        self.logger.info("Applying user-defined mapping for columns...")
+        # Keep only the mapped columns, then rename them to the new names.
+        df = df[list(mapping_dict.keys())]
+        df.columns = list(mapping_dict.values())
+
+        self.logger.info("Successfully applied user mapping.")
+
+        return df
+
+    @add_viadot_metadata_columns
+    def to_df(
+        self,
+        if_empty: str = "warn",
+        mapping_dict: dict[str, Any] | None = None,
+    ) -> pd.DataFrame:
+        """Convert the SAP BW output data into a DataFrame.
+
+        Args:
+            if_empty (str, optional): What to do if a fetch produces no data.
+                Defaults to "warn".
+            mapping_dict (dict[str, Any], optional): Dictionary with original and
+                new column names. Defaults to None.
+
+        Raises:
+            ValidationError: Raised with the original SAP error message in case of
+                issues with MDX execution.
+
+        Returns:
+            pd.DataFrame: The response data as a pandas DataFrame, enriched
+                with viadot metadata columns.
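+
+        Examples:
+            An illustrative sketch of the flat structure this method consumes;
+            the values are made up, but the keys mirror the ones read below:
+
+                query_output = {
+                    "RETURN": {"MESSAGE": ""},
+                    "HEADER": [{"DATA": "[0CALDAY].[LEVEL01].[MEMBER_NAME]"}],
+                    "DATA": [{"ROW": 0, "COLUMN": 0, "DATA": "20240101"}],
+                }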
+        """
+        raw_data = {}
+
+        if self.query_output["RETURN"]["MESSAGE"] == "":
+            results = self.query_output["DATA"]
+            # Rebuild the flat cell list into rows; cells whose DATA still
+            # contains a "].[" member path are skipped.
+            for cell in results:
+                if cell["ROW"] not in raw_data:
+                    raw_data[cell["ROW"]] = {}
+                if "].[" not in cell["DATA"]:
+                    raw_data[cell["ROW"]][cell["COLUMN"]] = cell["DATA"]
+            rows = [raw_data[row] for row in raw_data]
+            cols = [x["DATA"] for x in self.query_output["HEADER"]]
+
+            data_frame = pd.DataFrame(data=rows)
+            data_frame.columns = cols
+        else:
+            data_frame = pd.DataFrame()
+            raise ValidationError(self.query_output["RETURN"]["MESSAGE"])
+
+        if mapping_dict:
+            data_frame = self._apply_user_mapping(data_frame, mapping_dict)
+
+        if data_frame.empty:
+            self._handle_if_empty(
+                if_empty=if_empty,
+                message="The response does not contain any data.",
+            )
+        else:
+            self.logger.info("Successfully downloaded data from SAP BW.")
+
+        return data_frame
+
+    def get_available_columns(self, mdx_query: str) -> list[str]:
+        """Generate a list of all available columns in a SAP BW table.
+
+        Args:
+            mdx_query (str): The MDX query to be passed to the connection.
+
+        Returns:
+            list[str]: List of all available columns in the source table.
+        """
+        conn = self._create_connection()
+        query = textwrap.wrap(mdx_query, width=75)
+
+        properties = conn.call("RSR_MDX_CREATE_STORED_OBJECT", COMMAND_TEXT=query)
+        datasetid = properties["DATASETID"]
+
+        if properties["RETURN"]["MESSAGE"] == "":
+            get_axis_info = conn.call("RSR_MDX_GET_AXIS_INFO", DATASETID=datasetid)
+            cols = get_axis_info["AXIS_DIMENSIONS"]
+
+            all_available_columns = [x["DIM_UNAM"] for x in cols]
+        else:
+            all_available_columns = []
+            self.logger.error(properties["RETURN"]["MESSAGE"])
+
+        return all_available_columns
diff --git a/tests/integration/orchestration/prefect/flows/test_sap_bw.py b/tests/integration/orchestration/prefect/flows/test_sap_bw.py
new file mode 100644
index 000000000..33a7f1b9e
--- /dev/null
+++ b/tests/integration/orchestration/prefect/flows/test_sap_bw.py
@@ -0,0 +1,176 @@
+"""'test_sap_bw.py'."""
+
+from datetime import date, timedelta
+
+from viadot.orchestration.prefect.flows import sap_bw_to_adls
+
+
+present_day = date.today()
+present_day_str = present_day.strftime("%Y%m%d")
+past_day = date.today() - timedelta(days=7)
+past_day_str = past_day.strftime("%Y%m%d")
+mdx_query = f"""
+    SELECT
+        {{[Measures].[003YPR44RQTVKWX9OL316XK7J],
+        [Measures].[003YPR44RQTVKWX9T5BFEWUY9],
+        [Measures].[003YPR44RQTVKWX9UW92X7ELV],
+        [Measures].[003YPR44RQTVKWX9YLKYZKH5O],
+        [Measures].[003YPR44RQTVKWXA51D4J8HRZ],
+        [Measures].[003YPR44RQTVKWXAEL7KFSJ6U],
+        [Measures].[003YPR44RQTVKWXARA4PVZ9TK]}}
+    ON COLUMNS,
+    NON EMPTY
+        {{ [0BILLTOPRTY].[LEVEL01].MEMBERS *
+        [0CALMONTH__0CALMONTH2].[LEVEL01].MEMBERS *
+        [0COMP_CODE].[LEVEL01].MEMBERS *
+        [0COMP_CODE__ZCOMPCOTE].[LEVEL01].MEMBERS *
+        [0CREATEDBY].[LEVEL01].MEMBERS *
+        [0CREATEDON].[LEVEL01].MEMBERS *
+        [0DISTR_CHAN].[LEVEL01].MEMBERS *
+        [0DOC_NUMBER].[LEVEL01].MEMBERS *
+        [0DOC_TYPE].[LEVEL01].MEMBERS *
+        [0IMODOCCAT].[LEVEL01].MEMBERS *
+        [0MATERIAL__ZDSPPRIC].[LEVEL01].MEMBERS *
+        [0MATERIAL__ZPRDGRP].[LEVEL01].MEMBERS *
+        [0MATERIAL__ZPANEVACD].[LEVEL01].MEMBERS *
+        [0MATERIAL__ZPRODAREA].[LEVEL01].MEMBERS *
+        [0MATERIAL__ZRDBTYPE].[LEVEL01].MEMBERS *
+        [0MATERIAL__ZTYPEVAR].[LEVEL01].MEMBERS *
+        [0MATERIAL__ZVAR_C34].[LEVEL01].MEMBERS *
+        [0MATERIAL__ZVAR_CH1].[LEVEL01].MEMBERS *
+        [0MATERIAL__ZVAR_CH2].[LEVEL01].MEMBERS *
+        [0MATERIAL__ZVCIVAR2].[LEVEL01].MEMBERS *
+        [0ORD_REASON].[LEVEL01].MEMBERS *
+        [0PAYER].[LEVEL01].MEMBERS *
+        [0REASON_REJ].[LEVEL01].MEMBERS *
+        [0SALESORG].[LEVEL01].MEMBERS *
+        [0SHIP_TO].[LEVEL01].MEMBERS *
+        [0SOLD_TO].[LEVEL01].MEMBERS *
+        [0SOLD_TO__0ACCNT_GRP].[LEVEL01].MEMBERS *
+        [0USAGE_IND].[LEVEL01].MEMBERS *
+        [0USAGE_IND__ZSALECAT].[LEVEL01].MEMBERS *
+        [ZASE_ID].[LEVEL01].MEMBERS *
+        [ZBVTSPRST].[LEVEL01].MEMBERS *
+        [ZCALWEEK].[LEVEL01].MEMBERS *
+        [ZORD_CREA].[LEVEL01].MEMBERS *
+        [ZPONUMBER].[LEVEL01].MEMBERS *
+        [ZPUORTYPE].[LEVEL01].MEMBERS *
+        [ZSEG_HDR].[LEVEL01].MEMBERS *
+        [0SALESEMPLY].[LEVEL01].MEMBERS *
+        [0SHIP_TO__0ACCNT_GRP].[LEVEL01].MEMBERS *
+        [0SHIP_TO__0CITY].[LEVEL01].MEMBERS *
+        [0CALYEAR].[LEVEL01].MEMBERS *
+        [0CALMONTH].[LEVEL01].MEMBERS *
+        {{[0CALDAY].[{past_day_str}] : [0CALDAY].[{present_day_str}]}}}}
+    DIMENSION PROPERTIES
+        MEMBER_NAME,
+        MEMBER_CAPTION
+    ON ROWS
+    FROM ZCSALORD1/ZBW4_ZCSALORD1_002_BOA
+"""
+
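+# Column mapping applied by the flow below: the keys are the member/measure
+# identifiers SAP BW returns for the query above, the values are the desired
+# output column names.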
"[0CALMONTH__0CALMONTH2].[LEVEL01].[MEMBER_CAPTION]": "calendar_month_2", + "[0CALMONTH__0CALMONTH2].[LEVEL01].[MEMBER_NAME]": "calendar_month_id", + "[0COMP_CODE].[LEVEL01].[MEMBER_CAPTION]": "company_code", + "[0COMP_CODE].[LEVEL01].[MEMBER_NAME]": "company_code_name", + "[0COMP_CODE__ZCOMPCOTE].[LEVEL01].[MEMBER_CAPTION]": "company_code_cons_term", + "[0COMP_CODE__ZCOMPCOTE].[LEVEL01].[MEMBER_NAME]": "company_code_cons_term_name", + "[0CREATEDBY].[LEVEL01].[MEMBER_CAPTION]": "created_by", + "[0CREATEDBY].[LEVEL01].[MEMBER_NAME]": "created_by_name", + "[0CREATEDON].[LEVEL01].[MEMBER_CAPTION]": "created_on", + "[0CREATEDON].[LEVEL01].[MEMBER_NAME]": "created_on_name", + "[0DISTR_CHAN].[LEVEL01].[MEMBER_CAPTION]": "distribution_channel", + "[0DISTR_CHAN].[LEVEL01].[MEMBER_NAME]": "distribution_channel_name", + "[0DOC_NUMBER].[LEVEL01].[MEMBER_CAPTION]": "sales_document", + "[0DOC_NUMBER].[LEVEL01].[MEMBER_NAME]": "sales_document_name", + "[0DOC_TYPE].[LEVEL01].[MEMBER_CAPTION]": "sales_doc_type", + "[0DOC_TYPE].[LEVEL01].[MEMBER_NAME]": "sales_doc_type_name", + "[0IMODOCCAT].[LEVEL01].[MEMBER_CAPTION]": "sales_document_categ", + "[0IMODOCCAT].[LEVEL01].[MEMBER_NAME]": "sales_document_categ_name", + "[0MATERIAL__ZDSPPRIC].[LEVEL01].[MEMBER_CAPTION]": "dsp_pricing_group", + "[0MATERIAL__ZDSPPRIC].[LEVEL01].[MEMBER_NAME]": "dsp_pricing_group_name", + "[0MATERIAL__ZPANEVACD].[LEVEL01].[MEMBER_CAPTION]": "pane_variant_code", + "[0MATERIAL__ZPANEVACD].[LEVEL01].[MEMBER_NAME]": "pane_variant_code_name", + "[0MATERIAL__ZPRDGRP].[LEVEL01].[MEMBER_CAPTION]": "product_group", + "[0MATERIAL__ZPRDGRP].[LEVEL01].[MEMBER_NAME]": "product_group_name", + "[0MATERIAL__ZPRODAREA].[LEVEL01].[MEMBER_CAPTION]": "product_area", + "[0MATERIAL__ZPRODAREA].[LEVEL01].[MEMBER_NAME]": "product_area_name", + "[0MATERIAL__ZRDBTYPE].[LEVEL01].[MEMBER_CAPTION]": "type_of_material", + "[0MATERIAL__ZRDBTYPE].[LEVEL01].[MEMBER_NAME]": "type_of_material_name", + "[0MATERIAL__ZTYPEVAR].[LEVEL01].[MEMBER_CAPTION]": "material_type_variant", + "[0MATERIAL__ZTYPEVAR].[LEVEL01].[MEMBER_NAME]": "material_type_variant_name", + "[0MATERIAL__ZVAR_C34].[LEVEL01].[MEMBER_CAPTION]": "3_and_4_character", + "[0MATERIAL__ZVAR_C34].[LEVEL01].[MEMBER_NAME]": "3_and_4_character_name", + "[0MATERIAL__ZVAR_CH1].[LEVEL01].[MEMBER_CAPTION]": "1_character_of_vari", + "[0MATERIAL__ZVAR_CH1].[LEVEL01].[MEMBER_NAME]": "1_character_of_vari_name", + "[0MATERIAL__ZVAR_CH2].[LEVEL01].[MEMBER_CAPTION]": "2_character_of_vari", + "[0MATERIAL__ZVAR_CH2].[LEVEL01].[MEMBER_NAME]": "2_character_of_vari_name", + "[0MATERIAL__ZVCIVAR2].[LEVEL01].[MEMBER_CAPTION]": "product_variant", + "[0MATERIAL__ZVCIVAR2].[LEVEL01].[MEMBER_NAME]": "product_variant_name", + "[0ORD_REASON].[LEVEL01].[MEMBER_CAPTION]": "reason_for_order", + "[0ORD_REASON].[LEVEL01].[MEMBER_NAME]": "reason_for_order_name", + "[0PAYER].[LEVEL01].[MEMBER_CAPTION]": "payer", + "[0PAYER].[LEVEL01].[MEMBER_NAME]": "payer_name", + "[0REASON_REJ].[LEVEL01].[MEMBER_CAPTION]": "reason_for_rejection", + "[0REASON_REJ].[LEVEL01].[MEMBER_NAME]": "reason_for_rejection_name", + "[0SALESORG].[LEVEL01].[MEMBER_CAPTION]": "sales_organization", + "[0SALESORG].[LEVEL01].[MEMBER_NAME]": "sales_organization_name", + "[0SHIP_TO].[LEVEL01].[MEMBER_CAPTION]": "ship_to_party", + "[0SHIP_TO].[LEVEL01].[MEMBER_NAME]": "ship_to_party_name", + "[0SOLD_TO].[LEVEL01].[MEMBER_CAPTION]": "sold_to_party", + "[0SOLD_TO].[LEVEL01].[MEMBER_NAME]": "sold_to_party_name", + "[0SOLD_TO__0ACCNT_GRP].[LEVEL01].[MEMBER_CAPTION]": 
"customer_account_group_sold_to", + "[0SOLD_TO__0ACCNT_GRP].[LEVEL01].[MEMBER_NAME]": "customer_account_group_sold_to_name", + "[0USAGE_IND].[LEVEL01].[MEMBER_CAPTION]": "usage_indicator", + "[0USAGE_IND].[LEVEL01].[MEMBER_NAME]": "usage_indicator_name", + "[0USAGE_IND__ZSALECAT].[LEVEL01].[MEMBER_CAPTION]": "sales_cat_usage", + "[0USAGE_IND__ZSALECAT].[LEVEL01].[MEMBER_NAME]": "sales_cat_usage_name", + "[ZASE_ID].[LEVEL01].[MEMBER_CAPTION]": "ase_id", + "[ZASE_ID].[LEVEL01].[MEMBER_NAME]": "ase_id_name", + "[ZBVTSPRST].[LEVEL01].[MEMBER_CAPTION]": "order_status", + "[ZBVTSPRST].[LEVEL01].[MEMBER_NAME]": "order_status_name", + "[ZCALWEEK].[LEVEL01].[MEMBER_CAPTION]": "calendar_week", + "[ZCALWEEK].[LEVEL01].[MEMBER_NAME]": "calendar_week_name", + "[ZORD_CREA].[LEVEL01].[MEMBER_CAPTION]": "order_creation_date", + "[ZORD_CREA].[LEVEL01].[MEMBER_NAME]": "order_creation_date_name", + "[ZPONUMBER].[LEVEL01].[MEMBER_CAPTION]": "po_number", + "[ZPONUMBER].[LEVEL01].[MEMBER_NAME]": "po_number_name", + "[ZPUORTYPE].[LEVEL01].[MEMBER_CAPTION]": "purchase_order_type", + "[ZPUORTYPE].[LEVEL01].[MEMBER_NAME]": "purchase_order_type_name", + "[ZSEG_HDR].[LEVEL01].[MEMBER_CAPTION]": "segment_header", + "[ZSEG_HDR].[LEVEL01].[MEMBER_NAME]": "segment_header_name", + "[0SALESEMPLY].[LEVEL01].[MEMBER_CAPTION]": "sales_representative", + "[0SALESEMPLY].[LEVEL01].[MEMBER_NAME]": "sales_representative_name", + "[0SHIP_TO__0ACCNT_GRP].[LEVEL01].[MEMBER_CAPTION]": "customer_account_group_ship_to", + "[0SHIP_TO__0ACCNT_GRP].[LEVEL01].[MEMBER_NAME]": "customer_account_group_ship_to_name", + "[0SHIP_TO__0CITY].[LEVEL01].[MEMBER_CAPTION]": "location_ship_to", + "[0SHIP_TO__0CITY].[LEVEL01].[MEMBER_NAME]": "location_ship_to_name", + "[0CALDAY].[LEVEL01].[MEMBER_CAPTION]": "calendar_day", + "[0CALDAY].[LEVEL01].[MEMBER_NAME]": "calendar_day_name", + "[0CALMONTH].[LEVEL01].[MEMBER_CAPTION]": "calendar_month", + "[0CALMONTH].[LEVEL01].[MEMBER_NAME]": "calendar_month_name", + "[0CALYEAR].[LEVEL01].[MEMBER_CAPTION]": "calendar_year", + "[0CALYEAR].[LEVEL01].[MEMBER_NAME]": "calendar_year_name", + "[Measures].[003YPR44RQTVKWX9OL316XK7J]": "net_value", + "[Measures].[003YPR44RQTVKWX9T5BFEWUY9]": "order_quantity", + "[Measures].[003YPR44RQTVKWX9UW92X7ELV]": "open_orders_quantity", + "[Measures].[003YPR44RQTVKWX9YLKYZKH5O]": "number_of_sales_orders", + "[Measures].[003YPR44RQTVKWXA51D4J8HRZ]": "number_of_quotations", + "[Measures].[003YPR44RQTVKWXAEL7KFSJ6U]": "number_of_orders_created_from_quotations", + "[Measures].[003YPR44RQTVKWXARA4PVZ9TK]": "number_of_quotations_expired_validity_date", +} + + +def test_sap_bw_to_adls(sap_bw_config_key, adls_credentials_secret): + state = sap_bw_to_adls( + azure_key_vault_secret=sap_bw_config_key, + mdx_query=mdx_query, + mapping_dict=mapping_dict, + adls_path="raw/dyvenia_sandbox/sap_bw/sab_bw.parquet", + adls_azure_key_vault_secret=adls_credentials_secret, + adls_path_overwrite=True, + ) + assert state.is_successful()