Merge pull request #30 from dqops/1.4.1
1.4.1
dqops authored Jun 5, 2024
2 parents 8dd2301 + 3d59b1c commit ecd2f99
Showing 135 changed files with 2,933 additions and 786 deletions.
2 changes: 1 addition & 1 deletion .run/dqo run.run.xml
@@ -5,7 +5,7 @@
<option name="region" />
<option name="useCurrentConnection" value="false" />
</extension>
<option name="JAR_PATH" value="$PROJECT_DIR$/dqops/target/dqo-dqops-1.4.0.jar" />
<option name="JAR_PATH" value="$PROJECT_DIR$/dqops/target/dqo-dqops-1.4.1.jar" />
<option name="VM_PARAMETERS" value="-XX:MaxRAMPercentage=60.0 --add-opens java.base/java.nio=ALL-UNNAMED --add-opens java.base/java.util.concurrent=ALL-UNNAMED" />
<option name="PROGRAM_PARAMETERS" value="--server.port=8888" />
<option name="WORKING_DIRECTORY" value="C:\dev\dqoado" />
13 changes: 6 additions & 7 deletions CHANGELOG.md
@@ -1,7 +1,6 @@
# 1.4.0
* Ensure that only one instance of DuckDB engine is loaded
* Profiling and monitoring checks do not render the group by time period (current time)
* Small UI fixes
* Default check patterns (profiles) support disabling and targeting multiple tables and columns
* Global search screens to show tables and columns, filtered by labels
* Data quality check recalibration for failed data quality checks initiated from an incident details screen
# 1.4.1
* Small fixes to the code that calculates the current data quality status of tables and columns
* Fixed permalinks to screens
* Small changes to handling empty tables in column level data quality checks
* Incident generation fixed for monitoring checks (full table scan checks)

2 changes: 1 addition & 1 deletion VERSION
@@ -1 +1 @@
1.4.0
1.4.1
2 changes: 1 addition & 1 deletion distribution/pom.xml
@@ -11,7 +11,7 @@

<groupId>com.dqops</groupId>
<artifactId>dqo-distribution</artifactId>
<version>1.4.0</version> <!-- DQOps Version, do not touch (changed automatically) -->
<version>1.4.1</version> <!-- DQOps Version, do not touch (changed automatically) -->
<name>dqo-distribution</name>
<description>DQOps Data Quality Operations Center final assembly</description>
<packaging>pom</packaging>
@@ -6,21 +6,29 @@
from ... import errors
from ...client import AuthenticatedClient, Client
from ...models.remote_table_list_model import RemoteTableListModel
from ...types import Response
from ...types import UNSET, Response, Unset


def _get_kwargs(
connection_name: str,
schema_name: str,
*,
table_name_contains: Union[Unset, None, str] = UNSET,
) -> Dict[str, Any]:
pass

params: Dict[str, Any] = {}
params["tableNameContains"] = table_name_contains

params = {k: v for k, v in params.items() if v is not UNSET and v is not None}

return {
"method": "get",
"url": "api/datasource/connections/{connectionName}/schemas/{schemaName}/tables".format(
connectionName=connection_name,
schemaName=schema_name,
),
"params": params,
}


@@ -58,6 +66,7 @@ def sync_detailed(
schema_name: str,
*,
client: AuthenticatedClient,
table_name_contains: Union[Unset, None, str] = UNSET,
) -> Response[List["RemoteTableListModel"]]:
"""getRemoteDataSourceTables
@@ -67,6 +76,7 @@ def sync_detailed(
Args:
connection_name (str):
schema_name (str):
table_name_contains (Union[Unset, None, str]):
Raises:
errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
@@ -79,6 +89,7 @@ def sync_detailed(
kwargs = _get_kwargs(
connection_name=connection_name,
schema_name=schema_name,
table_name_contains=table_name_contains,
)

response = client.get_httpx_client().request(
@@ -93,6 +104,7 @@ def sync(
schema_name: str,
*,
client: AuthenticatedClient,
table_name_contains: Union[Unset, None, str] = UNSET,
) -> Optional[List["RemoteTableListModel"]]:
"""getRemoteDataSourceTables
@@ -102,6 +114,7 @@ def sync(
Args:
connection_name (str):
schema_name (str):
table_name_contains (Union[Unset, None, str]):
Raises:
errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
@@ -115,6 +128,7 @@ def sync(
connection_name=connection_name,
schema_name=schema_name,
client=client,
table_name_contains=table_name_contains,
).parsed


@@ -123,6 +137,7 @@ async def asyncio_detailed(
schema_name: str,
*,
client: AuthenticatedClient,
table_name_contains: Union[Unset, None, str] = UNSET,
) -> Response[List["RemoteTableListModel"]]:
"""getRemoteDataSourceTables
@@ -132,6 +147,7 @@ async def asyncio_detailed(
Args:
connection_name (str):
schema_name (str):
table_name_contains (Union[Unset, None, str]):
Raises:
errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
Expand All @@ -144,6 +160,7 @@ async def asyncio_detailed(
kwargs = _get_kwargs(
connection_name=connection_name,
schema_name=schema_name,
table_name_contains=table_name_contains,
)

response = await client.get_async_httpx_client().request(**kwargs)
@@ -156,6 +173,7 @@ async def asyncio(
schema_name: str,
*,
client: AuthenticatedClient,
table_name_contains: Union[Unset, None, str] = UNSET,
) -> Optional[List["RemoteTableListModel"]]:
"""getRemoteDataSourceTables
@@ -165,6 +183,7 @@ async def asyncio(
Args:
connection_name (str):
schema_name (str):
table_name_contains (Union[Unset, None, str]):
Raises:
errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
@@ -179,5 +198,6 @@ async def asyncio(
connection_name=connection_name,
schema_name=schema_name,
client=client,
table_name_contains=table_name_contains,
)
).parsed
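
Below is a minimal usage sketch of the new table_name_contains filter through the generated Python client. The module path and the AuthenticatedClient construction are assumptions (the file path is collapsed in this view); only the sync(connection_name, schema_name, *, client, table_name_contains) signature is taken from the diff above.

```python
# Hypothetical usage of the new table_name_contains filter.
# The import path and client construction below are assumptions; only the
# sync(...) signature comes from the diff above.
from dqops.client import AuthenticatedClient
from dqops.client.api.data_sources import get_remote_data_source_tables  # assumed module path

client = AuthenticatedClient(
    base_url="http://localhost:8888",  # assumed local DQOps instance
    token="my-api-key",                # assumed API key
)

# Return only remote tables whose names contain the substring "fact"
# (sent to the server as the tableNameContains query parameter).
tables = get_remote_data_source_tables.sync(
    "my_connection",
    "public",
    client=client,
    table_name_contains="fact",
)

for table in tables or []:
    print(table)
```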
6 changes: 6 additions & 0 deletions distribution/python/dqops/client/models/__init__.py
@@ -1922,6 +1922,9 @@
from .time_period_gradient import TimePeriodGradient
from .time_window_filter_parameters import TimeWindowFilterParameters
from .timestamp_columns_spec import TimestampColumnsSpec
from .top_incident_grouping import TopIncidentGrouping
from .top_incidents_model import TopIncidentsModel
from .top_incidents_model_top_incidents import TopIncidentsModelTopIncidents
from .trino_engine_type import TrinoEngineType
from .trino_parameters_spec import TrinoParametersSpec
from .trino_parameters_spec_properties import TrinoParametersSpecProperties
@@ -2812,6 +2815,9 @@
"TimePeriodGradient",
"TimestampColumnsSpec",
"TimeWindowFilterParameters",
"TopIncidentGrouping",
"TopIncidentsModel",
"TopIncidentsModelTopIncidents",
"TrinoEngineType",
"TrinoParametersSpec",
"TrinoParametersSpecProperties",
@@ -12,20 +12,21 @@
class ImportSchemaQueueJobParameters:
"""
Attributes:
connection_name (Union[Unset, str]):
schema_name (Union[Unset, str]):
table_name_pattern (Union[Unset, str]):
connection_name (Union[Unset, str]): Connection name where the tables are imported.
schema_name (Union[Unset, str]): Source schema name from which the tables are imported.
table_name_contains (Union[Unset, str]): Optional filter for the names of tables to import, it is a text
(substring) that must be present inside table names. This filter is case sensitive.
"""

connection_name: Union[Unset, str] = UNSET
schema_name: Union[Unset, str] = UNSET
table_name_pattern: Union[Unset, str] = UNSET
table_name_contains: Union[Unset, str] = UNSET
additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

def to_dict(self) -> Dict[str, Any]:
connection_name = self.connection_name
schema_name = self.schema_name
table_name_pattern = self.table_name_pattern
table_name_contains = self.table_name_contains

field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
@@ -34,8 +35,8 @@ def to_dict(self) -> Dict[str, Any]:
field_dict["connectionName"] = connection_name
if schema_name is not UNSET:
field_dict["schemaName"] = schema_name
if table_name_pattern is not UNSET:
field_dict["tableNamePattern"] = table_name_pattern
if table_name_contains is not UNSET:
field_dict["tableNameContains"] = table_name_contains

return field_dict

@@ -46,12 +47,12 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:

schema_name = d.pop("schemaName", UNSET)

table_name_pattern = d.pop("tableNamePattern", UNSET)
table_name_contains = d.pop("tableNameContains", UNSET)

import_schema_queue_job_parameters = cls(
connection_name=connection_name,
schema_name=schema_name,
table_name_pattern=table_name_pattern,
table_name_contains=table_name_contains,
)

import_schema_queue_job_parameters.additional_properties = d
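
A short sketch of the renamed field in use, assuming the model is importable from dqops.client.models like the other generated models; the constructor arguments and the tableNameContains key come from the diff above.

```python
# Sketch of the renamed filter field on the schema-import job parameters.
# The import path is an assumption; the field names come from the diff above.
from dqops.client.models import ImportSchemaQueueJobParameters

params = ImportSchemaQueueJobParameters(
    connection_name="my_connection",
    schema_name="public",
    table_name_contains="dim_",  # case-sensitive substring filter
)

# The serialized payload now uses "tableNameContains" instead of "tableNamePattern".
payload = params.to_dict()
assert payload["tableNameContains"] == "dim_"

# Round-tripping through from_dict restores the same value.
restored = ImportSchemaQueueJobParameters.from_dict(payload)
assert restored.table_name_contains == "dim_"
```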
@@ -14,18 +14,22 @@ class ImportTablesQueueJobParameters:
Attributes:
connection_name (Union[Unset, str]): Connection name
schema_name (Union[Unset, str]): Schema name
table_name_contains (Union[Unset, str]): Optional filter for the table names to import. The table names that are
imported must contain a substring matching this parameter. This filter is case sensitive.
table_names (Union[Unset, List[str]]): Optional list of table names inside the schema. When the list of tables
is empty, all tables are imported.
"""

connection_name: Union[Unset, str] = UNSET
schema_name: Union[Unset, str] = UNSET
table_name_contains: Union[Unset, str] = UNSET
table_names: Union[Unset, List[str]] = UNSET
additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

def to_dict(self) -> Dict[str, Any]:
connection_name = self.connection_name
schema_name = self.schema_name
table_name_contains = self.table_name_contains
table_names: Union[Unset, List[str]] = UNSET
if not isinstance(self.table_names, Unset):
table_names = self.table_names
@@ -37,6 +41,8 @@ def to_dict(self) -> Dict[str, Any]:
field_dict["connectionName"] = connection_name
if schema_name is not UNSET:
field_dict["schemaName"] = schema_name
if table_name_contains is not UNSET:
field_dict["tableNameContains"] = table_name_contains
if table_names is not UNSET:
field_dict["tableNames"] = table_names

@@ -49,11 +55,14 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:

schema_name = d.pop("schemaName", UNSET)

table_name_contains = d.pop("tableNameContains", UNSET)

table_names = cast(List[str], d.pop("tableNames", UNSET))

import_tables_queue_job_parameters = cls(
connection_name=connection_name,
schema_name=schema_name,
table_name_contains=table_name_contains,
table_names=table_names,
)

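
An analogous sketch for the table-import job parameters, combining the new substring filter with an explicit table list; again the import path is assumed, while the constructor arguments and JSON keys are taken from the diff above.

```python
# Sketch combining the new substring filter with an explicit table list.
# The import path is an assumption; field names come from the diff above.
from dqops.client.models import ImportTablesQueueJobParameters

params = ImportTablesQueueJobParameters(
    connection_name="my_connection",
    schema_name="public",
    table_name_contains="sales",                 # case-sensitive substring filter
    table_names=["sales_2023", "sales_2024"],    # empty/unset imports all tables
)

# Unset fields are skipped, so only the provided values appear in the payload:
# {'connectionName': 'my_connection', 'schemaName': 'public',
#  'tableNameContains': 'sales', 'tableNames': ['sales_2023', 'sales_2024']}
print(params.to_dict())
```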
10 changes: 10 additions & 0 deletions distribution/python/dqops/client/models/top_incident_grouping.py
@@ -0,0 +1,10 @@
from enum import Enum


class TopIncidentGrouping(str, Enum):
CATEGORY = "category"
CONNECTION = "connection"
DIMENSION = "dimension"

def __str__(self) -> str:
return str(self.value)
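
Because TopIncidentGrouping mixes in str, its members compare equal to their string values and serialize cleanly; a tiny sketch, using the dqops.client.models export added to __init__.py in this commit:

```python
from dqops.client.models import TopIncidentGrouping

grouping = TopIncidentGrouping.DIMENSION
assert str(grouping) == "dimension"            # __str__ returns the raw value
assert grouping == "dimension"                 # str mixin: members equal their values
assert TopIncidentGrouping("category") is TopIncidentGrouping.CATEGORY  # lookup by value
```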