Merge pull request #44 from dqops/1.10.0
1.10.0
dqops authored Nov 3, 2024
2 parents 64038d0 + f905d83 commit 838d2ed
Showing 3,301 changed files with 467,232 additions and 22,911 deletions.
The full diff is too large to display here; only the first 3,000 changed files are shown.
4 changes: 3 additions & 1 deletion .gitignore
@@ -66,7 +66,7 @@ venv/
/site/
/.cache/

# DQO Examples
# DQOps Examples
.localsettings.dqosettings.yaml
.data/
.logs/
@@ -79,3 +79,5 @@ venv/
/home/bin/.duckdb
/home/bin/v0.10.0

# paid components
/home/dqopspaid/
4 changes: 2 additions & 2 deletions .run/dqo run.run.xml
@@ -5,9 +5,9 @@
<option name="region" />
<option name="useCurrentConnection" value="false" />
</extension>
<option name="JAR_PATH" value="$PROJECT_DIR$/dqops/target/dqo-dqops-1.9.0.jar" />
<option name="JAR_PATH" value="$PROJECT_DIR$/dqops/target/dqo-dqops-1.10.0.jar" />
<option name="VM_PARAMETERS" value="-XX:MaxRAMPercentage=60.0 --add-opens java.base/java.nio=ALL-UNNAMED --add-opens java.base/java.util.concurrent=ALL-UNNAMED" />
<option name="PROGRAM_PARAMETERS" value="--server.port=8888" />
<option name="PROGRAM_PARAMETERS" value="--server.port=8888 --dqo.python.debug-mode=silent" />
<option name="WORKING_DIRECTORY" value="$PROJECT_DIR$" />
<option name="ALTERNATIVE_JRE_PATH" value="temurin-17" />
<envs>
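The updated run configuration above is equivalent to launching the built jar directly. A hedged sketch of that launch using Python's subprocess module; the java executable on PATH and the relative jar location are assumptions, while the VM and program parameters are copied from the run configuration:

# Hypothetical launcher mirroring the updated "dqo run" configuration; not part of the diff.
import subprocess

subprocess.run(
    [
        "java",  # assumes a Temurin 17 JDK is available on PATH
        "-XX:MaxRAMPercentage=60.0",
        "--add-opens", "java.base/java.nio=ALL-UNNAMED",
        "--add-opens", "java.base/java.util.concurrent=ALL-UNNAMED",
        "-jar", "dqops/target/dqo-dqops-1.10.0.jar",
        "--server.port=8888",
        "--dqo.python.debug-mode=silent",
    ],
    check=True,  # raise CalledProcessError if DQOps exits with a non-zero status
)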
2 changes: 1 addition & 1 deletion .run/run app with paid extensions.run.xml
Original file line number Diff line number Diff line change
@@ -13,7 +13,7 @@
</envs>
<option name="MAIN_CLASS_NAME" value="com.dqops.cli.CliApplication" />
<module name="dqo-dqops" />
<option name="PROGRAM_PARAMETERS" value="--server.port=8888 --dqo.webserver.authentication-method=oauth2" />
<option name="PROGRAM_PARAMETERS" value="--server.port=8888 --dqo.webserver.authentication-method=oauth2 --dqo.python.debug-mode=failed" />
<shortenClasspath name="ARGS_FILE" />
<option name="VM_PARAMETERS" value="-XX:MaxRAMPercentage=60.0 --add-opens java.base/java.nio=ALL-UNNAMED --add-opens java.base/java.util.concurrent=ALL-UNNAMED" />
<extension name="software.aws.toolkits.jetbrains.core.execution.JavaAwsConnectionExtension">
22 changes: 10 additions & 12 deletions CHANGELOG.md
@@ -1,13 +1,11 @@
# 1.9.0
# 1.9.1

* Data lineage editor suggests similar source and target tables
* Fixes in the management of tabs
* ClickHouse connector
* Teradata connector
* Small UI fixes
* Performance improvements in the anomaly detection code
* Other performance optimizations
* Run Python rules in parallel to speed up anomaly detection when tables have a different number of enabled checks

* SAP HANA support
* IBM DB2 support
* Improvements to the anomaly rules
* Added the missing UI for data domains
* Incident histogram shows time periods, not the execution day
* Incident notifications fixed to send notifications using a dedicated queue, and to work even when fallback addresses are not configured
* Data lineage flow diagrams
* Table similarity search to build data lineage
* Table and column screens have a new primary tab to show the data observability status, which is a shortened status
* Basic statistics are scheduled by CRON and reuse the profiling schedule
* Connections support configuring a CRON expression to import new tables
2 changes: 1 addition & 1 deletion VERSION
@@ -1 +1 @@
1.9.0
1.10.0
2 changes: 1 addition & 1 deletion distribution/pom.xml
@@ -11,7 +11,7 @@

<groupId>com.dqops</groupId>
<artifactId>dqo-distribution</artifactId>
<version>1.9.0</version> <!-- DQOps Version, do not touch (changed automatically) -->
<version>1.10.0</version> <!-- DQOps Version, do not touch (changed automatically) -->
<name>dqo-distribution</name>
<description>DQOps Data Quality Operations Center final assembly</description>
<packaging>pom</packaging>
36 changes: 36 additions & 0 deletions distribution/python/dqops/client/models/__init__.py
@@ -131,6 +131,8 @@
from .check_template import CheckTemplate
from .check_time_scale import CheckTimeScale
from .check_type import CheckType
from .click_house_parameters_spec import ClickHouseParametersSpec
from .click_house_parameters_spec_properties import ClickHouseParametersSpecProperties
from .cloud_synchronization_folders_status_model import (
CloudSynchronizationFoldersStatusModel,
)
@@ -1094,6 +1096,12 @@
from .column_sql_import_custom_result_sensor_parameters_spec import (
ColumnSqlImportCustomResultSensorParametersSpec,
)
from .column_sql_invalid_value_count_check_spec import (
ColumnSqlInvalidValueCountCheckSpec,
)
from .column_sql_invalid_value_count_sensor_parameters_spec import (
ColumnSqlInvalidValueCountSensorParametersSpec,
)
from .column_statistics_collectors_root_categories_spec import (
ColumnStatisticsCollectorsRootCategoriesSpec,
)
@@ -1552,6 +1560,8 @@
from .json_records_type import JsonRecordsType
from .label_model import LabelModel
from .local_data_domain_model import LocalDataDomainModel
from .maria_db_parameters_spec import MariaDbParametersSpec
from .maria_db_parameters_spec_properties import MariaDbParametersSpecProperties
from .max_count_rule_0_error_parameters_spec import MaxCountRule0ErrorParametersSpec
from .max_count_rule_0_warning_parameters_spec import MaxCountRule0WarningParametersSpec
from .max_count_rule_100_parameters_spec import MaxCountRule100ParametersSpec
@@ -1608,6 +1618,7 @@
from .parquet_file_format_spec import ParquetFileFormatSpec
from .partition_incremental_time_window_spec import PartitionIncrementalTimeWindowSpec
from .physical_table_name import PhysicalTableName
from .postgresql_engine_type import PostgresqlEngineType
from .postgresql_parameters_spec import PostgresqlParametersSpec
from .postgresql_parameters_spec_properties import PostgresqlParametersSpecProperties
from .postgresql_ssl_mode import PostgresqlSslMode
@@ -1623,6 +1634,8 @@
from .provider_sensor_runner_type import ProviderSensorRunnerType
from .provider_type import ProviderType
from .quality_category_model import QualityCategoryModel
from .quest_db_parameters_spec import QuestDbParametersSpec
from .quest_db_parameters_spec_properties import QuestDbParametersSpecProperties
from .redshift_authentication_mode import RedshiftAuthenticationMode
from .redshift_parameters_spec import RedshiftParametersSpec
from .redshift_parameters_spec_properties import RedshiftParametersSpecProperties
@@ -1643,6 +1656,7 @@
from .run_checks_parameters import RunChecksParameters
from .run_checks_queue_job_result import RunChecksQueueJobResult
from .run_checks_result import RunChecksResult
from .run_checks_target import RunChecksTarget
from .schedule_enabled_status_model import ScheduleEnabledStatusModel
from .schema_model import SchemaModel
from .schema_remote_model import SchemaRemoteModel
@@ -1986,6 +2000,12 @@
from .table_sql_import_custom_result_sensor_parameters_spec import (
TableSqlImportCustomResultSensorParametersSpec,
)
from .table_sql_invalid_record_count_check_spec import (
TableSqlInvalidRecordCountCheckSpec,
)
from .table_sql_invalid_record_count_sensor_parameters_spec import (
TableSqlInvalidRecordCountSensorParametersSpec,
)
from .table_statistics_collectors_root_categories_spec import (
TableStatisticsCollectorsRootCategoriesSpec,
)
@@ -2097,6 +2117,8 @@
from .target_rule_severity_level import TargetRuleSeverityLevel
from .target_table_pattern_spec import TargetTablePatternSpec
from .temporal_unit import TemporalUnit
from .teradata_parameters_spec import TeradataParametersSpec
from .teradata_parameters_spec_properties import TeradataParametersSpecProperties
from .text_built_in_date_formats import TextBuiltInDateFormats
from .time_period_gradient import TimePeriodGradient
from .time_window_filter_parameters import TimeWindowFilterParameters
@@ -2181,6 +2203,8 @@
"CheckTemplate",
"CheckTimeScale",
"CheckType",
"ClickHouseParametersSpec",
"ClickHouseParametersSpecProperties",
"CloudSynchronizationFoldersStatusModel",
"CollectErrorSamplesOnTableParameters",
"CollectErrorSamplesParameters",
@@ -2554,6 +2578,8 @@
"ColumnSqlConditionPassedPercentSensorParametersSpec",
"ColumnSqlImportCustomResultCheckSpec",
"ColumnSqlImportCustomResultSensorParametersSpec",
"ColumnSqlInvalidValueCountCheckSpec",
"ColumnSqlInvalidValueCountSensorParametersSpec",
"ColumnStatisticsCollectorsRootCategoriesSpec",
"ColumnStatisticsModel",
"ColumnStringsExpectedTextsInTopValuesCountSensorParametersSpec",
@@ -2796,6 +2822,8 @@
"JsonRecordsType",
"LabelModel",
"LocalDataDomainModel",
"MariaDbParametersSpec",
"MariaDbParametersSpecProperties",
"MaxCountRule0ErrorParametersSpec",
"MaxCountRule0WarningParametersSpec",
"MaxCountRule100ParametersSpec",
@@ -2840,6 +2868,7 @@
"ParquetFileFormatSpec",
"PartitionIncrementalTimeWindowSpec",
"PhysicalTableName",
"PostgresqlEngineType",
"PostgresqlParametersSpec",
"PostgresqlParametersSpecProperties",
"PostgresqlSslMode",
@@ -2853,6 +2882,8 @@
"ProviderSensorRunnerType",
"ProviderType",
"QualityCategoryModel",
"QuestDbParametersSpec",
"QuestDbParametersSpecProperties",
"RedshiftAuthenticationMode",
"RedshiftParametersSpec",
"RedshiftParametersSpecProperties",
@@ -2873,6 +2904,7 @@
"RunChecksParameters",
"RunChecksQueueJobResult",
"RunChecksResult",
"RunChecksTarget",
"ScheduleEnabledStatusModel",
"SchemaModel",
"SchemaRemoteModel",
@@ -3042,6 +3074,8 @@
"TableSqlConditionPassedPercentSensorParametersSpec",
"TableSqlImportCustomResultCheckSpec",
"TableSqlImportCustomResultSensorParametersSpec",
"TableSqlInvalidRecordCountCheckSpec",
"TableSqlInvalidRecordCountSensorParametersSpec",
"TableStatisticsCollectorsRootCategoriesSpec",
"TableStatisticsModel",
"TableTimelinessDailyMonitoringChecksSpec",
@@ -3085,6 +3119,8 @@
"TargetRuleSeverityLevel",
"TargetTablePatternSpec",
"TemporalUnit",
"TeradataParametersSpec",
"TeradataParametersSpecProperties",
"TextBuiltInDateFormats",
"TimePeriodGradient",
"TimestampColumnsSpec",
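All of the new model classes above are re-exported from the models package. A minimal import sketch, assuming the generated client is installed as the dqops package (the path follows the file location distribution/python/dqops/client/models):

# Smoke-test a few of the new 1.10.0 exports; each name appears in the __init__.py diff above.
from dqops.client.models import (
    ClickHouseParametersSpec,
    MariaDbParametersSpec,
    PostgresqlEngineType,
    QuestDbParametersSpec,
    RunChecksTarget,
    TeradataParametersSpec,
)

for model in (ClickHouseParametersSpec, MariaDbParametersSpec, QuestDbParametersSpec,
              TeradataParametersSpec, PostgresqlEngineType, RunChecksTarget):
    print(model.__name__)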
@@ -17,19 +17,26 @@ class AnomalyDifferencingPercentileMovingAverageRuleError05PctParametersSpec:
anomaly_percent (Union[Unset, float]): The probability (in percent) that the current sensor readout (measure) is
an anomaly, because the value is outside the regular range of previous readouts. The default time window of 90
time periods (days, etc.) is used, but at least 30 readouts must exist to run the calculation.
use_ai (Union[Unset, bool]): Use an AI model to predict anomalies. WARNING: anomaly detection by AI models is
not supported in an open-source distribution of DQOps. Please contact DQOps support to upgrade your instance to
a closed-source DQOps distribution.
"""

anomaly_percent: Union[Unset, float] = UNSET
use_ai: Union[Unset, bool] = UNSET
additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

def to_dict(self) -> Dict[str, Any]:
anomaly_percent = self.anomaly_percent
use_ai = self.use_ai

field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
field_dict.update({})
if anomaly_percent is not UNSET:
field_dict["anomaly_percent"] = anomaly_percent
if use_ai is not UNSET:
field_dict["use_ai"] = use_ai

return field_dict

@@ -38,8 +45,11 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
d = src_dict.copy()
anomaly_percent = d.pop("anomaly_percent", UNSET)

use_ai = d.pop("use_ai", UNSET)

anomaly_differencing_percentile_moving_average_rule_error_05_pct_parameters_spec = cls(
anomaly_percent=anomaly_percent,
use_ai=use_ai,
)

anomaly_differencing_percentile_moving_average_rule_error_05_pct_parameters_spec.additional_properties = (
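A short usage sketch for the new use_ai field added to the rule parameters class above. The re-export from dqops.client.models is an assumption based on the package convention; the to_dict and from_dict behaviour follows the generated code in the diff:

from dqops.client.models import (  # assumed re-export; the module itself is shown in the diff
    AnomalyDifferencingPercentileMovingAverageRuleError05PctParametersSpec,
)

spec = AnomalyDifferencingPercentileMovingAverageRuleError05PctParametersSpec(
    anomaly_percent=0.5,
    use_ai=False,  # AI-based anomaly detection requires a paid DQOps distribution
)

as_dict = spec.to_dict()
assert as_dict == {"anomaly_percent": 0.5, "use_ai": False}

restored = AnomalyDifferencingPercentileMovingAverageRuleError05PctParametersSpec.from_dict(as_dict)
assert restored.use_ai is False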
@@ -17,19 +17,26 @@ class AnomalyDifferencingPercentileMovingAverageRuleFatal01PctParametersSpec:
anomaly_percent (Union[Unset, float]): The probability (in percent) that the current sensor readout (measure) is
an anomaly, because the value is outside the regular range of previous readouts. The default time window of 90
time periods (days, etc.) is used, but at least 30 readouts must exist to run the calculation.
use_ai (Union[Unset, bool]): Use an AI model to predict anomalies. WARNING: anomaly detection by AI models is
not supported in an open-source distribution of DQOps. Please contact DQOps support to upgrade your instance to
a closed-source DQOps distribution.
"""

anomaly_percent: Union[Unset, float] = UNSET
use_ai: Union[Unset, bool] = UNSET
additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

def to_dict(self) -> Dict[str, Any]:
anomaly_percent = self.anomaly_percent
use_ai = self.use_ai

field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
field_dict.update({})
if anomaly_percent is not UNSET:
field_dict["anomaly_percent"] = anomaly_percent
if use_ai is not UNSET:
field_dict["use_ai"] = use_ai

return field_dict

@@ -38,8 +45,11 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
d = src_dict.copy()
anomaly_percent = d.pop("anomaly_percent", UNSET)

use_ai = d.pop("use_ai", UNSET)

anomaly_differencing_percentile_moving_average_rule_fatal_01_pct_parameters_spec = cls(
anomaly_percent=anomaly_percent,
use_ai=use_ai,
)

anomaly_differencing_percentile_moving_average_rule_fatal_01_pct_parameters_spec.additional_properties = (
@@ -17,19 +17,26 @@ class AnomalyDifferencingPercentileMovingAverageRuleWarning1PctParametersSpec:
anomaly_percent (Union[Unset, float]): The probability (in percent) that the current sensor readout (measure) is
an anomaly, because the value is outside the regular range of previous readouts. The default time window of 90
time periods (days, etc.) is used, but at least 30 readouts must exist to run the calculation.
use_ai (Union[Unset, bool]): Use an AI model to predict anomalies. WARNING: anomaly detection by AI models is
not supported in an open-source distribution of DQOps. Please contact DQOps support to upgrade your instance to
a closed-source DQOps distribution.
"""

anomaly_percent: Union[Unset, float] = UNSET
use_ai: Union[Unset, bool] = UNSET
additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

def to_dict(self) -> Dict[str, Any]:
anomaly_percent = self.anomaly_percent
use_ai = self.use_ai

field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
field_dict.update({})
if anomaly_percent is not UNSET:
field_dict["anomaly_percent"] = anomaly_percent
if use_ai is not UNSET:
field_dict["use_ai"] = use_ai

return field_dict

@@ -38,8 +45,11 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
d = src_dict.copy()
anomaly_percent = d.pop("anomaly_percent", UNSET)

use_ai = d.pop("use_ai", UNSET)

anomaly_differencing_percentile_moving_average_rule_warning_1_pct_parameters_spec = cls(
anomaly_percent=anomaly_percent,
use_ai=use_ai,
)

anomaly_differencing_percentile_moving_average_rule_warning_1_pct_parameters_spec.additional_properties = (
@@ -15,19 +15,26 @@ class AnomalyPartitionRowCountRuleError05PctParametersSpec:
anomaly_percent (Union[Unset, float]): The probability (in percent) that the current daily row count is an
anomaly because the value is outside the regular range of previous partition volume measures. The default time
window of 90 time periods (days, etc.) is used, but at least 30 readouts must exist to run the calculation.
use_ai (Union[Unset, bool]): Use an AI model to predict anomalies. WARNING: anomaly detection by AI models is
not supported in an open-source distribution of DQOps. Please contact DQOps support to upgrade your instance to
a closed-source DQOps distribution.
"""

anomaly_percent: Union[Unset, float] = UNSET
use_ai: Union[Unset, bool] = UNSET
additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

def to_dict(self) -> Dict[str, Any]:
anomaly_percent = self.anomaly_percent
use_ai = self.use_ai

field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
field_dict.update({})
if anomaly_percent is not UNSET:
field_dict["anomaly_percent"] = anomaly_percent
if use_ai is not UNSET:
field_dict["use_ai"] = use_ai

return field_dict

@@ -36,8 +43,11 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
d = src_dict.copy()
anomaly_percent = d.pop("anomaly_percent", UNSET)

use_ai = d.pop("use_ai", UNSET)

anomaly_partition_row_count_rule_error_05_pct_parameters_spec = cls(
anomaly_percent=anomaly_percent,
use_ai=use_ai,
)

anomaly_partition_row_count_rule_error_05_pct_parameters_spec.additional_properties = (