Commit

Merge branch 'main' into fix-require-partition-error

tnk-ysk authored Feb 16, 2024
2 parents df6624e + 62a8f80 commit 720e1b0
Showing 32 changed files with 548 additions and 407 deletions.
6 changes: 6 additions & 0 deletions .changes/unreleased/Dependencies-20240124-120321.yaml
@@ -0,0 +1,6 @@
+kind: Dependencies
+body: get dbt-tests-adapters from dbt-adapters repo
+time: 2024-01-24T12:03:21.523295-08:00
+custom:
+  Author: colin-rogers-dbt
+  PR: "1077"
6 changes: 6 additions & 0 deletions .changes/unreleased/Features-20231218-155409.yaml
@@ -0,0 +1,6 @@
+kind: Features
+body: Add support for checking table-last-modified by metadata
+time: 2023-12-18T15:54:09.69635-05:00
+custom:
+  Author: mikealfare
+  Issue: "938"
6 changes: 6 additions & 0 deletions .changes/unreleased/Features-20231219-201203.yaml
@@ -0,0 +1,6 @@
+kind: Features
+body: Support limiting get_catalog by object name
+time: 2023-12-19T20:12:03.990725-05:00
+custom:
+  Author: mikealfare
+  Issue: "950"
7 changes: 7 additions & 0 deletions .changes/unreleased/Features-20240205-174614.yaml
@@ -0,0 +1,7 @@
+kind: Features
+body: Support all types for unit testing in dbt-bigquery, expand coverage of
+  safe_cast macro
+time: 2024-02-05T17:46:14.505597-05:00
+custom:
+  Author: michelleark
+  Issue: "1090"
6 changes: 6 additions & 0 deletions .changes/unreleased/Under the Hood-20231116-062142.yaml
@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: Primary and foreign key constraints are not enforced in BigQuery
+time: 2023-11-16T06:21:42.935367-08:00
+custom:
+  Author: dbeatty10
+  Issue: "1018"
6 changes: 6 additions & 0 deletions .changes/unreleased/Under the Hood-20240116-154305.yaml
@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: Migrate to dbt-common and dbt-adapters package
+time: 2024-01-16T15:43:05.046735-08:00
+custom:
+  Author: colin-rogers-dbt
+  Issue: "1071"
28 changes: 0 additions & 28 deletions .github/workflows/jira-creation.yml

This file was deleted.

28 changes: 0 additions & 28 deletions .github/workflows/jira-label.yml

This file was deleted.

29 changes: 0 additions & 29 deletions .github/workflows/jira-transition.yml

This file was deleted.

18 changes: 9 additions & 9 deletions dbt/adapters/bigquery/connections.py
@@ -5,9 +5,9 @@
 from contextlib import contextmanager
 from dataclasses import dataclass, field

-from dbt.common.invocation import get_invocation_id
+from dbt_common.invocation import get_invocation_id

-from dbt.common.events.contextvars import get_node_info
+from dbt_common.events.contextvars import get_node_info
 from mashumaro.helper import pass_through

 from functools import lru_cache
@@ -27,21 +27,21 @@
 )

 from dbt.adapters.bigquery import gcloud
-from dbt.common.clients import agate_helper
-from dbt.adapters.contracts.connection import ConnectionState, AdapterResponse
-from dbt.common.exceptions import (
+from dbt_common.clients import agate_helper
+from dbt.adapters.contracts.connection import ConnectionState, AdapterResponse, Credentials
+from dbt_common.exceptions import (
     DbtRuntimeError,
     DbtConfigError,
 )
-from dbt.common.exceptions import DbtDatabaseError
+from dbt_common.exceptions import DbtDatabaseError
 from dbt.adapters.exceptions.connection import FailedToConnectError
-from dbt.adapters.base import BaseConnectionManager, Credentials
+from dbt.adapters.base import BaseConnectionManager
 from dbt.adapters.events.logging import AdapterLogger
 from dbt.adapters.events.types import SQLQuery
-from dbt.common.events.functions import fire_event
+from dbt_common.events.functions import fire_event
 from dbt.adapters.bigquery import __version__ as dbt_version

-from dbt.common.dataclass_schema import ExtensibleDbtClassMixin, StrEnum
+from dbt_common.dataclass_schema import ExtensibleDbtClassMixin, StrEnum

 logger = AdapterLogger("BigQuery")
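Most of the churn in this commit is a single mechanical rename: utilities that used to live under dbt-core's dbt.common namespace now come from the standalone dbt_common package (per the "Migrate to dbt-common and dbt-adapters package" changelog entry above). This hunk also moves Credentials out of dbt.adapters.base and into dbt.adapters.contracts.connection. A before/after sketch of the import pattern, taken directly from the lines above:

# Before: vendored inside dbt-core
# from dbt.common.exceptions import DbtRuntimeError
# from dbt.adapters.base import Credentials

# After: standalone dbt-common / dbt-adapters packages
from dbt_common.exceptions import DbtRuntimeError
from dbt.adapters.contracts.connection import Credentials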
7 changes: 4 additions & 3 deletions dbt/adapters/bigquery/gcloud.py
@@ -1,6 +1,7 @@
+from dbt_common.exceptions import DbtRuntimeError
+
 from dbt.adapters.events.logging import AdapterLogger
-import dbt.common.exceptions
-from dbt.common.clients.system import run_cmd
+from dbt_common.clients.system import run_cmd

 NOT_INSTALLED_MSG = """
 dbt requires the gcloud SDK to be installed to authenticate with BigQuery.
@@ -25,4 +26,4 @@ def setup_default_credentials():
     if gcloud_installed():
         run_cmd(".", ["gcloud", "auth", "application-default", "login"])
     else:
-        raise dbt.common.exceptions.DbtRuntimeError(NOT_INSTALLED_MSG)
+        raise DbtRuntimeError(NOT_INSTALLED_MSG)
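Only the else branch changes here, but it clarifies the module's flow: setup_default_credentials shells out to the gcloud CLI to mint application-default credentials and raises if the SDK is absent. The gcloud_installed helper sits above this hunk and is not shown; a minimal sketch of what such a probe could look like, assuming only that run_cmd raises when the binary cannot be launched (the real helper may differ):

from dbt_common.clients.system import run_cmd

def gcloud_installed() -> bool:
    # Probe for the gcloud CLI; any failure to launch it is treated as
    # "not installed". (Assumption: run_cmd raises when the binary is absent.)
    try:
        run_cmd(".", ["gcloud", "--version"])
        return True
    except Exception:
        return False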
66 changes: 49 additions & 17 deletions dbt/adapters/bigquery/impl.py
@@ -1,4 +1,5 @@
 from dataclasses import dataclass
+from datetime import datetime
 import json
 import threading
 from multiprocessing.context import SpawnContext
@@ -9,7 +10,7 @@
 import agate
 from dbt.adapters.contracts.relation import RelationConfig

-import dbt.common.exceptions.base
+import dbt_common.exceptions.base
 from dbt.adapters.base import (  # type: ignore
     AdapterConfig,
     BaseAdapter,
@@ -20,22 +21,26 @@
     SchemaSearchMap,
     available,
 )
+from dbt.adapters.base.impl import FreshnessResponse
 from dbt.adapters.cache import _make_ref_key_dict  # type: ignore
-import dbt.common.clients.agate_helper
+from dbt.adapters.capability import Capability, CapabilityDict, CapabilitySupport, Support
+import dbt_common.clients.agate_helper
 from dbt.adapters.contracts.connection import AdapterResponse
-from dbt.common.contracts.constraints import ColumnLevelConstraint, ConstraintType, ModelLevelConstraint  # type: ignore
-from dbt.common.dataclass_schema import dbtClassMixin
+from dbt.adapters.contracts.macros import MacroResolverProtocol
+from dbt_common.contracts.constraints import ColumnLevelConstraint, ConstraintType, ModelLevelConstraint  # type: ignore
+from dbt_common.dataclass_schema import dbtClassMixin
 from dbt.adapters.events.logging import AdapterLogger
-from dbt.common.events.functions import fire_event
+from dbt_common.events.functions import fire_event
 from dbt.adapters.events.types import SchemaCreation, SchemaDrop
-import dbt.common.exceptions
-from dbt.common.utils import filter_null_values
+import dbt_common.exceptions
+from dbt_common.utils import filter_null_values
 import google.api_core
 import google.auth
 import google.oauth2
 import google.cloud.bigquery
 from google.cloud.bigquery import AccessEntry, SchemaField, Table as BigQueryTable
 import google.cloud.exceptions
+import pytz

 from dbt.adapters.bigquery import BigQueryColumn, BigQueryConnectionManager
 from dbt.adapters.bigquery.column import get_nested_column_data_types
@@ -114,10 +119,17 @@ class BigQueryAdapter(BaseAdapter):
         ConstraintType.check: ConstraintSupport.NOT_SUPPORTED,
         ConstraintType.not_null: ConstraintSupport.ENFORCED,
         ConstraintType.unique: ConstraintSupport.NOT_SUPPORTED,
-        ConstraintType.primary_key: ConstraintSupport.ENFORCED,
-        ConstraintType.foreign_key: ConstraintSupport.ENFORCED,
+        ConstraintType.primary_key: ConstraintSupport.NOT_ENFORCED,
+        ConstraintType.foreign_key: ConstraintSupport.NOT_ENFORCED,
     }

+    _capabilities: CapabilityDict = CapabilityDict(
+        {
+            Capability.TableLastModifiedMetadata: CapabilitySupport(support=Support.Full),
+            Capability.SchemaMetadataByRelations: CapabilitySupport(support=Support.Full),
+        }
+    )
+
     def __init__(self, config, mp_context: SpawnContext) -> None:
         super().__init__(config, mp_context)
         self.connections: BigQueryConnectionManager = self.connections
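Two behavioral notes fall out of this hunk. First, BigQuery's primary and foreign key constraints are informational only, so the adapter now reports them as NOT_ENFORCED rather than ENFORCED (matching the "Primary and foreign key constraints are not enforced in BigQuery" changelog entry above). Second, the new _capabilities mapping advertises the two metadata features this merge adds. A sketch of how calling code can consult that mapping, assuming the standard BaseAdapter.supports() classmethod from dbt-adapters:

from dbt.adapters.capability import Capability
from dbt.adapters.bigquery.impl import BigQueryAdapter

# Sketch: core code gates metadata-based source freshness on this check.
if BigQueryAdapter.supports(Capability.TableLastModifiedMetadata):
    print("freshness can be computed from table metadata, no loaded_at_field needed")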
@@ -147,7 +159,7 @@ def drop_relation(self, relation: BigQueryRelation) -> None:
         conn.handle.delete_table(table_ref, not_found_ok=True)

     def truncate_relation(self, relation: BigQueryRelation) -> None:
-        raise dbt.common.exceptions.base.NotImplementedError(
+        raise dbt_common.exceptions.base.NotImplementedError(
             "`truncate` is not implemented for this adapter!"
         )

@@ -164,7 +176,7 @@ def rename_relation(
             or from_relation.type == RelationType.View
             or to_relation.type == RelationType.View
         ):
-            raise dbt.common.exceptions.DbtRuntimeError(
+            raise dbt_common.exceptions.DbtRuntimeError(
                 "Renaming of views is not currently supported in BigQuery"
             )

@@ -390,7 +402,7 @@ def copy_table(self, source, destination, materialization):
         elif materialization == "table":
             write_disposition = WRITE_TRUNCATE
         else:
-            raise dbt.common.exceptions.CompilationError(
+            raise dbt_common.exceptions.CompilationError(
                 'Copy table materialization must be "copy" or "table", but '
                 f"config.get('copy_materialization', 'table') was "
                 f"{materialization}"
@@ -437,11 +449,11 @@ def poll_until_job_completes(cls, job, timeout):
             job.reload()

         if job.state != "DONE":
-            raise dbt.common.exceptions.DbtRuntimeError("BigQuery Timeout Exceeded")
+            raise dbt_common.exceptions.DbtRuntimeError("BigQuery Timeout Exceeded")

         elif job.error_result:
             message = "\n".join(error["message"].strip() for error in job.errors)
-            raise dbt.common.exceptions.DbtRuntimeError(message)
+            raise dbt_common.exceptions.DbtRuntimeError(message)

     def _bq_table_to_relation(self, bq_table) -> Union[BigQueryRelation, None]:
         if bq_table is None:
@@ -465,7 +477,7 @@ def add_query(self, sql, auto_begin=True, bindings=None, abridge_sql_log=False):
         if self.nice_connection_name() in ["on-run-start", "on-run-end"]:
             self.warning_on_hooks(self.nice_connection_name())
         else:
-            raise dbt.common.exceptions.base.NotImplementedError(
+            raise dbt_common.exceptions.base.NotImplementedError(
                 "`add_query` is not implemented for this adapter!"
             )

@@ -709,6 +721,26 @@ def _get_catalog_schemas(self, relation_config: Iterable[RelationConfig]) -> Sch
             )
         return result

+    def calculate_freshness_from_metadata(
+        self,
+        source: BaseRelation,
+        macro_resolver: Optional[MacroResolverProtocol] = None,
+    ) -> Tuple[Optional[AdapterResponse], FreshnessResponse]:
+        conn = self.connections.get_thread_connection()
+        client: google.cloud.bigquery.Client = conn.handle
+
+        table_ref = self.get_table_ref_from_relation(source)
+        table = client.get_table(table_ref)
+        snapshot = datetime.now(tz=pytz.UTC)
+
+        freshness = FreshnessResponse(
+            max_loaded_at=table.modified,
+            snapshotted_at=snapshot,
+            age=(snapshot - table.modified).total_seconds(),
+        )
+
+        return None, freshness
+
     @available.parse(lambda *a, **k: {})
     def get_common_options(
         self, config: Dict[str, Any], node: Dict[str, Any], temporary: bool = False
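This new method implements the "table-last-modified by metadata" feature from the changelog: instead of querying a loaded_at_field, it reads the table's modified timestamp straight from BigQuery table metadata. Stripped of the adapter plumbing, the underlying google-cloud-bigquery calls look roughly like this (hypothetical project, dataset, and table names):

from datetime import datetime

import google.cloud.bigquery
import pytz

client = google.cloud.bigquery.Client()

# Hypothetical fully-qualified table name.
table = client.get_table("my-project.my_dataset.my_table")
snapshot = datetime.now(tz=pytz.UTC)

# table.modified is a timezone-aware datetime maintained by BigQuery,
# so no query against the table itself is needed.
age_seconds = (snapshot - table.modified).total_seconds()
print(f"last modified {age_seconds:.0f}s ago")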
@@ -777,7 +809,7 @@ def describe_relation(
             bq_table = self.get_bq_table(relation)
             parser = BigQueryMaterializedViewConfig
         else:
-            raise dbt.common.exceptions.DbtRuntimeError(
+            raise dbt_common.exceptions.DbtRuntimeError(
                 f"The method `BigQueryAdapter.describe_relation` is not implemented "
                 f"for the relation type: {relation.type}"
             )
@@ -843,7 +875,7 @@ def string_add_sql(
         elif location == "prepend":
             return f"concat('{value}', {add_to})"
         else:
-            raise dbt.common.exceptions.DbtRuntimeError(
+            raise dbt_common.exceptions.DbtRuntimeError(
                 f'Got an unexpected location value of "{location}"'
             )
4 changes: 2 additions & 2 deletions dbt/adapters/bigquery/relation.py
@@ -12,8 +12,8 @@
     BigQueryPartitionConfigChange,
 )
 from dbt.adapters.contracts.relation import RelationType, RelationConfig
-from dbt.common.exceptions import CompilationError
-from dbt.common.utils.dict import filter_null_values
+from dbt_common.exceptions import CompilationError
+from dbt_common.utils.dict import filter_null_values


 Self = TypeVar("Self", bound="BigQueryRelation")
4 changes: 2 additions & 2 deletions dbt/adapters/bigquery/relation_configs/_materialized_view.py
@@ -75,10 +75,10 @@ def parse_relation_config(cls, relation_config: RelationConfig) -> Dict[str, Any
         }

         # optional
-        if "partition_by" in relation_config.config:
+        if relation_config.config and "partition_by" in relation_config.config:
             config_dict.update({"partition": PartitionConfig.parse_model_node(relation_config)})

-        if "cluster_by" in relation_config.config:
+        if relation_config.config and "cluster_by" in relation_config.config:
             config_dict.update(
                 {"cluster": BigQueryClusterConfig.parse_relation_config(relation_config)}
             )
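This is the fix the fix-require-partition-error branch is named for: relation_config.config can be None, and testing "partition_by" in None raises TypeError ("argument of type 'NoneType' is not iterable"), so both lookups now guard on the config being truthy first. A standalone illustration of the failure mode and the guard (hypothetical values, not dbt code):

from typing import Optional

def wants_partition(config: Optional[dict]) -> bool:
    # Guard first: `"partition_by" in None` raises TypeError.
    return bool(config) and "partition_by" in config

assert wants_partition({"partition_by": {"field": "created_at"}})
assert not wants_partition(None)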