Skip to content

Commit

Permalink
remove holograms and supply defaults/validation logic (#880)
Browse files Browse the repository at this point in the history
* remove holograms and supply defaults/validation logic

* add changie

* mypy fixes
  • Loading branch information
colin-rogers-dbt authored Aug 31, 2023
1 parent 91f9887 commit 01c9fd0
Show file tree
Hide file tree
Showing 3 changed files with 21 additions and 8 deletions.
6 changes: 6 additions & 0 deletions .changes/unreleased/Under the Hood-20230830-160616.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
# Changie changelog entry: records the removal of the `hologram` dependency.
# `kind` must match a category configured in the repo's .changie.yaml.
kind: Under the Hood
body: Remove dependency on hologram
# Timestamp generated by `changie new`; used for ordering entries in the release notes.
time: 2023-08-30T16:06:16.444881-07:00
custom:
  Author: colin-rogers-dbt
  # Issue number is quoted so YAML keeps it as a string, not an int.
  Issue: "881"
5 changes: 2 additions & 3 deletions dbt/adapters/spark/column.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,13 +3,12 @@

from dbt.adapters.base.column import Column
from dbt.dataclass_schema import dbtClassMixin
from hologram import JsonDict

Self = TypeVar("Self", bound="SparkColumn")


@dataclass
class SparkColumn(dbtClassMixin, Column): # type: ignore
class SparkColumn(dbtClassMixin, Column):
table_database: Optional[str] = None
table_schema: Optional[str] = None
table_name: Optional[str] = None
Expand Down Expand Up @@ -63,7 +62,7 @@ def convert_table_stats(raw_stats: Optional[str]) -> Dict[str, Any]:
table_stats[f"stats:{key}:include"] = True
return table_stats

def to_column_dict(self, omit_none: bool = True, validate: bool = False) -> JsonDict:
def to_column_dict(self, omit_none: bool = True, validate: bool = False) -> Dict[str, Any]:
original_dict = self.to_dict(omit_none=omit_none)
# If there are stats, merge them into the root of the dict
original_stats = original_dict.pop("table_stats", None)
Expand Down
18 changes: 13 additions & 5 deletions dbt/adapters/spark/connections.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@
from datetime import datetime
import sqlparams
from dbt.contracts.connection import Connection
from hologram.helpers import StrEnum
from dbt.dataclass_schema import StrEnum
from dataclasses import dataclass, field
from typing import Any, Dict, Optional, Union, Tuple, List, Generator, Iterable, Sequence

Expand Down Expand Up @@ -59,9 +59,10 @@ class SparkConnectionMethod(StrEnum):

@dataclass
class SparkCredentials(Credentials):
host: str
method: SparkConnectionMethod
database: Optional[str] # type: ignore
host: Optional[str] = None
schema: Optional[str] = None # type: ignore
method: SparkConnectionMethod = None # type: ignore
database: Optional[str] = None # type: ignore
driver: Optional[str] = None
cluster: Optional[str] = None
endpoint: Optional[str] = None
Expand Down Expand Up @@ -90,6 +91,13 @@ def cluster_id(self) -> Optional[str]:
return self.cluster

def __post_init__(self) -> None:
if self.method is None:
raise dbt.exceptions.DbtRuntimeError("Must specify `method` in profile")
if self.host is None:
raise dbt.exceptions.DbtRuntimeError("Must specify `host` in profile")
if self.schema is None:
raise dbt.exceptions.DbtRuntimeError("Must specify `schema` in profile")

# spark classifies database and schema as the same thing
if self.database is not None and self.database != self.schema:
raise dbt.exceptions.DbtRuntimeError(
Expand Down Expand Up @@ -154,7 +162,7 @@ def type(self) -> str:

@property
def unique_field(self) -> str:
    # Used by dbt core to anonymously group telemetry by connection target;
    # the host string is the stable identifier for a Spark connection.
    # NOTE(review): the two `return` lines below are a diff-rendering artifact —
    # the first is the removed line, the second (with `# type: ignore`) is the
    # added line needed because `host` became Optional[str] in this commit.
    # Only one should exist in the real file; confirm against the repository.
    return self.host
    return self.host  # type: ignore

def _connection_keys(self) -> Tuple[str, ...]:
return "host", "port", "cluster", "endpoint", "schema", "organization"
Expand Down

0 comments on commit 01c9fd0

Please sign in to comment.