More comments on types and lint.
VersusFacit committed Jun 13, 2024
1 parent d11707e commit b4f2150
Showing 1 changed file with 8 additions and 4 deletions.
12 changes: 8 additions & 4 deletions dbt/adapters/spark/impl.py
@@ -136,6 +136,8 @@ def convert_text_type(cls, agate_table: "agate.Table", col_idx: int) -> str:

     @classmethod
     def convert_number_type(cls, agate_table: "agate.Table", col_idx: int) -> str:
+        import agate
+
         decimals = agate_table.aggregate(agate.MaxPrecision(col_idx))
         return "double" if decimals else "bigint"

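Note on the pattern this hunk applies: the annotation stays the string "agate.Table", so agate is not needed when the module is imported, and the runtime import moves inside the method where agate is actually used. A minimal, self-contained sketch of that pattern follows; the ExampleAdapter class name and the TYPE_CHECKING guard are illustrative assumptions, not code from this commit.

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # Seen only by type checkers, never executed, so importing this module
    # does not pull in agate at runtime.
    import agate


class ExampleAdapter:
    @classmethod
    def convert_number_type(cls, agate_table: "agate.Table", col_idx: int) -> str:
        # Deferred runtime import, mirroring the hunk above: the cost is paid
        # only when the method runs against a real agate table.
        import agate

        decimals = agate_table.aggregate(agate.MaxPrecision(col_idx))
        return "double" if decimals else "bigint"

The quoted annotation keeps type checkers satisfied without a module-level import, which is what the remaining hunks below do for agate.Row as well.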
@@ -158,7 +160,7 @@ def convert_datetime_type(cls, agate_table: "agate.Table", col_idx: int) -> str:
     def quote(self, identifier: str) -> str:
         return "`{}`".format(identifier)

-    def _get_relation_information(self, row: agate.Row) -> RelationInfo:
+    def _get_relation_information(self, row: "agate.Row") -> RelationInfo:
         """relation info was fetched with SHOW TABLES EXTENDED"""
         try:
             _schema, name, _, information = row
@@ -169,7 +171,7 @@ def _get_relation_information(self, row: agate.Row) -> RelationInfo:

         return _schema, name, information

-    def _get_relation_information_using_describe(self, row: agate.Row) -> RelationInfo:
+    def _get_relation_information_using_describe(self, row: "agate.Row") -> RelationInfo:
         """Relation info fetched using SHOW TABLES and an auxiliary DESCRIBE statement"""
         try:
             _schema, name, _ = row
@@ -198,7 +200,7 @@ def _get_relation_information_using_describe(self, row: agate.Row) -> RelationInfo:
     def _build_spark_relation_list(
         self,
         row_list: "agate.Table",
-        relation_info_func: Callable[[agate.Row], RelationInfo],
+        relation_info_func: Callable[["agate.Row"], RelationInfo],
     ) -> List[BaseRelation]:
         """Aggregate relations with format metadata included."""
         relations = []
@@ -404,7 +406,6 @@ def _get_one_catalog(
         schemas: Set[str],
         used_schemas: FrozenSet[Tuple[str, str]],
     ) -> "agate.Table":
-        import agate
         if len(schemas) != 1:
             raise CompilationError(
                 f"Expected only one schema in spark _get_one_catalog, found " f"{schemas}"
@@ -417,6 +418,9 @@
         for relation in self.list_relations(database, schema):
             logger.debug("Getting table schema for relation {}", str(relation))
             columns.extend(self._get_columns_for_catalog(relation))
+
+        import agate
+
         return agate.Table.from_object(columns, column_types=DEFAULT_TYPE_TESTER)

     def check_schema_exists(self, database: str, schema: str) -> bool:
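The last two hunks follow the same idea at a larger scale: the import agate at the top of _get_one_catalog's body is dropped and reappears directly above agate.Table.from_object, the only call in the method that needs agate at runtime. A rough sketch of the resulting shape, with an assumed RelationInfo tuple and illustrative helper names (collect_relation_info, dicts_to_table) that are not the adapter's API:

from typing import TYPE_CHECKING, Callable, List, Tuple

if TYPE_CHECKING:
    import agate  # resolved by the type checker only; no runtime import

# Assumed tuple shape for illustration: (schema, name, information).
RelationInfo = Tuple[str, str, str]


def collect_relation_info(
    row_list: "agate.Table",
    relation_info_func: Callable[["agate.Row"], RelationInfo],
) -> List[RelationInfo]:
    # The quoted annotations are never evaluated at runtime, so this function
    # needs no agate import at module load time.
    return [relation_info_func(row) for row in row_list.rows]


def dicts_to_table(records: List[dict]) -> "agate.Table":
    # Deferred import placed next to the one call that needs agate, mirroring
    # the _get_one_catalog hunk above.
    import agate

    return agate.Table.from_object(records)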
